Crate-ify and delete unused code in syntax::parse
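The change is almost entirely about visibility: lexer items that were `pub` become private or `crate`-visible. As context (an illustrative sketch with made-up field names, not code from this commit), the `crate` modifier seen in the diff is the then-nightly shorthand for `pub(crate)` (feature `crate_visibility_modifier`); the stable spelling below behaves the same way:

// Illustrative sketch of crate-level visibility; names are hypothetical.
// In the diff below, `crate fn` / `crate override_span` are the nightly
// shorthand for the `pub(crate)` used here.
mod lexer {
    pub struct StringReader {
        pub ch: Option<char>,                  // exported: part of the public API
        pub(crate) override_span: Option<u32>, // visible anywhere in this crate
        end_src_index: usize,                  // visible only inside `lexer`
    }

    impl StringReader {
        pub fn new() -> Self {
            StringReader { ch: None, override_span: None, end_src_index: 0 }
        }

        pub(crate) fn bump(&mut self) {
            // Callable from sibling modules of this crate, invisible downstream.
            self.end_src_index += 1;
        }
    }
}

fn main() {
    let mut reader = lexer::StringReader::new();
    reader.bump();                       // ok: same crate
    reader.override_span = Some(0);      // ok: `pub(crate)` field, same crate
    // reader.end_src_index;             // error: private to the `lexer` module
    println!("{:?} {:?}", reader.ch, reader.override_span);
}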
@@ -51,16 +51,16 @@ pub struct StringReader<'a> {
     pub ch: Option<char>,
     pub filemap: Lrc<syntax_pos::FileMap>,
     /// Stop reading src at this index.
-    pub end_src_index: usize,
+    end_src_index: usize,
     /// Whether to record new-lines and multibyte chars in filemap.
     /// This is only necessary the first time a filemap is lexed.
     /// If part of a filemap is being re-lexed, this should be set to false.
-    pub save_new_lines_and_multibyte: bool,
+    save_new_lines_and_multibyte: bool,
     // cached:
     peek_tok: token::Token,
     peek_span: Span,
     peek_span_src_raw: Span,
-    pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
+    fatal_errs: Vec<DiagnosticBuilder<'a>>,
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
     src: Lrc<String>,
@@ -70,7 +70,7 @@ pub struct StringReader<'a> {
     /// The raw source span which *does not* take `override_span` into account
     span_src_raw: Span,
     open_braces: Vec<(token::DelimToken, Span)>,
-    pub override_span: Option<Span>,
+    crate override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
@@ -163,11 +163,9 @@ impl<'a> StringReader<'a> {
             sp: self.peek_span,
         }
     }
 }
 
 impl<'a> StringReader<'a> {
     /// For comments.rs, which hackily pokes into next_pos and ch
-    pub fn new_raw(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>,
+    fn new_raw(sess: &'a ParseSess, filemap: Lrc<syntax_pos::FileMap>,
                override_span: Option<Span>) -> Self {
         let mut sr = StringReader::new_raw_internal(sess, filemap, override_span);
         sr.bump();
@@ -240,17 +238,17 @@ impl<'a> StringReader<'a> {
         sr
     }
 
-    pub fn ch_is(&self, c: char) -> bool {
+    fn ch_is(&self, c: char) -> bool {
         self.ch == Some(c)
     }
 
     /// Report a fatal lexical error with a given span.
-    pub fn fatal_span(&self, sp: Span, m: &str) -> FatalError {
+    fn fatal_span(&self, sp: Span, m: &str) -> FatalError {
         self.sess.span_diagnostic.span_fatal(sp, m)
     }
 
     /// Report a lexical error with a given span.
-    pub fn err_span(&self, sp: Span, m: &str) {
+    fn err_span(&self, sp: Span, m: &str) {
         self.sess.span_diagnostic.span_err(sp, m)
     }
 
@@ -375,7 +373,7 @@ impl<'a> StringReader<'a> {
     /// Calls `f` with a string slice of the source text spanning from `start`
     /// up to but excluding `self.pos`, meaning the slice does not include
     /// the character `self.ch`.
-    pub fn with_str_from<T, F>(&self, start: BytePos, f: F) -> T
+    fn with_str_from<T, F>(&self, start: BytePos, f: F) -> T
         where F: FnOnce(&str) -> T
     {
         self.with_str_from_to(start, self.pos, f)
@@ -384,13 +382,13 @@ impl<'a> StringReader<'a> {
     /// Create a Name from a given offset to the current offset, each
     /// adjusted 1 towards each other (assumes that on either side there is a
     /// single-byte delimiter).
-    pub fn name_from(&self, start: BytePos) -> ast::Name {
+    fn name_from(&self, start: BytePos) -> ast::Name {
         debug!("taking an ident from {:?} to {:?}", start, self.pos);
         self.with_str_from(start, Symbol::intern)
     }
 
     /// As name_from, with an explicit endpoint.
-    pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name {
+    fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name {
         debug!("taking an ident from {:?} to {:?}", start, end);
         self.with_str_from_to(start, end, Symbol::intern)
     }
@@ -454,7 +452,7 @@ impl<'a> StringReader<'a> {
 
     /// Advance the StringReader by one character. If a newline is
     /// discovered, add it to the FileMap's list of line start offsets.
-    pub fn bump(&mut self) {
+    crate fn bump(&mut self) {
         let next_src_index = self.src_index(self.next_pos);
         if next_src_index < self.end_src_index {
             let next_ch = char_at(&self.src, next_src_index);
@@ -481,7 +479,7 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn nextch(&self) -> Option<char> {
+    fn nextch(&self) -> Option<char> {
         let next_src_index = self.src_index(self.next_pos);
         if next_src_index < self.end_src_index {
             Some(char_at(&self.src, next_src_index))
@@ -490,11 +488,11 @@ impl<'a> StringReader<'a> {
         }
     }
 
-    pub fn nextch_is(&self, c: char) -> bool {
+    fn nextch_is(&self, c: char) -> bool {
         self.nextch() == Some(c)
     }
 
-    pub fn nextnextch(&self) -> Option<char> {
+    fn nextnextch(&self) -> Option<char> {
         let next_src_index = self.src_index(self.next_pos);
         if next_src_index < self.end_src_index {
             let next_next_src_index =
@@ -506,7 +504,7 @@ impl<'a> StringReader<'a> {
             None
         }
     }
 
-    pub fn nextnextch_is(&self, c: char) -> bool {
+    fn nextnextch_is(&self, c: char) -> bool {
         self.nextnextch() == Some(c)
     }
 
@@ -1732,7 +1730,7 @@ impl<'a> StringReader<'a> {
 
 // This tests the character for the unicode property 'PATTERN_WHITE_SPACE' which
 // is guaranteed to be forward compatible. http://unicode.org/reports/tr31/#R3
-pub fn is_pattern_whitespace(c: Option<char>) -> bool {
+crate fn is_pattern_whitespace(c: Option<char>) -> bool {
     c.map_or(false, Pattern_White_Space)
 }
 
@@ -1747,14 +1745,14 @@ fn is_dec_digit(c: Option<char>) -> bool {
     in_range(c, '0', '9')
 }
 
-pub fn is_doc_comment(s: &str) -> bool {
+fn is_doc_comment(s: &str) -> bool {
     let res = (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') ||
               s.starts_with("//!");
     debug!("is {:?} a doc comment? {}", s, res);
     res
 }
 
-pub fn is_block_doc_comment(s: &str) -> bool {
+fn is_block_doc_comment(s: &str) -> bool {
     // Prevent `/**/` from being parsed as a doc comment
     let res = ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') ||
                s.starts_with("/*!")) && s.len() >= 5;
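A note on the `is_pattern_whitespace` hunk: `Pattern_White_Space` (UAX #31) is a fixed set of eleven code points that Unicode guarantees will never change, which is why the lexer can rely on it. A standalone sketch of the same check (not rustc's generated table):

// Sketch: Pattern_White_Space spelled out as an explicit match.
fn is_pattern_whitespace(c: Option<char>) -> bool {
    c.map_or(false, |c| matches!(
        c,
        '\u{0009}'..='\u{000D}'       // tab, LF, vertical tab, form feed, CR
            | '\u{0020}'              // space
            | '\u{0085}'              // next line (NEL)
            | '\u{200E}' | '\u{200F}' // left-to-right / right-to-left marks
            | '\u{2028}' | '\u{2029}' // line / paragraph separators
    ))
}

fn main() {
    assert!(is_pattern_whitespace(Some(' ')));
    assert!(is_pattern_whitespace(Some('\n')));
    assert!(!is_pattern_whitespace(Some('a')));
    assert!(!is_pattern_whitespace(None));
    // U+00A0 NO-BREAK SPACE counts for `char::is_whitespace`, but is not
    // Pattern_White_Space, so the lexer does not treat it as whitespace.
    assert!(!is_pattern_whitespace(Some('\u{00A0}')));
}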
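The doc-comment predicates in the last hunk only lose their `pub`; their behaviour is unchanged. For reference, a standalone copy (with the `debug!` logging dropped) and the cases it accepts and rejects:

// Standalone copy of the predicates above, for illustration only.
fn is_doc_comment(s: &str) -> bool {
    // `///` but not `////`, or `//!`.
    (s.starts_with("///") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'/') ||
        s.starts_with("//!")
}

fn is_block_doc_comment(s: &str) -> bool {
    // `/**` but not `/***`, or `/*!`; the length check rejects `/**/`.
    ((s.starts_with("/**") && *s.as_bytes().get(3).unwrap_or(&b' ') != b'*') ||
        s.starts_with("/*!")) && s.len() >= 5
}

fn main() {
    assert!(is_doc_comment("/// outer line doc"));
    assert!(is_doc_comment("//! inner line doc"));
    assert!(!is_doc_comment("//// four slashes is a plain comment"));
    assert!(is_block_doc_comment("/** outer block doc */"));
    assert!(is_block_doc_comment("/*! inner block doc */"));
    assert!(!is_block_doc_comment("/**/")); // empty block comment, not a doc
}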