Refactor how spans are combined in the parser.

This commit is contained in:
Jeffrey Seyfried
2017-03-15 00:22:48 +00:00
parent ec7c0aece1
commit f08d5ad4c5
22 changed files with 363 additions and 438 deletions

View File

@@ -9,7 +9,7 @@
// except according to those terms.
use ast::{self, Ident};
use syntax_pos::{self, BytePos, CharPos, Pos, Span};
use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
use codemap::CodeMap;
use errors::{FatalError, DiagnosticBuilder};
use parse::{token, ParseSess};
@@ -68,6 +68,10 @@ pub struct StringReader<'a> {
open_braces: Vec<(token::DelimToken, Span)>,
}
// Build a `Span` covering [`lo`, `hi`) with no macro-expansion context.
// Local replacement for the removed `syntax_pos::mk_sp`; used by the lexer's
// error reporting and token construction below so spans are always created
// with `ctxt: NO_EXPANSION` (plain source positions, not expansion-tracked).
fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }
}
impl<'a> StringReader<'a> {
fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
let res = self.try_next_token();
@@ -225,12 +229,12 @@ impl<'a> StringReader<'a> {
/// Report a fatal error spanning [`from_pos`, `to_pos`).
fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError {
self.fatal_span(syntax_pos::mk_sp(from_pos, to_pos), m)
self.fatal_span(mk_sp(from_pos, to_pos), m)
}
/// Report a lexical error spanning [`from_pos`, `to_pos`).
fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
self.err_span(syntax_pos::mk_sp(from_pos, to_pos), m)
self.err_span(mk_sp(from_pos, to_pos), m)
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -254,7 +258,7 @@ impl<'a> StringReader<'a> {
for c in c.escape_default() {
m.push(c)
}
self.sess.span_diagnostic.struct_span_fatal(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..])
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -278,7 +282,7 @@ impl<'a> StringReader<'a> {
for c in c.escape_default() {
m.push(c)
}
self.sess.span_diagnostic.struct_span_err(syntax_pos::mk_sp(from_pos, to_pos), &m[..])
self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..])
}
/// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -302,11 +306,11 @@ impl<'a> StringReader<'a> {
None => {
if self.is_eof() {
self.peek_tok = token::Eof;
self.peek_span = syntax_pos::mk_sp(self.filemap.end_pos, self.filemap.end_pos);
self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos);
} else {
let start_bytepos = self.pos;
self.peek_tok = self.next_token_inner()?;
self.peek_span = syntax_pos::mk_sp(start_bytepos, self.pos);
self.peek_span = mk_sp(start_bytepos, self.pos);
};
}
}
@@ -489,7 +493,7 @@ impl<'a> StringReader<'a> {
if let Some(c) = self.ch {
if c.is_whitespace() {
let msg = "called consume_any_line_comment, but there was whitespace";
self.sess.span_diagnostic.span_err(syntax_pos::mk_sp(self.pos, self.pos), msg);
self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg);
}
}
@@ -532,13 +536,13 @@ impl<'a> StringReader<'a> {
Some(TokenAndSpan {
tok: tok,
sp: syntax_pos::mk_sp(start_bpos, self.pos),
sp: mk_sp(start_bpos, self.pos),
})
})
} else {
Some(TokenAndSpan {
tok: token::Comment,
sp: syntax_pos::mk_sp(start_bpos, self.pos),
sp: mk_sp(start_bpos, self.pos),
})
};
}
@@ -571,7 +575,7 @@ impl<'a> StringReader<'a> {
}
return Some(TokenAndSpan {
tok: token::Shebang(self.name_from(start)),
sp: syntax_pos::mk_sp(start, self.pos),
sp: mk_sp(start, self.pos),
});
}
}
@@ -599,7 +603,7 @@ impl<'a> StringReader<'a> {
}
let c = Some(TokenAndSpan {
tok: token::Whitespace,
sp: syntax_pos::mk_sp(start_bpos, self.pos),
sp: mk_sp(start_bpos, self.pos),
});
debug!("scanning whitespace: {:?}", c);
c
@@ -661,7 +665,7 @@ impl<'a> StringReader<'a> {
Some(TokenAndSpan {
tok: tok,
sp: syntax_pos::mk_sp(start_bpos, self.pos),
sp: mk_sp(start_bpos, self.pos),
})
})
}
@@ -858,7 +862,7 @@ impl<'a> StringReader<'a> {
let valid = if self.ch_is('{') {
self.scan_unicode_escape(delim) && !ascii_only
} else {
let span = syntax_pos::mk_sp(start, self.pos);
let span = mk_sp(start, self.pos);
self.sess.span_diagnostic
.struct_span_err(span, "incorrect unicode escape sequence")
.span_help(span,
@@ -896,13 +900,13 @@ impl<'a> StringReader<'a> {
},
c);
if e == '\r' {
err.span_help(syntax_pos::mk_sp(escaped_pos, pos),
err.span_help(mk_sp(escaped_pos, pos),
"this is an isolated carriage return; consider \
checking your editor and version control \
settings");
}
if (e == '{' || e == '}') && !ascii_only {
err.span_help(syntax_pos::mk_sp(escaped_pos, pos),
err.span_help(mk_sp(escaped_pos, pos),
"if used in a formatting string, curly braces \
are escaped with `{{` and `}}`");
}
@@ -1735,7 +1739,7 @@ mod tests {
sp: Span {
lo: BytePos(21),
hi: BytePos(23),
expn_id: NO_EXPANSION,
ctxt: NO_EXPANSION,
},
};
assert_eq!(tok1, tok2);
@@ -1749,7 +1753,7 @@ mod tests {
sp: Span {
lo: BytePos(24),
hi: BytePos(28),
expn_id: NO_EXPANSION,
ctxt: NO_EXPANSION,
},
};
assert_eq!(tok3, tok4);
@@ -1908,7 +1912,7 @@ mod tests {
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token();
assert_eq!(comment.tok, token::Comment);
assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7)));
assert_eq!((comment.sp.lo, comment.sp.hi), (BytePos(0), BytePos(7)));
assert_eq!(lexer.next_token().tok, token::Whitespace);
assert_eq!(lexer.next_token().tok,
token::DocComment(Symbol::intern("/// test")));