Simplify hygiene::Mark application, and remove variant `Token::SubstNt` in favor of `quoted::TokenTree::MetaVar`.
Jeffrey Seyfried
2017-03-28 05:32:43 +00:00
parent fc9ccfdbe0
commit d4488b7df9
26 changed files with 160 additions and 172 deletions
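For context: before this change, a `$name` metavariable inside a `macro_rules!` body was carried through the ordinary token stream as `Token::SubstNt(name)`; afterwards, metavariables exist only in the macro-matcher's quoted token-tree form, as a dedicated `MetaVar` node. The sketch below illustrates the shape of that representation; the type names and fields are simplified assumptions, not the exact libsyntax definitions.

// Illustrative sketch only: simplified stand-ins, not the libsyntax types.
#[derive(Clone, Copy, Debug)]
struct Span { lo: u32, hi: u32 }

#[derive(Clone, Copy, Debug)]
struct Ident(&'static str);

// A `$x` in a macro_rules! rule is a first-class node of the quoted
// token-tree representation instead of a special lexer token.
#[allow(dead_code)]
#[derive(Debug)]
enum QuotedTokenTree {
    Token(Span, char),      // placeholder for an ordinary token
    MetaVar(Span, Ident),   // `$x`
}

fn main() {
    // `$x` covering bytes 10..12 of the macro definition.
    let tt = QuotedTokenTree::MetaVar(Span { lo: 10, hi: 12 }, Ident("x"));
    println!("{:?}", tt);
}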


@@ -66,14 +66,15 @@ pub struct StringReader<'a> {
     token: token::Token,
     span: Span,
     open_braces: Vec<(token::DelimToken, Span)>,
-}
-fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
-    Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }
+    pub override_span: Option<Span>,
 }
 impl<'a> StringReader<'a> {
-    fn next_token(&mut self) -> TokenAndSpan {
+    fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
+        unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION})
+    }
+    fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
         let res = self.try_next_token();
         self.unwrap_or_abort(res)
     }
@@ -175,6 +176,7 @@ impl<'a> StringReader<'a> {
             token: token::Eof,
             span: syntax_pos::DUMMY_SP,
             open_braces: Vec::new(),
+            override_span: None,
         }
     }
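These two hunks are the substance of the lexer change: `mk_sp` stops being a free function and becomes a method, so a `StringReader` constructed with `override_span: Some(sp)` forces every span it produces to `sp`, while `None` preserves the old behavior (`unwrap_or!` is a rustc-internal macro with `Option::unwrap_or` semantics). A minimal sketch of the same fallback pattern, using simplified stand-in types rather than the real `BytePos`/`Span`:

// Sketch of the override-span fallback, with simplified stand-in types.
#[derive(Clone, Copy, Debug, PartialEq)]
struct BytePos(u32);

#[derive(Clone, Copy, Debug, PartialEq)]
struct Span { lo: BytePos, hi: BytePos }

struct StringReader {
    // When set, every span this reader produces is replaced by this one.
    override_span: Option<Span>,
}

impl StringReader {
    fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
        // Use the override if present, otherwise build a span from lo..hi.
        self.override_span.unwrap_or(Span { lo, hi })
    }
}

fn main() {
    let mut reader = StringReader { override_span: None };
    assert_eq!(reader.mk_sp(BytePos(1), BytePos(4)),
               Span { lo: BytePos(1), hi: BytePos(4) });

    // With an override installed, every produced span collapses to it.
    let fixed = Span { lo: BytePos(0), hi: BytePos(0) };
    reader.override_span = Some(fixed);
    assert_eq!(reader.mk_sp(BytePos(1), BytePos(4)), fixed);
}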
@@ -229,12 +231,12 @@ impl<'a> StringReader<'a> {
     /// Report a fatal error spanning [`from_pos`, `to_pos`).
     fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError {
-        self.fatal_span(mk_sp(from_pos, to_pos), m)
+        self.fatal_span(self.mk_sp(from_pos, to_pos), m)
     }
     /// Report a lexical error spanning [`from_pos`, `to_pos`).
     fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
-        self.err_span(mk_sp(from_pos, to_pos), m)
+        self.err_span(self.mk_sp(from_pos, to_pos), m)
     }
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -258,7 +260,7 @@ impl<'a> StringReader<'a> {
         for c in c.escape_default() {
             m.push(c)
         }
-        self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..])
+        self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..])
     }
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -282,7 +284,7 @@ impl<'a> StringReader<'a> {
         for c in c.escape_default() {
             m.push(c)
         }
-        self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..])
+        self.sess.span_diagnostic.struct_span_err(self.mk_sp(from_pos, to_pos), &m[..])
     }
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -306,11 +308,11 @@ impl<'a> StringReader<'a> {
             None => {
                 if self.is_eof() {
                     self.peek_tok = token::Eof;
-                    self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos);
+                    self.peek_span = self.mk_sp(self.filemap.end_pos, self.filemap.end_pos);
                 } else {
                     let start_bytepos = self.pos;
                     self.peek_tok = self.next_token_inner()?;
-                    self.peek_span = mk_sp(start_bytepos, self.pos);
+                    self.peek_span = self.mk_sp(start_bytepos, self.pos);
                 };
             }
         }
@@ -502,7 +504,7 @@ impl<'a> StringReader<'a> {
         if let Some(c) = self.ch {
             if c.is_whitespace() {
                 let msg = "called consume_any_line_comment, but there was whitespace";
-                self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg);
+                self.sess.span_diagnostic.span_err(self.mk_sp(self.pos, self.pos), msg);
             }
         }
@@ -545,13 +547,13 @@ impl<'a> StringReader<'a> {
                     Some(TokenAndSpan {
                         tok: tok,
-                        sp: mk_sp(start_bpos, self.pos),
+                        sp: self.mk_sp(start_bpos, self.pos),
                     })
                 })
             } else {
                 Some(TokenAndSpan {
                     tok: token::Comment,
-                    sp: mk_sp(start_bpos, self.pos),
+                    sp: self.mk_sp(start_bpos, self.pos),
                 })
             }
         }
@@ -584,7 +586,7 @@ impl<'a> StringReader<'a> {
             }
             return Some(TokenAndSpan {
                 tok: token::Shebang(self.name_from(start)),
-                sp: mk_sp(start, self.pos),
+                sp: self.mk_sp(start, self.pos),
             });
         }
     }
@@ -612,7 +614,7 @@ impl<'a> StringReader<'a> {
             }
             let c = Some(TokenAndSpan {
                 tok: token::Whitespace,
-                sp: mk_sp(start_bpos, self.pos),
+                sp: self.mk_sp(start_bpos, self.pos),
             });
             debug!("scanning whitespace: {:?}", c);
             c
@@ -674,7 +676,7 @@ impl<'a> StringReader<'a> {
                 Some(TokenAndSpan {
                     tok: tok,
-                    sp: mk_sp(start_bpos, self.pos),
+                    sp: self.mk_sp(start_bpos, self.pos),
                 })
             })
         }
@@ -869,7 +871,7 @@ impl<'a> StringReader<'a> {
         let valid = if self.ch_is('{') {
             self.scan_unicode_escape(delim) && !ascii_only
         } else {
-            let span = mk_sp(start, self.pos);
+            let span = self.mk_sp(start, self.pos);
             self.sess.span_diagnostic
                 .struct_span_err(span, "incorrect unicode escape sequence")
                 .span_help(span,
@@ -907,13 +909,13 @@ impl<'a> StringReader<'a> {
                 },
                 c);
             if e == '\r' {
-                err.span_help(mk_sp(escaped_pos, pos),
+                err.span_help(self.mk_sp(escaped_pos, pos),
                               "this is an isolated carriage return; consider \
                                checking your editor and version control \
                                settings");
             }
             if (e == '{' || e == '}') && !ascii_only {
-                err.span_help(mk_sp(escaped_pos, pos),
+                err.span_help(self.mk_sp(escaped_pos, pos),
                               "if used in a formatting string, curly braces \
                                are escaped with `{{` and `}}`");
             }
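The remaining hunks in this file are mechanical: every call site that previously used the free function `mk_sp(lo, hi)` now calls `self.mk_sp(lo, hi)`, so the spans attached to tokens, comments, whitespace, shebangs, and lexer diagnostics all flow through the single overridable constructor.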