Remove `token::{Open,Close}Delim` by replacing them with `{Open,Close}{Paren,Brace,Bracket,Invisible}`.
PR #137902 made `ast::TokenKind` more like `lexer::TokenKind` by
replacing the compound `BinOp{,Eq}(BinOpToken)` variants with fieldless
variants `Plus`, `Minus`, `Star`, etc. This commit does a similar thing
with delimiters. It also makes `ast::TokenKind` more similar to
`parser::TokenType`.
This requires a few new methods:
- `TokenKind::is_{,open_,close_}delim()` replace various kinds of
pattern matches.
- `Delimiter::as_{open,close}_token_kind` are used to convert
`Delimiter` values to `TokenKind`.
Despite these additions, it's a net reduction in lines of code. This is
because e.g. `token::OpenParen` is so much shorter than
`token::OpenDelim(Delimiter::Parenthesis)` that many multi-line forms
reduce to single line forms. And many places where the number of lines
doesn't change are still easier to read, just because the names are
shorter, e.g.:
```
- } else if self.token != token::CloseDelim(Delimiter::Brace) {
+ } else if self.token != token::CloseBrace {
```
This commit is contained in:
@@ -371,12 +371,12 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
|
||||
rustc_lexer::TokenKind::Semi => token::Semi,
|
||||
rustc_lexer::TokenKind::Comma => token::Comma,
|
||||
rustc_lexer::TokenKind::Dot => token::Dot,
|
||||
rustc_lexer::TokenKind::OpenParen => token::OpenDelim(Delimiter::Parenthesis),
|
||||
rustc_lexer::TokenKind::CloseParen => token::CloseDelim(Delimiter::Parenthesis),
|
||||
rustc_lexer::TokenKind::OpenBrace => token::OpenDelim(Delimiter::Brace),
|
||||
rustc_lexer::TokenKind::CloseBrace => token::CloseDelim(Delimiter::Brace),
|
||||
rustc_lexer::TokenKind::OpenBracket => token::OpenDelim(Delimiter::Bracket),
|
||||
rustc_lexer::TokenKind::CloseBracket => token::CloseDelim(Delimiter::Bracket),
|
||||
rustc_lexer::TokenKind::OpenParen => token::OpenParen,
|
||||
rustc_lexer::TokenKind::CloseParen => token::CloseParen,
|
||||
rustc_lexer::TokenKind::OpenBrace => token::OpenBrace,
|
||||
rustc_lexer::TokenKind::CloseBrace => token::CloseBrace,
|
||||
rustc_lexer::TokenKind::OpenBracket => token::OpenBracket,
|
||||
rustc_lexer::TokenKind::CloseBracket => token::CloseBracket,
|
||||
rustc_lexer::TokenKind::At => token::At,
|
||||
rustc_lexer::TokenKind::Pound => token::Pound,
|
||||
rustc_lexer::TokenKind::Tilde => token::Tilde,
|
||||
|
||||
@@ -18,38 +18,33 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
|
||||
|
||||
let mut buf = Vec::new();
|
||||
loop {
|
||||
match self.token.kind {
|
||||
token::OpenDelim(delim) => {
|
||||
// Invisible delimiters cannot occur here because `TokenTreesReader` parses
|
||||
// code directly from strings, with no macro expansion involved.
|
||||
debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
|
||||
buf.push(match self.lex_token_tree_open_delim(delim) {
|
||||
Ok(val) => val,
|
||||
Err(errs) => return Err(errs),
|
||||
})
|
||||
}
|
||||
token::CloseDelim(delim) => {
|
||||
// Invisible delimiters cannot occur here because `TokenTreesReader` parses
|
||||
// code directly from strings, with no macro expansion involved.
|
||||
debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
|
||||
return if is_delimited {
|
||||
Ok((open_spacing, TokenStream::new(buf)))
|
||||
} else {
|
||||
Err(vec![self.close_delim_err(delim)])
|
||||
};
|
||||
}
|
||||
token::Eof => {
|
||||
return if is_delimited {
|
||||
Err(vec![self.eof_err()])
|
||||
} else {
|
||||
Ok((open_spacing, TokenStream::new(buf)))
|
||||
};
|
||||
}
|
||||
_ => {
|
||||
// Get the next normal token.
|
||||
let (this_tok, this_spacing) = self.bump();
|
||||
buf.push(TokenTree::Token(this_tok, this_spacing));
|
||||
}
|
||||
if let Some(delim) = self.token.kind.open_delim() {
|
||||
// Invisible delimiters cannot occur here because `TokenTreesReader` parses
|
||||
// code directly from strings, with no macro expansion involved.
|
||||
debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
|
||||
buf.push(match self.lex_token_tree_open_delim(delim) {
|
||||
Ok(val) => val,
|
||||
Err(errs) => return Err(errs),
|
||||
})
|
||||
} else if let Some(delim) = self.token.kind.close_delim() {
|
||||
// Invisible delimiters cannot occur here because `TokenTreesReader` parses
|
||||
// code directly from strings, with no macro expansion involved.
|
||||
debug_assert!(!matches!(delim, Delimiter::Invisible(_)));
|
||||
return if is_delimited {
|
||||
Ok((open_spacing, TokenStream::new(buf)))
|
||||
} else {
|
||||
Err(vec![self.close_delim_err(delim)])
|
||||
};
|
||||
} else if self.token.kind == token::Eof {
|
||||
return if is_delimited {
|
||||
Err(vec![self.eof_err()])
|
||||
} else {
|
||||
Ok((open_spacing, TokenStream::new(buf)))
|
||||
};
|
||||
} else {
|
||||
// Get the next normal token.
|
||||
let (this_tok, this_spacing) = self.bump();
|
||||
buf.push(TokenTree::Token(this_tok, this_spacing));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -111,9 +106,9 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
|
||||
let delim_span = DelimSpan::from_pair(pre_span, self.token.span);
|
||||
let sm = self.psess.source_map();
|
||||
|
||||
let close_spacing = match self.token.kind {
|
||||
// Correct delimiter.
|
||||
token::CloseDelim(close_delim) if close_delim == open_delim => {
|
||||
let close_spacing = if let Some(close_delim) = self.token.kind.close_delim() {
|
||||
if close_delim == open_delim {
|
||||
// Correct delimiter.
|
||||
let (open_brace, open_brace_span) = self.diag_info.open_braces.pop().unwrap();
|
||||
let close_brace_span = self.token.span;
|
||||
|
||||
@@ -134,9 +129,8 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
|
||||
|
||||
// Move past the closing delimiter.
|
||||
self.bump_minimal()
|
||||
}
|
||||
// Incorrect delimiter.
|
||||
token::CloseDelim(close_delim) => {
|
||||
} else {
|
||||
// Incorrect delimiter.
|
||||
let mut unclosed_delimiter = None;
|
||||
let mut candidate = None;
|
||||
|
||||
@@ -182,14 +176,13 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
|
||||
Spacing::Alone
|
||||
}
|
||||
}
|
||||
token::Eof => {
|
||||
// Silently recover, the EOF token will be seen again
|
||||
// and an error emitted then. Thus we don't pop from
|
||||
// self.open_braces here. The choice of spacing value here
|
||||
// doesn't matter.
|
||||
Spacing::Alone
|
||||
}
|
||||
_ => unreachable!(),
|
||||
} else {
|
||||
assert_eq!(self.token.kind, token::Eof);
|
||||
// Silently recover, the EOF token will be seen again
|
||||
// and an error emitted then. Thus we don't pop from
|
||||
// self.open_braces here. The choice of spacing value here
|
||||
// doesn't matter.
|
||||
Spacing::Alone
|
||||
};
|
||||
|
||||
let spacing = DelimSpacing::new(open_spacing, close_spacing);
|
||||
|
||||
@@ -5,7 +5,7 @@ use rustc_span::{BytePos, Pos, Span, kw};
|
||||
|
||||
use super::Lexer;
|
||||
use crate::errors::TokenSubstitution;
|
||||
use crate::token::{self, Delimiter};
|
||||
use crate::token;
|
||||
|
||||
#[rustfmt::skip] // for line breaks
|
||||
pub(super) static UNICODE_ARRAY: &[(char, &str, &str)] = &[
|
||||
@@ -315,12 +315,12 @@ const ASCII_ARRAY: &[(&str, &str, Option<token::TokenKind>)] = &[
|
||||
("!", "Exclamation Mark", Some(token::Bang)),
|
||||
("?", "Question Mark", Some(token::Question)),
|
||||
(".", "Period", Some(token::Dot)),
|
||||
("(", "Left Parenthesis", Some(token::OpenDelim(Delimiter::Parenthesis))),
|
||||
(")", "Right Parenthesis", Some(token::CloseDelim(Delimiter::Parenthesis))),
|
||||
("[", "Left Square Bracket", Some(token::OpenDelim(Delimiter::Bracket))),
|
||||
("]", "Right Square Bracket", Some(token::CloseDelim(Delimiter::Bracket))),
|
||||
("{", "Left Curly Brace", Some(token::OpenDelim(Delimiter::Brace))),
|
||||
("}", "Right Curly Brace", Some(token::CloseDelim(Delimiter::Brace))),
|
||||
("(", "Left Parenthesis", Some(token::OpenParen)),
|
||||
(")", "Right Parenthesis", Some(token::CloseParen)),
|
||||
("[", "Left Square Bracket", Some(token::OpenBracket)),
|
||||
("]", "Right Square Bracket", Some(token::CloseBracket)),
|
||||
("{", "Left Curly Brace", Some(token::OpenBrace)),
|
||||
("}", "Right Curly Brace", Some(token::CloseBrace)),
|
||||
("*", "Asterisk", Some(token::Star)),
|
||||
("/", "Slash", Some(token::Slash)),
|
||||
("\\", "Backslash", None),
|
||||
|
||||
Reference in New Issue
Block a user