Rollup merge of #141570 - chenyukang:yukang-fix-eq_unspanned, r=workingjubilee

Fix incorrect eq_unspanned in TokenStream

Fixes rust-lang/rust#141522

r? @workingjubilee

Should we remove this function?

Since it's used in several places, I'd prefer to keep it.
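
For context, here is a standalone sketch (plain slices, not the compiler's `TokenStream` types; names are illustrative only) of why the removed zip-based comparison could misreport streams of different lengths as equal: `Iterator::zip` may pull one extra element from its first iterator before it notices the second is exhausted, so a trailing "both iterators are now empty" check can still pass when the first stream is exactly one token longer.

```rust
// Sketch of the removed pattern: zip the two iterators, compare pairwise,
// then require both iterators to be exhausted.
fn zip_then_check_empty<T: PartialEq>(a: &[T], b: &[T]) -> bool {
    let mut ia = a.iter();
    let mut ib = b.iter();
    for (x, y) in std::iter::zip(&mut ia, &mut ib) {
        if x != y {
            return false;
        }
    }
    // If `a` was exactly one element longer, `zip` already consumed that
    // extra element while probing for a pair, so this check cannot detect
    // the length mismatch.
    ia.next().is_none() && ib.next().is_none()
}

fn main() {
    // One element longer on the left: wrongly reported as equal.
    assert!(zip_then_check_empty(&[1, 2, 3], &[1, 2]));
    // Longer on the right: the leftover element is still pending, so this
    // correctly returns false.
    assert!(!zip_then_check_empty(&[1, 2], &[1, 2, 3]));
}
```

Checking lengths up front, as the new code does, avoids relying on what `zip` happens to leave behind in either iterator.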
Authored by Matthias Krüger on 2025-06-04 16:24:07 +02:00, committed by GitHub.
3 changed files with 12 additions and 16 deletions


@@ -57,7 +57,9 @@ impl TokenTree {
         match (self, other) {
             (TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
             (TokenTree::Delimited(.., delim, tts), TokenTree::Delimited(.., delim2, tts2)) => {
-                delim == delim2 && tts.eq_unspanned(tts2)
+                delim == delim2
+                    && tts.len() == tts2.len()
+                    && tts.iter().zip(tts2.iter()).all(|(a, b)| a.eq_unspanned(b))
             }
             _ => false,
         }
@@ -694,18 +696,6 @@ impl TokenStream {
         TokenStreamIter::new(self)
     }

-    /// Compares two `TokenStream`s, checking equality without regarding span information.
-    pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
-        let mut iter1 = self.iter();
-        let mut iter2 = other.iter();
-        for (tt1, tt2) in iter::zip(&mut iter1, &mut iter2) {
-            if !tt1.eq_unspanned(tt2) {
-                return false;
-            }
-        }
-        iter1.next().is_none() && iter2.next().is_none()
-    }
-
     /// Create a token stream containing a single token with alone spacing. The
     /// spacing used for the final token in a constructed stream doesn't matter
     /// because it's never used. In practice we arbitrarily use


@@ -14,6 +14,10 @@ fn sp(a: u32, b: u32) -> Span {
     Span::with_root_ctxt(BytePos(a), BytePos(b))
 }

+fn cmp_token_stream(a: &TokenStream, b: &TokenStream) -> bool {
+    a.len() == b.len() && a.iter().zip(b.iter()).all(|(x, y)| x.eq_unspanned(y))
+}
+
 #[test]
 fn test_concat() {
     create_default_session_globals_then(|| {
@@ -25,7 +29,7 @@ fn test_concat() {
         eq_res.push_stream(test_snd);
         assert_eq!(test_res.iter().count(), 5);
         assert_eq!(eq_res.iter().count(), 5);
-        assert_eq!(test_res.eq_unspanned(&eq_res), true);
+        assert_eq!(cmp_token_stream(&test_res, &eq_res), true);
     })
 }
@@ -104,7 +108,7 @@ fn test_dotdotdot() {
         stream.push_tree(TokenTree::token_joint(token::Dot, sp(0, 1)));
         stream.push_tree(TokenTree::token_joint(token::Dot, sp(1, 2)));
         stream.push_tree(TokenTree::token_alone(token::Dot, sp(2, 3)));
-        assert!(stream.eq_unspanned(&string_to_ts("...")));
+        assert!(cmp_token_stream(&stream, &string_to_ts("...")));
         assert_eq!(stream.iter().count(), 1);
     })
 }


@@ -960,5 +960,7 @@ pub fn eq_attr_args(l: &AttrArgs, r: &AttrArgs) -> bool {
 }

 pub fn eq_delim_args(l: &DelimArgs, r: &DelimArgs) -> bool {
-    l.delim == r.delim && l.tokens.eq_unspanned(&r.tokens)
+    l.delim == r.delim
+        && l.tokens.len() == r.tokens.len()
+        && l.tokens.iter().zip(r.tokens.iter()).all(|(a, b)| a.eq_unspanned(b))
 }
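
All three updated call sites share the same shape: compare lengths first, then compare elements pairwise with `eq_unspanned`. A minimal generic illustration of that shape (the helper name is hypothetical, not code from this commit):

```rust
// Length check first, then element-wise comparison — the shape used by the
// updated `TokenTree::eq_unspanned` arm, the `cmp_token_stream` test helper,
// and clippy's `eq_delim_args`. Generic slice helper for illustration only.
fn eq_by_len_then_zip<T>(a: &[T], b: &[T], eq: impl Fn(&T, &T) -> bool) -> bool {
    a.len() == b.len() && a.iter().zip(b.iter()).all(|(x, y)| eq(x, y))
}

fn main() {
    // The one-element-longer case that slipped past the old check now fails.
    assert!(!eq_by_len_then_zip(&[1, 2, 3], &[1, 2], |x, y| x == y));
    assert!(eq_by_len_then_zip(&[1, 2], &[1, 2], |x, y| x == y));
}
```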