use pattern matching for slice destructuring

This commit is contained in:
Cedric
2019-06-08 10:49:46 +02:00
parent c1c60d292e
commit 5fb099dc78
7 changed files with 45 additions and 59 deletions

View File

@@ -33,8 +33,8 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
span: Span, span: Span,
token_tree: &[TokenTree]) token_tree: &[TokenTree])
-> Box<dyn MacResult+'cx> { -> Box<dyn MacResult+'cx> {
let code = match (token_tree.len(), token_tree.get(0)) { let code = match token_tree {
(1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. }))) => code, &[TokenTree::Token(Token { kind: token::Ident(code, _), .. })] => code,
_ => unreachable!() _ => unreachable!()
}; };
@@ -66,22 +66,15 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
span: Span, span: Span,
token_tree: &[TokenTree]) token_tree: &[TokenTree])
-> Box<dyn MacResult+'cx> { -> Box<dyn MacResult+'cx> {
let (code, description) = match ( let (code, description) = match token_tree {
token_tree.len(), &[TokenTree::Token(Token { kind: token::Ident(code, _), .. })] => {
token_tree.get(0),
token_tree.get(1),
token_tree.get(2)
) {
(1, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), None, None) => {
(code, None) (code, None)
}, },
(3, Some(&TokenTree::Token(Token { kind: token::Ident(code, _), .. })), &[TokenTree::Token(Token { kind: token::Ident(code, _), .. }),
Some(&TokenTree::Token(Token { kind: token::Comma, .. })), TokenTree::Token(Token { kind: token::Comma, .. }),
Some(&TokenTree::Token(Token { TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..})] => {
kind: token::Literal(token::Lit { symbol, .. }), ..
}))) => {
(code, Some(symbol)) (code, Some(symbol))
} },
_ => unreachable!() _ => unreachable!()
}; };

View File

@@ -424,47 +424,40 @@ mod tests {
string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect(); string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
let tts: &[TokenTree] = &tts[..]; let tts: &[TokenTree] = &tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { match tts {
( &[TokenTree::Token(Token {kind: token::Ident(name_macro_rules, false), ..}),
4, TokenTree::Token(Token { kind: token::Not, .. }),
Some(&TokenTree::Token(Token { TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }),
kind: token::Ident(name_macro_rules, false), .. TokenTree::Delimited(_, macro_delim, ref macro_tts)
})), ]
Some(&TokenTree::Token(Token { kind: token::Not, .. })),
Some(&TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. })),
Some(&TokenTree::Delimited(_, macro_delim, ref macro_tts)),
)
if name_macro_rules == sym::macro_rules && name_zip.as_str() == "zip" => { if name_macro_rules == sym::macro_rules && name_zip.as_str() == "zip" => {
let tts = &macro_tts.trees().collect::<Vec<_>>(); let tts = &macro_tts.trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { match tts {
( &[
3, TokenTree::Delimited(_, first_delim, ref first_tts),
Some(&TokenTree::Delimited(_, first_delim, ref first_tts)), TokenTree::Token(Token { kind: token::FatArrow, .. }),
Some(&TokenTree::Token(Token { kind: token::FatArrow, .. })), TokenTree::Delimited(_, second_delim, ref second_tts),
Some(&TokenTree::Delimited(_, second_delim, ref second_tts)), ]
)
if macro_delim == token::Paren => { if macro_delim == token::Paren => {
let tts = &first_tts.trees().collect::<Vec<_>>(); let tts = &first_tts.trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) { match tts {
( &[
2, TokenTree::Token(Token { kind: token::Dollar, .. }),
Some(&TokenTree::Token(Token { kind: token::Dollar, .. })), TokenTree::Token(Token {
Some(&TokenTree::Token(Token {
kind: token::Ident(name, false), .. kind: token::Ident(name, false), ..
})), }),
) ]
if first_delim == token::Paren && name.as_str() == "a" => {}, if first_delim == token::Paren && name.as_str() == "a" => {},
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts), _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
} }
let tts = &second_tts.trees().collect::<Vec<_>>(); let tts = &second_tts.trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) { match tts {
( &[
2, TokenTree::Token(Token { kind: token::Dollar, .. }),
Some(&TokenTree::Token(Token { kind: token::Dollar, .. })), TokenTree::Token(Token {
Some(&TokenTree::Token(Token {
kind: token::Ident(name, false), .. kind: token::Ident(name, false), ..
})), }),
) ]
if second_delim == token::Paren && name.as_str() == "a" => {}, if second_delim == token::Paren && name.as_str() == "a" => {},
_ => panic!("value 4: {:?} {:?}", second_delim, second_tts), _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
} }

View File

@@ -82,8 +82,8 @@ pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<
// } // }
let new = { let new = {
let other_f = match (other_fs.len(), other_fs.get(0)) { let other_f = match other_fs {
(1, Some(o_f)) => o_f, [o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"), _ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"),
}; };

View File

@@ -25,8 +25,8 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt<'_>,
-> P<Expr> -> P<Expr>
{ {
let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| { let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| {
let other_f = match (other_fs.len(), other_fs.get(0)) { let other_f = match other_fs {
(1, Some(o_f)) => o_f, [o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"), _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"),
}; };

View File

@@ -143,8 +143,8 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_
// } // }
let new = { let new = {
let other_f = match (other_fs.len(), other_fs.get(0)) { let other_f = match other_fs {
(1, Some(o_f)) => o_f, [o_f] => o_f,
_ => { _ => {
cx.span_bug(span, cx.span_bug(span,
"not exactly 2 arguments in `derive(PartialOrd)`") "not exactly 2 arguments in `derive(PartialOrd)`")
@@ -193,8 +193,8 @@ fn cs_op(less: bool,
}; };
let par_cmp = |cx: &mut ExtCtxt<'_>, span, self_f: P<Expr>, other_fs: &[P<Expr>], default| { let par_cmp = |cx: &mut ExtCtxt<'_>, span, self_f: P<Expr>, other_fs: &[P<Expr>], default| {
let other_f = match (other_fs.len(), other_fs.get(0)) { let other_f = match other_fs {
(1, Some(o_f)) => o_f, [o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"), _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
}; };

View File

@@ -52,8 +52,8 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt<'_>,
} }
fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> { fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> {
let state_expr = match (substr.nonself_args.len(), substr.nonself_args.get(0)) { let state_expr = match &substr.nonself_args {
(1, Some(o_f)) => o_f, &[o_f] => o_f,
_ => { _ => {
cx.span_bug(trait_span, cx.span_bug(trait_span,
"incorrect number of arguments in `derive(Hash)`") "incorrect number of arguments in `derive(Hash)`")

View File

@@ -16,11 +16,11 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
feature_gate::EXPLAIN_TRACE_MACROS); feature_gate::EXPLAIN_TRACE_MACROS);
} }
match (tt.len(), tt.first()) { match tt {
(1, Some(TokenTree::Token(token))) if token.is_keyword(kw::True) => { [TokenTree::Token(token)] if token.is_keyword(kw::True) => {
cx.set_trace_macros(true); cx.set_trace_macros(true);
} }
(1, Some(TokenTree::Token(token))) if token.is_keyword(kw::False) => { [TokenTree::Token(token)] if token.is_keyword(kw::False) => {
cx.set_trace_macros(false); cx.set_trace_macros(false);
} }
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"), _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),