syntax: Make quasiquoter use absolute paths
As part of removing the `pub use` glob, two extra import globs were injected to make `quote_expr!` work. However, the globs caused `unused_imports` warnings in some places. The quasiquoter needed the globs because it generated bare idents (e.g. `TyU`) rather than absolute paths (`::syntax::ast::TyU`). This patch removes the extra globs and makes the quasiquoter emit absolute paths instead.

Fixes #14618
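The underlying idea is general: code emitted by a macro should not depend on whatever happens to be imported at the expansion site. As a rough illustration only (modern `macro_rules!`, not the 2014 syntax-extension API this patch touches), a macro that names its dependency by absolute path works with no `use` at the call site and needs no injected glob:

    // Hypothetical sketch: the macro spells out HashMap's absolute path,
    // so the caller needs no `use std::collections::HashMap;` and no
    // injected glob import either.
    macro_rules! new_counts {
        () => {
            ::std::collections::HashMap::<String, u32>::new()
        };
    }

    fn main() {
        let counts = new_counts!();   // expands fine with no imports in scope
        println!("{}", counts.len()); // prints 0
    }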
@@ -20,7 +20,6 @@
        html_root_url = "http://doc.rust-lang.org/")]
 
 #![feature(macro_registrar, managed_boxes, quote)]
-#![allow(unused_imports)] // `quote_expr!` adds some `use` globs which may be unused
 
 extern crate regex;
 extern crate syntax;
@@ -401,6 +401,16 @@ fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr {
                  vec!(e_str))
 }
 
+fn mk_ast_path(cx: &ExtCtxt, sp: Span, name: &str) -> @ast::Expr {
+    let idents = vec!(id_ext("syntax"), id_ext("ast"), id_ext(name));
+    cx.expr_path(cx.path_global(sp, idents))
+}
+
+fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> @ast::Expr {
+    let idents = vec!(id_ext("syntax"), id_ext("parse"), id_ext("token"), id_ext(name));
+    cx.expr_path(cx.path_global(sp, idents))
+}
+
 fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> @ast::Expr {
     let name = match bop {
         PLUS => "PLUS",
@@ -414,116 +424,96 @@ fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> @ast::Expr {
         SHL => "SHL",
         SHR => "SHR"
     };
-    cx.expr_ident(sp, id_ext(name))
+    mk_token_path(cx, sp, name)
 }
 
 fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr {
 
     match *tok {
         BINOP(binop) => {
-            return cx.expr_call_ident(sp,
-                                      id_ext("BINOP"),
-                                      vec!(mk_binop(cx, sp, binop)));
+            return cx.expr_call(sp, mk_token_path(cx, sp, "BINOP"), vec!(mk_binop(cx, sp, binop)));
         }
         BINOPEQ(binop) => {
-            return cx.expr_call_ident(sp,
-                                      id_ext("BINOPEQ"),
-                                      vec!(mk_binop(cx, sp, binop)));
+            return cx.expr_call(sp, mk_token_path(cx, sp, "BINOPEQ"),
+                                vec!(mk_binop(cx, sp, binop)));
         }
 
         LIT_CHAR(i) => {
             let e_char = cx.expr_lit(sp, ast::LitChar(i));
 
-            return cx.expr_call_ident(sp, id_ext("LIT_CHAR"), vec!(e_char));
+            return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_CHAR"), vec!(e_char));
         }
 
         LIT_INT(i, ity) => {
             let s_ity = match ity {
-                ast::TyI => "TyI".to_string(),
-                ast::TyI8 => "TyI8".to_string(),
-                ast::TyI16 => "TyI16".to_string(),
-                ast::TyI32 => "TyI32".to_string(),
-                ast::TyI64 => "TyI64".to_string()
+                ast::TyI => "TyI",
+                ast::TyI8 => "TyI8",
+                ast::TyI16 => "TyI16",
+                ast::TyI32 => "TyI32",
+                ast::TyI64 => "TyI64"
             };
-            let e_ity = cx.expr_ident(sp, id_ext(s_ity.as_slice()));
-
+            let e_ity = mk_ast_path(cx, sp, s_ity);
             let e_i64 = cx.expr_lit(sp, ast::LitInt(i, ast::TyI64));
-
-            return cx.expr_call_ident(sp,
-                                      id_ext("LIT_INT"),
-                                      vec!(e_i64, e_ity));
+            return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_INT"), vec!(e_i64, e_ity));
         }
 
         LIT_UINT(u, uty) => {
             let s_uty = match uty {
-                ast::TyU => "TyU".to_string(),
-                ast::TyU8 => "TyU8".to_string(),
-                ast::TyU16 => "TyU16".to_string(),
-                ast::TyU32 => "TyU32".to_string(),
-                ast::TyU64 => "TyU64".to_string()
+                ast::TyU => "TyU",
+                ast::TyU8 => "TyU8",
+                ast::TyU16 => "TyU16",
+                ast::TyU32 => "TyU32",
+                ast::TyU64 => "TyU64"
            };
-            let e_uty = cx.expr_ident(sp, id_ext(s_uty.as_slice()));
-
+            let e_uty = mk_ast_path(cx, sp, s_uty);
             let e_u64 = cx.expr_lit(sp, ast::LitUint(u, ast::TyU64));
-
-            return cx.expr_call_ident(sp,
-                                      id_ext("LIT_UINT"),
-                                      vec!(e_u64, e_uty));
+            return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_UINT"), vec!(e_u64, e_uty));
         }
 
         LIT_INT_UNSUFFIXED(i) => {
             let e_i64 = cx.expr_lit(sp, ast::LitInt(i, ast::TyI64));
-
-            return cx.expr_call_ident(sp,
-                                      id_ext("LIT_INT_UNSUFFIXED"),
-                                      vec!(e_i64));
+            return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_INT_UNSUFFIXED"), vec!(e_i64));
         }
 
         LIT_FLOAT(fident, fty) => {
             let s_fty = match fty {
-                ast::TyF32 => "TyF32".to_string(),
-                ast::TyF64 => "TyF64".to_string(),
-                ast::TyF128 => "TyF128".to_string()
+                ast::TyF32 => "TyF32",
+                ast::TyF64 => "TyF64",
+                ast::TyF128 => "TyF128"
             };
-            let e_fty = cx.expr_ident(sp, id_ext(s_fty.as_slice()));
-
+            let e_fty = mk_ast_path(cx, sp, s_fty);
             let e_fident = mk_ident(cx, sp, fident);
-
-            return cx.expr_call_ident(sp,
-                                      id_ext("LIT_FLOAT"),
-                                      vec!(e_fident, e_fty));
+            return cx.expr_call(sp, mk_token_path(cx, sp, "LIT_FLOAT"), vec!(e_fident, e_fty));
        }
 
         LIT_STR(ident) => {
-            return cx.expr_call_ident(sp,
-                                      id_ext("LIT_STR"),
+            return cx.expr_call(sp,
+                                mk_token_path(cx, sp, "LIT_STR"),
                                 vec!(mk_ident(cx, sp, ident)));
         }
 
         LIT_STR_RAW(ident, n) => {
-            return cx.expr_call_ident(sp,
-                                      id_ext("LIT_STR_RAW"),
-                                      vec!(mk_ident(cx, sp, ident),
-                                           cx.expr_uint(sp, n)));
+            return cx.expr_call(sp,
+                                mk_token_path(cx, sp, "LIT_STR_RAW"),
+                                vec!(mk_ident(cx, sp, ident), cx.expr_uint(sp, n)));
         }
 
         IDENT(ident, b) => {
-            return cx.expr_call_ident(sp,
-                                      id_ext("IDENT"),
-                                      vec!(mk_ident(cx, sp, ident),
-                                           cx.expr_bool(sp, b)));
+            return cx.expr_call(sp,
+                                mk_token_path(cx, sp, "IDENT"),
+                                vec!(mk_ident(cx, sp, ident), cx.expr_bool(sp, b)));
         }
 
         LIFETIME(ident) => {
-            return cx.expr_call_ident(sp,
-                                      id_ext("LIFETIME"),
+            return cx.expr_call(sp,
+                                mk_token_path(cx, sp, "LIFETIME"),
                                 vec!(mk_ident(cx, sp, ident)));
         }
 
         DOC_COMMENT(ident) => {
-            return cx.expr_call_ident(sp,
-                                      id_ext("DOC_COMMENT"),
+            return cx.expr_call(sp,
+                                mk_token_path(cx, sp, "DOC_COMMENT"),
                                 vec!(mk_ident(cx, sp, ident)));
         }
 
         INTERPOLATED(_) => fail!("quote! with interpolated token"),
@@ -565,19 +555,16 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr {
         EOF => "EOF",
         _ => fail!()
     };
-    cx.expr_ident(sp, id_ext(name))
+    mk_token_path(cx, sp, name)
 }
 
-
 fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<@ast::Stmt> {
-
     match *tt {
-
         ast::TTTok(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
-            let e_tok = cx.expr_call_ident(sp,
-                                           id_ext("TTTok"),
+            let e_tok = cx.expr_call(sp,
+                                     mk_ast_path(cx, sp, "TTTok"),
                                      vec!(e_sp, mk_token(cx, sp, tok)));
             let e_push =
                 cx.expr_method_call(sp,
                                     cx.expr_ident(sp, id_ext("tt")),
@@ -695,8 +682,6 @@ fn expand_wrapper(cx: &ExtCtxt,
                   cx_expr: @ast::Expr,
                   expr: @ast::Expr) -> @ast::Expr {
     let uses = [
-        &["syntax", "ast"],
-        &["syntax", "parse", "token"],
         &["syntax", "ext", "quote", "rt"],
     ].iter().map(|path| {
         let path = path.iter().map(|s| s.to_string()).collect();
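The two helpers added in quote.rs, `mk_ast_path` and `mk_token_path`, build a globally rooted path expression (`cx.path_global` prepends the leading `::`) out of fixed segments plus a name, so every AST and token constructor referenced by the generated code is spelled absolutely. A minimal sketch of the paths they conceptually produce, written as plain Rust string formatting rather than the libsyntax builder API (illustrative stand-ins only):

    // Illustrative stand-ins: the real helpers return an AST expression,
    // not a string, but the resulting paths look like this.
    fn ast_path(name: &str) -> String {
        format!("::syntax::ast::{}", name)
    }

    fn token_path(name: &str) -> String {
        format!("::syntax::parse::token::{}", name)
    }

    fn main() {
        assert_eq!(ast_path("TyU"), "::syntax::ast::TyU");
        assert_eq!(token_path("EOF"), "::syntax::parse::token::EOF");
    }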