rustc: Rename rustc_macro to proc_macro

This commit blanket renames the `rustc_macro` infrastructure to `proc_macro`,
which reflects the general consensus of #35900. A follow-up PR to Cargo will be
required to purge the `rustc-macro` name as well.
Author: Alex Crichton · 2016-10-03 09:49:39 -07:00
parent 7a26aeca77 · commit 2148bdfcc7
86 changed files with 613 additions and 615 deletions

View File

@@ -4,12 +4,8 @@ name = "proc_macro"
version = "0.0.0"
[lib]
name = "proc_macro"
path = "lib.rs"
crate-type = ["dylib"]
[dependencies]
log = { path = "../liblog" }
rustc_plugin = { path = "../librustc_plugin" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }

View File

@@ -1,89 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate syntax;
extern crate syntax_pos;
use syntax::ast::Ident;
use syntax::codemap::DUMMY_SP;
use syntax::parse::token::{self, Token, keywords, str_to_ident};
use syntax::tokenstream::{self, TokenTree, TokenStream};
use std::rc::Rc;
/// A wrapper around `TokenStream::concat` to avoid extra namespace specification and
/// provide TokenStream concatenation as a generic operator.
pub fn concat(ts1: TokenStream, ts2: TokenStream) -> TokenStream {
TokenStream::concat(ts1, ts2)
}
/// Checks if two identifiers have the same name, disregarding context. This allows us to
/// fake 'reserved' keywords.
// FIXME We really want `free-identifier-=?` (a la Dybvig 1993). van Tonder 2007 is
// probably the easiest way to do that.
pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool {
let tid = match *tident {
TokenTree::Token(_, Token::Ident(ref id)) => id,
_ => {
return false;
}
};
tid.name == id.name
}
// ____________________________________________________________________________________________
// Conversion operators
/// Converts a `&str` into a Token.
pub fn str_to_token_ident(s: &str) -> Token {
Token::Ident(str_to_ident(s))
}
/// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that
/// corresponds to it.
pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token {
Token::Ident(str_to_ident(&kw.name().as_str()[..]))
}
// ____________________________________________________________________________________________
// Build Procedures
/// Generically takes a `ts` and delimiter and returns `ts` delimited by the specified
/// delimiter.
pub fn build_delimited(ts: TokenStream, delim: token::DelimToken) -> TokenStream {
let tts = ts.to_tts();
TokenStream::from_tts(vec![TokenTree::Delimited(DUMMY_SP,
Rc::new(tokenstream::Delimited {
delim: delim,
open_span: DUMMY_SP,
tts: tts,
close_span: DUMMY_SP,
}))])
}
/// Takes `ts` and returns `[ts]`.
pub fn build_bracket_delimited(ts: TokenStream) -> TokenStream {
build_delimited(ts, token::DelimToken::Bracket)
}
/// Takes `ts` and returns `{ts}`.
pub fn build_brace_delimited(ts: TokenStream) -> TokenStream {
build_delimited(ts, token::DelimToken::Brace)
}
/// Takes `ts` and returns `(ts)`.
pub fn build_paren_delimited(ts: TokenStream) -> TokenStream {
build_delimited(ts, token::DelimToken::Paren)
}
/// Constructs `()`.
pub fn build_empty_args() -> TokenStream {
build_paren_delimited(TokenStream::mk_empty())
}
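
A minimal sketch (not part of the diff) of how these builders compose, assuming the imports at the top of this file: build `a + b` from raw tokens, then wrap it in parentheses.

```
// Construct the stream `a + b`, then delimit it as `(a + b)`.
let lhs = str_to_token_ident("a");
let plus = Token::BinOp(token::BinOpToken::Plus);
let rhs = str_to_token_ident("b");
let inner = TokenStream::from_tokens(vec![lhs, plus, rhs]);
let call_args = build_paren_delimited(inner);
// `build_empty_args()` gives the nullary counterpart, `()`.
let no_args = build_empty_args();
```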

View File

@@ -8,130 +8,160 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Proc_Macro
//! A support library for macro authors when defining new macros.
//!
//! A library for procedural macro writers.
//! This library, provided by the standard distribution, provides the types
//! consumed in the interfaces of procedurally defined macro definitions.
//! Currently the primary use of this crate is to provide the ability to define
//! new custom derive modes through `#[proc_macro_derive]`.
//!
//! ## Usage
//! This package provides the `qquote!` macro for syntax creation, and the prelude
//! (at libproc_macro::prelude) provides a number of operations:
//! - `concat`, for concatenating two TokenStreams.
//! - `ident_eq`, for checking if two identifiers are equal regardless of syntax context.
//! - `str_to_token_ident`, for converting an `&str` into a Token.
//! - `keyword_to_token_ident`, for converting a `parse::token::keywords::Keyword` into a
//! Token.
//! - `build_delimited`, for creating a new TokenStream from an existing one and a delimiter
//! by wrapping the TokenStream in the delimiter.
//! - `build_bracket_delimited`, `build_brace_delimited`, and `build_paren_delimited`, for
//! easing the above.
//! - `build_empty_args`, which returns a TokenStream containing `()`.
//! - `lex`, which takes an `&str` and returns the TokenStream it represents.
//! Added recently as part of [RFC 1681], this crate is currently *unstable* and
//! requires the `#![feature(proc_macro_lib)]` directive to use.
//!
//! The `qquote!` macro also imports `syntax::ext::proc_macro_shim::prelude::*`, so you
//! will need to `extern crate syntax` for usage. (This is a temporary solution until more
//! of the external API in libproc_macro is stabilized to support the token construction
//! operations that the quasiquoter relies on.) The shim file also provides additional
//! operations, such as `build_block_emitter` (as used in the `cond` example below).
//!
//! ## TokenStreams
//!
//! TokenStreams serve as the basis of the macro system. They are, in essence, vectors of
//! TokenTrees, where indexing treats delimited values as a single term. That is, the term
//! `even(a+c) && even(b)` is encoded for indexing as `even | (a+c) | && | even | (b)` where,
//! in reality, `(a+c)` is actually a decorated pointer to `a | + | c`.
//!
//! If a user has a TokenStream that is a single, delimited value, they can use
//! `maybe_delimited` to destruct it and receive the internal vector as a new TokenStream
//! as:
//! ```
//! `(a+c)`.maybe_delimited() ~> Some(a | + | c)
//! ```
//!
//! Check the TokenStream documentation for more information; the structure also provides
//! cheap concatenation and slicing.
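//!
//! As a sketch (using the slicing methods shown in the `cond` example below), if `ts`
//! encodes `even(a+c) && even(b)`:
//!
//! ```
//! ts.slice(0..1)     ~> `even`
//! ts.slice_from(1..) ~> `(a+c) && even(b)`
//! ```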
//!
//! ## Quasiquotation
//!
//! The quasiquoter creates output that, when run, constructs the tokenstream specified as
//! input. For example, `qquote!(5 + 5)` will produce a program that, when run, will
//! construct the TokenStream `5 | + | 5`.
//!
//! ### Unquoting
//!
//! Unquoting is currently done as `unquote`, and works by taking the single next
//! TokenTree in the TokenStream as the unquoted term. Ergonomically, `unquote(foo)` works
//! fine, but `unquote foo` is also supported.
//!
//! A simple example might be:
//!
//!```
//!fn double(tmp: TokenStream) -> TokenStream {
//! qquote!(unquote(tmp) * 2)
//!}
//!```
//!
//! ### Large Example: Implementing Scheme's `cond`
//!
//! Below is the full implementation of Scheme's `cond` operator.
//!
//! ```
//! fn cond_rec(input: TokenStream) -> TokenStream {
//! if input.is_empty() { return qquote!(); }
//!
//! let next = input.slice(0..1);
//! let rest = input.slice_from(1..);
//!
//! let clause : TokenStream = match next.maybe_delimited() {
//! Some(ts) => ts,
//! _ => panic!("Invalid input"),
//! };
//!
//! // clause is ([test]) [rhs]
//! if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) }
//!
//! let test: TokenStream = clause.slice(0..1);
//! let rhs: TokenStream = clause.slice_from(1..);
//!
//! if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() {
//! qquote!({unquote(rhs)})
//! } else {
//! qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
//! }
//! }
//! ```
//! [RFC 1681]: https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md
//!
//! Note that this crate is intentionally very bare-bones currently. The main
//! type, `TokenStream`, only supports `fmt::Display` and `FromStr`
//! implementations, indicating that it can only go to and come from a string.
//! This functionality is intended to be expanded over time as more surface
//! area for macro authors is stabilized.
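//!
//! As a sketch of the intended usage (the derive name, trait, and type below are
//! illustrative, not part of this crate), a custom derive can only round-trip its
//! input through strings, via the `fmt::Display` and `FromStr` impls defined below:
//!
//! ```
//! #![feature(proc_macro_lib)]
//! extern crate proc_macro;
//! use proc_macro::TokenStream;
//!
//! #[proc_macro_derive(Hello)]
//! pub fn hello(input: TokenStream) -> TokenStream {
//!     // `Display` is currently the only way to inspect the input...
//!     let mut source = input.to_string();
//!     // ...and `FromStr` (via `parse`) the only way to construct the output.
//!     source.push_str("\nimpl Hello for HelloStruct { fn hello(&self) {} }");
//!     source.parse().unwrap()
//! }
//! ```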
#![crate_name = "proc_macro"]
#![unstable(feature = "rustc_private", issue = "27812")]
#![feature(plugin_registrar)]
#![crate_type = "dylib"]
#![unstable(feature = "proc_macro_lib", issue = "27812")]
#![crate_type = "rlib"]
#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
html_root_url = "https://doc.rust-lang.org/nightly/")]
#![crate_type = "dylib"]
#![cfg_attr(not(stage0), deny(warnings))]
#![deny(missing_docs)]
#![feature(staged_api)]
#![feature(rustc_diagnostic_macros)]
#![feature(rustc_private)]
#![feature(lang_items)]
extern crate rustc_plugin;
extern crate syntax;
extern crate syntax_pos;
#[macro_use] extern crate log;
mod qquote;
pub mod build;
pub mod parse;
pub mod prelude;
use qquote::qquote;
use std::fmt;
use std::str::FromStr;
use rustc_plugin::Registry;
use syntax::ast;
use syntax::parse;
use syntax::ptr::P;
// ____________________________________________________________________________________________
// Main macro definition
#[plugin_registrar]
pub fn plugin_registrar(reg: &mut Registry) {
reg.register_macro("qquote", qquote);
}
/// The main type provided by this crate, representing an abstract stream of
/// tokens.
///
/// This is both the input and output of `#[proc_macro_derive]` definitions.
/// Currently it's required to be a list of valid Rust items, but this
/// restriction may be lifted in the future.
///
/// The API of this type is intentionally bare-bones, but it'll be expanded over
/// time!
pub struct TokenStream {
inner: Vec<P<ast::Item>>,
}
/// Error returned from `TokenStream::from_str`.
#[derive(Debug)]
pub struct LexError {
_inner: (),
}
/// Permanently unstable internal implementation details of this crate. This
/// should not be used.
///
/// These methods are used by the rest of the compiler to generate instances of
/// `TokenStream` to hand to macro definitions, as well as consume the output.
///
/// Note that this module is also intentionally separate from the rest of the
/// crate. This allows the `#[unstable]` directive below to naturally apply to
/// all of the contents.
#[unstable(feature = "proc_macro_internals", issue = "27812")]
#[doc(hidden)]
pub mod __internal {
use std::cell::Cell;
use syntax::ast;
use syntax::ptr::P;
use syntax::parse::ParseSess;
use super::TokenStream;
pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
TokenStream { inner: vec![item] }
}
pub fn token_stream_items(stream: TokenStream) -> Vec<P<ast::Item>> {
stream.inner
}
pub trait Registry {
fn register_custom_derive(&mut self,
trait_name: &str,
expand: fn(TokenStream) -> TokenStream);
}
// Emulate scoped_thread_local!() here essentially
thread_local! {
static CURRENT_SESS: Cell<*const ParseSess> = Cell::new(0 as *const _);
}
pub fn set_parse_sess<F, R>(sess: &ParseSess, f: F) -> R
where F: FnOnce() -> R
{
struct Reset { prev: *const ParseSess }
impl Drop for Reset {
fn drop(&mut self) {
CURRENT_SESS.with(|p| p.set(self.prev));
}
}
CURRENT_SESS.with(|p| {
let _reset = Reset { prev: p.get() };
p.set(sess);
f()
})
}
pub fn with_parse_sess<F, R>(f: F) -> R
where F: FnOnce(&ParseSess) -> R
{
let p = CURRENT_SESS.with(|p| p.get());
assert!(!p.is_null());
f(unsafe { &*p })
}
}
impl FromStr for TokenStream {
type Err = LexError;
fn from_str(src: &str) -> Result<TokenStream, LexError> {
__internal::with_parse_sess(|sess| {
let src = src.to_string();
let cfg = Vec::new();
let name = "<proc-macro source code>".to_string();
let mut parser = parse::new_parser_from_source_str(sess, cfg, name,
src);
let mut ret = TokenStream { inner: Vec::new() };
loop {
match parser.parse_item() {
Ok(Some(item)) => ret.inner.push(item),
Ok(None) => return Ok(ret),
Err(mut err) => {
err.cancel();
return Err(LexError { _inner: () })
}
}
}
})
}
}
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for item in self.inner.iter() {
let item = syntax::print::pprust::item_to_string(item);
try!(f.write_str(&item));
try!(f.write_str("\n"));
}
Ok(())
}
}
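
The `__internal` module above hand-rolls a scoped thread-local: a `Cell` holding a raw pointer plus an RAII guard that restores the previous value (even on panic). A self-contained sketch of the same pattern, with illustrative names and a `u32` standing in for `ParseSess`:

```
use std::cell::Cell;

thread_local! {
    static CURRENT: Cell<*const u32> = Cell::new(0 as *const _);
}

fn set_current<F, R>(value: &u32, f: F) -> R
    where F: FnOnce() -> R
{
    // RAII guard: puts the previous pointer back when dropped.
    struct Reset { prev: *const u32 }
    impl Drop for Reset {
        fn drop(&mut self) {
            CURRENT.with(|p| p.set(self.prev));
        }
    }
    CURRENT.with(|p| {
        let _reset = Reset { prev: p.get() };
        p.set(value);
        f()
    })
}

fn with_current<F, R>(f: F) -> R
    where F: FnOnce(&u32) -> R
{
    let p = CURRENT.with(|p| p.get());
    assert!(!p.is_null(), "with_current called outside set_current");
    // The pointer is valid: `set_current` keeps the referent alive for the
    // duration of the closure it runs.
    f(unsafe { &*p })
}

fn main() {
    let sess = 42;
    set_current(&sess, || {
        assert_eq!(with_current(|s| *s), 42);
    });
}
```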

View File

@@ -1,26 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Parsing utilities for writing procedural macros.
extern crate syntax;
use syntax::parse::{ParseSess, filemap_to_tts};
use syntax::tokenstream::TokenStream;
/// Map a string to tts, using a made-up filename. For example, `lex("15")` will return a
/// TokenStream containing the literal 15.
pub fn lex(source_str: &str) -> TokenStream {
let ps = ParseSess::new();
TokenStream::from_tts(filemap_to_tts(&ps,
ps.codemap().new_filemap("procmacro_lex".to_string(),
None,
source_str.to_owned())))
}
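
A sketch of calling `lex` (the input string is illustrative):

```
let ts = lex("fn double(x: i32) -> i32 { x * 2 }");
assert!(!ts.is_empty());
```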

View File

@@ -1,12 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub use build::*;
pub use parse::*;

View File

@@ -1,470 +0,0 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! # Quasiquoter
//! This file contains the implementation internals of the quasiquoter provided by `qquote!`.
//!
//! ## Output
//! The quasiquoter produces output of the form:
//! let tmp0 = ...;
//! let tmp1 = ...;
//! ...
//! concat(from_tokens(...), concat(...))
//!
//! To be more explicit, the quasiquoter produces a series of bindings that each
//! construct TokenStreams via constructing Tokens and using `from_tokens`, ultimately
//! invoking `concat` on these bindings (and inlined expressions) to construct a
//! TokenStream that resembles the output syntax.
//!
extern crate rustc_plugin;
extern crate syntax;
extern crate syntax_pos;
use build::*;
use parse::lex;
use qquote::int_build::*;
use syntax::ast::Ident;
use syntax::codemap::Span;
use syntax::ext::base::*;
use syntax::ext::base;
use syntax::ext::proc_macro_shim::build_block_emitter;
use syntax::parse::token::{self, Token, gensym_ident, str_to_ident};
use syntax::print::pprust;
use syntax::tokenstream::{TokenTree, TokenStream};
// ____________________________________________________________________________________________
// Main definition
/// The user should use the macro, not this procedure.
pub fn qquote<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree])
-> Box<base::MacResult + 'cx> {
debug!("\nTTs in: {:?}\n", pprust::tts_to_string(&tts[..]));
let output = qquoter(cx, TokenStream::from_tts(tts.clone().to_owned()));
debug!("\nQQ out: {}\n", pprust::tts_to_string(&output.to_tts()[..]));
let imports = concat(lex("use syntax::ext::proc_macro_shim::prelude::*;"),
lex("use proc_macro::prelude::*;"));
build_block_emitter(cx, sp, build_brace_delimited(concat(imports, output)))
}
// ____________________________________________________________________________________________
// Datatype Definitions
#[derive(Debug)]
struct QDelimited {
delim: token::DelimToken,
open_span: Span,
tts: Vec<QTT>,
close_span: Span,
}
#[derive(Debug)]
enum QTT {
TT(TokenTree),
QDL(QDelimited),
QIdent(TokenTree),
}
type Bindings = Vec<(Ident, TokenStream)>;
// ____________________________________________________________________________________________
// Quasiquoter Algorithm
// This algorithm works as follows:
// Input: TokenStream
// 1. Walk the TokenStream, gathering up the unquoted expressions and marking them separately.
// 2. Hoist any unquoted term into its own let-binding via a gensym'd identifier
// 3. Convert the body from a `complex expression` into a simplified one via `convert_complex_tts`.
// 4. Stitch everything together with `concat`.
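//
// For example (schematically; the gensym'd name is illustrative), `qquote!(a + unquote(b))`
// compiles to code of roughly this shape:
//
//     let tmp = b;
//     concat(TokenStream::from_tokens(vec![Token::Ident(str_to_ident("a")),
//                                          Token::BinOp(BinOpToken::Plus)]),
//            tmp)
//
// The unquoted `b` was hoisted into `tmp` (step 2), the literal tokens were rebuilt
// with `from_tokens` (step 3), and the pieces were stitched with `concat` (step 4).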
fn qquoter<'cx>(cx: &'cx mut ExtCtxt, ts: TokenStream) -> TokenStream {
if ts.is_empty() {
return lex("TokenStream::mk_empty()");
}
let qq_res = qquote_iter(cx, 0, ts);
let mut bindings = qq_res.0;
let body = qq_res.1;
let mut cct_res = convert_complex_tts(cx, body);
bindings.append(&mut cct_res.0);
if bindings.is_empty() {
cct_res.1
} else {
debug!("BINDINGS");
for b in bindings.clone() {
debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..]));
}
TokenStream::concat(unravel(bindings), cct_res.1)
}
}
fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindings, Vec<QTT>) {
let mut depth = depth;
let mut bindings: Bindings = Vec::new();
let mut output: Vec<QTT> = Vec::new();
let mut iter = ts.iter();
loop {
let next = iter.next();
if next.is_none() {
break;
}
let next = next.unwrap().clone();
match next {
TokenTree::Token(_, Token::Ident(id)) if is_unquote(id) => {
if depth == 0 {
let exp = iter.next();
if exp.is_none() {
break;
} // produce an error or something first
let exp = vec![exp.unwrap().to_owned()];
debug!("RHS: {:?}", exp.clone());
let new_id = gensym_ident("tmp");
debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone()));
debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec());
bindings.push((new_id, TokenStream::from_tts(exp)));
debug!("BINDINGS");
for b in bindings.clone() {
debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..]));
}
output.push(QTT::QIdent(as_tt(Token::Ident(new_id.clone()))));
} else {
depth = depth - 1;
output.push(QTT::TT(next.clone()));
}
}
TokenTree::Token(_, Token::Ident(id)) if is_qquote(id) => {
depth = depth + 1;
}
TokenTree::Delimited(_, ref dl) => {
let br = qquote_iter(cx, depth, TokenStream::from_tts(dl.tts.clone().to_owned()));
let mut bind_ = br.0;
let res_ = br.1;
bindings.append(&mut bind_);
let new_dl = QDelimited {
delim: dl.delim,
open_span: dl.open_span,
tts: res_,
close_span: dl.close_span,
};
output.push(QTT::QDL(new_dl));
}
t => {
output.push(QTT::TT(t));
}
}
}
(bindings, output)
}
// ____________________________________________________________________________________________
// Turns QTTs into a TokenStream and some Bindings.
/// Construct a chain of concatenations.
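/// For example, `[a, b, c]` unravels to source for `concat(a, concat(b, c))`; a comma
/// token is spliced between the arguments so the result lexes as a call.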
fn unravel_concats(tss: Vec<TokenStream>) -> TokenStream {
let mut pushes: Vec<TokenStream> =
tss.into_iter().filter(|&ref ts| !ts.is_empty()).collect();
let mut output = match pushes.pop() {
Some(ts) => ts,
None => {
return TokenStream::mk_empty();
}
};
while let Some(ts) = pushes.pop() {
output = build_fn_call(str_to_ident("concat"),
concat(concat(ts,
from_tokens(vec![Token::Comma])),
output));
}
output
}
/// This converts the vector of QTTs into a set of Bindings for construction and the main
/// body as a TokenStream.
fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec<QTT>) -> (Bindings, TokenStream) {
let mut pushes: Vec<TokenStream> = Vec::new();
let mut bindings: Bindings = Vec::new();
let mut iter = tts.into_iter();
loop {
let next = iter.next();
if next.is_none() {
break;
}
let next = next.unwrap();
match next {
QTT::TT(TokenTree::Token(_, t)) => {
let token_out = emit_token(t);
pushes.push(token_out);
}
// FIXME handle sequence repetition tokens
QTT::QDL(qdl) => {
debug!(" QDL: {:?} ", qdl.tts);
let new_id = gensym_ident("qdl_tmp");
let mut cct_rec = convert_complex_tts(cx, qdl.tts);
bindings.append(&mut cct_rec.0);
bindings.push((new_id, cct_rec.1));
let sep = build_delim_tok(qdl.delim);
pushes.push(build_mod_call(vec![str_to_ident("proc_macro"),
str_to_ident("build"),
str_to_ident("build_delimited")],
concat(from_tokens(vec![Token::Ident(new_id)]),
concat(lex(","), sep))));
}
QTT::QIdent(t) => {
pushes.push(TokenStream::from_tts(vec![t]));
pushes.push(TokenStream::mk_empty());
}
_ => panic!("Unhandled case!"),
}
}
(bindings, unravel_concats(pushes))
}
// ____________________________________________________________________________________________
// Utilities
/// Unravels Bindings into a TokenStream of `let` declarations.
fn unravel(binds: Bindings) -> TokenStream {
let mut output = TokenStream::mk_empty();
for b in binds {
output = concat(output, build_let(b.0, b.1));
}
output
}
/// Checks if the Ident is `unquote`.
fn is_unquote(id: Ident) -> bool {
let qq = str_to_ident("unquote");
id.name == qq.name // We disregard context; unquote is _reserved_
}
/// Checks if the Ident is `quote`.
fn is_qquote(id: Ident) -> bool {
let qq = str_to_ident("qquote");
id.name == qq.name // We disregard context; qquote is _reserved_
}
mod int_build {
extern crate syntax;
extern crate syntax_pos;
use parse::*;
use build::*;
use syntax::ast::{self, Ident};
use syntax::codemap::{DUMMY_SP};
use syntax::parse::token::{self, Token, keywords, str_to_ident};
use syntax::tokenstream::{TokenTree, TokenStream};
// ____________________________________________________________________________________________
// Emitters
pub fn emit_token(t: Token) -> TokenStream {
concat(lex("TokenStream::from_tokens"),
build_paren_delimited(build_vec(build_token_tt(t))))
}
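/// Emits source that reconstructs a literal token. For example, the unsuffixed integer
/// literal `5` becomes the text `Token::Literal(Lit::Integer(token::intern("5")), None)`,
/// which `lex` then turns back into a TokenStream.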
pub fn emit_lit(l: token::Lit, n: Option<ast::Name>) -> TokenStream {
let suf = match n {
Some(n) => format!("Some(ast::Name({}))", n.0),
None => "None".to_string(),
};
let lit = match l {
token::Lit::Byte(n) => format!("Lit::Byte(token::intern(\"{}\"))", n.to_string()),
token::Lit::Char(n) => format!("Lit::Char(token::intern(\"{}\"))", n.to_string()),
token::Lit::Integer(n) => format!("Lit::Integer(token::intern(\"{}\"))", n.to_string()),
token::Lit::Float(n) => format!("Lit::Float(token::intern(\"{}\"))", n.to_string()),
token::Lit::Str_(n) => format!("Lit::Str_(token::intern(\"{}\"))", n.to_string()),
token::Lit::ByteStr(n) => format!("Lit::ByteStr(token::intern(\"{}\"))", n.to_string()),
_ => panic!("Unsupported literal"),
};
let res = format!("Token::Literal({},{})", lit, suf);
debug!("{}", res);
lex(&res)
}
// ____________________________________________________________________________________________
// Token Builders
pub fn build_binop_tok(bot: token::BinOpToken) -> TokenStream {
match bot {
token::BinOpToken::Plus => lex("Token::BinOp(BinOpToken::Plus)"),
token::BinOpToken::Minus => lex("Token::BinOp(BinOpToken::Minus)"),
token::BinOpToken::Star => lex("Token::BinOp(BinOpToken::Star)"),
token::BinOpToken::Slash => lex("Token::BinOp(BinOpToken::Slash)"),
token::BinOpToken::Percent => lex("Token::BinOp(BinOpToken::Percent)"),
token::BinOpToken::Caret => lex("Token::BinOp(BinOpToken::Caret)"),
token::BinOpToken::And => lex("Token::BinOp(BinOpToken::And)"),
token::BinOpToken::Or => lex("Token::BinOp(BinOpToken::Or)"),
token::BinOpToken::Shl => lex("Token::BinOp(BinOpToken::Shl)"),
token::BinOpToken::Shr => lex("Token::BinOp(BinOpToken::Shr)"),
}
}
pub fn build_binopeq_tok(bot: token::BinOpToken) -> TokenStream {
match bot {
token::BinOpToken::Plus => lex("Token::BinOpEq(BinOpToken::Plus)"),
token::BinOpToken::Minus => lex("Token::BinOpEq(BinOpToken::Minus)"),
token::BinOpToken::Star => lex("Token::BinOpEq(BinOpToken::Star)"),
token::BinOpToken::Slash => lex("Token::BinOpEq(BinOpToken::Slash)"),
token::BinOpToken::Percent => lex("Token::BinOpEq(BinOpToken::Percent)"),
token::BinOpToken::Caret => lex("Token::BinOpEq(BinOpToken::Caret)"),
token::BinOpToken::And => lex("Token::BinOpEq(BinOpToken::And)"),
token::BinOpToken::Or => lex("Token::BinOpEq(BinOpToken::Or)"),
token::BinOpToken::Shl => lex("Token::BinOpEq(BinOpToken::Shl)"),
token::BinOpToken::Shr => lex("Token::BinOpEq(BinOpToken::Shr)"),
}
}
pub fn build_delim_tok(dt: token::DelimToken) -> TokenStream {
match dt {
token::DelimToken::Paren => lex("DelimToken::Paren"),
token::DelimToken::Bracket => lex("DelimToken::Bracket"),
token::DelimToken::Brace => lex("DelimToken::Brace"),
token::DelimToken::NoDelim => lex("DelimToken::NoDelim"),
}
}
pub fn build_token_tt(t: Token) -> TokenStream {
match t {
Token::Eq => lex("Token::Eq"),
Token::Lt => lex("Token::Lt"),
Token::Le => lex("Token::Le"),
Token::EqEq => lex("Token::EqEq"),
Token::Ne => lex("Token::Ne"),
Token::Ge => lex("Token::Ge"),
Token::Gt => lex("Token::Gt"),
Token::AndAnd => lex("Token::AndAnd"),
Token::OrOr => lex("Token::OrOr"),
Token::Not => lex("Token::Not"),
Token::Tilde => lex("Token::Tilde"),
Token::BinOp(tok) => build_binop_tok(tok),
Token::BinOpEq(tok) => build_binopeq_tok(tok),
Token::At => lex("Token::At"),
Token::Dot => lex("Token::Dot"),
Token::DotDot => lex("Token::DotDot"),
Token::DotDotDot => lex("Token::DotDotDot"),
Token::Comma => lex("Token::Comma"),
Token::Semi => lex("Token::Semi"),
Token::Colon => lex("Token::Colon"),
Token::ModSep => lex("Token::ModSep"),
Token::RArrow => lex("Token::RArrow"),
Token::LArrow => lex("Token::LArrow"),
Token::FatArrow => lex("Token::FatArrow"),
Token::Pound => lex("Token::Pound"),
Token::Dollar => lex("Token::Dollar"),
Token::Question => lex("Token::Question"),
Token::OpenDelim(dt) => {
match dt {
token::DelimToken::Paren => lex("Token::OpenDelim(DelimToken::Paren)"),
token::DelimToken::Bracket => lex("Token::OpenDelim(DelimToken::Bracket)"),
token::DelimToken::Brace => lex("Token::OpenDelim(DelimToken::Brace)"),
token::DelimToken::NoDelim => lex("DelimToken::NoDelim"),
}
}
Token::CloseDelim(dt) => {
match dt {
token::DelimToken::Paren => lex("Token::CloseDelim(DelimToken::Paren)"),
token::DelimToken::Bracket => lex("Token::CloseDelim(DelimToken::Bracket)"),
token::DelimToken::Brace => lex("Token::CloseDelim(DelimToken::Brace)"),
token::DelimToken::NoDelim => lex("DelimToken::NoDelim"),
}
}
Token::Underscore => lex("_"),
Token::Literal(lit, sfx) => emit_lit(lit, sfx),
// fix ident expansion information... somehow
Token::Ident(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", ident.name)),
Token::Lifetime(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))",
ident.name)),
_ => panic!("Unhandled case!"),
}
}
// ____________________________________________________________________________________________
// Conversion operators
pub fn as_tt(t: Token) -> TokenTree {
// FIXME do something nicer with the spans
TokenTree::Token(DUMMY_SP, t)
}
// ____________________________________________________________________________________________
// Build Procedures
/// Takes `input` and returns `vec![input]`.
pub fn build_vec(ts: TokenStream) -> TokenStream {
build_mac_call(str_to_ident("vec"), ts)
// tts.clone().to_owned()
}
/// Takes `ident` and `rhs` and produces `let ident = rhs;`.
pub fn build_let(id: Ident, tts: TokenStream) -> TokenStream {
concat(from_tokens(vec![keyword_to_token_ident(keywords::Let),
Token::Ident(id),
Token::Eq]),
concat(tts, from_tokens(vec![Token::Semi])))
}
/// Takes `ident ...`, and `args ...` and produces `ident::...(args ...)`.
pub fn build_mod_call(ids: Vec<Ident>, args: TokenStream) -> TokenStream {
let call = from_tokens(intersperse(ids.into_iter().map(|id| Token::Ident(id)).collect(),
Token::ModSep));
concat(call, build_paren_delimited(args))
}
/// Takes `ident` and `args ...` and produces `ident(args ...)`.
pub fn build_fn_call(name: Ident, args: TokenStream) -> TokenStream {
concat(from_tokens(vec![Token::Ident(name)]), build_paren_delimited(args))
}
/// Takes `ident` and `args ...` and produces `ident!(args ...)`.
pub fn build_mac_call(name: Ident, args: TokenStream) -> TokenStream {
concat(from_tokens(vec![Token::Ident(name), Token::Not]),
build_paren_delimited(args))
}
// ____________________________________________________________________________________________
// Utilities
/// A wrapper around `TokenStream::from_tokens` to avoid extra namespace specification and
/// provide it as a generic operator.
pub fn from_tokens(tokens: Vec<Token>) -> TokenStream {
TokenStream::from_tokens(tokens)
}
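/// Intersperses `t` between the elements of `vs`; e.g. `intersperse(vec![a, b, c], sep)`
/// yields `[a, sep, b, sep, c]`.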
pub fn intersperse<T>(vs: Vec<T>, t: T) -> Vec<T>
where T: Clone
{
if vs.len() < 2 {
return vs;
}
let mut output = vec![vs.get(0).unwrap().to_owned()];
for v in vs.into_iter().skip(1) {
output.push(t.clone());
output.push(v);
}
output
}
}