Auto merge of #28642 - petrochenkov:name3, r=nrc

This PR removes the remaining stray `Ident`s outside of libsyntax and performs general cleanup.
In particular, the interfaces of `Name` and `Ident` are tidied up; `Name`s and `Ident`s, being small `Copy` aggregates, are now always passed to functions by value; and `Ident`s are never used as keys in maps, because `Ident` comparisons are tricky.
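To make the last point concrete, here is a minimal sketch of the problem (simplified stand-in types, not the actual libsyntax definitions): two idents can intern to the same `Name` yet carry different `SyntaxContext`s, so `==` has to choose between non-hygienic and member-wise semantics, and a map keyed on `Ident` silently commits to one of them.

```rust
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Name(u32);

#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct SyntaxContext(u32);

#[derive(Clone, Copy, Debug)]
struct Ident {
    name: Name,
    ctxt: SyntaxContext,
}

// Non-hygienic comparison: only the interned name matters.
fn name_eq(a: Ident, b: Ident) -> bool {
    a.name == b.name
}

// Member-wise comparison: name and syntax context must both match.
fn exact_eq(a: Ident, b: Ident) -> bool {
    a.name == b.name && a.ctxt == b.ctxt
}

fn main() {
    let plain = Ident { name: Name(3), ctxt: SyntaxContext(0) };
    let expanded = Ident { name: Name(3), ctxt: SyntaxContext(7) }; // same text, produced by macro expansion
    assert!(name_eq(plain, expanded));   // equal if hygiene is ignored
    assert!(!exact_eq(plain, expanded)); // distinct if contexts are compared
}
```

Keying maps on plain `Name`s, as this PR does, sidesteps the ambiguity entirely.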

Although this PR closes https://github.com/rust-lang/rust/issues/6993, there's still work related to it:
- `Name` can be made `NonZero` to compress the numerous `Option<Name>`s and `Option<Ident>`s (see the size sketch after this list), but it requires const unsafe functions.
- The implementation of `PartialEq` on `Ident` should be eliminated and replaced with explicit hygienic, non-hygienic, or member-wise comparisons.
- Finally, large parts of the AST can potentially be converted to `Name`s in the same way as the HIR, to clearly separate identifiers used in hygienic and non-hygienic contexts.
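A rough illustration of the `NonZero` point, using today's `std::num::NonZeroU32` purely as a stand-in for the unstable `NonZero` wrapper mentioned above (the `Name` definition here is hypothetical, not the libsyntax one):

```rust
use std::mem::size_of;
use std::num::NonZeroU32;

// Hypothetical: Name wrapping a NonZeroU32 instead of a plain u32.
#[derive(Clone, Copy)]
struct Name(NonZeroU32);

fn main() {
    // The forbidden zero bit pattern is reused to encode `None`,
    // so the Option costs no extra space.
    assert_eq!(size_of::<Option<Name>>(), size_of::<u32>());
    // A plain u32 has no spare bit pattern, so its Option needs a separate tag.
    assert_eq!(size_of::<Option<u32>>(), 2 * size_of::<u32>());
}
```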

r? @nrc
Committed by bors on 2015-09-26 14:48:56 +00:00
70 changed files with 337 additions and 401 deletions


@@ -67,40 +67,38 @@ use std::fmt;
use std::rc::Rc;
use serialize::{Encodable, Decodable, Encoder, Decoder};
// FIXME #6993: in librustc, uses of "ident" should be replaced
// by just "Name".
/// A name is a part of an identifier, representing a string or gensym. It's
/// the result of interning.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Name(pub u32);
/// A SyntaxContext represents a chain of macro-expandings
/// and renamings. Each macro expansion corresponds to
/// a fresh u32. This u32 is a reference to a table stored
// in thread-local storage.
// The special value EMPTY_CTXT is used to indicate an empty
// syntax context.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct SyntaxContext(pub u32);
/// An identifier contains a Name (index into the interner
/// table) and a SyntaxContext to track renaming and
/// macro expansion per Flatt et al., "Macros
/// That Work Together"
#[derive(Clone, Copy, Hash, PartialOrd, Eq, Ord)]
/// macro expansion per Flatt et al., "Macros That Work Together"
#[derive(Clone, Copy, Eq, Hash)]
pub struct Ident {
pub name: Name,
pub ctxt: SyntaxContext
}
impl Ident {
/// Construct an identifier with the given name and an empty context:
pub fn new(name: Name) -> Ident { Ident {name: name, ctxt: EMPTY_CTXT}}
}
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}#{}", self.name, self.ctxt)
}
}
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.name, f)
impl Name {
pub fn as_str(self) -> token::InternedString {
token::InternedString::new_from_name(self)
}
}
impl fmt::Debug for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Name(nm) = *self;
write!(f, "{}({})", self, nm)
write!(f, "{}({})", self, self.0)
}
}
@@ -110,6 +108,29 @@ impl fmt::Display for Name {
}
}
impl Encodable for Name {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
}
}
impl Decodable for Name {
fn decode<D: Decoder>(d: &mut D) -> Result<Name, D::Error> {
Ok(token::intern(&try!(d.read_str())[..]))
}
}
pub const EMPTY_CTXT : SyntaxContext = SyntaxContext(0);
impl Ident {
pub fn new(name: Name, ctxt: SyntaxContext) -> Ident {
Ident {name: name, ctxt: ctxt}
}
pub fn with_empty_ctxt(name: Name) -> Ident {
Ident {name: name, ctxt: EMPTY_CTXT}
}
}
impl PartialEq for Ident {
fn eq(&self, other: &Ident) -> bool {
if self.ctxt == other.ctxt {
@@ -119,74 +140,27 @@ impl PartialEq for Ident {
// idents that have different contexts. You can't fix this without
// knowing whether the comparison should be hygienic or non-hygienic.
// if it should be non-hygienic (most things are), just compare the
// 'name' fields of the idents. Or, even better, replace the idents
// with Name's.
// 'name' fields of the idents.
//
// On the other hand, if the comparison does need to be hygienic,
// one example and its non-hygienic counterpart would be:
// syntax::parse::token::Token::mtwt_eq
// syntax::ext::tt::macro_parser::token_name_eq
panic!("not allowed to compare these idents: {:?}, {:?}. \
Probably related to issue \\#6993", self, other);
panic!("idents with different contexts are compared with operator `==`: \
{:?}, {:?}.", self, other);
}
}
}
/// A SyntaxContext represents a chain of macro-expandings
/// and renamings. Each macro expansion corresponds to
/// a fresh u32
// I'm representing this syntax context as an index into
// a table, in order to work around a compiler bug
// that's causing unreleased memory to cause core dumps
// and also perhaps to save some work in destructor checks.
// the special uint '0' will be used to indicate an empty
// syntax context.
// this uint is a reference to a table stored in thread-local
// storage.
pub type SyntaxContext = u32;
pub const EMPTY_CTXT : SyntaxContext = 0;
pub const ILLEGAL_CTXT : SyntaxContext = 1;
/// A name is a part of an identifier, representing a string or gensym. It's
/// the result of interning.
#[derive(Eq, Ord, PartialEq, PartialOrd, Hash, Clone, Copy)]
pub struct Name(pub u32);
impl<T: AsRef<str>> PartialEq<T> for Name {
fn eq(&self, other: &T) -> bool {
self.as_str() == other.as_ref()
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}#{}", self.name, self.ctxt.0)
}
}
impl Name {
pub fn as_str(&self) -> token::InternedString {
token::InternedString::new_from_name(*self)
}
pub fn usize(&self) -> usize {
let Name(nm) = *self;
nm as usize
}
pub fn ident(&self) -> Ident {
Ident { name: *self, ctxt: 0 }
}
}
/// A mark represents a unique id associated with a macro expansion
pub type Mrk = u32;
impl Encodable for Name {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
}
}
impl Decodable for Name {
fn decode<D: Decoder>(d: &mut D) -> Result<Name, D::Error> {
Ok(token::intern(&try!(d.read_str())[..]))
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.name, f)
}
}
@@ -202,8 +176,8 @@ impl Decodable for Ident {
}
}
/// Function name (not all functions have names)
pub type FnIdent = Option<Ident>;
/// A mark represents a unique id associated with a macro expansion
pub type Mrk = u32;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime {
@@ -841,19 +815,16 @@ pub enum Expr_ {
///
/// This is desugared to a `match` expression.
ExprIfLet(P<Pat>, P<Expr>, P<Block>, Option<P<Expr>>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A while loop, with an optional label
///
/// `'label: while expr { block }`
ExprWhile(P<Expr>, P<Block>, Option<Ident>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A while-let loop, with an optional label
///
/// `'label: while let pat = expr { block }`
///
/// This is desugared to a combination of `loop` and `match` expressions.
ExprWhileLet(P<Pat>, P<Expr>, P<Block>, Option<Ident>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A for loop, with an optional label
///
/// `'label: for pat in expr { block }`
@@ -863,7 +834,6 @@ pub enum Expr_ {
/// Conditionless loop (can be exited with break, continue, or return)
///
/// `'label: loop { block }`
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
ExprLoop(P<Block>, Option<Ident>),
/// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind.
@@ -1223,13 +1193,6 @@ pub struct MutTy {
pub mutbl: Mutability,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TypeField {
pub ident: Ident,
pub mt: MutTy,
pub span: Span,
}
/// Represents a method's signature in a trait declaration,
/// or in an implementation.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]


@@ -576,21 +576,21 @@ mod tests {
use ast::*;
use super::*;
fn ident_to_segment(id : &Ident) -> PathSegment {
PathSegment {identifier: id.clone(),
fn ident_to_segment(id: Ident) -> PathSegment {
PathSegment {identifier: id,
parameters: PathParameters::none()}
}
#[test] fn idents_name_eq_test() {
assert!(segments_name_eq(
&[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>()));
&[Ident::new(Name(3),SyntaxContext(4)), Ident::new(Name(78),SyntaxContext(82))]
.iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident::new(Name(3),SyntaxContext(104)), Ident::new(Name(78),SyntaxContext(182))]
.iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>()));
assert!(!segments_name_eq(
&[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>()));
&[Ident::new(Name(3),SyntaxContext(4)), Ident::new(Name(78),SyntaxContext(82))]
.iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident::new(Name(3),SyntaxContext(104)), Ident::new(Name(77),SyntaxContext(182))]
.iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>()));
}
}


@@ -1083,7 +1083,6 @@ pub struct MalformedCodemapPositions {
#[cfg(test)]
mod tests {
use super::*;
use std::rc::Rc;
#[test]
fn t1 () {


@@ -842,7 +842,7 @@ pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
#[cfg(test)]
mod test {
use super::{EmitterWriter, Level};
use codemap::{mk_sp, CodeMap, BytePos};
use codemap::{mk_sp, CodeMap};
use std::sync::{Arc, Mutex};
use std::io::{self, Write};
use std::str::from_utf8;


@@ -138,7 +138,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
));
}
});
let sym = Ident::new(token::gensym(&format!(
let sym = Ident::with_empty_ctxt(token::gensym(&format!(
"__register_diagnostic_{}", code
)));
MacEager::items(SmallVector::many(vec![


@@ -28,7 +28,7 @@ pub fn entry_point_type(item: &Item, depth: usize) -> EntryPointType {
EntryPointType::Start
} else if attr::contains_name(&item.attrs, "main") {
EntryPointType::MainAttr
} else if item.ident.name == "main" {
} else if item.ident.name.as_str() == "main" {
if depth == 1 {
// This is a top-level function so can be 'main'
EntryPointType::MainNamed


@@ -646,7 +646,7 @@ impl<'a> ExtCtxt<'a> {
loop {
if self.codemap().with_expn_info(expn_id, |info| {
info.map_or(None, |i| {
if i.callee.name() == "include" {
if i.callee.name().as_str() == "include" {
// Stop going up the backtrace once include! is encountered
return None;
}
@@ -899,9 +899,9 @@ impl SyntaxEnv {
unreachable!()
}
pub fn find(&self, k: &Name) -> Option<Rc<SyntaxExtension>> {
pub fn find(&self, k: Name) -> Option<Rc<SyntaxExtension>> {
for frame in self.chain.iter().rev() {
match frame.map.get(k) {
match frame.map.get(&k) {
Some(v) => return Some(v.clone()),
None => {}
}


@@ -73,7 +73,6 @@ pub trait AstBuilder {
fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField;
fn typaram(&self,
span: Span,
@@ -443,14 +442,6 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
Vec::new()))
}
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField {
ast::TypeField {
ident: name,
mt: ast::MutTy { ty: ty, mutbl: ast::MutImmutable },
span: span,
}
}
fn ty_infer(&self, span: Span) -> P<ast::Ty> {
self.ty(span, ast::TyInfer)
}


@@ -524,7 +524,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac,
return None;
}
let extname = pth.segments[0].identifier.name;
match fld.cx.syntax_env.find(&extname) {
match fld.cx.syntax_env.find(extname) {
None => {
fld.cx.span_err(
pth.span,
@@ -593,7 +593,7 @@ fn expand_loop_block(loop_block: P<Block>,
fld: &mut MacroExpander) -> (P<Block>, Option<Ident>) {
match opt_ident {
Some(label) => {
let new_label = fresh_name(&label);
let new_label = fresh_name(label);
let rename = (label, new_label);
// The rename *must not* be added to the pending list of current
@@ -689,7 +689,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
let fm = fresh_mark();
let items = {
let expanded = match fld.cx.syntax_env.find(&extname) {
let expanded = match fld.cx.syntax_env.find(extname) {
None => {
fld.cx.span_err(path_span,
&format!("macro undefined: '{}!'",
@@ -892,7 +892,7 @@ fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroE
// generate fresh names, push them to a new pending list
let idents = pattern_bindings(&*expanded_pat);
let mut new_pending_renames =
idents.iter().map(|ident| (*ident, fresh_name(ident))).collect();
idents.iter().map(|ident| (*ident, fresh_name(*ident))).collect();
// rewrite the pattern using the new names (the old
// ones have already been applied):
let rewritten_pat = {
@@ -951,7 +951,7 @@ fn expand_arm(arm: ast::Arm, fld: &mut MacroExpander) -> ast::Arm {
// all of the pats must have the same set of bindings, so use the
// first one to extract them and generate new names:
let idents = pattern_bindings(&*expanded_pats[0]);
let new_renames = idents.into_iter().map(|id| (id, fresh_name(&id))).collect();
let new_renames = idents.into_iter().map(|id| (id, fresh_name(id))).collect();
// apply the renaming, but only to the PatIdents:
let mut rename_pats_fld = PatIdentRenamer{renames:&new_renames};
let rewritten_pats = expanded_pats.move_map(|pat| rename_pats_fld.fold_pat(pat));
@@ -1061,7 +1061,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
return DummyResult::raw_pat(span);
}
let extname = pth.segments[0].identifier.name;
let marked_after = match fld.cx.syntax_env.find(&extname) {
let marked_after = match fld.cx.syntax_env.find(extname) {
None => {
fld.cx.span_err(pth.span,
&format!("macro undefined: '{}!'",
@@ -1134,10 +1134,7 @@ pub struct IdentRenamer<'a> {
impl<'a> Folder for IdentRenamer<'a> {
fn fold_ident(&mut self, id: Ident) -> Ident {
Ident {
name: id.name,
ctxt: mtwt::apply_renames(self.renames, id.ctxt),
}
Ident::new(id.name, mtwt::apply_renames(self.renames, id.ctxt))
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self)
@@ -1161,8 +1158,8 @@ impl<'a> Folder for PatIdentRenamer<'a> {
pat.map(|ast::Pat {id, node, span}| match node {
ast::PatIdent(binding_mode, Spanned{span: sp, node: ident}, sub) => {
let new_ident = Ident{name: ident.name,
ctxt: mtwt::apply_renames(self.renames, ident.ctxt)};
let new_ident = Ident::new(ident.name,
mtwt::apply_renames(self.renames, ident.ctxt));
let new_node =
ast::PatIdent(binding_mode,
Spanned{span: self.new_span(sp), node: new_ident},
@@ -1254,7 +1251,7 @@ macro_rules! partition {
fld: &MacroExpander)
-> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
attrs.iter().cloned().partition(|attr| {
match fld.cx.syntax_env.find(&intern(&attr.name())) {
match fld.cx.syntax_env.find(intern(&attr.name())) {
Some(rc) => match *rc {
$variant(..) => true,
_ => false
@@ -1276,7 +1273,7 @@ fn expand_decorators(a: Annotatable,
{
for attr in a.attrs() {
let mname = intern(&attr.name());
match fld.cx.syntax_env.find(&mname) {
match fld.cx.syntax_env.find(mname) {
Some(rc) => match *rc {
MultiDecorator(ref dec) => {
attr::mark_used(&attr);
@@ -1327,7 +1324,7 @@ fn expand_item_multi_modifier(mut it: Annotatable,
for attr in &modifiers {
let mname = intern(&attr.name());
match fld.cx.syntax_env.find(&mname) {
match fld.cx.syntax_env.find(mname) {
Some(rc) => match *rc {
MultiModifier(ref mac) => {
attr::mark_used(attr);
@@ -1407,7 +1404,7 @@ fn expand_and_rename_fn_decl_and_block(fn_decl: P<ast::FnDecl>, block: P<ast::Bl
let expanded_decl = fld.fold_fn_decl(fn_decl);
let idents = fn_decl_arg_bindings(&*expanded_decl);
let renames =
idents.iter().map(|id : &ast::Ident| (*id,fresh_name(id))).collect();
idents.iter().map(|id| (*id,fresh_name(*id))).collect();
// first, a renamer for the PatIdents, for the fn_decl:
let mut rename_pat_fld = PatIdentRenamer{renames: &renames};
let rewritten_fn_decl = rename_pat_fld.fold_fn_decl(expanded_decl);
@@ -1628,10 +1625,7 @@ struct Marker { mark: Mrk }
impl Folder for Marker {
fn fold_ident(&mut self, id: Ident) -> Ident {
ast::Ident {
name: id.name,
ctxt: mtwt::apply_mark(self.mark, id.ctxt)
}
ast::Ident::new(id.name, mtwt::apply_mark(self.mark, id.ctxt))
}
fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac {
Spanned {
@@ -2104,7 +2098,7 @@ foo_module!();
// find the xx binding
let bindings = crate_bindings(&cr);
let cxbinds: Vec<&ast::Ident> =
bindings.iter().filter(|b| b.name == "xx").collect();
bindings.iter().filter(|b| b.name.as_str() == "xx").collect();
let cxbinds: &[&ast::Ident] = &cxbinds[..];
let cxbind = match (cxbinds.len(), cxbinds.get(0)) {
(1, Some(b)) => *b,
@@ -2116,7 +2110,7 @@ foo_module!();
// the xx binding should bind all of the xx varrefs:
for (idx,v) in varrefs.iter().filter(|p| {
p.segments.len() == 1
&& p.segments[0].identifier.name == "xx"
&& p.segments[0].identifier.name.as_str() == "xx"
}).enumerate() {
if mtwt::resolve(v.segments[0].identifier) != resolved_binding {
println!("uh oh, xx binding didn't match xx varref:");


@@ -35,7 +35,7 @@ use std::collections::HashMap;
pub struct SCTable {
table: RefCell<Vec<SyntaxContext_>>,
mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>,
rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>,
rename_memo: RefCell<HashMap<(SyntaxContext,Name,SyntaxContext,Name),SyntaxContext>>,
}
#[derive(PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy, Clone)]
@@ -66,8 +66,9 @@ pub fn apply_mark(m: Mrk, ctxt: SyntaxContext) -> SyntaxContext {
/// Extend a syntax context with a given mark and sctable (explicit memoization)
fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext {
let key = (ctxt, m);
* table.mark_memo.borrow_mut().entry(key)
.or_insert_with(|| idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt)))
*table.mark_memo.borrow_mut().entry(key).or_insert_with(|| {
SyntaxContext(idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt)))
})
}
/// Extend a syntax context with a given rename
@@ -81,10 +82,11 @@ fn apply_rename_internal(id: Ident,
to: Name,
ctxt: SyntaxContext,
table: &SCTable) -> SyntaxContext {
let key = (ctxt, id, to);
let key = (ctxt, id.name, id.ctxt, to);
* table.rename_memo.borrow_mut().entry(key)
.or_insert_with(|| idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt)))
*table.rename_memo.borrow_mut().entry(key).or_insert_with(|| {
SyntaxContext(idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt)))
})
}
/// Apply a list of renamings to a context
@@ -185,20 +187,20 @@ fn resolve_internal(id: Ident,
}
let resolved = {
let result = (*table.table.borrow())[id.ctxt as usize];
let result = (*table.table.borrow())[id.ctxt.0 as usize];
match result {
EmptyCtxt => id.name,
// ignore marks here:
Mark(_,subctxt) =>
resolve_internal(Ident{name:id.name, ctxt: subctxt},
resolve_internal(Ident::new(id.name, subctxt),
table, resolve_table),
// do the rename if necessary:
Rename(Ident{name, ctxt}, toname, subctxt) => {
let resolvedfrom =
resolve_internal(Ident{name:name, ctxt:ctxt},
resolve_internal(Ident::new(name, ctxt),
table, resolve_table);
let resolvedthis =
resolve_internal(Ident{name:id.name, ctxt:subctxt},
resolve_internal(Ident::new(id.name, subctxt),
table, resolve_table);
if (resolvedthis == resolvedfrom)
&& (marksof_internal(ctxt, resolvedthis, table)
@@ -229,7 +231,7 @@ fn marksof_internal(ctxt: SyntaxContext,
let mut result = Vec::new();
let mut loopvar = ctxt;
loop {
let table_entry = (*table.table.borrow())[loopvar as usize];
let table_entry = (*table.table.borrow())[loopvar.0 as usize];
match table_entry {
EmptyCtxt => {
return result;
@@ -256,7 +258,7 @@ fn marksof_internal(ctxt: SyntaxContext,
/// FAILS when outside is not a mark.
pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
with_sctable(|sctable| {
match (*sctable.table.borrow())[ctxt as usize] {
match (*sctable.table.borrow())[ctxt.0 as usize] {
Mark(mrk, _) => mrk,
_ => panic!("can't retrieve outer mark when outside is not a mark")
}
@@ -302,7 +304,7 @@ mod tests {
}
fn id(n: u32, s: SyntaxContext) -> Ident {
Ident {name: Name(n), ctxt: s}
Ident::new(Name(n), s)
}
// because of the SCTable, I now need a tidy way of
@@ -328,7 +330,7 @@ mod tests {
let mut result = Vec::new();
loop {
let table = table.table.borrow();
match (*table)[sc as usize] {
match (*table)[sc.0 as usize] {
EmptyCtxt => {return result;},
Mark(mrk,tail) => {
result.push(M(mrk));
@@ -349,15 +351,15 @@ mod tests {
fn test_unfold_refold(){
let mut t = new_sctable_internal();
let test_sc = vec!(M(3),R(id(101,0),Name(14)),M(9));
assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4);
let test_sc = vec!(M(3),R(id(101,EMPTY_CTXT),Name(14)),M(9));
assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),SyntaxContext(4));
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(9,0));
assert!((*table)[3] == Rename(id(101,0),Name(14),2));
assert!((*table)[4] == Mark(3,3));
assert!((*table)[2] == Mark(9,EMPTY_CTXT));
assert!((*table)[3] == Rename(id(101,EMPTY_CTXT),Name(14),SyntaxContext(2)));
assert!((*table)[4] == Mark(3,SyntaxContext(3)));
}
assert_eq!(refold_test_sc(4,&t),test_sc);
assert_eq!(refold_test_sc(SyntaxContext(4),&t),test_sc);
}
// extend a syntax context with a sequence of marks given
@@ -371,11 +373,11 @@ mod tests {
#[test] fn unfold_marks_test() {
let mut t = new_sctable_internal();
assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3);
assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),SyntaxContext(3));
{
let table = t.table.borrow();
assert!((*table)[2] == Mark(7,0));
assert!((*table)[3] == Mark(3,2));
assert!((*table)[2] == Mark(7,EMPTY_CTXT));
assert!((*table)[3] == Mark(3,SyntaxContext(2)));
}
}
@@ -396,7 +398,7 @@ mod tests {
assert_eq! (marksof_internal (ans, stopname,&t), [16]);}
// rename where stop doesn't match:
{ let chain = vec!(M(9),
R(id(name1.usize() as u32,
R(id(name1.0,
apply_mark_internal (4, EMPTY_CTXT,&mut t)),
Name(100101102)),
M(14));
@@ -405,7 +407,7 @@ mod tests {
// rename where stop does match
{ let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t);
let chain = vec!(M(9),
R(id(name1.usize() as u32, name1sc),
R(id(name1.0, name1sc),
stopname),
M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
@@ -474,10 +476,10 @@ mod tests {
#[test]
fn hashing_tests () {
let mut t = new_sctable_internal();
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2);
assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),3);
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),SyntaxContext(2));
assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),SyntaxContext(3));
// using the same one again should result in the same index:
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2);
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),SyntaxContext(2));
// I'm assuming that the rename table will behave the same....
}
@@ -496,10 +498,10 @@ mod tests {
#[test]
fn new_resolves_test() {
let renames = vec!((Ident{name:Name(23),ctxt:EMPTY_CTXT},Name(24)),
(Ident{name:Name(29),ctxt:EMPTY_CTXT},Name(29)));
let renames = vec!((Ident::with_empty_ctxt(Name(23)),Name(24)),
(Ident::with_empty_ctxt(Name(29)),Name(29)));
let new_ctxt1 = apply_renames(&renames,EMPTY_CTXT);
assert_eq!(resolve(Ident{name:Name(23),ctxt:new_ctxt1}),Name(24));
assert_eq!(resolve(Ident{name:Name(29),ctxt:new_ctxt1}),Name(29));
assert_eq!(resolve(Ident::new(Name(23),new_ctxt1)),Name(24));
assert_eq!(resolve(Ident::new(Name(29),new_ctxt1)),Name(29));
}
}


@@ -464,7 +464,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
($name: expr, $suffix: expr, $($args: expr),*) => {{
let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]);
let suffix = match $suffix {
Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::new(name))),
Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))),
None => cx.expr_none(sp)
};
cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix])
@@ -489,31 +489,32 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
}
token::Literal(token::Byte(i), suf) => {
let e_byte = mk_name(cx, sp, i.ident());
let e_byte = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
return mk_lit!("Byte", suf, e_byte);
}
token::Literal(token::Char(i), suf) => {
let e_char = mk_name(cx, sp, i.ident());
let e_char = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
return mk_lit!("Char", suf, e_char);
}
token::Literal(token::Integer(i), suf) => {
let e_int = mk_name(cx, sp, i.ident());
let e_int = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
return mk_lit!("Integer", suf, e_int);
}
token::Literal(token::Float(fident), suf) => {
let e_fident = mk_name(cx, sp, fident.ident());
let e_fident = mk_name(cx, sp, ast::Ident::with_empty_ctxt(fident));
return mk_lit!("Float", suf, e_fident);
}
token::Literal(token::Str_(ident), suf) => {
return mk_lit!("Str_", suf, mk_name(cx, sp, ident.ident()))
return mk_lit!("Str_", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))
}
token::Literal(token::StrRaw(ident, n), suf) => {
return mk_lit!("StrRaw", suf, mk_name(cx, sp, ident.ident()), cx.expr_usize(sp, n))
return mk_lit!("StrRaw", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)),
cx.expr_usize(sp, n))
}
token::Ident(ident, style) => {
@@ -535,7 +536,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
token::DocComment(ident) => {
return cx.expr_call(sp,
mk_token_path(cx, sp, "DocComment"),
vec!(mk_name(cx, sp, ident.ident())));
vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))));
}
token::MatchNt(name, kind, namep, kindp) => {


@@ -79,7 +79,7 @@ pub use self::ParseResult::*;
use self::TokenTreeOrTokenTreeVec::*;
use ast;
use ast::{TokenTree, Ident};
use ast::{TokenTree, Name};
use ast::{TtDelimited, TtSequence, TtToken};
use codemap::{BytePos, mk_sp, Span};
use codemap;
@@ -202,9 +202,9 @@ pub enum NamedMatch {
}
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> HashMap<Ident, Rc<NamedMatch>> {
-> HashMap<Name, Rc<NamedMatch>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) {
ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize) {
match m {
&TtSequence(_, ref seq) => {
for next_m in &seq.tts {
@@ -217,7 +217,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
}
}
&TtToken(sp, MatchNt(bind_name, _, _, _)) => {
match ret_val.entry(bind_name) {
match ret_val.entry(bind_name.name) {
Vacant(spot) => {
spot.insert(res[*idx].clone());
*idx += 1;
@@ -246,7 +246,7 @@ pub enum ParseResult<T> {
Error(codemap::Span, String)
}
pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
pub type NamedParseResult = ParseResult<HashMap<Name, Rc<NamedMatch>>>;
pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
/// Perform a token equality check, ignoring syntax context (that is, an


@@ -282,7 +282,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
};
// Extract the arguments:
let lhses = match **argument_map.get(&lhs_nm).unwrap() {
let lhses = match **argument_map.get(&lhs_nm.name).unwrap() {
MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
_ => cx.span_bug(def.span, "wrong-structured lhs")
};
@@ -291,7 +291,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
check_lhs_nt_follows(cx, &**lhs, def.span);
}
let rhses = match **argument_map.get(&rhs_nm).unwrap() {
let rhses = match **argument_map.get(&rhs_nm.name).unwrap() {
MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
_ => cx.span_bug(def.span, "wrong-structured rhs")
};
@@ -510,14 +510,14 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
"pat" => {
match *tok {
FatArrow | Comma | Eq => Ok(true),
Ident(i, _) if i.name == "if" || i.name == "in" => Ok(true),
Ident(i, _) if i.name.as_str() == "if" || i.name.as_str() == "in" => Ok(true),
_ => Ok(false)
}
},
"path" | "ty" => {
match *tok {
Comma | FatArrow | Colon | Eq | Gt | Semi => Ok(true),
Ident(i, _) if i.name == "as" => Ok(true),
Ident(i, _) if i.name.as_str() == "as" => Ok(true),
_ => Ok(false)
}
},


@@ -10,7 +10,7 @@
use self::LockstepIterSize::*;
use ast;
use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident};
use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident, Name};
use codemap::{Span, DUMMY_SP};
use diagnostic::SpanHandler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@@ -38,7 +38,7 @@ pub struct TtReader<'a> {
/// the unzipped tree:
stack: Vec<TtFrame>,
/* for MBE-style macro transcription */
interpolations: HashMap<Ident, Rc<NamedMatch>>,
interpolations: HashMap<Name, Rc<NamedMatch>>,
imported_from: Option<Ident>,
// Some => return imported_from as the next token
@@ -56,7 +56,7 @@ pub struct TtReader<'a> {
/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be None.
pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
interp: Option<HashMap<Name, Rc<NamedMatch>>>,
imported_from: Option<Ident>,
src: Vec<ast::TokenTree>)
-> TtReader<'a> {
@@ -70,7 +70,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be None.
pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
interp: Option<HashMap<Name, Rc<NamedMatch>>>,
imported_from: Option<Ident>,
src: Vec<ast::TokenTree>,
desugar_doc_comments: bool)
@@ -117,7 +117,7 @@ fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<Name
}
fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
let matched_opt = r.interpolations.get(&name).cloned();
let matched_opt = r.interpolations.get(&name.name).cloned();
matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
}


@@ -35,7 +35,7 @@ use codemap::{CodeMap, Span};
use diagnostic::SpanHandler;
use visit;
use visit::{FnKind, Visitor};
use parse::token::{self, InternedString};
use parse::token::InternedString;
use std::ascii::AsciiExt;
use std::cmp;
@@ -673,7 +673,7 @@ struct MacroVisitor<'a> {
impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> {
fn visit_mac(&mut self, mac: &ast::Mac) {
let path = &mac.node.path;
let id = path.segments.last().unwrap().identifier;
let name = path.segments.last().unwrap().identifier.name.as_str();
// Issue 22234: If you add a new case here, make sure to also
// add code to catch the macro during or after expansion.
@@ -683,19 +683,19 @@ impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> {
// catch uses of these macros within conditionally-compiled
// code, e.g. `#[cfg]`-guarded functions.
if id == token::str_to_ident("asm") {
if name == "asm" {
self.context.gate_feature("asm", path.span, EXPLAIN_ASM);
}
else if id == token::str_to_ident("log_syntax") {
else if name == "log_syntax" {
self.context.gate_feature("log_syntax", path.span, EXPLAIN_LOG_SYNTAX);
}
else if id == token::str_to_ident("trace_macros") {
else if name == "trace_macros" {
self.context.gate_feature("trace_macros", path.span, EXPLAIN_TRACE_MACROS);
}
else if id == token::str_to_ident("concat_idents") {
else if name == "concat_idents" {
self.context.gate_feature("concat_idents", path.span, EXPLAIN_CONCAT_IDENTS);
}
}


@@ -40,7 +40,7 @@ impl<'a> ParserAttr for Parser<'a> {
token::DocComment(s) => {
let attr = ::attr::mk_sugared_doc_attr(
attr::mk_attr_id(),
self.id_to_interned_str(s.ident()),
self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)),
self.span.lo,
self.span.hi
);
@@ -137,9 +137,8 @@ impl<'a> ParserAttr for Parser<'a> {
token::DocComment(s) => {
// we need to get the position of this token before we bump.
let Span { lo, hi, .. } = self.span;
let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(),
self.id_to_interned_str(s.ident()),
lo, hi);
let str = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi);
if attr.node.style == ast::AttrInner {
attrs.push(attr);
panictry!(self.bump());


@@ -744,8 +744,8 @@ mod tests {
Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))),
Some(&ast::TtDelimited(_, ref macro_delimed)),
)
if name_macro_rules.name == "macro_rules"
&& name_zip.name == "zip" => {
if name_macro_rules.name.as_str() == "macro_rules"
&& name_zip.name.as_str() == "zip" => {
let tts = &macro_delimed.tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
(
@@ -763,7 +763,7 @@ mod tests {
Some(&ast::TtToken(_, token::Ident(ident, token::Plain))),
)
if first_delimed.delim == token::Paren
&& ident.name == "a" => {},
&& ident.name.as_str() == "a" => {},
_ => panic!("value 3: {:?}", **first_delimed),
}
let tts = &second_delimed.tts[..];
@@ -774,7 +774,7 @@ mod tests {
Some(&ast::TtToken(_, token::Ident(ident, token::Plain))),
)
if second_delimed.delim == token::Paren
&& ident.name == "a" => {},
&& ident.name.as_str() == "a" => {},
_ => panic!("value 4: {:?}", **second_delimed),
}
},


@@ -4658,7 +4658,7 @@ impl<'a> Parser<'a> {
(fields, None)
// Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(token::Paren) {
let fields = try!(self.parse_tuple_struct_body(&class_name, &mut generics));
let fields = try!(self.parse_tuple_struct_body(class_name, &mut generics));
(fields, Some(ast::DUMMY_NODE_ID))
} else {
let token_str = self.this_token_to_string();
@@ -4693,7 +4693,7 @@ impl<'a> Parser<'a> {
}
pub fn parse_tuple_struct_body(&mut self,
class_name: &ast::Ident,
class_name: ast::Ident,
generics: &mut ast::Generics)
-> PResult<Vec<StructField>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;`
@@ -5723,10 +5723,10 @@ impl<'a> Parser<'a> {
Option<ast::Name>)>> {
let ret = match self.token {
token::Literal(token::Str_(s), suf) => {
(self.id_to_interned_str(s.ident()), ast::CookedStr, suf)
(self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)), ast::CookedStr, suf)
}
token::Literal(token::StrRaw(s, n), suf) => {
(self.id_to_interned_str(s.ident()), ast::RawStr(n), suf)
(self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)), ast::RawStr(n), suf)
}
_ => return Ok(None)
};


@@ -453,7 +453,7 @@ macro_rules! declare_special_idents_and_keywords {(
#[allow(non_upper_case_globals)]
pub const $si_static: ast::Ident = ast::Ident {
name: ast::Name($si_name),
ctxt: 0,
ctxt: ast::EMPTY_CTXT,
};
)*
}
@@ -462,7 +462,7 @@ macro_rules! declare_special_idents_and_keywords {(
use ast;
$(
#[allow(non_upper_case_globals)]
pub const $si_static: ast::Name = ast::Name($si_name);
pub const $si_static: ast::Name = ast::Name($si_name);
)*
}
@@ -729,19 +729,19 @@ pub fn gensym(s: &str) -> ast::Name {
/// Maps a string to an identifier with an empty syntax context.
#[inline]
pub fn str_to_ident(s: &str) -> ast::Ident {
ast::Ident::new(intern(s))
ast::Ident::with_empty_ctxt(intern(s))
}
/// Maps a string to a gensym'ed identifier.
#[inline]
pub fn gensym_ident(s: &str) -> ast::Ident {
ast::Ident::new(gensym(s))
ast::Ident::with_empty_ctxt(gensym(s))
}
// create a fresh name that maps to the same string as the old one.
// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src)));
// that is, that the new name and the old one are connected to ptr_eq strings.
pub fn fresh_name(src: &ast::Ident) -> ast::Name {
pub fn fresh_name(src: ast::Ident) -> ast::Name {
let interner = get_ident_interner();
interner.gensym_copy(src.name)
// following: debug version. Could work in final except that it's incompatible with
@@ -753,7 +753,7 @@ pub fn fresh_name(src: &ast::Ident) -> ast::Name {
// create a fresh mark.
pub fn fresh_mark() -> ast::Mrk {
gensym("mark").usize() as u32
gensym("mark").0
}
#[cfg(test)]
@@ -763,7 +763,7 @@ mod tests {
use ext::mtwt;
fn mark_ident(id : ast::Ident, m : ast::Mrk) -> ast::Ident {
ast::Ident { name: id.name, ctxt:mtwt::apply_mark(m, id.ctxt) }
ast::Ident::new(id.name, mtwt::apply_mark(m, id.ctxt))
}
#[test] fn mtwt_token_eq_test() {


@@ -297,7 +297,7 @@ pub fn token_to_string(tok: &Token) -> String {
token::NtBlock(ref e) => block_to_string(&**e),
token::NtStmt(ref e) => stmt_to_string(&**e),
token::NtPat(ref e) => pat_to_string(&**e),
token::NtIdent(ref e, _) => ident_to_string(&**e),
token::NtIdent(ref e, _) => ident_to_string(**e),
token::NtTT(ref e) => tt_to_string(&**e),
token::NtArm(ref e) => arm_to_string(&*e),
token::NtImplItem(ref e) => impl_item_to_string(&**e),
@@ -376,8 +376,8 @@ pub fn path_to_string(p: &ast::Path) -> String {
to_string(|s| s.print_path(p, false, 0))
}
pub fn ident_to_string(id: &ast::Ident) -> String {
to_string(|s| s.print_ident(*id))
pub fn ident_to_string(id: ast::Ident) -> String {
to_string(|s| s.print_ident(id))
}
pub fn fun_to_string(decl: &ast::FnDecl,
@@ -2857,7 +2857,6 @@ impl<'a> State<'a> {
ast::ViewPathSimple(ident, ref path) => {
try!(self.print_path(path, false, 0));
// FIXME(#6993) can't compare identifiers directly here
if path.segments.last().unwrap().identifier.name !=
ident.name {
try!(space(&mut self.s));


@@ -69,7 +69,7 @@ impl<T: Eq + Hash + Clone + 'static> Interner<T> {
pub fn get(&self, idx: Name) -> T {
let vect = self.vect.borrow();
(*vect)[idx.usize()].clone()
(*vect)[idx.0 as usize].clone()
}
pub fn len(&self) -> usize {
@@ -196,13 +196,13 @@ impl StrInterner {
let new_idx = Name(self.len() as u32);
// leave out of map to avoid colliding
let mut vect = self.vect.borrow_mut();
let existing = (*vect)[idx.usize()].clone();
let existing = (*vect)[idx.0 as usize].clone();
vect.push(existing);
new_idx
}
pub fn get(&self, idx: Name) -> RcStr {
(*self.vect.borrow())[idx.usize()].clone()
(*self.vect.borrow())[idx.0 as usize].clone()
}
pub fn len(&self) -> usize {