Auto merge of #21677 - japaric:no-range, r=alexcrichton

Note: Do not merge until we get a newer snapshot that includes #21374

There was some type inference fallout (see 4th commit) because type inference with `a..b` is not as good as with `range(a, b)` (see #21672).
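
For readers unfamiliar with the old API: `range(a, b)` was a free function in `std::iter` (roughly `fn range<A: Int>(start: A, stop: A) -> Range<A>`), so its signature unified both endpoints to a single type, while `a..b` desugars to a `std::ops::Range` literal and leaves more work to inference, which is the gap #21672 tracks. A minimal sketch of the migration pattern applied throughout this diff, written in post-1.0 syntax (no `us` suffixes) with illustrative names:

```rust
fn main() {
    let self_args = vec!["a", "b", "c"];

    // Old style, removed by this PR:
    //     for i in range(0, self_args.len()) { ... }

    // New style: `0..self_args.len()` desugars to
    // std::ops::Range { start: 0, end: self_args.len() },
    // which implements Iterator directly.
    for i in 0..self_args.len() {
        println!("arg {}: {}", i, self_args[i]);
    }
}
```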

r? @alexcrichton
bors
2015-01-29 16:28:52 +00:00
366 changed files with 1314 additions and 1337 deletions

View File

@@ -73,7 +73,7 @@ impl<F> ItemModifier for F
}
}
-#[derive(Show,Clone)]
+#[derive(Debug,Clone)]
pub enum Annotatable {
Item(P<ast::Item>),
TraitItem(ast::TraitItem),

View File

@@ -770,7 +770,7 @@ impl<'a> MethodDef<'a> {
let mut raw_fields = Vec::new(); // ~[[fields of self],
// [fields of next Self arg], [etc]]
let mut patterns = Vec::new();
-for i in range(0us, self_args.len()) {
+for i in 0us..self_args.len() {
let struct_path= cx.path(DUMMY_SP, vec!( type_ident ));
let (pat, ident_expr) =
trait_.create_struct_pattern(cx,
@@ -1154,7 +1154,7 @@ impl<'a> MethodDef<'a> {
// to an uninhabited type (e.g. a zero-variant enum or a
// type holding such an enum), but do not feature-gate
// zero-variant enums themselves, then attempting to
-// derive Show on such a type could here generate code
+// derive Debug on such a type could here generate code
// that needs the feature gate enabled.)
return cx.expr_unreachable(sp);

View File

@@ -107,8 +107,14 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt,
"Rand" => expand!(rand::expand_deriving_rand),
// NOTE(stage0): remove "Show"
"Show" => expand!(show::expand_deriving_show),
"Show" => {
cx.span_warn(titem.span,
"derive(Show) is deprecated \
in favor of derive(Debug)");
expand!(show::expand_deriving_show)
},
"Debug" => expand!(show::expand_deriving_show),
"Default" => expand!(default::expand_deriving_default),

View File

@@ -661,7 +661,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
name_ordering: Vec<String>,
names: HashMap<String, P<ast::Expr>>)
-> P<ast::Expr> {
-let arg_types: Vec<_> = range(0, args.len()).map(|_| None).collect();
+let arg_types: Vec<_> = (0..args.len()).map(|_| None).collect();
let mut cx = Context {
ecx: ecx,
args: args,
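
One detail this hunk highlights: when the range feeds straight into an iterator adaptor, it has to be parenthesized, because a method call binds tighter than `..`. A small illustrative sketch (names are made up, not from `format.rs`):

```rust
fn main() {
    let args = vec!["{}", "{:?}"];

    // Without the parentheses this would parse as
    // `0..(args.len().map(|_| None))` and fail to compile,
    // since usize has no `map` method.
    let arg_types: Vec<Option<String>> = (0..args.len()).map(|_| None).collect();
    assert_eq!(arg_types.len(), args.len());
}
```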

View File

@@ -38,7 +38,7 @@ pub struct SCTable {
rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>,
}
-#[derive(PartialEq, RustcEncodable, RustcDecodable, Hash, Show, Copy)]
+#[derive(PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum SyntaxContext_ {
EmptyCtxt,
Mark (Mrk,SyntaxContext),
@@ -309,7 +309,7 @@ mod tests {
// because of the SCTable, I now need a tidy way of
// creating syntax objects. Sigh.
-#[derive(Clone, PartialEq, Show)]
+#[derive(Clone, PartialEq, Debug)]
enum TestSC {
M(Mrk),
R(Ident,Name)

View File

@@ -665,7 +665,7 @@ fn mk_tt(cx: &ExtCtxt, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
}
ref tt @ ast::TtToken(_, MatchNt(..)) => {
let mut seq = vec![];
-for i in range(0, tt.len()) {
+for i in 0..tt.len() {
seq.push(tt.get_tt(i));
}
mk_tts(cx, &seq[])

View File

@@ -166,7 +166,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: BytePos)
-> Box<MatcherPos> {
let match_idx_hi = count_names(&ms[]);
-let matches: Vec<_> = range(0, match_idx_hi).map(|_| Vec::new()).collect();
+let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect();
box MatcherPos {
stack: vec![],
top_elts: TtSeq(ms),
@@ -339,7 +339,7 @@ pub fn parse(sess: &ParseSess,
// most of the time.
// Only touch the binders we have actually bound
-for idx in range(ei.match_lo, ei.match_hi) {
+for idx in ei.match_lo..ei.match_hi {
let sub = (ei.matches[idx]).clone();
(&mut new_pos.matches[idx])
.push(Rc::new(MatchedSeq(sub, mk_sp(ei.sp_lo,
@@ -385,14 +385,14 @@ pub fn parse(sess: &ParseSess,
new_ei.match_cur += seq.num_captures;
new_ei.idx += 1us;
//we specifically matched zero repeats.
-for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) {
+for idx in ei.match_cur..ei.match_cur + seq.num_captures {
(&mut new_ei.matches[idx]).push(Rc::new(MatchedSeq(vec![], sp)));
}
cur_eis.push(new_ei);
}
-let matches: Vec<_> = range(0, ei.matches.len())
+let matches: Vec<_> = (0..ei.matches.len())
.map(|_| Vec::new()).collect();
let ei_t = ei;
cur_eis.push(box MatcherPos {
@@ -495,7 +495,7 @@ pub fn parse(sess: &ParseSess,
}
cur_eis.push(ei);
-for _ in range(0, rust_parser.tokens_consumed) {
+for _ in 0..rust_parser.tokens_consumed {
let _ = rdr.next_token();
}
}