From a4541b02a33e7c4fdcc8f50459bad6ab99463919 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Wed, 15 Apr 2015 20:56:16 -0700 Subject: [PATCH 01/13] syntax: remove #![feature(box_syntax, box_patterns)] --- src/libsyntax/ext/base.rs | 8 ++++---- src/libsyntax/ext/deriving/cmp/partial_ord.rs | 2 +- src/libsyntax/ext/deriving/decodable.rs | 6 +++--- src/libsyntax/ext/deriving/encodable.rs | 6 +++--- src/libsyntax/ext/deriving/generic/mod.rs | 2 +- src/libsyntax/ext/deriving/generic/ty.rs | 8 ++++---- src/libsyntax/ext/deriving/hash.rs | 2 +- src/libsyntax/ext/deriving/primitive.rs | 4 ++-- src/libsyntax/ext/deriving/show.rs | 2 +- src/libsyntax/ext/source_util.rs | 2 +- src/libsyntax/ext/tt/macro_parser.rs | 12 ++++++------ src/libsyntax/ext/tt/macro_rules.rs | 8 ++++---- src/libsyntax/ext/tt/transcribe.rs | 4 ++-- src/libsyntax/fold.rs | 6 +++--- src/libsyntax/lib.rs | 2 -- src/libsyntax/parse/mod.rs | 6 +++--- src/libsyntax/parse/parser.rs | 6 +++--- src/libsyntax/print/pprust.rs | 2 +- src/libsyntax/ptr.rs | 2 +- 19 files changed, 44 insertions(+), 46 deletions(-) diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 55afac1a1de43..50ab430f148ca 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -262,10 +262,10 @@ macro_rules! make_MacEager { impl MacEager { $( pub fn $fld(v: $t) -> Box { - box MacEager { + Box::new(MacEager { $fld: Some(v), ..Default::default() - } + }) } )* } @@ -331,7 +331,7 @@ impl DummyResult { /// Use this as a return value after hitting any errors and /// calling `span_err`. pub fn any(sp: Span) -> Box { - box DummyResult { expr_only: false, span: sp } + Box::new(DummyResult { expr_only: false, span: sp }) } /// Create a default MacResult that can only be an expression. @@ -340,7 +340,7 @@ impl DummyResult { /// if an error is encountered internally, the user will receive /// an error that they also used it in the wrong place. pub fn expr(sp: Span) -> Box { - box DummyResult { expr_only: true, span: sp } + Box::new(DummyResult { expr_only: true, span: sp }) } /// A plain dummy expression. 
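For reference, this patch applies two mechanical replacements: the `box` expression becomes a `Box::new` call, and the one `box` pattern becomes a by-reference binding with a deref in the match guard (the `Ptr(ref ty, _) if **ty == Self_` hunk later in this patch, which also adds `PartialEq` to the deriving `Ty`/`PtrTy` enums so the comparison compiles). A minimal stable-Rust sketch of both, using illustrative stand-in types rather than the real definitions from `ext/deriving/generic/ty.rs`:

    // Stand-ins for the deriving AST helpers; the real types live in ty.rs.
    #[derive(PartialEq)]
    enum Ty { Self_, Other }

    enum Arg { Ptr(Box<Ty>) }

    fn main() {
        // Expression form: `box Ty::Other` (needed #![feature(box_syntax)])
        // becomes the stable constructor call.
        let arg = Arg::Ptr(Box::new(Ty::Other));

        // Pattern form: `Arg::Ptr(box Ty::Self_)` (needed #![feature(box_patterns)])
        // becomes a by-reference binding plus a deref in the guard, the same shape
        // as the generic/mod.rs hunk below.
        match arg {
            Arg::Ptr(ref ty) if **ty == Ty::Self_ => println!("pointer to Self"),
            Arg::Ptr(_) => println!("pointer to something else"),
        }
    }
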
diff --git a/src/libsyntax/ext/deriving/cmp/partial_ord.rs b/src/libsyntax/ext/deriving/cmp/partial_ord.rs index 9da2db25f7ea2..fe6a8fea78c70 100644 --- a/src/libsyntax/ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax/ext/deriving/cmp/partial_ord.rs @@ -47,7 +47,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, let ordering_ty = Literal(path_std!(cx, core::cmp::Ordering)); let ret_ty = Literal(Path::new_(pathvec_std!(cx, core::option::Option), None, - vec![box ordering_ty], + vec![Box::new(ordering_ty)], true)); let inline = cx.meta_word(span, InternedString::new("inline")); diff --git a/src/libsyntax/ext/deriving/decodable.rs b/src/libsyntax/ext/deriving/decodable.rs index 14f0004101c81..0b31f06f87d06 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -68,14 +68,14 @@ fn expand_deriving_decodable_imp(cx: &mut ExtCtxt, vec!(), true)))) }, explicit_self: None, - args: vec!(Ptr(box Literal(Path::new_local("__D")), + args: vec!(Ptr(Box::new(Literal(Path::new_local("__D"))), Borrowed(None, MutMutable))), ret_ty: Literal(Path::new_( pathvec_std!(cx, core::result::Result), None, - vec!(box Self_, box Literal(Path::new_( + vec!(Box::new(Self_), Box::new(Literal(Path::new_( vec!["__D", "Error"], None, vec![], false - ))), + )))), true )), attributes: Vec::new(), diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index 175f986f6dda8..92944d649332f 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -144,14 +144,14 @@ fn expand_deriving_encodable_imp(cx: &mut ExtCtxt, vec!(), true)))) }, explicit_self: borrowed_explicit_self(), - args: vec!(Ptr(box Literal(Path::new_local("__S")), + args: vec!(Ptr(Box::new(Literal(Path::new_local("__S"))), Borrowed(None, MutMutable))), ret_ty: Literal(Path::new_( pathvec_std!(cx, core::result::Result), None, - vec!(box Tuple(Vec::new()), box Literal(Path::new_( + vec!(Box::new(Tuple(Vec::new())), Box::new(Literal(Path::new_( vec!["__S", "Error"], None, vec![], false - ))), + )))), true )), attributes: Vec::new(), diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index 4685b4b295417..f73a3969bed82 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -807,7 +807,7 @@ impl<'a> MethodDef<'a> { Self_ if nonstatic => { self_args.push(arg_expr); } - Ptr(box Self_, _) if nonstatic => { + Ptr(ref ty, _) if **ty == Self_ && nonstatic => { self_args.push(cx.expr_deref(trait_.span, arg_expr)) } _ => { diff --git a/src/libsyntax/ext/deriving/generic/ty.rs b/src/libsyntax/ext/deriving/generic/ty.rs index ec13b86a8ae2e..9e8e68c0b8cce 100644 --- a/src/libsyntax/ext/deriving/generic/ty.rs +++ b/src/libsyntax/ext/deriving/generic/ty.rs @@ -24,7 +24,7 @@ use parse::token::special_idents; use ptr::P; /// The types of pointers -#[derive(Clone)] +#[derive(Clone, Eq, PartialEq)] pub enum PtrTy<'a> { /// &'lifetime mut Borrowed(Option<&'a str>, ast::Mutability), @@ -34,7 +34,7 @@ pub enum PtrTy<'a> { /// A path, e.g. `::std::option::Option::` (global). Has support /// for type parameters and a lifetime. -#[derive(Clone)] +#[derive(Clone, Eq, PartialEq)] pub struct Path<'a> { pub path: Vec<&'a str> , pub lifetime: Option<&'a str>, @@ -85,7 +85,7 @@ impl<'a> Path<'a> { } /// A type. 
Supports pointers, Self, and literals -#[derive(Clone)] +#[derive(Clone, Eq, PartialEq)] pub enum Ty<'a> { Self_, /// &/Box/ Ty @@ -109,7 +109,7 @@ pub fn borrowed_explicit_self<'r>() -> Option>> { } pub fn borrowed_self<'r>() -> Ty<'r> { - borrowed(box Self_) + borrowed(Box::new(Self_)) } pub fn nil_ty<'r>() -> Ty<'r> { diff --git a/src/libsyntax/ext/deriving/hash.rs b/src/libsyntax/ext/deriving/hash.rs index 2f6734b1a1433..915d99796152d 100644 --- a/src/libsyntax/ext/deriving/hash.rs +++ b/src/libsyntax/ext/deriving/hash.rs @@ -41,7 +41,7 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt, vec![path_std!(cx, core::hash::Hasher)])], }, explicit_self: borrowed_explicit_self(), - args: vec!(Ptr(box Literal(arg), Borrowed(None, MutMutable))), + args: vec!(Ptr(Box::new(Literal(arg)), Borrowed(None, MutMutable))), ret_ty: nil_ty(), attributes: vec![], combine_substructure: combine_substructure(Box::new(|a, b, c| { diff --git a/src/libsyntax/ext/deriving/primitive.rs b/src/libsyntax/ext/deriving/primitive.rs index 625f759fcedf8..3d0645fd6e3cf 100644 --- a/src/libsyntax/ext/deriving/primitive.rs +++ b/src/libsyntax/ext/deriving/primitive.rs @@ -40,7 +40,7 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, args: vec!(Literal(path_local!(i64))), ret_ty: Literal(Path::new_(pathvec_std!(cx, core::option::Option), None, - vec!(box Self_), + vec!(Box::new(Self_)), true)), // #[inline] liable to cause code-bloat attributes: attrs.clone(), @@ -55,7 +55,7 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, args: vec!(Literal(path_local!(u64))), ret_ty: Literal(Path::new_(pathvec_std!(cx, core::option::Option), None, - vec!(box Self_), + vec!(Box::new(Self_)), true)), // #[inline] liable to cause code-bloat attributes: attrs, diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index f3b0e8a768126..3ce1f6f12ceab 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -25,7 +25,7 @@ pub fn expand_deriving_show(cx: &mut ExtCtxt, push: &mut FnMut(P)) { // &mut ::std::fmt::Formatter - let fmtr = Ptr(box Literal(path_std!(cx, core::fmt::Formatter)), + let fmtr = Ptr(Box::new(Literal(path_std!(cx, core::fmt::Formatter))), Borrowed(None, ast::MutMutable)); let trait_def = TraitDef { diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index d91659bafe4f0..08bb4ca106486 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -128,7 +128,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree } } - box ExpandResult { p: p } + Box::new(ExpandResult { p: p }) } // include_str! 
: read the given file, insert it as a literal string expr diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 58df4038403a6..5521c68e75c69 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -165,7 +165,7 @@ pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: ByteP -> Box { let match_idx_hi = count_names(&ms[..]); let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect(); - box MatcherPos { + Box::new(MatcherPos { stack: vec![], top_elts: TtSeq(ms), sep: sep, @@ -176,7 +176,7 @@ pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: ByteP match_cur: 0, match_hi: match_idx_hi, sp_lo: lo - } + }) } /// NamedMatch is a pattern-match result for a single token::MATCH_NONTERMINAL: @@ -396,7 +396,7 @@ pub fn parse(sess: &ParseSess, let matches: Vec<_> = (0..ei.matches.len()) .map(|_| Vec::new()).collect(); let ei_t = ei; - cur_eis.push(box MatcherPos { + cur_eis.push(Box::new(MatcherPos { stack: vec![], sep: seq.separator.clone(), idx: 0, @@ -407,7 +407,7 @@ pub fn parse(sess: &ParseSess, up: Some(ei_t), sp_lo: sp.lo, top_elts: Tt(TtSequence(sp, seq)), - }); + })); } TtToken(_, MatchNt(..)) => { // Built-in nonterminals never start with these tokens, @@ -533,7 +533,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal { "ty" => token::NtTy(p.parse_ty()), // this could be handled like a token, since it is one "ident" => match p.token { - token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(box sn,b) } + token::Ident(sn,b) => { panictry!(p.bump()); token::NtIdent(Box::new(sn),b) } _ => { let token_str = pprust::token_to_string(&p.token); panic!(p.fatal(&format!("expected ident, found {}", @@ -541,7 +541,7 @@ pub fn parse_nt(p: &mut Parser, sp: Span, name: &str) -> Nonterminal { } }, "path" => { - token::NtPath(box panictry!(p.parse_path(LifetimeAndTypesWithoutColons))) + token::NtPath(Box::new(panictry!(p.parse_path(LifetimeAndTypesWithoutColons)))) } "meta" => token::NtMeta(p.parse_meta_item()), _ => { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 730da6cc59419..27a00290ee01e 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -192,7 +192,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, panictry!(p.check_unknown_macro_variable()); // Let the context choose how to interpret the result. // Weird, but useful for X-macros. - return box ParserAnyMacro { + return Box::new(ParserAnyMacro { parser: RefCell::new(p), // Pass along the original expansion site and the name of the macro @@ -200,7 +200,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, // macro leaves unparsed tokens. 
site_span: sp, macro_ident: name - } + }) } Failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo { best_fail_spot = sp; @@ -281,12 +281,12 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, _ => cx.span_bug(def.span, "wrong-structured rhs") }; - let exp: Box<_> = box MacroRulesMacroExpander { + let exp: Box<_> = Box::new(MacroRulesMacroExpander { name: def.ident, imported_from: def.imported_from, lhses: lhses, rhses: rhses, - }; + }); NormalTT(exp, Some(def.span), def.allow_internal_unstable) } diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index e39b46a2d3e11..368a9f0c27e56 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -294,9 +294,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { // sidestep the interpolation tricks for ident because // (a) idents can be in lots of places, so it'd be a pain // (b) we actually can, since it's a token. - MatchedNonterminal(NtIdent(box sn, b)) => { + MatchedNonterminal(NtIdent(ref sn, b)) => { r.cur_span = sp; - r.cur_tok = token::Ident(sn, b); + r.cur_tok = token::Ident(**sn, b); return ret_val; } MatchedNonterminal(ref other_whole_nt) => { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index d7033ce7e48a4..8ba36cefc65dd 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -677,10 +677,10 @@ pub fn noop_fold_interpolated(nt: token::Nonterminal, fld: &mut T) token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)), token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)), token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)), - token::NtIdent(box id, is_mod_name) => - token::NtIdent(box fld.fold_ident(id), is_mod_name), + token::NtIdent(id, is_mod_name) => + token::NtIdent(Box::new(fld.fold_ident(*id)), is_mod_name), token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), - token::NtPath(box path) => token::NtPath(box fld.fold_path(path)), + token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))), token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&*tt))), } } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 3f36d0e8eda0f..99fb2798e7a3a 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -25,8 +25,6 @@ html_favicon_url = "http://www.rust-lang.org/favicon.ico", html_root_url = "http://doc.rust-lang.org/nightly/")] -#![feature(box_patterns)] -#![feature(box_syntax)] #![feature(collections)] #![feature(core)] #![feature(libc)] diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 4b7b7b66582e1..c078787120f9e 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -288,7 +288,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc) // parsing tt's probably shouldn't require a parser at all. 
let cfg = Vec::new(); let srdr = lexer::StringReader::new(&sess.span_diagnostic, filemap); - let mut p1 = Parser::new(sess, cfg, box srdr); + let mut p1 = Parser::new(sess, cfg, Box::new(srdr)); panictry!(p1.parse_all_token_trees()) } @@ -297,7 +297,7 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec, cfg: ast::CrateConfig) -> Parser<'a> { let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts); - let mut p = Parser::new(sess, cfg, box trdr); + let mut p = Parser::new(sess, cfg, Box::new(trdr)); panictry!(p.check_unknown_macro_variable()); p } @@ -360,7 +360,7 @@ pub mod with_hygiene { use super::lexer::make_reader_with_embedded_idents as make_reader; let cfg = Vec::new(); let srdr = make_reader(&sess.span_diagnostic, filemap); - let mut p1 = Parser::new(sess, cfg, box srdr); + let mut p1 = Parser::new(sess, cfg, Box::new(srdr)); panictry!(p1.parse_all_token_trees()) } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 0515d1ae945bd..dd00ad313215d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -897,7 +897,7 @@ impl<'a> Parser<'a> { self.last_span = self.span; // Stash token for error recovery (sometimes; clone is not necessarily cheap). self.last_token = if self.token.is_ident() || self.token.is_path() { - Some(box self.token.clone()) + Some(Box::new(self.token.clone())) } else { None }; @@ -1578,8 +1578,8 @@ impl<'a> Parser<'a> { token::Interpolated(token::NtPath(_)) => Some(try!(self.bump_and_get())), _ => None, }; - if let Some(token::Interpolated(token::NtPath(box path))) = found { - return Ok(path); + if let Some(token::Interpolated(token::NtPath(path))) = found { + return Ok(*path); } let lo = self.span.lo; diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index c51b5d039784d..1a5c295cdd6db 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -188,7 +188,7 @@ pub fn to_string(f: F) -> String where F: FnOnce(&mut State) -> io::Result<()>, { use std::raw::TraitObject; - let mut s = rust_printer(box Vec::new()); + let mut s = rust_printer(Box::new(Vec::new())); f(&mut s).unwrap(); eof(&mut s.s).unwrap(); let wr = unsafe { diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs index 5032cd57eeb37..83e321f110c58 100644 --- a/src/libsyntax/ptr.rs +++ b/src/libsyntax/ptr.rs @@ -52,7 +52,7 @@ pub struct P { /// Construct a `P` from a `T` value. 
pub fn P(value: T) -> P { P { - ptr: box value + ptr: Box::new(value) } } From ca0ee4c6454fd272457e98c20a461ec9f93b2ac4 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Wed, 15 Apr 2015 22:12:12 -0700 Subject: [PATCH 02/13] syntax: Remove uses of #[feature(slice_patterns)] --- src/libsyntax/config.rs | 11 ++++- src/libsyntax/diagnostics/plugin.rs | 23 ++++++---- src/libsyntax/ext/deriving/cmp/ord.rs | 4 +- src/libsyntax/ext/deriving/cmp/partial_eq.rs | 8 ++-- src/libsyntax/ext/deriving/cmp/partial_ord.rs | 8 ++-- src/libsyntax/ext/deriving/hash.rs | 4 +- src/libsyntax/ext/deriving/primitive.rs | 4 +- src/libsyntax/ext/expand.rs | 4 +- src/libsyntax/ext/trace_macros.rs | 7 ++- src/libsyntax/lib.rs | 1 - src/libsyntax/parse/mod.rs | 46 +++++++++++++------ 11 files changed, 73 insertions(+), 47 deletions(-) diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 489a7721d7ba7..366806bc19b49 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -284,8 +284,15 @@ impl<'a> fold::Folder for CfgAttrFolder<'a> { return fold::noop_fold_attribute(attr, self); } - let (cfg, mi) = match attr.meta_item_list() { - Some([ref cfg, ref mi]) => (cfg, mi), + let attr_list = match attr.meta_item_list() { + Some(attr_list) => attr_list, + None => { + self.diag.span_err(attr.span, "expected `#[cfg_attr(, )]`"); + return None; + } + }; + let (cfg, mi) = match (attr_list.len(), attr_list.get(0), attr_list.get(1)) { + (2, Some(cfg), Some(mi)) => (cfg, mi), _ => { self.diag.span_err(attr.span, "expected `#[cfg_attr(, )]`"); return None; diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 6fcf39f0b174b..ac25a303182c5 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -54,8 +54,8 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, span: Span, token_tree: &[TokenTree]) -> Box { - let code = match token_tree { - [ast::TtToken(_, token::Ident(code, _))] => code, + let code = match (token_tree.len(), token_tree.get(0)) { + (1, Some(&ast::TtToken(_, token::Ident(code, _)))) => code, _ => unreachable!() }; with_used_diagnostics(|diagnostics| { @@ -84,13 +84,18 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, span: Span, token_tree: &[TokenTree]) -> Box { - let (code, description) = match token_tree { - [ast::TtToken(_, token::Ident(ref code, _))] => { + let (code, description) = match ( + token_tree.len(), + token_tree.get(0), + token_tree.get(1), + token_tree.get(2) + ) { + (1, Some(&ast::TtToken(_, token::Ident(ref code, _))), None, None) => { (code, None) }, - [ast::TtToken(_, token::Ident(ref code, _)), - ast::TtToken(_, token::Comma), - ast::TtToken(_, token::Literal(token::StrRaw(description, _), None))] => { + (3, Some(&ast::TtToken(_, token::Ident(ref code, _))), + Some(&ast::TtToken(_, token::Comma)), + Some(&ast::TtToken(_, token::Literal(token::StrRaw(description, _), None)))) => { (code, Some(description)) } _ => unreachable!() @@ -130,8 +135,8 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, span: Span, token_tree: &[TokenTree]) -> Box { - let name = match token_tree { - [ast::TtToken(_, token::Ident(ref name, _))] => name, + let name = match (token_tree.len(), token_tree.get(0)) { + (1, Some(&ast::TtToken(_, token::Ident(ref name, _)))) => name, _ => unreachable!() }; diff --git a/src/libsyntax/ext/deriving/cmp/ord.rs b/src/libsyntax/ext/deriving/cmp/ord.rs index b2a4ef1dafbc8..94cc0d9c493f2 100644 --- a/src/libsyntax/ext/deriving/cmp/ord.rs +++ 
b/src/libsyntax/ext/deriving/cmp/ord.rs @@ -106,8 +106,8 @@ pub fn cs_cmp(cx: &mut ExtCtxt, span: Span, // } let new = { - let other_f = match other_fs { - [ref o_f] => o_f, + let other_f = match (other_fs.len(), other_fs.get(0)) { + (1, Some(o_f)) => o_f, _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"), }; diff --git a/src/libsyntax/ext/deriving/cmp/partial_eq.rs b/src/libsyntax/ext/deriving/cmp/partial_eq.rs index f02e5ee14126d..61eb81c6755e2 100644 --- a/src/libsyntax/ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax/ext/deriving/cmp/partial_eq.rs @@ -29,8 +29,8 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, cs_fold( true, // use foldl |cx, span, subexpr, self_f, other_fs| { - let other_f = match other_fs { - [ref o_f] => o_f, + let other_f = match (other_fs.len(), other_fs.get(0)) { + (1, Some(o_f)) => o_f, _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`") }; @@ -46,8 +46,8 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, cs_fold( true, // use foldl |cx, span, subexpr, self_f, other_fs| { - let other_f = match other_fs { - [ref o_f] => o_f, + let other_f = match (other_fs.len(), other_fs.get(0)) { + (1, Some(o_f)) => o_f, _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`") }; diff --git a/src/libsyntax/ext/deriving/cmp/partial_ord.rs b/src/libsyntax/ext/deriving/cmp/partial_ord.rs index fe6a8fea78c70..dbb779decace2 100644 --- a/src/libsyntax/ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax/ext/deriving/cmp/partial_ord.rs @@ -150,8 +150,8 @@ pub fn cs_partial_cmp(cx: &mut ExtCtxt, span: Span, // } let new = { - let other_f = match other_fs { - [ref o_f] => o_f, + let other_f = match (other_fs.len(), other_fs.get(0)) { + (1, Some(o_f)) => o_f, _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"), }; @@ -208,8 +208,8 @@ fn cs_op(less: bool, equal: bool, cx: &mut ExtCtxt, get use the binops to avoid auto-deref dereferencing too many layers of pointers, if the type includes pointers. 
*/ - let other_f = match other_fs { - [ref o_f] => o_f, + let other_f = match (other_fs.len(), other_fs.get(0)) { + (1, Some(o_f)) => o_f, _ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`") }; diff --git a/src/libsyntax/ext/deriving/hash.rs b/src/libsyntax/ext/deriving/hash.rs index 915d99796152d..b9835eda791f7 100644 --- a/src/libsyntax/ext/deriving/hash.rs +++ b/src/libsyntax/ext/deriving/hash.rs @@ -56,8 +56,8 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt, } fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { - let state_expr = match substr.nonself_args { - [ref state_expr] => state_expr, + let state_expr = match (substr.nonself_args.len(), substr.nonself_args.get(0)) { + (1, Some(o_f)) => o_f, _ => cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`") }; let call_hash = |span, thing_expr| { diff --git a/src/libsyntax/ext/deriving/primitive.rs b/src/libsyntax/ext/deriving/primitive.rs index 3d0645fd6e3cf..a972cfe135511 100644 --- a/src/libsyntax/ext/deriving/primitive.rs +++ b/src/libsyntax/ext/deriving/primitive.rs @@ -71,8 +71,8 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, } fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> P { - let n = match substr.nonself_args { - [ref n] => n, + let n = match (substr.nonself_args.len(), substr.nonself_args.get(0)) { + (1, Some(o_f)) => o_f, _ => cx.span_bug(trait_span, "incorrect number of arguments in `derive(FromPrimitive)`") }; diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 0945f8dd02103..d1db956adb3f5 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -1962,8 +1962,8 @@ foo_module!(); "xx" == string }).collect(); let cxbinds: &[&ast::Ident] = &cxbinds[..]; - let cxbind = match cxbinds { - [b] => b, + let cxbind = match (cxbinds.len(), cxbinds.get(0)) { + (1, Some(b)) => *b, _ => panic!("expected just one binding for ext_cx") }; let resolved_binding = mtwt::resolve(*cxbind); diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index 3fcc6a8d69241..646e6fec40553 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -28,12 +28,11 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt, return base::DummyResult::any(sp); } - - match tt { - [ast::TtToken(_, ref tok)] if tok.is_keyword(keywords::True) => { + match (tt.len(), tt.first()) { + (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::True) => { cx.set_trace_macros(true); } - [ast::TtToken(_, ref tok)] if tok.is_keyword(keywords::False) => { + (1, Some(&ast::TtToken(_, ref tok))) if tok.is_keyword(keywords::False) => { cx.set_trace_macros(false); } _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"), diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 99fb2798e7a3a..a70707b3ea146 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -35,7 +35,6 @@ #![feature(path_ext)] #![feature(str_char)] #![feature(into_cow)] -#![feature(slice_patterns)] extern crate arena; extern crate fmt_macros; diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index c078787120f9e..51fb09a7526e6 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -834,28 +834,44 @@ mod test { fn string_to_tts_macro () { let tts = string_to_tts("macro_rules! 
zip (($a)=>($a))".to_string()); let tts: &[ast::TokenTree] = &tts[..]; - match tts { - [ast::TtToken(_, token::Ident(name_macro_rules, token::Plain)), - ast::TtToken(_, token::Not), - ast::TtToken(_, token::Ident(name_zip, token::Plain)), - ast::TtDelimited(_, ref macro_delimed)] + + match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { + ( + 4, + Some(&ast::TtToken(_, token::Ident(name_macro_rules, token::Plain))), + Some(&ast::TtToken(_, token::Not)), + Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))), + Some(&ast::TtDelimited(_, ref macro_delimed)), + ) if name_macro_rules.as_str() == "macro_rules" && name_zip.as_str() == "zip" => { - match ¯o_delimed.tts[..] { - [ast::TtDelimited(_, ref first_delimed), - ast::TtToken(_, token::FatArrow), - ast::TtDelimited(_, ref second_delimed)] + let tts = ¯o_delimed.tts[..]; + match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { + ( + 3, + Some(&ast::TtDelimited(_, ref first_delimed)), + Some(&ast::TtToken(_, token::FatArrow)), + Some(&ast::TtDelimited(_, ref second_delimed)), + ) if macro_delimed.delim == token::Paren => { - match &first_delimed.tts[..] { - [ast::TtToken(_, token::Dollar), - ast::TtToken(_, token::Ident(name, token::Plain))] + let tts = &first_delimed.tts[..]; + match (tts.len(), tts.get(0), tts.get(1)) { + ( + 2, + Some(&ast::TtToken(_, token::Dollar)), + Some(&ast::TtToken(_, token::Ident(name, token::Plain))), + ) if first_delimed.delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 3: {:?}", **first_delimed), } - match &second_delimed.tts[..] { - [ast::TtToken(_, token::Dollar), - ast::TtToken(_, token::Ident(name, token::Plain))] + let tts = &second_delimed.tts[..]; + match (tts.len(), tts.get(0), tts.get(1)) { + ( + 2, + Some(&ast::TtToken(_, token::Dollar)), + Some(&ast::TtToken(_, token::Ident(name, token::Plain))), + ) if second_delimed.delim == token::Paren && name.as_str() == "a" => {}, _ => panic!("value 4: {:?}", **second_delimed), From cfb9d286ea0169a69580c457e0994e02023c0c1a Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Wed, 15 Apr 2015 22:15:50 -0700 Subject: [PATCH 03/13] syntax: remove uses of `.into_cow()` --- src/libsyntax/lib.rs | 1 - src/libsyntax/parse/lexer/mod.rs | 8 ++++---- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index a70707b3ea146..deb3e158bb582 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -34,7 +34,6 @@ #![feature(unicode)] #![feature(path_ext)] #![feature(str_char)] -#![feature(into_cow)] extern crate arena; extern crate fmt_macros; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index f891318659a87..9cd3db45784e4 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -16,7 +16,7 @@ use ext::tt::transcribe::tt_next_token; use parse::token; use parse::token::str_to_ident; -use std::borrow::{IntoCow, Cow}; +use std::borrow::Cow; use std::char; use std::fmt; use std::mem::replace; @@ -293,7 +293,7 @@ impl<'a> StringReader<'a> { let next = i + ch.len_utf8(); if ch == '\r' { if next < s.len() && s.char_at(next) == '\n' { - return translate_crlf_(self, start, s, errmsg, i).into_cow(); + return translate_crlf_(self, start, s, errmsg, i).into(); } let pos = start + BytePos(i as u32); let end_pos = start + BytePos(next as u32); @@ -301,7 +301,7 @@ impl<'a> StringReader<'a> { } i = next; } - return s.into_cow(); + return s.into(); fn translate_crlf_(rdr: &StringReader, start: BytePos, s: &str, errmsg: &str, 
mut i: usize) -> String { @@ -564,7 +564,7 @@ impl<'a> StringReader<'a> { let string = if has_cr { self.translate_crlf(start_bpos, string, "bare CR not allowed in block doc-comment") - } else { string.into_cow() }; + } else { string.into() }; token::DocComment(token::intern(&string[..])) } else { token::Comment From e3dd68d0a4b519db7dfea655a2bec4ea4aadb163 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Thu, 16 Apr 2015 19:54:05 -0700 Subject: [PATCH 04/13] syntax: Remove use of TraitObject in pretty printer --- src/libsyntax/print/pprust.rs | 21 ++++++++------------- 1 file changed, 8 insertions(+), 13 deletions(-) diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 1a5c295cdd6db..3ccc8f2619f2d 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -28,7 +28,7 @@ use print::pp::Breaks::{Consistent, Inconsistent}; use ptr::P; use std_inject; -use std::{ascii, mem}; +use std::ascii; use std::io::{self, Write, Read}; use std::iter; @@ -187,18 +187,13 @@ impl<'a> State<'a> { pub fn to_string(f: F) -> String where F: FnOnce(&mut State) -> io::Result<()>, { - use std::raw::TraitObject; - let mut s = rust_printer(Box::new(Vec::new())); - f(&mut s).unwrap(); - eof(&mut s.s).unwrap(); - let wr = unsafe { - // FIXME(pcwalton): A nasty function to extract the string from an `Write` - // that we "know" to be a `Vec` that works around the lack of checked - // downcasts. - let obj: &TraitObject = mem::transmute(&s.s.out); - mem::transmute::<*mut (), &Vec>(obj.data) - }; - String::from_utf8(wr.clone()).unwrap() + let mut wr = Vec::new(); + { + let mut printer = rust_printer(Box::new(&mut wr)); + f(&mut printer).unwrap(); + eof(&mut printer.s).unwrap(); + } + String::from_utf8(wr).unwrap() } pub fn binop_to_string(op: BinOpToken) -> &'static str { From bc6d990adbad6109cf096acc04cc5ab7dfd5b5ff Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Thu, 16 Apr 2015 19:56:37 -0700 Subject: [PATCH 05/13] syntax: Don't use unstable fn to convert single element to a slice --- src/libsyntax/util/small_vector.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs index 6b864d5294766..153f9d4a26df1 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -64,7 +64,10 @@ impl SmallVector { let result: &[T] = &[]; result } - One(ref v) => slice::ref_slice(v), + One(ref v) => { + // FIXME: Could be replaced with `slice::ref_slice(v)` when it is stable. 
+ unsafe { slice::from_raw_parts(v, 1) } + } Many(ref vs) => vs } } From 855365895228a78041ae16946eeaef5e45a37a64 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Thu, 16 Apr 2015 20:38:00 -0700 Subject: [PATCH 06/13] syntax: remove #[feature(quote, unsafe_destructor)] --- src/libsyntax/diagnostics/plugin.rs | 48 ++++++++++++++++++++++++++--- src/libsyntax/lib.rs | 1 - 2 files changed, 43 insertions(+), 6 deletions(-) diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index ac25a303182c5..6de4edafa0bf5 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -77,7 +77,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, )); } }); - MacEager::expr(quote_expr!(ecx, ())) + MacEager::expr(ecx.expr_tuple(span, Vec::new())) } pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, @@ -128,7 +128,15 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, let sym = Ident::new(token::gensym(&( "__register_diagnostic_".to_string() + &token::get_ident(*code) ))); - MacEager::items(SmallVector::many(vec![quote_item!(ecx, mod $sym {}).unwrap()])) + MacEager::items(SmallVector::many(vec![ + ecx.item_mod( + span, + span, + sym, + Vec::new(), + Vec::new() + ) + ])) } pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, @@ -153,7 +161,37 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, (descriptions.len(), ecx.expr_vec(span, descriptions)) }); - MacEager::items(SmallVector::many(vec![quote_item!(ecx, - pub static $name: [(&'static str, &'static str); $count] = $expr; - ).unwrap()])) + let static_ = ecx.lifetime(span, ecx.name_of("'static")); + let ty_str = ecx.ty_rptr( + span, + ecx.ty_ident(span, ecx.ident_of("str")), + Some(static_), + ast::MutImmutable, + ); + + let ty = ecx.ty( + span, + ast::TyFixedLengthVec( + ecx.ty( + span, + ast::TyTup(vec![ty_str.clone(), ty_str]) + ), + ecx.expr_usize(span, count), + ), + ); + + MacEager::items(SmallVector::many(vec![ + P(ast::Item { + ident: name.clone(), + attrs: Vec::new(), + id: ast::DUMMY_NODE_ID, + node: ast::ItemStatic( + ty, + ast::MutImmutable, + expr, + ), + vis: ast::Public, + span: span, + }) + ])) } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index deb3e158bb582..8d77894a549fe 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -28,7 +28,6 @@ #![feature(collections)] #![feature(core)] #![feature(libc)] -#![feature(quote, unsafe_destructor)] #![feature(rustc_private)] #![feature(staged_api)] #![feature(unicode)] From 2937cce70c7c860b215f2d6f22f45c0dfe8ba914 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Thu, 16 Apr 2015 21:19:50 -0700 Subject: [PATCH 07/13] syntax: Replace String::from_str with the stable String::from --- src/libsyntax/ast_util.rs | 2 +- src/libsyntax/codemap.rs | 4 ++-- src/libsyntax/diagnostic.rs | 2 +- src/libsyntax/parse/lexer/comments.rs | 2 +- src/libsyntax/parse/parser.rs | 2 +- src/libsyntax/print/pp.rs | 2 +- src/libsyntax/print/pprust.rs | 4 ++-- 7 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 0ad75c5ec8cbc..78f06ce5fd5d0 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -238,7 +238,7 @@ pub fn name_to_dummy_lifetime(name: Name) -> Lifetime { pub fn impl_pretty_name(trait_ref: &Option, ty: Option<&Ty>) -> Ident { let mut pretty = match ty { Some(t) => pprust::ty_to_string(t), - None => String::from_str("..") + None => String::from("..") }; match *trait_ref { diff 
--git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index dfdaa47d8b925..0ad70c3379b98 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -557,9 +557,9 @@ impl CodeMap { // FIXME #12884: no efficient/safe way to remove from the start of a string // and reuse the allocation. let mut src = if src.starts_with("\u{feff}") { - String::from_str(&src[3..]) + String::from(&src[3..]) } else { - String::from_str(&src[..]) + String::from(&src[..]) }; // Append '\n' in case it's not already there. diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index 620a8927134ba..a7453636c445c 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -644,7 +644,7 @@ fn highlight_lines(err: &mut EmitterWriter, } try!(write!(&mut err.dst, "{}", s)); - let mut s = String::from_str("^"); + let mut s = String::from("^"); let count = match lastc { // Most terminals have a tab stop every eight columns by default '\t' => 8 - col%8, diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index bda01d5a6541c..02f1a52aaf339 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -246,7 +246,7 @@ fn read_block_comment(rdr: &mut StringReader, rdr.bump(); rdr.bump(); - let mut curr_line = String::from_str("/*"); + let mut curr_line = String::from("/*"); // doc-comments are not really comments, they are attributes if (rdr.curr_is('*') && !rdr.nextch_is('*')) || rdr.curr_is('!') { diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index dd00ad313215d..0fcf47d9e711d 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -4834,7 +4834,7 @@ impl<'a> Parser<'a> { let mut included_mod_stack = self.sess.included_mod_stack.borrow_mut(); match included_mod_stack.iter().position(|p| *p == path) { Some(i) => { - let mut err = String::from_str("circular modules: "); + let mut err = String::from("circular modules: "); let len = included_mod_stack.len(); for p in &included_mod_stack[i.. len] { err.push_str(&p.to_string_lossy()); diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 3fd4e31b4771e..15aaf9cf390fd 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -131,7 +131,7 @@ pub fn buf_str(toks: &[Token], assert_eq!(n, szs.len()); let mut i = left; let mut l = lim; - let mut s = string::String::from_str("["); + let mut s = string::String::from("["); while i != right && l != 0 { l -= 1; if i != left { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 3ccc8f2619f2d..5a57e09fcfff2 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -2794,13 +2794,13 @@ impl<'a> State<'a> { match lit.node { ast::LitStr(ref st, style) => self.print_string(&st, style), ast::LitByte(byte) => { - let mut res = String::from_str("b'"); + let mut res = String::from("b'"); res.extend(ascii::escape_default(byte).map(|c| c as char)); res.push('\''); word(&mut self.s, &res[..]) } ast::LitChar(ch) => { - let mut res = String::from_str("'"); + let mut res = String::from("'"); res.extend(ch.escape_default()); res.push('\''); word(&mut self.s, &res[..]) From a2cfe385050503528f13324317e52d944ce75efa Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Thu, 16 Apr 2015 21:21:26 -0700 Subject: [PATCH 08/13] syntax: Replace [].tail with the stable [1..] 
syntax --- src/libsyntax/ext/deriving/generic/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/libsyntax/ext/deriving/generic/mod.rs b/src/libsyntax/ext/deriving/generic/mod.rs index f73a3969bed82..4c05cd973ff6f 100644 --- a/src/libsyntax/ext/deriving/generic/mod.rs +++ b/src/libsyntax/ext/deriving/generic/mod.rs @@ -1103,7 +1103,7 @@ impl<'a> MethodDef<'a> { subpats.push(p); idents }; - for self_arg_name in self_arg_names.tail() { + for self_arg_name in &self_arg_names[1..] { let (p, idents) = mk_self_pat(cx, &self_arg_name[..]); subpats.push(p); self_pats_idents.push(idents); From 21143aae94bcb11a4d3b2e2b37e5766edd26a856 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Thu, 16 Apr 2015 21:38:24 -0700 Subject: [PATCH 09/13] syntax: Replace Vec::map_in_place with stable mut iterator --- src/libsyntax/codemap.rs | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 0ad70c3379b98..6acc56fb41aa2 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -594,8 +594,8 @@ impl CodeMap { pub fn new_imported_filemap(&self, filename: FileName, source_len: usize, - file_local_lines: Vec, - file_local_multibyte_chars: Vec) + mut file_local_lines: Vec, + mut file_local_multibyte_chars: Vec) -> Rc { let mut files = self.files.borrow_mut(); let start_pos = match files.last() { @@ -606,19 +606,21 @@ impl CodeMap { let end_pos = Pos::from_usize(start_pos + source_len); let start_pos = Pos::from_usize(start_pos); - let lines = file_local_lines.map_in_place(|pos| pos + start_pos); - let multibyte_chars = file_local_multibyte_chars.map_in_place(|mbc| MultiByteChar { - pos: mbc.pos + start_pos, - bytes: mbc.bytes - }); + for pos in &mut file_local_lines { + *pos = *pos + start_pos; + } + + for mbc in &mut file_local_multibyte_chars { + mbc.pos = mbc.pos + start_pos; + } let filemap = Rc::new(FileMap { name: filename, src: None, start_pos: start_pos, end_pos: end_pos, - lines: RefCell::new(lines), - multibyte_chars: RefCell::new(multibyte_chars), + lines: RefCell::new(file_local_lines), + multibyte_chars: RefCell::new(file_local_multibyte_chars), }); files.push(filemap.clone()); From c3da1a1912216f614060e9e76509d68fb81c2887 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Thu, 16 Apr 2015 21:57:38 -0700 Subject: [PATCH 10/13] syntax: replace Vec::push_all with stable Vec::extend --- src/libsyntax/parse/parser.rs | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 0fcf47d9e711d..7d353ae9e1d8c 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -436,10 +436,11 @@ impl<'a> Parser<'a> { // leave it in the input Ok(()) } else { - let mut expected = edible.iter().map(|x| TokenType::Token(x.clone())) - .collect::>(); - expected.extend(inedible.iter().map(|x| TokenType::Token(x.clone()))); - expected.push_all(&*self.expected_tokens); + let mut expected = edible.iter() + .map(|x| TokenType::Token(x.clone())) + .chain(inedible.iter().map(|x| TokenType::Token(x.clone()))) + .chain(self.expected_tokens.iter().cloned()) + .collect::>(); expected.sort_by(|a, b| a.to_string().cmp(&b.to_string())); expected.dedup(); let expect = tokens_to_string(&expected[..]); @@ -490,8 +491,10 @@ impl<'a> Parser<'a> { debug!("commit_expr {:?}", e); if let ExprPath(..) = e.node { // might be unit-struct construction; check for recoverableinput error. 
- let mut expected = edible.iter().cloned().collect::>(); - expected.push_all(inedible); + let expected = edible.iter() + .cloned() + .chain(inedible.iter().cloned()) + .collect::>(); try!(self.check_for_erroneous_unit_struct_expecting(&expected[..])); } self.expect_one_of(edible, inedible) @@ -509,8 +512,10 @@ impl<'a> Parser<'a> { if self.last_token .as_ref() .map_or(false, |t| t.is_ident() || t.is_path()) { - let mut expected = edible.iter().cloned().collect::>(); - expected.push_all(&inedible); + let expected = edible.iter() + .cloned() + .chain(inedible.iter().cloned()) + .collect::>(); try!(self.check_for_erroneous_unit_struct_expecting(&expected)); } self.expect_one_of(edible, inedible) @@ -1187,7 +1192,7 @@ impl<'a> Parser<'a> { debug!("parse_trait_methods(): parsing provided method"); let (inner_attrs, body) = try!(p.parse_inner_attrs_and_block()); - attrs.push_all(&inner_attrs[..]); + attrs.extend(inner_attrs.iter().cloned()); Some(body) } From 83b1d7fd6f83f091cd6a02f3a3dc246cbec39677 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Sun, 19 Apr 2015 15:41:50 -0700 Subject: [PATCH 11/13] syntax: Remove #[feature(path_ext)] Replace Path::exists with stable metadata call. --- src/libsyntax/lib.rs | 1 - src/libsyntax/parse/parser.rs | 5 +++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 8d77894a549fe..82f6be38797c7 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -31,7 +31,6 @@ #![feature(rustc_private)] #![feature(staged_api)] #![feature(unicode)] -#![feature(path_ext)] #![feature(str_char)] extern crate arena; diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 7d353ae9e1d8c..796bc2a3513f3 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -78,6 +78,7 @@ use parse::PResult; use diagnostic::FatalError; use std::collections::HashSet; +use std::fs; use std::io::prelude::*; use std::mem; use std::path::{Path, PathBuf}; @@ -4775,8 +4776,8 @@ impl<'a> Parser<'a> { let secondary_path_str = format!("{}/mod.rs", mod_name); let default_path = dir_path.join(&default_path_str[..]); let secondary_path = dir_path.join(&secondary_path_str[..]); - let default_exists = default_path.exists(); - let secondary_exists = secondary_path.exists(); + let default_exists = fs::metadata(&default_path).is_ok(); + let secondary_exists = fs::metadata(&secondary_path).is_ok(); if !self.owns_directory { self.span_err(id_sp, From 7f9180fcb97e667d88260e1b3d396f8078e30b2d Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Mon, 20 Apr 2015 17:51:10 -0700 Subject: [PATCH 12/13] syntax: Change ExpnId::{from,to}_llvm_cookie to {from,to}_u32 --- src/librustc_trans/back/write.rs | 2 +- src/librustc_trans/trans/asm.rs | 2 +- src/libsyntax/codemap.rs | 10 ++++------ 3 files changed, 6 insertions(+), 8 deletions(-) diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index de21d62651499..488a4a8082f9d 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -348,7 +348,7 @@ unsafe extern "C" fn report_inline_asm<'a, 'b>(cgcx: &'a CodegenContext<'a>, match cgcx.lto_ctxt { Some((sess, _)) => { - sess.codemap().with_expn_info(ExpnId::from_llvm_cookie(cookie), |info| match info { + sess.codemap().with_expn_info(ExpnId::from_u32(cookie), |info| match info { Some(ei) => sess.span_err(ei.call_site, msg), None => sess.err(msg), }); diff --git a/src/librustc_trans/trans/asm.rs b/src/librustc_trans/trans/asm.rs index 
d6c85e8b17345..27128827e2676 100644 --- a/src/librustc_trans/trans/asm.rs +++ b/src/librustc_trans/trans/asm.rs @@ -138,7 +138,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, ia: &ast::InlineAsm) let kind = llvm::LLVMGetMDKindIDInContext(bcx.ccx().llcx(), key.as_ptr() as *const c_char, key.len() as c_uint); - let val: llvm::ValueRef = C_i32(bcx.ccx(), ia.expn_id.to_llvm_cookie()); + let val: llvm::ValueRef = C_i32(bcx.ccx(), ia.expn_id.into_u32() as i32); llvm::LLVMSetMetadata(r, kind, llvm::LLVMMDNodeInContext(bcx.ccx().llcx(), &val, 1)); diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 6acc56fb41aa2..a0c29a2371b40 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -26,7 +26,6 @@ use std::rc::Rc; use std::fmt; -use libc::c_uint; use serialize::{Encodable, Decodable, Encoder, Decoder}; @@ -287,13 +286,12 @@ pub const NO_EXPANSION: ExpnId = ExpnId(!0); pub const COMMAND_LINE_EXPN: ExpnId = ExpnId(!1); impl ExpnId { - pub fn from_llvm_cookie(cookie: c_uint) -> ExpnId { - ExpnId(cookie) + pub fn from_u32(id: u32) -> ExpnId { + ExpnId(id) } - pub fn to_llvm_cookie(self) -> i32 { - let ExpnId(cookie) = self; - cookie as i32 + pub fn into_u32(self) -> u32 { + self.0 } } From 19c8d701743922a709a4eb6554f562996b7baa27 Mon Sep 17 00:00:00 2001 From: Erick Tryzelaar Date: Tue, 21 Apr 2015 10:19:53 -0700 Subject: [PATCH 13/13] syntax: Copy unstable str::char_at into libsyntax --- src/libsyntax/lib.rs | 1 + src/libsyntax/parse/lexer/comments.rs | 7 ++++--- src/libsyntax/parse/lexer/mod.rs | 19 ++++++++++--------- src/libsyntax/parse/mod.rs | 8 ++++---- src/libsyntax/str.rs | 13 +++++++++++++ src/libsyntax/util/parser_testing.rs | 13 +++++++------ 6 files changed, 39 insertions(+), 22 deletions(-) create mode 100644 src/libsyntax/str.rs diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 82f6be38797c7..d8beeb6a5503b 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -92,6 +92,7 @@ pub mod parse; pub mod ptr; pub mod show_span; pub mod std_inject; +pub mod str; pub mod test; pub mod visit; diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 02f1a52aaf339..fb3a96f4c2887 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -13,11 +13,12 @@ pub use self::CommentStyle::*; use ast; use codemap::{BytePos, CharPos, CodeMap, Pos}; use diagnostic; -use parse::lexer::{is_whitespace, Reader}; -use parse::lexer::{StringReader, TokenAndSpan}; use parse::lexer::is_block_doc_comment; +use parse::lexer::{StringReader, TokenAndSpan}; +use parse::lexer::{is_whitespace, Reader}; use parse::lexer; use print::pprust; +use str::char_at; use std::io::Read; use std::usize; @@ -209,7 +210,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option { let mut col = col.to_usize(); let mut cursor: usize = 0; while col > 0 && cursor < len { - let ch = s.char_at(cursor); + let ch = char_at(s, cursor); if !ch.is_whitespace() { return None; } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 9cd3db45784e4..8e37b983e21e0 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -13,8 +13,9 @@ use codemap::{BytePos, CharPos, CodeMap, Pos, Span}; use codemap; use diagnostic::SpanHandler; use ext::tt::transcribe::tt_next_token; -use parse::token; use parse::token::str_to_ident; +use parse::token; +use str::char_at; use std::borrow::Cow; use std::char; @@ -289,10 +290,10 @@ impl<'a> StringReader<'a> { s: &'b 
str, errmsg: &'b str) -> Cow<'b, str> { let mut i = 0; while i < s.len() { - let ch = s.char_at(i); + let ch = char_at(s, i); let next = i + ch.len_utf8(); if ch == '\r' { - if next < s.len() && s.char_at(next) == '\n' { + if next < s.len() && char_at(s, next) == '\n' { return translate_crlf_(self, start, s, errmsg, i).into(); } let pos = start + BytePos(i as u32); @@ -308,12 +309,12 @@ impl<'a> StringReader<'a> { let mut buf = String::with_capacity(s.len()); let mut j = 0; while i < s.len() { - let ch = s.char_at(i); + let ch = char_at(s, i); let next = i + ch.len_utf8(); if ch == '\r' { if j < i { buf.push_str(&s[j..i]); } j = next; - if next >= s.len() || s.char_at(next) != '\n' { + if next >= s.len() || char_at(s, next) != '\n' { let pos = start + BytePos(i as u32); let end_pos = start + BytePos(next as u32); rdr.err_span_(pos, end_pos, errmsg); @@ -335,7 +336,7 @@ impl<'a> StringReader<'a> { if current_byte_offset < self.source_text.len() { assert!(self.curr.is_some()); let last_char = self.curr.unwrap(); - let ch = self.source_text.char_at(current_byte_offset); + let ch = char_at(&self.source_text, current_byte_offset); let next = current_byte_offset + ch.len_utf8(); let byte_offset_diff = next - current_byte_offset; self.pos = self.pos + Pos::from_usize(byte_offset_diff); @@ -357,7 +358,7 @@ impl<'a> StringReader<'a> { pub fn nextch(&self) -> Option { let offset = self.byte_offset(self.pos).to_usize(); if offset < self.source_text.len() { - Some(self.source_text.char_at(offset)) + Some(char_at(&self.source_text, offset)) } else { None } @@ -371,9 +372,9 @@ impl<'a> StringReader<'a> { let offset = self.byte_offset(self.pos).to_usize(); let s = &self.source_text[..]; if offset >= s.len() { return None } - let next = offset + s.char_at(offset).len_utf8(); + let next = offset + char_at(s, offset).len_utf8(); if next < s.len() { - Some(s.char_at(next)) + Some(char_at(s, next)) } else { None } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 51fb09a7526e6..1333e27058f1d 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -16,7 +16,7 @@ use diagnostic::{SpanHandler, mk_span_handler, default_handler, Auto, FatalError use parse::attr::ParserAttr; use parse::parser::Parser; use ptr::P; - +use str::char_at; use std::cell::{Cell, RefCell}; use std::fs::File; @@ -536,7 +536,7 @@ pub fn raw_str_lit(lit: &str) -> String { // check if `s` looks like i32 or u1234 etc. fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s.len() > 1 && - first_chars.contains(&s.char_at(0)) && + first_chars.contains(&char_at(s, 0)) && s[1..].chars().all(|c| '0' <= c && c <= '9') } @@ -673,8 +673,8 @@ pub fn integer_lit(s: &str, suffix: Option<&str>, sd: &SpanHandler, sp: Span) -> let orig = s; let mut ty = ast::UnsuffixedIntLit(ast::Plus); - if s.char_at(0) == '0' && s.len() > 1 { - match s.char_at(1) { + if char_at(s, 0) == '0' && s.len() > 1 { + match char_at(s, 1) { 'x' => base = 16, 'o' => base = 8, 'b' => base = 2, diff --git a/src/libsyntax/str.rs b/src/libsyntax/str.rs new file mode 100644 index 0000000000000..d0f47629b10e5 --- /dev/null +++ b/src/libsyntax/str.rs @@ -0,0 +1,13 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +pub fn char_at(s: &str, byte: usize) -> char { + s[byte..].chars().next().unwrap() +} diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index c6646fe93a21b..6adeb30a94ec4 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -15,6 +15,7 @@ use parse::new_parser_from_source_str; use parse::parser::Parser; use parse::token; use ptr::P; +use str::char_at; /// Map a string to tts, using a made-up filename: pub fn string_to_tts(source_str: String) -> Vec { @@ -96,24 +97,24 @@ pub fn matches_codepattern(a : &str, b : &str) -> bool { else if idx_a == a.len() {return false;} else if idx_b == b.len() { // maybe the stuff left in a is all ws? - if is_whitespace(a.char_at(idx_a)) { + if is_whitespace(char_at(a, idx_a)) { return scan_for_non_ws_or_end(a,idx_a) == a.len(); } else { return false; } } // ws in both given and pattern: - else if is_whitespace(a.char_at(idx_a)) - && is_whitespace(b.char_at(idx_b)) { + else if is_whitespace(char_at(a, idx_a)) + && is_whitespace(char_at(b, idx_b)) { idx_a = scan_for_non_ws_or_end(a,idx_a); idx_b = scan_for_non_ws_or_end(b,idx_b); } // ws in given only: - else if is_whitespace(a.char_at(idx_a)) { + else if is_whitespace(char_at(a, idx_a)) { idx_a = scan_for_non_ws_or_end(a,idx_a); } // *don't* silently eat ws in expected only. - else if a.char_at(idx_a) == b.char_at(idx_b) { + else if char_at(a, idx_a) == char_at(b, idx_b) { idx_a += 1; idx_b += 1; } @@ -129,7 +130,7 @@ pub fn matches_codepattern(a : &str, b : &str) -> bool { fn scan_for_non_ws_or_end(a : &str, idx: usize) -> usize { let mut i = idx; let len = a.len(); - while (i < len) && (is_whitespace(a.char_at(i))) { + while (i < len) && (is_whitespace(char_at(a, i))) { i += 1; } i
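
As a usage sketch for the `char_at` helper introduced by the last patch: the function body below is copied from the new `src/libsyntax/str.rs`, while the `main` wrapper and sample string are illustrative only.

    // Stable replacement for the unstable `str::char_at(byte)` method: take the
    // first char of the byte-indexed suffix. Like string slicing, this panics if
    // `byte` is past the end or not on a char boundary.
    pub fn char_at(s: &str, byte: usize) -> char {
        s[byte..].chars().next().unwrap()
    }

    fn main() {
        let s = "macro_rules!";
        assert_eq!(char_at(s, 0), 'm'); // previously `s.char_at(0)`
        assert_eq!(char_at(s, 5), '_'); // byte offset, not char index
    }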