diff --git a/src/libfourcc/lib.rs b/src/libfourcc/lib.rs
index 270416305dd1c..c465c8f1e1608 100644
--- a/src/libfourcc/lib.rs
+++ b/src/libfourcc/lib.rs
@@ -131,7 +131,11 @@ struct Ident {
 }
 
 fn parse_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> (@ast::Expr, Option<Ident>) {
-    let p = &mut parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts.to_owned());
+    let p = &mut parse::new_parser_from_tts(cx.parse_sess(),
+                                            cx.cfg(),
+                                            tts.iter()
+                                               .map(|x| (*x).clone())
+                                               .collect());
     let ex = p.parse_expr();
     let id = if p.token == token::EOF {
         None
@@ -151,7 +155,7 @@ fn parse_tts(cx: &ExtCtxt, tts: &[ast::TokenTree]) -> (@ast::Expr, Option<Ident>)
 fn target_endian_little(cx: &ExtCtxt, sp: Span) -> bool {
     let meta = cx.meta_name_value(sp, InternedString::new("target_endian"),
         ast::LitStr(InternedString::new("little"), ast::CookedStr));
-    contains(cx.cfg(), meta)
+    contains(cx.cfg().as_slice(), meta)
 }
 
 // FIXME (10872): This is required to prevent an LLVM assert on Windows
diff --git a/src/librustc/back/link.rs b/src/librustc/back/link.rs
index bacc98a01356a..4d86848482e8d 100644
--- a/src/librustc/back/link.rs
+++ b/src/librustc/back/link.rs
@@ -519,7 +519,7 @@ pub fn crate_id_hash(crate_id: &CrateId) -> ~str {
 pub fn build_link_meta(krate: &ast::Crate,
                        output: &OutputFilenames) -> LinkMeta {
     let r = LinkMeta {
-        crateid: find_crate_id(krate.attrs, output),
+        crateid: find_crate_id(krate.attrs.as_slice(), output),
         crate_hash: Svh::calculate(krate),
     };
     info!("{}", r);
diff --git a/src/librustc/driver/driver.rs b/src/librustc/driver/driver.rs
index cd7d39e468810..88526dd15a913 100644
--- a/src/librustc/driver/driver.rs
+++ b/src/librustc/driver/driver.rs
@@ -35,6 +35,8 @@ use std::io::fs;
 use std::io::MemReader;
 use std::os;
 use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use collections::{HashMap, HashSet};
 use getopts::{optopt, optmulti, optflag, optflagopt};
 use getopts;
@@ -101,15 +103,15 @@ pub fn default_configuration(sess: Session) ->
     };
 
     let mk = attr::mk_name_value_item_str;
-    return ~[ // Target bindings.
+    return vec!(// Target bindings.
         attr::mk_word_item(fam.clone()),
         mk(InternedString::new("target_os"), tos),
         mk(InternedString::new("target_family"), fam),
         mk(InternedString::new("target_arch"), InternedString::new(arch)),
         mk(InternedString::new("target_endian"), InternedString::new(end)),
         mk(InternedString::new("target_word_size"),
-           InternedString::new(wordsz)),
-    ];
+           InternedString::new(wordsz))
+    );
 }
 
 pub fn append_configuration(cfg: &mut ast::CrateConfig,
@@ -119,8 +121,7 @@ pub fn append_configuration(cfg: &mut ast::CrateConfig,
     }
 }
 
-pub fn build_configuration(sess: Session) ->
-                           ast::CrateConfig {
+pub fn build_configuration(sess: Session) -> ast::CrateConfig {
     // Combine the configuration requested by the session (command line) with
     // some default and generated configuration items
     let default_cfg = default_configuration(sess);
@@ -135,7 +136,8 @@ pub fn build_configuration(sess: Session) ->
     } else {
         InternedString::new("nogc")
     });
-    return vec::append(user_cfg, default_cfg);
+    return vec_ng::append(user_cfg.move_iter().collect(),
+                          default_cfg.as_slice());
 }
 
 // Convert strings provided as --cfg [cfgspec] into a crate_cfg
@@ -143,7 +145,10 @@ fn parse_cfgspecs(cfgspecs: ~[~str]) -> ast::CrateConfig {
     cfgspecs.move_iter().map(|s| {
         let sess = parse::new_parse_sess();
-        parse::parse_meta_from_source_str("cfgspec".to_str(), s, ~[], sess)
+        parse::parse_meta_from_source_str("cfgspec".to_str(),
+                                          s,
+                                          Vec::new(),
+                                          sess)
     }).collect::<ast::CrateConfig>()
 }
@@ -193,7 +198,9 @@ pub fn phase_2_configure_and_expand(sess: Session,
     let time_passes = sess.time_passes();
 
     sess.building_library.set(session::building_library(sess.opts, &krate));
-    sess.crate_types.set(session::collect_crate_types(&sess, krate.attrs));
+    sess.crate_types.set(session::collect_crate_types(&sess,
+                                                      krate.attrs
+                                                           .as_slice()));
 
     time(time_passes, "gated feature checking", (), |_|
          front::feature_gate::check_crate(sess, &krate));
@@ -472,7 +479,7 @@ fn write_out_deps(sess: Session,
                   input: &Input,
                   outputs: &OutputFilenames,
                   krate: &ast::Crate) -> io::IoResult<()> {
-    let id = link::find_crate_id(krate.attrs, outputs);
+    let id = link::find_crate_id(krate.attrs.as_slice(), outputs);
 
     let mut out_filenames = ~[];
     for output_type in sess.opts.output_types.iter() {
@@ -546,8 +553,11 @@ pub fn compile_input(sess: Session, cfg: ast::CrateConfig, input: &Input,
             let loader = &mut Loader::new(sess);
             phase_2_configure_and_expand(sess, loader, krate)
         };
-        let outputs = build_output_filenames(input, outdir, output,
-                                             expanded_crate.attrs, sess);
+        let outputs = build_output_filenames(input,
+                                             outdir,
+                                             output,
+                                             expanded_crate.attrs.as_slice(),
+                                             sess);
 
         write_out_deps(sess, input, &outputs, &expanded_crate).unwrap();
 
@@ -1180,7 +1190,7 @@ mod test {
         let sessopts = build_session_options(matches);
         let sess = build_session(sessopts, None);
         let cfg = build_configuration(sess);
-        assert!((attr::contains_name(cfg, "test")));
+        assert!((attr::contains_name(cfg.as_slice(), "test")));
     }
 
     // When the user supplies --test and --cfg test, don't implicitly add
diff --git a/src/librustc/driver/session.rs b/src/librustc/driver/session.rs
index 9a33c54d50f2b..d404360bf1499 100644
--- a/src/librustc/driver/session.rs
+++ b/src/librustc/driver/session.rs
@@ -27,6 +27,7 @@ use syntax::{abi, ast, codemap};
 use syntax;
 
 use std::cell::{Cell, RefCell};
+use std::vec_ng::Vec;
 use collections::{HashMap,HashSet};
 
 pub struct Config {
@@ -319,7 +320,7 @@ pub fn basic_options() -> @Options {
         addl_lib_search_paths: @RefCell::new(HashSet::new()),
         maybe_sysroot: None,
         target_triple: host_triple(),
-        cfg: ~[],
+        cfg: Vec::new(),
         test: false,
         parse_only: false,
         no_trans: false,
@@ -451,7 +452,8 @@ pub fn building_library(options: &Options, krate: &ast::Crate) -> bool {
             CrateTypeStaticlib | CrateTypeDylib | CrateTypeRlib => return true
         }
     }
-    match syntax::attr::first_attr_value_str_by_name(krate.attrs, "crate_type") {
+    match syntax::attr::first_attr_value_str_by_name(krate.attrs.as_slice(),
+                                                     "crate_type") {
         Some(s) => {
             s.equiv(&("lib")) ||
             s.equiv(&("rlib")) ||
diff --git a/src/librustc/front/config.rs b/src/librustc/front/config.rs
index f2130033eed92..26d72f843515e 100644
--- a/src/librustc/front/config.rs
+++ b/src/librustc/front/config.rs
@@ -21,7 +21,7 @@ struct Context<'a> {
 // any items that do not belong in the current configuration
 pub fn strip_unconfigured_items(krate: ast::Crate) -> ast::Crate {
     let config = krate.config.clone();
-    strip_items(krate, |attrs| in_cfg(config, attrs))
+    strip_items(krate, |attrs| in_cfg(config.as_slice(), attrs))
 }
 
 impl<'a> fold::Folder for Context<'a> {
@@ -117,7 +117,7 @@ fn fold_item_underscore(cx: &mut Context, item: &ast::Item_) -> ast::Item_ {
         ast::ItemEnum(ref def, ref generics) => {
             let mut variants = def.variants.iter().map(|c| c.clone()).
                 filter_map(|v| {
-                if !(cx.in_cfg)(v.node.attrs) {
+                if !(cx.in_cfg)(v.node.attrs.as_slice()) {
                     None
                 } else {
                     Some(match v.node.kind {
@@ -147,7 +147,7 @@ fn fold_item_underscore(cx: &mut Context, item: &ast::Item_) -> ast::Item_ {
 
 fn fold_struct(cx: &Context, def: &ast::StructDef) -> @ast::StructDef {
     let mut fields = def.fields.iter().map(|c| c.clone()).filter(|m| {
-        (cx.in_cfg)(m.node.attrs)
+        (cx.in_cfg)(m.node.attrs.as_slice())
     });
     @ast::StructDef {
         fields: fields.collect(),
@@ -189,25 +189,25 @@ fn fold_block(cx: &mut Context, b: ast::P<ast::Block>) -> ast::P<ast::Block> {
 }
 
 fn item_in_cfg(cx: &Context, item: &ast::Item) -> bool {
-    return (cx.in_cfg)(item.attrs);
+    return (cx.in_cfg)(item.attrs.as_slice());
 }
 
 fn foreign_item_in_cfg(cx: &Context, item: &ast::ForeignItem) -> bool {
-    return (cx.in_cfg)(item.attrs);
+    return (cx.in_cfg)(item.attrs.as_slice());
 }
 
 fn view_item_in_cfg(cx: &Context, item: &ast::ViewItem) -> bool {
-    return (cx.in_cfg)(item.attrs);
+    return (cx.in_cfg)(item.attrs.as_slice());
 }
 
 fn method_in_cfg(cx: &Context, meth: &ast::Method) -> bool {
-    return (cx.in_cfg)(meth.attrs);
+    return (cx.in_cfg)(meth.attrs.as_slice());
 }
 
 fn trait_method_in_cfg(cx: &Context, meth: &ast::TraitMethod) -> bool {
     match *meth {
-        ast::Required(ref meth) => (cx.in_cfg)(meth.attrs),
-        ast::Provided(meth) => (cx.in_cfg)(meth.attrs)
+        ast::Required(ref meth) => (cx.in_cfg)(meth.attrs.as_slice()),
+        ast::Provided(meth) => (cx.in_cfg)(meth.attrs.as_slice())
     }
 }
diff --git a/src/librustc/front/feature_gate.rs b/src/librustc/front/feature_gate.rs
index 4450fbb04a931..b0a901f30be65 100644
--- a/src/librustc/front/feature_gate.rs
+++ b/src/librustc/front/feature_gate.rs
@@ -171,7 +171,7 @@ impl Visitor<()> for Context {
             }
 
             ast::ItemForeignMod(..) => {
-                if attr::contains_name(i.attrs, "link_args") {
+                if attr::contains_name(i.attrs.as_slice(), "link_args") {
                     self.gate_feature("link_args", i.span,
                                       "the `link_args` attribute is not portable \
                                        across platforms, it is recommended to \
@@ -180,7 +180,7 @@ impl Visitor<()> for Context {
             }
 
             ast::ItemFn(..) => {
-                if attr::contains_name(i.attrs, "macro_registrar") {
+                if attr::contains_name(i.attrs.as_slice(), "macro_registrar") {
                     self.gate_feature("macro_registrar", i.span,
                                       "cross-crate macro exports are \
                                        experimental and possibly buggy");
@@ -188,7 +188,7 @@ impl Visitor<()> for Context {
             }
 
             ast::ItemStruct(..) => {
-                if attr::contains_name(i.attrs, "simd") {
+                if attr::contains_name(i.attrs.as_slice(), "simd") {
                     self.gate_feature("simd", i.span,
                                       "SIMD types are experimental and possibly buggy");
                 }
diff --git a/src/librustc/front/std_inject.rs b/src/librustc/front/std_inject.rs
index f4bc1c1906353..eec44cc31b1a3 100644
--- a/src/librustc/front/std_inject.rs
+++ b/src/librustc/front/std_inject.rs
@@ -11,7 +11,8 @@
 use driver::session::Session;
 
-use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::ast;
 use syntax::attr;
 use syntax::codemap::DUMMY_SP;
@@ -43,11 +44,11 @@ pub fn maybe_inject_prelude(sess: Session, krate: ast::Crate) -> ast::Crate {
 }
 
 fn use_std(krate: &ast::Crate) -> bool {
-    !attr::contains_name(krate.attrs, "no_std")
+    !attr::contains_name(krate.attrs.as_slice(), "no_std")
 }
 
 fn use_uv(krate: &ast::Crate) -> bool {
-    !attr::contains_name(krate.attrs, "no_uv")
+    !attr::contains_name(krate.attrs.as_slice(), "no_uv")
 }
 
 fn no_prelude(attrs: &[ast::Attribute]) -> bool {
@@ -72,28 +73,27 @@ pub fn with_version(krate: &str) -> Option<(InternedString, ast::StrStyle)> {
 impl fold::Folder for StandardLibraryInjector {
     fn fold_crate(&mut self, krate: ast::Crate) -> ast::Crate {
-        let mut vis = ~[ast::ViewItem {
+        let mut vis = vec!(ast::ViewItem {
             node: ast::ViewItemExternMod(token::str_to_ident("std"),
                                          with_version("std"),
                                          ast::DUMMY_NODE_ID),
-            attrs: ~[
+            attrs: vec!(
                 attr::mk_attr(attr::mk_list_item(
                         InternedString::new("phase"),
-                        ~[
+                        vec!(
                             attr::mk_word_item(InternedString::new("syntax")),
                             attr::mk_word_item(InternedString::new("link")
-                        )]))
-            ],
+                        ))))),
             vis: ast::Inherited,
             span: DUMMY_SP
-        }];
+        });
 
         if use_uv(&krate) && !self.sess.building_library.get() {
             vis.push(ast::ViewItem {
                 node: ast::ViewItemExternMod(token::str_to_ident("green"),
                                              with_version("green"),
                                              ast::DUMMY_NODE_ID),
-                attrs: ~[],
+                attrs: Vec::new(),
                 vis: ast::Inherited,
                 span: DUMMY_SP
             });
@@ -101,13 +101,13 @@ impl fold::Folder for StandardLibraryInjector {
                 node: ast::ViewItemExternMod(token::str_to_ident("rustuv"),
                                              with_version("rustuv"),
                                              ast::DUMMY_NODE_ID),
-                attrs: ~[],
+                attrs: Vec::new(),
                 vis: ast::Inherited,
                 span: DUMMY_SP
             });
         }
 
-        vis.push_all(krate.module.view_items);
+        vis.push_all_move(krate.module.view_items.clone());
         let new_module = ast::Mod {
             view_items: vis,
             ..krate.module.clone()
@@ -134,7 +134,7 @@ struct PreludeInjector {
 impl fold::Folder for PreludeInjector {
     fn fold_crate(&mut self, krate: ast::Crate) -> ast::Crate {
-        if !no_prelude(krate.attrs) {
+        if !no_prelude(krate.attrs.as_slice()) {
             // only add `use std::prelude::*;` if there wasn't a
             // `#[no_implicit_prelude];` at the crate level.
             ast::Crate {
@@ -147,7 +147,7 @@ impl fold::Folder for PreludeInjector {
     }
 
     fn fold_item(&mut self, item: @ast::Item) -> SmallVector<@ast::Item> {
-        if !no_prelude(item.attrs) {
+        if !no_prelude(item.attrs.as_slice()) {
             // only recur if there wasn't `#[no_implicit_prelude];`
             // on this item, i.e. this means that the prelude is not
             // implicitly imported though the whole subtree
@@ -161,7 +161,7 @@ impl fold::Folder for PreludeInjector {
         let prelude_path = ast::Path {
             span: DUMMY_SP,
             global: false,
-            segments: ~[
+            segments: vec!(
                 ast::PathSegment {
                     identifier: token::str_to_ident("std"),
                     lifetimes: opt_vec::Empty,
@@ -171,19 +171,18 @@ impl fold::Folder for PreludeInjector {
                     identifier: token::str_to_ident("prelude"),
                     lifetimes: opt_vec::Empty,
                     types: opt_vec::Empty,
-                },
-            ],
+                }),
         };
 
         let vp = @codemap::dummy_spanned(ast::ViewPathGlob(prelude_path, ast::DUMMY_NODE_ID));
         let vi2 = ast::ViewItem {
-            node: ast::ViewItemUse(~[vp]),
-            attrs: ~[],
+            node: ast::ViewItemUse(vec!(vp)),
+            attrs: Vec::new(),
             vis: ast::Inherited,
             span: DUMMY_SP,
         };
 
-        let vis = vec::append(~[vi2], module.view_items);
+        let vis = vec_ng::append(vec!(vi2), module.view_items.as_slice());
 
         // FIXME #2543: Bad copy.
         let new_module = ast::Mod {
diff --git a/src/librustc/front/test.rs b/src/librustc/front/test.rs
index 45b1a42898c2a..333504b7e8247 100644
--- a/src/librustc/front/test.rs
+++ b/src/librustc/front/test.rs
@@ -10,6 +10,8 @@
 
 // Code that generates a test runner to run all the tests in a crate
 
+#[allow(dead_code)];
+#[allow(unused_imports)];
 
 use driver::session;
 use front::config;
@@ -18,6 +20,8 @@ use metadata::creader::Loader;
 
 use std::cell::RefCell;
 use std::vec;
+use std::vec_ng::Vec;
+use std::vec_ng;
 use syntax::ast_util::*;
 use syntax::attr::AttrMetaMethods;
 use syntax::attr;
@@ -57,7 +61,7 @@ pub fn modify_for_testing(sess: session::Session,
     // We generate the test harness when building in the 'test'
     // configuration, either with the '--test' or '--cfg test'
     // command line options.
-    let should_test = attr::contains_name(krate.config, "test");
+    let should_test = attr::contains_name(krate.config.as_slice(), "test");
 
     if should_test {
         generate_test_harness(sess, krate)
@@ -189,13 +193,13 @@ fn strip_test_functions(krate: ast::Crate) -> ast::Crate {
     // When not compiling with --test we should not compile the
    // #[test] functions
    config::strip_items(krate, |attrs| {
-        !attr::contains_name(attrs, "test") &&
-        !attr::contains_name(attrs, "bench")
+        !attr::contains_name(attrs.as_slice(), "test") &&
+        !attr::contains_name(attrs.as_slice(), "bench")
     })
 }
 
 fn is_test_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
-    let has_test_attr = attr::contains_name(i.attrs, "test");
+    let has_test_attr = attr::contains_name(i.attrs.as_slice(), "test");
 
     fn has_test_signature(i: @ast::Item) -> bool {
         match &i.node {
@@ -224,7 +228,7 @@ fn is_test_fn(cx: &TestCtxt, i: @ast::Item) -> bool {
 }
 
 fn is_bench_fn(i: @ast::Item) -> bool {
-    let has_bench_attr = attr::contains_name(i.attrs, "bench");
+    let has_bench_attr = attr::contains_name(i.attrs.as_slice(), "bench");
 
     fn has_test_signature(i: @ast::Item) -> bool {
         match i.node {
@@ -251,20 +255,22 @@ fn is_ignored(cx: &TestCtxt, i: @ast::Item) -> bool {
     i.attrs.iter().any(|attr| {
         // check ignore(cfg(foo, bar))
        attr.name().equiv(&("ignore")) && match attr.meta_item_list() {
-            Some(ref cfgs) => attr::test_cfg(cx.config, cfgs.iter().map(|x| *x)),
+            Some(ref cfgs) => {
+                attr::test_cfg(cx.config.as_slice(), cfgs.iter().map(|x| *x))
+            }
             None => true
         }
     })
 }
 
 fn should_fail(i: @ast::Item) -> bool {
-    attr::contains_name(i.attrs, "should_fail")
+    attr::contains_name(i.attrs.as_slice(), "should_fail")
 }
 
 fn add_test_module(cx: &TestCtxt, m: &ast::Mod) -> ast::Mod {
     let testmod = mk_test_module(cx);
     ast::Mod {
-        items: vec::append_one(m.items.clone(), testmod),
+        items: vec_ng::append_one(m.items.clone(), testmod),
         ..(*m).clone()
     }
 }
@@ -291,9 +297,9 @@ fn mk_std(cx: &TestCtxt) -> ast::ViewItem {
     let id_test = token::str_to_ident("test");
     let vi = if cx.is_test_crate {
         ast::ViewItemUse(
-            ~[@nospan(ast::ViewPathSimple(id_test,
-                                          path_node(~[id_test]),
-                                          ast::DUMMY_NODE_ID))])
+            vec!(@nospan(ast::ViewPathSimple(id_test,
+                                             path_node(~[id_test]),
+                                             ast::DUMMY_NODE_ID))))
     } else {
         ast::ViewItemExternMod(id_test,
                                with_version("test"),
@@ -301,16 +307,21 @@ fn mk_std(cx: &TestCtxt) -> ast::ViewItem {
     };
     ast::ViewItem {
         node: vi,
-        attrs: ~[],
+        attrs: Vec::new(),
         vis: ast::Inherited,
         span: DUMMY_SP
     }
 }
 
-fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
+#[cfg(stage0)]
+fn mk_test_module(_: &TestCtxt) -> @ast::Item {
+    fail!("test disabled in this stage due to quasiquoter")
+}
 
+#[cfg(not(stage0))]
+fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
     // Link to test crate
-    let view_items = ~[mk_std(cx)];
+    let view_items = vec!(mk_std(cx));
 
     // A constant vector of test descriptors.
     let tests = mk_tests(cx);
@@ -326,7 +337,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
 
     let testmod = ast::Mod {
         view_items: view_items,
-        items: ~[mainfn, tests],
+        items: vec!(mainfn, tests),
     };
     let item_ = ast::ItemMod(testmod);
 
@@ -337,7 +348,7 @@ fn mk_test_module(cx: &TestCtxt) -> @ast::Item {
 
     let item = ast::Item {
         ident: token::str_to_ident("__test"),
-        attrs: ~[resolve_unexported_attr],
+        attrs: vec!(resolve_unexported_attr),
         id: ast::DUMMY_NODE_ID,
         node: item_,
         vis: ast::Public,
@@ -377,6 +388,12 @@ fn path_node_global(ids: ~[ast::Ident]) -> ast::Path {
     }
 }
 
+#[cfg(stage0)]
+fn mk_tests(_: &TestCtxt) -> @ast::Item {
+    fail!("tests disabled in this stage due to quasiquoter")
+}
+
+#[cfg(not(stage0))]
 fn mk_tests(cx: &TestCtxt) -> @ast::Item {
     // The vector of test_descs for this crate
     let test_descs = mk_test_descs(cx);
@@ -389,14 +406,14 @@ fn mk_tests(cx: &TestCtxt) -> @ast::Item {
 }
 
 fn is_test_crate(krate: &ast::Crate) -> bool {
-    match attr::find_crateid(krate.attrs) {
+    match attr::find_crateid(krate.attrs.as_slice()) {
         Some(ref s) if "test" == s.name => true,
         _ => false
     }
 }
 
 fn mk_test_descs(cx: &TestCtxt) -> @ast::Expr {
-    let mut descs = ~[];
+    let mut descs = Vec::new();
     {
         let testfns = cx.testfns.borrow();
         debug!("building test vector from {} tests", testfns.get().len());
@@ -418,6 +435,12 @@ fn mk_test_descs(cx: &TestCtxt) -> @ast::Expr {
     }
 }
 
+#[cfg(stage0)]
+fn mk_test_desc_and_fn_rec(_: &TestCtxt, _: &Test) -> @ast::Expr {
+    fail!("tests disabled in this stage due to quasiquoter")
+}
+
+#[cfg(not(stage0))]
 fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
     let span = test.span;
     let path = test.path.clone();
diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs
index 2e3b9be5f9b86..235e1c72455c2 100644
--- a/src/librustc/lib.rs
+++ b/src/librustc/lib.rs
@@ -54,6 +54,7 @@ use std::os;
 use std::str;
 use std::task;
 use std::vec;
+use std::vec_ng::Vec;
 use syntax::ast;
 use syntax::diagnostic::Emitter;
 use syntax::diagnostic;
@@ -334,19 +335,22 @@ pub fn run_compiler(args: &[~str]) {
     d::compile_input(sess, cfg, &input, &odir, &ofile);
 }
 
-fn parse_crate_attrs(sess: session::Session,
-                     input: &d::Input) -> ~[ast::Attribute] {
-    match *input {
+fn parse_crate_attrs(sess: session::Session, input: &d::Input) ->
+                     ~[ast::Attribute] {
+    let result = match *input {
         d::FileInput(ref ifile) => {
-            parse::parse_crate_attrs_from_file(ifile, ~[], sess.parse_sess)
+            parse::parse_crate_attrs_from_file(ifile,
+                                               Vec::new(),
+                                               sess.parse_sess)
         }
         d::StrInput(ref src) => {
             parse::parse_crate_attrs_from_source_str(d::anon_src(),
                                                      (*src).clone(),
-                                                     ~[],
+                                                     Vec::new(),
                                                      sess.parse_sess)
         }
-    }
+    };
+    result.move_iter().collect()
 }
 
 /// Run a procedure which will detect failures in the compiler and print nicer
diff --git a/src/librustc/metadata/creader.rs b/src/librustc/metadata/creader.rs
index 165c1abdeedfa..58268d1169b2f 100644
--- a/src/librustc/metadata/creader.rs
+++ b/src/librustc/metadata/creader.rs
@@ -23,6 +23,7 @@ use metadata::loader;
 use metadata::loader::Os;
 
 use std::cell::RefCell;
+use std::vec_ng::Vec;
 use collections::HashMap;
 use syntax::ast;
 use syntax::abi;
@@ -140,7 +141,7 @@ fn visit_view_item(e: &mut Env, i: &ast::ViewItem) {
     let should_load = i.attrs.iter().all(|attr| {
         attr.name().get() != "phase" ||
             attr.meta_item_list().map_or(false, |phases| {
-                attr::contains_name(phases, "link")
+                attr::contains_name(phases.as_slice(), "link")
             })
     });
 
@@ -420,8 +421,9 @@ impl CrateLoader for Loader {
         }
     }
 
-    fn get_exported_macros(&mut self, cnum: ast::CrateNum) -> ~[~str] {
-        csearch::get_exported_macros(self.env.sess.cstore, cnum)
+    fn get_exported_macros(&mut self, cnum: ast::CrateNum) -> Vec<~str> {
+        csearch::get_exported_macros(self.env.sess.cstore, cnum).move_iter()
+                                                                .collect()
     }
 
     fn get_registrar_symbol(&mut self, cnum: ast::CrateNum) -> Option<~str> {
diff --git a/src/librustc/metadata/decoder.rs b/src/librustc/metadata/decoder.rs
index 42754aedba704..7e5c20fae65eb 100644
--- a/src/librustc/metadata/decoder.rs
+++ b/src/librustc/metadata/decoder.rs
@@ -1057,7 +1057,7 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
         let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
         let n = token::intern_and_get_ident(nd.as_str_slice());
         let subitems = get_meta_items(meta_item_doc);
-        items.push(attr::mk_list_item(n, subitems));
+        items.push(attr::mk_list_item(n, subitems.move_iter().collect()));
         true
     });
     return items;
diff --git a/src/librustc/metadata/encoder.rs b/src/librustc/metadata/encoder.rs
index 5bcc113ef946b..3a490845c6098 100644
--- a/src/librustc/metadata/encoder.rs
+++ b/src/librustc/metadata/encoder.rs
@@ -349,7 +349,7 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
         encode_name(ebml_w, variant.node.name.name);
         encode_parent_item(ebml_w, local_def(id));
         encode_visibility(ebml_w, variant.node.vis);
-        encode_attributes(ebml_w, variant.node.attrs);
+        encode_attributes(ebml_w, variant.node.attrs.as_slice());
         match variant.node.kind {
             ast::TupleVariantKind(ref args)
                     if args.len() > 0 && generics.ty_params.len() == 0 => {
@@ -357,7 +357,10 @@ fn encode_enum_variant_info(ecx: &EncodeContext,
             }
             ast::TupleVariantKind(_) => {},
             ast::StructVariantKind(def) => {
-                let idx = encode_info_for_struct(ecx, ebml_w, def.fields, index);
+                let idx = encode_info_for_struct(ecx,
                                                 ebml_w,
                                                 def.fields.as_slice(),
                                                 index);
                encode_struct_fields(ebml_w, def);
                let bkts = create_index(idx);
                encode_index(ebml_w, bkts, write_i64);
@@ -516,7 +519,7 @@ fn each_auxiliary_node_id(item: @Item, callback: |NodeId| -> bool) -> bool {
             // If this is a newtype struct, return the constructor.
             match struct_def.ctor_id {
                 Some(ctor_id) if struct_def.fields.len() > 0 &&
-                        struct_def.fields[0].node.kind ==
+                        struct_def.fields.get(0).node.kind ==
                         ast::UnnamedField => {
                     continue_ = callback(ctor_id);
                 }
@@ -799,13 +802,17 @@ fn encode_info_for_method(ecx: &EncodeContext,
     let elem = ast_map::PathName(m.ident.name);
     encode_path(ebml_w, impl_path.chain(Some(elem).move_iter()));
     match ast_method_opt {
-        Some(ast_method) => encode_attributes(ebml_w, ast_method.attrs),
+        Some(ast_method) => {
+            encode_attributes(ebml_w, ast_method.attrs.as_slice())
+        }
        None => ()
     }
 
    for &ast_method in ast_method_opt.iter() {
        let num_params = tpt.generics.type_param_defs().len();
-        if num_params > 0u || is_default_impl || should_inline(ast_method.attrs) {
+        if num_params > 0u ||
+                is_default_impl ||
+                should_inline(ast_method.attrs.as_slice()) {
            (ecx.encode_inlined_item)(
                ecx, ebml_w, IIMethodRef(local_def(parent_id), false, ast_method));
        } else {
@@ -930,8 +937,8 @@ fn encode_info_for_item(ecx: &EncodeContext,
        encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
        encode_name(ebml_w, item.ident.name);
        encode_path(ebml_w, path);
-        encode_attributes(ebml_w, item.attrs);
-        if tps_len > 0u || should_inline(item.attrs) {
+        encode_attributes(ebml_w, item.attrs.as_slice());
+        if tps_len > 0u || should_inline(item.attrs.as_slice()) {
            (ecx.encode_inlined_item)(ecx, ebml_w, IIItemRef(item));
        } else {
            encode_symbol(ecx, ebml_w, item.id);
@@ -986,7 +993,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
        encode_item_variances(ebml_w, ecx, item.id);
        encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
        encode_name(ebml_w, item.ident.name);
-        encode_attributes(ebml_w, item.attrs);
+        encode_attributes(ebml_w, item.attrs.as_slice());
        for v in (*enum_definition).variants.iter() {
            encode_variant_id(ebml_w, local_def(v.node.id));
        }
@@ -1002,7 +1009,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
        encode_enum_variant_info(ecx,
                                 ebml_w,
                                 item.id,
-                                 (*enum_definition).variants,
+                                 (*enum_definition).variants.as_slice(),
                                 index,
                                 generics);
      }
@@ -1012,7 +1019,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
         the index, and the index needs to be in the item for the
         class itself */
        let idx = encode_info_for_struct(ecx, ebml_w,
-                                         struct_def.fields, index);
+                                         struct_def.fields.as_slice(), index);
 
        /* Index the class*/
        add_to_index(item, ebml_w, index);
@@ -1025,7 +1032,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
 
        encode_item_variances(ebml_w, ecx, item.id);
        encode_name(ebml_w, item.ident.name);
-        encode_attributes(ebml_w, item.attrs);
+        encode_attributes(ebml_w, item.attrs.as_slice());
        encode_path(ebml_w, path.clone());
        encode_visibility(ebml_w, vis);
 
@@ -1065,7 +1072,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
        encode_family(ebml_w, 'i');
        encode_bounds_and_type(ebml_w, ecx, &lookup_item_type(tcx, def_id));
        encode_name(ebml_w, item.ident.name);
-        encode_attributes(ebml_w, item.attrs);
+        encode_attributes(ebml_w, item.attrs.as_slice());
        match ty.node {
            ast::TyPath(ref path, ref bounds, _) if path.segments
                                                        .len() == 1 => {
@@ -1097,7 +1104,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
        let num_implemented_methods = ast_methods.len();
        for (i, m) in imp.methods.iter().enumerate() {
            let ast_method = if i < num_implemented_methods {
-                Some(ast_methods[i])
+                Some(*ast_methods.get(i))
            } else { None };
 
            {
@@ -1129,7 +1136,7 @@ fn encode_info_for_item(ecx: &EncodeContext,
        encode_region_param_defs(ebml_w, trait_def.generics.region_param_defs());
        encode_trait_ref(ebml_w, ecx, trait_def.trait_ref, tag_item_trait_ref);
        encode_name(ebml_w, item.ident.name);
-        encode_attributes(ebml_w, item.attrs);
+        encode_attributes(ebml_w, item.attrs.as_slice());
        encode_visibility(ebml_w, vis);
        for &method_def_id in ty::trait_method_def_ids(tcx, def_id).iter() {
            ebml_w.start_tag(tag_item_trait_method);
@@ -1195,14 +1202,14 @@ fn encode_info_for_item(ecx: &EncodeContext,
                }
            }
 
-            match ms[i] {
-                Required(ref tm) => {
-                    encode_attributes(ebml_w, tm.attrs);
+            match ms.get(i) {
+                &Required(ref tm) => {
+                    encode_attributes(ebml_w, tm.attrs.as_slice());
                    encode_method_sort(ebml_w, 'r');
                }
 
-                Provided(m) => {
-                    encode_attributes(ebml_w, m.attrs);
+                &Provided(m) => {
+                    encode_attributes(ebml_w, m.attrs.as_slice());
                    // If this is a static method, we've already encoded
                    // this.
                    if method_ty.explicit_self != SelfStatic {
diff --git a/src/librustc/middle/astencode.rs b/src/librustc/middle/astencode.rs
index 0ab826fb1ad96..3fb127f470e97 100644
--- a/src/librustc/middle/astencode.rs
+++ b/src/librustc/middle/astencode.rs
@@ -36,6 +36,7 @@ use std::libc;
 use std::cast;
 use std::io::Seek;
 use std::rc::Rc;
+use std::vec_ng::Vec;
 
 use serialize::ebml::reader;
 use serialize::ebml;
@@ -334,8 +335,8 @@ impl Folder for NestedItemsDropper {
         }).collect();
         let blk_sans_items = ast::P(ast::Block {
-            view_items: ~[], // I don't know if we need the view_items here,
-                             // but it doesn't break tests!
+            view_items: Vec::new(), // I don't know if we need the view_items
+                                    // here, but it doesn't break tests!
             stmts: stmts_sans_items,
             expr: blk.expr,
             id: blk.id,
@@ -396,7 +397,10 @@ fn renumber_and_map_ast(xcx: @ExtendedDecodeContext,
                         map: &ast_map::Map,
                         path: ~[ast_map::PathElem],
                         ii: ast::InlinedItem) -> ast::InlinedItem {
-    ast_map::map_decoded_item(map, path, AstRenumberer { xcx: xcx }, |fld| {
+    ast_map::map_decoded_item(map,
+                              path.move_iter().collect(),
+                              AstRenumberer { xcx: xcx },
+                              |fld| {
         match ii {
             ast::IIItem(i) => {
                 ast::IIItem(fld.fold_item(i).expect_one("expected one item"))
@@ -1436,7 +1440,9 @@ trait fake_ext_ctxt {
 
 #[cfg(test)]
 impl fake_ext_ctxt for @parse::ParseSess {
-    fn cfg(&self) -> ast::CrateConfig { ~[] }
+    fn cfg(&self) -> ast::CrateConfig {
+        Vec::new()
+    }
     fn parse_sess(&self) -> @parse::ParseSess { *self }
     fn call_site(&self) -> Span {
         codemap::Span {
diff --git a/src/librustc/middle/borrowck/check_loans.rs b/src/librustc/middle/borrowck/check_loans.rs
index f974b324f0564..3a500a82664dd 100644
--- a/src/librustc/middle/borrowck/check_loans.rs
+++ b/src/librustc/middle/borrowck/check_loans.rs
@@ -831,10 +831,10 @@ fn check_loans_in_expr<'a>(this: &mut CheckLoanCtxt<'a>,
         this.check_assignment(dest);
       }
       ast::ExprCall(f, ref args) => {
-        this.check_call(expr, Some(f), f.span, *args);
+        this.check_call(expr, Some(f), f.span, args.as_slice());
      }
      ast::ExprMethodCall(_, _, ref args) => {
-        this.check_call(expr, None, expr.span, *args);
+        this.check_call(expr, None, expr.span, args.as_slice());
      }
      ast::ExprIndex(_, rval) | ast::ExprBinary(_, _, rval)
      if method_map.get().contains_key(&expr.id) => {
diff --git a/src/librustc/middle/cfg/construct.rs b/src/librustc/middle/cfg/construct.rs
index 12853fe6742da..b76d4cb858903 100644
--- a/src/librustc/middle/cfg/construct.rs
+++ b/src/librustc/middle/cfg/construct.rs
@@ -298,7 +298,8 @@ impl CFGBuilder {
                 let mut guard_exit = discr_exit;
                 for arm in arms.iter() {
                     guard_exit = self.opt_expr(arm.guard, guard_exit); // 2
-                    let pats_exit = self.pats_any(arm.pats, guard_exit); // 3
+                    let pats_exit = self.pats_any(arm.pats.as_slice(),
+                                                  guard_exit); // 3
                     let body_exit = self.block(arm.body, pats_exit); // 4
                     self.add_contained_edge(body_exit, expr_exit); // 5
                 }
@@ -348,15 +349,15 @@ impl CFGBuilder {
             }
 
             ast::ExprVec(ref elems, _) => {
-                self.straightline(expr, pred, *elems)
+                self.straightline(expr, pred, elems.as_slice())
             }
 
             ast::ExprCall(func, ref args) => {
-                self.call(expr, pred, func, *args)
+                self.call(expr, pred, func, args.as_slice())
             }
 
             ast::ExprMethodCall(_, _, ref args) => {
-                self.call(expr, pred, args[0], args.slice_from(1))
+                self.call(expr, pred, *args.get(0), args.slice_from(1))
             }
 
             ast::ExprIndex(l, r) |
@@ -369,7 +370,7 @@ impl CFGBuilder {
             }
 
             ast::ExprTup(ref exprs) => {
-                self.straightline(expr, pred, *exprs)
+                self.straightline(expr, pred, exprs.as_slice())
             }
 
             ast::ExprStruct(_, ref fields, base) => {
diff --git a/src/librustc/middle/check_match.rs b/src/librustc/middle/check_match.rs
index 7292633eec081..0ca5ad8b44c5a 100644
--- a/src/librustc/middle/check_match.rs
+++ b/src/librustc/middle/check_match.rs
@@ -76,10 +76,10 @@ fn check_expr(v: &mut CheckMatchVisitor,
         for arm in arms.iter() {
             check_legality_of_move_bindings(cx,
                                             arm.guard.is_some(),
-                                            arm.pats);
+                                            arm.pats.as_slice());
         }
 
-        check_arms(cx, *arms);
+        check_arms(cx, arms.as_slice());
         /* Check for exhaustiveness */
         // Check for empty enum, because is_useful only works on inhabited
         // types.
@@ -104,11 +104,15 @@ fn check_expr(v: &mut CheckMatchVisitor,
            }
            _ => { /* We assume only enum types can be uninhabited */ }
        }
-        let arms = arms.iter().filter_map(unguarded_pat).collect::<~[~[@Pat]]>().concat_vec();
-        if arms.is_empty() {
+
+        let pats: ~[@Pat] = arms.iter()
+                                .filter_map(unguarded_pat)
+                                .flat_map(|pats| pats.move_iter())
+                                .collect();
+        if pats.is_empty() {
            cx.tcx.sess.span_err(ex.span, "non-exhaustive patterns");
        } else {
-            check_exhaustive(cx, ex.span, arms);
+            check_exhaustive(cx, ex.span, pats);
        }
      }
      _ => ()
@@ -671,7 +675,7 @@ fn specialize(cx: &MatchCheckCtxt,
                 }
                 DefVariant(_, id, _) if variant(id) == *ctor_id => {
                     let args = match args {
-                        Some(args) => args,
+                        Some(args) => args.iter().map(|x| *x).collect(),
                         None => vec::from_elem(arity, wild())
                     };
                     Some(vec::append(args, r.tail()))
@@ -682,7 +686,9 @@ fn specialize(cx: &MatchCheckCtxt,
                 DefStruct(..) => {
                     let new_args;
                     match args {
-                        Some(args) => new_args = args,
+                        Some(args) => {
+                            new_args = args.iter().map(|x| *x).collect()
+                        }
                         None => new_args = vec::from_elem(arity, wild())
                     }
                     Some(vec::append(new_args, r.tail()))
@@ -741,7 +747,9 @@ fn specialize(cx: &MatchCheckCtxt,
                 }
             }
        }
-        PatTup(args) => Some(vec::append(args, r.tail())),
+        PatTup(args) => {
+            Some(vec::append(args.iter().map(|x| *x).collect(), r.tail()))
+        }
        PatUniq(a) | PatRegion(a) => {
            Some(vec::append(~[a], r.tail()))
        }
@@ -804,20 +812,32 @@ fn specialize(cx: &MatchCheckCtxt,
            vec(_) => {
                let num_elements = before.len() + after.len();
                if num_elements < arity && slice.is_some() {
-                    Some(vec::append(
-                        [
-                            before,
-                            vec::from_elem(
-                                arity - num_elements, wild()),
-                            after
-                        ].concat_vec(),
-                        r.tail()
-                    ))
+                    let mut result = ~[];
+                    for pat in before.iter() {
+                        result.push((*pat).clone());
+                    }
+                    for _ in iter::range(0, arity - num_elements) {
+                        result.push(wild())
+                    }
+                    for pat in after.iter() {
+                        result.push((*pat).clone());
+                    }
+                    for pat in r.tail().iter() {
+                        result.push((*pat).clone());
+                    }
+                    Some(result)
                } else if num_elements == arity {
-                    Some(vec::append(
-                        vec::append(before, after),
-                        r.tail()
-                    ))
+                    let mut result = ~[];
+                    for pat in before.iter() {
+                        result.push((*pat).clone());
+                    }
+                    for pat in after.iter() {
+                        result.push((*pat).clone());
+                    }
+                    for pat in r.tail().iter() {
+                        result.push((*pat).clone());
+                    }
+                    Some(result)
                } else {
                    None
                }
diff --git a/src/librustc/middle/const_eval.rs b/src/librustc/middle/const_eval.rs
index 0b516a7f05fcf..cd52f24b8eaee 100644
--- a/src/librustc/middle/const_eval.rs
+++ b/src/librustc/middle/const_eval.rs
@@ -117,7 +117,7 @@ pub fn lookup_variant_by_id(tcx: ty::ctxt,
             None => None,
             Some(ast_map::NodeItem(it)) => match it.node {
                 ItemEnum(ast::EnumDef { variants: ref variants }, _) => {
-                    variant_expr(*variants, variant_def.node)
+                    variant_expr(variants.as_slice(), variant_def.node)
                }
                _ => None
            },
@@ -144,7 +144,7 @@ pub fn lookup_variant_by_id(tcx: ty::ctxt,
                                                        c, d)) {
            csearch::found(ast::IIItem(item)) => match item.node {
                ItemEnum(ast::EnumDef { variants: ref variants }, _) => {
-                    variant_expr(*variants, variant_def.node)
+                    variant_expr(variants.as_slice(), variant_def.node)
                }
                _ => None
            },
@@ -509,7 +509,9 @@ pub fn eval_const_expr_partial(tcx: &T, e: &Expr)
 pub fn lit_to_const(lit: &Lit) -> const_val {
     match lit.node {
         LitStr(ref s, _) => const_str((*s).clone()),
-        LitBinary(ref data) => const_binary(data.clone()),
+        LitBinary(ref data) => {
+            const_binary(Rc::new(data.borrow().iter().map(|x| *x).collect()))
+        }
         LitChar(n) => const_uint(n as u64),
         LitInt(n, _) => const_int(n),
         LitUint(n, _) => const_uint(n),
diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs
index e81e6b0f96d2d..1e38f5d9bd7f5 100644
--- a/src/librustc/middle/dataflow.rs
+++ b/src/librustc/middle/dataflow.rs
@@ -531,7 +531,9 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
                     // determine the bits for the body and then union
                     // them into `in_out`, which reflects all bodies to date
                     let mut body = guards.to_owned();
-                    self.walk_pat_alternatives(arm.pats, body, loop_scopes);
+                    self.walk_pat_alternatives(arm.pats.as_slice(),
+                                               body,
+                                               loop_scopes);
                     self.walk_block(arm.body, body, loop_scopes);
                     join_bits(&self.dfcx.oper, body, in_out);
                 }
@@ -562,7 +564,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
             }
 
             ast::ExprVec(ref exprs, _) => {
-                self.walk_exprs(*exprs, in_out, loop_scopes)
+                self.walk_exprs(exprs.as_slice(), in_out, loop_scopes)
             }
 
             ast::ExprRepeat(l, r, _) => {
@@ -579,11 +581,11 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
 
             ast::ExprCall(f, ref args) => {
                 self.walk_expr(f, in_out, loop_scopes);
-                self.walk_call(expr.id, *args, in_out, loop_scopes);
+                self.walk_call(expr.id, args.as_slice(), in_out, loop_scopes);
             }
 
             ast::ExprMethodCall(_, _, ref args) => {
-                self.walk_call(expr.id, *args, in_out, loop_scopes);
+                self.walk_call(expr.id, args.as_slice(), in_out, loop_scopes);
             }
 
             ast::ExprIndex(l, r) |
@@ -596,7 +598,7 @@ impl<'a, O:DataFlowOperator> PropagationContext<'a, O> {
             }
 
             ast::ExprTup(ref exprs) => {
-                self.walk_exprs(*exprs, in_out, loop_scopes);
+                self.walk_exprs(exprs.as_slice(), in_out, loop_scopes);
             }
 
             ast::ExprBinary(op, l, r) if ast_util::lazy_binop(op) => {
diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs
index 7d66c80dea08b..a0a34ff4f32b5 100644
--- a/src/librustc/middle/dead.rs
+++ b/src/librustc/middle/dead.rs
@@ -198,7 +198,7 @@ impl Visitor<()> for MarkSymbolVisitor {
 
 fn has_allow_dead_code_or_lang_attr(attrs: &[ast::Attribute]) -> bool {
     contains_lint(attrs, allow, DEAD_CODE_LINT_STR)
-    || attr::contains_name(attrs, "lang")
+    || attr::contains_name(attrs.as_slice(), "lang")
 }
 
 // This visitor seeds items that
@@ -220,7 +220,7 @@ struct LifeSeeder {
 
 impl Visitor<()> for LifeSeeder {
     fn visit_item(&mut self, item: &ast::Item, _: ()) {
-        if has_allow_dead_code_or_lang_attr(item.attrs) {
+        if has_allow_dead_code_or_lang_attr(item.attrs.as_slice()) {
             self.worklist.push(item.id);
         }
         match item.node {
@@ -240,7 +240,7 @@ impl Visitor<()> for LifeSeeder {
         // Check for method here because methods are not ast::Item
         match *fk {
             visit::FkMethod(_, _, method) => {
-                if has_allow_dead_code_or_lang_attr(method.attrs) {
+                if has_allow_dead_code_or_lang_attr(method.attrs.as_slice()) {
                     self.worklist.push(id);
                 }
             }
diff --git a/src/librustc/middle/entry.rs b/src/librustc/middle/entry.rs
index f6af8b86b5d45..2623ddb15297e 100644
--- a/src/librustc/middle/entry.rs
+++ b/src/librustc/middle/entry.rs
@@ -54,7 +54,7 @@ pub fn find_entry_point(session: Session, krate: &Crate, ast_map: &ast_map::Map)
     }
 
     // If the user wants no main function at all, then stop here.
-    if attr::contains_name(krate.attrs, "no_main") {
+    if attr::contains_name(krate.attrs.as_slice(), "no_main") {
         session.entry_type.set(Some(session::EntryNone));
         return
     }
@@ -95,7 +95,7 @@ fn find_item(item: &Item, ctxt: &mut EntryContext) {
             });
         }
 
-        if attr::contains_name(item.attrs, "main") {
+        if attr::contains_name(item.attrs.as_slice(), "main") {
             if ctxt.attr_main_fn.is_none() {
                 ctxt.attr_main_fn = Some((item.id, item.span));
             } else {
@@ -105,7 +105,7 @@ fn find_item(item: &Item, ctxt: &mut EntryContext) {
             }
         }
 
-        if attr::contains_name(item.attrs, "start") {
+        if attr::contains_name(item.attrs.as_slice(), "start") {
             if ctxt.start_fn.is_none() {
                 ctxt.start_fn = Some((item.id, item.span));
             } else {
diff --git a/src/librustc/middle/kind.rs b/src/librustc/middle/kind.rs
index 672a3e44ccba2..5b7ac704e2a22 100644
--- a/src/librustc/middle/kind.rs
+++ b/src/librustc/middle/kind.rs
@@ -160,7 +160,7 @@ fn check_impl_of_trait(cx: &mut Context, it: &Item, trait_ref: &TraitRef, self_t
 }
 
 fn check_item(cx: &mut Context, item: &Item) {
-    if !attr::contains_name(item.attrs, "unsafe_destructor") {
+    if !attr::contains_name(item.attrs.as_slice(), "unsafe_destructor") {
         match item.node {
             ItemImpl(_, Some(ref trait_ref), self_type, _) => {
                 check_impl_of_trait(cx, item, trait_ref, self_type);
diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs
index 0194d4a251048..bedf8ed05290b 100644
--- a/src/librustc/middle/lang_items.rs
+++ b/src/librustc/middle/lang_items.rs
@@ -114,7 +114,7 @@ struct LanguageItemVisitor<'a> {
 
 impl<'a> Visitor<()> for LanguageItemVisitor<'a> {
     fn visit_item(&mut self, item: &ast::Item, _: ()) {
-        match extract(item.attrs) {
+        match extract(item.attrs.as_slice()) {
             Some(value) => {
                 let item_index = self.this.item_refs.find_equiv(&value).map(|x| *x);
 
diff --git a/src/librustc/middle/lint.rs b/src/librustc/middle/lint.rs
index 693f6fb35f4fc..cef32797ca268 100644
--- a/src/librustc/middle/lint.rs
+++ b/src/librustc/middle/lint.rs
@@ -548,7 +548,9 @@ impl<'a> Context<'a> {
             attr.name().equiv(&("doc")) &&
                 match attr.meta_item_list() {
                     None => false,
-                    Some(l) => attr::contains_name(l, "hidden")
+                    Some(l) => {
+                        attr::contains_name(l.as_slice(), "hidden")
+                    }
                 }
         });
 
@@ -1070,7 +1072,8 @@ fn check_unused_result(cx: &Context, s: &ast::Stmt) {
             if ast_util::is_local(did) {
                 match cx.tcx.map.get(did.node) {
                     ast_map::NodeItem(it) => {
-                        if attr::contains_name(it.attrs, "must_use") {
+                        if attr::contains_name(it.attrs.as_slice(),
+                                               "must_use") {
                             cx.span_lint(UnusedMustUse, s.span,
                                          "unused result which must be used");
                             warned = true;
@@ -1234,8 +1237,9 @@ fn check_unused_mut_pat(cx: &Context, p: &ast::Pat) {
                     ref path, _) if pat_util::pat_is_binding(cx.tcx.def_map, p)=> {
             // `let mut _a = 1;` doesn't need a warning.
             let initial_underscore = if path.segments.len() == 1 {
-                token::get_ident(path.segments[0].identifier).get()
-                                                             .starts_with("_")
+                token::get_ident(path.segments
+                                     .get(0)
+                                     .identifier).get().starts_with("_")
             } else {
                 cx.tcx.sess.span_bug(p.span,
                                      "mutable binding that doesn't consist \
@@ -1353,7 +1357,11 @@ fn check_missing_doc_item(cx: &Context, it: &ast::Item) {
         ast::ItemTrait(..) => "a trait",
         _ => return
     };
-    check_missing_doc_attrs(cx, Some(it.id), it.attrs, it.span, desc);
+    check_missing_doc_attrs(cx,
+                            Some(it.id),
+                            it.attrs.as_slice(),
+                            it.span,
+                            desc);
 }
 
 fn check_missing_doc_method(cx: &Context, m: &ast::Method) {
@@ -1386,24 +1394,39 @@ fn check_missing_doc_method(cx: &Context, m: &ast::Method) {
             }
         }
     }
-    check_missing_doc_attrs(cx, Some(m.id), m.attrs, m.span, "a method");
+    check_missing_doc_attrs(cx,
+                            Some(m.id),
+                            m.attrs.as_slice(),
+                            m.span,
+                            "a method");
 }
 
 fn check_missing_doc_ty_method(cx: &Context, tm: &ast::TypeMethod) {
-    check_missing_doc_attrs(cx, Some(tm.id), tm.attrs, tm.span, "a type method");
+    check_missing_doc_attrs(cx,
+                            Some(tm.id),
+                            tm.attrs.as_slice(),
+                            tm.span,
+                            "a type method");
 }
 
 fn check_missing_doc_struct_field(cx: &Context, sf: &ast::StructField) {
     match sf.node.kind {
         ast::NamedField(_, vis) if vis != ast::Private =>
-            check_missing_doc_attrs(cx, Some(cx.cur_struct_def_id), sf.node.attrs,
-                                    sf.span, "a struct field"),
+            check_missing_doc_attrs(cx,
+                                    Some(cx.cur_struct_def_id),
+                                    sf.node.attrs.as_slice(),
+                                    sf.span,
+                                    "a struct field"),
         _ => {}
     }
 }
 
 fn check_missing_doc_variant(cx: &Context, v: &ast::Variant) {
-    check_missing_doc_attrs(cx, Some(v.node.id), v.node.attrs, v.span, "a variant");
+    check_missing_doc_attrs(cx,
+                            Some(v.node.id),
+                            v.node.attrs.as_slice(),
+                            v.span,
+                            "a variant");
 }
 
 /// Checks for use of items with #[deprecated], #[experimental] and
@@ -1500,13 +1523,13 @@ fn check_stability(cx: &Context, e: &ast::Expr) {
 
 impl<'a> Visitor<()> for Context<'a> {
     fn visit_item(&mut self, it: &ast::Item, _: ()) {
-        self.with_lint_attrs(it.attrs, |cx| {
+        self.with_lint_attrs(it.attrs.as_slice(), |cx| {
             check_item_ctypes(cx, it);
             check_item_non_camel_case_types(cx, it);
             check_item_non_uppercase_statics(cx, it);
             check_heap_item(cx, it);
             check_missing_doc_item(cx, it);
-            check_attrs_usage(cx, it.attrs);
+            check_attrs_usage(cx, it.attrs.as_slice());
 
             cx.visit_ids(|v| v.visit_item(it, ()));
 
@@ -1515,15 +1538,15 @@ impl<'a> Visitor<()> for Context<'a> {
     }
 
     fn visit_foreign_item(&mut self, it: &ast::ForeignItem, _: ()) {
-        self.with_lint_attrs(it.attrs, |cx| {
-            check_attrs_usage(cx, it.attrs);
+        self.with_lint_attrs(it.attrs.as_slice(), |cx| {
+            check_attrs_usage(cx, it.attrs.as_slice());
             visit::walk_foreign_item(cx, it, ());
         })
     }
 
     fn visit_view_item(&mut self, i: &ast::ViewItem, _: ()) {
-        self.with_lint_attrs(i.attrs, |cx| {
-            check_attrs_usage(cx, i.attrs);
+        self.with_lint_attrs(i.attrs.as_slice(), |cx| {
+            check_attrs_usage(cx, i.attrs.as_slice());
             visit::walk_view_item(cx, i, ());
         })
     }
@@ -1579,9 +1602,9 @@ impl<'a> Visitor<()> for Context<'a> {
 
         match *fk {
             visit::FkMethod(_, _, m) => {
-                self.with_lint_attrs(m.attrs, |cx| {
+                self.with_lint_attrs(m.attrs.as_slice(), |cx| {
                     check_missing_doc_method(cx, m);
-                    check_attrs_usage(cx, m.attrs);
+                    check_attrs_usage(cx, m.attrs.as_slice());
 
                     cx.visit_ids(|v| {
                         v.visit_fn(fk, decl, body, span, id, ());
@@ -1595,9 +1618,9 @@ impl<'a> Visitor<()> for Context<'a> {
 
     fn visit_ty_method(&mut self, t: &ast::TypeMethod, _: ()) {
-        self.with_lint_attrs(t.attrs, |cx| {
+        self.with_lint_attrs(t.attrs.as_slice(), |cx| {
             check_missing_doc_ty_method(cx, t);
-            check_attrs_usage(cx, t.attrs);
+            check_attrs_usage(cx, t.attrs.as_slice());
 
             visit::walk_ty_method(cx, t, ());
         })
@@ -1616,18 +1639,18 @@ impl<'a> Visitor<()> for Context<'a> {
     }
 
     fn visit_struct_field(&mut self, s: &ast::StructField, _: ()) {
-        self.with_lint_attrs(s.node.attrs, |cx| {
+        self.with_lint_attrs(s.node.attrs.as_slice(), |cx| {
             check_missing_doc_struct_field(cx, s);
-            check_attrs_usage(cx, s.node.attrs);
+            check_attrs_usage(cx, s.node.attrs.as_slice());
 
             visit::walk_struct_field(cx, s, ());
         })
     }
 
     fn visit_variant(&mut self, v: &ast::Variant, g: &ast::Generics, _: ()) {
-        self.with_lint_attrs(v.node.attrs, |cx| {
+        self.with_lint_attrs(v.node.attrs.as_slice(), |cx| {
             check_missing_doc_variant(cx, v);
-            check_attrs_usage(cx, v.node.attrs);
+            check_attrs_usage(cx, v.node.attrs.as_slice());
 
             visit::walk_variant(cx, v, g, ());
         })
@@ -1675,17 +1698,21 @@ pub fn check_crate(tcx: ty::ctxt,
     for &(lint, level) in tcx.sess.opts.lint_opts.iter() {
         cx.set_level(lint, level, CommandLine);
     }
-    cx.with_lint_attrs(krate.attrs, |cx| {
+    cx.with_lint_attrs(krate.attrs.as_slice(), |cx| {
         cx.visit_id(ast::CRATE_NODE_ID);
         cx.visit_ids(|v| {
             v.visited_outermost = true;
             visit::walk_crate(v, krate, ());
         });
 
-        check_crate_attrs_usage(cx, krate.attrs);
+        check_crate_attrs_usage(cx, krate.attrs.as_slice());
         // since the root module isn't visited as an item (because it isn't an item), warn for it
         // here.
-        check_missing_doc_attrs(cx, None, krate.attrs, krate.span, "crate");
+        check_missing_doc_attrs(cx,
+                                None,
+                                krate.attrs.as_slice(),
+                                krate.span,
+                                "crate");
 
         visit::walk_crate(cx, krate, ());
     });
diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs
index eb123a91119af..ab4de1ed71d79 100644
--- a/src/librustc/middle/liveness.rs
+++ b/src/librustc/middle/liveness.rs
@@ -1129,7 +1129,8 @@ impl Liveness {
                 let guard_succ = self.propagate_through_opt_expr(arm.guard, body_succ);
                 let arm_succ =
-                    self.define_bindings_in_arm_pats(arm.pats, guard_succ);
+                    self.define_bindings_in_arm_pats(arm.pats.as_slice(),
+                                                     guard_succ);
                 self.merge_from_succ(ln, arm_succ, first_merge);
                 first_merge = false;
             };
@@ -1194,7 +1195,7 @@ impl Liveness {
           }
 
           ExprVec(ref exprs, _) => {
-            self.propagate_through_exprs(*exprs, succ)
+            self.propagate_through_exprs(exprs.as_slice(), succ)
          }
 
          ExprRepeat(element, count, _) => {
@@ -1215,7 +1216,7 @@ impl Liveness {
            let t_ret = ty::ty_fn_ret(ty::expr_ty(self.tcx, f));
            let succ = if ty::type_is_bot(t_ret) {self.s.exit_ln}
                       else {succ};
-            let succ = self.propagate_through_exprs(*args, succ);
+            let succ = self.propagate_through_exprs(args.as_slice(), succ);
            self.propagate_through_expr(f, succ)
          }
 
@@ -1225,11 +1226,11 @@ impl Liveness {
            let t_ret = ty::node_id_to_type(self.tcx, expr.id);
            let succ = if ty::type_is_bot(t_ret) {self.s.exit_ln}
                       else {succ};
-            self.propagate_through_exprs(*args, succ)
+            self.propagate_through_exprs(args.as_slice(), succ)
          }
 
          ExprTup(ref exprs) => {
-            self.propagate_through_exprs(*exprs, succ)
+            self.propagate_through_exprs(exprs.as_slice(), succ)
          }
 
          ExprBinary(op, l, r) if ast_util::lazy_binop(op) => {
@@ -1493,7 +1494,7 @@ fn check_local(this: &mut Liveness, local: &Local) {
 }
 
 fn check_arm(this: &mut Liveness, arm: &Arm) {
-    this.arm_pats_bindings(arm.pats, |ln, var, sp, id| {
+    this.arm_pats_bindings(arm.pats.as_slice(), |ln, var, sp, id| {
         this.warn_about_unused(sp, id, ln, var);
     });
     visit::walk_arm(this, arm, ());
diff --git a/src/librustc/middle/moves.rs b/src/librustc/middle/moves.rs
index 4864f72d8fcbd..b96a4e4c16064 100644
--- a/src/librustc/middle/moves.rs
+++ b/src/librustc/middle/moves.rs
@@ -409,11 +409,11 @@ impl VisitContext {
                     }
                 }
                 self.use_expr(callee, mode);
-                self.use_fn_args(*args);
+                self.use_fn_args(args.as_slice());
             }
 
             ExprMethodCall(_, _, ref args) => { // callee.m(args)
-                self.use_fn_args(*args);
+                self.use_fn_args(args.as_slice());
             }
 
             ExprStruct(_, ref fields, opt_with) => {
@@ -468,7 +468,7 @@ impl VisitContext {
             }
 
             ExprTup(ref exprs) => {
-                self.consume_exprs(*exprs);
+                self.consume_exprs(exprs.as_slice());
             }
 
             ExprIf(cond_expr, then_blk, opt_else_expr) => {
@@ -497,7 +497,7 @@ impl VisitContext {
             }
 
             ExprVec(ref exprs, _) => {
-                self.consume_exprs(*exprs);
+                self.consume_exprs(exprs.as_slice());
             }
 
             ExprAddrOf(_, base) => {   // &base
diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs
index afe4d00103618..3d90566ee7931 100644
--- a/src/librustc/middle/privacy.rs
+++ b/src/librustc/middle/privacy.rs
@@ -759,7 +759,7 @@ impl<'a> Visitor<()> for PrivacyVisitor<'a> {
     fn visit_item(&mut self, item: &ast::Item, _: ()) {
         // Do not check privacy inside items with the resolve_unexported
         // attribute. This is used for the test runner.
-        if attr::contains_name(item.attrs, "!resolve_unexported") {
+        if attr::contains_name(item.attrs.as_slice(), "!resolve_unexported") {
             return;
         }
 
@@ -788,7 +788,8 @@ impl<'a> Visitor<()> for PrivacyVisitor<'a> {
             }
             ast::ExprMethodCall(ident, _, ref args) => {
                 // see above
-                let t = ty::type_autoderef(ty::expr_ty(self.tcx, args[0]));
+                let t = ty::type_autoderef(ty::expr_ty(self.tcx,
+                                                       *args.get(0)));
                 match ty::get(t).sty {
                     ty::ty_enum(_, _) | ty::ty_struct(_, _) => {
                         match self.method_map.borrow().get().find(&expr.id) {
@@ -857,7 +858,7 @@ impl<'a> Visitor<()> for PrivacyVisitor<'a> {
                     lifetimes: opt_vec::Empty,
                     types: opt_vec::Empty,
                 };
-                let segs = ~[seg];
+                let segs = vec!(seg);
                 let path = ast::Path {
                     global: false,
                     span: pid.span,
diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs
index 4b2bc5c966f89..305c60d8215c9 100644
--- a/src/librustc/middle/reachable.rs
+++ b/src/librustc/middle/reachable.rs
@@ -44,7 +44,7 @@ fn generics_require_inlining(generics: &ast::Generics) -> bool {
 // monomorphized or it was marked with `#[inline]`. This will only return
 // true for functions.
 fn item_might_be_inlined(item: &ast::Item) -> bool {
-    if attributes_specify_inlining(item.attrs) {
+    if attributes_specify_inlining(item.attrs.as_slice()) {
         return true
     }
 
@@ -59,7 +59,7 @@ fn item_might_be_inlined(item: &ast::Item) -> bool {
 
 fn method_might_be_inlined(tcx: ty::ctxt, method: &ast::Method,
                            impl_src: ast::DefId) -> bool {
-    if attributes_specify_inlining(method.attrs) ||
+    if attributes_specify_inlining(method.attrs.as_slice()) ||
         generics_require_inlining(&method.generics) {
         return true
     }
@@ -217,7 +217,7 @@ impl ReachableContext {
             }
             Some(ast_map::NodeMethod(method)) => {
                 if generics_require_inlining(&method.generics) ||
-                        attributes_specify_inlining(method.attrs) {
+                        attributes_specify_inlining(method.attrs.as_slice()) {
                     true
                 } else {
                     let impl_did = tcx.map.get_parent_did(node_id);
@@ -324,7 +324,7 @@ impl ReachableContext {
                         // Statics with insignificant addresses are not reachable
                         // because they're inlined specially into all other crates.
                         ast::ItemStatic(..) => {
-                            if attr::contains_name(item.attrs,
+                            if attr::contains_name(item.attrs.as_slice(),
                                                    "address_insignificant") {
                                 let mut reachable_symbols =
                                     self.reachable_symbols.borrow_mut();
diff --git a/src/librustc/middle/resolve.rs b/src/librustc/middle/resolve.rs
index 8f0261dfb9170..58de36a796825 100644
--- a/src/librustc/middle/resolve.rs
+++ b/src/librustc/middle/resolve.rs
@@ -3690,7 +3690,7 @@ impl Resolver {
                                             generics,
                                             implemented_traits,
                                             self_type,
-                                            *methods);
+                                            methods.as_slice());
             }
 
             ItemTrait(ref generics, ref traits, ref methods) => {
@@ -3764,7 +3764,7 @@ impl Resolver {
             ItemStruct(ref struct_def, ref generics) => {
                 self.resolve_struct(item.id,
                                     generics,
-                                    struct_def.fields);
+                                    struct_def.fields.as_slice());
             }
 
             ItemMod(ref module_) => {
@@ -4187,8 +4187,10 @@ impl Resolver {
     // check that all of the arms in an or-pattern have exactly the
     // same set of bindings, with the same binding modes for each.
     fn check_consistent_bindings(&mut self, arm: &Arm) {
-        if arm.pats.len() == 0 { return; }
-        let map_0 = self.binding_mode_map(arm.pats[0]);
+        if arm.pats.len() == 0 {
+            return
+        }
+        let map_0 = self.binding_mode_map(*arm.pats.get(0));
         for (i, p) in arm.pats.iter().enumerate() {
             let map_i = self.binding_mode_map(*p);
 
@@ -4408,7 +4410,7 @@ impl Resolver {
                 // such a value is simply disallowed (since it's rarely
                 // what you want).
 
-                let ident = path.segments[0].identifier;
+                let ident = path.segments.get(0).identifier;
                 let renamed = mtwt_resolve(ident);
 
                 match self.resolve_bare_identifier_pattern(ident) {
diff --git a/src/librustc/middle/trans/_match.rs b/src/librustc/middle/trans/_match.rs
index 580a0fea90e43..d57557c5f1be3 100644
--- a/src/librustc/middle/trans/_match.rs
+++ b/src/librustc/middle/trans/_match.rs
@@ -652,7 +652,9 @@ fn enter_opt<'r,'b>(
                     // FIXME: Must we clone?
                     match *subpats {
                         None => Some(vec::from_elem(variant_size, dummy)),
-                        _ => (*subpats).clone(),
+                        Some(ref subpats) => {
+                            Some((*subpats).iter().map(|x| *x).collect())
+                        }
                     }
                 } else {
                     None
@@ -719,8 +721,15 @@ fn enter_opt<'r,'b>(
                         let this_opt = vec_len(n, vec_len_ge(before.len()),
                                                (lo, hi));
                         if opt_eq(tcx, &this_opt, opt) {
-                            Some(vec::append_one((*before).clone(), slice) +
-                                    *after)
+                            let mut new_before = ~[];
+                            for pat in before.iter() {
+                                new_before.push(*pat);
+                            }
+                            new_before.push(slice);
+                            for pat in after.iter() {
+                                new_before.push(*pat);
+                            }
+                            Some(new_before)
                         } else {
                             None
                         }
@@ -728,7 +737,11 @@ fn enter_opt<'r,'b>(
                     None if i >= lo && i <= hi => {
                         let n = before.len();
                         if opt_eq(tcx, &vec_len(n, vec_len_eq, (lo,hi)), opt) {
-                            Some((*before).clone())
+                            let mut new_before = ~[];
+                            for pat in before.iter() {
+                                new_before.push(*pat);
+                            }
+                            Some(new_before)
                         } else {
                             None
                         }
@@ -811,7 +824,13 @@ fn enter_tup<'r,'b>(
     let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: DUMMY_SP};
     enter_match(bcx, dm, m, col, val, |p| {
         match p.node {
-            ast::PatTup(ref elts) => Some((*elts).clone()),
+            ast::PatTup(ref elts) => {
+                let mut new_elts = ~[];
+                for elt in elts.iter() {
+                    new_elts.push((*elt).clone())
+                }
+                Some(new_elts)
+            }
             _ => {
                 assert_is_binding_or_wild(bcx, p);
                 Some(vec::from_elem(n_elts, dummy))
@@ -838,7 +857,9 @@ fn enter_tuple_struct<'r,'b>(
     let dummy = @ast::Pat {id: 0, node: ast::PatWild, span: DUMMY_SP};
     enter_match(bcx, dm, m, col, val, |p| {
         match p.node {
-            ast::PatEnum(_, Some(ref elts)) => Some((*elts).clone()),
+            ast::PatEnum(_, Some(ref elts)) => {
+                Some(elts.iter().map(|x| (*x)).collect())
+            }
             _ => {
                 assert_is_binding_or_wild(bcx, p);
                 Some(vec::from_elem(n_elts, dummy))
@@ -1094,7 +1115,7 @@ fn collect_record_or_struct_fields<'a>(
             ast::PatStruct(_, ref fs, _) => {
                 match ty::get(node_id_type(bcx, br.pats[col].id)).sty {
                     ty::ty_struct(..) => {
-                        extend(&mut fields, *fs);
+                        extend(&mut fields, fs.as_slice());
                         found = true;
                     }
                     _ => ()
@@ -1866,7 +1887,7 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
     let mut matches = ~[];
     for arm in arms.iter() {
         let body = fcx.new_id_block("case_body", arm.body.id);
-        let bindings_map = create_bindings_map(bcx, arm.pats[0]);
+        let bindings_map = create_bindings_map(bcx, *arm.pats.get(0));
         let arm_data = ArmData {
             bodycx: body,
             arm: arm,
@@ -2172,7 +2193,7 @@ fn bind_irrefutable_pat<'a>(
                                               val);
                     for sub_pat in sub_pats.iter() {
                         for (i, argval) in args.vals.iter().enumerate() {
-                            bcx = bind_irrefutable_pat(bcx, sub_pat[i],
+                            bcx = bind_irrefutable_pat(bcx, *sub_pat.get(i),
                                                        *argval, binding_mode,
                                                        cleanup_scope);
                         }
diff --git a/src/librustc/middle/trans/asm.rs b/src/librustc/middle/trans/asm.rs
index db99bd53704eb..130bcb4190224 100644
--- a/src/librustc/middle/trans/asm.rs
+++ b/src/librustc/middle/trans/asm.rs
@@ -100,13 +100,20 @@ pub fn trans_inline_asm<'a>(bcx: &'a Block<'a>, ia: &ast::InlineAsm)
 
     let r = ia.asm.get().with_c_str(|a| {
         constraints.with_c_str(|c| {
-            InlineAsmCall(bcx, a, c, inputs, output_type, ia.volatile, ia.alignstack, dialect)
+            InlineAsmCall(bcx,
+                          a,
+                          c,
+                          inputs.as_slice(),
+                          output_type,
+                          ia.volatile,
+                          ia.alignstack,
+                          dialect)
         })
     });
 
     // Again, based on how many outputs we have
     if numOutputs == 1 {
-        Store(bcx, r, outputs[0]);
+        Store(bcx, r, *outputs.get(0));
     } else {
         for (i, o) in outputs.iter().enumerate() {
            let v = ExtractValue(bcx, r, i);
diff --git a/src/librustc/middle/trans/base.rs b/src/librustc/middle/trans/base.rs
index 2bebba8c40463..b8301cbc33abf 100644
--- a/src/librustc/middle/trans/base.rs
+++ b/src/librustc/middle/trans/base.rs
@@ -1473,7 +1473,11 @@ pub fn trans_closure<'a>(ccx: @CrateContext,
     let arg_tys = ty::ty_fn_args(node_id_type(bcx, id));
     let arg_datums = create_datums_for_fn_args(&fcx, arg_tys);
 
-    bcx = copy_args_to_allocas(&fcx, arg_scope, bcx, decl.inputs, arg_datums);
+    bcx = copy_args_to_allocas(&fcx,
+                               arg_scope,
+                               bcx,
+                               decl.inputs.as_slice(),
+                               arg_datums);
 
     bcx = maybe_load_env(bcx);
 
@@ -1637,7 +1641,7 @@ pub fn trans_enum_def(ccx: @CrateContext, enum_definition: &ast::EnumDef,
         match variant.node.kind {
             ast::TupleVariantKind(ref args) if args.len() > 0 => {
                 let llfn = get_item_val(ccx, variant.node.id);
-                trans_enum_variant(ccx, id, variant, *args,
+                trans_enum_variant(ccx, id, variant, args.as_slice(),
                                    disr_val, None, llfn);
             }
             ast::TupleVariantKind(_) => {
@@ -1667,10 +1671,16 @@ pub fn trans_item(ccx: @CrateContext, item: &ast::Item) {
         if purity == ast::ExternFn  {
             let llfndecl = get_item_val(ccx, item.id);
             foreign::trans_rust_fn_with_foreign_abi(
-                ccx, decl, body, item.attrs, llfndecl, item.id);
+                ccx, decl, body, item.attrs.as_slice(), llfndecl, item.id);
         } else if !generics.is_type_parameterized() {
             let llfn = get_item_val(ccx, item.id);
-            trans_fn(ccx, decl, body, llfn, None, item.id, item.attrs);
+            trans_fn(ccx,
+                     decl,
+                     body,
+                     llfn,
+                     None,
+                     item.id,
+                     item.attrs.as_slice());
        } else {
            // Be sure to travel more than just one layer deep to catch nested
            // items in blocks and such.
@@ -1679,7 +1689,7 @@ pub fn trans_item(ccx: @CrateContext, item: &ast::Item) {
        }
      }
      ast::ItemImpl(ref generics, _, _, ref ms) => {
-        meth::trans_impl(ccx, item.ident, *ms, generics, item.id);
+        meth::trans_impl(ccx, item.ident, ms.as_slice(), generics, item.id);
      }
      ast::ItemMod(ref m) => {
        trans_mod(ccx, m);
@@ -1695,7 +1705,7 @@ pub fn trans_item(ccx: @CrateContext, item: &ast::Item) {
        consts::trans_const(ccx, m, item.id);
        // Do static_assert checking. It can't really be done much earlier
        // because we need to get the value of the bool out of LLVM
-        if attr::contains_name(item.attrs, "static_assert") {
+        if attr::contains_name(item.attrs.as_slice(), "static_assert") {
            if m == ast::MutMutable {
                ccx.sess.span_fatal(expr.span,
                                    "cannot have static_assert on a mutable \
@@ -1738,7 +1748,7 @@ pub fn trans_struct_def(ccx: @CrateContext, struct_def: @ast::StructDef) {
        // otherwise this is a unit-like struct.
        Some(ctor_id) if struct_def.fields.len() > 0 => {
            let llfndecl = get_item_val(ccx, ctor_id);
-            trans_tuple_struct(ccx, struct_def.fields,
+            trans_tuple_struct(ccx, struct_def.fields.as_slice(),
                               ctor_id, None, llfndecl);
        }
        Some(_) | None => {}
@@ -1925,7 +1935,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
            let val = match item {
                ast_map::NodeItem(i) => {
                    let ty = ty::node_id_to_type(ccx.tcx, i.id);
-                    let sym = exported_name(ccx, id, ty, i.attrs);
+                    let sym = exported_name(ccx, id, ty, i.attrs.as_slice());
 
                    let v = match i.node {
                        ast::ItemStatic(_, _, expr) => {
@@ -1974,7 +1984,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
 
                            // Apply the `unnamed_addr` attribute if
                            // requested
-                            if attr::contains_name(i.attrs,
+                            if attr::contains_name(i.attrs.as_slice(),
                                                    "address_insignificant"){
                                {
                                    let reachable =
@@ -2006,7 +2016,8 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
                                inlineable = true;
                            }
 
-                            if attr::contains_name(i.attrs, "thread_local") {
+                            if attr::contains_name(i.attrs.as_slice(),
+                                                   "thread_local") {
                                lib::llvm::set_thread_local(g, true);
                            }
 
@@ -2034,14 +2045,16 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
                                        sym,
                                        i.id)
                        };
-                            set_llvm_fn_attrs(i.attrs, llfn);
+                            set_llvm_fn_attrs(i.attrs.as_slice(), llfn);
                            llfn
                        }
 
                        _ => fail!("get_item_val: weird result in table")
                    };
 
-                    match attr::first_attr_value_str_by_name(i.attrs, "link_section") {
+                    match attr::first_attr_value_str_by_name(i.attrs
+                                                              .as_slice(),
+                                                             "link_section") {
                        Some(sect) => unsafe {
                            sect.get().with_c_str(|buf| {
                                llvm::LLVMSetSection(v, buf);
@@ -2087,7 +2100,8 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
                    // with weak linkage, but if we're building a
                    // library then we've already declared the crate map
                    // so use that instead.
-                    if attr::contains_name(ni.attrs, "crate_map") {
+                    if attr::contains_name(ni.attrs.as_slice(),
+                                           "crate_map") {
                        if ccx.sess.building_library.get() {
                            let s = "_rust_crate_map_toplevel";
                            let g = unsafe {
@@ -2126,7 +2140,10 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
                    let ty = ty::node_id_to_type(ccx.tcx, id);
                    let parent = ccx.tcx.map.get_parent(id);
                    let enm = ccx.tcx.map.expect_item(parent);
-                    let sym = exported_name(ccx, id, ty, enm.attrs);
+                    let sym = exported_name(ccx,
+                                            id,
+                                            ty,
+                                            enm.attrs.as_slice());
 
                    llfn = match enm.node {
                        ast::ItemEnum(_, _) => {
@@ -2154,7 +2171,11 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
                    let parent = ccx.tcx.map.get_parent(id);
                    let struct_item = ccx.tcx.map.expect_item(parent);
                    let ty = ty::node_id_to_type(ccx.tcx, ctor_id);
-                    let sym = exported_name(ccx, id, ty, struct_item.attrs);
+                    let sym = exported_name(ccx,
+                                            id,
+                                            ty,
+                                            struct_item.attrs
+                                                       .as_slice());
                    let llfn = register_fn(ccx, struct_item.span,
                                           sym, ctor_id, ty);
                    set_inline_hint(llfn);
@@ -2190,10 +2211,10 @@ fn register_method(ccx: @CrateContext, id: ast::NodeId,
                    m: &ast::Method) -> ValueRef {
     let mty = ty::node_id_to_type(ccx.tcx, id);
 
-    let sym = exported_name(ccx, id, mty, m.attrs);
+    let sym = exported_name(ccx, id, mty, m.attrs.as_slice());
 
     let llfn = register_fn(ccx, m.span, sym, id, mty);
-    set_llvm_fn_attrs(m.attrs, llfn);
+    set_llvm_fn_attrs(m.attrs.as_slice(), llfn);
     llfn
 }
 
diff --git a/src/librustc/middle/trans/consts.rs b/src/librustc/middle/trans/consts.rs
index b3c3b22c1f5ba..793c70770a932 100644
--- a/src/librustc/middle/trans/consts.rs
+++ b/src/librustc/middle/trans/consts.rs
@@ -75,7 +75,9 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
         ast::LitBool(b) => C_bool(b),
         ast::LitNil => C_nil(),
         ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()),
-        ast::LitBinary(ref data) => C_binary_slice(cx, *data.borrow()),
+        ast::LitBinary(ref data) => {
+            C_binary_slice(cx, data.borrow().as_slice())
+        }
     }
 }
 
@@ -529,7 +531,7 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
           ast::ExprTup(ref es) => {
              let ety = ty::expr_ty(cx.tcx, e);
              let repr = adt::represent_type(cx, ety);
-              let (vals, inlineable) = map_list(*es);
+              let (vals, inlineable) = map_list(es.as_slice());
              (adt::trans_const(cx, repr, 0, vals), inlineable)
          }
          ast::ExprStruct(_, ref fs, ref base_opt) => {
@@ -564,7 +566,10 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
              })
          }
          ast::ExprVec(ref es, ast::MutImmutable) => {
-            let (v, _, inlineable) = const_vec(cx, e, *es, is_local);
+            let (v, _, inlineable) = const_vec(cx,
+                                               e,
+                                               es.as_slice(),
+                                               is_local);
            (v, inlineable)
          }
          ast::ExprVstore(sub, ast::ExprVstoreSlice) => {
@@ -576,7 +581,10 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
                }
            }
            ast::ExprVec(ref es, ast::MutImmutable) => {
-                let (cv, llunitty, _) = const_vec(cx, e, *es, is_local);
+                let (cv, llunitty, _) = const_vec(cx,
+                                                  e,
+                                                  es.as_slice(),
+                                                  is_local);
                let llty = val_ty(cv);
                let gv = "const".with_c_str(|name| {
                    llvm::LLVMAddGlobal(cx.llmod, llty.to_ref(), name)
@@ -657,7 +665,7 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
              Some(ast::DefStruct(_)) => {
                  let ety = ty::expr_ty(cx.tcx, e);
                  let repr = adt::represent_type(cx, ety);
                  let (arg_vals, inlineable) = map_list(args.as_slice());
                  (adt::trans_const(cx, repr, 0, arg_vals), inlineable)
              }
              Some(ast::DefVariant(enum_did, variant_did, _)) => {
@@ -666,7 +674,7 @@ fn const_expr_unadjusted(cx:
@CrateContext, e: &ast::Expr, let vinfo = ty::enum_variant_with_id(cx.tcx, enum_did, variant_did); - let (arg_vals, inlineable) = map_list(*args); + let (arg_vals, inlineable) = map_list(args.as_slice()); (adt::trans_const(cx, repr, vinfo.disr_val, arg_vals), inlineable) } diff --git a/src/librustc/middle/trans/debuginfo.rs b/src/librustc/middle/trans/debuginfo.rs index 9dc119b024f54..44e6bbf91cf7c 100644 --- a/src/librustc/middle/trans/debuginfo.rs +++ b/src/librustc/middle/trans/debuginfo.rs @@ -690,7 +690,7 @@ pub fn create_function_debug_context(cx: &CrateContext, { let mut scope_map = fn_debug_context.scope_map.borrow_mut(); populate_scope_map(cx, - arg_pats, + arg_pats.as_slice(), top_level_block, fn_metadata, scope_map.get()); @@ -2650,7 +2650,7 @@ fn populate_scope_map(cx: &CrateContext, // they all must contain the same binding names for arm_ref in arms.iter() { - let arm_span = arm_ref.pats[0].span; + let arm_span = arm_ref.pats.get(0).span; with_new_scope(cx, arm_span, diff --git a/src/librustc/middle/trans/expr.rs b/src/librustc/middle/trans/expr.rs index 987d60957d175..b033086125dec 100644 --- a/src/librustc/middle/trans/expr.rs +++ b/src/librustc/middle/trans/expr.rs @@ -731,13 +731,18 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>, controlflow::trans_if(bcx, expr.id, cond, thn, els, dest) } ast::ExprMatch(discr, ref arms) => { - _match::trans_match(bcx, expr, discr, *arms, dest) + _match::trans_match(bcx, expr, discr, arms.as_slice(), dest) } ast::ExprBlock(blk) => { controlflow::trans_block(bcx, blk, dest) } ast::ExprStruct(_, ref fields, base) => { - trans_rec_or_struct(bcx, (*fields), base, expr.span, expr.id, dest) + trans_rec_or_struct(bcx, + fields.as_slice(), + base, + expr.span, + expr.id, + dest) } ast::ExprTup(ref args) => { let repr = adt::represent_type(bcx.ccx(), expr_ty(bcx, expr)); @@ -777,10 +782,19 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>, closure::trans_expr_fn(bcx, sigil, decl, body, expr.id, dest) } ast::ExprCall(f, ref args) => { - callee::trans_call(bcx, expr, f, callee::ArgExprs(*args), expr.id, dest) + callee::trans_call(bcx, + expr, + f, + callee::ArgExprs(args.as_slice()), + expr.id, + dest) } ast::ExprMethodCall(_, _, ref args) => { - callee::trans_method_call(bcx, expr, args[0], callee::ArgExprs(*args), dest) + callee::trans_method_call(bcx, + expr, + *args.get(0), + callee::ArgExprs(args.as_slice()), + dest) } ast::ExprBinary(_, lhs, rhs) => { // if not overloaded, would be RvalueDatumExpr diff --git a/src/librustc/middle/trans/foreign.rs b/src/librustc/middle/trans/foreign.rs index d0eef924356ab..7f90810bae48e 100644 --- a/src/librustc/middle/trans/foreign.rs +++ b/src/librustc/middle/trans/foreign.rs @@ -713,7 +713,8 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext, // the massive simplifications that have occurred. pub fn link_name(i: @ast::ForeignItem) -> InternedString { - match attr::first_attr_value_str_by_name(i.attrs, "link_name") { + match attr::first_attr_value_str_by_name(i.attrs.as_slice(), + "link_name") { None => token::get_ident(i.ident), Some(ln) => ln.clone(), } diff --git a/src/librustc/middle/trans/inline.rs b/src/librustc/middle/trans/inline.rs index 878d7d9f39cad..896d97f037405 100644 --- a/src/librustc/middle/trans/inline.rs +++ b/src/librustc/middle/trans/inline.rs @@ -74,7 +74,7 @@ pub fn maybe_instantiate_inline(ccx: @CrateContext, fn_id: ast::DefId) let g = get_item_val(ccx, item.id); // see the comment in get_item_val() as to why this check is // performed here. 
- if !attr::contains_name(item.attrs, + if !attr::contains_name(item.attrs.as_slice(), "address_insignificant") { SetLinkage(g, AvailableExternallyLinkage); } diff --git a/src/librustc/middle/trans/monomorphize.rs b/src/librustc/middle/trans/monomorphize.rs index 7e047ae8c105d..c76d1cbcd20a4 100644 --- a/src/librustc/middle/trans/monomorphize.rs +++ b/src/librustc/middle/trans/monomorphize.rs @@ -201,7 +201,7 @@ pub fn monomorphic_fn(ccx: @CrateContext, .. } => { let d = mk_lldecl(); - set_llvm_fn_attrs(i.attrs, d); + set_llvm_fn_attrs(i.attrs.as_slice(), d); trans_fn(ccx, decl, body, d, Some(psubsts), fn_id.node, []); d } @@ -232,7 +232,7 @@ pub fn monomorphic_fn(ccx: @CrateContext, trans_enum_variant(ccx, parent, v, - (*args).clone(), + args.as_slice(), this_tv.disr_val, Some(psubsts), d); @@ -244,7 +244,7 @@ pub fn monomorphic_fn(ccx: @CrateContext, } ast_map::NodeMethod(mth) => { let d = mk_lldecl(); - set_llvm_fn_attrs(mth.attrs, d); + set_llvm_fn_attrs(mth.attrs.as_slice(), d); trans_fn(ccx, mth.decl, mth.body, d, Some(psubsts), mth.id, []); d } @@ -252,7 +252,7 @@ pub fn monomorphic_fn(ccx: @CrateContext, match *method { ast::Provided(mth) => { let d = mk_lldecl(); - set_llvm_fn_attrs(mth.attrs, d); + set_llvm_fn_attrs(mth.attrs.as_slice(), d); trans_fn(ccx, mth.decl, mth.body, d, Some(psubsts), mth.id, []); d } @@ -266,7 +266,7 @@ pub fn monomorphic_fn(ccx: @CrateContext, let d = mk_lldecl(); set_inline_hint(d); base::trans_tuple_struct(ccx, - struct_def.fields, + struct_def.fields.as_slice(), struct_def.ctor_id.expect("ast-mapped tuple struct \ didn't have a ctor id"), Some(psubsts), diff --git a/src/librustc/middle/ty.rs b/src/librustc/middle/ty.rs index 2ca19f0b61d82..210adc1d0a5ec 100644 --- a/src/librustc/middle/ty.rs +++ b/src/librustc/middle/ty.rs @@ -3730,8 +3730,11 @@ pub fn provided_trait_methods(cx: ctxt, id: ast::DefId) -> ~[@Method] { Some(ast_map::NodeItem(item)) => { match item.node { ItemTrait(_, _, ref ms) => { - let (_, p) = ast_util::split_trait_methods(*ms); - p.map(|m| method(cx, ast_util::local_def(m.id))) + let (_, p) = + ast_util::split_trait_methods(ms.as_slice()); + p.iter() + .map(|m| method(cx, ast_util::local_def(m.id))) + .collect() } _ => { cx.sess.bug(format!("provided_trait_methods: \ @@ -3947,7 +3950,7 @@ impl VariantInfo { }, ast::StructVariantKind(ref struct_def) => { - let fields: &[StructField] = struct_def.fields; + let fields: &[StructField] = struct_def.fields.as_slice(); assert!(fields.len() > 0); @@ -4280,7 +4283,7 @@ pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> ~[field_ty] { Some(ast_map::NodeItem(i)) => { match i.node { ast::ItemStruct(struct_def, _) => { - struct_field_tys(struct_def.fields) + struct_field_tys(struct_def.fields.as_slice()) } _ => cx.sess.bug("struct ID bound to non-struct") } @@ -4288,7 +4291,7 @@ pub fn lookup_struct_fields(cx: ctxt, did: ast::DefId) -> ~[field_ty] { Some(ast_map::NodeVariant(ref variant)) => { match (*variant).node.kind { ast::StructVariantKind(struct_def) => { - struct_field_tys(struct_def.fields) + struct_field_tys(struct_def.fields.as_slice()) } _ => { cx.sess.bug("struct ID bound to enum variant that isn't \ diff --git a/src/librustc/middle/typeck/astconv.rs b/src/librustc/middle/typeck/astconv.rs index e569f0756e682..e1157d29d9d82 100644 --- a/src/librustc/middle/typeck/astconv.rs +++ b/src/librustc/middle/typeck/astconv.rs @@ -60,7 +60,7 @@ use middle::typeck::rscope::{RegionScope}; use middle::typeck::lookup_def_tcx; use util::ppaux::Repr; -use std::vec; +use std::vec_ng::Vec; use 
syntax::abi::AbiSet; use syntax::{ast, ast_util}; use syntax::codemap::Span; @@ -186,8 +186,8 @@ fn ast_path_substs( } match anon_regions { - Ok(v) => opt_vec::from(v), - Err(()) => opt_vec::from(vec::from_fn(expected_num_region_params, + Ok(v) => opt_vec::from(v.move_iter().collect()), + Err(()) => opt_vec::from(Vec::from_fn(expected_num_region_params, |_| ty::ReStatic)) // hokey } }; @@ -519,7 +519,9 @@ pub fn ast_ty_to_ty( |tmt| ty::mk_rptr(tcx, r, tmt)) } ast::TyTup(ref fields) => { - let flds = fields.map(|&t| ast_ty_to_ty(this, rscope, t)); + let flds = fields.iter() + .map(|&t| ast_ty_to_ty(this, rscope, t)) + .collect(); ty::mk_tup(tcx, flds) } ast::TyBareFn(ref bf) => { diff --git a/src/librustc/middle/typeck/check/_match.rs b/src/librustc/middle/typeck/check/_match.rs index 6620f8502edf3..def80e39821ee 100644 --- a/src/librustc/middle/typeck/check/_match.rs +++ b/src/librustc/middle/typeck/check/_match.rs @@ -20,6 +20,7 @@ use middle::typeck::infer; use middle::typeck::require_same_types; use collections::{HashMap, HashSet}; +use std::vec_ng::Vec; use syntax::ast; use syntax::ast_util; use syntax::parse::token; @@ -40,7 +41,7 @@ pub fn check_match(fcx: @FnCtxt, for arm in arms.iter() { let mut pcx = pat_ctxt { fcx: fcx, - map: pat_id_map(tcx.def_map, arm.pats[0]), + map: pat_id_map(tcx.def_map, *arm.pats.get(0)), }; for p in arm.pats.iter() { check_pat(&mut pcx, *p, discrim_ty);} @@ -108,13 +109,13 @@ pub struct pat_ctxt { } pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path, - subpats: &Option<~[@ast::Pat]>, expected: ty::t) { + subpats: &Option>, expected: ty::t) { // Typecheck the path. let fcx = pcx.fcx; let tcx = pcx.fcx.ccx.tcx; - let arg_types; + let arg_types: ~[ty::t]; let kind_name; // structure_of requires type variables to be resolved. 
@@ -174,8 +175,10 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path, fcx.write_error(pat.id); kind_name = "[error]"; arg_types = subpats.clone() - .unwrap_or_default() - .map(|_| ty::mk_err()); + .unwrap_or_default() + .move_iter() + .map(|_| ty::mk_err()) + .collect(); } } } @@ -223,8 +226,10 @@ pub fn check_pat_variant(pcx: &pat_ctxt, pat: &ast::Pat, path: &ast::Path, fcx.write_error(pat.id); kind_name = "[error]"; arg_types = subpats.clone() - .unwrap_or_default() - .map(|_| ty::mk_err()); + .unwrap_or_default() + .iter() + .map(|_| ty::mk_err()) + .collect(); } } @@ -509,7 +514,7 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { } } ast::PatIdent(_, ref path, _) => { - check_pat_variant(pcx, pat, path, &Some(~[]), expected); + check_pat_variant(pcx, pat, path, &Some(Vec::new()), expected); } ast::PatEnum(ref path, ref subpats) => { check_pat_variant(pcx, pat, path, subpats, expected); @@ -521,12 +526,18 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { match *structure { ty::ty_struct(cid, ref substs) => { check_struct_pat(pcx, pat.id, pat.span, expected, path, - *fields, etc, cid, substs); + fields.as_slice(), etc, cid, substs); } ty::ty_enum(eid, ref substs) => { - check_struct_like_enum_variant_pat( - pcx, pat.id, pat.span, expected, path, *fields, etc, eid, - substs); + check_struct_like_enum_variant_pat(pcx, + pat.id, + pat.span, + expected, + path, + fields.as_slice(), + etc, + eid, + substs); } _ => { // See [Note-Type-error-reporting] in middle/typeck/infer/mod.rs @@ -540,9 +551,19 @@ pub fn check_pat(pcx: &pat_ctxt, pat: &ast::Pat, expected: ty::t) { let def_map = tcx.def_map.borrow(); match def_map.get().find(&pat.id) { Some(&ast::DefStruct(supplied_def_id)) => { - check_struct_pat(pcx, pat.id, pat.span, ty::mk_err(), path, *fields, etc, - supplied_def_id, - &ty::substs { self_ty: None, tps: ~[], regions: ty::ErasedRegions} ); + check_struct_pat(pcx, + pat.id, + pat.span, + ty::mk_err(), + path, + fields.as_slice(), + etc, + supplied_def_id, + &ty::substs { + self_ty: None, + tps: ~[], + regions: ty::ErasedRegions, + }); } _ => () // Error, but we're already in an error case } diff --git a/src/librustc/middle/typeck/check/mod.rs b/src/librustc/middle/typeck/check/mod.rs index 4339d2c62be9f..6e9cfc9d0d2b0 100644 --- a/src/librustc/middle/typeck/check/mod.rs +++ b/src/librustc/middle/typeck/check/mod.rs @@ -387,7 +387,7 @@ impl Visitor<()> for GatherLocalsVisitor { { let locals = self.fcx.inh.locals.borrow(); debug!("Pattern binding {} is assigned to {}", - token::get_ident(path.segments[0].identifier), + token::get_ident(path.segments.get(0).identifier), self.fcx.infcx().ty_to_str( locals.get().get_copy(&p.id))); } @@ -554,7 +554,7 @@ pub fn check_item(ccx: @CrateCtxt, it: &ast::Item) { ast::ItemEnum(ref enum_definition, _) => { check_enum_variants(ccx, it.span, - enum_definition.variants, + enum_definition.variants.as_slice(), it.id); } ast::ItemFn(decl, _, _, _, body) => { @@ -588,7 +588,7 @@ pub fn check_item(ccx: @CrateCtxt, it: &ast::Item) { &impl_tpt.generics, ast_trait_ref, impl_trait_ref, - *ms); + ms.as_slice()); vtable::resolve_impl(ccx.tcx, it, &impl_tpt.generics, impl_trait_ref); } None => { } @@ -1397,9 +1397,12 @@ pub fn impl_self_ty(vcx: &VtableContext, n_rps); let tps = vcx.infcx.next_ty_vars(n_tps); - let substs = substs {regions: ty::NonerasedRegions(opt_vec::from(rps)), - self_ty: None, - tps: tps}; + let substs = substs { + regions: ty::NonerasedRegions(opt_vec::from(rps.move_iter() + 
.collect())), + self_ty: None, + tps: tps, + }; let substd_ty = ty::subst(tcx, &substs, raw_ty); ty_param_substs_and_ty { substs: substs, ty: substd_ty } @@ -1453,7 +1456,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt, // Verify that no lifetimes or type parameters are present anywhere // except the final two elements of the path. for i in range(0, path.segments.len() - 2) { - for lifetime in path.segments[i].lifetimes.iter() { + for lifetime in path.segments.get(i).lifetimes.iter() { function_context.tcx() .sess .span_err(lifetime.span, @@ -1462,7 +1465,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt, break; } - for typ in path.segments[i].types.iter() { + for typ in path.segments.get(i).types.iter() { function_context.tcx() .sess .span_err(typ.span, @@ -1493,7 +1496,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt, ast::FromImpl(_) => "impl", }; - let trait_segment = &path.segments[path.segments.len() - 2]; + let trait_segment = &path.segments.get(path.segments.len() - 2); // Make sure lifetime parameterization agrees with the trait or // implementation type. @@ -1567,7 +1570,7 @@ fn check_type_parameter_positions_in_path(function_context: @FnCtxt, _ => { // Verify that no lifetimes or type parameters are present on // the penultimate segment of the path. - let segment = &path.segments[path.segments.len() - 2]; + let segment = &path.segments.get(path.segments.len() - 2); for lifetime in segment.lifetimes.iter() { function_context.tcx() .sess @@ -2415,7 +2418,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, // Generate the struct type. let regions = fcx.infcx().next_region_vars( infer::BoundRegionInTypeOrImpl(span), - region_parameter_count); + region_parameter_count).move_iter().collect(); let type_parameters = fcx.infcx().next_ty_vars(type_parameter_count); let substitutions = substs { regions: ty::NonerasedRegions(opt_vec::from(regions)), @@ -2473,7 +2476,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, // Generate the enum type. 
let regions = fcx.infcx().next_region_vars( infer::BoundRegionInTypeOrImpl(span), - region_parameter_count); + region_parameter_count).move_iter().collect(); let type_parameters = fcx.infcx().next_ty_vars(type_parameter_count); let substitutions = substs { regions: ty::NonerasedRegions(opt_vec::from(regions)), @@ -2866,7 +2869,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, } } ast::ExprMatch(discrim, ref arms) => { - _match::check_match(fcx, expr, discrim, *arms); + _match::check_match(fcx, expr, discrim, arms.as_slice()); } ast::ExprFnBlock(decl, body) => { check_expr_fn(fcx, @@ -2891,7 +2894,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, fcx.write_ty(id, fcx.node_ty(b.id)); } ast::ExprCall(f, ref args) => { - check_call(fcx, expr, f, *args); + check_call(fcx, expr, f, args.as_slice()); let f_ty = fcx.expr_ty(f); let (args_bot, args_err) = args.iter().fold((false, false), |(rest_bot, rest_err), a| { @@ -2907,7 +2910,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, } } ast::ExprMethodCall(ident, ref tps, ref args) => { - check_method_call(fcx, expr, ident, *args, *tps); + check_method_call(fcx, expr, ident, args.as_slice(), tps.as_slice()); let arg_tys = args.map(|a| fcx.expr_ty(*a)); let (args_bot, args_err) = arg_tys.iter().fold((false, false), |(rest_bot, rest_err), a| { @@ -3093,11 +3096,11 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, match def_map.get().find(&id) { Some(&ast::DefStruct(type_def_id)) => { check_struct_constructor(fcx, id, expr.span, type_def_id, - *fields, base_expr); + fields.as_slice(), base_expr); } Some(&ast::DefVariant(enum_id, variant_id, _)) => { check_struct_enum_variant(fcx, id, expr.span, enum_id, - variant_id, *fields); + variant_id, fields.as_slice()); } _ => { tcx.sess.span_bug(path.span, @@ -3106,7 +3109,7 @@ pub fn check_expr_with_unifier(fcx: @FnCtxt, } } ast::ExprField(base, field, ref tys) => { - check_field(fcx, expr, base, field.name, *tys); + check_field(fcx, expr, base, field.name, tys.as_slice()); } ast::ExprIndex(base, idx) => { check_expr(fcx, base); @@ -3670,7 +3673,7 @@ pub fn instantiate_path(fcx: @FnCtxt, opt_vec::from(fcx.infcx().next_region_vars( infer::BoundRegionInTypeOrImpl(span), - num_expected_regions)) + num_expected_regions).move_iter().collect()) }; let regions = ty::NonerasedRegions(regions); diff --git a/src/librustc/middle/typeck/check/regionck.rs b/src/librustc/middle/typeck/check/regionck.rs index 5e2ad9fc35bd0..ed03ced5ca066 100644 --- a/src/librustc/middle/typeck/check/regionck.rs +++ b/src/librustc/middle/typeck/check/regionck.rs @@ -437,13 +437,18 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { match expr.node { ast::ExprCall(callee, ref args) => { constrain_callee(rcx, callee.id, expr, callee); - constrain_call(rcx, Some(callee.id), expr, None, *args, false); + constrain_call(rcx, + Some(callee.id), + expr, + None, + args.as_slice(), + false); visit::walk_expr(rcx, expr, ()); } ast::ExprMethodCall(_, _, ref args) => { - constrain_call(rcx, None, expr, Some(args[0]), + constrain_call(rcx, None, expr, Some(*args.get(0)), args.slice_from(1), false); visit::walk_expr(rcx, expr, ()); @@ -545,7 +550,7 @@ fn visit_expr(rcx: &mut Rcx, expr: &ast::Expr) { } ast::ExprMatch(discr, ref arms) => { - link_match(rcx, discr, *arms); + link_match(rcx, discr, arms.as_slice()); visit::walk_expr(rcx, expr, ()); } diff --git a/src/librustc/middle/typeck/coherence.rs b/src/librustc/middle/typeck/coherence.rs index 37929957ae6ba..e47b6e722f386 100644 --- a/src/librustc/middle/typeck/coherence.rs +++ 
b/src/librustc/middle/typeck/coherence.rs @@ -524,7 +524,8 @@ impl CoherenceChecker { let type_parameters = self.inference_context.next_ty_vars(bounds_count); let substitutions = substs { - regions: ty::NonerasedRegions(opt_vec::from(region_parameters)), + regions: ty::NonerasedRegions(opt_vec::from( + region_parameters.move_iter().collect())), self_ty: None, tps: type_parameters }; diff --git a/src/librustc/middle/typeck/collect.rs b/src/librustc/middle/typeck/collect.rs index f048637d68fb0..f7733335c9177 100644 --- a/src/librustc/middle/typeck/collect.rs +++ b/src/librustc/middle/typeck/collect.rs @@ -149,7 +149,7 @@ pub fn get_enum_variant_types(ccx: &CrateCtxt, ast::TupleVariantKind(ref args) if args.len() > 0 => { let rs = ExplicitRscope; let input_tys = args.map(|va| ccx.to_ty(&rs, va.ty)); - ty::mk_ctor_fn(tcx, scope, input_tys, enum_ty) + ty::mk_ctor_fn(tcx, scope, input_tys.as_slice(), enum_ty) } ast::TupleVariantKind(_) => { @@ -166,7 +166,7 @@ pub fn get_enum_variant_types(ccx: &CrateCtxt, let input_tys = struct_def.fields.map( |f| ty::node_id_to_type(ccx.tcx, f.node.id)); - ty::mk_ctor_fn(tcx, scope, input_tys, enum_ty) + ty::mk_ctor_fn(tcx, scope, input_tys.as_slice(), enum_ty) } }; @@ -235,8 +235,11 @@ pub fn ensure_trait_methods(ccx: &CrateCtxt, trait_id: ast::NodeId) { let trait_def_id = local_def(trait_id); let mut trait_method_def_ids = tcx.trait_method_def_ids .borrow_mut(); - trait_method_def_ids.get().insert(trait_def_id, - method_def_ids); + trait_method_def_ids.get() + .insert(trait_def_id, + @method_def_ids.iter() + .map(|x| *x) + .collect()); } _ => {} // Ignore things that aren't traits. } @@ -575,7 +578,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { write_ty_to_tcx(tcx, it.id, tpt.ty); get_enum_variant_types(ccx, tpt.ty, - enum_definition.variants, + enum_definition.variants.as_slice(), generics); }, ast::ItemImpl(ref generics, ref opt_trait_ref, selfty, ref ms) => { @@ -604,7 +607,7 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { convert_methods(ccx, ImplContainer(local_def(it.id)), - *ms, + ms.as_slice(), selfty, &i_ty_generics, generics, @@ -626,11 +629,11 @@ pub fn convert(ccx: &CrateCtxt, it: &ast::Item) { // Run convert_methods on the provided methods. let (_, provided_methods) = - split_trait_methods(*trait_methods); + split_trait_methods(trait_methods.as_slice()); let untransformed_rcvr_ty = ty::mk_self(tcx, local_def(it.id)); convert_methods(ccx, TraitContainer(local_def(it.id)), - provided_methods, + provided_methods.as_slice(), untransformed_rcvr_ty, &trait_def.generics, generics, @@ -701,7 +704,8 @@ pub fn convert_struct(ccx: &CrateCtxt, let mut tcache = tcx.tcache.borrow_mut(); tcache.get().insert(local_def(ctor_id), tpt); } - } else if struct_def.fields[0].node.kind == ast::UnnamedField { + } else if struct_def.fields.get(0).node.kind == + ast::UnnamedField { // Tuple-like. 
let inputs = { let tcache = tcx.tcache.borrow(); @@ -709,7 +713,10 @@ pub fn convert_struct(ccx: &CrateCtxt, |field| tcache.get().get( &local_def(field.node.id)).ty) }; - let ctor_fn_ty = ty::mk_ctor_fn(tcx, ctor_id, inputs, selfty); + let ctor_fn_ty = ty::mk_ctor_fn(tcx, + ctor_id, + inputs.as_slice(), + selfty); write_ty_to_tcx(tcx, ctor_id, ctor_fn_ty); { let mut tcache = tcx.tcache.borrow_mut(); @@ -802,7 +809,10 @@ pub fn trait_def_of_item(ccx: &CrateCtxt, it: &ast::Item) -> @ty::TraitDef { let self_ty = ty::mk_self(tcx, def_id); let ty_generics = ty_generics(ccx, generics, 0); let substs = mk_item_substs(ccx, &ty_generics, Some(self_ty)); - let bounds = ensure_supertraits(ccx, it.id, it.span, *supertraits); + let bounds = ensure_supertraits(ccx, + it.id, + it.span, + supertraits.as_slice()); let trait_ref = @ty::TraitRef {def_id: def_id, substs: substs}; let trait_def = @ty::TraitDef {generics: ty_generics, @@ -980,7 +990,7 @@ pub fn ty_generics(ccx: &CrateCtxt, def } } - })) + }).move_iter().collect()) }; fn compute_bounds( @@ -1032,7 +1042,10 @@ pub fn ty_of_foreign_fn_decl(ccx: &CrateCtxt, -> ty::ty_param_bounds_and_ty { let ty_generics = ty_generics(ccx, ast_generics, 0); let rb = BindingRscope::new(def_id.node); - let input_tys = decl.inputs.map(|a| ty_of_arg(ccx, &rb, a, None) ); + let input_tys = decl.inputs + .iter() + .map(|a| ty_of_arg(ccx, &rb, a, None)) + .collect(); let output_ty = ast_ty_to_ty(ccx, &rb, decl.output); let t_fn = ty::mk_bare_fn( diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index b47143869e9fc..1384bf182a762 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -603,7 +603,7 @@ impl Repr for OptVec { fn repr(&self, tcx: ctxt) -> ~str { match *self { opt_vec::Empty => ~"[]", - opt_vec::Vec(ref v) => repr_vec(tcx, *v) + opt_vec::Vec(ref v) => repr_vec(tcx, v.as_slice()) } } } diff --git a/src/librustdoc/clean.rs b/src/librustdoc/clean.rs index fc517a8c2e8da..ce922e7d6951f 100644 --- a/src/librustdoc/clean.rs +++ b/src/librustdoc/clean.rs @@ -29,6 +29,7 @@ use std; use doctree; use visit_ast; use std::local_data; +use std::vec_ng::Vec; pub trait Clean { fn clean(&self) -> T; @@ -39,6 +40,13 @@ impl, U> Clean<~[U]> for ~[T] { self.iter().map(|x| x.clean()).collect() } } + +impl, U> Clean> for Vec { + fn clean(&self) -> Vec { + self.iter().map(|x| x.clean()).collect() + } +} + impl, U> Clean for @T { fn clean(&self) -> U { (**self).clean() @@ -54,10 +62,10 @@ impl, U> Clean> for Option { } } -impl, U> Clean<~[U]> for syntax::opt_vec::OptVec { - fn clean(&self) -> ~[U] { +impl, U> Clean> for syntax::opt_vec::OptVec { + fn clean(&self) -> Vec { match self { - &syntax::opt_vec::Empty => ~[], + &syntax::opt_vec::Empty => Vec::new(), &syntax::opt_vec::Vec(ref v) => v.clean() } } @@ -196,6 +204,25 @@ impl Clean for doctree::Module { } else { ~"" }; + let mut foreigns = ~[]; + for subforeigns in self.foreigns.clean().move_iter() { + for foreign in subforeigns.move_iter() { + foreigns.push(foreign) + } + } + let items: ~[~[Item]] = ~[ + self.structs.clean().move_iter().collect(), + self.enums.clean().move_iter().collect(), + self.fns.clean().move_iter().collect(), + foreigns, + self.mods.clean().move_iter().collect(), + self.typedefs.clean().move_iter().collect(), + self.statics.clean().move_iter().collect(), + self.traits.clean().move_iter().collect(), + self.impls.clean().move_iter().collect(), + self.view_items.clean().move_iter().collect(), + self.macros.clean().move_iter().collect() + ]; Item { name: Some(name), attrs: 
self.attrs.clean(), @@ -204,12 +231,7 @@ impl Clean for doctree::Module { id: self.id, inner: ModuleItem(Module { is_crate: self.is_crate, - items: [self.structs.clean(), self.enums.clean(), - self.fns.clean(), self.foreigns.clean().concat_vec(), - self.mods.clean(), self.typedefs.clean(), - self.statics.clean(), self.traits.clean(), - self.impls.clean(), self.view_items.clean(), - self.macros.clean()].concat_vec() + items: items.concat_vec(), }) } } @@ -227,7 +249,7 @@ impl Clean for ast::MetaItem { match self.node { ast::MetaWord(ref s) => Word(s.get().to_owned()), ast::MetaList(ref s, ref l) => { - List(s.get().to_owned(), l.clean()) + List(s.get().to_owned(), l.clean().move_iter().collect()) } ast::MetaNameValue(ref s, ref v) => { NameValue(s.get().to_owned(), lit_to_str(v)) @@ -276,7 +298,7 @@ impl Clean for ast::TyParam { TyParam { name: self.ident.clean(), id: self.id, - bounds: self.bounds.clean(), + bounds: self.bounds.clean().move_iter().collect(), } } } @@ -323,8 +345,8 @@ pub struct Generics { impl Clean for ast::Generics { fn clean(&self) -> Generics { Generics { - lifetimes: self.lifetimes.clean(), - type_params: self.ty_params.clean(), + lifetimes: self.lifetimes.clean().move_iter().collect(), + type_params: self.ty_params.clean().move_iter().collect(), } } } @@ -353,7 +375,7 @@ impl Clean for ast::Method { }; Item { name: Some(self.ident.clean()), - attrs: self.attrs.clean(), + attrs: self.attrs.clean().move_iter().collect(), source: self.span.clean(), id: self.id.clone(), visibility: self.vis.clean(), @@ -391,7 +413,7 @@ impl Clean for ast::TypeMethod { }; Item { name: Some(self.ident.clean()), - attrs: self.attrs.clean(), + attrs: self.attrs.clean().move_iter().collect(), source: self.span.clean(), id: self.id, visibility: None, @@ -464,12 +486,12 @@ impl Clean for ast::ClosureTy { ClosureDecl { sigil: self.sigil, region: self.region.clean(), - lifetimes: self.lifetimes.clean(), + lifetimes: self.lifetimes.clean().move_iter().collect(), decl: self.decl.clean(), onceness: self.onceness, purity: self.purity, bounds: match self.bounds { - Some(ref x) => x.clean(), + Some(ref x) => x.clean().move_iter().collect(), None => ~[] }, } @@ -673,8 +695,11 @@ impl Clean for ast::Ty { TyFixedLengthVec(ty, ref e) => FixedVector(~ty.clean(), e.span.to_src()), TyTup(ref tys) => Tuple(tys.iter().map(|x| x.clean()).collect()), - TyPath(ref p, ref tpbs, id) => - resolve_type(p.clean(), tpbs.clean(), id), + TyPath(ref p, ref tpbs, id) => { + resolve_type(p.clean(), + tpbs.clean().map(|x| x.move_iter().collect()), + id) + } TyClosure(ref c) => Closure(~c.clean()), TyBareFn(ref barefn) => BareFunction(~barefn.clean()), TyBot => Bottom, @@ -696,7 +721,7 @@ impl Clean for ast::StructField { }; Item { name: name.clean(), - attrs: self.node.attrs.clean(), + attrs: self.node.attrs.clean().move_iter().collect(), source: self.span.clean(), visibility: vis, id: self.node.id, @@ -755,7 +780,7 @@ impl Clean for syntax::ast::StructDef { fn clean(&self) -> VariantStruct { VariantStruct { struct_type: doctree::struct_type_from_def(self), - fields: self.fields.clean(), + fields: self.fields.clean().move_iter().collect(), fields_stripped: false, } } @@ -862,7 +887,7 @@ impl Clean for ast::Path { fn clean(&self) -> Path { Path { global: self.global, - segments: self.segments.clean() + segments: self.segments.clean().move_iter().collect(), } } } @@ -878,8 +903,8 @@ impl Clean for ast::PathSegment { fn clean(&self) -> PathSegment { PathSegment { name: self.identifier.clean(), - lifetimes: 
self.lifetimes.clean(), - types: self.types.clean() + lifetimes: self.lifetimes.clean().move_iter().collect(), + types: self.types.clean().move_iter().collect() } } } @@ -941,7 +966,7 @@ impl Clean for ast::BareFnTy { BareFunctionDecl { purity: self.purity, generics: Generics { - lifetimes: self.lifetimes.clean(), + lifetimes: self.lifetimes.clean().move_iter().collect(), type_params: ~[], }, decl: self.decl.clean(), @@ -1028,7 +1053,7 @@ impl Clean for ast::ViewItem { fn clean(&self) -> Item { Item { name: None, - attrs: self.attrs.clean(), + attrs: self.attrs.clean().move_iter().collect(), source: self.span.clean(), id: 0, visibility: self.vis.clean(), @@ -1055,7 +1080,9 @@ impl Clean for ast::ViewItem_ { }; ExternMod(i.clean(), string, *id) } - &ast::ViewItemUse(ref vp) => Import(vp.clean()) + &ast::ViewItemUse(ref vp) => { + Import(vp.clean().move_iter().collect()) + } } } } @@ -1083,8 +1110,10 @@ impl Clean for ast::ViewPath { SimpleImport(i.clean(), resolve_use_source(p.clean(), id)), ast::ViewPathGlob(ref p, id) => GlobImport(resolve_use_source(p.clean(), id)), - ast::ViewPathList(ref p, ref pl, id) => - ImportList(resolve_use_source(p.clean(), id), pl.clean()), + ast::ViewPathList(ref p, ref pl, id) => { + ImportList(resolve_use_source(p.clean(), id), + pl.clean().move_iter().collect()) + } } } } @@ -1104,8 +1133,8 @@ impl Clean for ast::PathListIdent { } } -impl Clean<~[Item]> for ast::ForeignMod { - fn clean(&self) -> ~[Item] { +impl Clean> for ast::ForeignMod { + fn clean(&self) -> Vec { self.items.clean() } } @@ -1130,7 +1159,7 @@ impl Clean for ast::ForeignItem { }; Item { name: Some(self.ident.clean()), - attrs: self.attrs.clean(), + attrs: self.attrs.clean().move_iter().collect(), source: self.span.clean(), id: self.id, visibility: self.vis.clean(), diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 994ee7ea03ef0..dc15b7f73a9ba 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -11,6 +11,7 @@ //! Rust AST Visitor. Extracts useful information and massages it into a form //! 
usable for clean +use std::vec_ng::Vec; use syntax::abi::AbiSet; use syntax::ast; use syntax::ast_util; @@ -39,11 +40,17 @@ impl<'a> RustdocVisitor<'a> { } pub fn visit(&mut self, krate: &ast::Crate) { - self.attrs = krate.attrs.clone(); + self.attrs = krate.attrs.iter().map(|x| (*x).clone()).collect(); - self.module = self.visit_mod_contents(krate.span, krate.attrs.clone(), - ast::Public, ast::CRATE_NODE_ID, - &krate.module, None); + self.module = self.visit_mod_contents(krate.span, + krate.attrs + .iter() + .map(|x| *x) + .collect(), + ast::Public, + ast::CRATE_NODE_ID, + &krate.module, + None); self.module.is_crate = true; } @@ -56,9 +63,9 @@ impl<'a> RustdocVisitor<'a> { struct_type: struct_type, name: item.ident, vis: item.vis, - attrs: item.attrs.clone(), + attrs: item.attrs.iter().map(|x| *x).collect(), generics: generics.clone(), - fields: sd.fields.clone(), + fields: sd.fields.iter().map(|x| (*x).clone()).collect(), where: item.span } } @@ -70,7 +77,7 @@ impl<'a> RustdocVisitor<'a> { for x in def.variants.iter() { vars.push(Variant { name: x.node.name, - attrs: x.node.attrs.clone(), + attrs: x.node.attrs.iter().map(|x| *x).collect(), vis: x.node.vis, id: x.node.id, kind: x.node.kind.clone(), @@ -82,7 +89,7 @@ impl<'a> RustdocVisitor<'a> { variants: vars, vis: it.vis, generics: params.clone(), - attrs: it.attrs.clone(), + attrs: it.attrs.iter().map(|x| *x).collect(), id: it.id, where: it.span, } @@ -95,7 +102,7 @@ impl<'a> RustdocVisitor<'a> { Function { id: item.id, vis: item.vis, - attrs: item.attrs.clone(), + attrs: item.attrs.iter().map(|x| *x).collect(), decl: fd.clone(), name: item.ident, where: item.span, @@ -130,11 +137,11 @@ impl<'a> RustdocVisitor<'a> { ast::ViewItemUse(ref paths) => { // rustc no longer supports "use foo, bar;" assert_eq!(paths.len(), 1); - match self.visit_view_path(paths[0], om) { + match self.visit_view_path(*paths.get(0), om) { None => return, Some(path) => { ast::ViewItem { - node: ast::ViewItemUse(~[path]), + node: ast::ViewItemUse(vec!(path)), .. 
item.clone() } } @@ -152,7 +159,7 @@ impl<'a> RustdocVisitor<'a> { if self.resolve_id(id, false, om) { return None } } ast::ViewPathList(ref p, ref paths, ref b) => { - let mut mine = ~[]; + let mut mine = Vec::new(); for path in paths.iter() { if !self.resolve_id(path.node.id, false, om) { mine.push(path.clone()); @@ -217,9 +224,15 @@ impl<'a> RustdocVisitor<'a> { debug!("Visiting item {:?}", item); match item.node { ast::ItemMod(ref m) => { - om.mods.push(self.visit_mod_contents(item.span, item.attrs.clone(), - item.vis, item.id, m, - Some(item.ident))); + om.mods.push(self.visit_mod_contents(item.span, + item.attrs + .iter() + .map(|x| *x) + .collect(), + item.vis, + item.id, + m, + Some(item.ident))); }, ast::ItemEnum(ref ed, ref gen) => om.enums.push(self.visit_enum_def(item, ed, gen)), @@ -233,7 +246,7 @@ impl<'a> RustdocVisitor<'a> { gen: gen.clone(), name: item.ident, id: item.id, - attrs: item.attrs.clone(), + attrs: item.attrs.iter().map(|x| *x).collect(), where: item.span, vis: item.vis, }; @@ -246,7 +259,7 @@ impl<'a> RustdocVisitor<'a> { expr: exp.clone(), id: item.id, name: item.ident, - attrs: item.attrs.clone(), + attrs: item.attrs.iter().map(|x| *x).collect(), where: item.span, vis: item.vis, }; @@ -255,11 +268,11 @@ impl<'a> RustdocVisitor<'a> { ast::ItemTrait(ref gen, ref tr, ref met) => { let t = Trait { name: item.ident, - methods: met.clone(), + methods: met.iter().map(|x| (*x).clone()).collect(), generics: gen.clone(), - parents: tr.clone(), + parents: tr.iter().map(|x| (*x).clone()).collect(), id: item.id, - attrs: item.attrs.clone(), + attrs: item.attrs.iter().map(|x| *x).collect(), where: item.span, vis: item.vis, }; @@ -270,8 +283,8 @@ impl<'a> RustdocVisitor<'a> { generics: gen.clone(), trait_: tr.clone(), for_: ty, - methods: meths.clone(), - attrs: item.attrs.clone(), + methods: meths.iter().map(|x| *x).collect(), + attrs: item.attrs.iter().map(|x| *x).collect(), id: item.id, where: item.span, vis: item.vis, @@ -284,7 +297,7 @@ impl<'a> RustdocVisitor<'a> { ast::ItemMac(ref _m) => { om.macros.push(Macro { id: item.id, - attrs: item.attrs.clone(), + attrs: item.attrs.iter().map(|x| *x).collect(), name: item.ident, where: item.span, }) diff --git a/src/libstd/hash/mod.rs b/src/libstd/hash/mod.rs index 027a1d9010e3e..0d319c5d74ef9 100644 --- a/src/libstd/hash/mod.rs +++ b/src/libstd/hash/mod.rs @@ -70,6 +70,7 @@ use option::{Option, Some, None}; use rc::Rc; use str::{Str, StrSlice}; use vec::{Vector, ImmutableVector}; +use vec_ng::Vec; /// Reexport the `sip::hash` function as our default hasher. 
pub use hash = self::sip::hash; @@ -207,6 +208,13 @@ impl> Hash for ~[T] { } } +impl> Hash for Vec { + #[inline] + fn hash(&self, state: &mut S) { + self.as_slice().hash(state); + } +} + impl<'a, S: Writer, T: Hash> Hash for &'a T { #[inline] fn hash(&self, state: &mut S) { diff --git a/src/libstd/vec_ng.rs b/src/libstd/vec_ng.rs index 52d3405f8c148..9b6acdd9b9ee5 100644 --- a/src/libstd/vec_ng.rs +++ b/src/libstd/vec_ng.rs @@ -15,6 +15,8 @@ use cast::{forget, transmute}; use clone::Clone; use cmp::{Eq, Ordering, TotalEq, TotalOrd}; use container::Container; +use default::Default; +use fmt; use iter::{DoubleEndedIterator, FromIterator, Iterator}; use libc::{free, c_void}; use mem::{size_of, move_val_init}; @@ -26,7 +28,8 @@ use ptr::RawPtr; use ptr; use rt::global_heap::{malloc_raw, realloc_raw}; use raw::Slice; -use vec::{ImmutableVector, Items, MutItems, MutableVector, RevItems}; +use vec::{ImmutableEqVector, ImmutableVector, Items, MutItems, MutableVector}; +use vec::{RevItems}; pub struct Vec { priv len: uint, @@ -80,6 +83,26 @@ impl Vec { self.push((*element).clone()) } } + + + pub fn grow(&mut self, n: uint, initval: &T) { + let new_len = self.len() + n; + self.reserve(new_len); + let mut i: uint = 0u; + + while i < n { + self.push((*initval).clone()); + i += 1u; + } + } + + pub fn grow_set(&mut self, index: uint, initval: &T, val: T) { + let l = self.len(); + if index >= l { + self.grow(index - l + 1u, initval); + } + *self.get_mut(index) = val; + } } impl Clone for Vec { @@ -340,6 +363,18 @@ impl Vec { pub fn slice_from<'a>(&'a self, start: uint) -> &'a [T] { self.as_slice().slice_from(start) } + + #[inline] + pub fn init<'a>(&'a self) -> &'a [T] { + self.slice(0, self.len() - 1) + } +} + +impl Vec { + /// Return true if a vector contains an element with the given value + pub fn contains(&self, x: &T) -> bool { + self.as_slice().contains(x) + } } #[inline] @@ -348,6 +383,14 @@ pub fn append(mut first: Vec, second: &[T]) -> Vec { first } +/// Appends one element to the vector provided. The vector itself is then +/// returned for use again. +#[inline] +pub fn append_one(mut lhs: Vec, x: T) -> Vec { + lhs.push(x); + lhs +} + #[unsafe_destructor] impl Drop for Vec { fn drop(&mut self) { @@ -360,6 +403,18 @@ impl Drop for Vec { } } +impl Default for Vec { + fn default() -> Vec { + Vec::new() + } +} + +impl fmt::Show for Vec { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.as_slice().fmt(f) + } +} + pub struct MoveItems { priv allocation: *mut c_void, // the block of memory allocated for the vector priv iter: Items<'static, T> diff --git a/src/libsyntax/abi.rs b/src/libsyntax/abi.rs index 861cd8ae7d301..a06415bc083a8 100644 --- a/src/libsyntax/abi.rs +++ b/src/libsyntax/abi.rs @@ -9,6 +9,7 @@ // except according to those terms. 
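// A minimal usage sketch for the Vec helpers introduced in src/libstd/vec_ng.rs
// above; it assumes the signatures shown in that hunk (grow, grow_set,
// append_one, contains, as_slice) and the pre-1.0 toolchain this patch targets.
use std::vec_ng::Vec;
use std::vec_ng;

fn main() {
    let mut v: Vec<uint> = Vec::new();
    v.push(1u);
    v.grow(2, &0u);                     // clone-extend by two zeros: [1, 0, 0]
    v.grow_set(4, &0u, 7u);             // pad with zeros up to index 4, then set it: [1, 0, 0, 0, 7]
    let v = vec_ng::append_one(v, 9u);  // free function: push one element, hand the vector back
    assert!(v.contains(&7u));
    assert_eq!(v.as_slice().len(), 6);
}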
use std::fmt; +use std::vec_ng::Vec; use std::fmt::Show; #[deriving(Eq)] @@ -117,8 +118,8 @@ pub fn lookup(name: &str) -> Option { res } -pub fn all_names() -> ~[&'static str] { - AbiDatas.map(|d| d.name) +pub fn all_names() -> Vec<&'static str> { + AbiDatas.iter().map(|d| d.name).collect() } impl Abi { @@ -232,7 +233,7 @@ impl AbiSet { } pub fn check_valid(&self) -> Option<(Abi, Abi)> { - let mut abis = ~[]; + let mut abis = Vec::new(); self.each(|abi| { abis.push(abi); true }); for (i, abi) in abis.iter().enumerate() { diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index f6dca713e711e..947463d8f47b1 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -23,6 +23,7 @@ use std::cell::RefCell; use collections::HashMap; use std::option::Option; use std::rc::Rc; +use std::vec_ng::Vec; use serialize::{Encodable, Decodable, Encoder, Decoder}; /// A pointer abstraction. FIXME(eddyb) #10676 use Rc in the future. @@ -98,7 +99,7 @@ pub type SyntaxContext = u32; // it should cut down on memory use *a lot*; applying a mark // to a tree containing 50 identifiers would otherwise generate pub struct SCTable { - table: RefCell<~[SyntaxContext_]>, + table: RefCell >, mark_memo: RefCell>, rename_memo: RefCell>, } @@ -164,7 +165,7 @@ pub struct Path { /// module (like paths in an import). global: bool, /// The segments in the path: the things separated by `::`. - segments: ~[PathSegment], + segments: Vec , } /// A segment of a path: an identifier, an optional lifetime, and a set of @@ -288,12 +289,12 @@ pub enum DefRegion { // The set of MetaItems that define the compilation environment of the crate, // used to drive conditional compilation -pub type CrateConfig = ~[@MetaItem]; +pub type CrateConfig = Vec<@MetaItem> ; #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Crate { module: Mod, - attrs: ~[Attribute], + attrs: Vec , config: CrateConfig, span: Span, } @@ -303,7 +304,7 @@ pub type MetaItem = Spanned; #[deriving(Clone, Encodable, Decodable, Hash)] pub enum MetaItem_ { MetaWord(InternedString), - MetaList(InternedString, ~[@MetaItem]), + MetaList(InternedString, Vec<@MetaItem> ), MetaNameValue(InternedString, Lit), } @@ -334,8 +335,8 @@ impl Eq for MetaItem_ { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Block { - view_items: ~[ViewItem], - stmts: ~[@Stmt], + view_items: Vec , + stmts: Vec<@Stmt> , expr: Option<@Expr>, id: NodeId, rules: BlockCheckMode, @@ -373,17 +374,17 @@ pub enum Pat_ { // records this pattern's NodeId in an auxiliary // set (of "pat_idents that refer to nullary enums") PatIdent(BindingMode, Path, Option<@Pat>), - PatEnum(Path, Option<~[@Pat]>), /* "none" means a * pattern where + PatEnum(Path, Option >), /* "none" means a * pattern where * we don't bind the fields to names */ - PatStruct(Path, ~[FieldPat], bool), - PatTup(~[@Pat]), + PatStruct(Path, Vec , bool), + PatTup(Vec<@Pat> ), PatUniq(@Pat), PatRegion(@Pat), // reference pattern PatLit(@Expr), PatRange(@Expr, @Expr), // [a, b, ..i, y, z] is represented as // PatVec(~[a, b], Some(i), ~[y, z]) - PatVec(~[@Pat], Option<@Pat>, ~[@Pat]) + PatVec(Vec<@Pat> , Option<@Pat>, Vec<@Pat> ) } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] @@ -488,7 +489,7 @@ pub enum Decl_ { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Arm { - pats: ~[@Pat], + pats: Vec<@Pat> , guard: Option<@Expr>, body: P, } @@ -526,10 +527,10 @@ pub enum Expr_ { ExprVstore(@Expr, ExprVstore), // First expr is the place; second expr is the value. 
ExprBox(@Expr, @Expr), - ExprVec(~[@Expr], Mutability), - ExprCall(@Expr, ~[@Expr]), - ExprMethodCall(Ident, ~[P], ~[@Expr]), - ExprTup(~[@Expr]), + ExprVec(Vec<@Expr> , Mutability), + ExprCall(@Expr, Vec<@Expr> ), + ExprMethodCall(Ident, Vec> , Vec<@Expr> ), + ExprTup(Vec<@Expr> ), ExprBinary(BinOp, @Expr, @Expr), ExprUnary(UnOp, @Expr), ExprLit(@Lit), @@ -541,14 +542,14 @@ pub enum Expr_ { // Conditionless loop (can be exited with break, cont, or ret) // FIXME #6993: change to Option ExprLoop(P, Option), - ExprMatch(@Expr, ~[Arm]), + ExprMatch(@Expr, Vec ), ExprFnBlock(P, P), ExprProc(P, P), ExprBlock(P), ExprAssign(@Expr, @Expr), ExprAssignOp(BinOp, @Expr, @Expr), - ExprField(@Expr, Ident, ~[P]), + ExprField(@Expr, Ident, Vec> ), ExprIndex(@Expr, @Expr), /// Expression that looks like a "name". For example, @@ -569,7 +570,7 @@ pub enum Expr_ { ExprMac(Mac), // A struct literal expression. - ExprStruct(Path, ~[Field], Option<@Expr> /* base */), + ExprStruct(Path, Vec , Option<@Expr> /* base */), // A vector literal constructed from one repeated element. ExprRepeat(@Expr /* element */, @Expr /* count */, Mutability), @@ -600,14 +601,14 @@ pub enum TokenTree { TTTok(Span, ::parse::token::Token), // a delimited sequence (the delimiters appear as the first // and last elements of the vector) - TTDelim(@~[TokenTree]), + TTDelim(@Vec ), // These only make sense for right-hand-sides of MBE macros: // a kleene-style repetition sequence with a span, a TTForest, // an optional separator, and a boolean where true indicates // zero or more (..), and false indicates one or more (+). - TTSeq(Span, @~[TokenTree], Option<::parse::token::Token>, bool), + TTSeq(Span, @Vec , Option<::parse::token::Token>, bool), // a syntactic variable that will be filled in by macro expansion. TTNonterminal(Span, Ident) @@ -673,7 +674,7 @@ pub enum Matcher_ { MatchTok(::parse::token::Token), // match repetitions of a sequence: body, separator, zero ok?, // lo, hi position-in-match-array used: - MatchSeq(~[Matcher], Option<::parse::token::Token>, bool, uint, uint), + MatchSeq(Vec , Option<::parse::token::Token>, bool, uint, uint), // parse a Rust NT: name to bind, name of NT, position in match array: MatchNonterminal(Ident, Ident, uint) } @@ -686,7 +687,7 @@ pub type Mac = Spanned; // There's only one flavor, now, so this could presumably be simplified. 
#[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub enum Mac_ { - MacInvocTT(Path, ~[TokenTree], SyntaxContext), // new macro-invocation + MacInvocTT(Path, Vec , SyntaxContext), // new macro-invocation } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] @@ -700,7 +701,7 @@ pub type Lit = Spanned; #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub enum Lit_ { LitStr(InternedString, StrStyle), - LitBinary(Rc<~[u8]>), + LitBinary(Rc >), LitChar(u32), LitInt(i64, IntTy), LitUint(u64, UintTy), @@ -729,7 +730,7 @@ pub struct TypeField { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct TypeMethod { ident: Ident, - attrs: ~[Attribute], + attrs: Vec , purity: Purity, decl: P, generics: Generics, @@ -858,7 +859,7 @@ pub enum Ty_ { TyRptr(Option, MutTy), TyClosure(@ClosureTy), TyBareFn(@BareFnTy), - TyTup(~[P]), + TyTup(Vec> ), TyPath(Path, Option>, NodeId), // for #7264; see above TyTypeof(@Expr), // TyInfer means the type should be inferred instead of it having been @@ -878,8 +879,8 @@ pub struct InlineAsm { asm: InternedString, asm_str_style: StrStyle, clobbers: InternedString, - inputs: ~[(InternedString, @Expr)], - outputs: ~[(InternedString, @Expr)], + inputs: Vec<(InternedString, @Expr)> , + outputs: Vec<(InternedString, @Expr)> , volatile: bool, alignstack: bool, dialect: AsmDialect @@ -914,7 +915,7 @@ impl Arg { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct FnDecl { - inputs: ~[Arg], + inputs: Vec , output: P, cf: RetStyle, variadic: bool @@ -957,7 +958,7 @@ pub type ExplicitSelf = Spanned; #[deriving(Eq, Encodable, Decodable, Hash)] pub struct Method { ident: Ident, - attrs: ~[Attribute], + attrs: Vec , generics: Generics, explicit_self: ExplicitSelf, purity: Purity, @@ -970,15 +971,15 @@ pub struct Method { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Mod { - view_items: ~[ViewItem], - items: ~[@Item], + view_items: Vec , + items: Vec<@Item> , } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct ForeignMod { abis: AbiSet, - view_items: ~[ViewItem], - items: ~[@ForeignItem], + view_items: Vec , + items: Vec<@ForeignItem> , } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] @@ -989,19 +990,19 @@ pub struct VariantArg { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub enum VariantKind { - TupleVariantKind(~[VariantArg]), + TupleVariantKind(Vec ), StructVariantKind(@StructDef), } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct EnumDef { - variants: ~[P], + variants: Vec> , } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Variant_ { name: Ident, - attrs: ~[Attribute], + attrs: Vec , kind: VariantKind, id: NodeId, disr_expr: Option<@Expr>, @@ -1034,13 +1035,13 @@ pub enum ViewPath_ { ViewPathGlob(Path, NodeId), // foo::bar::{a,b,c} - ViewPathList(Path, ~[PathListIdent], NodeId) + ViewPathList(Path, Vec , NodeId) } #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct ViewItem { node: ViewItem_, - attrs: ~[Attribute], + attrs: Vec , vis: Visibility, span: Span, } @@ -1052,7 +1053,7 @@ pub enum ViewItem_ { // (containing arbitrary characters) from which to fetch the crate sources // For example, extern crate whatever = "github.com/mozilla/rust" ViewItemExternMod(Ident, Option<(InternedString,StrStyle)>, NodeId), - ViewItemUse(~[@ViewPath]), + ViewItemUse(Vec<@ViewPath> ), } // Meta-data associated with an item @@ -1109,7 +1110,7 @@ pub struct StructField_ { kind: StructFieldKind, id: NodeId, ty: P, - attrs: ~[Attribute], + attrs: Vec , } pub type StructField = Spanned; @@ -1122,7 +1123,7 
@@ pub enum StructFieldKind { #[deriving(Eq, Encodable, Decodable, Hash)] pub struct StructDef { - fields: ~[StructField], /* fields, not including ctor */ + fields: Vec , /* fields, not including ctor */ /* ID of the constructor. This is only used for tuple- or enum-like * structs. */ ctor_id: Option @@ -1135,7 +1136,7 @@ pub struct StructDef { #[deriving(Clone, Eq, Encodable, Decodable, Hash)] pub struct Item { ident: Ident, - attrs: ~[Attribute], + attrs: Vec , id: NodeId, node: Item_, vis: Visibility, @@ -1151,11 +1152,11 @@ pub enum Item_ { ItemTy(P, Generics), ItemEnum(EnumDef, Generics), ItemStruct(@StructDef, Generics), - ItemTrait(Generics, ~[TraitRef], ~[TraitMethod]), + ItemTrait(Generics, Vec , Vec ), ItemImpl(Generics, Option, // (optional) trait this impl implements P, // self - ~[@Method]), + Vec<@Method> ), // a macro invocation (which includes macro definition) ItemMac(Mac), } @@ -1163,7 +1164,7 @@ pub enum Item_ { #[deriving(Eq, Encodable, Decodable, Hash)] pub struct ForeignItem { ident: Ident, - attrs: ~[Attribute], + attrs: Vec , node: ForeignItem_, id: NodeId, span: Span, @@ -1193,6 +1194,8 @@ mod test { use codemap::*; use super::*; + use std::vec_ng::Vec; + fn is_freeze() {} // Assert that the AST remains Freeze (#10693). @@ -1205,9 +1208,9 @@ mod test { #[test] fn check_asts_encodable() { let e = Crate { - module: Mod {view_items: ~[], items: ~[]}, - attrs: ~[], - config: ~[], + module: Mod {view_items: Vec::new(), items: Vec::new()}, + attrs: Vec::new(), + config: Vec::new(), span: Span { lo: BytePos(10), hi: BytePos(20), diff --git a/src/libsyntax/ast_map.rs b/src/libsyntax/ast_map.rs index 31c258b36c02b..56a99736866f6 100644 --- a/src/libsyntax/ast_map.rs +++ b/src/libsyntax/ast_map.rs @@ -23,6 +23,7 @@ use std::cell::RefCell; use std::iter; use std::vec; use std::fmt; +use std::vec_ng::Vec; #[deriving(Clone, Eq)] pub enum PathElem { @@ -134,7 +135,7 @@ enum MapEntry { } struct InlinedParent { - path: ~[PathElem], + path: Vec , // Required by NodeTraitMethod and NodeMethod. def_id: DefId } @@ -185,13 +186,17 @@ pub struct Map { /// /// Also, indexing is pretty quick when you've got a vector and /// plain old integers. - priv map: RefCell<~[MapEntry]> + priv map: RefCell > } impl Map { fn find_entry(&self, id: NodeId) -> Option { let map = self.map.borrow(); - map.get().get(id as uint).map(|x| *x) + if map.get().len() > id as uint { + Some(*map.get().get(id as uint)) + } else { + None + } } /// Retrieve the Node corresponding to `id`, failing if it cannot @@ -522,7 +527,7 @@ impl<'a, F: FoldOps> Folder for Ctx<'a, F> { } pub fn map_crate(krate: Crate, fold_ops: F) -> (Crate, Map) { - let map = Map { map: RefCell::new(~[]) }; + let map = Map { map: RefCell::new(Vec::new()) }; let krate = { let mut cx = Ctx { map: &map, @@ -557,7 +562,7 @@ pub fn map_crate(krate: Crate, fold_ops: F) -> (Crate, Map) { // crate. The `path` should be the path to the item but should not include // the item itself. 
pub fn map_decoded_item(map: &Map, - path: ~[PathElem], + path: Vec , fold_ops: F, fold: |&mut Ctx| -> InlinedItem) -> InlinedItem { diff --git a/src/libsyntax/ast_util.rs b/src/libsyntax/ast_util.rs index 4cf4aefa0e2a8..db9ea480e9620 100644 --- a/src/libsyntax/ast_util.rs +++ b/src/libsyntax/ast_util.rs @@ -23,6 +23,7 @@ use std::cmp; use collections::HashMap; use std::u32; use std::local_data; +use std::vec_ng::Vec; pub fn path_name_i(idents: &[Ident]) -> ~str { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") @@ -180,8 +181,8 @@ pub fn is_call_expr(e: @Expr) -> bool { pub fn block_from_expr(e: @Expr) -> P { P(Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(e), id: e.id, rules: DefaultBlock, @@ -193,13 +194,13 @@ pub fn ident_to_path(s: Span, identifier: Ident) -> Path { ast::Path { span: s, global: false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: identifier, lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), } } @@ -216,7 +217,7 @@ pub fn is_unguarded(a: &Arm) -> bool { } } -pub fn unguarded_pat(a: &Arm) -> Option<~[@Pat]> { +pub fn unguarded_pat(a: &Arm) -> Option > { if is_unguarded(a) { Some(/* FIXME (#2543) */ a.pats.clone()) } else { @@ -241,7 +242,7 @@ pub fn impl_pretty_name(trait_ref: &Option, ty: &Ty) -> Ident { token::gensym_ident(pretty) } -pub fn public_methods(ms: ~[@Method]) -> ~[@Method] { +pub fn public_methods(ms: Vec<@Method> ) -> Vec<@Method> { ms.move_iter().filter(|m| { match m.vis { Public => true, @@ -271,9 +272,9 @@ pub fn trait_method_to_ty_method(method: &TraitMethod) -> TypeMethod { } pub fn split_trait_methods(trait_methods: &[TraitMethod]) - -> (~[TypeMethod], ~[@Method]) { - let mut reqd = ~[]; - let mut provd = ~[]; + -> (Vec , Vec<@Method> ) { + let mut reqd = Vec::new(); + let mut provd = Vec::new(); for trt_method in trait_methods.iter() { match *trt_method { Required(ref tm) => reqd.push((*tm).clone()), @@ -724,7 +725,7 @@ pub fn new_rename_internal(id: Ident, // FIXME #8215 : currently pub to allow testing pub fn new_sctable_internal() -> SCTable { SCTable { - table: RefCell::new(~[EmptyCtxt,IllegalCtxt]), + table: RefCell::new(vec!(EmptyCtxt,IllegalCtxt)), mark_memo: RefCell::new(HashMap::new()), rename_memo: RefCell::new(HashMap::new()), } @@ -754,7 +755,7 @@ pub fn display_sctable(table : &SCTable) { /// Add a value to the end of a vec, return its index -fn idx_push(vec: &mut ~[T], val: T) -> u32 { +fn idx_push(vec: &mut Vec , val: T) -> u32 { vec.push(val); (vec.len() - 1) as u32 } @@ -795,7 +796,7 @@ pub fn resolve_internal(id : Ident, let resolved = { let result = { let table = table.table.borrow(); - table.get()[id.ctxt] + *table.get().get(id.ctxt as uint) }; match result { EmptyCtxt => id.name, @@ -831,20 +832,20 @@ pub fn resolve_internal(id : Ident, } /// Compute the marks associated with a syntax context. -pub fn mtwt_marksof(ctxt: SyntaxContext, stopname: Name) -> ~[Mrk] { +pub fn mtwt_marksof(ctxt: SyntaxContext, stopname: Name) -> Vec { marksof(ctxt, stopname, get_sctable()) } // the internal function for computing marks // it's not clear to me whether it's better to use a [] mutable // vector or a cons-list for this. 
-pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> ~[Mrk] { - let mut result = ~[]; +pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> Vec { + let mut result = Vec::new(); let mut loopvar = ctxt; loop { let table_entry = { let table = table.table.borrow(); - table.get()[loopvar] + *table.get().get(loopvar as uint) }; match table_entry { EmptyCtxt => { @@ -873,7 +874,7 @@ pub fn marksof(ctxt: SyntaxContext, stopname: Name, table: &SCTable) -> ~[Mrk] { pub fn mtwt_outer_mark(ctxt: SyntaxContext) -> Mrk { let sctable = get_sctable(); let table = sctable.table.borrow(); - match table.get()[ctxt] { + match *table.get().get(ctxt as uint) { ast::Mark(mrk,_) => mrk, _ => fail!("can't retrieve outer mark when outside is not a mark") } @@ -881,7 +882,7 @@ pub fn mtwt_outer_mark(ctxt: SyntaxContext) -> Mrk { /// Push a name... unless it matches the one on top, in which /// case pop and discard (so two of the same marks cancel) -pub fn xorPush(marks: &mut ~[Mrk], mark: Mrk) { +pub fn xorPush(marks: &mut Vec , mark: Mrk) { if (marks.len() > 0) && (getLast(marks) == mark) { marks.pop().unwrap(); } else { @@ -891,7 +892,7 @@ pub fn xorPush(marks: &mut ~[Mrk], mark: Mrk) { // get the last element of a mutable array. // FIXME #4903: , must be a separate procedure for now. -pub fn getLast(arr: &~[Mrk]) -> Mrk { +pub fn getLast(arr: &Vec ) -> Mrk { *arr.last().unwrap() } @@ -901,7 +902,7 @@ pub fn getLast(arr: &~[Mrk]) -> Mrk { pub fn path_name_eq(a : &ast::Path, b : &ast::Path) -> bool { (a.span == b.span) && (a.global == b.global) - && (segments_name_eq(a.segments, b.segments)) + && (segments_name_eq(a.segments.as_slice(), b.segments.as_slice())) } // are two arrays of segments equal when compared unhygienically? @@ -938,6 +939,8 @@ mod test { use opt_vec; use collections::HashMap; + use std::vec_ng::Vec; + fn ident_to_segment(id : &Ident) -> PathSegment { PathSegment {identifier:id.clone(), lifetimes: opt_vec::Empty, @@ -956,21 +959,21 @@ mod test { } #[test] fn xorpush_test () { - let mut s = ~[]; + let mut s = Vec::new(); xorPush(&mut s, 14); - assert_eq!(s.clone(), ~[14]); + assert_eq!(s.clone(), vec!(14)); xorPush(&mut s, 14); - assert_eq!(s.clone(), ~[]); + assert_eq!(s.clone(), Vec::new()); xorPush(&mut s, 14); - assert_eq!(s.clone(), ~[14]); + assert_eq!(s.clone(), vec!(14)); xorPush(&mut s, 15); - assert_eq!(s.clone(), ~[14, 15]); + assert_eq!(s.clone(), vec!(14, 15)); xorPush(&mut s, 16); - assert_eq!(s.clone(), ~[14, 15, 16]); + assert_eq!(s.clone(), vec!(14, 15, 16)); xorPush(&mut s, 16); - assert_eq!(s.clone(), ~[14, 15]); + assert_eq!(s.clone(), vec!(14, 15)); xorPush(&mut s, 15); - assert_eq!(s.clone(), ~[14]); + assert_eq!(s.clone(), vec!(14)); } fn id(n: Name, s: SyntaxContext) -> Ident { @@ -987,7 +990,7 @@ mod test { // unfold a vector of TestSC values into a SCTable, // returning the resulting index - fn unfold_test_sc(tscs : ~[TestSC], tail: SyntaxContext, table: &SCTable) + fn unfold_test_sc(tscs : Vec , tail: SyntaxContext, table: &SCTable) -> SyntaxContext { tscs.rev_iter().fold(tail, |tail : SyntaxContext, tsc : &TestSC| {match *tsc { @@ -996,11 +999,11 @@ mod test { } // gather a SyntaxContext back into a vector of TestSCs - fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> ~[TestSC] { - let mut result = ~[]; + fn refold_test_sc(mut sc: SyntaxContext, table : &SCTable) -> Vec { + let mut result = Vec::new(); loop { let table = table.table.borrow(); - match table.get()[sc] { + match *table.get().get(sc as uint) { EmptyCtxt => 
{return result;}, Mark(mrk,tail) => { result.push(M(mrk)); @@ -1020,20 +1023,20 @@ mod test { #[test] fn test_unfold_refold(){ let mut t = new_sctable_internal(); - let test_sc = ~[M(3),R(id(101,0),14),M(9)]; + let test_sc = vec!(M(3),R(id(101,0),14),M(9)); assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4); { let table = t.table.borrow(); - assert!(table.get()[2] == Mark(9,0)); - assert!(table.get()[3] == Rename(id(101,0),14,2)); - assert!(table.get()[4] == Mark(3,3)); + assert!(*table.get().get(2) == Mark(9,0)); + assert!(*table.get().get(3) == Rename(id(101,0),14,2)); + assert!(*table.get().get(4) == Mark(3,3)); } assert_eq!(refold_test_sc(4,&t),test_sc); } // extend a syntax context with a sequence of marks given // in a vector. v[0] will be the outermost mark. - fn unfold_marks(mrks: ~[Mrk], tail: SyntaxContext, table: &SCTable) + fn unfold_marks(mrks: Vec , tail: SyntaxContext, table: &SCTable) -> SyntaxContext { mrks.rev_iter().fold(tail, |tail:SyntaxContext, mrk:&Mrk| {new_mark_internal(*mrk,tail,table)}) @@ -1042,11 +1045,11 @@ mod test { #[test] fn unfold_marks_test() { let mut t = new_sctable_internal(); - assert_eq!(unfold_marks(~[3,7],EMPTY_CTXT,&mut t),3); + assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3); { let table = t.table.borrow(); - assert!(table.get()[2] == Mark(7,0)); - assert!(table.get()[3] == Mark(3,2)); + assert!(*table.get().get(2) == Mark(7,0)); + assert!(*table.get().get(3) == Mark(3,2)); } } @@ -1054,32 +1057,32 @@ mod test { let stopname = 242; let name1 = 243; let mut t = new_sctable_internal(); - assert_eq!(marksof (EMPTY_CTXT,stopname,&t),~[]); + assert_eq!(marksof (EMPTY_CTXT,stopname,&t),Vec::new()); // FIXME #5074: ANF'd to dodge nested calls - { let ans = unfold_marks(~[4,98],EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans,stopname,&t),~[4,98]);} + { let ans = unfold_marks(vec!(4,98),EMPTY_CTXT,&mut t); + assert_eq! (marksof (ans,stopname,&t),vec!(4,98));} // does xoring work? - { let ans = unfold_marks(~[5,5,16],EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans,stopname,&t), ~[16]);} + { let ans = unfold_marks(vec!(5,5,16),EMPTY_CTXT,&mut t); + assert_eq! (marksof (ans,stopname,&t), vec!(16));} // does nested xoring work? - { let ans = unfold_marks(~[5,10,10,5,16],EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans, stopname,&t), ~[16]);} + { let ans = unfold_marks(vec!(5,10,10,5,16),EMPTY_CTXT,&mut t); + assert_eq! (marksof (ans, stopname,&t), vec!(16));} // rename where stop doesn't match: - { let chain = ~[M(9), + { let chain = vec!(M(9), R(id(name1, new_mark_internal (4, EMPTY_CTXT,&mut t)), 100101102), - M(14)]; + M(14)); let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans, stopname, &t), ~[9,14]);} + assert_eq! (marksof (ans, stopname, &t), vec!(9,14));} // rename where stop does match { let name1sc = new_mark_internal(4, EMPTY_CTXT, &mut t); - let chain = ~[M(9), + let chain = vec!(M(9), R(id(name1, name1sc), stopname), - M(14)]; + M(14)); let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t); - assert_eq! (marksof (ans, stopname, &t), ~[9]); } + assert_eq! 
(marksof (ans, stopname, &t), vec!(9)); } } @@ -1090,32 +1093,32 @@ mod test { // - ctxt is MT assert_eq!(resolve_internal(id(a,EMPTY_CTXT),&mut t, &mut rt),a); // - simple ignored marks - { let sc = unfold_marks(~[1,2,3],EMPTY_CTXT,&mut t); + { let sc = unfold_marks(vec!(1,2,3),EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),a);} // - orthogonal rename where names don't match - { let sc = unfold_test_sc(~[R(id(50,EMPTY_CTXT),51),M(12)],EMPTY_CTXT,&mut t); + { let sc = unfold_test_sc(vec!(R(id(50,EMPTY_CTXT),51),M(12)),EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),a);} // - rename where names do match, but marks don't { let sc1 = new_mark_internal(1,EMPTY_CTXT,&mut t); - let sc = unfold_test_sc(~[R(id(a,sc1),50), + let sc = unfold_test_sc(vec!(R(id(a,sc1),50), M(1), - M(2)], + M(2)), EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), a);} // - rename where names and marks match - { let sc1 = unfold_test_sc(~[M(1),M(2)],EMPTY_CTXT,&mut t); - let sc = unfold_test_sc(~[R(id(a,sc1),50),M(1),M(2)],EMPTY_CTXT,&mut t); + { let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t); + let sc = unfold_test_sc(vec!(R(id(a,sc1),50),M(1),M(2)),EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), 50); } // - rename where names and marks match by literal sharing - { let sc1 = unfold_test_sc(~[M(1),M(2)],EMPTY_CTXT,&mut t); - let sc = unfold_test_sc(~[R(id(a,sc1),50)],sc1,&mut t); + { let sc1 = unfold_test_sc(vec!(M(1),M(2)),EMPTY_CTXT,&mut t); + let sc = unfold_test_sc(vec!(R(id(a,sc1),50)),sc1,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), 50); } // - two renames of the same var.. can only happen if you use // local-expand to prevent the inner binding from being renamed // during the rename-pass caused by the first: println!("about to run bad test"); - { let sc = unfold_test_sc(~[R(id(a,EMPTY_CTXT),50), - R(id(a,EMPTY_CTXT),51)], + { let sc = unfold_test_sc(vec!(R(id(a,EMPTY_CTXT),50), + R(id(a,EMPTY_CTXT),51)), EMPTY_CTXT,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt), 51); } // the simplest double-rename: @@ -1126,8 +1129,8 @@ mod test { let sc = new_mark_internal(9,a50_to_a51,&mut t); assert_eq!(resolve_internal(id(a,sc),&mut t, &mut rt),51); // but mark on the inside does: - let a50_to_a51_b = unfold_test_sc(~[R(id(a,a_to_a50),51), - M(9)], + let a50_to_a51_b = unfold_test_sc(vec!(R(id(a,a_to_a50),51), + M(9)), a_to_a50, &mut t); assert_eq!(resolve_internal(id(a,a50_to_a51_b),&mut t, &mut rt),50);} diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 6a3ca911d7657..ed56ef15a1c8b 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -21,6 +21,7 @@ use parse::token; use crateid::CrateId; use collections::HashSet; +use std::vec_ng::Vec; pub trait AttrMetaMethods { // This could be changed to `fn check_name(&self, name: InternedString) -> @@ -146,7 +147,7 @@ pub fn mk_name_value_item(name: InternedString, value: ast::Lit) @dummy_spanned(MetaNameValue(name, value)) } -pub fn mk_list_item(name: InternedString, items: ~[@MetaItem]) -> @MetaItem { +pub fn mk_list_item(name: InternedString, items: Vec<@MetaItem> ) -> @MetaItem { @dummy_spanned(MetaList(name, items)) } @@ -212,12 +213,12 @@ pub fn last_meta_item_value_str_by_name(items: &[@MetaItem], name: &str) /* Higher-level applications */ -pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { +pub fn sort_meta_items(items: &[@MetaItem]) -> Vec<@MetaItem> { // This is sort of stupid here, 
but we need to sort by // human-readable strings. let mut v = items.iter() .map(|&mi| (mi.name(), mi)) - .collect::<~[(InternedString, @MetaItem)]>(); + .collect:: >(); v.sort_by(|&(ref a, _), &(ref b, _)| a.cmp(b)); @@ -226,7 +227,8 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { match m.node { MetaList(ref n, ref mis) => { @Spanned { - node: MetaList((*n).clone(), sort_meta_items(*mis)), + node: MetaList((*n).clone(), + sort_meta_items(mis.as_slice())), .. /*bad*/ (*m).clone() } } @@ -239,11 +241,11 @@ pub fn sort_meta_items(items: &[@MetaItem]) -> ~[@MetaItem] { * From a list of crate attributes get only the meta_items that affect crate * linkage */ -pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] { - let mut result = ~[]; +pub fn find_linkage_metas(attrs: &[Attribute]) -> Vec<@MetaItem> { + let mut result = Vec::new(); for attr in attrs.iter().filter(|at| at.name().equiv(&("link"))) { match attr.meta().node { - MetaList(_, ref items) => result.push_all(*items), + MetaList(_, ref items) => result.push_all(items.as_slice()), _ => () } } @@ -272,9 +274,9 @@ pub fn find_inline_attr(attrs: &[Attribute]) -> InlineAttr { match attr.node.value.node { MetaWord(ref n) if n.equiv(&("inline")) => InlineHint, MetaList(ref n, ref items) if n.equiv(&("inline")) => { - if contains_name(*items, "always") { + if contains_name(items.as_slice(), "always") { InlineAlways - } else if contains_name(*items, "never") { + } else if contains_name(items.as_slice(), "never") { InlineNever } else { InlineHint diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index d114d8971f747..6f17505c90227 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -23,6 +23,7 @@ source code snippets, etc. use std::cell::RefCell; use std::cmp; +use std::vec_ng::Vec; use serialize::{Encodable, Decodable, Encoder, Decoder}; pub trait Pos { @@ -188,8 +189,7 @@ pub type FileName = ~str; pub struct FileLines { file: @FileMap, - lines: ~[uint] -} + lines: Vec } /// Identifies an offset of a multi-byte character in a FileMap pub struct MultiByteChar { @@ -210,9 +210,9 @@ pub struct FileMap { /// The start position of this source in the CodeMap start_pos: BytePos, /// Locations of lines beginnings in the source code - lines: RefCell<~[BytePos]>, + lines: RefCell >, /// Locations of multi-byte characters in the source code - multibyte_chars: RefCell<~[MultiByteChar]>, + multibyte_chars: RefCell >, } impl FileMap { @@ -225,14 +225,14 @@ impl FileMap { // the new charpos must be > the last one (or it's the first one). 
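// --- Illustrative sketch, not part of the patch above or below ---
// The attr.rs hunk above keeps the same inline-hint logic while switching the
// meta-item list to a slice view: #[inline(always)] and #[inline(never)] map
// to dedicated levels, and bare #[inline] (or any other item list) stays a
// plain hint. A simplified modern-Rust version of that decision, with
// hypothetical names:
#[derive(Debug, PartialEq)]
enum InlineAttr { Hint, Always, Never }

fn inline_attr_from_items(items: &[&str]) -> InlineAttr {
    if items.contains(&"always") {
        InlineAttr::Always
    } else if items.contains(&"never") {
        InlineAttr::Never
    } else {
        InlineAttr::Hint
    }
}

fn main() {
    assert_eq!(inline_attr_from_items(&["always"]), InlineAttr::Always);
    assert_eq!(inline_attr_from_items(&[]), InlineAttr::Hint);
}
// --- end sketch; the patch continues below ---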
let mut lines = self.lines.borrow_mut();; let line_len = lines.get().len(); - assert!(line_len == 0 || (lines.get()[line_len - 1] < pos)) + assert!(line_len == 0 || (*lines.get().get(line_len - 1) < pos)) lines.get().push(pos); } // get a line from the list of pre-computed line-beginnings pub fn get_line(&self, line: int) -> ~str { let mut lines = self.lines.borrow_mut(); - let begin: BytePos = lines.get()[line] - self.start_pos; + let begin: BytePos = *lines.get().get(line as uint) - self.start_pos; let begin = begin.to_uint(); let slice = self.src.slice_from(begin); match slice.find('\n') { @@ -257,13 +257,13 @@ impl FileMap { } pub struct CodeMap { - files: RefCell<~[@FileMap]> + files: RefCell > } impl CodeMap { pub fn new() -> CodeMap { CodeMap { - files: RefCell::new(~[]), + files: RefCell::new(Vec::new()), } } @@ -278,8 +278,8 @@ impl CodeMap { name: filename, src: src, start_pos: Pos::from_uint(start_pos), - lines: RefCell::new(~[]), - multibyte_chars: RefCell::new(~[]), + lines: RefCell::new(Vec::new()), + multibyte_chars: RefCell::new(Vec::new()), }; files.get().push(filemap); @@ -330,7 +330,7 @@ impl CodeMap { pub fn span_to_lines(&self, sp: Span) -> @FileLines { let lo = self.lookup_char_pos(sp.lo); let hi = self.lookup_char_pos(sp.hi); - let mut lines = ~[]; + let mut lines = Vec::new(); for i in range(lo.line - 1u, hi.line as uint) { lines.push(i); }; @@ -374,7 +374,7 @@ impl CodeMap { let mut b = len; while b - a > 1u { let m = (a + b) / 2u; - if files[m].start_pos > pos { + if files.get(m).start_pos > pos { b = m; } else { a = m; @@ -384,7 +384,7 @@ impl CodeMap { // filemap, but are not the filemaps we want (because they are length 0, they cannot // contain what we are looking for). So, rewind until we find a useful filemap. loop { - let lines = files[a].lines.borrow(); + let lines = files.get(a).lines.borrow(); let lines = lines.get(); if lines.len() > 0 { break; @@ -406,13 +406,13 @@ impl CodeMap { let idx = self.lookup_filemap_idx(pos); let files = self.files.borrow(); - let f = files.get()[idx]; + let f = *files.get().get(idx); let mut a = 0u; let mut lines = f.lines.borrow_mut(); let mut b = lines.get().len(); while b - a > 1u { let m = (a + b) / 2u; - if lines.get()[m] > pos { b = m; } else { a = m; } + if *lines.get().get(m) > pos { b = m; } else { a = m; } } return FileMapAndLine {fm: f, line: a}; } @@ -422,7 +422,7 @@ impl CodeMap { let line = a + 1u; // Line numbers start at 1 let chpos = self.bytepos_to_file_charpos(pos); let lines = f.lines.borrow(); - let linebpos = lines.get()[a]; + let linebpos = *lines.get().get(a); let linechpos = self.bytepos_to_file_charpos(linebpos); debug!("codemap: byte pos {:?} is on the line at byte pos {:?}", pos, linebpos); @@ -441,7 +441,7 @@ impl CodeMap { -> FileMapAndBytePos { let idx = self.lookup_filemap_idx(bpos); let files = self.files.borrow(); - let fm = files.get()[idx]; + let fm = *files.get().get(idx); let offset = bpos - fm.start_pos; return FileMapAndBytePos {fm: fm, pos: offset}; } @@ -451,7 +451,7 @@ impl CodeMap { debug!("codemap: converting {:?} to char pos", bpos); let idx = self.lookup_filemap_idx(bpos); let files = self.files.borrow(); - let map = files.get()[idx]; + let map = files.get().get(idx); // The number of extra bytes due to multibyte chars in the FileMap let mut total_extra_bytes = 0; diff --git a/src/libsyntax/crateid.rs b/src/libsyntax/crateid.rs index b5f02fb7e6441..e5136b7081b33 100644 --- a/src/libsyntax/crateid.rs +++ b/src/libsyntax/crateid.rs @@ -19,6 +19,7 @@ use std::fmt; /// to be `0.0`. 
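// --- Illustrative sketch, not part of the patch above or below ---
// The codemap.rs hunks above only swap indexing for `.get(...)`; the underlying
// lookup is still a binary search for the last recorded start position that is
// <= pos (used both for filemaps and for line starts). A modern-Rust equivalent
// of that loop, assuming the starts are sorted in ascending order:
fn lookup_index(starts: &[u32], pos: u32) -> usize {
    // partition_point counts entries <= pos; step back one to get the index.
    starts.partition_point(|&start| start <= pos).saturating_sub(1)
}

fn main() {
    let line_starts = [0u32, 10, 25, 40];
    assert_eq!(lookup_index(&line_starts, 0), 0);
    assert_eq!(lookup_index(&line_starts, 12), 1);
    assert_eq!(lookup_index(&line_starts, 40), 3);
}
// --- end sketch; the patch continues below ---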
use std::from_str::FromStr; +use std::vec_ng::Vec; #[deriving(Clone, Eq)] pub struct CrateId { @@ -48,25 +49,27 @@ impl fmt::Show for CrateId { impl FromStr for CrateId { fn from_str(s: &str) -> Option { - let pieces: ~[&str] = s.splitn('#', 1).collect(); - let path = pieces[0].to_owned(); + let pieces: Vec<&str> = s.splitn('#', 1).collect(); + let path = pieces.get(0).to_owned(); if path.starts_with("/") || path.ends_with("/") || path.starts_with(".") || path.is_empty() { return None; } - let path_pieces: ~[&str] = path.rsplitn('/', 1).collect(); - let inferred_name = path_pieces[0]; + let path_pieces: Vec<&str> = path.rsplitn('/', 1).collect(); + let inferred_name = *path_pieces.get(0); let (name, version) = if pieces.len() == 1 { (inferred_name.to_owned(), None) } else { - let hash_pieces: ~[&str] = pieces[1].splitn(':', 1).collect(); + let hash_pieces: Vec<&str> = pieces.get(1) + .splitn(':', 1) + .collect(); let (hash_name, hash_version) = if hash_pieces.len() == 1 { - ("", hash_pieces[0]) + ("", *hash_pieces.get(0)) } else { - (hash_pieces[0], hash_pieces[1]) + (*hash_pieces.get(0), *hash_pieces.get(1)) }; let name = if !hash_name.is_empty() { @@ -89,7 +92,7 @@ impl FromStr for CrateId { }; Some(CrateId { - path: path, + path: path.clone(), name: name, version: version, }) diff --git a/src/libsyntax/diagnostic.rs b/src/libsyntax/diagnostic.rs index cb7034a375dd0..c0c64d6fd60b2 100644 --- a/src/libsyntax/diagnostic.rs +++ b/src/libsyntax/diagnostic.rs @@ -325,7 +325,7 @@ fn highlight_lines(err: &mut EmitterWriter, if lines.lines.len() == 1u { let lo = cm.lookup_char_pos(sp.lo); let mut digits = 0u; - let mut num = (lines.lines[0] + 1u) / 10u; + let mut num = (*lines.lines.get(0) + 1u) / 10u; // how many digits must be indent past? while num > 0u { num /= 10u; digits += 1u; } @@ -337,7 +337,7 @@ fn highlight_lines(err: &mut EmitterWriter, // part of the 'filename:line ' part of the previous line. 
let skip = fm.name.len() + digits + 3u; for _ in range(0, skip) { s.push_char(' '); } - let orig = fm.get_line(lines.lines[0] as int); + let orig = fm.get_line(*lines.lines.get(0) as int); for pos in range(0u, left-skip) { let curChar = orig[pos] as char; // Whenever a tab occurs on the previous line, we insert one on diff --git a/src/libsyntax/ext/asm.rs b/src/libsyntax/ext/asm.rs index 1bf82573c4949..6080613460da2 100644 --- a/src/libsyntax/ext/asm.rs +++ b/src/libsyntax/ext/asm.rs @@ -20,6 +20,8 @@ use parse; use parse::token::InternedString; use parse::token; +use std::vec_ng::Vec; + enum State { Asm, Outputs, @@ -42,12 +44,14 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { let mut p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts.to_owned()); + tts.iter() + .map(|x| (*x).clone()) + .collect()); let mut asm = InternedString::new(""); let mut asm_str_style = None; - let mut outputs = ~[]; - let mut inputs = ~[]; + let mut outputs = Vec::new(); + let mut inputs = Vec::new(); let mut cons = ~""; let mut volatile = false; let mut alignstack = false; @@ -119,7 +123,7 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) } } Clobbers => { - let mut clobs = ~[]; + let mut clobs = Vec::new(); while p.token != token::EOF && p.token != token::COLON && p.token != token::MOD_SEP { diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 0636d19163e12..e9fe21eded60c 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -20,6 +20,7 @@ use parse::token::{InternedString, intern, str_to_ident}; use util::small_vector::SmallVector; use collections::HashMap; +use std::vec_ng::Vec; // new-style macro! tt code: // @@ -74,7 +75,7 @@ pub trait IdentMacroExpander { cx: &mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: ~[ast::TokenTree]) + token_tree: Vec ) -> MacResult; } @@ -83,14 +84,14 @@ impl IdentMacroExpander for BasicIdentMacroExpander { cx: &mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: ~[ast::TokenTree]) + token_tree: Vec ) -> MacResult { (self.expander)(cx, sp, ident, token_tree) } } pub type IdentMacroExpanderFn = - fn(&mut ExtCtxt, Span, ast::Ident, ~[ast::TokenTree]) -> MacResult; + fn(&mut ExtCtxt, Span, ast::Ident, Vec ) -> MacResult; pub type MacroCrateRegistrationFun = fn(|ast::Name, SyntaxExtension|); @@ -154,13 +155,13 @@ impl BlockInfo { pub fn new() -> BlockInfo { BlockInfo { macros_escape: false, - pending_renames: ~[], + pending_renames: Vec::new(), } } } // a list of ident->name renamings -pub type RenameList = ~[(ast::Ident,Name)]; +pub type RenameList = Vec<(ast::Ident,Name)> ; // The base map of methods for expanding syntax extension // AST nodes into full ASTs @@ -271,7 +272,7 @@ pub struct MacroCrate { pub trait CrateLoader { fn load_crate(&mut self, krate: &ast::ViewItem) -> MacroCrate; - fn get_exported_macros(&mut self, crate_num: ast::CrateNum) -> ~[~str]; + fn get_exported_macros(&mut self, crate_num: ast::CrateNum) -> Vec<~str> ; fn get_registrar_symbol(&mut self, crate_num: ast::CrateNum) -> Option<~str>; } @@ -284,7 +285,7 @@ pub struct ExtCtxt<'a> { backtrace: Option<@ExpnInfo>, loader: &'a mut CrateLoader, - mod_path: ~[ast::Ident], + mod_path: Vec , trace_mac: bool } @@ -296,7 +297,7 @@ impl<'a> ExtCtxt<'a> { cfg: cfg, backtrace: None, loader: loader, - mod_path: ~[], + mod_path: Vec::new(), trace_mac: false } } @@ -329,7 +330,7 @@ impl<'a> ExtCtxt<'a> { pub fn backtrace(&self) -> Option<@ExpnInfo> { self.backtrace } pub fn mod_push(&mut 
self, i: ast::Ident) { self.mod_path.push(i); } pub fn mod_pop(&mut self) { self.mod_path.pop().unwrap(); } - pub fn mod_path(&self) -> ~[ast::Ident] { self.mod_path.clone() } + pub fn mod_path(&self) -> Vec { self.mod_path.clone() } pub fn bt_push(&mut self, ei: codemap::ExpnInfo) { match ei { ExpnInfo {call_site: cs, callee: ref callee} => { @@ -458,11 +459,13 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt, /// parsing error, emit a non-fatal error and return None. pub fn get_exprs_from_tts(cx: &ExtCtxt, sp: Span, - tts: &[ast::TokenTree]) -> Option<~[@ast::Expr]> { + tts: &[ast::TokenTree]) -> Option > { let mut p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts.to_owned()); - let mut es = ~[]; + tts.iter() + .map(|x| (*x).clone()) + .collect()); + let mut es = Vec::new(); while p.token != token::EOF { if es.len() != 0 && !p.eat(&token::COMMA) { cx.span_err(sp, "expected token: `,`"); @@ -507,12 +510,12 @@ impl Drop for MapChainFrame { // Only generic to make it easy to test pub struct SyntaxEnv { - priv chain: ~[MapChainFrame], + priv chain: Vec , } impl SyntaxEnv { pub fn new() -> SyntaxEnv { - let mut map = SyntaxEnv { chain: ~[] }; + let mut map = SyntaxEnv { chain: Vec::new() }; map.push_frame(); map } @@ -553,6 +556,7 @@ impl SyntaxEnv { } pub fn info<'a>(&'a mut self) -> &'a mut BlockInfo { - &mut self.chain[self.chain.len()-1].info + let last_chain_index = self.chain.len() - 1; + &mut self.chain.get_mut(last_chain_index).info } } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 1ddd579a2f112..34625923ea1f6 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -21,6 +21,8 @@ use opt_vec::OptVec; use parse::token::special_idents; use parse::token; +use std::vec_ng::Vec; + pub struct Field { ident: ast::Ident, ex: @ast::Expr @@ -34,14 +36,14 @@ mod syntax { pub trait AstBuilder { // paths - fn path(&self, span: Span, strs: ~[ast::Ident]) -> ast::Path; + fn path(&self, span: Span, strs: Vec ) -> ast::Path; fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path; - fn path_global(&self, span: Span, strs: ~[ast::Ident]) -> ast::Path; + fn path_global(&self, span: Span, strs: Vec ) -> ast::Path; fn path_all(&self, sp: Span, global: bool, - idents: ~[ast::Ident], + idents: Vec , lifetimes: OptVec, - types: ~[P]) + types: Vec> ) -> ast::Path; // types @@ -61,8 +63,8 @@ pub trait AstBuilder { fn ty_infer(&self, sp: Span) -> P; fn ty_nil(&self) -> P; - fn ty_vars(&self, ty_params: &OptVec) -> ~[P]; - fn ty_vars_global(&self, ty_params: &OptVec) -> ~[P]; + fn ty_vars(&self, ty_params: &OptVec) -> Vec> ; + fn ty_vars_global(&self, ty_params: &OptVec) -> Vec> ; fn ty_field_imm(&self, span: Span, name: Ident, ty: P) -> ast::TypeField; fn strip_bounds(&self, bounds: &Generics) -> Generics; @@ -87,11 +89,11 @@ pub trait AstBuilder { -> @ast::Stmt; // blocks - fn block(&self, span: Span, stmts: ~[@ast::Stmt], expr: Option<@ast::Expr>) -> P; + fn block(&self, span: Span, stmts: Vec<@ast::Stmt> , expr: Option<@ast::Expr>) -> P; fn block_expr(&self, expr: @ast::Expr) -> P; fn block_all(&self, span: Span, - view_items: ~[ast::ViewItem], - stmts: ~[@ast::Stmt], + view_items: Vec , + stmts: Vec<@ast::Stmt> , expr: Option<@ast::Expr>) -> P; // expressions @@ -109,19 +111,19 @@ pub trait AstBuilder { fn expr_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr; fn expr_mut_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr; fn expr_field_access(&self, span: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast::Expr; - fn 
expr_call(&self, span: Span, expr: @ast::Expr, args: ~[@ast::Expr]) -> @ast::Expr; - fn expr_call_ident(&self, span: Span, id: ast::Ident, args: ~[@ast::Expr]) -> @ast::Expr; - fn expr_call_global(&self, sp: Span, fn_path: ~[ast::Ident], - args: ~[@ast::Expr]) -> @ast::Expr; + fn expr_call(&self, span: Span, expr: @ast::Expr, args: Vec<@ast::Expr> ) -> @ast::Expr; + fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<@ast::Expr> ) -> @ast::Expr; + fn expr_call_global(&self, sp: Span, fn_path: Vec , + args: Vec<@ast::Expr> ) -> @ast::Expr; fn expr_method_call(&self, span: Span, expr: @ast::Expr, ident: ast::Ident, - args: ~[@ast::Expr]) -> @ast::Expr; + args: Vec<@ast::Expr> ) -> @ast::Expr; fn expr_block(&self, b: P) -> @ast::Expr; fn expr_cast(&self, sp: Span, expr: @ast::Expr, ty: P) -> @ast::Expr; fn field_imm(&self, span: Span, name: Ident, e: @ast::Expr) -> ast::Field; - fn expr_struct(&self, span: Span, path: ast::Path, fields: ~[ast::Field]) -> @ast::Expr; - fn expr_struct_ident(&self, span: Span, id: ast::Ident, fields: ~[ast::Field]) -> @ast::Expr; + fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec ) -> @ast::Expr; + fn expr_struct_ident(&self, span: Span, id: ast::Ident, fields: Vec ) -> @ast::Expr; fn expr_lit(&self, sp: Span, lit: ast::Lit_) -> @ast::Expr; @@ -131,9 +133,9 @@ pub trait AstBuilder { fn expr_bool(&self, sp: Span, value: bool) -> @ast::Expr; fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast::Expr; - fn expr_vec(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; - fn expr_vec_uniq(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; - fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; + fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr; + fn expr_vec_ng(&self, sp: Span) -> @ast::Expr; + fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr; fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr; fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr; @@ -152,55 +154,55 @@ pub trait AstBuilder { span: Span, ident: ast::Ident, bm: ast::BindingMode) -> @ast::Pat; - fn pat_enum(&self, span: Span, path: ast::Path, subpats: ~[@ast::Pat]) -> @ast::Pat; + fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<@ast::Pat> ) -> @ast::Pat; fn pat_struct(&self, span: Span, - path: ast::Path, field_pats: ~[ast::FieldPat]) -> @ast::Pat; + path: ast::Path, field_pats: Vec ) -> @ast::Pat; - fn arm(&self, span: Span, pats: ~[@ast::Pat], expr: @ast::Expr) -> ast::Arm; + fn arm(&self, span: Span, pats: Vec<@ast::Pat> , expr: @ast::Expr) -> ast::Arm; fn arm_unreachable(&self, span: Span) -> ast::Arm; - fn expr_match(&self, span: Span, arg: @ast::Expr, arms: ~[ast::Arm]) -> @ast::Expr; + fn expr_match(&self, span: Span, arg: @ast::Expr, arms: Vec ) -> @ast::Expr; fn expr_if(&self, span: Span, cond: @ast::Expr, then: @ast::Expr, els: Option<@ast::Expr>) -> @ast::Expr; fn lambda_fn_decl(&self, span: Span, fn_decl: P, blk: P) -> @ast::Expr; - fn lambda(&self, span: Span, ids: ~[ast::Ident], blk: P) -> @ast::Expr; + fn lambda(&self, span: Span, ids: Vec , blk: P) -> @ast::Expr; fn lambda0(&self, span: Span, blk: P) -> @ast::Expr; fn lambda1(&self, span: Span, blk: P, ident: ast::Ident) -> @ast::Expr; - fn lambda_expr(&self, span: Span, ids: ~[ast::Ident], blk: @ast::Expr) -> @ast::Expr; + fn lambda_expr(&self, span: Span, ids: Vec , blk: @ast::Expr) -> @ast::Expr; fn lambda_expr_0(&self, span: Span, expr: @ast::Expr) -> @ast::Expr; fn 
lambda_expr_1(&self, span: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast::Expr; - fn lambda_stmts(&self, span: Span, ids: ~[ast::Ident], blk: ~[@ast::Stmt]) -> @ast::Expr; - fn lambda_stmts_0(&self, span: Span, stmts: ~[@ast::Stmt]) -> @ast::Expr; - fn lambda_stmts_1(&self, span: Span, stmts: ~[@ast::Stmt], ident: ast::Ident) -> @ast::Expr; + fn lambda_stmts(&self, span: Span, ids: Vec , blk: Vec<@ast::Stmt> ) -> @ast::Expr; + fn lambda_stmts_0(&self, span: Span, stmts: Vec<@ast::Stmt> ) -> @ast::Expr; + fn lambda_stmts_1(&self, span: Span, stmts: Vec<@ast::Stmt> , ident: ast::Ident) -> @ast::Expr; // items fn item(&self, span: Span, - name: Ident, attrs: ~[ast::Attribute], node: ast::Item_) -> @ast::Item; + name: Ident, attrs: Vec , node: ast::Item_) -> @ast::Item; fn arg(&self, span: Span, name: Ident, ty: P) -> ast::Arg; // FIXME unused self - fn fn_decl(&self, inputs: ~[ast::Arg], output: P) -> P; + fn fn_decl(&self, inputs: Vec , output: P) -> P; fn item_fn_poly(&self, span: Span, name: Ident, - inputs: ~[ast::Arg], + inputs: Vec , output: P, generics: Generics, body: P) -> @ast::Item; fn item_fn(&self, span: Span, name: Ident, - inputs: ~[ast::Arg], + inputs: Vec , output: P, body: P) -> @ast::Item; - fn variant(&self, span: Span, name: Ident, tys: ~[P]) -> ast::Variant; + fn variant(&self, span: Span, name: Ident, tys: Vec> ) -> ast::Variant; fn item_enum_poly(&self, span: Span, name: Ident, @@ -216,8 +218,8 @@ pub trait AstBuilder { fn item_struct(&self, span: Span, name: Ident, struct_def: ast::StructDef) -> @ast::Item; fn item_mod(&self, span: Span, - name: Ident, attrs: ~[ast::Attribute], - vi: ~[ast::ViewItem], items: ~[@ast::Item]) -> @ast::Item; + name: Ident, attrs: Vec , + vi: Vec , items: Vec<@ast::Item> ) -> @ast::Item; fn item_ty_poly(&self, span: Span, @@ -232,7 +234,7 @@ pub trait AstBuilder { fn meta_list(&self, sp: Span, name: InternedString, - mis: ~[@ast::MetaItem]) + mis: Vec<@ast::MetaItem> ) -> @ast::MetaItem; fn meta_name_value(&self, sp: Span, @@ -241,35 +243,35 @@ pub trait AstBuilder { -> @ast::MetaItem; fn view_use(&self, sp: Span, - vis: ast::Visibility, vp: ~[@ast::ViewPath]) -> ast::ViewItem; + vis: ast::Visibility, vp: Vec<@ast::ViewPath> ) -> ast::ViewItem; fn view_use_simple(&self, sp: Span, vis: ast::Visibility, path: ast::Path) -> ast::ViewItem; fn view_use_simple_(&self, sp: Span, vis: ast::Visibility, ident: ast::Ident, path: ast::Path) -> ast::ViewItem; fn view_use_list(&self, sp: Span, vis: ast::Visibility, - path: ~[ast::Ident], imports: &[ast::Ident]) -> ast::ViewItem; + path: Vec , imports: &[ast::Ident]) -> ast::ViewItem; fn view_use_glob(&self, sp: Span, - vis: ast::Visibility, path: ~[ast::Ident]) -> ast::ViewItem; + vis: ast::Visibility, path: Vec ) -> ast::ViewItem; } impl<'a> AstBuilder for ExtCtxt<'a> { - fn path(&self, span: Span, strs: ~[ast::Ident]) -> ast::Path { - self.path_all(span, false, strs, opt_vec::Empty, ~[]) + fn path(&self, span: Span, strs: Vec ) -> ast::Path { + self.path_all(span, false, strs, opt_vec::Empty, Vec::new()) } fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path { - self.path(span, ~[id]) + self.path(span, vec!(id)) } - fn path_global(&self, span: Span, strs: ~[ast::Ident]) -> ast::Path { - self.path_all(span, true, strs, opt_vec::Empty, ~[]) + fn path_global(&self, span: Span, strs: Vec ) -> ast::Path { + self.path_all(span, true, strs, opt_vec::Empty, Vec::new()) } fn path_all(&self, sp: Span, global: bool, - mut idents: ~[ast::Ident], + mut idents: Vec , lifetimes: OptVec, - types: ~[P]) 
+ types: Vec> ) -> ast::Path { let last_identifier = idents.pop().unwrap(); - let mut segments: ~[ast::PathSegment] = idents.move_iter() + let mut segments: Vec = idents.move_iter() .map(|ident| { ast::PathSegment { identifier: ident, @@ -335,13 +337,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.ty_path( self.path_all(DUMMY_SP, true, - ~[ + vec!( self.ident_of("std"), self.ident_of("option"), self.ident_of("Option") - ], + ), opt_vec::Empty, - ~[ ty ]), None) + vec!( ty )), None) } fn ty_field_imm(&self, span: Span, name: Ident, ty: P) -> ast::TypeField { @@ -379,15 +381,15 @@ impl<'a> AstBuilder for ExtCtxt<'a> { // these are strange, and probably shouldn't be used outside of // pipes. Specifically, the global version possible generates // incorrect code. - fn ty_vars(&self, ty_params: &OptVec) -> ~[P] { + fn ty_vars(&self, ty_params: &OptVec) -> Vec> { opt_vec::take_vec( ty_params.map(|p| self.ty_ident(DUMMY_SP, p.ident))) } - fn ty_vars_global(&self, ty_params: &OptVec) -> ~[P] { + fn ty_vars_global(&self, ty_params: &OptVec) -> Vec> { opt_vec::take_vec( ty_params.map(|p| self.ty_path( - self.path_global(DUMMY_SP, ~[p.ident]), None))) + self.path_global(DUMMY_SP, vec!(p.ident)), None))) } fn strip_bounds(&self, generics: &Generics) -> Generics { @@ -459,17 +461,17 @@ impl<'a> AstBuilder for ExtCtxt<'a> { @respan(sp, ast::StmtDecl(@decl, ast::DUMMY_NODE_ID)) } - fn block(&self, span: Span, stmts: ~[@ast::Stmt], expr: Option<@Expr>) -> P { - self.block_all(span, ~[], stmts, expr) + fn block(&self, span: Span, stmts: Vec<@ast::Stmt> , expr: Option<@Expr>) -> P { + self.block_all(span, Vec::new(), stmts, expr) } fn block_expr(&self, expr: @ast::Expr) -> P { - self.block_all(expr.span, ~[], ~[], Some(expr)) + self.block_all(expr.span, Vec::new(), Vec::new(), Some(expr)) } fn block_all(&self, span: Span, - view_items: ~[ast::ViewItem], - stmts: ~[@ast::Stmt], + view_items: Vec , + stmts: Vec<@ast::Stmt> , expr: Option<@ast::Expr>) -> P { P(ast::Block { view_items: view_items, @@ -517,7 +519,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn expr_field_access(&self, sp: Span, expr: @ast::Expr, ident: ast::Ident) -> @ast::Expr { - self.expr(sp, ast::ExprField(expr, ident, ~[])) + self.expr(sp, ast::ExprField(expr, ident, Vec::new())) } fn expr_addr_of(&self, sp: Span, e: @ast::Expr) -> @ast::Expr { self.expr(sp, ast::ExprAddrOf(ast::MutImmutable, e)) @@ -526,23 +528,23 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr(sp, ast::ExprAddrOf(ast::MutMutable, e)) } - fn expr_call(&self, span: Span, expr: @ast::Expr, args: ~[@ast::Expr]) -> @ast::Expr { + fn expr_call(&self, span: Span, expr: @ast::Expr, args: Vec<@ast::Expr> ) -> @ast::Expr { self.expr(span, ast::ExprCall(expr, args)) } - fn expr_call_ident(&self, span: Span, id: ast::Ident, args: ~[@ast::Expr]) -> @ast::Expr { + fn expr_call_ident(&self, span: Span, id: ast::Ident, args: Vec<@ast::Expr> ) -> @ast::Expr { self.expr(span, ast::ExprCall(self.expr_ident(span, id), args)) } - fn expr_call_global(&self, sp: Span, fn_path: ~[ast::Ident], - args: ~[@ast::Expr]) -> @ast::Expr { + fn expr_call_global(&self, sp: Span, fn_path: Vec , + args: Vec<@ast::Expr> ) -> @ast::Expr { let pathexpr = self.expr_path(self.path_global(sp, fn_path)); self.expr_call(sp, pathexpr, args) } fn expr_method_call(&self, span: Span, expr: @ast::Expr, ident: ast::Ident, - mut args: ~[@ast::Expr]) -> @ast::Expr { + mut args: Vec<@ast::Expr> ) -> @ast::Expr { args.unshift(expr); - self.expr(span, ast::ExprMethodCall(ident, ~[], args)) + self.expr(span, 
ast::ExprMethodCall(ident, Vec::new(), args)) } fn expr_block(&self, b: P) -> @ast::Expr { self.expr(b.span, ast::ExprBlock(b)) @@ -550,11 +552,11 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn field_imm(&self, span: Span, name: Ident, e: @ast::Expr) -> ast::Field { ast::Field { ident: respan(span, name), expr: e, span: span } } - fn expr_struct(&self, span: Span, path: ast::Path, fields: ~[ast::Field]) -> @ast::Expr { + fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec ) -> @ast::Expr { self.expr(span, ast::ExprStruct(path, fields, None)) } fn expr_struct_ident(&self, span: Span, - id: ast::Ident, fields: ~[ast::Field]) -> @ast::Expr { + id: ast::Ident, fields: Vec ) -> @ast::Expr { self.expr_struct(span, self.path_ident(span, id), fields) } @@ -577,13 +579,18 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_vstore(&self, sp: Span, expr: @ast::Expr, vst: ast::ExprVstore) -> @ast::Expr { self.expr(sp, ast::ExprVstore(expr, vst)) } - fn expr_vec(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr { + fn expr_vec(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr { self.expr(sp, ast::ExprVec(exprs, ast::MutImmutable)) } - fn expr_vec_uniq(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr { - self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreUniq) + fn expr_vec_ng(&self, sp: Span) -> @ast::Expr { + self.expr_call_global(sp, + vec!(self.ident_of("std"), + self.ident_of("vec_ng"), + self.ident_of("Vec"), + self.ident_of("new")), + Vec::new()) } - fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr { + fn expr_vec_slice(&self, sp: Span, exprs: Vec<@ast::Expr> ) -> @ast::Expr { self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice) } fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr { @@ -600,20 +607,18 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_some(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr { - let some = ~[ + let some = vec!( self.ident_of("std"), self.ident_of("option"), - self.ident_of("Some"), - ]; - self.expr_call_global(sp, some, ~[expr]) + self.ident_of("Some")); + self.expr_call_global(sp, some, vec!(expr)) } fn expr_none(&self, sp: Span) -> @ast::Expr { - let none = self.path_global(sp, ~[ + let none = self.path_global(sp, vec!( self.ident_of("std"), self.ident_of("option"), - self.ident_of("None"), - ]); + self.ident_of("None"))); self.expr_path(none) } @@ -621,17 +626,15 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let loc = self.codemap().lookup_char_pos(span.lo); self.expr_call_global( span, - ~[ + vec!( self.ident_of("std"), self.ident_of("rt"), - self.ident_of("begin_unwind"), - ], - ~[ + self.ident_of("begin_unwind")), + vec!( self.expr_str(span, msg), self.expr_str(span, token::intern_and_get_ident(loc.file.name)), - self.expr_uint(span, loc.line), - ]) + self.expr_uint(span, loc.line))) } fn expr_unreachable(&self, span: Span) -> @ast::Expr { @@ -662,17 +665,17 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let pat = ast::PatIdent(bm, path, None); self.pat(span, pat) } - fn pat_enum(&self, span: Span, path: ast::Path, subpats: ~[@ast::Pat]) -> @ast::Pat { + fn pat_enum(&self, span: Span, path: ast::Path, subpats: Vec<@ast::Pat> ) -> @ast::Pat { let pat = ast::PatEnum(path, Some(subpats)); self.pat(span, pat) } fn pat_struct(&self, span: Span, - path: ast::Path, field_pats: ~[ast::FieldPat]) -> @ast::Pat { + path: ast::Path, field_pats: Vec ) -> @ast::Pat { let pat = ast::PatStruct(path, field_pats, false); self.pat(span, pat) } - fn arm(&self, _span: Span, pats: ~[@ast::Pat], expr: @ast::Expr) -> 
ast::Arm { + fn arm(&self, _span: Span, pats: Vec<@ast::Pat> , expr: @ast::Expr) -> ast::Arm { ast::Arm { pats: pats, guard: None, @@ -681,10 +684,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn arm_unreachable(&self, span: Span) -> ast::Arm { - self.arm(span, ~[self.pat_wild(span)], self.expr_unreachable(span)) + self.arm(span, vec!(self.pat_wild(span)), self.expr_unreachable(span)) } - fn expr_match(&self, span: Span, arg: @ast::Expr, arms: ~[ast::Arm]) -> @Expr { + fn expr_match(&self, span: Span, arg: @ast::Expr, arms: Vec ) -> @Expr { self.expr(span, ast::ExprMatch(arg, arms)) } @@ -698,24 +701,22 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn_decl: P, blk: P) -> @ast::Expr { self.expr(span, ast::ExprFnBlock(fn_decl, blk)) } - fn lambda(&self, span: Span, ids: ~[ast::Ident], blk: P) -> @ast::Expr { + fn lambda(&self, span: Span, ids: Vec , blk: P) -> @ast::Expr { let fn_decl = self.fn_decl( ids.map(|id| self.arg(span, *id, self.ty_infer(span))), self.ty_infer(span)); self.expr(span, ast::ExprFnBlock(fn_decl, blk)) } - fn lambda0(&self, _span: Span, blk: P) -> @ast::Expr { - let blk_e = self.expr(blk.span, ast::ExprBlock(blk)); - quote_expr!(self, || $blk_e ) + fn lambda0(&self, span: Span, blk: P) -> @ast::Expr { + self.lambda(span, Vec::new(), blk) } - fn lambda1(&self, _span: Span, blk: P, ident: ast::Ident) -> @ast::Expr { - let blk_e = self.expr(blk.span, ast::ExprBlock(blk)); - quote_expr!(self, |$ident| $blk_e ) + fn lambda1(&self, span: Span, blk: P, ident: ast::Ident) -> @ast::Expr { + self.lambda(span, vec!(ident), blk) } - fn lambda_expr(&self, span: Span, ids: ~[ast::Ident], expr: @ast::Expr) -> @ast::Expr { + fn lambda_expr(&self, span: Span, ids: Vec , expr: @ast::Expr) -> @ast::Expr { self.lambda(span, ids, self.block_expr(expr)) } fn lambda_expr_0(&self, span: Span, expr: @ast::Expr) -> @ast::Expr { @@ -725,13 +726,17 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.lambda1(span, self.block_expr(expr), ident) } - fn lambda_stmts(&self, span: Span, ids: ~[ast::Ident], stmts: ~[@ast::Stmt]) -> @ast::Expr { + fn lambda_stmts(&self, + span: Span, + ids: Vec, + stmts: Vec<@ast::Stmt>) + -> @ast::Expr { self.lambda(span, ids, self.block(span, stmts, None)) } - fn lambda_stmts_0(&self, span: Span, stmts: ~[@ast::Stmt]) -> @ast::Expr { + fn lambda_stmts_0(&self, span: Span, stmts: Vec<@ast::Stmt> ) -> @ast::Expr { self.lambda0(span, self.block(span, stmts, None)) } - fn lambda_stmts_1(&self, span: Span, stmts: ~[@ast::Stmt], ident: ast::Ident) -> @ast::Expr { + fn lambda_stmts_1(&self, span: Span, stmts: Vec<@ast::Stmt> , ident: ast::Ident) -> @ast::Expr { self.lambda1(span, self.block(span, stmts, None), ident) } @@ -745,7 +750,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } // FIXME unused self - fn fn_decl(&self, inputs: ~[ast::Arg], output: P) -> P { + fn fn_decl(&self, inputs: Vec , output: P) -> P { P(ast::FnDecl { inputs: inputs, output: output, @@ -755,7 +760,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn item(&self, span: Span, - name: Ident, attrs: ~[ast::Attribute], node: ast::Item_) -> @ast::Item { + name: Ident, attrs: Vec , node: ast::Item_) -> @ast::Item { // FIXME: Would be nice if our generated code didn't violate // Rust coding conventions @ast::Item { ident: name, @@ -769,13 +774,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_fn_poly(&self, span: Span, name: Ident, - inputs: ~[ast::Arg], + inputs: Vec , output: P, generics: Generics, body: P) -> @ast::Item { self.item(span, name, - ~[], + Vec::new(), ast::ItemFn(self.fn_decl(inputs, output), ast::ImpureFn, 
AbiSet::Rust(), @@ -786,7 +791,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_fn(&self, span: Span, name: Ident, - inputs: ~[ast::Arg], + inputs: Vec , output: P, body: P ) -> @ast::Item { @@ -799,7 +804,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { body) } - fn variant(&self, span: Span, name: Ident, tys: ~[P]) -> ast::Variant { + fn variant(&self, span: Span, name: Ident, tys: Vec> ) -> ast::Variant { let args = tys.move_iter().map(|ty| { ast::VariantArg { ty: ty, id: ast::DUMMY_NODE_ID } }).collect(); @@ -807,7 +812,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { respan(span, ast::Variant_ { name: name, - attrs: ~[], + attrs: Vec::new(), kind: ast::TupleVariantKind(args), id: ast::DUMMY_NODE_ID, disr_expr: None, @@ -818,7 +823,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_enum_poly(&self, span: Span, name: Ident, enum_definition: ast::EnumDef, generics: Generics) -> @ast::Item { - self.item(span, name, ~[], ast::ItemEnum(enum_definition, generics)) + self.item(span, name, Vec::new(), ast::ItemEnum(enum_definition, generics)) } fn item_enum(&self, span: Span, name: Ident, @@ -839,13 +844,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_struct_poly(&self, span: Span, name: Ident, struct_def: ast::StructDef, generics: Generics) -> @ast::Item { - self.item(span, name, ~[], ast::ItemStruct(@struct_def, generics)) + self.item(span, name, Vec::new(), ast::ItemStruct(@struct_def, generics)) } fn item_mod(&self, span: Span, name: Ident, - attrs: ~[ast::Attribute], - vi: ~[ast::ViewItem], - items: ~[@ast::Item]) -> @ast::Item { + attrs: Vec , + vi: Vec , + items: Vec<@ast::Item> ) -> @ast::Item { self.item( span, name, @@ -859,7 +864,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn item_ty_poly(&self, span: Span, name: Ident, ty: P, generics: Generics) -> @ast::Item { - self.item(span, name, ~[], ast::ItemTy(ty, generics)) + self.item(span, name, Vec::new(), ast::ItemTy(ty, generics)) } fn item_ty(&self, span: Span, name: Ident, ty: P) -> @ast::Item { @@ -880,7 +885,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn meta_list(&self, sp: Span, name: InternedString, - mis: ~[@ast::MetaItem]) + mis: Vec<@ast::MetaItem> ) -> @ast::MetaItem { @respan(sp, ast::MetaList(name, mis)) } @@ -893,10 +898,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn view_use(&self, sp: Span, - vis: ast::Visibility, vp: ~[@ast::ViewPath]) -> ast::ViewItem { + vis: ast::Visibility, vp: Vec<@ast::ViewPath> ) -> ast::ViewItem { ast::ViewItem { node: ast::ViewItemUse(vp), - attrs: ~[], + attrs: Vec::new(), vis: vis, span: sp } @@ -910,30 +915,32 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn view_use_simple_(&self, sp: Span, vis: ast::Visibility, ident: ast::Ident, path: ast::Path) -> ast::ViewItem { self.view_use(sp, vis, - ~[@respan(sp, + vec!(@respan(sp, ast::ViewPathSimple(ident, path, - ast::DUMMY_NODE_ID))]) + ast::DUMMY_NODE_ID)))) } fn view_use_list(&self, sp: Span, vis: ast::Visibility, - path: ~[ast::Ident], imports: &[ast::Ident]) -> ast::ViewItem { + path: Vec , imports: &[ast::Ident]) -> ast::ViewItem { let imports = imports.map(|id| { respan(sp, ast::PathListIdent_ { name: *id, id: ast::DUMMY_NODE_ID }) }); self.view_use(sp, vis, - ~[@respan(sp, + vec!(@respan(sp, ast::ViewPathList(self.path(sp, path), - imports, - ast::DUMMY_NODE_ID))]) + imports.iter() + .map(|x| *x) + .collect(), + ast::DUMMY_NODE_ID)))) } fn view_use_glob(&self, sp: Span, - vis: ast::Visibility, path: ~[ast::Ident]) -> ast::ViewItem { + vis: ast::Visibility, path: Vec ) -> ast::ViewItem { self.view_use(sp, vis, - ~[@respan(sp, - 
ast::ViewPathGlob(self.path(sp, path), ast::DUMMY_NODE_ID))]) + vec!(@respan(sp, + ast::ViewPathGlob(self.path(sp, path), ast::DUMMY_NODE_ID)))) } } diff --git a/src/libsyntax/ext/bytes.rs b/src/libsyntax/ext/bytes.rs index 68aa757c524fb..6123fd4d3d490 100644 --- a/src/libsyntax/ext/bytes.rs +++ b/src/libsyntax/ext/bytes.rs @@ -17,6 +17,7 @@ use ext::base; use ext::build::AstBuilder; use std::char; +use std::vec_ng::Vec; pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { // Gather all argument expressions @@ -24,7 +25,7 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> None => return MacResult::dummy_expr(sp), Some(e) => e, }; - let mut bytes = ~[]; + let mut bytes = Vec::new(); for expr in exprs.iter() { match expr.node { diff --git a/src/libsyntax/ext/cfg.rs b/src/libsyntax/ext/cfg.rs index 295c456c9d0bc..5d11a0d1e2ff2 100644 --- a/src/libsyntax/ext/cfg.rs +++ b/src/libsyntax/ext/cfg.rs @@ -26,12 +26,16 @@ use parse::token::InternedString; use parse::token; use parse; +use std::vec_ng::Vec; + pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { let mut p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts.to_owned()); + tts.iter() + .map(|x| (*x).clone()) + .collect()); - let mut cfgs = ~[]; + let mut cfgs = Vec::new(); // parse `cfg!(meta_item, meta_item(x,y), meta_item="foo", ...)` while p.token != token::EOF { cfgs.push(p.parse_meta_item()); @@ -42,7 +46,8 @@ pub fn expand_cfg(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::M // test_cfg searches for meta items looking like `cfg(foo, ...)` let in_cfg = &[cx.meta_list(sp, InternedString::new("cfg"), cfgs)]; - let matches_cfg = attr::test_cfg(cx.cfg(), in_cfg.iter().map(|&x| x)); + let matches_cfg = attr::test_cfg(cx.cfg().as_slice(), + in_cfg.iter().map(|&x| x)); let e = cx.expr_bool(sp, matches_cfg); MRExpr(e) } diff --git a/src/libsyntax/ext/concat_idents.rs b/src/libsyntax/ext/concat_idents.rs index 85cfd4f61e414..2552586939811 100644 --- a/src/libsyntax/ext/concat_idents.rs +++ b/src/libsyntax/ext/concat_idents.rs @@ -48,13 +48,13 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) ast::Path { span: sp, global: false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: res, lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ] + ) } ), span: sp, diff --git a/src/libsyntax/ext/deriving/clone.rs b/src/libsyntax/ext/deriving/clone.rs index f52a2accd8d82..feda1694ff193 100644 --- a/src/libsyntax/ext/deriving/clone.rs +++ b/src/libsyntax/ext/deriving/clone.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_clone(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -21,22 +23,22 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "clone", "Clone"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "clone", "Clone")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "clone", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[], + args: Vec::new(), ret_ty: Self, inline: true, const_nonmatching: false, combine_substructure: |c, s, sub| cs_clone("Clone", c, s, sub) } - ] + ) }; trait_def.expand(cx, mitem, item, push) @@ -49,16 +51,16 
@@ pub fn expand_deriving_deep_clone(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "clone", "DeepClone"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "clone", "DeepClone")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "deep_clone", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[], + args: Vec::new(), ret_ty: Self, inline: true, const_nonmatching: false, @@ -66,7 +68,7 @@ pub fn expand_deriving_deep_clone(cx: &mut ExtCtxt, // call deep_clone (not clone) here. combine_substructure: |c, s, sub| cs_clone("DeepClone", c, s, sub) } - ] + ) }; trait_def.expand(cx, mitem, item, push) @@ -80,7 +82,7 @@ fn cs_clone( let ctor_ident; let all_fields; let subcall = |field: &FieldInfo| - cx.expr_method_call(field.span, field.self_, clone_ident, ~[]); + cx.expr_method_call(field.span, field.self_, clone_ident, Vec::new()); match *substr.fields { Struct(ref af) => { @@ -99,7 +101,7 @@ fn cs_clone( name)) } - if all_fields.len() >= 1 && all_fields[0].name.is_none() { + if all_fields.len() >= 1 && all_fields.get(0).name.is_none() { // enum-like let subcalls = all_fields.map(subcall); cx.expr_call_ident(trait_span, ctor_ident, subcalls) diff --git a/src/libsyntax/ext/deriving/cmp/eq.rs b/src/libsyntax/ext/deriving/cmp/eq.rs index b031f69084d33..1e7199ccc9557 100644 --- a/src/libsyntax/ext/deriving/cmp/eq.rs +++ b/src/libsyntax/ext/deriving/cmp/eq.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_eq(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -36,8 +38,8 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt, name: $name, generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[borrowed_self()], - ret_ty: Literal(Path::new(~["bool"])), + args: vec!(borrowed_self()), + ret_ty: Literal(Path::new(vec!("bool"))), inline: true, const_nonmatching: true, combine_substructure: $f @@ -47,14 +49,14 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt, let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "cmp", "Eq"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "cmp", "Eq")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( md!("eq", cs_eq), md!("ne", cs_ne) - ] + ) }; trait_def.expand(cx, mitem, item, push) } diff --git a/src/libsyntax/ext/deriving/cmp/ord.rs b/src/libsyntax/ext/deriving/cmp/ord.rs index 10a416045cbda..66f459882397c 100644 --- a/src/libsyntax/ext/deriving/cmp/ord.rs +++ b/src/libsyntax/ext/deriving/cmp/ord.rs @@ -15,6 +15,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_ord(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -26,8 +28,8 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt, name: $name, generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[borrowed_self()], - ret_ty: Literal(Path::new(~["bool"])), + args: vec!(borrowed_self()), + ret_ty: Literal(Path::new(vec!("bool"))), inline: true, const_nonmatching: false, combine_substructure: |cx, span, substr| cs_op($op, $equal, cx, span, substr) @@ -37,16 +39,16 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt, let trait_def = TraitDef { span: span, - attributes: ~[], - path: 
Path::new(~["std", "cmp", "Ord"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "cmp", "Ord")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( md!("lt", true, false), md!("le", true, true), md!("gt", false, false), md!("ge", false, true) - ] + ) }; trait_def.expand(cx, mitem, item, push) } diff --git a/src/libsyntax/ext/deriving/cmp/totaleq.rs b/src/libsyntax/ext/deriving/cmp/totaleq.rs index 2bfab8646a6aa..2b3c0b9ea6915 100644 --- a/src/libsyntax/ext/deriving/cmp/totaleq.rs +++ b/src/libsyntax/ext/deriving/cmp/totaleq.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_totaleq(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -26,22 +28,22 @@ pub fn expand_deriving_totaleq(cx: &mut ExtCtxt, let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "cmp", "TotalEq"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "cmp", "TotalEq")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "equals", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[borrowed_self()], - ret_ty: Literal(Path::new(~["bool"])), + args: vec!(borrowed_self()), + ret_ty: Literal(Path::new(vec!("bool"))), inline: true, const_nonmatching: true, combine_substructure: cs_equals } - ] + ) }; trait_def.expand(cx, mitem, item, push) } diff --git a/src/libsyntax/ext/deriving/cmp/totalord.rs b/src/libsyntax/ext/deriving/cmp/totalord.rs index 2e6c4a5422892..89a344bdb7b3c 100644 --- a/src/libsyntax/ext/deriving/cmp/totalord.rs +++ b/src/libsyntax/ext/deriving/cmp/totalord.rs @@ -14,7 +14,9 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; + use std::cmp::{Ordering, Equal, Less, Greater}; +use std::vec_ng::Vec; pub fn expand_deriving_totalord(cx: &mut ExtCtxt, span: Span, @@ -23,22 +25,22 @@ pub fn expand_deriving_totalord(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "cmp", "TotalOrd"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "cmp", "TotalOrd")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "cmp", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[borrowed_self()], - ret_ty: Literal(Path::new(~["std", "cmp", "Ordering"])), + args: vec!(borrowed_self()), + ret_ty: Literal(Path::new(vec!("std", "cmp", "Ordering"))), inline: true, const_nonmatching: false, combine_substructure: cs_cmp } - ] + ) }; trait_def.expand(cx, mitem, item, push) @@ -52,9 +54,9 @@ pub fn ordering_const(cx: &mut ExtCtxt, span: Span, cnst: Ordering) -> ast::Path Greater => "Greater" }; cx.path_global(span, - ~[cx.ident_of("std"), + vec!(cx.ident_of("std"), cx.ident_of("cmp"), - cx.ident_of(cnst)]) + cx.ident_of(cnst))) } pub fn cs_cmp(cx: &mut ExtCtxt, span: Span, @@ -99,7 +101,7 @@ pub fn cs_cmp(cx: &mut ExtCtxt, span: Span, let if_ = cx.expr_if(span, cond, old, Some(cx.expr_ident(span, test_id))); - cx.expr_block(cx.block(span, ~[assign], Some(if_))) + cx.expr_block(cx.block(span, vec!(assign), Some(if_))) }, cx.expr_path(equals_path.clone()), |cx, span, list, _| { diff --git a/src/libsyntax/ext/deriving/decodable.rs 
b/src/libsyntax/ext/deriving/decodable.rs index 7aaa66cbfb5de..bc6d69c7ccabe 100644 --- a/src/libsyntax/ext/deriving/decodable.rs +++ b/src/libsyntax/ext/deriving/decodable.rs @@ -21,6 +21,8 @@ use ext::deriving::generic::*; use parse::token::InternedString; use parse::token; +use std::vec_ng::Vec; + pub fn expand_deriving_decodable(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -28,27 +30,26 @@ pub fn expand_deriving_decodable(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new_(~["serialize", "Decodable"], None, - ~[~Literal(Path::new_local("__D"))], true), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new_(vec!("serialize", "Decodable"), None, + vec!(~Literal(Path::new_local("__D"))), true), + additional_bounds: Vec::new(), generics: LifetimeBounds { - lifetimes: ~[], - bounds: ~[("__D", ~[Path::new(~["serialize", "Decoder"])])], + lifetimes: Vec::new(), + bounds: vec!(("__D", vec!(Path::new(vec!("serialize", "Decoder"))))), }, - methods: ~[ + methods: vec!( MethodDef { name: "decode", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[Ptr(~Literal(Path::new_local("__D")), - Borrowed(None, MutMutable))], + args: vec!(Ptr(~Literal(Path::new_local("__D")), + Borrowed(None, MutMutable))), ret_ty: Self, inline: false, const_nonmatching: true, combine_substructure: decodable_substructure, - }, - ] + }) }; trait_def.expand(cx, mitem, item, push) @@ -57,13 +58,13 @@ pub fn expand_deriving_decodable(cx: &mut ExtCtxt, fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr { let decoder = substr.nonself_args[0]; - let recurse = ~[cx.ident_of("serialize"), + let recurse = vec!(cx.ident_of("serialize"), cx.ident_of("Decodable"), - cx.ident_of("decode")]; + cx.ident_of("decode")); // throw an underscore in front to suppress unused variable warnings let blkarg = cx.ident_of("_d"); let blkdecoder = cx.expr_ident(trait_span, blkarg); - let calldecode = cx.expr_call_global(trait_span, recurse, ~[blkdecoder]); + let calldecode = cx.expr_call_global(trait_span, recurse, vec!(blkdecoder)); let lambdadecode = cx.lambda_expr_1(trait_span, calldecode, blkarg); return match *substr.fields { @@ -80,24 +81,24 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, summary, |cx, span, name, field| { cx.expr_method_call(span, blkdecoder, read_struct_field, - ~[cx.expr_str(span, name), + vec!(cx.expr_str(span, name), cx.expr_uint(span, field), - lambdadecode]) + lambdadecode)) }); cx.expr_method_call(trait_span, decoder, cx.ident_of("read_struct"), - ~[ + vec!( cx.expr_str(trait_span, token::get_ident(substr.type_ident)), cx.expr_uint(trait_span, nfields), cx.lambda_expr_1(trait_span, result, blkarg) - ]) + )) } StaticEnum(_, ref fields) => { let variant = cx.ident_of("i"); - let mut arms = ~[]; - let mut variants = ~[]; + let mut arms = Vec::new(); + let mut variants = Vec::new(); let rvariant_arg = cx.ident_of("read_enum_variant_arg"); for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() { @@ -110,29 +111,29 @@ fn decodable_substructure(cx: &mut ExtCtxt, trait_span: Span, |cx, span, _, field| { let idx = cx.expr_uint(span, field); cx.expr_method_call(span, blkdecoder, rvariant_arg, - ~[idx, lambdadecode]) + vec!(idx, lambdadecode)) }); arms.push(cx.arm(v_span, - ~[cx.pat_lit(v_span, cx.expr_uint(v_span, i))], + vec!(cx.pat_lit(v_span, cx.expr_uint(v_span, i))), decoded)); } arms.push(cx.arm_unreachable(trait_span)); let result = cx.expr_match(trait_span, 
cx.expr_ident(trait_span, variant), arms); - let lambda = cx.lambda_expr(trait_span, ~[blkarg, variant], result); + let lambda = cx.lambda_expr(trait_span, vec!(blkarg, variant), result); let variant_vec = cx.expr_vec(trait_span, variants); let result = cx.expr_method_call(trait_span, blkdecoder, cx.ident_of("read_enum_variant"), - ~[variant_vec, lambda]); + vec!(variant_vec, lambda)); cx.expr_method_call(trait_span, decoder, cx.ident_of("read_enum"), - ~[ + vec!( cx.expr_str(trait_span, token::get_ident(substr.type_ident)), cx.lambda_expr_1(trait_span, result, blkarg) - ]) + )) } _ => cx.bug("expected StaticEnum or StaticStruct in deriving(Decodable)") }; diff --git a/src/libsyntax/ext/deriving/default.rs b/src/libsyntax/ext/deriving/default.rs index c5ef86273b65a..8259459f57ab6 100644 --- a/src/libsyntax/ext/deriving/default.rs +++ b/src/libsyntax/ext/deriving/default.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_default(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -21,34 +23,33 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "default", "Default"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "default", "Default")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "default", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[], + args: Vec::new(), ret_ty: Self, inline: true, const_nonmatching: false, combine_substructure: default_substructure - }, - ] + }) }; trait_def.expand(cx, mitem, item, push) } fn default_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr { - let default_ident = ~[ + let default_ident = vec!( cx.ident_of("std"), cx.ident_of("default"), cx.ident_of("Default"), cx.ident_of("default") - ]; - let default_call = |span| cx.expr_call_global(span, default_ident.clone(), ~[]); + ); + let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new()); return match *substr.fields { StaticStruct(_, ref summary) => { diff --git a/src/libsyntax/ext/deriving/encodable.rs b/src/libsyntax/ext/deriving/encodable.rs index ae23013b7ccc6..091ff7b9c90bd 100644 --- a/src/libsyntax/ext/deriving/encodable.rs +++ b/src/libsyntax/ext/deriving/encodable.rs @@ -89,6 +89,8 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use parse::token; +use std::vec_ng::Vec; + pub fn expand_deriving_encodable(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -96,27 +98,26 @@ pub fn expand_deriving_encodable(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new_(~["serialize", "Encodable"], None, - ~[~Literal(Path::new_local("__E"))], true), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new_(vec!("serialize", "Encodable"), None, + vec!(~Literal(Path::new_local("__E"))), true), + additional_bounds: Vec::new(), generics: LifetimeBounds { - lifetimes: ~[], - bounds: ~[("__E", ~[Path::new(~["serialize", "Encoder"])])], + lifetimes: Vec::new(), + bounds: vec!(("__E", vec!(Path::new(vec!("serialize", "Encoder"))))), }, - methods: ~[ + methods: vec!( MethodDef { name: "encode", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[Ptr(~Literal(Path::new_local("__E")), - Borrowed(None, MutMutable))], + args: 
vec!(Ptr(~Literal(Path::new_local("__E")), + Borrowed(None, MutMutable))), ret_ty: nil_ty(), inline: false, const_nonmatching: true, combine_substructure: encodable_substructure, - }, - ] + }) }; trait_def.expand(cx, mitem, item, push) @@ -133,7 +134,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, return match *substr.fields { Struct(ref fields) => { let emit_struct_field = cx.ident_of("emit_struct_field"); - let mut stmts = ~[]; + let mut stmts = Vec::new(); for (i, &FieldInfo { name, self_, @@ -146,13 +147,13 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, token::intern_and_get_ident(format!("_field{}", i)) } }; - let enc = cx.expr_method_call(span, self_, encode, ~[blkencoder]); + let enc = cx.expr_method_call(span, self_, encode, vec!(blkencoder)); let lambda = cx.lambda_expr_1(span, enc, blkarg); let call = cx.expr_method_call(span, blkencoder, emit_struct_field, - ~[cx.expr_str(span, name), + vec!(cx.expr_str(span, name), cx.expr_uint(span, i), - lambda]); + lambda)); stmts.push(cx.stmt_expr(call)); } @@ -160,11 +161,11 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_struct"), - ~[ + vec!( cx.expr_str(trait_span, token::get_ident(substr.type_ident)), cx.expr_uint(trait_span, fields.len()), blk - ]) + )) } EnumMatching(idx, variant, ref fields) => { @@ -175,14 +176,14 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let me = cx.stmt_let(trait_span, false, blkarg, encoder); let encoder = cx.expr_ident(trait_span, blkarg); let emit_variant_arg = cx.ident_of("emit_enum_variant_arg"); - let mut stmts = ~[]; + let mut stmts = Vec::new(); for (i, &FieldInfo { self_, span, .. }) in fields.iter().enumerate() { - let enc = cx.expr_method_call(span, self_, encode, ~[blkencoder]); + let enc = cx.expr_method_call(span, self_, encode, vec!(blkencoder)); let lambda = cx.lambda_expr_1(span, enc, blkarg); let call = cx.expr_method_call(span, blkencoder, emit_variant_arg, - ~[cx.expr_uint(span, i), - lambda]); + vec!(cx.expr_uint(span, i), + lambda)); stmts.push(cx.stmt_expr(call)); } @@ -190,19 +191,19 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, let name = cx.expr_str(trait_span, token::get_ident(variant.node.name)); let call = cx.expr_method_call(trait_span, blkencoder, cx.ident_of("emit_enum_variant"), - ~[name, + vec!(name, cx.expr_uint(trait_span, idx), cx.expr_uint(trait_span, fields.len()), - blk]); + blk)); let blk = cx.lambda_expr_1(trait_span, call, blkarg); let ret = cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_enum"), - ~[ + vec!( cx.expr_str(trait_span, token::get_ident(substr.type_ident)), blk - ]); - cx.expr_block(cx.block(trait_span, ~[me], Some(ret))) + )); + cx.expr_block(cx.block(trait_span, vec!(me), Some(ret))) } _ => cx.bug("expected Struct or EnumMatching in deriving(Encodable)") diff --git a/src/libsyntax/ext/deriving/generic.rs b/src/libsyntax/ext/deriving/generic.rs index 24d4efb1b0e68..1dc474551cf7c 100644 --- a/src/libsyntax/ext/deriving/generic.rs +++ b/src/libsyntax/ext/deriving/generic.rs @@ -188,7 +188,8 @@ use opt_vec; use parse::token::InternedString; use parse::token; -use std::vec; +use std::vec_ng::Vec; +use std::vec_ng; pub use self::ty::*; mod ty; @@ -197,20 +198,19 @@ pub struct TraitDef<'a> { /// The span for the current #[deriving(Foo)] header. 
     span: Span,
-    attributes: ~[ast::Attribute],
+    attributes: Vec<ast::Attribute> ,
     /// Path of the trait, including any type parameters
     path: Path<'a>,
     /// Additional bounds required of any type parameters of the type,
     /// other than the current trait
-    additional_bounds: ~[Ty<'a>],
+    additional_bounds: Vec<Ty<'a>> ,
     /// Any extra lifetimes and/or bounds, e.g. `D: serialize::Decoder`
     generics: LifetimeBounds<'a>,
-    methods: ~[MethodDef<'a>]
-}
+    methods: Vec<MethodDef<'a>> }
 pub struct MethodDef<'a> {
@@ -225,7 +225,7 @@ pub struct MethodDef<'a> {
     explicit_self: Option<Option<PtrTy<'a>>>,
     /// Arguments other than the self argument
-    args: ~[Ty<'a>],
+    args: Vec<Ty<'a>> ,
 
     /// Return type
     ret_ty: Ty<'a>,
@@ -264,39 +264,38 @@ pub struct FieldInfo {
     self_: @Expr,
     /// The expressions corresponding to references to this field in
     /// the other Self arguments.
-    other: ~[@Expr]
-}
+    other: Vec<@Expr> }
 
 /// Fields for a static method
 pub enum StaticFields {
     /// Tuple structs/enum variants like this
-    Unnamed(~[Span]),
+    Unnamed(Vec<Span> ),
     /// Normal structs/struct variants.
-    Named(~[(Ident, Span)])
+    Named(Vec<(Ident, Span)> )
 }
 
 /// A summary of the possible sets of fields. See above for details
 /// and examples
 pub enum SubstructureFields<'a> {
-    Struct(~[FieldInfo]),
+    Struct(Vec<FieldInfo> ),
     /**
     Matching variants of the enum: variant index, ast::Variant,
     fields: the field name is only non-`None` in the case of a struct
     variant.
     */
-    EnumMatching(uint, &'a ast::Variant, ~[FieldInfo]),
+    EnumMatching(uint, &'a ast::Variant, Vec<FieldInfo> ),
 
     /**
     non-matching variants of the enum, [(variant index, ast::Variant,
    [field span, field ident, fields])] (i.e. all fields for self are in the
    first tuple, for other1 are in the second tuple, etc.)
     */
-    EnumNonMatching(&'a [(uint, P<ast::Variant>, ~[(Span, Option<Ident>, @Expr)])]),
+    EnumNonMatching(&'a [(uint, P<ast::Variant>, Vec<(Span, Option<Ident>, @Expr)> )]),
 
     /// A static method where Self is a struct.
     StaticStruct(&'a ast::StructDef, StaticFields),
     /// A static method where Self is an enum.
-    StaticEnum(&'a ast::EnumDef, ~[(Ident, Span, StaticFields)])
+    StaticEnum(&'a ast::EnumDef, Vec<(Ident, Span, StaticFields)> )
 }
@@ -316,7 +315,7 @@ representing each variant: (variant index, ast::Variant instance,
 pub type EnumNonMatchFunc<'a> =
     'a |&mut ExtCtxt,
            Span,
-           &[(uint, P<ast::Variant>, ~[(Span, Option<Ident>, @Expr)])],
+           &[(uint, P<ast::Variant>, Vec<(Span, Option<Ident>, @Expr)> )],
            &[@Expr]|
            -> @Expr;
@@ -360,7 +359,7 @@ impl<'a> TraitDef<'a> {
                            cx: &mut ExtCtxt,
                            type_ident: Ident,
                            generics: &Generics,
-                           methods: ~[@ast::Method]) -> @ast::Item {
+                           methods: Vec<@ast::Method> ) -> @ast::Item {
         let trait_path = self.path.to_path(cx, self.span, type_ident, generics);
         let mut trait_generics = self.generics.to_generics(cx, self.span,
@@ -397,7 +396,7 @@ impl<'a> TraitDef<'a> {
         // Create the type of `self`.
let self_type = cx.ty_path( - cx.path_all(self.span, false, ~[ type_ident ], self_lifetimes, + cx.path_all(self.span, false, vec!( type_ident ), self_lifetimes, opt_vec::take_vec(self_ty_params)), None); let doc_attr = cx.attribute( @@ -412,7 +411,7 @@ impl<'a> TraitDef<'a> { cx.item( self.span, ident, - vec::append(~[doc_attr], self.attributes), + vec_ng::append(vec!(doc_attr), self.attributes.as_slice()), ast::ItemImpl(trait_generics, opt_trait_ref, self_type, methods.map(|x| *x))) } @@ -433,13 +432,15 @@ impl<'a> TraitDef<'a> { self, struct_def, type_ident, - self_args, nonself_args) + self_args.as_slice(), + nonself_args.as_slice()) } else { method_def.expand_struct_method_body(cx, self, struct_def, type_ident, - self_args, nonself_args) + self_args.as_slice(), + nonself_args.as_slice()) }; method_def.create_method(cx, self, @@ -467,13 +468,15 @@ impl<'a> TraitDef<'a> { self, enum_def, type_ident, - self_args, nonself_args) + self_args.as_slice(), + nonself_args.as_slice()) } else { method_def.expand_enum_method_body(cx, self, enum_def, type_ident, - self_args, nonself_args) + self_args.as_slice(), + nonself_args.as_slice()) }; method_def.create_method(cx, self, @@ -524,11 +527,11 @@ impl<'a> MethodDef<'a> { trait_: &TraitDef, type_ident: Ident, generics: &Generics) - -> (ast::ExplicitSelf, ~[@Expr], ~[@Expr], ~[(Ident, P)]) { + -> (ast::ExplicitSelf, Vec<@Expr> , Vec<@Expr> , Vec<(Ident, P)> ) { - let mut self_args = ~[]; - let mut nonself_args = ~[]; - let mut arg_tys = ~[]; + let mut self_args = Vec::new(); + let mut nonself_args = Vec::new(); + let mut arg_tys = Vec::new(); let mut nonstatic = false; let ast_explicit_self = match self.explicit_self { @@ -575,7 +578,7 @@ impl<'a> MethodDef<'a> { type_ident: Ident, generics: &Generics, explicit_self: ast::ExplicitSelf, - arg_types: ~[(Ident, P)], + arg_types: Vec<(Ident, P)> , body: @Expr) -> @ast::Method { // create the generics that aren't for Self let fn_generics = self.generics.to_generics(cx, trait_.span, type_ident, generics); @@ -598,16 +601,16 @@ impl<'a> MethodDef<'a> { let body_block = cx.block_expr(body); let attrs = if self.inline { - ~[ + vec!( cx .attribute(trait_.span, cx .meta_word(trait_.span, InternedString::new( "inline"))) - ] + ) } else { - ~[] + Vec::new() }; // Create the method. @@ -655,9 +658,9 @@ impl<'a> MethodDef<'a> { nonself_args: &[@Expr]) -> @Expr { - let mut raw_fields = ~[]; // ~[[fields of self], + let mut raw_fields = Vec::new(); // ~[[fields of self], // [fields of next Self arg], [etc]] - let mut patterns = ~[]; + let mut patterns = Vec::new(); for i in range(0u, self_args.len()) { let (pat, ident_expr) = trait_.create_struct_pattern(cx, type_ident, struct_def, format!("__self_{}", i), @@ -668,14 +671,15 @@ impl<'a> MethodDef<'a> { // transpose raw_fields let fields = if raw_fields.len() > 0 { - raw_fields[0].iter() - .enumerate() - .map(|(i, &(span, opt_id, field))| { - let other_fields = raw_fields.tail().map(|l| { - match &l[i] { + raw_fields.get(0) + .iter() + .enumerate() + .map(|(i, &(span, opt_id, field))| { + let other_fields = raw_fields.tail().iter().map(|l| { + match l.get(i) { &(_, _, ex) => ex } - }); + }).collect(); FieldInfo { span: span, name: opt_id, @@ -703,7 +707,7 @@ impl<'a> MethodDef<'a> { // matter. 
for (&arg_expr, &pat) in self_args.iter().zip(patterns.iter()) { body = cx.expr_match(trait_.span, arg_expr, - ~[ cx.arm(trait_.span, ~[pat], body) ]) + vec!( cx.arm(trait_.span, vec!(pat), body) )) } body } @@ -759,7 +763,7 @@ impl<'a> MethodDef<'a> { self_args: &[@Expr], nonself_args: &[@Expr]) -> @Expr { - let mut matches = ~[]; + let mut matches = Vec::new(); self.build_enum_match(cx, trait_, enum_def, type_ident, self_args, nonself_args, None, &mut matches, 0) @@ -795,8 +799,8 @@ impl<'a> MethodDef<'a> { self_args: &[@Expr], nonself_args: &[@Expr], matching: Option, - matches_so_far: &mut ~[(uint, P, - ~[(Span, Option, @Expr)])], + matches_so_far: &mut Vec<(uint, P, + Vec<(Span, Option, @Expr)> )> , match_count: uint) -> @Expr { if match_count == self_args.len() { // we've matched against all arguments, so make the final @@ -822,17 +826,17 @@ impl<'a> MethodDef<'a> { Some(variant_index) => { // `ref` inside let matches is buggy. Causes havoc wih rusc. // let (variant_index, ref self_vec) = matches_so_far[0]; - let (variant, self_vec) = match matches_so_far[0] { - (_, v, ref s) => (v, s) + let (variant, self_vec) = match matches_so_far.get(0) { + &(_, v, ref s) => (v, s) }; - let mut enum_matching_fields = vec::from_elem(self_vec.len(), ~[]); + let mut enum_matching_fields = Vec::from_elem(self_vec.len(), Vec::new()); for triple in matches_so_far.tail().iter() { match triple { &(_, _, ref other_fields) => { for (i, &(_, _, e)) in other_fields.iter().enumerate() { - enum_matching_fields[i].push(e); + enum_matching_fields.get_mut(i).push(e); } } } @@ -851,7 +855,7 @@ impl<'a> MethodDef<'a> { substructure = EnumMatching(variant_index, variant, field_tuples); } None => { - substructure = EnumNonMatching(*matches_so_far); + substructure = EnumNonMatching(matches_so_far.as_slice()); } } self.call_substructure_method(cx, trait_, type_ident, @@ -865,7 +869,7 @@ impl<'a> MethodDef<'a> { format!("__arg_{}", match_count) }; - let mut arms = ~[]; + let mut arms = Vec::new(); // the code for nonmatching variants only matters when // we've seen at least one other variant already @@ -879,7 +883,7 @@ impl<'a> MethodDef<'a> { }; // matching-variant match - let variant = enum_def.variants[index]; + let variant = *enum_def.variants.get(index); let (pattern, idents) = trait_.create_enum_variant_pattern(cx, variant, current_match_str, @@ -895,7 +899,7 @@ impl<'a> MethodDef<'a> { matches_so_far, match_count + 1); matches_so_far.pop().unwrap(); - arms.push(cx.arm(trait_.span, ~[ pattern ], arm_expr)); + arms.push(cx.arm(trait_.span, vec!( pattern ), arm_expr)); if enum_def.variants.len() > 1 { let e = &EnumNonMatching(&[]); @@ -904,7 +908,7 @@ impl<'a> MethodDef<'a> { e); let wild_arm = cx.arm( trait_.span, - ~[ cx.pat_wild(trait_.span) ], + vec!( cx.pat_wild(trait_.span) ), wild_expr); arms.push(wild_arm); } @@ -933,7 +937,7 @@ impl<'a> MethodDef<'a> { match_count + 1); matches_so_far.pop().unwrap(); - let arm = cx.arm(trait_.span, ~[ pattern ], arm_expr); + let arm = cx.arm(trait_.span, vec!( pattern ), arm_expr); arms.push(arm); } } @@ -997,8 +1001,8 @@ impl<'a> TraitDef<'a> { fn summarise_struct(&self, cx: &mut ExtCtxt, struct_def: &StructDef) -> StaticFields { - let mut named_idents = ~[]; - let mut just_spans = ~[]; + let mut named_idents = Vec::new(); + let mut just_spans = Vec::new(); for field in struct_def.fields.iter(){ let sp = self.set_expn_info(cx, field.span); match field.node.kind { @@ -1020,9 +1024,9 @@ impl<'a> TraitDef<'a> { fn create_subpatterns(&self, cx: &mut ExtCtxt, - field_paths: 
~[ast::Path], + field_paths: Vec , mutbl: ast::Mutability) - -> ~[@ast::Pat] { + -> Vec<@ast::Pat> { field_paths.map(|path| { cx.pat(path.span, ast::PatIdent(ast::BindByRef(mutbl), (*path).clone(), None)) @@ -1035,18 +1039,18 @@ impl<'a> TraitDef<'a> { struct_def: &StructDef, prefix: &str, mutbl: ast::Mutability) - -> (@ast::Pat, ~[(Span, Option, @Expr)]) { + -> (@ast::Pat, Vec<(Span, Option, @Expr)> ) { if struct_def.fields.is_empty() { return ( cx.pat_ident_binding_mode( self.span, struct_ident, ast::BindByValue(ast::MutImmutable)), - ~[]); + Vec::new()); } - let matching_path = cx.path(self.span, ~[ struct_ident ]); + let matching_path = cx.path(self.span, vec!( struct_ident )); - let mut paths = ~[]; - let mut ident_expr = ~[]; + let mut paths = Vec::new(); + let mut ident_expr = Vec::new(); let mut struct_type = Unknown; for (i, struct_field) in struct_def.fields.iter().enumerate() { @@ -1096,20 +1100,20 @@ impl<'a> TraitDef<'a> { variant: &ast::Variant, prefix: &str, mutbl: ast::Mutability) - -> (@ast::Pat, ~[(Span, Option, @Expr)]) { + -> (@ast::Pat, Vec<(Span, Option, @Expr)> ) { let variant_ident = variant.node.name; match variant.node.kind { ast::TupleVariantKind(ref variant_args) => { if variant_args.is_empty() { return (cx.pat_ident_binding_mode(variant.span, variant_ident, ast::BindByValue(ast::MutImmutable)), - ~[]); + Vec::new()); } let matching_path = cx.path_ident(variant.span, variant_ident); - let mut paths = ~[]; - let mut ident_expr = ~[]; + let mut paths = Vec::new(); + let mut ident_expr = Vec::new(); for (i, va) in variant_args.iter().enumerate() { let sp = self.set_expn_info(cx, va.ty.span); let path = cx.path_ident(sp, cx.ident_of(format!("{}_{}", prefix, i))); @@ -1151,11 +1155,19 @@ pub fn cs_fold(use_foldl: bool, EnumMatching(_, _, ref all_fields) | Struct(ref all_fields) => { if use_foldl { all_fields.iter().fold(base, |old, field| { - f(cx, field.span, old, field.self_, field.other) + f(cx, + field.span, + old, + field.self_, + field.other.as_slice()) }) } else { all_fields.rev_iter().fold(base, |old, field| { - f(cx, field.span, old, field.self_, field.other) + f(cx, + field.span, + old, + field.self_, + field.other.as_slice()) }) } }, @@ -1179,7 +1191,7 @@ f(cx, span, ~[self_1.method(__arg_1_1, __arg_2_1), ~~~ */ #[inline] -pub fn cs_same_method(f: |&mut ExtCtxt, Span, ~[@Expr]| -> @Expr, +pub fn cs_same_method(f: |&mut ExtCtxt, Span, Vec<@Expr> | -> @Expr, enum_nonmatch_f: EnumNonMatchFunc, cx: &mut ExtCtxt, trait_span: Span, diff --git a/src/libsyntax/ext/deriving/hash.rs b/src/libsyntax/ext/deriving/hash.rs index acae4f9efa616..1d6cfab120d2d 100644 --- a/src/libsyntax/ext/deriving/hash.rs +++ b/src/libsyntax/ext/deriving/hash.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_hash(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -22,23 +24,23 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt, let hash_trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "hash", "Hash"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "hash", "Hash")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "hash", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[Ptr(~Literal(Path::new(~["std", "hash", "sip", "SipState"])), - Borrowed(None, MutMutable))], + args: vec!(Ptr(~Literal(Path::new(vec!("std", "hash", 
"sip", "SipState"))), + Borrowed(None, MutMutable))), ret_ty: nil_ty(), inline: true, const_nonmatching: false, combine_substructure: hash_substructure } - ] + ) }; hash_trait_def.expand(cx, mitem, item, push); @@ -51,10 +53,10 @@ fn hash_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) }; let hash_ident = substr.method_ident; let call_hash = |span, thing_expr| { - let expr = cx.expr_method_call(span, thing_expr, hash_ident, ~[state_expr]); + let expr = cx.expr_method_call(span, thing_expr, hash_ident, vec!(state_expr)); cx.stmt_expr(expr) }; - let mut stmts = ~[]; + let mut stmts = Vec::new(); let fields = match *substr.fields { Struct(ref fs) => fs, diff --git a/src/libsyntax/ext/deriving/primitive.rs b/src/libsyntax/ext/deriving/primitive.rs index 03192cc1cd2f2..ecd042eb172ef 100644 --- a/src/libsyntax/ext/deriving/primitive.rs +++ b/src/libsyntax/ext/deriving/primitive.rs @@ -16,6 +16,8 @@ use ext::build::AstBuilder; use ext::deriving::generic::*; use parse::token::InternedString; +use std::vec_ng::Vec; + pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -23,21 +25,20 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "num", "FromPrimitive"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "num", "FromPrimitive")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "from_i64", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[ - Literal(Path::new(~["i64"])), - ], - ret_ty: Literal(Path::new_(~["std", "option", "Option"], + args: vec!( + Literal(Path::new(vec!("i64")))), + ret_ty: Literal(Path::new_(vec!("std", "option", "Option"), None, - ~[~Self], + vec!(~Self), true)), // liable to cause code-bloat inline: true, @@ -48,19 +49,17 @@ pub fn expand_deriving_from_primitive(cx: &mut ExtCtxt, name: "from_u64", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[ - Literal(Path::new(~["u64"])), - ], - ret_ty: Literal(Path::new_(~["std", "option", "Option"], + args: vec!( + Literal(Path::new(vec!("u64")))), + ret_ty: Literal(Path::new_(vec!("std", "option", "Option"), None, - ~[~Self], + vec!(~Self), true)), // liable to cause code-bloat inline: true, const_nonmatching: false, combine_substructure: |c, s, sub| cs_from("u64", c, s, sub), - }, - ] + }) }; trait_def.expand(cx, mitem, item, push) @@ -84,7 +83,7 @@ fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure return cx.expr_fail(trait_span, InternedString::new("")); } - let mut arms = ~[]; + let mut arms = Vec::new(); for variant in enum_def.variants.iter() { match variant.node.kind { @@ -109,7 +108,7 @@ fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure // arm for `_ if $guard => $body` let arm = ast::Arm { - pats: ~[cx.pat_wild(span)], + pats: vec!(cx.pat_wild(span)), guard: Some(guard), body: cx.block_expr(body), }; @@ -128,7 +127,7 @@ fn cs_from(name: &str, cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure // arm for `_ => None` let arm = ast::Arm { - pats: ~[cx.pat_wild(trait_span)], + pats: vec!(cx.pat_wild(trait_span)), guard: None, body: cx.block_expr(cx.expr_none(trait_span)), }; diff --git a/src/libsyntax/ext/deriving/rand.rs b/src/libsyntax/ext/deriving/rand.rs index 6efe480159280..da9679eb65578 100644 --- a/src/libsyntax/ext/deriving/rand.rs +++ 
b/src/libsyntax/ext/deriving/rand.rs @@ -16,6 +16,8 @@ use ext::build::{AstBuilder}; use ext::deriving::generic::*; use opt_vec; +use std::vec_ng::Vec; + pub fn expand_deriving_rand(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -23,48 +25,48 @@ pub fn expand_deriving_rand(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "rand", "Rand"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "rand", "Rand")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "rand", generics: LifetimeBounds { - lifetimes: ~[], - bounds: ~[("R", - ~[ Path::new(~["std", "rand", "Rng"]) ])] + lifetimes: Vec::new(), + bounds: vec!(("R", + vec!( Path::new(vec!("std", "rand", "Rng")) ))) }, explicit_self: None, - args: ~[ + args: vec!( Ptr(~Literal(Path::new_local("R")), Borrowed(None, ast::MutMutable)) - ], + ), ret_ty: Self, inline: false, const_nonmatching: false, combine_substructure: rand_substructure } - ] + ) }; trait_def.expand(cx, mitem, item, push) } fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr { let rng = match substr.nonself_args { - [rng] => ~[ rng ], + [rng] => vec!( rng ), _ => cx.bug("Incorrect number of arguments to `rand` in `deriving(Rand)`") }; - let rand_ident = ~[ + let rand_ident = vec!( cx.ident_of("std"), cx.ident_of("rand"), cx.ident_of("Rand"), cx.ident_of("rand") - ]; + ); let rand_call = |cx: &mut ExtCtxt, span| { cx.expr_call_global(span, rand_ident.clone(), - ~[ rng[0] ]) + vec!( *rng.get(0) )) }; return match *substr.fields { @@ -84,13 +86,13 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) true, rand_ident.clone(), opt_vec::Empty, - ~[]); + Vec::new()); let rand_name = cx.expr_path(rand_name); // ::std::rand::Rand::rand(rng) let rv_call = cx.expr_call(trait_span, rand_name, - ~[ rng[0] ]); + vec!( *rng.get(0) )); // need to specify the uint-ness of the random number let uint_ty = cx.ty_ident(trait_span, cx.ident_of("uint")); @@ -113,15 +115,15 @@ fn rand_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) let pat = cx.pat_lit(v_span, i_expr); let thing = rand_thing(cx, v_span, ident, summary, |cx, sp| rand_call(cx, sp)); - cx.arm(v_span, ~[ pat ], thing) - }).collect::<~[ast::Arm]>(); + cx.arm(v_span, vec!( pat ), thing) + }).collect:: >(); // _ => {} at the end. 
Should never occur arms.push(cx.arm_unreachable(trait_span)); let match_expr = cx.expr_match(trait_span, rand_variant, arms); - let block = cx.block(trait_span, ~[ let_statement ], Some(match_expr)); + let block = cx.block(trait_span, vec!( let_statement ), Some(match_expr)); cx.expr_block(block) } _ => cx.bug("Non-static method in `deriving(Rand)`") diff --git a/src/libsyntax/ext/deriving/show.rs b/src/libsyntax/ext/deriving/show.rs index 4b9925c8d9f3e..51399d8efabe9 100644 --- a/src/libsyntax/ext/deriving/show.rs +++ b/src/libsyntax/ext/deriving/show.rs @@ -19,6 +19,7 @@ use ext::deriving::generic::*; use parse::token; use collections::HashMap; +use std::vec_ng::Vec; pub fn expand_deriving_show(cx: &mut ExtCtxt, span: Span, @@ -26,27 +27,27 @@ pub fn expand_deriving_show(cx: &mut ExtCtxt, item: @Item, push: |@Item|) { // &mut ::std::fmt::Formatter - let fmtr = Ptr(~Literal(Path::new(~["std", "fmt", "Formatter"])), + let fmtr = Ptr(~Literal(Path::new(vec!("std", "fmt", "Formatter"))), Borrowed(None, ast::MutMutable)); let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "fmt", "Show"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "fmt", "Show")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "fmt", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[fmtr], - ret_ty: Literal(Path::new(~["std", "fmt", "Result"])), + args: vec!(fmtr), + ret_ty: Literal(Path::new(vec!("std", "fmt", "Result"))), inline: false, const_nonmatching: false, combine_substructure: show_substructure } - ] + ) }; trait_def.expand(cx, mitem, item, push) } @@ -70,7 +71,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let mut format_string = token::get_ident(name).get().to_owned(); // the internal fields we're actually formatting - let mut exprs = ~[]; + let mut exprs = Vec::new(); // Getting harder... making the format string: match *substr.fields { @@ -79,7 +80,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, EnumMatching(_, _, ref fields) if fields.len() == 0 => {} Struct(ref fields) | EnumMatching(_, _, ref fields) => { - if fields[0].name.is_none() { + if fields.get(0).name.is_none() { // tuple struct/"normal" variant format_string.push_str("("); @@ -124,10 +125,10 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, let formatter = substr.nonself_args[0]; let buf = cx.expr_field_access(span, formatter, cx.ident_of("buf")); - let std_write = ~[cx.ident_of("std"), cx.ident_of("fmt"), cx.ident_of("write")]; + let std_write = vec!(cx.ident_of("std"), cx.ident_of("fmt"), cx.ident_of("write")); let args = cx.ident_of("__args"); - let write_call = cx.expr_call_global(span, std_write, ~[buf, cx.expr_ident(span, args)]); - let format_closure = cx.lambda_expr(span, ~[args], write_call); + let write_call = cx.expr_call_global(span, std_write, vec!(buf, cx.expr_ident(span, args))); + let format_closure = cx.lambda_expr(span, vec!(args), write_call); let s = token::intern_and_get_ident(format_string); let format_string = cx.expr_str(span, s); @@ -135,6 +136,6 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, // phew, not our responsibility any more! 
format::expand_preparsed_format_args(cx, span, format_closure, - format_string, exprs, ~[], + format_string, exprs, Vec::new(), HashMap::new()) } diff --git a/src/libsyntax/ext/deriving/ty.rs b/src/libsyntax/ext/deriving/ty.rs index 1d3dd9185caae..b88cd117911c7 100644 --- a/src/libsyntax/ext/deriving/ty.rs +++ b/src/libsyntax/ext/deriving/ty.rs @@ -21,6 +21,8 @@ use codemap::{Span,respan}; use opt_vec; use opt_vec::OptVec; +use std::vec_ng::Vec; + /// The types of pointers pub enum PtrTy<'a> { Send, // ~ @@ -30,22 +32,22 @@ pub enum PtrTy<'a> { /// A path, e.g. `::std::option::Option::` (global). Has support /// for type parameters and a lifetime. pub struct Path<'a> { - path: ~[&'a str], + path: Vec<&'a str> , lifetime: Option<&'a str>, - params: ~[~Ty<'a>], + params: Vec<~Ty<'a>> , global: bool } impl<'a> Path<'a> { - pub fn new<'r>(path: ~[&'r str]) -> Path<'r> { - Path::new_(path, None, ~[], true) + pub fn new<'r>(path: Vec<&'r str> ) -> Path<'r> { + Path::new_(path, None, Vec::new(), true) } pub fn new_local<'r>(path: &'r str) -> Path<'r> { - Path::new_(~[ path ], None, ~[], false) + Path::new_(vec!( path ), None, Vec::new(), false) } - pub fn new_<'r>(path: ~[&'r str], + pub fn new_<'r>(path: Vec<&'r str> , lifetime: Option<&'r str>, - params: ~[~Ty<'r>], + params: Vec<~Ty<'r>> , global: bool) -> Path<'r> { Path { @@ -87,7 +89,7 @@ pub enum Ty<'a> { // parameter, and things like `int` Literal(Path<'a>), // includes nil - Tuple(~[Ty<'a>]) + Tuple(Vec> ) } pub fn borrowed_ptrty<'r>() -> PtrTy<'r> { @@ -106,7 +108,7 @@ pub fn borrowed_self<'r>() -> Ty<'r> { } pub fn nil_ty() -> Ty<'static> { - Tuple(~[]) + Tuple(Vec::new()) } fn mk_lifetime(cx: &ExtCtxt, span: Span, lt: &Option<&str>) -> Option { @@ -172,7 +174,7 @@ impl<'a> Ty<'a> { }); let lifetimes = self_generics.lifetimes.clone(); - cx.path_all(span, false, ~[self_ty], lifetimes, + cx.path_all(span, false, vec!(self_ty), lifetimes, opt_vec::take_vec(self_params)) } Literal(ref p) => { @@ -188,14 +190,14 @@ impl<'a> Ty<'a> { fn mk_ty_param(cx: &ExtCtxt, span: Span, name: &str, bounds: &[Path], self_ident: Ident, self_generics: &Generics) -> ast::TyParam { let bounds = opt_vec::from( - bounds.map(|b| { + bounds.iter().map(|b| { let path = b.to_path(cx, span, self_ident, self_generics); cx.typarambound(path) - })); + }).collect()); cx.typaram(cx.ident_of(name), bounds, None) } -fn mk_generics(lifetimes: ~[ast::Lifetime], ty_params: ~[ast::TyParam]) -> Generics { +fn mk_generics(lifetimes: Vec , ty_params: Vec ) -> Generics { Generics { lifetimes: opt_vec::from(lifetimes), ty_params: opt_vec::from(ty_params) @@ -204,14 +206,14 @@ fn mk_generics(lifetimes: ~[ast::Lifetime], ty_params: ~[ast::TyParam]) -> Gene /// Lifetimes and bounds on type parameters pub struct LifetimeBounds<'a> { - lifetimes: ~[&'a str], - bounds: ~[(&'a str, ~[Path<'a>])] + lifetimes: Vec<&'a str>, + bounds: Vec<(&'a str, Vec>)>, } impl<'a> LifetimeBounds<'a> { pub fn empty() -> LifetimeBounds<'static> { LifetimeBounds { - lifetimes: ~[], bounds: ~[] + lifetimes: Vec::new(), bounds: Vec::new() } } pub fn to_generics(&self, @@ -226,7 +228,12 @@ impl<'a> LifetimeBounds<'a> { let ty_params = self.bounds.map(|t| { match t { &(ref name, ref bounds) => { - mk_ty_param(cx, span, *name, *bounds, self_ty, self_generics) + mk_ty_param(cx, + span, + *name, + bounds.as_slice(), + self_ty, + self_generics) } } }); diff --git a/src/libsyntax/ext/deriving/zero.rs b/src/libsyntax/ext/deriving/zero.rs index 90f4fa0eb5897..98c0ec9d07238 100644 --- 
a/src/libsyntax/ext/deriving/zero.rs +++ b/src/libsyntax/ext/deriving/zero.rs @@ -14,6 +14,8 @@ use ext::base::ExtCtxt; use ext::build::AstBuilder; use ext::deriving::generic::*; +use std::vec_ng::Vec; + pub fn expand_deriving_zero(cx: &mut ExtCtxt, span: Span, mitem: @MetaItem, @@ -21,16 +23,16 @@ pub fn expand_deriving_zero(cx: &mut ExtCtxt, push: |@Item|) { let trait_def = TraitDef { span: span, - attributes: ~[], - path: Path::new(~["std", "num", "Zero"]), - additional_bounds: ~[], + attributes: Vec::new(), + path: Path::new(vec!("std", "num", "Zero")), + additional_bounds: Vec::new(), generics: LifetimeBounds::empty(), - methods: ~[ + methods: vec!( MethodDef { name: "zero", generics: LifetimeBounds::empty(), explicit_self: None, - args: ~[], + args: Vec::new(), ret_ty: Self, inline: true, const_nonmatching: false, @@ -40,8 +42,8 @@ pub fn expand_deriving_zero(cx: &mut ExtCtxt, name: "is_zero", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: ~[], - ret_ty: Literal(Path::new(~["bool"])), + args: Vec::new(), + ret_ty: Literal(Path::new(vec!("bool"))), inline: true, const_nonmatching: false, combine_substructure: |cx, span, substr| { @@ -52,19 +54,19 @@ pub fn expand_deriving_zero(cx: &mut ExtCtxt, cx, span, substr) } } - ] + ) }; trait_def.expand(cx, mitem, item, push) } fn zero_substructure(cx: &mut ExtCtxt, trait_span: Span, substr: &Substructure) -> @Expr { - let zero_ident = ~[ + let zero_ident = vec!( cx.ident_of("std"), cx.ident_of("num"), cx.ident_of("Zero"), cx.ident_of("zero") - ]; - let zero_call = |span| cx.expr_call_global(span, zero_ident.clone(), ~[]); + ); + let zero_call = |span| cx.expr_call_global(span, zero_ident.clone(), Vec::new()); return match *substr.fields { StaticStruct(_, ref summary) => { diff --git a/src/libsyntax/ext/env.rs b/src/libsyntax/ext/env.rs index aacb2a7408738..b0b5fa26015cc 100644 --- a/src/libsyntax/ext/env.rs +++ b/src/libsyntax/ext/env.rs @@ -19,6 +19,7 @@ use codemap::Span; use ext::base::*; use ext::base; use ext::build::AstBuilder; +use opt_vec; use parse::token; use std::os; @@ -31,8 +32,30 @@ pub fn expand_option_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) }; let e = match os::getenv(var) { - None => quote_expr!(cx, ::std::option::None::<&'static str>), - Some(s) => quote_expr!(cx, ::std::option::Some($s)) + None => { + cx.expr_path(cx.path_all(sp, + true, + vec!(cx.ident_of("std"), + cx.ident_of("option"), + cx.ident_of("None")), + opt_vec::Empty, + vec!(cx.ty_rptr(sp, + cx.ty_ident(sp, + cx.ident_of("str")), + Some(cx.lifetime(sp, + cx.ident_of( + "static").name)), + ast::MutImmutable)))) + } + Some(s) => { + cx.expr_call_global(sp, + vec!(cx.ident_of("std"), + cx.ident_of("option"), + cx.ident_of("Some")), + vec!(cx.expr_str(sp, + token::intern_and_get_ident( + s)))) + } }; MRExpr(e) } @@ -48,7 +71,9 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) Some(exprs) => exprs }; - let var = match expr_to_str(cx, exprs[0], "expected string literal") { + let var = match expr_to_str(cx, + *exprs.get(0), + "expected string literal") { None => return MacResult::dummy_expr(sp), Some((v, _style)) => v }; @@ -59,7 +84,7 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) var)) } 2 => { - match expr_to_str(cx, exprs[1], "expected string literal") { + match expr_to_str(cx, *exprs.get(1), "expected string literal") { None => return MacResult::dummy_expr(sp), Some((s, _style)) => s } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 
b49f9fb3a384d..b162e17f53de1 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -31,6 +31,7 @@ use util::small_vector::SmallVector; use std::cast; use std::unstable::dynamic_lib::DynamicLibrary; use std::os; +use std::vec_ng::Vec; pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { match e.node { @@ -53,7 +54,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { // let compilation continue return MacResult::raw_dummy_expr(e.span); } - let extname = pth.segments[0].identifier; + let extname = pth.segments.get(0).identifier; let extnamestr = token::get_ident(extname); // leaving explicit deref here to highlight unbox op: let marked_after = match fld.extsbox.find(&extname.name) { @@ -77,7 +78,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { }); let fm = fresh_mark(); // mark before: - let marked_before = mark_tts(*tts,fm); + let marked_before = mark_tts(tts.as_slice(), fm); // The span that we pass to the expanders we want to // be the root of the call stack. That's the most @@ -87,7 +88,7 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { let expanded = match expandfun.expand(fld.cx, mac_span.call_site, - marked_before) { + marked_before.as_slice()) { MRExpr(e) => e, MRAny(any_macro) => any_macro.make_expr(), _ => { @@ -169,21 +170,24 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { let none_arm = { let break_expr = fld.cx.expr(span, ast::ExprBreak(opt_ident)); let none_pat = fld.cx.pat_ident(span, none_ident); - fld.cx.arm(span, ~[none_pat], break_expr) + fld.cx.arm(span, vec!(none_pat), break_expr) }; // `Some() => ` let some_arm = fld.cx.arm(span, - ~[fld.cx.pat_enum(span, some_path, ~[src_pat])], + vec!(fld.cx.pat_enum(span, some_path, vec!(src_pat))), fld.cx.expr_block(src_loop_block)); // `match i.next() { ... }` let match_expr = { let next_call_expr = - fld.cx.expr_method_call(span, fld.cx.expr_path(local_path), next_ident, ~[]); + fld.cx.expr_method_call(span, + fld.cx.expr_path(local_path), + next_ident, + Vec::new()); - fld.cx.expr_match(span, next_call_expr, ~[none_arm, some_arm]) + fld.cx.expr_match(span, next_call_expr, vec!(none_arm, some_arm)) }; // ['ident:] loop { ... } @@ -196,8 +200,8 @@ pub fn expand_expr(e: @ast::Expr, fld: &mut MacroExpander) -> @ast::Expr { // `match &mut { i => loop { ... } }` let discrim = fld.cx.expr_mut_addr_of(span, src_expr); let i_pattern = fld.cx.pat_ident(span, local_ident); - let arm = fld.cx.arm(span, ~[i_pattern], loop_expr); - fld.cx.expr_match(span, discrim, ~[arm]) + let arm = fld.cx.arm(span, vec!(i_pattern), loop_expr); + fld.cx.expr_match(span, discrim, vec!(arm)) } ast::ExprLoop(loop_block, opt_ident) => { @@ -221,7 +225,7 @@ fn rename_loop_label(opt_ident: Option, let new_label = fresh_name(&label); let rename = (label, new_label); fld.extsbox.info().pending_renames.push(rename); - let mut pending_renames = ~[rename]; + let mut pending_renames = vec!(rename); let mut rename_fld = renames_to_fold(&mut pending_renames); (Some(rename_fld.fold_ident(label)), rename_fld.fold_block(loop_block)) @@ -276,7 +280,7 @@ pub fn expand_item(it: @ast::Item, fld: &mut MacroExpander) ast::ItemMac(..) 
=> expand_item_mac(it, fld), ast::ItemMod(_) | ast::ItemForeignMod(_) => { fld.cx.mod_push(it.ident); - let macro_escape = contains_macro_escape(it.attrs); + let macro_escape = contains_macro_escape(it.attrs.as_slice()); let result = with_exts_frame!(fld.extsbox, macro_escape, noop_fold_item(it, fld)); @@ -309,7 +313,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) _ => fld.cx.span_bug(it.span, "invalid item macro invocation") }; - let extname = pth.segments[0].identifier; + let extname = pth.segments.get(0).identifier; let extnamestr = token::get_ident(extname); let fm = fresh_mark(); let expanded = match fld.extsbox.find(&extname.name) { @@ -339,8 +343,8 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) } }); // mark before expansion: - let marked_before = mark_tts(tts,fm); - expander.expand(fld.cx, it.span, marked_before) + let marked_before = mark_tts(tts.as_slice(), fm); + expander.expand(fld.cx, it.span, marked_before.as_slice()) } Some(&IdentTT(ref expander, span)) => { if it.ident.name == parse::token::special_idents::invalid.name { @@ -358,7 +362,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) } }); // mark before expansion: - let marked_tts = mark_tts(tts,fm); + let marked_tts = mark_tts(tts.as_slice(), fm); expander.expand(fld.cx, it.span, it.ident, marked_tts) } _ => { @@ -391,7 +395,7 @@ pub fn expand_item_mac(it: @ast::Item, fld: &mut MacroExpander) // yikes... no idea how to apply the mark to this. I'm afraid // we're going to have to wait-and-see on this one. fld.extsbox.insert(intern(name), ext); - if attr::contains_name(it.attrs, "macro_export") { + if attr::contains_name(it.attrs.as_slice(), "macro_export") { SmallVector::one(it) } else { SmallVector::zero() @@ -504,7 +508,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { fld.cx.span_err(pth.span, "expected macro name without module separators"); return SmallVector::zero(); } - let extname = pth.segments[0].identifier; + let extname = pth.segments.get(0).identifier; let extnamestr = token::get_ident(extname); let marked_after = match fld.extsbox.find(&extname.name) { None => { @@ -523,7 +527,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { }); let fm = fresh_mark(); // mark before expansion: - let marked_tts = mark_tts(tts,fm); + let marked_tts = mark_tts(tts.as_slice(), fm); // See the comment in expand_expr for why we want the original span, // not the current mac.span. @@ -531,7 +535,7 @@ pub fn expand_stmt(s: &Stmt, fld: &mut MacroExpander) -> SmallVector<@Stmt> { let expanded = match expandfun.expand(fld.cx, mac_span.call_site, - marked_tts) { + marked_tts.as_slice()) { MRExpr(e) => { @codemap::Spanned { node: StmtExpr(e, ast::DUMMY_NODE_ID), @@ -607,10 +611,10 @@ fn expand_non_macro_stmt(s: &Stmt, fld: &mut MacroExpander) // oh dear heaven... this is going to include the enum // names, as well... but that should be okay, as long as // the new names are gensyms for the old ones. 
- let mut name_finder = new_name_finder(~[]); + let mut name_finder = new_name_finder(Vec::new()); name_finder.visit_pat(expanded_pat,()); // generate fresh names, push them to a new pending list - let mut new_pending_renames = ~[]; + let mut new_pending_renames = Vec::new(); for ident in name_finder.ident_accumulator.iter() { let new_name = fresh_name(ident); new_pending_renames.push((*ident,new_name)); @@ -657,7 +661,7 @@ fn expand_non_macro_stmt(s: &Stmt, fld: &mut MacroExpander) // array (passed in to the traversal) #[deriving(Clone)] struct NewNameFinderContext { - ident_accumulator: ~[ast::Ident], + ident_accumulator: Vec , } impl Visitor<()> for NewNameFinderContext { @@ -676,7 +680,8 @@ impl Visitor<()> for NewNameFinderContext { span: _, segments: ref segments } if segments.len() == 1 => { - self.ident_accumulator.push(segments[0].identifier) + self.ident_accumulator.push(segments.get(0) + .identifier) } // I believe these must be enums... _ => () @@ -700,7 +705,7 @@ impl Visitor<()> for NewNameFinderContext { // return a visitor that extracts the pat_ident paths // from a given thingy and puts them in a mutable // array (passed in to the traversal) -pub fn new_name_finder(idents: ~[ast::Ident]) -> NewNameFinderContext { +pub fn new_name_finder(idents: Vec ) -> NewNameFinderContext { NewNameFinderContext { ident_accumulator: idents, } @@ -843,7 +848,7 @@ impl Folder for Marker { let macro = match m.node { MacInvocTT(ref path, ref tts, ctxt) => { MacInvocTT(self.fold_path(path), - fold_tts(*tts, self), + fold_tts(tts.as_slice(), self), new_mark(self.mark, ctxt)) } }; @@ -860,7 +865,7 @@ fn new_mark_folder(m: Mrk) -> Marker { } // apply a given mark to the given token trees. Used prior to expansion of a macro. -fn mark_tts(tts: &[TokenTree], m: Mrk) -> ~[TokenTree] { +fn mark_tts(tts: &[TokenTree], m: Mrk) -> Vec { fold_tts(tts, &mut new_mark_folder(m)) } @@ -912,12 +917,14 @@ mod test { use visit; use visit::Visitor; + use std::vec_ng::Vec; + // a visitor that extracts the paths // from a given thingy and puts them in a mutable // array (passed in to the traversal) #[deriving(Clone)] struct NewPathExprFinderContext { - path_accumulator: ~[ast::Path], + path_accumulator: Vec , } impl Visitor<()> for NewPathExprFinderContext { @@ -941,7 +948,7 @@ mod test { // return a visitor that extracts the paths // from a given pattern and puts them in a mutable // array (passed in to the traversal) - pub fn new_path_finder(paths: ~[ast::Path]) -> NewPathExprFinderContext { + pub fn new_path_finder(paths: Vec ) -> NewPathExprFinderContext { NewPathExprFinderContext { path_accumulator: paths } @@ -954,7 +961,7 @@ mod test { fail!("lolwut") } - fn get_exported_macros(&mut self, _: ast::CrateNum) -> ~[~str] { + fn get_exported_macros(&mut self, _: ast::CrateNum) -> Vec<~str> { fail!("lolwut") } @@ -975,7 +982,7 @@ mod test { let crate_ast = parse::parse_crate_from_source_str( ~"", src, - ~[],sess); + Vec::new(),sess); // should fail: let mut loader = ErrLoader; expand_crate(sess,&mut loader,crate_ast); @@ -990,7 +997,7 @@ mod test { let crate_ast = parse::parse_crate_from_source_str( ~"", src, - ~[],sess); + Vec::new(),sess); // should fail: let mut loader = ErrLoader; expand_crate(sess,&mut loader,crate_ast); @@ -1004,7 +1011,7 @@ mod test { let crate_ast = parse::parse_crate_from_source_str( ~"", src, - ~[], sess); + Vec::new(), sess); // should fail: let mut loader = ErrLoader; expand_crate(sess, &mut loader, crate_ast); @@ -1014,10 +1021,10 @@ mod test { let attr1 = make_dummy_attr ("foo"); let 
attr2 = make_dummy_attr ("bar"); let escape_attr = make_dummy_attr ("macro_escape"); - let attrs1 = ~[attr1, escape_attr, attr2]; - assert_eq!(contains_macro_escape (attrs1),true); - let attrs2 = ~[attr1,attr2]; - assert_eq!(contains_macro_escape (attrs2),false); + let attrs1 = vec!(attr1, escape_attr, attr2); + assert_eq!(contains_macro_escape(attrs1.as_slice()),true); + let attrs2 = vec!(attr1,attr2); + assert_eq!(contains_macro_escape(attrs2.as_slice()),false); } // make a MetaWord outer attribute with the given name @@ -1082,48 +1089,30 @@ mod test { // in principle, you might want to control this boolean on a per-varref basis, // but that would make things even harder to understand, and might not be // necessary for thorough testing. - type RenamingTest = (&'static str, ~[~[uint]], bool); + type RenamingTest = (&'static str, Vec>, bool); #[test] fn automatic_renaming () { - let tests: ~[RenamingTest] = - ~[// b & c should get new names throughout, in the expr too: + let tests: Vec = + vec!(// b & c should get new names throughout, in the expr too: ("fn a() -> int { let b = 13; let c = b; b+c }", - ~[~[0,1],~[2]], false), + vec!(vec!(0,1),vec!(2)), false), // both x's should be renamed (how is this causing a bug?) ("fn main () {let x: int = 13;x;}", - ~[~[0]], false), + vec!(vec!(0)), false), // the use of b after the + should be renamed, the other one not: ("macro_rules! f (($x:ident) => (b + $x)) fn a() -> int { let b = 13; f!(b)}", - ~[~[1]], false), + vec!(vec!(1)), false), // the b before the plus should not be renamed (requires marks) ("macro_rules! f (($x:ident) => ({let b=9; ($x + b)})) fn a() -> int { f!(b)}", - ~[~[1]], false), + vec!(vec!(1)), false), // the marks going in and out of letty should cancel, allowing that $x to // capture the one following the semicolon. // this was an awesome test case, and caught a *lot* of bugs. ("macro_rules! letty(($x:ident) => (let $x = 15;)) macro_rules! user(($x:ident) => ({letty!($x); $x})) fn main() -> int {user!(z)}", - ~[~[0]], false), - // no longer a fixme #8062: this test exposes a *potential* bug; our system does - // not behave exactly like MTWT, but a conversation with Matthew Flatt - // suggests that this can only occur in the presence of local-expand, which - // we have no plans to support. - // ("fn main() {let hrcoo = 19; macro_rules! getx(()=>(hrcoo)); getx!();}", - // ~[~[0]], true) - // FIXME #6994: the next string exposes the bug referred to in issue 6994, so I'm - // commenting it out. - // the z flows into and out of two macros (g & f) along one path, and one - // (just g) along the other, so the result of the whole thing should - // be "let z_123 = 3; z_123" - //"macro_rules! g (($x:ident) => - // ({macro_rules! f(($y:ident)=>({let $y=3;$x}));f!($x)})) - // fn a(){g!(z)}" - // create a really evil test case where a $x appears inside a binding of $x - // but *shouldnt* bind because it was inserted by a different macro.... - // can't write this test case until we have macro-generating macros. 
- ]; + vec!(vec!(0)), false)); for (idx,s) in tests.iter().enumerate() { run_renaming_test(s,idx); } @@ -1137,20 +1126,20 @@ mod test { }; let cr = expand_crate_str(teststr.to_owned()); // find the bindings: - let mut name_finder = new_name_finder(~[]); + let mut name_finder = new_name_finder(Vec::new()); visit::walk_crate(&mut name_finder,&cr,()); let bindings = name_finder.ident_accumulator; // find the varrefs: - let mut path_finder = new_path_finder(~[]); + let mut path_finder = new_path_finder(Vec::new()); visit::walk_crate(&mut path_finder,&cr,()); let varrefs = path_finder.path_accumulator; // must be one check clause for each binding: assert_eq!(bindings.len(),bound_connections.len()); for (binding_idx,shouldmatch) in bound_connections.iter().enumerate() { - let binding_name = mtwt_resolve(bindings[binding_idx]); - let binding_marks = mtwt_marksof(bindings[binding_idx].ctxt,invalid_name); + let binding_name = mtwt_resolve(*bindings.get(binding_idx)); + let binding_marks = mtwt_marksof(bindings.get(binding_idx).ctxt,invalid_name); // shouldmatch can't name varrefs that don't exist: assert!((shouldmatch.len() == 0) || (varrefs.len() > *shouldmatch.iter().max().unwrap())); @@ -1159,13 +1148,18 @@ mod test { // it should be a path of length 1, and it should // be free-identifier=? or bound-identifier=? to the given binding assert_eq!(varref.segments.len(),1); - let varref_name = mtwt_resolve(varref.segments[0].identifier); - let varref_marks = mtwt_marksof(varref.segments[0].identifier.ctxt, + let varref_name = mtwt_resolve(varref.segments + .get(0) + .identifier); + let varref_marks = mtwt_marksof(varref.segments + .get(0) + .identifier + .ctxt, invalid_name); if !(varref_name==binding_name) { println!("uh oh, should match but doesn't:"); println!("varref: {:?}",varref); - println!("binding: {:?}", bindings[binding_idx]); + println!("binding: {:?}", *bindings.get(binding_idx)); ast_util::display_sctable(get_sctable()); } assert_eq!(varref_name,binding_name); @@ -1176,7 +1170,8 @@ mod test { } } else { let fail = (varref.segments.len() == 1) - && (mtwt_resolve(varref.segments[0].identifier) == binding_name); + && (mtwt_resolve(varref.segments.get(0).identifier) == + binding_name); // temp debugging: if fail { println!("failure on test {}",test_idx); @@ -1185,11 +1180,13 @@ mod test { println!("uh oh, matches but shouldn't:"); println!("varref: {:?}",varref); // good lord, you can't make a path with 0 segments, can you? 
- let string = token::get_ident(varref.segments[0].identifier); + let string = token::get_ident(varref.segments + .get(0) + .identifier); println!("varref's first segment's uint: {}, and string: \"{}\"", - varref.segments[0].identifier.name, + varref.segments.get(0).identifier.name, string.get()); - println!("binding: {:?}", bindings[binding_idx]); + println!("binding: {:?}", *bindings.get(binding_idx)); ast_util::display_sctable(get_sctable()); } assert!(!fail); @@ -1205,40 +1202,41 @@ foo_module!() "; let cr = expand_crate_str(crate_str); // find the xx binding - let mut name_finder = new_name_finder(~[]); + let mut name_finder = new_name_finder(Vec::new()); visit::walk_crate(&mut name_finder, &cr, ()); let bindings = name_finder.ident_accumulator; - let cxbinds: ~[&ast::Ident] = + let cxbinds: Vec<&ast::Ident> = bindings.iter().filter(|b| { let ident = token::get_ident(**b); let string = ident.get(); "xx" == string }).collect(); - let cxbinds: &[&ast::Ident] = cxbinds; + let cxbinds: &[&ast::Ident] = cxbinds.as_slice(); let cxbind = match cxbinds { [b] => b, _ => fail!("expected just one binding for ext_cx") }; let resolved_binding = mtwt_resolve(*cxbind); // find all the xx varrefs: - let mut path_finder = new_path_finder(~[]); + let mut path_finder = new_path_finder(Vec::new()); visit::walk_crate(&mut path_finder, &cr, ()); let varrefs = path_finder.path_accumulator; // the xx binding should bind all of the xx varrefs: for (idx,v) in varrefs.iter().filter(|p| { p.segments.len() == 1 - && "xx" == token::get_ident(p.segments[0].identifier).get() + && "xx" == token::get_ident(p.segments.get(0).identifier).get() }).enumerate() { - if mtwt_resolve(v.segments[0].identifier) != resolved_binding { + if mtwt_resolve(v.segments.get(0).identifier) != + resolved_binding { println!("uh oh, xx binding didn't match xx varref:"); println!("this is xx varref \\# {:?}",idx); println!("binding: {:?}",cxbind); println!("resolves to: {:?}",resolved_binding); - println!("varref: {:?}",v.segments[0].identifier); + println!("varref: {:?}",v.segments.get(0).identifier); println!("resolves to: {:?}", - mtwt_resolve(v.segments[0].identifier)); + mtwt_resolve(v.segments.get(0).identifier)); let table = get_sctable(); println!("SC table:"); @@ -1249,17 +1247,18 @@ foo_module!() } } } - assert_eq!(mtwt_resolve(v.segments[0].identifier),resolved_binding); + assert_eq!(mtwt_resolve(v.segments.get(0).identifier), + resolved_binding); }; } #[test] fn pat_idents(){ let pat = string_to_pat(~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})"); - let mut pat_idents = new_name_finder(~[]); + let mut pat_idents = new_name_finder(Vec::new()); pat_idents.visit_pat(pat, ()); assert_eq!(pat_idents.ident_accumulator, - strs_to_idents(~["a","c","b","d"])); + strs_to_idents(vec!("a","c","b","d"))); } } diff --git a/src/libsyntax/ext/format.rs b/src/libsyntax/ext/format.rs index 2642ee00458c1..7752d88596820 100644 --- a/src/libsyntax/ext/format.rs +++ b/src/libsyntax/ext/format.rs @@ -22,6 +22,7 @@ use rsparse = parse; use std::fmt::parse; use collections::{HashMap, HashSet}; use std::vec; +use std::vec_ng::Vec; #[deriving(Eq)] enum ArgumentType { @@ -41,20 +42,20 @@ struct Context<'a> { // Parsed argument expressions and the types that we've found so far for // them. - args: ~[@ast::Expr], - arg_types: ~[Option], + args: Vec<@ast::Expr>, + arg_types: Vec>, // Parsed named expressions and the types that we've found for them so far. 
// Note that we keep a side-array of the ordering of the named arguments // found to be sure that we can translate them in the same order that they // were declared in. names: HashMap<~str, @ast::Expr>, name_types: HashMap<~str, ArgumentType>, - name_ordering: ~[~str], + name_ordering: Vec<~str>, // Collection of the compiled `rt::Piece` structures - pieces: ~[@ast::Expr], + pieces: Vec<@ast::Expr> , name_positions: HashMap<~str, uint>, - method_statics: ~[@ast::Item], + method_statics: Vec<@ast::Item> , // Updated as arguments are consumed or methods are entered nest_level: uint, @@ -70,16 +71,17 @@ struct Context<'a> { /// Some((fmtstr, unnamed arguments, ordering of named arguments, /// named arguments)) fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) - -> (@ast::Expr, Option<(@ast::Expr, ~[@ast::Expr], ~[~str], - HashMap<~str, @ast::Expr>)>) -{ - let mut args = ~[]; + -> (@ast::Expr, Option<(@ast::Expr, Vec<@ast::Expr>, Vec<~str>, + HashMap<~str, @ast::Expr>)>) { + let mut args = Vec::new(); let mut names = HashMap::<~str, @ast::Expr>::new(); - let mut order = ~[]; + let mut order = Vec::new(); let mut p = rsparse::new_parser_from_tts(ecx.parse_sess(), ecx.cfg(), - tts.to_owned()); + tts.iter() + .map(|x| (*x).clone()) + .collect()); // Parse the leading function expression (maybe a block, maybe a path) let extra = p.parse_expr(); if !p.eat(&token::COMMA) { @@ -276,14 +278,14 @@ impl<'a> Context<'a> { return; } { - let arg_type = match self.arg_types[arg] { - None => None, - Some(ref x) => Some(x) + let arg_type = match self.arg_types.get(arg) { + &None => None, + &Some(ref x) => Some(x) }; - self.verify_same(self.args[arg].span, &ty, arg_type); + self.verify_same(self.args.get(arg).span, &ty, arg_type); } - if self.arg_types[arg].is_none() { - self.arg_types[arg] = Some(ty); + if self.arg_types.get(arg).is_none() { + *self.arg_types.get_mut(arg) = Some(ty); } } @@ -357,7 +359,7 @@ impl<'a> Context<'a> { /// These attributes are applied to all statics that this syntax extension /// will generate. - fn static_attrs(&self) -> ~[ast::Attribute] { + fn static_attrs(&self) -> Vec { // Flag statics as `address_insignificant` so LLVM can merge duplicate // globals as much as possible (which we're generating a whole lot of). 
let unnamed = self.ecx @@ -371,41 +373,41 @@ impl<'a> Context<'a> { InternedString::new("dead_code")); let allow_dead_code = self.ecx.meta_list(self.fmtsp, InternedString::new("allow"), - ~[dead_code]); + vec!(dead_code)); let allow_dead_code = self.ecx.attribute(self.fmtsp, allow_dead_code); - return ~[unnamed, allow_dead_code]; + return vec!(unnamed, allow_dead_code); } - fn parsepath(&self, s: &str) -> ~[ast::Ident] { - ~[self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("parse"), self.ecx.ident_of(s)] + fn parsepath(&self, s: &str) -> Vec { + vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), + self.ecx.ident_of("parse"), self.ecx.ident_of(s)) } - fn rtpath(&self, s: &str) -> ~[ast::Ident] { - ~[self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("rt"), self.ecx.ident_of(s)] + fn rtpath(&self, s: &str) -> Vec { + vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), + self.ecx.ident_of("rt"), self.ecx.ident_of(s)) } - fn ctpath(&self, s: &str) -> ~[ast::Ident] { - ~[self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("parse"), self.ecx.ident_of(s)] + fn ctpath(&self, s: &str) -> Vec { + vec!(self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), + self.ecx.ident_of("parse"), self.ecx.ident_of(s)) } fn none(&self) -> @ast::Expr { - let none = self.ecx.path_global(self.fmtsp, ~[ + let none = self.ecx.path_global(self.fmtsp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("option"), - self.ecx.ident_of("None")]); + self.ecx.ident_of("None"))); self.ecx.expr_path(none) } fn some(&self, e: @ast::Expr) -> @ast::Expr { - let p = self.ecx.path_global(self.fmtsp, ~[ + let p = self.ecx.path_global(self.fmtsp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("option"), - self.ecx.ident_of("Some")]); + self.ecx.ident_of("Some"))); let p = self.ecx.expr_path(p); - self.ecx.expr_call(self.fmtsp, p, ~[e]) + self.ecx.expr_call(self.fmtsp, p, vec!(e)) } fn trans_count(&self, c: parse::Count) -> @ast::Expr { @@ -413,11 +415,11 @@ impl<'a> Context<'a> { match c { parse::CountIs(i) => { self.ecx.expr_call_global(sp, self.rtpath("CountIs"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } parse::CountIsParam(i) => { self.ecx.expr_call_global(sp, self.rtpath("CountIsParam"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } parse::CountImplied => { let path = self.ecx.path_global(sp, self.rtpath("CountImplied")); @@ -434,7 +436,7 @@ impl<'a> Context<'a> { }; let i = i + self.args.len(); self.ecx.expr_call_global(sp, self.rtpath("CountIsParam"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } } } @@ -450,21 +452,19 @@ impl<'a> Context<'a> { }).collect(); let s = token::intern_and_get_ident(arm.selector); let selector = self.ecx.expr_str(sp, s); - self.ecx.expr_struct(sp, p, ~[ + self.ecx.expr_struct(sp, p, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("selector"), selector), self.ecx.field_imm(sp, self.ecx.ident_of("result"), - self.ecx.expr_vec_slice(sp, result)), - ]) + self.ecx.expr_vec_slice(sp, result)))) }).collect(); let default = default.iter().map(|p| { self.trans_piece(p) }).collect(); - self.ecx.expr_call_global(sp, self.rtpath("Select"), ~[ + self.ecx.expr_call_global(sp, self.rtpath("Select"), vec!( self.ecx.expr_vec_slice(sp, arms), - self.ecx.expr_vec_slice(sp, default), - ]) + self.ecx.expr_vec_slice(sp, default))) } parse::Plural(offset, ref arms, ref default) => { let offset = match offset { @@ -487,23 +487,21 @@ impl<'a> Context<'a> { } }; let selector = 
self.ecx.expr_call_global(sp, - lr, ~[selarg]); - self.ecx.expr_struct(sp, p, ~[ + lr, vec!(selarg)); + self.ecx.expr_struct(sp, p, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("selector"), selector), self.ecx.field_imm(sp, self.ecx.ident_of("result"), - self.ecx.expr_vec_slice(sp, result)), - ]) + self.ecx.expr_vec_slice(sp, result)))) }).collect(); let default = default.iter().map(|p| { self.trans_piece(p) }).collect(); - self.ecx.expr_call_global(sp, self.rtpath("Plural"), ~[ + self.ecx.expr_call_global(sp, self.rtpath("Plural"), vec!( offset, self.ecx.expr_vec_slice(sp, arms), - self.ecx.expr_vec_slice(sp, default), - ]) + self.ecx.expr_vec_slice(sp, default))) } }; let life = self.ecx.lifetime(sp, self.ecx.ident_of("static").name); @@ -512,7 +510,7 @@ impl<'a> Context<'a> { true, self.rtpath("Method"), opt_vec::with(life), - ~[] + Vec::new() ), None); let st = ast::ItemStatic(ty, ast::MutImmutable, method); let static_name = self.ecx.ident_of(format!("__STATIC_METHOD_{}", @@ -530,13 +528,13 @@ impl<'a> Context<'a> { let s = token::intern_and_get_ident(s); self.ecx.expr_call_global(sp, self.rtpath("String"), - ~[ + vec!( self.ecx.expr_str(sp, s) - ]) + )) } parse::CurrentArgument => { let nil = self.ecx.expr_lit(sp, ast::LitNil); - self.ecx.expr_call_global(sp, self.rtpath("CurrentArgument"), ~[nil]) + self.ecx.expr_call_global(sp, self.rtpath("CurrentArgument"), vec!(nil)) } parse::Argument(ref arg) => { // Translate the position @@ -549,7 +547,7 @@ impl<'a> Context<'a> { } parse::ArgumentIs(i) => { self.ecx.expr_call_global(sp, self.rtpath("ArgumentIs"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } // Named arguments are converted to positional arguments at // the end of the list of arguments @@ -560,7 +558,7 @@ impl<'a> Context<'a> { }; let i = i + self.args.len(); self.ecx.expr_call_global(sp, self.rtpath("ArgumentIs"), - ~[self.ecx.expr_uint(sp, i)]) + vec!(self.ecx.expr_uint(sp, i))) } }; @@ -583,13 +581,12 @@ impl<'a> Context<'a> { let prec = self.trans_count(arg.format.precision); let width = self.trans_count(arg.format.width); let path = self.ecx.path_global(sp, self.rtpath("FormatSpec")); - let fmt = self.ecx.expr_struct(sp, path, ~[ + let fmt = self.ecx.expr_struct(sp, path, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("fill"), fill), self.ecx.field_imm(sp, self.ecx.ident_of("align"), align), self.ecx.field_imm(sp, self.ecx.ident_of("flags"), flags), self.ecx.field_imm(sp, self.ecx.ident_of("precision"), prec), - self.ecx.field_imm(sp, self.ecx.ident_of("width"), width), - ]); + self.ecx.field_imm(sp, self.ecx.ident_of("width"), width))); // Translate the method (if any) let method = match arg.method { @@ -600,12 +597,11 @@ impl<'a> Context<'a> { } }; let path = self.ecx.path_global(sp, self.rtpath("Argument")); - let s = self.ecx.expr_struct(sp, path, ~[ + let s = self.ecx.expr_struct(sp, path, vec!( self.ecx.field_imm(sp, self.ecx.ident_of("position"), pos), self.ecx.field_imm(sp, self.ecx.ident_of("format"), fmt), - self.ecx.field_imm(sp, self.ecx.ident_of("method"), method), - ]); - self.ecx.expr_call_global(sp, self.rtpath("Argument"), ~[s]) + self.ecx.field_imm(sp, self.ecx.ident_of("method"), method))); + self.ecx.expr_call_global(sp, self.rtpath("Argument"), vec!(s)) } } } @@ -613,11 +609,11 @@ impl<'a> Context<'a> { /// Actually builds the expression which the iformat! 
block will be expanded /// to fn to_expr(&self, extra: @ast::Expr) -> @ast::Expr { - let mut lets = ~[]; - let mut locals = ~[]; + let mut lets = Vec::new(); + let mut locals = Vec::new(); let mut names = vec::from_fn(self.name_positions.len(), |_| None); - let mut pats = ~[]; - let mut heads = ~[]; + let mut pats = Vec::new(); + let mut heads = Vec::new(); // First, declare all of our methods that are statics for &method in self.method_statics.iter() { @@ -631,15 +627,14 @@ impl<'a> Context<'a> { let fmt = self.ecx.expr_vec(self.fmtsp, self.pieces.clone()); let piece_ty = self.ecx.ty_path(self.ecx.path_all( self.fmtsp, - true, ~[ + true, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), self.ecx.ident_of("rt"), - self.ecx.ident_of("Piece"), - ], + self.ecx.ident_of("Piece")), opt_vec::with( self.ecx.lifetime(self.fmtsp, self.ecx.ident_of("static").name)), - ~[] + Vec::new() ), None); let ty = ast::TyFixedLengthVec( piece_ty, @@ -661,7 +656,9 @@ impl<'a> Context<'a> { // of each variable because we don't want to move out of the arguments // passed to this function. for (i, &e) in self.args.iter().enumerate() { - if self.arg_types[i].is_none() { continue } // error already generated + if self.arg_types.get(i).is_none() { + continue // error already generated + } let name = self.ecx.ident_of(format!("__arg{}", i)); pats.push(self.ecx.pat_ident(e.span, name)); @@ -696,18 +693,17 @@ impl<'a> Context<'a> { // Now create the fmt::Arguments struct with all our locals we created. let fmt = self.ecx.expr_ident(self.fmtsp, static_name); let args_slice = self.ecx.expr_ident(self.fmtsp, slicename); - let result = self.ecx.expr_call_global(self.fmtsp, ~[ + let result = self.ecx.expr_call_global(self.fmtsp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), self.ecx.ident_of("Arguments"), - self.ecx.ident_of("new"), - ], ~[fmt, args_slice]); + self.ecx.ident_of("new")), vec!(fmt, args_slice)); // We did all the work of making sure that the arguments // structure is safe, so we can safely have an unsafe block. let result = self.ecx.expr_block(P(ast::Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(result), id: ast::DUMMY_NODE_ID, rules: ast::UnsafeBlock(ast::CompilerGenerated), @@ -716,8 +712,8 @@ impl<'a> Context<'a> { let resname = self.ecx.ident_of("__args"); lets.push(self.ecx.stmt_let(self.fmtsp, false, resname, result)); let res = self.ecx.expr_ident(self.fmtsp, resname); - let result = self.ecx.expr_call(extra.span, extra, ~[ - self.ecx.expr_addr_of(extra.span, res)]); + let result = self.ecx.expr_call(extra.span, extra, vec!( + self.ecx.expr_addr_of(extra.span, res))); let body = self.ecx.expr_block(self.ecx.block(self.fmtsp, lets, Some(result))); @@ -749,15 +745,15 @@ impl<'a> Context<'a> { // But the nested match expression is proved to perform not as well // as series of let's; the first approach does. 
let pat = self.ecx.pat(self.fmtsp, ast::PatTup(pats)); - let arm = self.ecx.arm(self.fmtsp, ~[pat], body); + let arm = self.ecx.arm(self.fmtsp, vec!(pat), body); let head = self.ecx.expr(self.fmtsp, ast::ExprTup(heads)); - self.ecx.expr_match(self.fmtsp, head, ~[arm]) + self.ecx.expr_match(self.fmtsp, head, vec!(arm)) } fn format_arg(&self, sp: Span, argno: Position, arg: @ast::Expr) -> @ast::Expr { let ty = match argno { - Exact(ref i) => self.arg_types[*i].get_ref(), + Exact(ref i) => self.arg_types.get(*i).get_ref(), Named(ref s) => self.name_types.get(s) }; @@ -787,31 +783,27 @@ impl<'a> Context<'a> { } } String => { - return self.ecx.expr_call_global(sp, ~[ + return self.ecx.expr_call_global(sp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("argumentstr"), - ], ~[arg]) + self.ecx.ident_of("argumentstr")), vec!(arg)) } Unsigned => { - return self.ecx.expr_call_global(sp, ~[ + return self.ecx.expr_call_global(sp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("argumentuint"), - ], ~[arg]) + self.ecx.ident_of("argumentuint")), vec!(arg)) } }; - let format_fn = self.ecx.path_global(sp, ~[ + let format_fn = self.ecx.path_global(sp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of(fmt_fn), - ]); - self.ecx.expr_call_global(sp, ~[ + self.ecx.ident_of(fmt_fn))); + self.ecx.expr_call_global(sp, vec!( self.ecx.ident_of("std"), self.ecx.ident_of("fmt"), - self.ecx.ident_of("argument"), - ], ~[self.ecx.expr_path(format_fn), arg]) + self.ecx.ident_of("argument")), vec!(self.ecx.expr_path(format_fn), arg)) } } @@ -832,10 +824,10 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span, /// expression. pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, extra: @ast::Expr, - efmt: @ast::Expr, args: ~[@ast::Expr], - name_ordering: ~[~str], + efmt: @ast::Expr, args: Vec<@ast::Expr>, + name_ordering: Vec<~str>, names: HashMap<~str, @ast::Expr>) -> @ast::Expr { - let arg_types = vec::from_fn(args.len(), |_| None); + let arg_types = Vec::from_fn(args.len(), |_| None); let mut cx = Context { ecx: ecx, args: args, @@ -846,8 +838,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, name_ordering: name_ordering, nest_level: 0, next_arg: 0, - pieces: ~[], - method_statics: ~[], + pieces: Vec::new(), + method_statics: Vec::new(), fmtsp: sp, }; cx.fmtsp = efmt.span; @@ -884,7 +876,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, // Make sure that all arguments were used and all arguments have types. 
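`expand_preparsed_format_args` above keeps the `from_fn` constructor, only switching it from the `std::vec` free function to the `Vec::from_fn` of `std::vec_ng`. That constructor did not survive to stable Rust; if you are mapping this hunk onto current code, the equivalents are an iterator `collect` or the `vec![value; n]` repeat form. A small illustrative sketch under that assumption:

    fn main() {
        let n: usize = 4;

        // Patch: `vec::from_fn(args.len(), |_| None)` becomes
        // `Vec::from_fn(args.len(), |_| None)`. In current Rust:
        let arg_types: Vec<Option<u32>> = (0..n).map(|_| None).collect();

        // When the element type is Clone, the `vec!` repeat form is shorter.
        let arg_types_short: Vec<Option<u32>> = vec![None; n];

        assert_eq!(arg_types, arg_types_short);
        assert_eq!(arg_types.len(), n);
    }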
for (i, ty) in cx.arg_types.iter().enumerate() { if ty.is_none() { - cx.ecx.span_err(cx.args[i].span, "argument never used"); + cx.ecx.span_err(cx.args.get(i).span, "argument never used"); } } for (name, e) in cx.names.iter() { diff --git a/src/libsyntax/ext/log_syntax.rs b/src/libsyntax/ext/log_syntax.rs index 5ee4084d207ba..b94928238e9bb 100644 --- a/src/libsyntax/ext/log_syntax.rs +++ b/src/libsyntax/ext/log_syntax.rs @@ -20,7 +20,8 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, -> base::MacResult { cx.print_backtrace(); - println!("{}", print::pprust::tt_to_str(&ast::TTDelim(@tt.to_owned()))); + println!("{}", print::pprust::tt_to_str(&ast::TTDelim( + @tt.iter().map(|x| (*x).clone()).collect()))); //trivial expression MRExpr(@ast::Expr { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 35a5cbd235ae4..e96597d41594b 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -17,6 +17,8 @@ use parse::token::*; use parse::token; use parse; +use std::vec_ng::Vec; + /** * * Quasiquoting works via token trees. @@ -35,17 +37,19 @@ pub mod rt { use parse; use print::pprust; + use std::vec_ng::Vec; + pub use ast::*; pub use parse::token::*; pub use parse::new_parser_from_tts; pub use codemap::{BytePos, Span, dummy_spanned}; pub trait ToTokens { - fn to_tokens(&self, _cx: &ExtCtxt) -> ~[TokenTree]; + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec ; } - impl ToTokens for ~[TokenTree] { - fn to_tokens(&self, _cx: &ExtCtxt) -> ~[TokenTree] { + impl ToTokens for Vec { + fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { (*self).clone() } } @@ -201,7 +205,7 @@ pub mod rt { macro_rules! impl_to_tokens( ($t:ty) => ( impl ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> ~[TokenTree] { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { cx.parse_tts(self.to_source()) } } @@ -211,7 +215,7 @@ pub mod rt { macro_rules! 
impl_to_tokens_self( ($t:ty) => ( impl<'a> ToTokens for $t { - fn to_tokens(&self, cx: &ExtCtxt) -> ~[TokenTree] { + fn to_tokens(&self, cx: &ExtCtxt) -> Vec { cx.parse_tts(self.to_source()) } } @@ -242,7 +246,7 @@ pub mod rt { fn parse_item(&self, s: ~str) -> @ast::Item; fn parse_expr(&self, s: ~str) -> @ast::Expr; fn parse_stmt(&self, s: ~str) -> @ast::Stmt; - fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree]; + fn parse_tts(&self, s: ~str) -> Vec ; } impl<'a> ExtParseUtils for ExtCtxt<'a> { @@ -266,7 +270,7 @@ pub mod rt { parse::parse_stmt_from_source_str("".to_str(), s, self.cfg(), - ~[], + Vec::new(), self.parse_sess()) } @@ -277,7 +281,7 @@ pub mod rt { self.parse_sess()) } - fn parse_tts(&self, s: ~str) -> ~[ast::TokenTree] { + fn parse_tts(&self, s: ~str) -> Vec { parse::parse_tts_from_source_str("".to_str(), s, self.cfg(), @@ -298,16 +302,16 @@ pub fn expand_quote_tokens(cx: &mut ExtCtxt, pub fn expand_quote_expr(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { - let expanded = expand_parse_call(cx, sp, "parse_expr", ~[], tts); + let expanded = expand_parse_call(cx, sp, "parse_expr", Vec::new(), tts); base::MRExpr(expanded) } pub fn expand_quote_item(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { - let e_attrs = cx.expr_vec_uniq(sp, ~[]); + let e_attrs = cx.expr_vec_ng(sp); let expanded = expand_parse_call(cx, sp, "parse_item", - ~[e_attrs], tts); + vec!(e_attrs), tts); base::MRExpr(expanded) } @@ -316,7 +320,7 @@ pub fn expand_quote_pat(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> base::MacResult { let e_refutable = cx.expr_lit(sp, ast::LitBool(true)); let expanded = expand_parse_call(cx, sp, "parse_pat", - ~[e_refutable], tts); + vec!(e_refutable), tts); base::MRExpr(expanded) } @@ -325,20 +329,20 @@ pub fn expand_quote_ty(cx: &mut ExtCtxt, tts: &[ast::TokenTree]) -> base::MacResult { let e_param_colons = cx.expr_lit(sp, ast::LitBool(false)); let expanded = expand_parse_call(cx, sp, "parse_ty", - ~[e_param_colons], tts); + vec!(e_param_colons), tts); base::MRExpr(expanded) } pub fn expand_quote_stmt(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) -> base::MacResult { - let e_attrs = cx.expr_vec_uniq(sp, ~[]); + let e_attrs = cx.expr_vec_ng(sp); let expanded = expand_parse_call(cx, sp, "parse_stmt", - ~[e_attrs], tts); + vec!(e_attrs), tts); base::MRExpr(expanded) } -fn ids_ext(strs: ~[~str]) -> ~[ast::Ident] { +fn ids_ext(strs: Vec<~str> ) -> Vec { strs.map(|str| str_to_ident(*str)) } @@ -352,7 +356,7 @@ fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr { cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("ident_of"), - ~[e_str]) + vec!(e_str)) } fn mk_binop(cx: &ExtCtxt, sp: Span, bop: token::BinOp) -> @ast::Expr { @@ -377,18 +381,18 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { BINOP(binop) => { return cx.expr_call_ident(sp, id_ext("BINOP"), - ~[mk_binop(cx, sp, binop)]); + vec!(mk_binop(cx, sp, binop))); } BINOPEQ(binop) => { return cx.expr_call_ident(sp, id_ext("BINOPEQ"), - ~[mk_binop(cx, sp, binop)]); + vec!(mk_binop(cx, sp, binop))); } LIT_CHAR(i) => { let e_char = cx.expr_lit(sp, ast::LitChar(i)); - return cx.expr_call_ident(sp, id_ext("LIT_CHAR"), ~[e_char]); + return cx.expr_call_ident(sp, id_ext("LIT_CHAR"), vec!(e_char)); } LIT_INT(i, ity) => { @@ -405,7 +409,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { return cx.expr_call_ident(sp, id_ext("LIT_INT"), - ~[e_i64, e_ity]); + vec!(e_i64, e_ity)); } LIT_UINT(u, uty) => { 
@@ -422,7 +426,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { return cx.expr_call_ident(sp, id_ext("LIT_UINT"), - ~[e_u64, e_uty]); + vec!(e_u64, e_uty)); } LIT_INT_UNSUFFIXED(i) => { @@ -430,7 +434,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { return cx.expr_call_ident(sp, id_ext("LIT_INT_UNSUFFIXED"), - ~[e_i64]); + vec!(e_i64)); } LIT_FLOAT(fident, fty) => { @@ -444,39 +448,39 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { return cx.expr_call_ident(sp, id_ext("LIT_FLOAT"), - ~[e_fident, e_fty]); + vec!(e_fident, e_fty)); } LIT_STR(ident) => { return cx.expr_call_ident(sp, id_ext("LIT_STR"), - ~[mk_ident(cx, sp, ident)]); + vec!(mk_ident(cx, sp, ident))); } LIT_STR_RAW(ident, n) => { return cx.expr_call_ident(sp, id_ext("LIT_STR_RAW"), - ~[mk_ident(cx, sp, ident), - cx.expr_uint(sp, n)]); + vec!(mk_ident(cx, sp, ident), + cx.expr_uint(sp, n))); } IDENT(ident, b) => { return cx.expr_call_ident(sp, id_ext("IDENT"), - ~[mk_ident(cx, sp, ident), - cx.expr_bool(sp, b)]); + vec!(mk_ident(cx, sp, ident), + cx.expr_bool(sp, b))); } LIFETIME(ident) => { return cx.expr_call_ident(sp, id_ext("LIFETIME"), - ~[mk_ident(cx, sp, ident)]); + vec!(mk_ident(cx, sp, ident))); } DOC_COMMENT(ident) => { return cx.expr_call_ident(sp, id_ext("DOC_COMMENT"), - ~[mk_ident(cx, sp, ident)]); + vec!(mk_ident(cx, sp, ident))); } INTERPOLATED(_) => fail!("quote! with interpolated token"), @@ -523,7 +527,7 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> @ast::Expr { } -fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> ~[@ast::Stmt] { +fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> Vec<@ast::Stmt> { match *tt { @@ -531,16 +535,16 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> ~[@ast::Stmt] { let e_sp = cx.expr_ident(sp, id_ext("_sp")); let e_tok = cx.expr_call_ident(sp, id_ext("TTTok"), - ~[e_sp, mk_token(cx, sp, tok)]); + vec!(e_sp, mk_token(cx, sp, tok))); let e_push = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("tt")), id_ext("push"), - ~[e_tok]); - ~[cx.stmt_expr(e_push)] + vec!(e_tok)); + vec!(cx.stmt_expr(e_push)) } - ast::TTDelim(ref tts) => mk_tts(cx, sp, **tts), + ast::TTDelim(ref tts) => mk_tts(cx, sp, tts.as_slice()), ast::TTSeq(..) 
=> fail!("TTSeq in quote!"), ast::TTNonterminal(sp, ident) => { @@ -551,22 +555,22 @@ fn mk_tt(cx: &ExtCtxt, sp: Span, tt: &ast::TokenTree) -> ~[@ast::Stmt] { cx.expr_method_call(sp, cx.expr_ident(sp, ident), id_ext("to_tokens"), - ~[cx.expr_ident(sp, id_ext("ext_cx"))]); + vec!(cx.expr_ident(sp, id_ext("ext_cx")))); let e_push = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("tt")), id_ext("push_all_move"), - ~[e_to_toks]); + vec!(e_to_toks)); - ~[cx.stmt_expr(e_push)] + vec!(cx.stmt_expr(e_push)) } } } fn mk_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) - -> ~[@ast::Stmt] { - let mut ss = ~[]; + -> Vec<@ast::Stmt> { + let mut ss = Vec::new(); for tt in tts.iter() { ss.push_all_move(mk_tt(cx, sp, tt)); } @@ -583,7 +587,9 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let mut p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), - tts.to_owned()); + tts.iter() + .map(|x| (*x).clone()) + .collect()); p.quote_depth += 1u; let cx_expr = p.parse_expr(); @@ -623,20 +629,20 @@ fn expand_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree]) let e_sp = cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("call_site"), - ~[]); + Vec::new()); let stmt_let_sp = cx.stmt_let(sp, false, id_ext("_sp"), e_sp); - let stmt_let_tt = cx.stmt_let(sp, true, - id_ext("tt"), - cx.expr_vec_uniq(sp, ~[])); + let stmt_let_tt = cx.stmt_let(sp, true, id_ext("tt"), cx.expr_vec_ng(sp)); + let mut vector = vec!(stmt_let_sp, stmt_let_tt); + vector.push_all_move(mk_tts(cx, sp, tts.as_slice())); let block = cx.expr_block( cx.block_all(sp, - ~[], - ~[stmt_let_sp, stmt_let_tt] + mk_tts(cx, sp, tts), + Vec::new(), + vector, Some(cx.expr_ident(sp, id_ext("tt"))))); (cx_expr, block) @@ -646,36 +652,36 @@ fn expand_wrapper(cx: &ExtCtxt, sp: Span, cx_expr: @ast::Expr, expr: @ast::Expr) -> @ast::Expr { - let uses = ~[ cx.view_use_glob(sp, ast::Inherited, - ids_ext(~[~"syntax", + let uses = vec!( cx.view_use_glob(sp, ast::Inherited, + ids_ext(vec!(~"syntax", ~"ext", ~"quote", - ~"rt"])) ]; + ~"rt"))) ); let stmt_let_ext_cx = cx.stmt_let(sp, false, id_ext("ext_cx"), cx_expr); - cx.expr_block(cx.block_all(sp, uses, ~[stmt_let_ext_cx], Some(expr))) + cx.expr_block(cx.block_all(sp, uses, vec!(stmt_let_ext_cx), Some(expr))) } fn expand_parse_call(cx: &ExtCtxt, sp: Span, parse_method: &str, - arg_exprs: ~[@ast::Expr], + arg_exprs: Vec<@ast::Expr> , tts: &[ast::TokenTree]) -> @ast::Expr { let (cx_expr, tts_expr) = expand_tts(cx, sp, tts); let cfg_call = || cx.expr_method_call( sp, cx.expr_ident(sp, id_ext("ext_cx")), - id_ext("cfg"), ~[]); + id_ext("cfg"), Vec::new()); let parse_sess_call = || cx.expr_method_call( sp, cx.expr_ident(sp, id_ext("ext_cx")), - id_ext("parse_sess"), ~[]); + id_ext("parse_sess"), Vec::new()); let new_parser_call = cx.expr_call(sp, cx.expr_ident(sp, id_ext("new_parser_from_tts")), - ~[parse_sess_call(), cfg_call(), tts_expr]); + vec!(parse_sess_call(), cfg_call(), tts_expr)); let expr = cx.expr_method_call(sp, new_parser_call, id_ext(parse_method), arg_exprs); diff --git a/src/libsyntax/ext/registrar.rs b/src/libsyntax/ext/registrar.rs index f0bad1b40ebbd..4c18eb83afcee 100644 --- a/src/libsyntax/ext/registrar.rs +++ b/src/libsyntax/ext/registrar.rs @@ -15,15 +15,18 @@ use diagnostic; use visit; use visit::Visitor; +use std::vec_ng::Vec; + struct MacroRegistrarContext { - registrars: ~[(ast::NodeId, Span)], + registrars: Vec<(ast::NodeId, Span)> , } impl Visitor<()> for MacroRegistrarContext { fn visit_item(&mut self, item: &ast::Item, _: ()) { match item.node { 
ast::ItemFn(..) => { - if attr::contains_name(item.attrs, "macro_registrar") { + if attr::contains_name(item.attrs.as_slice(), + "macro_registrar") { self.registrars.push((item.id, item.span)); } } @@ -36,7 +39,7 @@ impl Visitor<()> for MacroRegistrarContext { pub fn find_macro_registrar(diagnostic: @diagnostic::SpanHandler, krate: &ast::Crate) -> Option { - let mut ctx = MacroRegistrarContext { registrars: ~[] }; + let mut ctx = MacroRegistrarContext { registrars: Vec::new() }; visit::walk_crate(&mut ctx, krate, ()); match ctx.registrars.len() { diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index c81ee55c23768..b31388f58eb9f 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -142,6 +142,7 @@ pub fn expand_include_bin(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) return MacResult::dummy_expr(sp); } Ok(bytes) => { + let bytes = bytes.iter().map(|x| *x).collect(); base::MRExpr(cx.expr_lit(sp, ast::LitBinary(Rc::new(bytes)))) } } diff --git a/src/libsyntax/ext/trace_macros.rs b/src/libsyntax/ext/trace_macros.rs index db2c9dcddb6dc..183cccde18e86 100644 --- a/src/libsyntax/ext/trace_macros.rs +++ b/src/libsyntax/ext/trace_macros.rs @@ -24,7 +24,7 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt, let cfg = cx.cfg(); let tt_rdr = new_tt_reader(cx.parse_sess().span_diagnostic, None, - tt.to_owned()); + tt.iter().map(|x| (*x).clone()).collect()); let mut rust_parser = Parser(sess, cfg.clone(), tt_rdr.dup()); if rust_parser.is_keyword(keywords::True) { diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index edd875a57a749..c9d3150c2cd41 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -22,7 +22,7 @@ use parse::token::{Token, EOF, Nonterminal}; use parse::token; use collections::HashMap; -use std::vec; +use std::vec_ng::Vec; /* This is an Earley-like parser, without support for in-grammar nonterminals, only by calling out to the main rust parser for named nonterminals (which it @@ -99,11 +99,11 @@ nonempty body. */ #[deriving(Clone)] pub struct MatcherPos { - elts: ~[ast::Matcher], // maybe should be <'>? Need to understand regions. + elts: Vec , // maybe should be <'>? Need to understand regions. sep: Option, idx: uint, up: Option<~MatcherPos>, - matches: ~[~[@NamedMatch]], + matches: Vec>, match_lo: uint, match_hi: uint, sp_lo: BytePos, } @@ -112,12 +112,14 @@ pub fn count_names(ms: &[Matcher]) -> uint { ms.iter().fold(0, |ct, m| { ct + match m.node { MatchTok(_) => 0u, - MatchSeq(ref more_ms, _, _, _, _) => count_names((*more_ms)), + MatchSeq(ref more_ms, _, _, _, _) => { + count_names(more_ms.as_slice()) + } MatchNonterminal(_, _, _) => 1u }}) } -pub fn initial_matcher_pos(ms: ~[Matcher], sep: Option, lo: BytePos) +pub fn initial_matcher_pos(ms: Vec , sep: Option, lo: BytePos) -> ~MatcherPos { let mut match_idx_hi = 0u; for elt in ms.iter() { @@ -131,7 +133,7 @@ pub fn initial_matcher_pos(ms: ~[Matcher], sep: Option, lo: BytePos) } } } - let matches = vec::from_fn(count_names(ms), |_i| ~[]); + let matches = Vec::from_fn(count_names(ms.as_slice()), |_i| Vec::new()); ~MatcherPos { elts: ms, sep: sep, @@ -164,7 +166,7 @@ pub fn initial_matcher_pos(ms: ~[Matcher], sep: Option, lo: BytePos) // ast::Matcher it was derived from. 
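In the `macro_parser.rs` hunks here, the per-matcher result buckets change from `~[~[@NamedMatch]]` to `Vec<Vec<@NamedMatch>>`, initialized with `Vec::from_fn(count_names(...), |_i| Vec::new())`, and the hunks just below push into a bucket via `matches.get_mut(idx).push(...)` because the old `get_mut` handed back `&mut T` directly. A rough current-Rust analogue of that bucket setup, with a placeholder element type (illustrative only):

    fn main() {
        let match_count: usize = 3;

        // Analogue of `Vec::from_fn(count_names(ms), |_i| Vec::new())`:
        // one empty bucket per named matcher.
        let mut matches: Vec<Vec<&str>> = vec![Vec::new(); match_count];

        // Analogue of `matches.get_mut(idx).push(@MatchedSeq(..))`:
        // indexing already yields `&mut Vec<_>` today, so this is a plain push.
        matches[1].push("matched token");

        assert_eq!(matches[1].len(), 1);
        assert!(matches[0].is_empty());
    }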
pub enum NamedMatch { - MatchedSeq(~[@NamedMatch], codemap::Span), + MatchedSeq(Vec<@NamedMatch> , codemap::Span), MatchedNonterminal(Nonterminal) } @@ -206,9 +208,9 @@ pub enum ParseResult { pub fn parse_or_else(sess: @ParseSess, cfg: ast::CrateConfig, rdr: R, - ms: ~[Matcher]) + ms: Vec ) -> HashMap { - match parse(sess, cfg, rdr, ms) { + match parse(sess, cfg, rdr, ms.as_slice()) { Success(m) => m, Failure(sp, str) => sess.span_diagnostic.span_fatal(sp, str), Error(sp, str) => sess.span_diagnostic.span_fatal(sp, str) @@ -230,13 +232,17 @@ pub fn parse(sess: @ParseSess, rdr: R, ms: &[Matcher]) -> ParseResult { - let mut cur_eis = ~[]; - cur_eis.push(initial_matcher_pos(ms.to_owned(), None, rdr.peek().sp.lo)); + let mut cur_eis = Vec::new(); + cur_eis.push(initial_matcher_pos(ms.iter() + .map(|x| (*x).clone()) + .collect(), + None, + rdr.peek().sp.lo)); loop { - let mut bb_eis = ~[]; // black-box parsed by parser.rs - let mut next_eis = ~[]; // or proceed normally - let mut eof_eis = ~[]; + let mut bb_eis = Vec::new(); // black-box parsed by parser.rs + let mut next_eis = Vec::new(); // or proceed normally + let mut eof_eis = Vec::new(); let TokenAndSpan {tok: tok, sp: sp} = rdr.peek(); @@ -274,8 +280,9 @@ pub fn parse(sess: @ParseSess, // Only touch the binders we have actually bound for idx in range(ei.match_lo, ei.match_hi) { - let sub = ei.matches[idx].clone(); - new_pos.matches[idx] + let sub = (*ei.matches.get(idx)).clone(); + new_pos.matches + .get_mut(idx) .push(@MatchedSeq(sub, mk_sp(ei.sp_lo, sp.hi))); } @@ -308,7 +315,7 @@ pub fn parse(sess: @ParseSess, eof_eis.push(ei); } } else { - match ei.elts[idx].node.clone() { + match ei.elts.get(idx).node.clone() { /* need to descend into sequence */ MatchSeq(ref matchers, ref sep, zero_ok, match_idx_lo, match_idx_hi) => { @@ -317,13 +324,15 @@ pub fn parse(sess: @ParseSess, new_ei.idx += 1u; //we specifically matched zero repeats. for idx in range(match_idx_lo, match_idx_hi) { - new_ei.matches[idx].push(@MatchedSeq(~[], sp)); + new_ei.matches + .get_mut(idx) + .push(@MatchedSeq(Vec::new(), sp)); } cur_eis.push(new_ei); } - let matches = vec::from_elem(ei.matches.len(), ~[]); + let matches = Vec::from_elem(ei.matches.len(), Vec::new()); let ei_t = ei; cur_eis.push(~MatcherPos { elts: (*matchers).clone(), @@ -351,11 +360,11 @@ pub fn parse(sess: @ParseSess, /* error messages here could be improved with links to orig. 
rules */ if token_name_eq(&tok, &EOF) { if eof_eis.len() == 1u { - let mut v = ~[]; - for dv in eof_eis[0u].matches.mut_iter() { + let mut v = Vec::new(); + for dv in eof_eis.get_mut(0).matches.mut_iter() { v.push(dv.pop().unwrap()); } - return Success(nameize(sess, ms, v)); + return Success(nameize(sess, ms, v.as_slice())); } else if eof_eis.len() > 1u { return Error(sp, ~"ambiguity: multiple successful parses"); } else { @@ -365,7 +374,7 @@ pub fn parse(sess: @ParseSess, if (bb_eis.len() > 0u && next_eis.len() > 0u) || bb_eis.len() > 1u { let nts = bb_eis.map(|ei| { - match ei.elts[ei.idx].node { + match ei.elts.get(ei.idx).node { MatchNonterminal(bind, name, _) => { format!("{} ('{}')", token::get_ident(name), @@ -390,10 +399,10 @@ pub fn parse(sess: @ParseSess, let mut rust_parser = Parser(sess, cfg.clone(), rdr.dup()); let mut ei = bb_eis.pop().unwrap(); - match ei.elts[ei.idx].node { + match ei.elts.get(ei.idx).node { MatchNonterminal(_, name, idx) => { let name_string = token::get_ident(name); - ei.matches[idx].push(@MatchedNonterminal( + ei.matches.get_mut(idx).push(@MatchedNonterminal( parse_nt(&mut rust_parser, name_string.get()))); ei.idx += 1u; } @@ -413,12 +422,12 @@ pub fn parse(sess: @ParseSess, pub fn parse_nt(p: &mut Parser, name: &str) -> Nonterminal { match name { - "item" => match p.parse_item(~[]) { + "item" => match p.parse_item(Vec::new()) { Some(i) => token::NtItem(i), None => p.fatal("expected an item keyword") }, "block" => token::NtBlock(p.parse_block()), - "stmt" => token::NtStmt(p.parse_stmt(~[])), + "stmt" => token::NtStmt(p.parse_stmt(Vec::new())), "pat" => token::NtPat(p.parse_pat()), "expr" => token::NtExpr(p.parse_expr()), "ty" => token::NtTy(p.parse_ty(false /* no need to disambiguate*/)), diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 45fe24ebf68ff..712d5f6bd27da 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -25,9 +25,11 @@ use parse::token::{special_idents, gensym_ident}; use parse::token::{FAT_ARROW, SEMI, NtMatchers, NtTT, EOF}; use parse::token; use print; -use std::cell::RefCell; use util::small_vector::SmallVector; +use std::cell::RefCell; +use std::vec_ng::Vec; + struct ParserAnyMacro { parser: RefCell, } @@ -90,8 +92,8 @@ impl AnyMacro for ParserAnyMacro { struct MacroRulesMacroExpander { name: Ident, - lhses: @~[@NamedMatch], - rhses: @~[@NamedMatch], + lhses: @Vec<@NamedMatch> , + rhses: @Vec<@NamedMatch> , } impl MacroExpander for MacroRulesMacroExpander { @@ -100,7 +102,12 @@ impl MacroExpander for MacroRulesMacroExpander { sp: Span, arg: &[ast::TokenTree]) -> MacResult { - generic_extension(cx, sp, self.name, arg, *self.lhses, *self.rhses) + generic_extension(cx, + sp, + self.name, + arg, + self.lhses.as_slice(), + self.rhses.as_slice()) } } @@ -115,7 +122,9 @@ fn generic_extension(cx: &ExtCtxt, if cx.trace_macros() { println!("{}! \\{ {} \\}", token::get_ident(name), - print::pprust::tt_to_str(&TTDelim(@arg.to_owned()))); + print::pprust::tt_to_str(&TTDelim(@arg.iter() + .map(|x| (*x).clone()) + .collect()))); } // Which arm's failure should we report? 
(the one furthest along) @@ -128,8 +137,12 @@ fn generic_extension(cx: &ExtCtxt, match **lhs { MatchedNonterminal(NtMatchers(ref mtcs)) => { // `None` is because we're not interpolating - let arg_rdr = new_tt_reader(s_d, None, arg.to_owned()); - match parse(cx.parse_sess(), cx.cfg(), arg_rdr, *mtcs) { + let arg_rdr = new_tt_reader(s_d, + None, + arg.iter() + .map(|x| (*x).clone()) + .collect()); + match parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtcs.as_slice()) { Success(named_matches) => { let rhs = match *rhses[i] { // okay, what's your transcriber? @@ -137,7 +150,10 @@ fn generic_extension(cx: &ExtCtxt, match *tt { // cut off delimiters; don't parse 'em TTDelim(ref tts) => { - (*tts).slice(1u,(*tts).len()-1u).to_owned() + (*tts).slice(1u,(*tts).len()-1u) + .iter() + .map(|x| (*x).clone()) + .collect() } _ => cx.span_fatal( sp, "macro rhs must be delimited") @@ -174,7 +190,7 @@ fn generic_extension(cx: &ExtCtxt, pub fn add_new_extension(cx: &mut ExtCtxt, sp: Span, name: Ident, - arg: ~[ast::TokenTree]) + arg: Vec ) -> base::MacResult { // these spans won't matter, anyways fn ms(m: Matcher_) -> Matcher { @@ -191,15 +207,14 @@ pub fn add_new_extension(cx: &mut ExtCtxt, // The grammar for macro_rules! is: // $( $lhs:mtcs => $rhs:tt );+ // ...quasiquoting this would be nice. - let argument_gram = ~[ - ms(MatchSeq(~[ + let argument_gram = vec!( + ms(MatchSeq(vec!( ms(MatchNonterminal(lhs_nm, special_idents::matchers, 0u)), ms(MatchTok(FAT_ARROW)), - ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u)), - ], Some(SEMI), false, 0u, 2u)), + ms(MatchNonterminal(rhs_nm, special_idents::tt, 1u))), Some(SEMI), false, 0u, 2u)), //to phase into semicolon-termination instead of //semicolon-separation - ms(MatchSeq(~[ms(MatchTok(SEMI))], None, true, 2u, 2u))]; + ms(MatchSeq(vec!(ms(MatchTok(SEMI))), None, true, 2u, 2u))); // Parse the macro_rules! invocation (`none` is for no interpolations): diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index a8c9fe372269c..a3f179e851ad3 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -18,11 +18,12 @@ use parse::token; use parse::lexer::TokenAndSpan; use std::cell::{Cell, RefCell}; +use std::vec_ng::Vec; use collections::HashMap; ///an unzipping of `TokenTree`s struct TtFrame { - forest: @~[ast::TokenTree], + forest: @Vec , idx: Cell, dotdotdoted: bool, sep: Option, @@ -35,8 +36,8 @@ pub struct TtReader { priv stack: RefCell<@TtFrame>, /* for MBE-style macro transcription */ priv interpolations: RefCell>, - priv repeat_idx: RefCell<~[uint]>, - priv repeat_len: RefCell<~[uint]>, + priv repeat_idx: RefCell >, + priv repeat_len: RefCell >, /* cached: */ cur_tok: RefCell, cur_span: RefCell, @@ -47,7 +48,7 @@ pub struct TtReader { * should) be none. 
*/ pub fn new_tt_reader(sp_diag: @SpanHandler, interp: Option>, - src: ~[ast::TokenTree]) + src: Vec ) -> TtReader { let r = TtReader { sp_diag: sp_diag, @@ -62,8 +63,8 @@ pub fn new_tt_reader(sp_diag: @SpanHandler, None => RefCell::new(HashMap::new()), Some(x) => RefCell::new(x), }, - repeat_idx: RefCell::new(~[]), - repeat_len: RefCell::new(~[]), + repeat_idx: RefCell::new(Vec::new()), + repeat_len: RefCell::new(Vec::new()), /* dummy values, never read: */ cur_tok: RefCell::new(EOF), cur_span: RefCell::new(DUMMY_SP), @@ -106,7 +107,7 @@ fn lookup_cur_matched_by_matched(r: &TtReader, start: @NamedMatch) // end of the line; duplicate henceforth ad } - MatchedSeq(ref ads, _) => ads[*idx] + MatchedSeq(ref ads, _) => *ads.get(*idx) } } let repeat_idx = r.repeat_idx.borrow(); @@ -217,7 +218,8 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan { r.stack.get().idx.set(0u); { let mut repeat_idx = r.repeat_idx.borrow_mut(); - repeat_idx.get()[repeat_idx.get().len() - 1u] += 1u; + let last_repeat_idx = repeat_idx.get().len() - 1u; + *repeat_idx.get().get_mut(last_repeat_idx) += 1u; } match r.stack.get().sep.clone() { Some(tk) => { @@ -231,7 +233,7 @@ pub fn tt_next_token(r: &TtReader) -> TokenAndSpan { loop { /* because it's easiest, this handles `TTDelim` not starting with a `TTTok`, even though it won't happen */ // FIXME(pcwalton): Bad copy. - match r.stack.get().forest[r.stack.get().idx.get()].clone() { + match (*r.stack.get().forest.get(r.stack.get().idx.get())).clone() { TTDelim(tts) => { r.stack.set(@TtFrame { forest: tts, diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index e62abac443ebb..b01ba7718ba58 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -16,18 +16,20 @@ use parse::token; use opt_vec::OptVec; use util::small_vector::SmallVector; +use std::vec_ng::Vec; + // We may eventually want to be able to fold over type parameters, too. pub trait Folder { fn fold_crate(&mut self, c: Crate) -> Crate { noop_fold_crate(c, self) } - fn fold_meta_items(&mut self, meta_items: &[@MetaItem]) -> ~[@MetaItem] { - meta_items.map(|x| fold_meta_item_(*x, self)) + fn fold_meta_items(&mut self, meta_items: &[@MetaItem]) -> Vec<@MetaItem> { + meta_items.iter().map(|x| fold_meta_item_(*x, self)).collect() } - fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> ~[@ViewPath] { - view_paths.map(|view_path| { + fn fold_view_paths(&mut self, view_paths: &[@ViewPath]) -> Vec<@ViewPath> { + view_paths.iter().map(|view_path| { let inner_view_path = match view_path.node { ViewPathSimple(ref ident, ref path, node_id) => { ViewPathSimple(ident.clone(), @@ -60,7 +62,7 @@ pub trait Folder { node: inner_view_path, span: self.new_span(view_path.span), } - }) + }).collect() } fn fold_view_item(&mut self, vi: &ViewItem) -> ViewItem { @@ -275,7 +277,7 @@ pub trait Folder { node: match macro.node { MacInvocTT(ref p, ref tts, ctxt) => { MacInvocTT(self.fold_path(p), - fold_tts(*tts, self), + fold_tts(tts.as_slice(), self), ctxt) } }, @@ -283,8 +285,8 @@ pub trait Folder { } } - fn map_exprs(&self, f: |@Expr| -> @Expr, es: &[@Expr]) -> ~[@Expr] { - es.map(|x| f(*x)) + fn map_exprs(&self, f: |@Expr| -> @Expr, es: &[@Expr]) -> Vec<@Expr> { + es.iter().map(|x| f(*x)).collect() } fn new_id(&mut self, i: NodeId) -> NodeId { @@ -370,21 +372,21 @@ fn fold_arg_(a: &Arg, fld: &mut T) -> Arg { // since many token::IDENT are not necessary part of let bindings and most // token::LIFETIME are certainly not loop labels. But we can't tell in their // token form. 
So this is less ideal and hacky but it works. -pub fn fold_tts(tts: &[TokenTree], fld: &mut T) -> ~[TokenTree] { - tts.map(|tt| { +pub fn fold_tts(tts: &[TokenTree], fld: &mut T) -> Vec { + tts.iter().map(|tt| { match *tt { TTTok(span, ref tok) => TTTok(span,maybe_fold_ident(tok,fld)), - TTDelim(tts) => TTDelim(@fold_tts(*tts, fld)), + TTDelim(tts) => TTDelim(@fold_tts(tts.as_slice(), fld)), TTSeq(span, pattern, ref sep, is_optional) => TTSeq(span, - @fold_tts(*pattern, fld), + @fold_tts(pattern.as_slice(), fld), sep.as_ref().map(|tok|maybe_fold_ident(tok,fld)), is_optional), TTNonterminal(sp,ref ident) => TTNonterminal(sp,fld.fold_ident(*ident)) } - }) + }).collect() } // apply ident folder if it's an ident, otherwise leave it alone @@ -518,7 +520,7 @@ pub fn noop_fold_view_item(vi: &ViewItem, folder: &mut T) folder.new_id(node_id)) } ViewItemUse(ref view_paths) => { - ViewItemUse(folder.fold_view_paths(*view_paths)) + ViewItemUse(folder.fold_view_paths(view_paths.as_slice())) } }; ViewItem { @@ -881,7 +883,7 @@ mod test { // this version doesn't care about getting comments or docstrings in. fn fake_print_crate(s: &mut pprust::State, krate: &ast::Crate) -> io::IoResult<()> { - pprust::print_mod(s, &krate.module, krate.attrs) + pprust::print_mod(s, &krate.module, krate.attrs.as_slice()) } // change every identifier to "zz" diff --git a/src/libsyntax/opt_vec.rs b/src/libsyntax/opt_vec.rs index 325df0ba77775..ec81fff51c791 100644 --- a/src/libsyntax/opt_vec.rs +++ b/src/libsyntax/opt_vec.rs @@ -15,20 +15,21 @@ * other useful things like `push()` and `len()`. */ -use std::vec; use std::default::Default; +use std::vec; +use std::vec_ng::Vec; #[deriving(Clone, Encodable, Decodable, Hash)] pub enum OptVec { Empty, - Vec(~[T]) + Vec(Vec ) } pub fn with(t: T) -> OptVec { - Vec(~[t]) + Vec(vec!(t)) } -pub fn from(t: ~[T]) -> OptVec { +pub fn from(t: Vec ) -> OptVec { if t.len() == 0 { Empty } else { @@ -44,7 +45,7 @@ impl OptVec { return; } Empty => { - *self = Vec(~[t]); + *self = Vec(vec!(t)); } } } @@ -87,7 +88,7 @@ impl OptVec { pub fn get<'a>(&'a self, i: uint) -> &'a T { match *self { Empty => fail!("invalid index {}", i), - Vec(ref v) => &v[i] + Vec(ref v) => v.get(i) } } @@ -121,11 +122,11 @@ impl OptVec { } #[inline] - pub fn map_to_vec(&self, op: |&T| -> B) -> ~[B] { + pub fn map_to_vec(&self, op: |&T| -> B) -> Vec { self.iter().map(op).collect() } - pub fn mapi_to_vec(&self, op: |uint, &T| -> B) -> ~[B] { + pub fn mapi_to_vec(&self, op: |uint, &T| -> B) -> Vec { let mut index = 0; self.map_to_vec(|a| { let i = index; @@ -135,19 +136,19 @@ impl OptVec { } } -pub fn take_vec(v: OptVec) -> ~[T] { +pub fn take_vec(v: OptVec) -> Vec { match v { - Empty => ~[], + Empty => Vec::new(), Vec(v) => v } } impl OptVec { pub fn prepend(&self, t: T) -> OptVec { - let mut v0 = ~[t]; + let mut v0 = vec!(t); match *self { Empty => {} - Vec(ref v1) => { v0.push_all(*v1); } + Vec(ref v1) => { v0.push_all(v1.as_slice()); } } return Vec(v0); } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index c9bea78d02db5..0a74c7ca82124 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -15,21 +15,23 @@ use parse::token; use parse::parser::Parser; use parse::token::INTERPOLATED; +use std::vec_ng::Vec; + // a parser that can parse attributes. 
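The `fold_tts` and `map_exprs` changes above show why so many call sites in this patch grow an `.iter()...collect()` chain: the owned `~[T]` had its own `.map()` that produced a fresh owned vector, while a `&[T]` slice or `Vec<T>` needs the iterator spelled out. A minimal sketch of the same fold-and-collect shape in current Rust, with a stand-in token type:

    #[derive(Clone, Debug, PartialEq)]
    struct Tok(String); // stand-in for ast::TokenTree

    // Stand-in for the per-element work done inside `fold_tts`.
    fn fold_one(t: &Tok) -> Tok {
        Tok(t.0.to_uppercase())
    }

    fn main() {
        let tts = vec![Tok("fn".into()), Tok("main".into())];

        // `~[T].map(f)` in the old code; `iter().map(f).collect()` after
        // the migration, matching how `fold_tts` and `map_exprs` now read.
        let folded: Vec<Tok> = tts.iter().map(fold_one).collect();

        assert_eq!(folded[0], Tok("FN".into()));
    }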
pub trait ParserAttr { - fn parse_outer_attributes(&mut self) -> ~[ast::Attribute]; + fn parse_outer_attributes(&mut self) -> Vec ; fn parse_attribute(&mut self, permit_inner: bool) -> ast::Attribute; fn parse_inner_attrs_and_next(&mut self) - -> (~[ast::Attribute], ~[ast::Attribute]); + -> (Vec , Vec ); fn parse_meta_item(&mut self) -> @ast::MetaItem; - fn parse_meta_seq(&mut self) -> ~[@ast::MetaItem]; - fn parse_optional_meta(&mut self) -> ~[@ast::MetaItem]; + fn parse_meta_seq(&mut self) -> Vec<@ast::MetaItem> ; + fn parse_optional_meta(&mut self) -> Vec<@ast::MetaItem> ; } impl ParserAttr for Parser { // Parse attributes that appear before an item - fn parse_outer_attributes(&mut self) -> ~[ast::Attribute] { - let mut attrs: ~[ast::Attribute] = ~[]; + fn parse_outer_attributes(&mut self) -> Vec { + let mut attrs: Vec = Vec::new(); loop { debug!("parse_outer_attributes: self.token={:?}", self.token); @@ -116,9 +118,9 @@ impl ParserAttr for Parser { // you can make the 'next' field an Option, but the result is going to be // more useful as a vector. fn parse_inner_attrs_and_next(&mut self) - -> (~[ast::Attribute], ~[ast::Attribute]) { - let mut inner_attrs: ~[ast::Attribute] = ~[]; - let mut next_outer_attrs: ~[ast::Attribute] = ~[]; + -> (Vec , Vec ) { + let mut inner_attrs: Vec = Vec::new(); + let mut next_outer_attrs: Vec = Vec::new(); loop { let attr = match self.token { token::INTERPOLATED(token::NtAttr(..)) => { @@ -188,17 +190,17 @@ impl ParserAttr for Parser { } // matches meta_seq = ( COMMASEP(meta_item) ) - fn parse_meta_seq(&mut self) -> ~[@ast::MetaItem] { + fn parse_meta_seq(&mut self) -> Vec<@ast::MetaItem> { self.parse_seq(&token::LPAREN, &token::RPAREN, seq_sep_trailing_disallowed(token::COMMA), |p| p.parse_meta_item()).node } - fn parse_optional_meta(&mut self) -> ~[@ast::MetaItem] { + fn parse_optional_meta(&mut self) -> Vec<@ast::MetaItem> { match self.token { token::LPAREN => self.parse_meta_seq(), - _ => ~[] + _ => Vec::new() } } } diff --git a/src/libsyntax/parse/comments.rs b/src/libsyntax/parse/comments.rs index bd1c4f9babb81..c2a2097de2442 100644 --- a/src/libsyntax/parse/comments.rs +++ b/src/libsyntax/parse/comments.rs @@ -20,6 +20,7 @@ use parse::token; use std::io; use std::str; use std::uint; +use std::vec_ng::Vec; #[deriving(Clone, Eq)] pub enum CommentStyle { @@ -32,7 +33,7 @@ pub enum CommentStyle { #[deriving(Clone)] pub struct Comment { style: CommentStyle, - lines: ~[~str], + lines: Vec<~str> , pos: BytePos } @@ -54,28 +55,28 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle { pub fn strip_doc_comment_decoration(comment: &str) -> ~str { /// remove whitespace-only lines from the start/end of lines - fn vertical_trim(lines: ~[~str]) -> ~[~str] { + fn vertical_trim(lines: Vec<~str> ) -> Vec<~str> { let mut i = 0u; let mut j = lines.len(); // first line of all-stars should be omitted - if lines.len() > 0 && lines[0].chars().all(|c| c == '*') { + if lines.len() > 0 && lines.get(0).chars().all(|c| c == '*') { i += 1; } - while i < j && lines[i].trim().is_empty() { + while i < j && lines.get(i).trim().is_empty() { i += 1; } // like the first, a last line of all stars should be omitted - if j > i && lines[j - 1].chars().skip(1).all(|c| c == '*') { + if j > i && lines.get(j - 1).chars().skip(1).all(|c| c == '*') { j -= 1; } - while j > i && lines[j - 1].trim().is_empty() { + while j > i && lines.get(j - 1).trim().is_empty() { j -= 1; } - return lines.slice(i, j).to_owned(); + return lines.slice(i, j).iter().map(|x| (*x).clone()).collect(); } /// 
remove a "[ \t]*\*" block from each line, if possible - fn horizontal_trim(lines: ~[~str]) -> ~[~str] { + fn horizontal_trim(lines: Vec<~str> ) -> Vec<~str> { let mut i = uint::MAX; let mut can_trim = true; let mut first = true; @@ -122,7 +123,7 @@ pub fn strip_doc_comment_decoration(comment: &str) -> ~str { let lines = comment.slice(3u, comment.len() - 2u) .lines_any() .map(|s| s.to_owned()) - .collect::<~[~str]>(); + .collect:: >(); let lines = vertical_trim(lines); let lines = horizontal_trim(lines); @@ -157,9 +158,9 @@ fn consume_non_eol_whitespace(rdr: &StringReader) { } } -fn push_blank_line_comment(rdr: &StringReader, comments: &mut ~[Comment]) { +fn push_blank_line_comment(rdr: &StringReader, comments: &mut Vec ) { debug!(">>> blank-line comment"); - let v: ~[~str] = ~[]; + let v: Vec<~str> = Vec::new(); comments.push(Comment { style: BlankLine, lines: v, @@ -168,7 +169,7 @@ fn push_blank_line_comment(rdr: &StringReader, comments: &mut ~[Comment]) { } fn consume_whitespace_counting_blank_lines(rdr: &StringReader, - comments: &mut ~[Comment]) { + comments: &mut Vec ) { while is_whitespace(rdr.curr.get()) && !is_eof(rdr) { if rdr.col.get() == CharPos(0u) && rdr.curr_is('\n') { push_blank_line_comment(rdr, &mut *comments); @@ -179,22 +180,22 @@ fn consume_whitespace_counting_blank_lines(rdr: &StringReader, fn read_shebang_comment(rdr: &StringReader, code_to_the_left: bool, - comments: &mut ~[Comment]) { + comments: &mut Vec ) { debug!(">>> shebang comment"); let p = rdr.last_pos.get(); debug!("<<< shebang comment"); comments.push(Comment { style: if code_to_the_left { Trailing } else { Isolated }, - lines: ~[read_one_line_comment(rdr)], + lines: vec!(read_one_line_comment(rdr)), pos: p }); } fn read_line_comments(rdr: &StringReader, code_to_the_left: bool, - comments: &mut ~[Comment]) { + comments: &mut Vec ) { debug!(">>> line comments"); let p = rdr.last_pos.get(); - let mut lines: ~[~str] = ~[]; + let mut lines: Vec<~str> = Vec::new(); while rdr.curr_is('/') && nextch_is(rdr, '/') { let line = read_one_line_comment(rdr); debug!("{}", line); @@ -232,7 +233,7 @@ fn all_whitespace(s: &str, col: CharPos) -> Option { return Some(cursor); } -fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str], +fn trim_whitespace_prefix_and_push_line(lines: &mut Vec<~str> , s: ~str, col: CharPos) { let len = s.len(); let s1 = match all_whitespace(s, col) { @@ -249,10 +250,10 @@ fn trim_whitespace_prefix_and_push_line(lines: &mut ~[~str], fn read_block_comment(rdr: &StringReader, code_to_the_left: bool, - comments: &mut ~[Comment]) { + comments: &mut Vec ) { debug!(">>> block comment"); let p = rdr.last_pos.get(); - let mut lines: ~[~str] = ~[]; + let mut lines: Vec<~str> = Vec::new(); let col: CharPos = rdr.col.get(); bump(rdr); bump(rdr); @@ -324,7 +325,7 @@ fn peeking_at_comment(rdr: &StringReader) -> bool { fn consume_comment(rdr: &StringReader, code_to_the_left: bool, - comments: &mut ~[Comment]) { + comments: &mut Vec ) { debug!(">>> consume comment"); if rdr.curr_is('/') && nextch_is(rdr, '/') { read_line_comments(rdr, code_to_the_left, comments); @@ -348,15 +349,15 @@ pub fn gather_comments_and_literals(span_diagnostic: @diagnostic::SpanHandler, path: ~str, srdr: &mut io::Reader) - -> (~[Comment], ~[Literal]) { + -> (Vec , Vec ) { let src = srdr.read_to_end().unwrap(); let src = str::from_utf8_owned(src).unwrap(); let cm = CodeMap::new(); let filemap = cm.new_filemap(path, src); let rdr = lexer::new_low_level_string_reader(span_diagnostic, filemap); - let mut comments: ~[Comment] = ~[]; - 
let mut literals: ~[Literal] = ~[]; + let mut comments: Vec = Vec::new(); + let mut literals: Vec = Vec::new(); let mut first_read: bool = true; while !is_eof(&rdr) { loop { diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs index 87706df5e31d0..884fc306f22ea 100644 --- a/src/libsyntax/parse/lexer.rs +++ b/src/libsyntax/parse/lexer.rs @@ -1005,6 +1005,7 @@ mod test { use parse::token; use parse::token::{str_to_ident}; use std::io::util; + use std::vec_ng::Vec; // represents a testing reader (incl. both reader and interner) struct Env { @@ -1048,7 +1049,7 @@ mod test { // check that the given reader produces the desired stream // of tokens (stop checking after exhausting the expected vec) - fn check_tokenization (env: Env, expected: ~[token::Token]) { + fn check_tokenization (env: Env, expected: Vec ) { for expected_tok in expected.iter() { let TokenAndSpan {tok:actual_tok, sp: _} = env.string_reader.next_token(); @@ -1064,32 +1065,32 @@ mod test { #[test] fn doublecolonparsing () { let env = setup (~"a b"); check_tokenization (env, - ~[mk_ident("a",false), - mk_ident("b",false)]); + vec!(mk_ident("a",false), + mk_ident("b",false))); } #[test] fn dcparsing_2 () { let env = setup (~"a::b"); check_tokenization (env, - ~[mk_ident("a",true), + vec!(mk_ident("a",true), token::MOD_SEP, - mk_ident("b",false)]); + mk_ident("b",false))); } #[test] fn dcparsing_3 () { let env = setup (~"a ::b"); check_tokenization (env, - ~[mk_ident("a",false), + vec!(mk_ident("a",false), token::MOD_SEP, - mk_ident("b",false)]); + mk_ident("b",false))); } #[test] fn dcparsing_4 () { let env = setup (~"a:: b"); check_tokenization (env, - ~[mk_ident("a",true), + vec!(mk_ident("a",true), token::MOD_SEP, - mk_ident("b",false)]); + mk_ident("b",false))); } #[test] fn character_a() { diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 8f45f91148453..9e5db1770bf31 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -21,6 +21,7 @@ use parse::parser::Parser; use std::cell::RefCell; use std::io::File; use std::str; +use std::vec_ng::Vec; pub mod lexer; pub mod parser; @@ -42,7 +43,7 @@ pub struct ParseSess { cm: @codemap::CodeMap, // better be the same as the one in the reader! span_diagnostic: @SpanHandler, // better be the same as the one in the reader! 
/// Used to determine and report recursive mod inclusions - included_mod_stack: RefCell<~[Path]>, + included_mod_stack: RefCell >, } pub fn new_parse_sess() -> @ParseSess { @@ -50,7 +51,7 @@ pub fn new_parse_sess() -> @ParseSess { @ParseSess { cm: cm, span_diagnostic: mk_span_handler(default_handler(), cm), - included_mod_stack: RefCell::new(~[]), + included_mod_stack: RefCell::new(Vec::new()), } } @@ -60,7 +61,7 @@ pub fn new_parse_sess_special_handler(sh: @SpanHandler, @ParseSess { cm: cm, span_diagnostic: sh, - included_mod_stack: RefCell::new(~[]), + included_mod_stack: RefCell::new(Vec::new()), } } @@ -82,7 +83,7 @@ pub fn parse_crate_attrs_from_file( input: &Path, cfg: ast::CrateConfig, sess: @ParseSess -) -> ~[ast::Attribute] { +) -> Vec { let mut parser = new_parser_from_file(sess, cfg, input); let (inner, _) = parser.parse_inner_attrs_and_next(); return inner; @@ -104,7 +105,7 @@ pub fn parse_crate_attrs_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, sess: @ParseSess) - -> ~[ast::Attribute] { + -> Vec { let mut p = new_parser_from_source_str(sess, cfg, name, @@ -144,7 +145,7 @@ pub fn parse_meta_from_source_str(name: ~str, pub fn parse_stmt_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, - attrs: ~[ast::Attribute], + attrs: Vec , sess: @ParseSess) -> @ast::Stmt { let mut p = new_parser_from_source_str( @@ -160,7 +161,7 @@ pub fn parse_tts_from_source_str(name: ~str, source: ~str, cfg: ast::CrateConfig, sess: @ParseSess) - -> ~[ast::TokenTree] { + -> Vec { let mut p = new_parser_from_source_str( sess, cfg, @@ -214,7 +215,7 @@ pub fn filemap_to_parser(sess: @ParseSess, // compiler expands into it pub fn new_parser_from_tts(sess: @ParseSess, cfg: ast::CrateConfig, - tts: ~[ast::TokenTree]) -> Parser { + tts: Vec ) -> Parser { tts_to_parser(sess,tts,cfg) } @@ -256,10 +257,10 @@ pub fn string_to_filemap(sess: @ParseSess, source: ~str, path: ~str) // given a filemap, produce a sequence of token-trees pub fn filemap_to_tts(sess: @ParseSess, filemap: @FileMap) - -> ~[ast::TokenTree] { + -> Vec { // it appears to me that the cfg doesn't matter here... indeed, // parsing tt's probably shouldn't require a parser at all. 
- let cfg = ~[]; + let cfg = Vec::new(); let srdr = lexer::new_string_reader(sess.span_diagnostic, filemap); let mut p1 = Parser(sess, cfg, ~srdr); p1.parse_all_token_trees() @@ -267,7 +268,7 @@ pub fn filemap_to_tts(sess: @ParseSess, filemap: @FileMap) // given tts and cfg, produce a parser pub fn tts_to_parser(sess: @ParseSess, - tts: ~[ast::TokenTree], + tts: Vec , cfg: ast::CrateConfig) -> Parser { let trdr = lexer::new_tt_reader(sess.span_diagnostic, None, tts); Parser(sess, cfg, ~trdr) @@ -288,6 +289,7 @@ mod test { use std::io; use std::io::MemWriter; use std::str; + use std::vec_ng::Vec; use codemap::{Span, BytePos, Spanned}; use opt_vec; use ast; @@ -318,13 +320,13 @@ mod test { node: ast::ExprPath(ast::Path { span: sp(0, 1), global: false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("a"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }), span: sp(0, 1) }) @@ -337,7 +339,7 @@ mod test { node: ast::ExprPath(ast::Path { span: sp(0, 6), global: true, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("a"), lifetimes: opt_vec::Empty, @@ -348,7 +350,7 @@ mod test { lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ] + ) }), span: sp(0, 6) }) @@ -362,27 +364,28 @@ mod test { // check the token-tree-ization of macros #[test] fn string_to_tts_macro () { let tts = string_to_tts(~"macro_rules! zip (($a)=>($a))"); - let tts: &[ast::TokenTree] = tts; + let tts: &[ast::TokenTree] = tts.as_slice(); match tts { [ast::TTTok(_,_), ast::TTTok(_,token::NOT), ast::TTTok(_,_), ast::TTDelim(delim_elts)] => { - let delim_elts: &[ast::TokenTree] = *delim_elts; + let delim_elts: &[ast::TokenTree] = delim_elts.as_slice(); match delim_elts { [ast::TTTok(_,token::LPAREN), ast::TTDelim(first_set), ast::TTTok(_,token::FAT_ARROW), ast::TTDelim(second_set), ast::TTTok(_,token::RPAREN)] => { - let first_set: &[ast::TokenTree] = *first_set; + let first_set: &[ast::TokenTree] = + first_set.as_slice(); match first_set { [ast::TTTok(_,token::LPAREN), ast::TTTok(_,token::DOLLAR), ast::TTTok(_,_), ast::TTTok(_,token::RPAREN)] => { let second_set: &[ast::TokenTree] = - *second_set; + second_set.as_slice(); match second_set { [ast::TTTok(_,token::LPAREN), ast::TTTok(_,token::DOLLAR), @@ -550,13 +553,13 @@ mod test { node:ast::ExprPath(ast::Path{ span: sp(7, 8), global: false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("d"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }), span:sp(7,8) })), @@ -572,13 +575,13 @@ mod test { node: ast::ExprPath(ast::Path { span:sp(0,1), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("b"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }), span: sp(0,1)}, ast::DUMMY_NODE_ID), @@ -599,13 +602,13 @@ mod test { ast::Path { span:sp(0,1), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("b"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }, None /* no idea */), span: sp(0,1)}); @@ -618,22 +621,22 @@ mod test { assert!(string_to_item(~"fn a (b : int) { b; }") == Some( @ast::Item{ident:str_to_ident("a"), - attrs:~[], + attrs:Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemFn(ast::P(ast::FnDecl { - inputs: ~[ast::Arg{ + inputs: vec!(ast::Arg{ ty: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID, node: ast::TyPath(ast::Path{ span:sp(10,13), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("int"), lifetimes: opt_vec::Empty, 
types: opt_vec::Empty, } - ], + ), }, None, ast::DUMMY_NODE_ID), span:sp(10,13) }), @@ -644,21 +647,21 @@ mod test { ast::Path { span:sp(6,7), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident("b"), lifetimes: opt_vec::Empty, types: opt_vec::Empty, } - ], + ), }, None // no idea ), span: sp(6,7) }, id: ast::DUMMY_NODE_ID - }], + }), output: ast::P(ast::Ty{id: ast::DUMMY_NODE_ID, node: ast::TyNil, span:sp(15,15)}), // not sure @@ -672,15 +675,15 @@ mod test { ty_params: opt_vec::Empty, }, ast::P(ast::Block { - view_items: ~[], - stmts: ~[@Spanned{ + view_items: Vec::new(), + stmts: vec!(@Spanned{ node: ast::StmtSemi(@ast::Expr{ id: ast::DUMMY_NODE_ID, node: ast::ExprPath( ast::Path{ span:sp(17,18), global:false, - segments: ~[ + segments: vec!( ast::PathSegment { identifier: str_to_ident( @@ -690,11 +693,11 @@ mod test { types: opt_vec::Empty } - ], + ), }), span: sp(17,18)}, ast::DUMMY_NODE_ID), - span: sp(17,18)}], + span: sp(17,18)}), expr: None, id: ast::DUMMY_NODE_ID, rules: ast::DefaultBlock, // no idea diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 2fd6d34adf195..9b209aadf19e5 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -82,7 +82,8 @@ use std::cell::Cell; use collections::HashSet; use std::kinds::marker; use std::mem::replace; -use std::vec; +use std::vec_ng::Vec; +use std::vec_ng; #[allow(non_camel_case_types)] #[deriving(Eq)] @@ -93,7 +94,7 @@ enum restriction { RESTRICT_NO_BAR_OR_DOUBLEBAR_OP, } -type ItemInfo = (Ident, Item_, Option<~[Attribute]>); +type ItemInfo = (Ident, Item_, Option >); /// How to parse a path. There are four different kinds of paths, all of which /// are parsed somewhat differently. @@ -129,7 +130,7 @@ pub struct PathAndBounds { enum ItemOrViewItem { // Indicates a failure to parse any kind of item. The attributes are // returned. - IoviNone(~[Attribute]), + IoviNone(Vec ), IoviItem(@Item), IoviForeignItem(@ForeignItem), IoviViewItem(ViewItem) @@ -257,7 +258,7 @@ macro_rules! maybe_whole ( }; match __found__ { Some(INTERPOLATED(token::$constructor(x))) => { - return (~[], x) + return (Vec::new(), x) } _ => {} } @@ -266,21 +267,20 @@ macro_rules! maybe_whole ( ) -fn maybe_append(lhs: ~[Attribute], rhs: Option<~[Attribute]>) - -> ~[Attribute] { +fn maybe_append(lhs: Vec , rhs: Option >) + -> Vec { match rhs { None => lhs, - Some(ref attrs) => vec::append(lhs, (*attrs)) + Some(ref attrs) => vec_ng::append(lhs, attrs.as_slice()) } } struct ParsedItemsAndViewItems { - attrs_remaining: ~[Attribute], - view_items: ~[ViewItem], - items: ~[@Item], - foreign_items: ~[@ForeignItem] -} + attrs_remaining: Vec , + view_items: Vec , + items: Vec<@Item> , + foreign_items: Vec<@ForeignItem> } /* ident is handled by common.rs */ @@ -314,8 +314,8 @@ pub fn Parser(sess: @ParseSess, cfg: ast::CrateConfig, rdr: ~Reader:) restriction: UNRESTRICTED, quote_depth: 0, obsolete_set: HashSet::new(), - mod_path_stack: ~[], - open_braces: ~[], + mod_path_stack: Vec::new(), + open_braces: Vec::new(), nopod: marker::NoPod } } @@ -343,9 +343,9 @@ pub struct Parser { /// extra detail when the same error is seen twice obsolete_set: HashSet, /// Used to determine the path to externally loaded source files - mod_path_stack: ~[InternedString], + mod_path_stack: Vec , /// Stack of spans of open delimiters. Used for error message. 
- open_braces: ~[Span], + open_braces: Vec , /* do not copy the parser; its state is tied to outside state */ priv nopod: marker::NoPod } @@ -407,8 +407,11 @@ impl Parser { } else if inedible.contains(&self.token) { // leave it in the input } else { - let expected = vec::append(edible.to_owned(), inedible); - let expect = tokens_to_str(expected); + let expected = vec_ng::append(edible.iter() + .map(|x| (*x).clone()) + .collect(), + inedible); + let expect = tokens_to_str(expected.as_slice()); let actual = self.this_token_to_str(); self.fatal( if expected.len() != 1 { @@ -446,8 +449,12 @@ impl Parser { match e.node { ExprPath(..) => { // might be unit-struct construction; check for recoverableinput error. - let expected = vec::append(edible.to_owned(), inedible); - self.check_for_erroneous_unit_struct_expecting(expected); + let expected = vec_ng::append(edible.iter() + .map(|x| (*x).clone()) + .collect(), + inedible); + self.check_for_erroneous_unit_struct_expecting( + expected.as_slice()); } _ => {} } @@ -465,8 +472,12 @@ impl Parser { debug!("commit_stmt {:?}", s); let _s = s; // unused, but future checks might want to inspect `s`. if self.last_token.as_ref().map_or(false, |t| is_ident_or_path(*t)) { - let expected = vec::append(edible.to_owned(), inedible); - self.check_for_erroneous_unit_struct_expecting(expected); + let expected = vec_ng::append(edible.iter() + .map(|x| (*x).clone()) + .collect(), + inedible.as_slice()); + self.check_for_erroneous_unit_struct_expecting( + expected.as_slice()); } self.expect_one_of(edible, inedible) } @@ -578,9 +589,9 @@ impl Parser { &mut self, sep: &token::Token, f: |&mut Parser| -> T) - -> ~[T] { + -> Vec { let mut first = true; - let mut vector = ~[]; + let mut vector = Vec::new(); while self.token != token::BINOP(token::OR) && self.token != token::OROR { if first { @@ -655,7 +666,7 @@ impl Parser { ket: &token::Token, sep: SeqSep, f: |&mut Parser| -> T) - -> ~[T] { + -> Vec { let val = self.parse_seq_to_before_end(ket, sep, f); self.bump(); val @@ -669,9 +680,9 @@ impl Parser { ket: &token::Token, sep: SeqSep, f: |&mut Parser| -> T) - -> ~[T] { + -> Vec { let mut first: bool = true; - let mut v: ~[T] = ~[]; + let mut v: Vec = Vec::new(); while self.token != *ket { match sep.sep { Some(ref t) => { @@ -695,7 +706,7 @@ impl Parser { ket: &token::Token, sep: SeqSep, f: |&mut Parser| -> T) - -> ~[T] { + -> Vec { self.expect(bra); let result = self.parse_seq_to_before_end(ket, sep, f); self.bump(); @@ -710,7 +721,7 @@ impl Parser { ket: &token::Token, sep: SeqSep, f: |&mut Parser| -> T) - -> Spanned<~[T]> { + -> Spanned > { let lo = self.span.lo; self.expect(bra); let result = self.parse_seq_to_before_end(ket, sep, f); @@ -950,7 +961,7 @@ impl Parser { }; let inputs = if self.eat(&token::OROR) { - ~[] + Vec::new() } else { self.expect_or(); let inputs = self.parse_seq_to_before_or( @@ -1034,7 +1045,7 @@ impl Parser { } // parse the methods in a trait declaration - pub fn parse_trait_methods(&mut self) -> ~[TraitMethod] { + pub fn parse_trait_methods(&mut self) -> Vec { self.parse_unspanned_seq( &token::LBRACE, &token::RBRACE, @@ -1083,7 +1094,7 @@ impl Parser { debug!("parse_trait_methods(): parsing provided method"); let (inner_attrs, body) = p.parse_inner_attrs_and_block(); - let attrs = vec::append(attrs, inner_attrs); + let attrs = vec_ng::append(attrs, inner_attrs.as_slice()); Provided(@ast::Method { ident: ident, attrs: attrs, @@ -1176,7 +1187,7 @@ impl Parser { // (t) is a parenthesized ty // (t,) is the type of a tuple with only one field, // of 
type t - let mut ts = ~[self.parse_ty(false)]; + let mut ts = vec!(self.parse_ty(false)); let mut one_tuple = false; while self.token == token::COMMA { self.bump(); @@ -1190,7 +1201,7 @@ impl Parser { if ts.len() == 1 && !one_tuple { self.expect(&token::RPAREN); - return ts[0] + return *ts.get(0) } let t = TyTup(ts); @@ -1479,7 +1490,7 @@ impl Parser { // Parse any number of segments and bound sets. A segment is an // identifier followed by an optional lifetime and a set of types. // A bound set is a set of type parameter bounds. - let mut segments = ~[]; + let mut segments = Vec::new(); loop { // First, parse an identifier. let identifier = self.parse_ident(); @@ -1541,7 +1552,7 @@ impl Parser { let span = mk_sp(lo, self.last_span.hi); // Assemble the path segments. - let mut path_segments = ~[]; + let mut path_segments = Vec::new(); let mut bounds = None; let last_segment_index = segments.len() - 1; for (i, segment_and_bounds) in segments.move_iter().enumerate() { @@ -1690,11 +1701,11 @@ impl Parser { ExprBinary(binop, lhs, rhs) } - pub fn mk_call(&mut self, f: @Expr, args: ~[@Expr]) -> ast::Expr_ { + pub fn mk_call(&mut self, f: @Expr, args: Vec<@Expr> ) -> ast::Expr_ { ExprCall(f, args) } - fn mk_method_call(&mut self, ident: Ident, tps: ~[P], args: ~[@Expr]) -> ast::Expr_ { + fn mk_method_call(&mut self, ident: Ident, tps: Vec> , args: Vec<@Expr> ) -> ast::Expr_ { ExprMethodCall(ident, tps, args) } @@ -1702,7 +1713,7 @@ impl Parser { ExprIndex(expr, idx) } - pub fn mk_field(&mut self, expr: @Expr, ident: Ident, tys: ~[P]) -> ast::Expr_ { + pub fn mk_field(&mut self, expr: @Expr, ident: Ident, tys: Vec> ) -> ast::Expr_ { ExprField(expr, ident, tys) } @@ -1754,7 +1765,7 @@ impl Parser { let lit = @spanned(lo, hi, LitNil); return self.mk_expr(lo, hi, ExprLit(lit)); } - let mut es = ~[self.parse_expr()]; + let mut es = vec!(self.parse_expr()); self.commit_expr(*es.last().unwrap(), &[], &[token::COMMA, token::RPAREN]); while self.token == token::COMMA { self.bump(); @@ -1770,7 +1781,7 @@ impl Parser { self.commit_expr_expecting(*es.last().unwrap(), token::RPAREN); return if es.len() == 1 && !trailing_comma { - self.mk_expr(lo, hi, ExprParen(es[0])) + self.mk_expr(lo, hi, ExprParen(*es.get(0))) } else { self.mk_expr(lo, hi, ExprTup(es)) @@ -1786,8 +1797,8 @@ impl Parser { let decl = self.parse_proc_decl(); let body = self.parse_expr(); let fakeblock = P(ast::Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(body), id: ast::DUMMY_NODE_ID, rules: DefaultBlock, @@ -1840,7 +1851,7 @@ impl Parser { if self.token == token::RBRACKET { // Empty vector. self.bump(); - ex = ExprVec(~[], mutbl); + ex = ExprVec(Vec::new(), mutbl); } else { // Nonempty vector. let first_expr = self.parse_expr(); @@ -1860,11 +1871,13 @@ impl Parser { seq_sep_trailing_allowed(token::COMMA), |p| p.parse_expr() ); - ex = ExprVec(~[first_expr] + remaining_exprs, mutbl); + let mut exprs = vec!(first_expr); + exprs.push_all_move(remaining_exprs); + ex = ExprVec(exprs, mutbl); } else { // Vector with one element. self.expect(&token::RBRACKET); - ex = ExprVec(~[first_expr], mutbl); + ex = ExprVec(vec!(first_expr), mutbl); } } hi = self.last_span.hi; @@ -1919,7 +1932,7 @@ impl Parser { if self.looking_at_struct_literal() { // It's a struct literal. 
self.bump(); - let mut fields = ~[]; + let mut fields = Vec::new(); let mut base = None; while self.token != token::RBRACE { @@ -1981,7 +1994,7 @@ impl Parser { self.expect(&token::LT); self.parse_generic_values_after_lt() } else { - (opt_vec::Empty, ~[]) + (opt_vec::Empty, Vec::new()) }; // expr.f() method call @@ -2143,7 +2156,7 @@ impl Parser { // Parse the open delimiter. self.open_braces.push(self.span); - let mut result = ~[parse_any_tt_tok(self)]; + let mut result = vec!(parse_any_tt_tok(self)); let trees = self.parse_seq_to_before_end(&close_delim, @@ -2163,15 +2176,15 @@ impl Parser { // parse a stream of tokens into a list of TokenTree's, // up to EOF. - pub fn parse_all_token_trees(&mut self) -> ~[TokenTree] { - let mut tts = ~[]; + pub fn parse_all_token_trees(&mut self) -> Vec { + let mut tts = Vec::new(); while self.token != token::EOF { tts.push(self.parse_token_tree()); } tts } - pub fn parse_matchers(&mut self) -> ~[Matcher] { + pub fn parse_matchers(&mut self) -> Vec { // unification of Matcher's and TokenTree's would vastly improve // the interpolation of Matcher's maybe_whole!(self, NtMatchers); @@ -2192,8 +2205,8 @@ impl Parser { pub fn parse_matcher_subseq_upto(&mut self, name_idx: @Cell, ket: &token::Token) - -> ~[Matcher] { - let mut ret_val = ~[]; + -> Vec { + let mut ret_val = Vec::new(); let mut lparens = 0u; while self.token != *ket || lparens > 0u { @@ -2478,7 +2491,7 @@ impl Parser { _ => { // No argument list - `do foo {` P(FnDecl { - inputs: ~[], + inputs: Vec::new(), output: P(Ty { id: ast::DUMMY_NODE_ID, node: TyInfer, @@ -2513,8 +2526,8 @@ impl Parser { let decl = parse_decl(self); let body = parse_body(self); let fakeblock = P(ast::Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(body), id: ast::DUMMY_NODE_ID, rules: DefaultBlock, @@ -2601,7 +2614,7 @@ impl Parser { let lo = self.last_span.lo; let discriminant = self.parse_expr(); self.commit_expr_expecting(discriminant, token::LBRACE); - let mut arms: ~[Arm] = ~[]; + let mut arms: Vec = Vec::new(); while self.token != token::RBRACE { let pats = self.parse_pats(); let mut guard = None; @@ -2622,8 +2635,8 @@ impl Parser { } let blk = P(ast::Block { - view_items: ~[], - stmts: ~[], + view_items: Vec::new(), + stmts: Vec::new(), expr: Some(expr), id: ast::DUMMY_NODE_ID, rules: DefaultBlock, @@ -2662,8 +2675,8 @@ impl Parser { } // parse patterns, separated by '|' s - fn parse_pats(&mut self) -> ~[@Pat] { - let mut pats = ~[]; + fn parse_pats(&mut self) -> Vec<@Pat> { + let mut pats = Vec::new(); loop { pats.push(self.parse_pat()); if self.token == token::BINOP(token::OR) { self.bump(); } @@ -2673,10 +2686,10 @@ impl Parser { fn parse_pat_vec_elements( &mut self, - ) -> (~[@Pat], Option<@Pat>, ~[@Pat]) { - let mut before = ~[]; + ) -> (Vec<@Pat> , Option<@Pat>, Vec<@Pat> ) { + let mut before = Vec::new(); let mut slice = None; - let mut after = ~[]; + let mut after = Vec::new(); let mut first = true; let mut before_slice = true; @@ -2733,8 +2746,8 @@ impl Parser { } // parse the fields of a struct-like pattern - fn parse_pat_fields(&mut self) -> (~[ast::FieldPat], bool) { - let mut fields = ~[]; + fn parse_pat_fields(&mut self) -> (Vec , bool) { + let mut fields = Vec::new(); let mut etc = false; let mut first = true; while self.token != token::RBRACE { @@ -2900,7 +2913,7 @@ impl Parser { let expr = self.mk_expr(lo, hi, ExprLit(lit)); pat = PatLit(expr); } else { - let mut fields = ~[self.parse_pat()]; + let mut fields = vec!(self.parse_pat()); if 
self.look_ahead(1, |t| *t != token::RPAREN) { while self.token == token::COMMA { self.bump(); @@ -3002,7 +3015,7 @@ impl Parser { pat = PatStruct(enum_path, fields, etc); } _ => { - let mut args: ~[@Pat] = ~[]; + let mut args: Vec<@Pat> = Vec::new(); match self.token { token::LPAREN => { let is_star = self.look_ahead(1, |t| { @@ -3128,7 +3141,7 @@ impl Parser { // parse a structure field fn parse_name_and_ty(&mut self, pr: Visibility, - attrs: ~[Attribute]) -> StructField { + attrs: Vec ) -> StructField { let lo = self.span.lo; if !is_plain_ident(&self.token) { self.fatal("expected ident"); @@ -3146,7 +3159,7 @@ impl Parser { // parse a statement. may include decl. // precondition: any attributes are parsed already - pub fn parse_stmt(&mut self, item_attrs: ~[Attribute]) -> @Stmt { + pub fn parse_stmt(&mut self, item_attrs: Vec ) -> @Stmt { maybe_whole!(self, NtStmt); fn check_expected_item(p: &mut Parser, found_attrs: bool) { @@ -3229,7 +3242,7 @@ impl Parser { self.mk_item( lo, hi, id /*id is good here*/, ItemMac(spanned(lo, hi, MacInvocTT(pth, tts, EMPTY_CTXT))), - Inherited, ~[/*no attrs*/]))), + Inherited, Vec::new(/*no attrs*/)))), ast::DUMMY_NODE_ID)); } @@ -3275,12 +3288,12 @@ impl Parser { } self.expect(&token::LBRACE); - return self.parse_block_tail_(lo, DefaultBlock, ~[]); + return self.parse_block_tail_(lo, DefaultBlock, Vec::new()); } // parse a block. Inner attrs are allowed. fn parse_inner_attrs_and_block(&mut self) - -> (~[Attribute], P) { + -> (Vec , P) { maybe_whole!(pair_empty self, NtBlock); @@ -3299,13 +3312,13 @@ impl Parser { // necessary, and this should take a qualifier. // some blocks start with "#{"... fn parse_block_tail(&mut self, lo: BytePos, s: BlockCheckMode) -> P { - self.parse_block_tail_(lo, s, ~[]) + self.parse_block_tail_(lo, s, Vec::new()) } // parse the rest of a block expression or function body fn parse_block_tail_(&mut self, lo: BytePos, s: BlockCheckMode, - first_item_attrs: ~[Attribute]) -> P { - let mut stmts = ~[]; + first_item_attrs: Vec ) -> P { + let mut stmts = Vec::new(); let mut expr = None; // wouldn't it be more uniform to parse view items only, here? @@ -3328,12 +3341,12 @@ impl Parser { while self.token != token::RBRACE { // parsing items even when they're not allowed lets us give // better error messages and recover more gracefully. 
- attributes_box.push_all(self.parse_outer_attributes()); + attributes_box.push_all(self.parse_outer_attributes().as_slice()); match self.token { token::SEMI => { if !attributes_box.is_empty() { self.span_err(self.last_span, "expected item after attributes"); - attributes_box = ~[]; + attributes_box = Vec::new(); } self.bump(); // empty } @@ -3342,7 +3355,7 @@ impl Parser { } _ => { let stmt = self.parse_stmt(attributes_box); - attributes_box = ~[]; + attributes_box = Vec::new(); match stmt.node { StmtExpr(e, stmt_id) => { // expression without semicolon @@ -3510,7 +3523,7 @@ impl Parser { } } - fn parse_generic_values_after_lt(&mut self) -> (OptVec, ~[P]) { + fn parse_generic_values_after_lt(&mut self) -> (OptVec, Vec> ) { let lifetimes = self.parse_lifetimes(); let result = self.parse_seq_to_gt( Some(token::COMMA), @@ -3519,9 +3532,9 @@ impl Parser { } fn parse_fn_args(&mut self, named_args: bool, allow_variadic: bool) - -> (~[Arg], bool) { + -> (Vec , bool) { let sp = self.span; - let mut args: ~[Option] = + let mut args: Vec> = self.parse_unspanned_seq( &token::LPAREN, &token::RPAREN, @@ -3716,7 +3729,7 @@ impl Parser { fn_inputs } token::RPAREN => { - ~[Arg::new_self(explicit_self_sp, mutbl_self)] + vec!(Arg::new_self(explicit_self_sp, mutbl_self)) } _ => { let token_str = self.this_token_to_str(); @@ -3749,7 +3762,7 @@ impl Parser { fn parse_fn_block_decl(&mut self) -> P { let inputs_captures = { if self.eat(&token::OROR) { - ~[] + Vec::new() } else { self.parse_unspanned_seq( &token::BINOP(token::OR), @@ -3812,7 +3825,7 @@ impl Parser { fn mk_item(&mut self, lo: BytePos, hi: BytePos, ident: Ident, node: Item_, vis: Visibility, - attrs: ~[Attribute]) -> @Item { + attrs: Vec ) -> @Item { @Item { ident: ident, attrs: attrs, @@ -3832,7 +3845,7 @@ impl Parser { } // parse a method in a trait impl, starting with `attrs` attributes. - fn parse_method(&mut self, already_parsed_attrs: Option<~[Attribute]>) -> @Method { + fn parse_method(&mut self, already_parsed_attrs: Option >) -> @Method { let next_attrs = self.parse_outer_attributes(); let attrs = match already_parsed_attrs { Some(mut a) => { a.push_all_move(next_attrs); a } @@ -3851,7 +3864,7 @@ impl Parser { let (inner_attrs, body) = self.parse_inner_attrs_and_block(); let hi = body.span.hi; - let attrs = vec::append(attrs, inner_attrs); + let attrs = vec_ng::append(attrs, inner_attrs.as_slice()); @ast::Method { ident: ident, attrs: attrs, @@ -3877,7 +3890,7 @@ impl Parser { self.bump(); traits = self.parse_trait_ref_list(&token::LBRACE); } else { - traits = ~[]; + traits = Vec::new(); } let meths = self.parse_trait_methods(); @@ -3925,7 +3938,7 @@ impl Parser { None }; - let mut meths = ~[]; + let mut meths = Vec::new(); self.expect(&token::LBRACE); let (inner_attrs, next) = self.parse_inner_attrs_and_next(); let mut method_attrs = Some(next); @@ -3948,7 +3961,7 @@ impl Parser { } // parse B + C<~str,int> + D - fn parse_trait_ref_list(&mut self, ket: &token::Token) -> ~[TraitRef] { + fn parse_trait_ref_list(&mut self, ket: &token::Token) -> Vec { self.parse_seq_to_before_end( ket, seq_sep_trailing_disallowed(token::BINOP(token::PLUS)), @@ -3961,13 +3974,13 @@ impl Parser { let class_name = self.parse_ident(); let generics = self.parse_generics(); - let mut fields: ~[StructField]; + let mut fields: Vec ; let is_tuple_like; if self.eat(&token::LBRACE) { // It's a record-like struct. 
is_tuple_like = false; - fields = ~[]; + fields = Vec::new(); while self.token != token::RBRACE { fields.push(self.parse_struct_decl_field()); } @@ -3998,7 +4011,7 @@ impl Parser { } else if self.eat(&token::SEMI) { // It's a unit-like struct. is_tuple_like = true; - fields = ~[]; + fields = Vec::new(); } else { let token_str = self.this_token_to_str(); self.fatal(format!("expected `\\{`, `(`, or `;` after struct \ @@ -4019,7 +4032,7 @@ impl Parser { // parse a structure field declaration pub fn parse_single_struct_field(&mut self, vis: Visibility, - attrs: ~[Attribute]) + attrs: Vec ) -> StructField { let a_var = self.parse_name_and_ty(vis, attrs); match self.token { @@ -4064,7 +4077,7 @@ impl Parser { // attributes (of length 0 or 1), parse all of the items in a module fn parse_mod_items(&mut self, term: token::Token, - first_item_attrs: ~[Attribute]) + first_item_attrs: Vec ) -> Mod { // parse all of the items up to closing or an attribute. // view items are legal here. @@ -4074,7 +4087,7 @@ impl Parser { items: starting_items, .. } = self.parse_items_and_view_items(first_item_attrs, true, true); - let mut items: ~[@Item] = starting_items; + let mut items: Vec<@Item> = starting_items; let attrs_remaining_len = attrs_remaining.len(); // don't think this other loop is even necessary.... @@ -4083,7 +4096,8 @@ impl Parser { while self.token != term { let mut attrs = self.parse_outer_attributes(); if first { - attrs = attrs_remaining + attrs; + attrs = vec_ng::append(attrs_remaining.clone(), + attrs.as_slice()); first = false; } debug!("parse_mod_items: parse_item_or_view_item(attrs={:?})", @@ -4162,10 +4176,10 @@ impl Parser { id: ast::Ident, outer_attrs: &[ast::Attribute], id_sp: Span) - -> (ast::Item_, ~[ast::Attribute]) { + -> (ast::Item_, Vec ) { let mut prefix = Path::new(self.sess.cm.span_to_filename(self.span)); prefix.pop(); - let mod_path = Path::new(".").join_many(self.mod_path_stack); + let mod_path = Path::new(".").join_many(self.mod_path_stack.as_slice()); let dir_path = prefix.join(&mod_path); let file_path = match ::attr::first_attr_value_str_by_name( outer_attrs, "path") { @@ -4195,14 +4209,14 @@ impl Parser { }; self.eval_src_mod_from_path(file_path, - outer_attrs.to_owned(), + outer_attrs.iter().map(|x| *x).collect(), id_sp) } fn eval_src_mod_from_path(&mut self, path: Path, - outer_attrs: ~[ast::Attribute], - id_sp: Span) -> (ast::Item_, ~[ast::Attribute]) { + outer_attrs: Vec , + id_sp: Span) -> (ast::Item_, Vec ) { { let mut included_mod_stack = self.sess .included_mod_stack @@ -4232,7 +4246,7 @@ impl Parser { &path, id_sp); let (inner, next) = p0.parse_inner_attrs_and_next(); - let mod_attrs = vec::append(outer_attrs, inner); + let mod_attrs = vec_ng::append(outer_attrs, inner.as_slice()); let first_item_outer_attrs = next; let m0 = p0.parse_mod_items(token::EOF, first_item_outer_attrs); { @@ -4246,7 +4260,7 @@ impl Parser { // parse a function declaration from a foreign module fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, - attrs: ~[Attribute]) -> @ForeignItem { + attrs: Vec ) -> @ForeignItem { let lo = self.span.lo; // Parse obsolete purity. @@ -4269,7 +4283,7 @@ impl Parser { // parse a static item from a foreign module fn parse_item_foreign_static(&mut self, vis: ast::Visibility, - attrs: ~[Attribute]) -> @ForeignItem { + attrs: Vec ) -> @ForeignItem { let lo = self.span.lo; self.expect_keyword(keywords::Static); @@ -4303,7 +4317,7 @@ impl Parser { // parse_foreign_items. 
fn parse_foreign_mod_items(&mut self, abis: AbiSet, - first_item_attrs: ~[Attribute]) + first_item_attrs: Vec ) -> ForeignMod { let ParsedItemsAndViewItems { attrs_remaining: attrs_remaining, @@ -4332,7 +4346,7 @@ impl Parser { fn parse_item_extern_crate(&mut self, lo: BytePos, visibility: Visibility, - attrs: ~[Attribute]) + attrs: Vec ) -> ItemOrViewItem { let (maybe_path, ident) = match self.token { @@ -4377,7 +4391,7 @@ impl Parser { lo: BytePos, opt_abis: Option, visibility: Visibility, - attrs: ~[Attribute]) + attrs: Vec ) -> ItemOrViewItem { self.expect(&token::LBRACE); @@ -4410,7 +4424,7 @@ impl Parser { // parse a structure-like enum variant definition // this should probably be renamed or refactored... fn parse_struct_def(&mut self) -> @StructDef { - let mut fields: ~[StructField] = ~[]; + let mut fields: Vec = Vec::new(); while self.token != token::RBRACE { fields.push(self.parse_struct_decl_field()); } @@ -4424,7 +4438,7 @@ impl Parser { // parse the part of an "enum" decl following the '{' fn parse_enum_def(&mut self, _generics: &ast::Generics) -> EnumDef { - let mut variants = ~[]; + let mut variants = Vec::new(); let mut all_nullary = true; let mut have_disr = false; while self.token != token::RBRACE { @@ -4435,7 +4449,7 @@ impl Parser { let ident; let kind; - let mut args = ~[]; + let mut args = Vec::new(); let mut disr_expr = None; ident = self.parse_ident(); if self.eat(&token::LBRACE) { @@ -4462,7 +4476,7 @@ impl Parser { disr_expr = Some(self.parse_expr()); kind = TupleVariantKind(args); } else { - kind = TupleVariantKind(~[]); + kind = TupleVariantKind(Vec::new()); } let vr = ast::Variant_ { @@ -4551,13 +4565,13 @@ impl Parser { // NB: this function no longer parses the items inside an // extern crate. fn parse_item_or_view_item(&mut self, - attrs: ~[Attribute], + attrs: Vec , macros_allowed: bool) -> ItemOrViewItem { match self.token { INTERPOLATED(token::NtItem(item)) => { self.bump(); - let new_attrs = vec::append(attrs, item.attrs); + let new_attrs = vec_ng::append(attrs, item.attrs.as_slice()); return IoviItem(@Item { attrs: new_attrs, ..(*item).clone() @@ -4663,7 +4677,8 @@ impl Parser { } if self.eat_keyword(keywords::Mod) { // MODULE ITEM - let (ident, item_, extra_attrs) = self.parse_item_mod(attrs); + let (ident, item_, extra_attrs) = + self.parse_item_mod(attrs.as_slice()); let item = self.mk_item(lo, self.last_span.hi, ident, @@ -4732,7 +4747,7 @@ impl Parser { // parse a foreign item; on failure, return IoviNone. fn parse_foreign_item(&mut self, - attrs: ~[Attribute], + attrs: Vec , macros_allowed: bool) -> ItemOrViewItem { maybe_whole!(iovi self, NtItem); @@ -4756,7 +4771,7 @@ impl Parser { // this is the fall-through for parsing items. 
fn parse_macro_use_or_failure( &mut self, - attrs: ~[Attribute], + attrs: Vec , macros_allowed: bool, lo: BytePos, visibility: Visibility @@ -4820,7 +4835,7 @@ impl Parser { return IoviNone(attrs); } - pub fn parse_item(&mut self, attrs: ~[Attribute]) -> Option<@Item> { + pub fn parse_item(&mut self, attrs: Vec ) -> Option<@Item> { match self.parse_item_or_view_item(attrs, true) { IoviNone(_) => None, IoviViewItem(_) => @@ -4854,20 +4869,20 @@ impl Parser { let path = ast::Path { span: mk_sp(lo, self.span.hi), global: false, - segments: ~[] + segments: Vec::new() }; return @spanned(lo, self.span.hi, ViewPathList(path, idents, ast::DUMMY_NODE_ID)); } let first_ident = self.parse_ident(); - let mut path = ~[first_ident]; + let mut path = vec!(first_ident); match self.token { token::EQ => { // x = foo::bar self.bump(); let path_lo = self.span.lo; - path = ~[self.parse_ident()]; + path = vec!(self.parse_ident()); while self.token == token::MOD_SEP { self.bump(); let id = self.parse_ident(); @@ -4947,7 +4962,7 @@ impl Parser { } _ => () } - let last = path[path.len() - 1u]; + let last = *path.get(path.len() - 1u); let path = ast::Path { span: mk_sp(lo, self.span.hi), global: false, @@ -4965,8 +4980,8 @@ impl Parser { } // matches view_paths = view_path | view_path , view_paths - fn parse_view_paths(&mut self) -> ~[@ViewPath] { - let mut vp = ~[self.parse_view_path()]; + fn parse_view_paths(&mut self) -> Vec<@ViewPath> { + let mut vp = vec!(self.parse_view_path()); while self.token == token::COMMA { self.bump(); self.obsolete(self.last_span, ObsoleteMultipleImport); @@ -4980,15 +4995,16 @@ impl Parser { // - mod_items uses extern_mod_allowed = true // - block_tail_ uses extern_mod_allowed = false fn parse_items_and_view_items(&mut self, - first_item_attrs: ~[Attribute], + first_item_attrs: Vec , mut extern_mod_allowed: bool, macros_allowed: bool) -> ParsedItemsAndViewItems { - let mut attrs = vec::append(first_item_attrs, - self.parse_outer_attributes()); + let mut attrs = vec_ng::append(first_item_attrs, + self.parse_outer_attributes() + .as_slice()); // First, parse view items. - let mut view_items : ~[ast::ViewItem] = ~[]; - let mut items = ~[]; + let mut view_items : Vec = Vec::new(); + let mut items = Vec::new(); // I think this code would probably read better as a single // loop with a mutable three-state-variable (for extern crates, @@ -5001,7 +5017,7 @@ impl Parser { attrs_remaining: attrs, view_items: view_items, items: items, - foreign_items: ~[] + foreign_items: Vec::new() } } IoviViewItem(view_item) => { @@ -5056,18 +5072,19 @@ impl Parser { attrs_remaining: attrs, view_items: view_items, items: items, - foreign_items: ~[] + foreign_items: Vec::new() } } // Parses a sequence of foreign items. 
Stops when it finds program // text that can't be parsed as an item - fn parse_foreign_items(&mut self, first_item_attrs: ~[Attribute], + fn parse_foreign_items(&mut self, first_item_attrs: Vec , macros_allowed: bool) -> ParsedItemsAndViewItems { - let mut attrs = vec::append(first_item_attrs, - self.parse_outer_attributes()); - let mut foreign_items = ~[]; + let mut attrs = vec_ng::append(first_item_attrs, + self.parse_outer_attributes() + .as_slice()); + let mut foreign_items = Vec::new(); loop { match self.parse_foreign_item(attrs, macros_allowed) { IoviNone(returned_attrs) => { @@ -5095,8 +5112,8 @@ impl Parser { ParsedItemsAndViewItems { attrs_remaining: attrs, - view_items: ~[], - items: ~[], + view_items: Vec::new(), + items: Vec::new(), foreign_items: foreign_items } } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index edc5e613f9116..1499a1b4c19be 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -21,6 +21,7 @@ use std::char; use std::fmt; use std::local_data; use std::path::BytesContainer; +use std::vec_ng::Vec; #[allow(non_camel_case_types)] #[deriving(Clone, Encodable, Decodable, Eq, Hash, Show)] @@ -115,7 +116,7 @@ pub enum Nonterminal { NtAttr(@ast::Attribute), // #[foo] NtPath(~ast::Path), NtTT( @ast::TokenTree), // needs @ed to break a circularity - NtMatchers(~[ast::Matcher]) + NtMatchers(Vec ) } impl fmt::Show for Nonterminal { @@ -412,13 +413,11 @@ macro_rules! declare_special_idents_and_keywords {( // The indices here must correspond to the numbers in // special_idents, in Keyword to_ident(), and in static // constants below. - let init_vec = ~[ - $( $si_str, )* - $( $sk_str, )* - $( $rk_str, )* - ]; - - interner::StrInterner::prefill(init_vec) + let mut init_vec = Vec::new(); + $(init_vec.push($si_str);)* + $(init_vec.push($sk_str);)* + $(init_vec.push($rk_str);)* + interner::StrInterner::prefill(init_vec.as_slice()) } }} diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 14d8c662aae0e..e9e0e4835933b 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -62,7 +62,7 @@ */ use std::io; -use std::vec; +use std::vec_ng::Vec; #[deriving(Clone, Eq)] pub enum Breaks { @@ -119,7 +119,7 @@ pub fn tok_str(t: Token) -> ~str { } } -pub fn buf_str(toks: ~[Token], szs: ~[int], left: uint, right: uint, +pub fn buf_str(toks: Vec , szs: Vec , left: uint, right: uint, lim: uint) -> ~str { let n = toks.len(); assert_eq!(n, szs.len()); @@ -131,7 +131,7 @@ pub fn buf_str(toks: ~[Token], szs: ~[int], left: uint, right: uint, if i != left { s.push_str(", "); } - s.push_str(format!("{}={}", szs[i], tok_str(toks[i].clone()))); + s.push_str(format!("{}={}", szs.get(i), tok_str(toks.get(i).clone()))); i += 1u; i %= n; } @@ -156,9 +156,9 @@ pub fn mk_printer(out: ~io::Writer, linewidth: uint) -> Printer { // fall behind. 
let n: uint = 3 * linewidth; debug!("mk_printer {}", linewidth); - let token: ~[Token] = vec::from_elem(n, Eof); - let size: ~[int] = vec::from_elem(n, 0); - let scan_stack: ~[uint] = vec::from_elem(n, 0u); + let token: Vec = Vec::from_elem(n, Eof); + let size: Vec = Vec::from_elem(n, 0); + let scan_stack: Vec = Vec::from_elem(n, 0u); Printer { out: out, buf_len: n, @@ -174,7 +174,7 @@ pub fn mk_printer(out: ~io::Writer, linewidth: uint) -> Printer { scan_stack_empty: true, top: 0, bottom: 0, - print_stack: ~[], + print_stack: Vec::new(), pending_indentation: 0 } } @@ -264,8 +264,8 @@ pub struct Printer { space: int, // number of spaces left on line left: uint, // index of left side of input stream right: uint, // index of right side of input stream - token: ~[Token], // ring-buffr stream goes through - size: ~[int], // ring-buffer of calculated sizes + token: Vec , // ring-buffr stream goes through + size: Vec , // ring-buffer of calculated sizes left_total: int, // running size of stream "...left" right_total: int, // running size of stream "...right" // pseudo-stack, really a ring too. Holds the @@ -274,23 +274,23 @@ pub struct Printer { // Begin (if there is any) on top of it. Stuff is flushed off the // bottom as it becomes irrelevant due to the primary ring-buffer // advancing. - scan_stack: ~[uint], + scan_stack: Vec , scan_stack_empty: bool, // top==bottom disambiguator top: uint, // index of top of scan_stack bottom: uint, // index of bottom of scan_stack // stack of blocks-in-progress being flushed by print - print_stack: ~[PrintStackElem], + print_stack: Vec , // buffered indentation to avoid writing trailing whitespace pending_indentation: int, } impl Printer { pub fn last_token(&mut self) -> Token { - self.token[self.right].clone() + (*self.token.get(self.right)).clone() } // be very careful with this! 
pub fn replace_last_token(&mut self, t: Token) { - self.token[self.right] = t; + *self.token.get_mut(self.right) = t; } pub fn pretty_print(&mut self, t: Token) -> io::IoResult<()> { debug!("pp ~[{},{}]", self.left, self.right); @@ -298,8 +298,9 @@ impl Printer { Eof => { if !self.scan_stack_empty { self.check_stack(0); - let left = self.token[self.left].clone(); - try!(self.advance_left(left, self.size[self.left])); + let left = (*self.token.get(self.left)).clone(); + let left_size = *self.size.get(self.left); + try!(self.advance_left(left, left_size)); } self.indent(0); Ok(()) @@ -313,8 +314,8 @@ impl Printer { } else { self.advance_right(); } debug!("pp Begin({})/buffer ~[{},{}]", b.offset, self.left, self.right); - self.token[self.right] = t; - self.size[self.right] = -self.right_total; + *self.token.get_mut(self.right) = t; + *self.size.get_mut(self.right) = -self.right_total; self.scan_push(self.right); Ok(()) } @@ -325,8 +326,8 @@ impl Printer { } else { debug!("pp End/buffer ~[{},{}]", self.left, self.right); self.advance_right(); - self.token[self.right] = t; - self.size[self.right] = -1; + *self.token.get_mut(self.right) = t; + *self.size.get_mut(self.right) = -1; self.scan_push(self.right); Ok(()) } @@ -342,8 +343,8 @@ impl Printer { b.offset, self.left, self.right); self.check_stack(0); self.scan_push(self.right); - self.token[self.right] = t; - self.size[self.right] = -self.right_total; + *self.token.get_mut(self.right) = t; + *self.size.get_mut(self.right) = -self.right_total; self.right_total += b.blank_space; Ok(()) } @@ -356,8 +357,8 @@ impl Printer { debug!("pp String('{}')/buffer ~[{},{}]", *s, self.left, self.right); self.advance_right(); - self.token[self.right] = t.clone(); - self.size[self.right] = len; + *self.token.get_mut(self.right) = t.clone(); + *self.size.get_mut(self.right) = len; self.right_total += len; self.check_stream() } @@ -371,13 +372,15 @@ impl Printer { debug!("scan window is {}, longer than space on line ({})", self.right_total - self.left_total, self.space); if !self.scan_stack_empty { - if self.left == self.scan_stack[self.bottom] { + if self.left == *self.scan_stack.get(self.bottom) { debug!("setting {} to infinity and popping", self.left); - self.size[self.scan_pop_bottom()] = SIZE_INFINITY; + let scanned = self.scan_pop_bottom(); + *self.size.get_mut(scanned) = SIZE_INFINITY; } } - let left = self.token[self.left].clone(); - try!(self.advance_left(left, self.size[self.left])); + let left = (*self.token.get(self.left)).clone(); + let left_size = *self.size.get(self.left); + try!(self.advance_left(left, left_size)); if self.left != self.right { try!(self.check_stream()); } @@ -393,26 +396,30 @@ impl Printer { self.top %= self.buf_len; assert!((self.top != self.bottom)); } - self.scan_stack[self.top] = x; + *self.scan_stack.get_mut(self.top) = x; } pub fn scan_pop(&mut self) -> uint { assert!((!self.scan_stack_empty)); - let x = self.scan_stack[self.top]; + let x = *self.scan_stack.get(self.top); if self.top == self.bottom { self.scan_stack_empty = true; - } else { self.top += self.buf_len - 1u; self.top %= self.buf_len; } + } else { + self.top += self.buf_len - 1u; self.top %= self.buf_len; + } return x; } pub fn scan_top(&mut self) -> uint { assert!((!self.scan_stack_empty)); - return self.scan_stack[self.top]; + return *self.scan_stack.get(self.top); } pub fn scan_pop_bottom(&mut self) -> uint { assert!((!self.scan_stack_empty)); - let x = self.scan_stack[self.bottom]; + let x = *self.scan_stack.get(self.bottom); if self.top == self.bottom { 
self.scan_stack_empty = true; - } else { self.bottom += 1u; self.bottom %= self.buf_len; } + } else { + self.bottom += 1u; self.bottom %= self.buf_len; + } return x; } pub fn advance_right(&mut self) { @@ -435,8 +442,9 @@ impl Printer { if self.left != self.right { self.left += 1u; self.left %= self.buf_len; - let left = self.token[self.left].clone(); - try!(self.advance_left(left, self.size[self.left])); + let left = (*self.token.get(self.left)).clone(); + let left_size = *self.size.get(self.left); + try!(self.advance_left(left, left_size)); } ret } else { @@ -446,22 +454,28 @@ impl Printer { pub fn check_stack(&mut self, k: int) { if !self.scan_stack_empty { let x = self.scan_top(); - match self.token[x] { - Begin(_) => { + match self.token.get(x) { + &Begin(_) => { if k > 0 { - self.size[self.scan_pop()] = self.size[x] + + let popped = self.scan_pop(); + *self.size.get_mut(popped) = *self.size.get(x) + self.right_total; self.check_stack(k - 1); } } - End => { + &End => { // paper says + not =, but that makes no sense. - self.size[self.scan_pop()] = 1; + let popped = self.scan_pop(); + *self.size.get_mut(popped) = 1; self.check_stack(k + 1); } _ => { - self.size[self.scan_pop()] = self.size[x] + self.right_total; - if k > 0 { self.check_stack(k); } + let popped = self.scan_pop(); + *self.size.get_mut(popped) = *self.size.get(x) + + self.right_total; + if k > 0 { + self.check_stack(k); + } } } } @@ -481,7 +495,7 @@ impl Printer { let print_stack = &mut self.print_stack; let n = print_stack.len(); if n != 0u { - print_stack[n - 1u] + *print_stack.get(n - 1u) } else { PrintStackElem { offset: 0, diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 688494ec5eeab..d027efc1d42f6 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -33,6 +33,7 @@ use std::char; use std::str; use std::io; use std::io::MemWriter; +use std::vec_ng::Vec; // The &mut State is stored here to prevent recursive type. 
pub enum AnnNode<'a, 'b> { @@ -60,10 +61,10 @@ pub struct State<'a> { s: pp::Printer, cm: Option<@CodeMap>, intr: @token::IdentInterner, - comments: Option<~[comments::Comment]>, - literals: Option<~[comments::Literal]>, + comments: Option >, + literals: Option >, cur_cmnt_and_lit: CurrentCommentAndLiteral, - boxes: RefCell<~[pp::Breaks]>, + boxes: RefCell >, ann: &'a PpAnn } @@ -98,7 +99,7 @@ pub fn rust_printer_annotated<'a>(writer: ~io::Writer, ann: &'a PpAnn) -> State< cur_cmnt: 0, cur_lit: 0 }, - boxes: RefCell::new(~[]), + boxes: RefCell::new(Vec::new()), ann: ann } } @@ -140,14 +141,14 @@ pub fn print_crate(cm: @CodeMap, cur_cmnt: 0, cur_lit: 0 }, - boxes: RefCell::new(~[]), + boxes: RefCell::new(Vec::new()), ann: ann }; print_crate_(&mut s, krate) } pub fn print_crate_(s: &mut State, krate: &ast::Crate) -> io::IoResult<()> { - try!(print_mod(s, &krate.module, krate.attrs)); + try!(print_mod(s, &krate.module, krate.attrs.as_slice())); try!(print_remaining_comments(s)); try!(eof(&mut s.s)); Ok(()) @@ -319,7 +320,7 @@ pub fn in_cbox(s: &mut State) -> bool { let boxes = s.boxes.borrow(); let len = boxes.get().len(); if len == 0u { return false; } - return boxes.get()[len - 1u] == pp::Consistent; + return *boxes.get().get(len - 1u) == pp::Consistent; } pub fn hardbreak_if_not_bol(s: &mut State) -> io::IoResult<()> { @@ -463,7 +464,7 @@ pub fn print_type(s: &mut State, ty: &ast::Ty) -> io::IoResult<()> { } ast::TyTup(ref elts) => { try!(popen(s)); - try!(commasep(s, Inconsistent, *elts, print_type_ref)); + try!(commasep(s, Inconsistent, elts.as_slice(), print_type_ref)); if elts.len() == 1 { try!(word(&mut s.s, ",")); } @@ -517,7 +518,7 @@ pub fn print_foreign_item(s: &mut State, item: &ast::ForeignItem) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, item.span.lo)); - try!(print_outer_attributes(s, item.attrs)); + try!(print_outer_attributes(s, item.attrs.as_slice())); match item.node { ast::ForeignItemFn(decl, ref generics) => { try!(print_fn(s, decl, None, AbiSet::Rust(), item.ident, generics, @@ -545,7 +546,7 @@ pub fn print_foreign_item(s: &mut State, pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, item.span.lo)); - try!(print_outer_attributes(s, item.attrs)); + try!(print_outer_attributes(s, item.attrs.as_slice())); { let ann_node = NodeItem(s, item); try!(s.ann.pre(ann_node)); @@ -580,21 +581,21 @@ pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> { item.vis )); try!(word(&mut s.s, " ")); - try!(print_block_with_attrs(s, body, item.attrs)); + try!(print_block_with_attrs(s, body, item.attrs.as_slice())); } ast::ItemMod(ref _mod) => { try!(head(s, visibility_qualified(item.vis, "mod"))); try!(print_ident(s, item.ident)); try!(nbsp(s)); try!(bopen(s)); - try!(print_mod(s, _mod, item.attrs)); + try!(print_mod(s, _mod, item.attrs.as_slice())); try!(bclose(s, item.span)); } ast::ItemForeignMod(ref nmod) => { try!(head(s, "extern")); try!(word_nbsp(s, nmod.abis.to_str())); try!(bopen(s)); - try!(print_foreign_mod(s, nmod, item.attrs)); + try!(print_foreign_mod(s, nmod, item.attrs.as_slice())); try!(bclose(s, item.span)); } ast::ItemTy(ty, ref params) => { @@ -646,7 +647,7 @@ pub fn print_item(s: &mut State, item: &ast::Item) -> io::IoResult<()> { try!(space(&mut s.s)); try!(bopen(s)); - try!(print_inner_attributes(s, item.attrs)); + try!(print_inner_attributes(s, item.attrs.as_slice())); for meth in methods.iter() { try!(print_method(s, *meth)); } @@ 
-706,7 +707,7 @@ pub fn print_enum_def(s: &mut State, enum_definition: &ast::EnumDef, try!(print_ident(s, ident)); try!(print_generics(s, generics)); try!(space(&mut s.s)); - try!(print_variants(s, enum_definition.variants, span)); + try!(print_variants(s, enum_definition.variants.as_slice(), span)); Ok(()) } @@ -717,7 +718,7 @@ pub fn print_variants(s: &mut State, for &v in variants.iter() { try!(space_if_not_bol(s)); try!(maybe_print_comment(s, v.span.lo)); - try!(print_outer_attributes(s, v.node.attrs)); + try!(print_outer_attributes(s, v.node.attrs.as_slice())); try!(ibox(s, indent_unit)); try!(print_variant(s, v)); try!(word(&mut s.s, ",")); @@ -761,7 +762,10 @@ pub fn print_struct(s: &mut State, if ast_util::struct_def_is_tuple_like(struct_def) { if !struct_def.fields.is_empty() { try!(popen(s)); - try!(commasep(s, Inconsistent, struct_def.fields, |s, field| { + try!(commasep(s, + Inconsistent, + struct_def.fields.as_slice(), + |s, field| { match field.node.kind { ast::NamedField(..) => fail!("unexpected named field"), ast::UnnamedField => { @@ -787,7 +791,8 @@ pub fn print_struct(s: &mut State, ast::NamedField(ident, visibility) => { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, field.span.lo)); - try!(print_outer_attributes(s, field.node.attrs)); + try!(print_outer_attributes(s, + field.node.attrs.as_slice())); try!(print_visibility(s, visibility)); try!(print_ident(s, ident)); try!(word_nbsp(s, ":")); @@ -857,7 +862,10 @@ pub fn print_variant(s: &mut State, v: &ast::Variant) -> io::IoResult<()> { arg: &ast::VariantArg) -> io::IoResult<()> { print_type(s, arg.ty) } - try!(commasep(s, Consistent, *args, print_variant_arg)); + try!(commasep(s, + Consistent, + args.as_slice(), + print_variant_arg)); try!(pclose(s)); } } @@ -881,7 +889,7 @@ pub fn print_variant(s: &mut State, v: &ast::Variant) -> io::IoResult<()> { pub fn print_ty_method(s: &mut State, m: &ast::TypeMethod) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, m.span.lo)); - try!(print_outer_attributes(s, m.attrs)); + try!(print_outer_attributes(s, m.attrs.as_slice())); try!(print_ty_fn(s, None, None, @@ -907,12 +915,12 @@ pub fn print_trait_method(s: &mut State, pub fn print_method(s: &mut State, meth: &ast::Method) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, meth.span.lo)); - try!(print_outer_attributes(s, meth.attrs)); + try!(print_outer_attributes(s, meth.attrs.as_slice())); try!(print_fn(s, meth.decl, Some(meth.purity), AbiSet::Rust(), meth.ident, &meth.generics, Some(meth.explicit_self.node), meth.vis)); try!(word(&mut s.s, " ")); - print_block_with_attrs(s, meth.body, meth.attrs) + print_block_with_attrs(s, meth.body, meth.attrs.as_slice()) } pub fn print_outer_attributes(s: &mut State, @@ -1184,7 +1192,7 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { try!(word(&mut s.s, "mut")); if exprs.len() > 0u { try!(nbsp(s)); } } - try!(commasep_exprs(s, Inconsistent, *exprs)); + try!(commasep_exprs(s, Inconsistent, exprs.as_slice())); try!(word(&mut s.s, "]")); try!(end(s)); } @@ -1207,7 +1215,11 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { ast::ExprStruct(ref path, ref fields, wth) => { try!(print_path(s, path, true)); try!(word(&mut s.s, "{")); - try!(commasep_cmnt(s, Consistent, (*fields), print_field, get_span)); + try!(commasep_cmnt(s, + Consistent, + fields.as_slice(), + print_field, + get_span)); match wth { Some(expr) => { try!(ibox(s, indent_unit)); @@ -1225,7 +1237,7 @@ pub fn 
print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { } ast::ExprTup(ref exprs) => { try!(popen(s)); - try!(commasep_exprs(s, Inconsistent, *exprs)); + try!(commasep_exprs(s, Inconsistent, exprs.as_slice())); if exprs.len() == 1 { try!(word(&mut s.s, ",")); } @@ -1233,16 +1245,16 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { } ast::ExprCall(func, ref args) => { try!(print_expr(s, func)); - try!(print_call_post(s, *args)); + try!(print_call_post(s, args.as_slice())); } ast::ExprMethodCall(ident, ref tys, ref args) => { let base_args = args.slice_from(1); - try!(print_expr(s, args[0])); + try!(print_expr(s, *args.get(0))); try!(word(&mut s.s, ".")); try!(print_ident(s, ident)); if tys.len() > 0u { try!(word(&mut s.s, "::<")); - try!(commasep(s, Inconsistent, *tys, print_type_ref)); + try!(commasep(s, Inconsistent, tys.as_slice(), print_type_ref)); try!(word(&mut s.s, ">")); } try!(print_call_post(s, base_args)); @@ -1455,7 +1467,7 @@ pub fn print_expr(s: &mut State, expr: &ast::Expr) -> io::IoResult<()> { try!(print_ident(s, id)); if tys.len() > 0u { try!(word(&mut s.s, "::<")); - try!(commasep(s, Inconsistent, *tys, print_type_ref)); + try!(commasep(s, Inconsistent, tys.as_slice(), print_type_ref)); try!(word(&mut s.s, ">")); } } @@ -1649,7 +1661,7 @@ fn print_path_(s: &mut State, } try!(commasep(s, Inconsistent, - segment.types.map_to_vec(|&t| t), + segment.types.map_to_vec(|&t| t).as_slice(), print_type_ref)); } @@ -1708,7 +1720,7 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { Some(ref args) => { if !args.is_empty() { try!(popen(s)); - try!(commasep(s, Inconsistent, *args, + try!(commasep(s, Inconsistent, args.as_slice(), |s, &p| print_pat(s, p))); try!(pclose(s)); } else { } @@ -1727,7 +1739,7 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { Ok(()) } fn get_span(f: &ast::FieldPat) -> codemap::Span { return f.pat.span; } - try!(commasep_cmnt(s, Consistent, *fields, + try!(commasep_cmnt(s, Consistent, fields.as_slice(), |s, f| print_field(s,f), get_span)); if etc { @@ -1738,7 +1750,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { } ast::PatTup(ref elts) => { try!(popen(s)); - try!(commasep(s, Inconsistent, *elts, |s, &p| print_pat(s, p))); + try!(commasep(s, + Inconsistent, + elts.as_slice(), + |s, &p| print_pat(s, p))); if elts.len() == 1 { try!(word(&mut s.s, ",")); } @@ -1761,7 +1776,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { } ast::PatVec(ref before, slice, ref after) => { try!(word(&mut s.s, "[")); - try!(commasep(s, Inconsistent, *before, |s, &p| print_pat(s, p))); + try!(commasep(s, + Inconsistent, + before.as_slice(), + |s, &p| print_pat(s, p))); for &p in slice.iter() { if !before.is_empty() { try!(word_space(s, ",")); } match *p { @@ -1773,7 +1791,10 @@ pub fn print_pat(s: &mut State, pat: &ast::Pat) -> io::IoResult<()> { try!(print_pat(s, p)); if !after.is_empty() { try!(word_space(s, ",")); } } - try!(commasep(s, Inconsistent, *after, |s, &p| print_pat(s, p))); + try!(commasep(s, + Inconsistent, + after.as_slice(), + |s, &p| print_pat(s, p))); try!(word(&mut s.s, "]")); } } @@ -1842,7 +1863,7 @@ pub fn print_fn_args(s: &mut State, decl: &ast::FnDecl, for &explicit_self in opt_explicit_self.iter() { let m = match explicit_self { ast::SelfStatic => ast::MutImmutable, - _ => match decl.inputs[0].pat.node { + _ => match decl.inputs.get(0).pat.node { ast::PatIdent(ast::BindByValue(m), _, _) => m, _ => ast::MutImmutable } @@ -1981,12 
+2002,12 @@ pub fn print_generics(s: &mut State, } } - let mut ints = ~[]; + let mut ints = Vec::new(); for i in range(0u, total) { ints.push(i); } - try!(commasep(s, Inconsistent, ints, + try!(commasep(s, Inconsistent, ints.as_slice(), |s, &i| print_item(s, generics, i))); try!(word(&mut s.s, ">")); } @@ -2041,7 +2062,7 @@ pub fn print_view_path(s: &mut State, vp: &ast::ViewPath) -> io::IoResult<()> { try!(print_path(s, path, false)); try!(word(&mut s.s, "::{")); } - try!(commasep(s, Inconsistent, (*idents), |s, w| { + try!(commasep(s, Inconsistent, idents.as_slice(), |s, w| { print_ident(s, w.node.name) })); word(&mut s.s, "}") @@ -2057,7 +2078,7 @@ pub fn print_view_paths(s: &mut State, pub fn print_view_item(s: &mut State, item: &ast::ViewItem) -> io::IoResult<()> { try!(hardbreak_if_not_bol(s)); try!(maybe_print_comment(s, item.span.lo)); - try!(print_outer_attributes(s, item.attrs)); + try!(print_outer_attributes(s, item.attrs.as_slice())); try!(print_visibility(s, item.vis)); match item.node { ast::ViewItemExternMod(id, ref optional_path, _) => { @@ -2073,7 +2094,7 @@ pub fn print_view_item(s: &mut State, item: &ast::ViewItem) -> io::IoResult<()> ast::ViewItemUse(ref vps) => { try!(head(s, "use")); - try!(print_view_paths(s, *vps)); + try!(print_view_paths(s, vps.as_slice())); } } try!(word(&mut s.s, ";")); @@ -2103,7 +2124,7 @@ pub fn print_arg(s: &mut State, input: &ast::Arg) -> io::IoResult<()> { match input.pat.node { ast::PatIdent(_, ref path, _) if path.segments.len() == 1 && - path.segments[0].identifier.name == + path.segments.get(0).identifier.name == parse::token::special_idents::invalid.name => { // Do nothing. } @@ -2286,7 +2307,7 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) -> io::IoResult<()> { ast::LitBinary(ref arr) => { try!(ibox(s, indent_unit)); try!(word(&mut s.s, "[")); - try!(commasep_cmnt(s, Inconsistent, *arr.borrow(), + try!(commasep_cmnt(s, Inconsistent, arr.borrow().as_slice(), |s, u| word(&mut s.s, format!("{}", *u)), |_| lit.span)); try!(word(&mut s.s, "]")); @@ -2303,7 +2324,7 @@ pub fn next_lit(s: &mut State, pos: BytePos) -> Option { match s.literals { Some(ref lits) => { while s.cur_cmnt_and_lit.cur_lit < lits.len() { - let ltrl = (*lits)[s.cur_cmnt_and_lit.cur_lit].clone(); + let ltrl = (*(*lits).get(s.cur_cmnt_and_lit.cur_lit)).clone(); if ltrl.pos > pos { return None; } s.cur_cmnt_and_lit.cur_lit += 1u; if ltrl.pos == pos { return Some(ltrl); } @@ -2335,7 +2356,7 @@ pub fn print_comment(s: &mut State, comments::Mixed => { assert_eq!(cmnt.lines.len(), 1u); try!(zerobreak(&mut s.s)); - try!(word(&mut s.s, cmnt.lines[0])); + try!(word(&mut s.s, *cmnt.lines.get(0))); try!(zerobreak(&mut s.s)); } comments::Isolated => { @@ -2352,7 +2373,7 @@ pub fn print_comment(s: &mut State, comments::Trailing => { try!(word(&mut s.s, " ")); if cmnt.lines.len() == 1u { - try!(word(&mut s.s, cmnt.lines[0])); + try!(word(&mut s.s, *cmnt.lines.get(0))); try!(hardbreak(&mut s.s)); } else { try!(ibox(s, 0u)); @@ -2414,7 +2435,7 @@ pub fn next_comment(s: &mut State) -> Option { match s.comments { Some(ref cmnts) => { if s.cur_cmnt_and_lit.cur_cmnt < cmnts.len() { - Some(cmnts[s.cur_cmnt_and_lit.cur_cmnt].clone()) + Some((*cmnts.get(s.cur_cmnt_and_lit.cur_cmnt)).clone()) } else { None } @@ -2535,12 +2556,14 @@ mod test { use codemap; use parse::token; + use std::vec_ng::Vec; + #[test] fn test_fun_to_str() { let abba_ident = token::str_to_ident("abba"); let decl = ast::FnDecl { - inputs: ~[], + inputs: Vec::new(), output: ast::P(ast::Ty {id: 0, node: ast::TyNil, 
span: codemap::DUMMY_SP}), @@ -2559,9 +2582,9 @@ mod test { let var = codemap::respan(codemap::DUMMY_SP, ast::Variant_ { name: ident, - attrs: ~[], + attrs: Vec::new(), // making this up as I go.... ? - kind: ast::TupleVariantKind(~[]), + kind: ast::TupleVariantKind(Vec::new()), id: 0, disr_expr: None, vis: ast::Public, diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 7b885df0317ee..ba154a8d8923c 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -21,10 +21,11 @@ use std::cmp::Equiv; use std::fmt; use std::hash::Hash; use std::rc::Rc; +use std::vec_ng::Vec; pub struct Interner { priv map: RefCell>, - priv vect: RefCell<~[T]>, + priv vect: RefCell >, } // when traits can extend traits, we should extend index to get [] @@ -32,7 +33,7 @@ impl Interner { pub fn new() -> Interner { Interner { map: RefCell::new(HashMap::new()), - vect: RefCell::new(~[]), + vect: RefCell::new(Vec::new()), } } @@ -68,7 +69,7 @@ impl Interner { pub fn get(&self, idx: Name) -> T { let vect = self.vect.borrow(); - vect.get()[idx].clone() + (*vect.get().get(idx as uint)).clone() } pub fn len(&self) -> uint { @@ -134,7 +135,7 @@ impl RcStr { // &str rather than RcStr, resulting in less allocation. pub struct StrInterner { priv map: RefCell>, - priv vect: RefCell<~[RcStr]>, + priv vect: RefCell >, } // when traits can extend traits, we should extend index to get [] @@ -142,7 +143,7 @@ impl StrInterner { pub fn new() -> StrInterner { StrInterner { map: RefCell::new(HashMap::new()), - vect: RefCell::new(~[]), + vect: RefCell::new(Vec::new()), } } @@ -189,21 +190,21 @@ impl StrInterner { let new_idx = self.len() as Name; // leave out of map to avoid colliding let mut vect = self.vect.borrow_mut(); - let existing = vect.get()[idx].clone(); + let existing = (*vect.get().get(idx as uint)).clone(); vect.get().push(existing); new_idx } pub fn get(&self, idx: Name) -> RcStr { let vect = self.vect.borrow(); - vect.get()[idx].clone() + (*vect.get().get(idx as uint)).clone() } /// Returns this string with lifetime tied to the interner. Since /// strings may never be removed from the interner, this is safe. 
pub fn get_ref<'a>(&'a self, idx: Name) -> &'a str { let vect = self.vect.borrow(); - let s: &str = vect.get()[idx].as_slice(); + let s: &str = vect.get().get(idx as uint).as_slice(); unsafe { cast::transmute(s) } diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 8c7ad028a8ee0..03fc30e2fd771 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -15,22 +15,24 @@ use parse::{new_parser_from_source_str}; use parse::parser::Parser; use parse::token; +use std::vec_ng::Vec; + // map a string to tts, using a made-up filename: return both the TokenTree's // and the ParseSess -pub fn string_to_tts_and_sess (source_str : ~str) -> (~[ast::TokenTree], @ParseSess) { +pub fn string_to_tts_and_sess (source_str : ~str) -> (Vec , @ParseSess) { let ps = new_parse_sess(); (filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps) } // map a string to tts, using a made-up filename: -pub fn string_to_tts(source_str : ~str) -> ~[ast::TokenTree] { +pub fn string_to_tts(source_str : ~str) -> Vec { let (tts,_) = string_to_tts_and_sess(source_str); tts } pub fn string_to_parser_and_sess(source_str: ~str) -> (Parser,@ParseSess) { let ps = new_parse_sess(); - (new_parser_from_source_str(ps,~[],~"bogofile",source_str),ps) + (new_parser_from_source_str(ps,Vec::new(),~"bogofile",source_str),ps) } // map string to parser (via tts) @@ -69,14 +71,14 @@ pub fn string_to_expr (source_str : ~str) -> @ast::Expr { // parse a string, return an item pub fn string_to_item (source_str : ~str) -> Option<@ast::Item> { with_error_checking_parse(source_str, |p| { - p.parse_item(~[]) + p.parse_item(Vec::new()) }) } // parse a string, return a stmt pub fn string_to_stmt(source_str : ~str) -> @ast::Stmt { with_error_checking_parse(source_str, |p| { - p.parse_stmt(~[]) + p.parse_stmt(Vec::new()) }) } @@ -87,7 +89,7 @@ pub fn string_to_pat(source_str : ~str) -> @ast::Pat { } // convert a vector of strings to a vector of ast::Ident's -pub fn strs_to_idents(ids: ~[&str]) -> ~[ast::Ident] { +pub fn strs_to_idents(ids: Vec<&str> ) -> Vec { ids.map(|u| token::str_to_ident(*u)) } diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs index d6cc35a6f9d3e..9eb9871bb2141 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -7,14 +7,16 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. + use std::mem; -use std::vec; +use std::vec_ng::Vec; +use std::vec_ng; /// A vector type optimized for cases where the size is almost always 0 or 1 pub enum SmallVector { priv Zero, priv One(T), - priv Many(~[T]), + priv Many(Vec ), } impl Container for SmallVector { @@ -46,7 +48,7 @@ impl SmallVector { One(v) } - pub fn many(vs: ~[T]) -> SmallVector { + pub fn many(vs: Vec ) -> SmallVector { Many(vs) } @@ -56,7 +58,7 @@ impl SmallVector { One(..) 
=> { let one = mem::replace(self, Zero); match one { - One(v1) => mem::replace(self, Many(~[v1, v])), + One(v1) => mem::replace(self, Many(vec!(v1, v))), _ => unreachable!() }; } @@ -73,7 +75,7 @@ impl SmallVector { pub fn get<'a>(&'a self, idx: uint) -> &'a T { match *self { One(ref v) if idx == 0 => v, - Many(ref vs) => &vs[idx], + Many(ref vs) => vs.get(idx), _ => fail!("out of bounds access") } } @@ -104,7 +106,7 @@ impl SmallVector { pub enum MoveItems { priv ZeroIterator, priv OneIterator(T), - priv ManyIterator(vec::MoveItems), + priv ManyIterator(vec_ng::MoveItems), } impl Iterator for MoveItems { @@ -136,13 +138,15 @@ impl Iterator for MoveItems { mod test { use super::*; + use std::vec_ng::Vec; + #[test] fn test_len() { let v: SmallVector = SmallVector::zero(); assert_eq!(0, v.len()); assert_eq!(1, SmallVector::one(1).len()); - assert_eq!(5, SmallVector::many(~[1, 2, 3, 4, 5]).len()); + assert_eq!(5, SmallVector::many(vec!(1, 2, 3, 4, 5)).len()); } #[test] @@ -161,7 +165,7 @@ mod test { #[test] fn test_from_iterator() { - let v: SmallVector = (~[1, 2, 3]).move_iter().collect(); + let v: SmallVector = (vec!(1, 2, 3)).move_iter().collect(); assert_eq!(3, v.len()); assert_eq!(&1, v.get(0)); assert_eq!(&2, v.get(1)); @@ -171,14 +175,14 @@ mod test { #[test] fn test_move_iter() { let v = SmallVector::zero(); - let v: ~[int] = v.move_iter().collect(); - assert_eq!(~[], v); + let v: Vec = v.move_iter().collect(); + assert_eq!(Vec::new(), v); let v = SmallVector::one(1); - assert_eq!(~[1], v.move_iter().collect()); + assert_eq!(vec!(1), v.move_iter().collect()); - let v = SmallVector::many(~[1, 2, 3]); - assert_eq!(~[1, 2, 3], v.move_iter().collect()); + let v = SmallVector::many(vec!(1, 2, 3)); + assert_eq!(vec!(1, 2, 3), v.move_iter().collect()); } #[test] @@ -190,12 +194,12 @@ mod test { #[test] #[should_fail] fn test_expect_one_many() { - SmallVector::many(~[1, 2]).expect_one(""); + SmallVector::many(vec!(1, 2)).expect_one(""); } #[test] fn test_expect_one_one() { assert_eq!(1, SmallVector::one(1).expect_one("")); - assert_eq!(1, SmallVector::many(~[1]).expect_one("")); + assert_eq!(1, SmallVector::many(vec!(1)).expect_one("")); } } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 39989977d69fa..2edfd367f4ef2 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -637,7 +637,7 @@ pub fn walk_expr>(visitor: &mut V, expression: &Expr, en visitor.visit_expr(subexpression, env.clone()) } ExprVec(ref subexpressions, _) => { - walk_exprs(visitor, *subexpressions, env.clone()) + walk_exprs(visitor, subexpressions.as_slice(), env.clone()) } ExprRepeat(element, count, _) => { visitor.visit_expr(element, env.clone()); @@ -662,7 +662,7 @@ pub fn walk_expr>(visitor: &mut V, expression: &Expr, en visitor.visit_expr(callee_expression, env.clone()) } ExprMethodCall(_, ref types, ref arguments) => { - walk_exprs(visitor, *arguments, env.clone()); + walk_exprs(visitor, arguments.as_slice(), env.clone()); for &typ in types.iter() { visitor.visit_ty(typ, env.clone()) }
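The hunks above all apply one mechanical conversion from the old owned-vector type ~[T] to std::vec_ng::Vec<T>: construction moves from ~[...] to Vec::new() / vec!(...), reads move from xs[i] to *xs.get(i) and writes to *xs.get_mut(i) = v, callers that still expect &[T] borrow a view with .as_slice(), and concatenation moves from vec::append or ~[x] + rest to vec_ng::append and push_all_move. What follows is a minimal sketch collecting those idioms in one place, assuming the pre-1.0 std::vec_ng API exercised by this patch (it will not compile on current Rust); the names demo and sum are made up for illustration and are not code from the tree.

use std::vec_ng::Vec;
use std::vec_ng;

// Takes a borrowed slice view, as the unchanged &[T] callees in this patch do.
fn sum(xs: &[int]) -> int {
    let mut total = 0;
    for x in xs.iter() { total += *x; }
    total
}

fn demo() -> int {
    // old: let mut xs = ~[1, 2, 3];
    let mut xs = vec!(1, 2, 3);
    xs.push(4);

    // old: let first = xs[0];    new: dereference the borrow returned by get()
    let first = *xs.get(0);
    // old: xs[0] = 10;           new: write through get_mut()
    *xs.get_mut(0) = 10;

    // old: vec::append(xs, ys)   new: append a moved Vec and a borrowed slice
    let ys = vec!(5, 6);
    let zs = vec_ng::append(xs, ys.as_slice());

    // old: ~[first] + zs         new: seed a Vec, then push_all_move the tail
    let mut ws = vec!(first);
    ws.push_all_move(zs);

    // old: sum(ws)               new: hand unconverted callees a slice view
    sum(ws.as_slice())
}

The same shapes recur throughout the diff: attribute lists (Vec<Attribute>), token trees (Vec<TokenTree>), and the pretty-printer ring buffers are built with Vec::new() or Vec::from_elem(), indexed with get()/get_mut(), and lent out via as_slice() wherever a function signature still takes &[T].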