diff --git a/src/libproc_macro_plugin/qquote.rs b/src/libproc_macro_plugin/qquote.rs index e5a3abc2ea982..1ae906e0aa4e0 100644 --- a/src/libproc_macro_plugin/qquote.rs +++ b/src/libproc_macro_plugin/qquote.rs @@ -34,8 +34,9 @@ use syntax::codemap::Span; use syntax::ext::base::*; use syntax::ext::base; use syntax::ext::proc_macro_shim::build_block_emitter; -use syntax::parse::token::{self, Token, gensym_ident, str_to_ident}; +use syntax::parse::token::{self, Token}; use syntax::print::pprust; +use syntax::symbol::Symbol; use syntax::tokenstream::{TokenTree, TokenStream}; // ____________________________________________________________________________________________ @@ -124,7 +125,7 @@ fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindi } // produce an error or something first let exp = vec![exp.unwrap().to_owned()]; debug!("RHS: {:?}", exp.clone()); - let new_id = gensym_ident("tmp"); + let new_id = Ident::with_empty_ctxt(Symbol::gensym("tmp")); debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone())); debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec()); bindings.push((new_id, TokenStream::from_tts(exp))); @@ -179,7 +180,7 @@ fn unravel_concats(tss: Vec) -> TokenStream { }; while let Some(ts) = pushes.pop() { - output = build_fn_call(str_to_ident("concat"), + output = build_fn_call(Ident::from_str("concat"), concat(concat(ts, from_tokens(vec![Token::Comma])), output)); @@ -209,18 +210,19 @@ fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec) -> (Bindings, T // FIXME handle sequence repetition tokens QTT::QDL(qdl) => { debug!(" QDL: {:?} ", qdl.tts); - let new_id = gensym_ident("qdl_tmp"); + let new_id = Ident::with_empty_ctxt(Symbol::gensym("qdl_tmp")); let mut cct_rec = convert_complex_tts(cx, qdl.tts); bindings.append(&mut cct_rec.0); bindings.push((new_id, cct_rec.1)); let sep = build_delim_tok(qdl.delim); - pushes.push(build_mod_call(vec![str_to_ident("proc_macro_tokens"), - str_to_ident("build"), - str_to_ident("build_delimited")], - concat(from_tokens(vec![Token::Ident(new_id)]), - concat(lex(","), sep)))); + pushes.push(build_mod_call( + vec![Ident::from_str("proc_macro_tokens"), + Ident::from_str("build"), + Ident::from_str("build_delimited")], + concat(from_tokens(vec![Token::Ident(new_id)]), concat(lex(","), sep)), + )); } QTT::QIdent(t) => { pushes.push(TokenStream::from_tts(vec![t])); @@ -250,13 +252,13 @@ fn unravel(binds: Bindings) -> TokenStream { /// Checks if the Ident is `unquote`. fn is_unquote(id: Ident) -> bool { - let qq = str_to_ident("unquote"); + let qq = Ident::from_str("unquote"); id.name == qq.name // We disregard context; unquote is _reserved_ } /// Checks if the Ident is `quote`. 
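The qquote changes above replace two different helpers: `str_to_ident` becomes `Ident::from_str` (deduplicating interning), while `gensym_ident` becomes `Ident::with_empty_ctxt(Symbol::gensym(..))`, which always mints a fresh symbol so generated temporaries like `tmp` and `qdl_tmp` cannot collide with user-written names. The following self-contained sketch illustrates that intern/gensym distinction with a toy interner; `MiniInterner`, `Sym`, and the u32 indices are invented for illustration and are not the compiler's real `syntax::symbol` implementation.

    // Toy interner illustrating the intern/gensym distinction (not rustc's
    // real syntax::symbol implementation; names and indices are made up).
    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Debug)]
    struct Sym(u32);

    #[derive(Default)]
    struct MiniInterner {
        names: HashMap<String, Sym>,
        strings: Vec<String>,
    }

    impl MiniInterner {
        // intern: the same string always maps to the same symbol.
        fn intern(&mut self, s: &str) -> Sym {
            if let Some(&sym) = self.names.get(s) {
                return sym;
            }
            let sym = Sym(self.strings.len() as u32);
            self.strings.push(s.to_string());
            self.names.insert(s.to_string(), sym);
            sym
        }

        // gensym: always a fresh symbol, even for a string seen before,
        // which is what `Symbol::gensym("tmp")` relies on above.
        fn gensym(&mut self, s: &str) -> Sym {
            let sym = Sym(self.strings.len() as u32);
            self.strings.push(s.to_string());
            sym
        }
    }

    fn main() {
        let mut interner = MiniInterner::default();
        let a = interner.intern("tmp");
        let b = interner.intern("tmp");
        assert_eq!(a, b);                 // interning deduplicates
        let g1 = interner.gensym("tmp");
        let g2 = interner.gensym("tmp");
        assert_ne!(g1, g2);               // gensyms are always distinct
    }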
fn is_qquote(id: Ident) -> bool { - let qq = str_to_ident("qquote"); + let qq = Ident::from_str("qquote"); id.name == qq.name // We disregard context; qquote is _reserved_ } @@ -266,7 +268,8 @@ mod int_build { use syntax::ast::{self, Ident}; use syntax::codemap::{DUMMY_SP}; - use syntax::parse::token::{self, Token, keywords, str_to_ident}; + use syntax::parse::token::{self, Token, Lit}; + use syntax::symbol::keywords; use syntax::tokenstream::{TokenTree, TokenStream}; // ____________________________________________________________________________________________ @@ -277,19 +280,19 @@ mod int_build { build_paren_delimited(build_vec(build_token_tt(t)))) } - pub fn emit_lit(l: token::Lit, n: Option) -> TokenStream { + pub fn emit_lit(l: Lit, n: Option) -> TokenStream { let suf = match n { - Some(n) => format!("Some(ast::Name({}))", n.0), + Some(n) => format!("Some(ast::Name({}))", n.as_u32()), None => "None".to_string(), }; let lit = match l { - token::Lit::Byte(n) => format!("Lit::Byte(token::intern(\"{}\"))", n.to_string()), - token::Lit::Char(n) => format!("Lit::Char(token::intern(\"{}\"))", n.to_string()), - token::Lit::Integer(n) => format!("Lit::Integer(token::intern(\"{}\"))", n.to_string()), - token::Lit::Float(n) => format!("Lit::Float(token::intern(\"{}\"))", n.to_string()), - token::Lit::Str_(n) => format!("Lit::Str_(token::intern(\"{}\"))", n.to_string()), - token::Lit::ByteStr(n) => format!("Lit::ByteStr(token::intern(\"{}\"))", n.to_string()), + Lit::Byte(n) => format!("Lit::Byte(Symbol::intern(\"{}\"))", n.to_string()), + Lit::Char(n) => format!("Lit::Char(Symbol::intern(\"{}\"))", n.to_string()), + Lit::Float(n) => format!("Lit::Float(Symbol::intern(\"{}\"))", n.to_string()), + Lit::Str_(n) => format!("Lit::Str_(Symbol::intern(\"{}\"))", n.to_string()), + Lit::Integer(n) => format!("Lit::Integer(Symbol::intern(\"{}\"))", n.to_string()), + Lit::ByteStr(n) => format!("Lit::ByteStr(Symbol::intern(\"{}\"))", n.to_string()), _ => panic!("Unsupported literal"), }; @@ -388,9 +391,10 @@ mod int_build { Token::Underscore => lex("_"), Token::Literal(lit, sfx) => emit_lit(lit, sfx), // fix ident expansion information... somehow - Token::Ident(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", ident.name)), - Token::Lifetime(ident) => lex(&format!("Token::Ident(str_to_ident(\"{}\"))", - ident.name)), + Token::Ident(ident) => + lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)), + Token::Lifetime(ident) => + lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)), _ => panic!("Unhandled case!"), } } @@ -408,7 +412,7 @@ mod int_build { /// Takes `input` and returns `vec![input]`. pub fn build_vec(ts: TokenStream) -> TokenStream { - build_mac_call(str_to_ident("vec"), ts) + build_mac_call(Ident::from_str("vec"), ts) // tts.clone().to_owned() } diff --git a/src/libproc_macro_tokens/build.rs b/src/libproc_macro_tokens/build.rs index 7b7590b863b71..d39aba0aa7787 100644 --- a/src/libproc_macro_tokens/build.rs +++ b/src/libproc_macro_tokens/build.rs @@ -13,7 +13,8 @@ extern crate syntax_pos; use syntax::ast::Ident; use syntax::codemap::DUMMY_SP; -use syntax::parse::token::{self, Token, keywords, str_to_ident}; +use syntax::parse::token::{self, Token}; +use syntax::symbol::keywords; use syntax::tokenstream::{self, TokenTree, TokenStream}; use std::rc::Rc; @@ -43,13 +44,13 @@ pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool { /// Convert a `&str` into a Token. 
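The `emit_lit` arms above work by emitting Rust source text that re-interns the literal when the generated code is itself compiled; only the interning call changes, from `token::intern` to `Symbol::intern`. A minimal self-contained sketch of that code-that-emits-code pattern (the `emit_str_lit` helper is invented for illustration):

    // Instead of carrying a token across phases, the quoter prints source
    // text that rebuilds the same token via Symbol::intern at a later stage.
    fn emit_str_lit(contents: &str) -> String {
        format!("Lit::Str_(Symbol::intern(\"{}\"))", contents)
    }

    fn main() {
        assert_eq!(emit_str_lit("hello"),
                   "Lit::Str_(Symbol::intern(\"hello\"))");
    }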
pub fn str_to_token_ident(s: &str) -> Token { - Token::Ident(str_to_ident(s)) + Token::Ident(Ident::from_str(s)) } /// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that /// corresponds to it. pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token { - Token::Ident(str_to_ident(&kw.name().as_str()[..])) + Token::Ident(Ident::from_str(&kw.name().as_str()[..])) } // ____________________________________________________________________________________________ diff --git a/src/librustc/hir/check_attr.rs b/src/librustc/hir/check_attr.rs index 8ba52cdb64f5f..abc35634d15f4 100644 --- a/src/librustc/hir/check_attr.rs +++ b/src/librustc/hir/check_attr.rs @@ -64,7 +64,7 @@ impl<'a> CheckAttrVisitor<'a> { None => continue, }; - let (message, label) = match &*name { + let (message, label) = match &*name.as_str() { "C" => { conflicting_reprs += 1; if target != Target::Struct && @@ -120,7 +120,7 @@ impl<'a> CheckAttrVisitor<'a> { } fn check_attribute(&self, attr: &ast::Attribute, target: Target) { - let name: &str = &attr.name(); + let name: &str = &attr.name().as_str(); match name { "inline" => self.check_inline(attr, target), "repr" => self.check_repr(attr, target), diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 05c4ae521803a..9547e09afe003 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -53,8 +53,8 @@ use syntax::ast::*; use syntax::errors; use syntax::ptr::P; use syntax::codemap::{respan, Spanned}; -use syntax::parse::token; use syntax::std_inject; +use syntax::symbol::{Symbol, keywords}; use syntax::visit::{self, Visitor}; use syntax_pos::Span; @@ -149,7 +149,7 @@ impl<'a> LoweringContext<'a> { } fn str_to_ident(&self, s: &'static str) -> Name { - token::gensym(s) + Symbol::gensym(s) } fn with_parent_def(&mut self, parent_id: NodeId, f: F) -> T @@ -400,8 +400,8 @@ impl<'a> LoweringContext<'a> { // Don't expose `Self` (recovered "keyword used as ident" parse error). // `rustc::ty` expects `Self` to be only used for a trait's `Self`. // Instead, use gensym("Self") to create a distinct name that looks the same. 
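`Attribute::name()` now returns a `Symbol` rather than an `InternedString`, so sites that want to `match` on the text reborrow through `as_str()`, as `check_attr` does above. A short sketch of the idiom, assuming the in-tree `syntax` crate; the `is_repr_hint` helper is invented for illustration:

    // In-tree sketch (not buildable outside the compiler tree):
    // Symbol::as_str() yields an InternedString, and `&*` reborrows it as &str
    // so it can be matched against string literals.
    use syntax::ast;

    fn is_repr_hint(attr: &ast::Attribute) -> bool {
        match &*attr.name().as_str() {
            "C" | "packed" | "simd" => true,
            _ => false,
        }
    }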
- if name == token::keywords::SelfType.name() { - name = token::gensym("Self"); + if name == keywords::SelfType.name() { + name = Symbol::gensym("Self"); } hir::TyParam { @@ -540,7 +540,7 @@ impl<'a> LoweringContext<'a> { hir::StructField { span: f.span, id: f.id, - name: f.ident.map(|ident| ident.name).unwrap_or(token::intern(&index.to_string())), + name: f.ident.map(|ident| ident.name).unwrap_or(Symbol::intern(&index.to_string())), vis: self.lower_visibility(&f.vis), ty: self.lower_ty(&f.ty), attrs: self.lower_attrs(&f.attrs), @@ -1189,7 +1189,7 @@ impl<'a> LoweringContext<'a> { e.span, hir::PopUnstableBlock, ThinVec::new()); - this.field(token::intern(s), signal_block, ast_expr.span) + this.field(Symbol::intern(s), signal_block, ast_expr.span) }).collect(); let attrs = ast_expr.attrs.clone(); @@ -1953,9 +1953,9 @@ impl<'a> LoweringContext<'a> { fn std_path_components(&mut self, components: &[&str]) -> Vec { let mut v = Vec::new(); if let Some(s) = self.crate_root { - v.push(token::intern(s)); + v.push(Symbol::intern(s)); } - v.extend(components.iter().map(|s| token::intern(s))); + v.extend(components.iter().map(|s| Symbol::intern(s))); return v; } diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index 421843a7f11d8..0d4c0c9689f27 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -19,7 +19,7 @@ use middle::cstore::InlinedItem; use syntax::ast::*; use syntax::ext::hygiene::Mark; use syntax::visit; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{Symbol, keywords}; /// Creates def ids for nodes in the HIR. pub struct DefCollector<'a> { @@ -169,7 +169,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { this.with_parent(variant_def_index, |this| { for (index, field) in v.node.data.fields().iter().enumerate() { let name = field.ident.map(|ident| ident.name) - .unwrap_or_else(|| token::intern(&index.to_string())); + .unwrap_or_else(|| Symbol::intern(&index.to_string())); this.create_def(field.id, DefPathData::Field(name.as_str())); } @@ -188,7 +188,7 @@ impl<'a> visit::Visitor for DefCollector<'a> { for (index, field) in struct_def.fields().iter().enumerate() { let name = field.ident.map(|ident| ident.name.as_str()) - .unwrap_or(token::intern(&index.to_string()).as_str()); + .unwrap_or(Symbol::intern(&index.to_string()).as_str()); this.create_def(field.id, DefPathData::Field(name)); } } diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 38157c7e56564..83d3627d8e616 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -14,7 +14,7 @@ use std::fmt::Write; use std::hash::{Hash, Hasher}; use std::collections::hash_map::DefaultHasher; use syntax::ast; -use syntax::parse::token::{self, InternedString}; +use syntax::symbol::{Symbol, InternedString}; use ty::TyCtxt; use util::nodemap::NodeMap; @@ -115,9 +115,9 @@ impl DefPath { pub fn to_string(&self, tcx: TyCtxt) -> String { let mut s = String::with_capacity(self.data.len() * 16); - s.push_str(&tcx.original_crate_name(self.krate)); + s.push_str(&tcx.original_crate_name(self.krate).as_str()); s.push_str("/"); - s.push_str(&tcx.crate_disambiguator(self.krate)); + s.push_str(&tcx.crate_disambiguator(self.krate).as_str()); for component in &self.data { write!(s, @@ -137,8 +137,8 @@ impl DefPath { } pub fn deterministic_hash_to(&self, tcx: TyCtxt, state: &mut H) { - tcx.original_crate_name(self.krate).hash(state); - tcx.crate_disambiguator(self.krate).hash(state); + 
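Lowering and def collection synthesize names for positional (tuple-struct) fields by interning the field index, now spelled with `Symbol::intern`. A sketch of that pattern, assuming the in-tree API; the `field_name` helper is illustrative:

    // In-tree sketch: a field with no identifier in the source gets "0", "1", ...
    // as its interned name, matching the lowering/def_collector hunks above.
    use syntax::ast::{Ident, Name};
    use syntax::symbol::Symbol;

    fn field_name(ident: Option<Ident>, index: usize) -> Name {
        ident.map(|ident| ident.name)
             .unwrap_or_else(|| Symbol::intern(&index.to_string()))
    }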
tcx.original_crate_name(self.krate).as_str().hash(state); + tcx.crate_disambiguator(self.krate).as_str().hash(state); self.data.hash(state); } } @@ -328,7 +328,7 @@ impl DefPathData { LifetimeDef(ref name) | EnumVariant(ref name) | Binding(ref name) | - Field(ref name) => Some(token::intern(name)), + Field(ref name) => Some(Symbol::intern(name)), Impl | CrateRoot | @@ -343,7 +343,7 @@ impl DefPathData { pub fn as_interned_str(&self) -> InternedString { use self::DefPathData::*; - match *self { + let s = match *self { TypeNs(ref name) | ValueNs(ref name) | Module(ref name) | @@ -353,43 +353,24 @@ impl DefPathData { EnumVariant(ref name) | Binding(ref name) | Field(ref name) => { - name.clone() - } - - Impl => { - InternedString::new("{{impl}}") + return name.clone(); } // note that this does not show up in user printouts - CrateRoot => { - InternedString::new("{{root}}") - } + CrateRoot => "{{root}}", // note that this does not show up in user printouts - InlinedRoot(_) => { - InternedString::new("{{inlined-root}}") - } - - Misc => { - InternedString::new("{{?}}") - } - - ClosureExpr => { - InternedString::new("{{closure}}") - } - - StructCtor => { - InternedString::new("{{constructor}}") - } - - Initializer => { - InternedString::new("{{initializer}}") - } + InlinedRoot(_) => "{{inlined-root}}", + + Impl => "{{impl}}", + Misc => "{{?}}", + ClosureExpr => "{{closure}}", + StructCtor => "{{constructor}}", + Initializer => "{{initializer}}", + ImplTrait => "{{impl-Trait}}", + }; - ImplTrait => { - InternedString::new("{{impl-Trait}}") - } - } + Symbol::intern(s).as_str() } pub fn to_string(&self) -> String { diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 06cfc8aee8c9d..a90577b34261c 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -765,7 +765,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> { None => return false, Some((node_id, name)) => (node_id, name), }; - if &part[..] != mod_name.as_str() { + if mod_name != &**part { return false; } cursor = self.map.get_parent(mod_id); @@ -803,8 +803,7 @@ impl<'a, 'ast> NodesMatchingSuffix<'a, 'ast> { // We are looking at some node `n` with a given name and parent // id; do their names match what I am seeking? fn matches_names(&self, parent_of_n: NodeId, name: Name) -> bool { - name.as_str() == &self.item_name[..] 
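The rewritten `as_interned_str` above folds the per-variant arms into one table of `&'static str` placeholders and interns once at the end, while name-carrying variants return early. A self-contained sketch of that control flow; `PathPiece` and `describe` are invented stand-ins, and the final `to_string` stands in for `Symbol::intern(s).as_str()`:

    // Name-carrying variants return early; the placeholder variants share one
    // conversion at the end, mirroring the new shape of as_interned_str.
    enum PathPiece {
        Named(String),
        ClosureExpr,
        StructCtor,
    }

    fn describe(piece: &PathPiece) -> String {
        let s = match *piece {
            PathPiece::Named(ref name) => return name.clone(),
            PathPiece::ClosureExpr => "{{closure}}",
            PathPiece::StructCtor => "{{constructor}}",
        };
        s.to_string() // the real code interns here instead
    }

    fn main() {
        assert_eq!(describe(&PathPiece::ClosureExpr), "{{closure}}");
        assert_eq!(describe(&PathPiece::Named("foo".into())), "foo");
    }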
&& - self.suffix_matches(parent_of_n) + name == &**self.item_name && self.suffix_matches(parent_of_n) } } diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 9f5ff6914b0cc..b5f892f0ff7ac 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -40,8 +40,8 @@ use syntax::codemap::{self, respan, Spanned}; use syntax::abi::Abi; use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, AsmDialect}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; -use syntax::parse::token::{keywords, InternedString}; use syntax::ptr::P; +use syntax::symbol::{Symbol, keywords}; use syntax::tokenstream::TokenTree; use syntax::util::ThinVec; @@ -1163,18 +1163,18 @@ pub enum Ty_ { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct InlineAsmOutput { - pub constraint: InternedString, + pub constraint: Symbol, pub is_rw: bool, pub is_indirect: bool, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct InlineAsm { - pub asm: InternedString, + pub asm: Symbol, pub asm_str_style: StrStyle, pub outputs: HirVec, - pub inputs: HirVec, - pub clobbers: HirVec, + pub inputs: HirVec, + pub clobbers: HirVec, pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index 807bbec3b5888..b9d1d7e4efbb9 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -13,13 +13,14 @@ pub use self::AnnNode::*; use syntax::abi::Abi; use syntax::ast; use syntax::codemap::{CodeMap, Spanned}; -use syntax::parse::token::{self, keywords, BinOpToken}; +use syntax::parse::token::{self, BinOpToken}; use syntax::parse::lexer::comments; use syntax::print::pp::{self, break_offset, word, space, hardbreak}; use syntax::print::pp::{Breaks, eof}; use syntax::print::pp::Breaks::{Consistent, Inconsistent}; use syntax::print::pprust::{self as ast_pp, PrintState}; use syntax::ptr::P; +use syntax::symbol::keywords; use syntax_pos::{self, BytePos}; use errors; @@ -1499,19 +1500,19 @@ impl<'a> State<'a> { hir::ExprInlineAsm(ref a, ref outputs, ref inputs) => { word(&mut self.s, "asm!")?; self.popen()?; - self.print_string(&a.asm, a.asm_str_style)?; + self.print_string(&a.asm.as_str(), a.asm_str_style)?; self.word_space(":")?; let mut out_idx = 0; self.commasep(Inconsistent, &a.outputs, |s, out| { - let mut ch = out.constraint.chars(); + let constraint = out.constraint.as_str(); + let mut ch = constraint.chars(); match ch.next() { Some('=') if out.is_rw => { s.print_string(&format!("+{}", ch.as_str()), ast::StrStyle::Cooked)? 
} - _ => s.print_string(&out.constraint, - ast::StrStyle::Cooked)?, + _ => s.print_string(&constraint, ast::StrStyle::Cooked)?, } s.popen()?; s.print_expr(&outputs[out_idx])?; @@ -1524,7 +1525,7 @@ impl<'a> State<'a> { let mut in_idx = 0; self.commasep(Inconsistent, &a.inputs, |s, co| { - s.print_string(&co, ast::StrStyle::Cooked)?; + s.print_string(&co.as_str(), ast::StrStyle::Cooked)?; s.popen()?; s.print_expr(&inputs[in_idx])?; s.pclose()?; @@ -1535,7 +1536,7 @@ impl<'a> State<'a> { self.word_space(":")?; self.commasep(Inconsistent, &a.clobbers, |s, co| { - s.print_string(&co, ast::StrStyle::Cooked)?; + s.print_string(&co.as_str(), ast::StrStyle::Cooked)?; Ok(()) })?; diff --git a/src/librustc/infer/error_reporting.rs b/src/librustc/infer/error_reporting.rs index 58caac4034e34..8db09d0b73d32 100644 --- a/src/librustc/infer/error_reporting.rs +++ b/src/librustc/infer/error_reporting.rs @@ -91,8 +91,8 @@ use std::cell::{Cell, RefCell}; use std::char::from_u32; use std::fmt; use syntax::ast; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::{self, Pos, Span}; use errors::DiagnosticBuilder; @@ -1219,7 +1219,7 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> { names.push(lt_name); } names.sort(); - let name = token::intern(&names[0]); + let name = Symbol::intern(&names[0]); return (name_to_dummy_lifetime(name), Kept); } return (self.life_giver.give_lifetime(), Fresh); @@ -1931,7 +1931,7 @@ impl LifeGiver { let mut s = String::from("'"); s.push_str(&num_to_string(self.counter.get())); if !self.taken.contains(&s) { - lifetime = name_to_dummy_lifetime(token::intern(&s[..])); + lifetime = name_to_dummy_lifetime(Symbol::intern(&s)); self.generated.borrow_mut().push(lifetime); break; } diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index a490b58964a71..4a082944010b2 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -40,7 +40,6 @@ use std::default::Default as StdDefault; use std::mem; use std::fmt; use syntax::attr; -use syntax::parse::token::InternedString; use syntax::ast; use syntax_pos::{MultiSpan, Span}; use errors::{self, Diagnostic, DiagnosticBuilder}; @@ -384,8 +383,7 @@ macro_rules! run_lints { ($cx:expr, $f:ident, $ps:ident, $($args:expr),*) => ({ /// Parse the lint attributes into a vector, with `Err`s for malformed lint /// attributes. Writing this as an iterator is an enormous mess. // See also the hir version just below. 
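In the pretty-printer above, `out.constraint` is now a `Symbol`, so the code binds the `as_str()` result to a local before taking `chars()`, keeping the borrowed string alive for the whole match. A sketch of that idiom, assuming the in-tree API; `split_rw_constraint` is invented for illustration:

    // In-tree sketch: hold the InternedString in a local so &str views of it
    // (chars(), as_str() of the iterator) can borrow from it safely.
    use syntax::symbol::Symbol;

    fn split_rw_constraint(constraint: Symbol) -> (bool, String) {
        let constraint = constraint.as_str();
        let mut ch = constraint.chars();
        match ch.next() {
            Some('=') => (true, format!("+{}", ch.as_str())),
            _ => (false, constraint.to_string()),
        }
    }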
-pub fn gather_attrs(attrs: &[ast::Attribute]) - -> Vec> { +pub fn gather_attrs(attrs: &[ast::Attribute]) -> Vec> { let mut out = vec![]; for attr in attrs { let r = gather_attr(attr); @@ -394,18 +392,17 @@ pub fn gather_attrs(attrs: &[ast::Attribute]) out } -pub fn gather_attr(attr: &ast::Attribute) - -> Vec> { +pub fn gather_attr(attr: &ast::Attribute) -> Vec> { let mut out = vec![]; - let level = match Level::from_str(&attr.name()) { + let level = match Level::from_str(&attr.name().as_str()) { None => return out, Some(lvl) => lvl, }; attr::mark_used(attr); - let meta = &attr.node.value; + let meta = &attr.value; let metas = if let Some(metas) = meta.meta_item_list() { metas } else { @@ -414,9 +411,7 @@ pub fn gather_attr(attr: &ast::Attribute) }; for li in metas { - out.push(li.word().map_or(Err(li.span), |word| { - Ok((word.name().clone(), level, word.span)) - })); + out.push(li.word().map_or(Err(li.span), |word| Ok((word.name(), level, word.span)))); } out @@ -629,10 +624,10 @@ pub trait LintContext: Sized { continue; } Ok((lint_name, level, span)) => { - match self.lints().find_lint(&lint_name, &self.sess(), Some(span)) { + match self.lints().find_lint(&lint_name.as_str(), &self.sess(), Some(span)) { Ok(lint_id) => vec![(lint_id, level, span)], Err(FindLintError::NotFound) => { - match self.lints().lint_groups.get(&lint_name[..]) { + match self.lints().lint_groups.get(&*lint_name.as_str()) { Some(&(ref v, _)) => v.iter() .map(|lint_id: &LintId| (*lint_id, level, span)) @@ -1193,8 +1188,7 @@ fn check_lint_name_attribute(cx: &LateContext, attr: &ast::Attribute) { continue; } Ok((lint_name, _, span)) => { - match check_lint_name(&cx.lints, - &lint_name[..]) { + match check_lint_name(&cx.lints, &lint_name.as_str()) { CheckLintNameResult::Ok => (), CheckLintNameResult::Warning(ref msg) => { cx.span_lint(builtin::RENAMED_AND_REMOVED_LINTS, diff --git a/src/librustc/middle/const_val.rs b/src/librustc/middle/const_val.rs index 3482971cd1999..9677082a43a3c 100644 --- a/src/librustc/middle/const_val.rs +++ b/src/librustc/middle/const_val.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use syntax::parse::token::InternedString; +use syntax::symbol::InternedString; use syntax::ast; use std::rc::Rc; use hir::def_id::DefId; diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index f61978271e7f6..168aba774333e 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -39,7 +39,7 @@ use syntax::ast; use syntax::attr; use syntax::ext::base::SyntaxExtension; use syntax::ptr::P; -use syntax::parse::token::InternedString; +use syntax::symbol::Symbol; use syntax_pos::Span; use rustc_back::target::Target; use hir; @@ -52,7 +52,7 @@ pub use self::NativeLibraryKind::{NativeStatic, NativeFramework, NativeUnknown}; #[derive(Clone, Debug)] pub struct LinkMeta { - pub crate_name: String, + pub crate_name: Symbol, pub crate_hash: Svh, } @@ -92,8 +92,8 @@ pub enum NativeLibraryKind { #[derive(Clone, Hash, RustcEncodable, RustcDecodable)] pub struct NativeLibrary { pub kind: NativeLibraryKind, - pub name: String, - pub cfg: Option>, + pub name: Symbol, + pub cfg: Option, } /// The data we save and restore about an inlined item or method. This is not @@ -205,11 +205,11 @@ pub trait CrateStore<'tcx> { fn extern_crate(&self, cnum: CrateNum) -> Option; /// The name of the crate as it is referred to in source code of the current /// crate. 
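`gather_attr` now returns triples whose name component is an interned `ast::Name` rather than a cloned `InternedString`, which is what lets consumers such as `middle::dead` (further below) compare the name directly. A self-contained sketch of the consuming pattern; `Name`, `Level`, and `Span` here are simplified stand-ins, not the rustc definitions:

    // Simplified stand-ins: the real Name is a Copy-able interned Symbol, so
    // consumers match on the triple and compare names without cloning strings.
    #[derive(Clone, Copy, PartialEq)]
    struct Name(u32);

    #[derive(Clone, Copy)]
    #[allow(dead_code)]
    enum Level { Allow, Warn, Deny }

    type Span = (u32, u32);

    fn has_allow(attrs: &[Result<(Name, Level, Span), Span>], wanted: Name) -> bool {
        attrs.iter().any(|res| match *res {
            Ok((name, Level::Allow, _)) => name == wanted,
            _ => false,
        })
    }

    fn main() {
        let attrs = vec![Ok((Name(7), Level::Allow, (0, 0))), Err((1, 1))];
        assert!(has_allow(&attrs, Name(7)));
        assert!(!has_allow(&attrs, Name(8)));
    }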
- fn crate_name(&self, cnum: CrateNum) -> InternedString; + fn crate_name(&self, cnum: CrateNum) -> Symbol; /// The name of the crate as it is stored in the crate's metadata. - fn original_crate_name(&self, cnum: CrateNum) -> InternedString; + fn original_crate_name(&self, cnum: CrateNum) -> Symbol; fn crate_hash(&self, cnum: CrateNum) -> Svh; - fn crate_disambiguator(&self, cnum: CrateNum) -> InternedString; + fn crate_disambiguator(&self, cnum: CrateNum) -> Symbol; fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option; fn native_libraries(&self, cnum: CrateNum) -> Vec; fn reachable_ids(&self, cnum: CrateNum) -> Vec; @@ -375,13 +375,13 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore { bug!("panic_strategy") } fn extern_crate(&self, cnum: CrateNum) -> Option { bug!("extern_crate") } - fn crate_name(&self, cnum: CrateNum) -> InternedString { bug!("crate_name") } - fn original_crate_name(&self, cnum: CrateNum) -> InternedString { + fn crate_name(&self, cnum: CrateNum) -> Symbol { bug!("crate_name") } + fn original_crate_name(&self, cnum: CrateNum) -> Symbol { bug!("original_crate_name") } fn crate_hash(&self, cnum: CrateNum) -> Svh { bug!("crate_hash") } fn crate_disambiguator(&self, cnum: CrateNum) - -> InternedString { bug!("crate_disambiguator") } + -> Symbol { bug!("crate_disambiguator") } fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option { bug!("plugin_registrar_fn") } fn native_libraries(&self, cnum: CrateNum) -> Vec diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index 991398813752f..f47eab013c2b2 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -309,8 +309,7 @@ fn has_allow_dead_code_or_lang_attr(attrs: &[ast::Attribute]) -> bool { let dead_code = lint::builtin::DEAD_CODE.name_lower(); for attr in lint::gather_attrs(attrs) { match attr { - Ok((ref name, lint::Allow, _)) - if &name[..] 
== dead_code => return true, + Ok((name, lint::Allow, _)) if name == &*dead_code => return true, _ => (), } } @@ -499,8 +498,7 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> { span: syntax_pos::Span, name: ast::Name, node_type: &str) { - let name = name.as_str(); - if !name.starts_with("_") { + if !name.as_str().starts_with("_") { self.tcx .sess .add_lint(lint::builtin::DEAD_CODE, diff --git a/src/librustc/middle/entry.rs b/src/librustc/middle/entry.rs index 9dd54457a3499..65aedae347a8d 100644 --- a/src/librustc/middle/entry.rs +++ b/src/librustc/middle/entry.rs @@ -92,7 +92,7 @@ fn entry_point_type(item: &Item, at_root: bool) -> EntryPointType { EntryPointType::Start } else if attr::contains_name(&item.attrs, "main") { EntryPointType::MainAttr - } else if item.name.as_str() == "main" { + } else if item.name == "main" { if at_root { // This is a top-level function so can be 'main' EntryPointType::MainNamed diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index cf08b59312d50..80cf64865abea 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -55,7 +55,7 @@ impl<'a, 'gcx, 'tcx> ExprVisitor<'a, 'gcx, 'tcx> { ty::TyFnDef(.., ref bfty) => bfty.abi == RustIntrinsic, _ => return false }; - intrinsic && self.infcx.tcx.item_name(def_id).as_str() == "transmute" + intrinsic && self.infcx.tcx.item_name(def_id) == "transmute" } fn check_transmute(&self, span: Span, from: Ty<'gcx>, to: Ty<'gcx>, id: ast::NodeId) { diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 9b4b1396669a3..5af9a2f02742e 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -30,7 +30,7 @@ use middle::weak_lang_items; use util::nodemap::FxHashMap; use syntax::ast; -use syntax::parse::token::InternedString; +use syntax::symbol::Symbol; use hir::itemlikevisit::ItemLikeVisitor; use hir; @@ -152,7 +152,7 @@ struct LanguageItemCollector<'a, 'tcx: 'a> { impl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LanguageItemCollector<'a, 'tcx> { fn visit_item(&mut self, item: &hir::Item) { if let Some(value) = extract(&item.attrs) { - let item_index = self.item_refs.get(&value[..]).cloned(); + let item_index = self.item_refs.get(&*value.as_str()).cloned(); if let Some(item_index) = item_index { self.collect_item(item_index, self.ast_map.local_def_id(item.id)) @@ -160,7 +160,7 @@ impl<'a, 'v, 'tcx> ItemLikeVisitor<'v> for LanguageItemCollector<'a, 'tcx> { let span = self.ast_map.span(item.id); span_err!(self.session, span, E0522, "definition of an unknown language item: `{}`.", - &value[..]); + value); } } } @@ -243,12 +243,10 @@ impl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> { } } -pub fn extract(attrs: &[ast::Attribute]) -> Option { +pub fn extract(attrs: &[ast::Attribute]) -> Option { for attribute in attrs { match attribute.value_str() { - Some(ref value) if attribute.check_name("lang") => { - return Some(value.clone()); - } + Some(value) if attribute.check_name("lang") => return Some(value), _ => {} } } diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index d381188d56b54..cf53fcf2dac86 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -123,8 +123,8 @@ use std::io::prelude::*; use std::io; use std::rc::Rc; use syntax::ast::{self, NodeId}; -use syntax::parse::token::keywords; use syntax::ptr::P; +use syntax::symbol::keywords; use syntax_pos::Span; use hir::Expr; diff --git a/src/librustc/middle/recursion_limit.rs 
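Call sites such as `item.name == "main"` and `name == &*dead_code` above work because the new `Symbol` type compares directly against string data. A self-contained sketch of how such an impl can look; the `Sym` type is a toy stand-in, not rustc's implementation:

    // Toy stand-in: real Symbols store an interner index, not a pointer, but
    // a blanket PartialEq over Deref<Target = str> is the idea that lets
    // call sites drop the explicit `.as_str()` conversions.
    use std::ops::Deref;

    #[derive(Clone, Copy)]
    struct Sym(&'static str);

    impl<T: Deref<Target = str>> PartialEq<T> for Sym {
        fn eq(&self, other: &T) -> bool {
            self.0 == &**other
        }
    }

    fn main() {
        let name = Sym("main");
        assert!(name == "main");                 // &'static str derefs to str
        assert!(name == String::from("main"));   // so does String
    }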
b/src/librustc/middle/recursion_limit.rs index 0764e817f4307..7f89461a3f4b6 100644 --- a/src/librustc/middle/recursion_limit.rs +++ b/src/librustc/middle/recursion_limit.rs @@ -25,7 +25,7 @@ pub fn update_recursion_limit(sess: &Session, krate: &ast::Crate) { } if let Some(s) = attr.value_str() { - if let Some(n) = s.parse().ok() { + if let Some(n) = s.as_str().parse().ok() { sess.recursion_limit.set(n); return; } diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index f682dfbf1be95..a0043d0a88620 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -27,7 +27,7 @@ use middle::region; use ty; use std::mem::replace; use syntax::ast; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax_pos::Span; use util::nodemap::NodeMap; diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 7e4efc7ddca06..86a89eff3a476 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -21,7 +21,7 @@ use hir::def::Def; use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, DefIndex, LOCAL_CRATE}; use ty::{self, TyCtxt, AdtKind}; use middle::privacy::AccessLevels; -use syntax::parse::token::InternedString; +use syntax::symbol::Symbol; use syntax_pos::{Span, DUMMY_SP}; use syntax::ast; use syntax::ast::{NodeId, Attribute}; @@ -36,7 +36,6 @@ use hir::pat_util::EnumerateAndAdjustIterator; use std::mem::replace; use std::cmp::Ordering; -use std::ops::Deref; #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Copy, Debug, Eq, Hash)] pub enum StabilityLevel { @@ -151,10 +150,11 @@ impl<'a, 'tcx: 'a> Annotator<'a, 'tcx> { // Check if deprecated_since < stable_since. If it is, // this is *almost surely* an accident. - if let (&Some(attr::RustcDeprecation {since: ref dep_since, ..}), - &attr::Stable {since: ref stab_since}) = (&stab.rustc_depr, &stab.level) { + if let (&Some(attr::RustcDeprecation {since: dep_since, ..}), + &attr::Stable {since: stab_since}) = (&stab.rustc_depr, &stab.level) { // Explicit version of iter::order::lt to handle parse errors properly - for (dep_v, stab_v) in dep_since.split(".").zip(stab_since.split(".")) { + for (dep_v, stab_v) in + dep_since.as_str().split(".").zip(stab_since.as_str().split(".")) { if let (Ok(dep_v), Ok(stab_v)) = (dep_v.parse::(), stab_v.parse()) { match dep_v.cmp(&stab_v) { Ordering::Less => { @@ -356,7 +356,7 @@ impl<'a, 'tcx> Index<'tcx> { /// features and possibly prints errors. Returns a list of all /// features used. pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> FxHashMap { + -> FxHashMap { let _task = tcx.dep_graph.in_task(DepNode::StabilityCheck); let ref active_lib_features = tcx.sess.features.borrow().declared_lib_features; @@ -376,8 +376,8 @@ pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) struct Checker<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, - active_features: FxHashSet, - used_features: FxHashMap, + active_features: FxHashSet, + used_features: FxHashMap, // Within a block where feature gate checking can be skipped. 
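In `stability.rs` above, the `since` fields are now `Symbol`s, so the deprecated-before-stabilized check goes through `as_str()` before splitting the dotted version strings and comparing them component-wise. A self-contained sketch of that comparison; `deprecated_before_stable` is an illustrative simplification (the real code warns on parse errors instead of returning a bool):

    // Compare dotted version strings numerically, component by component.
    fn deprecated_before_stable(dep_since: &str, stab_since: &str) -> bool {
        for (dep_v, stab_v) in dep_since.split('.').zip(stab_since.split('.')) {
            match (dep_v.parse::<u64>(), stab_v.parse::<u64>()) {
                (Ok(d), Ok(s)) if d < s => return true,
                (Ok(d), Ok(s)) if d > s => return false,
                (Ok(_), Ok(_)) => continue,
                _ => return false, // parse error: simplified handling
            }
        }
        false
    }

    fn main() {
        assert!(deprecated_before_stable("1.12.0", "1.13.0"));
        assert!(!deprecated_before_stable("1.14.0", "1.13.0"));
    }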
in_skip_block: u32, } @@ -407,10 +407,10 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { if !self.active_features.contains(feature) { let msg = match *reason { Some(ref r) => format!("use of unstable library feature '{}': {}", - &feature, &r), + &feature.as_str(), &r), None => format!("use of unstable library feature '{}'", &feature) }; - emit_feature_err(&self.tcx.sess.parse_sess, &feature, span, + emit_feature_err(&self.tcx.sess.parse_sess, &feature.as_str(), span, GateIssue::Library(Some(issue)), &msg); } } @@ -455,7 +455,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> { // When compiling with --test we don't enforce stability on the // compiler-generated test module, demarcated with `DUMMY_SP` plus the // name `__test` - if item.span == DUMMY_SP && item.name.as_str() == "__test" { return } + if item.span == DUMMY_SP && item.name == "__test" { return } check_item(self.tcx, item, true, &mut |id, sp, stab, depr| self.check(id, sp, stab, depr)); @@ -735,10 +735,10 @@ impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { /// were expected to be library features), and the list of features used from /// libraries, identify activated features that don't exist and error about them. pub fn check_unused_or_stable_features(sess: &Session, - lib_features_used: &FxHashMap) { let ref declared_lib_features = sess.features.borrow().declared_lib_features; - let mut remaining_lib_features: FxHashMap + let mut remaining_lib_features: FxHashMap = declared_lib_features.clone().into_iter().collect(); fn format_stable_since_msg(version: &str) -> String { @@ -746,7 +746,7 @@ pub fn check_unused_or_stable_features(sess: &Session, } for &(ref stable_lang_feature, span) in &sess.features.borrow().declared_stable_lang_features { - let version = find_lang_feature_accepted_version(stable_lang_feature.deref()) + let version = find_lang_feature_accepted_version(&stable_lang_feature.as_str()) .expect("unexpectedly couldn't find version feature was stabilized"); sess.add_lint(lint::builtin::STABLE_FEATURES, ast::CRATE_NODE_ID, @@ -761,7 +761,7 @@ pub fn check_unused_or_stable_features(sess: &Session, sess.add_lint(lint::builtin::STABLE_FEATURES, ast::CRATE_NODE_ID, span, - format_stable_since_msg(version.deref())); + format_stable_since_msg(&version.as_str())); } } None => ( /* used but undeclared, handled during the previous ast visit */ ) diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index 12d32bf31b13d..ec6971d596b14 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -16,7 +16,7 @@ use middle::lang_items; use rustc_back::PanicStrategy; use syntax::ast; -use syntax::parse::token::InternedString; +use syntax::symbol::Symbol; use syntax_pos::Span; use hir::intravisit::Visitor; use hir::intravisit; @@ -55,10 +55,10 @@ pub fn check_crate(krate: &hir::Crate, verify(sess, items); } -pub fn link_name(attrs: &[ast::Attribute]) -> Option { +pub fn link_name(attrs: &[ast::Attribute]) -> Option { lang_items::extract(attrs).and_then(|name| { - $(if &name[..] 
== stringify!($name) { - Some(InternedString::new(stringify!($sym))) + $(if name == stringify!($name) { + Some(Symbol::intern(stringify!($sym))) } else)* { None } @@ -126,7 +126,7 @@ impl<'a> Context<'a> { impl<'a, 'v> Visitor<'v> for Context<'a> { fn visit_foreign_item(&mut self, i: &hir::ForeignItem) { if let Some(lang_item) = lang_items::extract(&i.attrs) { - self.register(&lang_item, i.span); + self.register(&lang_item.as_str(), i.span); } intravisit::walk_foreign_item(self, i) } diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 16522a73f56a5..103e2a949df19 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -25,9 +25,8 @@ use lint; use middle::cstore; use syntax::ast::{self, IntTy, UintTy}; -use syntax::attr; use syntax::parse; -use syntax::parse::token::InternedString; +use syntax::symbol::Symbol; use syntax::feature_gate::UnstableFeatures; use errors::{ColorConfig, FatalError, Handler}; @@ -41,6 +40,7 @@ use std::collections::btree_map::Values as BTreeMapValuesIter; use std::fmt; use std::hash::Hasher; use std::collections::hash_map::DefaultHasher; +use std::collections::HashSet; use std::iter::FromIterator; use std::path::PathBuf; @@ -927,8 +927,6 @@ pub fn default_lib_output() -> CrateType { } pub fn default_configuration(sess: &Session) -> ast::CrateConfig { - use syntax::parse::token::intern_and_get_ident as intern; - let end = &sess.target.target.target_endian; let arch = &sess.target.target.arch; let wordsz = &sess.target.target.target_pointer_width; @@ -938,55 +936,46 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig { let max_atomic_width = sess.target.target.max_atomic_width(); let fam = if let Some(ref fam) = sess.target.target.options.target_family { - intern(fam) + Symbol::intern(fam) } else if sess.target.target.options.is_like_windows { - InternedString::new("windows") + Symbol::intern("windows") } else { - InternedString::new("unix") + Symbol::intern("unix") }; - let mk = attr::mk_name_value_item_str; - let mut ret = vec![ // Target bindings. - mk(InternedString::new("target_os"), intern(os)), - mk(InternedString::new("target_family"), fam.clone()), - mk(InternedString::new("target_arch"), intern(arch)), - mk(InternedString::new("target_endian"), intern(end)), - mk(InternedString::new("target_pointer_width"), intern(wordsz)), - mk(InternedString::new("target_env"), intern(env)), - mk(InternedString::new("target_vendor"), intern(vendor)), - ]; - match &fam[..] { - "windows" | "unix" => ret.push(attr::mk_word_item(fam)), - _ => (), + let mut ret = HashSet::new(); + // Target bindings. 
+ ret.insert((Symbol::intern("target_os"), Some(Symbol::intern(os)))); + ret.insert((Symbol::intern("target_family"), Some(fam))); + ret.insert((Symbol::intern("target_arch"), Some(Symbol::intern(arch)))); + ret.insert((Symbol::intern("target_endian"), Some(Symbol::intern(end)))); + ret.insert((Symbol::intern("target_pointer_width"), Some(Symbol::intern(wordsz)))); + ret.insert((Symbol::intern("target_env"), Some(Symbol::intern(env)))); + ret.insert((Symbol::intern("target_vendor"), Some(Symbol::intern(vendor)))); + if fam == "windows" || fam == "unix" { + ret.insert((fam, None)); } if sess.target.target.options.has_elf_tls { - ret.push(attr::mk_word_item(InternedString::new("target_thread_local"))); + ret.insert((Symbol::intern("target_thread_local"), None)); } for &i in &[8, 16, 32, 64, 128] { if i <= max_atomic_width { let s = i.to_string(); - ret.push(mk(InternedString::new("target_has_atomic"), intern(&s))); + ret.insert((Symbol::intern("target_has_atomic"), Some(Symbol::intern(&s)))); if &s == wordsz { - ret.push(mk(InternedString::new("target_has_atomic"), intern("ptr"))); + ret.insert((Symbol::intern("target_has_atomic"), Some(Symbol::intern("ptr")))); } } } if sess.opts.debug_assertions { - ret.push(attr::mk_word_item(InternedString::new("debug_assertions"))); + ret.insert((Symbol::intern("debug_assertions"), None)); } if sess.opts.crate_types.contains(&CrateTypeProcMacro) { - ret.push(attr::mk_word_item(InternedString::new("proc_macro"))); + ret.insert((Symbol::intern("proc_macro"), None)); } return ret; } -pub fn append_configuration(cfg: &mut ast::CrateConfig, - name: InternedString) { - if !cfg.iter().any(|mi| mi.name() == name) { - cfg.push(attr::mk_word_item(name)) - } -} - pub fn build_configuration(sess: &Session, mut user_cfg: ast::CrateConfig) -> ast::CrateConfig { @@ -995,11 +984,10 @@ pub fn build_configuration(sess: &Session, let default_cfg = default_configuration(sess); // If the user wants a test runner, then add the test cfg if sess.opts.test { - append_configuration(&mut user_cfg, InternedString::new("test")) + user_cfg.insert((Symbol::intern("test"), None)); } - let mut v = user_cfg.into_iter().collect::>(); - v.extend_from_slice(&default_cfg[..]); - v + user_cfg.extend(default_cfg.iter().cloned()); + user_cfg } pub fn build_target_config(opts: &Options, sp: &Handler) -> Config { @@ -1245,11 +1233,14 @@ pub fn parse_cfgspecs(cfgspecs: Vec ) -> ast::CrateConfig { let meta_item = panictry!(parser.parse_meta_item()); if !parser.reader.is_eof() { - early_error(ErrorOutputType::default(), &format!("invalid --cfg argument: {}", - s)) + early_error(ErrorOutputType::default(), &format!("invalid --cfg argument: {}", s)) + } else if meta_item.is_meta_item_list() { + let msg = + format!("invalid predicate in --cfg command line argument: `{}`", meta_item.name()); + early_error(ErrorOutputType::default(), &msg) } - meta_item + (meta_item.name(), meta_item.value_str()) }).collect::() } @@ -1773,9 +1764,7 @@ mod tests { use std::rc::Rc; use super::{OutputType, OutputTypes, Externs}; use rustc_back::PanicStrategy; - use syntax::{ast, attr}; - use syntax::parse::token::InternedString; - use syntax::codemap::dummy_spanned; + use syntax::symbol::Symbol; fn optgroups() -> Vec { super::rustc_optgroups().into_iter() @@ -1804,9 +1793,7 @@ mod tests { let (sessopts, cfg) = build_session_options_and_crate_config(matches); let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); let cfg = build_configuration(&sess, cfg); - assert!(attr::contains(&cfg, 
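The `session::config` changes above turn `ast::CrateConfig` from a `Vec` of attribute-style `MetaItem`s into a set of `(name, value)` `Symbol` pairs, so building and querying the configuration become plain set operations. A sketch of the new shape, assuming the in-tree `Symbol` type; the `CrateConfig` alias and helper names here are illustrative:

    // In-tree sketch: cfg flags are (Symbol, Option<Symbol>) pairs in a set,
    // mirroring default_configuration/build_configuration above.
    use std::collections::HashSet;
    use syntax::symbol::Symbol;

    type CrateConfig = HashSet<(Symbol, Option<Symbol>)>;

    fn add_test_cfg(cfg: &mut CrateConfig) {
        cfg.insert((Symbol::intern("test"), None));
    }

    fn has_test_cfg(cfg: &CrateConfig) -> bool {
        cfg.contains(&(Symbol::intern("test"), None))
    }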
&dummy_spanned(ast::MetaItemKind::Word({ - InternedString::new("test") - })))); + assert!(cfg.contains(&(Symbol::intern("test"), None))); } // When the user supplies --test and --cfg test, don't implicitly add @@ -1827,7 +1814,7 @@ mod tests { let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); let cfg = build_configuration(&sess, cfg); - let mut test_items = cfg.iter().filter(|m| m.name() == "test"); + let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test"); assert!(test_items.next().is_some()); assert!(test_items.next().is_none()); } diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 9becbd99eb33a..9577a25b3f830 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -28,7 +28,7 @@ use syntax::json::JsonEmitter; use syntax::feature_gate; use syntax::parse; use syntax::parse::ParseSess; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax::{ast, codemap}; use syntax::feature_gate::AttributeType; use syntax_pos::{Span, MultiSpan}; @@ -89,7 +89,7 @@ pub struct Session { // forms a unique global identifier for the crate. It is used to allow // multiple crates with the same name to coexist. See the // trans::back::symbol_names module for more information. - pub crate_disambiguator: RefCell, + pub crate_disambiguator: RefCell, pub features: RefCell, /// The maximum recursion limit for potentially infinitely recursive @@ -129,8 +129,8 @@ pub struct PerfStats { } impl Session { - pub fn local_crate_disambiguator(&self) -> token::InternedString { - self.crate_disambiguator.borrow().clone() + pub fn local_crate_disambiguator(&self) -> Symbol { + *self.crate_disambiguator.borrow() } pub fn struct_span_warn<'a, S: Into>(&'a self, sp: S, @@ -610,7 +610,7 @@ pub fn build_session_(sopts: config::Options, plugin_attributes: RefCell::new(Vec::new()), crate_types: RefCell::new(Vec::new()), dependency_formats: RefCell::new(FxHashMap()), - crate_disambiguator: RefCell::new(token::intern("").as_str()), + crate_disambiguator: RefCell::new(Symbol::intern("")), features: RefCell::new(feature_gate::Features::new()), recursion_limit: Cell::new(64), next_node_id: Cell::new(NodeId::new(1)), diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index e0a397ad28fbd..21009711cb18a 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -246,12 +246,13 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let err_sp = item.meta().span.substitute_dummy(span); let def = self.tcx.lookup_trait_def(trait_ref.def_id); let trait_str = def.trait_ref.to_string(); - if let Some(ref istring) = item.value_str() { + if let Some(istring) = item.value_str() { + let istring = &*istring.as_str(); let generic_map = def.generics.types.iter().map(|param| { (param.name.as_str().to_string(), trait_ref.substs.type_for_def(param).to_string()) }).collect::>(); - let parser = Parser::new(&istring); + let parser = Parser::new(istring); let mut errored = false; let err: String = parser.filter_map(|p| { match p { diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index a2d45fa271499..76bead99343a7 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -26,8 +26,8 @@ use super::util; use hir::def_id::DefId; use infer::InferOk; use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap}; -use syntax::parse::token; use syntax::ast; +use syntax::symbol::Symbol; use ty::subst::Subst; use ty::{self, 
ToPredicate, ToPolyTraitRef, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder}; @@ -1245,7 +1245,7 @@ fn confirm_callable_candidate<'cx, 'gcx, 'tcx>( let predicate = ty::Binder(ty::ProjectionPredicate { // (1) recreate binder here projection_ty: ty::ProjectionTy { trait_ref: trait_ref, - item_name: token::intern(FN_OUTPUT_NAME), + item_name: Symbol::intern(FN_OUTPUT_NAME), }, ty: ret_type }); diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 45450456e8a65..20855c46b6800 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -49,7 +49,7 @@ use std::rc::Rc; use std::iter; use syntax::ast::{self, Name, NodeId}; use syntax::attr; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{Symbol, keywords}; use hir; @@ -561,7 +561,7 @@ pub struct GlobalCtxt<'tcx> { /// The definite name of the current crate after taking into account /// attributes, commandline parameters, etc. - pub crate_name: token::InternedString, + pub crate_name: Symbol, /// Data layout specification for the current target. pub data_layout: TargetDataLayout, @@ -574,7 +574,7 @@ pub struct GlobalCtxt<'tcx> { /// Map from function to the `#[derive]` mode that it's defining. Only used /// by `proc-macro` crates. - pub derive_macros: RefCell>, + pub derive_macros: RefCell>, } impl<'tcx> GlobalCtxt<'tcx> { @@ -588,15 +588,15 @@ impl<'tcx> GlobalCtxt<'tcx> { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - pub fn crate_name(self, cnum: CrateNum) -> token::InternedString { + pub fn crate_name(self, cnum: CrateNum) -> Symbol { if cnum == LOCAL_CRATE { - self.crate_name.clone() + self.crate_name } else { self.sess.cstore.crate_name(cnum) } } - pub fn original_crate_name(self, cnum: CrateNum) -> token::InternedString { + pub fn original_crate_name(self, cnum: CrateNum) -> Symbol { if cnum == LOCAL_CRATE { self.crate_name.clone() } else { @@ -604,7 +604,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn crate_disambiguator(self, cnum: CrateNum) -> token::InternedString { + pub fn crate_disambiguator(self, cnum: CrateNum) -> Symbol { if cnum == LOCAL_CRATE { self.sess.local_crate_disambiguator() } else { @@ -835,7 +835,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { custom_coerce_unsized_kinds: RefCell::new(DefIdMap()), cast_kinds: RefCell::new(NodeMap()), fragment_infos: RefCell::new(DefIdMap()), - crate_name: token::intern_and_get_ident(crate_name), + crate_name: Symbol::intern(crate_name), data_layout: data_layout, layout_cache: RefCell::new(FxHashMap()), layout_depth: Cell::new(0), diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index 8222583d9a7da..b93a8698f603c 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -12,7 +12,7 @@ use hir::map::DefPathData; use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use ty::{self, Ty, TyCtxt}; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::Symbol; use std::cell::Cell; @@ -94,14 +94,14 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { if let Some(extern_crate_def_id) = opt_extern_crate { self.push_item_path(buffer, extern_crate_def_id); } else { - buffer.push(&self.crate_name(cnum)); + buffer.push(&self.crate_name(cnum).as_str()); } } } RootMode::Absolute => { // In absolute mode, just write the crate name // unconditionally. 
- buffer.push(&self.original_crate_name(cnum)); + buffer.push(&self.original_crate_name(cnum).as_str()); } } } @@ -126,7 +126,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { return true; } None => { - buffer.push(&self.crate_name(cur_def.krate)); + buffer.push(&self.crate_name(cur_def.krate).as_str()); cur_path.iter().rev().map(|segment| buffer.push(&segment.as_str())).count(); return true; } @@ -136,7 +136,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { cur_path.push(self.sess.cstore.def_key(cur_def) .disambiguated_data.data.get_opt_name().unwrap_or_else(|| - token::intern(""))); + Symbol::intern(""))); match visible_parent_map.get(&cur_def) { Some(&def) => cur_def = def, None => return false, diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index f5c23401a4e67..e94e93158c47a 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -44,7 +44,7 @@ use std::vec::IntoIter; use std::mem; use syntax::ast::{self, Name, NodeId}; use syntax::attr; -use syntax::parse::token::{self, InternedString}; +use syntax::symbol::{Symbol, InternedString}; use syntax_pos::{DUMMY_SP, Span}; use rustc_const_math::ConstInt; @@ -2344,7 +2344,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { if let Some(id) = self.map.as_local_node_id(id) { self.map.name(id) } else if id.index == CRATE_DEF_INDEX { - token::intern(&self.sess.cstore.original_crate_name(id.krate)) + self.sess.cstore.original_crate_name(id.krate) } else { let def_key = self.sess.cstore.def_key(id); // The name of a StructCtor is that of its struct parent. @@ -2747,7 +2747,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err` /// with the name of the crate containing the impl. - pub fn span_of_impl(self, impl_did: DefId) -> Result { + pub fn span_of_impl(self, impl_did: DefId) -> Result { if impl_did.is_local() { let node_id = self.map.as_local_node_id(impl_did).unwrap(); Ok(self.map.span(node_id)) diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index 56466d5968298..81896ecfb5341 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -23,7 +23,7 @@ use std::fmt; use std::ops; use syntax::abi; use syntax::ast::{self, Name}; -use syntax::parse::token::{keywords, InternedString}; +use syntax::symbol::{keywords, InternedString}; use serialize; diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index a63c7ba6a25ce..d04825d560444 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -25,8 +25,8 @@ use std::fmt; use std::usize; use syntax::abi::Abi; -use syntax::parse::token; use syntax::ast::CRATE_NODE_ID; +use syntax::symbol::Symbol; use hir; pub fn verbose() -> bool { @@ -284,7 +284,7 @@ fn in_binder<'a, 'gcx, 'tcx, T, U>(f: &mut fmt::Formatter, ty::BrAnon(_) | ty::BrFresh(_) | ty::BrEnv => { - let name = token::intern("'r"); + let name = Symbol::intern("'r"); let _ = write!(f, "{}", name); ty::BrNamed(tcx.map.local_def_id(CRATE_NODE_ID), name, diff --git a/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs b/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs index b8c26a0512ff8..916d17dcc91de 100644 --- a/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs +++ b/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs @@ -169,7 +169,7 @@ fn is_rustc_peek<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, { let name = tcx.item_name(def_id); if abi == Abi::RustIntrinsic || abi == Abi::PlatformIntrinsic { - if name.as_str() == "rustc_peek" { + if name == "rustc_peek" { 
return Some((args, source_info.span)); } } diff --git a/src/librustc_borrowck/borrowck/mir/mod.rs b/src/librustc_borrowck/borrowck/mir/mod.rs index 836832de5b9c4..9035c2ab3c236 100644 --- a/src/librustc_borrowck/borrowck/mir/mod.rs +++ b/src/librustc_borrowck/borrowck/mir/mod.rs @@ -11,7 +11,6 @@ use borrowck::BorrowckCtxt; use syntax::ast::{self, MetaItem}; -use syntax::ptr::P; use syntax_pos::{Span, DUMMY_SP}; use rustc::hir; @@ -35,7 +34,7 @@ use self::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals}; use self::dataflow::{DefinitelyInitializedLvals}; use self::gather_moves::{MoveData, MovePathIndex, LookupResult}; -fn has_rustc_mir_with(attrs: &[ast::Attribute], name: &str) -> Option> { +fn has_rustc_mir_with(attrs: &[ast::Attribute], name: &str) -> Option { for attr in attrs { if attr.check_name("rustc_mir") { let items = attr.meta_item_list(); diff --git a/src/librustc_const_eval/eval.rs b/src/librustc_const_eval/eval.rs index 5f0c94744a118..c265fa97e2957 100644 --- a/src/librustc_const_eval/eval.rs +++ b/src/librustc_const_eval/eval.rs @@ -1221,7 +1221,7 @@ fn lit_to_const<'a, 'tcx>(lit: &ast::LitKind, use syntax::ast::*; use syntax::ast::LitIntType::*; match *lit { - LitKind::Str(ref s, _) => Ok(Str((*s).clone())), + LitKind::Str(ref s, _) => Ok(Str(s.as_str())), LitKind::ByteStr(ref data) => Ok(ByteStr(data.clone())), LitKind::Byte(n) => Ok(Integral(U8(n))), LitKind::Int(n, Signed(ity)) => { @@ -1249,15 +1249,15 @@ fn lit_to_const<'a, 'tcx>(lit: &ast::LitKind, infer(Infer(n), tcx, &ty::TyUint(ity)).map(Integral) }, - LitKind::Float(ref n, fty) => { - parse_float(n, Some(fty)).map(Float) + LitKind::Float(n, fty) => { + parse_float(&n.as_str(), Some(fty)).map(Float) } - LitKind::FloatUnsuffixed(ref n) => { + LitKind::FloatUnsuffixed(n) => { let fty_hint = match ty_hint.map(|t| &t.sty) { Some(&ty::TyFloat(fty)) => Some(fty), _ => None }; - parse_float(n, fty_hint).map(Float) + parse_float(&n.as_str(), fty_hint).map(Float) } LitKind::Bool(b) => Ok(Bool(b)), LitKind::Char(c) => Ok(Char(c)), diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 6a3a1bbb55ca2..228119e6cc7da 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -53,7 +53,8 @@ use std::path::{Path, PathBuf}; use syntax::{ast, diagnostics, visit}; use syntax::attr; use syntax::ext::base::ExtCtxt; -use syntax::parse::{self, PResult, token}; +use syntax::parse::{self, PResult}; +use syntax::symbol::Symbol; use syntax::util::node_count::NodeCounter; use syntax; use syntax_ext; @@ -210,9 +211,6 @@ pub fn compile_input(sess: &Session, tcx.print_debug_stats(); } - // Discard interned strings as they are no longer required. - token::clear_ident_interner(); - Ok((outputs, trans)) })?? 
}; @@ -563,8 +561,7 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session, *sess.features.borrow_mut() = features; *sess.crate_types.borrow_mut() = collect_crate_types(sess, &krate.attrs); - *sess.crate_disambiguator.borrow_mut() = - token::intern(&compute_crate_disambiguator(sess)).as_str(); + *sess.crate_disambiguator.borrow_mut() = Symbol::intern(&compute_crate_disambiguator(sess)); time(time_passes, "recursion limit", || { middle::recursion_limit::update_recursion_limit(sess, &krate); @@ -1107,7 +1104,7 @@ pub fn phase_6_link_output(sess: &Session, outputs: &OutputFilenames) { time(sess.time_passes(), "linking", - || link::link_binary(sess, trans, outputs, &trans.link.crate_name)); + || link::link_binary(sess, trans, outputs, &trans.link.crate_name.as_str())); } fn escape_dep_filename(filename: &str) -> String { @@ -1358,11 +1355,3 @@ pub fn build_output_filenames(input: &Input, } } } - -// For use by the `rusti` project (https://github.com/murarth/rusti). -pub fn reset_thread_local_state() { - // These may be left in an incoherent state after a previous compile. - syntax::ext::hygiene::reset_hygiene_data(); - // `clear_ident_interner` can be used to free memory, but it does not restore the initial state. - token::reset_ident_interner(); -} diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 7e60c40220f84..a0863e0d29422 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -95,12 +95,11 @@ use std::str; use std::sync::{Arc, Mutex}; use std::thread; -use syntax::{ast, json}; +use syntax::ast; use syntax::codemap::{CodeMap, FileLoader, RealFileLoader}; use syntax::feature_gate::{GatedCfg, UnstableFeatures}; use syntax::parse::{self, PResult}; -use syntax_pos::MultiSpan; -use errors::emitter::Emitter; +use syntax_pos::{DUMMY_SP, MultiSpan}; #[cfg(test)] pub mod test; @@ -374,37 +373,11 @@ fn handle_explain(code: &str, } } -fn check_cfg(cfg: &ast::CrateConfig, - output: ErrorOutputType) { - let emitter: Box = match output { - config::ErrorOutputType::HumanReadable(color_config) => { - Box::new(errors::emitter::EmitterWriter::stderr(color_config, None)) - } - config::ErrorOutputType::Json => Box::new(json::JsonEmitter::basic()), - }; - let handler = errors::Handler::with_emitter(true, false, emitter); - - let mut saw_invalid_predicate = false; - for item in cfg.iter() { - if item.is_meta_item_list() { - saw_invalid_predicate = true; - handler.emit(&MultiSpan::new(), - &format!("invalid predicate in --cfg command line argument: `{}`", - item.name()), - errors::Level::Fatal); - } - } - - if saw_invalid_predicate { - panic!(errors::FatalError); - } -} - impl<'a> CompilerCalls<'a> for RustcDefaultCalls { fn early_callback(&mut self, matches: &getopts::Matches, _: &config::Options, - cfg: &ast::CrateConfig, + _: &ast::CrateConfig, descriptions: &errors::registry::Registry, output: ErrorOutputType) -> Compilation { @@ -413,7 +386,6 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { return Compilation::Stop; } - check_cfg(cfg, output); Compilation::Continue } @@ -640,24 +612,27 @@ impl RustcDefaultCalls { let allow_unstable_cfg = UnstableFeatures::from_environment() .is_nightly_build(); - for cfg in &sess.parse_sess.config { - if !allow_unstable_cfg && GatedCfg::gate(cfg).is_some() { + let mut cfgs = Vec::new(); + for &(name, ref value) in sess.parse_sess.config.iter() { + let gated_cfg = GatedCfg::gate(&ast::MetaItem { + name: name, + node: ast::MetaItemKind::Word, + span: DUMMY_SP, + }); + if !allow_unstable_cfg && gated_cfg.is_some() { 
continue; } - if cfg.is_word() { - println!("{}", cfg.name()); - } else if let Some(s) = cfg.value_str() { - println!("{}=\"{}\"", cfg.name(), s); - } else if cfg.is_meta_item_list() { - // Right now there are not and should not be any - // MetaItemKind::List items in the configuration returned by - // `build_configuration`. - panic!("Found an unexpected list in cfg attribute '{}'!", cfg.name()) + cfgs.push(if let &Some(ref value) = value { + format!("{}=\"{}\"", name, value) } else { - // There also shouldn't be literals. - panic!("Found an unexpected literal in cfg attribute '{}'!", cfg.name()) - } + format!("{}", name) + }); + } + + cfgs.sort(); + for cfg in cfgs { + println!("{}", cfg); } } PrintRequest::TargetCPUs => { diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index ecbf28c1082f9..6c99c9d71b815 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -450,15 +450,15 @@ impl<'ast> PrinterSupport<'ast> for HygieneAnnotation<'ast> { impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> { fn post(&self, s: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> { match node { - pprust::NodeIdent(&ast::Ident { name: ast::Name(nm), ctxt }) => { + pprust::NodeIdent(&ast::Ident { name, ctxt }) => { pp::space(&mut s.s)?; // FIXME #16420: this doesn't display the connections // between syntax contexts - s.synth_comment(format!("{}{:?}", nm, ctxt)) + s.synth_comment(format!("{}{:?}", name.as_u32(), ctxt)) } - pprust::NodeName(&ast::Name(nm)) => { + pprust::NodeName(&name) => { pp::space(&mut s.s)?; - s.synth_comment(nm.to_string()) + s.synth_comment(name.as_u32().to_string()) } _ => Ok(()), } diff --git a/src/librustc_driver/target_features.rs b/src/librustc_driver/target_features.rs index 57a9edc5c586b..876323d599e85 100644 --- a/src/librustc_driver/target_features.rs +++ b/src/librustc_driver/target_features.rs @@ -8,13 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
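(Aside, not part of the patch: the reworked `--print cfg` loop above collects the crate config as plain (name, optional value) pairs, renders each entry, and sorts before printing. Below is a minimal standalone sketch of that behaviour, with `String` standing in for the compiler's interned symbol type and a hypothetical `print_cfg` helper, so it runs outside the compiler tree.)

```rust
use std::collections::BTreeSet;

// Model of the post-patch crate config: a set of (name, optional value) pairs.
fn print_cfg(config: &BTreeSet<(String, Option<String>)>) {
    let mut cfgs: Vec<String> = config
        .iter()
        .map(|&(ref name, ref value)| match *value {
            Some(ref value) => format!("{}=\"{}\"", name, value),
            None => name.clone(),
        })
        .collect();
    // Sorting the rendered strings keeps the output deterministic
    // regardless of how the underlying set iterates.
    cfgs.sort();
    for cfg in cfgs {
        println!("{}", cfg);
    }
}

fn main() {
    let mut config = BTreeSet::new();
    config.insert(("unix".to_string(), None));
    config.insert(("target_os".to_string(), Some("linux".to_string())));
    print_cfg(&config);
}
```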
-use syntax::{ast, attr}; +use syntax::ast; use llvm::LLVMRustHasFeature; use rustc::session::Session; use rustc_trans::back::write::create_target_machine; use syntax::feature_gate::UnstableFeatures; -use syntax::parse::token::InternedString; -use syntax::parse::token::intern_and_get_ident as intern; +use syntax::symbol::Symbol; use libc::c_char; // WARNING: the features must be known to LLVM or the feature @@ -41,11 +40,11 @@ pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) { _ => &[], }; - let tf = InternedString::new("target_feature"); + let tf = Symbol::intern("target_feature"); for feat in whitelist { assert_eq!(feat.chars().last(), Some('\0')); if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } { - cfg.push(attr::mk_name_value_item_str(tf.clone(), intern(&feat[..feat.len() - 1]))) + cfg.insert((tf, Some(Symbol::intern(&feat[..feat.len() - 1])))); } } @@ -74,6 +73,6 @@ pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) { } if crt_static { - cfg.push(attr::mk_name_value_item_str(tf.clone(), intern("crt-static"))); + cfg.insert((tf, Some(Symbol::intern("crt-static")))); } } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 782c74c8c78c7..a4f0e29038477 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -34,8 +34,8 @@ use syntax::codemap::CodeMap; use errors; use errors::emitter::Emitter; use errors::{Level, DiagnosticBuilder}; -use syntax::parse::token; use syntax::feature_gate::UnstableFeatures; +use syntax::symbol::Symbol; use rustc::hir; @@ -288,11 +288,11 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { pub fn t_param(&self, index: u32) -> Ty<'tcx> { let name = format!("T{}", index); - self.infcx.tcx.mk_param(index, token::intern(&name[..])) + self.infcx.tcx.mk_param(index, Symbol::intern(&name[..])) } pub fn re_early_bound(&self, index: u32, name: &'static str) -> &'tcx ty::Region { - let name = token::intern(name); + let name = Symbol::intern(name); self.infcx.tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion { index: index, name: name, diff --git a/src/librustc_incremental/assert_dep_graph.rs b/src/librustc_incremental/assert_dep_graph.rs index 998cbae2cce15..87e6b2befdc32 100644 --- a/src/librustc_incremental/assert_dep_graph.rs +++ b/src/librustc_incremental/assert_dep_graph.rs @@ -57,7 +57,6 @@ use std::env; use std::fs::File; use std::io::Write; use syntax::ast; -use syntax::parse::token::InternedString; use syntax_pos::Span; use {ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED}; @@ -97,7 +96,7 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { } type Sources = Vec<(Span, DefId, DepNode)>; -type Targets = Vec<(Span, InternedString, ast::NodeId, DepNode)>; +type Targets = Vec<(Span, ast::Name, ast::NodeId, DepNode)>; struct IfThisChanged<'a, 'tcx:'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, @@ -106,7 +105,7 @@ struct IfThisChanged<'a, 'tcx:'a> { } impl<'a, 'tcx> IfThisChanged<'a, 'tcx> { - fn argument(&self, attr: &ast::Attribute) -> Option { + fn argument(&self, attr: &ast::Attribute) -> Option { let mut value = None; for list_item in attr.meta_item_list().unwrap_or_default() { match list_item.word() { @@ -127,8 +126,8 @@ impl<'a, 'tcx> IfThisChanged<'a, 'tcx> { let dep_node_interned = self.argument(attr); let dep_node = match dep_node_interned { None => DepNode::Hir(def_id), - Some(ref n) => { - match DepNode::from_label_string(&n[..], def_id) { + Some(n) => { + match DepNode::from_label_string(&n.as_str(), def_id) { Ok(n) => n, Err(()) => { 
self.tcx.sess.span_fatal( @@ -142,8 +141,8 @@ impl<'a, 'tcx> IfThisChanged<'a, 'tcx> { } else if attr.check_name(ATTR_THEN_THIS_WOULD_NEED) { let dep_node_interned = self.argument(attr); let dep_node = match dep_node_interned { - Some(ref n) => { - match DepNode::from_label_string(&n[..], def_id) { + Some(n) => { + match DepNode::from_label_string(&n.as_str(), def_id) { Ok(n) => n, Err(()) => { self.tcx.sess.span_fatal( @@ -159,7 +158,7 @@ impl<'a, 'tcx> IfThisChanged<'a, 'tcx> { } }; self.then_this_would_need.push((attr.span, - dep_node_interned.clone().unwrap(), + dep_node_interned.unwrap(), node_id, dep_node)); } diff --git a/src/librustc_incremental/calculate_svh/mod.rs b/src/librustc_incremental/calculate_svh/mod.rs index f98e698a1c9d4..250ef061e5109 100644 --- a/src/librustc_incremental/calculate_svh/mod.rs +++ b/src/librustc_incremental/calculate_svh/mod.rs @@ -172,8 +172,8 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> { let crate_disambiguator = self.tcx.sess.local_crate_disambiguator(); "crate_disambiguator".hash(&mut crate_state); - crate_disambiguator.len().hash(&mut crate_state); - crate_disambiguator.hash(&mut crate_state); + crate_disambiguator.as_str().len().hash(&mut crate_state); + crate_disambiguator.as_str().hash(&mut crate_state); // add each item (in some deterministic order) to the overall // crate hash. diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index a1ece48462b14..4bad264ac8749 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -18,6 +18,7 @@ use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId}; use syntax::attr; use syntax::parse::token; +use syntax::symbol::{Symbol, InternedString}; use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; use syntax::tokenstream; use rustc::hir; @@ -169,8 +170,8 @@ enum SawAbiComponent<'a> { // FIXME (#14132): should we include (some function of) // ident.ctxt as well? - SawIdent(token::InternedString), - SawStructDef(token::InternedString), + SawIdent(InternedString), + SawStructDef(InternedString), SawLifetime, SawLifetimeDef(usize), @@ -232,11 +233,11 @@ enum SawAbiComponent<'a> { #[derive(Hash)] enum SawExprComponent<'a> { - SawExprLoop(Option), - SawExprField(token::InternedString), + SawExprLoop(Option), + SawExprField(InternedString), SawExprTupField(usize), - SawExprBreak(Option), - SawExprAgain(Option), + SawExprBreak(Option), + SawExprAgain(Option), SawExprBox, SawExprArray, @@ -246,6 +247,8 @@ enum SawExprComponent<'a> { SawExprBinary(hir::BinOp_), SawExprUnary(hir::UnOp), SawExprLit(ast::LitKind), + SawExprLitStr(InternedString, ast::StrStyle), + SawExprLitFloat(InternedString, Option), SawExprCast, SawExprType, SawExprIf, @@ -314,7 +317,7 @@ fn saw_expr<'a>(node: &'a Expr_, ExprUnary(op, _) => { (SawExprUnary(op), unop_can_panic_at_runtime(op)) } - ExprLit(ref lit) => (SawExprLit(lit.node.clone()), false), + ExprLit(ref lit) => (saw_lit(lit), false), ExprCast(..) => (SawExprCast, false), ExprType(..) => (SawExprType, false), ExprIf(..) 
=> (SawExprIf, false), @@ -341,6 +344,15 @@ fn saw_expr<'a>(node: &'a Expr_, } } +fn saw_lit(lit: &ast::Lit) -> SawExprComponent<'static> { + match lit.node { + ast::LitKind::Str(s, style) => SawExprLitStr(s.as_str(), style), + ast::LitKind::Float(s, ty) => SawExprLitFloat(s.as_str(), Some(ty)), + ast::LitKind::FloatUnsuffixed(s) => SawExprLitFloat(s.as_str(), None), + ref node @ _ => SawExprLit(node.clone()), + } +} + #[derive(Hash)] enum SawItemComponent { SawItemExternCrate, @@ -874,22 +886,16 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { // ignoring span information, it doesn't matter here self.hash_discriminant(&meta_item.node); + meta_item.name.as_str().len().hash(self.st); + meta_item.name.as_str().hash(self.st); + match meta_item.node { - ast::MetaItemKind::Word(ref s) => { - s.len().hash(self.st); - s.hash(self.st); - } - ast::MetaItemKind::NameValue(ref s, ref lit) => { - s.len().hash(self.st); - s.hash(self.st); - lit.node.hash(self.st); - } - ast::MetaItemKind::List(ref s, ref items) => { - s.len().hash(self.st); - s.hash(self.st); + ast::MetaItemKind::Word => {} + ast::MetaItemKind::NameValue(ref lit) => saw_lit(lit).hash(self.st), + ast::MetaItemKind::List(ref items) => { // Sort subitems so the hash does not depend on their order let indices = self.indices_sorted_by(&items, |p| { - (p.name(), fnv::hash(&p.literal().map(|i| &i.node))) + (p.name().map(Symbol::as_str), fnv::hash(&p.literal().map(saw_lit))) }); items.len().hash(self.st); for (index, &item_index) in indices.iter().enumerate() { @@ -901,7 +907,7 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { self.hash_meta_item(meta_item); } ast::NestedMetaItemKind::Literal(ref lit) => { - lit.node.hash(self.st); + saw_lit(lit).hash(self.st); } } } @@ -914,11 +920,11 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { let indices = self.indices_sorted_by(attributes, |attr| attr.name()); for i in indices { - let attr = &attributes[i].node; + let attr = &attributes[i]; if !attr.is_sugared_doc && - !IGNORED_ATTRIBUTES.contains(&&*attr.value.name()) { + !IGNORED_ATTRIBUTES.contains(&&*attr.value.name().as_str()) { SawAttribute(attr.style).hash(self.st); - self.hash_meta_item(&*attr.value); + self.hash_meta_item(&attr.value); } } } diff --git a/src/librustc_incremental/persist/directory.rs b/src/librustc_incremental/persist/directory.rs index d238121872be6..546feb212243a 100644 --- a/src/librustc_incremental/persist/directory.rs +++ b/src/librustc_incremental/persist/directory.rs @@ -84,8 +84,8 @@ impl DefIdDirectory { assert_eq!(old_info.krate, krate); let old_name: &str = &old_info.name; let old_disambiguator: &str = &old_info.disambiguator; - let new_name: &str = &tcx.crate_name(krate); - let new_disambiguator: &str = &tcx.crate_disambiguator(krate); + let new_name: &str = &tcx.crate_name(krate).as_str(); + let new_disambiguator: &str = &tcx.crate_disambiguator(krate).as_str(); old_name == new_name && old_disambiguator == new_disambiguator } } @@ -99,8 +99,8 @@ impl DefIdDirectory { let new_krates: HashMap<_, _> = once(LOCAL_CRATE) .chain(tcx.sess.cstore.crates()) - .map(|krate| (make_key(&tcx.crate_name(krate), - &tcx.crate_disambiguator(krate)), krate)) + .map(|krate| (make_key(&tcx.crate_name(krate).as_str(), + &tcx.crate_disambiguator(krate).as_str()), krate)) .collect(); let ids = self.paths.iter() diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs index 0cd1c88fb877b..85c35bf79ce81 100644 --- 
a/src/librustc_incremental/persist/dirty_clean.rs +++ b/src/librustc_incremental/persist/dirty_clean.rs @@ -48,7 +48,6 @@ use rustc::hir::def_id::DefId; use rustc::hir::itemlikevisit::ItemLikeVisitor; use syntax::ast::{self, Attribute, NestedMetaItem}; use rustc_data_structures::fx::{FxHashSet, FxHashMap}; -use syntax::parse::token::InternedString; use syntax_pos::Span; use rustc::ty::TyCtxt; use ich::Fingerprint; @@ -88,12 +87,11 @@ pub struct DirtyCleanVisitor<'a, 'tcx:'a> { } impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> { - fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode { for item in attr.meta_item_list().unwrap_or(&[]) { if item.check_name(LABEL) { let value = expect_associated_value(self.tcx, item); - match DepNode::from_label_string(&value[..], def_id) { + match DepNode::from_label_string(&value.as_str(), def_id) { Ok(def_id) => return def_id, Err(()) => { self.tcx.sess.span_fatal( @@ -276,13 +274,7 @@ fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool { if item.check_name(CFG) { let value = expect_associated_value(tcx, item); debug!("check_config: searching for cfg {:?}", value); - for cfg in &config[..] { - if cfg.check_name(&value[..]) { - debug!("check_config: matched {:?}", cfg); - return true; - } - } - return false; + return config.contains(&(value, None)); } } @@ -291,7 +283,7 @@ fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool { &format!("no cfg attribute")); } -fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> InternedString { +fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> ast::Name { if let Some(value) = item.value_str() { value } else { diff --git a/src/librustc_incremental/persist/fs.rs b/src/librustc_incremental/persist/fs.rs index 2572a9c1d78f6..26181dbaf50ff 100644 --- a/src/librustc_incremental/persist/fs.rs +++ b/src/librustc_incremental/persist/fs.rs @@ -604,7 +604,7 @@ fn string_to_timestamp(s: &str) -> Result { } fn crate_path_tcx(tcx: TyCtxt, cnum: CrateNum) -> PathBuf { - crate_path(tcx.sess, &tcx.crate_name(cnum), &tcx.crate_disambiguator(cnum)) + crate_path(tcx.sess, &tcx.crate_name(cnum).as_str(), &tcx.crate_disambiguator(cnum).as_str()) } /// Finds the session directory containing the correct metadata hashes file for diff --git a/src/librustc_lint/bad_style.rs b/src/librustc_lint/bad_style.rs index 6320a923d690c..4440cb41dc5f2 100644 --- a/src/librustc_lint/bad_style.rs +++ b/src/librustc_lint/bad_style.rs @@ -81,19 +81,12 @@ impl NonCamelCaseTypes { .concat() } - let s = name.as_str(); - if !is_camel_case(name) { - let c = to_camel_case(&s); + let c = to_camel_case(&name.as_str()); let m = if c.is_empty() { - format!("{} `{}` should have a camel case name such as `CamelCase`", - sort, - s) + format!("{} `{}` should have a camel case name such as `CamelCase`", sort, name) } else { - format!("{} `{}` should have a camel case name such as `{}`", - sort, - s, - c) + format!("{} `{}` should have a camel case name such as `{}`", sort, name, c) }; cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]); } @@ -241,8 +234,8 @@ impl LateLintPass for NonSnakeCase { .and_then(|at| at.value_str().map(|s| (at, s))); if let Some(ref name) = cx.tcx.sess.opts.crate_name { self.check_snake_case(cx, "crate", name, None); - } else if let Some((attr, ref name)) = attr_crate_name { - self.check_snake_case(cx, "crate", name, Some(attr.span)); + } else if let Some((attr, name)) = attr_crate_name { + self.check_snake_case(cx, "crate", &name.as_str(), Some(attr.span)); } } @@ -326,21 +319,19 @@ pub struct 
NonUpperCaseGlobals; impl NonUpperCaseGlobals { fn check_upper_case(cx: &LateContext, sort: &str, name: ast::Name, span: Span) { - let s = name.as_str(); - - if s.chars().any(|c| c.is_lowercase()) { - let uc = NonSnakeCase::to_snake_case(&s).to_uppercase(); - if uc != &s[..] { + if name.as_str().chars().any(|c| c.is_lowercase()) { + let uc = NonSnakeCase::to_snake_case(&name.as_str()).to_uppercase(); + if name != &*uc { cx.span_lint(NON_UPPER_CASE_GLOBALS, span, &format!("{} `{}` should have an upper case name such as `{}`", sort, - s, + name, uc)); } else { cx.span_lint(NON_UPPER_CASE_GLOBALS, span, - &format!("{} `{}` should have an upper case name", sort, s)); + &format!("{} `{}` should have an upper case name", sort, name)); } } } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 51ffb1ebc8e99..0b2ae58852300 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -45,6 +45,7 @@ use std::collections::HashSet; use syntax::ast; use syntax::attr; use syntax::feature_gate::{AttributeGate, AttributeType, Stability, deprecated_attributes}; +use syntax::symbol::Symbol; use syntax_pos::Span; use rustc::hir::{self, PatKind}; @@ -633,9 +634,9 @@ impl Deprecated { stability: &Option<&attr::Stability>, deprecation: &Option) { // Deprecated attributes apply in-crate and cross-crate. - if let Some(&attr::Stability{rustc_depr: Some(attr::RustcDeprecation{ref reason, ..}), ..}) + if let Some(&attr::Stability{rustc_depr: Some(attr::RustcDeprecation{reason, ..}), ..}) = *stability { - output(cx, DEPRECATED, span, Some(&reason)) + output(cx, DEPRECATED, span, Some(reason)) } else if let Some(ref depr_entry) = *deprecation { if let Some(parent_depr) = cx.tcx.lookup_deprecation_entry(self.parent_def(cx)) { if parent_depr.same_origin(depr_entry) { @@ -643,10 +644,10 @@ impl Deprecated { } } - output(cx, DEPRECATED, span, depr_entry.attr.note.as_ref().map(|x| &**x)) + output(cx, DEPRECATED, span, depr_entry.attr.note) } - fn output(cx: &LateContext, lint: &'static Lint, span: Span, note: Option<&str>) { + fn output(cx: &LateContext, lint: &'static Lint, span: Span, note: Option) { let msg = if let Some(note) = note { format!("use of deprecated item: {}", note) } else { @@ -772,9 +773,9 @@ impl LintPass for DeprecatedAttr { impl EarlyLintPass for DeprecatedAttr { fn check_attribute(&mut self, cx: &EarlyContext, attr: &ast::Attribute) { - let name = &*attr.name(); + let name = attr.name(); for &&(n, _, ref g) in &self.depr_attrs { - if n == name { + if name == n { if let &AttributeGate::Gated(Stability::Deprecated(link), ref name, ref reason, @@ -1228,7 +1229,7 @@ impl LateLintPass for MutableTransmutes { ty::TyFnDef(.., ref bfty) if bfty.abi == RustIntrinsic => (), _ => return false, } - cx.tcx.item_name(def_id).as_str() == "transmute" + cx.tcx.item_name(def_id) == "transmute" } } } diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index 4155d3e67a26d..8aa5e9e0a9460 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -219,9 +219,9 @@ impl LateLintPass for TypeLimits { ty::TyFloat(t) => { let (min, max) = float_ty_range(t); let lit_val: f64 = match lit.node { - ast::LitKind::Float(ref v, _) | - ast::LitKind::FloatUnsuffixed(ref v) => { - match v.parse() { + ast::LitKind::Float(v, _) | + ast::LitKind::FloatUnsuffixed(v) => { + match v.as_str().parse() { Ok(f) => f, Err(_) => return, } diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index 0668d362037dd..fa452017f0cf1 100644 --- 
a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -20,7 +20,7 @@ use std::collections::hash_map::Entry::{Occupied, Vacant}; use syntax::ast; use syntax::attr; use syntax::feature_gate::{BUILTIN_ATTRIBUTES, AttributeType}; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax::ptr::P; use syntax_pos::Span; @@ -48,7 +48,7 @@ impl UnusedMut { let name = path1.node; if let hir::BindByValue(hir::MutMutable) = mode { if !name.as_str().starts_with("_") { - match mutables.entry(name.0 as usize) { + match mutables.entry(name) { Vacant(entry) => { entry.insert(vec![id]); } @@ -162,7 +162,7 @@ impl LateLintPass for UnusedResults { // check for #[must_use="..."] if let Some(s) = attr.value_str() { msg.push_str(": "); - msg.push_str(&s); + msg.push_str(&s.as_str()); } cx.span_lint(UNUSED_MUST_USE, sp, &msg); return true; @@ -274,10 +274,10 @@ impl LateLintPass for UnusedAttributes { // Has a plugin registered this attribute as one which must be used at // the crate level? let plugin_crate = plugin_attributes.iter() - .find(|&&(ref x, t)| &*attr.name() == x && AttributeType::CrateLevel == t) + .find(|&&(ref x, t)| attr.name() == &**x && AttributeType::CrateLevel == t) .is_some(); if known_crate || plugin_crate { - let msg = match attr.node.style { + let msg = match attr.style { ast::AttrStyle::Outer => { "crate-level attribute should be an inner attribute: add an exclamation \ mark: #![foo]" diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 5384535024e53..79a0d0d0389a9 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -37,7 +37,7 @@ use syntax::abi::Abi; use syntax::attr; use syntax::ext::base::SyntaxExtension; use syntax::feature_gate::{self, GateIssue}; -use syntax::parse::token::{InternedString, intern}; +use syntax::symbol::Symbol; use syntax_pos::{Span, DUMMY_SP}; use log; @@ -52,7 +52,7 @@ pub struct CrateLoader<'a> { cstore: &'a CStore, next_crate_num: CrateNum, foreign_item_map: FxHashMap>, - local_crate_name: String, + local_crate_name: Symbol, } fn dump_crates(cstore: &CStore) { @@ -70,8 +70,8 @@ fn dump_crates(cstore: &CStore) { #[derive(Debug)] struct ExternCrateInfo { - ident: String, - name: String, + ident: Symbol, + name: Symbol, id: ast::NodeId, dep_kind: DepKind, } @@ -80,7 +80,7 @@ fn register_native_lib(sess: &Session, cstore: &CStore, span: Option, lib: NativeLibrary) { - if lib.name.is_empty() { + if lib.name.as_str().is_empty() { match span { Some(span) => { struct_span_err!(sess, span, E0454, @@ -147,7 +147,7 @@ impl<'a> CrateLoader<'a> { cstore: cstore, next_crate_num: cstore.next_crate_num(), foreign_item_map: FxHashMap(), - local_crate_name: local_crate_name.to_owned(), + local_crate_name: Symbol::intern(local_crate_name), } } @@ -160,12 +160,12 @@ impl<'a> CrateLoader<'a> { Some(name) => { validate_crate_name(Some(self.sess), &name.as_str(), Some(i.span)); - name.to_string() + name } - None => i.ident.to_string(), + None => i.ident.name, }; Some(ExternCrateInfo { - ident: i.ident.to_string(), + ident: i.ident.name, name: name, id: i.id, dep_kind: if attr::contains_name(&i.attrs, "no_link") { @@ -179,7 +179,7 @@ impl<'a> CrateLoader<'a> { } } - fn existing_match(&self, name: &str, hash: Option<&Svh>, kind: PathKind) + fn existing_match(&self, name: Symbol, hash: Option<&Svh>, kind: PathKind) -> Option { let mut ret = None; self.cstore.iter_crate_data(|cnum, data| { @@ -201,7 +201,7 @@ impl<'a> CrateLoader<'a> { // `source` stores paths which are normalized which 
may be different // from the strings on the command line. let source = self.cstore.used_crate_source(cnum); - if let Some(locs) = self.sess.opts.externs.get(name) { + if let Some(locs) = self.sess.opts.externs.get(&*name.as_str()) { let found = locs.iter().any(|l| { let l = fs::canonicalize(l).ok(); source.dylib.as_ref().map(|p| &p.0) == l.as_ref() || @@ -233,7 +233,7 @@ impl<'a> CrateLoader<'a> { root: &CrateRoot) { // Check for (potential) conflicts with the local crate if self.local_crate_name == root.name && - self.sess.local_crate_disambiguator() == &root.disambiguator[..] { + self.sess.local_crate_disambiguator() == root.disambiguator { span_fatal!(self.sess, span, E0519, "the current crate is indistinguishable from one of its \ dependencies: it has the same crate-name `{}` and was \ @@ -258,8 +258,8 @@ impl<'a> CrateLoader<'a> { fn register_crate(&mut self, root: &Option, - ident: &str, - name: &str, + ident: Symbol, + name: Symbol, span: Span, lib: Library, dep_kind: DepKind) @@ -290,7 +290,7 @@ impl<'a> CrateLoader<'a> { let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind); let cmeta = Rc::new(cstore::CrateMetadata { - name: name.to_string(), + name: name, extern_crate: Cell::new(None), key_map: metadata.load_key_map(crate_root.index), proc_macros: crate_root.macro_derive_registrar.map(|_| { @@ -314,8 +314,8 @@ impl<'a> CrateLoader<'a> { fn resolve_crate(&mut self, root: &Option, - ident: &str, - name: &str, + ident: Symbol, + name: Symbol, hash: Option<&Svh>, span: Span, path_kind: PathKind, @@ -456,13 +456,12 @@ impl<'a> CrateLoader<'a> { let deps = crate_root.crate_deps.decode(metadata); let map: FxHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| { debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash); - let dep_name = &dep.name.as_str(); let dep_kind = match dep_kind { DepKind::MacrosOnly => DepKind::MacrosOnly, _ => dep.kind, }; let (local_cnum, ..) 
= self.resolve_crate( - root, dep_name, dep_name, Some(&dep.hash), span, PathKind::Dependency, dep_kind, + root, dep.name, dep.name, Some(&dep.hash), span, PathKind::Dependency, dep_kind, ); (CrateNum::new(crate_num + 1), local_cnum) }).collect(); @@ -482,13 +481,11 @@ impl<'a> CrateLoader<'a> { let target_triple = &self.sess.opts.target_triple[..]; let is_cross = target_triple != config::host_triple(); let mut target_only = false; - let ident = info.ident.clone(); - let name = info.name.clone(); let mut locate_ctxt = locator::Context { sess: self.sess, span: span, - ident: &ident[..], - crate_name: &name[..], + ident: info.ident, + crate_name: info.name, hash: None, filesearch: self.sess.host_filesearch(PathKind::Crate), target: &self.sess.host, @@ -582,11 +579,11 @@ impl<'a> CrateLoader<'a> { trait_name: &str, expand: fn(TokenStream) -> TokenStream, attributes: &[&'static str]) { - let attrs = attributes.iter().map(|s| InternedString::new(s)).collect(); + let attrs = attributes.iter().cloned().map(Symbol::intern).collect(); let derive = SyntaxExtension::CustomDerive( Box::new(CustomDerive::new(expand, attrs)) ); - self.0.push((intern(trait_name), Rc::new(derive))); + self.0.push((Symbol::intern(trait_name), Rc::new(derive))); } } @@ -604,8 +601,8 @@ impl<'a> CrateLoader<'a> { pub fn find_plugin_registrar(&mut self, span: Span, name: &str) -> Option<(PathBuf, Svh, DefIndex)> { let ekrate = self.read_extension_crate(span, &ExternCrateInfo { - name: name.to_string(), - ident: name.to_string(), + name: Symbol::intern(name), + ident: Symbol::intern(name), id: ast::DUMMY_NODE_ID, dep_kind: DepKind::MacrosOnly, }); @@ -642,7 +639,7 @@ impl<'a> CrateLoader<'a> { let libs = self.cstore.get_used_libraries(); for (foreign_lib, list) in self.foreign_item_map.iter() { let is_static = libs.borrow().iter().any(|lib| { - *foreign_lib == lib.name && lib.kind == cstore::NativeStatic + lib.name == &**foreign_lib && lib.kind == cstore::NativeStatic }); if is_static { for id in list { @@ -705,8 +702,8 @@ impl<'a> CrateLoader<'a> { // in terms of everyone has a compatible panic runtime format, that's // performed later as part of the `dependency_format` module. 
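(Aside: the crate-loader changes in this region swap owned `String` crate names for interned symbols that are `Copy` and compare by index. The toy interner below is only meant to illustrate that idea; `Sym` and `Interner` are made-up names for this sketch, not the `syntax::symbol` implementation.)

```rust
use std::collections::HashMap;

// A name is just a small Copy handle into a string table.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct Sym(u32);

#[derive(Default)]
struct Interner {
    names: HashMap<String, Sym>,
    strings: Vec<String>,
}

impl Interner {
    fn intern(&mut self, string: &str) -> Sym {
        if let Some(&sym) = self.names.get(string) {
            return sym; // already interned: reuse the existing handle
        }
        let sym = Sym(self.strings.len() as u32);
        self.strings.push(string.to_string());
        self.names.insert(string.to_string(), sym);
        sym
    }

    fn as_str(&self, sym: Sym) -> &str {
        &self.strings[sym.0 as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("panic_unwind");
    let b = interner.intern("panic_unwind");
    assert_eq!(a, b); // comparing two u32 handles, no string traversal
    println!("{}", interner.as_str(a));
}
```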
let name = match desired_strategy { - PanicStrategy::Unwind => "panic_unwind", - PanicStrategy::Abort => "panic_abort", + PanicStrategy::Unwind => Symbol::intern("panic_unwind"), + PanicStrategy::Abort => Symbol::intern("panic_abort"), }; info!("panic runtime not found -- loading {}", name); @@ -788,9 +785,9 @@ impl<'a> CrateLoader<'a> { // * Staticlibs and Rust dylibs use system malloc // * Rust dylibs used as dependencies to rust use jemalloc let name = if need_lib_alloc && !self.sess.opts.cg.prefer_dynamic { - &self.sess.target.target.options.lib_allocation_crate + Symbol::intern(&self.sess.target.target.options.lib_allocation_crate) } else { - &self.sess.target.target.options.exe_allocation_crate + Symbol::intern(&self.sess.target.target.options.exe_allocation_crate) }; let dep_kind = DepKind::Implicit; let (cnum, data) = @@ -852,8 +849,8 @@ impl<'a> CrateLoader<'a> { impl<'a> CrateLoader<'a> { pub fn preprocess(&mut self, krate: &ast::Crate) { for attr in krate.attrs.iter().filter(|m| m.name() == "link_args") { - if let Some(ref linkarg) = attr.value_str() { - self.cstore.add_used_link_args(&linkarg); + if let Some(linkarg) = attr.value_str() { + self.cstore.add_used_link_args(&linkarg.as_str()); } } } @@ -866,7 +863,7 @@ impl<'a> CrateLoader<'a> { // First, add all of the custom #[link_args] attributes for m in i.attrs.iter().filter(|a| a.check_name("link_args")) { if let Some(linkarg) = m.value_str() { - self.cstore.add_used_link_args(&linkarg); + self.cstore.add_used_link_args(&linkarg.as_str()); } } @@ -878,7 +875,7 @@ impl<'a> CrateLoader<'a> { }; let kind = items.iter().find(|k| { k.check_name("kind") - }).and_then(|a| a.value_str()); + }).and_then(|a| a.value_str()).map(Symbol::as_str); let kind = match kind.as_ref().map(|s| &s[..]) { Some("static") => cstore::NativeStatic, Some("dylib") => cstore::NativeUnknown, @@ -900,7 +897,7 @@ impl<'a> CrateLoader<'a> { struct_span_err!(self.sess, m.span, E0459, "#[link(...)] specified without `name = \"foo\"`") .span_label(m.span, &format!("missing `name` argument")).emit(); - InternedString::new("foo") + Symbol::intern("foo") } }; let cfg = items.iter().find(|k| { @@ -910,7 +907,7 @@ impl<'a> CrateLoader<'a> { list[0].meta_item().unwrap().clone() }); let lib = NativeLibrary { - name: n.to_string(), + name: n, kind: kind, cfg: cfg, }; @@ -941,7 +938,7 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> { for &(ref name, kind) in &self.sess.opts.libs { let lib = NativeLibrary { - name: name.clone(), + name: Symbol::intern(name), kind: kind, cfg: None, }; @@ -959,7 +956,7 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> { let info = self.extract_crate_info(item).unwrap(); let (cnum, ..) 
= self.resolve_crate( - &None, &info.ident, &info.name, None, item.span, PathKind::Crate, info.dep_kind, + &None, info.ident, info.name, None, item.span, PathKind::Crate, info.dep_kind, ); let def_id = definitions.opt_local_def_id(item.id).unwrap(); diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index 37853b7473a65..ce47b936ddc0e 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -29,6 +29,7 @@ use std::path::PathBuf; use flate::Bytes; use syntax::{ast, attr}; use syntax::ext::base::SyntaxExtension; +use syntax::symbol::Symbol; use syntax_pos; pub use rustc::middle::cstore::{NativeLibrary, LinkagePreference}; @@ -58,7 +59,7 @@ pub struct ImportedFileMap { } pub struct CrateMetadata { - pub name: String, + pub name: Symbol, /// Information about the extern crate that caused this crate to /// be loaded. If this is `None`, then the crate was injected @@ -213,7 +214,7 @@ impl CStore { } pub fn add_used_library(&self, lib: NativeLibrary) { - assert!(!lib.name.is_empty()); + assert!(!lib.name.as_str().is_empty()); self.used_libraries.borrow_mut().push(lib); } @@ -249,14 +250,14 @@ impl CStore { } impl CrateMetadata { - pub fn name(&self) -> &str { - &self.root.name + pub fn name(&self) -> Symbol { + self.root.name } pub fn hash(&self) -> Svh { self.root.hash } - pub fn disambiguator(&self) -> &str { - &self.root.disambiguator + pub fn disambiguator(&self) -> Symbol { + self.root.disambiguator } pub fn is_staged_api(&self) -> bool { diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 2018d829597d4..407166203de87 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -31,7 +31,8 @@ use rustc_back::PanicStrategy; use std::path::PathBuf; use syntax::ast; use syntax::attr; -use syntax::parse::{token, new_parser_from_source_str}; +use syntax::parse::new_parser_from_source_str; +use syntax::symbol::Symbol; use syntax_pos::mk_sp; use rustc::hir::svh::Svh; use rustc_back::target::Target; @@ -262,14 +263,14 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { self.get_crate_data(cnum).panic_strategy() } - fn crate_name(&self, cnum: CrateNum) -> token::InternedString + fn crate_name(&self, cnum: CrateNum) -> Symbol { - token::intern_and_get_ident(&self.get_crate_data(cnum).name[..]) + self.get_crate_data(cnum).name } - fn original_crate_name(&self, cnum: CrateNum) -> token::InternedString + fn original_crate_name(&self, cnum: CrateNum) -> Symbol { - token::intern_and_get_ident(&self.get_crate_data(cnum).name()) + self.get_crate_data(cnum).name() } fn extern_crate(&self, cnum: CrateNum) -> Option @@ -282,9 +283,9 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { self.get_crate_hash(cnum) } - fn crate_disambiguator(&self, cnum: CrateNum) -> token::InternedString + fn crate_disambiguator(&self, cnum: CrateNum) -> Symbol { - token::intern_and_get_ident(&self.get_crate_data(cnum).disambiguator()) + self.get_crate_data(cnum).disambiguator() } fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 3af9d291ae55a..fb1314992c094 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -934,7 +934,7 @@ impl<'a, 'tcx> CrateMetadata { .decode(self) .map(|mut attr| { // Need new unique IDs: old thread-local IDs won't map to new threads. 
- attr.node.id = attr::mk_attr_id(); + attr.id = attr::mk_attr_id(); attr }) .collect() diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 3ab542442a1fb..2f4b0d5c87b0d 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -34,7 +34,7 @@ use std::rc::Rc; use std::u32; use syntax::ast::{self, CRATE_NODE_ID}; use syntax::attr; -use syntax; +use syntax::symbol::Symbol; use syntax_pos; use rustc::hir::{self, PatKind}; @@ -600,7 +600,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { if let PatKind::Binding(_, ref path1, _) = arg.pat.node { path1.node } else { - syntax::parse::token::intern("") + Symbol::intern("") } })) } @@ -1119,7 +1119,7 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let deps = get_ordered_deps(self.cstore); self.lazy_seq(deps.iter().map(|&(_, ref dep)| { CrateDep { - name: syntax::parse::token::intern(dep.name()), + name: dep.name(), hash: dep.hash(), kind: dep.dep_kind.get(), } @@ -1279,10 +1279,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeProcMacro); let root = self.lazy(&CrateRoot { rustc_version: rustc_version(), - name: link_meta.crate_name.clone(), + name: link_meta.crate_name, triple: tcx.sess.opts.target_triple.clone(), hash: link_meta.crate_hash, - disambiguator: tcx.sess.local_crate_disambiguator().to_string(), + disambiguator: tcx.sess.local_crate_disambiguator(), panic_strategy: tcx.sess.panic_strategy(), plugin_registrar_fn: tcx.sess .plugin_registrar_fn diff --git a/src/librustc_metadata/locator.rs b/src/librustc_metadata/locator.rs index b677a63edc064..f5196f7ea8428 100644 --- a/src/librustc_metadata/locator.rs +++ b/src/librustc_metadata/locator.rs @@ -227,6 +227,7 @@ use rustc_llvm as llvm; use rustc_llvm::{False, ObjectFile, mk_section_iter}; use rustc_llvm::archive_ro::ArchiveRO; use errors::DiagnosticBuilder; +use syntax::symbol::Symbol; use syntax_pos::Span; use rustc_back::target::Target; @@ -249,8 +250,8 @@ pub struct CrateMismatch { pub struct Context<'a> { pub sess: &'a Session, pub span: Span, - pub ident: &'a str, - pub crate_name: &'a str, + pub ident: Symbol, + pub crate_name: Symbol, pub hash: Option<&'a Svh>, // points to either self.sess.target.target or self.sess.host, must match triple pub target: &'a Target, @@ -422,7 +423,7 @@ impl<'a> Context<'a> { // must be loaded via -L plus some filtering. 
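(Aside: the lookup just below, `externs.get(&self.crate_name.as_str())`, queries the `--extern` table, which stays keyed by owned strings, through the `&str` view of an interned name. Here is a small model of that borrow-based lookup, again with `String` in place of the symbol type and a hypothetical `find_extern` helper.)

```rust
use std::collections::BTreeMap;

// A map keyed by owned Strings can be queried with a borrowed &str,
// which is all the `&*name.as_str()` / `&name.as_str()` calls rely on.
fn find_extern<'a>(externs: &'a BTreeMap<String, Vec<String>>,
                   crate_name: &str) -> Option<&'a [String]> {
    externs.get(crate_name).map(|paths| &paths[..])
}

fn main() {
    let mut externs = BTreeMap::new();
    externs.insert("serde".to_string(), vec!["/tmp/libserde.rlib".to_string()]);
    assert!(find_extern(&externs, "serde").is_some());
    assert!(find_extern(&externs, "rand").is_none());
}
```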
if self.hash.is_none() { self.should_match_name = false; - if let Some(s) = self.sess.opts.externs.get(self.crate_name) { + if let Some(s) = self.sess.opts.externs.get(&self.crate_name.as_str()) { return self.find_commandline_library(s.iter()); } self.should_match_name = true; @@ -533,7 +534,7 @@ impl<'a> Context<'a> { if let Some((ref p, _)) = lib.rlib { err.note(&format!("path: {}", p.display())); } - note_crate_name(&mut err, &lib.metadata.get_root().name); + note_crate_name(&mut err, &lib.metadata.get_root().name.as_str()); } err.emit(); None diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs index 32c8c5e2ee879..e11719dc40f2a 100644 --- a/src/librustc_metadata/schema.rs +++ b/src/librustc_metadata/schema.rs @@ -22,6 +22,7 @@ use rustc_back::PanicStrategy; use rustc_serialize as serialize; use syntax::{ast, attr}; +use syntax::symbol::Symbol; use syntax_pos::{self, Span}; use std::marker::PhantomData; @@ -163,10 +164,10 @@ pub enum LazyState { #[derive(RustcEncodable, RustcDecodable)] pub struct CrateRoot { pub rustc_version: String, - pub name: String, + pub name: Symbol, pub triple: String, pub hash: hir::svh::Svh, - pub disambiguator: String, + pub disambiguator: Symbol, pub panic_strategy: PanicStrategy, pub plugin_registrar_fn: Option, pub macro_derive_registrar: Option, diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 458a952543e40..d281b2a32d045 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -18,7 +18,7 @@ use rustc::util::nodemap::NodeMap; use rustc::hir; use syntax::abi::Abi; use syntax::ast; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax_pos::Span; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index 038300068fce1..83809ba763589 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -29,7 +29,7 @@ use rustc::hir::map::blocks::FnLikeNode; use rustc::infer::InferCtxt; use rustc::ty::subst::Subst; use rustc::ty::{self, Ty, TyCtxt}; -use syntax::parse::token; +use syntax::symbol::{Symbol, InternedString}; use rustc::hir; use rustc_const_math::{ConstInt, ConstUsize}; @@ -121,7 +121,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { self.tcx.mk_nil() } - pub fn str_literal(&mut self, value: token::InternedString) -> Literal<'tcx> { + pub fn str_literal(&mut self, value: InternedString) -> Literal<'tcx> { Literal::Value { value: ConstVal::Str(value) } } @@ -145,7 +145,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { self_ty: Ty<'tcx>, params: &[Ty<'tcx>]) -> (Ty<'tcx>, Literal<'tcx>) { - let method_name = token::intern(method_name); + let method_name = Symbol::intern(method_name); let substs = self.tcx.mk_substs_trait(self_ty, params); for item in self.tcx.associated_items(trait_def_id) { if item.kind == ty::AssociatedKind::Method && item.name == method_name { diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 828efbf373131..89c3efaafcdcc 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -21,7 +21,8 @@ use rustc::session::Session; use syntax::ast::*; use syntax::attr; use syntax::codemap::Spanned; -use syntax::parse::token::{self, keywords}; +use syntax::parse::token; +use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax_pos::Span; use errors; @@ -39,7 +40,7 @@ impl<'a> AstValidator<'a> { if label.name == 
keywords::StaticLifetime.name() { self.err_handler().span_err(span, &format!("invalid label name `{}`", label.name)); } - if label.name.as_str() == "'_" { + if label.name == "'_" { self.session.add_lint(lint::builtin::LIFETIME_UNDERSCORE, id, span, @@ -89,7 +90,7 @@ impl<'a> AstValidator<'a> { impl<'a> Visitor for AstValidator<'a> { fn visit_lifetime(&mut self, lt: &Lifetime) { - if lt.name.as_str() == "'_" { + if lt.name == "'_" { self.session.add_lint(lint::builtin::LIFETIME_UNDERSCORE, lt.id, lt.span, diff --git a/src/librustc_passes/hir_stats.rs b/src/librustc_passes/hir_stats.rs index 3bdaf276b40ce..dafb7bc6e6099 100644 --- a/src/librustc_passes/hir_stats.rs +++ b/src/librustc_passes/hir_stats.rs @@ -240,7 +240,7 @@ impl<'v> hir_visit::Visitor<'v> for StatCollector<'v> { hir_visit::walk_assoc_type_binding(self, type_binding) } fn visit_attribute(&mut self, attr: &'v ast::Attribute) { - self.record("Attribute", Id::Attr(attr.node.id), attr); + self.record("Attribute", Id::Attr(attr.id), attr); } fn visit_macro_def(&mut self, macro_def: &'v hir::MacroDef) { self.record("MacroDef", Id::Node(macro_def.id), macro_def); diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index 4438241999a39..1bfc445fca98d 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -69,9 +69,9 @@ pub fn load_plugins(sess: &Session, for plugin in plugins { // plugins must have a name and can't be key = value match plugin.name() { - Some(ref name) if !plugin.is_value_str() => { + Some(name) if !plugin.is_value_str() => { let args = plugin.meta_item_list().map(ToOwned::to_owned); - loader.load_plugin(plugin.span, name, args.unwrap_or_default()); + loader.load_plugin(plugin.span, &name.as_str(), args.unwrap_or_default()); }, _ => call_malformed_plugin_attribute(sess, attr.span), } diff --git a/src/librustc_plugin/registry.rs b/src/librustc_plugin/registry.rs index 88e248e2efa38..fe2f9713d1beb 100644 --- a/src/librustc_plugin/registry.rs +++ b/src/librustc_plugin/registry.rs @@ -17,7 +17,7 @@ use rustc::mir::transform::MirMapPass; use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT}; use syntax::ext::base::MacroExpanderFn; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax::ast; use syntax::feature_gate::AttributeType; use syntax_pos::Span; @@ -101,7 +101,7 @@ impl<'a> Registry<'a> { /// /// This is the most general hook into `libsyntax`'s expansion behavior. pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxExtension) { - if name.as_str() == "macro_rules" { + if name == "macro_rules" { panic!("user-defined macros may not be named `macro_rules`"); } self.syntax_exts.push((name, match extension { @@ -121,7 +121,7 @@ impl<'a> Registry<'a> { /// It builds for you a `NormalTT` that calls `expander`, /// and also takes care of interning the macro's name. 
pub fn register_macro(&mut self, name: &str, expander: MacroExpanderFn) { - self.register_syntax_extension(token::intern(name), + self.register_syntax_extension(Symbol::intern(name), NormalTT(Box::new(expander), None, false)); } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 627c72ff8c925..e988cacdce809 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -31,7 +31,6 @@ use std::rc::Rc; use syntax::ast::Name; use syntax::attr; -use syntax::parse::token; use syntax::ast::{self, Block, ForeignItem, ForeignItemKind, Item, ItemKind}; use syntax::ast::{Mutability, StmtKind, TraitItem, TraitItemKind}; @@ -41,7 +40,7 @@ use syntax::ext::base::Determinacy::Undetermined; use syntax::ext::expand::mark_tts; use syntax::ext::hygiene::Mark; use syntax::ext::tt::macro_rules; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax_pos::{Span, DUMMY_SP}; @@ -139,7 +138,7 @@ impl<'b> Resolver<'b> { match view_path.node { ViewPathSimple(binding, ref full_path) => { let mut source = full_path.segments.last().unwrap().identifier; - let source_name = source.name.as_str(); + let source_name = source.name; if source_name == "mod" || source_name == "self" { resolve_error(self, view_path.span, @@ -607,7 +606,7 @@ impl<'b> Resolver<'b> { if attr.check_name("macro_escape") { let msg = "macro_escape is a deprecated synonym for macro_use"; let mut err = self.session.struct_span_warn(attr.span, msg); - if let ast::AttrStyle::Inner = attr.node.style { + if let ast::AttrStyle::Inner = attr.style { err.help("consider an outer attribute, #[macro_use] mod ...").emit(); } else { err.emit(); @@ -632,7 +631,7 @@ impl<'b> Resolver<'b> { match attr.meta_item_list() { Some(names) => for attr in names { if let Some(word) = attr.word() { - imports.imports.push((token::intern(&word.name()), attr.span())); + imports.imports.push((word.name(), attr.span())); } else { span_err!(self.session, attr.span(), E0466, "bad macro import"); } @@ -646,7 +645,7 @@ impl<'b> Resolver<'b> { if let Some(names) = attr.meta_item_list() { for attr in names { if let Some(word) = attr.word() { - imports.reexports.push((token::intern(&word.name()), attr.span())); + imports.reexports.push((word.name(), attr.span())); } else { bad_macro_reexport(self, attr.span()); } diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index a3a60e4f6d754..26df986825b84 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -57,7 +57,7 @@ use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ast::{self, FloatTy}; use syntax::ast::{CRATE_NODE_ID, Name, NodeId, Ident, SpannedIdent, IntTy, UintTy}; use syntax::ext::base::SyntaxExtension; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{Symbol, keywords}; use syntax::util::lev_distance::find_best_match_for_name; use syntax::visit::{self, FnKind, Visitor}; @@ -90,7 +90,7 @@ mod resolve_imports; enum SuggestionType { Macro(String), - Function(token::InternedString), + Function(Symbol), NotFound, } @@ -1039,7 +1039,7 @@ impl PrimitiveTypeTable { } fn intern(&mut self, string: &str, primitive_type: PrimTy) { - self.primitive_types.insert(token::intern(string), primitive_type); + self.primitive_types.insert(Symbol::intern(string), primitive_type); } } @@ -1462,7 +1462,6 @@ impl<'a> Resolver<'a> { let name = module_path[index].name; match self.resolve_name_in_module(search_module, name, TypeNS, 
false, false, span) { Failed(_) => { - let segment_name = name.as_str(); let module_name = module_to_string(search_module); let msg = if "???" == &module_name { let current_module = self.current_module; @@ -1480,10 +1479,10 @@ impl<'a> Resolver<'a> { format!("Did you mean `{}{}`?", prefix, path_str) } - None => format!("Maybe a missing `extern crate {};`?", segment_name), + None => format!("Maybe a missing `extern crate {};`?", name), } } else { - format!("Could not find `{}` in `{}`", segment_name, module_name) + format!("Could not find `{}` in `{}`", name, module_name) }; return Failed(span.map(|span| (span, msg))); @@ -1651,7 +1650,7 @@ impl<'a> Resolver<'a> { /// grammar: (SELF MOD_SEP ) ? (SUPER MOD_SEP) * fn resolve_module_prefix(&mut self, module_path: &[Ident], span: Option) -> ResolveResult> { - if &*module_path[0].name.as_str() == "$crate" { + if module_path[0].name == "$crate" { return Success(PrefixFound(self.resolve_crate_var(module_path[0].ctxt), 1)); } @@ -1667,7 +1666,7 @@ impl<'a> Resolver<'a> { self.module_map[&self.current_module.normal_ancestor_id.unwrap()]; // Now loop through all the `super`s we find. - while i < module_path.len() && "super" == module_path[i].name.as_str() { + while i < module_path.len() && module_path[i].name == "super" { debug!("(resolving module prefix) resolving `super` at {}", module_to_string(&containing_module)); if let Some(parent) = containing_module.parent { @@ -2635,7 +2634,7 @@ impl<'a> Resolver<'a> { let qualified_binding = self.resolve_module_relative_path(span, segments, namespace); match (qualified_binding, unqualified_def) { (Ok(binding), Some(ref ud)) if binding.def() == ud.def && - segments[0].identifier.name.as_str() != "$crate" => { + segments[0].identifier.name != "$crate" => { self.session .add_lint(lint::builtin::UNUSED_QUALIFICATIONS, id, @@ -2881,7 +2880,7 @@ impl<'a> Resolver<'a> { } fn find_best_match(&mut self, name: &str) -> SuggestionType { - if let Some(macro_name) = self.macro_names.iter().find(|n| n.as_str() == name) { + if let Some(macro_name) = self.macro_names.iter().find(|&n| n == &name) { return SuggestionType::Macro(format!("{}!", macro_name)); } @@ -2891,7 +2890,7 @@ impl<'a> Resolver<'a> { .flat_map(|rib| rib.bindings.keys().map(|ident| &ident.name)); if let Some(found) = find_best_match_for_name(names, name, None) { - if name != found { + if found != name { return SuggestionType::Function(found); } } SuggestionType::NotFound @@ -3000,8 +2999,7 @@ impl<'a> Resolver<'a> { false // Stop advancing }); - if method_scope && - &path_name[..] == keywords::SelfValue.name().as_str() { + if method_scope && keywords::SelfValue.name() == &*path_name { resolve_error(self, expr.span, ResolutionError::SelfNotAvailableInStaticMethod); @@ -3606,7 +3604,7 @@ fn module_to_string(module: Module) -> String { } } else { // danger, shouldn't be ident? 
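(Aside: several resolver hunks here compare an interned name directly against a string literal, e.g. `module_path[i].name == "super"` above. The shape-only model below shows why such comparisons typecheck once the name type carries a `PartialEq` impl against string slices; the real type is an interned index with its own impls, not the owned `String` used in this sketch.)

```rust
// Hypothetical stand-in for the compiler's name type.
#[derive(Clone, Debug, PartialEq, Eq)]
struct Name(String);

// Allowing `name == "super"` without an explicit `.as_str()` call.
impl<'a> PartialEq<&'a str> for Name {
    fn eq(&self, other: &&'a str) -> bool {
        self.0 == *other
    }
}

fn main() {
    let name = Name("super".to_string());
    assert!(name == "super");
    assert!(name != "self");
}
```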
- names.push(token::str_to_ident("")); + names.push(Ident::from_str("")); collect_mod(names, module.parent.unwrap()); } } diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 524d491a464e2..b2cc6306db9e7 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -27,7 +27,6 @@ use syntax::ext::expand::Expansion; use syntax::ext::hygiene::Mark; use syntax::ext::tt::macro_rules; use syntax::fold::Folder; -use syntax::parse::token::intern; use syntax::ptr::P; use syntax::util::lev_distance::find_best_match_for_name; use syntax::visit::Visitor; @@ -116,7 +115,7 @@ impl<'a> base::Resolver for Resolver<'a> { impl<'a, 'b> Folder for EliminateCrateVar<'a, 'b> { fn fold_path(&mut self, mut path: ast::Path) -> ast::Path { let ident = path.segments[0].identifier; - if &ident.name.as_str() == "$crate" { + if ident.name == "$crate" { path.global = true; let module = self.0.resolve_crate_var(ident.ctxt); if module.is_local() { @@ -152,7 +151,7 @@ impl<'a> base::Resolver for Resolver<'a> { } fn add_macro(&mut self, scope: Mark, mut def: ast::MacroDef, export: bool) { - if &def.ident.name.as_str() == "macro_rules" { + if def.ident.name == "macro_rules" { self.session.span_err(def.span, "user-defined macros may not be named `macro_rules`"); } @@ -207,8 +206,7 @@ impl<'a> base::Resolver for Resolver<'a> { fn find_attr_invoc(&mut self, attrs: &mut Vec) -> Option { for i in 0..attrs.len() { - let name = intern(&attrs[i].name()); - match self.builtin_macros.get(&name).cloned() { + match self.builtin_macros.get(&attrs[i].name()).cloned() { Some(binding) => match *self.get_macro(binding) { MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => { return Some(attrs.remove(i)) diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index e83c2359979c0..87b2b88fe33fc 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -39,7 +39,8 @@ use std::collections::hash_map::DefaultHasher; use std::hash::*; use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; -use syntax::parse::token::{self, keywords}; +use syntax::parse::token; +use syntax::symbol::keywords; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string}; use syntax::ptr::P; diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 778f018414165..bd5da588a864d 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -54,7 +54,8 @@ use std::path::{Path, PathBuf}; use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; use syntax::parse::lexer::comments::strip_doc_comment_decoration; -use syntax::parse::token::{self, keywords, InternedString}; +use syntax::parse::token; +use syntax::symbol::{Symbol, keywords}; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{ty_to_string, arg_to_string}; use syntax::codemap::MacroAttribute; @@ -119,7 +120,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { } }; result.push(CrateData { - name: (&self.tcx.sess.cstore.crate_name(n)[..]).to_owned(), + name: self.tcx.sess.cstore.crate_name(n).to_string(), number: n.as_u32(), span: span, }); @@ -728,16 +729,16 @@ impl Visitor for PathCollector { } fn docs_for_attrs(attrs: &[Attribute]) -> String { - let doc = InternedString::new("doc"); + let doc = Symbol::intern("doc"); let mut result = String::new(); for attr in attrs { if attr.name() == 
doc { - if let Some(ref val) = attr.value_str() { - if attr.node.is_sugared_doc { - result.push_str(&strip_doc_comment_decoration(val)); + if let Some(val) = attr.value_str() { + if attr.is_sugared_doc { + result.push_str(&strip_doc_comment_decoration(&val.as_str())); } else { - result.push_str(val); + result.push_str(&val.as_str()); } result.push('\n'); } diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index 9ec764b82f869..e06aefd865f1b 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -18,7 +18,8 @@ use std::path::Path; use syntax::ast; use syntax::parse::lexer::{self, Reader, StringReader}; -use syntax::parse::token::{self, keywords, Token}; +use syntax::parse::token::{self, Token}; +use syntax::symbol::keywords; use syntax_pos::*; #[derive(Clone)] diff --git a/src/librustc_trans/asm.rs b/src/librustc_trans/asm.rs index 8c704cc32993c..665e12cbe8795 100644 --- a/src/librustc_trans/asm.rs +++ b/src/librustc_trans/asm.rs @@ -88,7 +88,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, AsmDialect::Intel => llvm::AsmDialect::Intel, }; - let asm = CString::new(ia.asm.as_bytes()).unwrap(); + let asm = CString::new(ia.asm.as_str().as_bytes()).unwrap(); let constraint_cstr = CString::new(all_constraints).unwrap(); let r = InlineAsmCall(bcx, asm.as_ptr(), diff --git a/src/librustc_trans/assert_module_sources.rs b/src/librustc_trans/assert_module_sources.rs index 264ed4cd12fc1..898e65ce391e6 100644 --- a/src/librustc_trans/assert_module_sources.rs +++ b/src/librustc_trans/assert_module_sources.rs @@ -29,7 +29,6 @@ use rustc::ty::TyCtxt; use syntax::ast; -use syntax::parse::token::InternedString; use {ModuleSource, ModuleTranslation}; @@ -77,7 +76,7 @@ impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> { } let mname = self.field(attr, MODULE); - let mtrans = self.modules.iter().find(|mtrans| &mtrans.name[..] == &mname[..]); + let mtrans = self.modules.iter().find(|mtrans| *mtrans.name == *mname.as_str()); let mtrans = match mtrans { Some(m) => m, None => { @@ -113,7 +112,7 @@ impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> { } } - fn field(&self, attr: &ast::Attribute, name: &str) -> InternedString { + fn field(&self, attr: &ast::Attribute, name: &str) -> ast::Name { for item in attr.meta_item_list().unwrap_or(&[]) { if item.check_name(name) { if let Some(value) = item.value_str() { @@ -137,7 +136,7 @@ impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> { let config = &self.tcx.sess.parse_sess.config; let value = self.field(attr, CFG); debug!("check_config(config={:?}, value={:?})", config, value); - if config.iter().any(|c| c.check_name(&value[..])) { + if config.iter().any(|&(name, _)| name == value) { debug!("check_config: matched"); return true; } diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 95d63311ee6e4..d0339775a78a1 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -44,6 +44,7 @@ use std::str; use flate; use syntax::ast; use syntax::attr; +use syntax::symbol::Symbol; use syntax_pos::Span; // RLIB LLVM-BYTECODE OBJECT LAYOUT @@ -93,8 +94,8 @@ pub fn find_crate_name(sess: Option<&Session>, if let Some(sess) = sess { if let Some(ref s) = sess.opts.crate_name { - if let Some((attr, ref name)) = attr_crate_name { - if *s != &name[..] 
{ + if let Some((attr, name)) = attr_crate_name { + if name != &**s { let msg = format!("--crate-name and #[crate_name] are \ required to match, but `{}` != `{}`", s, name); @@ -130,7 +131,7 @@ pub fn build_link_meta(incremental_hashes_map: &IncrementalHashesMap, name: &str) -> LinkMeta { let r = LinkMeta { - crate_name: name.to_owned(), + crate_name: Symbol::intern(name), crate_hash: Svh::new(incremental_hashes_map[&DepNode::Krate].to_smaller_hash()), }; info!("{:?}", r); @@ -429,7 +430,7 @@ fn link_rlib<'a>(sess: &'a Session, NativeLibraryKind::NativeFramework | NativeLibraryKind::NativeUnknown => continue, } - ab.add_native_library(&lib.name); + ab.add_native_library(&lib.name.as_str()); } // After adding all files to the archive, we need to update the @@ -615,7 +616,7 @@ fn link_staticlib(sess: &Session, objects: &[PathBuf], out_filename: &Path, let skip_object_files = native_libs.iter().any(|lib| { lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib) }); - ab.add_rlib(path, &name, sess.lto(), skip_object_files).unwrap(); + ab.add_rlib(path, &name.as_str(), sess.lto(), skip_object_files).unwrap(); all_native_libs.extend(sess.cstore.native_libraries(cnum)); }); @@ -934,15 +935,15 @@ fn add_local_native_libraries(cmd: &mut Linker, sess: &Session) { // don't otherwise explicitly reference them. This can occur for // libraries which are just providing bindings, libraries with generic // functions, etc. - cmd.link_whole_staticlib(&l.name, &search_path); + cmd.link_whole_staticlib(&l.name.as_str(), &search_path); } cmd.hint_dynamic(); for lib in others { match lib.kind { - NativeLibraryKind::NativeUnknown => cmd.link_dylib(&lib.name), - NativeLibraryKind::NativeFramework => cmd.link_framework(&lib.name), + NativeLibraryKind::NativeUnknown => cmd.link_dylib(&lib.name.as_str()), + NativeLibraryKind::NativeFramework => cmd.link_framework(&lib.name.as_str()), NativeLibraryKind::NativeStatic => bug!(), } } @@ -1185,8 +1186,8 @@ fn add_upstream_native_libraries(cmd: &mut Linker, sess: &Session) { continue } match lib.kind { - NativeLibraryKind::NativeUnknown => cmd.link_dylib(&lib.name), - NativeLibraryKind::NativeFramework => cmd.link_framework(&lib.name), + NativeLibraryKind::NativeUnknown => cmd.link_dylib(&lib.name.as_str()), + NativeLibraryKind::NativeFramework => cmd.link_framework(&lib.name.as_str()), // ignore statically included native libraries here as we've // already included them when we included the rust library diff --git a/src/librustc_trans/back/symbol_names.rs b/src/librustc_trans/back/symbol_names.rs index 0ad663f05b48b..ff40cfda5ff7c 100644 --- a/src/librustc_trans/back/symbol_names.rs +++ b/src/librustc_trans/back/symbol_names.rs @@ -113,7 +113,7 @@ use rustc::hir::map::definitions::{DefPath, DefPathData}; use rustc::util::common::record_time; use syntax::attr; -use syntax::parse::token::{self, InternedString}; +use syntax::symbol::{Symbol, InternedString}; fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, @@ -275,7 +275,7 @@ impl ItemPathBuffer for SymbolPathBuffer { } fn push(&mut self, text: &str) { - self.names.push(token::intern(text).as_str()); + self.names.push(Symbol::intern(text).as_str()); } } @@ -288,7 +288,7 @@ pub fn exported_name_from_type_and_prefix<'a, 'tcx>(scx: &SharedCrateContext<'a, krate: LOCAL_CRATE, }; let hash = get_symbol_hash(scx, &empty_def_path, t, None); - let path = [token::intern_and_get_ident(prefix)]; + let path = [Symbol::intern(prefix).as_str()]; mangle(path.iter().cloned(), &hash) } diff --git 
a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index 0c0b7fbf4afea..78a676d30337c 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -1128,11 +1128,11 @@ pub fn set_link_section(ccx: &CrateContext, llval: ValueRef, attrs: &[ast::Attribute]) { if let Some(sect) = attr::first_attr_value_str_by_name(attrs, "link_section") { - if contains_null(&sect) { + if contains_null(&sect.as_str()) { ccx.sess().fatal(&format!("Illegal null byte in link_section value: `{}`", &sect)); } unsafe { - let buf = CString::new(sect.as_bytes()).unwrap(); + let buf = CString::new(sect.as_str().as_bytes()).unwrap(); llvm::LLVMSetSection(llval, buf.as_ptr()); } } diff --git a/src/librustc_trans/collector.rs b/src/librustc_trans/collector.rs index 5902b0b1ce075..5c7b004375ed4 100644 --- a/src/librustc_trans/collector.rs +++ b/src/librustc_trans/collector.rs @@ -663,7 +663,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { -> bool { (bare_fn_ty.abi == Abi::RustIntrinsic || bare_fn_ty.abi == Abi::PlatformIntrinsic) && - tcx.item_name(def_id).as_str() == "drop_in_place" + tcx.item_name(def_id) == "drop_in_place" } } } diff --git a/src/librustc_trans/common.rs b/src/librustc_trans/common.rs index df70a6e81166b..29925d964da25 100644 --- a/src/librustc_trans/common.rs +++ b/src/librustc_trans/common.rs @@ -52,8 +52,7 @@ use std::ffi::CString; use std::cell::{Cell, RefCell, Ref}; use syntax::ast; -use syntax::parse::token::InternedString; -use syntax::parse::token; +use syntax::symbol::{Symbol, InternedString}; use syntax_pos::{DUMMY_SP, Span}; pub use context::{CrateContext, SharedCrateContext}; @@ -225,7 +224,7 @@ impl<'a, 'tcx> VariantInfo<'tcx> { VariantInfo { discr: Disr(0), fields: v.iter().enumerate().map(|(i, &t)| { - Field(token::intern(&i.to_string()), t) + Field(Symbol::intern(&i.to_string()), t) }).collect() } } diff --git a/src/librustc_trans/consts.rs b/src/librustc_trans/consts.rs index 670a84565fafb..4186721c122ac 100644 --- a/src/librustc_trans/consts.rs +++ b/src/librustc_trans/consts.rs @@ -123,7 +123,7 @@ pub fn get_static(ccx: &CrateContext, def_id: DefId) -> ValueRef { // extern "C" fn() from being non-null, so we can't just declare a // static and call it a day. Some linkages (like weak) will make it such // that the static actually has a null value.
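// Aside — a minimal sketch, not part of this patch, of the interning API these hunks migrate to.
// It assumes only the `syntax::symbol` module already used above: `Symbol::intern` replaces
// `token::intern`, and `Symbol::as_str()` yields the `InternedString` handle that
// `token::intern_and_get_ident` used to return directly.
use syntax::symbol::Symbol;

fn intern_sketch() {
    let sym = Symbol::intern("drop_in_place");   // old: token::intern("drop_in_place")
    let s = sym.as_str();                        // old: token::intern_and_get_ident("drop_in_place")
    assert_eq!(&*s, "drop_in_place");            // an InternedString derefs to str
    assert!(sym == "drop_in_place");             // a Symbol compares directly against &str
}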
- let linkage = match base::llvm_linkage_by_name(&name) { + let linkage = match base::llvm_linkage_by_name(&name.as_str()) { Some(linkage) => linkage, None => { ccx.sess().span_fatal(span, "invalid linkage specified"); diff --git a/src/librustc_trans/context.rs b/src/librustc_trans/context.rs index 7657fc7d1c8b0..771c5ef6d9d28 100644 --- a/src/librustc_trans/context.rs +++ b/src/librustc_trans/context.rs @@ -42,7 +42,7 @@ use std::ptr; use std::rc::Rc; use std::str; use syntax::ast; -use syntax::parse::token::InternedString; +use syntax::symbol::InternedString; use abi::FnType; pub struct Stats { diff --git a/src/librustc_trans/debuginfo/metadata.rs b/src/librustc_trans/debuginfo/metadata.rs index 5b9ef78ddc22e..5d01ca892b316 100644 --- a/src/librustc_trans/debuginfo/metadata.rs +++ b/src/librustc_trans/debuginfo/metadata.rs @@ -44,10 +44,8 @@ use std::ffi::CString; use std::fmt::Write; use std::path::Path; use std::ptr; -use std::rc::Rc; -use syntax::util::interner::Interner; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::{Interner, InternedString}; use syntax_pos::{self, Span}; @@ -117,9 +115,8 @@ impl<'tcx> TypeMap<'tcx> { unique_type_id: UniqueTypeId, metadata: DIType) { if self.unique_id_to_metadata.insert(unique_type_id, metadata).is_some() { - let unique_type_id_str = self.get_unique_type_id_as_string(unique_type_id); bug!("Type metadata for unique id '{}' is already in the TypeMap!", - &unique_type_id_str[..]); + self.get_unique_type_id_as_string(unique_type_id)); } } @@ -133,7 +130,7 @@ impl<'tcx> TypeMap<'tcx> { // Get the string representation of a UniqueTypeId. This method will fail if // the id is unknown. - fn get_unique_type_id_as_string(&self, unique_type_id: UniqueTypeId) -> Rc { + fn get_unique_type_id_as_string(&self, unique_type_id: UniqueTypeId) -> &str { let UniqueTypeId(interner_key) = unique_type_id; self.unique_id_interner.get(interner_key) } @@ -182,7 +179,7 @@ impl<'tcx> TypeMap<'tcx> { -> UniqueTypeId { let enum_type_id = self.get_unique_type_id_of_type(cx, enum_type); let enum_variant_type_id = format!("{}::{}", - &self.get_unique_type_id_as_string(enum_type_id), + self.get_unique_type_id_as_string(enum_type_id), variant_name); let interner_key = self.unique_id_interner.intern(&enum_variant_type_id); UniqueTypeId(interner_key) @@ -623,14 +620,12 @@ pub fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let metadata_for_uid = match type_map.find_metadata_for_unique_id(unique_type_id) { Some(metadata) => metadata, None => { - let unique_type_id_str = - type_map.get_unique_type_id_as_string(unique_type_id); span_bug!(usage_site_span, "Expected type metadata for unique \ type id '{}' to already be in \ the debuginfo::TypeMap but it \ was not. (Ty = {})", - &unique_type_id_str[..], + type_map.get_unique_type_id_as_string(unique_type_id), t); } }; @@ -638,14 +633,12 @@ pub fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match type_map.find_metadata_for_type(t) { Some(metadata) => { if metadata != metadata_for_uid { - let unique_type_id_str = - type_map.get_unique_type_id_as_string(unique_type_id); span_bug!(usage_site_span, "Mismatch between Ty and \ UniqueTypeId maps in \ debuginfo::TypeMap. 
\ UniqueTypeId={}, Ty={}", - &unique_type_id_str[..], + type_map.get_unique_type_id_as_string(unique_type_id), t); } } @@ -809,7 +802,7 @@ pub fn compile_unit_metadata(scc: &SharedCrateContext, }; fn fallback_path(scc: &SharedCrateContext) -> CString { - CString::new(scc.link_meta().crate_name.clone()).unwrap() + CString::new(scc.link_meta().crate_name.to_string()).unwrap() } } @@ -1526,13 +1519,10 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let enum_llvm_type = type_of::type_of(cx, enum_type); let (enum_type_size, enum_type_align) = size_and_align_of(cx, enum_llvm_type); - let unique_type_id_str = debug_context(cx) - .type_map - .borrow() - .get_unique_type_id_as_string(unique_type_id); - let enum_name = CString::new(enum_name).unwrap(); - let unique_type_id_str = CString::new(unique_type_id_str.as_bytes()).unwrap(); + let unique_type_id_str = CString::new( + debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id).as_bytes() + ).unwrap(); let enum_metadata = unsafe { llvm::LLVMRustDIBuilderCreateUnionType( DIB(cx), @@ -1566,7 +1556,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, fn get_enum_discriminant_name(cx: &CrateContext, def_id: DefId) - -> token::InternedString { + -> InternedString { cx.tcx().item_name(def_id).as_str() } } @@ -1669,11 +1659,10 @@ fn create_struct_stub(cx: &CrateContext, -> DICompositeType { let (struct_size, struct_align) = size_and_align_of(cx, struct_llvm_type); - let unique_type_id_str = debug_context(cx).type_map - .borrow() - .get_unique_type_id_as_string(unique_type_id); let name = CString::new(struct_type_name).unwrap(); - let unique_type_id = CString::new(unique_type_id_str.as_bytes()).unwrap(); + let unique_type_id = CString::new( + debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id).as_bytes() + ).unwrap(); let metadata_stub = unsafe { // LLVMRustDIBuilderCreateStructType() wants an empty array. A null // pointer will lead to hard to trace and debug LLVM assertions @@ -1707,11 +1696,10 @@ fn create_union_stub(cx: &CrateContext, -> DICompositeType { let (union_size, union_align) = size_and_align_of(cx, union_llvm_type); - let unique_type_id_str = debug_context(cx).type_map - .borrow() - .get_unique_type_id_as_string(unique_type_id); let name = CString::new(union_type_name).unwrap(); - let unique_type_id = CString::new(unique_type_id_str.as_bytes()).unwrap(); + let unique_type_id = CString::new( + debug_context(cx).type_map.borrow().get_unique_type_id_as_string(unique_type_id).as_bytes() + ).unwrap(); let metadata_stub = unsafe { // LLVMRustDIBuilderCreateUnionType() wants an empty array. 
A null // pointer will lead to hard to trace and debug LLVM assertions diff --git a/src/librustc_trans/debuginfo/namespace.rs b/src/librustc_trans/debuginfo/namespace.rs index 5953ec4aaedfa..a0477c9fc1eee 100644 --- a/src/librustc_trans/debuginfo/namespace.rs +++ b/src/librustc_trans/debuginfo/namespace.rs @@ -35,7 +35,7 @@ pub fn mangled_name_of_item(ccx: &CrateContext, def_id: DefId, extra: &str) -> S } let name = match def_key.disambiguated_data.data { - DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate), + DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate).as_str(), data => data.as_interned_str() }; @@ -64,7 +64,7 @@ pub fn item_namespace(ccx: &CrateContext, def_id: DefId) -> DIScope { }); let namespace_name = match def_key.disambiguated_data.data { - DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate), + DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate).as_str(), data => data.as_interned_str() }; diff --git a/src/librustc_trans/debuginfo/type_names.rs b/src/librustc_trans/debuginfo/type_names.rs index 956402edc1166..fce1ce5610555 100644 --- a/src/librustc_trans/debuginfo/type_names.rs +++ b/src/librustc_trans/debuginfo/type_names.rs @@ -156,7 +156,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, qualified: bool, output: &mut String) { if qualified { - output.push_str(&cx.tcx().crate_name(def_id.krate)); + output.push_str(&cx.tcx().crate_name(def_id.krate).as_str()); for path_element in cx.tcx().def_path(def_id).data { output.push_str("::"); output.push_str(&path_element.data.as_interned_str()); diff --git a/src/librustc_trans/intrinsic.rs b/src/librustc_trans/intrinsic.rs index b1b09d3ca20de..016a76a72531b 100644 --- a/src/librustc_trans/intrinsic.rs +++ b/src/librustc_trans/intrinsic.rs @@ -30,7 +30,7 @@ use rustc::ty::{self, Ty}; use Disr; use rustc::hir; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::Symbol; use rustc::session::Session; use syntax_pos::{Span, DUMMY_SP}; @@ -107,7 +107,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let sig = tcx.erase_late_bound_regions_and_normalize(&fty.sig); let arg_tys = sig.inputs; let ret_ty = sig.output; - let name = tcx.item_name(def_id).as_str(); + let name = &*tcx.item_name(def_id).as_str(); let span = match call_debug_location { DebugLoc::ScopeAt(_, span) => span, @@ -123,15 +123,15 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, Call(bcx, llfn, &[], call_debug_location); Unreachable(bcx); return Result::new(bcx, C_undef(Type::nil(ccx).ptr_to())); - } else if &name[..] 
== "unreachable" { + } else if name == "unreachable" { Unreachable(bcx); return Result::new(bcx, C_nil(ccx)); } let llret_ty = type_of::type_of(ccx, ret_ty); - let simple = get_simple_intrinsic(ccx, &name); - let llval = match (simple, &name[..]) { + let simple = get_simple_intrinsic(ccx, name); + let llval = match (simple, name) { (Some(llfn), _) => { Call(bcx, llfn, &llargs, call_debug_location) } @@ -208,7 +208,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, } (_, "type_name") => { let tp_ty = substs.type_at(0); - let ty_name = token::intern_and_get_ident(&tp_ty.to_string()); + let ty_name = Symbol::intern(&tp_ty.to_string()).as_str(); C_str_slice(ccx, ty_name) } (_, "type_id") => { @@ -340,7 +340,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let sty = &arg_tys[0].sty; match int_type_width_signed(sty, ccx) { Some((width, signed)) => - match &*name { + match name { "ctlz" => count_zeros_intrinsic(bcx, &format!("llvm.ctlz.i{}", width), llargs[0], call_debug_location), "cttz" => count_zeros_intrinsic(bcx, &format!("llvm.cttz.i{}", width), @@ -394,7 +394,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let sty = &arg_tys[0].sty; match float_type_width(sty) { Some(_width) => - match &*name { + match name { "fadd_fast" => FAddFast(bcx, llargs[0], llargs[1], call_debug_location), "fsub_fast" => FSubFast(bcx, llargs[0], llargs[1], call_debug_location), "fmul_fast" => FMulFast(bcx, llargs[0], llargs[1], call_debug_location), diff --git a/src/librustc_trans/mir/block.rs b/src/librustc_trans/mir/block.rs index b22bcf9825a2a..c0000506d2455 100644 --- a/src/librustc_trans/mir/block.rs +++ b/src/librustc_trans/mir/block.rs @@ -30,7 +30,7 @@ use glue; use type_::Type; use rustc_data_structures::fx::FxHashMap; -use syntax::parse::token; +use syntax::symbol::Symbol; use super::{MirContext, LocalRef}; use super::analyze::CleanupKind; @@ -321,7 +321,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { // Get the location information. 
let loc = bcx.sess().codemap().lookup_char_pos(span.lo); - let filename = token::intern_and_get_ident(&loc.file.name); + let filename = Symbol::intern(&loc.file.name).as_str(); let filename = C_str_slice(bcx.ccx(), filename); let line = C_u32(bcx.ccx(), loc.line as u32); @@ -351,7 +351,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { const_err) } mir::AssertMessage::Math(ref err) => { - let msg_str = token::intern_and_get_ident(err.description()); + let msg_str = Symbol::intern(err.description()).as_str(); let msg_str = C_str_slice(bcx.ccx(), msg_str); let msg_file_line = C_struct(bcx.ccx(), &[msg_str, filename, line], diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index 12b17c26cbc41..12cbfcef7d26b 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -21,7 +21,7 @@ use machine; use type_of; use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use std::cell::Ref; use std::iter; diff --git a/src/librustc_trans/partitioning.rs b/src/librustc_trans/partitioning.rs index 09a1cbd319ac4..a36960993e471 100644 --- a/src/librustc_trans/partitioning.rs +++ b/src/librustc_trans/partitioning.rs @@ -132,7 +132,7 @@ use std::sync::Arc; use std::collections::hash_map::DefaultHasher; use symbol_map::SymbolMap; use syntax::ast::NodeId; -use syntax::parse::token::{self, InternedString}; +use syntax::symbol::{Symbol, InternedString}; use trans_item::TransItem; use util::nodemap::{FxHashMap, FxHashSet}; @@ -272,7 +272,7 @@ pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, // If the partitioning should produce a fixed count of codegen units, merge // until that count is reached. if let PartitioningStrategy::FixedUnitCount(count) = strategy { - merge_codegen_units(&mut initial_partitioning, count, &tcx.crate_name[..]); + merge_codegen_units(&mut initial_partitioning, count, &tcx.crate_name.as_str()); debug_dump(scx, "POST MERGING:", initial_partitioning.codegen_units.iter()); } @@ -320,7 +320,7 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, let codegen_unit_name = match characteristic_def_id { Some(def_id) => compute_codegen_unit_name(tcx, def_id, is_volatile), - None => InternedString::new(FALLBACK_CODEGEN_UNIT), + None => Symbol::intern(FALLBACK_CODEGEN_UNIT).as_str(), }; let make_codegen_unit = || { @@ -365,7 +365,7 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, // always ensure we have at least one CGU; otherwise, if we have a // crate with just types (for example), we could wind up with no CGU if codegen_units.is_empty() { - let codegen_unit_name = InternedString::new(FALLBACK_CODEGEN_UNIT); + let codegen_unit_name = Symbol::intern(FALLBACK_CODEGEN_UNIT).as_str(); codegen_units.entry(codegen_unit_name.clone()) .or_insert_with(|| CodegenUnit::empty(codegen_unit_name.clone())); } @@ -523,7 +523,7 @@ fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let mut mod_path = String::with_capacity(64); let def_path = tcx.def_path(def_id); - mod_path.push_str(&tcx.crate_name(def_path.krate)); + mod_path.push_str(&tcx.crate_name(def_path.krate).as_str()); for part in tcx.def_path(def_id) .data @@ -542,14 +542,11 @@ fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mod_path.push_str(".volatile"); } - return token::intern_and_get_ident(&mod_path[..]); + return Symbol::intern(&mod_path[..]).as_str(); } fn numbered_codegen_unit_name(crate_name: &str, index: 
usize) -> InternedString { - token::intern_and_get_ident(&format!("{}{}{}", - crate_name, - NUMBERED_CODEGEN_UNIT_MARKER, - index)[..]) + Symbol::intern(&format!("{}{}{}", crate_name, NUMBERED_CODEGEN_UNIT_MARKER, index)).as_str() } fn debug_dump<'a, 'b, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, diff --git a/src/librustc_trans/trans_item.rs b/src/librustc_trans/trans_item.rs index 7fa59127704dc..f2d1e375793b3 100644 --- a/src/librustc_trans/trans_item.rs +++ b/src/librustc_trans/trans_item.rs @@ -285,7 +285,7 @@ impl<'a, 'tcx> TransItem<'tcx> { let attributes = tcx.get_attrs(def_id); if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") { - if let Some(linkage) = base::llvm_linkage_by_name(&name) { + if let Some(linkage) = base::llvm_linkage_by_name(&name.as_str()) { Some(linkage) } else { let span = tcx.map.span_if_local(def_id); @@ -531,7 +531,7 @@ impl<'a, 'tcx> DefPathBasedNames<'a, 'tcx> { // some_crate:: if !(self.omit_local_crate_name && def_id.is_local()) { - output.push_str(&self.tcx.crate_name(def_path.krate)); + output.push_str(&self.tcx.crate_name(def_path.krate).as_str()); output.push_str("::"); } diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 9bde6b0c4d995..c5db8bc8cedc7 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -71,7 +71,7 @@ use util::nodemap::{NodeMap, FxHashSet}; use std::cell::RefCell; use syntax::{abi, ast}; use syntax::feature_gate::{GateIssue, emit_feature_err}; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{Symbol, keywords}; use syntax_pos::{Span, Pos}; use errors::DiagnosticBuilder; @@ -645,7 +645,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { }; let output_binding = ConvertedBinding { - item_name: token::intern(FN_OUTPUT_NAME), + item_name: Symbol::intern(FN_OUTPUT_NAME), ty: output, span: output_span }; @@ -1252,7 +1252,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { if bounds.len() > 1 { let spans = bounds.iter().map(|b| { self.tcx().associated_items(b.def_id()).find(|item| { - item.kind == ty::AssociatedKind::Type && item.name.as_str() == assoc_name + item.kind == ty::AssociatedKind::Type && item.name == assoc_name }) .and_then(|item| self.tcx().map.as_local_node_id(item.def_id)) .and_then(|node_id| self.tcx().map.opt_span(node_id)) diff --git a/src/librustc_typeck/check/autoderef.rs b/src/librustc_typeck/check/autoderef.rs index 900c22a8176ed..e72dba858c562 100644 --- a/src/librustc_typeck/check/autoderef.rs +++ b/src/librustc_typeck/check/autoderef.rs @@ -20,7 +20,7 @@ use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue}; use rustc::hir; use syntax_pos::Span; -use syntax::parse::token; +use syntax::symbol::Symbol; #[derive(Copy, Clone, Debug)] enum AutoderefKind { @@ -120,7 +120,7 @@ impl<'a, 'gcx, 'tcx> Autoderef<'a, 'gcx, 'tcx> { let normalized = traits::normalize_projection_type(&mut selcx, ty::ProjectionTy { trait_ref: trait_ref, - item_name: token::intern("Target"), + item_name: Symbol::intern("Target"), }, cause, 0); @@ -198,7 +198,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (PreferMutLvalue, Some(trait_did)) => { self.lookup_method_in_trait(span, base_expr, - token::intern("deref_mut"), + Symbol::intern("deref_mut"), trait_did, base_ty, None) @@ -211,7 +211,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (None, Some(trait_did)) => { self.lookup_method_in_trait(span, base_expr, - token::intern("deref"), + Symbol::intern("deref"), trait_did, base_ty, None) diff --git 
a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index 7606a5b7a4d46..548f37cea06bc 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -16,7 +16,7 @@ use hir::def_id::{DefId, LOCAL_CRATE}; use hir::print; use rustc::{infer, traits}; use rustc::ty::{self, LvaluePreference, Ty}; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax::ptr::P; use syntax_pos::Span; @@ -160,9 +160,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { -> Option> { // Try the options that are least restrictive on the caller first. for &(opt_trait_def_id, method_name) in - &[(self.tcx.lang_items.fn_trait(), token::intern("call")), - (self.tcx.lang_items.fn_mut_trait(), token::intern("call_mut")), - (self.tcx.lang_items.fn_once_trait(), token::intern("call_once"))] { + &[(self.tcx.lang_items.fn_trait(), Symbol::intern("call")), + (self.tcx.lang_items.fn_mut_trait(), Symbol::intern("call_mut")), + (self.tcx.lang_items.fn_once_trait(), Symbol::intern("call_once"))] { let trait_def_id = match opt_trait_def_id { Some(def_id) => def_id, None => continue, diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index 77106b8b0c3a1..a07573a7b9eab 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -21,7 +21,7 @@ use {CrateCtxt, require_same_types}; use syntax::abi::Abi; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax_pos::Span; use rustc::hir; @@ -75,7 +75,7 @@ fn equate_intrinsic_type<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, /// and in libcore/intrinsics.rs pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { fn param<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, n: u32) -> Ty<'tcx> { - let name = token::intern(&format!("P{}", n)); + let name = Symbol::intern(&format!("P{}", n)); ccx.tcx.mk_param(n, name) } @@ -326,7 +326,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { pub fn check_platform_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { let param = |n| { - let name = token::intern(&format!("P{}", n)); + let name = Symbol::intern(&format!("P{}", n)); ccx.tcx.mk_param(n, name) }; diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index 66a532fd76acf..bd34e993e1e86 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -342,7 +342,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let def = pick.item.def(); if let probe::InherentImplPick = pick.kind { if !pick.item.vis.is_accessible_from(self.body_id, &self.tcx.map) { - let msg = format!("{} `{}` is private", def.kind_name(), &method_name.as_str()); + let msg = format!("{} `{}` is private", def.kind_name(), method_name); self.tcx.sess.span_err(span, &msg); } } diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index f08178e49fb2e..d0ebd6124f096 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -115,8 +115,8 @@ use syntax::ast; use syntax::attr; use syntax::codemap::{self, original_sp, Spanned}; use syntax::feature_gate::{GateIssue, emit_feature_err}; -use syntax::parse::token::{self, InternedString, keywords}; use syntax::ptr::P; +use syntax::symbol::{Symbol, InternedString, keywords}; use syntax::util::lev_distance::find_best_match_for_name; use syntax_pos::{self, BytePos, Span}; @@ -931,7 +931,8 @@ fn check_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, if let Some(ref 
attr) = item.attrs.iter().find(|a| { a.check_name("rustc_on_unimplemented") }) { - if let Some(ref istring) = attr.value_str() { + if let Some(istring) = attr.value_str() { + let istring = istring.as_str(); let parser = Parser::new(&istring); let types = &generics.types; for token in parser { @@ -942,7 +943,7 @@ fn check_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, Position::ArgumentNamed(s) if s == "Self" => (), // So is `{A}` if A is a type parameter Position::ArgumentNamed(s) => match types.iter().find(|t| { - t.name.as_str() == s + t.name == s }) { Some(_) => (), None => { @@ -2369,7 +2370,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (PreferMutLvalue, Some(trait_did)) => { self.lookup_method_in_trait_adjusted(expr.span, Some(&base_expr), - token::intern("index_mut"), + Symbol::intern("index_mut"), trait_did, autoderefs, unsize, @@ -2384,7 +2385,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { (None, Some(trait_did)) => { self.lookup_method_in_trait_adjusted(expr.span, Some(&base_expr), - token::intern("index"), + Symbol::intern("index"), trait_did, autoderefs, unsize, @@ -3027,7 +3028,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { fn suggest_field_name(variant: ty::VariantDef<'tcx>, field: &Spanned, skip : Vec) - -> Option { + -> Option { let name = field.node.as_str(); let names = variant.fields.iter().filter_map(|field| { // ignore already set fields and private fields from non-local crates @@ -3126,7 +3127,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { ty::TyAdt(adt, ..) if adt.is_enum() => { struct_span_err!(self.tcx.sess, field.name.span, E0559, "{} `{}::{}` has no field named `{}`", - kind_name, actual, variant.name.as_str(), field.name.node) + kind_name, actual, variant.name, field.name.node) } _ => { struct_span_err!(self.tcx.sess, field.name.span, E0560, @@ -3146,7 +3147,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { match ty.sty { ty::TyAdt(adt, ..) 
if adt.is_enum() => { err.span_label(field.name.span, &format!("`{}::{}` does not have this field", - ty, variant.name.as_str())); + ty, variant.name)); } _ => { err.span_label(field.name.span, &format!("`{}` does not have this field", ty)); diff --git a/src/librustc_typeck/check/op.rs b/src/librustc_typeck/check/op.rs index 8b4975b7e3a2f..adb8c6be42bc1 100644 --- a/src/librustc_typeck/check/op.rs +++ b/src/librustc_typeck/check/op.rs @@ -14,7 +14,7 @@ use super::FnCtxt; use hir::def_id::DefId; use rustc::ty::{Ty, TypeFoldable, PreferMutLvalue}; use syntax::ast; -use syntax::parse::token; +use syntax::symbol::Symbol; use rustc::hir; impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { @@ -182,7 +182,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let rhs_ty_var = self.next_ty_var(); let return_ty = match self.lookup_op_method(expr, lhs_ty, vec![rhs_ty_var], - token::intern(name), trait_def_id, + Symbol::intern(name), trait_def_id, lhs_expr) { Ok(return_ty) => return_ty, Err(()) => { @@ -248,9 +248,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { -> Ty<'tcx> { assert!(op.is_by_value()); - match self.lookup_op_method(ex, operand_ty, vec![], - token::intern(mname), trait_did, - operand_expr) { + let mname = Symbol::intern(mname); + match self.lookup_op_method(ex, operand_ty, vec![], mname, trait_did, operand_expr) { Ok(t) => t, Err(()) => { self.type_error_message(ex.span, |actual| { diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 535b6bcdcba17..48d79a3ba4c87 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -79,7 +79,7 @@ use rustc_const_math::ConstInt; use std::cell::RefCell; use syntax::{abi, ast, attr}; -use syntax::parse::token::{self, keywords}; +use syntax::symbol::{Symbol, keywords}; use syntax_pos::Span; use rustc::hir::{self, map as hir_map, print as pprust}; @@ -585,7 +585,7 @@ fn convert_closure<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let upvar_decls : Vec<_> = tcx.with_freevars(node_id, |fv| { fv.iter().enumerate().map(|(i, _)| ty::TypeParameterDef { index: (base_generics.count() as u32) + (i as u32), - name: token::intern(""), + name: Symbol::intern(""), def_id: def_id, default_def_id: base_def_id, default: None, diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index a141d0e4788dd..2cc1882ce3eae 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -24,9 +24,9 @@ use syntax::abi::Abi; use syntax::ast; use syntax::attr; use syntax::codemap::Spanned; -use syntax::parse::token::keywords; use syntax::ptr::P; use syntax::print::pprust as syntax_pprust; +use syntax::symbol::keywords; use syntax_pos::{self, DUMMY_SP, Pos}; use rustc_trans::back::link; @@ -242,7 +242,7 @@ impl Clean for CrateNum { } }); ExternalCrate { - name: (&cx.sess().cstore.crate_name(self.0)[..]).to_owned(), + name: cx.sess().cstore.crate_name(self.0).to_string(), attrs: cx.sess().cstore.item_attrs(root).clean(cx), primitives: primitives, } @@ -2577,7 +2577,7 @@ impl Clean> for doctree::Import { // #[doc(no_inline)] attribute is present. // Don't inline doc(hidden) imports so they can be stripped at a later stage. let denied = self.vis != hir::Public || self.attrs.iter().any(|a| { - &a.name()[..] 
== "doc" && match a.meta_item_list() { + a.name() == "doc" && match a.meta_item_list() { Some(l) => attr::list_contains_name(l, "no_inline") || attr::list_contains_name(l, "hidden"), None => false, diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index f5cd089e923d7..bb07efdd9e723 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -14,71 +14,43 @@ pub use self::TyParamBound::*; pub use self::UnsafeSource::*; pub use self::ViewPath_::*; pub use self::PathParameters::*; +pub use symbol::Symbol as Name; pub use util::ThinVec; use syntax_pos::{mk_sp, Span, DUMMY_SP, ExpnId}; use codemap::{respan, Spanned}; use abi::Abi; use ext::hygiene::SyntaxContext; -use parse::token::{self, keywords, InternedString}; use print::pprust; use ptr::P; +use symbol::{Symbol, keywords}; use tokenstream::{TokenTree}; +use std::collections::HashSet; use std::fmt; use std::rc::Rc; use std::u32; use serialize::{self, Encodable, Decodable, Encoder, Decoder}; -/// A name is a part of an identifier, representing a string or gensym. It's -/// the result of interning. -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct Name(pub u32); - /// An identifier contains a Name (index into the interner /// table) and a SyntaxContext to track renaming and /// macro expansion per Flatt et al., "Macros That Work Together" #[derive(Clone, Copy, PartialEq, Eq, Hash)] pub struct Ident { - pub name: Name, + pub name: Symbol, pub ctxt: SyntaxContext } -impl Name { - pub fn as_str(self) -> token::InternedString { - token::InternedString::new_from_name(self) - } -} - -impl fmt::Debug for Name { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}({})", self, self.0) - } -} - -impl fmt::Display for Name { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.as_str(), f) - } -} - -impl Encodable for Name { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(&self.as_str()) - } -} - -impl Decodable for Name { - fn decode(d: &mut D) -> Result { - Ok(token::intern(&d.read_str()?)) - } -} - impl Ident { pub const fn with_empty_ctxt(name: Name) -> Ident { Ident { name: name, ctxt: SyntaxContext::empty() } } + + /// Maps a string to an identifier with an empty syntax context. + pub fn from_str(s: &str) -> Ident { + Ident::with_empty_ctxt(Symbol::intern(s)) + } } impl fmt::Debug for Ident { @@ -401,7 +373,7 @@ impl Generics { } pub fn span_for_name(&self, name: &str) -> Option { for t in &self.ty_params { - if t.ident.name.as_str() == name { + if t.ident.name == name { return Some(t.span); } } @@ -479,7 +451,7 @@ pub struct WhereEqPredicate { /// The set of MetaItems that define the compilation environment of the crate, /// used to drive conditional compilation -pub type CrateConfig = Vec>; +pub type CrateConfig = HashSet<(Name, Option)>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Crate { @@ -498,7 +470,7 @@ pub type NestedMetaItem = Spanned; #[derive(Clone, Eq, RustcEncodable, RustcDecodable, Hash, Debug, PartialEq)] pub enum NestedMetaItemKind { /// A full MetaItem, for recursive meta items. - MetaItem(P), + MetaItem(MetaItem), /// A literal. /// /// E.g. "foo", 64, true @@ -508,53 +480,30 @@ pub enum NestedMetaItemKind { /// A spanned compile-time attribute item. /// /// E.g. 
`#[test]`, `#[derive(..)]` or `#[feature = "foo"]` -pub type MetaItem = Spanned; +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub struct MetaItem { + pub name: Name, + pub node: MetaItemKind, + pub span: Span, +} /// A compile-time attribute item. /// /// E.g. `#[test]`, `#[derive(..)]` or `#[feature = "foo"]` -#[derive(Clone, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum MetaItemKind { /// Word meta item. /// /// E.g. `test` as in `#[test]` - Word(InternedString), + Word, /// List meta item. /// /// E.g. `derive(..)` as in `#[derive(..)]` - List(InternedString, Vec), + List(Vec), /// Name value meta item. /// /// E.g. `feature = "foo"` as in `#[feature = "foo"]` - NameValue(InternedString, Lit), -} - -// can't be derived because the MetaItemKind::List requires an unordered comparison -impl PartialEq for MetaItemKind { - fn eq(&self, other: &MetaItemKind) -> bool { - use self::MetaItemKind::*; - match *self { - Word(ref ns) => match *other { - Word(ref no) => (*ns) == (*no), - _ => false - }, - List(ref ns, ref miss) => match *other { - List(ref no, ref miso) => { - ns == no && - miss.iter().all(|mi| { - miso.iter().any(|x| x.node == mi.node) - }) - } - _ => false - }, - NameValue(ref ns, ref vs) => match *other { - NameValue(ref no, ref vo) => { - (*ns) == (*no) && vs.node == vo.node - } - _ => false - }, - } - } + NameValue(Lit) } /// A Block (`{ .. }`). @@ -1149,7 +1098,7 @@ pub enum LitIntType { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum LitKind { /// A string literal (`"foo"`) - Str(InternedString, StrStyle), + Str(Symbol, StrStyle), /// A byte string (`b"foo"`) ByteStr(Rc>), /// A byte char (`b'f'`) @@ -1159,9 +1108,9 @@ pub enum LitKind { /// An integer literal (`1`) Int(u64, LitIntType), /// A float literal (`1f64` or `1E10f64`) - Float(InternedString, FloatTy), + Float(Symbol, FloatTy), /// A float literal without a suffix (`1.0 or 1.0E10`) - FloatUnsuffixed(InternedString), + FloatUnsuffixed(Symbol), /// A boolean literal Bool(bool), } @@ -1493,7 +1442,7 @@ pub enum AsmDialect { /// E.g. `"={eax}"(result)` as in `asm!("mov eax, 2" : "={eax}"(result) : : : "intel")`` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct InlineAsmOutput { - pub constraint: InternedString, + pub constraint: Symbol, pub expr: P, pub is_rw: bool, pub is_indirect: bool, @@ -1504,11 +1453,11 @@ pub struct InlineAsmOutput { /// E.g. `asm!("NOP");` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct InlineAsm { - pub asm: InternedString, + pub asm: Symbol, pub asm_str_style: StrStyle, pub outputs: Vec, - pub inputs: Vec<(InternedString, P)>, - pub clobbers: Vec, + pub inputs: Vec<(Symbol, P)>, + pub clobbers: Vec, pub volatile: bool, pub alignstack: bool, pub dialect: AsmDialect, @@ -1755,8 +1704,6 @@ impl ViewPath_ { } } -/// Meta-data associated with an item -pub type Attribute = Spanned; /// Distinguishes between Attributes that decorate items and Attributes that /// are contained as statements within items. 
These two cases need to be @@ -1770,13 +1717,15 @@ pub enum AttrStyle { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub struct AttrId(pub usize); +/// Meta-data associated with an item /// Doc-comments are promoted to attributes that have is_sugared_doc = true #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub struct Attribute_ { +pub struct Attribute { pub id: AttrId, pub style: AttrStyle, - pub value: P, + pub value: MetaItem, pub is_sugared_doc: bool, + pub span: Span, } /// TraitRef's appear in impls. diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 2977e340a3c0c..45c120e0b95ca 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -15,17 +15,17 @@ pub use self::ReprAttr::*; pub use self::IntType::*; use ast; -use ast::{AttrId, Attribute, Attribute_}; +use ast::{AttrId, Attribute, Name}; use ast::{MetaItem, MetaItemKind, NestedMetaItem, NestedMetaItemKind}; use ast::{Lit, Expr, Item, Local, Stmt, StmtKind}; -use codemap::{respan, spanned, dummy_spanned}; +use codemap::{spanned, dummy_spanned, mk_sp}; use syntax_pos::{Span, BytePos, DUMMY_SP}; use errors::Handler; use feature_gate::{Features, GatedCfg}; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; -use parse::token::InternedString; -use parse::{ParseSess, token}; +use parse::ParseSess; use ptr::P; +use symbol::Symbol; use util::ThinVec; use std::cell::{RefCell, Cell}; @@ -37,8 +37,8 @@ thread_local! { } enum AttrError { - MultipleItem(InternedString), - UnknownMetaItem(InternedString), + MultipleItem(Name), + UnknownMetaItem(Name), MissingSince, MissingFeature, MultipleStabilityLevels, @@ -61,7 +61,7 @@ fn handle_errors(diag: &Handler, span: Span, error: AttrError) { pub fn mark_used(attr: &Attribute) { debug!("Marking {:?} as used.", attr); - let AttrId(id) = attr.node.id; + let AttrId(id) = attr.id; USED_ATTRS.with(|slot| { let idx = (id / 64) as usize; let shift = id % 64; @@ -73,7 +73,7 @@ pub fn mark_used(attr: &Attribute) { } pub fn is_used(attr: &Attribute) -> bool { - let AttrId(id) = attr.node.id; + let AttrId(id) = attr.id; USED_ATTRS.with(|slot| { let idx = (id / 64) as usize; let shift = id % 64; @@ -84,7 +84,7 @@ pub fn is_used(attr: &Attribute) -> bool { pub fn mark_known(attr: &Attribute) { debug!("Marking {:?} as known.", attr); - let AttrId(id) = attr.node.id; + let AttrId(id) = attr.id; KNOWN_ATTRS.with(|slot| { let idx = (id / 64) as usize; let shift = id % 64; @@ -96,7 +96,7 @@ pub fn mark_known(attr: &Attribute) { } pub fn is_known(attr: &Attribute) -> bool { - let AttrId(id) = attr.node.id; + let AttrId(id) = attr.id; KNOWN_ATTRS.with(|slot| { let idx = (id / 64) as usize; let shift = id % 64; @@ -107,7 +107,7 @@ pub fn is_known(attr: &Attribute) -> bool { impl NestedMetaItem { /// Returns the MetaItem if self is a NestedMetaItemKind::MetaItem. - pub fn meta_item(&self) -> Option<&P> { + pub fn meta_item(&self) -> Option<&MetaItem> { match self.node { NestedMetaItemKind::MetaItem(ref item) => Some(&item), _ => None @@ -134,18 +134,18 @@ impl NestedMetaItem { /// Returns the name of the meta item, e.g. `foo` in `#[foo]`, /// `#[foo="bar"]` and `#[foo(bar)]`, if self is a MetaItem - pub fn name(&self) -> Option { + pub fn name(&self) -> Option { self.meta_item().and_then(|meta_item| Some(meta_item.name())) } /// Gets the string value if self is a MetaItem and the MetaItem is a /// MetaItemKind::NameValue variant containing a string, otherwise None. 
- pub fn value_str(&self) -> Option { + pub fn value_str(&self) -> Option { self.meta_item().and_then(|meta_item| meta_item.value_str()) } /// Returns a MetaItem if self is a MetaItem with Kind Word. - pub fn word(&self) -> Option<&P> { + pub fn word(&self) -> Option<&MetaItem> { self.meta_item().and_then(|meta_item| if meta_item.is_word() { Some(meta_item) } else { @@ -186,16 +186,16 @@ impl NestedMetaItem { impl Attribute { pub fn check_name(&self, name: &str) -> bool { - let matches = name == &self.name()[..]; + let matches = self.name() == name; if matches { mark_used(self); } matches } - pub fn name(&self) -> InternedString { self.meta().name() } + pub fn name(&self) -> Name { self.meta().name() } - pub fn value_str(&self) -> Option { + pub fn value_str(&self) -> Option { self.meta().value_str() } @@ -218,17 +218,13 @@ impl Attribute { } impl MetaItem { - pub fn name(&self) -> InternedString { - match self.node { - MetaItemKind::Word(ref n) => (*n).clone(), - MetaItemKind::NameValue(ref n, _) => (*n).clone(), - MetaItemKind::List(ref n, _) => (*n).clone(), - } + pub fn name(&self) -> Name { + self.name } - pub fn value_str(&self) -> Option { + pub fn value_str(&self) -> Option { match self.node { - MetaItemKind::NameValue(_, ref v) => { + MetaItemKind::NameValue(ref v) => { match v.node { ast::LitKind::Str(ref s, _) => Some((*s).clone()), _ => None, @@ -240,14 +236,14 @@ impl MetaItem { pub fn meta_item_list(&self) -> Option<&[NestedMetaItem]> { match self.node { - MetaItemKind::List(_, ref l) => Some(&l[..]), + MetaItemKind::List(ref l) => Some(&l[..]), _ => None } } pub fn is_word(&self) -> bool { match self.node { - MetaItemKind::Word(_) => true, + MetaItemKind::Word => true, _ => false, } } @@ -255,7 +251,7 @@ impl MetaItem { pub fn span(&self) -> Span { self.span } pub fn check_name(&self, name: &str) -> bool { - name == &self.name()[..] + self.name() == name } pub fn is_value_str(&self) -> bool { @@ -270,7 +266,7 @@ impl MetaItem { impl Attribute { /// Extract the MetaItem from inside this Attribute. 
pub fn meta(&self) -> &MetaItem { - &self.node.value + &self.value } /// Convert self to a normal #[doc="foo"] comment, if it is a @@ -279,16 +275,15 @@ impl Attribute { pub fn with_desugared_doc(&self, f: F) -> T where F: FnOnce(&Attribute) -> T, { - if self.node.is_sugared_doc { + if self.is_sugared_doc { let comment = self.value_str().unwrap(); let meta = mk_name_value_item_str( - InternedString::new("doc"), - token::intern_and_get_ident(&strip_doc_comment_decoration( - &comment))); - if self.node.style == ast::AttrStyle::Outer { - f(&mk_attr_outer(self.node.id, meta)) + Symbol::intern("doc"), + Symbol::intern(&strip_doc_comment_decoration(&comment.as_str()))); + if self.style == ast::AttrStyle::Outer { + f(&mk_attr_outer(self.id, meta)) } else { - f(&mk_attr_inner(self.node.id, meta)) + f(&mk_attr_inner(self.id, meta)) } } else { f(self) @@ -298,41 +293,37 @@ impl Attribute { /* Constructors */ -pub fn mk_name_value_item_str(name: InternedString, value: InternedString) - -> P { +pub fn mk_name_value_item_str(name: Name, value: Symbol) -> MetaItem { let value_lit = dummy_spanned(ast::LitKind::Str(value, ast::StrStyle::Cooked)); mk_spanned_name_value_item(DUMMY_SP, name, value_lit) } -pub fn mk_name_value_item(name: InternedString, value: ast::Lit) - -> P { +pub fn mk_name_value_item(name: Name, value: ast::Lit) -> MetaItem { mk_spanned_name_value_item(DUMMY_SP, name, value) } -pub fn mk_list_item(name: InternedString, items: Vec) -> P { +pub fn mk_list_item(name: Name, items: Vec) -> MetaItem { mk_spanned_list_item(DUMMY_SP, name, items) } -pub fn mk_list_word_item(name: InternedString) -> ast::NestedMetaItem { +pub fn mk_list_word_item(name: Name) -> ast::NestedMetaItem { dummy_spanned(NestedMetaItemKind::MetaItem(mk_spanned_word_item(DUMMY_SP, name))) } -pub fn mk_word_item(name: InternedString) -> P { +pub fn mk_word_item(name: Name) -> MetaItem { mk_spanned_word_item(DUMMY_SP, name) } -pub fn mk_spanned_name_value_item(sp: Span, name: InternedString, value: ast::Lit) - -> P { - P(respan(sp, MetaItemKind::NameValue(name, value))) +pub fn mk_spanned_name_value_item(sp: Span, name: Name, value: ast::Lit) -> MetaItem { + MetaItem { span: sp, name: name, node: MetaItemKind::NameValue(value) } } -pub fn mk_spanned_list_item(sp: Span, name: InternedString, items: Vec) - -> P { - P(respan(sp, MetaItemKind::List(name, items))) +pub fn mk_spanned_list_item(sp: Span, name: Name, items: Vec) -> MetaItem { + MetaItem { span: sp, name: name, node: MetaItemKind::List(items) } } -pub fn mk_spanned_word_item(sp: Span, name: InternedString) -> P { - P(respan(sp, MetaItemKind::Word(name))) +pub fn mk_spanned_word_item(sp: Span, name: Name) -> MetaItem { + MetaItem { span: sp, name: name, node: MetaItemKind::Word } } @@ -349,71 +340,63 @@ pub fn mk_attr_id() -> AttrId { } /// Returns an inner attribute with the given value. -pub fn mk_attr_inner(id: AttrId, item: P) -> Attribute { +pub fn mk_attr_inner(id: AttrId, item: MetaItem) -> Attribute { mk_spanned_attr_inner(DUMMY_SP, id, item) } /// Returns an innter attribute with the given value and span. -pub fn mk_spanned_attr_inner(sp: Span, id: AttrId, item: P) -> Attribute { - respan(sp, - Attribute_ { - id: id, - style: ast::AttrStyle::Inner, - value: item, - is_sugared_doc: false, - }) +pub fn mk_spanned_attr_inner(sp: Span, id: AttrId, item: MetaItem) -> Attribute { + Attribute { + id: id, + style: ast::AttrStyle::Inner, + value: item, + is_sugared_doc: false, + span: sp, + } } /// Returns an outer attribute with the given value. 
-pub fn mk_attr_outer(id: AttrId, item: P) -> Attribute { +pub fn mk_attr_outer(id: AttrId, item: MetaItem) -> Attribute { mk_spanned_attr_outer(DUMMY_SP, id, item) } /// Returns an outer attribute with the given value and span. -pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: P) -> Attribute { - respan(sp, - Attribute_ { - id: id, - style: ast::AttrStyle::Outer, - value: item, - is_sugared_doc: false, - }) +pub fn mk_spanned_attr_outer(sp: Span, id: AttrId, item: MetaItem) -> Attribute { + Attribute { + id: id, + style: ast::AttrStyle::Outer, + value: item, + is_sugared_doc: false, + span: sp, + } } -pub fn mk_doc_attr_outer(id: AttrId, item: P, is_sugared_doc: bool) -> Attribute { - dummy_spanned(Attribute_ { +pub fn mk_doc_attr_outer(id: AttrId, item: MetaItem, is_sugared_doc: bool) -> Attribute { + Attribute { id: id, style: ast::AttrStyle::Outer, value: item, is_sugared_doc: is_sugared_doc, - }) + span: DUMMY_SP, + } } -pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos, - hi: BytePos) +pub fn mk_sugared_doc_attr(id: AttrId, text: Symbol, lo: BytePos, hi: BytePos) -> Attribute { - let style = doc_comment_style(&text); + let style = doc_comment_style(&text.as_str()); let lit = spanned(lo, hi, ast::LitKind::Str(text, ast::StrStyle::Cooked)); - let attr = Attribute_ { + Attribute { id: id, style: style, - value: P(spanned(lo, hi, MetaItemKind::NameValue(InternedString::new("doc"), lit))), - is_sugared_doc: true - }; - spanned(lo, hi, attr) -} - -/* Searching */ -/// Check if `needle` occurs in `haystack` by a structural -/// comparison. This is slightly subtle, and relies on ignoring the -/// span included in the `==` comparison a plain MetaItem. -pub fn contains(haystack: &[P], needle: &MetaItem) -> bool { - debug!("attr::contains (name={})", needle.name()); - haystack.iter().any(|item| { - debug!(" testing: {}", item.name()); - item.node == needle.node - }) + value: MetaItem { + span: mk_sp(lo, hi), + name: Symbol::intern("doc"), + node: MetaItemKind::NameValue(lit), + }, + is_sugared_doc: true, + span: mk_sp(lo, hi), + } } pub fn list_contains_name(items: &[NestedMetaItem], name: &str) -> bool { @@ -432,15 +415,13 @@ pub fn contains_name(attrs: &[Attribute], name: &str) -> bool { }) } -pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) - -> Option { +pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str) -> Option { attrs.iter() .find(|at| at.check_name(name)) .and_then(|at| at.value_str()) } -pub fn last_meta_item_value_str_by_name(items: &[P], name: &str) - -> Option { +pub fn last_meta_item_value_str_by_name(items: &[MetaItem], name: &str) -> Option { items.iter() .rev() .find(|mi| mi.check_name(name)) @@ -449,12 +430,12 @@ pub fn last_meta_item_value_str_by_name(items: &[P], name: &str) /* Higher-level applications */ -pub fn find_crate_name(attrs: &[Attribute]) -> Option { +pub fn find_crate_name(attrs: &[Attribute]) -> Option { first_attr_value_str_by_name(attrs, "crate_name") } /// Find the value of #[export_name=*] attribute and check its validity. -pub fn find_export_name_attr(diag: &Handler, attrs: &[Attribute]) -> Option { +pub fn find_export_name_attr(diag: &Handler, attrs: &[Attribute]) -> Option { attrs.iter().fold(None, |ia,attr| { if attr.check_name("export_name") { if let s@Some(_) = attr.value_str() { @@ -488,13 +469,14 @@ pub enum InlineAttr { /// Determine what `#[inline]` attribute is present in `attrs`, if any. 
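// Aside — a hedged sketch, not part of this patch, showing how the restructured constructors
// above fit together: `MetaItem` now carries its `name` directly and `Attribute` is a plain
// struct with its own `span`, so building `#[doc = "..."]` needs no `Spanned`/`P` wrappers.
use syntax::ast;
use syntax::attr::{mk_attr_id, mk_name_value_item_str, mk_spanned_attr_outer};
use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;

fn doc_attr(text: &str) -> ast::Attribute {
    let meta = mk_name_value_item_str(Symbol::intern("doc"), Symbol::intern(text));
    mk_spanned_attr_outer(DUMMY_SP, mk_attr_id(), meta)
}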
pub fn find_inline_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> InlineAttr { - attrs.iter().fold(InlineAttr::None, |ia,attr| { - match attr.node.value.node { - MetaItemKind::Word(ref n) if n == "inline" => { + attrs.iter().fold(InlineAttr::None, |ia, attr| { + match attr.value.node { + _ if attr.value.name != "inline" => ia, + MetaItemKind::Word => { mark_used(attr); InlineAttr::Hint } - MetaItemKind::List(ref n, ref items) if n == "inline" => { + MetaItemKind::List(ref items) => { mark_used(attr); if items.len() != 1 { diagnostic.map(|d|{ span_err!(d, attr.span, E0534, "expected one argument"); }); @@ -527,7 +509,7 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool { /// Tests if a cfg-pattern matches the cfg set pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Features>) -> bool { match cfg.node { - ast::MetaItemKind::List(ref pred, ref mis) => { + ast::MetaItemKind::List(ref mis) => { for mi in mis.iter() { if !mi.is_meta_item() { handle_errors(&sess.span_diagnostic, mi.span, AttrError::UnsupportedLiteral); @@ -537,7 +519,7 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat // The unwraps below may look dangerous, but we've already asserted // that they won't fail with the loop above. - match &pred[..] { + match &*cfg.name.as_str() { "any" => mis.iter().any(|mi| { cfg_matches(mi.meta_item().unwrap(), sess, features) }), @@ -558,11 +540,11 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat } } }, - ast::MetaItemKind::Word(_) | ast::MetaItemKind::NameValue(..) => { + ast::MetaItemKind::Word | ast::MetaItemKind::NameValue(..) => { if let (Some(feats), Some(gated_cfg)) = (features, GatedCfg::gate(cfg)) { gated_cfg.check_and_emit(sess, feats); } - contains(&sess.config, cfg) + sess.config.contains(&(cfg.name(), cfg.value_str())) } } } @@ -571,7 +553,7 @@ pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Feat #[derive(RustcEncodable, RustcDecodable, Clone, Debug, PartialEq, Eq, Hash)] pub struct Stability { pub level: StabilityLevel, - pub feature: InternedString, + pub feature: Symbol, pub rustc_depr: Option, } @@ -579,20 +561,20 @@ pub struct Stability { #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)] pub enum StabilityLevel { // Reason for the current stability level and the relevant rust-lang issue - Unstable { reason: Option, issue: u32 }, - Stable { since: InternedString }, + Unstable { reason: Option, issue: u32 }, + Stable { since: Symbol }, } #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)] pub struct RustcDeprecation { - pub since: InternedString, - pub reason: InternedString, + pub since: Symbol, + pub reason: Symbol, } #[derive(RustcEncodable, RustcDecodable, PartialEq, PartialOrd, Clone, Debug, Eq, Hash)] pub struct Deprecation { - pub since: Option, - pub note: Option, + pub since: Option, + pub note: Option, } impl StabilityLevel { @@ -611,7 +593,6 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, 'outer: for attr in attrs_iter { let tag = attr.name(); - let tag = &*tag; if tag != "rustc_deprecated" && tag != "unstable" && tag != "stable" { continue // not a stability level } @@ -619,7 +600,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, mark_used(attr); if let Some(metas) = attr.meta_item_list() { - let get = |meta: &MetaItem, item: &mut Option| { + let get = |meta: &MetaItem, item: &mut Option| { if item.is_some() { handle_errors(diagnostic, 
meta.span, AttrError::MultipleItem(meta.name())); return false @@ -633,7 +614,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, } }; - match tag { + match &*tag.as_str() { "rustc_deprecated" => { if rustc_depr.is_some() { span_err!(diagnostic, item_sp, E0540, @@ -645,7 +626,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, let mut reason = None; for meta in metas { if let Some(mi) = meta.meta_item() { - match &*mi.name() { + match &*mi.name().as_str() { "since" => if !get(mi, &mut since) { continue 'outer }, "reason" => if !get(mi, &mut reason) { continue 'outer }, _ => { @@ -688,7 +669,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, let mut issue = None; for meta in metas { if let Some(mi) = meta.meta_item() { - match &*mi.name() { + match &*mi.name().as_str() { "feature" => if !get(mi, &mut feature) { continue 'outer }, "reason" => if !get(mi, &mut reason) { continue 'outer }, "issue" => if !get(mi, &mut issue) { continue 'outer }, @@ -710,7 +691,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, level: Unstable { reason: reason, issue: { - if let Ok(issue) = issue.parse() { + if let Ok(issue) = issue.as_str().parse() { issue } else { span_err!(diagnostic, attr.span(), E0545, @@ -743,7 +724,7 @@ fn find_stability_generic<'a, I>(diagnostic: &Handler, let mut since = None; for meta in metas { if let NestedMetaItemKind::MetaItem(ref mi) = meta.node { - match &*mi.name() { + match &*mi.name().as_str() { "feature" => if !get(mi, &mut feature) { continue 'outer }, "since" => if !get(mi, &mut since) { continue 'outer }, _ => { @@ -821,7 +802,7 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler, } depr = if let Some(metas) = attr.meta_item_list() { - let get = |meta: &MetaItem, item: &mut Option| { + let get = |meta: &MetaItem, item: &mut Option| { if item.is_some() { handle_errors(diagnostic, meta.span, AttrError::MultipleItem(meta.name())); return false @@ -839,7 +820,7 @@ fn find_deprecation_generic<'a, I>(diagnostic: &Handler, let mut note = None; for meta in metas { if let NestedMetaItemKind::MetaItem(ref mi) = meta.node { - match &*mi.name() { + match &*mi.name().as_str() { "since" => if !get(mi, &mut since) { continue 'outer }, "note" => if !get(mi, &mut note) { continue 'outer }, _ => { @@ -875,7 +856,7 @@ pub fn find_deprecation(diagnostic: &Handler, attrs: &[Attribute], find_deprecation_generic(diagnostic, attrs.iter(), item_sp) } -pub fn require_unique_names(diagnostic: &Handler, metas: &[P]) { +pub fn require_unique_names(diagnostic: &Handler, metas: &[MetaItem]) { let mut set = HashSet::new(); for meta in metas { let name = meta.name(); @@ -896,8 +877,8 @@ pub fn require_unique_names(diagnostic: &Handler, metas: &[P]) { /// structure layout, and `packed` to remove padding. pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec { let mut acc = Vec::new(); - match attr.node.value.node { - ast::MetaItemKind::List(ref s, ref items) if s == "repr" => { + match attr.value.node { + ast::MetaItemKind::List(ref items) if attr.value.name == "repr" => { mark_used(attr); for item in items { if !item.is_meta_item() { @@ -906,7 +887,7 @@ pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec } if let Some(mi) = item.word() { - let word = &*mi.name(); + let word = &*mi.name().as_str(); let hint = match word { // Can't use "extern" because it's not a lexical identifier. 
"C" => Some(ReprExtern), diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index 49012ad036a9a..3cdfa718eabae 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -871,6 +871,7 @@ impl CodeMapper for CodeMap { #[cfg(test)] mod tests { use super::*; + use symbol::keywords; use std::rc::Rc; #[test] @@ -1097,10 +1098,9 @@ mod tests { #[test] fn t11() { // Test span_to_expanded_string works with expansion - use ast::Name; let cm = init_code_map(); let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION }; - let format = ExpnFormat::MacroBang(Name(0u32)); + let format = ExpnFormat::MacroBang(keywords::Invalid.name()); let callee = NameAndSpan { format: format, allow_internal_unstable: false, span: None }; @@ -1197,11 +1197,9 @@ mod tests { fn init_expansion_chain(cm: &CodeMap) -> Span { // Creates an expansion chain containing two recursive calls // root -> expA -> expA -> expB -> expB -> end - use ast::Name; - let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION }; - let format_root = ExpnFormat::MacroBang(Name(0u32)); + let format_root = ExpnFormat::MacroBang(keywords::Invalid.name()); let callee_root = NameAndSpan { format: format_root, allow_internal_unstable: false, span: Some(root) }; @@ -1210,7 +1208,7 @@ mod tests { let id_a1 = cm.record_expansion(info_a1); let span_a1 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a1 }; - let format_a = ExpnFormat::MacroBang(Name(1u32)); + let format_a = ExpnFormat::MacroBang(keywords::As.name()); let callee_a = NameAndSpan { format: format_a, allow_internal_unstable: false, span: Some(span_a1) }; @@ -1223,7 +1221,7 @@ mod tests { let id_b1 = cm.record_expansion(info_b1); let span_b1 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b1 }; - let format_b = ExpnFormat::MacroBang(Name(2u32)); + let format_b = ExpnFormat::MacroBang(keywords::Box.name()); let callee_b = NameAndSpan { format: format_b, allow_internal_unstable: false, span: None }; diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 02429f02738fd..89eea3f6f8b28 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -12,7 +12,7 @@ use attr::HasAttrs; use feature_gate::{feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features, GateIssue}; use {fold, attr}; use ast; -use codemap::{Spanned, respan}; +use codemap::Spanned; use parse::ParseSess; use ptr::P; @@ -106,12 +106,13 @@ impl<'a> StripUnconfigured<'a> { match (cfg.meta_item(), mi.meta_item()) { (Some(cfg), Some(mi)) => if cfg_matches(&cfg, self.sess, self.features) { - self.process_cfg_attr(respan(mi.span, ast::Attribute_ { + self.process_cfg_attr(ast::Attribute { id: attr::mk_attr_id(), - style: attr.node.style, + style: attr.style, value: mi.clone(), is_sugared_doc: false, - })) + span: mi.span, + }) } else { None }, @@ -131,8 +132,8 @@ impl<'a> StripUnconfigured<'a> { return false; } - let mis = match attr.node.value.node { - ast::MetaItemKind::List(_, ref mis) if is_cfg(&attr) => mis, + let mis = match attr.value.node { + ast::MetaItemKind::List(ref mis) if is_cfg(&attr) => mis, _ => return true }; @@ -160,7 +161,7 @@ impl<'a> StripUnconfigured<'a> { attr.span, GateIssue::Language, EXPLAIN_STMT_ATTR_SYNTAX); - if attr.node.is_sugared_doc { + if attr.is_sugared_doc { err.help("`///` is for documentation comments. 
For a plain comment, use `//`."); } err.emit(); diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 81c8e0bdb8262..fe5cb87ad59b5 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -19,6 +19,7 @@ use ext::base::{ExtCtxt, MacEager, MacResult}; use ext::build::AstBuilder; use parse::token; use ptr::P; +use symbol::Symbol; use tokenstream::{TokenTree}; use util::small_vector::SmallVector; @@ -141,7 +142,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, )); } }); - let sym = Ident::with_empty_ctxt(token::gensym(&format!( + let sym = Ident::with_empty_ctxt(Symbol::gensym(&format!( "__register_diagnostic_{}", code ))); MacEager::items(SmallVector::many(vec![ @@ -194,11 +195,11 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, let (count, expr) = with_registered_diagnostics(|diagnostics| { let descriptions: Vec> = - diagnostics.iter().filter_map(|(code, info)| { + diagnostics.iter().filter_map(|(&code, info)| { info.description.map(|description| { ecx.expr_tuple(span, vec![ - ecx.expr_str(span, code.as_str()), - ecx.expr_str(span, description.as_str()) + ecx.expr_str(span, code), + ecx.expr_str(span, description) ]) }) }).collect(); diff --git a/src/libsyntax/entry.rs b/src/libsyntax/entry.rs index 7014e576e2b8a..93ca1948ed84b 100644 --- a/src/libsyntax/entry.rs +++ b/src/libsyntax/entry.rs @@ -28,7 +28,7 @@ pub fn entry_point_type(item: &Item, depth: usize) -> EntryPointType { EntryPointType::Start } else if attr::contains_name(&item.attrs, "main") { EntryPointType::MainAttr - } else if item.ident.name.as_str() == "main" { + } else if item.ident.name == "main" { if depth == 1 { // This is a top-level function so can be 'main' EntryPointType::MainNamed diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 7f66b060052ea..ddf4cf11f2048 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -20,8 +20,8 @@ use ext::hygiene::Mark; use fold::{self, Folder}; use parse::{self, parser}; use parse::token; -use parse::token::{InternedString, str_to_ident}; use ptr::P; +use symbol::Symbol; use util::small_vector::SmallVector; use std::path::PathBuf; @@ -643,7 +643,7 @@ impl<'a> ExtCtxt<'a> { loop { if self.codemap().with_expn_info(expn_id, |info| { info.map_or(None, |i| { - if i.callee.name().as_str() == "include" { + if i.callee.name() == "include" { // Stop going up the backtrace once include! is encountered return None; } @@ -735,7 +735,7 @@ impl<'a> ExtCtxt<'a> { self.ecfg.trace_mac = x } pub fn ident_of(&self, st: &str) -> ast::Ident { - str_to_ident(st) + ast::Ident::from_str(st) } pub fn std_path(&self, components: &[&str]) -> Vec { let mut v = Vec::new(); @@ -746,7 +746,7 @@ impl<'a> ExtCtxt<'a> { return v } pub fn name_of(&self, st: &str) -> ast::Name { - token::intern(st) + Symbol::intern(st) } } @@ -754,7 +754,7 @@ impl<'a> ExtCtxt<'a> { /// emitting `err_msg` if `expr` is not a string literal. This does not stop /// compilation on error, merely emits a non-fatal error and returns None. pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P, err_msg: &str) - -> Option> { + -> Option> { // Update `expr.span`'s expn_id now in case expr is an `include!` macro invocation. 
let expr = expr.map(|mut expr| { expr.span.expn_id = cx.backtrace(); @@ -765,7 +765,7 @@ pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P, err_msg: &st let expr = cx.expander().fold_expr(expr); match expr.node { ast::ExprKind::Lit(ref l) => match l.node { - ast::LitKind::Str(ref s, style) => return Some(respan(expr.span, (s.clone(), style))), + ast::LitKind::Str(s, style) => return Some(respan(expr.span, (s, style))), _ => cx.span_err(l.span, err_msg) }, _ => cx.span_err(expr.span, err_msg) @@ -774,7 +774,7 @@ pub fn expr_to_spanned_string(cx: &mut ExtCtxt, expr: P, err_msg: &st } pub fn expr_to_string(cx: &mut ExtCtxt, expr: P, err_msg: &str) - -> Option<(InternedString, ast::StrStyle)> { + -> Option<(Symbol, ast::StrStyle)> { expr_to_spanned_string(cx, expr, err_msg).map(|s| s.node) } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index c3e28cbb006a0..324afc20051d4 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -14,8 +14,8 @@ use attr; use syntax_pos::{Span, DUMMY_SP, Pos}; use codemap::{dummy_spanned, respan, Spanned}; use ext::base::ExtCtxt; -use parse::token::{self, keywords, InternedString}; use ptr::P; +use symbol::{Symbol, keywords}; // Transitional reexports so qquote can find the paths it is looking for mod syntax { @@ -149,7 +149,7 @@ pub trait AstBuilder { fn expr_vec(&self, sp: Span, exprs: Vec>) -> P; fn expr_vec_ng(&self, sp: Span) -> P; fn expr_vec_slice(&self, sp: Span, exprs: Vec>) -> P; - fn expr_str(&self, sp: Span, s: InternedString) -> P; + fn expr_str(&self, sp: Span, s: Symbol) -> P; fn expr_some(&self, sp: Span, expr: P) -> P; fn expr_none(&self, sp: Span) -> P; @@ -158,7 +158,7 @@ pub trait AstBuilder { fn expr_tuple(&self, sp: Span, exprs: Vec>) -> P; - fn expr_fail(&self, span: Span, msg: InternedString) -> P; + fn expr_fail(&self, span: Span, msg: Symbol) -> P; fn expr_unreachable(&self, span: Span) -> P; fn expr_ok(&self, span: Span, expr: P) -> P; @@ -275,22 +275,22 @@ pub trait AstBuilder { generics: Generics) -> P; fn item_ty(&self, span: Span, name: Ident, ty: P) -> P; - fn attribute(&self, sp: Span, mi: P) -> ast::Attribute; + fn attribute(&self, sp: Span, mi: ast::MetaItem) -> ast::Attribute; - fn meta_word(&self, sp: Span, w: InternedString) -> P; + fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem; - fn meta_list_item_word(&self, sp: Span, w: InternedString) -> ast::NestedMetaItem; + fn meta_list_item_word(&self, sp: Span, w: ast::Name) -> ast::NestedMetaItem; fn meta_list(&self, sp: Span, - name: InternedString, + name: ast::Name, mis: Vec ) - -> P; + -> ast::MetaItem; fn meta_name_value(&self, sp: Span, - name: InternedString, + name: ast::Name, value: ast::LitKind) - -> P; + -> ast::MetaItem; fn item_use(&self, sp: Span, vis: ast::Visibility, vp: P) -> P; @@ -755,7 +755,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_vec_slice(&self, sp: Span, exprs: Vec>) -> P { self.expr_addr_of(sp, self.expr_vec(sp, exprs)) } - fn expr_str(&self, sp: Span, s: InternedString) -> P { + fn expr_str(&self, sp: Span, s: Symbol) -> P { self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked)) } @@ -785,10 +785,9 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr(sp, ast::ExprKind::Tup(exprs)) } - fn expr_fail(&self, span: Span, msg: InternedString) -> P { + fn expr_fail(&self, span: Span, msg: Symbol) -> P { let loc = self.codemap().lookup_char_pos(span.lo); - let expr_file = self.expr_str(span, - token::intern_and_get_ident(&loc.file.name)); + let expr_file = self.expr_str(span, 
Symbol::intern(&loc.file.name)); let expr_line = self.expr_u32(span, loc.line as u32); let expr_file_line_tuple = self.expr_tuple(span, vec![expr_file, expr_line]); let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple); @@ -801,9 +800,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn expr_unreachable(&self, span: Span) -> P { - self.expr_fail(span, - InternedString::new( - "internal error: entered unreachable code")) + self.expr_fail(span, Symbol::intern("internal error: entered unreachable code")) } fn expr_ok(&self, sp: Span, expr: P) -> P { @@ -1146,25 +1143,25 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.item_ty_poly(span, name, ty, Generics::default()) } - fn attribute(&self, sp: Span, mi: P) -> ast::Attribute { + fn attribute(&self, sp: Span, mi: ast::MetaItem) -> ast::Attribute { attr::mk_spanned_attr_outer(sp, attr::mk_attr_id(), mi) } - fn meta_word(&self, sp: Span, w: InternedString) -> P { + fn meta_word(&self, sp: Span, w: ast::Name) -> ast::MetaItem { attr::mk_spanned_word_item(sp, w) } - fn meta_list_item_word(&self, sp: Span, w: InternedString) -> ast::NestedMetaItem { + fn meta_list_item_word(&self, sp: Span, w: ast::Name) -> ast::NestedMetaItem { respan(sp, ast::NestedMetaItemKind::MetaItem(attr::mk_spanned_word_item(sp, w))) } - fn meta_list(&self, sp: Span, name: InternedString, mis: Vec) - -> P { + fn meta_list(&self, sp: Span, name: ast::Name, mis: Vec) + -> ast::MetaItem { attr::mk_spanned_list_item(sp, name, mis) } - fn meta_name_value(&self, sp: Span, name: InternedString, value: ast::LitKind) - -> P { + fn meta_name_value(&self, sp: Span, name: ast::Name, value: ast::LitKind) + -> ast::MetaItem { attr::mk_spanned_name_value_item(sp, name, respan(sp, value)) } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 8e0c3ce8448da..844fb77e29d79 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -23,10 +23,11 @@ use fold; use fold::*; use parse::{ParseSess, PResult, lexer}; use parse::parser::Parser; -use parse::token::{self, intern, keywords}; +use parse::token; use print::pprust; use ptr::P; use std_inject; +use symbol::keywords; use tokenstream::{TokenTree, TokenStream}; use util::small_vector::SmallVector; use visit::Visitor; @@ -190,7 +191,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { self.cx.crate_root = std_inject::injected_crate_name(&krate); let mut module = ModuleData { - mod_path: vec![token::str_to_ident(&self.cx.ecfg.crate_name)], + mod_path: vec![Ident::from_str(&self.cx.ecfg.crate_name)], directory: PathBuf::from(self.cx.codemap().span_to_filename(krate.span)), }; module.directory.pop(); @@ -246,7 +247,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.resolver.resolve_macro(scope, &mac.node.path, force) } InvocationKind::Attr { ref attr, .. 
} => { - let ident = ast::Ident::with_empty_ctxt(intern(&*attr.name())); + let ident = Ident::with_empty_ctxt(attr.name()); let path = ast::Path::from_ident(attr.span, ident); self.cx.resolver.resolve_macro(scope, &path, force) } @@ -341,7 +342,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; attr::mark_used(&attr); - let name = intern(&attr.name()); + let name = attr.name(); self.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { @@ -353,12 +354,12 @@ impl<'a, 'b> MacroExpander<'a, 'b> { match *ext { MultiModifier(ref mac) => { - let item = mac.expand(self.cx, attr.span, &attr.node.value, item); + let item = mac.expand(self.cx, attr.span, &attr.value, item); kind.expect_from_annotatables(item) } MultiDecorator(ref mac) => { let mut items = Vec::new(); - mac.expand(self.cx, attr.span, &attr.node.value, &item, + mac.expand(self.cx, attr.span, &attr.value, &item, &mut |item| items.push(item)); items.push(item); kind.expect_from_annotatables(items) @@ -779,7 +780,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { if inline_module { if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") { self.cx.current_expansion.no_noninline_mod = false; - module.directory.push(&*path); + module.directory.push(&*path.as_str()); } else { module.directory.push(&*item.ident.name.as_str()); } diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index e323dd2f62327..4fe57a8345e94 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -13,8 +13,8 @@ use codemap::{DUMMY_SP, dummy_spanned}; use ext::base::ExtCtxt; use ext::expand::{Expansion, ExpansionKind}; use fold::*; -use parse::token::{intern, keywords}; use ptr::P; +use symbol::{Symbol, keywords}; use util::move_map::MoveMap; use util::small_vector::SmallVector; @@ -227,7 +227,7 @@ pub fn reconstructed_macro_rules(def: &ast::MacroDef) -> Expansion { span: DUMMY_SP, global: false, segments: vec![ast::PathSegment { - identifier: ast::Ident::with_empty_ctxt(intern("macro_rules")), + identifier: ast::Ident::with_empty_ctxt(Symbol::intern("macro_rules")), parameters: ast::PathParameters::none(), }], }, diff --git a/src/libsyntax/ext/proc_macro_shim.rs b/src/libsyntax/ext/proc_macro_shim.rs index dc3a01f41bc9d..21ce89a6dd5be 100644 --- a/src/libsyntax/ext/proc_macro_shim.rs +++ b/src/libsyntax/ext/proc_macro_shim.rs @@ -66,6 +66,7 @@ pub mod prelude { pub use ast::Ident; pub use codemap::{DUMMY_SP, Span}; pub use ext::base::{ExtCtxt, MacResult}; - pub use parse::token::{self, Token, DelimToken, keywords, str_to_ident}; + pub use parse::token::{self, Token, DelimToken}; + pub use symbol::keywords; pub use tokenstream::{TokenTree, TokenStream}; } diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 969cfa292ce80..aa777a19a9bcb 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -33,6 +33,7 @@ pub mod rt { use parse::{self, token, classify}; use ptr::P; use std::rc::Rc; + use symbol::Symbol; use tokenstream::{self, TokenTree}; @@ -211,7 +212,7 @@ pub mod rt { impl_to_tokens_slice! { P, [] } impl_to_tokens_slice! 
{ ast::Arg, [TokenTree::Token(DUMMY_SP, token::Comma)] } - impl ToTokens for P { + impl ToTokens for ast::MetaItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtMeta(self.clone()); vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] @@ -223,13 +224,13 @@ pub mod rt { let mut r = vec![]; // FIXME: The spans could be better r.push(TokenTree::Token(self.span, token::Pound)); - if self.node.style == ast::AttrStyle::Inner { + if self.style == ast::AttrStyle::Inner { r.push(TokenTree::Token(self.span, token::Not)); } r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited { delim: token::Bracket, open_span: self.span, - tts: self.node.value.to_tokens(cx), + tts: self.value.to_tokens(cx), close_span: self.span, }))); r @@ -238,8 +239,7 @@ pub mod rt { impl ToTokens for str { fn to_tokens(&self, cx: &ExtCtxt) -> Vec { - let lit = ast::LitKind::Str( - token::intern_and_get_ident(self), ast::StrStyle::Cooked); + let lit = ast::LitKind::Str(Symbol::intern(self), ast::StrStyle::Cooked); dummy_spanned(lit).to_tokens(cx) } } @@ -405,7 +405,7 @@ pub fn parse_block_panic(parser: &mut Parser) -> P { panictry!(parser.parse_block()) } -pub fn parse_meta_item_panic(parser: &mut Parser) -> P { +pub fn parse_meta_item_panic(parser: &mut Parser) -> ast::MetaItem { panictry!(parser.parse_meta_item()) } @@ -527,17 +527,17 @@ pub fn expand_quote_matcher(cx: &mut ExtCtxt, base::MacEager::expr(expanded) } -fn ids_ext(strs: Vec ) -> Vec { - strs.iter().map(|str| str_to_ident(&(*str))).collect() +fn ids_ext(strs: Vec) -> Vec { + strs.iter().map(|s| ast::Ident::from_str(s)).collect() } -fn id_ext(str: &str) -> ast::Ident { - str_to_ident(str) +fn id_ext(s: &str) -> ast::Ident { + ast::Ident::from_str(s) } // Lift an ident to the expr that evaluates to that ident. 
fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { - let e_str = cx.expr_str(sp, ident.name.as_str()); + let e_str = cx.expr_str(sp, ident.name); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("ident_of"), @@ -546,7 +546,7 @@ fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { // Lift a name to the expr that evaluates to that name fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { - let e_str = cx.expr_str(sp, ident.name.as_str()); + let e_str = cx.expr_str(sp, ident.name); cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("name_of"), diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index bda84cdaf39eb..320d49b64634c 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -17,6 +17,7 @@ use parse::token; use parse; use print::pprust; use ptr::P; +use symbol::Symbol; use tokenstream; use util::small_vector::SmallVector; @@ -60,15 +61,13 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) let topmost = cx.expansion_cause(); let loc = cx.codemap().lookup_char_pos(topmost.lo); - let filename = token::intern_and_get_ident(&loc.file.name); - base::MacEager::expr(cx.expr_str(topmost, filename)) + base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name))) } pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) -> Box { let s = pprust::tts_to_string(tts); - base::MacEager::expr(cx.expr_str(sp, - token::intern_and_get_ident(&s[..]))) + base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s))) } pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) @@ -77,9 +76,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree]) let mod_path = &cx.current_expansion.module.mod_path; let string = mod_path.iter().map(|x| x.to_string()).collect::>().join("::"); - base::MacEager::expr(cx.expr_str( - sp, - token::intern_and_get_ident(&string[..]))) + base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&string))) } /// include! : parse the given file as an expr @@ -144,10 +141,9 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT // Add this input file to the code map to make it available as // dependency information let filename = format!("{}", file.display()); - let interned = token::intern_and_get_ident(&src[..]); cx.codemap().new_filemap_and_lines(&filename, None, &src); - base::MacEager::expr(cx.expr_str(sp, interned)) + base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src))) } Err(_) => { cx.span_err(sp, diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 552d4de961740..59b8b50e88cb6 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -20,9 +20,10 @@ use ext::tt::macro_parser::{parse, parse_failure_msg}; use parse::ParseSess; use parse::lexer::new_tt_reader; use parse::parser::{Parser, Restrictions}; -use parse::token::{self, gensym_ident, NtTT, Token}; +use parse::token::{self, NtTT, Token}; use parse::token::Token::*; use print; +use symbol::Symbol; use tokenstream::{self, TokenTree}; use std::collections::{HashMap}; @@ -187,16 +188,16 @@ impl IdentMacroExpander for MacroRulesExpander { /// Converts a `macro_rules!` invocation into a syntax extension. 
pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { - let lhs_nm = gensym_ident("lhs"); - let rhs_nm = gensym_ident("rhs"); + let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs")); + let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs")); // The pattern that macro_rules matches. // The grammar for macro_rules! is: // $( $lhs:tt => $rhs:tt );+ // ...quasiquoting this would be nice. // These spans won't matter, anyways - let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt")); - let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt")); + let match_lhs_tok = MatchNt(lhs_nm, ast::Ident::from_str("tt")); + let match_rhs_tok = MatchNt(rhs_nm, ast::Ident::from_str("tt")); let argument_gram = vec![ TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { tts: vec![ @@ -790,8 +791,7 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result "pat" => { match *tok { FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), - Ident(i) if (i.name.as_str() == "if" || - i.name.as_str() == "in") => Ok(true), + Ident(i) if i.name == "if" || i.name == "in" => Ok(true), _ => Ok(false) } }, @@ -799,8 +799,8 @@ fn is_in_follow(tok: &Token, frag: &str) -> Result match *tok { OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true), - MatchNt(_, ref frag) if frag.name.as_str() == "block" => Ok(true), - Ident(i) if i.name.as_str() == "as" || i.name.as_str() == "where" => Ok(true), + MatchNt(_, ref frag) if frag.name == "block" => Ok(true), + Ident(i) if i.name == "as" || i.name == "where" => Ok(true), _ => Ok(false) } }, diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index ea66fdc31cf08..680896e599b66 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -33,7 +33,7 @@ use syntax_pos::Span; use errors::{DiagnosticBuilder, Handler}; use visit::{self, FnKind, Visitor}; use parse::ParseSess; -use parse::token::InternedString; +use symbol::Symbol; use std::ascii::AsciiExt; use std::env; @@ -59,9 +59,9 @@ macro_rules! declare_features { /// A set of features to be used by later passes. pub struct Features { /// #![feature] attrs for stable language features, for error reporting - pub declared_stable_lang_features: Vec<(InternedString, Span)>, + pub declared_stable_lang_features: Vec<(Symbol, Span)>, /// #![feature] attrs for non-language (library) features - pub declared_lib_features: Vec<(InternedString, Span)>, + pub declared_lib_features: Vec<(Symbol, Span)>, $(pub $feature: bool),+ } @@ -757,7 +757,7 @@ pub struct GatedCfg { impl GatedCfg { pub fn gate(cfg: &ast::MetaItem) -> Option { - let name = cfg.name(); + let name = &*cfg.name().as_str(); GATED_CFGS.iter() .position(|info| info.0 == name) .map(|idx| { @@ -804,7 +804,7 @@ macro_rules! gate_feature { impl<'a> Context<'a> { fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { debug!("check_attribute(attr = {:?})", attr); - let name = &*attr.name(); + let name = &*attr.name().as_str(); for &(n, ty, ref gateage) in BUILTIN_ATTRIBUTES { if n == name { if let &Gated(_, ref name, ref desc, ref has_feature) = gateage { @@ -991,11 +991,11 @@ fn contains_novel_literal(item: &ast::MetaItem) -> bool { use ast::NestedMetaItemKind::*; match item.node { - Word(..) 
=> false, - NameValue(_, ref lit) => !lit.node.is_str(), - List(_, ref list) => list.iter().any(|li| { + Word => false, + NameValue(ref lit) => !lit.node.is_str(), + List(ref list) => list.iter().any(|li| { match li.node { - MetaItem(ref mi) => contains_novel_literal(&**mi), + MetaItem(ref mi) => contains_novel_literal(&mi), Literal(_) => true, } }), @@ -1013,7 +1013,7 @@ impl<'a> Visitor for PostExpansionVisitor<'a> { self.context.check_attribute(attr, false); } - if contains_novel_literal(&*(attr.node.value)) { + if contains_novel_literal(&attr.value) { gate_feature_post!(&self, attr_literals, attr.span, "non-string literals in attributes, or string \ literals in top-level positions, are experimental"); @@ -1121,9 +1121,8 @@ impl<'a> Visitor for PostExpansionVisitor<'a> { } fn visit_foreign_item(&mut self, i: &ast::ForeignItem) { - let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs, - "link_name") { - Some(val) => val.starts_with("llvm."), + let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs, "link_name") { + Some(val) => val.as_str().starts_with("llvm."), _ => false }; if links_to_llvm { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 2e62f23578d81..ff0255a2f21f2 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -22,8 +22,9 @@ use ast::*; use ast; use syntax_pos::Span; use codemap::{Spanned, respan}; -use parse::token::{self, keywords}; +use parse::token; use ptr::P; +use symbol::keywords; use tokenstream::*; use util::small_vector::SmallVector; use util::move_map::MoveMap; @@ -43,7 +44,7 @@ pub trait Folder : Sized { noop_fold_crate(c, self) } - fn fold_meta_items(&mut self, meta_items: Vec>) -> Vec> { + fn fold_meta_items(&mut self, meta_items: Vec) -> Vec { noop_fold_meta_items(meta_items, self) } @@ -51,7 +52,7 @@ pub trait Folder : Sized { noop_fold_meta_list_item(list_item, self) } - fn fold_meta_item(&mut self, meta_item: P) -> P { + fn fold_meta_item(&mut self, meta_item: MetaItem) -> MetaItem { noop_fold_meta_item(meta_item, self) } @@ -293,8 +294,7 @@ pub trait Folder : Sized { } } -pub fn noop_fold_meta_items(meta_items: Vec>, fld: &mut T) - -> Vec> { +pub fn noop_fold_meta_items(meta_items: Vec, fld: &mut T) -> Vec { meta_items.move_map(|x| fld.fold_meta_item(x)) } @@ -486,16 +486,13 @@ pub fn noop_fold_local(l: P, fld: &mut T) -> P { }) } -pub fn noop_fold_attribute(at: Attribute, fld: &mut T) -> Option { - let Spanned {node: Attribute_ {id, style, value, is_sugared_doc}, span} = at; - Some(Spanned { - node: Attribute_ { - id: id, - style: style, - value: fld.fold_meta_item(value), - is_sugared_doc: is_sugared_doc - }, - span: fld.new_span(span) +pub fn noop_fold_attribute(attr: Attribute, fld: &mut T) -> Option { + Some(Attribute { + id: attr.id, + style: attr.style, + value: fld.fold_meta_item(attr.value), + is_sugared_doc: attr.is_sugared_doc, + span: fld.new_span(attr.span), }) } @@ -522,17 +519,18 @@ pub fn noop_fold_meta_list_item(li: NestedMetaItem, fld: &mut T) } } -pub fn noop_fold_meta_item(mi: P, fld: &mut T) -> P { - mi.map(|Spanned {node, span}| Spanned { - node: match node { - MetaItemKind::Word(id) => MetaItemKind::Word(id), - MetaItemKind::List(id, mis) => { - MetaItemKind::List(id, mis.move_map(|e| fld.fold_meta_list_item(e))) - } - MetaItemKind::NameValue(id, s) => MetaItemKind::NameValue(id, s) +pub fn noop_fold_meta_item(mi: MetaItem, fld: &mut T) -> MetaItem { + MetaItem { + name: mi.name, + node: match mi.node { + MetaItemKind::Word => MetaItemKind::Word, + MetaItemKind::List(mis) 
=> { + MetaItemKind::List(mis.move_map(|e| fld.fold_meta_list_item(e))) + }, + MetaItemKind::NameValue(s) => MetaItemKind::NameValue(s), }, - span: fld.new_span(span) - }) + span: fld.new_span(mi.span) + } } pub fn noop_fold_arg(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg { @@ -1334,9 +1332,8 @@ pub fn noop_fold_vis(vis: Visibility, folder: &mut T) -> Visibility { #[cfg(test)] mod tests { use std::io; - use ast; + use ast::{self, Ident}; use util::parser_testing::{string_to_crate, matches_codepattern}; - use parse::token; use print::pprust; use fold; use super::*; @@ -1352,7 +1349,7 @@ mod tests { impl Folder for ToZzIdentFolder { fn fold_ident(&mut self, _: ast::Ident) -> ast::Ident { - token::str_to_ident("zz") + Ident::from_str("zz") } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { fold::noop_fold_mac(mac, self) diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 34280812421a1..5a1b0d4005e17 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -27,6 +27,7 @@ #![feature(associated_consts)] #![feature(const_fn)] #![feature(libc)] +#![feature(optin_builtin_traits)] #![feature(rustc_private)] #![feature(staged_api)] #![feature(str_escape)] @@ -83,7 +84,6 @@ pub mod diagnostics { pub mod diagnostic_list; pub mod util { - pub mod interner; pub mod lev_distance; pub mod node_count; pub mod parser; @@ -118,6 +118,7 @@ pub mod ptr; pub mod show_span; pub mod std_inject; pub mod str; +pub mod symbol; pub mod test; pub mod tokenstream; pub mod visit; diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 983c882eafca3..ded676da3c676 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -11,12 +11,11 @@ use attr; use ast; use syntax_pos::{mk_sp, Span}; -use codemap::{spanned, Spanned}; +use codemap::spanned; use parse::common::SeqSep; use parse::PResult; use parse::token; use parse::parser::{Parser, TokenType}; -use ptr::P; #[derive(PartialEq, Eq, Debug)] enum InnerAttributeParsePolicy<'a> { @@ -49,13 +48,9 @@ impl<'a> Parser<'a> { just_parsed_doc_comment = false; } token::DocComment(s) => { - let attr = ::attr::mk_sugared_doc_attr( - attr::mk_attr_id(), - self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)), - self.span.lo, - self.span.hi - ); - if attr.node.style != ast::AttrStyle::Outer { + let Span { lo, hi, .. } = self.span; + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi); + if attr.style != ast::AttrStyle::Outer { let mut err = self.fatal("expected outer doc comment"); err.note("inner doc comments like this (starting with \ `//!` or `/*!`) can only appear before items"); @@ -145,14 +140,12 @@ impl<'a> Parser<'a> { style = ast::AttrStyle::Inner; } - Ok(Spanned { + Ok(ast::Attribute { + id: attr::mk_attr_id(), + style: style, + value: value, + is_sugared_doc: false, span: span, - node: ast::Attribute_ { - id: attr::mk_attr_id(), - style: style, - value: value, - is_sugared_doc: false, - }, }) } @@ -172,15 +165,14 @@ impl<'a> Parser<'a> { } let attr = self.parse_attribute(true)?; - assert!(attr.node.style == ast::AttrStyle::Inner); + assert!(attr.style == ast::AttrStyle::Inner); attrs.push(attr); } token::DocComment(s) => { // we need to get the position of this token before we bump. let Span { lo, hi, .. 
} = self.span; - let str = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)); - let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi); - if attr.node.style == ast::AttrStyle::Inner { + let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), s, lo, hi); + if attr.style == ast::AttrStyle::Inner { attrs.push(attr); self.bump(); } else { @@ -213,7 +205,7 @@ impl<'a> Parser<'a> { /// /// meta_item : IDENT ( '=' UNSUFFIXED_LIT | '(' meta_item_inner? ')' )? ; /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ; - pub fn parse_meta_item(&mut self) -> PResult<'a, P> { + pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { let nt_meta = match self.token { token::Interpolated(ref nt) => match **nt { token::NtMeta(ref e) => Some(e.clone()), @@ -229,24 +221,15 @@ impl<'a> Parser<'a> { let lo = self.span.lo; let ident = self.parse_ident()?; - let name = self.id_to_interned_str(ident); - match self.token { - token::Eq => { - self.bump(); - let lit = self.parse_unsuffixed_lit()?; - let hi = self.prev_span.hi; - Ok(P(spanned(lo, hi, ast::MetaItemKind::NameValue(name, lit)))) - } - token::OpenDelim(token::Paren) => { - let inner_items = self.parse_meta_seq()?; - let hi = self.prev_span.hi; - Ok(P(spanned(lo, hi, ast::MetaItemKind::List(name, inner_items)))) - } - _ => { - let hi = self.prev_span.hi; - Ok(P(spanned(lo, hi, ast::MetaItemKind::Word(name)))) - } - } + let node = if self.eat(&token::Eq) { + ast::MetaItemKind::NameValue(self.parse_unsuffixed_lit()?) + } else if self.token == token::OpenDelim(token::Paren) { + ast::MetaItemKind::List(self.parse_meta_seq()?) + } else { + ast::MetaItemKind::Word + }; + let hi = self.prev_span.hi; + Ok(ast::MetaItem { name: ident.name, node: node, span: mk_sp(lo, hi) }) } /// matches meta_item_inner : (meta_item | UNSUFFIXED_LIT) ; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index cf48c445c80eb..681dec0ab564d 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -8,13 +8,14 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::{self, Ident}; use syntax_pos::{self, BytePos, CharPos, Pos, Span}; use codemap::CodeMap; use errors::{FatalError, Handler, DiagnosticBuilder}; use ext::tt::transcribe::tt_next_token; -use parse::token::{self, keywords, str_to_ident}; +use parse::token; use str::char_at; +use symbol::{Symbol, keywords}; use rustc_unicode::property::Pattern_White_Space; use std::borrow::Cow; @@ -350,13 +351,13 @@ impl<'a> StringReader<'a> { /// single-byte delimiter). pub fn name_from(&self, start: BytePos) -> ast::Name { debug!("taking an ident from {:?} to {:?}", start, self.pos); - self.with_str_from(start, token::intern) + self.with_str_from(start, Symbol::intern) } /// As name_from, with an explicit endpoint. 
pub fn name_from_to(&self, start: BytePos, end: BytePos) -> ast::Name { debug!("taking an ident from {:?} to {:?}", start, end); - self.with_str_from_to(start, end, token::intern) + self.with_str_from_to(start, end, Symbol::intern) } /// Calls `f` with a string slice of the source text spanning from `start` @@ -492,7 +493,7 @@ impl<'a> StringReader<'a> { if string == "_" { None } else { - Some(token::intern(string)) + Some(Symbol::intern(string)) } }) } @@ -540,7 +541,7 @@ impl<'a> StringReader<'a> { self.with_str_from(start_bpos, |string| { // comments with only more "/"s are not doc comments let tok = if is_doc_comment(string) { - token::DocComment(token::intern(string)) + token::DocComment(Symbol::intern(string)) } else { token::Comment }; @@ -669,7 +670,7 @@ impl<'a> StringReader<'a> { } else { string.into() }; - token::DocComment(token::intern(&string[..])) + token::DocComment(Symbol::intern(&string[..])) } else { token::Comment }; @@ -758,7 +759,7 @@ impl<'a> StringReader<'a> { self.err_span_(start_bpos, self.pos, "no valid digits found for number"); - return token::Integer(token::intern("0")); + return token::Integer(Symbol::intern("0")); } // might be a float, but don't be greedy if this is actually an @@ -1097,7 +1098,7 @@ impl<'a> StringReader<'a> { token::Underscore } else { // FIXME: perform NFKC normalization here. (Issue #2253) - token::Ident(str_to_ident(string)) + token::Ident(Ident::from_str(string)) } })); } @@ -1277,13 +1278,13 @@ impl<'a> StringReader<'a> { // expansion purposes. See #12512 for the gory details of why // this is necessary. let ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(&format!("'{}", lifetime_name)) + Ident::from_str(&format!("'{}", lifetime_name)) }); // Conjure up a "keyword checking ident" to make sure that // the lifetime name is not a keyword. 
let keyword_checking_ident = self.with_str_from(start, |lifetime_name| { - str_to_ident(lifetime_name) + Ident::from_str(lifetime_name) }); let keyword_checking_token = &token::Ident(keyword_checking_ident); let last_bpos = self.pos; @@ -1310,7 +1311,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from(start) } else { - token::intern("0") + Symbol::intern("0") }; self.bump(); // advance ch past token let suffix = self.scan_optional_raw_name(); @@ -1352,7 +1353,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from(start_bpos + BytePos(1)) } else { - token::intern("??") + Symbol::intern("??") }; self.bump(); let suffix = self.scan_optional_raw_name(); @@ -1424,7 +1425,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from_to(content_start_bpos, content_end_bpos) } else { - token::intern("??") + Symbol::intern("??") }; let suffix = self.scan_optional_raw_name(); return Ok(token::Literal(token::StrRaw(id, hash_count), suffix)); @@ -1551,7 +1552,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from(start) } else { - token::intern("?") + Symbol::intern("?") }; self.bump(); // advance ch past token return token::Byte(id); @@ -1584,7 +1585,7 @@ impl<'a> StringReader<'a> { let id = if valid { self.name_from(start) } else { - token::intern("??") + Symbol::intern("??") }; self.bump(); return token::ByteStr(id); @@ -1700,11 +1701,12 @@ fn ident_continue(c: Option) -> bool { mod tests { use super::*; + use ast::Ident; + use symbol::Symbol; use syntax_pos::{BytePos, Span, NO_EXPANSION}; use codemap::CodeMap; use errors; use parse::token; - use parse::token::str_to_ident; use std::io; use std::rc::Rc; @@ -1732,7 +1734,7 @@ mod tests { &sh, "/* my source file */ fn main() { println!(\"zebra\"); }\n" .to_string()); - let id = str_to_ident("fn"); + let id = Ident::from_str("fn"); assert_eq!(string_reader.next_token().tok, token::Comment); assert_eq!(string_reader.next_token().tok, token::Whitespace); let tok1 = string_reader.next_token(); @@ -1751,7 +1753,7 @@ mod tests { // read another token: let tok3 = string_reader.next_token(); let tok4 = TokenAndSpan { - tok: token::Ident(str_to_ident("main")), + tok: token::Ident(Ident::from_str("main")), sp: Span { lo: BytePos(24), hi: BytePos(28), @@ -1773,7 +1775,7 @@ mod tests { // make the identifier by looking up the string in the interner fn mk_ident(id: &str) -> token::Token { - token::Ident(str_to_ident(id)) + token::Ident(Ident::from_str(id)) } #[test] @@ -1813,7 +1815,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, - token::Literal(token::Char(token::intern("a")), None)); + token::Literal(token::Char(Symbol::intern("a")), None)); } #[test] @@ -1821,7 +1823,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, - token::Literal(token::Char(token::intern(" ")), None)); + token::Literal(token::Char(Symbol::intern(" ")), None)); } #[test] @@ -1829,7 +1831,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, - token::Literal(token::Char(token::intern("\\n")), None)); + token::Literal(token::Char(Symbol::intern("\\n")), None)); } #[test] @@ -1837,7 +1839,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, - 
token::Lifetime(token::str_to_ident("'abc"))); + token::Lifetime(Ident::from_str("'abc"))); } #[test] @@ -1847,7 +1849,7 @@ mod tests { assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) .next_token() .tok, - token::Literal(token::StrRaw(token::intern("\"#a\\b\x00c\""), 3), None)); + token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None)); } #[test] @@ -1857,11 +1859,11 @@ mod tests { macro_rules! test { ($input: expr, $tok_type: ident, $tok_contents: expr) => {{ assert_eq!(setup(&cm, &sh, format!("{}suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(token::intern($tok_contents)), - Some(token::intern("suffix")))); + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), + Some(Symbol::intern("suffix")))); // with a whitespace separator: assert_eq!(setup(&cm, &sh, format!("{} suffix", $input)).next_token().tok, - token::Literal(token::$tok_type(token::intern($tok_contents)), + token::Literal(token::$tok_type(Symbol::intern($tok_contents)), None)); }} } @@ -1877,14 +1879,14 @@ mod tests { test!("1.0e10", Float, "1.0e10"); assert_eq!(setup(&cm, &sh, "2us".to_string()).next_token().tok, - token::Literal(token::Integer(token::intern("2")), - Some(token::intern("us")))); + token::Literal(token::Integer(Symbol::intern("2")), + Some(Symbol::intern("us")))); assert_eq!(setup(&cm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::StrRaw(token::intern("raw"), 3), - Some(token::intern("suffix")))); + token::Literal(token::StrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); assert_eq!(setup(&cm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok, - token::Literal(token::ByteStrRaw(token::intern("raw"), 3), - Some(token::intern("suffix")))); + token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3), + Some(Symbol::intern("suffix")))); } #[test] @@ -1904,7 +1906,7 @@ mod tests { _ => panic!("expected a comment!"), } assert_eq!(lexer.next_token().tok, - token::Literal(token::Char(token::intern("a")), None)); + token::Literal(token::Char(Symbol::intern("a")), None)); } #[test] @@ -1917,6 +1919,6 @@ mod tests { assert_eq!(comment.sp, ::syntax_pos::mk_sp(BytePos(0), BytePos(7))); assert_eq!(lexer.next_token().tok, token::Whitespace); assert_eq!(lexer.next_token().tok, - token::DocComment(token::intern("/// test"))); + token::DocComment(Symbol::intern("/// test"))); } } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 12408c7d3c95b..be340a5b5aa93 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -16,12 +16,13 @@ use syntax_pos::{self, Span, FileMap}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; use feature_gate::UnstableFeatures; use parse::parser::Parser; -use parse::token::InternedString; use ptr::P; use str::char_at; +use symbol::Symbol; use tokenstream; use std::cell::RefCell; +use std::collections::HashSet; use std::iter; use std::path::{Path, PathBuf}; use std::rc::Rc; @@ -64,7 +65,7 @@ impl ParseSess { ParseSess { span_diagnostic: handler, unstable_features: UnstableFeatures::from_environment(), - config: Vec::new(), + config: HashSet::new(), included_mod_stack: RefCell::new(vec![]), code_map: code_map } @@ -116,7 +117,7 @@ pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a Pa } pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) - -> PResult<'a, P> { + -> PResult<'a, ast::MetaItem> { new_parser_from_source_str(sess, name, source).parse_meta_item() } @@ 
-371,13 +372,18 @@ fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool { s[1..].chars().all(|c| '0' <= c && c <= '9') } -fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, - sd: &Handler, sp: Span) -> ast::LitKind { +fn filtered_float_lit(data: Symbol, suffix: Option, sd: &Handler, sp: Span) + -> ast::LitKind { debug!("filtered_float_lit: {}, {:?}", data, suffix); - match suffix.as_ref().map(|s| &**s) { - Some("f32") => ast::LitKind::Float(data, ast::FloatTy::F32), - Some("f64") => ast::LitKind::Float(data, ast::FloatTy::F64), - Some(suf) => { + let suffix = match suffix { + Some(suffix) => suffix, + None => return ast::LitKind::FloatUnsuffixed(data), + }; + + match &*suffix.as_str() { + "f32" => ast::LitKind::Float(data, ast::FloatTy::F32), + "f64" => ast::LitKind::Float(data, ast::FloatTy::F64), + suf => { if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { // if it looks like a width, lets try to be helpful. sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..])) @@ -391,16 +397,13 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, ast::LitKind::FloatUnsuffixed(data) } - None => ast::LitKind::FloatUnsuffixed(data) } } -pub fn float_lit(s: &str, suffix: Option, - sd: &Handler, sp: Span) -> ast::LitKind { +pub fn float_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> ast::LitKind { debug!("float_lit: {:?}, {:?}", s, suffix); // FIXME #2252: bounds checking float literals is deferred until trans let s = s.chars().filter(|&c| c != '_').collect::(); - let data = token::intern_and_get_ident(&s); - filtered_float_lit(data, suffix.as_ref().map(|s| &**s), sd, sp) + filtered_float_lit(Symbol::intern(&s), suffix, sd, sp) } /// Parse a string representing a byte literal into its final form. Similar to `char_lit` @@ -495,11 +498,7 @@ pub fn byte_str_lit(lit: &str) -> Rc> { Rc::new(res) } -pub fn integer_lit(s: &str, - suffix: Option, - sd: &Handler, - sp: Span) - -> ast::LitKind { +pub fn integer_lit(s: &str, suffix: Option, sd: &Handler, sp: Span) -> ast::LitKind { // s can only be ascii, byte indexing is fine let s2 = s.chars().filter(|&c| c != '_').collect::(); @@ -521,16 +520,15 @@ pub fn integer_lit(s: &str, } // 1f64 and 2f32 etc. are valid float literals. 
- if let Some(ref suf) = suffix { - if looks_like_width_suffix(&['f'], suf) { + if let Some(suf) = suffix { + if looks_like_width_suffix(&['f'], &suf.as_str()) { match base { 16 => sd.span_err(sp, "hexadecimal float literal is not supported"), 8 => sd.span_err(sp, "octal float literal is not supported"), 2 => sd.span_err(sp, "binary float literal is not supported"), _ => () } - let ident = token::intern_and_get_ident(&s); - return filtered_float_lit(ident, Some(&suf), sd, sp) + return filtered_float_lit(Symbol::intern(&s), Some(suf), sd, sp) } } @@ -538,9 +536,9 @@ pub fn integer_lit(s: &str, s = &s[2..]; } - if let Some(ref suf) = suffix { - if suf.is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")} - ty = match &**suf { + if let Some(suf) = suffix { + if suf.as_str().is_empty() { sd.span_bug(sp, "found empty literal suffix in Some")} + ty = match &*suf.as_str() { "isize" => ast::LitIntType::Signed(ast::IntTy::Is), "i8" => ast::LitIntType::Signed(ast::IntTy::I8), "i16" => ast::LitIntType::Signed(ast::IntTy::I16), @@ -551,7 +549,7 @@ pub fn integer_lit(s: &str, "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16), "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32), "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64), - _ => { + suf => { // i and u look like widths, so lets // give an error message along those lines if looks_like_width_suffix(&['i', 'u'], suf) { @@ -599,12 +597,11 @@ mod tests { use std::rc::Rc; use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION}; use codemap::Spanned; - use ast::{self, PatKind}; + use ast::{self, Ident, PatKind}; use abi::Abi; use attr::first_attr_value_str_by_name; use parse; use parse::parser::Parser; - use parse::token::{str_to_ident}; use print::pprust::item_to_string; use ptr::P; use tokenstream::{self, TokenTree}; @@ -626,7 +623,7 @@ mod tests { global: false, segments: vec![ ast::PathSegment { - identifier: str_to_ident("a"), + identifier: Ident::from_str("a"), parameters: ast::PathParameters::none(), } ], @@ -645,11 +642,11 @@ mod tests { global: true, segments: vec![ ast::PathSegment { - identifier: str_to_ident("a"), + identifier: Ident::from_str("a"), parameters: ast::PathParameters::none(), }, ast::PathSegment { - identifier: str_to_ident("b"), + identifier: Ident::from_str("b"), parameters: ast::PathParameters::none(), } ] @@ -678,8 +675,8 @@ mod tests { Some(&TokenTree::Token(_, token::Ident(name_zip))), Some(&TokenTree::Delimited(_, ref macro_delimed)), ) - if name_macro_rules.name.as_str() == "macro_rules" - && name_zip.name.as_str() == "zip" => { + if name_macro_rules.name == "macro_rules" + && name_zip.name == "zip" => { let tts = ¯o_delimed.tts[..]; match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { ( @@ -696,8 +693,7 @@ mod tests { Some(&TokenTree::Token(_, token::Dollar)), Some(&TokenTree::Token(_, token::Ident(ident))), ) - if first_delimed.delim == token::Paren - && ident.name.as_str() == "a" => {}, + if first_delimed.delim == token::Paren && ident.name == "a" => {}, _ => panic!("value 3: {:?}", **first_delimed), } let tts = &second_delimed.tts[..]; @@ -708,7 +704,7 @@ mod tests { Some(&TokenTree::Token(_, token::Ident(ident))), ) if second_delimed.delim == token::Paren - && ident.name.as_str() == "a" => {}, + && ident.name == "a" => {}, _ => panic!("value 4: {:?}", **second_delimed), } }, @@ -724,17 +720,17 @@ mod tests { let tts = string_to_tts("fn a (b : i32) { b; }".to_string()); let expected = vec![ - TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))), - TokenTree::Token(sp(3, 4), 
token::Ident(str_to_ident("a"))), + TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))), + TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))), TokenTree::Delimited( sp(5, 14), Rc::new(tokenstream::Delimited { delim: token::DelimToken::Paren, open_span: sp(5, 6), tts: vec![ - TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))), + TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))), TokenTree::Token(sp(8, 9), token::Colon), - TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))), + TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))), ], close_span: sp(13, 14), })), @@ -744,7 +740,7 @@ mod tests { delim: token::DelimToken::Brace, open_span: sp(15, 16), tts: vec![ - TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))), + TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))), TokenTree::Token(sp(18, 19), token::Semi), ], close_span: sp(20, 21), @@ -765,7 +761,7 @@ mod tests { global: false, segments: vec![ ast::PathSegment { - identifier: str_to_ident("d"), + identifier: Ident::from_str("d"), parameters: ast::PathParameters::none(), } ], @@ -788,7 +784,7 @@ mod tests { global:false, segments: vec![ ast::PathSegment { - identifier: str_to_ident("b"), + identifier: Ident::from_str("b"), parameters: ast::PathParameters::none(), } ], @@ -812,7 +808,7 @@ mod tests { id: ast::DUMMY_NODE_ID, node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable), Spanned{ span:sp(0, 1), - node: str_to_ident("b") + node: Ident::from_str("b") }, None), span: sp(0,1)})); @@ -824,7 +820,7 @@ mod tests { // this test depends on the intern order of "fn" and "i32" assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()), Some( - P(ast::Item{ident:str_to_ident("a"), + P(ast::Item{ident:Ident::from_str("a"), attrs:Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Fn(P(ast::FnDecl { @@ -835,8 +831,7 @@ mod tests { global:false, segments: vec![ ast::PathSegment { - identifier: - str_to_ident("i32"), + identifier: Ident::from_str("i32"), parameters: ast::PathParameters::none(), } ], @@ -849,7 +844,7 @@ mod tests { ast::BindingMode::ByValue(ast::Mutability::Immutable), Spanned{ span: sp(6,7), - node: str_to_ident("b")}, + node: Ident::from_str("b")}, None ), span: sp(6,7) @@ -884,9 +879,7 @@ mod tests { global:false, segments: vec![ ast::PathSegment { - identifier: - str_to_ident( - "b"), + identifier: Ident::from_str("b"), parameters: ast::PathParameters::none(), } @@ -998,12 +991,12 @@ mod tests { let item = parse_item_from_source_str(name.clone(), source, &sess) .unwrap().unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(&doc[..], "/// doc comment"); + assert_eq!(doc, "/// doc comment"); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name.clone(), source, &sess) .unwrap().unwrap(); - let docs = item.attrs.iter().filter(|a| &*a.name() == "doc") + let docs = item.attrs.iter().filter(|a| a.name() == "doc") .map(|a| a.value_str().unwrap().to_string()).collect::>(); let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()]; assert_eq!(&docs[..], b); @@ -1011,7 +1004,7 @@ mod tests { let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); - assert_eq!(&doc[..], "/** doc comment\n * with CRLF */"); + assert_eq!(doc, "/** doc 
comment\n * with CRLF */"); } #[test] diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 2e38ca82d5dbb..4997e464c2bf5 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -48,13 +48,14 @@ use parse::classify; use parse::common::SeqSep; use parse::lexer::{Reader, TokenAndSpan}; use parse::obsolete::ObsoleteSyntax; -use parse::token::{self, intern, keywords, MatchNt, SubstNt, InternedString}; +use parse::token::{self, MatchNt, SubstNt}; use parse::{new_sub_parser_from_file, ParseSess}; use util::parser::{AssocOp, Fixity}; use print::pprust; use ptr::P; use parse::PResult; use tokenstream::{self, Delimited, SequenceRepetition, TokenTree}; +use symbol::{Symbol, keywords}; use util::ThinVec; use std::collections::HashSet; @@ -998,10 +999,6 @@ impl<'a> Parser<'a> { &self.sess.span_diagnostic } - pub fn id_to_interned_str(&mut self, id: Ident) -> InternedString { - id.name.as_str() - } - /// Is the current token one of the keywords that signals a bare function /// type? pub fn token_is_bare_fn_keyword(&mut self) -> bool { @@ -1523,34 +1520,28 @@ impl<'a> Parser<'a> { // float literals, so all the handling is done // internally. token::Integer(s) => { - (false, parse::integer_lit(&s.as_str(), - suf.as_ref().map(|s| s.as_str()), - &self.sess.span_diagnostic, - self.span)) + let diag = &self.sess.span_diagnostic; + (false, parse::integer_lit(&s.as_str(), suf, diag, self.span)) } token::Float(s) => { - (false, parse::float_lit(&s.as_str(), - suf.as_ref().map(|s| s.as_str()), - &self.sess.span_diagnostic, - self.span)) + let diag = &self.sess.span_diagnostic; + (false, parse::float_lit(&s.as_str(), suf, diag, self.span)) } token::Str_(s) => { - (true, - LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())), - ast::StrStyle::Cooked)) + let s = Symbol::intern(&parse::str_lit(&s.as_str())); + (true, LitKind::Str(s, ast::StrStyle::Cooked)) } token::StrRaw(s, n) => { - (true, - LitKind::Str( - token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())), - ast::StrStyle::Raw(n))) + let s = Symbol::intern(&parse::raw_str_lit(&s.as_str())); + (true, LitKind::Str(s, ast::StrStyle::Raw(n))) + } + token::ByteStr(i) => { + (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str()))) + } + token::ByteStrRaw(i, _) => { + (true, LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))) } - token::ByteStr(i) => - (true, LitKind::ByteStr(parse::byte_str_lit(&i.as_str()))), - token::ByteStrRaw(i, _) => - (true, - LitKind::ByteStr(Rc::new(i.to_string().into_bytes()))), }; if suffix_illegal { @@ -2544,7 +2535,7 @@ impl<'a> Parser<'a> { let prev_span = self.prev_span; let fstr = n.as_str(); let mut err = self.diagnostic().struct_span_err(prev_span, - &format!("unexpected token: `{}`", n.as_str())); + &format!("unexpected token: `{}`", n)); if fstr.chars().all(|x| "0123456789.".contains(x)) { let float = match fstr.parse::().ok() { Some(f) => f, @@ -2627,7 +2618,7 @@ impl<'a> Parser<'a> { }))); } else if self.token.is_keyword(keywords::Crate) { let ident = match self.token { - token::Ident(id) => ast::Ident { name: token::intern("$crate"), ..id }, + token::Ident(id) => ast::Ident { name: Symbol::intern("$crate"), ..id }, _ => unreachable!(), }; self.bump(); @@ -3751,9 +3742,7 @@ impl<'a> Parser<'a> { /// Emit an expected item after attributes error. fn expected_item_err(&self, attrs: &[Attribute]) { let message = match attrs.last() { - Some(&Attribute { node: ast::Attribute_ { is_sugared_doc: true, .. }, .. 
}) => { - "expected item after doc comment" - } + Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment", _ => "expected item after attributes", }; @@ -4837,7 +4826,7 @@ impl<'a> Parser<'a> { Visibility::Inherited => (), _ => { let is_macro_rules: bool = match self.token { - token::Ident(sid) => sid.name == intern("macro_rules"), + token::Ident(sid) => sid.name == Symbol::intern("macro_rules"), _ => false, }; if is_macro_rules { @@ -5304,17 +5293,16 @@ impl<'a> Parser<'a> { fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) -> Restrictions { if let Some(path) = ::attr::first_attr_value_str_by_name(attrs, "path") { - self.directory.push(&*path); + self.directory.push(&*path.as_str()); self.restrictions - Restrictions::NO_NONINLINE_MOD } else { - let default_path = self.id_to_interned_str(id); - self.directory.push(&*default_path); + self.directory.push(&*id.name.as_str()); self.restrictions } } pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option { - ::attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d)) + ::attr::first_attr_value_str_by_name(attrs, "path").map(|d| dir_path.join(&*d.as_str())) } /// Returns either a path to a module, or . @@ -6128,26 +6116,17 @@ impl<'a> Parser<'a> { }) } - pub fn parse_optional_str(&mut self) - -> Option<(InternedString, - ast::StrStyle, - Option)> { + pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option)> { let ret = match self.token { - token::Literal(token::Str_(s), suf) => { - let s = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)); - (s, ast::StrStyle::Cooked, suf) - } - token::Literal(token::StrRaw(s, n), suf) => { - let s = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)); - (s, ast::StrStyle::Raw(n), suf) - } + token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf), + token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf), _ => return None }; self.bump(); Some(ret) } - pub fn parse_str(&mut self) -> PResult<'a, (InternedString, StrStyle)> { + pub fn parse_str(&mut self) -> PResult<'a, (Symbol, StrStyle)> { match self.parse_optional_str() { Some((s, style, suf)) => { let sp = self.prev_span; diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 0198ee073d239..8ac39dd462e7c 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -16,13 +16,10 @@ pub use self::Token::*; use ast::{self}; use ptr::P; -use util::interner::Interner; +use symbol::keywords; use tokenstream; -use serialize::{Decodable, Decoder, Encodable, Encoder}; -use std::cell::RefCell; use std::fmt; -use std::ops::Deref; use std::rc::Rc; #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] @@ -301,7 +298,7 @@ pub enum Nonterminal { NtTy(P), NtIdent(ast::SpannedIdent), /// Stuff inside brackets for attributes - NtMeta(P), + NtMeta(ast::MetaItem), NtPath(ast::Path), NtTT(tokenstream::TokenTree), // These are not exposed to macros, but are used by quasiquote. @@ -335,270 +332,3 @@ impl fmt::Debug for Nonterminal { } } } - -// In this macro, there is the requirement that the name (the number) must be monotonically -// increasing by one in the special identifiers, starting at 0; the same holds for the keywords, -// except starting from the next number instead of zero. -macro_rules! 
declare_keywords {( - $( ($index: expr, $konst: ident, $string: expr) )* -) => { - pub mod keywords { - use ast; - #[derive(Clone, Copy, PartialEq, Eq)] - pub struct Keyword { - ident: ast::Ident, - } - impl Keyword { - #[inline] pub fn ident(self) -> ast::Ident { self.ident } - #[inline] pub fn name(self) -> ast::Name { self.ident.name } - } - $( - #[allow(non_upper_case_globals)] - pub const $konst: Keyword = Keyword { - ident: ast::Ident::with_empty_ctxt(ast::Name($index)) - }; - )* - } - - fn mk_fresh_ident_interner() -> IdentInterner { - Interner::prefill(&[$($string,)*]) - } -}} - -// NB: leaving holes in the ident table is bad! a different ident will get -// interned with the id from the hole, but it will be between the min and max -// of the reserved words, and thus tagged as "reserved". -// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, -// this should be rarely necessary though if the keywords are kept in alphabetic order. -declare_keywords! { - // Invalid identifier - (0, Invalid, "") - - // Strict keywords used in the language. - (1, As, "as") - (2, Box, "box") - (3, Break, "break") - (4, Const, "const") - (5, Continue, "continue") - (6, Crate, "crate") - (7, Else, "else") - (8, Enum, "enum") - (9, Extern, "extern") - (10, False, "false") - (11, Fn, "fn") - (12, For, "for") - (13, If, "if") - (14, Impl, "impl") - (15, In, "in") - (16, Let, "let") - (17, Loop, "loop") - (18, Match, "match") - (19, Mod, "mod") - (20, Move, "move") - (21, Mut, "mut") - (22, Pub, "pub") - (23, Ref, "ref") - (24, Return, "return") - (25, SelfValue, "self") - (26, SelfType, "Self") - (27, Static, "static") - (28, Struct, "struct") - (29, Super, "super") - (30, Trait, "trait") - (31, True, "true") - (32, Type, "type") - (33, Unsafe, "unsafe") - (34, Use, "use") - (35, Where, "where") - (36, While, "while") - - // Keywords reserved for future use. - (37, Abstract, "abstract") - (38, Alignof, "alignof") - (39, Become, "become") - (40, Do, "do") - (41, Final, "final") - (42, Macro, "macro") - (43, Offsetof, "offsetof") - (44, Override, "override") - (45, Priv, "priv") - (46, Proc, "proc") - (47, Pure, "pure") - (48, Sizeof, "sizeof") - (49, Typeof, "typeof") - (50, Unsized, "unsized") - (51, Virtual, "virtual") - (52, Yield, "yield") - - // Weak keywords, have special meaning only in specific contexts. - (53, Default, "default") - (54, StaticLifetime, "'static") - (55, Union, "union") -} - -// looks like we can get rid of this completely... -pub type IdentInterner = Interner; - -// if an interner exists in TLS, return it. Otherwise, prepare a -// fresh one. -// FIXME(eddyb) #8726 This should probably use a thread-local reference. -pub fn with_ident_interner T>(f: F) -> T { - thread_local!(static KEY: RefCell = { - RefCell::new(mk_fresh_ident_interner()) - }); - KEY.with(|interner| f(&mut *interner.borrow_mut())) -} - -/// Reset the ident interner to its initial state. -pub fn reset_ident_interner() { - with_ident_interner(|interner| *interner = mk_fresh_ident_interner()); -} - -pub fn clear_ident_interner() { - with_ident_interner(|interner| *interner = IdentInterner::new()); -} - -/// Represents a string stored in the thread-local interner. Because the -/// interner lives for the life of the thread, this can be safely treated as an -/// immortal string, as long as it never crosses between threads. -/// -/// FIXME(pcwalton): You must be careful about what you do in the destructors -/// of objects stored in TLS, because they may run after the interner is -/// destroyed. 
In particular, they must not access string contents. This can -/// be fixed in the future by just leaking all strings until thread death -/// somehow. -#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)] -pub struct InternedString { - string: Rc, -} - -impl InternedString { - #[inline] - pub fn new(string: &'static str) -> InternedString { - InternedString { - string: Rc::__from_str(string), - } - } - - #[inline] - pub fn new_from_name(name: ast::Name) -> InternedString { - with_ident_interner(|interner| InternedString { string: interner.get(name) }) - } -} - -impl Deref for InternedString { - type Target = str; - - fn deref(&self) -> &str { &self.string } -} - -impl fmt::Debug for InternedString { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(&self.string, f) - } -} - -impl fmt::Display for InternedString { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.string, f) - } -} - -impl<'a> PartialEq<&'a str> for InternedString { - #[inline(always)] - fn eq(&self, other: & &'a str) -> bool { - PartialEq::eq(&self.string[..], *other) - } - #[inline(always)] - fn ne(&self, other: & &'a str) -> bool { - PartialEq::ne(&self.string[..], *other) - } -} - -impl<'a> PartialEq for &'a str { - #[inline(always)] - fn eq(&self, other: &InternedString) -> bool { - PartialEq::eq(*self, &other.string[..]) - } - #[inline(always)] - fn ne(&self, other: &InternedString) -> bool { - PartialEq::ne(*self, &other.string[..]) - } -} - -impl PartialEq for InternedString { - #[inline(always)] - fn eq(&self, other: &str) -> bool { - PartialEq::eq(&self.string[..], other) - } - #[inline(always)] - fn ne(&self, other: &str) -> bool { - PartialEq::ne(&self.string[..], other) - } -} - -impl PartialEq for str { - #[inline(always)] - fn eq(&self, other: &InternedString) -> bool { - PartialEq::eq(self, &other.string[..]) - } - #[inline(always)] - fn ne(&self, other: &InternedString) -> bool { - PartialEq::ne(self, &other.string[..]) - } -} - -impl Decodable for InternedString { - fn decode(d: &mut D) -> Result { - Ok(intern(&d.read_str()?).as_str()) - } -} - -impl Encodable for InternedString { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - s.emit_str(&self.string) - } -} - -/// Interns and returns the string contents of an identifier, using the -/// thread-local interner. -#[inline] -pub fn intern_and_get_ident(s: &str) -> InternedString { - intern(s).as_str() -} - -/// Maps a string to its interned representation. -#[inline] -pub fn intern(s: &str) -> ast::Name { - with_ident_interner(|interner| interner.intern(s)) -} - -/// gensym's a new usize, using the current interner. -#[inline] -pub fn gensym(s: &str) -> ast::Name { - with_ident_interner(|interner| interner.gensym(s)) -} - -/// Maps a string to an identifier with an empty syntax context. -#[inline] -pub fn str_to_ident(s: &str) -> ast::Ident { - ast::Ident::with_empty_ctxt(intern(s)) -} - -/// Maps a string to a gensym'ed identifier. -#[inline] -pub fn gensym_ident(s: &str) -> ast::Ident { - ast::Ident::with_empty_ctxt(gensym(s)) -} - -// create a fresh name that maps to the same string as the old one. -// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src))); -// that is, that the new name and the old one are connected to ptr_eq strings. -pub fn fresh_name(src: ast::Ident) -> ast::Name { - with_ident_interner(|interner| interner.gensym_copy(src.name)) - // following: debug version. 
Could work in final except that it's incompatible with - // good error messages and uses of struct names in ambiguous could-be-binding - // locations. Also definitely destroys the guarantee given above about ptr_eq. - /*let num = rand::thread_rng().gen_uint_range(0,0xffff); - gensym(format!("{}_{}",ident_to_string(src),num))*/ -} diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 203c19285ac2c..3820f5ea90ccc 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -19,7 +19,7 @@ use attr; use codemap::{self, CodeMap}; use syntax_pos::{self, BytePos}; use errors; -use parse::token::{self, keywords, BinOpToken, Token, InternedString}; +use parse::token::{self, BinOpToken, Token}; use parse::lexer::comments; use parse; use print::pp::{self, break_offset, word, space, zerobreak, hardbreak}; @@ -27,6 +27,7 @@ use print::pp::{Breaks, eof}; use print::pp::Breaks::{Consistent, Inconsistent}; use ptr::P; use std_inject; +use symbol::{Symbol, keywords}; use tokenstream::{self, TokenTree}; use std::ascii; @@ -119,14 +120,13 @@ pub fn print_crate<'a>(cm: &'a CodeMap, // of the feature gate, so we fake them up here. // #![feature(prelude_import)] - let prelude_import_meta = attr::mk_list_word_item(InternedString::new("prelude_import")); - let list = attr::mk_list_item(InternedString::new("feature"), - vec![prelude_import_meta]); + let prelude_import_meta = attr::mk_list_word_item(Symbol::intern("prelude_import")); + let list = attr::mk_list_item(Symbol::intern("feature"), vec![prelude_import_meta]); let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), list); try!(s.print_attribute(&fake_attr)); // #![no_std] - let no_std_meta = attr::mk_word_item(InternedString::new("no_std")); + let no_std_meta = attr::mk_word_item(Symbol::intern("no_std")); let fake_attr = attr::mk_attr_inner(attr::mk_attr_id(), no_std_meta); try!(s.print_attribute(&fake_attr)); } @@ -630,7 +630,7 @@ pub trait PrintState<'a> { _ => () } match lit.node { - ast::LitKind::Str(ref st, style) => self.print_string(&st, style), + ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style), ast::LitKind::Byte(byte) => { let mut res = String::from("b'"); res.extend(ascii::escape_default(byte).map(|c| c as char)); @@ -664,7 +664,7 @@ pub trait PrintState<'a> { &f, t.ty_to_string())) } - ast::LitKind::FloatUnsuffixed(ref f) => word(self.writer(), &f[..]), + ast::LitKind::FloatUnsuffixed(ref f) => word(self.writer(), &f.as_str()), ast::LitKind::Bool(val) => { if val { word(self.writer(), "true") } else { word(self.writer(), "false") } } @@ -727,7 +727,7 @@ pub trait PrintState<'a> { trailing_hardbreak: bool) -> io::Result<()> { let mut count = 0; for attr in attrs { - if attr.node.style == kind { + if attr.style == kind { try!(self.print_attribute_inline(attr, is_inline)); if is_inline { try!(self.nbsp()); @@ -751,11 +751,11 @@ pub trait PrintState<'a> { try!(self.hardbreak_if_not_bol()); } try!(self.maybe_print_comment(attr.span.lo)); - if attr.node.is_sugared_doc { - try!(word(self.writer(), &attr.value_str().unwrap())); + if attr.is_sugared_doc { + try!(word(self.writer(), &attr.value_str().unwrap().as_str())); hardbreak(self.writer()) } else { - match attr.node.style { + match attr.style { ast::AttrStyle::Inner => try!(word(self.writer(), "#![")), ast::AttrStyle::Outer => try!(word(self.writer(), "#[")), } @@ -778,16 +778,16 @@ pub trait PrintState<'a> { fn print_meta_item(&mut self, item: &ast::MetaItem) -> io::Result<()> { try!(self.ibox(INDENT_UNIT)); match item.node { - 
ast::MetaItemKind::Word(ref name) => { - try!(word(self.writer(), &name)); + ast::MetaItemKind::Word => { + try!(word(self.writer(), &item.name.as_str())); } - ast::MetaItemKind::NameValue(ref name, ref value) => { - try!(self.word_space(&name[..])); + ast::MetaItemKind::NameValue(ref value) => { + try!(self.word_space(&item.name.as_str())); try!(self.word_space("=")); try!(self.print_literal(value)); } - ast::MetaItemKind::List(ref name, ref items) => { - try!(word(self.writer(), &name)); + ast::MetaItemKind::List(ref items) => { + try!(word(self.writer(), &item.name.as_str())); try!(self.popen()); try!(self.commasep(Consistent, &items[..], @@ -2220,19 +2220,18 @@ impl<'a> State<'a> { ast::ExprKind::InlineAsm(ref a) => { try!(word(&mut self.s, "asm!")); try!(self.popen()); - try!(self.print_string(&a.asm, a.asm_str_style)); + try!(self.print_string(&a.asm.as_str(), a.asm_str_style)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, &a.outputs, - |s, out| { - let mut ch = out.constraint.chars(); + try!(self.commasep(Inconsistent, &a.outputs, |s, out| { + let constraint = out.constraint.as_str(); + let mut ch = constraint.chars(); match ch.next() { Some('=') if out.is_rw => { try!(s.print_string(&format!("+{}", ch.as_str()), ast::StrStyle::Cooked)) } - _ => try!(s.print_string(&out.constraint, - ast::StrStyle::Cooked)) + _ => try!(s.print_string(&constraint, ast::StrStyle::Cooked)) } try!(s.popen()); try!(s.print_expr(&out.expr)); @@ -2242,9 +2241,8 @@ impl<'a> State<'a> { try!(space(&mut self.s)); try!(self.word_space(":")); - try!(self.commasep(Inconsistent, &a.inputs, - |s, &(ref co, ref o)| { - try!(s.print_string(&co, ast::StrStyle::Cooked)); + try!(self.commasep(Inconsistent, &a.inputs, |s, &(co, ref o)| { + try!(s.print_string(&co.as_str(), ast::StrStyle::Cooked)); try!(s.popen()); try!(s.print_expr(&o)); try!(s.pclose()); @@ -2255,7 +2253,7 @@ impl<'a> State<'a> { try!(self.commasep(Inconsistent, &a.clobbers, |s, co| { - try!(s.print_string(&co, ast::StrStyle::Cooked)); + try!(s.print_string(&co.as_str(), ast::StrStyle::Cooked)); Ok(()) })); @@ -3082,12 +3080,11 @@ mod tests { use ast; use codemap; - use parse::token; use syntax_pos; #[test] fn test_fun_to_string() { - let abba_ident = token::str_to_ident("abba"); + let abba_ident = ast::Ident::from_str("abba"); let decl = ast::FnDecl { inputs: Vec::new(), @@ -3103,7 +3100,7 @@ mod tests { #[test] fn test_variant_to_string() { - let ident = token::str_to_ident("principal_skinner"); + let ident = ast::Ident::from_str("principal_skinner"); let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ { name: ident, diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 1b63a2b70763a..6a291ad9c408a 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -10,10 +10,10 @@ use ast; use attr; +use symbol::{Symbol, keywords}; use syntax_pos::{DUMMY_SP, Span}; use codemap::{self, ExpnInfo, NameAndSpan, MacroAttribute}; -use parse::token::{intern, InternedString, keywords}; -use parse::{token, ParseSess}; +use parse::ParseSess; use ptr::P; /// Craft a span that will be ignored by the stability lint's @@ -23,7 +23,7 @@ fn ignored_span(sess: &ParseSess, sp: Span) -> Span { let info = ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - format: MacroAttribute(intern("std_inject")), + format: MacroAttribute(Symbol::intern("std_inject")), span: None, allow_internal_unstable: true, } @@ -53,14 +53,14 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, None => return krate, }; - let 
crate_name = token::intern(&alt_std_name.unwrap_or(name.to_string())); + let crate_name = Symbol::intern(&alt_std_name.unwrap_or(name.to_string())); krate.module.items.insert(0, P(ast::Item { attrs: vec![attr::mk_attr_outer(attr::mk_attr_id(), - attr::mk_word_item(InternedString::new("macro_use")))], + attr::mk_word_item(Symbol::intern("macro_use")))], vis: ast::Visibility::Inherited, node: ast::ItemKind::ExternCrate(Some(crate_name)), - ident: token::str_to_ident(name), + ident: ast::Ident::from_str(name), id: ast::DUMMY_NODE_ID, span: DUMMY_SP, })); @@ -68,22 +68,21 @@ pub fn maybe_inject_crates_ref(sess: &ParseSess, let span = ignored_span(sess, DUMMY_SP); krate.module.items.insert(0, P(ast::Item { attrs: vec![ast::Attribute { - node: ast::Attribute_ { - style: ast::AttrStyle::Outer, - value: P(ast::MetaItem { - node: ast::MetaItemKind::Word(token::intern_and_get_ident("prelude_import")), - span: span, - }), - id: attr::mk_attr_id(), - is_sugared_doc: false, + style: ast::AttrStyle::Outer, + value: ast::MetaItem { + name: Symbol::intern("prelude_import"), + node: ast::MetaItemKind::Word, + span: span, }, + id: attr::mk_attr_id(), + is_sugared_doc: false, span: span, }], vis: ast::Visibility::Inherited, node: ast::ItemKind::Use(P(codemap::dummy_spanned(ast::ViewPathGlob(ast::Path { global: false, segments: vec![name, "prelude", "v1"].into_iter().map(|name| ast::PathSegment { - identifier: token::str_to_ident(name), + identifier: ast::Ident::from_str(name), parameters: ast::PathParameters::none(), }).collect(), span: span, diff --git a/src/libsyntax/symbol.rs b/src/libsyntax/symbol.rs new file mode 100644 index 0000000000000..fe9a176179ce6 --- /dev/null +++ b/src/libsyntax/symbol.rs @@ -0,0 +1,303 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! An "interner" is a data structure that associates values with usize tags and +//! allows bidirectional lookup; i.e. given a value, one can easily find the +//! type, and vice versa. + +use serialize::{Decodable, Decoder, Encodable, Encoder}; +use std::cell::RefCell; +use std::collections::HashMap; +use std::fmt; + +/// A symbol is an interned or gensymed string. +#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct Symbol(u32); + +// The interner in thread-local, so `Symbol` shouldn't move between threads. +impl !Send for Symbol { } + +impl Symbol { + /// Maps a string to its interned representation. + pub fn intern(string: &str) -> Self { + with_interner(|interner| interner.intern(string)) + } + + /// gensym's a new usize, using the current interner. 
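// Illustrative sketch, not part of the patch: expected behaviour of `Symbol::intern`
// above and `Symbol::gensym` below, assuming this module's API (`syntax::symbol`).
fn symbol_interning_sketch() {
    let a = Symbol::intern("foo");
    let b = Symbol::intern("foo");
    assert_eq!(a, b);                       // interning the same string twice yields the same Symbol
    let g = Symbol::gensym("foo");
    assert!(a != g);                        // a gensym is always a fresh Symbol, never deduplicated
    assert_eq!(&*a.as_str(), &*g.as_str()); // both still read back as "foo"
}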
+ pub fn gensym(string: &str) -> Self { + with_interner(|interner| interner.gensym(string)) + } + + pub fn as_str(self) -> InternedString { + with_interner(|interner| unsafe { + InternedString { + string: ::std::mem::transmute::<&str, &str>(interner.get(self)) + } + }) + } + + pub fn as_u32(self) -> u32 { + self.0 + } +} + +impl fmt::Debug for Symbol { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}({})", self, self.0) + } +} + +impl fmt::Display for Symbol { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.as_str(), f) + } +} + +impl Encodable for Symbol { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(&self.as_str()) + } +} + +impl Decodable for Symbol { + fn decode(d: &mut D) -> Result { + Ok(Symbol::intern(&d.read_str()?)) + } +} + +impl<'a> PartialEq<&'a str> for Symbol { + fn eq(&self, other: &&str) -> bool { + *self.as_str() == **other + } +} + +#[derive(Default)] +pub struct Interner { + names: HashMap, Symbol>, + strings: Vec>, +} + +impl Interner { + pub fn new() -> Self { + Interner::default() + } + + fn prefill(init: &[&str]) -> Self { + let mut this = Interner::new(); + for &string in init { + this.intern(string); + } + this + } + + pub fn intern(&mut self, string: &str) -> Symbol { + if let Some(&name) = self.names.get(string) { + return name; + } + + let name = Symbol(self.strings.len() as u32); + let string = string.to_string().into_boxed_str(); + self.strings.push(string.clone()); + self.names.insert(string, name); + name + } + + fn gensym(&mut self, string: &str) -> Symbol { + let gensym = Symbol(self.strings.len() as u32); + // leave out of `names` to avoid colliding + self.strings.push(string.to_string().into_boxed_str()); + gensym + } + + pub fn get(&self, name: Symbol) -> &str { + &self.strings[name.0 as usize] + } +} + +// In this macro, there is the requirement that the name (the number) must be monotonically +// increasing by one in the special identifiers, starting at 0; the same holds for the keywords, +// except starting from the next number instead of zero. +macro_rules! declare_keywords {( + $( ($index: expr, $konst: ident, $string: expr) )* +) => { + pub mod keywords { + use ast; + #[derive(Clone, Copy, PartialEq, Eq)] + pub struct Keyword { + ident: ast::Ident, + } + impl Keyword { + #[inline] pub fn ident(self) -> ast::Ident { self.ident } + #[inline] pub fn name(self) -> ast::Name { self.ident.name } + } + $( + #[allow(non_upper_case_globals)] + pub const $konst: Keyword = Keyword { + ident: ast::Ident::with_empty_ctxt(ast::Name($index)) + }; + )* + } + + impl Interner { + fn fresh() -> Self { + Interner::prefill(&[$($string,)*]) + } + } +}} + +// NB: leaving holes in the ident table is bad! a different ident will get +// interned with the id from the hole, but it will be between the min and max +// of the reserved words, and thus tagged as "reserved". +// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, +// this should be rarely necessary though if the keywords are kept in alphabetic order. +declare_keywords! { + // Invalid identifier + (0, Invalid, "") + + // Strict keywords used in the language. 
+ (1, As, "as") + (2, Box, "box") + (3, Break, "break") + (4, Const, "const") + (5, Continue, "continue") + (6, Crate, "crate") + (7, Else, "else") + (8, Enum, "enum") + (9, Extern, "extern") + (10, False, "false") + (11, Fn, "fn") + (12, For, "for") + (13, If, "if") + (14, Impl, "impl") + (15, In, "in") + (16, Let, "let") + (17, Loop, "loop") + (18, Match, "match") + (19, Mod, "mod") + (20, Move, "move") + (21, Mut, "mut") + (22, Pub, "pub") + (23, Ref, "ref") + (24, Return, "return") + (25, SelfValue, "self") + (26, SelfType, "Self") + (27, Static, "static") + (28, Struct, "struct") + (29, Super, "super") + (30, Trait, "trait") + (31, True, "true") + (32, Type, "type") + (33, Unsafe, "unsafe") + (34, Use, "use") + (35, Where, "where") + (36, While, "while") + + // Keywords reserved for future use. + (37, Abstract, "abstract") + (38, Alignof, "alignof") + (39, Become, "become") + (40, Do, "do") + (41, Final, "final") + (42, Macro, "macro") + (43, Offsetof, "offsetof") + (44, Override, "override") + (45, Priv, "priv") + (46, Proc, "proc") + (47, Pure, "pure") + (48, Sizeof, "sizeof") + (49, Typeof, "typeof") + (50, Unsized, "unsized") + (51, Virtual, "virtual") + (52, Yield, "yield") + + // Weak keywords, have special meaning only in specific contexts. + (53, Default, "default") + (54, StaticLifetime, "'static") + (55, Union, "union") +} + +// If an interner exists in TLS, return it. Otherwise, prepare a fresh one. +fn with_interner T>(f: F) -> T { + thread_local!(static INTERNER: RefCell = { + RefCell::new(Interner::fresh()) + }); + INTERNER.with(|interner| f(&mut *interner.borrow_mut())) +} + +/// Represents a string stored in the thread-local interner. Because the +/// interner lives for the life of the thread, this can be safely treated as an +/// immortal string, as long as it never crosses between threads. +/// +/// FIXME(pcwalton): You must be careful about what you do in the destructors +/// of objects stored in TLS, because they may run after the interner is +/// destroyed. In particular, they must not access string contents. This can +/// be fixed in the future by just leaking all strings until thread death +/// somehow. 
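// Illustrative sketch, not part of the patch: because `Interner::fresh()` is prefilled
// with the strings above in declaration order, each `keywords` constant denotes the same
// Symbol that interning its string yields (assuming `ast::Name` is the new Symbol type).
fn keyword_prefill_sketch() {
    assert_eq!(keywords::As.name(), Symbol::intern("as"));       // index 1 in the table
    assert_eq!(keywords::Union.name(), Symbol::intern("union")); // index 55, the last entry
}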
+#[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)] +pub struct InternedString { + string: &'static str, +} + +impl !Send for InternedString { } + +impl ::std::ops::Deref for InternedString { + type Target = str; + fn deref(&self) -> &str { self.string } +} + +impl fmt::Debug for InternedString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(self.string, f) + } +} + +impl fmt::Display for InternedString { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(self.string, f) + } +} + +impl Decodable for InternedString { + fn decode(d: &mut D) -> Result { + Ok(Symbol::intern(&d.read_str()?).as_str()) + } +} + +impl Encodable for InternedString { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_str(self.string) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use ast::Name; + + #[test] + fn interner_tests() { + let mut i: Interner = Interner::new(); + // first one is zero: + assert_eq!(i.intern("dog"), Name(0)); + // re-use gets the same entry: + assert_eq!(i.intern ("dog"), Name(0)); + // different string gets a different #: + assert_eq!(i.intern("cat"), Name(1)); + assert_eq!(i.intern("cat"), Name(1)); + // dog is still at zero + assert_eq!(i.intern("dog"), Name(0)); + // gensym gets 3 + assert_eq!(i.gensym("zebra"), Name(2)); + // gensym of same string gets new number : + assert_eq!(i.gensym("zebra"), Name(3)); + // gensym of *existing* string gets new number: + assert_eq!(i.gensym("dog"), Name(4)); + } +} diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 59a7e75d12557..4de3baf7d14fb 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -34,21 +34,21 @@ use ext::expand::ExpansionConfig; use fold::Folder; use util::move_map::MoveMap; use fold; -use parse::token::{intern, keywords, InternedString}; use parse::{token, ParseSess}; use print::pprust; -use ast; +use ast::{self, Ident}; use ptr::P; +use symbol::{self, Symbol, keywords}; use util::small_vector::SmallVector; enum ShouldPanic { No, - Yes(Option), + Yes(Option), } struct Test { span: Span, - path: Vec , + path: Vec , bench: bool, ignore: bool, should_panic: ShouldPanic @@ -57,14 +57,14 @@ struct Test { struct TestCtxt<'a> { sess: &'a ParseSess, span_diagnostic: &'a errors::Handler, - path: Vec, + path: Vec, ext_cx: ExtCtxt<'a>, testfns: Vec, - reexport_test_harness_main: Option, + reexport_test_harness_main: Option, is_test_crate: bool, // top-level re-export submodule, filled out after folding is finished - toplevel_reexport: Option, + toplevel_reexport: Option, } // Traverse the crate, collecting all the test functions, eliding any @@ -91,10 +91,10 @@ pub fn modify_for_testing(sess: &ParseSess, struct TestHarnessGenerator<'a> { cx: TestCtxt<'a>, - tests: Vec, + tests: Vec, // submodule name, gensym'd identifier for re-exports - tested_submods: Vec<(ast::Ident, ast::Ident)>, + tested_submods: Vec<(Ident, Ident)>, } impl<'a> fold::Folder for TestHarnessGenerator<'a> { @@ -191,8 +191,8 @@ impl fold::Folder for EntryPointCleaner { EntryPointType::MainAttr | EntryPointType::Start => folded.map(|ast::Item {id, ident, attrs, node, vis, span}| { - let allow_str = InternedString::new("allow"); - let dead_code_str = InternedString::new("dead_code"); + let allow_str = Symbol::intern("allow"); + let dead_code_str = Symbol::intern("dead_code"); let word_vec = vec![attr::mk_list_word_item(dead_code_str)]; let allow_dead_code_item = attr::mk_list_item(allow_str, word_vec); let allow_dead_code = attr::mk_attr_outer(attr::mk_attr_id(), @@ -222,15 
+222,18 @@ impl fold::Folder for EntryPointCleaner { fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac } } -fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec, - tested_submods: Vec<(ast::Ident, ast::Ident)>) -> (P, ast::Ident) { - let super_ = token::str_to_ident("super"); +fn mk_reexport_mod(cx: &mut TestCtxt, + parent: ast::NodeId, + tests: Vec, + tested_submods: Vec<(Ident, Ident)>) + -> (P, Ident) { + let super_ = Ident::from_str("super"); // Generate imports with `#[allow(private_in_public)]` to work around issue #36768. let allow_private_in_public = cx.ext_cx.attribute(DUMMY_SP, cx.ext_cx.meta_list( DUMMY_SP, - InternedString::new("allow"), - vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, InternedString::new("private_in_public"))], + Symbol::intern("allow"), + vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, Symbol::intern("private_in_public"))], )); let items = tests.into_iter().map(|r| { cx.ext_cx.item_use_simple(DUMMY_SP, ast::Visibility::Public, @@ -247,7 +250,7 @@ fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec, + reexport_test_harness_main: Option, krate: ast::Crate, sd: &errors::Handler) -> ast::Crate { // Remove the entry points @@ -286,7 +289,7 @@ fn generate_test_harness(sess: &ParseSess, cx.ext_cx.bt_push(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - format: MacroAttribute(intern("test")), + format: MacroAttribute(Symbol::intern("test")), span: None, allow_internal_unstable: false, } @@ -306,7 +309,7 @@ fn ignored_span(cx: &TestCtxt, sp: Span) -> Span { let info = ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - format: MacroAttribute(intern("test")), + format: MacroAttribute(Symbol::intern("test")), span: None, allow_internal_unstable: true, } @@ -456,7 +459,7 @@ mod __test { */ fn mk_std(cx: &TestCtxt) -> P { - let id_test = token::str_to_ident("test"); + let id_test = Ident::from_str("test"); let (vi, vis, ident) = if cx.is_test_crate { (ast::ItemKind::Use( P(nospan(ast::ViewPathSimple(id_test, @@ -487,16 +490,17 @@ fn mk_main(cx: &mut TestCtxt) -> P { let ecx = &cx.ext_cx; // test::test_main_static - let test_main_path = ecx.path(sp, vec![token::str_to_ident("test"), - token::str_to_ident("test_main_static")]); + let test_main_path = + ecx.path(sp, vec![Ident::from_str("test"), Ident::from_str("test_main_static")]); + // test::test_main_static(...) let test_main_path_expr = ecx.expr_path(test_main_path); - let tests_ident_expr = ecx.expr_ident(sp, token::str_to_ident("TESTS")); + let tests_ident_expr = ecx.expr_ident(sp, Ident::from_str("TESTS")); let call_test_main = ecx.expr_call(sp, test_main_path_expr, vec![tests_ident_expr]); let call_test_main = ecx.stmt_expr(call_test_main); // #![main] - let main_meta = ecx.meta_word(sp, token::intern_and_get_ident("main")); + let main_meta = ecx.meta_word(sp, Symbol::intern("main")); let main_attr = ecx.attribute(sp, main_meta); // pub fn main() { ... 
} let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![])); @@ -506,7 +510,7 @@ fn mk_main(cx: &mut TestCtxt) -> P { dummy_spanned(ast::Constness::NotConst), ::abi::Abi::Rust, ast::Generics::default(), main_body); let main = P(ast::Item { - ident: token::str_to_ident("main"), + ident: Ident::from_str("main"), attrs: vec![main_attr], id: ast::DUMMY_NODE_ID, node: main, @@ -533,7 +537,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P, Option>) { items: vec![import, mainfn, tests], }; let item_ = ast::ItemKind::Mod(testmod); - let mod_ident = token::gensym_ident("__test"); + let mod_ident = Ident::with_empty_ctxt(Symbol::gensym("__test")); let mut expander = cx.ext_cx.monotonic_expander(); let item = expander.fold_item(P(ast::Item { @@ -544,13 +548,13 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P, Option>) { vis: ast::Visibility::Public, span: DUMMY_SP, })).pop().unwrap(); - let reexport = cx.reexport_test_harness_main.as_ref().map(|s| { + let reexport = cx.reexport_test_harness_main.map(|s| { // building `use = __test::main` - let reexport_ident = token::str_to_ident(&s); + let reexport_ident = Ident::with_empty_ctxt(s); let use_path = nospan(ast::ViewPathSimple(reexport_ident, - path_node(vec![mod_ident, token::str_to_ident("main")]))); + path_node(vec![mod_ident, Ident::from_str("main")]))); expander.fold_item(P(ast::Item { id: ast::DUMMY_NODE_ID, @@ -571,7 +575,7 @@ fn nospan(t: T) -> codemap::Spanned { codemap::Spanned { node: t, span: DUMMY_SP } } -fn path_node(ids: Vec ) -> ast::Path { +fn path_node(ids: Vec) -> ast::Path { ast::Path { span: DUMMY_SP, global: false, @@ -582,7 +586,7 @@ fn path_node(ids: Vec ) -> ast::Path { } } -fn path_name_i(idents: &[ast::Ident]) -> String { +fn path_name_i(idents: &[Ident]) -> String { // FIXME: Bad copies (#2543 -- same for everything else that says "bad") idents.iter().map(|i| i.to_string()).collect::>().join("::") } @@ -614,7 +618,7 @@ fn mk_tests(cx: &TestCtxt) -> P { fn is_test_crate(krate: &ast::Crate) -> bool { match attr::find_crate_name(&krate.attrs) { - Some(ref s) if "test" == &s[..] 
=> true, + Some(s) if "test" == &*s.as_str() => true, _ => false } } @@ -660,7 +664,7 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P { // path to the #[test] function: "foo::bar::baz" let path_string = path_name_i(&path[..]); - let name_expr = ecx.expr_str(span, token::intern_and_get_ident(&path_string[..])); + let name_expr = ecx.expr_str(span, Symbol::intern(&path_string)); // self::test::StaticTestName($name_expr) let name_expr = ecx.expr_call(span, @@ -673,10 +677,10 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> P { }; let fail_expr = match test.should_panic { ShouldPanic::No => ecx.expr_path(should_panic_path("No")), - ShouldPanic::Yes(ref msg) => { - match *msg { - Some(ref msg) => { - let msg = ecx.expr_str(span, msg.clone()); + ShouldPanic::Yes(msg) => { + match msg { + Some(msg) => { + let msg = ecx.expr_str(span, msg); let path = should_panic_path("YesWithMessage"); ecx.expr_call(span, ecx.expr_path(path), vec![msg]) } diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 9ef6c07e489dc..0d5dcaf339feb 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -34,6 +34,7 @@ use parse::lexer; use parse; use parse::token::{self, Token, Lit, Nonterminal}; use print::pprust; +use symbol::Symbol; use std::fmt; use std::iter::*; @@ -173,10 +174,10 @@ impl TokenTree { TokenTree::Delimited(sp, Rc::new(Delimited { delim: token::Bracket, open_span: sp, - tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))), + tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))), TokenTree::Token(sp, token::Eq), TokenTree::Token(sp, token::Literal( - token::StrRaw(token::intern(&stripped), num_of_hashes), None))], + token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))], close_span: sp, })) } @@ -295,7 +296,7 @@ impl TokenTree { pub fn maybe_str(&self) -> Option { match *self { TokenTree::Token(sp, Token::Literal(Lit::Str_(s), _)) => { - let l = LitKind::Str(token::intern_and_get_ident(&parse::str_lit(&s.as_str())), + let l = LitKind::Str(Symbol::intern(&parse::str_lit(&s.as_str())), ast::StrStyle::Cooked); Some(Spanned { node: l, @@ -303,7 +304,7 @@ impl TokenTree { }) } TokenTree::Token(sp, Token::Literal(Lit::StrRaw(s, n), _)) => { - let l = LitKind::Str(token::intern_and_get_ident(&parse::raw_str_lit(&s.as_str())), + let l = LitKind::Str(Symbol::intern(&parse::raw_str_lit(&s.as_str())), ast::StrStyle::Raw(n)); Some(Spanned { node: l, @@ -871,8 +872,9 @@ impl Index for InternalTS { #[cfg(test)] mod tests { use super::*; + use syntax::ast::Ident; use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP}; - use parse::token::{self, str_to_ident, Token}; + use parse::token::{self, Token}; use util::parser_testing::string_to_tts; use std::rc::Rc; @@ -967,15 +969,17 @@ mod tests { let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string())) .slice(2..3); let test_eqs = TokenStream::from_tts(vec![TokenTree::Token(sp(5,8), - token::Ident(str_to_ident("bar")))]); + token::Ident(Ident::from_str("bar")))]); assert_eq!(test_res, test_eqs) } #[test] fn test_is_empty() { let test0 = TokenStream::from_tts(Vec::new()); - let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1), - Token::Ident(str_to_ident("a")))]); + let test1 = TokenStream::from_tts( + vec![TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a")))] + ); + let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string())); assert_eq!(test0.is_empty(), true); @@ -1035,20 +1039,20 @@ mod 
tests { assert_eq!(test0, None); let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4), - token::Ident(str_to_ident("bar"))), + token::Ident(Ident::from_str("bar"))), TokenTree::Token(sp(4, 6), token::ModSep), TokenTree::Token(sp(6, 9), - token::Ident(str_to_ident("baz")))]); + token::Ident(Ident::from_str("baz")))]); assert_eq!(test1, Some(test1_expected)); let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4), - token::Ident(str_to_ident("foo"))), + token::Ident(Ident::from_str("foo"))), TokenTree::Token(sp(4, 5), token::Comma), TokenTree::Token(sp(5, 8), - token::Ident(str_to_ident("bar"))), + token::Ident(Ident::from_str("bar"))), TokenTree::Token(sp(8, 9), token::Comma), TokenTree::Token(sp(9, 12), - token::Ident(str_to_ident("baz")))]); + token::Ident(Ident::from_str("baz")))]); assert_eq!(test2, Some(test2_expected)); assert_eq!(test3, None); @@ -1069,7 +1073,7 @@ mod tests { assert_eq!(test0, None); assert_eq!(test1, None); - assert_eq!(test2, Some(str_to_ident("foo"))); + assert_eq!(test2, Some(Ident::from_str("foo"))); assert_eq!(test3, None); assert_eq!(test4, None); } @@ -1079,9 +1083,9 @@ mod tests { let test0 = as_paren_delimited_stream(string_to_tts("foo,bar,".to_string())); let test1 = as_paren_delimited_stream(string_to_tts("baz(foo,bar)".to_string())); - let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo"))), + let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("foo"))), TokenTree::Token(sp(3, 4), token::Comma), - TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))), + TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("bar"))), TokenTree::Token(sp(7, 8), token::Comma)]; let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8), Rc::new(Delimited { @@ -1094,11 +1098,11 @@ mod tests { assert_eq!(test0, test0_stream); - let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo"))), + let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("foo"))), TokenTree::Token(sp(7, 8), token::Comma), - TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar")))]; + TokenTree::Token(sp(8, 11), token::Ident(Ident::from_str("bar")))]; - let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz"))), + let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("baz"))), TokenTree::Delimited(sp(3, 12), Rc::new(Delimited { delim: token::DelimToken::Paren, diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs deleted file mode 100644 index f56c6cedcd186..0000000000000 --- a/src/libsyntax/util/interner.rs +++ /dev/null @@ -1,111 +0,0 @@ -// Copyright 2012 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! An "interner" is a data structure that associates values with usize tags and -//! allows bidirectional lookup; i.e. given a value, one can easily find the -//! type, and vice versa. 
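// Illustrative sketch, not part of the patch: the "bidirectional lookup" described in the
// module docs above, using the `Interner` that this series moves into `syntax::symbol`.
fn bidirectional_lookup_sketch() {
    let mut i = Interner::new();
    let dog = i.intern("dog");         // value -> tag
    assert_eq!(i.get(dog), "dog");     // tag -> value
    assert_eq!(i.intern("dog"), dog);  // re-interning returns the existing tag
}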
- -use ast::Name; - -use std::collections::HashMap; -use std::rc::Rc; - -#[derive(Default)] -pub struct Interner { - names: HashMap, Name>, - strings: Vec>, -} - -/// When traits can extend traits, we should extend index to get [] -impl Interner { - pub fn new() -> Self { - Interner::default() - } - - pub fn prefill(init: &[&str]) -> Self { - let mut this = Interner::new(); - for &string in init { - this.intern(string); - } - this - } - - pub fn intern(&mut self, string: &str) -> Name { - if let Some(&name) = self.names.get(string) { - return name; - } - - let name = Name(self.strings.len() as u32); - let string = Rc::__from_str(string); - self.strings.push(string.clone()); - self.names.insert(string, name); - name - } - - pub fn gensym(&mut self, string: &str) -> Name { - let gensym = Name(self.strings.len() as u32); - // leave out of `names` to avoid colliding - self.strings.push(Rc::__from_str(string)); - gensym - } - - /// Create a gensym with the same name as an existing entry. - pub fn gensym_copy(&mut self, name: Name) -> Name { - let gensym = Name(self.strings.len() as u32); - // leave out of `names` to avoid colliding - let string = self.strings[name.0 as usize].clone(); - self.strings.push(string); - gensym - } - - pub fn get(&self, name: Name) -> Rc { - self.strings[name.0 as usize].clone() - } - - pub fn find(&self, string: &str) -> Option { - self.names.get(string).cloned() - } -} - -#[cfg(test)] -mod tests { - use super::*; - use ast::Name; - - #[test] - fn interner_tests() { - let mut i: Interner = Interner::new(); - // first one is zero: - assert_eq!(i.intern("dog"), Name(0)); - // re-use gets the same entry: - assert_eq!(i.intern ("dog"), Name(0)); - // different string gets a different #: - assert_eq!(i.intern("cat"), Name(1)); - assert_eq!(i.intern("cat"), Name(1)); - // dog is still at zero - assert_eq!(i.intern("dog"), Name(0)); - // gensym gets 3 - assert_eq!(i.gensym("zebra"), Name(2)); - // gensym of same string gets new number : - assert_eq!(i.gensym("zebra"), Name(3)); - // gensym of *existing* string gets new number: - assert_eq!(i.gensym("dog"), Name(4)); - // gensym tests again with gensym_copy: - assert_eq!(i.gensym_copy(Name(2)), Name(5)); - assert_eq!(&*i.get(Name(5)), "zebra"); - assert_eq!(i.gensym_copy(Name(2)), Name(6)); - assert_eq!(&*i.get(Name(6)), "zebra"); - assert_eq!(&*i.get(Name(0)), "dog"); - assert_eq!(&*i.get(Name(1)), "cat"); - assert_eq!(&*i.get(Name(2)), "zebra"); - assert_eq!(&*i.get(Name(3)), "zebra"); - assert_eq!(&*i.get(Name(4)), "dog"); - } -} diff --git a/src/libsyntax/util/lev_distance.rs b/src/libsyntax/util/lev_distance.rs index e0796c34e57ef..a6fff2d707469 100644 --- a/src/libsyntax/util/lev_distance.rs +++ b/src/libsyntax/util/lev_distance.rs @@ -8,9 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use ast::Name; use std::cmp; -use parse::token::InternedString; +use symbol::Symbol; /// To find the Levenshtein distance between two strings pub fn lev_distance(a: &str, b: &str) -> usize { @@ -48,14 +47,14 @@ pub fn lev_distance(a: &str, b: &str) -> usize { /// to one-third of the given word pub fn find_best_match_for_name<'a, T>(iter_names: T, lookup: &str, - dist: Option) -> Option - where T: Iterator { + dist: Option) -> Option + where T: Iterator { let max_dist = dist.map_or_else(|| cmp::max(lookup.len(), 3) / 3, |d| d); iter_names - .filter_map(|name| { + .filter_map(|&name| { let dist = lev_distance(lookup, &name.as_str()); match dist <= max_dist { // filter the unwanted cases - true => Some((name.as_str(), dist)), + true => Some((name, dist)), false => None, } }) diff --git a/src/libsyntax/util/parser.rs b/src/libsyntax/util/parser.rs index df4eb1c9ed7d8..ce24fe1eb61e2 100644 --- a/src/libsyntax/util/parser.rs +++ b/src/libsyntax/util/parser.rs @@ -7,7 +7,8 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. -use parse::token::{Token, BinOpToken, keywords}; +use parse::token::{Token, BinOpToken}; +use symbol::keywords; use ast::BinOpKind; /// Associative operator with precedence. diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index 76d3f2a063c18..e703dc6b4191c 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -8,11 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast; +use ast::{self, Ident}; use parse::{ParseSess,PResult,filemap_to_tts}; use parse::{lexer, new_parser_from_source_str}; use parse::parser::Parser; -use parse::token; use ptr::P; use tokenstream; use std::iter::Peekable; @@ -78,9 +77,9 @@ pub fn string_to_pat(source_str: String) -> P { }) } -/// Convert a vector of strings to a vector of ast::Ident's -pub fn strs_to_idents(ids: Vec<&str> ) -> Vec { - ids.iter().map(|u| token::str_to_ident(*u)).collect() +/// Convert a vector of strings to a vector of Ident's +pub fn strs_to_idents(ids: Vec<&str> ) -> Vec { + ids.iter().map(|u| Ident::from_str(*u)).collect() } /// Does the given string match the pattern? whitespace in the first string diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index e4d0cb7404603..a5e083f926a07 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -17,9 +17,9 @@ use syntax::codemap; use syntax::ext::base; use syntax::ext::base::*; use syntax::feature_gate; -use syntax::parse::token::intern; use syntax::parse::{self, token}; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax::ast::AsmDialect; use syntax_pos::Span; use syntax::tokenstream; @@ -73,7 +73,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, }) .unwrap_or(tts.len()); let mut p = cx.new_parser_from_tts(&tts[first_colon..]); - let mut asm = token::InternedString::new(""); + let mut asm = Symbol::intern(""); let mut asm_str_style = None; let mut outputs = Vec::new(); let mut inputs = Vec::new(); @@ -135,11 +135,12 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, // It's the opposite of '=&' which means that the memory // cannot be shared with any other operand (usually when // a register is clobbered early.) 
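// Illustrative sketch, not part of the patch: a standalone mirror of the '+' constraint
// handling just below, assuming the rewrite keeps its old behaviour under the Symbol API.
fn rewrite_rw_constraint(constraint: &str) -> (String, bool) {
    let mut ch = constraint.chars();
    match ch.next() {
        // "+reg" is rewritten to "=reg" and the operand is marked read-write
        Some('+') => (format!("={}", ch.as_str()), true),
        // "=reg" is kept unchanged and is not read-write; the real macro reports an
        // error for any other leading character
        _ => (constraint.to_string(), false),
    }
}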
- let mut ch = constraint.chars(); + let constraint_str = constraint.as_str(); + let mut ch = constraint_str.chars(); let output = match ch.next() { Some('=') => None, Some('+') => { - Some(token::intern_and_get_ident(&format!("={}", ch.as_str()))) + Some(Symbol::intern(&format!("={}", ch.as_str()))) } _ => { cx.span_err(span, "output operand constraint lacks '=' or '+'"); @@ -148,9 +149,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, }; let is_rw = output.is_some(); - let is_indirect = constraint.contains("*"); + let is_indirect = constraint_str.contains("*"); outputs.push(ast::InlineAsmOutput { - constraint: output.unwrap_or(constraint.clone()), + constraint: output.unwrap_or(constraint), expr: out, is_rw: is_rw, is_indirect: is_indirect, @@ -166,9 +167,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, let (constraint, _str_style) = panictry!(p.parse_str()); - if constraint.starts_with("=") { + if constraint.as_str().starts_with("=") { cx.span_err(p.prev_span, "input operand constraint contains '='"); - } else if constraint.starts_with("+") { + } else if constraint.as_str().starts_with("+") { cx.span_err(p.prev_span, "input operand constraint contains '+'"); } @@ -190,7 +191,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, if OPTIONS.iter().any(|&opt| s == opt) { cx.span_warn(p.prev_span, "expected a clobber, found an option"); - } else if s.starts_with("{") || s.ends_with("}") { + } else if s.as_str().starts_with("{") || s.as_str().ends_with("}") { cx.span_err(p.prev_span, "clobber should not be surrounded by braces"); } @@ -242,7 +243,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, let expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { call_site: sp, callee: codemap::NameAndSpan { - format: codemap::MacroBang(intern("asm")), + format: codemap::MacroBang(Symbol::intern("asm")), span: None, allow_internal_unstable: false, }, @@ -251,7 +252,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, MacEager::expr(P(ast::Expr { id: ast::DUMMY_NODE_ID, node: ast::ExprKind::InlineAsm(P(ast::InlineAsm { - asm: token::intern_and_get_ident(&asm), + asm: asm, asm_str_style: asm_str_style.unwrap(), outputs: outputs, inputs: inputs, diff --git a/src/libsyntax_ext/concat.rs b/src/libsyntax_ext/concat.rs index 02b44f2d012ea..bfe18dc4060c9 100644 --- a/src/libsyntax_ext/concat.rs +++ b/src/libsyntax_ext/concat.rs @@ -11,7 +11,7 @@ use syntax::ast; use syntax::ext::base; use syntax::ext::build::AstBuilder; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax_pos; use syntax::tokenstream; @@ -33,7 +33,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, ast::LitKind::Str(ref s, _) | ast::LitKind::Float(ref s, _) | ast::LitKind::FloatUnsuffixed(ref s) => { - accumulator.push_str(&s); + accumulator.push_str(&s.as_str()); } ast::LitKind::Char(c) => { accumulator.push(c); @@ -57,5 +57,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, } } } - base::MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&accumulator[..]))) + base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator))) } diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index e56c6e2229a75..b26e33eb384dc 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -13,7 +13,6 @@ use syntax::ext::base::*; use syntax::ext::base; use syntax::feature_gate; use syntax::parse::token; -use syntax::parse::token::str_to_ident; use syntax::ptr::P; use syntax_pos::Span; use syntax::tokenstream::TokenTree; @@ -51,7 +50,7 @@ pub fn 
expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, } } } - let res = str_to_ident(&res_str); + let res = ast::Ident::from_str(&res_str); struct Result { ident: ast::Ident, diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index d7bc2a6faeeb9..d14b59d6c70e2 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -15,8 +15,8 @@ use syntax::ast::{self, Expr, Generics, ItemKind, MetaItem, VariantData}; use syntax::attr; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token::{keywords, InternedString}; use syntax::ptr::P; +use syntax::symbol::{Symbol, keywords}; use syntax_pos::Span; pub fn expand_deriving_clone(cx: &mut ExtCtxt, @@ -74,7 +74,7 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt, _ => cx.span_bug(span, "#[derive(Clone)] on trait item or impl item"), } - let inline = cx.meta_word(span, InternedString::new("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; let trait_def = TraitDef { span: span, diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index fa0fb2492c551..6ab5987a159ca 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -14,8 +14,8 @@ use deriving::generic::ty::*; use syntax::ast::{self, Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_eq(cx: &mut ExtCtxt, @@ -23,9 +23,9 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt, mitem: &MetaItem, item: &Annotatable, push: &mut FnMut(Annotatable)) { - let inline = cx.meta_word(span, InternedString::new("inline")); - let hidden = cx.meta_list_item_word(span, InternedString::new("hidden")); - let doc = cx.meta_list(span, InternedString::new("doc"), vec![hidden]); + let inline = cx.meta_word(span, Symbol::intern("inline")); + let hidden = cx.meta_list_item_word(span, Symbol::intern("hidden")); + let doc = cx.meta_list(span, Symbol::intern("doc"), vec![hidden]); let attrs = vec![cx.attribute(span, inline), cx.attribute(span, doc)]; let trait_def = TraitDef { span: span, diff --git a/src/libsyntax_ext/deriving/cmp/ord.rs b/src/libsyntax_ext/deriving/cmp/ord.rs index 6b2e36e63b657..9fc3d997585d7 100644 --- a/src/libsyntax_ext/deriving/cmp/ord.rs +++ b/src/libsyntax_ext/deriving/cmp/ord.rs @@ -14,8 +14,8 @@ use deriving::generic::ty::*; use syntax::ast::{self, Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_ord(cx: &mut ExtCtxt, @@ -23,7 +23,7 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt, mitem: &MetaItem, item: &Annotatable, push: &mut FnMut(Annotatable)) { - let inline = cx.meta_word(span, InternedString::new("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; let trait_def = TraitDef { span: span, diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs index c46d4b34173f6..f2a050ce971ed 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs @@ -14,8 +14,8 @@ use deriving::generic::ty::*; use syntax::ast::{BinOpKind, Expr, MetaItem}; 
use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, @@ -64,7 +64,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, macro_rules! md { ($name:expr, $f:ident) => { { - let inline = cx.meta_word(span, InternedString::new("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; MethodDef { name: $name, diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs index 597ff306b3dd8..ce4d549d696f4 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs @@ -16,8 +16,8 @@ use deriving::generic::ty::*; use syntax::ast::{self, BinOpKind, Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, @@ -27,7 +27,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, push: &mut FnMut(Annotatable)) { macro_rules! md { ($name:expr, $op:expr, $equal:expr) => { { - let inline = cx.meta_word(span, InternedString::new("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; MethodDef { name: $name, @@ -51,7 +51,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, vec![Box::new(ordering_ty)], true)); - let inline = cx.meta_word(span, InternedString::new("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; let partial_cmp_def = MethodDef { diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index e101757ad2322..1076a6a6d63a5 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -17,10 +17,9 @@ use syntax::attr::{mark_used, mark_known}; use syntax::codemap::Span; use syntax::ext::base::*; use syntax::fold::Folder; -use syntax::parse::token::InternedString; use syntax::visit::Visitor; -struct MarkAttrs<'a>(&'a [InternedString]); +struct MarkAttrs<'a>(&'a [ast::Name]); impl<'a> Visitor for MarkAttrs<'a> { fn visit_attribute(&mut self, attr: &Attribute) { @@ -33,13 +32,11 @@ impl<'a> Visitor for MarkAttrs<'a> { pub struct CustomDerive { inner: fn(TokenStream) -> TokenStream, - attrs: Vec, + attrs: Vec, } impl CustomDerive { - pub fn new(inner: fn(TokenStream) -> TokenStream, - attrs: Vec) - -> CustomDerive { + pub fn new(inner: fn(TokenStream) -> TokenStream, attrs: Vec) -> CustomDerive { CustomDerive { inner: inner, attrs: attrs } } } diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index f367fed9cc2ce..a767716466cb1 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -11,11 +11,10 @@ use deriving::generic::*; use deriving::generic::ty::*; -use syntax::ast; +use syntax::ast::{self, Ident}; use syntax::ast::{Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; use syntax::ptr::P; use syntax_pos::{DUMMY_SP, Span}; @@ -69,9 +68,8 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P P P P P P unreachable!(), }; - let expr = cx.expr_method_call(span, builder_expr, 
token::str_to_ident("finish"), vec![]); + let expr = cx.expr_method_call(span, builder_expr, Ident::from_str("finish"), vec![]); stmts.push(cx.stmt_expr(expr)); let block = cx.block(span, stmts); diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs index dc1f7b4e6201e..e2634c60dcaad 100644 --- a/src/libsyntax_ext/deriving/decodable.rs +++ b/src/libsyntax_ext/deriving/decodable.rs @@ -18,9 +18,8 @@ use syntax::ast; use syntax::ast::{Expr, MetaItem, Mutability}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token::InternedString; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_rustc_decodable(cx: &mut ExtCtxt, @@ -131,7 +130,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, cx.expr_method_call(trait_span, decoder, cx.ident_of("read_struct"), - vec![cx.expr_str(trait_span, substr.type_ident.name.as_str()), + vec![cx.expr_str(trait_span, substr.type_ident.name), cx.expr_usize(trait_span, nfields), cx.lambda1(trait_span, result, blkarg)]) } @@ -143,7 +142,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, let rvariant_arg = cx.ident_of("read_enum_variant_arg"); for (i, &(ident, v_span, ref parts)) in fields.iter().enumerate() { - variants.push(cx.expr_str(v_span, ident.name.as_str())); + variants.push(cx.expr_str(v_span, ident.name)); let path = cx.path(trait_span, vec![substr.type_ident, ident]); let decoded = decode_static_fields(cx, v_span, path, parts, |cx, span, _, field| { @@ -175,7 +174,7 @@ fn decodable_substructure(cx: &mut ExtCtxt, cx.expr_method_call(trait_span, decoder, cx.ident_of("read_enum"), - vec![cx.expr_str(trait_span, substr.type_ident.name.as_str()), + vec![cx.expr_str(trait_span, substr.type_ident.name), cx.lambda1(trait_span, result, blkarg)]) } _ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"), @@ -191,7 +190,7 @@ fn decode_static_fields(cx: &mut ExtCtxt, fields: &StaticFields, mut getarg: F) -> P - where F: FnMut(&mut ExtCtxt, Span, InternedString, usize) -> P + where F: FnMut(&mut ExtCtxt, Span, Symbol, usize) -> P { match *fields { Unnamed(ref fields, is_tuple) => { @@ -202,10 +201,7 @@ fn decode_static_fields(cx: &mut ExtCtxt, let fields = fields.iter() .enumerate() .map(|(i, &span)| { - getarg(cx, - span, - token::intern_and_get_ident(&format!("_field{}", i)), - i) + getarg(cx, span, Symbol::intern(&format!("_field{}", i)), i) }) .collect(); @@ -217,7 +213,7 @@ fn decode_static_fields(cx: &mut ExtCtxt, let fields = fields.iter() .enumerate() .map(|(i, &(ident, span))| { - let arg = getarg(cx, span, ident.name.as_str(), i); + let arg = getarg(cx, span, ident.name, i); cx.field_imm(span, ident, arg) }) .collect(); diff --git a/src/libsyntax_ext/deriving/default.rs b/src/libsyntax_ext/deriving/default.rs index b15fd2b49a655..69391f48c2288 100644 --- a/src/libsyntax_ext/deriving/default.rs +++ b/src/libsyntax_ext/deriving/default.rs @@ -14,8 +14,8 @@ use deriving::generic::ty::*; use syntax::ast::{Expr, MetaItem}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token::InternedString; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_default(cx: &mut ExtCtxt, @@ -23,7 +23,7 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt, mitem: &MetaItem, item: &Annotatable, push: &mut FnMut(Annotatable)) { - let inline = cx.meta_word(span, InternedString::new("inline")); + let inline = 
cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; let trait_def = TraitDef { span: span, diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index ebbddc6e48084..092738ab8a03d 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -95,8 +95,8 @@ use deriving::generic::ty::*; use syntax::ast::{Expr, ExprKind, MetaItem, Mutability}; use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; pub fn expand_deriving_rustc_encodable(cx: &mut ExtCtxt, @@ -192,8 +192,8 @@ fn encodable_substructure(cx: &mut ExtCtxt, let mut stmts = Vec::new(); for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() { let name = match name { - Some(id) => id.name.as_str(), - None => token::intern_and_get_ident(&format!("_field{}", i)), + Some(id) => id.name, + None => Symbol::intern(&format!("_field{}", i)), }; let self_ref = cx.expr_addr_of(span, self_.clone()); let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]); @@ -226,7 +226,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_struct"), - vec![cx.expr_str(trait_span, substr.type_ident.name.as_str()), + vec![cx.expr_str(trait_span, substr.type_ident.name), cx.expr_usize(trait_span, fields.len()), blk]) } @@ -265,7 +265,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, } let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); - let name = cx.expr_str(trait_span, variant.node.name.name.as_str()); + let name = cx.expr_str(trait_span, variant.node.name.name); let call = cx.expr_method_call(trait_span, blkencoder, cx.ident_of("emit_enum_variant"), @@ -277,8 +277,7 @@ fn encodable_substructure(cx: &mut ExtCtxt, let ret = cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_enum"), - vec![cx.expr_str(trait_span, - substr.type_ident.name.as_str()), + vec![cx.expr_str(trait_span ,substr.type_ident.name), blk]); cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)])) } diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index e6b63be3efc0d..63cd7678321ef 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -198,8 +198,8 @@ use syntax::ext::base::{Annotatable, ExtCtxt}; use syntax::ext::build::AstBuilder; use syntax::codemap::{self, dummy_spanned, respan}; use syntax::util::move_map::MoveMap; -use syntax::parse::token::{InternedString, keywords}; use syntax::ptr::P; +use syntax::symbol::{Symbol, keywords}; use syntax_pos::{DUMMY_SP, Span}; use errors::Handler; @@ -442,7 +442,7 @@ impl<'a> TraitDef<'a> { attrs.extend(item.attrs .iter() .filter(|a| { - match &a.name()[..] 
{ + match &*a.name().as_str() { "allow" | "warn" | "deny" | "forbid" | "stable" | "unstable" => true, _ => false, } @@ -639,15 +639,15 @@ impl<'a> TraitDef<'a> { let attr = cx.attribute(self.span, cx.meta_word(self.span, - InternedString::new("automatically_derived"))); + Symbol::intern("automatically_derived"))); // Just mark it now since we know that it'll end up used downstream attr::mark_used(&attr); let opt_trait_ref = Some(trait_ref); - let unused_qual = cx.attribute(self.span, - cx.meta_list(self.span, - InternedString::new("allow"), - vec![cx.meta_list_item_word(self.span, - InternedString::new("unused_qualifications"))])); + let unused_qual = { + let word = cx.meta_list_item_word(self.span, Symbol::intern("unused_qualifications")); + cx.attribute(self.span, cx.meta_list(self.span, Symbol::intern("allow"), vec![word])) + }; + let mut a = vec![attr, unused_qual]; a.extend(self.attributes.iter().cloned()); diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index b1d473820f774..535d7de19e341 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -16,8 +16,8 @@ use syntax::codemap; use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension}; use syntax::ext::build::AstBuilder; use syntax::feature_gate::{self, emit_feature_err}; -use syntax::parse::token::{intern, intern_and_get_ident}; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; macro_rules! pathvec { @@ -80,7 +80,7 @@ fn allow_unstable(cx: &mut ExtCtxt, span: Span, attr_name: &str) -> Span { expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { call_site: span, callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(intern(attr_name)), + format: codemap::MacroAttribute(Symbol::intern(attr_name)), span: Some(span), allow_internal_unstable: true, }, @@ -105,9 +105,10 @@ pub fn expand_derive(cx: &mut ExtCtxt, } }; + let derive = Symbol::intern("derive"); let mut derive_attrs = Vec::new(); item = item.map_attrs(|attrs| { - let partition = attrs.into_iter().partition(|attr| &attr.name() == "derive"); + let partition = attrs.into_iter().partition(|attr| attr.name() == derive); derive_attrs = partition.0; partition.1 }); @@ -115,7 +116,7 @@ pub fn expand_derive(cx: &mut ExtCtxt, // Expand `#[derive]`s after other attribute macro invocations. 
if cx.resolver.find_attr_invoc(&mut item.attrs.clone()).is_some() { return vec![Annotatable::Item(item.map_attrs(|mut attrs| { - attrs.push(cx.attribute(span, P(mitem.clone()))); + attrs.push(cx.attribute(span, mitem.clone())); attrs.extend(derive_attrs); attrs }))]; @@ -135,7 +136,7 @@ pub fn expand_derive(cx: &mut ExtCtxt, let mut traits = get_traits(mitem, cx); for derive_attr in derive_attrs { - traits.extend(get_traits(&derive_attr.node.value, cx)); + traits.extend(get_traits(&derive_attr.value, cx)); } // First, weed out malformed #[derive] @@ -158,9 +159,8 @@ pub fn expand_derive(cx: &mut ExtCtxt, let tword = titem.word().unwrap(); let tname = tword.name(); - if is_builtin_trait(&tname) || { - let derive_mode = - ast::Path::from_ident(titem.span, ast::Ident::with_empty_ctxt(intern(&tname))); + if is_builtin_trait(tname) || { + let derive_mode = ast::Path::from_ident(titem.span, ast::Ident::with_empty_ctxt(tname)); cx.resolver.resolve_macro(cx.current_expansion.mark, &derive_mode, false).map(|ext| { if let SyntaxExtension::CustomDerive(_) = *ext { true } else { false } }).unwrap_or(false) @@ -176,7 +176,7 @@ pub fn expand_derive(cx: &mut ExtCtxt, feature_gate::EXPLAIN_CUSTOM_DERIVE); } else { cx.span_warn(titem.span, feature_gate::EXPLAIN_DEPR_CUSTOM_DERIVE); - let name = intern_and_get_ident(&format!("derive_{}", tname)); + let name = Symbol::intern(&format!("derive_{}", tname)); let mitem = cx.meta_word(titem.span, name); new_attributes.push(cx.attribute(mitem.span, mitem)); } @@ -186,9 +186,7 @@ pub fn expand_derive(cx: &mut ExtCtxt, item = item.map(|mut i| { i.attrs.extend(new_attributes); if traits.len() > 0 { - let list = cx.meta_list(mitem.span, - intern_and_get_ident("derive"), - traits); + let list = cx.meta_list(mitem.span, derive, traits); i.attrs.push(cx.attribute(mitem.span, list)); } i @@ -217,7 +215,7 @@ pub fn expand_derive(cx: &mut ExtCtxt, let macros_11_derive = traits.iter() .cloned() .enumerate() - .filter(|&(_, ref name)| !is_builtin_trait(&name.name().unwrap())) + .filter(|&(_, ref name)| !is_builtin_trait(name.name().unwrap())) .next(); if let Some((i, titem)) = macros_11_derive { if !cx.ecfg.features.unwrap().proc_macro { @@ -226,24 +224,20 @@ pub fn expand_derive(cx: &mut ExtCtxt, emit_feature_err(cx.parse_sess, "proc_macro", titem.span, issue, msg); } - let tname = ast::Ident::with_empty_ctxt(intern(&titem.name().unwrap())); + let tname = ast::Ident::with_empty_ctxt(titem.name().unwrap()); let path = ast::Path::from_ident(titem.span, tname); let ext = cx.resolver.resolve_macro(cx.current_expansion.mark, &path, false).unwrap(); traits.remove(i); if traits.len() > 0 { item = item.map(|mut i| { - let list = cx.meta_list(mitem.span, - intern_and_get_ident("derive"), - traits); + let list = cx.meta_list(mitem.span, derive, traits); i.attrs.push(cx.attribute(mitem.span, list)); i }); } let titem = cx.meta_list_item_word(titem.span, titem.name().unwrap()); - let mitem = cx.meta_list(titem.span, - intern_and_get_ident("derive"), - vec![titem]); + let mitem = cx.meta_list(titem.span, derive, vec![titem]); let item = Annotatable::Item(item); if let SyntaxExtension::CustomDerive(ref ext) = *ext { return ext.expand(cx, mitem.span, &mitem, item); @@ -257,9 +251,10 @@ pub fn expand_derive(cx: &mut ExtCtxt, // RFC #1445. `#[derive(PartialEq, Eq)]` adds a (trusted) // `#[structural_match]` attribute. 
- if traits.iter().filter_map(|t| t.name()).any(|t| t == "PartialEq") && - traits.iter().filter_map(|t| t.name()).any(|t| t == "Eq") { - let structural_match = intern_and_get_ident("structural_match"); + let (partial_eq, eq) = (Symbol::intern("PartialEq"), Symbol::intern("Eq")); + if traits.iter().any(|t| t.name() == Some(partial_eq)) && + traits.iter().any(|t| t.name() == Some(eq)) { + let structural_match = Symbol::intern("structural_match"); let span = allow_unstable(cx, span, "derive(PartialEq, Eq)"); let meta = cx.meta_word(span, structural_match); item = item.map(|mut i| { @@ -272,9 +267,10 @@ pub fn expand_derive(cx: &mut ExtCtxt, // the same as the copy implementation. // // Add a marker attribute here picked up during #[derive(Clone)] - if traits.iter().filter_map(|t| t.name()).any(|t| t == "Clone") && - traits.iter().filter_map(|t| t.name()).any(|t| t == "Copy") { - let marker = intern_and_get_ident("rustc_copy_clone_marker"); + let (copy, clone) = (Symbol::intern("Copy"), Symbol::intern("Clone")); + if traits.iter().any(|t| t.name() == Some(clone)) && + traits.iter().any(|t| t.name() == Some(copy)) { + let marker = Symbol::intern("rustc_copy_clone_marker"); let span = allow_unstable(cx, span, "derive(Copy, Clone)"); let meta = cx.meta_word(span, marker); item = item.map(|mut i| { @@ -286,14 +282,14 @@ pub fn expand_derive(cx: &mut ExtCtxt, let mut items = Vec::new(); for titem in traits.iter() { let tname = titem.word().unwrap().name(); - let name = intern_and_get_ident(&format!("derive({})", tname)); + let name = Symbol::intern(&format!("derive({})", tname)); let mitem = cx.meta_word(titem.span, name); let span = Span { expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { call_site: titem.span, callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(intern(&format!("derive({})", tname))), + format: codemap::MacroAttribute(Symbol::intern(&format!("derive({})", tname))), span: Some(titem.span), allow_internal_unstable: true, }, @@ -302,7 +298,7 @@ pub fn expand_derive(cx: &mut ExtCtxt, }; let my_item = Annotatable::Item(item); - expand_builtin(&tname, cx, span, &mitem, &my_item, &mut |a| { + expand_builtin(&tname.as_str(), cx, span, &mitem, &my_item, &mut |a| { items.push(a); }); item = my_item.expect_item(); @@ -314,8 +310,8 @@ pub fn expand_derive(cx: &mut ExtCtxt, macro_rules! derive_traits { ($( $name:expr => $func:path, )+) => { - pub fn is_builtin_trait(name: &str) -> bool { - match name { + pub fn is_builtin_trait(name: ast::Name) -> bool { + match &*name.as_str() { $( $name )|+ => true, _ => false, } @@ -412,7 +408,7 @@ fn call_intrinsic(cx: &ExtCtxt, span.expn_id = cx.codemap().record_expansion(codemap::ExpnInfo { call_site: span, callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(intern("derive")), + format: codemap::MacroAttribute(Symbol::intern("derive")), span: Some(span), allow_internal_unstable: true, }, diff --git a/src/libsyntax_ext/env.rs b/src/libsyntax_ext/env.rs index 5c081b98962e3..ecf0a8f377ea3 100644 --- a/src/libsyntax_ext/env.rs +++ b/src/libsyntax_ext/env.rs @@ -17,7 +17,7 @@ use syntax::ast; use syntax::ext::base::*; use syntax::ext::base; use syntax::ext::build::AstBuilder; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax_pos::Span; use syntax::tokenstream; @@ -32,7 +32,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, Some(v) => v, }; - let e = match env::var(&var[..]) { + let e = match env::var(&*var.as_str()) { Err(..) 
=> { cx.expr_path(cx.path_all(sp, true, @@ -49,7 +49,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, Ok(s) => { cx.expr_call_global(sp, cx.std_path(&["option", "Option", "Some"]), - vec![cx.expr_str(sp, token::intern_and_get_ident(&s[..]))]) + vec![cx.expr_str(sp, Symbol::intern(&s))]) } }; MacEager::expr(e) @@ -73,7 +73,7 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, Some((v, _style)) => v, }; let msg = match exprs.next() { - None => token::intern_and_get_ident(&format!("environment variable `{}` not defined", var)), + None => Symbol::intern(&format!("environment variable `{}` not defined", var)), Some(second) => { match expr_to_string(cx, second, "expected string literal") { None => return DummyResult::expr(sp), @@ -87,12 +87,12 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, return DummyResult::expr(sp); } - let e = match env::var(&var[..]) { + let e = match env::var(&*var.as_str()) { Err(_) => { - cx.span_err(sp, &msg); + cx.span_err(sp, &msg.as_str()); cx.expr_usize(sp, 0) } - Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s)), + Ok(s) => cx.expr_str(sp, Symbol::intern(&s)), }; MacEager::expr(e) } diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 6eba8baf5b824..d2afa08cadaf4 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -17,8 +17,9 @@ use syntax::ast; use syntax::ext::base::*; use syntax::ext::base; use syntax::ext::build::AstBuilder; -use syntax::parse::token::{self, keywords}; +use syntax::parse::token; use syntax::ptr::P; +use syntax::symbol::{Symbol, keywords}; use syntax_pos::{Span, DUMMY_SP}; use syntax::tokenstream; @@ -369,7 +370,7 @@ impl<'a, 'b> Context<'a, 'b> { /// Translate the accumulated string literals to a literal expression fn trans_literal_string(&mut self) -> P<ast::Expr> { let sp = self.fmtsp; - let s = token::intern_and_get_ident(&self.literal); + let s = Symbol::intern(&self.literal); self.literal.clear(); self.ecx.expr_str(sp, s) } @@ -727,7 +728,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, fmtsp: fmt.span, }; - let mut parser = parse::Parser::new(&fmt.node.0); + let fmt_str = &*fmt.node.0.as_str(); + let mut parser = parse::Parser::new(fmt_str); let mut pieces = vec![]; loop { @@ -808,7 +810,6 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, // Decide if we want to look for foreign formatting directives. if args_used < args_unused { use super::format_foreign as foreign; - let fmt_str = &fmt.node.0[..]; // The set of foreign substitutions we've explained. This prevents spamming the user // with `%d should be written as {}` over and over again. diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 1ebac19b4f029..e83fd55cd714b 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -53,7 +53,7 @@ use std::rc::Rc; use syntax::ast; use syntax::ext::base::{MacroExpanderFn, NormalTT, IdentTT, MultiModifier, NamedSyntaxExtension}; use syntax::ext::tt::macro_rules::MacroRulesExpander; -use syntax::parse::token::intern; +use syntax::symbol::Symbol; pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, user_exts: Vec<NamedSyntaxExtension>, @@ -62,11 +62,11 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, resolver.add_ext(ast::Ident::with_empty_ctxt(name), Rc::new(ext)); }; - register(intern("macro_rules"), IdentTT(Box::new(MacroRulesExpander), None, false)); + register(Symbol::intern("macro_rules"), IdentTT(Box::new(MacroRulesExpander), None, false)); macro_rules!
register { ($( $name:ident: $f:expr, )*) => { $( - register(intern(stringify!($name)), + register(Symbol::intern(stringify!($name)), NormalTT(Box::new($f as MacroExpanderFn), None, false)); )* } } @@ -112,9 +112,10 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, } // format_args uses `unstable` things internally. - register(intern("format_args"), NormalTT(Box::new(format::expand_format_args), None, true)); + register(Symbol::intern("format_args"), + NormalTT(Box::new(format::expand_format_args), None, true)); - register(intern("derive"), MultiModifier(Box::new(deriving::expand_derive))); + register(Symbol::intern("derive"), MultiModifier(Box::new(deriving::expand_derive))); for (name, ext) in user_exts { register(name, ext); diff --git a/src/libsyntax_ext/proc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs index 36fd6408b4f31..bbdbda701ae8e 100644 --- a/src/libsyntax_ext/proc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -17,20 +17,20 @@ use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::ext::expand::ExpansionConfig; use syntax::parse::ParseSess; -use syntax::parse::token::{self, InternedString}; use syntax::feature_gate::Features; use syntax::fold::Folder; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::{Span, DUMMY_SP}; use syntax::visit::{self, Visitor}; use deriving; struct CustomDerive { - trait_name: InternedString, + trait_name: ast::Name, function_name: Ident, span: Span, - attrs: Vec<InternedString>, + attrs: Vec<ast::Name>, } struct CollectCustomDerives<'a> { @@ -183,7 +183,7 @@ impl<'a> Visitor for CollectCustomDerives<'a> { self.handler.span_err(trait_attr.span(), "must only be one word"); } - if deriving::is_builtin_trait(&trait_name) { + if deriving::is_builtin_trait(trait_name) { self.handler.span_err(trait_attr.span(), "cannot override a built-in #[derive] mode"); } @@ -271,29 +271,29 @@ fn mk_registrar(cx: &mut ExtCtxt, let eid = cx.codemap().record_expansion(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - format: MacroAttribute(token::intern("proc_macro")), + format: MacroAttribute(Symbol::intern("proc_macro")), span: None, allow_internal_unstable: true, } }); let span = Span { expn_id: eid, ..DUMMY_SP }; - let proc_macro = token::str_to_ident("proc_macro"); + let proc_macro = Ident::from_str("proc_macro"); let krate = cx.item(span, proc_macro, Vec::new(), ast::ItemKind::ExternCrate(None)); - let __internal = token::str_to_ident("__internal"); - let registry = token::str_to_ident("Registry"); - let registrar = token::str_to_ident("registrar"); - let register_custom_derive = token::str_to_ident("register_custom_derive"); + let __internal = Ident::from_str("__internal"); + let registry = Ident::from_str("Registry"); + let registrar = Ident::from_str("registrar"); + let register_custom_derive = Ident::from_str("register_custom_derive"); let stmts = custom_derives.iter().map(|cd| { let path = cx.path_global(cd.span, vec![cd.function_name]); - let trait_name = cx.expr_str(cd.span, cd.trait_name.clone()); + let trait_name = cx.expr_str(cd.span, cd.trait_name); let attrs = cx.expr_vec_slice( span, - cd.attrs.iter().map(|s| cx.expr_str(cd.span, s.clone())).collect::<Vec<_>>() + cd.attrs.iter().map(|&s| cx.expr_str(cd.span, s)).collect::<Vec<_>>() ); (path, trait_name, attrs) }).map(|(path, trait_name, attrs)| { @@ -316,15 +316,14 @@ fn mk_registrar(cx: &mut ExtCtxt, cx.ty(span, ast::TyKind::Tup(Vec::new())), cx.block(span, stmts)); - let derive_registrar =
token::intern_and_get_ident("rustc_derive_registrar"); - let derive_registrar = cx.meta_word(span, derive_registrar); + let derive_registrar = cx.meta_word(span, Symbol::intern("rustc_derive_registrar")); let derive_registrar = cx.attribute(span, derive_registrar); let func = func.map(|mut i| { i.attrs.push(derive_registrar); i.vis = ast::Visibility::Public; i }); - let ident = ast::Ident::with_empty_ctxt(token::gensym("registrar")); + let ident = ast::Ident::with_empty_ctxt(Symbol::gensym("registrar")); let module = cx.item_mod(span, span, ident, Vec::new(), vec![krate, func]).map(|mut i| { i.vis = ast::Visibility::Public; i diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs index 9578af6810078..48be8e0c53c2e 100644 --- a/src/libsyntax_ext/trace_macros.rs +++ b/src/libsyntax_ext/trace_macros.rs @@ -11,7 +11,7 @@ use syntax::ext::base::ExtCtxt; use syntax::ext::base; use syntax::feature_gate; -use syntax::parse::token::keywords; +use syntax::symbol::keywords; use syntax_pos::Span; use syntax::tokenstream::TokenTree; diff --git a/src/test/compile-fail-fulldeps/auxiliary/lint_plugin_test.rs b/src/test/compile-fail-fulldeps/auxiliary/lint_plugin_test.rs index 8ea131da338cb..8647797270f9a 100644 --- a/src/test/compile-fail-fulldeps/auxiliary/lint_plugin_test.rs +++ b/src/test/compile-fail-fulldeps/auxiliary/lint_plugin_test.rs @@ -36,7 +36,7 @@ impl LintPass for Pass { impl EarlyLintPass for Pass { fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) { - if it.ident.name.as_str() == "lintme" { + if it.ident.name == "lintme" { cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"); } } diff --git a/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs b/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs index 409f9dbf03c54..dc88bfc40595f 100644 --- a/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs +++ b/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs @@ -19,8 +19,9 @@ extern crate rustc_plugin; use syntax::ast::{self, Item, MetaItem, ItemKind}; use syntax::ext::base::*; -use syntax::parse::{self, token}; +use syntax::parse; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax::tokenstream::TokenTree; use syntax_pos::Span; use rustc_plugin::Registry; @@ -34,11 +35,11 @@ pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("make_a_1", expand_make_a_1); reg.register_macro("identity", expand_identity); reg.register_syntax_extension( - token::intern("into_multi_foo"), + Symbol::intern("into_multi_foo"), // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. MultiModifier(Box::new(expand_into_foo_multi))); reg.register_syntax_extension( - token::intern("duplicate"), + Symbol::intern("duplicate"), // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. 
MultiDecorator(Box::new(expand_duplicate))); } @@ -102,9 +103,9 @@ fn expand_duplicate(cx: &mut ExtCtxt, push: &mut FnMut(Annotatable)) { let copy_name = match mi.node { - ast::MetaItemKind::List(_, ref xs) => { + ast::MetaItemKind::List(ref xs) => { if let Some(word) = xs[0].word() { - token::str_to_ident(&word.name()) + ast::Ident::with_empty_ctxt(word.name()) } else { cx.span_err(mi.span, "Expected word"); return; diff --git a/src/test/compile-fail-fulldeps/qquote.rs b/src/test/compile-fail-fulldeps/qquote.rs index 4a7033d44b878..8acab3369e48f 100644 --- a/src/test/compile-fail-fulldeps/qquote.rs +++ b/src/test/compile-fail-fulldeps/qquote.rs @@ -16,8 +16,8 @@ extern crate syntax; extern crate syntax_pos; use syntax::ast; -use syntax::parse; use syntax::print::pprust; +use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; fn main() { @@ -30,7 +30,7 @@ fn main() { cx.bt_push(syntax::codemap::ExpnInfo { call_site: DUMMY_SP, callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(parse::token::intern("")), + format: syntax::codemap::MacroBang(Symbol::intern("")), allow_internal_unstable: false, span: None, } diff --git a/src/test/parse-fail/attr-bad-meta.rs b/src/test/parse-fail/attr-bad-meta.rs index 7def91da5eca4..092adbf29e340 100644 --- a/src/test/parse-fail/attr-bad-meta.rs +++ b/src/test/parse-fail/attr-bad-meta.rs @@ -10,7 +10,7 @@ // compile-flags: -Z parse-only -// error-pattern:expected `]` +// error-pattern:expected one of `=` or `]` // asterisk is bogus #[attr*] diff --git a/src/test/run-fail-fulldeps/qquote.rs b/src/test/run-fail-fulldeps/qquote.rs index d2a16ac750704..d692bb519c149 100644 --- a/src/test/run-fail-fulldeps/qquote.rs +++ b/src/test/run-fail-fulldeps/qquote.rs @@ -19,8 +19,8 @@ extern crate syntax_pos; use syntax::ast; use syntax::codemap; -use syntax::parse; use syntax::print::pprust; +use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; fn main() { @@ -33,7 +33,7 @@ fn main() { cx.bt_push(syntax::codemap::ExpnInfo { call_site: DUMMY_SP, callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(parse::token::intern("")), + format: syntax::codemap::MacroBang(Symbol::intern("")), allow_internal_unstable: false, span: None, } diff --git a/src/test/run-make/issue-19371/foo.rs b/src/test/run-make/issue-19371/foo.rs index ed127b017b6a9..0336fe277c51f 100644 --- a/src/test/run-make/issue-19371/foo.rs +++ b/src/test/run-make/issue-19371/foo.rs @@ -25,6 +25,7 @@ use rustc_driver::driver::{compile_input, CompileController, anon_src}; use rustc_metadata::cstore::CStore; use rustc_errors::registry::Registry; +use std::collections::HashSet; use std::path::PathBuf; use std::rc::Rc; @@ -65,7 +66,7 @@ fn basic_sess(sysroot: PathBuf) -> (Session, Rc) { fn compile(code: String, output: PathBuf, sysroot: PathBuf) { let (sess, cstore) = basic_sess(sysroot); - let cfg = build_configuration(&sess, vec![]); + let cfg = build_configuration(&sess, HashSet::new()); let control = CompileController::basic(); let input = Input::Str { name: anon_src(), input: code }; compile_input(&sess, &cstore, &input, &None, &Some(output), None, &control); diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs index 48919fe876a22..664bb9da89a57 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs @@ -20,10 +20,10 @@ extern crate syntax; use proc_macro_tokens::build::ident_eq; +use 
syntax::ast::Ident; use syntax::ext::base::{ExtCtxt, MacResult}; use syntax::ext::proc_macro_shim::build_block_emitter; use syntax::tokenstream::{TokenTree, TokenStream}; -use syntax::parse::token::str_to_ident; use syntax::codemap::Span; use rustc_plugin::Registry; @@ -57,7 +57,7 @@ fn cond_rec(input: TokenStream) -> TokenStream { let test: TokenStream = clause.slice(0..1); let rhs: TokenStream = clause.slice_from(1..); - if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { + if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() { qquote!({unquote(rhs)}) } else { qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs index 0ea4cec75cdda..31a5f5968bab6 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs @@ -26,7 +26,7 @@ use syntax::ast::Ident; use syntax::codemap::{DUMMY_SP, Span}; use syntax::ext::proc_macro_shim::build_block_emitter; use syntax::ext::base::{ExtCtxt, MacResult}; -use syntax::parse::token::{self, Token, DelimToken, keywords, str_to_ident}; +use syntax::parse::token::{self, Token, DelimToken}; use syntax::tokenstream::{TokenTree, TokenStream}; #[plugin_registrar] @@ -58,7 +58,7 @@ fn cond_rec(input: TokenStream) -> TokenStream { let test: TokenStream = clause.slice(0..1); let rhs: TokenStream = clause.slice_from(1..); - if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { + if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() { qquote!({unquote(rhs)}) } else { qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs index 169c96b438529..6a2d159a4bdd9 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs @@ -52,7 +52,7 @@ fn cond_rec(input: TokenStream) -> TokenStream { let test: TokenStream = clause.slice(0..1); let rhs: TokenStream = clause.slice_from(1..); - if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { + if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() { qquote!({unquote(rhs)}) } else { qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) diff --git a/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs b/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs index e750d1fb1e3e6..da82545ca7210 100644 --- a/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs +++ b/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs @@ -25,12 +25,12 @@ use syntax::ast::*; use syntax::codemap::Span; use syntax::ext::base::*; use syntax::ext::build::AstBuilder; -use syntax::parse::token::{intern, InternedString}; +use syntax::symbol::Symbol; use syntax::ptr::P; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(intern("derive_CustomPartialEq"), + reg.register_syntax_extension(Symbol::intern("derive_CustomPartialEq"), MultiDecorator(Box::new(expand_deriving_partial_eq))); } @@ -52,7 +52,7 @@ fn expand_deriving_partial_eq(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, it substr) } - let inline = cx.meta_word(span, InternedString::new("inline")); + let inline = cx.meta_word(span, Symbol::intern("inline")); let attrs = vec![cx.attribute(span, inline)]; 
let methods = vec![MethodDef { name: "eq", diff --git a/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs index 6b688b006bd4a..07f7d6bad7bf2 100644 --- a/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs @@ -23,7 +23,7 @@ extern crate rustc_plugin; use syntax::ast; use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax_ext::deriving::generic::{cs_fold, TraitDef, MethodDef, combine_substructure}; use syntax_ext::deriving::generic::ty::{Literal, LifetimeBounds, Path, borrowed_explicit_self}; use syntax_pos::Span; @@ -32,7 +32,7 @@ use rustc_plugin::Registry; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_syntax_extension( - token::intern("derive_TotalSum"), + Symbol::intern("derive_TotalSum"), MultiDecorator(box expand)); } @@ -66,7 +66,7 @@ fn expand(cx: &mut ExtCtxt, |cx, span, subexpr, field, _| { cx.expr_binary(span, ast::BinOpKind::Add, subexpr, cx.expr_method_call(span, field, - token::str_to_ident("total_sum"), vec![])) + ast::Ident::from_str("total_sum"), vec![])) }, zero, box |cx, span, _, _| { cx.span_bug(span, "wtf??"); }, diff --git a/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs index 6b58fee157584..50b16a0e26fb4 100644 --- a/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs +++ b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs @@ -23,7 +23,7 @@ extern crate rustc_plugin; use syntax::ast; use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; -use syntax::parse::token; +use syntax::symbol::Symbol; use syntax::ptr::P; use syntax_ext::deriving::generic::{TraitDef, MethodDef, combine_substructure}; use syntax_ext::deriving::generic::{Substructure, Struct, EnumMatching}; @@ -34,7 +34,7 @@ use rustc_plugin::Registry; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { reg.register_syntax_extension( - token::intern("derive_TotalSum"), + Symbol::intern("derive_TotalSum"), MultiDecorator(box expand)); } diff --git a/src/test/run-pass-fulldeps/auxiliary/lint_plugin_test.rs b/src/test/run-pass-fulldeps/auxiliary/lint_plugin_test.rs index 8ea131da338cb..8647797270f9a 100644 --- a/src/test/run-pass-fulldeps/auxiliary/lint_plugin_test.rs +++ b/src/test/run-pass-fulldeps/auxiliary/lint_plugin_test.rs @@ -36,7 +36,7 @@ impl LintPass for Pass { impl EarlyLintPass for Pass { fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) { - if it.ident.name.as_str() == "lintme" { + if it.ident.name == "lintme" { cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"); } } diff --git a/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs index 7257444ee8703..29cc6b7db9474 100644 --- a/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs +++ b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs @@ -23,6 +23,7 @@ use syntax::ext::base::*; use syntax::ext::quote::rt::ToTokens; use syntax::parse::{self, token}; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax::tokenstream::TokenTree; use syntax_pos::Span; use rustc_plugin::Registry; @@ -36,15 +37,15 @@ pub fn plugin_registrar(reg: &mut Registry) { reg.register_macro("make_a_1", expand_make_a_1); 
reg.register_macro("identity", expand_identity); reg.register_syntax_extension( - token::intern("into_multi_foo"), + Symbol::intern("into_multi_foo"), // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. MultiModifier(Box::new(expand_into_foo_multi))); reg.register_syntax_extension( - token::intern("duplicate"), + Symbol::intern("duplicate"), // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. MultiDecorator(Box::new(expand_duplicate))); reg.register_syntax_extension( - token::intern("caller"), + Symbol::intern("caller"), // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. MultiDecorator(Box::new(expand_caller))); } @@ -108,9 +109,9 @@ fn expand_duplicate(cx: &mut ExtCtxt, it: &Annotatable, push: &mut FnMut(Annotatable)) { let copy_name = match mi.node { - ast::MetaItemKind::List(_, ref xs) => { + ast::MetaItemKind::List(ref xs) => { if let Some(word) = xs[0].word() { - token::str_to_ident(&word.name()) + ast::Ident::with_empty_ctxt(word.name()) } else { cx.span_err(mi.span, "Expected word"); return; @@ -179,7 +180,7 @@ fn expand_caller(cx: &mut ExtCtxt, } let fn_name = match list[0].name() { - Some(name) => token::str_to_ident(&name), + Some(name) => ast::Ident::with_empty_ctxt(name), None => cx.span_fatal(list[0].span(), "First parameter must be an ident.") }; diff --git a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs index f21c914a76c9c..ba2af77cdb297 100644 --- a/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs +++ b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs @@ -22,9 +22,9 @@ use std::borrow::ToOwned; use syntax::ast; use syntax::ext::build::AstBuilder; use syntax::ext::base::{TTMacroExpander, ExtCtxt, MacResult, MacEager, NormalTT}; -use syntax::parse::token; use syntax::print::pprust; use syntax::ptr::P; +use syntax::symbol::Symbol; use syntax_pos::Span; use syntax::tokenstream; use rustc_plugin::Registry; @@ -40,15 +40,14 @@ impl TTMacroExpander for Expander { _: &[tokenstream::TokenTree]) -> Box { let args = self.args.iter().map(|i| pprust::meta_list_item_to_string(i)) .collect::>().join(", "); - let interned = token::intern_and_get_ident(&args[..]); - MacEager::expr(ecx.expr_str(sp, interned)) + MacEager::expr(ecx.expr_str(sp, Symbol::intern(&args))) } } #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { let args = reg.args().to_owned(); - reg.register_syntax_extension(token::intern("plugin_args"), + reg.register_syntax_extension(Symbol::intern("plugin_args"), // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. 
NormalTT(Box::new(Expander { args: args, }), None, false)); } diff --git a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs index 9fce19f46f65c..f97fb04aadf6a 100644 --- a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs +++ b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs @@ -18,18 +18,19 @@ use proc_macro_tokens::prelude::*; use rustc_plugin::Registry; use syntax::ext::base::SyntaxExtension; use syntax::ext::proc_macro_shim::prelude::*; +use syntax::symbol::Symbol; #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(token::intern("attr_tru"), + reg.register_syntax_extension(Symbol::intern("attr_tru"), SyntaxExtension::AttrProcMacro(Box::new(attr_tru))); - reg.register_syntax_extension(token::intern("attr_identity"), + reg.register_syntax_extension(Symbol::intern("attr_identity"), SyntaxExtension::AttrProcMacro(Box::new(attr_identity))); - reg.register_syntax_extension(token::intern("tru"), + reg.register_syntax_extension(Symbol::intern("tru"), SyntaxExtension::ProcMacro(Box::new(tru))); - reg.register_syntax_extension(token::intern("ret_tru"), + reg.register_syntax_extension(Symbol::intern("ret_tru"), SyntaxExtension::ProcMacro(Box::new(ret_tru))); - reg.register_syntax_extension(token::intern("identity"), + reg.register_syntax_extension(Symbol::intern("identity"), SyntaxExtension::ProcMacro(Box::new(identity))); } diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs index 6ac0d5ad1a3bc..2b3857048f367 100644 --- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs +++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs @@ -18,8 +18,8 @@ extern crate syntax_pos; extern crate rustc; extern crate rustc_plugin; -use syntax::parse::token::{str_to_ident, NtExpr, NtPat}; -use syntax::ast::{Pat}; +use syntax::parse::token::{NtExpr, NtPat}; +use syntax::ast::{Ident, Pat}; use syntax::tokenstream::{TokenTree}; use syntax::ext::base::{ExtCtxt, MacResult, MacEager}; use syntax::ext::build::AstBuilder; @@ -44,12 +44,12 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree]) } }; - let matched_nt = match *map[&str_to_ident("matched")] { + let matched_nt = match *map[&Ident::from_str("matched")] { MatchedNonterminal(ref nt) => nt.clone(), _ => unreachable!(), }; - let mac_expr = match (&*matched_nt, &*map[&str_to_ident("pat")]) { + let mac_expr = match (&*matched_nt, &*map[&Ident::from_str("pat")]) { (&NtExpr(ref matched_expr), &MatchedSeq(ref pats, seq_sp)) => { let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt| { match **pat_nt { diff --git a/src/test/run-pass-fulldeps/macro-quote-1.rs b/src/test/run-pass-fulldeps/macro-quote-1.rs index 914da3f746773..948b20c14771f 100644 --- a/src/test/run-pass-fulldeps/macro-quote-1.rs +++ b/src/test/run-pass-fulldeps/macro-quote-1.rs @@ -18,9 +18,6 @@ extern crate proc_macro_tokens; use proc_macro_tokens::prelude::*; extern crate syntax; -use syntax::ast::Ident; -use syntax::codemap::DUMMY_SP; -use syntax::parse::token::{self, Token, keywords, str_to_ident}; fn main() { let lex_true = lex("true"); diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index 7c0c24163fe61..b4ed57192ccf6 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -16,7 +16,7 @@ extern crate syntax; extern crate syntax_pos; use syntax::print::pprust::*; -use
syntax::parse::token::intern; +use syntax::symbol::Symbol; use syntax_pos::DUMMY_SP; fn main() { @@ -29,7 +29,7 @@ fn main() { cx.bt_push(syntax::codemap::ExpnInfo { call_site: DUMMY_SP, callee: syntax::codemap::NameAndSpan { - format: syntax::codemap::MacroBang(intern("")), + format: syntax::codemap::MacroBang(Symbol::intern("")), allow_internal_unstable: false, span: None, } @@ -97,7 +97,7 @@ fn main() { // quote_meta_item! let meta = quote_meta_item!(cx, cfg(foo = "bar")); - check!(meta_item_to_string, meta, *quote_meta_item!(cx, $meta); r#"cfg(foo = "bar")"#); + check!(meta_item_to_string, meta, quote_meta_item!(cx, $meta); r#"cfg(foo = "bar")"#); let attr = quote_attr!(cx, #![$meta]); check!(attribute_to_string, attr; r#"#![cfg(foo = "bar")]"#);
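
Note for reviewers: the hunks above are almost entirely a mechanical migration from the old `syntax::parse::token` interning helpers (`intern`, `intern_and_get_ident`, `str_to_ident`, `gensym_ident`, `InternedString`) to the new `syntax::symbol` API (`Symbol`, `ast::Name`, `Ident::from_str`). The sketch below summarizes the recurring substitutions; it is illustrative only, not part of the patch, and assumes the in-tree, rustc_private `syntax` crate as of this change.

    // Illustrative sketch of the old -> new interning calls used throughout this diff.
    #![feature(rustc_private)]
    extern crate syntax;

    use syntax::ast::Ident;
    use syntax::symbol::Symbol;

    // Hypothetical helper, shown only to group the examples.
    fn symbol_api_sketch() {
        // Old: token::intern("derive") or token::intern_and_get_ident("derive")
        let derive = Symbol::intern("derive");

        // Old: str_to_ident("concat")
        let concat = Ident::from_str("concat");

        // Old: gensym_ident("tmp")
        let tmp = Ident::with_empty_ctxt(Symbol::gensym("tmp"));

        // Symbols and Names compare directly; go through .as_str() only when a
        // real string is needed (e.g. env::var(&*var.as_str()) above).
        assert_eq!(concat.name, Symbol::intern("concat"));
        let _ = (derive, tmp);
    }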