From d4488b7df97e62bfeed8c30b1922ce55ff254594 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Tue, 28 Mar 2017 05:32:43 +0000 Subject: [PATCH 01/12] Simplify `hygiene::Mark` application, and remove variant `Token::SubstNt` in favor of `quoted::TokenTree::MetaVar`. --- src/libproc_macro/lib.rs | 28 +++++----- src/librustc/ich/impls_syntax.rs | 3 +- src/librustc_metadata/cstore_impl.rs | 2 +- src/librustdoc/html/highlight.rs | 2 +- src/libsyntax/ext/base.rs | 14 ----- src/libsyntax/ext/expand.rs | 36 ++++++------- src/libsyntax/ext/quote.rs | 4 +- src/libsyntax/ext/tt/macro_parser.rs | 24 ++++----- src/libsyntax/ext/tt/macro_rules.rs | 11 ++-- src/libsyntax/ext/tt/quoted.rs | 13 +++-- src/libsyntax/ext/tt/transcribe.rs | 60 ++++++++++++--------- src/libsyntax/fold.rs | 1 - src/libsyntax/parse/lexer/mod.rs | 42 ++++++++------- src/libsyntax/parse/mod.rs | 13 +++-- src/libsyntax/parse/parser.rs | 5 +- src/libsyntax/parse/token.rs | 3 -- src/libsyntax/print/pprust.rs | 1 - src/libsyntax_ext/concat_idents.rs | 7 ++- src/libsyntax_ext/deriving/custom.rs | 15 ++---- src/libsyntax_ext/format.rs | 13 +++-- src/libsyntax_ext/proc_macro_impl.rs | 4 +- src/libsyntax_pos/hygiene.rs | 24 ++++----- src/libsyntax_pos/lib.rs | 2 +- src/test/compile-fail/asm-out-assign-imm.rs | 1 - src/test/compile-fail/macro-context.rs | 2 +- src/test/ui/token/macro-incomplete-parse.rs | 2 +- 26 files changed, 160 insertions(+), 172 deletions(-) diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index f3d0521a2af6c..4744baf1b42fe 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -87,6 +87,8 @@ pub mod __internal { use std::rc::Rc; use syntax::ast; + use syntax::ext::base::ExtCtxt; + use syntax::ext::hygiene::Mark; use syntax::ptr::P; use syntax::parse::{self, token, ParseSess}; use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_}; @@ -107,7 +109,7 @@ pub mod __internal { } pub fn token_stream_parse_items(stream: TokenStream) -> Result>, LexError> { - with_parse_sess(move |sess| { + with_sess(move |(sess, _)| { let mut parser = parse::stream_to_parser(sess, stream.inner); let mut items = Vec::new(); @@ -140,13 +142,14 @@ pub mod __internal { // Emulate scoped_thread_local!() here essentially thread_local! 
{ - static CURRENT_SESS: Cell<*const ParseSess> = Cell::new(0 as *const _); + static CURRENT_SESS: Cell<(*const ParseSess, Mark)> = + Cell::new((0 as *const _, Mark::root())); } - pub fn set_parse_sess(sess: &ParseSess, f: F) -> R + pub fn set_sess(cx: &ExtCtxt, f: F) -> R where F: FnOnce() -> R { - struct Reset { prev: *const ParseSess } + struct Reset { prev: (*const ParseSess, Mark) } impl Drop for Reset { fn drop(&mut self) { @@ -156,18 +159,18 @@ pub mod __internal { CURRENT_SESS.with(|p| { let _reset = Reset { prev: p.get() }; - p.set(sess); + p.set((cx.parse_sess, cx.current_expansion.mark)); f() }) } - pub fn with_parse_sess(f: F) -> R - where F: FnOnce(&ParseSess) -> R + pub fn with_sess(f: F) -> R + where F: FnOnce((&ParseSess, Mark)) -> R { let p = CURRENT_SESS.with(|p| p.get()); - assert!(!p.is_null(), "proc_macro::__internal::with_parse_sess() called \ - before set_parse_sess()!"); - f(unsafe { &*p }) + assert!(!p.0.is_null(), "proc_macro::__internal::with_sess() called \ + before set_parse_sess()!"); + f(unsafe { (&*p.0, p.1) }) } } @@ -181,10 +184,11 @@ impl FromStr for TokenStream { type Err = LexError; fn from_str(src: &str) -> Result { - __internal::with_parse_sess(|sess| { + __internal::with_sess(|(sess, mark)| { let src = src.to_string(); let name = "".to_string(); - let stream = parse::parse_stream_from_source_str(name, src, sess); + let call_site = mark.expn_info().unwrap().call_site; + let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site)); Ok(__internal::token_stream_wrap(stream)) }) } diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs index b9cc3b5fb937f..b827284271ed2 100644 --- a/src/librustc/ich/impls_syntax.rs +++ b/src/librustc/ich/impls_syntax.rs @@ -283,8 +283,7 @@ fn hash_token<'a, 'gcx, 'tcx, W: StableHasherResult>(token: &token::Token, } token::Token::Ident(ident) | - token::Token::Lifetime(ident) | - token::Token::SubstNt(ident) => ident.name.hash_stable(hcx, hasher), + token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher), token::Token::Interpolated(ref non_terminal) => { // FIXME(mw): This could be implemented properly. It's just a diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index c49712086d52c..0649553e382e3 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -372,7 +372,7 @@ impl CrateStore for cstore::CStore { let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body); let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION }; - let body = filemap_to_stream(&sess.parse_sess, filemap); + let body = filemap_to_stream(&sess.parse_sess, filemap, None); // Mark the attrs as used let attrs = data.get_item_attrs(id.index, &self.dep_graph); diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index a40d1e6bdc917..1f8c88d8ecf96 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -319,7 +319,7 @@ impl<'a> Classifier<'a> { token::Lifetime(..) => Class::Lifetime, token::Underscore | token::Eof | token::Interpolated(..) | - token::SubstNt(..) 
| token::Tilde | token::At => Class::None, + token::Tilde | token::At => Class::None, }; // Anything that didn't return above is the simple case where we the diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 8089fad5f36d8..af5eabf06f87b 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -903,17 +903,3 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt, } Some(es) } - -pub struct ChangeSpan { - pub span: Span -} - -impl Folder for ChangeSpan { - fn new_span(&mut self, _sp: Span) -> Span { - self.span - } - - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - fold::noop_fold_mac(mac, self) - } -} diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index f8a26287bd47b..11efef4549976 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -16,7 +16,7 @@ use config::{is_test_or_bench, StripUnconfigured}; use errors::FatalError; use ext::base::*; use ext::derive::{add_derived_markers, collect_derives}; -use ext::hygiene::Mark; +use ext::hygiene::{Mark, SyntaxContext}; use ext::placeholders::{placeholder, PlaceholderExpander}; use feature_gate::{self, Features, is_builtin_attr}; use fold; @@ -470,7 +470,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> { Ok(()) }; - let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark)); let opt_expanded = match *ext { SyntaxExtension::DeclMacro(ref expand, def_span) => { if let Err(msg) = validate_and_set_expn_info(def_span.map(|(_, s)| s), @@ -478,7 +477,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.span_err(path.span, &msg); return kind.dummy(span); } - kind.make_from(expand.expand(self.cx, span, marked_tts)) + kind.make_from(expand.expand(self.cx, span, mac.node.stream())) } NormalTT(ref expandfun, def_info, allow_internal_unstable) => { @@ -487,7 +486,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { self.cx.span_err(path.span, &msg); return kind.dummy(span); } - kind.make_from(expandfun.expand(self.cx, span, marked_tts)) + kind.make_from(expandfun.expand(self.cx, span, mac.node.stream())) } IdentTT(ref expander, tt_span, allow_internal_unstable) => { @@ -506,7 +505,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } }); - let input: Vec<_> = marked_tts.into_trees().collect(); + let input: Vec<_> = mac.node.stream().into_trees().collect(); kind.make_from(expander.expand(self.cx, span, ident, input)) } @@ -541,21 +540,17 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }, }); - let tok_result = expandfun.expand(self.cx, span, marked_tts); + let tok_result = expandfun.expand(self.cx, span, mac.node.stream()); Some(self.parse_expansion(tok_result, kind, path, span)) } }; - let expanded = if let Some(expanded) = opt_expanded { - expanded - } else { + unwrap_or!(opt_expanded, { let msg = format!("non-{kind} macro in {kind} position: {name}", name = path.segments[0].identifier.name, kind = kind.name()); self.cx.span_err(path.span, &msg); - return kind.dummy(span); - }; - - expanded.fold_with(&mut Marker(mark)) + kind.dummy(span) + }) } /// Expand a derive invocation. Returns the result of expansion. 
@@ -621,8 +616,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } }; parser.ensure_complete_parse(path, kind.name(), span); - // FIXME better span info - expansion.fold_with(&mut ChangeSpan { span: span }) + expansion } } @@ -673,7 +667,9 @@ impl<'a> Parser<'a> { if self.token != token::Eof { let msg = format!("macro expansion ignores token `{}` and any following", self.this_token_to_string()); - let mut err = self.diagnostic().struct_span_err(self.span, &msg); + let mut def_site_span = self.span; + def_site_span.ctxt = SyntaxContext::empty(); // Avoid emitting backtrace info twice. + let mut err = self.diagnostic().struct_span_err(def_site_span, &msg); let msg = format!("caused by the macro expansion here; the usage \ of `{}!` is likely invalid in {} context", macro_path, kind_name); @@ -787,12 +783,12 @@ fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream { Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti), Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii), }; - string_to_stream(text, parse_sess) + string_to_stream(text, parse_sess, item.span()) } -fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream { +fn string_to_stream(text: String, parse_sess: &ParseSess, span: Span) -> TokenStream { let filename = String::from(""); - filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text)) + filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text), Some(span)) } impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { @@ -1070,7 +1066,7 @@ impl<'feat> ExpansionConfig<'feat> { } // A Marker adds the given mark to the syntax context. -struct Marker(Mark); +pub struct Marker(pub Mark); impl Folder for Marker { fn fold_ident(&mut self, mut ident: Ident) -> Ident { diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index f8fac847a053e..314a97496f8cc 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -364,7 +364,7 @@ pub mod rt { fn parse_tts(&self, s: String) -> Vec { let source_name = "".to_owned(); - parse::parse_stream_from_source_str(source_name, s, self.parse_sess()) + parse::parse_stream_from_source_str(source_name, s, self.parse_sess(), None) .into_trees().collect() } } @@ -700,7 +700,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { token::Underscore => "Underscore", token::Eof => "Eof", - token::Whitespace | token::SubstNt(_) | token::Comment | token::Shebang(_) => { + token::Whitespace | token::Comment | token::Shebang(_) => { panic!("unhandled token in quote!"); } }; diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 61d8fc2941afb..e877f1fedd409 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -158,15 +158,10 @@ pub type NamedParseResult = ParseResult>>; pub fn count_names(ms: &[TokenTree]) -> usize { ms.iter().fold(0, |count, elt| { count + match *elt { - TokenTree::Sequence(_, ref seq) => { - seq.num_captures - } - TokenTree::Delimited(_, ref delim) => { - count_names(&delim.tts) - } - TokenTree::MetaVarDecl(..) => { - 1 - } + TokenTree::Sequence(_, ref seq) => seq.num_captures, + TokenTree::Delimited(_, ref delim) => count_names(&delim.tts), + TokenTree::MetaVar(..) => 0, + TokenTree::MetaVarDecl(..) => 1, TokenTree::Token(..) => 0, } }) @@ -244,7 +239,7 @@ fn nameize>(sess: &ParseSess, ms: &[TokenTree], mut } } } - TokenTree::Token(..) => (), + TokenTree::MetaVar(..) | TokenTree::Token(..) 
=> (), } Ok(()) @@ -409,12 +404,11 @@ fn inner_parse_loop(sess: &ParseSess, ei.idx = 0; cur_eis.push(ei); } - TokenTree::Token(_, ref t) => { - if token_name_eq(t, token) { - ei.idx += 1; - next_eis.push(ei); - } + TokenTree::Token(_, ref t) if token_name_eq(t, token) => { + ei.idx += 1; + next_eis.push(ei); } + TokenTree::Token(..) | TokenTree::MetaVar(..) => {} } } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 9c728c9f2ebf0..b732f47ce6a93 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -120,7 +120,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt, _ => cx.span_bug(sp, "malformed macro rhs"), }; // rhs has holes ( `$id` and `$(...)` that need filled) - let tts = transcribe(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs); + let tts = transcribe(cx, Some(named_matches), rhs); if cx.trace_macros() { trace_macros_note(cx, sp, format!("to `{}`", tts)); @@ -292,7 +292,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool { use self::quoted::TokenTree; for tt in tts { match *tt { - TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => (), + TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (), TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) { return false; }, @@ -372,7 +372,7 @@ impl FirstSets { let mut first = TokenSet::empty(); for tt in tts.iter().rev() { match *tt { - TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => { + TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { first.replace_with(tt.clone()); } TokenTree::Delimited(span, ref delimited) => { @@ -432,7 +432,7 @@ impl FirstSets { for tt in tts.iter() { assert!(first.maybe_empty); match *tt { - TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => { + TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { first.add_one(tt.clone()); return first; } @@ -602,7 +602,7 @@ fn check_matcher_core(sess: &ParseSess, // First, update `last` so that it corresponds to the set // of NT tokens that might end the sequence `... token`. match *token { - TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => { + TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => { let can_be_followed_by_any; if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, token) { let msg = format!("invalid fragment specifier `{}`", bad_frag); @@ -872,6 +872,7 @@ fn is_legal_fragment_specifier(sess: &ParseSess, fn quoted_tt_to_string(tt: "ed::TokenTree) -> String { match *tt { quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok), + quoted::TokenTree::MetaVar(_, name) => format!("${}", name), quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind), _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \ in follow set checker"), diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index fa65e9501c2bb..18056f6028745 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -78,9 +78,11 @@ pub enum KleeneOp { pub enum TokenTree { Token(Span, token::Token), Delimited(Span, Rc), - /// A kleene-style repetition sequence with a span + /// A kleene-style repetition sequence Sequence(Span, Rc), - /// Matches a nonterminal. This is only used in the left hand side of MBE macros. + /// E.g. `$var` + MetaVar(Span, ast::Ident), + /// E.g. `$var:expr`. 
This is only used in the left hand side of MBE macros. MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */), } @@ -130,6 +132,7 @@ impl TokenTree { pub fn span(&self) -> Span { match *self { TokenTree::Token(sp, _) | + TokenTree::MetaVar(sp, _) | TokenTree::MetaVarDecl(sp, _, _) | TokenTree::Delimited(sp, _) | TokenTree::Sequence(sp, _) => sp, @@ -144,7 +147,7 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars while let Some(tree) = trees.next() { let tree = parse_tree(tree, &mut trees, expect_matchers, sess); match tree { - TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => { + TokenTree::MetaVar(start_sp, ident) if expect_matchers => { let span = match trees.next() { Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() { Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() { @@ -199,13 +202,13 @@ fn parse_tree(tree: tokenstream::TokenTree, let ident = ast::Ident { name: Symbol::intern("$crate"), ..ident }; TokenTree::Token(span, token::Ident(ident)) } else { - TokenTree::Token(span, token::SubstNt(ident)) + TokenTree::MetaVar(span, ident) } } Some(tokenstream::TokenTree::Token(span, tok)) => { let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok)); sess.span_diagnostic.span_err(span, &msg); - TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident())) + TokenTree::MetaVar(span, keywords::Invalid.ident()) } None => TokenTree::Token(span, token::Dollar), }, diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 78e755e73fa30..9438e2fb0e5bf 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -9,10 +9,12 @@ // except according to those terms. use ast::Ident; -use errors::Handler; +use ext::base::ExtCtxt; +use ext::expand::Marker; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::quoted; -use parse::token::{self, SubstNt, Token, NtTT}; +use fold::noop_fold_tt; +use parse::token::{self, Token, NtTT}; use syntax_pos::{Span, DUMMY_SP}; use tokenstream::{TokenStream, TokenTree, Delimited}; use util::small_vector::SmallVector; @@ -61,9 +63,9 @@ impl Iterator for Frame { } /// This can do Macro-By-Example transcription. On the other hand, if -/// `src` contains no `TokenTree::{Sequence, Match}`s, or `SubstNt`s, `interp` can +/// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can /// (and should) be None. 
-pub fn transcribe(sp_diag: &Handler, +pub fn transcribe(cx: &ExtCtxt, interp: Option>>, src: Vec) -> TokenStream { @@ -120,22 +122,20 @@ pub fn transcribe(sp_diag: &Handler, &interpolations, &repeats) { LockstepIterSize::Unconstrained => { - panic!(sp_diag.span_fatal( - sp, /* blame macro writer */ + cx.span_fatal(sp, /* blame macro writer */ "attempted to repeat an expression \ containing no syntax \ - variables matched as repeating at this depth")); + variables matched as repeating at this depth"); } LockstepIterSize::Contradiction(ref msg) => { // FIXME #2887 blame macro invoker instead - panic!(sp_diag.span_fatal(sp, &msg[..])); + cx.span_fatal(sp, &msg[..]); } LockstepIterSize::Constraint(len, _) => { if len == 0 { if seq.op == quoted::KleeneOp::OneOrMore { // FIXME #2887 blame invoker - panic!(sp_diag.span_fatal(sp, - "this must repeat at least once")); + cx.span_fatal(sp, "this must repeat at least once"); } } else { repeats.push((0, len)); @@ -149,29 +149,37 @@ pub fn transcribe(sp_diag: &Handler, } } // FIXME #2887: think about span stuff here - quoted::TokenTree::Token(sp, SubstNt(ident)) => { - match lookup_cur_matched(ident, &interpolations, &repeats) { - None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()), - Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched { - match **nt { - NtTT(ref tt) => result.push(tt.clone().into()), - _ => { - let token = TokenTree::Token(sp, token::Interpolated(nt.clone())); - result.push(token.into()); - } + quoted::TokenTree::MetaVar(mut sp, ident) => { + if let Some(cur_matched) = lookup_cur_matched(ident, &interpolations, &repeats) { + if let MatchedNonterminal(ref nt) = *cur_matched { + if let NtTT(ref tt) = **nt { + result.push(tt.clone().into()); + } else { + sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark); + let token = TokenTree::Token(sp, token::Interpolated(nt.clone())); + result.push(token.into()); } } else { - panic!(sp_diag.span_fatal( - sp, /* blame the macro writer */ - &format!("variable '{}' is still repeating at this depth", ident))); + cx.span_fatal(sp, /* blame the macro writer */ + &format!("variable '{}' is still repeating at this depth", ident)); } + } else { + let ident = + Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident }; + sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark); + result.push(TokenTree::Token(sp, token::Dollar).into()); + result.push(TokenTree::Token(sp, token::Ident(ident)).into()); } } - quoted::TokenTree::Delimited(span, delimited) => { + quoted::TokenTree::Delimited(mut span, delimited) => { + span.ctxt = span.ctxt.apply_mark(cx.current_expansion.mark); stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span }); result_stack.push(mem::replace(&mut result, Vec::new())); } - quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok).into()), + quoted::TokenTree::Token(sp, tok) => { + let mut marker = Marker(cx.current_expansion.mark); + result.push(noop_fold_tt(TokenTree::Token(sp, tok), &mut marker).into()) + } quoted::TokenTree::MetaVarDecl(..) 
=> panic!("unexpected `TokenTree::MetaVarDecl"), } } @@ -240,7 +248,7 @@ fn lockstep_iter_size(tree: "ed::TokenTree, size + lockstep_iter_size(tt, interpolations, repeats) }) }, - TokenTree::Token(_, SubstNt(name)) | TokenTree::MetaVarDecl(_, name, _) => + TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => match lookup_cur_matched(name, interpolations, repeats) { Some(matched) => match *matched { MatchedNonterminal(_) => LockstepIterSize::Unconstrained, diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 4c6cf49a8db43..2032aecacbb91 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -588,7 +588,6 @@ pub fn noop_fold_token(t: token::Token, fld: &mut T) -> token::Token }; token::Interpolated(Rc::new(fld.fold_interpolated(nt))) } - token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)), _ => t } } diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index e2656bea48339..afc1e583d69bb 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -66,14 +66,15 @@ pub struct StringReader<'a> { token: token::Token, span: Span, open_braces: Vec<(token::DelimToken, Span)>, -} - -fn mk_sp(lo: BytePos, hi: BytePos) -> Span { - Span { lo: lo, hi: hi, ctxt: NO_EXPANSION } + pub override_span: Option, } impl<'a> StringReader<'a> { - fn next_token(&mut self) -> TokenAndSpan { + fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span { + unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION}) + } + + fn next_token(&mut self) -> TokenAndSpan where Self: Sized { let res = self.try_next_token(); self.unwrap_or_abort(res) } @@ -175,6 +176,7 @@ impl<'a> StringReader<'a> { token: token::Eof, span: syntax_pos::DUMMY_SP, open_braces: Vec::new(), + override_span: None, } } @@ -229,12 +231,12 @@ impl<'a> StringReader<'a> { /// Report a fatal error spanning [`from_pos`, `to_pos`). fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError { - self.fatal_span(mk_sp(from_pos, to_pos), m) + self.fatal_span(self.mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`). 
fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) { - self.err_span(mk_sp(from_pos, to_pos), m) + self.err_span(self.mk_sp(from_pos, to_pos), m) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -258,7 +260,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an @@ -282,7 +284,7 @@ impl<'a> StringReader<'a> { for c in c.escape_default() { m.push(c) } - self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..]) + self.sess.span_diagnostic.struct_span_err(self.mk_sp(from_pos, to_pos), &m[..]) } /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the @@ -306,11 +308,11 @@ impl<'a> StringReader<'a> { None => { if self.is_eof() { self.peek_tok = token::Eof; - self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos); + self.peek_span = self.mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { let start_bytepos = self.pos; self.peek_tok = self.next_token_inner()?; - self.peek_span = mk_sp(start_bytepos, self.pos); + self.peek_span = self.mk_sp(start_bytepos, self.pos); }; } } @@ -502,7 +504,7 @@ impl<'a> StringReader<'a> { if let Some(c) = self.ch { if c.is_whitespace() { let msg = "called consume_any_line_comment, but there was whitespace"; - self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg); + self.sess.span_diagnostic.span_err(self.mk_sp(self.pos, self.pos), msg); } } @@ -545,13 +547,13 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }) }) } else { Some(TokenAndSpan { tok: token::Comment, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }) } } @@ -584,7 +586,7 @@ impl<'a> StringReader<'a> { } return Some(TokenAndSpan { tok: token::Shebang(self.name_from(start)), - sp: mk_sp(start, self.pos), + sp: self.mk_sp(start, self.pos), }); } } @@ -612,7 +614,7 @@ impl<'a> StringReader<'a> { } let c = Some(TokenAndSpan { tok: token::Whitespace, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }); debug!("scanning whitespace: {:?}", c); c @@ -674,7 +676,7 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: mk_sp(start_bpos, self.pos), + sp: self.mk_sp(start_bpos, self.pos), }) }) } @@ -869,7 +871,7 @@ impl<'a> StringReader<'a> { let valid = if self.ch_is('{') { self.scan_unicode_escape(delim) && !ascii_only } else { - let span = mk_sp(start, self.pos); + let span = self.mk_sp(start, self.pos); self.sess.span_diagnostic .struct_span_err(span, "incorrect unicode escape sequence") .span_help(span, @@ -907,13 +909,13 @@ impl<'a> StringReader<'a> { }, c); if e == '\r' { - err.span_help(mk_sp(escaped_pos, pos), + err.span_help(self.mk_sp(escaped_pos, pos), "this is an isolated carriage return; consider \ checking your editor and version control \ settings"); } if (e == '{' || e == '}') && !ascii_only { - err.span_help(mk_sp(escaped_pos, pos), + err.span_help(self.mk_sp(escaped_pos, pos), "if used in a formatting string, curly braces \ are escaped with `{{` and `}}`"); } diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index 3a68a6ba7646c..f917eec2cd0b1 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -141,9 +141,10 @@ pub fn 
parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess new_parser_from_source_str(sess, name, source).parse_stmt() } -pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess) - -> TokenStream { - filemap_to_stream(sess, sess.codemap().new_filemap(name, source)) +pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess, + override_span: Option) + -> TokenStream { + filemap_to_stream(sess, sess.codemap().new_filemap(name, source), override_span) } // Create a new parser from a source string @@ -177,7 +178,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, /// Given a filemap and config, return a parser pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc, ) -> Parser { let end_pos = filemap.end_pos; - let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap)); + let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None)); if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP { parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION }; @@ -212,8 +213,10 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option) } /// Given a filemap, produce a sequence of token-trees -pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc) -> TokenStream { +pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc, override_span: Option) + -> TokenStream { let mut srdr = lexer::StringReader::new(sess, filemap); + srdr.override_span = override_span; srdr.real_token(); panictry!(srdr.parse_all_token_trees()) } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 851a638e14842..25ab46f6f9e2b 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -2626,7 +2626,10 @@ impl<'a> Parser<'a> { pub fn process_potential_macro_variable(&mut self) { let ident = match self.token { - token::SubstNt(name) => { + token::Dollar if self.span.ctxt != syntax_pos::hygiene::SyntaxContext::empty() && + self.look_ahead(1, |t| t.is_ident()) => { + self.bump(); + let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() }; self.fatal(&format!("unknown macro variable `{}`", name)).emit(); return } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 77db604c56e11..f208b0f56f81e 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -172,9 +172,6 @@ pub enum Token { // Can be expanded into several tokens. /// Doc comment DocComment(ast::Name), - // In right-hand-sides of MBE macros: - /// A syntactic variable that will be filled in by macro expansion. - SubstNt(ast::Ident), // Junk. 
These carry no data because we don't really care about the data // they *would* carry, and don't really want to allocate a new ident for diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 34cda433d5250..6c6ca556e35ed 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -270,7 +270,6 @@ pub fn token_to_string(tok: &Token) -> String { /* Other */ token::DocComment(s) => s.to_string(), - token::SubstNt(s) => format!("${}", s), token::Eof => "".to_string(), token::Whitespace => " ".to_string(), token::Comment => "/* */".to_string(), diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index dc4b8eb24cd0a..6f4c112acb6c6 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -15,6 +15,8 @@ use syntax::feature_gate; use syntax::parse::token; use syntax::ptr::P; use syntax_pos::Span; +use syntax_pos::symbol::Symbol; +use syntax_pos::hygiene::SyntaxContext; use syntax::tokenstream::TokenTree; pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, @@ -50,7 +52,10 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, } } } - let res = ast::Ident::from_str(&res_str); + let res = ast::Ident { + name: Symbol::intern(&res_str), + ctxt: SyntaxContext::empty().apply_mark(cx.current_expansion.mark), + }; struct Result { ident: ast::Ident, diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index b01ef65e5fe5e..fa5537b5d8fe3 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -16,7 +16,6 @@ use syntax::ast::{self, ItemKind, Attribute, Mac}; use syntax::attr::{mark_used, mark_known}; use syntax::codemap::Span; use syntax::ext::base::*; -use syntax::fold::Folder; use syntax::visit::Visitor; struct MarkAttrs<'a>(&'a [ast::Name]); @@ -75,7 +74,7 @@ impl MultiItemModifier for ProcMacroDerive { MarkAttrs(&self.attrs).visit_item(&item); let input = __internal::new_token_stream(ecx.resolver.eliminate_crate_var(item.clone())); - let res = __internal::set_parse_sess(&ecx.parse_sess, || { + let res = __internal::set_sess(ecx, || { let inner = self.inner; panic::catch_unwind(panic::AssertUnwindSafe(|| inner(input))) }); @@ -97,9 +96,9 @@ impl MultiItemModifier for ProcMacroDerive { } }; - let new_items = __internal::set_parse_sess(&ecx.parse_sess, || { + __internal::set_sess(ecx, || { match __internal::token_stream_parse_items(stream) { - Ok(new_items) => new_items, + Ok(new_items) => new_items.into_iter().map(Annotatable::Item).collect(), Err(_) => { // FIXME: handle this better let msg = "proc-macro derive produced unparseable tokens"; @@ -107,12 +106,6 @@ impl MultiItemModifier for ProcMacroDerive { panic!(FatalError); } } - }); - - // Reassign spans of all expanded items to the input `item` - // for better errors here. - new_items.into_iter().map(|item| { - Annotatable::Item(ChangeSpan { span: span }.fold_item(item).expect_one("")) - }).collect() + }) } } diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index a6768c07fe13b..144d1930df90b 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -20,7 +20,7 @@ use syntax::ext::build::AstBuilder; use syntax::parse::token; use syntax::ptr::P; use syntax::symbol::{Symbol, keywords}; -use syntax_pos::{Span, DUMMY_SP}; +use syntax_pos::Span; use syntax::tokenstream; use std::collections::{HashMap, HashSet}; @@ -558,7 +558,9 @@ impl<'a, 'b> Context<'a, 'b> { // passed to this function. 
for (i, e) in self.args.into_iter().enumerate() { let name = self.ecx.ident_of(&format!("__arg{}", i)); - pats.push(self.ecx.pat_ident(DUMMY_SP, name)); + let span = + Span { ctxt: e.span.ctxt.apply_mark(self.ecx.current_expansion.mark), ..e.span }; + pats.push(self.ecx.pat_ident(span, name)); for ref arg_ty in self.arg_unique_types[i].iter() { locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name)); } @@ -672,10 +674,10 @@ impl<'a, 'b> Context<'a, 'b> { } pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt, - sp: Span, + mut sp: Span, tts: &[tokenstream::TokenTree]) -> Box { - + sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark); match parse_args(ecx, sp, tts) { Some((efmt, args, names)) => { MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names)) @@ -696,7 +698,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, // `ArgumentType` does not derive `Clone`. let arg_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect(); let arg_unique_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect(); - let macsp = ecx.call_site(); + let mut macsp = ecx.call_site(); + macsp.ctxt = macsp.ctxt.apply_mark(ecx.current_expansion.mark); let msg = "format argument must be a string literal."; let fmt = match expr_to_spanned_string(ecx, efmt, msg) { Some(fmt) => fmt, diff --git a/src/libsyntax_ext/proc_macro_impl.rs b/src/libsyntax_ext/proc_macro_impl.rs index f60e5824db962..5fcedbf50c60f 100644 --- a/src/libsyntax_ext/proc_macro_impl.rs +++ b/src/libsyntax_ext/proc_macro_impl.rs @@ -34,7 +34,7 @@ impl base::AttrProcMacro for AttrProcMacro { let annotation = __internal::token_stream_wrap(annotation); let annotated = __internal::token_stream_wrap(annotated); - let res = __internal::set_parse_sess(&ecx.parse_sess, || { + let res = __internal::set_sess(ecx, || { panic::catch_unwind(panic::AssertUnwindSafe(|| (self.inner)(annotation, annotated))) }); @@ -69,7 +69,7 @@ impl base::ProcMacro for BangProcMacro { -> TokenStream { let input = __internal::token_stream_wrap(input); - let res = __internal::set_parse_sess(&ecx.parse_sess, || { + let res = __internal::set_sess(ecx, || { panic::catch_unwind(panic::AssertUnwindSafe(|| (self.inner)(input))) }); diff --git a/src/libsyntax_pos/hygiene.rs b/src/libsyntax_pos/hygiene.rs index f2ccc3f051e92..804b91ab09e3c 100644 --- a/src/libsyntax_pos/hygiene.rs +++ b/src/libsyntax_pos/hygiene.rs @@ -144,24 +144,18 @@ impl SyntaxContext { pub fn apply_mark(self, mark: Mark) -> SyntaxContext { HygieneData::with(|data| { let syntax_contexts = &mut data.syntax_contexts; - let ctxt_data = syntax_contexts[self.0 as usize]; - if mark == ctxt_data.outer_mark { - return ctxt_data.prev_ctxt; - } - - let modern = if data.marks[mark.0 as usize].modern { - *data.markings.entry((ctxt_data.modern, mark)).or_insert_with(|| { - let modern = SyntaxContext(syntax_contexts.len() as u32); + let mut modern = syntax_contexts[self.0 as usize].modern; + if data.marks[mark.0 as usize].modern { + modern = *data.markings.entry((modern, mark)).or_insert_with(|| { + let len = syntax_contexts.len() as u32; syntax_contexts.push(SyntaxContextData { outer_mark: mark, - prev_ctxt: ctxt_data.modern, - modern: modern, + prev_ctxt: modern, + modern: SyntaxContext(len), }); - modern - }) - } else { - ctxt_data.modern - }; + SyntaxContext(len) + }); + } *data.markings.entry((self, mark)).or_insert_with(|| { syntax_contexts.push(SyntaxContextData { diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index bb62efd376a0b..a7c247689cce8 100644 
--- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -186,7 +186,7 @@ impl Span { pub fn to(self, end: Span) -> Span { // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. issue #23480) - if end.ctxt == SyntaxContext::empty() { + if self.ctxt == SyntaxContext::empty() { Span { lo: self.lo, ..end } } else { Span { hi: end.hi, ..self } diff --git a/src/test/compile-fail/asm-out-assign-imm.rs b/src/test/compile-fail/asm-out-assign-imm.rs index 3c4a5dcb7b038..f95e4410381d9 100644 --- a/src/test/compile-fail/asm-out-assign-imm.rs +++ b/src/test/compile-fail/asm-out-assign-imm.rs @@ -28,7 +28,6 @@ pub fn main() { asm!("mov $1, $0" : "=r"(x) : "r"(5)); //~^ ERROR re-assignment of immutable variable `x` //~| NOTE re-assignment of immutable - //~| NOTE in this expansion of asm! } foo(x); } diff --git a/src/test/compile-fail/macro-context.rs b/src/test/compile-fail/macro-context.rs index 80802e19f8401..cc714a6e43141 100644 --- a/src/test/compile-fail/macro-context.rs +++ b/src/test/compile-fail/macro-context.rs @@ -23,5 +23,5 @@ fn main() { m!() => {} //~ NOTE the usage of `m!` is likely invalid in pattern context } - m!(); + m!(); //~ NOTE in this expansion } diff --git a/src/test/ui/token/macro-incomplete-parse.rs b/src/test/ui/token/macro-incomplete-parse.rs index 47374fc3c6085..08749373432f5 100644 --- a/src/test/ui/token/macro-incomplete-parse.rs +++ b/src/test/ui/token/macro-incomplete-parse.rs @@ -32,7 +32,7 @@ macro_rules! ignored_pat { ignored_item!(); //~ NOTE caused by the macro expansion here fn main() { - ignored_expr!(); + ignored_expr!(); //~ NOTE in this expansion match 1 { ignored_pat!() => (), //~ NOTE caused by the macro expansion here _ => (), From 7d41674b175cdb3452e042ef6f37141bc3788f8b Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Fri, 17 Mar 2017 23:23:12 +0000 Subject: [PATCH 02/12] Clean up `tokenstream::Cursor` and `proc_macro`. --- src/libproc_macro/lib.rs | 28 ++++++++----------- src/libsyntax/tokenstream.rs | 54 ++++++++++++++++++------------------ 2 files changed, 38 insertions(+), 44 deletions(-) diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 4744baf1b42fe..b9f4fa63e6006 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -48,7 +48,7 @@ use std::str::FromStr; use syntax::errors::DiagnosticBuilder; use syntax::parse; -use syntax::tokenstream::TokenStream as TokenStream_; +use syntax::tokenstream; /// The main type provided by this crate, representing an abstract stream of /// tokens. @@ -60,9 +60,7 @@ use syntax::tokenstream::TokenStream as TokenStream_; /// The API of this type is intentionally bare-bones, but it'll be expanded over /// time! #[stable(feature = "proc_macro_lib", since = "1.15.0")] -pub struct TokenStream { - inner: TokenStream_, -} +pub struct TokenStream(tokenstream::TokenStream); /// Error returned from `TokenStream::from_str`. 
 #[derive(Debug)]
 pub struct LexError {
     _inner: (),
 }
@@ -91,26 +89,22 @@ pub mod __internal {
     use syntax::ext::hygiene::Mark;
     use syntax::ptr::P;
     use syntax::parse::{self, token, ParseSess};
-    use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_};
+    use syntax::tokenstream;
 
     use super::{TokenStream, LexError};
 
     pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
-        TokenStream {
-            inner: TokenTree::Token(item.span, token::Interpolated(Rc::new(token::NtItem(item))))
-                .into()
-        }
+        let (span, token) = (item.span, token::Interpolated(Rc::new(token::NtItem(item))));
+        TokenStream(tokenstream::TokenTree::Token(span, token).into())
     }
 
-    pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
-        TokenStream {
-            inner: inner
-        }
+    pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {
+        TokenStream(inner)
     }
 
     pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
         with_sess(move |(sess, _)| {
-            let mut parser = parse::stream_to_parser(sess, stream.inner);
+            let mut parser = parse::stream_to_parser(sess, stream.0);
             let mut items = Vec::new();
 
             while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
@@ -121,8 +115,8 @@ pub mod __internal {
         })
     }
 
-    pub fn token_stream_inner(stream: TokenStream) -> TokenStream_ {
-        stream.inner
+    pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream {
+        stream.0
     }
 
     pub trait Registry {
@@ -197,6 +191,6 @@ impl FromStr for TokenStream {
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.inner.fmt(f)
+        self.0.fmt(f)
     }
 }
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 963482fc223f1..ab4f697071477 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -199,7 +199,7 @@ impl TokenStream {
     pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
-            1 => TokenStream::from(streams.pop().unwrap()),
+            1 => streams.pop().unwrap(),
             _ => TokenStream::concat_rc_slice(RcSlice::new(streams)),
         }
     }
@@ -244,37 +244,22 @@ struct StreamCursor {
     stack: Vec<(RcSlice<TokenStream>, usize)>,
 }
 
-impl Iterator for Cursor {
-    type Item = TokenTree;
-
+impl StreamCursor {
     fn next(&mut self) -> Option<TokenTree> {
-        let cursor = match self.0 {
-            CursorKind::Stream(ref mut cursor) => cursor,
-            CursorKind::Tree(ref tree, ref mut consumed @ false) => {
-                *consumed = true;
-                return Some(tree.clone());
-            }
-            _ => return None,
-        };
-
         loop {
-            if cursor.index < cursor.stream.len() {
-                match cursor.stream[cursor.index].kind.clone() {
-                    TokenStreamKind::Tree(tree) => {
-                        cursor.index += 1;
-                        return Some(tree);
-                    }
+            if self.index < self.stream.len() {
+                self.index += 1;
+                match self.stream[self.index - 1].kind.clone() {
+                    TokenStreamKind::Tree(tree) => return Some(tree),
                     TokenStreamKind::Stream(stream) => {
-                        cursor.stack.push((mem::replace(&mut cursor.stream, stream),
-                                           mem::replace(&mut cursor.index, 0) + 1));
-                    }
-                    TokenStreamKind::Empty => {
-                        cursor.index += 1;
+                        self.stack.push((mem::replace(&mut self.stream, stream),
+                                         mem::replace(&mut self.index, 0)));
                     }
+                    TokenStreamKind::Empty => {}
                 }
-            } else if let Some((stream, index)) = cursor.stack.pop() {
-                cursor.stream = stream;
-                cursor.index = index;
+            } else if let Some((stream, index)) = self.stack.pop() {
+                self.stream = stream;
+                self.index = index;
             } else {
                 return None;
             }
@@ -282,6 +267,21 @@ impl Iterator for Cursor {
     }
 }
 
+impl Iterator for Cursor {
+    type Item = TokenTree;
+
+    fn next(&mut self) -> Option<TokenTree> {
+        let
(tree, consumed) = match self.0 { + CursorKind::Tree(ref tree, ref mut consumed @ false) => (tree, consumed), + CursorKind::Stream(ref mut cursor) => return cursor.next(), + _ => return None, + }; + + *consumed = true; + Some(tree.clone()) + } +} + impl Cursor { fn new(stream: TokenStream) -> Self { Cursor(match stream.kind { From e42836b2085233323339bacb636ecf9c28e8422e Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Fri, 17 Mar 2017 23:41:09 +0000 Subject: [PATCH 03/12] Implement `quote!` and other `proc_macro` API. --- src/Cargo.lock | 9 - .../src/library-features/proc-macro.md | 7 + src/libproc_macro/Cargo.toml | 1 + src/libproc_macro/lib.rs | 474 +++++++++++++++++- src/libproc_macro/quote.rs | 259 ++++++++++ src/libproc_macro_plugin/Cargo.toml | 13 - src/libproc_macro_plugin/lib.rs | 103 ---- src/libproc_macro_plugin/quote.rs | 230 --------- src/librustc/hir/map/definitions.rs | 27 +- src/librustc/middle/stability.rs | 1 + src/librustc_driver/Cargo.toml | 1 - src/librustc_metadata/creader.rs | 48 +- src/librustc_metadata/cstore_impl.rs | 5 + src/librustc_metadata/decoder.rs | 2 +- src/librustc_metadata/encoder.rs | 7 +- src/libsyntax/ast.rs | 8 +- src/libsyntax/ext/base.rs | 5 +- src/libsyntax/ext/build.rs | 2 +- src/libsyntax/feature_gate.rs | 28 +- src/libsyntax/fold.rs | 2 +- src/libsyntax/parse/lexer/tokentrees.rs | 12 +- src/libsyntax/parse/parser.rs | 4 +- src/libsyntax/parse/token.rs | 63 +++ src/libsyntax/tokenstream.rs | 152 +++++- src/libsyntax/util/rc_slice.rs | 10 +- .../auxiliary/cond_plugin.rs | 43 +- .../auxiliary/hello_macro.rs | 23 +- .../auxiliary/proc_macro_def.rs | 50 +- src/test/run-pass-fulldeps/macro-quote-1.rs | 40 -- .../run-pass-fulldeps/macro-quote-cond.rs | 8 +- .../run-pass-fulldeps/macro-quote-test.rs | 8 +- src/test/run-pass-fulldeps/proc_macro.rs | 7 +- src/tools/tidy/src/cargo.rs | 8 - src/tools/tidy/src/features.rs | 2 +- 34 files changed, 1085 insertions(+), 577 deletions(-) create mode 100644 src/doc/unstable-book/src/library-features/proc-macro.md create mode 100644 src/libproc_macro/quote.rs delete mode 100644 src/libproc_macro_plugin/Cargo.toml delete mode 100644 src/libproc_macro_plugin/lib.rs delete mode 100644 src/libproc_macro_plugin/quote.rs delete mode 100644 src/test/run-pass-fulldeps/macro-quote-1.rs diff --git a/src/Cargo.lock b/src/Cargo.lock index dd6e72a56149e..e8f0ed6ed2c18 100644 --- a/src/Cargo.lock +++ b/src/Cargo.lock @@ -882,14 +882,6 @@ name = "proc_macro" version = "0.0.0" dependencies = [ "syntax 0.0.0", -] - -[[package]] -name = "proc_macro_plugin" -version = "0.0.0" -dependencies = [ - "rustc_plugin 0.0.0", - "syntax 0.0.0", "syntax_pos 0.0.0", ] @@ -1210,7 +1202,6 @@ dependencies = [ "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)", "graphviz 0.0.0", "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", - "proc_macro_plugin 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", "rustc_borrowck 0.0.0", diff --git a/src/doc/unstable-book/src/library-features/proc-macro.md b/src/doc/unstable-book/src/library-features/proc-macro.md new file mode 100644 index 0000000000000..19e7f663c7ac3 --- /dev/null +++ b/src/doc/unstable-book/src/library-features/proc-macro.md @@ -0,0 +1,7 @@ +# `proc_macro` + +The tracking issue for this feature is: [#38356] + +[#38356]: https://github.com/rust-lang/rust/issues/38356 + +------------------------ diff --git a/src/libproc_macro/Cargo.toml b/src/libproc_macro/Cargo.toml index 7ce65d0fe4dbc..1b5141773a967 100644 --- a/src/libproc_macro/Cargo.toml +++ 
b/src/libproc_macro/Cargo.toml
@@ -9,3 +9,4 @@ crate-type = ["dylib"]
 [dependencies]
 syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs
index b9f4fa63e6006..f1abd3339ed53 100644
--- a/src/libproc_macro/lib.rs
+++ b/src/libproc_macro/lib.rs
@@ -37,18 +37,24 @@
        test(no_crate_inject, attr(deny(warnings))),
        test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))]
 
+#![feature(i128_type)]
 #![feature(rustc_private)]
 #![feature(staged_api)]
 #![feature(lang_items)]
 
 extern crate syntax;
+extern crate syntax_pos;
 
-use std::fmt;
+use std::{fmt, iter, ops};
 use std::str::FromStr;
 
+use syntax::ast;
 use syntax::errors::DiagnosticBuilder;
-use syntax::parse;
+use syntax::parse::{self, token};
+use syntax::symbol;
 use syntax::tokenstream;
+use syntax_pos::DUMMY_SP;
+use syntax_pos::SyntaxContext;
 
 /// The main type provided by this crate, representing an abstract stream of
 /// tokens.
@@ -60,6 +66,7 @@ use syntax::tokenstream;
 /// The API of this type is intentionally bare-bones, but it'll be expanded over
 /// time!
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
+#[derive(Clone)]
 pub struct TokenStream(tokenstream::TokenStream);
 
 /// Error returned from `TokenStream::from_str`.
@@ -69,6 +76,443 @@ pub struct LexError {
     _inner: (),
 }
 
+#[stable(feature = "proc_macro_lib", since = "1.15.0")]
+impl FromStr for TokenStream {
+    type Err = LexError;
+
+    fn from_str(src: &str) -> Result<TokenStream, LexError> {
+        __internal::with_sess(|(sess, mark)| {
+            let src = src.to_string();
+            let name = "<proc-macro source code>".to_string();
+            let call_site = mark.expn_info().unwrap().call_site;
+            let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site));
+            Ok(__internal::token_stream_wrap(stream))
+        })
+    }
+}
+
+#[stable(feature = "proc_macro_lib", since = "1.15.0")]
+impl fmt::Display for TokenStream {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        self.0.fmt(f)
+    }
+}
+
+/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input.
+/// For example, `quote!(a + b)` will produce an expression that, when evaluated, constructs
+/// the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`.
+///
+/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term.
+/// To quote `$` itself, use `$$`.
+#[unstable(feature = "proc_macro", issue = "38356")]
+#[macro_export]
+macro_rules! quote { () => {} }
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl From<TokenTree> for TokenStream {
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream(tree.to_raw())
+    }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl From<TokenKind> for TokenStream {
+    fn from(kind: TokenKind) -> TokenStream {
+        TokenTree::from(kind).into()
+    }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self {
+        let mut builder = tokenstream::TokenStream::builder();
+        for stream in streams {
+            builder.push(stream.into().0);
+        }
+        TokenStream(builder.build())
+    }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl IntoIterator for TokenStream {
+    type Item = TokenTree;
+    type IntoIter = TokenIter;
+
+    fn into_iter(self) -> TokenIter {
+        TokenIter { cursor: self.0.trees(), next: None }
+    }
+}
+
+impl TokenStream {
+    /// Returns an empty `TokenStream`.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn empty() -> TokenStream {
+        TokenStream(tokenstream::TokenStream::empty())
+    }
+
+    /// Checks if this `TokenStream` is empty.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
+    }
+}
+
+/// A region of source code, along with macro expansion information.
+#[unstable(feature = "proc_macro", issue = "38356")]
+#[derive(Copy, Clone)]
+pub struct Span(syntax_pos::Span);
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl Default for Span {
+    fn default() -> Span {
+        ::__internal::with_sess(|(_, mark)| Span(syntax_pos::Span {
+            ctxt: SyntaxContext::empty().apply_mark(mark),
+            ..mark.expn_info().unwrap().call_site
+        }))
+    }
+}
+
+impl Span {
+    /// The span of the invocation of the current procedural macro.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn call_site() -> Span {
+        ::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site))
+    }
+}
+
+/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`).
+#[unstable(feature = "proc_macro", issue = "38356")]
+#[derive(Clone)]
+pub struct TokenTree {
+    /// The `TokenTree`'s span
+    pub span: Span,
+    /// Description of the `TokenTree`
+    pub kind: TokenKind,
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl From<TokenKind> for TokenTree {
+    fn from(kind: TokenKind) -> TokenTree {
+        TokenTree { span: Span::default(), kind: kind }
+    }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl fmt::Display for TokenTree {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        TokenStream::from(self.clone()).fmt(f)
+    }
+}
+
+/// Description of a `TokenTree`
+#[derive(Clone)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub enum TokenKind {
+    /// A delimited tokenstream.
+    Sequence(Delimiter, TokenStream),
+    /// A unicode identifier.
+    Word(Symbol),
+    /// A punctuation character (`+`, `,`, `$`, etc.).
+    Op(char, OpKind),
+    /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
+    Literal(Literal),
+}
+
+/// Describes how a sequence of token trees is delimited.
+#[derive(Copy, Clone)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub enum Delimiter {
+    /// `( ... )`
+    Parenthesis,
+    /// `{ ... }`
+    Brace,
+    /// `[ ... ]`
+    Bracket,
+    /// An implicit delimiter, e.g. `$var`, where $var is `...`.
+    None,
+}
+
+/// An interned string.
+#[derive(Copy, Clone)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub struct Symbol(symbol::Symbol);
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl<'a> From<&'a str> for Symbol {
+    fn from(string: &'a str) -> Symbol {
+        Symbol(symbol::Symbol::intern(string))
+    }
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl ops::Deref for Symbol {
+    type Target = str;
+
+    fn deref(&self) -> &str {
+        unsafe { &*(self.0.as_str().deref() as *const str) }
+    }
+}
+
+/// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace.
+#[derive(Copy, Clone)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub enum OpKind {
+    /// e.g. `+` is `Alone` in `+ =`.
+    Alone,
+    /// e.g. `+` is `Joint` in `+=`.
+    Joint,
+}
+
+/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`).
+#[derive(Clone)]
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub struct Literal(token::Token);
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl fmt::Display for Literal {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        TokenTree { kind: TokenKind::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f)
+    }
+}
+
+macro_rules! int_literals {
+    ($($int_kind:ident),*) => {$(
+        /// Integer literal.
+        #[unstable(feature = "proc_macro", issue = "38356")]
+        pub fn $int_kind(n: $int_kind) -> Literal {
+            Literal::integer(n as i128, stringify!($int_kind))
+        }
+    )*}
+}
+
+impl Literal {
+    int_literals!(u8, i8, u16, i16, u32, i32, u64, i64);
+    fn integer(n: i128, kind: &'static str) -> Literal {
+        Literal(token::Literal(token::Lit::Integer(symbol::Symbol::intern(&n.to_string())),
+                               Some(symbol::Symbol::intern(kind))))
+    }
+
+    /// Floating point literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn f32(n: f32) -> Literal {
+        Literal(token::Literal(token::Lit::Float(symbol::Symbol::intern(&n.to_string())),
+                               Some(symbol::Symbol::intern("f32"))))
+    }
+
+    /// Floating point literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn f64(n: f64) -> Literal {
+        Literal(token::Literal(token::Lit::Float(symbol::Symbol::intern(&n.to_string())),
+                               Some(symbol::Symbol::intern("f64"))))
+    }
+
+    /// String literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn string(string: &str) -> Literal {
+        let mut escaped = String::new();
+        for ch in string.chars() {
+            escaped.extend(ch.escape_unicode());
+        }
+        Literal(token::Literal(token::Lit::Str_(symbol::Symbol::intern(&escaped)), None))
+    }
+
+    /// Character literal.
+    #[unstable(feature = "proc_macro", issue = "38356")]
+    pub fn character(ch: char) -> Literal {
+        let mut escaped = String::new();
+        escaped.extend(ch.escape_unicode());
+        Literal(token::Literal(token::Lit::Char(symbol::Symbol::intern(&escaped)), None))
+    }
+}
+
+/// An iterator over `TokenTree`s.
+#[unstable(feature = "proc_macro", issue = "38356")]
+pub struct TokenIter {
+    cursor: tokenstream::Cursor,
+    next: Option<tokenstream::TokenStream>,
+}
+
+#[unstable(feature = "proc_macro", issue = "38356")]
+impl Iterator for TokenIter {
+    type Item = TokenTree;
+
+    fn next(&mut self) -> Option<TokenTree> {
+        self.next.take().or_else(|| self.cursor.next_as_stream())
+            .map(|next| TokenTree::from_raw(next, &mut self.next))
+    }
+}
+
+impl Delimiter {
+    fn from_raw(delim: token::DelimToken) -> Delimiter {
+        match delim {
+            token::Paren => Delimiter::Parenthesis,
+            token::Brace => Delimiter::Brace,
+            token::Bracket => Delimiter::Bracket,
+            token::NoDelim => Delimiter::None,
+        }
+    }
+
+    fn to_raw(self) -> token::DelimToken {
+        match self {
+            Delimiter::Parenthesis => token::Paren,
+            Delimiter::Brace => token::Brace,
+            Delimiter::Bracket => token::Bracket,
+            Delimiter::None => token::NoDelim,
+        }
+    }
+}
+
+impl TokenTree {
+    fn from_raw(stream: tokenstream::TokenStream, next: &mut Option<tokenstream::TokenStream>)
+                -> TokenTree {
+        use syntax::parse::token::*;
+
+        let (tree, is_joint) = stream.as_tree();
+        let (mut span, token) = match tree {
+            tokenstream::TokenTree::Token(span, token) => (span, token),
+            tokenstream::TokenTree::Delimited(span, delimed) => {
+                let delimiter = Delimiter::from_raw(delimed.delim);
+                return TokenTree {
+                    span: Span(span),
+                    kind: TokenKind::Sequence(delimiter, TokenStream(delimed.tts.into())),
+                };
+            }
+        };
+
+        let op_kind = if is_joint { OpKind::Joint } else { OpKind::Alone };
+        macro_rules! op {
+            ($op:expr) => { TokenKind::Op($op, op_kind) }
+        }
+
+        macro_rules!
joint { + ($first:expr, $rest:expr) => { joint($first, $rest, is_joint, &mut span, next) } + } + + fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span, + next: &mut Option) + -> TokenKind { + let (first_span, rest_span) = (*span, *span); + *span = first_span; + let tree = tokenstream::TokenTree::Token(rest_span, rest); + *next = Some(if is_joint { tree.joint() } else { tree.into() }); + TokenKind::Op(first, OpKind::Joint) + } + + let kind = match token { + Eq => op!('='), + Lt => op!('<'), + Le => joint!('<', Eq), + EqEq => joint!('=', Eq), + Ne => joint!('!', Eq), + Ge => joint!('>', Eq), + Gt => op!('>'), + AndAnd => joint!('&', BinOp(And)), + OrOr => joint!('|', BinOp(Or)), + Not => op!('!'), + Tilde => op!('~'), + BinOp(Plus) => op!('+'), + BinOp(Minus) => op!('-'), + BinOp(Star) => op!('*'), + BinOp(Slash) => op!('/'), + BinOp(Percent) => op!('%'), + BinOp(Caret) => op!('^'), + BinOp(And) => op!('&'), + BinOp(Or) => op!('|'), + BinOp(Shl) => joint!('<', Lt), + BinOp(Shr) => joint!('>', Gt), + BinOpEq(Plus) => joint!('+', Eq), + BinOpEq(Minus) => joint!('-', Eq), + BinOpEq(Star) => joint!('*', Eq), + BinOpEq(Slash) => joint!('/', Eq), + BinOpEq(Percent) => joint!('%', Eq), + BinOpEq(Caret) => joint!('^', Eq), + BinOpEq(And) => joint!('&', Eq), + BinOpEq(Or) => joint!('|', Eq), + BinOpEq(Shl) => joint!('<', Le), + BinOpEq(Shr) => joint!('>', Ge), + At => op!('@'), + Dot => op!('.'), + DotDot => joint!('.', Dot), + DotDotDot => joint!('.', DotDot), + Comma => op!(','), + Semi => op!(';'), + Colon => op!(':'), + ModSep => joint!(':', Colon), + RArrow => joint!('-', Gt), + LArrow => joint!('<', BinOp(Minus)), + FatArrow => joint!('=', Gt), + Pound => op!('#'), + Dollar => op!('$'), + Question => op!('?'), + Underscore => op!('_'), + + Ident(ident) | Lifetime(ident) => TokenKind::Word(Symbol(ident.name)), + Literal(..) | DocComment(..) => TokenKind::Literal(self::Literal(token)), + + Interpolated(..) => unimplemented!(), + + OpenDelim(..) | CloseDelim(..) => unreachable!(), + Whitespace | Comment | Shebang(..) | Eof => unreachable!(), + }; + + TokenTree { span: Span(span), kind: kind } + } + + fn to_raw(self) -> tokenstream::TokenStream { + use syntax::parse::token::*; + use syntax::tokenstream::{TokenTree, Delimited}; + + let (op, kind) = match self.kind { + TokenKind::Op(op, kind) => (op, kind), + TokenKind::Sequence(delimiter, tokens) => { + return TokenTree::Delimited(self.span.0, Delimited { + delim: delimiter.to_raw(), + tts: tokens.0.into(), + }).into(); + }, + TokenKind::Word(symbol) => { + let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt }; + let token = + if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) }; + return TokenTree::Token(self.span.0, token).into(); + } + TokenKind::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(), + }; + + let token = match op { + '=' => Eq, + '<' => Lt, + '>' => Gt, + '!' => Not, + '~' => Tilde, + '+' => BinOp(Plus), + '-' => BinOp(Minus), + '*' => BinOp(Star), + '/' => BinOp(Slash), + '%' => BinOp(Percent), + '^' => BinOp(Caret), + '&' => BinOp(And), + '|' => BinOp(Or), + '@' => At, + '.' => Dot, + ',' => Comma, + ';' => Semi, + ':' => Colon, + '#' => Pound, + '$' => Dollar, + '?' 
=> Question, + '_' => Underscore, + _ => panic!("unsupported character {}", op), + }; + + let tree = TokenTree::Token(self.span.0, token); + match kind { + OpKind::Alone => tree.into(), + OpKind::Joint => tree.joint(), + } + } +} + /// Permanently unstable internal implementation details of this crate. This /// should not be used. /// @@ -80,7 +524,11 @@ pub struct LexError { /// all of the contents. #[unstable(feature = "proc_macro_internals", issue = "27812")] #[doc(hidden)] +#[path = ""] pub mod __internal { + mod quote; + pub use self::quote::{Quoter, __rt}; + use std::cell::Cell; use std::rc::Rc; @@ -172,25 +620,3 @@ fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError { err.cancel(); LexError { _inner: () } } - -#[stable(feature = "proc_macro_lib", since = "1.15.0")] -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - __internal::with_sess(|(sess, mark)| { - let src = src.to_string(); - let name = "".to_string(); - let call_site = mark.expn_info().unwrap().call_site; - let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site)); - Ok(__internal::token_stream_wrap(stream)) - }) - } -} - -#[stable(feature = "proc_macro_lib", since = "1.15.0")] -impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} diff --git a/src/libproc_macro/quote.rs b/src/libproc_macro/quote.rs new file mode 100644 index 0000000000000..a3ea3925fcd48 --- /dev/null +++ b/src/libproc_macro/quote.rs @@ -0,0 +1,259 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! # Quasiquoter +//! This file contains the implementation internals of the quasiquoter provided by `qquote!`. + +use syntax::ast::Ident; +use syntax::ext::base::{ExtCtxt, ProcMacro}; +use syntax::parse::token::{self, Token, Lit}; +use syntax::symbol::Symbol; +use syntax::tokenstream::{Delimited, TokenTree, TokenStream}; +use syntax_pos::{DUMMY_SP, Span}; +use syntax_pos::hygiene::SyntaxContext; + +pub struct Quoter; + +pub mod __rt { + pub use syntax::ast::Ident; + pub use syntax::parse::token; + pub use syntax::symbol::Symbol; + pub use syntax::tokenstream::{TokenStream, TokenTree, Delimited}; + pub use super::{ctxt, span}; + + pub fn unquote + Clone>(tokens: &T) -> TokenStream { + T::into(tokens.clone()).0 + } +} + +pub fn ctxt() -> SyntaxContext { + ::__internal::with_sess(|(_, mark)| SyntaxContext::empty().apply_mark(mark)) +} + +pub fn span() -> Span { + ::Span::default().0 +} + +trait Quote { + fn quote(&self) -> TokenStream; +} + +macro_rules! quote_tok { + (,) => { Token::Comma }; + (.) => { Token::Dot }; + (:) => { Token::Colon }; + (::) => { Token::ModSep }; + (!) => { Token::Not }; + (<) => { Token::Lt }; + (>) => { Token::Gt }; + (_) => { Token::Underscore }; + (0) => { Token::Literal(token::Lit::Integer(Symbol::intern("0")), None) }; + (&) => { Token::BinOp(token::And) }; + ($i:ident) => { Token::Ident(Ident { name: Symbol::intern(stringify!($i)), ctxt: ctxt() }) }; +} + +macro_rules! 
quote_tree { + ((unquote $($t:tt)*)) => { TokenStream::from($($t)*) }; + ((quote $($t:tt)*)) => { ($($t)*).quote() }; + (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) }; + ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) }; + ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) }; + (rt) => { quote!(::__internal::__rt) }; + ($t:tt) => { TokenStream::from(TokenTree::Token(span(), quote_tok!($t))) }; +} + +fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream { + TokenTree::Delimited(span(), Delimited { delim: delim, tts: stream.into() }).into() +} + +macro_rules! quote { + () => { TokenStream::empty() }; + ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::() }; +} + +impl ProcMacro for Quoter { + fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, _: Span, stream: TokenStream) -> TokenStream { + let mut info = cx.current_expansion.mark.expn_info().unwrap(); + info.callee.allow_internal_unstable = true; + cx.current_expansion.mark.set_expn_info(info); + ::__internal::set_sess(cx, || quote!(::TokenStream((quote stream)))) + } +} + +impl Quote for Option { + fn quote(&self) -> TokenStream { + match *self { + Some(ref t) => quote!(Some((quote t))), + None => quote!(None), + } + } +} + +impl Quote for TokenStream { + fn quote(&self) -> TokenStream { + let mut builder = TokenStream::builder(); + builder.push(quote!(rt::TokenStream::builder())); + + let mut trees = self.trees(); + loop { + let (mut tree, mut is_joint) = match trees.next_as_stream() { + Some(next) => next.as_tree(), + None => return builder.add(quote!(.build())).build(), + }; + if let TokenTree::Token(_, Token::Dollar) = tree { + let (next_tree, next_is_joint) = match trees.next_as_stream() { + Some(next) => next.as_tree(), + None => panic!("unexpected trailing `$` in `quote!`"), + }; + match next_tree { + TokenTree::Token(_, Token::Ident(..)) => { + builder.push(quote!(.add(rt::unquote(&(unquote next_tree))))); + continue + } + TokenTree::Token(_, Token::Dollar) => { + tree = next_tree; + is_joint = next_is_joint; + } + _ => panic!("`$` must be followed by an ident or `$` in `quote!`"), + } + } + + builder.push(match is_joint { + true => quote!(.add((quote tree).joint())), + false => quote!(.add(rt::TokenStream::from((quote tree)))), + }); + } + } +} + +impl Quote for TokenTree { + fn quote(&self) -> TokenStream { + match *self { + TokenTree::Token(span, ref token) => quote! { + rt::TokenTree::Token((quote span), (quote token)) + }, + TokenTree::Delimited(span, ref delimited) => quote! 
{ + rt::TokenTree::Delimited((quote span), (quote delimited)) + }, + } + } +} + +impl Quote for Delimited { + fn quote(&self) -> TokenStream { + quote!(rt::Delimited { delim: (quote self.delim), tts: (quote self.stream()).into() }) + } +} + +impl<'a> Quote for &'a str { + fn quote(&self) -> TokenStream { + TokenTree::Token(span(), Token::Literal(token::Lit::Str_(Symbol::intern(self)), None)) + .into() + } +} + +impl Quote for usize { + fn quote(&self) -> TokenStream { + let integer_symbol = Symbol::intern(&self.to_string()); + TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None)) + .into() + } +} + +impl Quote for Ident { + fn quote(&self) -> TokenStream { + quote!(rt::Ident { name: (quote self.name), ctxt: rt::ctxt() }) + } +} + +impl Quote for Symbol { + fn quote(&self) -> TokenStream { + quote!(rt::Symbol::intern((quote &*self.as_str()))) + } +} + +impl Quote for Span { + fn quote(&self) -> TokenStream { + quote!(rt::span()) + } +} + +impl Quote for Token { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*; $($t:tt)*) => { + match *self { + $( Token::$i => quote!(rt::token::$i), )* + $( $t )* + } + } + } + + gen_match! { + Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot, + Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question, + Underscore; + + Token::OpenDelim(delim) => quote!(rt::token::OpenDelim((quote delim))), + Token::CloseDelim(delim) => quote!(rt::token::CloseDelim((quote delim))), + Token::BinOp(tok) => quote!(rt::token::BinOp((quote tok))), + Token::BinOpEq(tok) => quote!(rt::token::BinOpEq((quote tok))), + Token::Ident(ident) => quote!(rt::token::Ident((quote ident))), + Token::Lifetime(ident) => quote!(rt::token::Lifetime((quote ident))), + Token::Literal(lit, sfx) => quote!(rt::token::Literal((quote lit), (quote sfx))), + _ => panic!("Unhandled case!"), + } + } +} + +impl Quote for token::BinOpToken { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*) => { + match *self { + $( token::BinOpToken::$i => quote!(rt::token::BinOpToken::$i), )* + } + } + } + + gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr) + } +} + +impl Quote for Lit { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*; $($raw:ident),*) => { + match *self { + $( Lit::$i(lit) => quote!(rt::token::Lit::$i((quote lit))), )* + $( Lit::$raw(lit, n) => { + quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n))) + })* + } + } + } + + gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw) + } +} + +impl Quote for token::DelimToken { + fn quote(&self) -> TokenStream { + macro_rules! 
gen_match { + ($($i:ident),*) => { + match *self { + $(token::DelimToken::$i => { quote!(rt::token::DelimToken::$i) })* + } + } + } + + gen_match!(Paren, Bracket, Brace, NoDelim) + } +} diff --git a/src/libproc_macro_plugin/Cargo.toml b/src/libproc_macro_plugin/Cargo.toml deleted file mode 100644 index 146a66cdf01cb..0000000000000 --- a/src/libproc_macro_plugin/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -authors = ["The Rust Project Developers"] -name = "proc_macro_plugin" -version = "0.0.0" - -[lib] -path = "lib.rs" -crate-type = ["dylib"] - -[dependencies] -rustc_plugin = { path = "../librustc_plugin" } -syntax = { path = "../libsyntax" } -syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/libproc_macro_plugin/lib.rs b/src/libproc_macro_plugin/lib.rs deleted file mode 100644 index d1bc0966eb567..0000000000000 --- a/src/libproc_macro_plugin/lib.rs +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! # Proc_Macro -//! -//! A library for procedural macro writers. -//! -//! ## Usage -//! This crate provides the `quote!` macro for syntax creation. -//! -//! The `quote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;` -//! at the crate root. This is a temporary solution until we have better hygiene. -//! -//! ## Quasiquotation -//! -//! The quasiquoter creates output that, when run, constructs the tokenstream specified as -//! input. For example, `quote!(5 + 5)` will produce a program, that, when run, will -//! construct the TokenStream `5 | + | 5`. -//! -//! ### Unquoting -//! -//! Unquoting is done with `$`, and works by taking the single next ident as the unquoted term. -//! To quote `$` itself, use `$$`. -//! -//! A simple example is: -//! -//!``` -//!fn double(tmp: TokenStream) -> TokenStream { -//! quote!($tmp * 2) -//!} -//!``` -//! -//! ### Large example: Scheme's `cond` -//! -//! Below is an example implementation of Scheme's `cond`. -//! -//! ``` -//! fn cond(input: TokenStream) -> TokenStream { -//! let mut conds = Vec::new(); -//! let mut input = input.trees().peekable(); -//! while let Some(tree) = input.next() { -//! let mut cond = match tree { -//! TokenTree::Delimited(_, ref delimited) => delimited.stream(), -//! _ => panic!("Invalid input"), -//! }; -//! let mut trees = cond.trees(); -//! let test = trees.next(); -//! let rhs = trees.collect::(); -//! if rhs.is_empty() { -//! panic!("Invalid macro usage in cond: {}", cond); -//! } -//! let is_else = match test { -//! Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true, -//! _ => false, -//! }; -//! conds.push(if is_else || input.peek().is_none() { -//! quote!({ $rhs }) -//! } else { -//! let test = test.unwrap(); -//! quote!(if $test { $rhs } else) -//! }); -//! } -//! -//! conds.into_iter().collect() -//! } -//! 
``` -#![crate_name = "proc_macro_plugin"] -#![feature(plugin_registrar)] -#![crate_type = "dylib"] -#![crate_type = "rlib"] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] -#![deny(warnings)] - -#![feature(rustc_diagnostic_macros)] - -extern crate rustc_plugin; -extern crate syntax; -extern crate syntax_pos; - -mod quote; -use quote::quote; - -use rustc_plugin::Registry; -use syntax::ext::base::SyntaxExtension; -use syntax::symbol::Symbol; - -// ____________________________________________________________________________________________ -// Main macro definition - -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(Symbol::intern("quote"), - SyntaxExtension::ProcMacro(Box::new(quote))); -} diff --git a/src/libproc_macro_plugin/quote.rs b/src/libproc_macro_plugin/quote.rs deleted file mode 100644 index 09675564291a2..0000000000000 --- a/src/libproc_macro_plugin/quote.rs +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! # Quasiquoter -//! This file contains the implementation internals of the quasiquoter provided by `qquote!`. - -use syntax::ast::Ident; -use syntax::parse::token::{self, Token, Lit}; -use syntax::symbol::Symbol; -use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream}; -use syntax_pos::DUMMY_SP; - -use std::iter; - -pub fn quote<'cx>(stream: TokenStream) -> TokenStream { - stream.quote() -} - -trait Quote { - fn quote(&self) -> TokenStream; -} - -macro_rules! quote_tok { - (,) => { Token::Comma }; - (.) => { Token::Dot }; - (:) => { Token::Colon }; - (::) => { Token::ModSep }; - (!) => { Token::Not }; - (<) => { Token::Lt }; - (>) => { Token::Gt }; - (_) => { Token::Underscore }; - ($i:ident) => { Token::Ident(Ident::from_str(stringify!($i))) }; -} - -macro_rules! quote_tree { - ((unquote $($t:tt)*)) => { $($t)* }; - ((quote $($t:tt)*)) => { ($($t)*).quote() }; - (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) }; - ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) }; - ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) }; - ($t:tt) => { TokenStream::from(TokenTree::Token(DUMMY_SP, quote_tok!($t))) }; -} - -fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream { - TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into() -} - -macro_rules! 
quote { - () => { TokenStream::empty() }; - ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::() }; -} - -impl Quote for Option { - fn quote(&self) -> TokenStream { - match *self { - Some(ref t) => quote!(::std::option::Option::Some((quote t))), - None => quote!(::std::option::Option::None), - } - } -} - -impl Quote for TokenStream { - fn quote(&self) -> TokenStream { - if self.is_empty() { - return quote!(::syntax::tokenstream::TokenStream::empty()); - } - - struct Quoter(iter::Peekable); - - impl Iterator for Quoter { - type Item = TokenStream; - - fn next(&mut self) -> Option { - let quoted_tree = if let Some(&TokenTree::Token(_, Token::Dollar)) = self.0.peek() { - self.0.next(); - match self.0.next() { - Some(tree @ TokenTree::Token(_, Token::Ident(..))) => Some(tree.into()), - Some(tree @ TokenTree::Token(_, Token::Dollar)) => Some(tree.quote()), - // FIXME(jseyfried): improve these diagnostics - Some(..) => panic!("`$` must be followed by an ident or `$` in `quote!`"), - None => panic!("unexpected trailing `$` in `quote!`"), - } - } else { - self.0.next().as_ref().map(Quote::quote) - }; - - quoted_tree.map(|quoted_tree| { - quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),) - }) - } - } - - let quoted = Quoter(self.trees().peekable()).collect::(); - quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>()) - } -} - -impl Quote for TokenTree { - fn quote(&self) -> TokenStream { - match *self { - TokenTree::Token(_, ref token) => quote! { - ::syntax::tokenstream::TokenTree::Token(::syntax::ext::quote::rt::DUMMY_SP, - (quote token)) - }, - TokenTree::Delimited(_, ref delimited) => quote! { - ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP, - (quote delimited)) - }, - } - } -} - -impl Quote for Delimited { - fn quote(&self) -> TokenStream { - quote!(::syntax::tokenstream::Delimited { - delim: (quote self.delim), - tts: (quote self.stream()).into(), - }) - } -} - -impl<'a> Quote for &'a str { - fn quote(&self) -> TokenStream { - TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Str_(Symbol::intern(self)), None)) - .into() - } -} - -impl Quote for usize { - fn quote(&self) -> TokenStream { - let integer_symbol = Symbol::intern(&self.to_string()); - TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None)) - .into() - } -} - -impl Quote for Ident { - fn quote(&self) -> TokenStream { - // FIXME(jseyfried) quote hygiene - quote!(::syntax::ast::Ident::from_str((quote &*self.name.as_str()))) - } -} - -impl Quote for Symbol { - fn quote(&self) -> TokenStream { - quote!(::syntax::symbol::Symbol::intern((quote &*self.as_str()))) - } -} - -impl Quote for Token { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*; $($t:tt)*) => { - match *self { - $( Token::$i => quote!(::syntax::parse::token::$i), )* - $( $t )* - } - } - } - - gen_match! 
{ - Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot, - Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question, - Underscore; - - Token::OpenDelim(delim) => quote!(::syntax::parse::token::OpenDelim((quote delim))), - Token::CloseDelim(delim) => quote!(::syntax::parse::token::CloseDelim((quote delim))), - Token::BinOp(tok) => quote!(::syntax::parse::token::BinOp((quote tok))), - Token::BinOpEq(tok) => quote!(::syntax::parse::token::BinOpEq((quote tok))), - Token::Ident(ident) => quote!(::syntax::parse::token::Ident((quote ident))), - Token::Lifetime(ident) => quote!(::syntax::parse::token::Lifetime((quote ident))), - Token::Literal(lit, sfx) => quote! { - ::syntax::parse::token::Literal((quote lit), (quote sfx)) - }, - _ => panic!("Unhandled case!"), - } - } -} - -impl Quote for token::BinOpToken { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*) => { - match *self { - $( token::BinOpToken::$i => quote!(::syntax::parse::token::BinOpToken::$i), )* - } - } - } - - gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr) - } -} - -impl Quote for Lit { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*; $($raw:ident),*) => { - match *self { - $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )* - $( Lit::$raw(lit, n) => { - quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n))) - })* - } - } - } - - gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw) - } -} - -impl Quote for token::DelimToken { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*) => { - match *self { - $(token::DelimToken::$i => { quote!(::syntax::parse::token::DelimToken::$i) })* - } - } - } - - gen_match!(Paren, Bracket, Brace, NoDelim) - } -} diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index c969aef675ff9..5322d24e38934 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -55,12 +55,19 @@ impl Clone for DefPathTable { } impl DefPathTable { + pub fn new() -> Self { + DefPathTable { + index_to_key: [vec![], vec![]], + key_to_index: FxHashMap(), + def_path_hashes: [vec![], vec![]], + } + } - fn allocate(&mut self, - key: DefKey, - def_path_hash: DefPathHash, - address_space: DefIndexAddressSpace) - -> DefIndex { + pub fn allocate(&mut self, + key: DefKey, + def_path_hash: DefPathHash, + address_space: DefIndexAddressSpace) + -> DefIndex { let index = { let index_to_key = &mut self.index_to_key[address_space.index()]; let index = DefIndex::new(index_to_key.len() + address_space.start()); @@ -241,7 +248,7 @@ pub struct DefKey { } impl DefKey { - fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash { + pub fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash { let mut hasher = StableHasher::new(); // We hash a 0u8 here to disambiguate between regular DefPath hashes, @@ -284,7 +291,7 @@ impl DefKey { DefPathHash(hasher.finish()) } - fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash { + pub fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash { let mut hasher = StableHasher::new(); // Disambiguate this from a regular DefPath hash, // see compute_stable_hash() above. @@ -446,11 +453,7 @@ impl Definitions { /// Create new empty definition map. 
pub fn new() -> Definitions { Definitions { - table: DefPathTable { - index_to_key: [vec![], vec![]], - key_to_index: FxHashMap(), - def_path_hashes: [vec![], vec![]], - }, + table: DefPathTable::new(), node_to_def_index: NodeMap(), def_index_to_node: [vec![], vec![]], node_to_hir_id: IndexVec::new(), diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index e27990c29cf9e..e6dc5da969a88 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -728,6 +728,7 @@ pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let ref declared_lib_features = sess.features.borrow().declared_lib_features; let mut remaining_lib_features: FxHashMap = declared_lib_features.clone().into_iter().collect(); + remaining_lib_features.remove(&Symbol::intern("proc_macro")); fn format_stable_since_msg(version: &str) -> String { format!("this feature has been stable since {}. Attribute no longer needed", version) diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index 2e949f48c175e..0b950787e3b91 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -13,7 +13,6 @@ arena = { path = "../libarena" } graphviz = { path = "../libgraphviz" } log = { version = "0.3", features = ["release_max_level_info"] } env_logger = { version = "0.4", default-features = false } -proc_macro_plugin = { path = "../libproc_macro_plugin" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_borrowck = { path = "../librustc_borrowck" } diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 27c2d22168c8b..57a09ed15032f 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -26,7 +26,8 @@ use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate}; use rustc::util::common::record_time; use rustc::util::nodemap::FxHashSet; use rustc::middle::cstore::NativeLibrary; -use rustc::hir::map::Definitions; +use rustc::hir::map::{Definitions, DefKey, DefPathData, DisambiguatedDefPathData, ITEM_LIKE_SPACE}; +use rustc::hir::map::definitions::DefPathTable; use std::cell::{RefCell, Cell}; use std::ops::Deref; @@ -34,7 +35,7 @@ use std::path::PathBuf; use std::rc::Rc; use std::{cmp, fs}; -use syntax::ast; +use syntax::ast::{self, Ident}; use syntax::abi::Abi; use syntax::attr; use syntax::ext::base::SyntaxExtension; @@ -307,9 +308,16 @@ impl<'a> CrateLoader<'a> { let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind); - let def_path_table = record_time(&self.sess.perf_stats.decode_def_path_tables_time, || { - crate_root.def_path_table.decode(&metadata) + let proc_macros = crate_root.macro_derive_registrar.map(|_| { + self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span) }); + let def_path_table = if let Some(ref proc_macros) = proc_macros { + proc_macro_def_path_table(proc_macros) + } else { + record_time(&self.sess.perf_stats.decode_def_path_tables_time, || { + crate_root.def_path_table.decode(&metadata) + }) + }; let exported_symbols = crate_root.exported_symbols .map(|x| x.decode(&metadata).collect()); @@ -328,9 +336,7 @@ impl<'a> CrateLoader<'a> { def_path_table: Rc::new(def_path_table), exported_symbols: exported_symbols, trait_impls: trait_impls, - proc_macros: crate_root.macro_derive_registrar.map(|_| { - self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span) - }), + proc_macros: proc_macros, root: crate_root, blob: metadata, cnum_map: 
RefCell::new(cnum_map), @@ -1213,3 +1219,31 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> { } } } + +fn proc_macro_def_path_table(proc_macros: &[(ast::Name, Rc)]) -> DefPathTable { + let mut table = DefPathTable::new(); + let root = DefKey { + parent: None, + disambiguated_data: DisambiguatedDefPathData { + data: DefPathData::CrateRoot, + disambiguator: 0, + }, + }; + + let initial_hash = DefKey::root_parent_stable_hash("", ""); + let root_hash = root.compute_stable_hash(initial_hash); + let root_id = table.allocate(root, root_hash, ITEM_LIKE_SPACE); + let root_path_hash = table.def_path_hash(root_id); + for proc_macro in proc_macros { + let key = DefKey { + parent: Some(CRATE_DEF_INDEX), + disambiguated_data: DisambiguatedDefPathData { + data: DefPathData::MacroDef(Ident::with_empty_ctxt(proc_macro.0)), + disambiguator: 0, + }, + }; + let def_path_hash = key.compute_stable_hash(root_path_hash); + table.allocate(key, def_path_hash, ITEM_LIKE_SPACE); + } + table +} diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 0649553e382e3..ff5febadeb70f 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -33,6 +33,7 @@ use std::rc::Rc; use syntax::ast; use syntax::attr; +use syntax::ext::base::SyntaxExtension; use syntax::parse::filemap_to_stream; use syntax::symbol::Symbol; use syntax_pos::{Span, NO_EXPANSION}; @@ -365,6 +366,10 @@ impl CrateStore for cstore::CStore { let data = self.get_crate_data(id.krate); if let Some(ref proc_macros) = data.proc_macros { return LoadedMacro::ProcMacro(proc_macros[id.index.as_usize() - 1].1.clone()); + } else if data.name == "proc_macro" && + self.get_crate_data(id.krate).item_name(id.index) == "quote" { + let ext = SyntaxExtension::ProcMacro(Box::new(::proc_macro::__internal::Quoter)); + return LoadedMacro::ProcMacro(Rc::new(ext)); } let (name, def) = data.get_macro(id.index); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 728ab30bb17dc..b974541ef255a 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -472,7 +472,7 @@ impl<'a, 'tcx> CrateMetadata { } } - fn item_name(&self, item_index: DefIndex) -> ast::Name { + pub fn item_name(&self, item_index: DefIndex) -> ast::Name { self.def_key(item_index) .disambiguated_data .data diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 2a504c4c07794..63a24c7db18ff 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -1095,6 +1095,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { /// Serialize the text of exported macros fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> { use syntax::print::pprust; + let def_id = self.tcx.hir.local_def_id(macro_def.id); Entry { kind: EntryKind::MacroDef(self.lazy(&MacroDef { body: pprust::tts_to_string(¯o_def.body.trees().collect::>()), @@ -1102,11 +1103,11 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { })), visibility: self.lazy(&ty::Visibility::Public), span: self.lazy(¯o_def.span), - attributes: self.encode_attributes(¯o_def.attrs), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + children: LazySeq::empty(), - stability: None, - deprecation: None, ty: None, inherent_impls: LazySeq::empty(), variances: LazySeq::empty(), diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 8bd58ec7a52d5..325a5cdf8fc02 100644 --- a/src/libsyntax/ast.rs 
+++ b/src/libsyntax/ast.rs
@@ -100,7 +100,7 @@ impl Path {
         let name = self.segments[0].identifier.name;
         if !self.is_global() && name != "$crate" &&
            name != keywords::SelfValue.name() && name != keywords::Super.name() {
-            self.segments.insert(0, PathSegment::crate_root());
+            self.segments.insert(0, PathSegment::crate_root(self.span));
         }
         self
     }
@@ -134,10 +134,10 @@ impl PathSegment {
     pub fn from_ident(ident: Ident, span: Span) -> Self {
         PathSegment { identifier: ident, span: span, parameters: None }
     }
-    pub fn crate_root() -> Self {
+    pub fn crate_root(span: Span) -> Self {
         PathSegment {
-            identifier: keywords::CrateRoot.ident(),
-            span: DUMMY_SP,
+            identifier: Ident { ctxt: span.ctxt, ..keywords::CrateRoot.ident() },
+            span: span,
             parameters: None,
         }
     }
diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs
index af5eabf06f87b..7a5c9456c5315 100644
--- a/src/libsyntax/ext/base.rs
+++ b/src/libsyntax/ext/base.rs
@@ -578,7 +578,10 @@ impl SyntaxExtension {
 
     pub fn is_modern(&self) -> bool {
         match *self {
-            SyntaxExtension::DeclMacro(..) => true,
+            SyntaxExtension::DeclMacro(..) |
+            SyntaxExtension::ProcMacro(..) |
+            SyntaxExtension::AttrProcMacro(..) |
+            SyntaxExtension::ProcMacroDerive(..) => true,
             _ => false,
         }
     }
diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs
index a4580ea3939fb..5168943d108cb 100644
--- a/src/libsyntax/ext/build.rs
+++ b/src/libsyntax/ext/build.rs
@@ -320,7 +320,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
         let last_identifier = idents.pop().unwrap();
         let mut segments: Vec<ast::PathSegment> = Vec::new();
         if global {
-            segments.push(ast::PathSegment::crate_root());
+            segments.push(ast::PathSegment::crate_root(sp));
         }
 
         segments.extend(idents.into_iter().map(|i| ast::PathSegment::from_ident(i, sp)));
diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs
index d7d3a70f3c7c5..3f3c94536a6e7 100644
--- a/src/libsyntax/feature_gate.rs
+++ b/src/libsyntax/feature_gate.rs
@@ -38,12 +38,19 @@ use symbol::Symbol;
 use std::ascii::AsciiExt;
 use std::env;
 
-macro_rules! setter {
+macro_rules! set {
+    (proc_macro) => {{
+        fn f(features: &mut Features, span: Span) {
+            features.declared_lib_features.push((Symbol::intern("proc_macro"), span));
+            features.proc_macro = true;
+        }
+        f as fn(&mut Features, Span)
+    }};
     ($field: ident) => {{
-        fn f(features: &mut Features) -> &mut bool {
-            &mut features.$field
+        fn f(features: &mut Features, _: Span) {
+            features.$field = true;
         }
-        f as fn(&mut Features) -> &mut bool
+        f as fn(&mut Features, Span)
     }}
 }
 
@@ -51,10 +58,9 @@ macro_rules! declare_features {
     ($((active, $feature: ident, $ver: expr, $issue: expr),)+) => {
         /// Represents active features that are currently being implemented or
         /// currently being considered for addition/removal.
-        const ACTIVE_FEATURES: &'static [(&'static str, &'static str,
-                                          Option<u32>, fn(&mut Features) -> &mut bool)] = &[
-            $((stringify!($feature), $ver, $issue, setter!($feature))),+
-        ];
+        const ACTIVE_FEATURES:
+            &'static [(&'static str, &'static str, Option<u32>, fn(&mut Features, Span))] =
+            &[$((stringify!($feature), $ver, $issue, set!($feature))),+];
 
         /// A set of features to be used by later passes.
pub struct Features { @@ -1464,9 +1470,9 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> F continue }; - if let Some(&(_, _, _, setter)) = ACTIVE_FEATURES.iter() + if let Some(&(_, _, _, set)) = ACTIVE_FEATURES.iter() .find(|& &(n, _, _, _)| name == n) { - *(setter(&mut features)) = true; + set(&mut features, mi.span); feature_checker.collect(&features, mi.span); } else if let Some(&(_, _, _)) = REMOVED_FEATURES.iter() @@ -1500,7 +1506,7 @@ struct MutexFeatureChecker { impl MutexFeatureChecker { // If this method turns out to be a hotspot due to branching, - // the branching can be eliminated by modifying `setter!()` to set these spans + // the branching can be eliminated by modifying `set!()` to set these spans // only for the features that need to be checked for mutual exclusion. fn collect(&mut self, features: &Features, span: Span) { if features.proc_macro { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 2032aecacbb91..ca4814397d8ac 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -573,7 +573,7 @@ pub fn noop_fold_tt(tt: TokenTree, fld: &mut T) -> TokenTree { } pub fn noop_fold_tts(tts: TokenStream, fld: &mut T) -> TokenStream { - tts.trees().map(|tt| fld.fold_tt(tt)).collect() + tts.map(|tt| fld.fold_tt(tt)) } // apply ident folder if it's an ident, apply other folds to interpolated nodes diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 554a1fcfc71a6..63a396c14db85 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -19,7 +19,9 @@ impl<'a> StringReader<'a> { pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { let mut tts = Vec::new(); while self.token != token::Eof { - tts.push(self.parse_token_tree()?.into()); + let tree = self.parse_token_tree()?; + let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + tts.push(if is_joint { tree.joint() } else { tree.into() }); } Ok(TokenStream::concat(tts)) } @@ -31,13 +33,15 @@ impl<'a> StringReader<'a> { if let token::CloseDelim(..) = self.token { return TokenStream::concat(tts); } - match self.parse_token_tree() { - Ok(tt) => tts.push(tt.into()), + let tree = match self.parse_token_tree() { + Ok(tree) => tree, Err(mut e) => { e.emit(); return TokenStream::concat(tts); } - } + }; + let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + tts.push(if is_joint { tree.joint() } else { tree.into() }); } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 25ab46f6f9e2b..a30dcef6f44e1 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1777,7 +1777,7 @@ impl<'a> Parser<'a> { }; if is_global { - segments.insert(0, PathSegment::crate_root()); + segments.insert(0, PathSegment::crate_root(lo)); } // Assemble the result. @@ -6187,7 +6187,7 @@ impl<'a> Parser<'a> { // `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`. 
        self.eat(&token::ModSep);
         let prefix = ast::Path {
-            segments: vec![PathSegment::crate_root()],
+            segments: vec![PathSegment::crate_root(lo)],
             span: lo.to(self.span),
         };
         let view_path_kind = if self.eat(&token::BinOp(token::Star)) {
diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs
index f208b0f56f81e..e568af66e8aa8 100644
--- a/src/libsyntax/parse/token.rs
+++ b/src/libsyntax/parse/token.rs
@@ -349,6 +349,60 @@ impl Token {
             _ => false,
         }
     }
+
+    pub fn glue(self, joint: Token) -> Option<Token> {
+        Some(match self {
+            Eq => match joint {
+                Eq => EqEq,
+                Gt => FatArrow,
+                _ => return None,
+            },
+            Lt => match joint {
+                Eq => Le,
+                Lt => BinOp(Shl),
+                Le => BinOpEq(Shl),
+                BinOp(Minus) => LArrow,
+                _ => return None,
+            },
+            Gt => match joint {
+                Eq => Ge,
+                Gt => BinOp(Shr),
+                Ge => BinOpEq(Shr),
+                _ => return None,
+            },
+            Not => match joint {
+                Eq => Ne,
+                _ => return None,
+            },
+            BinOp(op) => match joint {
+                Eq => BinOpEq(op),
+                BinOp(And) if op == And => AndAnd,
+                BinOp(Or) if op == Or => OrOr,
+                Gt if op == Minus => RArrow,
+                _ => return None,
+            },
+            Dot => match joint {
+                Dot => DotDot,
+                DotDot => DotDotDot,
+                _ => return None,
+            },
+            DotDot => match joint {
+                Dot => DotDotDot,
+                _ => return None,
+            },
+            Colon => match joint {
+                Colon => ModSep,
+                _ => return None,
+            },
+
+            Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | Comma |
+            Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question |
+            OpenDelim(..) | CloseDelim(..) | Underscore => return None,
+
+            Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) |
+            Whitespace | Comment | Shebang(..) | Eof => return None,
+        })
+    }
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash)]
@@ -398,3 +452,12 @@ impl fmt::Debug for Nonterminal {
         }
     }
 }
+
+pub fn is_op(tok: &Token) -> bool {
+    match *tok {
+        OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |
+        Ident(..) | Underscore | Lifetime(..) | Interpolated(..) |
+        Whitespace | Comment | Shebang(..) | Eof => false,
+        _ => true,
+    }
+}
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index ab4f697071477..2637972cc6362 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -138,6 +138,10 @@ impl TokenTree {
             _ => false,
         }
     }
+
+    pub fn joint(self) -> TokenStream {
+        TokenStream { kind: TokenStreamKind::JointTree(self) }
+    }
 }
 
 /// # Token Streams
@@ -155,6 +159,7 @@ pub struct TokenStream {
 enum TokenStreamKind {
     Empty,
     Tree(TokenTree),
+    JointTree(TokenTree),
     Stream(RcSlice<TokenStream>),
 }
 
@@ -196,6 +201,10 @@ impl TokenStream {
         }
     }
 
+    pub fn builder() -> TokenStreamBuilder {
+        TokenStreamBuilder(Vec::new())
+    }
+
     pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
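`Token::glue` is the inverse of the joint-splitting that `proc_macro::TokenTree::from_raw` performs: given the two halves of a multi-character operator, it recovers the original token, or `None` when the pair is not a single token. A few data points as a hedged sketch against the internal `libsyntax` API above:

```rust
use syntax::parse::token::{BinOpToken, Token};

fn glue_examples() {
    // `<` + `=` re-forms `<=`:
    assert!(Token::Lt.glue(Token::Eq) == Some(Token::Le));
    // `<` + `<` re-forms `<<`:
    assert!(Token::Lt.glue(Token::Lt) == Some(Token::BinOp(BinOpToken::Shl)));
    // `.` + `..` re-forms `...`:
    assert!(Token::Dot.glue(Token::DotDot) == Some(Token::DotDotDot));
    // `=` followed by `,` is not one token, so nothing glues:
    assert!(Token::Eq.glue(Token::Comma) == None);
}
```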
@@ -225,6 +234,99 @@ impl TokenStream {
         }
         true
     }
+
+    pub fn as_tree(self) -> (TokenTree, bool /* joint? */) {
+        match self.kind {
+            TokenStreamKind::Tree(tree) => (tree, false),
+            TokenStreamKind::JointTree(tree) => (tree, true),
+            _ => unreachable!(),
+        }
+    }
+
+    pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
+        let mut trees = self.into_trees();
+        let mut result = Vec::new();
+        while let Some(stream) = trees.next_as_stream() {
+            result.push(match stream.kind {
+                TokenStreamKind::Tree(tree) => f(tree).into(),
+                TokenStreamKind::JointTree(tree) => f(tree).joint(),
+                _ => unreachable!()
+            });
+        }
+        TokenStream::concat(result)
+    }
+
+    fn first_tree(&self) -> Option<TokenTree> {
+        match self.kind {
+            TokenStreamKind::Empty => None,
+            TokenStreamKind::Tree(ref tree) |
+            TokenStreamKind::JointTree(ref tree) => Some(tree.clone()),
+            TokenStreamKind::Stream(ref stream) => stream.first().unwrap().first_tree(),
+        }
+    }
+
+    fn last_tree_if_joint(&self) -> Option<TokenTree> {
+        match self.kind {
+            TokenStreamKind::Empty | TokenStreamKind::Tree(..) => None,
+            TokenStreamKind::JointTree(ref tree) => Some(tree.clone()),
+            TokenStreamKind::Stream(ref stream) => stream.last().unwrap().last_tree_if_joint(),
+        }
+    }
+}
+
+pub struct TokenStreamBuilder(Vec<TokenStream>);
+
+impl TokenStreamBuilder {
+    pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
+        let stream = stream.into();
+        let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint);
+        if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint {
+            if let Some(TokenTree::Token(span, tok)) = stream.first_tree() {
+                if let Some(glued_tok) = last_tok.glue(tok) {
+                    let last_stream = self.0.pop().unwrap();
+                    self.push_all_but_last_tree(&last_stream);
+                    let glued_span = last_span.to(span);
+                    self.0.push(TokenTree::Token(glued_span, glued_tok).into());
+                    self.push_all_but_first_tree(&stream);
+                    return
+                }
+            }
+        }
+        self.0.push(stream);
+    }
+
+    pub fn add<T: Into<TokenStream>>(mut self, stream: T) -> Self {
+        self.push(stream);
+        self
+    }
+
+    pub fn build(self) -> TokenStream {
+        TokenStream::concat(self.0)
+    }
+
+    fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
+        if let TokenStreamKind::Stream(ref streams) = stream.kind {
+            let len = streams.len();
+            match len {
+                1 => {}
+                2 => self.0.push(streams[0].clone().into()),
+                _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(0 .. len - 1))),
+            }
+            self.push_all_but_last_tree(&streams[len - 1])
+        }
+    }
+
+    fn push_all_but_first_tree(&mut self, stream: &TokenStream) {
+        if let TokenStreamKind::Stream(ref streams) = stream.kind {
+            let len = streams.len();
+            match len {
+                1 => {}
+                2 => self.0.push(streams[1].clone().into()),
+                _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(1 .. len))),
+            }
+            self.push_all_but_first_tree(&streams[0])
+        }
+    }
 }
 
 #[derive(Clone)]
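`TokenStreamBuilder::push` uses `last_tree_if_joint` plus `Token::glue` to heal the seam between two streams, so splitting a stream at a joint boundary and re-concatenating it is lossless. A minimal sketch of the behavior, assuming the APIs added above (spans are `DUMMY_SP` stand-ins):

```rust
use syntax::parse::token::Token;
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax_pos::DUMMY_SP;

fn rebuild_le() -> TokenStream {
    TokenStream::builder()
        // `<` marked joint: it touched the token that followed it.
        .add(TokenTree::Token(DUMMY_SP, Token::Lt).joint())
        // `=`: glued against the pending joint `<`, producing a single `Le`.
        .add(TokenTree::Token(DUMMY_SP, Token::Eq))
        .build() // a stream holding one `<=` token, not `<` then `=`
}
```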
@@ -234,6 +336,7 @@ pub struct Cursor(CursorKind);
 
 enum CursorKind {
     Empty,
     Tree(TokenTree, bool /* consumed? */),
+    JointTree(TokenTree, bool /* consumed? */),
     Stream(StreamCursor),
 }
 
@@ -245,12 +348,13 @@ struct StreamCursor {
 }
 
 impl StreamCursor {
-    fn next(&mut self) -> Option<TokenTree> {
+    fn next_as_stream(&mut self) -> Option<TokenStream> {
         loop {
             if self.index < self.stream.len() {
                 self.index += 1;
-                match self.stream[self.index - 1].kind.clone() {
-                    TokenStreamKind::Tree(tree) => return Some(tree),
+                let next = self.stream[self.index - 1].clone();
+                match next.kind {
+                    TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => return Some(next),
                     TokenStreamKind::Stream(stream) => {
                         self.stack.push((mem::replace(&mut self.stream, stream),
                                          mem::replace(&mut self.index, 0)));
@@ -271,14 +375,10 @@ impl Iterator for Cursor {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        let (tree, consumed) = match self.0 {
-            CursorKind::Tree(ref tree, ref mut consumed @ false) => (tree, consumed),
-            CursorKind::Stream(ref mut cursor) => return cursor.next(),
-            _ => return None,
-        };
-
-        *consumed = true;
-        Some(tree.clone())
+        self.next_as_stream().map(|stream| match stream.kind {
+            TokenStreamKind::Tree(tree) | TokenStreamKind::JointTree(tree) => tree,
+            _ => unreachable!()
+        })
     }
 }
 
@@ -287,16 +387,32 @@ impl Cursor {
         Cursor(match stream.kind {
             TokenStreamKind::Empty => CursorKind::Empty,
             TokenStreamKind::Tree(tree) => CursorKind::Tree(tree, false),
+            TokenStreamKind::JointTree(tree) => CursorKind::JointTree(tree, false),
             TokenStreamKind::Stream(stream) => {
                 CursorKind::Stream(StreamCursor { stream: stream, index: 0, stack: Vec::new() })
             }
         })
     }
 
+    pub fn next_as_stream(&mut self) -> Option<TokenStream> {
+        let (stream, consumed) = match self.0 {
+            CursorKind::Tree(ref tree, ref mut consumed @ false) =>
+                (tree.clone().into(), consumed),
+            CursorKind::JointTree(ref tree, ref mut consumed @ false) =>
+                (tree.clone().joint(), consumed),
+            CursorKind::Stream(ref mut cursor) => return cursor.next_as_stream(),
+            _ => return None,
+        };
+
+        *consumed = true;
+        Some(stream)
+    }
+
     pub fn original_stream(self) -> TokenStream {
         match self.0 {
             CursorKind::Empty => TokenStream::empty(),
             CursorKind::Tree(tree, _) => tree.into(),
+            CursorKind::JointTree(tree, _) => tree.joint(),
             CursorKind::Stream(cursor) => TokenStream::concat_rc_slice({
                 cursor.stack.get(0).cloned().map(|(stream, _)| stream).unwrap_or(cursor.stream)
             }),
@@ -307,8 +423,9 @@ impl Cursor {
         fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize> {
             for stream in streams {
                 n = match stream.kind {
-                    TokenStreamKind::Tree(ref tree) if n == 0 => return Ok(tree.clone()),
-                    TokenStreamKind::Tree(..) => n - 1,
+                    TokenStreamKind::Tree(ref tree) | TokenStreamKind::JointTree(ref tree)
+                        if n == 0 => return Ok(tree.clone()),
+                    TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => n - 1,
                     TokenStreamKind::Stream(ref stream) => match look_ahead(stream, n) {
                         Ok(tree) => return Ok(tree),
                         Err(n) => n,
@@ -316,13 +433,15 @@
                     _ => n,
                 };
             }
-
             Err(n)
         }
 
         match self.0 {
-            CursorKind::Empty | CursorKind::Tree(_, true) => Err(n),
-            CursorKind::Tree(ref tree, false) => look_ahead(&[tree.clone().into()], n),
+            CursorKind::Empty |
+            CursorKind::Tree(_, true) |
+            CursorKind::JointTree(_, true) => Err(n),
+            CursorKind::Tree(ref tree, false) |
+            CursorKind::JointTree(ref tree, false) => look_ahead(&[tree.clone().into()], n),
             CursorKind::Stream(ref cursor) => {
                 look_ahead(&cursor.stream[cursor.index ..], n).or_else(|mut n| {
                     for &(ref stream, index) in cursor.stack.iter().rev() {
@@ -350,6 +469,7 @@ impl From<TokenStream> for ThinTokenStream {
         ThinTokenStream(match stream.kind {
             TokenStreamKind::Empty => None,
             TokenStreamKind::Tree(tree) => Some(RcSlice::new(vec![tree.into()])),
+            TokenStreamKind::JointTree(tree) => Some(RcSlice::new(vec![tree.joint()])),
             TokenStreamKind::Stream(stream) => Some(stream),
         })
     }
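Because jointness is carried by `TokenStreamKind::JointTree` rather than by the trees themselves, any pass that walks a stream with `next_as_stream` (or the new `TokenStream::map`) preserves it for free. A sketch under that assumption, using only the APIs added above: a span-rewriting pass that keeps `<=`-style pairs glued.

```rust
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax_pos::Span;

// Rewrites every span in the stream; JointTree entries stay joint because
// `map` re-wraps each rewritten tree with its original stream kind.
fn respan(stream: TokenStream, to: Span) -> TokenStream {
    stream.map(|tree| match tree {
        TokenTree::Token(_, tok) => TokenTree::Token(to, tok),
        TokenTree::Delimited(_, delimited) => TokenTree::Delimited(to, delimited),
    })
}
```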
diff --git a/src/libsyntax/util/rc_slice.rs b/src/libsyntax/util/rc_slice.rs
index 2d9fd7aa87553..d6939d71129e4 100644
--- a/src/libsyntax/util/rc_slice.rs
+++ b/src/libsyntax/util/rc_slice.rs
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use std::fmt;
-use std::ops::Deref;
+use std::ops::{Deref, Range};
 use std::rc::Rc;
 
 use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
@@ -30,6 +30,14 @@ impl<T> RcSlice<T> {
             data: Rc::new(vec.into_boxed_slice()),
         }
     }
+
+    pub fn sub_slice(&self, range: Range<usize>) -> Self {
+        RcSlice {
+            data: self.data.clone(),
+            offset: self.offset + range.start as u32,
+            len: (range.end - range.start) as u32,
+        }
+    }
 }
 
 impl<T> Deref for RcSlice<T> {
diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
index 0433b95865ef8..9406eda5231d5 100644
--- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
@@ -8,50 +8,37 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![allow(unused_parens)]
-#![feature(plugin)]
-#![feature(plugin_registrar)]
-#![feature(rustc_private)]
-#![plugin(proc_macro_plugin)]
+// no-prefer-dynamic
 
-extern crate rustc_plugin;
-extern crate syntax;
+#![crate_type = "proc-macro"]
+#![feature(proc_macro, proc_macro_lib)]
 
-use rustc_plugin::Registry;
+extern crate proc_macro;
 
-use syntax::ext::base::SyntaxExtension;
-use syntax::parse::token::Token;
-use syntax::symbol::Symbol;
-use syntax::tokenstream::{TokenTree, TokenStream};
+use proc_macro::{TokenStream, TokenKind, quote};
 
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(Symbol::intern("cond"),
-                                  SyntaxExtension::ProcMacro(Box::new(cond)));
-}
-
-fn cond(input: TokenStream) -> TokenStream {
+#[proc_macro]
+pub fn cond(input: TokenStream) -> TokenStream {
     let mut conds = Vec::new();
-    let mut input = input.trees().peekable();
+    let mut input = input.into_iter().peekable();
     while let Some(tree) = input.next() {
-        let mut cond = match tree {
-            TokenTree::Delimited(_, ref delimited) => delimited.stream(),
+        let cond = match tree.kind {
+            TokenKind::Sequence(_, cond) => cond,
             _ => panic!("Invalid input"),
         };
-        let mut trees = cond.trees();
-        let test = trees.next();
-        let rhs = trees.collect::<TokenStream>();
+        let mut cond_trees = cond.clone().into_iter();
+        let test = cond_trees.next().expect("Unexpected empty condition in `cond!`");
+        let rhs = cond_trees.collect::<TokenStream>();
         if rhs.is_empty() {
             panic!("Invalid macro usage in cond: {}", cond);
         }
-        let is_else = match test {
-            Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true,
+        let is_else = match test.kind {
+            TokenKind::Word(word) => *word == *"else",
             _ => false,
         };
         conds.push(if is_else || input.peek().is_none() {
             quote!({ $rhs })
         } else {
-            let test = test.unwrap();
             quote!(if $test { $rhs } else)
         });
     }
diff --git a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs
index 9522592a5e9e6..cf6584e961a67 100644
--- a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs
@@ -8,29 +8,20 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
-#![feature(plugin)] -#![feature(plugin_registrar)] -#![feature(rustc_private)] -#![plugin(proc_macro_plugin)] +// no-prefer-dynamic -extern crate rustc_plugin; -extern crate syntax; +#![crate_type = "proc-macro"] +#![feature(proc_macro, proc_macro_lib)] -use rustc_plugin::Registry; -use syntax::ext::base::SyntaxExtension; -use syntax::symbol::Symbol; -use syntax::tokenstream::TokenStream; +extern crate proc_macro; -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(Symbol::intern("hello"), - SyntaxExtension::ProcMacro(Box::new(hello))); -} +use proc_macro::{TokenStream, quote}; // This macro is not very interesting, but it does contain delimited tokens with // no content - `()` and `{}` - which has caused problems in the past. // Also, it tests that we can escape `$` via `$$`. -fn hello(_: TokenStream) -> TokenStream { +#[proc_macro] +pub fn hello(_: TokenStream) -> TokenStream { quote!({ fn hello() {} macro_rules! m { ($$($$t:tt)*) => { $$($$t)* } } diff --git a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs index 0e37a7a5dcce2..1b47043884844 100644 --- a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs +++ b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs @@ -8,47 +8,37 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(plugin, plugin_registrar, rustc_private)] -#![plugin(proc_macro_plugin)] - -extern crate rustc_plugin; -extern crate syntax; - -use rustc_plugin::Registry; -use syntax::ext::base::SyntaxExtension; -use syntax::tokenstream::TokenStream; -use syntax::symbol::Symbol; - -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(Symbol::intern("attr_tru"), - SyntaxExtension::AttrProcMacro(Box::new(attr_tru))); - reg.register_syntax_extension(Symbol::intern("attr_identity"), - SyntaxExtension::AttrProcMacro(Box::new(attr_identity))); - reg.register_syntax_extension(Symbol::intern("tru"), - SyntaxExtension::ProcMacro(Box::new(tru))); - reg.register_syntax_extension(Symbol::intern("ret_tru"), - SyntaxExtension::ProcMacro(Box::new(ret_tru))); - reg.register_syntax_extension(Symbol::intern("identity"), - SyntaxExtension::ProcMacro(Box::new(identity))); -} +// no-prefer-dynamic + +#![crate_type = "proc-macro"] +#![feature(proc_macro, proc_macro_lib)] + +extern crate proc_macro; + +use proc_macro::{TokenStream, quote}; -fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream { - quote!(fn f1() -> bool { true }) +#[proc_macro_attribute] +pub fn attr_tru(_attr: TokenStream, item: TokenStream) -> TokenStream { + let name = item.into_iter().skip(1).next().unwrap(); + quote!(fn $name() -> bool { true }) } -fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream { +#[proc_macro_attribute] +pub fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream { quote!($item) } -fn tru(_ts: TokenStream) -> TokenStream { +#[proc_macro] +pub fn tru(_ts: TokenStream) -> TokenStream { quote!(true) } -fn ret_tru(_ts: TokenStream) -> TokenStream { +#[proc_macro] +pub fn ret_tru(_ts: TokenStream) -> TokenStream { quote!(return true;) } -fn identity(ts: TokenStream) -> TokenStream { +#[proc_macro] +pub fn identity(ts: TokenStream) -> TokenStream { quote!($ts) } diff --git a/src/test/run-pass-fulldeps/macro-quote-1.rs b/src/test/run-pass-fulldeps/macro-quote-1.rs deleted file mode 100644 index e7d0a83017be0..0000000000000 --- 
a/src/test/run-pass-fulldeps/macro-quote-1.rs +++ /dev/null @@ -1,40 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// ignore-stage1 - -#![feature(plugin)] -#![feature(rustc_private)] -#![plugin(proc_macro_plugin)] - -extern crate syntax; -extern crate syntax_pos; - -use syntax::ast::{Ident, Name}; -use syntax::parse::token::{self, Token, Lit}; -use syntax::tokenstream::TokenTree; - -fn main() { - let true_tok = token::Ident(Ident::from_str("true")); - assert!(quote!(true).eq_unspanned(&true_tok.into())); - - // issue #35829, extended check to proc_macro. - let triple_dot_tok = Token::DotDotDot; - assert!(quote!(...).eq_unspanned(&triple_dot_tok.into())); - - let byte_str_tok = Token::Literal(Lit::ByteStr(Name::intern("one")), None); - assert!(quote!(b"one").eq_unspanned(&byte_str_tok.into())); - - let byte_str_raw_tok = Token::Literal(Lit::ByteStrRaw(Name::intern("#\"two\"#"), 3), None); - assert!(quote!(br###"#"two"#"###).eq_unspanned(&byte_str_raw_tok.into())); - - let str_raw_tok = Token::Literal(Lit::StrRaw(Name::intern("#\"three\"#"), 2), None); - assert!(quote!(r##"#"three"#"##).eq_unspanned(&str_raw_tok.into())); -} diff --git a/src/test/run-pass-fulldeps/macro-quote-cond.rs b/src/test/run-pass-fulldeps/macro-quote-cond.rs index fa969b6a087cf..cff743bdae6cd 100644 --- a/src/test/run-pass-fulldeps/macro-quote-cond.rs +++ b/src/test/run-pass-fulldeps/macro-quote-cond.rs @@ -11,9 +11,11 @@ // aux-build:cond_plugin.rs // ignore-stage1 -#![feature(plugin)] -#![feature(rustc_private)] -#![plugin(cond_plugin)] +#![feature(proc_macro)] + +extern crate cond_plugin; + +use cond_plugin::cond; fn fact(n : i64) -> i64 { if n == 0 { diff --git a/src/test/run-pass-fulldeps/macro-quote-test.rs b/src/test/run-pass-fulldeps/macro-quote-test.rs index bdbea8a419416..eb77895e2d7ad 100644 --- a/src/test/run-pass-fulldeps/macro-quote-test.rs +++ b/src/test/run-pass-fulldeps/macro-quote-test.rs @@ -13,10 +13,10 @@ // aux-build:hello_macro.rs // ignore-stage1 -#![feature(plugin)] -#![feature(rustc_private)] -#![plugin(hello_macro)] +#![feature(proc_macro)] + +extern crate hello_macro; fn main() { - hello!(); + hello_macro::hello!(); } diff --git a/src/test/run-pass-fulldeps/proc_macro.rs b/src/test/run-pass-fulldeps/proc_macro.rs index 22cc9f0f8d40e..cdda723585b7a 100644 --- a/src/test/run-pass-fulldeps/proc_macro.rs +++ b/src/test/run-pass-fulldeps/proc_macro.rs @@ -12,10 +12,11 @@ // ignore-stage1 // ignore-cross-compile -#![feature(plugin, custom_attribute)] -#![feature(type_macros)] +#![feature(proc_macro)] -#![plugin(proc_macro_def)] +extern crate proc_macro_def; + +use proc_macro_def::{attr_tru, attr_identity, identity, ret_tru, tru}; #[attr_tru] fn f1() -> bool { diff --git a/src/tools/tidy/src/cargo.rs b/src/tools/tidy/src/cargo.rs index c8c6cb0ee6b41..f40fea60f40a8 100644 --- a/src/tools/tidy/src/cargo.rs +++ b/src/tools/tidy/src/cargo.rs @@ -91,14 +91,6 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) { continue } - // We want the compiler to depend on the proc_macro_plugin crate so - // that it is built and included in the end, but we don't want to - // actually use it in the compiler. 
-        if toml.contains("name = \"rustc_driver\"") &&
-           krate == "proc_macro_plugin" {
-            continue
-        }
-
         if !librs.contains(&format!("extern crate {}", krate)) {
             tidy_error!(bad, "{} doesn't have `extern crate {}`, but Cargo.toml \
                               depends on it", libfile.display(), krate);
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
index 722fc2b317eb4..d98c6932c51e1 100644
--- a/src/tools/tidy/src/features.rs
+++ b/src/tools/tidy/src/features.rs
@@ -245,7 +245,7 @@ fn get_and_check_lib_features(base_src_path: &Path,
                 let mut err = |msg: &str| {
                     tidy_error!(bad, "{}:{}: {}", file.display(), line, msg);
                 };
-                if lang_features.contains_key(name) {
+                if lang_features.contains_key(name) && feature_name != "proc_macro" {
                     err("duplicating a lang feature");
                 }
                 if let Some(ref s) = lib_features.get(name) {

From 7d493bdd2a9f86ed51bc80a5c91cbb502aa3b3c4 Mon Sep 17 00:00:00 2001
From: Jeffrey Seyfried
Date: Wed, 29 Mar 2017 01:55:01 +0000
Subject: [PATCH 04/12] Add `LazyTokenStream`.

---
 src/libproc_macro/lib.rs              | 36 ++++++++++---
 src/libsyntax/attr.rs                 |  4 +-
 src/libsyntax/ext/base.rs             |  2 +-
 src/libsyntax/ext/expand.rs           | 40 ++++----------
 src/libsyntax/ext/quote.rs            | 36 ++++++-------
 src/libsyntax/ext/tt/transcribe.rs    |  2 +-
 src/libsyntax/fold.rs                 |  4 +-
 src/libsyntax/parse/attr.rs           |  4 +-
 src/libsyntax/parse/parser.rs         | 10 ++--
 src/libsyntax/parse/token.rs          | 53 ++++++++++++++++---
 src/libsyntax/print/pprust.rs         |  2 +-
 src/libsyntax/tokenstream.rs          | 45 +++++++++++-----
 .../proc-macro/auxiliary/attr-args.rs |  2 +-
 13 files changed, 151 insertions(+), 89 deletions(-)

diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs
index f1abd3339ed53..8a345e67c57b3 100644
--- a/src/libproc_macro/lib.rs
+++ b/src/libproc_macro/lib.rs
@@ -42,6 +42,7 @@
 #![feature(staged_api)]
 #![feature(lang_items)]
 
+#[macro_use]
 extern crate syntax;
 extern crate syntax_pos;
 
@@ -50,7 +51,8 @@ use std::str::FromStr;
 
 use syntax::ast;
 use syntax::errors::DiagnosticBuilder;
-use syntax::parse::{self, token};
+use syntax::parse::{self, token, parse_stream_from_source_str};
+use syntax::print::pprust;
 use syntax::symbol;
 use syntax::tokenstream;
 use syntax_pos::DUMMY_SP;
@@ -337,8 +339,18 @@ impl Iterator for TokenIter {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        self.next.take().or_else(|| self.cursor.next_as_stream())
-            .map(|next| TokenTree::from_raw(next, &mut self.next))
+        loop {
+            let next =
+                unwrap_or!(self.next.take().or_else(|| self.cursor.next_as_stream()), return None);
+            let tree = TokenTree::from_raw(next, &mut self.next);
+            if tree.span.0 == DUMMY_SP {
+                if let TokenKind::Sequence(Delimiter::None, stream) = tree.kind {
+                    self.cursor.insert(stream.0);
+                    continue
+                }
+            }
+            return Some(tree);
+        }
     }
 }
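The loop above is what makes interpolated fragments transparent to proc macros: `from_raw` (next hunk) lowers an interpolated nonterminal to a span-less `Sequence(Delimiter::None, ..)`, and the iterator splices that sequence into the cursor instead of yielding it as one tree. A sketch of the observable effect with a hypothetical consumer (`count_trees` and `forward!` are illustrations, not part of this patch):

```rust
extern crate proc_macro;

use proc_macro::{quote, TokenStream};

// Suppose a user writes:
//   macro_rules! forward { ($e:expr) => { count_trees!($e) } }
//   forward!(1 + 2);
#[proc_macro]
pub fn count_trees(input: TokenStream) -> TokenStream {
    // With the splicing loop, `$e` arrives as its underlying tokens
    // (`1`, `+`, `2` -- three trees), not as one opaque interpolated tree.
    assert_eq!(input.into_iter().count(), 3);
    quote!() // a real macro would construct output here
}
```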
| Eof => unreachable!(), @@ -530,20 +549,21 @@ pub mod __internal { pub use self::quote::{Quoter, __rt}; use std::cell::Cell; - use std::rc::Rc; use syntax::ast; use syntax::ext::base::ExtCtxt; use syntax::ext::hygiene::Mark; use syntax::ptr::P; - use syntax::parse::{self, token, ParseSess}; + use syntax::parse::{self, ParseSess}; + use syntax::parse::token::{self, Token}; use syntax::tokenstream; + use syntax_pos::DUMMY_SP; use super::{TokenStream, LexError}; pub fn new_token_stream(item: P) -> TokenStream { - let (span, token) = (item.span, token::Interpolated(Rc::new(token::NtItem(item)))); - TokenStream(tokenstream::TokenTree::Token(span, token).into()) + let token = Token::interpolated(token::NtItem(item)); + TokenStream(tokenstream::TokenTree::Token(DUMMY_SP, token).into()) } pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream { diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 8e63e219c42c1..f0fc849c0c596 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -1057,7 +1057,7 @@ impl MetaItem { { let (mut span, name) = match tokens.next() { Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name), - Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match **nt { + Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 { token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name), token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()), _ => return None, @@ -1229,7 +1229,7 @@ impl LitKind { match token { Token::Ident(ident) if ident.name == "true" => Some(LitKind::Bool(true)), Token::Ident(ident) if ident.name == "false" => Some(LitKind::Bool(false)), - Token::Interpolated(ref nt) => match **nt { + Token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref v) => match v.node { ExprKind::Lit(ref lit) => Some(lit.node.clone()), _ => None, diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 7a5c9456c5315..4881170c1d13a 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -215,7 +215,7 @@ impl TTMacroExpander for F impl Folder for AvoidInterpolatedIdents { fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree { if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt { - if let token::NtIdent(ident) = **nt { + if let token::NtIdent(ident) = nt.0 { return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node)); } } diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 11efef4549976..d2e51c9cb4868 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -21,15 +21,15 @@ use ext::placeholders::{placeholder, PlaceholderExpander}; use feature_gate::{self, Features, is_builtin_attr}; use fold; use fold::*; -use parse::{filemap_to_stream, ParseSess, DirectoryOwnership, PResult, token}; +use parse::{DirectoryOwnership, PResult}; +use parse::token::{self, Token}; use parse::parser::Parser; -use print::pprust; use ptr::P; use std_inject; use symbol::Symbol; use symbol::keywords; use syntax_pos::{Span, DUMMY_SP}; -use tokenstream::TokenStream; +use tokenstream::{TokenStream, TokenTree}; use util::small_vector::SmallVector; use visit::Visitor; @@ -427,11 +427,13 @@ impl<'a, 'b> MacroExpander<'a, 'b> { kind.expect_from_annotatables(items) } SyntaxExtension::AttrProcMacro(ref mac) => { - let item_toks = stream_for_item(&item, self.cx.parse_sess); - - let span = Span { ctxt: self.cx.backtrace(), ..attr.span }; - let tok_result = mac.expand(self.cx, attr.span, 
attr.tokens, item_toks); - self.parse_expansion(tok_result, kind, &attr.path, span) + let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item { + Annotatable::Item(item) => token::NtItem(item), + Annotatable::TraitItem(item) => token::NtTraitItem(item.unwrap()), + Annotatable::ImplItem(item) => token::NtImplItem(item.unwrap()), + })).into(); + let tok_result = mac.expand(self.cx, attr.span, attr.tokens, item_tok); + self.parse_expansion(tok_result, kind, &attr.path, attr.span) } SyntaxExtension::ProcMacroDerive(..) | SyntaxExtension::BuiltinDerive(..) => { self.cx.span_err(attr.span, &format!("`{}` is a derive mode", attr.path)); @@ -769,28 +771,6 @@ pub fn find_attr_invoc(attrs: &mut Vec) -> Option TokenStream { - let text = match *item { - Annotatable::Item(ref i) => pprust::item_to_string(i), - Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti), - Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii), - }; - string_to_stream(text, parse_sess, item.span()) -} - -fn string_to_stream(text: String, parse_sess: &ParseSess, span: Span) -> TokenStream { - let filename = String::from(""); - filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text), Some(span)) -} - impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { fn fold_expr(&mut self, expr: P) -> P { let mut expr = self.cfg.configure_expr(expr).unwrap(); diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 314a97496f8cc..9907dfe341e75 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -30,9 +30,9 @@ pub mod rt { use ast; use codemap::Spanned; use ext::base::ExtCtxt; - use parse::{self, token, classify}; + use parse::{self, classify}; + use parse::token::{self, Token}; use ptr::P; - use std::rc::Rc; use symbol::Symbol; use tokenstream::{self, TokenTree, TokenStream}; @@ -82,70 +82,70 @@ pub mod rt { impl ToTokens for ast::Path { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtPath(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::Ty { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtTy(P(self.clone())); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Block { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtBlock(P(self.clone())); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Generics { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtGenerics(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::WhereClause { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtWhereClause(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtItem(self.clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::ImplItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtImplItem(self.clone()); - vec![TokenTree::Token(self.span, 
token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtImplItem((**self).clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::TraitItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtTraitItem(self.clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Stmt { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtStmt(self.clone()); - let mut tts = vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))]; + let mut tts = vec![TokenTree::Token(self.span, Token::interpolated(nt))]; // Some statements require a trailing semicolon. if classify::stmt_ends_with_semi(&self.node) { @@ -159,35 +159,35 @@ pub mod rt { impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtExpr(self.clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtPat(self.clone()); - vec![TokenTree::Token(self.span, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(self.span, Token::interpolated(nt))] } } impl ToTokens for ast::Arm { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtArm(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for ast::Arg { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtArg(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } impl ToTokens for P { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtBlock(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } @@ -215,7 +215,7 @@ pub mod rt { impl ToTokens for ast::MetaItem { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { let nt = token::NtMeta(self.clone()); - vec![TokenTree::Token(DUMMY_SP, token::Interpolated(Rc::new(nt)))] + vec![TokenTree::Token(DUMMY_SP, Token::interpolated(nt))] } } diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 9438e2fb0e5bf..fe3dd83f9d5c0 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -156,7 +156,7 @@ pub fn transcribe(cx: &ExtCtxt, result.push(tt.clone().into()); } else { sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark); - let token = TokenTree::Token(sp, token::Interpolated(nt.clone())); + let token = TokenTree::Token(sp, Token::interpolated((**nt).clone())); result.push(token.into()); } } else { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index ca4814397d8ac..1fc670ec9f7fb 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -22,7 +22,7 @@ use ast::*; use ast; use syntax_pos::Span; use codemap::{Spanned, respan}; -use parse::token; +use parse::token::{self, Token}; use ptr::P; use symbol::keywords; use tokenstream::*; @@ -586,7 +586,7 @@ pub fn noop_fold_token(t: token::Token, fld: &mut T) -> token::Token Ok(nt) => nt, Err(nt) => (*nt).clone(), }; - 
token::Interpolated(Rc::new(fld.fold_interpolated(nt))) + Token::interpolated(fld.fold_interpolated(nt.0)) } _ => t } diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index 082930777e598..c99a09ab24e6b 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -151,7 +151,7 @@ impl<'a> Parser<'a> { pub fn parse_path_and_tokens(&mut self) -> PResult<'a, (ast::Path, TokenStream)> { let meta = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { Nonterminal::NtMeta(ref meta) => Some(meta.clone()), _ => None, }, @@ -223,7 +223,7 @@ impl<'a> Parser<'a> { /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ; pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> { let nt_meta = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtMeta(ref e) => Some(e.clone()), _ => None, }, diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index a30dcef6f44e1..2858d49d63db7 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -107,7 +107,7 @@ pub enum BlockMode { macro_rules! maybe_whole_expr { ($p:expr) => { if let token::Interpolated(nt) = $p.token.clone() { - match *nt { + match nt.0 { token::NtExpr(ref e) => { $p.bump(); return Ok((*e).clone()); @@ -134,7 +134,7 @@ macro_rules! maybe_whole_expr { macro_rules! maybe_whole { ($p:expr, $constructor:ident, |$x:ident| $e:expr) => { if let token::Interpolated(nt) = $p.token.clone() { - if let token::$constructor($x) = (*nt).clone() { + if let token::$constructor($x) = nt.0.clone() { $p.bump(); return Ok($e); } @@ -1620,7 +1620,7 @@ impl<'a> Parser<'a> { /// Matches token_lit = LIT_INTEGER | ... pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { let out = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref v) => match v.node { ExprKind::Lit(ref lit) => { lit.node.clone() } _ => { return self.unexpected_last(&self.token); } @@ -1791,7 +1791,7 @@ impl<'a> Parser<'a> { /// This is used when parsing derive macro paths in `#[derive]` attributes. pub fn parse_path_allowing_meta(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { let meta_ident = match self.token { - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtMeta(ref meta) => match meta.node { ast::MetaItemKind::Word => Some(ast::Ident::with_empty_ctxt(meta.name)), _ => None, @@ -2635,7 +2635,7 @@ impl<'a> Parser<'a> { } token::Interpolated(ref nt) => { self.meta_var_span = Some(self.span); - match **nt { + match nt.0 { token::NtIdent(ident) => ident, _ => return, } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index e568af66e8aa8..189a18f442033 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -16,9 +16,11 @@ pub use self::Token::*; use ast::{self}; use ptr::P; +use serialize::{Decodable, Decoder, Encodable, Encoder}; use symbol::keywords; -use tokenstream::TokenTree; +use tokenstream::{TokenStream, TokenTree}; +use std::cell::RefCell; use std::fmt; use std::rc::Rc; @@ -168,7 +170,7 @@ pub enum Token { Lifetime(ast::Ident), /* For interpolation */ - Interpolated(Rc), + Interpolated(Rc<(Nonterminal, LazyTokenStream)>), // Can be expanded into several tokens. 
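+    // The paired `LazyTokenStream` caches this nonterminal's token stream; it is computed
+    // on demand by `LazyTokenStream::force` (added below) the first time a proc macro needs
+    // the interpolated tokens.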
/// Doc comment DocComment(ast::Name), @@ -187,6 +189,10 @@ } impl Token { + pub fn interpolated(nt: Nonterminal) -> Token { + Token::Interpolated(Rc::new((nt, LazyTokenStream::new()))) + } + /// Returns `true` if the token starts with '>'. pub fn is_like_gt(&self) -> bool { match *self { @@ -211,7 +217,7 @@ impl Token { Lt | BinOp(Shl) | // associated path ModSep | // global path Pound => true, // expression attributes - Interpolated(ref nt) => match **nt { + Interpolated(ref nt) => match nt.0 { NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true, _ => false, }, @@ -234,7 +240,7 @@ impl Token { Lifetime(..) | // lifetime bound in trait object Lt | BinOp(Shl) | // associated path ModSep => true, // global path - Interpolated(ref nt) => match **nt { + Interpolated(ref nt) => match nt.0 { NtIdent(..) | NtTy(..) | NtPath(..) => true, _ => false, }, @@ -253,7 +259,7 @@ impl Token { pub fn ident(&self) -> Option<ast::Ident> { match *self { Ident(ident) => Some(ident), - Interpolated(ref nt) => match **nt { + Interpolated(ref nt) => match nt.0 { NtIdent(ident) => Some(ident.node), _ => None, }, @@ -285,7 +291,7 @@ impl Token { /// Returns `true` if the token is an interpolated path. pub fn is_path(&self) -> bool { if let Interpolated(ref nt) = *self { - if let NtPath(..) = **nt { + if let NtPath(..) = nt.0 { return true; } } @@ -461,3 +467,38 @@ pub fn is_op(tok: &Token) -> bool { _ => true, } } + +#[derive(Clone, Eq, PartialEq, Debug)] +pub struct LazyTokenStream(RefCell<Option<TokenStream>>); + +impl LazyTokenStream { + pub fn new() -> Self { + LazyTokenStream(RefCell::new(None)) + } + + pub fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream { + let mut opt_stream = self.0.borrow_mut(); + if opt_stream.is_none() { + *opt_stream = Some(f()); + }; + opt_stream.clone().unwrap() + } +} + +impl Encodable for LazyTokenStream { + fn encode<S: Encoder>(&self, _: &mut S) -> Result<(), S::Error> { + Ok(()) + } +} + +impl Decodable for LazyTokenStream { + fn decode<D: Decoder>(_: &mut D) -> Result<LazyTokenStream, D::Error> { + Ok(LazyTokenStream::new()) + } +} + +impl ::std::hash::Hash for LazyTokenStream { + fn hash<H: ::std::hash::Hasher>(&self, hasher: &mut H) { + self.0.borrow().hash(hasher); + } +} diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 6c6ca556e35ed..ac5b32c828ae2 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -275,7 +275,7 @@ pub fn token_to_string(tok: &Token) -> String { token::Comment => "/* */".to_string(), token::Shebang(s) => format!("/* shebang: {}*/", s), - token::Interpolated(ref nt) => match **nt { + token::Interpolated(ref nt) => match nt.0 { token::NtExpr(ref e) => expr_to_string(e), token::NtMeta(ref e) => meta_item_to_string(e), token::NtTy(ref e) => ty_to_string(e), diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 2637972cc6362..a3c3fa3a52ee7 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -348,6 +348,10 @@ struct StreamCursor { } impl StreamCursor { + fn new(stream: RcSlice<TokenStream>) -> Self { + StreamCursor { stream: stream, index: 0, stack: Vec::new() } + } + fn next_as_stream(&mut self) -> Option<TokenStream> { loop { if self.index < self.stream.len() { @@ -355,10 +359,7 @@ impl StreamCursor { let next = self.stream[self.index - 1].clone(); match next.kind { TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..)
=> return Some(next), - TokenStreamKind::Stream(stream) => { - self.stack.push((mem::replace(&mut self.stream, stream), - mem::replace(&mut self.index, 0))); - } + TokenStreamKind::Stream(stream) => self.insert(stream), TokenStreamKind::Empty => {} } } else if let Some((stream, index)) = self.stack.pop() { @@ -369,6 +370,11 @@ impl StreamCursor { } } } + + fn insert(&mut self, stream: RcSlice<TokenStream>) { + self.stack.push((mem::replace(&mut self.stream, stream), + mem::replace(&mut self.index, 0))); + } } impl Iterator for Cursor { @@ -388,9 +394,7 @@ impl Cursor { TokenStreamKind::Empty => CursorKind::Empty, TokenStreamKind::Tree(tree) => CursorKind::Tree(tree, false), TokenStreamKind::JointTree(tree) => CursorKind::JointTree(tree, false), - TokenStreamKind::Stream(stream) => { - CursorKind::Stream(StreamCursor { stream: stream, index: 0, stack: Vec::new() }) - } + TokenStreamKind::Stream(stream) => CursorKind::Stream(StreamCursor::new(stream)), }) } @@ -408,13 +412,30 @@ impl Cursor { Some(stream) } - pub fn original_stream(self) -> TokenStream { + pub fn insert(&mut self, stream: TokenStream) { + match self.0 { + _ if stream.is_empty() => return, + CursorKind::Empty => *self = stream.trees(), + CursorKind::Tree(_, consumed) | CursorKind::JointTree(_, consumed) => { + *self = TokenStream::concat(vec![self.original_stream(), stream]).trees(); + if consumed { + self.next(); + } + } + CursorKind::Stream(ref mut cursor) => { + cursor.insert(ThinTokenStream::from(stream).0.unwrap()); + } + } + } + + pub fn original_stream(&self) -> TokenStream { match self.0 { CursorKind::Empty => TokenStream::empty(), - CursorKind::Tree(tree, _) => tree.into(), - CursorKind::JointTree(tree, _) => tree.joint(), - CursorKind::Stream(cursor) => TokenStream::concat_rc_slice({ - cursor.stack.get(0).cloned().map(|(stream, _)| stream).unwrap_or(cursor.stream) + CursorKind::Tree(ref tree, _) => tree.clone().into(), + CursorKind::JointTree(ref tree, _) => tree.clone().joint(), + CursorKind::Stream(ref cursor) => TokenStream::concat_rc_slice({ + cursor.stack.get(0).cloned().map(|(stream, _)| stream) + .unwrap_or(cursor.stream.clone()) }), } } diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs index 989c77f1089cf..93815d16837d3 100644 --- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/attr-args.rs @@ -24,7 +24,7 @@ pub fn attr_with_args(args: TokenStream, input: TokenStream) -> TokenStream { let input = input.to_string(); - assert_eq!(input, "fn foo ( ) { }"); + assert_eq!(input, "fn foo() { }"); r#" fn foo() -> &'static str { "Hello, world!" } From 20a90485c040df87a667e9b6ee38e4d8a7d7fc5d Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Fri, 31 Mar 2017 04:06:28 +0000 Subject: [PATCH 05/12] Add example/test for `quote!` hygiene.
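This patch adds an executable example of the `quote!` hygiene guarantees: identifiers and macro invocations emitted by a proc macro resolve at the macro's definition site, not at the call site. A sketch of the pattern the new tests exercise (the real definitions are in hygiene_example_codegen.rs below):

    quote!(hello_helper!($input))                             // always the sibling proc macro
    quote! { let string = format!("hello {}", $input); ... }  // `format!` from the prelude

so a caller-side `macro_rules! hello_helper` or `macro_rules! format` cannot change what the expansion means.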
--- .../proc-macro/auxiliary/hygiene_example.rs | 19 ++++++++++ .../auxiliary/hygiene_example_codegen.rs | 36 +++++++++++++++++++ .../proc-macro/hygiene_example.rs | 27 ++++++++++++++ 3 files changed, 82 insertions(+) create mode 100644 src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example.rs create mode 100644 src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example_codegen.rs create mode 100644 src/test/run-pass-fulldeps/proc-macro/hygiene_example.rs diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example.rs new file mode 100644 index 0000000000000..8ffa7abe6f7f9 --- /dev/null +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example.rs @@ -0,0 +1,19 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(proc_macro)] + +extern crate hygiene_example_codegen; + +pub use hygiene_example_codegen::hello; + +pub fn print(string: &str) { + println!("{}", string); +} diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example_codegen.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example_codegen.rs new file mode 100644 index 0000000000000..055e4e2fad7af --- /dev/null +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/hygiene_example_codegen.rs @@ -0,0 +1,36 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// no-prefer-dynamic + +#![feature(proc_macro)] +#![crate_type = "proc-macro"] + +extern crate proc_macro as proc_macro_renamed; // This does not break `quote!` + +use proc_macro_renamed::{TokenStream, quote}; + +#[proc_macro] +pub fn hello(input: TokenStream) -> TokenStream { + quote!(hello_helper!($input)) + //^ `hello_helper!` always resolves to the following proc macro, + //| no matter where `hello!` is used. +} + +#[proc_macro] +pub fn hello_helper(input: TokenStream) -> TokenStream { + quote! { + extern crate hygiene_example; // This is never a conflict error + let string = format!("hello {}", $input); + //^ `format!` always resolves to the prelude macro, + //| even if a different `format!` is in scope where `hello!` is used. + hygiene_example::print(&string) + } +} diff --git a/src/test/run-pass-fulldeps/proc-macro/hygiene_example.rs b/src/test/run-pass-fulldeps/proc-macro/hygiene_example.rs new file mode 100644 index 0000000000000..51198db5aa76d --- /dev/null +++ b/src/test/run-pass-fulldeps/proc-macro/hygiene_example.rs @@ -0,0 +1,27 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ + // aux-build:hygiene_example_codegen.rs + // aux-build:hygiene_example.rs + + #![feature(proc_macro)] + + extern crate hygiene_example; + use hygiene_example::hello; + + fn main() { + mod hygiene_example {} // no conflict with `extern crate hygiene_example;` from the proc macro + macro_rules! format { () => {} } // does not interfere with `format!` from the proc macro + macro_rules! hello_helper { () => {} } // similarly does not interfere with the proc macro + + let string = "world"; // no conflict with `string` from the proc macro + hello!(string); + hello!(string); +} From 1e32a3f15e35075b537741cdfef40f29bb856582 Mon Sep 17 00:00:00 2001 From: Jeffrey Seyfried Date: Fri, 31 Mar 2017 04:08:33 +0000 Subject: [PATCH 06/12] Test compound tokens. --- .../auxiliary/count_compound_ops.rs | 36 +++++++++++++++++++ .../proc-macro/count_compound_ops.rs | 20 +++++++++++ 2 files changed, 56 insertions(+) create mode 100644 src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs create mode 100644 src/test/run-pass-fulldeps/proc-macro/count_compound_ops.rs diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs new file mode 100644 index 0000000000000..2ff9dc2849460 --- /dev/null +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs @@ -0,0 +1,36 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// no-prefer-dynamic + +#![feature(proc_macro)] +#![crate_type = "proc-macro"] + +extern crate proc_macro; + +use proc_macro::{TokenStream, TokenKind, OpKind, Literal, quote}; + +#[proc_macro] +pub fn count_compound_ops(input: TokenStream) -> TokenStream { + assert_eq!(count_compound_ops_helper(quote!(++ (&&) 4@a)), 3); + TokenKind::Literal(Literal::u32(count_compound_ops_helper(input))).into() +} + +fn count_compound_ops_helper(input: TokenStream) -> u32 { + let mut count = 0; + for token in input { + match token.kind { + TokenKind::Op(c, OpKind::Alone) => count += 1, + TokenKind::Sequence(_, tokens) => count += count_compound_ops_helper(tokens), + _ => {} + } + } + count +} diff --git a/src/test/run-pass-fulldeps/proc-macro/count_compound_ops.rs b/src/test/run-pass-fulldeps/proc-macro/count_compound_ops.rs new file mode 100644 index 0000000000000..1a2b144e4717b --- /dev/null +++ b/src/test/run-pass-fulldeps/proc-macro/count_compound_ops.rs @@ -0,0 +1,20 @@ +// Copyright 2017 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:count_compound_ops.rs + +#![feature(proc_macro)] + +extern crate count_compound_ops; + +use count_compound_ops::count_compound_ops; + +fn main() { + assert_eq!(count_compound_ops!(foo<=>bar << Date: Mon, 5 Jun 2017 01:41:33 +0000 Subject: [PATCH 07/12] Address review comments.
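The renames applied below: `TokenKind` becomes `TokenNode` (with `Sequence` -> `Group` and `Word` -> `Term`, a new interned-string type replacing the `Deref`-based `Symbol` wrapper), `OpKind` becomes `Spacing`, `TokenIter` becomes `TokenTreeIter`, and `from_raw`/`to_raw` become `from_internal`/`to_internal`. `LazyTokenStream` also switches from `RefCell` to `Cell`, with manual `Eq`, `Hash`, and `Debug` impls, since the cached stream is a pure function of the `Nonterminal` and can be ignored. A sketch of consuming the renamed API:

    match tree.kind {
        TokenNode::Group(_, stream) => { /* delimited token stream */ }
        TokenNode::Term(term) => { /* word; inspect via term.as_str() */ }
        TokenNode::Op(op, Spacing::Alone) => { /* lone punctuation character */ }
        _ => { /* joint punctuation, literals, ... */ }
    }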
--- src/libproc_macro/lib.rs | 190 +++++++++++------- src/libproc_macro/quote.rs | 16 +- src/librustc_metadata/creader.rs | 6 +- src/libsyntax/parse/lexer/mod.rs | 2 +- src/libsyntax/parse/token.rs | 44 ++-- src/libsyntax/tokenstream.rs | 10 +- .../auxiliary/cond_plugin.rs | 6 +- .../auxiliary/count_compound_ops.rs | 8 +- 8 files changed, 172 insertions(+), 110 deletions(-) diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 8a345e67c57b3..06f9634d70613 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -46,14 +46,14 @@ extern crate syntax; extern crate syntax_pos; -use std::{fmt, iter, ops}; +use std::{ascii, fmt, iter}; use std::str::FromStr; use syntax::ast; use syntax::errors::DiagnosticBuilder; use syntax::parse::{self, token, parse_stream_from_source_str}; use syntax::print::pprust; -use syntax::symbol; +use syntax::symbol::Symbol; use syntax::tokenstream; use syntax_pos::DUMMY_SP; use syntax_pos::SyntaxContext; @@ -68,12 +68,12 @@ use syntax_pos::SyntaxContext; /// The API of this type is intentionally bare-bones, but it'll be expanded over /// time! #[stable(feature = "proc_macro_lib", since = "1.15.0")] -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct TokenStream(tokenstream::TokenStream); /// Error returned from `TokenStream::from_str`. -#[derive(Debug)] #[stable(feature = "proc_macro_lib", since = "1.15.0")] +#[derive(Debug)] pub struct LexError { _inner: (), } @@ -110,16 +110,20 @@ impl fmt::Display for TokenStream { #[macro_export] macro_rules! quote { () => {} } +#[unstable(feature = "proc_macro_internals", issue = "27812")] +#[doc(hidden)] +mod quote; + #[unstable(feature = "proc_macro", issue = "38356")] impl From<TokenTree> for TokenStream { fn from(tree: TokenTree) -> TokenStream { - TokenStream(tree.to_raw()) + TokenStream(tree.to_internal()) } } #[unstable(feature = "proc_macro", issue = "38356")] -impl From<TokenKind> for TokenStream { - fn from(kind: TokenKind) -> TokenStream { +impl From<TokenNode> for TokenStream { + fn from(kind: TokenNode) -> TokenStream { TokenTree::from(kind).into() } } @@ -127,7 +131,7 @@ impl From<TokenKind> for TokenStream { #[unstable(feature = "proc_macro", issue = "38356")] impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream { fn from_iter<I: IntoIterator<Item = T>>(streams: I) -> Self { - let mut builder = tokenstream::TokenStream::builder(); + let mut builder = tokenstream::TokenStreamBuilder::new(); for stream in streams { builder.push(stream.into().0); } @@ -138,10 +142,10 @@ impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream { #[unstable(feature = "proc_macro", issue = "38356")] impl IntoIterator for TokenStream { type Item = TokenTree; - type IntoIter = TokenIter; + type IntoIter = TokenTreeIter; - fn into_iter(self) -> TokenIter { - TokenIter { cursor: self.0.trees(), next: None } + fn into_iter(self) -> TokenTreeIter { + TokenTreeIter { cursor: self.0.trees(), next: None } } } @@ -161,7 +165,7 @@ impl TokenStream { /// A region of source code, along with macro expansion information. #[unstable(feature = "proc_macro", issue = "38356")] -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Debug)] pub struct Span(syntax_pos::Span); #[unstable(feature = "proc_macro", issue = "38356")] impl Default for Span { @@ -174,6 +178,13 @@ impl Default for Span { } } +/// Quote a `Span` into a `TokenStream`. +/// This is needed to implement a custom quoter. +#[unstable(feature = "proc_macro", issue = "38356")] +pub fn quote_span(span: Span) -> TokenStream { + TokenStream(quote::Quote::quote(&span.0)) +} + impl Span { /// The span of the invocation of the current procedural macro.
#[unstable(feature = "proc_macro", issue = "38356")] @@ -184,17 +195,17 @@ impl Span { /// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`). #[unstable(feature = "proc_macro", issue = "38356")] -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct TokenTree { /// The `TokenTree`'s span pub span: Span, /// Description of the `TokenTree` - pub kind: TokenKind, + pub kind: TokenNode, } #[unstable(feature = "proc_macro", issue = "38356")] -impl From for TokenTree { - fn from(kind: TokenKind) -> TokenTree { +impl From for TokenTree { + fn from(kind: TokenNode) -> TokenTree { TokenTree { span: Span::default(), kind: kind } } } @@ -207,21 +218,21 @@ impl fmt::Display for TokenTree { } /// Description of a `TokenTree` -#[derive(Clone)] +#[derive(Clone, Debug)] #[unstable(feature = "proc_macro", issue = "38356")] -pub enum TokenKind { +pub enum TokenNode { /// A delimited tokenstream. - Sequence(Delimiter, TokenStream), + Group(Delimiter, TokenStream), /// A unicode identifier. - Word(Symbol), + Term(Term), /// A punctuation character (`+`, `,`, `$`, etc.). - Op(char, OpKind), + Op(char, Spacing), /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`). Literal(Literal), } /// Describes how a sequence of token trees is delimited. -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Debug)] #[unstable(feature = "proc_macro", issue = "38356")] pub enum Delimiter { /// `( ... )` @@ -235,30 +246,28 @@ pub enum Delimiter { } /// An interned string. -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Debug)] #[unstable(feature = "proc_macro", issue = "38356")] -pub struct Symbol(symbol::Symbol); +pub struct Term(Symbol); -#[unstable(feature = "proc_macro", issue = "38356")] -impl<'a> From<&'a str> for Symbol { - fn from(string: &'a str) -> Symbol { - Symbol(symbol::Symbol::intern(string)) +impl Term { + /// Intern a string into a `Term`. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn intern(string: &str) -> Term { + Term(Symbol::intern(string)) } -} -#[unstable(feature = "proc_macro", issue = "38356")] -impl ops::Deref for Symbol { - type Target = str; - - fn deref(&self) -> &str { - unsafe { &*(self.0.as_str().deref() as *const str) } + /// Get a reference to the interned string. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn as_str(&self) -> &str { + unsafe { &*(&*self.0.as_str() as *const str) } } } /// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace. -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Debug)] #[unstable(feature = "proc_macro", issue = "38356")] -pub enum OpKind { +pub enum Spacing { /// e.g. `+` is `Alone` in `+ =`. Alone, /// e.g. `+` is `Joint` in `+=`. @@ -266,14 +275,14 @@ pub enum OpKind { } /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`). -#[derive(Clone)] +#[derive(Clone, Debug)] #[unstable(feature = "proc_macro", issue = "38356")] pub struct Literal(token::Token); #[unstable(feature = "proc_macro", issue = "38356")] impl fmt::Display for Literal { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - TokenTree { kind: TokenKind::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f) + TokenTree { kind: TokenNode::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f) } } @@ -282,30 +291,51 @@ macro_rules! int_literals { /// Integer literal. 
#[unstable(feature = "proc_macro", issue = "38356")] pub fn $int_kind(n: $int_kind) -> Literal { - Literal::integer(n as i128, stringify!($int_kind)) + Literal::typed_integer(n as i128, stringify!($int_kind)) } )*} } impl Literal { + /// Integer literal + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn integer(n: i128) -> Literal { + Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), None)) + } + int_literals!(u8, i8, u16, i16, u32, i32, u64, i64); - fn integer(n: i128, kind: &'static str) -> Literal { - Literal(token::Literal(token::Lit::Integer(symbol::Symbol::intern(&n.to_string())), - Some(symbol::Symbol::intern(kind)))) + fn typed_integer(n: i128, kind: &'static str) -> Literal { + Literal(token::Literal(token::Lit::Integer(Symbol::intern(&n.to_string())), + Some(Symbol::intern(kind)))) + } + + /// Floating point literal. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn float(n: f64) -> Literal { + if !n.is_finite() { + panic!("Invalid float literal {}", n); + } + Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())), None)) } /// Floating point literal. #[unstable(feature = "proc_macro", issue = "38356")] pub fn f32(n: f32) -> Literal { - Literal(token::Literal(token::Lit::Float(symbol::Symbol::intern(&n.to_string())), - Some(symbol::Symbol::intern("f32")))) + if !n.is_finite() { + panic!("Invalid f32 literal {}", n); + } + Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())), + Some(Symbol::intern("f32")))) } /// Floating point literal. #[unstable(feature = "proc_macro", issue = "38356")] - pub fn f64(n: f32) -> Literal { - Literal(token::Literal(token::Lit::Float(symbol::Symbol::intern(&n.to_string())), - Some(symbol::Symbol::intern("f64")))) + pub fn f64(n: f64) -> Literal { + if !n.is_finite() { + panic!("Invalid f64 literal {}", n); + } + Literal(token::Literal(token::Lit::Float(Symbol::intern(&n.to_string())), + Some(Symbol::intern("f64")))) } /// String literal. @@ -315,7 +345,7 @@ impl Literal { for ch in string.chars() { escaped.extend(ch.escape_unicode()); } - Literal(token::Literal(token::Lit::Str_(symbol::Symbol::intern(&escaped)), None)) + Literal(token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None)) } /// Character literal. @@ -323,28 +353,36 @@ impl Literal { pub fn character(ch: char) -> Literal { let mut escaped = String::new(); escaped.extend(ch.escape_unicode()); - Literal(token::Literal(token::Lit::Char(symbol::Symbol::intern(&escaped)), None)) + Literal(token::Literal(token::Lit::Char(Symbol::intern(&escaped)), None)) + } + + /// Byte string literal. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn byte_string(bytes: &[u8]) -> Literal { + let string = bytes.iter().cloned().flat_map(ascii::escape_default) + .map(Into::<char>::into).collect::<String>(); + Literal(token::Literal(token::Lit::ByteStr(Symbol::intern(&string)), None)) + } } /// An iterator over `TokenTree`s.
#[unstable(feature = "proc_macro", issue = "38356")] -pub struct TokenIter { +pub struct TokenTreeIter { cursor: tokenstream::Cursor, next: Option<tokenstream::TokenStream>, } #[unstable(feature = "proc_macro", issue = "38356")] -impl Iterator for TokenIter { +impl Iterator for TokenTreeIter { type Item = TokenTree; fn next(&mut self) -> Option<TokenTree> { loop { let next = unwrap_or!(self.next.take().or_else(|| self.cursor.next_as_stream()), return None); - let tree = TokenTree::from_raw(next, &mut self.next); + let tree = TokenTree::from_internal(next, &mut self.next); if tree.span.0 == DUMMY_SP { - if let TokenKind::Sequence(Delimiter::None, stream) = tree.kind { + if let TokenNode::Group(Delimiter::None, stream) = tree.kind { self.cursor.insert(stream.0); continue } @@ -355,7 +393,7 @@ impl Iterator for TokenTreeIter { } impl Delimiter { - fn from_raw(delim: token::DelimToken) -> Delimiter { + fn from_internal(delim: token::DelimToken) -> Delimiter { match delim { token::Paren => Delimiter::Parenthesis, token::Brace => Delimiter::Brace, @@ -364,7 +402,7 @@ impl Delimiter { } } - fn to_raw(self) -> token::DelimToken { + fn to_internal(self) -> token::DelimToken { match self { Delimiter::Parenthesis => token::Paren, Delimiter::Brace => token::Brace, @@ -375,7 +413,7 @@ impl Delimiter { } impl TokenTree { - fn from_raw(stream: tokenstream::TokenStream, next: &mut Option<tokenstream::TokenStream>) + fn from_internal(stream: tokenstream::TokenStream, next: &mut Option<tokenstream::TokenStream>) -> TokenTree { use syntax::parse::token::*; @@ -383,17 +421,17 @@ impl TokenTree { let (mut span, token) = match tree { tokenstream::TokenTree::Token(span, token) => (span, token), tokenstream::TokenTree::Delimited(span, delimed) => { - let delimiter = Delimiter::from_raw(delimed.delim); + let delimiter = Delimiter::from_internal(delimed.delim); return TokenTree { span: Span(span), - kind: TokenKind::Sequence(delimiter, TokenStream(delimed.tts.into())), + kind: TokenNode::Group(delimiter, TokenStream(delimed.tts.into())), }; } }; - let op_kind = if is_joint { OpKind::Joint } else { OpKind::Alone }; + let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone }; macro_rules! op { - ($op:expr) => { TokenKind::Op($op, op_kind) } + ($op:expr) => { TokenNode::Op($op, op_kind) } } macro_rules! joint { @@ -402,12 +440,12 @@ impl TokenTree { fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span, next: &mut Option<tokenstream::TokenStream>) - -> TokenKind { + -> TokenNode { let (first_span, rest_span) = (*span, *span); *span = first_span; let tree = tokenstream::TokenTree::Token(rest_span, rest); *next = Some(if is_joint { tree.joint() } else { tree.into() }); - TokenKind::Op(first, OpKind::Joint) + TokenNode::Op(first, Spacing::Joint) } let kind = match token { @@ -458,11 +496,11 @@ impl TokenTree { Question => op!('?'), Underscore => op!('_'), - Ident(ident) | Lifetime(ident) => TokenKind::Word(Symbol(ident.name)), - Literal(..) | DocComment(..) => TokenKind::Literal(self::Literal(token)), + Ident(ident) | Lifetime(ident) => TokenNode::Term(Term(ident.name)), + Literal(..) | DocComment(..)
=> TokenNode::Literal(self::Literal(token)), Interpolated(ref nt) => __internal::with_sess(|(sess, _)| { - TokenKind::Sequence(Delimiter::None, TokenStream(nt.1.force(|| { + TokenNode::Group(Delimiter::None, TokenStream(nt.1.force(|| { // FIXME(jseyfried): Avoid this pretty-print + reparse hack let name = "".to_owned(); let source = pprust::token_to_string(&token); @@ -477,25 +515,25 @@ impl TokenTree { TokenTree { span: Span(span), kind: kind } } - fn to_raw(self) -> tokenstream::TokenStream { + fn to_internal(self) -> tokenstream::TokenStream { use syntax::parse::token::*; use syntax::tokenstream::{TokenTree, Delimited}; let (op, kind) = match self.kind { - TokenKind::Op(op, kind) => (op, kind), - TokenKind::Sequence(delimiter, tokens) => { + TokenNode::Op(op, kind) => (op, kind), + TokenNode::Group(delimiter, tokens) => { return TokenTree::Delimited(self.span.0, Delimited { - delim: delimiter.to_raw(), + delim: delimiter.to_internal(), tts: tokens.0.into(), }).into(); }, - TokenKind::Word(symbol) => { + TokenNode::Term(symbol) => { let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt }; let token = if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) }; return TokenTree::Token(self.span.0, token).into(); } - TokenKind::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(), + TokenNode::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(), }; let token = match op { @@ -526,8 +564,8 @@ impl TokenTree { let tree = TokenTree::Token(self.span.0, token); match kind { - OpKind::Alone => tree.into(), - OpKind::Joint => tree.joint(), + Spacing::Alone => tree.into(), + Spacing::Joint => tree.joint(), } } } @@ -543,10 +581,8 @@ impl TokenTree { /// all of the contents. #[unstable(feature = "proc_macro_internals", issue = "27812")] #[doc(hidden)] -#[path = ""] pub mod __internal { - mod quote; - pub use self::quote::{Quoter, __rt}; + pub use quote::{Quoter, __rt}; use std::cell::Cell; diff --git a/src/libproc_macro/quote.rs b/src/libproc_macro/quote.rs index a3ea3925fcd48..bee2c1e0eb6b6 100644 --- a/src/libproc_macro/quote.rs +++ b/src/libproc_macro/quote.rs @@ -9,13 +9,17 @@ // except according to those terms. //! # Quasiquoter -//! This file contains the implementation internals of the quasiquoter provided by `qquote!`. +//! This file contains the implementation internals of the quasiquoter provided by `quote!`. + +//! This quasiquoter uses macros 2.0 hygiene to reliably use items from `__rt`, +//! including re-exported API `libsyntax`, to build a `syntax::tokenstream::TokenStream` +//! and wrap it into a `proc_macro::TokenStream`. 
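+//! +//! A rough sketch of the mechanics (not a complete specification): `quote!(a + $b)` quotes +//! the tokens `a` and `+` literally, while `$b` is spliced in via `__rt::unquote`, i.e. any +//! value that is `Into<TokenStream> + Clone`; the pieces are then assembled with a +//! `TokenStreamBuilder`.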
use syntax::ast::Ident; use syntax::ext::base::{ExtCtxt, ProcMacro}; use syntax::parse::token::{self, Token, Lit}; use syntax::symbol::Symbol; -use syntax::tokenstream::{Delimited, TokenTree, TokenStream}; +use syntax::tokenstream::{Delimited, TokenTree, TokenStream, TokenStreamBuilder}; use syntax_pos::{DUMMY_SP, Span}; use syntax_pos::hygiene::SyntaxContext; @@ -25,7 +29,7 @@ pub mod __rt { pub use syntax::ast::Ident; pub use syntax::parse::token; pub use syntax::symbol::Symbol; - pub use syntax::tokenstream::{TokenStream, TokenTree, Delimited}; + pub use syntax::tokenstream::{TokenStream, TokenStreamBuilder, TokenTree, Delimited}; pub use super::{ctxt, span}; pub fn unquote<T: Into<TokenStream> + Clone>(tokens: &T) -> TokenStream { @@ -41,7 +45,7 @@ pub fn span() -> Span { ::Span::default().0 } -trait Quote { +pub trait Quote { fn quote(&self) -> TokenStream; } @@ -98,8 +102,8 @@ impl<T: Quote> Quote for Option<T> { impl Quote for TokenStream { fn quote(&self) -> TokenStream { - let mut builder = TokenStream::builder(); - builder.push(quote!(rt::TokenStream::builder())); + let mut builder = TokenStreamBuilder::new(); + builder.push(quote!(rt::TokenStreamBuilder::new())); let mut trees = self.trees(); loop { diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 57a09ed15032f..e008a5cd9ea61 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -14,7 +14,7 @@ use cstore::{self, CStore, CrateSource, MetadataBlob}; use locator::{self, CratePaths}; use schema::{CrateRoot, Tracked}; -use rustc::hir::def_id::{CrateNum, DefIndex}; +use rustc::hir::def_id::{CrateNum, DefIndex, CRATE_DEF_INDEX}; use rustc::hir::svh::Svh; use rustc::middle::cstore::DepKind; use rustc::session::Session; @@ -35,7 +35,7 @@ use std::path::PathBuf; use std::rc::Rc; use std::{cmp, fs}; -use syntax::ast::{self, Ident}; +use syntax::ast; use syntax::abi::Abi; use syntax::attr; use syntax::ext::base::SyntaxExtension; @@ -1238,7 +1238,7 @@ fn proc_macro_def_path_table(proc_macros: &[(ast::Name, Rc<SyntaxExtension>)]) -> DefPathTable { let key = DefKey { parent: Some(CRATE_DEF_INDEX), disambiguated_data: DisambiguatedDefPathData { - data: DefPathData::MacroDef(Ident::with_empty_ctxt(proc_macro.0)), + data: DefPathData::MacroDef(proc_macro.0), disambiguator: 0, }, }; diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index afc1e583d69bb..66775d8c43d63 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -483,7 +483,7 @@ impl<'a> StringReader<'a> { self.with_str_from(start, |string| { if string == "_" { self.sess.span_diagnostic - .struct_span_warn(mk_sp(start, self.pos), + .struct_span_warn(self.mk_sp(start, self.pos), "underscore literal suffix is not allowed") .warn("this was previously accepted by the compiler but is \ being phased out; it will become a hard error in \ diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 189a18f442033..d4198261d3f69 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -20,8 +20,8 @@ use serialize::{Decodable, Decoder, Encodable, Encoder}; use symbol::keywords; use tokenstream::{TokenStream, TokenTree}; -use std::cell::RefCell; -use std::fmt; +use std::cell::Cell; +use std::{cmp, fmt}; use std::rc::Rc; @@ -169,7 +169,8 @@ pub enum Token { Underscore, Lifetime(ast::Ident), - /* For interpolation */ + // The `LazyTokenStream` is a pure function of the `Nonterminal`, + // and so the
`LazyTokenStream` can be ignored by Eq, Hash, etc. Interpolated(Rc<(Nonterminal, LazyTokenStream)>), // Can be expanded into several tokens. /// Doc comment @@ -468,19 +469,40 @@ pub fn is_op(tok: &Token) -> bool { } } -#[derive(Clone, Eq, PartialEq, Debug)] -pub struct LazyTokenStream(RefCell<Option<TokenStream>>); +pub struct LazyTokenStream(Cell<Option<TokenStream>>); + +impl Clone for LazyTokenStream { + fn clone(&self) -> Self { + let opt_stream = self.0.take(); + self.0.set(opt_stream.clone()); + LazyTokenStream(Cell::new(opt_stream)) + } +} + +impl cmp::Eq for LazyTokenStream {} +impl PartialEq for LazyTokenStream { + fn eq(&self, _other: &LazyTokenStream) -> bool { + true + } +} + +impl fmt::Debug for LazyTokenStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self.clone().0.into_inner(), f) + } +} impl LazyTokenStream { pub fn new() -> Self { - LazyTokenStream(RefCell::new(None)) + LazyTokenStream(Cell::new(None)) } pub fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream { - let mut opt_stream = self.0.borrow_mut(); + let mut opt_stream = self.0.take(); if opt_stream.is_none() { - *opt_stream = Some(f()); - }; + opt_stream = Some(f()); + } + self.0.set(opt_stream.clone()); opt_stream.clone().unwrap() } } @@ -498,7 +520,5 @@ impl Decodable for LazyTokenStream { } impl ::std::hash::Hash for LazyTokenStream { - fn hash<H: ::std::hash::Hasher>(&self, hasher: &mut H) { - self.0.borrow().hash(hasher); - } + fn hash<H: ::std::hash::Hasher>(&self, _hasher: &mut H) {} } diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index a3c3fa3a52ee7..8eee25405df6b 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -201,10 +201,6 @@ impl TokenStream { } } - pub fn builder() -> TokenStreamBuilder { - TokenStreamBuilder(Vec::new()) - } - pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream { match streams.len() { 0 => TokenStream::empty(), @@ -235,6 +231,8 @@ impl TokenStream { true } + /// Precondition: `self` consists of a single token tree. + /// Returns true if the token tree is a joint operation w.r.t. `proc_macro::TokenNode`. pub fn as_tree(self) -> (TokenTree, bool /* joint?
*/) { match self.kind { TokenStreamKind::Tree(tree) => (tree, false), @@ -277,6 +275,10 @@ impl TokenStream { pub struct TokenStreamBuilder(Vec); impl TokenStreamBuilder { + pub fn new() -> TokenStreamBuilder { + TokenStreamBuilder(Vec::new()) + } + pub fn push>(&mut self, stream: T) { let stream = stream.into(); let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint); diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs index 9406eda5231d5..6d6a452b03b62 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs @@ -15,7 +15,7 @@ extern crate proc_macro; -use proc_macro::{TokenStream, TokenKind, quote}; +use proc_macro::{TokenStream, TokenNode, quote}; #[proc_macro] pub fn cond(input: TokenStream) -> TokenStream { @@ -23,7 +23,7 @@ pub fn cond(input: TokenStream) -> TokenStream { let mut input = input.into_iter().peekable(); while let Some(tree) = input.next() { let cond = match tree.kind { - TokenKind::Sequence(_, cond) => cond, + TokenNode::Sequence(_, cond) => cond, _ => panic!("Invalid input"), }; let mut cond_trees = cond.clone().into_iter(); @@ -33,7 +33,7 @@ pub fn cond(input: TokenStream) -> TokenStream { panic!("Invalid macro usage in cond: {}", cond); } let is_else = match test.kind { - TokenKind::Word(word) => *word == *"else", + TokenNode::Word(word) => word.as_str() == "else", _ => false, }; conds.push(if is_else || input.peek().is_none() { diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs index 2ff9dc2849460..e7a0283962d81 100644 --- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs @@ -15,20 +15,20 @@ extern crate proc_macro; -use proc_macro::{TokenStream, TokenKind, OpKind, Literal, quote}; +use proc_macro::{TokenStream, TokenNode, OpKind, Literal, quote}; #[proc_macro] pub fn count_compound_ops(input: TokenStream) -> TokenStream { assert_eq!(count_compound_ops_helper(quote!(++ (&&) 4@a)), 3); - TokenKind::Literal(Literal::u32(count_compound_ops_helper(input))).into() + TokenNode::Literal(Literal::u32(count_compound_ops_helper(input))).into() } fn count_compound_ops_helper(input: TokenStream) -> u32 { let mut count = 0; for token in input { match token.kind { - TokenKind::Op(c, OpKind::Alone) => count += 1, - TokenKind::Sequence(_, tokens) => count += count_compound_ops_helper(tokens), + TokenNode::Op(c, OpKind::Alone) => count += 1, + TokenNode::Sequence(_, tokens) => count += count_compound_ops_helper(tokens), _ => {} } } From 699078a35be3ec30dfbb88683129a9f2fd4ad415 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Wed, 21 Jun 2017 11:32:32 -0700 Subject: [PATCH 08/12] Fix a semantic merge conflict --- src/tools/tidy/src/features.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs index d98c6932c51e1..4c94ade98d965 100644 --- a/src/tools/tidy/src/features.rs +++ b/src/tools/tidy/src/features.rs @@ -245,7 +245,7 @@ fn get_and_check_lib_features(base_src_path: &Path, let mut err = |msg: &str| { tidy_error!(bad, "{}:{}: {}", file.display(), line, msg); }; - if lang_features.contains_key(name) && feature_name != "proc_macro" { + if lang_features.contains_key(name) && name != "proc_macro" { err("duplicating a lang feature"); } if let Some(ref 
s) = lib_features.get(name) { From 4012b8dc4abb4915b3804a88c8e5c6d5de91d410 Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Wed, 21 Jun 2017 11:32:50 -0700 Subject: [PATCH 09/12] Update UI test with proc_macro changes --- src/test/ui/token/macro-incomplete-parse.stderr | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/test/ui/token/macro-incomplete-parse.stderr b/src/test/ui/token/macro-incomplete-parse.stderr index 14a7186aab19c..054364b0048f6 100644 --- a/src/test/ui/token/macro-incomplete-parse.stderr +++ b/src/test/ui/token/macro-incomplete-parse.stderr @@ -15,6 +15,9 @@ error: expected one of `.`, `;`, `?`, `}`, or an operator, found `,` | 22 | () => ( 1, //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `,` | ^ expected one of `.`, `;`, `?`, `}`, or an operator here +... +35 | ignored_expr!(); //~ NOTE in this expansion + | ---------------- in this macro invocation error: macro expansion ignores token `,` and any following --> $DIR/macro-incomplete-parse.rs:29:14 From 302935ff2a169d57cdde78f17591e13e8aa47f9e Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Wed, 21 Jun 2017 11:34:33 -0700 Subject: [PATCH 10/12] Revert a few changes --- src/librustc/hir/map/definitions.rs | 27 ++++++++-------- src/librustc_metadata/creader.rs | 48 +++++------------------------ 2 files changed, 19 insertions(+), 56 deletions(-) diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 5322d24e38934..c969aef675ff9 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -55,19 +55,12 @@ impl Clone for DefPathTable { } impl DefPathTable { - pub fn new() -> Self { - DefPathTable { - index_to_key: [vec![], vec![]], - key_to_index: FxHashMap(), - def_path_hashes: [vec![], vec![]], - } - } - pub fn allocate(&mut self, - key: DefKey, - def_path_hash: DefPathHash, - address_space: DefIndexAddressSpace) - -> DefIndex { + fn allocate(&mut self, + key: DefKey, + def_path_hash: DefPathHash, + address_space: DefIndexAddressSpace) + -> DefIndex { let index = { let index_to_key = &mut self.index_to_key[address_space.index()]; let index = DefIndex::new(index_to_key.len() + address_space.start()); @@ -248,7 +241,7 @@ pub struct DefKey { } impl DefKey { - pub fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash { + fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash { let mut hasher = StableHasher::new(); // We hash a 0u8 here to disambiguate between regular DefPath hashes, @@ -291,7 +284,7 @@ impl DefKey { DefPathHash(hasher.finish()) } - pub fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash { + fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash { let mut hasher = StableHasher::new(); // Disambiguate this from a regular DefPath hash, // see compute_stable_hash() above. @@ -453,7 +446,11 @@ impl Definitions { /// Create new empty definition map. 
pub fn new() -> Definitions { Definitions { - table: DefPathTable::new(), + table: DefPathTable { + index_to_key: [vec![], vec![]], + key_to_index: FxHashMap(), + def_path_hashes: [vec![], vec![]], + }, node_to_def_index: NodeMap(), def_index_to_node: [vec![], vec![]], node_to_hir_id: IndexVec::new(), diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index e008a5cd9ea61..27c2d22168c8b 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -14,7 +14,7 @@ use cstore::{self, CStore, CrateSource, MetadataBlob}; use locator::{self, CratePaths}; use schema::{CrateRoot, Tracked}; -use rustc::hir::def_id::{CrateNum, DefIndex, CRATE_DEF_INDEX}; +use rustc::hir::def_id::{CrateNum, DefIndex}; use rustc::hir::svh::Svh; use rustc::middle::cstore::DepKind; use rustc::session::Session; @@ -26,8 +26,7 @@ use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate}; use rustc::util::common::record_time; use rustc::util::nodemap::FxHashSet; use rustc::middle::cstore::NativeLibrary; -use rustc::hir::map::{Definitions, DefKey, DefPathData, DisambiguatedDefPathData, ITEM_LIKE_SPACE}; -use rustc::hir::map::definitions::DefPathTable; +use rustc::hir::map::Definitions; use std::cell::{RefCell, Cell}; use std::ops::Deref; @@ -308,16 +307,9 @@ impl<'a> CrateLoader<'a> { let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind); - let proc_macros = crate_root.macro_derive_registrar.map(|_| { - self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span) + let def_path_table = record_time(&self.sess.perf_stats.decode_def_path_tables_time, || { + crate_root.def_path_table.decode(&metadata) }); - let def_path_table = if let Some(ref proc_macros) = proc_macros { - proc_macro_def_path_table(proc_macros) - } else { - record_time(&self.sess.perf_stats.decode_def_path_tables_time, || { - crate_root.def_path_table.decode(&metadata) - }) - }; let exported_symbols = crate_root.exported_symbols .map(|x| x.decode(&metadata).collect()); @@ -336,7 +328,9 @@ impl<'a> CrateLoader<'a> { def_path_table: Rc::new(def_path_table), exported_symbols: exported_symbols, trait_impls: trait_impls, - proc_macros: proc_macros, + proc_macros: crate_root.macro_derive_registrar.map(|_| { + self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span) + }), root: crate_root, blob: metadata, cnum_map: RefCell::new(cnum_map), @@ -1219,31 +1213,3 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> { } } } - -fn proc_macro_def_path_table(proc_macros: &[(ast::Name, Rc)]) -> DefPathTable { - let mut table = DefPathTable::new(); - let root = DefKey { - parent: None, - disambiguated_data: DisambiguatedDefPathData { - data: DefPathData::CrateRoot, - disambiguator: 0, - }, - }; - - let initial_hash = DefKey::root_parent_stable_hash("", ""); - let root_hash = root.compute_stable_hash(initial_hash); - let root_id = table.allocate(root, root_hash, ITEM_LIKE_SPACE); - let root_path_hash = table.def_path_hash(root_id); - for proc_macro in proc_macros { - let key = DefKey { - parent: Some(CRATE_DEF_INDEX), - disambiguated_data: DisambiguatedDefPathData { - data: DefPathData::MacroDef(proc_macro.0), - disambiguator: 0, - }, - }; - let def_path_hash = key.compute_stable_hash(root_path_hash); - table.allocate(key, def_path_hash, ITEM_LIKE_SPACE); - } - table -} From d316874c87e25669895c306658e15aa3746d66ab Mon Sep 17 00:00:00 2001 From: Alex Crichton Date: Wed, 21 Jun 2017 12:42:44 -0700 Subject: [PATCH 11/12] Update and fix a few 
From d316874c87e25669895c306658e15aa3746d66ab Mon Sep 17 00:00:00 2001
From: Alex Crichton
Date: Wed, 21 Jun 2017 12:42:44 -0700
Subject: [PATCH 11/12] Update and fix a few tests

---
 src/libsyntax/parse/mod.rs                          | 2 +-
 src/libsyntax/util/parser_testing.rs                | 2 +-
 src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs | 6 +++---
 .../proc-macro/auxiliary/count_compound_ops.rs      | 6 +++---
 4 files changed, 8 insertions(+), 8 deletions(-)

diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index f917eec2cd0b1..bd9a621c00c00 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -687,7 +687,7 @@ mod tests {
                         id: ast::DUMMY_NODE_ID,
                         node: ast::ExprKind::Path(None, ast::Path {
                             span: sp(0, 6),
-                            segments: vec![ast::PathSegment::crate_root(),
+                            segments: vec![ast::PathSegment::crate_root(sp(0, 2)),
                                            str2seg("a", 2, 3),
                                            str2seg("b", 5, 6)]
                         }),
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index 2727ab79ebf76..d993ba14a4ab5 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -20,7 +20,7 @@ use std::iter::Peekable;
 /// Map a string to tts, using a made-up filename:
 pub fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new(FilePathMapping::empty());
-    filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), source_str))
+    filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), source_str), None)
 }
 
 /// Map string to parser (via tts)
diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
index 6d6a452b03b62..e2c68a626f91e 100644
--- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
@@ -11,7 +11,7 @@
 // no-prefer-dynamic
 
 #![crate_type = "proc-macro"]
-#![feature(proc_macro, proc_macro_lib)]
+#![feature(proc_macro)]
 
 extern crate proc_macro;
 
@@ -23,7 +23,7 @@ pub fn cond(input: TokenStream) -> TokenStream {
     let mut input = input.into_iter().peekable();
     while let Some(tree) = input.next() {
         let cond = match tree.kind {
-            TokenNode::Sequence(_, cond) => cond,
+            TokenNode::Group(_, cond) => cond,
             _ => panic!("Invalid input"),
         };
         let mut cond_trees = cond.clone().into_iter();
@@ -33,7 +33,7 @@ pub fn cond(input: TokenStream) -> TokenStream {
             panic!("Invalid macro usage in cond: {}", cond);
         }
         let is_else = match test.kind {
-            TokenNode::Word(word) => word.as_str() == "else",
+            TokenNode::Term(word) => word.as_str() == "else",
            _ => false,
         };
         conds.push(if is_else || input.peek().is_none() {
diff --git a/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs
index e7a0283962d81..ec2ff0d1e2b8c 100644
--- a/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs
+++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/count_compound_ops.rs
@@ -15,7 +15,7 @@
 
 extern crate proc_macro;
 
-use proc_macro::{TokenStream, TokenNode, OpKind, Literal, quote};
+use proc_macro::{TokenStream, TokenNode, Spacing, Literal, quote};
 
 #[proc_macro]
 pub fn count_compound_ops(input: TokenStream) -> TokenStream {
@@ -27,8 +27,8 @@ fn count_compound_ops_helper(input: TokenStream) -> u32 {
     let mut count = 0;
     for token in input {
         match token.kind {
-            TokenNode::Op(c, OpKind::Alone) => count += 1,
-            TokenNode::Sequence(_, tokens) => count += count_compound_ops_helper(tokens),
+            TokenNode::Op(c, Spacing::Alone) => count += 1,
+            TokenNode::Group(_, tokens) => count += count_compound_ops_helper(tokens),
             _ => {}
         }
     }
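The renames in the two plugin tests above track this series' nightly proc_macro API: TokenNode::Sequence is now TokenNode::Group, TokenNode::Word is now TokenNode::Term, and OpKind is now Spacing, while filemap_to_stream takes a new trailing Option argument (an override span, passed as None in tests). Below is a minimal sketch of walking a TokenStream against the renamed variants, assuming that same nightly API; the counting helper itself is illustrative, not part of the patches.

    #![feature(proc_macro)]
    #![crate_type = "proc-macro"]

    extern crate proc_macro;

    use proc_macro::{Spacing, TokenNode, TokenStream};

    // Count the `Term`s (words/identifiers) in a stream, recursing into
    // delimited `Group`s; the `Op` arm is only there to show the renamed
    // `Spacing` enum next to its siblings.
    fn count_terms(input: TokenStream) -> u32 {
        let mut count = 0;
        for token in input {
            match token.kind {
                TokenNode::Term(_) => count += 1,
                TokenNode::Group(_, inner) => count += count_terms(inner),
                TokenNode::Op(_, Spacing::Alone) => {}
                _ => {}
            }
        }
        count
    }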
From 78fdbfc4008b52bcce201fd589ed84d2abb0419d Mon Sep 17 00:00:00 2001
From: Alex Crichton
Date: Wed, 5 Jul 2017 13:51:34 -0700
Subject: [PATCH 12/12] rustbuild: Only -Zsave-analysis for libstd

Don't pass the flag when we're compiling the compiler or other related
tools
---
 src/bootstrap/lib.rs | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs
index 69b0c4a2756a6..5a3785b1ed634 100644
--- a/src/bootstrap/lib.rs
+++ b/src/bootstrap/lib.rs
@@ -482,7 +482,9 @@ impl Build {
             }
         }
 
-        if self.config.extended && compiler.is_final_stage(self) {
+        if mode == Mode::Libstd &&
+           self.config.extended &&
+           compiler.is_final_stage(self) {
             cargo.env("RUSTC_SAVE_ANALYSIS", "api".to_string());
         }
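The guard above is the entire change: save-analysis output is now requested only when building the standard library, only in extended (distribution) builds, and only for the final-stage compiler, so rustc itself and the related tools no longer pay for it. A minimal sketch of the same three-way gate, with hypothetical stand-ins for rustbuild's Mode variants and build state (only Libstd is attested in the diff above):

    // Hypothetical stand-ins; rustbuild's real Mode and Build carry more.
    #[allow(dead_code)]
    #[derive(Clone, Copy, PartialEq)]
    enum Mode { Libstd, Libtest, Librustc, Tool }

    // Save-analysis is only wanted for libstd, in extended builds, at the
    // final stage; every other combination skips the extra work.
    fn wants_save_analysis(mode: Mode, extended: bool, final_stage: bool) -> bool {
        mode == Mode::Libstd && extended && final_stage
    }

    fn main() {
        assert!(wants_save_analysis(Mode::Libstd, true, true));
        assert!(!wants_save_analysis(Mode::Librustc, true, true));
        assert!(!wants_save_analysis(Mode::Libstd, false, true));
    }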