From e42836b2085233323339bacb636ecf9c28e8422e Mon Sep 17 00:00:00 2001
From: Jeffrey Seyfried
Date: Fri, 17 Mar 2017 23:41:09 +0000
Subject: [PATCH] Implement `quote!` and other `proc_macro` API.

---
 src/Cargo.lock                              |   9 -
 .../src/library-features/proc-macro.md      |   7 +
 src/libproc_macro/Cargo.toml                |   1 +
 src/libproc_macro/lib.rs                    | 474 +++++++++++++++++-
 src/libproc_macro/quote.rs                  | 259 ++++++++++
 src/libproc_macro_plugin/Cargo.toml         |  13 -
 src/libproc_macro_plugin/lib.rs             | 103 ----
 src/libproc_macro_plugin/quote.rs           | 230 ---------
 src/librustc/hir/map/definitions.rs         |  27 +-
 src/librustc/middle/stability.rs            |   1 +
 src/librustc_driver/Cargo.toml              |   1 -
 src/librustc_metadata/creader.rs            |  48 +-
 src/librustc_metadata/cstore_impl.rs        |   5 +
 src/librustc_metadata/decoder.rs            |   2 +-
 src/librustc_metadata/encoder.rs            |   7 +-
 src/libsyntax/ast.rs                        |   8 +-
 src/libsyntax/ext/base.rs                   |   5 +-
 src/libsyntax/ext/build.rs                  |   2 +-
 src/libsyntax/feature_gate.rs               |  28 +-
 src/libsyntax/fold.rs                       |   2 +-
 src/libsyntax/parse/lexer/tokentrees.rs     |  12 +-
 src/libsyntax/parse/parser.rs               |   4 +-
 src/libsyntax/parse/token.rs                |  63 +++
 src/libsyntax/tokenstream.rs                | 152 +++++-
 src/libsyntax/util/rc_slice.rs              |  10 +-
 .../auxiliary/cond_plugin.rs                |  43 +-
 .../auxiliary/hello_macro.rs                |  23 +-
 .../auxiliary/proc_macro_def.rs             |  50 +-
 src/test/run-pass-fulldeps/macro-quote-1.rs |  40 --
 .../run-pass-fulldeps/macro-quote-cond.rs   |   8 +-
 .../run-pass-fulldeps/macro-quote-test.rs   |   8 +-
 src/test/run-pass-fulldeps/proc_macro.rs    |   7 +-
 src/tools/tidy/src/cargo.rs                 |   8 -
 src/tools/tidy/src/features.rs              |   2 +-
 34 files changed, 1085 insertions(+), 577 deletions(-)
 create mode 100644 src/doc/unstable-book/src/library-features/proc-macro.md
 create mode 100644 src/libproc_macro/quote.rs
 delete mode 100644 src/libproc_macro_plugin/Cargo.toml
 delete mode 100644 src/libproc_macro_plugin/lib.rs
 delete mode 100644 src/libproc_macro_plugin/quote.rs
 delete mode 100644 src/test/run-pass-fulldeps/macro-quote-1.rs

diff --git a/src/Cargo.lock b/src/Cargo.lock
index dd6e72a56149e..e8f0ed6ed2c18 100644
--- a/src/Cargo.lock
+++ b/src/Cargo.lock
@@ -882,14 +882,6 @@ name = "proc_macro"
 version = "0.0.0"
 dependencies = [
  "syntax 0.0.0",
-]
-
-[[package]]
-name = "proc_macro_plugin"
-version = "0.0.0"
-dependencies = [
- "rustc_plugin 0.0.0",
- "syntax 0.0.0",
  "syntax_pos 0.0.0",
 ]

@@ -1210,7 +1202,6 @@ dependencies = [
 "env_logger 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
 "graphviz 0.0.0",
 "log 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
- "proc_macro_plugin 0.0.0",
 "rustc 0.0.0",
 "rustc_back 0.0.0",
 "rustc_borrowck 0.0.0",
diff --git a/src/doc/unstable-book/src/library-features/proc-macro.md b/src/doc/unstable-book/src/library-features/proc-macro.md
new file mode 100644
index 0000000000000..19e7f663c7ac3
--- /dev/null
+++ b/src/doc/unstable-book/src/library-features/proc-macro.md
@@ -0,0 +1,7 @@
+# `proc_macro`
+
+The tracking issue for this feature is: [#38356]
+
+[#38356]: https://github.com/rust-lang/rust/issues/38356
+
+------------------------
diff --git a/src/libproc_macro/Cargo.toml b/src/libproc_macro/Cargo.toml
index 7ce65d0fe4dbc..1b5141773a967 100644
--- a/src/libproc_macro/Cargo.toml
+++ b/src/libproc_macro/Cargo.toml
@@ -9,3 +9,4 @@ crate-type = ["dylib"]

 [dependencies]
 syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }
diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs
index b9f4fa63e6006..f1abd3339ed53 100644
--- a/src/libproc_macro/lib.rs
+++ b/src/libproc_macro/lib.rs
@@ -37,18
+37,24 @@ test(no_crate_inject, attr(deny(warnings))), test(attr(allow(dead_code, deprecated, unused_variables, unused_mut))))] +#![feature(i128_type)] #![feature(rustc_private)] #![feature(staged_api)] #![feature(lang_items)] extern crate syntax; +extern crate syntax_pos; -use std::fmt; +use std::{fmt, iter, ops}; use std::str::FromStr; +use syntax::ast; use syntax::errors::DiagnosticBuilder; -use syntax::parse; +use syntax::parse::{self, token}; +use syntax::symbol; use syntax::tokenstream; +use syntax_pos::DUMMY_SP; +use syntax_pos::SyntaxContext; /// The main type provided by this crate, representing an abstract stream of /// tokens. @@ -60,6 +66,7 @@ use syntax::tokenstream; /// The API of this type is intentionally bare-bones, but it'll be expanded over /// time! #[stable(feature = "proc_macro_lib", since = "1.15.0")] +#[derive(Clone)] pub struct TokenStream(tokenstream::TokenStream); /// Error returned from `TokenStream::from_str`. @@ -69,6 +76,443 @@ pub struct LexError { _inner: (), } +#[stable(feature = "proc_macro_lib", since = "1.15.0")] +impl FromStr for TokenStream { + type Err = LexError; + + fn from_str(src: &str) -> Result { + __internal::with_sess(|(sess, mark)| { + let src = src.to_string(); + let name = "".to_string(); + let call_site = mark.expn_info().unwrap().call_site; + let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site)); + Ok(__internal::token_stream_wrap(stream)) + }) + } +} + +#[stable(feature = "proc_macro_lib", since = "1.15.0")] +impl fmt::Display for TokenStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.0.fmt(f) + } +} + +/// `quote!(..)` accepts arbitrary tokens and expands into a `TokenStream` describing the input. +/// For example, `quote!(a + b)` will produce a expression, that, when evaluated, constructs +/// constructs the `TokenStream` `[Word("a"), Op('+', Alone), Word("b")]`. +/// +/// Unquoting is done with `$`, and works by taking the single next ident as the unquoted term. +/// To quote `$` itself, use `$$`. +#[unstable(feature = "proc_macro", issue = "38356")] +#[macro_export] +macro_rules! quote { () => {} } + +#[unstable(feature = "proc_macro", issue = "38356")] +impl From for TokenStream { + fn from(tree: TokenTree) -> TokenStream { + TokenStream(tree.to_raw()) + } +} + +#[unstable(feature = "proc_macro", issue = "38356")] +impl From for TokenStream { + fn from(kind: TokenKind) -> TokenStream { + TokenTree::from(kind).into() + } +} + +#[unstable(feature = "proc_macro", issue = "38356")] +impl> iter::FromIterator for TokenStream { + fn from_iter>(streams: I) -> Self { + let mut builder = tokenstream::TokenStream::builder(); + for stream in streams { + builder.push(stream.into().0); + } + TokenStream(builder.build()) + } +} + +#[unstable(feature = "proc_macro", issue = "38356")] +impl IntoIterator for TokenStream { + type Item = TokenTree; + type IntoIter = TokenIter; + + fn into_iter(self) -> TokenIter { + TokenIter { cursor: self.0.trees(), next: None } + } +} + +impl TokenStream { + /// Returns an empty `TokenStream`. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn empty() -> TokenStream { + TokenStream(tokenstream::TokenStream::empty()) + } + + /// Checks if this `TokenStream` is empty. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } +} + +/// A region of source code, along with macro expansion information. 
+#[unstable(feature = "proc_macro", issue = "38356")] +#[derive(Copy, Clone)] +pub struct Span(syntax_pos::Span); + +#[unstable(feature = "proc_macro", issue = "38356")] +impl Default for Span { + fn default() -> Span { + ::__internal::with_sess(|(_, mark)| Span(syntax_pos::Span { + ctxt: SyntaxContext::empty().apply_mark(mark), + ..mark.expn_info().unwrap().call_site + })) + } +} + +impl Span { + /// The span of the invocation of the current procedural macro. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn call_site() -> Span { + ::__internal::with_sess(|(_, mark)| Span(mark.expn_info().unwrap().call_site)) + } +} + +/// A single token or a delimited sequence of token trees (e.g. `[1, (), ..]`). +#[unstable(feature = "proc_macro", issue = "38356")] +#[derive(Clone)] +pub struct TokenTree { + /// The `TokenTree`'s span + pub span: Span, + /// Description of the `TokenTree` + pub kind: TokenKind, +} + +#[unstable(feature = "proc_macro", issue = "38356")] +impl From for TokenTree { + fn from(kind: TokenKind) -> TokenTree { + TokenTree { span: Span::default(), kind: kind } + } +} + +#[unstable(feature = "proc_macro", issue = "38356")] +impl fmt::Display for TokenTree { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + TokenStream::from(self.clone()).fmt(f) + } +} + +/// Description of a `TokenTree` +#[derive(Clone)] +#[unstable(feature = "proc_macro", issue = "38356")] +pub enum TokenKind { + /// A delimited tokenstream. + Sequence(Delimiter, TokenStream), + /// A unicode identifier. + Word(Symbol), + /// A punctuation character (`+`, `,`, `$`, etc.). + Op(char, OpKind), + /// A literal character (`'a'`), string (`"hello"`), or number (`2.3`). + Literal(Literal), +} + +/// Describes how a sequence of token trees is delimited. +#[derive(Copy, Clone)] +#[unstable(feature = "proc_macro", issue = "38356")] +pub enum Delimiter { + /// `( ... )` + Parenthesis, + /// `[ ... ]` + Brace, + /// `{ ... }` + Bracket, + /// An implicit delimiter, e.g. `$var`, where $var is `...`. + None, +} + +/// An interned string. +#[derive(Copy, Clone)] +#[unstable(feature = "proc_macro", issue = "38356")] +pub struct Symbol(symbol::Symbol); + +#[unstable(feature = "proc_macro", issue = "38356")] +impl<'a> From<&'a str> for Symbol { + fn from(string: &'a str) -> Symbol { + Symbol(symbol::Symbol::intern(string)) + } +} + +#[unstable(feature = "proc_macro", issue = "38356")] +impl ops::Deref for Symbol { + type Target = str; + + fn deref(&self) -> &str { + unsafe { &*(self.0.as_str().deref() as *const str) } + } +} + +/// Whether an `Op` is either followed immediately by another `Op` or followed by whitespace. +#[derive(Copy, Clone)] +#[unstable(feature = "proc_macro", issue = "38356")] +pub enum OpKind { + /// e.g. `+` is `Alone` in `+ =`. + Alone, + /// e.g. `+` is `Joint` in `+=`. + Joint, +} + +/// A literal character (`'a'`), string (`"hello"`), or number (`2.3`). +#[derive(Clone)] +#[unstable(feature = "proc_macro", issue = "38356")] +pub struct Literal(token::Token); + +#[unstable(feature = "proc_macro", issue = "38356")] +impl fmt::Display for Literal { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + TokenTree { kind: TokenKind::Literal(self.clone()), span: Span(DUMMY_SP) }.fmt(f) + } +} + +macro_rules! int_literals { + ($($int_kind:ident),*) => {$( + /// Integer literal. 
+ #[unstable(feature = "proc_macro", issue = "38356")] + pub fn $int_kind(n: $int_kind) -> Literal { + Literal::integer(n as i128, stringify!($int_kind)) + } + )*} +} + +impl Literal { + int_literals!(u8, i8, u16, i16, u32, i32, u64, i64); + fn integer(n: i128, kind: &'static str) -> Literal { + Literal(token::Literal(token::Lit::Integer(symbol::Symbol::intern(&n.to_string())), + Some(symbol::Symbol::intern(kind)))) + } + + /// Floating point literal. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn f32(n: f32) -> Literal { + Literal(token::Literal(token::Lit::Float(symbol::Symbol::intern(&n.to_string())), + Some(symbol::Symbol::intern("f32")))) + } + + /// Floating point literal. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn f64(n: f32) -> Literal { + Literal(token::Literal(token::Lit::Float(symbol::Symbol::intern(&n.to_string())), + Some(symbol::Symbol::intern("f64")))) + } + + /// String literal. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn string(string: &str) -> Literal { + let mut escaped = String::new(); + for ch in string.chars() { + escaped.extend(ch.escape_unicode()); + } + Literal(token::Literal(token::Lit::Str_(symbol::Symbol::intern(&escaped)), None)) + } + + /// Character literal. + #[unstable(feature = "proc_macro", issue = "38356")] + pub fn character(ch: char) -> Literal { + let mut escaped = String::new(); + escaped.extend(ch.escape_unicode()); + Literal(token::Literal(token::Lit::Char(symbol::Symbol::intern(&escaped)), None)) + } +} + +/// An iterator over `TokenTree`s. +#[unstable(feature = "proc_macro", issue = "38356")] +pub struct TokenIter { + cursor: tokenstream::Cursor, + next: Option, +} + +#[unstable(feature = "proc_macro", issue = "38356")] +impl Iterator for TokenIter { + type Item = TokenTree; + + fn next(&mut self) -> Option { + self.next.take().or_else(|| self.cursor.next_as_stream()) + .map(|next| TokenTree::from_raw(next, &mut self.next)) + } +} + +impl Delimiter { + fn from_raw(delim: token::DelimToken) -> Delimiter { + match delim { + token::Paren => Delimiter::Parenthesis, + token::Brace => Delimiter::Brace, + token::Bracket => Delimiter::Bracket, + token::NoDelim => Delimiter::None, + } + } + + fn to_raw(self) -> token::DelimToken { + match self { + Delimiter::Parenthesis => token::Paren, + Delimiter::Brace => token::Brace, + Delimiter::Bracket => token::Bracket, + Delimiter::None => token::NoDelim, + } + } +} + +impl TokenTree { + fn from_raw(stream: tokenstream::TokenStream, next: &mut Option) + -> TokenTree { + use syntax::parse::token::*; + + let (tree, is_joint) = stream.as_tree(); + let (mut span, token) = match tree { + tokenstream::TokenTree::Token(span, token) => (span, token), + tokenstream::TokenTree::Delimited(span, delimed) => { + let delimiter = Delimiter::from_raw(delimed.delim); + return TokenTree { + span: Span(span), + kind: TokenKind::Sequence(delimiter, TokenStream(delimed.tts.into())), + }; + } + }; + + let op_kind = if is_joint { OpKind::Joint } else { OpKind::Alone }; + macro_rules! op { + ($op:expr) => { TokenKind::Op($op, op_kind) } + } + + macro_rules! 
joint { + ($first:expr, $rest:expr) => { joint($first, $rest, is_joint, &mut span, next) } + } + + fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span, + next: &mut Option) + -> TokenKind { + let (first_span, rest_span) = (*span, *span); + *span = first_span; + let tree = tokenstream::TokenTree::Token(rest_span, rest); + *next = Some(if is_joint { tree.joint() } else { tree.into() }); + TokenKind::Op(first, OpKind::Joint) + } + + let kind = match token { + Eq => op!('='), + Lt => op!('<'), + Le => joint!('<', Eq), + EqEq => joint!('=', Eq), + Ne => joint!('!', Eq), + Ge => joint!('>', Eq), + Gt => op!('>'), + AndAnd => joint!('&', BinOp(And)), + OrOr => joint!('|', BinOp(Or)), + Not => op!('!'), + Tilde => op!('~'), + BinOp(Plus) => op!('+'), + BinOp(Minus) => op!('-'), + BinOp(Star) => op!('*'), + BinOp(Slash) => op!('/'), + BinOp(Percent) => op!('%'), + BinOp(Caret) => op!('^'), + BinOp(And) => op!('&'), + BinOp(Or) => op!('|'), + BinOp(Shl) => joint!('<', Lt), + BinOp(Shr) => joint!('>', Gt), + BinOpEq(Plus) => joint!('+', Eq), + BinOpEq(Minus) => joint!('-', Eq), + BinOpEq(Star) => joint!('*', Eq), + BinOpEq(Slash) => joint!('/', Eq), + BinOpEq(Percent) => joint!('%', Eq), + BinOpEq(Caret) => joint!('^', Eq), + BinOpEq(And) => joint!('&', Eq), + BinOpEq(Or) => joint!('|', Eq), + BinOpEq(Shl) => joint!('<', Le), + BinOpEq(Shr) => joint!('>', Ge), + At => op!('@'), + Dot => op!('.'), + DotDot => joint!('.', Dot), + DotDotDot => joint!('.', DotDot), + Comma => op!(','), + Semi => op!(';'), + Colon => op!(':'), + ModSep => joint!(':', Colon), + RArrow => joint!('-', Gt), + LArrow => joint!('<', BinOp(Minus)), + FatArrow => joint!('=', Gt), + Pound => op!('#'), + Dollar => op!('$'), + Question => op!('?'), + Underscore => op!('_'), + + Ident(ident) | Lifetime(ident) => TokenKind::Word(Symbol(ident.name)), + Literal(..) | DocComment(..) => TokenKind::Literal(self::Literal(token)), + + Interpolated(..) => unimplemented!(), + + OpenDelim(..) | CloseDelim(..) => unreachable!(), + Whitespace | Comment | Shebang(..) | Eof => unreachable!(), + }; + + TokenTree { span: Span(span), kind: kind } + } + + fn to_raw(self) -> tokenstream::TokenStream { + use syntax::parse::token::*; + use syntax::tokenstream::{TokenTree, Delimited}; + + let (op, kind) = match self.kind { + TokenKind::Op(op, kind) => (op, kind), + TokenKind::Sequence(delimiter, tokens) => { + return TokenTree::Delimited(self.span.0, Delimited { + delim: delimiter.to_raw(), + tts: tokens.0.into(), + }).into(); + }, + TokenKind::Word(symbol) => { + let ident = ast::Ident { name: symbol.0, ctxt: self.span.0.ctxt }; + let token = + if symbol.0.as_str().starts_with("'") { Lifetime(ident) } else { Ident(ident) }; + return TokenTree::Token(self.span.0, token).into(); + } + TokenKind::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(), + }; + + let token = match op { + '=' => Eq, + '<' => Lt, + '>' => Gt, + '!' => Not, + '~' => Tilde, + '+' => BinOp(Plus), + '-' => BinOp(Minus), + '*' => BinOp(Star), + '/' => BinOp(Slash), + '%' => BinOp(Percent), + '^' => BinOp(Caret), + '&' => BinOp(And), + '|' => BinOp(Or), + '@' => At, + '.' => Dot, + ',' => Comma, + ';' => Semi, + ':' => Colon, + '#' => Pound, + '$' => Dollar, + '?' 
=> Question, + '_' => Underscore, + _ => panic!("unsupported character {}", op), + }; + + let tree = TokenTree::Token(self.span.0, token); + match kind { + OpKind::Alone => tree.into(), + OpKind::Joint => tree.joint(), + } + } +} + /// Permanently unstable internal implementation details of this crate. This /// should not be used. /// @@ -80,7 +524,11 @@ pub struct LexError { /// all of the contents. #[unstable(feature = "proc_macro_internals", issue = "27812")] #[doc(hidden)] +#[path = ""] pub mod __internal { + mod quote; + pub use self::quote::{Quoter, __rt}; + use std::cell::Cell; use std::rc::Rc; @@ -172,25 +620,3 @@ fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError { err.cancel(); LexError { _inner: () } } - -#[stable(feature = "proc_macro_lib", since = "1.15.0")] -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - __internal::with_sess(|(sess, mark)| { - let src = src.to_string(); - let name = "".to_string(); - let call_site = mark.expn_info().unwrap().call_site; - let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site)); - Ok(__internal::token_stream_wrap(stream)) - }) - } -} - -#[stable(feature = "proc_macro_lib", since = "1.15.0")] -impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - self.0.fmt(f) - } -} diff --git a/src/libproc_macro/quote.rs b/src/libproc_macro/quote.rs new file mode 100644 index 0000000000000..a3ea3925fcd48 --- /dev/null +++ b/src/libproc_macro/quote.rs @@ -0,0 +1,259 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! # Quasiquoter +//! This file contains the implementation internals of the quasiquoter provided by `qquote!`. + +use syntax::ast::Ident; +use syntax::ext::base::{ExtCtxt, ProcMacro}; +use syntax::parse::token::{self, Token, Lit}; +use syntax::symbol::Symbol; +use syntax::tokenstream::{Delimited, TokenTree, TokenStream}; +use syntax_pos::{DUMMY_SP, Span}; +use syntax_pos::hygiene::SyntaxContext; + +pub struct Quoter; + +pub mod __rt { + pub use syntax::ast::Ident; + pub use syntax::parse::token; + pub use syntax::symbol::Symbol; + pub use syntax::tokenstream::{TokenStream, TokenTree, Delimited}; + pub use super::{ctxt, span}; + + pub fn unquote + Clone>(tokens: &T) -> TokenStream { + T::into(tokens.clone()).0 + } +} + +pub fn ctxt() -> SyntaxContext { + ::__internal::with_sess(|(_, mark)| SyntaxContext::empty().apply_mark(mark)) +} + +pub fn span() -> Span { + ::Span::default().0 +} + +trait Quote { + fn quote(&self) -> TokenStream; +} + +macro_rules! quote_tok { + (,) => { Token::Comma }; + (.) => { Token::Dot }; + (:) => { Token::Colon }; + (::) => { Token::ModSep }; + (!) => { Token::Not }; + (<) => { Token::Lt }; + (>) => { Token::Gt }; + (_) => { Token::Underscore }; + (0) => { Token::Literal(token::Lit::Integer(Symbol::intern("0")), None) }; + (&) => { Token::BinOp(token::And) }; + ($i:ident) => { Token::Ident(Ident { name: Symbol::intern(stringify!($i)), ctxt: ctxt() }) }; +} + +macro_rules! 
quote_tree { + ((unquote $($t:tt)*)) => { TokenStream::from($($t)*) }; + ((quote $($t:tt)*)) => { ($($t)*).quote() }; + (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) }; + ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) }; + ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) }; + (rt) => { quote!(::__internal::__rt) }; + ($t:tt) => { TokenStream::from(TokenTree::Token(span(), quote_tok!($t))) }; +} + +fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream { + TokenTree::Delimited(span(), Delimited { delim: delim, tts: stream.into() }).into() +} + +macro_rules! quote { + () => { TokenStream::empty() }; + ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::() }; +} + +impl ProcMacro for Quoter { + fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, _: Span, stream: TokenStream) -> TokenStream { + let mut info = cx.current_expansion.mark.expn_info().unwrap(); + info.callee.allow_internal_unstable = true; + cx.current_expansion.mark.set_expn_info(info); + ::__internal::set_sess(cx, || quote!(::TokenStream((quote stream)))) + } +} + +impl Quote for Option { + fn quote(&self) -> TokenStream { + match *self { + Some(ref t) => quote!(Some((quote t))), + None => quote!(None), + } + } +} + +impl Quote for TokenStream { + fn quote(&self) -> TokenStream { + let mut builder = TokenStream::builder(); + builder.push(quote!(rt::TokenStream::builder())); + + let mut trees = self.trees(); + loop { + let (mut tree, mut is_joint) = match trees.next_as_stream() { + Some(next) => next.as_tree(), + None => return builder.add(quote!(.build())).build(), + }; + if let TokenTree::Token(_, Token::Dollar) = tree { + let (next_tree, next_is_joint) = match trees.next_as_stream() { + Some(next) => next.as_tree(), + None => panic!("unexpected trailing `$` in `quote!`"), + }; + match next_tree { + TokenTree::Token(_, Token::Ident(..)) => { + builder.push(quote!(.add(rt::unquote(&(unquote next_tree))))); + continue + } + TokenTree::Token(_, Token::Dollar) => { + tree = next_tree; + is_joint = next_is_joint; + } + _ => panic!("`$` must be followed by an ident or `$` in `quote!`"), + } + } + + builder.push(match is_joint { + true => quote!(.add((quote tree).joint())), + false => quote!(.add(rt::TokenStream::from((quote tree)))), + }); + } + } +} + +impl Quote for TokenTree { + fn quote(&self) -> TokenStream { + match *self { + TokenTree::Token(span, ref token) => quote! { + rt::TokenTree::Token((quote span), (quote token)) + }, + TokenTree::Delimited(span, ref delimited) => quote! 
{ + rt::TokenTree::Delimited((quote span), (quote delimited)) + }, + } + } +} + +impl Quote for Delimited { + fn quote(&self) -> TokenStream { + quote!(rt::Delimited { delim: (quote self.delim), tts: (quote self.stream()).into() }) + } +} + +impl<'a> Quote for &'a str { + fn quote(&self) -> TokenStream { + TokenTree::Token(span(), Token::Literal(token::Lit::Str_(Symbol::intern(self)), None)) + .into() + } +} + +impl Quote for usize { + fn quote(&self) -> TokenStream { + let integer_symbol = Symbol::intern(&self.to_string()); + TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None)) + .into() + } +} + +impl Quote for Ident { + fn quote(&self) -> TokenStream { + quote!(rt::Ident { name: (quote self.name), ctxt: rt::ctxt() }) + } +} + +impl Quote for Symbol { + fn quote(&self) -> TokenStream { + quote!(rt::Symbol::intern((quote &*self.as_str()))) + } +} + +impl Quote for Span { + fn quote(&self) -> TokenStream { + quote!(rt::span()) + } +} + +impl Quote for Token { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*; $($t:tt)*) => { + match *self { + $( Token::$i => quote!(rt::token::$i), )* + $( $t )* + } + } + } + + gen_match! { + Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot, + Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question, + Underscore; + + Token::OpenDelim(delim) => quote!(rt::token::OpenDelim((quote delim))), + Token::CloseDelim(delim) => quote!(rt::token::CloseDelim((quote delim))), + Token::BinOp(tok) => quote!(rt::token::BinOp((quote tok))), + Token::BinOpEq(tok) => quote!(rt::token::BinOpEq((quote tok))), + Token::Ident(ident) => quote!(rt::token::Ident((quote ident))), + Token::Lifetime(ident) => quote!(rt::token::Lifetime((quote ident))), + Token::Literal(lit, sfx) => quote!(rt::token::Literal((quote lit), (quote sfx))), + _ => panic!("Unhandled case!"), + } + } +} + +impl Quote for token::BinOpToken { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*) => { + match *self { + $( token::BinOpToken::$i => quote!(rt::token::BinOpToken::$i), )* + } + } + } + + gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr) + } +} + +impl Quote for Lit { + fn quote(&self) -> TokenStream { + macro_rules! gen_match { + ($($i:ident),*; $($raw:ident),*) => { + match *self { + $( Lit::$i(lit) => quote!(rt::token::Lit::$i((quote lit))), )* + $( Lit::$raw(lit, n) => { + quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n))) + })* + } + } + } + + gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw) + } +} + +impl Quote for token::DelimToken { + fn quote(&self) -> TokenStream { + macro_rules! 
gen_match { + ($($i:ident),*) => { + match *self { + $(token::DelimToken::$i => { quote!(rt::token::DelimToken::$i) })* + } + } + } + + gen_match!(Paren, Bracket, Brace, NoDelim) + } +} diff --git a/src/libproc_macro_plugin/Cargo.toml b/src/libproc_macro_plugin/Cargo.toml deleted file mode 100644 index 146a66cdf01cb..0000000000000 --- a/src/libproc_macro_plugin/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -authors = ["The Rust Project Developers"] -name = "proc_macro_plugin" -version = "0.0.0" - -[lib] -path = "lib.rs" -crate-type = ["dylib"] - -[dependencies] -rustc_plugin = { path = "../librustc_plugin" } -syntax = { path = "../libsyntax" } -syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/libproc_macro_plugin/lib.rs b/src/libproc_macro_plugin/lib.rs deleted file mode 100644 index d1bc0966eb567..0000000000000 --- a/src/libproc_macro_plugin/lib.rs +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! # Proc_Macro -//! -//! A library for procedural macro writers. -//! -//! ## Usage -//! This crate provides the `quote!` macro for syntax creation. -//! -//! The `quote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;` -//! at the crate root. This is a temporary solution until we have better hygiene. -//! -//! ## Quasiquotation -//! -//! The quasiquoter creates output that, when run, constructs the tokenstream specified as -//! input. For example, `quote!(5 + 5)` will produce a program, that, when run, will -//! construct the TokenStream `5 | + | 5`. -//! -//! ### Unquoting -//! -//! Unquoting is done with `$`, and works by taking the single next ident as the unquoted term. -//! To quote `$` itself, use `$$`. -//! -//! A simple example is: -//! -//!``` -//!fn double(tmp: TokenStream) -> TokenStream { -//! quote!($tmp * 2) -//!} -//!``` -//! -//! ### Large example: Scheme's `cond` -//! -//! Below is an example implementation of Scheme's `cond`. -//! -//! ``` -//! fn cond(input: TokenStream) -> TokenStream { -//! let mut conds = Vec::new(); -//! let mut input = input.trees().peekable(); -//! while let Some(tree) = input.next() { -//! let mut cond = match tree { -//! TokenTree::Delimited(_, ref delimited) => delimited.stream(), -//! _ => panic!("Invalid input"), -//! }; -//! let mut trees = cond.trees(); -//! let test = trees.next(); -//! let rhs = trees.collect::(); -//! if rhs.is_empty() { -//! panic!("Invalid macro usage in cond: {}", cond); -//! } -//! let is_else = match test { -//! Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true, -//! _ => false, -//! }; -//! conds.push(if is_else || input.peek().is_none() { -//! quote!({ $rhs }) -//! } else { -//! let test = test.unwrap(); -//! quote!(if $test { $rhs } else) -//! }); -//! } -//! -//! conds.into_iter().collect() -//! } -//! 
``` -#![crate_name = "proc_macro_plugin"] -#![feature(plugin_registrar)] -#![crate_type = "dylib"] -#![crate_type = "rlib"] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] -#![deny(warnings)] - -#![feature(rustc_diagnostic_macros)] - -extern crate rustc_plugin; -extern crate syntax; -extern crate syntax_pos; - -mod quote; -use quote::quote; - -use rustc_plugin::Registry; -use syntax::ext::base::SyntaxExtension; -use syntax::symbol::Symbol; - -// ____________________________________________________________________________________________ -// Main macro definition - -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(Symbol::intern("quote"), - SyntaxExtension::ProcMacro(Box::new(quote))); -} diff --git a/src/libproc_macro_plugin/quote.rs b/src/libproc_macro_plugin/quote.rs deleted file mode 100644 index 09675564291a2..0000000000000 --- a/src/libproc_macro_plugin/quote.rs +++ /dev/null @@ -1,230 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! # Quasiquoter -//! This file contains the implementation internals of the quasiquoter provided by `qquote!`. - -use syntax::ast::Ident; -use syntax::parse::token::{self, Token, Lit}; -use syntax::symbol::Symbol; -use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream}; -use syntax_pos::DUMMY_SP; - -use std::iter; - -pub fn quote<'cx>(stream: TokenStream) -> TokenStream { - stream.quote() -} - -trait Quote { - fn quote(&self) -> TokenStream; -} - -macro_rules! quote_tok { - (,) => { Token::Comma }; - (.) => { Token::Dot }; - (:) => { Token::Colon }; - (::) => { Token::ModSep }; - (!) => { Token::Not }; - (<) => { Token::Lt }; - (>) => { Token::Gt }; - (_) => { Token::Underscore }; - ($i:ident) => { Token::Ident(Ident::from_str(stringify!($i))) }; -} - -macro_rules! quote_tree { - ((unquote $($t:tt)*)) => { $($t)* }; - ((quote $($t:tt)*)) => { ($($t)*).quote() }; - (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) }; - ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) }; - ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) }; - ($t:tt) => { TokenStream::from(TokenTree::Token(DUMMY_SP, quote_tok!($t))) }; -} - -fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream { - TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into() -} - -macro_rules! 
quote { - () => { TokenStream::empty() }; - ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::() }; -} - -impl Quote for Option { - fn quote(&self) -> TokenStream { - match *self { - Some(ref t) => quote!(::std::option::Option::Some((quote t))), - None => quote!(::std::option::Option::None), - } - } -} - -impl Quote for TokenStream { - fn quote(&self) -> TokenStream { - if self.is_empty() { - return quote!(::syntax::tokenstream::TokenStream::empty()); - } - - struct Quoter(iter::Peekable); - - impl Iterator for Quoter { - type Item = TokenStream; - - fn next(&mut self) -> Option { - let quoted_tree = if let Some(&TokenTree::Token(_, Token::Dollar)) = self.0.peek() { - self.0.next(); - match self.0.next() { - Some(tree @ TokenTree::Token(_, Token::Ident(..))) => Some(tree.into()), - Some(tree @ TokenTree::Token(_, Token::Dollar)) => Some(tree.quote()), - // FIXME(jseyfried): improve these diagnostics - Some(..) => panic!("`$` must be followed by an ident or `$` in `quote!`"), - None => panic!("unexpected trailing `$` in `quote!`"), - } - } else { - self.0.next().as_ref().map(Quote::quote) - }; - - quoted_tree.map(|quoted_tree| { - quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),) - }) - } - } - - let quoted = Quoter(self.trees().peekable()).collect::(); - quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>()) - } -} - -impl Quote for TokenTree { - fn quote(&self) -> TokenStream { - match *self { - TokenTree::Token(_, ref token) => quote! { - ::syntax::tokenstream::TokenTree::Token(::syntax::ext::quote::rt::DUMMY_SP, - (quote token)) - }, - TokenTree::Delimited(_, ref delimited) => quote! { - ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP, - (quote delimited)) - }, - } - } -} - -impl Quote for Delimited { - fn quote(&self) -> TokenStream { - quote!(::syntax::tokenstream::Delimited { - delim: (quote self.delim), - tts: (quote self.stream()).into(), - }) - } -} - -impl<'a> Quote for &'a str { - fn quote(&self) -> TokenStream { - TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Str_(Symbol::intern(self)), None)) - .into() - } -} - -impl Quote for usize { - fn quote(&self) -> TokenStream { - let integer_symbol = Symbol::intern(&self.to_string()); - TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None)) - .into() - } -} - -impl Quote for Ident { - fn quote(&self) -> TokenStream { - // FIXME(jseyfried) quote hygiene - quote!(::syntax::ast::Ident::from_str((quote &*self.name.as_str()))) - } -} - -impl Quote for Symbol { - fn quote(&self) -> TokenStream { - quote!(::syntax::symbol::Symbol::intern((quote &*self.as_str()))) - } -} - -impl Quote for Token { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*; $($t:tt)*) => { - match *self { - $( Token::$i => quote!(::syntax::parse::token::$i), )* - $( $t )* - } - } - } - - gen_match! 
{ - Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot, - Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question, - Underscore; - - Token::OpenDelim(delim) => quote!(::syntax::parse::token::OpenDelim((quote delim))), - Token::CloseDelim(delim) => quote!(::syntax::parse::token::CloseDelim((quote delim))), - Token::BinOp(tok) => quote!(::syntax::parse::token::BinOp((quote tok))), - Token::BinOpEq(tok) => quote!(::syntax::parse::token::BinOpEq((quote tok))), - Token::Ident(ident) => quote!(::syntax::parse::token::Ident((quote ident))), - Token::Lifetime(ident) => quote!(::syntax::parse::token::Lifetime((quote ident))), - Token::Literal(lit, sfx) => quote! { - ::syntax::parse::token::Literal((quote lit), (quote sfx)) - }, - _ => panic!("Unhandled case!"), - } - } -} - -impl Quote for token::BinOpToken { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*) => { - match *self { - $( token::BinOpToken::$i => quote!(::syntax::parse::token::BinOpToken::$i), )* - } - } - } - - gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr) - } -} - -impl Quote for Lit { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*; $($raw:ident),*) => { - match *self { - $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )* - $( Lit::$raw(lit, n) => { - quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n))) - })* - } - } - } - - gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw) - } -} - -impl Quote for token::DelimToken { - fn quote(&self) -> TokenStream { - macro_rules! gen_match { - ($($i:ident),*) => { - match *self { - $(token::DelimToken::$i => { quote!(::syntax::parse::token::DelimToken::$i) })* - } - } - } - - gen_match!(Paren, Bracket, Brace, NoDelim) - } -} diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index c969aef675ff9..5322d24e38934 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -55,12 +55,19 @@ impl Clone for DefPathTable { } impl DefPathTable { + pub fn new() -> Self { + DefPathTable { + index_to_key: [vec![], vec![]], + key_to_index: FxHashMap(), + def_path_hashes: [vec![], vec![]], + } + } - fn allocate(&mut self, - key: DefKey, - def_path_hash: DefPathHash, - address_space: DefIndexAddressSpace) - -> DefIndex { + pub fn allocate(&mut self, + key: DefKey, + def_path_hash: DefPathHash, + address_space: DefIndexAddressSpace) + -> DefIndex { let index = { let index_to_key = &mut self.index_to_key[address_space.index()]; let index = DefIndex::new(index_to_key.len() + address_space.start()); @@ -241,7 +248,7 @@ pub struct DefKey { } impl DefKey { - fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash { + pub fn compute_stable_hash(&self, parent_hash: DefPathHash) -> DefPathHash { let mut hasher = StableHasher::new(); // We hash a 0u8 here to disambiguate between regular DefPath hashes, @@ -284,7 +291,7 @@ impl DefKey { DefPathHash(hasher.finish()) } - fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash { + pub fn root_parent_stable_hash(crate_name: &str, crate_disambiguator: &str) -> DefPathHash { let mut hasher = StableHasher::new(); // Disambiguate this from a regular DefPath hash, // see compute_stable_hash() above. @@ -446,11 +453,7 @@ impl Definitions { /// Create new empty definition map. 
pub fn new() -> Definitions { Definitions { - table: DefPathTable { - index_to_key: [vec![], vec![]], - key_to_index: FxHashMap(), - def_path_hashes: [vec![], vec![]], - }, + table: DefPathTable::new(), node_to_def_index: NodeMap(), def_index_to_node: [vec![], vec![]], node_to_hir_id: IndexVec::new(), diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index e27990c29cf9e..e6dc5da969a88 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -728,6 +728,7 @@ pub fn check_unused_or_stable_features<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let ref declared_lib_features = sess.features.borrow().declared_lib_features; let mut remaining_lib_features: FxHashMap = declared_lib_features.clone().into_iter().collect(); + remaining_lib_features.remove(&Symbol::intern("proc_macro")); fn format_stable_since_msg(version: &str) -> String { format!("this feature has been stable since {}. Attribute no longer needed", version) diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index 2e949f48c175e..0b950787e3b91 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -13,7 +13,6 @@ arena = { path = "../libarena" } graphviz = { path = "../libgraphviz" } log = { version = "0.3", features = ["release_max_level_info"] } env_logger = { version = "0.4", default-features = false } -proc_macro_plugin = { path = "../libproc_macro_plugin" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_borrowck = { path = "../librustc_borrowck" } diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index 27c2d22168c8b..57a09ed15032f 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -26,7 +26,8 @@ use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate}; use rustc::util::common::record_time; use rustc::util::nodemap::FxHashSet; use rustc::middle::cstore::NativeLibrary; -use rustc::hir::map::Definitions; +use rustc::hir::map::{Definitions, DefKey, DefPathData, DisambiguatedDefPathData, ITEM_LIKE_SPACE}; +use rustc::hir::map::definitions::DefPathTable; use std::cell::{RefCell, Cell}; use std::ops::Deref; @@ -34,7 +35,7 @@ use std::path::PathBuf; use std::rc::Rc; use std::{cmp, fs}; -use syntax::ast; +use syntax::ast::{self, Ident}; use syntax::abi::Abi; use syntax::attr; use syntax::ext::base::SyntaxExtension; @@ -307,9 +308,16 @@ impl<'a> CrateLoader<'a> { let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind); - let def_path_table = record_time(&self.sess.perf_stats.decode_def_path_tables_time, || { - crate_root.def_path_table.decode(&metadata) + let proc_macros = crate_root.macro_derive_registrar.map(|_| { + self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span) }); + let def_path_table = if let Some(ref proc_macros) = proc_macros { + proc_macro_def_path_table(proc_macros) + } else { + record_time(&self.sess.perf_stats.decode_def_path_tables_time, || { + crate_root.def_path_table.decode(&metadata) + }) + }; let exported_symbols = crate_root.exported_symbols .map(|x| x.decode(&metadata).collect()); @@ -328,9 +336,7 @@ impl<'a> CrateLoader<'a> { def_path_table: Rc::new(def_path_table), exported_symbols: exported_symbols, trait_impls: trait_impls, - proc_macros: crate_root.macro_derive_registrar.map(|_| { - self.load_derive_macros(&crate_root, dylib.clone().map(|p| p.0), span) - }), + proc_macros: proc_macros, root: crate_root, blob: metadata, cnum_map: 
RefCell::new(cnum_map), @@ -1213,3 +1219,31 @@ impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> { } } } + +fn proc_macro_def_path_table(proc_macros: &[(ast::Name, Rc)]) -> DefPathTable { + let mut table = DefPathTable::new(); + let root = DefKey { + parent: None, + disambiguated_data: DisambiguatedDefPathData { + data: DefPathData::CrateRoot, + disambiguator: 0, + }, + }; + + let initial_hash = DefKey::root_parent_stable_hash("", ""); + let root_hash = root.compute_stable_hash(initial_hash); + let root_id = table.allocate(root, root_hash, ITEM_LIKE_SPACE); + let root_path_hash = table.def_path_hash(root_id); + for proc_macro in proc_macros { + let key = DefKey { + parent: Some(CRATE_DEF_INDEX), + disambiguated_data: DisambiguatedDefPathData { + data: DefPathData::MacroDef(Ident::with_empty_ctxt(proc_macro.0)), + disambiguator: 0, + }, + }; + let def_path_hash = key.compute_stable_hash(root_path_hash); + table.allocate(key, def_path_hash, ITEM_LIKE_SPACE); + } + table +} diff --git a/src/librustc_metadata/cstore_impl.rs b/src/librustc_metadata/cstore_impl.rs index 0649553e382e3..ff5febadeb70f 100644 --- a/src/librustc_metadata/cstore_impl.rs +++ b/src/librustc_metadata/cstore_impl.rs @@ -33,6 +33,7 @@ use std::rc::Rc; use syntax::ast; use syntax::attr; +use syntax::ext::base::SyntaxExtension; use syntax::parse::filemap_to_stream; use syntax::symbol::Symbol; use syntax_pos::{Span, NO_EXPANSION}; @@ -365,6 +366,10 @@ impl CrateStore for cstore::CStore { let data = self.get_crate_data(id.krate); if let Some(ref proc_macros) = data.proc_macros { return LoadedMacro::ProcMacro(proc_macros[id.index.as_usize() - 1].1.clone()); + } else if data.name == "proc_macro" && + self.get_crate_data(id.krate).item_name(id.index) == "quote" { + let ext = SyntaxExtension::ProcMacro(Box::new(::proc_macro::__internal::Quoter)); + return LoadedMacro::ProcMacro(Rc::new(ext)); } let (name, def) = data.get_macro(id.index); diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 728ab30bb17dc..b974541ef255a 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -472,7 +472,7 @@ impl<'a, 'tcx> CrateMetadata { } } - fn item_name(&self, item_index: DefIndex) -> ast::Name { + pub fn item_name(&self, item_index: DefIndex) -> ast::Name { self.def_key(item_index) .disambiguated_data .data diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 2a504c4c07794..63a24c7db18ff 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -1095,6 +1095,7 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { /// Serialize the text of exported macros fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> { use syntax::print::pprust; + let def_id = self.tcx.hir.local_def_id(macro_def.id); Entry { kind: EntryKind::MacroDef(self.lazy(&MacroDef { body: pprust::tts_to_string(¯o_def.body.trees().collect::>()), @@ -1102,11 +1103,11 @@ impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> { })), visibility: self.lazy(&ty::Visibility::Public), span: self.lazy(¯o_def.span), - attributes: self.encode_attributes(¯o_def.attrs), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + children: LazySeq::empty(), - stability: None, - deprecation: None, ty: None, inherent_impls: LazySeq::empty(), variances: LazySeq::empty(), diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 8bd58ec7a52d5..325a5cdf8fc02 100644 --- a/src/libsyntax/ast.rs 
+++ b/src/libsyntax/ast.rs @@ -100,7 +100,7 @@ impl Path { let name = self.segments[0].identifier.name; if !self.is_global() && name != "$crate" && name != keywords::SelfValue.name() && name != keywords::Super.name() { - self.segments.insert(0, PathSegment::crate_root()); + self.segments.insert(0, PathSegment::crate_root(self.span)); } self } @@ -134,10 +134,10 @@ impl PathSegment { pub fn from_ident(ident: Ident, span: Span) -> Self { PathSegment { identifier: ident, span: span, parameters: None } } - pub fn crate_root() -> Self { + pub fn crate_root(span: Span) -> Self { PathSegment { - identifier: keywords::CrateRoot.ident(), - span: DUMMY_SP, + identifier: Ident { ctxt: span.ctxt, ..keywords::CrateRoot.ident() }, + span: span, parameters: None, } } diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index af5eabf06f87b..7a5c9456c5315 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -578,7 +578,10 @@ impl SyntaxExtension { pub fn is_modern(&self) -> bool { match *self { - SyntaxExtension::DeclMacro(..) => true, + SyntaxExtension::DeclMacro(..) | + SyntaxExtension::ProcMacro(..) | + SyntaxExtension::AttrProcMacro(..) | + SyntaxExtension::ProcMacroDerive(..) => true, _ => false, } } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index a4580ea3939fb..5168943d108cb 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -320,7 +320,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let last_identifier = idents.pop().unwrap(); let mut segments: Vec = Vec::new(); if global { - segments.push(ast::PathSegment::crate_root()); + segments.push(ast::PathSegment::crate_root(sp)); } segments.extend(idents.into_iter().map(|i| ast::PathSegment::from_ident(i, sp))); diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index d7d3a70f3c7c5..3f3c94536a6e7 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -38,12 +38,19 @@ use symbol::Symbol; use std::ascii::AsciiExt; use std::env; -macro_rules! setter { +macro_rules! set { + (proc_macro) => {{ + fn f(features: &mut Features, span: Span) { + features.declared_lib_features.push((Symbol::intern("proc_macro"), span)); + features.proc_macro = true; + } + f as fn(&mut Features, Span) + }}; ($field: ident) => {{ - fn f(features: &mut Features) -> &mut bool { - &mut features.$field + fn f(features: &mut Features, _: Span) { + features.$field = true; } - f as fn(&mut Features) -> &mut bool + f as fn(&mut Features, Span) }} } @@ -51,10 +58,9 @@ macro_rules! declare_features { ($((active, $feature: ident, $ver: expr, $issue: expr),)+) => { /// Represents active features that are currently being implemented or /// currently being considered for addition/removal. - const ACTIVE_FEATURES: &'static [(&'static str, &'static str, - Option, fn(&mut Features) -> &mut bool)] = &[ - $((stringify!($feature), $ver, $issue, setter!($feature))),+ - ]; + const ACTIVE_FEATURES: + &'static [(&'static str, &'static str, Option, fn(&mut Features, Span))] = + &[$((stringify!($feature), $ver, $issue, set!($feature))),+]; /// A set of features to be used by later passes. 
pub struct Features { @@ -1464,9 +1470,9 @@ pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> F continue }; - if let Some(&(_, _, _, setter)) = ACTIVE_FEATURES.iter() + if let Some(&(_, _, _, set)) = ACTIVE_FEATURES.iter() .find(|& &(n, _, _, _)| name == n) { - *(setter(&mut features)) = true; + set(&mut features, mi.span); feature_checker.collect(&features, mi.span); } else if let Some(&(_, _, _)) = REMOVED_FEATURES.iter() @@ -1500,7 +1506,7 @@ struct MutexFeatureChecker { impl MutexFeatureChecker { // If this method turns out to be a hotspot due to branching, - // the branching can be eliminated by modifying `setter!()` to set these spans + // the branching can be eliminated by modifying `set!()` to set these spans // only for the features that need to be checked for mutual exclusion. fn collect(&mut self, features: &Features, span: Span) { if features.proc_macro { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 2032aecacbb91..ca4814397d8ac 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -573,7 +573,7 @@ pub fn noop_fold_tt(tt: TokenTree, fld: &mut T) -> TokenTree { } pub fn noop_fold_tts(tts: TokenStream, fld: &mut T) -> TokenStream { - tts.trees().map(|tt| fld.fold_tt(tt)).collect() + tts.map(|tt| fld.fold_tt(tt)) } // apply ident folder if it's an ident, apply other folds to interpolated nodes diff --git a/src/libsyntax/parse/lexer/tokentrees.rs b/src/libsyntax/parse/lexer/tokentrees.rs index 554a1fcfc71a6..63a396c14db85 100644 --- a/src/libsyntax/parse/lexer/tokentrees.rs +++ b/src/libsyntax/parse/lexer/tokentrees.rs @@ -19,7 +19,9 @@ impl<'a> StringReader<'a> { pub fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> { let mut tts = Vec::new(); while self.token != token::Eof { - tts.push(self.parse_token_tree()?.into()); + let tree = self.parse_token_tree()?; + let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + tts.push(if is_joint { tree.joint() } else { tree.into() }); } Ok(TokenStream::concat(tts)) } @@ -31,13 +33,15 @@ impl<'a> StringReader<'a> { if let token::CloseDelim(..) = self.token { return TokenStream::concat(tts); } - match self.parse_token_tree() { - Ok(tt) => tts.push(tt.into()), + let tree = match self.parse_token_tree() { + Ok(tree) => tree, Err(mut e) => { e.emit(); return TokenStream::concat(tts); } - } + }; + let is_joint = tree.span().hi == self.span.lo && token::is_op(&self.token); + tts.push(if is_joint { tree.joint() } else { tree.into() }); } } diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 25ab46f6f9e2b..a30dcef6f44e1 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -1777,7 +1777,7 @@ impl<'a> Parser<'a> { }; if is_global { - segments.insert(0, PathSegment::crate_root()); + segments.insert(0, PathSegment::crate_root(lo)); } // Assemble the result. @@ -6187,7 +6187,7 @@ impl<'a> Parser<'a> { // `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`. 
self.eat(&token::ModSep); let prefix = ast::Path { - segments: vec![PathSegment::crate_root()], + segments: vec![PathSegment::crate_root(lo)], span: lo.to(self.span), }; let view_path_kind = if self.eat(&token::BinOp(token::Star)) { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index f208b0f56f81e..e568af66e8aa8 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -349,6 +349,60 @@ impl Token { _ => false, } } + + pub fn glue(self, joint: Token) -> Option { + Some(match self { + Eq => match joint { + Eq => EqEq, + Gt => FatArrow, + _ => return None, + }, + Lt => match joint { + Eq => Le, + Lt => BinOp(Shl), + Le => BinOpEq(Shl), + BinOp(Minus) => LArrow, + _ => return None, + }, + Gt => match joint { + Eq => Ge, + Gt => BinOp(Shr), + Ge => BinOpEq(Shr), + _ => return None, + }, + Not => match joint { + Eq => Ne, + _ => return None, + }, + BinOp(op) => match joint { + Eq => BinOpEq(op), + BinOp(And) if op == And => AndAnd, + BinOp(Or) if op == Or => OrOr, + Gt if op == Minus => RArrow, + _ => return None, + }, + Dot => match joint { + Dot => DotDot, + DotDot => DotDotDot, + _ => return None, + }, + DotDot => match joint { + Dot => DotDotDot, + _ => return None, + }, + Colon => match joint { + Colon => ModSep, + _ => return None, + }, + + Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | Comma | + Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar | Question | + OpenDelim(..) | CloseDelim(..) | Underscore => return None, + + Literal(..) | Ident(..) | Lifetime(..) | Interpolated(..) | DocComment(..) | + Whitespace | Comment | Shebang(..) | Eof => return None, + }) + } } #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash)] @@ -398,3 +452,12 @@ impl fmt::Debug for Nonterminal { } } } + +pub fn is_op(tok: &Token) -> bool { + match *tok { + OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) | + Ident(..) | Underscore | Lifetime(..) | Interpolated(..) | + Whitespace | Comment | Shebang(..) | Eof => false, + _ => true, + } +} diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index ab4f697071477..2637972cc6362 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -138,6 +138,10 @@ impl TokenTree { _ => false, } } + + pub fn joint(self) -> TokenStream { + TokenStream { kind: TokenStreamKind::JointTree(self) } + } } /// # Token Streams @@ -155,6 +159,7 @@ pub struct TokenStream { enum TokenStreamKind { Empty, Tree(TokenTree), + JointTree(TokenTree), Stream(RcSlice), } @@ -196,6 +201,10 @@ impl TokenStream { } } + pub fn builder() -> TokenStreamBuilder { + TokenStreamBuilder(Vec::new()) + } + pub fn concat(mut streams: Vec) -> TokenStream { match streams.len() { 0 => TokenStream::empty(), @@ -225,6 +234,99 @@ impl TokenStream { } true } + + pub fn as_tree(self) -> (TokenTree, bool /* joint? 
*/) { + match self.kind { + TokenStreamKind::Tree(tree) => (tree, false), + TokenStreamKind::JointTree(tree) => (tree, true), + _ => unreachable!(), + } + } + + pub fn map TokenTree>(self, mut f: F) -> TokenStream { + let mut trees = self.into_trees(); + let mut result = Vec::new(); + while let Some(stream) = trees.next_as_stream() { + result.push(match stream.kind { + TokenStreamKind::Tree(tree) => f(tree).into(), + TokenStreamKind::JointTree(tree) => f(tree).joint(), + _ => unreachable!() + }); + } + TokenStream::concat(result) + } + + fn first_tree(&self) -> Option { + match self.kind { + TokenStreamKind::Empty => None, + TokenStreamKind::Tree(ref tree) | + TokenStreamKind::JointTree(ref tree) => Some(tree.clone()), + TokenStreamKind::Stream(ref stream) => stream.first().unwrap().first_tree(), + } + } + + fn last_tree_if_joint(&self) -> Option { + match self.kind { + TokenStreamKind::Empty | TokenStreamKind::Tree(..) => None, + TokenStreamKind::JointTree(ref tree) => Some(tree.clone()), + TokenStreamKind::Stream(ref stream) => stream.last().unwrap().last_tree_if_joint(), + } + } +} + +pub struct TokenStreamBuilder(Vec); + +impl TokenStreamBuilder { + pub fn push>(&mut self, stream: T) { + let stream = stream.into(); + let last_tree_if_joint = self.0.last().and_then(TokenStream::last_tree_if_joint); + if let Some(TokenTree::Token(last_span, last_tok)) = last_tree_if_joint { + if let Some(TokenTree::Token(span, tok)) = stream.first_tree() { + if let Some(glued_tok) = last_tok.glue(tok) { + let last_stream = self.0.pop().unwrap(); + self.push_all_but_last_tree(&last_stream); + let glued_span = last_span.to(span); + self.0.push(TokenTree::Token(glued_span, glued_tok).into()); + self.push_all_but_first_tree(&stream); + return + } + } + } + self.0.push(stream); + } + + pub fn add>(mut self, stream: T) -> Self { + self.push(stream); + self + } + + pub fn build(self) -> TokenStream { + TokenStream::concat(self.0) + } + + fn push_all_but_last_tree(&mut self, stream: &TokenStream) { + if let TokenStreamKind::Stream(ref streams) = stream.kind { + let len = streams.len(); + match len { + 1 => {} + 2 => self.0.push(streams[0].clone().into()), + _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(0 .. len - 1))), + } + self.push_all_but_last_tree(&streams[len - 1]) + } + } + + fn push_all_but_first_tree(&mut self, stream: &TokenStream) { + if let TokenStreamKind::Stream(ref streams) = stream.kind { + let len = streams.len(); + match len { + 1 => {} + 2 => self.0.push(streams[1].clone().into()), + _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(1 .. len))), + } + self.push_all_but_first_tree(&streams[0]) + } + } } #[derive(Clone)] @@ -234,6 +336,7 @@ pub struct Cursor(CursorKind); enum CursorKind { Empty, Tree(TokenTree, bool /* consumed? */), + JointTree(TokenTree, bool /* consumed? */), Stream(StreamCursor), } @@ -245,12 +348,13 @@ struct StreamCursor { } impl StreamCursor { - fn next(&mut self) -> Option { + fn next_as_stream(&mut self) -> Option { loop { if self.index < self.stream.len() { self.index += 1; - match self.stream[self.index - 1].kind.clone() { - TokenStreamKind::Tree(tree) => return Some(tree), + let next = self.stream[self.index - 1].clone(); + match next.kind { + TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) 
                     TokenStreamKind::Stream(stream) => {
                         self.stack.push((mem::replace(&mut self.stream, stream),
                                          mem::replace(&mut self.index, 0)));
@@ -271,14 +375,10 @@ impl Iterator for Cursor {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        let (tree, consumed) = match self.0 {
-            CursorKind::Tree(ref tree, ref mut consumed @ false) => (tree, consumed),
-            CursorKind::Stream(ref mut cursor) => return cursor.next(),
-            _ => return None,
-        };
-
-        *consumed = true;
-        Some(tree.clone())
+        self.next_as_stream().map(|stream| match stream.kind {
+            TokenStreamKind::Tree(tree) | TokenStreamKind::JointTree(tree) => tree,
+            _ => unreachable!()
+        })
     }
 }
 
@@ -287,16 +387,32 @@ impl Cursor {
         Cursor(match stream.kind {
             TokenStreamKind::Empty => CursorKind::Empty,
             TokenStreamKind::Tree(tree) => CursorKind::Tree(tree, false),
+            TokenStreamKind::JointTree(tree) => CursorKind::JointTree(tree, false),
             TokenStreamKind::Stream(stream) => {
                 CursorKind::Stream(StreamCursor { stream: stream, index: 0, stack: Vec::new() })
             }
         })
     }
 
+    pub fn next_as_stream(&mut self) -> Option<TokenStream> {
+        let (stream, consumed) = match self.0 {
+            CursorKind::Tree(ref tree, ref mut consumed @ false) =>
+                (tree.clone().into(), consumed),
+            CursorKind::JointTree(ref tree, ref mut consumed @ false) =>
+                (tree.clone().joint(), consumed),
+            CursorKind::Stream(ref mut cursor) => return cursor.next_as_stream(),
+            _ => return None,
+        };
+
+        *consumed = true;
+        Some(stream)
+    }
+
     pub fn original_stream(self) -> TokenStream {
         match self.0 {
             CursorKind::Empty => TokenStream::empty(),
             CursorKind::Tree(tree, _) => tree.into(),
+            CursorKind::JointTree(tree, _) => tree.joint(),
             CursorKind::Stream(cursor) => TokenStream::concat_rc_slice({
                 cursor.stack.get(0).cloned().map(|(stream, _)| stream).unwrap_or(cursor.stream)
             }),
@@ -307,8 +423,9 @@ impl Cursor {
         fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize> {
             for stream in streams {
                 n = match stream.kind {
-                    TokenStreamKind::Tree(ref tree) if n == 0 => return Ok(tree.clone()),
-                    TokenStreamKind::Tree(..) => n - 1,
+                    TokenStreamKind::Tree(ref tree) | TokenStreamKind::JointTree(ref tree)
+                        if n == 0 => return Ok(tree.clone()),
+                    TokenStreamKind::Tree(..) | TokenStreamKind::JointTree(..) => n - 1,
                     TokenStreamKind::Stream(ref stream) => match look_ahead(stream, n) {
                         Ok(tree) => return Ok(tree),
                         Err(n) => n,
@@ -316,13 +433,15 @@ impl Cursor {
                     _ => n,
                 };
             }
-
             Err(n)
         }
 
         match self.0 {
-            CursorKind::Empty | CursorKind::Tree(_, true) => Err(n),
-            CursorKind::Tree(ref tree, false) => look_ahead(&[tree.clone().into()], n),
+            CursorKind::Empty |
+            CursorKind::Tree(_, true) |
+            CursorKind::JointTree(_, true) => Err(n),
+            CursorKind::Tree(ref tree, false) |
+            CursorKind::JointTree(ref tree, false) => look_ahead(&[tree.clone().into()], n),
             CursorKind::Stream(ref cursor) => {
                 look_ahead(&cursor.stream[cursor.index ..], n).or_else(|mut n| {
                     for &(ref stream, index) in cursor.stack.iter().rev() {
@@ -350,6 +469,7 @@ impl From<TokenStream> for ThinTokenStream {
         ThinTokenStream(match stream.kind {
             TokenStreamKind::Empty => None,
             TokenStreamKind::Tree(tree) => Some(RcSlice::new(vec![tree.into()])),
+            TokenStreamKind::JointTree(tree) => Some(RcSlice::new(vec![tree.joint()])),
             TokenStreamKind::Stream(stream) => Some(stream),
         })
     }
diff --git a/src/libsyntax/util/rc_slice.rs b/src/libsyntax/util/rc_slice.rs
index 2d9fd7aa87553..d6939d71129e4 100644
--- a/src/libsyntax/util/rc_slice.rs
+++ b/src/libsyntax/util/rc_slice.rs
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use std::fmt;
-use std::ops::Deref;
+use std::ops::{Deref, Range};
 use std::rc::Rc;
 
 use rustc_data_structures::stable_hasher::{StableHasher, StableHasherResult,
@@ -30,6 +30,14 @@ impl<T> RcSlice<T> {
             data: Rc::new(vec.into_boxed_slice()),
         }
     }
+
+    pub fn sub_slice(&self, range: Range<usize>) -> Self {
+        RcSlice {
+            data: self.data.clone(),
+            offset: self.offset + range.start as u32,
+            len: (range.end - range.start) as u32,
+        }
+    }
 }
 
 impl<T> Deref for RcSlice<T> {
diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
index 0433b95865ef8..9406eda5231d5 100644
--- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
@@ -8,50 +8,37 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![allow(unused_parens)]
-#![feature(plugin)]
-#![feature(plugin_registrar)]
-#![feature(rustc_private)]
-#![plugin(proc_macro_plugin)]
+// no-prefer-dynamic
 
-extern crate rustc_plugin;
-extern crate syntax;
+#![crate_type = "proc-macro"]
+#![feature(proc_macro, proc_macro_lib)]
 
-use rustc_plugin::Registry;
+extern crate proc_macro;
 
-use syntax::ext::base::SyntaxExtension;
-use syntax::parse::token::Token;
-use syntax::symbol::Symbol;
-use syntax::tokenstream::{TokenTree, TokenStream};
+use proc_macro::{TokenStream, TokenKind, quote};
 
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(Symbol::intern("cond"),
-                                  SyntaxExtension::ProcMacro(Box::new(cond)));
-}
-
-fn cond(input: TokenStream) -> TokenStream {
+#[proc_macro]
+pub fn cond(input: TokenStream) -> TokenStream {
     let mut conds = Vec::new();
-    let mut input = input.trees().peekable();
+    let mut input = input.into_iter().peekable();
     while let Some(tree) = input.next() {
-        let mut cond = match tree {
-            TokenTree::Delimited(_, ref delimited) => delimited.stream(),
+        let cond = match tree.kind {
+            TokenKind::Sequence(_, cond) => cond,
             _ => panic!("Invalid input"),
         };
-        let mut trees = cond.trees();
-        let test = trees.next();
-        let rhs = trees.collect::<TokenStream>();
+        let mut cond_trees = cond.clone().into_iter();
+        let test = cond_trees.next().expect("Unexpected empty condition in `cond!`");
+        let rhs = cond_trees.collect::<TokenStream>();
         if rhs.is_empty() {
             panic!("Invalid macro usage in cond: {}", cond);
         }
-        let is_else = match test {
-            Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true,
+        let is_else = match test.kind {
+            TokenKind::Word(word) => *word == *"else",
             _ => false,
         };
         conds.push(if is_else || input.peek().is_none() {
             quote!({ $rhs })
         } else {
-            let test = test.unwrap();
             quote!(if $test { $rhs } else)
         });
     }
diff --git a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs
index 9522592a5e9e6..cf6584e961a67 100644
--- a/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/hello_macro.rs
@@ -8,29 +8,20 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![feature(plugin)]
-#![feature(plugin_registrar)]
-#![feature(rustc_private)]
-#![plugin(proc_macro_plugin)]
+// no-prefer-dynamic
 
-extern crate rustc_plugin;
-extern crate syntax;
+#![crate_type = "proc-macro"]
+#![feature(proc_macro, proc_macro_lib)]
 
-use rustc_plugin::Registry;
-use syntax::ext::base::SyntaxExtension;
-use syntax::symbol::Symbol;
-use syntax::tokenstream::TokenStream;
+extern crate proc_macro;
 
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(Symbol::intern("hello"),
-                                  SyntaxExtension::ProcMacro(Box::new(hello)));
-}
+use proc_macro::{TokenStream, quote};
 
 // This macro is not very interesting, but it does contain delimited tokens with
 // no content - `()` and `{}` - which has caused problems in the past.
 // Also, it tests that we can escape `$` via `$$`.
-fn hello(_: TokenStream) -> TokenStream {
+#[proc_macro]
+pub fn hello(_: TokenStream) -> TokenStream {
     quote!({
         fn hello() {}
         macro_rules! m { ($$($$t:tt)*) => { $$($$t)* } }
diff --git a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs
index 0e37a7a5dcce2..1b47043884844 100644
--- a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs
+++ b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs
@@ -8,47 +8,37 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![feature(plugin, plugin_registrar, rustc_private)]
-#![plugin(proc_macro_plugin)]
-
-extern crate rustc_plugin;
-extern crate syntax;
-
-use rustc_plugin::Registry;
-use syntax::ext::base::SyntaxExtension;
-use syntax::tokenstream::TokenStream;
-use syntax::symbol::Symbol;
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(Symbol::intern("attr_tru"),
-                                  SyntaxExtension::AttrProcMacro(Box::new(attr_tru)));
-    reg.register_syntax_extension(Symbol::intern("attr_identity"),
-                                  SyntaxExtension::AttrProcMacro(Box::new(attr_identity)));
-    reg.register_syntax_extension(Symbol::intern("tru"),
-                                  SyntaxExtension::ProcMacro(Box::new(tru)));
-    reg.register_syntax_extension(Symbol::intern("ret_tru"),
-                                  SyntaxExtension::ProcMacro(Box::new(ret_tru)));
-    reg.register_syntax_extension(Symbol::intern("identity"),
-                                  SyntaxExtension::ProcMacro(Box::new(identity)));
-}
+// no-prefer-dynamic
+
+#![crate_type = "proc-macro"]
+#![feature(proc_macro, proc_macro_lib)]
+
+extern crate proc_macro;
+
+use proc_macro::{TokenStream, quote};
 
-fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream {
-    quote!(fn f1() -> bool { true })
+#[proc_macro_attribute]
+pub fn attr_tru(_attr: TokenStream, item: TokenStream) -> TokenStream {
+    let name = item.into_iter().skip(1).next().unwrap();
+    quote!(fn $name() -> bool { true })
 }
 
-fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
+#[proc_macro_attribute]
+pub fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
     quote!($item)
 }
 
-fn tru(_ts: TokenStream) -> TokenStream {
+#[proc_macro]
+pub fn tru(_ts: TokenStream) -> TokenStream {
     quote!(true)
 }
 
-fn ret_tru(_ts: TokenStream) -> TokenStream {
+#[proc_macro]
+pub fn ret_tru(_ts: TokenStream) -> TokenStream {
     quote!(return true;)
 }
 
-fn identity(ts: TokenStream) -> TokenStream {
+#[proc_macro]
+pub fn identity(ts: TokenStream) -> TokenStream {
     quote!($ts)
 }
diff --git a/src/test/run-pass-fulldeps/macro-quote-1.rs b/src/test/run-pass-fulldeps/macro-quote-1.rs
deleted file mode 100644
index e7d0a83017be0..0000000000000
--- a/src/test/run-pass-fulldeps/macro-quote-1.rs
+++ /dev/null
@@ -1,40 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-stage1
-
-#![feature(plugin)]
-#![feature(rustc_private)]
-#![plugin(proc_macro_plugin)]
-
-extern crate syntax;
-extern crate syntax_pos;
-
-use syntax::ast::{Ident, Name};
-use syntax::parse::token::{self, Token, Lit};
-use syntax::tokenstream::TokenTree;
-
-fn main() {
-    let true_tok = token::Ident(Ident::from_str("true"));
-    assert!(quote!(true).eq_unspanned(&true_tok.into()));
-
-    // issue #35829, extended check to proc_macro.
-    let triple_dot_tok = Token::DotDotDot;
-    assert!(quote!(...).eq_unspanned(&triple_dot_tok.into()));
-
-    let byte_str_tok = Token::Literal(Lit::ByteStr(Name::intern("one")), None);
-    assert!(quote!(b"one").eq_unspanned(&byte_str_tok.into()));
-
-    let byte_str_raw_tok = Token::Literal(Lit::ByteStrRaw(Name::intern("#\"two\"#"), 3), None);
-    assert!(quote!(br###"#"two"#"###).eq_unspanned(&byte_str_raw_tok.into()));
-
-    let str_raw_tok = Token::Literal(Lit::StrRaw(Name::intern("#\"three\"#"), 2), None);
-    assert!(quote!(r##"#"three"#"##).eq_unspanned(&str_raw_tok.into()));
-}
diff --git a/src/test/run-pass-fulldeps/macro-quote-cond.rs b/src/test/run-pass-fulldeps/macro-quote-cond.rs
index fa969b6a087cf..cff743bdae6cd 100644
--- a/src/test/run-pass-fulldeps/macro-quote-cond.rs
+++ b/src/test/run-pass-fulldeps/macro-quote-cond.rs
@@ -11,9 +11,11 @@
 // aux-build:cond_plugin.rs
 // ignore-stage1
 
-#![feature(plugin)]
-#![feature(rustc_private)]
-#![plugin(cond_plugin)]
+#![feature(proc_macro)]
+
+extern crate cond_plugin;
+
+use cond_plugin::cond;
 
 fn fact(n : i64) -> i64 {
     if n == 0 {
diff --git a/src/test/run-pass-fulldeps/macro-quote-test.rs b/src/test/run-pass-fulldeps/macro-quote-test.rs
index bdbea8a419416..eb77895e2d7ad 100644
--- a/src/test/run-pass-fulldeps/macro-quote-test.rs
+++ b/src/test/run-pass-fulldeps/macro-quote-test.rs
@@ -13,10 +13,10 @@
 // aux-build:hello_macro.rs
 // ignore-stage1
 
-#![feature(plugin)]
-#![feature(rustc_private)]
-#![plugin(hello_macro)]
+#![feature(proc_macro)]
+
+extern crate hello_macro;
 
 fn main() {
-    hello!();
+    hello_macro::hello!();
 }
diff --git a/src/test/run-pass-fulldeps/proc_macro.rs b/src/test/run-pass-fulldeps/proc_macro.rs
index 22cc9f0f8d40e..cdda723585b7a 100644
--- a/src/test/run-pass-fulldeps/proc_macro.rs
+++ b/src/test/run-pass-fulldeps/proc_macro.rs
@@ -12,10 +12,11 @@
 // ignore-stage1
 // ignore-cross-compile
 
-#![feature(plugin, custom_attribute)]
-#![feature(type_macros)]
+#![feature(proc_macro)]
 
-#![plugin(proc_macro_def)]
+extern crate proc_macro_def;
+
+use proc_macro_def::{attr_tru, attr_identity, identity, ret_tru, tru};
 
 #[attr_tru]
 fn f1() -> bool {
diff --git a/src/tools/tidy/src/cargo.rs b/src/tools/tidy/src/cargo.rs
index c8c6cb0ee6b41..f40fea60f40a8 100644
--- a/src/tools/tidy/src/cargo.rs
+++ b/src/tools/tidy/src/cargo.rs
@@ -91,14 +91,6 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) {
             continue
         }
 
-        // We want the compiler to depend on the proc_macro_plugin crate so
-        // that it is built and included in the end, but we don't want to
-        // actually use it in the compiler.
-        if toml.contains("name = \"rustc_driver\"") &&
-           krate == "proc_macro_plugin" {
-            continue
-        }
-
         if !librs.contains(&format!("extern crate {}", krate)) {
             tidy_error!(bad, "{} doesn't have `extern crate {}`, but Cargo.toml \
                               depends on it", libfile.display(), krate);
diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs
index 722fc2b317eb4..d98c6932c51e1 100644
--- a/src/tools/tidy/src/features.rs
+++ b/src/tools/tidy/src/features.rs
@@ -245,7 +245,7 @@ fn get_and_check_lib_features(base_src_path: &Path,
             let mut err = |msg: &str| {
                 tidy_error!(bad, "{}:{}: {}", file.display(), line, msg);
             };
-            if lang_features.contains_key(name) {
+            if lang_features.contains_key(name) && feature_name != "proc_macro" {
                 err("duplicating a lang feature");
             }
             if let Some(ref s) = lib_features.get(name) {
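
For readers tracing the tokenstream changes above, the joint-token gluing rule that `Token::glue` implements and `TokenStreamBuilder::push` consumes can be modelled in isolation. The sketch below is only an illustration: `Tok` and the free function `glue` are hypothetical stand-ins for the compiler's `Token` type, and they cover just a few of the pairings handled in the patch.

    // Hypothetical model of "joint" punctuation gluing: a token marked as
    // joint merges with its successor when a multi-character operator
    // exists for the pair; otherwise the two tokens stay separate.
    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Tok { Eq, Lt, Gt, EqEq, Le, Ge, FatArrow, Shl }

    fn glue(first: Tok, joint: Tok) -> Option<Tok> {
        Some(match (first, joint) {
            (Tok::Eq, Tok::Eq) => Tok::EqEq,      // `=` + `=` -> `==`
            (Tok::Eq, Tok::Gt) => Tok::FatArrow,  // `=` + `>` -> `=>`
            (Tok::Lt, Tok::Eq) => Tok::Le,        // `<` + `=` -> `<=`
            (Tok::Lt, Tok::Lt) => Tok::Shl,       // `<` + `<` -> `<<`
            (Tok::Gt, Tok::Eq) => Tok::Ge,        // `>` + `=` -> `>=`
            _ => return None,                     // no compound operator
        })
    }

    fn main() {
        // A joint `=` followed by `>` glues into a single `=>` token.
        assert_eq!(glue(Tok::Eq, Tok::Gt), Some(Tok::FatArrow));
        // A pair with no compound form is left as two separate tokens.
        assert_eq!(glue(Tok::Gt, Tok::Lt), None);
    }

The builder in the patch applies the same idea at stream granularity: when the last tree it holds is joint and glues with the first tree of the incoming stream, it replaces the two single-character tokens with one glued token spanning both.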