Rollup merge of rust-lang#38842 - abonander:proc_macro_attribute, r=jseyfried

Implement `#[proc_macro_attribute]`

This implements `#[proc_macro_attribute]` as described in rust-lang/rfcs#1566

The following major (hopefully non-breaking) changes are included:

* Refactor `proc_macro::TokenStream` to use `syntax::tokenstream::TokenStream`.
    * `proc_macro::TokenStream`'s `Display` output no longer emits newlines between items; this can be trivially restored if desired
    * `proc_macro::TokenStream::from_str` no longer tries to parse an item; that logic moved to `impl MultiItemModifier for CustomDerive`, with a more informative error message (a sketch of the new `from_str` behavior follows this list)

* Implement `#[proc_macro_attribute]`, which expects functions of the kind `fn(TokenStream, TokenStream) -> TokenStream` (a definition/usage sketch appears after the prototype links below)
    * Reactivated `#![feature(proc_macro)]` and gated `#[proc_macro_attribute]` under it
    * `#![feature(proc_macro)]` and `#![feature(custom_attribute)]` are mutually exclusive
    * Adding `#![feature(proc_macro)]` makes the expansion pass assume that any attribute that is neither built-in nor introduced by an existing syntax extension is a proc-macro attribute

* Fix `feature_gate::find_lang_feature_issue()` to not use `unwrap()`

    * This change wasn't strictly necessary for this PR, but it helped debug a problem where I was using the wrong feature string.

* Move "completed feature gate checking" pass to after "name resolution" pass

    * This was necessary for proper feature-gating of `#[proc_macro_attribute]` invocations when the `proc_macro` feature flag isn't set.
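
A short sketch of the new `from_str`/`Display` behavior referenced in the first bullet, written as a custom derive so that it runs inside an expansion (the parse session `proc_macro` relies on is only set up there). This is not code from the PR; the derive, trait, and type names are made up:

```rust
// In a crate built with `crate-type = "proc-macro"`.
extern crate proc_macro;

use std::str::FromStr;
use proc_macro::TokenStream;

#[proc_macro_derive(Placeholder)]
pub fn placeholder_derive(input: TokenStream) -> TokenStream {
    // `Display` now prints the underlying token stream directly, without
    // inserting a newline after every parsed item.
    println!("deriving for: {}", input);

    // `from_str` now only lexes its input: any balanced token sequence is
    // accepted, and lex failures (e.g. an unbalanced delimiter) surface as
    // a `LexError` rather than an item-parse error.
    match TokenStream::from_str("impl Placeholder for MadeUpType {}") {
        Ok(tokens) => tokens,
        Err(_) => panic!("generated code did not lex"),
    }
}
```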

Prototype/Litmus Test: [Implementation](https://github.com/abonander/anterofit/blob/proc_macro/service-attr/src/lib.rs#L13) -- [Usage](https://github.com/abonander/anterofit/blob/proc_macro/service-attr/examples/post_service.rs#L35)
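
For reference, a minimal sketch of defining and invoking such an attribute under this PR. The crate and attribute names (`my_attrs`, `passthrough`) are made up, and the client-side import style follows the help text added in this PR rather than anything set in stone:

```rust
// Definition side: a crate built with `crate-type = "proc-macro"`.
#![feature(proc_macro)]

extern crate proc_macro;
use proc_macro::TokenStream;

// The first stream carries the attribute's own arguments, the second the
// item it is attached to.
#[proc_macro_attribute]
pub fn passthrough(args: TokenStream, item: TokenStream) -> TokenStream {
    println!("attribute arguments: {}", args);
    // Return the annotated item unchanged.
    item
}
```

```rust
// Client side: `#[macro_use]` is rejected for procedural macros, so the
// attribute is imported like an ordinary item.
#![feature(proc_macro)]

extern crate my_attrs;
use my_attrs::passthrough;

#[passthrough(logged, retries = "3")]
fn annotated() {}
```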
alexcrichton committed Jan 20, 2017
2 parents 437d2b5 + 04ecee1 commit d4d276f
Showing 24 changed files with 613 additions and 127 deletions.
73 changes: 47 additions & 26 deletions src/libproc_macro/lib.rs
@@ -39,9 +39,9 @@ extern crate syntax;
use std::fmt;
use std::str::FromStr;

use syntax::ast;
use syntax::errors::DiagnosticBuilder;
use syntax::parse;
use syntax::ptr::P;
use syntax::tokenstream::TokenStream as TokenStream_;

/// The main type provided by this crate, representing an abstract stream of
/// tokens.
@@ -54,7 +54,7 @@ use syntax::ptr::P;
/// time!
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
pub struct TokenStream {
inner: Vec<P<ast::Item>>,
inner: TokenStream_,
}

/// Error returned from `TokenStream::from_str`.
@@ -77,17 +77,41 @@ pub struct LexError {
#[doc(hidden)]
pub mod __internal {
use std::cell::Cell;
use std::rc::Rc;

use syntax::ast;
use syntax::ptr::P;
use syntax::parse::ParseSess;
use super::TokenStream;
use syntax::parse::{self, token, ParseSess};
use syntax::tokenstream::TokenStream as TokenStream_;

use super::{TokenStream, LexError};

pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
TokenStream { inner: vec![item] }
TokenStream { inner: TokenStream_::from_tokens(vec![
token::Interpolated(Rc::new(token::NtItem(item)))
])}
}

pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
TokenStream {
inner: inner
}
}

pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
with_parse_sess(move |sess| {
let mut parser = parse::new_parser_from_ts(sess, stream.inner);
let mut items = Vec::new();

while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
items.push(item)
}

Ok(items)
})
}

pub fn token_stream_items(stream: TokenStream) -> Vec<P<ast::Item>> {
pub fn token_stream_inner(stream: TokenStream) -> TokenStream_ {
stream.inner
}

@@ -96,6 +120,10 @@ pub mod __internal {
trait_name: &str,
expand: fn(TokenStream) -> TokenStream,
attributes: &[&'static str]);

fn register_attr_proc_macro(&mut self,
name: &str,
expand: fn(TokenStream, TokenStream) -> TokenStream);
}

// Emulate scoped_thread_local!() here essentially
@@ -125,11 +153,17 @@ pub mod __internal {
where F: FnOnce(&ParseSess) -> R
{
let p = CURRENT_SESS.with(|p| p.get());
assert!(!p.is_null());
assert!(!p.is_null(), "proc_macro::__internal::with_parse_sess() called \
before set_parse_sess()!");
f(unsafe { &*p })
}
}

fn parse_to_lex_err(mut err: DiagnosticBuilder) -> LexError {
err.cancel();
LexError { _inner: () }
}

#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
type Err = LexError;
@@ -138,30 +172,17 @@ impl FromStr for TokenStream {
__internal::with_parse_sess(|sess| {
let src = src.to_string();
let name = "<proc-macro source code>".to_string();
let mut parser = parse::new_parser_from_source_str(sess, name, src);
let mut ret = TokenStream { inner: Vec::new() };
loop {
match parser.parse_item() {
Ok(Some(item)) => ret.inner.push(item),
Ok(None) => return Ok(ret),
Err(mut err) => {
err.cancel();
return Err(LexError { _inner: () })
}
}
}
let tts = try!(parse::parse_tts_from_source_str(name, src, sess)
.map_err(parse_to_lex_err));

Ok(__internal::token_stream_wrap(TokenStream_::from_tts(tts)))
})
}
}

#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
for item in self.inner.iter() {
let item = syntax::print::pprust::item_to_string(item);
try!(f.write_str(&item));
try!(f.write_str("\n"));
}
Ok(())
self.inner.fmt(f)
}
}
23 changes: 12 additions & 11 deletions src/librustc_driver/driver.rs
@@ -681,6 +681,7 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
should_test: sess.opts.test,
..syntax::ext::expand::ExpansionConfig::default(crate_name.to_string())
};

let mut ecx = ExtCtxt::new(&sess.parse_sess, cfg, &mut resolver);
let err_count = ecx.parse_sess.span_diagnostic.err_count();

@@ -740,17 +741,6 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
"checking for inline asm in case the target doesn't support it",
|| no_asm::check_crate(sess, &krate));

// Needs to go *after* expansion to be able to check the results of macro expansion.
time(time_passes, "complete gated feature checking", || {
sess.track_errors(|| {
syntax::feature_gate::check_crate(&krate,
&sess.parse_sess,
&sess.features.borrow(),
&attributes,
sess.opts.unstable_features);
})
})?;

time(sess.time_passes(),
"early lint checks",
|| lint::check_ast_crate(sess, &krate));
@@ -768,6 +758,17 @@ pub fn phase_2_configure_and_expand<F>(sess: &Session,
Ok(())
})?;

// Needs to go *after* expansion to be able to check the results of macro expansion.
time(time_passes, "complete gated feature checking", || {
sess.track_errors(|| {
syntax::feature_gate::check_crate(&krate,
&sess.parse_sess,
&sess.features.borrow(),
&attributes,
sess.opts.unstable_features);
})
})?;

// Lower ast -> hir.
let hir_forest = time(sess.time_passes(), "lowering ast -> hir", || {
let hir_crate = lower_crate(sess, &krate, &mut resolver);
10 changes: 10 additions & 0 deletions src/librustc_metadata/creader.rs
@@ -578,6 +578,7 @@ impl<'a> CrateLoader<'a> {
use proc_macro::__internal::Registry;
use rustc_back::dynamic_lib::DynamicLibrary;
use syntax_ext::deriving::custom::CustomDerive;
use syntax_ext::proc_macro_impl::AttrProcMacro;

let path = match dylib {
Some(dylib) => dylib,
@@ -613,6 +614,15 @@ impl<'a> CrateLoader<'a> {
);
self.0.push((Symbol::intern(trait_name), Rc::new(derive)));
}

fn register_attr_proc_macro(&mut self,
name: &str,
expand: fn(TokenStream, TokenStream) -> TokenStream) {
let expand = SyntaxExtension::AttrProcMacro(
Box::new(AttrProcMacro { inner: expand })
);
self.0.push((Symbol::intern(name), Rc::new(expand)));
}
}

let mut my_registrar = MyRegistrar(Vec::new());
46 changes: 44 additions & 2 deletions src/librustc_resolve/lib.rs
@@ -61,7 +61,7 @@ use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, Generics};
use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind};
use syntax::ast::{Local, Mutability, Pat, PatKind, Path};
use syntax::ast::{QSelf, TraitItemKind, TraitRef, Ty, TyKind};
use syntax::feature_gate::{emit_feature_err, GateIssue};
use syntax::feature_gate::{feature_err, emit_feature_err, GateIssue};

use syntax_pos::{Span, DUMMY_SP, MultiSpan};
use errors::DiagnosticBuilder;
@@ -1123,6 +1123,12 @@ pub struct Resolver<'a> {

// Avoid duplicated errors for "name already defined".
name_already_seen: FxHashMap<Name, Span>,

// If `#![feature(proc_macro)]` is set
proc_macro_enabled: bool,

// A set of procedural macros imported by `#[macro_use]` that have already been warned about
warned_proc_macros: FxHashSet<Name>,
}

pub struct ResolverArenas<'a> {
@@ -1227,6 +1233,8 @@ impl<'a> Resolver<'a> {
invocations.insert(Mark::root(),
arenas.alloc_invocation_data(InvocationData::root(graph_root)));

let features = session.features.borrow();

Resolver {
session: session,

@@ -1284,7 +1292,9 @@ impl<'a> Resolver<'a> {
span: DUMMY_SP,
vis: ty::Visibility::Public,
}),
use_extern_macros: session.features.borrow().use_extern_macros,

// `#![feature(proc_macro)]` implies `#![feature(use_extern_macros)]`
use_extern_macros: features.use_extern_macros || features.proc_macro,

exported_macros: Vec::new(),
crate_loader: crate_loader,
Expand All @@ -1296,6 +1306,8 @@ impl<'a> Resolver<'a> {
invocations: invocations,
name_already_seen: FxHashMap(),
whitelisted_legacy_custom_derives: Vec::new(),
proc_macro_enabled: features.proc_macro,
warned_proc_macros: FxHashSet(),
}
}

@@ -1525,6 +1537,8 @@ impl<'a> Resolver<'a> {

debug!("(resolving item) resolving {}", name);

self.check_proc_macro_attrs(&item.attrs);

match item.node {
ItemKind::Enum(_, ref generics) |
ItemKind::Ty(_, ref generics) |
@@ -1554,6 +1568,8 @@ impl<'a> Resolver<'a> {
walk_list!(this, visit_ty_param_bound, bounds);

for trait_item in trait_items {
this.check_proc_macro_attrs(&trait_item.attrs);

match trait_item.node {
TraitItemKind::Const(_, ref default) => {
// Only impose the restrictions of
@@ -1738,6 +1754,7 @@ impl<'a> Resolver<'a> {
this.with_self_rib(Def::SelfTy(trait_id, Some(item_def_id)), |this| {
this.with_current_self_type(self_type, |this| {
for impl_item in impl_items {
this.check_proc_macro_attrs(&impl_item.attrs);
this.resolve_visibility(&impl_item.vis);
match impl_item.node {
ImplItemKind::Const(..) => {
@@ -3184,6 +3201,31 @@ impl<'a> Resolver<'a> {
let msg = "`self` no longer imports values".to_string();
self.session.add_lint(lint::builtin::LEGACY_IMPORTS, id, span, msg);
}

fn check_proc_macro_attrs(&mut self, attrs: &[ast::Attribute]) {
if self.proc_macro_enabled { return; }

for attr in attrs {
let maybe_binding = self.builtin_macros.get(&attr.name()).cloned().or_else(|| {
let ident = Ident::with_empty_ctxt(attr.name());
self.resolve_lexical_macro_path_segment(ident, MacroNS, None).ok()
});

if let Some(binding) = maybe_binding {
if let SyntaxExtension::AttrProcMacro(..) = *binding.get_macro(self) {
attr::mark_known(attr);

let msg = "attribute procedural macros are experimental";
let feature = "proc_macro";

feature_err(&self.session.parse_sess, feature,
attr.span, GateIssue::Language, msg)
.span_note(binding.span, "procedural macro imported here")
.emit();
}
}
}
}
}

fn is_struct_like(def: Def) -> bool {
43 changes: 42 additions & 1 deletion src/librustc_resolve/macros.rs
@@ -27,7 +27,7 @@ use syntax::ext::base::{NormalTT, Resolver as SyntaxResolver, SyntaxExtension};
use syntax::ext::expand::{Expansion, mark_tts};
use syntax::ext::hygiene::Mark;
use syntax::ext::tt::macro_rules;
use syntax::feature_gate::{emit_feature_err, GateIssue};
use syntax::feature_gate::{emit_feature_err, GateIssue, is_builtin_attr};
use syntax::fold::{self, Folder};
use syntax::ptr::P;
use syntax::symbol::keywords;
@@ -183,6 +183,10 @@ impl<'a> base::Resolver for Resolver<'a> {
},
None => {}
}

if self.proc_macro_enabled && !is_builtin_attr(&attrs[i]) {
return Some(attrs.remove(i));
}
}
None
}
@@ -373,6 +377,10 @@ impl<'a> Resolver<'a> {
let resolution = self.resolve_lexical_macro_path_segment(ident, MacroNS, Some(span));
let (legacy_resolution, resolution) = match (legacy_resolution, resolution) {
(Some(legacy_resolution), Ok(resolution)) => (legacy_resolution, resolution),
(Some(MacroBinding::Modern(binding)), Err(_)) => {
self.err_if_macro_use_proc_macro(ident.name, span, binding);
continue
},
_ => continue,
};
let (legacy_span, participle) = match legacy_resolution {
@@ -469,4 +477,37 @@ impl<'a> Resolver<'a> {
self.exported_macros.push(def);
}
}

/// Error if `ext` is a Macros 1.1 procedural macro being imported by `#[macro_use]`
fn err_if_macro_use_proc_macro(&mut self, name: Name, use_span: Span,
binding: &NameBinding<'a>) {
use self::SyntaxExtension::*;

let krate = binding.def().def_id().krate;

// Plugin-based syntax extensions are exempt from this check
if krate == BUILTIN_MACROS_CRATE { return; }

let ext = binding.get_macro(self);

match *ext {
// If `ext` is a procedural macro, check if we've already warned about it
AttrProcMacro(_) | ProcMacro(_) => if !self.warned_proc_macros.insert(name) { return; },
_ => return,
}

let warn_msg = match *ext {
AttrProcMacro(_) => "attribute procedural macros cannot be \
imported with `#[macro_use]`",
ProcMacro(_) => "procedural macros cannot be imported with `#[macro_use]`",
_ => return,
};

let crate_name = self.session.cstore.crate_name(krate);

self.session.struct_span_err(use_span, warn_msg)
.help(&format!("instead, import the procedural macro like any other item: \
`use {}::{};`", crate_name, name))
.emit();
}
}