Review proc macro API 1.2 #50473

Merged: 7 commits, May 16, 2018
333 changes: 207 additions & 126 deletions src/libproc_macro/lib.rs

Large diffs are not rendered by default.
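The lib.rs diff is collapsed above, so here is a minimal sketch of how the reworked surface is meant to be used from inside a proc-macro crate. This is my illustration, not code from the patch; it assumes only the renames visible in the quote.rs hunks below (`Op` becomes `Punct`, queried with `as_char()`; `Term` becomes `Ident`).

```rust
// Hypothetical usage sketch, not code from this PR: assumes the API 1.2
// renames visible in quote.rs below (Op -> Punct, Term -> Ident).
extern crate proc_macro;

use proc_macro::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};

// Build the two tokens `foo !` by hand and collect them into a stream.
fn foo_bang() -> TokenStream {
    vec![
        TokenTree::from(Ident::new("foo", Span::call_site())),
        TokenTree::from(Punct::new('!', Spacing::Alone)),
    ]
    .into_iter()
    .collect()
}
```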

76 changes: 48 additions & 28 deletions src/libproc_macro/quote.rs
@@ -14,10 +14,11 @@
//! This quasiquoter uses macros 2.0 hygiene to reliably access
//! items from `proc_macro`, to build a `proc_macro::TokenStream`.

use {Delimiter, Literal, Spacing, Span, Term, Op, Group, TokenStream, TokenTree};
use {Delimiter, Literal, Spacing, Span, Ident, Punct, Group, TokenStream, TokenTree};

use syntax::ext::base::{ExtCtxt, ProcMacro};
use syntax::parse::token;
use syntax::symbol::Symbol;
use syntax::tokenstream;

pub struct Quoter;
@@ -35,14 +36,14 @@ macro_rules! tt2ts {
}

macro_rules! quote_tok {
(,) => { tt2ts!(Op::new(',', Spacing::Alone)) };
(.) => { tt2ts!(Op::new('.', Spacing::Alone)) };
(:) => { tt2ts!(Op::new(':', Spacing::Alone)) };
(|) => { tt2ts!(Op::new('|', Spacing::Alone)) };
(,) => { tt2ts!(Punct::new(',', Spacing::Alone)) };
(.) => { tt2ts!(Punct::new('.', Spacing::Alone)) };
(:) => { tt2ts!(Punct::new(':', Spacing::Alone)) };
(|) => { tt2ts!(Punct::new('|', Spacing::Alone)) };
(::) => {
[
TokenTree::from(Op::new(':', Spacing::Joint)),
TokenTree::from(Op::new(':', Spacing::Alone)),
TokenTree::from(Punct::new(':', Spacing::Joint)),
TokenTree::from(Punct::new(':', Spacing::Alone)),
].iter()
.cloned()
.map(|mut x| {
@@ -51,13 +52,13 @@
})
.collect::<TokenStream>()
};
(!) => { tt2ts!(Op::new('!', Spacing::Alone)) };
(<) => { tt2ts!(Op::new('<', Spacing::Alone)) };
(>) => { tt2ts!(Op::new('>', Spacing::Alone)) };
(_) => { tt2ts!(Op::new('_', Spacing::Alone)) };
(!) => { tt2ts!(Punct::new('!', Spacing::Alone)) };
(<) => { tt2ts!(Punct::new('<', Spacing::Alone)) };
(>) => { tt2ts!(Punct::new('>', Spacing::Alone)) };
(_) => { tt2ts!(Punct::new('_', Spacing::Alone)) };
(0) => { tt2ts!(Literal::i8_unsuffixed(0)) };
(&) => { tt2ts!(Op::new('&', Spacing::Alone)) };
($i:ident) => { tt2ts!(Term::new(stringify!($i), Span::def_site())) };
(&) => { tt2ts!(Punct::new('&', Spacing::Alone)) };
($i:ident) => { tt2ts!(Ident::new(stringify!($i), Span::def_site())) };
}

macro_rules! quote_tree {
@@ -110,15 +111,15 @@ impl Quote for TokenStream {
if after_dollar {
after_dollar = false;
match tree {
TokenTree::Term(_) => {
TokenTree::Ident(_) => {
let tree = TokenStream::from(tree);
return Some(quote!(::__internal::unquote(&(unquote tree)),));
}
TokenTree::Op(ref tt) if tt.op() == '$' => {}
TokenTree::Punct(ref tt) if tt.as_char() == '$' => {}
_ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
}
} else if let TokenTree::Op(tt) = tree {
if tt.op() == '$' {
} else if let TokenTree::Punct(ref tt) = tree {
if tt.as_char() == '$' {
after_dollar = true;
return None;
}
@@ -143,9 +144,9 @@
impl Quote for TokenTree {
fn quote(self) -> TokenStream {
match self {
TokenTree::Op(tt) => quote!(::TokenTree::Op( (quote tt) )),
TokenTree::Punct(tt) => quote!(::TokenTree::Punct( (quote tt) )),
TokenTree::Group(tt) => quote!(::TokenTree::Group( (quote tt) )),
TokenTree::Term(tt) => quote!(::TokenTree::Term( (quote tt) )),
TokenTree::Ident(tt) => quote!(::TokenTree::Ident( (quote tt) )),
TokenTree::Literal(tt) => quote!(::TokenTree::Literal( (quote tt) )),
}
}
@@ -175,15 +176,15 @@ impl Quote for Group {
}
}

impl Quote for Op {
impl Quote for Punct {
fn quote(self) -> TokenStream {
quote!(::Op::new((quote self.op()), (quote self.spacing())))
quote!(::Punct::new((quote self.as_char()), (quote self.spacing())))
}
}

impl Quote for Term {
impl Quote for Ident {
fn quote(self) -> TokenStream {
quote!(::Term::new((quote self.sym.as_str()), (quote self.span())))
quote!(::Ident::new((quote self.sym.as_str()), (quote self.span())))
}
}

@@ -195,14 +196,32 @@ impl Quote for Span {

macro_rules! literals {
($($i:ident),*; $($raw:ident),*) => {
pub struct SpannedSymbol {
sym: Symbol,
span: Span,
}

impl SpannedSymbol {
pub fn new(string: &str, span: Span) -> SpannedSymbol {
SpannedSymbol { sym: Symbol::intern(string), span }
}
}

impl Quote for SpannedSymbol {
fn quote(self) -> TokenStream {
quote!(::__internal::SpannedSymbol::new((quote self.sym.as_str()),
(quote self.span)))
}
}

pub enum LiteralKind {
$($i,)*
$($raw(u16),)*
}

impl LiteralKind {
pub fn with_contents_and_suffix(self, contents: Term, suffix: Option<Term>)
-> Literal {
pub fn with_contents_and_suffix(self, contents: SpannedSymbol,
suffix: Option<SpannedSymbol>) -> Literal {
let sym = contents.sym;
let suffix = suffix.map(|t| t.sym);
match self {
Expand All @@ -225,13 +244,14 @@ macro_rules! literals {
}

impl Literal {
fn kind_contents_and_suffix(self) -> (LiteralKind, Term, Option<Term>) {
fn kind_contents_and_suffix(self) -> (LiteralKind, SpannedSymbol, Option<SpannedSymbol>)
{
let (kind, contents) = match self.lit {
$(token::Lit::$i(contents) => (LiteralKind::$i, contents),)*
$(token::Lit::$raw(contents, n) => (LiteralKind::$raw(n), contents),)*
};
let suffix = self.suffix.map(|sym| Term::new(&sym.as_str(), self.span()));
(kind, Term::new(&contents.as_str(), self.span()), suffix)
let suffix = self.suffix.map(|sym| SpannedSymbol::new(&sym.as_str(), self.span()));
(kind, SpannedSymbol::new(&contents.as_str(), self.span()), suffix)
}
}

1 change: 1 addition & 0 deletions src/librustc/ich/impls_syntax.rs
@@ -314,6 +314,7 @@ fn hash_token<'a, 'gcx, W: StableHasherResult>(
token::Token::Pound |
token::Token::Dollar |
token::Token::Question |
token::Token::SingleQuote |
token::Token::Whitespace |
token::Token::Comment |
token::Token::Eof => {}
2 changes: 1 addition & 1 deletion src/librustdoc/html/highlight.rs
@@ -353,7 +353,7 @@ impl<'a> Classifier<'a> {
token::Lifetime(..) => Class::Lifetime,

token::Eof | token::Interpolated(..) |
token::Tilde | token::At | token::DotEq => Class::None,
token::Tilde | token::At | token::DotEq | token::SingleQuote => Class::None,
};

// Anything that didn't return above is the simple case where we the
1 change: 1 addition & 0 deletions src/libsyntax/ext/quote.rs
@@ -711,6 +711,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
token::Pound => "Pound",
token::Dollar => "Dollar",
token::Question => "Question",
token::SingleQuote => "SingleQuote",
token::Eof => "Eof",

token::Whitespace | token::Comment | token::Shebang(_) => {
6 changes: 6 additions & 0 deletions src/libsyntax/parse/lexer/mod.rs
@@ -1770,6 +1770,12 @@ fn ident_continue(c: Option<char>) -> bool {
(c > '\x7f' && c.is_xid_continue())
}

// The string is a valid identifier or a lifetime identifier.
pub fn is_valid_ident(s: &str) -> bool {
let mut chars = s.chars();
ident_start(chars.next()) && chars.all(|ch| ident_continue(Some(ch)))
}

#[cfg(test)]
mod tests {
use super::*;
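A quick sketch of what the new lexer helper accepts (my own examples, not from the patch), assuming the XID-based `ident_start`/`ident_continue` rules above:

```rust
// Hypothetical checks illustrating is_valid_ident's intended behaviour,
// written as if inside the `tests` module above.
#[test]
fn valid_ident_examples() {
    assert!(is_valid_ident("foo_bar"));
    assert!(is_valid_ident("_x"));
    assert!(!is_valid_ident("123abc")); // must start with an identifier character
    assert!(!is_valid_ident(""));       // the empty string has no starting character
}
```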
6 changes: 6 additions & 0 deletions src/libsyntax/parse/token.rs
@@ -210,6 +210,8 @@ pub enum Token {
Pound,
Dollar,
Question,
/// Used by proc macros for representing lifetimes, not generated by lexer right now.
SingleQuote,
/// An opening delimiter, eg. `{`
OpenDelim(DelimToken),
/// A closing delimiter, eg. `}`
@@ -513,6 +515,10 @@ impl Token {
Colon => ModSep,
_ => return None,
},
SingleQuote => match joint {
Ident(ident, false) => Lifetime(ident),
_ => return None,
},

Le | EqEq | Ne | Ge | AndAnd | OrOr | Tilde | BinOpEq(..) | At | DotDotDot | DotEq |
DotDotEq | Comma | Semi | ModSep | RArrow | LArrow | FatArrow | Pound | Dollar |
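For context, an illustration of mine (not code from the patch): with `SingleQuote` available, a proc macro represents a lifetime as a joint `'` punct immediately followed by an ident, and the glue arm above recombines the pair into a single `Lifetime` token.

```rust
// Hypothetical sketch: emitting the lifetime `'a` from a proc macro under
// API 1.2 -- a Joint '\'' Punct glued to the following Ident.
use proc_macro::{Ident, Punct, Spacing, Span, TokenStream, TokenTree};

fn lifetime_a() -> TokenStream {
    vec![
        TokenTree::from(Punct::new('\'', Spacing::Joint)),
        TokenTree::from(Ident::new("a", Span::call_site())),
    ]
    .into_iter()
    .collect()
}
```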
1 change: 1 addition & 0 deletions src/libsyntax/print/pprust.rs
@@ -224,6 +224,7 @@ pub fn token_to_string(tok: &Token) -> String {
token::Pound => "#".to_string(),
token::Dollar => "$".to_string(),
token::Question => "?".to_string(),
token::SingleQuote => "'".to_string(),

/* Literals */
token::Literal(lit, suf) => {
@@ -53,7 +53,7 @@ pub fn bar(attr: TokenStream, input: TokenStream) -> TokenStream {

fn assert_inline(slice: &mut &[TokenTree]) {
match &slice[0] {
TokenTree::Op(tt) => assert_eq!(tt.op(), '#'),
TokenTree::Punct(tt) => assert_eq!(tt.as_char(), '#'),
_ => panic!("expected '#' char"),
}
match &slice[1] {
@@ -65,8 +65,8 @@

fn assert_doc(slice: &mut &[TokenTree]) {
match &slice[0] {
TokenTree::Op(tt) => {
assert_eq!(tt.op(), '#');
TokenTree::Punct(tt) => {
assert_eq!(tt.as_char(), '#');
assert_eq!(tt.spacing(), Spacing::Alone);
}
_ => panic!("expected #"),
@@ -86,12 +86,12 @@ fn assert_doc(slice: &mut &[TokenTree]) {
}

match &tokens[0] {
TokenTree::Term(tt) => assert_eq!("doc", &*tt.to_string()),
TokenTree::Ident(tt) => assert_eq!("doc", &*tt.to_string()),
_ => panic!("expected `doc`"),
}
match &tokens[1] {
TokenTree::Op(tt) => {
assert_eq!(tt.op(), '=');
TokenTree::Punct(tt) => {
assert_eq!(tt.as_char(), '=');
assert_eq!(tt.spacing(), Spacing::Alone);
}
_ => panic!("expected equals"),
@@ -106,7 +106,7 @@

fn assert_invoc(slice: &mut &[TokenTree]) {
match &slice[0] {
TokenTree::Op(tt) => assert_eq!(tt.op(), '#'),
TokenTree::Punct(tt) => assert_eq!(tt.as_char(), '#'),
_ => panic!("expected '#' char"),
}
match &slice[1] {
@@ -118,11 +118,11 @@

fn assert_foo(slice: &mut &[TokenTree]) {
match &slice[0] {
TokenTree::Term(tt) => assert_eq!(&*tt.to_string(), "fn"),
TokenTree::Ident(tt) => assert_eq!(&*tt.to_string(), "fn"),
_ => panic!("expected fn"),
}
match &slice[1] {
TokenTree::Term(tt) => assert_eq!(&*tt.to_string(), "foo"),
TokenTree::Ident(tt) => assert_eq!(&*tt.to_string(), "foo"),
_ => panic!("expected foo"),
}
match &slice[2] {
@@ -148,8 +148,8 @@ fn fold_tree(input: TokenTree) -> TokenTree {
TokenTree::Group(b) => {
TokenTree::Group(Group::new(b.delimiter(), fold_stream(b.stream())))
}
TokenTree::Op(b) => TokenTree::Op(b),
TokenTree::Term(a) => TokenTree::Term(a),
TokenTree::Punct(b) => TokenTree::Punct(b),
TokenTree::Ident(a) => TokenTree::Ident(a),
TokenTree::Literal(a) => {
if a.to_string() != "\"foo\"" {
TokenTree::Literal(a)
@@ -11,7 +11,6 @@
// force-host
// no-prefer-dynamic

#![feature(proc_macro, proc_macro_lib)]
#![crate_type = "proc-macro"]

extern crate proc_macro;
2 changes: 0 additions & 2 deletions src/test/compile-fail-fulldeps/proc-macro/issue-38586.rs
@@ -11,8 +11,6 @@
// aux-build:issue_38586.rs
// ignore-stage1

#![feature(proc_macro)]

#[macro_use]
extern crate issue_38586;

@@ -11,7 +11,7 @@
// aux-build:bang_proc_macro2.rs
// ignore-stage1

#![feature(proc_macro, proc_macro_non_items)]
#![feature(use_extern_macros, proc_macro_non_items)]
#![allow(unused_macros)]

extern crate bang_proc_macro2;
@@ -10,7 +10,7 @@

// aux-build:bang_proc_macro.rs

#![feature(proc_macro, proc_macro_non_items)]
#![feature(proc_macro_non_items)]

#[macro_use]
extern crate bang_proc_macro;
@@ -10,7 +10,7 @@

// aux-build:proc-macro-gates.rs

#![feature(proc_macro, stmt_expr_attributes)]
#![feature(use_extern_macros, stmt_expr_attributes)]

extern crate proc_macro_gates as foo;

2 changes: 1 addition & 1 deletion src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
@@ -33,7 +33,7 @@ pub fn cond(input: TokenStream) -> TokenStream {
panic!("Invalid macro usage in cond: {}", cond);
}
let is_else = match test {
TokenTree::Term(word) => &*word.to_string() == "else",
TokenTree::Ident(ref word) => &*word.to_string() == "else",
_ => false,
};
conds.push(if is_else || input.peek().is_none() {
2 changes: 1 addition & 1 deletion src/test/run-pass-fulldeps/auxiliary/hello_macro.rs
@@ -11,7 +11,7 @@
// no-prefer-dynamic

#![crate_type = "proc-macro"]
#![feature(proc_macro, proc_macro_lib, proc_macro_non_items)]
#![feature(proc_macro, proc_macro_non_items)]

extern crate proc_macro;

2 changes: 1 addition & 1 deletion src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs
@@ -11,7 +11,7 @@
// no-prefer-dynamic

#![crate_type = "proc-macro"]
#![feature(proc_macro, proc_macro_lib, proc_macro_non_items)]
#![feature(proc_macro, proc_macro_non_items)]

extern crate proc_macro;

2 changes: 1 addition & 1 deletion src/test/run-pass-fulldeps/macro-quote-cond.rs
@@ -11,7 +11,7 @@
// aux-build:cond_plugin.rs
// ignore-stage1

#![feature(proc_macro, proc_macro_non_items)]
#![feature(use_extern_macros, proc_macro_non_items)]

extern crate cond_plugin;

2 changes: 1 addition & 1 deletion src/test/run-pass-fulldeps/macro-quote-test.rs
@@ -13,7 +13,7 @@
// aux-build:hello_macro.rs
// ignore-stage1

#![feature(proc_macro, proc_macro_non_items)]
#![feature(use_extern_macros, proc_macro_non_items)]

extern crate hello_macro;

@@ -28,7 +28,7 @@ fn count_compound_ops_helper(input: TokenStream) -> u32 {
let mut count = 0;
for token in input {
match &token {
TokenTree::Op(tt) if tt.spacing() == Spacing::Alone => {
TokenTree::Punct(tt) if tt.spacing() == Spacing::Alone => {
count += 1;
}
TokenTree::Group(tt) => {
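One last illustration of mine, not part of the patch, to explain the test above: a compound operator such as `+=` reaches a proc macro as two `Punct` trees, `+` with `Spacing::Joint` followed by `=` with `Spacing::Alone`, so counting the `Alone` puncts counts whole operators rather than characters.

```rust
// Hypothetical sketch: how `+=` decomposes into Punct trees under API 1.2.
use proc_macro::{Punct, Spacing, TokenStream, TokenTree};

fn plus_eq() -> TokenStream {
    vec![
        TokenTree::from(Punct::new('+', Spacing::Joint)), // joined to the next punct
        TokenTree::from(Punct::new('=', Spacing::Alone)), // ends the operator
    ]
    .into_iter()
    .collect()
}
```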