diff --git a/RELEASES.txt b/RELEASES.txt index e265ae77aa8a4..14b74e4ac7914 100644 --- a/RELEASES.txt +++ b/RELEASES.txt @@ -7,7 +7,7 @@ Version 0.5 (December 2012) * Removed `<-` move operator * Completed the transition from the `#fmt` extension syntax to `fmt!` * Removed old fixed length vector syntax - `[T]/N` - * New token-based quasi-quoter, `quote!` + * New token-based quasi-quoters, `quote_tokens!`, `quote_expr!`, etc. * Macros may now expand to items and statements * `a.b()` is always parsed as a method call, never as a field projection * `Eq` and `IterBytes` implementations can be automatically generated diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index 630ba3b8749ba..312c6cc16f7a5 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -123,8 +123,8 @@ fn syntax_expander_table() -> HashMap<~str, syntax_extension> { ~"quote_tokens", builtin_normal_tt(ext::quote::expand_quote_tokens)); syntax_expanders.insert(~"quote_expr", builtin_normal_tt(ext::quote::expand_quote_expr)); - syntax_expanders.insert(~"quote_type", - builtin_normal_tt(ext::quote::expand_quote_type)); + syntax_expanders.insert(~"quote_ty", + builtin_normal_tt(ext::quote::expand_quote_ty)); syntax_expanders.insert(~"quote_item", builtin_normal_tt(ext::quote::expand_quote_item)); syntax_expanders.insert(~"quote_pat", @@ -163,6 +163,7 @@ trait ext_ctxt { fn codemap() -> @CodeMap; fn parse_sess() -> parse::parse_sess; fn cfg() -> ast::crate_cfg; + fn call_site() -> span; fn print_backtrace(); fn backtrace() -> Option<@ExpnInfo>; fn mod_push(mod_name: ast::ident); @@ -195,6 +196,12 @@ fn mk_ctxt(parse_sess: parse::parse_sess, fn codemap() -> @CodeMap { self.parse_sess.cm } fn parse_sess() -> parse::parse_sess { self.parse_sess } fn cfg() -> ast::crate_cfg { self.cfg } + fn call_site() -> span { + match self.backtrace { + Some(@ExpandedFrom({call_site: cs, _})) => cs, + None => self.bug(~"missing top span") + } + } fn print_backtrace() { } fn backtrace() -> 
Option<@ExpnInfo> { self.backtrace } fn mod_push(i: ast::ident) { self.mod_path.push(i); } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index 9d74509a02c94..04a59d1fe4162 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -145,6 +145,24 @@ fn mk_glob_use(cx: ext_ctxt, sp: span, vis: ast::private, span: sp} } +fn mk_local(cx: ext_ctxt, sp: span, mutbl: bool, + ident: ast::ident, ex: @ast::expr) -> @ast::stmt { + + let pat : @ast::pat = @{id: cx.next_id(), + node: ast::pat_ident(ast::bind_by_value, + mk_raw_path(sp, ~[ident]), + None), + span: sp}; + let ty : @ast::Ty = @{ id: cx.next_id(), node: ast::ty_infer, span: sp }; + let local : @ast::local = @{node: {is_mutbl: mutbl, + ty: ty, + pat: pat, + init: Some(ex), + id: cx.next_id()}, + span: sp}; + let decl = {node: ast::decl_local(~[local]), span: sp}; + @{ node: ast::stmt_decl(@decl, cx.next_id()), span: sp } +} fn mk_block(cx: ext_ctxt, sp: span, view_items: ~[@ast::view_item], stmts: ~[@ast::stmt], diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 9bbe9568f2614..1890ec36bddd1 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -50,10 +50,11 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, fmt!("%s can only be used as a decorator", *extname)); } Some(normal({expander: exp, span: exp_sp})) => { - let expanded = exp(cx, (*mac).span, args, body); cx.bt_push(ExpandedFrom({call_site: s, callie: {name: *extname, span: exp_sp}})); + let expanded = exp(cx, (*mac).span, args, body); + //keep going, outside-in let fully_expanded = fld.fold_expr(expanded).node; cx.bt_pop(); @@ -90,6 +91,9 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, fmt!("macro undefined: '%s'", *extname)) } Some(normal_tt({expander: exp, span: exp_sp})) => { + cx.bt_push(ExpandedFrom({call_site: s, + callie: {name: *extname, span: exp_sp}})); + let expanded = match exp(cx, (*mac).span, (*tts)) { mr_expr(e) 
=> e, mr_any(expr_maker,_,_) => expr_maker(), @@ -98,8 +102,6 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, *extname)) }; - cx.bt_push(ExpandedFrom({call_site: s, - callie: {name: *extname, span: exp_sp}})); //keep going, outside-in let fully_expanded = fld.fold_expr(expanded).node; cx.bt_pop(); @@ -107,13 +109,14 @@ fn expand_expr(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, (fully_expanded, s) } Some(normal({expander: exp, span: exp_sp})) => { + cx.bt_push(ExpandedFrom({call_site: s, + callie: {name: *extname, span: exp_sp}})); + //convert the new-style invoc for the old-style macro let arg = base::tt_args_to_original_flavor(cx, pth.span, (*tts)); let expanded = exp(cx, (*mac).span, arg, None); - cx.bt_push(ExpandedFrom({call_site: s, - callie: {name: *extname, span: exp_sp}})); //keep going, outside-in let fully_expanded = fld.fold_expr(expanded).node; cx.bt_pop(); @@ -234,7 +237,7 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, ); let extname = cx.parse_sess().interner.get(pth.idents[0]); - let (expanded, ex_span) = match exts.find(*extname) { + let expanded = match exts.find(*extname) { None => cx.span_fatal(pth.span, fmt!("macro undefined: '%s!'", *extname)), @@ -245,7 +248,10 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, given '%s'", *extname, *cx.parse_sess().interner.get(it.ident))); } - (((*expand).expander)(cx, it.span, tts), (*expand).span) + cx.bt_push(ExpandedFrom({call_site: it.span, + callie: {name: *extname, + span: (*expand).span}})); + ((*expand).expander)(cx, it.span, tts) } Some(item_tt(ref expand)) => { if it.ident == parse::token::special_idents::invalid { @@ -253,14 +259,15 @@ fn expand_item_mac(exts: HashMap<~str, syntax_extension>, fmt!("macro %s! 
expects an ident argument", *extname)); } - (((*expand).expander)(cx, it.span, it.ident, tts), (*expand).span) + cx.bt_push(ExpandedFrom({call_site: it.span, + callie: {name: *extname, + span: (*expand).span}})); + ((*expand).expander)(cx, it.span, it.ident, tts) } _ => cx.span_fatal( it.span, fmt!("%s! is not legal in item position", *extname)) }; - cx.bt_push(ExpandedFrom({call_site: it.span, - callie: {name: *extname, span: ex_span}})); let maybe_it = match expanded { mr_item(it) => fld.fold_item(it), mr_expr(_) => cx.span_fatal(pth.span, @@ -296,6 +303,8 @@ fn expand_stmt(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, cx.span_fatal(pth.span, fmt!("macro undefined: '%s'", *extname)), Some(normal_tt({expander: exp, span: exp_sp})) => { + cx.bt_push(ExpandedFrom( + {call_site: sp, callie: {name: *extname, span: exp_sp}})); let expanded = match exp(cx, mac.span, tts) { mr_expr(e) => @{node: stmt_expr(e, cx.next_id()), span: e.span}, @@ -305,8 +314,6 @@ fn expand_stmt(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, fmt!("non-stmt macro in stmt pos: %s", *extname)) }; - cx.bt_push(ExpandedFrom( - {call_site: sp, callie: {name: *extname, span: exp_sp}})); //keep going, outside-in let fully_expanded = fld.fold_stmt(expanded).node; cx.bt_pop(); @@ -315,15 +322,15 @@ fn expand_stmt(exts: HashMap<~str, syntax_extension>, cx: ext_ctxt, } Some(normal({expander: exp, span: exp_sp})) => { + cx.bt_push(ExpandedFrom({call_site: sp, + callie: {name: *extname, + span: exp_sp}})); //convert the new-style invoc for the old-style macro let arg = base::tt_args_to_original_flavor(cx, pth.span, tts); let exp_expr = exp(cx, mac.span, arg, None); let expanded = @{node: stmt_expr(exp_expr, cx.next_id()), span: exp_expr.span}; - cx.bt_push(ExpandedFrom({call_site: sp, - callie: {name: *extname, - span: exp_sp}})); //keep going, outside-in let fully_expanded = fld.fold_stmt(expanded).node; cx.bt_pop(); diff --git a/src/libsyntax/ext/pipes/ast_builder.rs 
b/src/libsyntax/ext/pipes/ast_builder.rs index 7293438ca0942..7e5a267c00843 100644 --- a/src/libsyntax/ext/pipes/ast_builder.rs +++ b/src/libsyntax/ext/pipes/ast_builder.rs @@ -17,6 +17,7 @@ use ast::{ident, node_id}; use ast_util::{ident_to_path, respan, dummy_sp}; use codemap::span; use ext::base::mk_ctxt; +use quote::rt::*; // Transitional reexports so qquote can find the paths it is looking for mod syntax { @@ -121,6 +122,7 @@ impl ext_ctxt: ext_ctxt_ast_builder { span: dummy_sp()} } + #[cfg(stage0)] fn stmt_let(ident: ident, e: @ast::expr) -> @ast::stmt { // If the quasiquoter could interpolate idents, this is all // we'd need. @@ -142,6 +144,13 @@ impl ext_ctxt: ext_ctxt_ast_builder { span: dummy_sp()}]), span: dummy_sp()}, self.next_id()), span: dummy_sp()} + } + + #[cfg(stage1)] + #[cfg(stage2)] + fn stmt_let(ident: ident, e: @ast::expr) -> @ast::stmt { + let ext_cx = self; + quote_stmt!( let $ident = $e; ) } fn field_imm(name: ident, e: @ast::expr) -> ast::field { diff --git a/src/libsyntax/ext/pipes/pipec.rs b/src/libsyntax/ext/pipes/pipec.rs index 1c4dd197105b6..822480f0ad65e 100644 --- a/src/libsyntax/ext/pipes/pipec.rs +++ b/src/libsyntax/ext/pipes/pipec.rs @@ -17,12 +17,10 @@ use dvec::DVec; use ast::ident; use ast_util::dummy_sp; use util::interner; -use print::pprust; -use pprust::{item_to_str, ty_to_str}; -use ext::base::{mk_ctxt, ext_ctxt}; +use ext::base::ext_ctxt; use parse::*; use proto::*; - +use quote::rt::*; use ast_builder::{append_types, path}; // Transitional reexports so qquote can find the paths it is looking for @@ -303,6 +301,8 @@ impl state: to_type_decls { } impl protocol: gen_init { + + #[cfg(stage0)] fn gen_init(cx: ext_ctxt) -> @ast::item { let ext_cx = cx; @@ -341,6 +341,47 @@ impl protocol: gen_init { body.to_source(cx))) } + #[cfg(stage1)] + #[cfg(stage2)] + fn gen_init(cx: ext_ctxt) -> @ast::item { + let ext_cx = cx; + + debug!("gen_init"); + let start_state = self.states[0]; + + let body = if !self.is_bounded() { + match 
start_state.dir { + send => quote_expr!( pipes::entangle() ), + recv => { + quote_expr!({ + let (s, c) = pipes::entangle(); + (move c, move s) + }) + } + } + } + else { + let body = self.gen_init_bounded(ext_cx); + match start_state.dir { + send => body, + recv => { + quote_expr!({ + let (s, c) = $body; + (move c, move s) + }) + } + } + }; + + cx.parse_item(fmt!("pub fn init%s() -> (client::%s, server::%s)\ + { use pipes::HasBuffer; %s }", + start_state.ty_params.to_source(cx), + start_state.to_ty(cx).to_source(cx), + start_state.to_ty(cx).to_source(cx), + body.to_source(cx))) + } + + #[cfg(stage0)] fn gen_buffer_init(ext_cx: ext_ctxt) -> @ast::expr { ext_cx.rec(self.states.map_to_vec(|s| { let fty = s.to_ty(ext_cx); @@ -349,10 +390,22 @@ impl protocol: gen_init { })) } + #[cfg(stage1)] + #[cfg(stage2)] + fn gen_buffer_init(ext_cx: ext_ctxt) -> @ast::expr { + ext_cx.rec(self.states.map_to_vec(|s| { + let fty = s.to_ty(ext_cx); + ext_cx.field_imm(ext_cx.ident_of(s.name), + quote_expr!( + pipes::mk_packet::<$fty>() + )) + })) + } + + #[cfg(stage0)] fn gen_init_bounded(ext_cx: ext_ctxt) -> @ast::expr { debug!("gen_init_bounded"); let buffer_fields = self.gen_buffer_init(ext_cx); - let buffer = #ast { ~{header: pipes::BufferHeader(), data: $(buffer_fields)} @@ -376,6 +429,34 @@ impl protocol: gen_init { }} } + #[cfg(stage1)] + #[cfg(stage2)] + fn gen_init_bounded(ext_cx: ext_ctxt) -> @ast::expr { + debug!("gen_init_bounded"); + let buffer_fields = self.gen_buffer_init(ext_cx); + let buffer = quote_expr!( + ~{header: pipes::BufferHeader(), + data: $buffer_fields} + ); + + let entangle_body = ext_cx.block_expr( + ext_cx.block( + self.states.map_to_vec( + |s| ext_cx.parse_stmt( + fmt!("data.%s.set_buffer_(buffer)", + s.name))), + ext_cx.parse_expr( + fmt!("ptr::addr_of(&(data.%s))", + self.states[0].name)))); + + quote_expr!({ + let buffer = $buffer; + do pipes::entangle_buffer(move buffer) |buffer, data| { + $entangle_body + } + }) + } + fn buffer_ty_path(cx: ext_ctxt) 
-> @ast::Ty { let mut params: ~[ast::ty_param] = ~[]; for (copy self.states).each |s| { @@ -391,6 +472,7 @@ impl protocol: gen_init { .add_tys(cx.ty_vars(params))) } + #[cfg(stage0)] fn gen_buffer_type(cx: ext_ctxt) -> @ast::item { let ext_cx = cx; let mut params: ~[ast::ty_param] = ~[]; @@ -405,6 +487,32 @@ impl protocol: gen_init { let fty = #ast[ty] { pipes::Packet<$(ty)> }; + + cx.ty_field_imm(cx.ident_of(s.name), fty) + }; + + cx.item_ty_poly( + cx.ident_of(~"__Buffer"), + dummy_sp(), + cx.ty_rec(fields), + params) + } + + #[cfg(stage1)] + #[cfg(stage2)] + fn gen_buffer_type(cx: ext_ctxt) -> @ast::item { + let ext_cx = cx; + let mut params: ~[ast::ty_param] = ~[]; + let fields = do (copy self.states).map_to_vec |s| { + for s.ty_params.each |tp| { + match params.find(|tpp| tp.ident == tpp.ident) { + None => params.push(*tp), + _ => () + } + } + let ty = s.to_ty(cx); + let fty = quote_ty!( pipes::Packet<$ty> ); + cx.ty_field_imm(cx.ident_of(s.name), fty) }; @@ -420,7 +528,6 @@ impl protocol: gen_init { let mut client_states = ~[]; let mut server_states = ~[]; - // :( for (copy self.states).each |s| { items += s.to_type_decls(cx); @@ -441,95 +548,4 @@ impl protocol: gen_init { cx.item_mod(cx.ident_of(self.name), self.span, items) } -} - -trait to_source { - // Takes a thing and generates a string containing rust code for it. 
- fn to_source(cx: ext_ctxt) -> ~str; -} - -impl @ast::item: to_source { - fn to_source(cx: ext_ctxt) -> ~str { - item_to_str(self, cx.parse_sess().interner) - } -} - -impl ~[@ast::item]: to_source { - fn to_source(cx: ext_ctxt) -> ~str { - str::connect(self.map(|i| i.to_source(cx)), ~"\n\n") - } -} - -impl @ast::Ty: to_source { - fn to_source(cx: ext_ctxt) -> ~str { - ty_to_str(self, cx.parse_sess().interner) - } -} - -impl ~[@ast::Ty]: to_source { - fn to_source(cx: ext_ctxt) -> ~str { - str::connect(self.map(|i| i.to_source(cx)), ~", ") - } -} - -impl ~[ast::ty_param]: to_source { - fn to_source(cx: ext_ctxt) -> ~str { - pprust::typarams_to_str(self, cx.parse_sess().interner) - } -} - -impl @ast::expr: to_source { - fn to_source(cx: ext_ctxt) -> ~str { - pprust::expr_to_str(self, cx.parse_sess().interner) } -} - -trait ext_ctxt_parse_utils { - fn parse_item(s: ~str) -> @ast::item; - fn parse_expr(s: ~str) -> @ast::expr; - fn parse_stmt(s: ~str) -> @ast::stmt; - fn parse_tts(s: ~str) -> ~[ast::token_tree]; -} - -impl ext_ctxt: ext_ctxt_parse_utils { - fn parse_item(s: ~str) -> @ast::item { - let res = parse::parse_item_from_source_str( - ~"***protocol expansion***", - @(copy s), - self.cfg(), - ~[], - self.parse_sess()); - match res { - Some(ast) => ast, - None => { - error!("Parse error with ```\n%s\n```", s); - fail - } - } - } - - fn parse_stmt(s: ~str) -> @ast::stmt { - parse::parse_stmt_from_source_str( - ~"***protocol expansion***", - @(copy s), - self.cfg(), - ~[], - self.parse_sess()) - } - - fn parse_expr(s: ~str) -> @ast::expr { - parse::parse_expr_from_source_str( - ~"***protocol expansion***", - @(copy s), - self.cfg(), - self.parse_sess()) - } - - fn parse_tts(s: ~str) -> ~[ast::token_tree] { - parse::parse_tts_from_source_str( - ~"***protocol expansion***", - @(copy s), - self.cfg(), - self.parse_sess()) - } -} diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index b2e651c7e3330..60918121e9547 100644 --- 
a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -19,10 +19,10 @@ use token::*; * * Quasiquoting works via token trees. * -* This is registered as a expression syntax extension called quote! that lifts -* its argument token-tree to an AST representing the construction of the same -* token tree, with ast::tt_nonterminal nodes interpreted as antiquotes -* (splices). +* This is registered as a set of expression syntax extension called quote! +* that lifts its argument token-tree to an AST representing the +* construction of the same token tree, with ast::tt_nonterminal nodes +* interpreted as antiquotes (splices). * */ @@ -32,6 +32,176 @@ pub mod rt { pub use parse::new_parser_from_tts; pub use codemap::BytePos; pub use codemap::span; + + use print::pprust; + use pprust::{item_to_str, ty_to_str}; + + trait ToTokens { + pub fn to_tokens(_cx: ext_ctxt) -> ~[token_tree]; + } + + impl ~[token_tree]: ToTokens { + pub fn to_tokens(_cx: ext_ctxt) -> ~[token_tree] { + copy self + } + } + + /* Should be (when bugs in default methods are fixed): + + trait ToSource : ToTokens { + // Takes a thing and generates a string containing rust code for it. + pub fn to_source(cx: ext_ctxt) -> ~str; + + // If you can make source, you can definitely make tokens. + pub fn to_tokens(cx: ext_ctxt) -> ~[token_tree] { + cx.parse_tts(self.to_source(cx)) + } + } + + */ + + trait ToSource { + // Takes a thing and generates a string containing rust code for it. 
+ pub fn to_source(cx: ext_ctxt) -> ~str; + } + + impl ast::ident: ToSource { + fn to_source(cx: ext_ctxt) -> ~str { + copy *cx.parse_sess().interner.get(self) + } + } + + impl @ast::item: ToSource { + fn to_source(cx: ext_ctxt) -> ~str { + item_to_str(self, cx.parse_sess().interner) + } + } + + impl ~[@ast::item]: ToSource { + fn to_source(cx: ext_ctxt) -> ~str { + str::connect(self.map(|i| i.to_source(cx)), ~"\n\n") + } + } + + impl @ast::Ty: ToSource { + fn to_source(cx: ext_ctxt) -> ~str { + ty_to_str(self, cx.parse_sess().interner) + } + } + + impl ~[@ast::Ty]: ToSource { + fn to_source(cx: ext_ctxt) -> ~str { + str::connect(self.map(|i| i.to_source(cx)), ~", ") + } + } + + impl ~[ast::ty_param]: ToSource { + fn to_source(cx: ext_ctxt) -> ~str { + pprust::typarams_to_str(self, cx.parse_sess().interner) + } + } + + impl @ast::expr: ToSource { + fn to_source(cx: ext_ctxt) -> ~str { + pprust::expr_to_str(self, cx.parse_sess().interner) + } + } + + // Alas ... we write these out instead. All redundant. 
+ + impl ast::ident: ToTokens { + fn to_tokens(cx: ext_ctxt) -> ~[token_tree] { + cx.parse_tts(self.to_source(cx)) + } + } + + impl @ast::item: ToTokens { + fn to_tokens(cx: ext_ctxt) -> ~[token_tree] { + cx.parse_tts(self.to_source(cx)) + } + } + + impl ~[@ast::item]: ToTokens { + fn to_tokens(cx: ext_ctxt) -> ~[token_tree] { + cx.parse_tts(self.to_source(cx)) + } + } + + impl @ast::Ty: ToTokens { + fn to_tokens(cx: ext_ctxt) -> ~[token_tree] { + cx.parse_tts(self.to_source(cx)) + } + } + + impl ~[@ast::Ty]: ToTokens { + fn to_tokens(cx: ext_ctxt) -> ~[token_tree] { + cx.parse_tts(self.to_source(cx)) + } + } + + impl ~[ast::ty_param]: ToTokens { + fn to_tokens(cx: ext_ctxt) -> ~[token_tree] { + cx.parse_tts(self.to_source(cx)) + } + } + + impl @ast::expr: ToTokens { + fn to_tokens(cx: ext_ctxt) -> ~[token_tree] { + cx.parse_tts(self.to_source(cx)) + } + } + + trait ExtParseUtils { + fn parse_item(s: ~str) -> @ast::item; + fn parse_expr(s: ~str) -> @ast::expr; + fn parse_stmt(s: ~str) -> @ast::stmt; + fn parse_tts(s: ~str) -> ~[ast::token_tree]; + } + + impl ext_ctxt: ExtParseUtils { + + fn parse_item(s: ~str) -> @ast::item { + let res = parse::parse_item_from_source_str( + ~"", + @(copy s), + self.cfg(), + ~[], + self.parse_sess()); + match res { + Some(ast) => ast, + None => { + error!("Parse error with ```\n%s\n```", s); + fail + } + } + } + + fn parse_stmt(s: ~str) -> @ast::stmt { + parse::parse_stmt_from_source_str( + ~"", + @(copy s), + self.cfg(), + ~[], + self.parse_sess()) + } + + fn parse_expr(s: ~str) -> @ast::expr { + parse::parse_expr_from_source_str( + ~"", + @(copy s), + self.cfg(), + self.parse_sess()) + } + + fn parse_tts(s: ~str) -> ~[ast::token_tree] { + parse::parse_tts_from_source_str( + ~"", + @(copy s), + self.cfg(), + self.parse_sess()) + } + } + } pub fn expand_quote_tokens(cx: ext_ctxt, @@ -62,11 +232,11 @@ pub fn expand_quote_pat(cx: ext_ctxt, ~[e_refutable], tts)) } -pub fn expand_quote_type(cx: ext_ctxt, - sp: span, - tts: 
~[ast::token_tree]) -> base::mac_result { +pub fn expand_quote_ty(cx: ext_ctxt, + sp: span, + tts: ~[ast::token_tree]) -> base::mac_result { let e_param_colons = build::mk_lit(cx, sp, ast::lit_bool(false)); - base::mr_expr(expand_parse_call(cx, sp, ~"parse_type", + base::mr_expr(expand_parse_call(cx, sp, ~"parse_ty", ~[e_param_colons], tts)) } @@ -86,76 +256,13 @@ fn id_ext(cx: ext_ctxt, str: ~str) -> ast::ident { cx.parse_sess().interner.intern(@str) } -fn mk_option_span(cx: ext_ctxt, - qsp: span, - sp: Option) -> @ast::expr { - match sp { - None => build::mk_path(cx, qsp, ids_ext(cx, ~[~"None"])), - Some(sp) => { - build::mk_call(cx, qsp, - ids_ext(cx, ~[~"Some"]), - ~[build::mk_managed(cx, qsp, - mk_span(cx, qsp, sp))]) - } - } -} - -fn mk_span(cx: ext_ctxt, qsp: span, sp: span) -> @ast::expr { - - let e_expn_info = match sp.expn_info { - None => build::mk_path(cx, qsp, ids_ext(cx, ~[~"None"])), - Some(@codemap::ExpandedFrom(ref cr)) => { - let e_callee = - build::mk_rec_e( - cx, qsp, - ~[{ident: id_ext(cx, ~"name"), - ex: build::mk_uniq_str(cx, qsp, - (*cr).callie.name)}, - {ident: id_ext(cx, ~"span"), - ex: mk_option_span(cx, qsp, (*cr).callie.span)}]); - - let e_expn_info_ = - build::mk_call( - cx, qsp, - ids_ext(cx, ~[~"expanded_from"]), - ~[build::mk_rec_e( - cx, qsp, - ~[{ident: id_ext(cx, ~"call_site"), - ex: mk_span(cx, qsp, (*cr).call_site)}, - {ident: id_ext(cx, ~"callie"), - ex: e_callee}])]); - - build::mk_call(cx, qsp, - ids_ext(cx, ~[~"Some"]), - ~[build::mk_managed(cx, qsp, e_expn_info_)]) - } - }; - - let span_path = ids_ext(cx, ~[~"span"]); - - build::mk_struct_e(cx, qsp, - span_path, - ~[{ident: id_ext(cx, ~"lo"), - ex: mk_bytepos(cx, qsp, sp.lo) }, - - {ident: id_ext(cx, ~"hi"), - ex: mk_bytepos(cx, qsp, sp.hi) }, - - {ident: id_ext(cx, ~"expn_info"), - ex: e_expn_info}]) -} - // Lift an ident to the expr that evaluates to that ident. 
-// -// NB: this identifies the interner used when re-parsing the token tree -// with the interner used during initial parse. This is _wrong_ and we -// should be emitting a &str here and the token type should be ok with -// &static/str or &session/str. Longer-term issue. fn mk_ident(cx: ext_ctxt, sp: span, ident: ast::ident) -> @ast::expr { - build::mk_struct_e(cx, sp, - ids_ext(cx, ~[~"ident"]), - ~[{ident: id_ext(cx, ~"repr"), - ex: build::mk_uint(cx, sp, ident.repr) }]) + let e_meth = build::mk_access(cx, sp, + ids_ext(cx, ~[~"ext_cx"]), + id_ext(cx, ~"ident_of")); + let e_str = build::mk_uniq_str(cx, sp, cx.str_of(ident)); + build::mk_call_(cx, sp, e_meth, ~[e_str]) } fn mk_bytepos(cx: ext_ctxt, sp: span, bpos: BytePos) -> @ast::expr { @@ -325,59 +432,134 @@ fn mk_token(cx: ext_ctxt, sp: span, tok: token::Token) -> @ast::expr { } -fn mk_tt(cx: ext_ctxt, sp: span, tt: &ast::token_tree) -> @ast::expr { +fn mk_tt(cx: ext_ctxt, sp: span, tt: &ast::token_tree) + -> ~[@ast::stmt] { + match *tt { + ast::tt_tok(sp, ref tok) => { + let e_sp = build::mk_path(cx, sp, + ids_ext(cx, ~[~"sp"])); let e_tok = build::mk_call(cx, sp, ids_ext(cx, ~[~"tt_tok"]), - ~[mk_span(cx, sp, sp), - mk_token(cx, sp, (*tok))]); - build::mk_uniq_vec_e(cx, sp, ~[e_tok]) - } + ~[e_sp, mk_token(cx, sp, *tok)]); + let e_push = + build::mk_call_(cx, sp, + build::mk_access(cx, sp, + ids_ext(cx, ~[~"tt"]), + id_ext(cx, ~"push")), + ~[e_tok]); + ~[build::mk_stmt(cx, sp, e_push)] - ast::tt_delim(ref tts) => { - let e_delim = - build::mk_call(cx, sp, - ids_ext(cx, ~[~"tt_delim"]), - ~[mk_tts(cx, sp, (*tts))]); - build::mk_uniq_vec_e(cx, sp, ~[e_delim]) } + ast::tt_delim(ref tts) => mk_tts(cx, sp, *tts), ast::tt_seq(*) => fail ~"tt_seq in quote!", - ast::tt_nonterminal(sp, ident) => - build::mk_copy(cx, sp, build::mk_path(cx, sp, ~[ident])) + ast::tt_nonterminal(sp, ident) => { + + // tt.push_all_move($ident.to_tokens(ext_cx)) + + let e_to_toks = + build::mk_call_(cx, sp, + build::mk_access + (cx, sp, + 
~[ident], + id_ext(cx, ~"to_tokens")), + ~[build::mk_path(cx, sp, + ids_ext(cx, ~[~"ext_cx"]))]); + + let e_push = + build::mk_call_(cx, sp, + build::mk_access + (cx, sp, + ids_ext(cx, ~[~"tt"]), + id_ext(cx, ~"push_all_move")), + ~[e_to_toks]); + + ~[build::mk_stmt(cx, sp, e_push)] + } } } -fn mk_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) -> @ast::expr { - let e_tts = tts.map(|tt| mk_tt(cx, sp, tt)); - build::mk_call(cx, sp, - ids_ext(cx, ~[~"vec", ~"concat"]), - ~[build::mk_slice_vec_e(cx, sp, e_tts)]) +fn mk_tts(cx: ext_ctxt, sp: span, tts: &[ast::token_tree]) + -> ~[@ast::stmt] { + let mut ss = ~[]; + for tts.each |tt| { + ss.push_all_move(mk_tt(cx, sp, tt)); + } + ss } fn expand_tts(cx: ext_ctxt, sp: span, tts: ~[ast::token_tree]) -> @ast::expr { + // NB: It appears that the main parser loses its mind if we consider // $foo as a tt_nonterminal during the main parse, so we have to re-parse // under quote_depth > 0. This is silly and should go away; the _guess_ is // it has to do with transition away from supporting old-style macros, so // try removing it when enough of them are gone. + let p = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts); p.quote_depth += 1u; let tts = p.parse_all_token_trees(); p.abort_if_errors(); // We want to emit a block expression that does a sequence of 'use's to - // import the runtime module, followed by a tt expression. + // import the runtime module, followed by a tt-building expression. + let uses = ~[ build::mk_glob_use(cx, sp, ids_ext(cx, ~[~"syntax", ~"ext", ~"quote", ~"rt"])) ]; - build::mk_block(cx, sp, uses, ~[], Some(mk_tts(cx, sp, tts))) + + // We also bind a single value, sp, to ext_cx.call_site() + // + // This causes every span in a token-tree quote to be attributed to the + // call site of the extension using the quote. 
We can't really do much + better since the source of the quote may well be in a library that + was not even parsed by this compilation run, that the user has no + source code for (eg. in libsyntax, which they're just _using_). + // + // The old quasiquoter had an elaborate mechanism for denoting input + // file locations from which quotes originated; unfortunately this + // relied on feeding the source string of the quote back into the + // compiler (which we don't really want to do) and, in any case, only + // pushed the problem a very small step further back: an error + // resulting from a parse of the resulting quote is still attributed to + // the site the string literal occurred, which was in a source file + // _other_ than the one the user has control over. For example, an + // error in a quote from the protocol compiler, invoked in user code + // using proto! for example, will be attributed to the pipec.rs file in + // libsyntax, which the user might not even have source to (unless they + // happen to have a compiler on hand). Overall, the phase distinction + // just makes quotes "hard to attribute". Possibly this could be fixed + // by recreating some of the original qq machinery in the tt regime + // (pushing fake FileMaps onto the parser to account for original sites + // of quotes, for example) but at this point it seems not likely to be + // worth the hassle.
+ + let e_sp = build::mk_call_(cx, sp, + build::mk_access(cx, sp, + ids_ext(cx, ~[~"ext_cx"]), + id_ext(cx, ~"call_site")), + ~[]); + + let stmt_let_sp = build::mk_local(cx, sp, false, + id_ext(cx, ~"sp"), + e_sp); + + let stmt_let_tt = build::mk_local(cx, sp, true, + id_ext(cx, ~"tt"), + build::mk_uniq_vec_e(cx, sp, ~[])); + + build::mk_block(cx, sp, uses, + ~[stmt_let_sp, + stmt_let_tt] + mk_tts(cx, sp, tts), + Some(build::mk_path(cx, sp, + ids_ext(cx, ~[~"tt"])))) } fn expand_parse_call(cx: ext_ctxt, diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 14ef7c56b76db..c0a01cc23faf4 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -496,6 +496,7 @@ fn print_item(s: ps, &&item: @ast::item) { ast::item_foreign_mod(nmod) => { head(s, visibility_qualified(item.vis, ~"extern")); print_string(s, *s.intr.get(nmod.abi)); + nbsp(s); match nmod.sort { ast::named => { word_nbsp(s, ~"mod"); diff --git a/src/test/run-pass-fulldeps/quote-tokens.rs b/src/test/run-pass-fulldeps/quote-tokens.rs index 2e71d213428d1..d36fc4a37ba41 100644 --- a/src/test/run-pass-fulldeps/quote-tokens.rs +++ b/src/test/run-pass-fulldeps/quote-tokens.rs @@ -8,14 +8,13 @@ fn syntax_extension(ext_cx: @ext_ctxt) { let e_toks : ~[syntax::ast::token_tree] = quote_tokens!(1 + 2); let p_toks : ~[syntax::ast::token_tree] = quote_tokens!((x, 1 .. 4, *)); - let _a: @syntax::ast::expr = quote_expr!(1 + 2); + let a: @syntax::ast::expr = quote_expr!(1 + 2); let _b: Option<@syntax::ast::item> = quote_item!( const foo : int = $e_toks; ); let _c: @syntax::ast::pat = quote_pat!( (x, 1 .. 4, *) ); - let _d: @syntax::ast::stmt = quote_stmt!( let x = $e_toks; ); + let _d: @syntax::ast::stmt = quote_stmt!( let x = $a; ); let _e: @syntax::ast::expr = quote_expr!( match foo { $p_toks => 10 } ); } fn main() { - let _x: ~[syntax::ast::token_tree] = quote_tokens!(a::Foo::foo()); }