Commit debcbf0

Refactor the parser to consume token trees.
1 parent de46b24 commit debcbf0

11 files changed: +59 -213 lines

11 files changed

+59
-213
lines changed
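
The gist of the refactor: a Parser is no longer driven by a boxed lexer::Reader (a StringReader or TtReader); it is constructed directly from a Vec<TokenTree>. A minimal before/after sketch based on the tts_to_parser hunk in src/libsyntax/parse/mod.rs below; it only illustrates the call shapes visible in this commit and is not standalone compilable code:

// Before this commit: wrap the token trees in a TtReader and box it as a Reader.
let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
let mut p = Parser::new(sess, Box::new(trdr), None, false);

// After this commit: hand the token trees to the parser directly.
let mut p = Parser::new(sess, tts, None, false);
p.check_unknown_macro_variable();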

src/librustc/session/config.rs

+2 -1
@@ -25,6 +25,7 @@ use lint;
 use middle::cstore;

 use syntax::ast::{self, IntTy, UintTy};
+use syntax::parse::token;
 use syntax::parse;
 use syntax::symbol::Symbol;
 use syntax::feature_gate::UnstableFeatures;
@@ -1259,7 +1260,7 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> ast::CrateConfig {

         let meta_item = panictry!(parser.parse_meta_item());

-        if !parser.reader.is_eof() {
+        if parser.token != token::Eof {
             early_error(ErrorOutputType::default(), &format!("invalid --cfg argument: {}", s))
         } else if meta_item.is_meta_item_list() {
             let msg =
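
With no reader attached to the parser, callers that used to ask the reader whether it was exhausted now just inspect the parser's current token. A hedged restatement of the pattern above, with descriptive comments (parser and s are the locals of parse_cfgspecs; the error handling is the function's own):

// End of input is now "the current token is Eof", not "the reader is at EOF".
if parser.token != token::Eof {
    // Anything left over after the meta item means the --cfg string was malformed.
    early_error(ErrorOutputType::default(), &format!("invalid --cfg argument: {}", s))
}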

src/librustc_metadata/cstore_impl.rs

+4 -14
@@ -29,7 +29,7 @@ use rustc_back::PanicStrategy;

 use syntax::ast;
 use syntax::attr;
-use syntax::parse::new_parser_from_source_str;
+use syntax::parse::filemap_to_tts;
 use syntax::symbol::Symbol;
 use syntax_pos::{mk_sp, Span};
 use rustc::hir::svh::Svh;
@@ -395,19 +395,9 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore {
         let (name, def) = data.get_macro(id.index);
         let source_name = format!("<{} macros>", name);

-        // NB: Don't use parse_tts_from_source_str because it parses with quote_depth > 0.
-        let mut parser = new_parser_from_source_str(&sess.parse_sess, source_name, def.body);
-
-        let lo = parser.span.lo;
-        let body = match parser.parse_all_token_trees() {
-            Ok(body) => body,
-            Err(mut err) => {
-                err.emit();
-                sess.abort_if_errors();
-                unreachable!();
-            }
-        };
-        let local_span = mk_sp(lo, parser.prev_span.hi);
+        let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
+        let local_span = mk_sp(filemap.start_pos, filemap.end_pos);
+        let body = filemap_to_tts(&sess.parse_sess, filemap);

         // Mark the attrs as used
         let attrs = data.get_item_attrs(id.index);
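
Loading an exported macro's body no longer goes through a full Parser with error recovery. A hedged sketch of the new pattern shown above: the source string is installed as a fresh FileMap, the macro's span is taken from the filemap bounds, and filemap_to_tts lexes it straight into token trees (parse_sess, name, and src are illustrative locals):

// Install the source text as a synthetic file in the codemap.
let filemap = parse_sess.codemap().new_filemap(name, None, src);
// The span of the whole macro body is simply the extent of that filemap.
let span = mk_sp(filemap.start_pos, filemap.end_pos);
// Lex the filemap directly into token trees; no Parser needed.
let body = filemap_to_tts(&parse_sess, filemap);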

src/libsyntax/ext/base.rs

+1 -3
@@ -615,9 +615,7 @@ impl<'a> ExtCtxt<'a> {

     pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
                                -> parser::Parser<'a> {
-        let mut parser = parse::tts_to_parser(self.parse_sess, tts.to_vec());
-        parser.allow_interpolated_tts = false; // FIXME(jseyfried) `quote!` can't handle these yet
-        parser
+        parse::tts_to_parser(self.parse_sess, tts.to_vec())
     }
     pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }

src/libsyntax/ext/tt/macro_parser.rs

+3 -4
@@ -82,7 +82,6 @@ use ast::Ident;
 use syntax_pos::{self, BytePos, mk_sp, Span};
 use codemap::Spanned;
 use errors::FatalError;
-use parse::lexer::*; //resolve bug?
 use parse::{Directory, ParseSess};
 use parse::parser::{PathStyle, Parser};
 use parse::token::{DocComment, MatchNt, SubstNt};
@@ -407,9 +406,9 @@ fn inner_parse_loop(cur_eis: &mut SmallVector<Box<MatcherPos>>,
     Success(())
 }

-pub fn parse(sess: &ParseSess, rdr: TtReader, ms: &[TokenTree], directory: Option<Directory>)
+pub fn parse(sess: &ParseSess, tts: Vec<TokenTree>, ms: &[TokenTree], directory: Option<Directory>)
              -> NamedParseResult {
-    let mut parser = Parser::new(sess, Box::new(rdr), directory, true);
+    let mut parser = Parser::new(sess, tts, directory, true);
     let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), parser.span.lo));
     let mut next_eis = Vec::new(); // or proceed normally

@@ -527,7 +526,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
         "ident" => match p.token {
             token::Ident(sn) => {
                 p.bump();
-                token::NtIdent(Spanned::<Ident>{node: sn, span: p.span})
+                token::NtIdent(Spanned::<Ident>{node: sn, span: p.prev_span})
             }
             _ => {
                 let token_str = pprust::token_to_string(&p.token);
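
The matcher entry point parse now takes the token trees to be matched instead of a TtReader. A hedged sketch of a call site, modeled on the compile function in src/libsyntax/ext/tt/macro_rules.rs later in this commit (def, argument_gram, and the error handling are illustrative):

// Match a macro_rules! body directly against the matcher grammar.
let argument_map = match parse(sess, def.body.clone(), &argument_gram, None) {
    Success(m) => m,
    Failure(_, tok) => {
        // parse_failure_msg renders the unexpected token for the diagnostic.
        panic!("{}", parse_failure_msg(tok))
    }
    // The real code reports a proper diagnostic here; elided in this sketch.
    _ => panic!("error while matching macro invocation"),
};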

src/libsyntax/ext/tt/macro_rules.rs

+13 -7
@@ -16,8 +16,8 @@ use ext::expand::{Expansion, ExpansionKind};
 use ext::tt::macro_parser::{Success, Error, Failure};
 use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
 use ext::tt::macro_parser::{parse, parse_failure_msg};
+use ext::tt::transcribe::new_tt_reader;
 use parse::{Directory, ParseSess};
-use parse::lexer::new_tt_reader;
 use parse::parser::Parser;
 use parse::token::{self, NtTT, Token};
 use parse::token::Token::*;
@@ -113,13 +113,21 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                 _ => cx.span_bug(sp, "malformed macro rhs"),
             };
             // rhs has holes ( `$id` and `$(...)` that need filled)
-            let trncbr =
+            let mut trncbr =
                 new_tt_reader(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
+            let mut tts = Vec::new();
+            loop {
+                let tok = trncbr.real_token();
+                if tok.tok == token::Eof {
+                    break
+                }
+                tts.push(TokenTree::Token(tok.sp, tok.tok));
+            }
             let directory = Directory {
                 path: cx.current_expansion.module.directory.clone(),
                 ownership: cx.current_expansion.directory_ownership,
             };
-            let mut p = Parser::new(cx.parse_sess(), Box::new(trncbr), Some(directory), false);
+            let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false);
             p.root_module_name = cx.current_expansion.module.mod_path.last()
                 .map(|id| (*id.name.as_str()).to_owned());

@@ -187,10 +195,8 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
         })),
     ];

-    // Parse the macro_rules! invocation (`none` is for no interpolations):
-    let arg_reader = new_tt_reader(&sess.span_diagnostic, None, def.body.clone());
-
-    let argument_map = match parse(sess, arg_reader, &argument_gram, None) {
+    // Parse the macro_rules! invocation
+    let argument_map = match parse(sess, def.body.clone(), &argument_gram, None) {
         Success(m) => m,
         Failure(sp, tok) => {
             let s = parse_failure_msg(tok);
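
In generic_extension the TtReader still performs the macro-by-example substitution, but its output is now drained into a Vec<TokenTree> up front and the parser is built over that vector. A compact, hedged restatement of the loop added above, with descriptive comments (trncbr, cx, and directory come from the surrounding function):

let mut tts = Vec::new();
loop {
    // real_token pulls the next substituted token (a TokenAndSpan) out of the transcriber.
    let tok = trncbr.real_token();
    if tok.tok == token::Eof {
        break;  // the transcriber signals the end of the rhs with Eof
    }
    // Re-wrap each token as a TokenTree so the parser can consume it.
    tts.push(TokenTree::Token(tok.sp, tok.tok));
}
// The parser is now constructed from the collected token trees, not from the reader.
let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), false);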

src/libsyntax/ext/tt/transcribe.rs

+7 -4
@@ -10,7 +10,7 @@
 use self::LockstepIterSize::*;

 use ast::Ident;
-use errors::{Handler, DiagnosticBuilder};
+use errors::Handler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use parse::token::{self, MatchNt, SubstNt, Token, NtIdent};
 use parse::lexer::TokenAndSpan;
@@ -44,8 +44,12 @@ pub struct TtReader<'a> {
     /* cached: */
     pub cur_tok: Token,
     pub cur_span: Span,
-    /// Transform doc comments. Only useful in macro invocations
-    pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
+}
+
+impl<'a> TtReader<'a> {
+    pub fn real_token(&mut self) -> TokenAndSpan {
+        tt_next_token(self)
+    }
 }

 /// This can do Macro-By-Example transcription. On the other hand, if
@@ -76,7 +80,6 @@ pub fn new_tt_reader(sp_diag: &Handler,
         /* dummy values, never read: */
         cur_tok: token::Eof,
         cur_span: DUMMY_SP,
-        fatal_errs: Vec::new(),
     };
     tt_next_token(&mut r); /* get cur_tok and cur_span set up */
     r

src/libsyntax/parse/lexer/mod.rs

-74
@@ -12,7 +12,6 @@ use ast::{self, Ident};
 use syntax_pos::{self, BytePos, CharPos, Pos, Span};
 use codemap::CodeMap;
 use errors::{FatalError, DiagnosticBuilder};
-use ext::tt::transcribe::tt_next_token;
 use parse::{token, ParseSess};
 use str::char_at;
 use symbol::{Symbol, keywords};
@@ -23,53 +22,10 @@ use std::char;
 use std::mem::replace;
 use std::rc::Rc;

-pub use ext::tt::transcribe::{TtReader, new_tt_reader};
-
 pub mod comments;
 mod tokentrees;
 mod unicode_chars;

-pub trait Reader {
-    fn is_eof(&self) -> bool;
-    fn try_next_token(&mut self) -> Result<TokenAndSpan, ()>;
-    fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
-        let res = self.try_next_token();
-        self.unwrap_or_abort(res)
-    }
-    /// Report a fatal error with the current span.
-    fn fatal(&self, &str) -> FatalError;
-    /// Report a non-fatal error with the current span.
-    fn err(&self, &str);
-    fn emit_fatal_errors(&mut self);
-    fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan {
-        match res {
-            Ok(tok) => tok,
-            Err(_) => {
-                self.emit_fatal_errors();
-                panic!(FatalError);
-            }
-        }
-    }
-    fn peek(&self) -> TokenAndSpan;
-    /// Get a token the parser cares about.
-    fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> {
-        let mut t = self.try_next_token()?;
-        loop {
-            match t.tok {
-                token::Whitespace | token::Comment | token::Shebang(_) => {
-                    t = self.try_next_token()?;
-                }
-                _ => break,
-            }
-        }
-        Ok(t)
-    }
-    fn real_token(&mut self) -> TokenAndSpan {
-        let res = self.try_real_token();
-        self.unwrap_or_abort(res)
-    }
-}
-
 #[derive(Clone, PartialEq, Eq, Debug)]
 pub struct TokenAndSpan {
     pub tok: token::Token,
@@ -182,36 +138,6 @@ impl<'a> StringReader<'a> {
     }
 }

-impl<'a> Reader for TtReader<'a> {
-    fn is_eof(&self) -> bool {
-        self.peek().tok == token::Eof
-    }
-    fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
-        assert!(self.fatal_errs.is_empty());
-        let r = tt_next_token(self);
-        debug!("TtReader: r={:?}", r);
-        Ok(r)
-    }
-    fn fatal(&self, m: &str) -> FatalError {
-        self.sp_diag.span_fatal(self.cur_span, m)
-    }
-    fn err(&self, m: &str) {
-        self.sp_diag.span_err(self.cur_span, m);
-    }
-    fn emit_fatal_errors(&mut self) {
-        for err in &mut self.fatal_errs {
-            err.emit();
-        }
-        self.fatal_errs.clear();
-    }
-    fn peek(&self) -> TokenAndSpan {
-        TokenAndSpan {
-            tok: self.cur_tok.clone(),
-            sp: self.cur_span,
-        }
-    }
-}
-
 impl<'a> StringReader<'a> {
     /// For comments.rs, which hackily pokes into next_pos and ch
     pub fn new_raw<'b>(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Self {

src/libsyntax/parse/mod.rs

+2 -3
@@ -45,7 +45,7 @@ pub mod obsolete;

 /// Info about a parsing session.
 pub struct ParseSess {
-    pub span_diagnostic: Handler, // better be the same as the one in the reader!
+    pub span_diagnostic: Handler,
     pub unstable_features: UnstableFeatures,
     pub config: CrateConfig,
     /// Used to determine and report recursive mod inclusions
@@ -227,8 +227,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream

 /// Given tts and the ParseSess, produce a parser
 pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenTree>) -> Parser<'a> {
-    let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
-    let mut p = Parser::new(sess, Box::new(trdr), None, false);
+    let mut p = Parser::new(sess, tts, None, false);
     p.check_unknown_macro_variable();
     p
 }
