Commit 349c9ee

Rollup merge of rust-lang#57486 - nnethercote:simplify-TokenStream-more, r=petrochenkov

Simplify `TokenStream` some more

These commits simplify `TokenStream`, remove `ThinTokenStream`, and avoid some clones. The end result is simpler code and a slight perf win on some benchmarks.

r? @petrochenkov

2 parents c87144f + 7285724 commit 349c9ee
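
The diffs below mostly swap `tts.stream()` calls for direct uses of the `TokenStream` now stored in `TokenTree::Delimited` and `ast::Mac_`. As a rough, standalone sketch of why that works (not the actual libsyntax code: the type bodies below are simplified assumptions, only the names `TokenStream`, `TreeAndJoint`, `IsJoint`, and `TokenStream::new` are modelled on the diff), a token stream that is just a reference-counted list of trees is already cheap to clone and store, so a separate `ThinTokenStream` wrapper and the conversions it forced are unnecessary:

// A standalone sketch, not the real libsyntax types: the shapes of
// `TokenStream`, `TokenTree`, `IsJoint`, and `TreeAndJoint` here are
// simplified assumptions, kept only close enough to show the idea.

use std::rc::Rc;

#[derive(Clone, Debug)]
enum TokenTree {
    // A single token, reduced to a string for this sketch.
    Token(String),
    // A delimited group holds a `TokenStream` directly; there is no
    // `ThinTokenStream` wrapper to unwrap first.
    Delimited(char, TokenStream),
}

#[allow(dead_code)]
#[derive(Clone, Copy, Debug)]
enum IsJoint {
    Joint,
    NonJoint,
}

type TreeAndJoint = (TokenTree, IsJoint);

// Cloning only bumps a reference count, so the stream can be stored and
// passed around by value wherever a "thin" form used to be required.
#[derive(Clone, Debug)]
struct TokenStream(Rc<Vec<TreeAndJoint>>);

impl TokenStream {
    fn new(trees: Vec<TreeAndJoint>) -> Self {
        TokenStream(Rc::new(trees))
    }

    fn trees(&self) -> impl Iterator<Item = &TokenTree> {
        self.0.iter().map(|(tree, _)| tree)
    }
}

fn main() {
    let inner = TokenStream::new(vec![(TokenTree::Token("a".to_string()), IsJoint::NonJoint)]);
    let delimited = TokenTree::Delimited('(', inner);
    if let TokenTree::Delimited(_, ref tts) = delimited {
        // Before this change, this would have been `tts.stream().trees()`.
        for tree in tts.trees() {
            println!("{:?}", tree);
        }
    }
}

The real representation in `src/libsyntax/tokenstream.rs` is more involved, but the design point is the same: once cloning a `TokenStream` is cheap, callers like those in the diffs below can take the stream directly instead of converting through `.stream()`.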

12 files changed: +79 -169 lines

src/librustc/ich/impls_syntax.rs (+1 -1)

@@ -258,7 +258,7 @@ for tokenstream::TokenTree {
             tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
                 std_hash::Hash::hash(&delim, hasher);
-                for sub_tt in tts.stream().trees() {
+                for sub_tt in tts.trees() {
                     sub_tt.hash_stable(hcx, hasher);
                 }
             }

src/librustc_lint/builtin.rs (+1 -1)

@@ -1474,7 +1474,7 @@ impl KeywordIdents {
                     _ => {},
                 }
                 TokenTree::Delimited(_, _, tts) => {
-                    self.check_tokens(cx, tts.stream())
+                    self.check_tokens(cx, tts)
                 },
             }
         }

src/libsyntax/ast.rs (+4 -4)

@@ -15,7 +15,7 @@ use rustc_target::spec::abi::Abi;
 use source_map::{dummy_spanned, respan, Spanned};
 use symbol::{keywords, Symbol};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{ThinTokenStream, TokenStream};
+use tokenstream::TokenStream;
 use ThinVec;

 use rustc_data_structures::fx::FxHashSet;

@@ -1216,7 +1216,7 @@ pub type Mac = Spanned<Mac_>;
 pub struct Mac_ {
     pub path: Path,
     pub delim: MacDelimiter,
-    pub tts: ThinTokenStream,
+    pub tts: TokenStream,
 }

 #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]

@@ -1228,13 +1228,13 @@ pub enum MacDelimiter {

 impl Mac_ {
     pub fn stream(&self) -> TokenStream {
-        self.tts.stream()
+        self.tts.clone()
     }
 }

 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct MacroDef {
-    pub tokens: ThinTokenStream,
+    pub tokens: TokenStream,
     pub legacy: bool,
 }

src/libsyntax/attr/mod.rs (+1 -1)

@@ -570,7 +570,7 @@ impl MetaItemKind {
             }
             Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => {
                 tokens.next();
-                tts.stream()
+                tts.clone()
             }
             _ => return Some(MetaItemKind::Word),
         };

src/libsyntax/ext/quote.rs (+1 -1)

@@ -748,7 +748,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
         },
         TokenTree::Delimited(span, delim, ref tts) => {
             let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false);
-            stmts.extend(statements_mk_tts(cx, tts.stream()));
+            stmts.extend(statements_mk_tts(cx, tts.clone()));
             stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false));
             stmts
         }

src/libsyntax/fold.rs (+1 -1)

@@ -598,7 +598,7 @@ pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
         TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
             DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
             delim,
-            fld.fold_tts(tts.stream()).into(),
+            fld.fold_tts(tts).into(),
         ),
     }
 }

src/libsyntax/parse/mod.rs (+3 -3)

@@ -817,7 +817,7 @@ mod tests {
                     )
                     if name_macro_rules.name == "macro_rules"
                     && name_zip.name == "zip" => {
-                        let tts = &macro_tts.stream().trees().collect::<Vec<_>>();
+                        let tts = &macro_tts.trees().collect::<Vec<_>>();
                         match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                             (
                                 3,

@@ -826,7 +826,7 @@
                                 Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                             )
                             if macro_delim == token::Paren => {
-                                let tts = &first_tts.stream().trees().collect::<Vec<_>>();
+                                let tts = &first_tts.trees().collect::<Vec<_>>();
                                 match (tts.len(), tts.get(0), tts.get(1)) {
                                     (
                                         2,

@@ -836,7 +836,7 @@
                                     if first_delim == token::Paren && ident.name == "a" => {},
                                     _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                                 }
-                                let tts = &second_tts.stream().trees().collect::<Vec<_>>();
+                                let tts = &second_tts.trees().collect::<Vec<_>>();
                                 match (tts.len(), tts.get(0), tts.get(1)) {
                                     (
                                         2,

src/libsyntax/parse/parser.rs (+11 -12)

@@ -46,7 +46,7 @@ use print::pprust;
 use ptr::P;
 use parse::PResult;
 use ThinVec;
-use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
 use symbol::{Symbol, keywords};

 use std::borrow::Cow;

@@ -280,17 +280,17 @@ struct TokenCursorFrame {
 /// on the parser.
 #[derive(Clone)]
 enum LastToken {
-    Collecting(Vec<TokenStream>),
-    Was(Option<TokenStream>),
+    Collecting(Vec<TreeAndJoint>),
+    Was(Option<TreeAndJoint>),
 }

 impl TokenCursorFrame {
-    fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
+    fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
         TokenCursorFrame {
             delim: delim,
             span: sp,
             open_delim: delim == token::NoDelim,
-            tree_cursor: tts.stream().into_trees(),
+            tree_cursor: tts.clone().into_trees(),
             close_delim: delim == token::NoDelim,
             last_token: LastToken::Was(None),
         }

@@ -2330,7 +2330,7 @@ impl<'a> Parser<'a> {
         })
     }

-    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> {
+    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
         let delim = match self.token {
             token::OpenDelim(delim) => delim,
             _ => {

@@ -2350,7 +2350,7 @@ impl<'a> Parser<'a> {
             token::Brace => MacDelimiter::Brace,
             token::NoDelim => self.bug("unexpected no delimiter"),
         };
-        Ok((delim, tts.stream().into()))
+        Ok((delim, tts.into()))
     }

     /// At the bottom (top?) of the precedence hierarchy,

@@ -4641,7 +4641,7 @@ impl<'a> Parser<'a> {
         let ident = self.parse_ident()?;
         let tokens = if self.check(&token::OpenDelim(token::Brace)) {
             match self.parse_token_tree() {
-                TokenTree::Delimited(_, _, tts) => tts.stream(),
+                TokenTree::Delimited(_, _, tts) => tts,
                 _ => unreachable!(),
             }
         } else if self.check(&token::OpenDelim(token::Paren)) {

@@ -7757,7 +7757,7 @@ impl<'a> Parser<'a> {
             &mut self.token_cursor.stack[prev].last_token
         };

-        // Pull our the toekns that we've collected from the call to `f` above
+        // Pull out the tokens that we've collected from the call to `f` above.
         let mut collected_tokens = match *last_token {
             LastToken::Collecting(ref mut v) => mem::replace(v, Vec::new()),
             LastToken::Was(_) => panic!("our vector went away?"),

@@ -7776,10 +7776,9 @@ impl<'a> Parser<'a> {
         // call. In that case we need to record all the tokens we collected in
         // our parent list as well. To do that we push a clone of our stream
         // onto the previous list.
-        let stream = collected_tokens.into_iter().collect::<TokenStream>();
         match prev_collecting {
             Some(mut list) => {
-                list.push(stream.clone());
+                list.extend(collected_tokens.iter().cloned());
                 list.extend(extra_token);
                 *last_token = LastToken::Collecting(list);
             }

@@ -7788,7 +7787,7 @@ impl<'a> Parser<'a> {
             }
         }

-        Ok((ret?, stream))
+        Ok((ret?, TokenStream::new(collected_tokens)))
     }

     pub fn parse_item(&mut self) -> PResult<'a, Option<P<Item>>> {

src/libsyntax/print/pprust.rs (+1 -1)

@@ -807,7 +807,7 @@ pub trait PrintState<'a> {
             TokenTree::Delimited(_, delim, tts) => {
                 self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
                 self.writer().space()?;
-                self.print_tts(tts.stream())?;
+                self.print_tts(tts)?;
                 self.writer().space()?;
                 self.writer().word(token_to_string(&token::CloseDelim(delim)))
             },
