
Commit c70491b

Remove TokenStream::flattened and InvisibleOrigin::FlattenToken.
They are no longer needed. This does slightly worsen the error message for a single test, but that test contains code that is so badly broken that I'm not worried about it.
1 parent: c886f83

8 files changed: +12 -70 lines
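
For context, the sketch below is a minimal, self-contained model of what the removed `TokenStream::flattened` did (the real implementation appears in the tokenstream.rs diff below): interpolated `NtIdent` tokens became plain identifier tokens, and the lifetime behind an `NtLifetime` token was wrapped in an invisible-delimited group that used the now-removed `FlattenToken` origin. The types here are simplified stand-ins, not the actual `rustc_ast` definitions.

```rust
// Simplified stand-ins for the rustc_ast types; only the shape matters here.
#[derive(Clone, Debug)]
enum InvisibleOrigin {
    ProcMacro,
    // `FlattenToken` used to be a second variant; this commit removes it.
}

#[derive(Clone, Debug)]
enum TokenTree {
    Ident(String),
    Lifetime(String),
    NtIdent(String),    // interpolated identifier
    NtLifetime(String), // interpolated lifetime
    Invisible(InvisibleOrigin, Vec<TokenTree>),
}

// Roughly what the removed `TokenStream::flatten_token` did: `NtIdent` became
// a plain identifier, and the lifetime behind `NtLifetime` was wrapped in an
// invisible-delimited group (the real code tagged it with the removed
// `FlattenToken` origin; `ProcMacro` stands in for it in this sketch).
fn flatten(tree: TokenTree) -> TokenTree {
    match tree {
        TokenTree::NtIdent(name) => TokenTree::Ident(name),
        TokenTree::NtLifetime(name) => TokenTree::Invisible(
            InvisibleOrigin::ProcMacro,
            vec![TokenTree::Lifetime(name)],
        ),
        other => other,
    }
}

fn main() {
    let stream = vec![
        TokenTree::NtIdent("foo".to_string()),
        TokenTree::NtLifetime("'a".to_string()),
    ];
    let flattened: Vec<TokenTree> = stream.into_iter().map(flatten).collect();
    println!("{flattened:?}");
}
```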

compiler/rustc_ast/src/token.rs

+1 -7

@@ -30,10 +30,6 @@ pub enum InvisibleOrigin {
     // Converted from `proc_macro::Delimiter` in
     // `proc_macro::Delimiter::to_internal`, i.e. returned by a proc macro.
     ProcMacro,
-
-    // Converted from `TokenKind::NtLifetime` in `TokenStream::flatten_token`.
-    // Treated similarly to `ProcMacro`.
-    FlattenToken,
 }
 
 impl PartialEq for InvisibleOrigin {
@@ -130,9 +126,7 @@ impl Delimiter {
         match self {
             Delimiter::Parenthesis | Delimiter::Bracket | Delimiter::Brace => false,
             Delimiter::Invisible(InvisibleOrigin::MetaVar(_)) => false,
-            Delimiter::Invisible(InvisibleOrigin::FlattenToken | InvisibleOrigin::ProcMacro) => {
-                true
-            }
+            Delimiter::Invisible(InvisibleOrigin::ProcMacro) => true,
         }
     }
 
compiler/rustc_ast/src/tokenstream.rs

+1 -43

@@ -25,7 +25,7 @@ use rustc_span::{DUMMY_SP, Span, SpanDecoder, SpanEncoder, Symbol, sym};
 
 use crate::ast::AttrStyle;
 use crate::ast_traits::{HasAttrs, HasTokens};
-use crate::token::{self, Delimiter, InvisibleOrigin, Token, TokenKind};
+use crate::token::{self, Delimiter, Token, TokenKind};
 use crate::{AttrVec, Attribute};
 
 /// Part of a `TokenStream`.
@@ -471,48 +471,6 @@ impl TokenStream {
         TokenStream::new(tts)
     }
 
-    fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree {
-        match token.kind {
-            token::NtIdent(ident, is_raw) => {
-                TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing)
-            }
-            token::NtLifetime(ident, is_raw) => TokenTree::Delimited(
-                DelimSpan::from_single(token.span),
-                DelimSpacing::new(Spacing::JointHidden, spacing),
-                Delimiter::Invisible(InvisibleOrigin::FlattenToken),
-                TokenStream::token_alone(token::Lifetime(ident.name, is_raw), ident.span),
-            ),
-            _ => TokenTree::Token(*token, spacing),
-        }
-    }
-
-    fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
-        match tree {
-            TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing),
-            TokenTree::Delimited(span, spacing, delim, tts) => {
-                TokenTree::Delimited(*span, *spacing, *delim, tts.flattened())
-            }
-        }
-    }
-
-    #[must_use]
-    pub fn flattened(&self) -> TokenStream {
-        fn can_skip(stream: &TokenStream) -> bool {
-            stream.iter().all(|tree| match tree {
-                TokenTree::Token(token, _) => {
-                    !matches!(token.kind, token::NtIdent(..) | token::NtLifetime(..))
-                }
-                TokenTree::Delimited(.., inner) => can_skip(inner),
-            })
-        }
-
-        if can_skip(self) {
-            return self.clone();
-        }
-
-        self.iter().map(|tree| TokenStream::flatten_token_tree(tree)).collect()
-    }
-
     // If `vec` is not empty, try to glue `tt` onto its last token. The return
     // value indicates if gluing took place.
     fn try_glue_to_last(vec: &mut Vec<TokenTree>, tt: &TokenTree) -> bool {

compiler/rustc_ast_lowering/src/lib.rs

+1 -1

@@ -914,7 +914,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
     }
 
     fn lower_delim_args(&self, args: &DelimArgs) -> DelimArgs {
-        DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.flattened() }
+        DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.clone() }
     }
 
     /// Lower an associated item constraint.

compiler/rustc_attr_parsing/src/context.rs

+1 -1

@@ -318,7 +318,7 @@ impl<'sess> AttributeParser<'sess> {
             ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(DelimArgs {
                 dspan: args.dspan,
                 delim: args.delim,
-                tokens: args.tokens.flattened(),
+                tokens: args.tokens.clone(),
             }),
             // This is an inert key-value attribute - it will never be visible to macros
             // after it gets lowered to HIR. Therefore, we can extract literals to handle

compiler/rustc_builtin_macros/src/cfg_eval.rs

+3 -7

@@ -92,20 +92,16 @@ impl CfgEval<'_> {
         // the location of `#[cfg]` and `#[cfg_attr]` in the token stream. The tokenization
         // process is lossless, so this process is invisible to proc-macros.
 
-        // 'Flatten' all nonterminals (i.e. `TokenKind::Nt{Ident,Lifetime}`)
-        // to `None`-delimited groups containing the corresponding tokens. This
-        // is normally delayed until the proc-macro server actually needs to
-        // provide tokens to a proc-macro. We do this earlier, so that we can
-        // handle cases like:
+        // Interesting cases:
         //
         // ```rust
         // #[cfg_eval] #[cfg] $item
         //```
         //
         // where `$item` is `#[cfg_attr] struct Foo {}`. We want to make
         // sure to evaluate *all* `#[cfg]` and `#[cfg_attr]` attributes - the simplest
-        // way to do this is to do a single parse of a stream without any nonterminals.
-        let orig_tokens = annotatable.to_tokens().flattened();
+        // way to do this is to do a single parse of the token stream.
+        let orig_tokens = annotatable.to_tokens();
 
         // Re-parse the tokens, setting the `capture_cfg` flag to save extra information
         // to the captured `AttrTokenStream` (specifically, we capture
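
To make the "interesting case" in the comment above concrete, here is a simplified, hypothetical illustration. It elides the metavariable indirection described in the comment, and `cfg_eval` is an unstable, nightly-only feature; the point is only that `#[cfg_eval]` eagerly evaluates every `#[cfg]`/`#[cfg_attr]` inside the annotated item.

```rust
// Hypothetical, nightly-only sketch (`#![feature(cfg_eval)]` is unstable).
#![feature(cfg_eval)]

#[cfg_eval]
#[cfg_attr(not(test), allow(dead_code))]
struct Foo {
    // `cfg(any())` is always false, so this field is stripped eagerly,
    // before any later macro sees the struct's tokens.
    #[cfg(any())]
    dropped: u8,
    kept: u8,
}

fn main() {
    let _ = Foo { kept: 1 };
}
```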

compiler/rustc_expand/src/config.rs

-6

@@ -221,12 +221,6 @@ impl<'a> StripUnconfigured<'a> {
                     inner = self.configure_tokens(&inner);
                     Some(AttrTokenTree::Delimited(sp, spacing, delim, inner))
                 }
-                AttrTokenTree::Token(
-                    Token { kind: TokenKind::NtIdent(..) | TokenKind::NtLifetime(..), .. },
-                    _,
-                ) => {
-                    panic!("Nonterminal should have been flattened: {:?}", tree);
-                }
                 AttrTokenTree::Token(
                     Token { kind: TokenKind::OpenDelim(_) | TokenKind::CloseDelim(_), .. },
                     _,

tests/ui/macros/syntax-error-recovery.rs

+2 -2

@@ -5,14 +5,14 @@ macro_rules! values {
             $(
                 #[$attr]
                 $token $($inner)? = $value,
+                //~^ ERROR expected one of `!` or `::`, found `<eof>`
             )*
         }
     };
 }
-//~^^^^^ ERROR expected one of `(`, `,`, `=`, `{`, or `}`, found `ty` metavariable
+//~^^^^^^ ERROR expected one of `(`, `,`, `=`, `{`, or `}`, found `ty` metavariable
 //~| ERROR macro expansion ignores `ty` metavariable and any tokens following
 
 values!(STRING(1) as (String) => cfg(test),);
-//~^ ERROR expected one of `!` or `::`, found `<eof>`
 
 fn main() {}

tests/ui/macros/syntax-error-recovery.stderr

+3 -3

@@ -22,10 +22,10 @@ LL | values!(STRING(1) as (String) => cfg(test),);
    = note: the usage of `values!` is likely invalid in item context
 
 error: expected one of `!` or `::`, found `<eof>`
-  --> $DIR/syntax-error-recovery.rs:15:9
+  --> $DIR/syntax-error-recovery.rs:7:17
   |
-LL | values!(STRING(1) as (String) => cfg(test),);
-   |         ^^^^^^ expected one of `!` or `::`
+LL |                 $token $($inner)? = $value,
+   |                 ^^^^^^ expected one of `!` or `::`
 
 error: aborting due to 3 previous errors
 