Skip to content

Commit 5fa22fe

Browse files
committedFeb 14, 2021
Auto merge of #81286 - Aaron1011:collect-tokens-attrs, r=petrochenkov
Require passing an `AttrWrapper` to `collect_tokens_trailing_token` This is a pure refactoring split out from #80689. It represents the most invasive part of that PR, requiring changes in every caller of `parse_outer_attributes` In order to eagerly expand `#[cfg]` attributes while preserving the original `TokenStream`, we need to know the range of tokens that corresponds to every attribute target. This is accomplished by making `parse_outer_attributes` return an opaque `AttrWrapper` struct. An `AttrWrapper` must be converted to a plain `AttrVec` by passing it to `collect_tokens_trailing_token`. This makes it difficult to accidentally construct an AST node with attributes without calling `collect_tokens_trailing_token`, since AST nodes store an `AttrVec`, not an `AttrWrapper`. As a result, we now call `collect_tokens_trailing_token` for attribute targets which only support inert attributes, such as generic arguments and struct fields. Currently, the constructed `LazyTokenStream` is simply discarded. Future PRs will record the token range corresponding to the attribute target, allowing those tokens to be removed from an enclosing `collect_tokens_trailing_token` call if necessary.
2 parents b86674e + 3321d70 commit 5fa22fe

File tree

10 files changed

+777
-541
lines changed

10 files changed

+777
-541
lines changed
 

Diff for: ‎compiler/rustc_ast/src/ast.rs

+15
Original file line numberDiff line numberDiff line change
@@ -2975,3 +2975,18 @@ macro_rules! derive_has_tokens {
29752975
derive_has_tokens! {
29762976
Item, Expr, Ty, AttrItem, Visibility, Path, Block, Pat
29772977
}
2978+
2979+
macro_rules! derive_has_attrs_no_tokens {
2980+
($($ty:path),*) => { $(
2981+
impl HasTokens for $ty {
2982+
fn finalize_tokens(&mut self, _tokens: LazyTokenStream) {}
2983+
}
2984+
)* }
2985+
}
2986+
2987+
// These ast nodes only support inert attributes, so they don't
2988+
// store tokens (since nothing can observe them)
2989+
derive_has_attrs_no_tokens! {
2990+
StructField, Arm,
2991+
Field, FieldPat, Variant, Param, GenericParam
2992+
}

Diff for: ‎compiler/rustc_parse/src/parser/attr.rs

+7-5
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use super::{Parser, PathStyle};
1+
use super::{AttrWrapper, Parser, PathStyle};
22
use rustc_ast as ast;
33
use rustc_ast::attr;
44
use rustc_ast::token::{self, Nonterminal};
@@ -26,7 +26,7 @@ pub(super) const DEFAULT_INNER_ATTR_FORBIDDEN: InnerAttrPolicy<'_> = InnerAttrPo
2626

2727
impl<'a> Parser<'a> {
2828
/// Parses attributes that appear before an item.
29-
pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, Vec<ast::Attribute>> {
29+
pub(super) fn parse_outer_attributes(&mut self) -> PResult<'a, AttrWrapper> {
3030
let mut attrs: Vec<ast::Attribute> = Vec::new();
3131
let mut just_parsed_doc_comment = false;
3232
loop {
@@ -74,7 +74,7 @@ impl<'a> Parser<'a> {
7474
break;
7575
}
7676
}
77-
Ok(attrs)
77+
Ok(AttrWrapper::new(attrs))
7878
}
7979

8080
/// Matches `attribute = # ! [ meta_item ]`.
@@ -89,7 +89,8 @@ impl<'a> Parser<'a> {
8989
inner_parse_policy, self.token
9090
);
9191
let lo = self.token.span;
92-
self.collect_tokens(|this| {
92+
// Attributes can't have attributes of their own
93+
self.collect_tokens_no_attrs(|this| {
9394
if this.eat(&token::Pound) {
9495
let style = if this.eat(&token::Not) {
9596
ast::AttrStyle::Inner
@@ -163,7 +164,8 @@ impl<'a> Parser<'a> {
163164
let args = this.parse_attr_args()?;
164165
Ok(ast::AttrItem { path, args, tokens: None })
165166
};
166-
if capture_tokens { self.collect_tokens(do_parse) } else { do_parse(self) }?
167+
// Attr items don't have attributes
168+
if capture_tokens { self.collect_tokens_no_attrs(do_parse) } else { do_parse(self) }?
167169
})
168170
}
169171

Diff for: ‎compiler/rustc_parse/src/parser/attr_wrapper.rs

+185
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,185 @@
1+
use super::attr;
2+
use super::{ForceCollect, Parser, TokenCursor, TrailingToken};
3+
use rustc_ast::token::{self, Token, TokenKind};
4+
use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
5+
use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing};
6+
use rustc_ast::HasTokens;
7+
use rustc_ast::{self as ast};
8+
use rustc_errors::PResult;
9+
use rustc_span::{Span, DUMMY_SP};
10+
11+
/// A wrapper type to ensure that the parser handles outer attributes correctly.
12+
/// When we parse outer attributes, we need to ensure that we capture tokens
13+
/// for the attribute target. This allows us to perform cfg-expansion on
14+
/// a token stream before we invoke a derive proc-macro.
15+
///
16+
/// This wrapper prevents direct access to the underlying `Vec<ast::Attribute>`.
17+
/// Parsing code can only get access to the underlying attributes
18+
/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
19+
/// This makes it difficult to accidentally construct an AST node
20+
/// (which stores a `Vec<ast::Attribute>`) without first collecting tokens.
21+
///
22+
/// This struct has its own module, to ensure that the parser code
23+
/// cannot directly access the `attrs` field
24+
#[derive(Debug, Clone)]
25+
pub struct AttrWrapper {
26+
attrs: Vec<ast::Attribute>,
27+
}
28+
29+
impl AttrWrapper {
30+
pub fn empty() -> AttrWrapper {
31+
AttrWrapper { attrs: vec![] }
32+
}
33+
pub fn new(attrs: Vec<ast::Attribute>) -> AttrWrapper {
34+
AttrWrapper { attrs }
35+
}
36+
// FIXME: Delay span bug here?
37+
pub(crate) fn take_for_recovery(self) -> Vec<ast::Attribute> {
38+
self.attrs
39+
}
40+
pub fn is_empty(&self) -> bool {
41+
self.attrs.is_empty()
42+
}
43+
}
44+
45+
impl<'a> Parser<'a> {
46+
/// Records all tokens consumed by the provided callback,
47+
/// including the current token. These tokens are collected
48+
/// into a `LazyTokenStream`, and returned along with the result
49+
/// of the callback.
50+
///
51+
/// Note: If your callback consumes an opening delimiter
52+
/// (including the case where you call `collect_tokens`
53+
/// when the current token is an opening delimiter),
54+
/// you must also consume the corresponding closing delimiter.
55+
///
56+
/// That is, you can consume
57+
/// `something ([{ }])` or `([{}])`, but not `([{}]`
58+
///
59+
/// This restriction shouldn't be an issue in practice,
60+
/// since this function is used to record the tokens for
61+
/// a parsed AST item, which always has matching delimiters.
62+
pub fn collect_tokens_trailing_token<R: HasTokens>(
63+
&mut self,
64+
attrs: AttrWrapper,
65+
force_collect: ForceCollect,
66+
f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, (R, TrailingToken)>,
67+
) -> PResult<'a, R> {
68+
if matches!(force_collect, ForceCollect::No) && !attr::maybe_needs_tokens(&attrs.attrs) {
69+
return Ok(f(self, attrs.attrs)?.0);
70+
}
71+
let start_token = (self.token.clone(), self.token_spacing);
72+
let cursor_snapshot = self.token_cursor.clone();
73+
74+
let (mut ret, trailing_token) = f(self, attrs.attrs)?;
75+
76+
// Produces a `TokenStream` on-demand. Using `cursor_snapshot`
77+
// and `num_calls`, we can reconstruct the `TokenStream` seen
78+
// by the callback. This allows us to avoid producing a `TokenStream`
79+
// if it is never needed - for example, a captured `macro_rules!`
80+
// argument that is never passed to a proc macro.
81+
// In practice token stream creation happens rarely compared to
82+
// calls to `collect_tokens` (see some statistics in #78736),
83+
// so we are doing as little up-front work as possible.
84+
//
85+
// This also makes `Parser` very cheap to clone, since
86+
// there is no intermediate collection buffer to clone.
87+
#[derive(Clone)]
88+
struct LazyTokenStreamImpl {
89+
start_token: (Token, Spacing),
90+
cursor_snapshot: TokenCursor,
91+
num_calls: usize,
92+
desugar_doc_comments: bool,
93+
append_unglued_token: Option<TreeAndSpacing>,
94+
}
95+
impl CreateTokenStream for LazyTokenStreamImpl {
96+
fn create_token_stream(&self) -> TokenStream {
97+
// The token produced by the final call to `next` or `next_desugared`
98+
// was not actually consumed by the callback. The combination
99+
// of chaining the initial token and using `take` produces the desired
100+
// result - we produce an empty `TokenStream` if no calls were made,
101+
// and omit the final token otherwise.
102+
let mut cursor_snapshot = self.cursor_snapshot.clone();
103+
let tokens = std::iter::once(self.start_token.clone())
104+
.chain((0..self.num_calls).map(|_| {
105+
if self.desugar_doc_comments {
106+
cursor_snapshot.next_desugared()
107+
} else {
108+
cursor_snapshot.next()
109+
}
110+
}))
111+
.take(self.num_calls);
112+
113+
make_token_stream(tokens, self.append_unglued_token.clone())
114+
}
115+
}
116+
117+
let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
118+
match trailing_token {
119+
TrailingToken::None => {}
120+
TrailingToken::Semi => {
121+
assert_eq!(self.token.kind, token::Semi);
122+
num_calls += 1;
123+
}
124+
TrailingToken::MaybeComma => {
125+
if self.token.kind == token::Comma {
126+
num_calls += 1;
127+
}
128+
}
129+
}
130+
131+
let lazy_impl = LazyTokenStreamImpl {
132+
start_token,
133+
num_calls,
134+
cursor_snapshot,
135+
desugar_doc_comments: self.desugar_doc_comments,
136+
append_unglued_token: self.token_cursor.append_unglued_token.clone(),
137+
};
138+
ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
139+
Ok(ret)
140+
}
141+
}
142+
143+
/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
144+
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
145+
/// of open and close delims.
146+
fn make_token_stream(
147+
tokens: impl Iterator<Item = (Token, Spacing)>,
148+
append_unglued_token: Option<TreeAndSpacing>,
149+
) -> TokenStream {
150+
#[derive(Debug)]
151+
struct FrameData {
152+
open: Span,
153+
inner: Vec<(TokenTree, Spacing)>,
154+
}
155+
let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }];
156+
for (token, spacing) in tokens {
157+
match token {
158+
Token { kind: TokenKind::OpenDelim(_), span } => {
159+
stack.push(FrameData { open: span, inner: vec![] });
160+
}
161+
Token { kind: TokenKind::CloseDelim(delim), span } => {
162+
let frame_data = stack.pop().expect("Token stack was empty!");
163+
let dspan = DelimSpan::from_pair(frame_data.open, span);
164+
let stream = TokenStream::new(frame_data.inner);
165+
let delimited = TokenTree::Delimited(dspan, delim, stream);
166+
stack
167+
.last_mut()
168+
.unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!"))
169+
.inner
170+
.push((delimited, Spacing::Alone));
171+
}
172+
token => {
173+
stack
174+
.last_mut()
175+
.expect("Bottom token frame is missing!")
176+
.inner
177+
.push((TokenTree::Token(token), spacing));
178+
}
179+
}
180+
}
181+
let mut final_buf = stack.pop().expect("Missing final buf!");
182+
final_buf.inner.extend(append_unglued_token);
183+
assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
184+
TokenStream::new(final_buf.inner)
185+
}

Diff for: ‎compiler/rustc_parse/src/parser/expr.rs

+224-161
Large diffs are not rendered by default.

Diff for: ‎compiler/rustc_parse/src/parser/generics.rs

+80-59
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use super::Parser;
1+
use super::{ForceCollect, Parser, TrailingToken};
22

33
use rustc_ast::token;
44
use rustc_ast::{
@@ -84,68 +84,89 @@ impl<'a> Parser<'a> {
8484
/// a trailing comma and erroneous trailing attributes.
8585
pub(super) fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
8686
let mut params = Vec::new();
87-
loop {
87+
let mut done = false;
88+
while !done {
8889
let attrs = self.parse_outer_attributes()?;
89-
if self.check_lifetime() {
90-
let lifetime = self.expect_lifetime();
91-
// Parse lifetime parameter.
92-
let bounds =
93-
if self.eat(&token::Colon) { self.parse_lt_param_bounds() } else { Vec::new() };
94-
params.push(ast::GenericParam {
95-
ident: lifetime.ident,
96-
id: lifetime.id,
97-
attrs: attrs.into(),
98-
bounds,
99-
kind: ast::GenericParamKind::Lifetime,
100-
is_placeholder: false,
101-
});
102-
} else if self.check_keyword(kw::Const) {
103-
// Parse const parameter.
104-
params.push(self.parse_const_param(attrs)?);
105-
} else if self.check_ident() {
106-
// Parse type parameter.
107-
params.push(self.parse_ty_param(attrs)?);
108-
} else if self.token.can_begin_type() {
109-
// Trying to write an associated type bound? (#26271)
110-
let snapshot = self.clone();
111-
match self.parse_ty_where_predicate() {
112-
Ok(where_predicate) => {
113-
self.struct_span_err(
114-
where_predicate.span(),
115-
"bounds on associated types do not belong here",
116-
)
117-
.span_label(where_predicate.span(), "belongs in `where` clause")
118-
.emit();
119-
}
120-
Err(mut err) => {
121-
err.cancel();
122-
*self = snapshot;
123-
break;
124-
}
125-
}
126-
} else {
127-
// Check for trailing attributes and stop parsing.
128-
if !attrs.is_empty() {
129-
if !params.is_empty() {
130-
self.struct_span_err(
131-
attrs[0].span,
132-
"trailing attribute after generic parameter",
133-
)
134-
.span_label(attrs[0].span, "attributes must go before parameters")
135-
.emit();
90+
let param =
91+
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
92+
let param = if this.check_lifetime() {
93+
let lifetime = this.expect_lifetime();
94+
// Parse lifetime parameter.
95+
let bounds = if this.eat(&token::Colon) {
96+
this.parse_lt_param_bounds()
97+
} else {
98+
Vec::new()
99+
};
100+
Some(ast::GenericParam {
101+
ident: lifetime.ident,
102+
id: lifetime.id,
103+
attrs: attrs.into(),
104+
bounds,
105+
kind: ast::GenericParamKind::Lifetime,
106+
is_placeholder: false,
107+
})
108+
} else if this.check_keyword(kw::Const) {
109+
// Parse const parameter.
110+
Some(this.parse_const_param(attrs)?)
111+
} else if this.check_ident() {
112+
// Parse type parameter.
113+
Some(this.parse_ty_param(attrs)?)
114+
} else if this.token.can_begin_type() {
115+
// Trying to write an associated type bound? (#26271)
116+
let snapshot = this.clone();
117+
match this.parse_ty_where_predicate() {
118+
Ok(where_predicate) => {
119+
this.struct_span_err(
120+
where_predicate.span(),
121+
"bounds on associated types do not belong here",
122+
)
123+
.span_label(where_predicate.span(), "belongs in `where` clause")
124+
.emit();
125+
// FIXME - try to continue parsing other generics?
126+
return Ok((None, TrailingToken::None));
127+
}
128+
Err(mut err) => {
129+
err.cancel();
130+
// FIXME - maybe we should overwrite 'self' outside of `collect_tokens`?
131+
*this = snapshot;
132+
return Ok((None, TrailingToken::None));
133+
}
134+
}
136135
} else {
137-
self.struct_span_err(attrs[0].span, "attribute without generic parameters")
138-
.span_label(
139-
attrs[0].span,
140-
"attributes are only permitted when preceding parameters",
141-
)
142-
.emit();
136+
// Check for trailing attributes and stop parsing.
137+
if !attrs.is_empty() {
138+
if !params.is_empty() {
139+
this.struct_span_err(
140+
attrs[0].span,
141+
"trailing attribute after generic parameter",
142+
)
143+
.span_label(attrs[0].span, "attributes must go before parameters")
144+
.emit();
145+
} else {
146+
this.struct_span_err(
147+
attrs[0].span,
148+
"attribute without generic parameters",
149+
)
150+
.span_label(
151+
attrs[0].span,
152+
"attributes are only permitted when preceding parameters",
153+
)
154+
.emit();
155+
}
156+
}
157+
return Ok((None, TrailingToken::None));
158+
};
159+
160+
if !this.eat(&token::Comma) {
161+
done = true;
143162
}
144-
}
145-
break;
146-
}
163+
// We just ate the comma, so no need to use `TrailingToken`
164+
Ok((param, TrailingToken::None))
165+
})?;
147166

148-
if !self.eat(&token::Comma) {
167+
if let Some(param) = param {
168+
params.push(param);
169+
} else {
149170
break;
150171
}
151172
}

Diff for: ‎compiler/rustc_parse/src/parser/item.rs

+154-122
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,6 @@
11
use super::diagnostics::{dummy_arg, ConsumeClosingDelim, Error};
22
use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
3-
use super::{FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
4-
5-
use crate::{maybe_collect_tokens, maybe_whole};
3+
use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken};
64

75
use rustc_ast::ast::*;
86
use rustc_ast::ptr::P;
@@ -108,25 +106,40 @@ impl<'a> Parser<'a> {
108106

109107
pub(super) fn parse_item_common(
110108
&mut self,
111-
mut attrs: Vec<Attribute>,
109+
attrs: AttrWrapper,
112110
mac_allowed: bool,
113111
attrs_allowed: bool,
114112
req_name: ReqName,
115113
force_collect: ForceCollect,
116114
) -> PResult<'a, Option<Item>> {
117-
maybe_whole!(self, NtItem, |item| {
118-
let mut item = item;
119-
mem::swap(&mut item.attrs, &mut attrs);
120-
item.attrs.extend(attrs);
121-
Some(item.into_inner())
122-
});
115+
// Don't use `maybe_whole` so that we have precise control
116+
// over when we bump the parser
117+
if let token::Interpolated(nt) = &self.token.kind {
118+
if let token::NtItem(item) = &**nt {
119+
let item = item.clone();
120+
121+
return self.collect_tokens_trailing_token(
122+
attrs,
123+
force_collect,
124+
|this, mut attrs| {
125+
let mut item = item;
126+
mem::swap(&mut item.attrs, &mut attrs);
127+
item.attrs.extend(attrs);
128+
// Bump the parser so that we capture the token::Interpolated
129+
this.bump();
130+
Ok((Some(item.into_inner()), TrailingToken::None))
131+
},
132+
);
133+
}
134+
};
123135

124136
let mut unclosed_delims = vec![];
125-
let item = maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Self| {
126-
let item = this.parse_item_common_(attrs, mac_allowed, attrs_allowed, req_name);
127-
unclosed_delims.append(&mut this.unclosed_delims);
128-
Ok((item?, TrailingToken::None))
129-
})?;
137+
let item =
138+
self.collect_tokens_trailing_token(attrs, force_collect, |this: &mut Self, attrs| {
139+
let item = this.parse_item_common_(attrs, mac_allowed, attrs_allowed, req_name);
140+
unclosed_delims.append(&mut this.unclosed_delims);
141+
Ok((item?, TrailingToken::None))
142+
})?;
130143

131144
self.unclosed_delims.append(&mut unclosed_delims);
132145
Ok(item)
@@ -1109,39 +1122,45 @@ impl<'a> Parser<'a> {
11091122

11101123
fn parse_enum_variant(&mut self) -> PResult<'a, Option<Variant>> {
11111124
let variant_attrs = self.parse_outer_attributes()?;
1112-
let vlo = self.token.span;
1113-
1114-
let vis = self.parse_visibility(FollowedByType::No)?;
1115-
if !self.recover_nested_adt_item(kw::Enum)? {
1116-
return Ok(None);
1117-
}
1118-
let ident = self.parse_ident()?;
1119-
1120-
let struct_def = if self.check(&token::OpenDelim(token::Brace)) {
1121-
// Parse a struct variant.
1122-
let (fields, recovered) = self.parse_record_struct_body()?;
1123-
VariantData::Struct(fields, recovered)
1124-
} else if self.check(&token::OpenDelim(token::Paren)) {
1125-
VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID)
1126-
} else {
1127-
VariantData::Unit(DUMMY_NODE_ID)
1128-
};
1129-
1130-
let disr_expr =
1131-
if self.eat(&token::Eq) { Some(self.parse_anon_const_expr()?) } else { None };
1125+
self.collect_tokens_trailing_token(
1126+
variant_attrs,
1127+
ForceCollect::No,
1128+
|this, variant_attrs| {
1129+
let vlo = this.token.span;
1130+
1131+
let vis = this.parse_visibility(FollowedByType::No)?;
1132+
if !this.recover_nested_adt_item(kw::Enum)? {
1133+
return Ok((None, TrailingToken::None));
1134+
}
1135+
let ident = this.parse_ident()?;
1136+
1137+
let struct_def = if this.check(&token::OpenDelim(token::Brace)) {
1138+
// Parse a struct variant.
1139+
let (fields, recovered) = this.parse_record_struct_body()?;
1140+
VariantData::Struct(fields, recovered)
1141+
} else if this.check(&token::OpenDelim(token::Paren)) {
1142+
VariantData::Tuple(this.parse_tuple_struct_body()?, DUMMY_NODE_ID)
1143+
} else {
1144+
VariantData::Unit(DUMMY_NODE_ID)
1145+
};
11321146

1133-
let vr = ast::Variant {
1134-
ident,
1135-
vis,
1136-
id: DUMMY_NODE_ID,
1137-
attrs: variant_attrs,
1138-
data: struct_def,
1139-
disr_expr,
1140-
span: vlo.to(self.prev_token.span),
1141-
is_placeholder: false,
1142-
};
1147+
let disr_expr =
1148+
if this.eat(&token::Eq) { Some(this.parse_anon_const_expr()?) } else { None };
1149+
1150+
let vr = ast::Variant {
1151+
ident,
1152+
vis,
1153+
id: DUMMY_NODE_ID,
1154+
attrs: variant_attrs,
1155+
data: struct_def,
1156+
disr_expr,
1157+
span: vlo.to(this.prev_token.span),
1158+
is_placeholder: false,
1159+
};
11431160

1144-
Ok(Some(vr))
1161+
Ok((Some(vr), TrailingToken::MaybeComma))
1162+
},
1163+
)
11451164
}
11461165

11471166
/// Parses `struct Foo { ... }`.
@@ -1262,17 +1281,23 @@ impl<'a> Parser<'a> {
12621281
// Unit like structs are handled in parse_item_struct function
12631282
self.parse_paren_comma_seq(|p| {
12641283
let attrs = p.parse_outer_attributes()?;
1265-
let lo = p.token.span;
1266-
let vis = p.parse_visibility(FollowedByType::Yes)?;
1267-
let ty = p.parse_ty()?;
1268-
Ok(StructField {
1269-
span: lo.to(ty.span),
1270-
vis,
1271-
ident: None,
1272-
id: DUMMY_NODE_ID,
1273-
ty,
1274-
attrs,
1275-
is_placeholder: false,
1284+
p.collect_tokens_trailing_token(attrs, ForceCollect::No, |p, attrs| {
1285+
let lo = p.token.span;
1286+
let vis = p.parse_visibility(FollowedByType::Yes)?;
1287+
let ty = p.parse_ty()?;
1288+
1289+
Ok((
1290+
StructField {
1291+
span: lo.to(ty.span),
1292+
vis,
1293+
ident: None,
1294+
id: DUMMY_NODE_ID,
1295+
ty,
1296+
attrs,
1297+
is_placeholder: false,
1298+
},
1299+
TrailingToken::MaybeComma,
1300+
))
12761301
})
12771302
})
12781303
.map(|(r, _)| r)
@@ -1281,9 +1306,11 @@ impl<'a> Parser<'a> {
12811306
/// Parses an element of a struct declaration.
12821307
fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
12831308
let attrs = self.parse_outer_attributes()?;
1284-
let lo = self.token.span;
1285-
let vis = self.parse_visibility(FollowedByType::No)?;
1286-
self.parse_single_struct_field(lo, vis, attrs)
1309+
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
1310+
let lo = this.token.span;
1311+
let vis = this.parse_visibility(FollowedByType::No)?;
1312+
Ok((this.parse_single_struct_field(lo, vis, attrs)?, TrailingToken::None))
1313+
})
12871314
}
12881315

12891316
/// Parses a structure field declaration.
@@ -1736,74 +1763,79 @@ impl<'a> Parser<'a> {
17361763
fn parse_param_general(&mut self, req_name: ReqName, first_param: bool) -> PResult<'a, Param> {
17371764
let lo = self.token.span;
17381765
let attrs = self.parse_outer_attributes()?;
1766+
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
1767+
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
1768+
if let Some(mut param) = this.parse_self_param()? {
1769+
param.attrs = attrs.into();
1770+
let res = if first_param { Ok(param) } else { this.recover_bad_self_param(param) };
1771+
return Ok((res?, TrailingToken::None));
1772+
}
17391773

1740-
// Possibly parse `self`. Recover if we parsed it and it wasn't allowed here.
1741-
if let Some(mut param) = self.parse_self_param()? {
1742-
param.attrs = attrs.into();
1743-
return if first_param { Ok(param) } else { self.recover_bad_self_param(param) };
1744-
}
1745-
1746-
let is_name_required = match self.token.kind {
1747-
token::DotDotDot => false,
1748-
_ => req_name(self.token.span.edition()),
1749-
};
1750-
let (pat, ty) = if is_name_required || self.is_named_param() {
1751-
debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
1774+
let is_name_required = match this.token.kind {
1775+
token::DotDotDot => false,
1776+
_ => req_name(this.token.span.edition()),
1777+
};
1778+
let (pat, ty) = if is_name_required || this.is_named_param() {
1779+
debug!("parse_param_general parse_pat (is_name_required:{})", is_name_required);
1780+
1781+
let pat = this.parse_fn_param_pat()?;
1782+
if let Err(mut err) = this.expect(&token::Colon) {
1783+
return if let Some(ident) =
1784+
this.parameter_without_type(&mut err, pat, is_name_required, first_param)
1785+
{
1786+
err.emit();
1787+
Ok((dummy_arg(ident), TrailingToken::None))
1788+
} else {
1789+
Err(err)
1790+
};
1791+
}
17521792

1753-
let pat = self.parse_fn_param_pat()?;
1754-
if let Err(mut err) = self.expect(&token::Colon) {
1755-
return if let Some(ident) =
1756-
self.parameter_without_type(&mut err, pat, is_name_required, first_param)
1793+
this.eat_incorrect_doc_comment_for_param_type();
1794+
(pat, this.parse_ty_for_param()?)
1795+
} else {
1796+
debug!("parse_param_general ident_to_pat");
1797+
let parser_snapshot_before_ty = this.clone();
1798+
this.eat_incorrect_doc_comment_for_param_type();
1799+
let mut ty = this.parse_ty_for_param();
1800+
if ty.is_ok()
1801+
&& this.token != token::Comma
1802+
&& this.token != token::CloseDelim(token::Paren)
17571803
{
1758-
err.emit();
1759-
Ok(dummy_arg(ident))
1760-
} else {
1761-
Err(err)
1762-
};
1763-
}
1764-
1765-
self.eat_incorrect_doc_comment_for_param_type();
1766-
(pat, self.parse_ty_for_param()?)
1767-
} else {
1768-
debug!("parse_param_general ident_to_pat");
1769-
let parser_snapshot_before_ty = self.clone();
1770-
self.eat_incorrect_doc_comment_for_param_type();
1771-
let mut ty = self.parse_ty_for_param();
1772-
if ty.is_ok()
1773-
&& self.token != token::Comma
1774-
&& self.token != token::CloseDelim(token::Paren)
1775-
{
1776-
// This wasn't actually a type, but a pattern looking like a type,
1777-
// so we are going to rollback and re-parse for recovery.
1778-
ty = self.unexpected();
1779-
}
1780-
match ty {
1781-
Ok(ty) => {
1782-
let ident = Ident::new(kw::Empty, self.prev_token.span);
1783-
let bm = BindingMode::ByValue(Mutability::Not);
1784-
let pat = self.mk_pat_ident(ty.span, bm, ident);
1785-
(pat, ty)
1804+
// This wasn't actually a type, but a pattern looking like a type,
1805+
// so we are going to rollback and re-parse for recovery.
1806+
ty = this.unexpected();
17861807
}
1787-
// If this is a C-variadic argument and we hit an error, return the error.
1788-
Err(err) if self.token == token::DotDotDot => return Err(err),
1789-
// Recover from attempting to parse the argument as a type without pattern.
1790-
Err(mut err) => {
1791-
err.cancel();
1792-
*self = parser_snapshot_before_ty;
1793-
self.recover_arg_parse()?
1808+
match ty {
1809+
Ok(ty) => {
1810+
let ident = Ident::new(kw::Empty, this.prev_token.span);
1811+
let bm = BindingMode::ByValue(Mutability::Not);
1812+
let pat = this.mk_pat_ident(ty.span, bm, ident);
1813+
(pat, ty)
1814+
}
1815+
// If this is a C-variadic argument and we hit an error, return the error.
1816+
Err(err) if this.token == token::DotDotDot => return Err(err),
1817+
// Recover from attempting to parse the argument as a type without pattern.
1818+
Err(mut err) => {
1819+
err.cancel();
1820+
*this = parser_snapshot_before_ty;
1821+
this.recover_arg_parse()?
1822+
}
17941823
}
1795-
}
1796-
};
1797-
1798-
let span = lo.until(self.token.span);
1824+
};
17991825

1800-
Ok(Param {
1801-
attrs: attrs.into(),
1802-
id: ast::DUMMY_NODE_ID,
1803-
is_placeholder: false,
1804-
pat,
1805-
span,
1806-
ty,
1826+
let span = lo.until(this.token.span);
1827+
1828+
Ok((
1829+
Param {
1830+
attrs: attrs.into(),
1831+
id: ast::DUMMY_NODE_ID,
1832+
is_placeholder: false,
1833+
pat,
1834+
span,
1835+
ty,
1836+
},
1837+
TrailingToken::None,
1838+
))
18071839
})
18081840
}
18091841

Diff for: ‎compiler/rustc_parse/src/parser/mod.rs

+19-151
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
pub mod attr;
2+
mod attr_wrapper;
23
mod diagnostics;
34
mod expr;
45
mod generics;
@@ -10,14 +11,15 @@ mod stmt;
1011
mod ty;
1112

1213
use crate::lexer::UnmatchedBrace;
14+
pub use attr_wrapper::AttrWrapper;
1315
pub use diagnostics::AttemptLocalParseRecovery;
1416
use diagnostics::Error;
1517
pub use path::PathStyle;
1618

1719
use rustc_ast::ptr::P;
1820
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
19-
use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
20-
use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
21+
use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
22+
use rustc_ast::tokenstream::{TokenStream, TokenTree, TreeAndSpacing};
2123
use rustc_ast::DUMMY_NODE_ID;
2224
use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, HasTokens};
2325
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
@@ -64,6 +66,9 @@ pub enum ForceCollect {
6466
pub enum TrailingToken {
6567
None,
6668
Semi,
69+
/// If the trailing token is a comma, then capture it
70+
/// Otherwise, ignore the trailing token
71+
MaybeComma,
6772
}
6873

6974
/// Like `maybe_whole_expr`, but for things other than expressions.
@@ -981,7 +986,7 @@ impl<'a> Parser<'a> {
981986
}
982987

983988
// Collect tokens because they are used during lowering to HIR.
984-
let expr = self.collect_tokens(|this| this.parse_expr())?;
989+
let expr = self.collect_tokens_no_attrs(|this| this.parse_expr())?;
985990
let span = expr.span;
986991

987992
match &expr.kind {
@@ -1004,12 +1009,12 @@ impl<'a> Parser<'a> {
10041009

10051010
fn parse_or_use_outer_attributes(
10061011
&mut self,
1007-
already_parsed_attrs: Option<AttrVec>,
1008-
) -> PResult<'a, AttrVec> {
1012+
already_parsed_attrs: Option<AttrWrapper>,
1013+
) -> PResult<'a, AttrWrapper> {
10091014
if let Some(attrs) = already_parsed_attrs {
10101015
Ok(attrs)
10111016
} else {
1012-
self.parse_outer_attributes().map(|a| a.into())
1017+
self.parse_outer_attributes()
10131018
}
10141019
}
10151020

@@ -1226,97 +1231,17 @@ impl<'a> Parser<'a> {
12261231
}
12271232
}
12281233

1229-
pub fn collect_tokens<R: HasTokens>(
1234+
pub fn collect_tokens_no_attrs<R: HasTokens>(
12301235
&mut self,
12311236
f: impl FnOnce(&mut Self) -> PResult<'a, R>,
12321237
) -> PResult<'a, R> {
1233-
self.collect_tokens_trailing_token(|this| Ok((f(this)?, TrailingToken::None)))
1234-
}
1235-
1236-
/// Records all tokens consumed by the provided callback,
1237-
/// including the current token. These tokens are collected
1238-
/// into a `LazyTokenStream`, and returned along with the result
1239-
/// of the callback.
1240-
///
1241-
/// Note: If your callback consumes an opening delimiter
1242-
/// (including the case where you call `collect_tokens`
1243-
/// when the current token is an opening delimiter),
1244-
/// you must also consume the corresponding closing delimiter.
1245-
///
1246-
/// That is, you can consume
1247-
/// `something ([{ }])` or `([{}])`, but not `([{}]`
1248-
///
1249-
/// This restriction shouldn't be an issue in practice,
1250-
/// since this function is used to record the tokens for
1251-
/// a parsed AST item, which always has matching delimiters.
1252-
pub fn collect_tokens_trailing_token<R: HasTokens>(
1253-
&mut self,
1254-
f: impl FnOnce(&mut Self) -> PResult<'a, (R, TrailingToken)>,
1255-
) -> PResult<'a, R> {
1256-
let start_token = (self.token.clone(), self.token_spacing);
1257-
let cursor_snapshot = self.token_cursor.clone();
1258-
1259-
let (mut ret, trailing_token) = f(self)?;
1260-
1261-
// Produces a `TokenStream` on-demand. Using `cursor_snapshot`
1262-
// and `num_calls`, we can reconstruct the `TokenStream` seen
1263-
// by the callback. This allows us to avoid producing a `TokenStream`
1264-
// if it is never needed - for example, a captured `macro_rules!`
1265-
// argument that is never passed to a proc macro.
1266-
// In practice token stream creation happens rarely compared to
1267-
// calls to `collect_tokens` (see some statistics in #78736),
1268-
// so we are doing as little up-front work as possible.
1269-
//
1270-
// This also makes `Parser` very cheap to clone, since
1271-
// there is no intermediate collection buffer to clone.
1272-
#[derive(Clone)]
1273-
struct LazyTokenStreamImpl {
1274-
start_token: (Token, Spacing),
1275-
cursor_snapshot: TokenCursor,
1276-
num_calls: usize,
1277-
desugar_doc_comments: bool,
1278-
append_unglued_token: Option<TreeAndSpacing>,
1279-
}
1280-
impl CreateTokenStream for LazyTokenStreamImpl {
1281-
fn create_token_stream(&self) -> TokenStream {
1282-
// The token produced by the final call to `next` or `next_desugared`
1283-
// was not actually consumed by the callback. The combination
1284-
// of chaining the initial token and using `take` produces the desired
1285-
// result - we produce an empty `TokenStream` if no calls were made,
1286-
// and omit the final token otherwise.
1287-
let mut cursor_snapshot = self.cursor_snapshot.clone();
1288-
let tokens = std::iter::once(self.start_token.clone())
1289-
.chain((0..self.num_calls).map(|_| {
1290-
if self.desugar_doc_comments {
1291-
cursor_snapshot.next_desugared()
1292-
} else {
1293-
cursor_snapshot.next()
1294-
}
1295-
}))
1296-
.take(self.num_calls);
1297-
1298-
make_token_stream(tokens, self.append_unglued_token.clone())
1299-
}
1300-
}
1301-
1302-
let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
1303-
match trailing_token {
1304-
TrailingToken::None => {}
1305-
TrailingToken::Semi => {
1306-
assert_eq!(self.token.kind, token::Semi);
1307-
num_calls += 1;
1308-
}
1309-
}
1310-
1311-
let lazy_impl = LazyTokenStreamImpl {
1312-
start_token,
1313-
num_calls,
1314-
cursor_snapshot,
1315-
desugar_doc_comments: self.desugar_doc_comments,
1316-
append_unglued_token: self.token_cursor.append_unglued_token.clone(),
1317-
};
1318-
ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
1319-
Ok(ret)
1238+
// The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
1239+
// `ForceCollect::Yes`
1240+
self.collect_tokens_trailing_token(
1241+
AttrWrapper::empty(),
1242+
ForceCollect::Yes,
1243+
|this, _attrs| Ok((f(this)?, TrailingToken::None)),
1244+
)
13201245
}
13211246

13221247
/// `::{` or `::*`
@@ -1365,60 +1290,3 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
13651290
}
13661291
}
13671292
}
1368-
1369-
/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
1370-
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
1371-
/// of open and close delims.
1372-
fn make_token_stream(
1373-
tokens: impl Iterator<Item = (Token, Spacing)>,
1374-
append_unglued_token: Option<TreeAndSpacing>,
1375-
) -> TokenStream {
1376-
#[derive(Debug)]
1377-
struct FrameData {
1378-
open: Span,
1379-
inner: Vec<(TokenTree, Spacing)>,
1380-
}
1381-
let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }];
1382-
for (token, spacing) in tokens {
1383-
match token {
1384-
Token { kind: TokenKind::OpenDelim(_), span } => {
1385-
stack.push(FrameData { open: span, inner: vec![] });
1386-
}
1387-
Token { kind: TokenKind::CloseDelim(delim), span } => {
1388-
let frame_data = stack.pop().expect("Token stack was empty!");
1389-
let dspan = DelimSpan::from_pair(frame_data.open, span);
1390-
let stream = TokenStream::new(frame_data.inner);
1391-
let delimited = TokenTree::Delimited(dspan, delim, stream);
1392-
stack
1393-
.last_mut()
1394-
.unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!"))
1395-
.inner
1396-
.push((delimited, Spacing::Alone));
1397-
}
1398-
token => {
1399-
stack
1400-
.last_mut()
1401-
.expect("Bottom token frame is missing!")
1402-
.inner
1403-
.push((TokenTree::Token(token), spacing));
1404-
}
1405-
}
1406-
}
1407-
let mut final_buf = stack.pop().expect("Missing final buf!");
1408-
final_buf.inner.extend(append_unglued_token);
1409-
assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
1410-
TokenStream::new(final_buf.inner)
1411-
}
1412-
1413-
#[macro_export]
1414-
macro_rules! maybe_collect_tokens {
1415-
($self:ident, $force_collect:expr, $attrs:expr, $f:expr) => {
1416-
if matches!($force_collect, ForceCollect::Yes)
1417-
|| $crate::parser::attr::maybe_needs_tokens($attrs)
1418-
{
1419-
$self.collect_tokens_trailing_token($f)
1420-
} else {
1421-
Ok($f($self)?.0)
1422-
}
1423-
};
1424-
}

Diff for: ‎compiler/rustc_parse/src/parser/nonterminal.rs

+34-10
Original file line numberDiff line numberDiff line change
@@ -108,7 +108,9 @@ impl<'a> Parser<'a> {
108108
}
109109
},
110110
NonterminalKind::Block => {
111-
token::NtBlock(self.collect_tokens(|this| this.parse_block())?)
111+
// While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
112+
// the ':block' matcher does not support them
113+
token::NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
112114
}
113115
NonterminalKind::Stmt => match self.parse_stmt(ForceCollect::Yes)? {
114116
Some(s) => token::NtStmt(s),
@@ -117,19 +119,41 @@ impl<'a> Parser<'a> {
117119
}
118120
},
119121
NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => {
120-
token::NtPat(self.collect_tokens(|this| match kind {
122+
token::NtPat(self.collect_tokens_no_attrs(|this| match kind {
121123
NonterminalKind::Pat2018 { .. } => this.parse_pat(None),
122124
NonterminalKind::Pat2021 { .. } => {
123125
this.parse_top_pat(GateOr::Yes, RecoverComma::No)
124126
}
125127
_ => unreachable!(),
126128
})?)
127129
}
128-
NonterminalKind::Expr => token::NtExpr(self.collect_tokens(|this| this.parse_expr())?),
130+
131+
// If there are attributes present, then `parse_expr` will end up collecting tokens,
132+
// turning the outer `collect_tokens_no_attrs` into a no-op due to the already present
133+
// tokens. If there are *not* attributes present, then the outer
134+
// `collect_tokens_no_attrs` will ensure that we will end up collecting tokens for the
135+
// expressions.
136+
//
137+
// This is less efficient than it could be, since the outer `collect_tokens_no_attrs`
138+
// still needs to snapshot the `TokenCursor` before calling `parse_expr`, even when
139+
// `parse_expr` will end up collecting tokens. Ideally, this would work more like
140+
// `parse_item`, and take in a `ForceCollect` parameter. However, this would require
141+
// adding a `ForceCollect` parameter in a bunch of places in expression parsing
142+
// for little gain. If the perf impact from this turns out to be noticeable, we should
143+
// revisit this apporach.
144+
NonterminalKind::Expr => {
145+
token::NtExpr(self.collect_tokens_no_attrs(|this| this.parse_expr())?)
146+
}
129147
NonterminalKind::Literal => {
130-
token::NtLiteral(self.collect_tokens(|this| this.parse_literal_maybe_minus())?)
148+
// The `:literal` matcher does not support attributes
149+
token::NtLiteral(
150+
self.collect_tokens_no_attrs(|this| this.parse_literal_maybe_minus())?,
151+
)
152+
}
153+
154+
NonterminalKind::Ty => {
155+
token::NtTy(self.collect_tokens_no_attrs(|this| this.parse_ty())?)
131156
}
132-
NonterminalKind::Ty => token::NtTy(self.collect_tokens(|this| this.parse_ty())?),
133157
// this could be handled like a token, since it is one
134158
NonterminalKind::Ident => {
135159
if let Some((ident, is_raw)) = get_macro_ident(&self.token) {
@@ -141,15 +165,15 @@ impl<'a> Parser<'a> {
141165
return Err(self.struct_span_err(self.token.span, msg));
142166
}
143167
}
144-
NonterminalKind::Path => {
145-
token::NtPath(self.collect_tokens(|this| this.parse_path(PathStyle::Type))?)
146-
}
168+
NonterminalKind::Path => token::NtPath(
169+
self.collect_tokens_no_attrs(|this| this.parse_path(PathStyle::Type))?,
170+
),
147171
NonterminalKind::Meta => {
148-
token::NtMeta(P(self.collect_tokens(|this| this.parse_attr_item(false))?))
172+
token::NtMeta(P(self.collect_tokens_no_attrs(|this| this.parse_attr_item(false))?))
149173
}
150174
NonterminalKind::TT => token::NtTT(self.parse_token_tree()),
151175
NonterminalKind::Vis => token::NtVis(
152-
self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?,
176+
self.collect_tokens_no_attrs(|this| this.parse_visibility(FollowedByType::Yes))?,
153177
),
154178
NonterminalKind::Lifetime => {
155179
if self.check_lifetime() {

Diff for: ‎compiler/rustc_parse/src/parser/pat.rs

+19-11
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
use super::{Parser, PathStyle};
1+
use super::{ForceCollect, Parser, PathStyle, TrailingToken};
22
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
33
use rustc_ast::mut_visit::{noop_visit_pat, MutVisitor};
44
use rustc_ast::ptr::P;
@@ -938,16 +938,24 @@ impl<'a> Parser<'a> {
938938
}
939939
}
940940

941-
fields.push(match self.parse_pat_field(lo, attrs) {
942-
Ok(field) => field,
943-
Err(err) => {
944-
if let Some(mut delayed_err) = delayed_err {
945-
delayed_err.emit();
946-
}
947-
return Err(err);
948-
}
949-
});
950-
ate_comma = self.eat(&token::Comma);
941+
let field =
942+
self.collect_tokens_trailing_token(attrs, ForceCollect::No, |this, attrs| {
943+
let field = match this.parse_pat_field(lo, attrs) {
944+
Ok(field) => Ok(field),
945+
Err(err) => {
946+
if let Some(mut delayed_err) = delayed_err.take() {
947+
delayed_err.emit();
948+
}
949+
return Err(err);
950+
}
951+
}?;
952+
ate_comma = this.eat(&token::Comma);
953+
// We just ate a comma, so there's no need to use
954+
// `TrailingToken::Comma`
955+
Ok((field, TrailingToken::None))
956+
})?;
957+
958+
fields.push(field)
951959
}
952960

953961
if let Some(mut err) = delayed_err {

Diff for: ‎compiler/rustc_parse/src/parser/stmt.rs

+40-22
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,9 @@ use super::diagnostics::{AttemptLocalParseRecovery, Error};
33
use super::expr::LhsExpr;
44
use super::pat::{GateOr, RecoverComma};
55
use super::path::PathStyle;
6-
use super::{BlockMode, ForceCollect, Parser, Restrictions, SemiColonMode, TrailingToken};
7-
use crate::{maybe_collect_tokens, maybe_whole};
6+
use super::TrailingToken;
7+
use super::{AttrWrapper, BlockMode, ForceCollect, Parser, Restrictions, SemiColonMode};
8+
use crate::maybe_whole;
89

910
use rustc_ast as ast;
1011
use rustc_ast::attr::HasAttrs;
@@ -38,30 +39,47 @@ impl<'a> Parser<'a> {
3839
capture_semi: bool,
3940
force_collect: ForceCollect,
4041
) -> PResult<'a, Option<Stmt>> {
41-
let mut attrs = self.parse_outer_attributes()?;
42+
let attrs = self.parse_outer_attributes()?;
4243
let lo = self.token.span;
4344

44-
maybe_whole!(self, NtStmt, |stmt| {
45-
let mut stmt = stmt;
46-
stmt.visit_attrs(|stmt_attrs| {
47-
mem::swap(stmt_attrs, &mut attrs);
48-
stmt_attrs.extend(attrs);
49-
});
50-
Some(stmt)
51-
});
45+
// Don't use `maybe_whole` so that we have precise control
46+
// over when we bump the parser
47+
if let token::Interpolated(nt) = &self.token.kind {
48+
if let token::NtStmt(stmt) = &**nt {
49+
let mut stmt = stmt.clone();
50+
return self.collect_tokens_trailing_token(
51+
attrs,
52+
force_collect,
53+
|this, mut attrs| {
54+
stmt.visit_attrs(|stmt_attrs| {
55+
mem::swap(stmt_attrs, &mut attrs);
56+
stmt_attrs.extend(attrs);
57+
});
58+
// Make sure we capture the token::Interpolated
59+
this.bump();
60+
Ok((Some(stmt), TrailingToken::None))
61+
},
62+
);
63+
}
64+
}
5265

5366
Ok(Some(if self.token.is_keyword(kw::Let) {
54-
self.parse_local_mk(lo, attrs.into(), capture_semi, force_collect)?
67+
self.parse_local_mk(lo, attrs, capture_semi, force_collect)?
5568
} else if self.is_kw_followed_by_ident(kw::Mut) {
56-
self.recover_stmt_local(lo, attrs.into(), "missing keyword", "let mut")?
69+
self.recover_stmt_local(
70+
lo,
71+
attrs.take_for_recovery().into(),
72+
"missing keyword",
73+
"let mut",
74+
)?
5775
} else if self.is_kw_followed_by_ident(kw::Auto) {
5876
self.bump(); // `auto`
5977
let msg = "write `let` instead of `auto` to introduce a new variable";
60-
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
78+
self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
6179
} else if self.is_kw_followed_by_ident(sym::var) {
6280
self.bump(); // `var`
6381
let msg = "write `let` instead of `var` to introduce a new variable";
64-
self.recover_stmt_local(lo, attrs.into(), msg, "let")?
82+
self.recover_stmt_local(lo, attrs.take_for_recovery().into(), msg, "let")?
6583
} else if self.check_path() && !self.token.is_qpath_start() && !self.is_path_start_item() {
6684
// We have avoided contextual keywords like `union`, items with `crate` visibility,
6785
// or `auto trait` items. We aim to parse an arbitrary path `a::b` but not something
@@ -75,25 +93,25 @@ impl<'a> Parser<'a> {
7593
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
7694
} else if self.eat(&token::Semi) {
7795
// Do not attempt to parse an expression if we're done here.
78-
self.error_outer_attrs(&attrs);
96+
self.error_outer_attrs(&attrs.take_for_recovery());
7997
self.mk_stmt(lo, StmtKind::Empty)
8098
} else if self.token != token::CloseDelim(token::Brace) {
8199
// Remainder are line-expr stmts.
82100
let e = self.parse_expr_res(Restrictions::STMT_EXPR, Some(attrs.into()))?;
83101
self.mk_stmt(lo.to(e.span), StmtKind::Expr(e))
84102
} else {
85-
self.error_outer_attrs(&attrs);
103+
self.error_outer_attrs(&attrs.take_for_recovery());
86104
return Ok(None);
87105
}))
88106
}
89107

90108
fn parse_stmt_path_start(
91109
&mut self,
92110
lo: Span,
93-
attrs: Vec<Attribute>,
111+
attrs: AttrWrapper,
94112
force_collect: ForceCollect,
95113
) -> PResult<'a, Stmt> {
96-
maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Parser<'a>| {
114+
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
97115
let path = this.parse_path(PathStyle::Expr)?;
98116

99117
if this.eat(&token::Not) {
@@ -142,7 +160,7 @@ impl<'a> Parser<'a> {
142160
// Since none of the above applied, this is an expression statement macro.
143161
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac), AttrVec::new());
144162
let e = self.maybe_recover_from_bad_qpath(e, true)?;
145-
let e = self.parse_dot_or_call_expr_with(e, lo, attrs)?;
163+
let e = self.parse_dot_or_call_expr_with(e, lo, attrs.into())?;
146164
let e = self.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(e))?;
147165
StmtKind::Expr(e)
148166
};
@@ -178,11 +196,11 @@ impl<'a> Parser<'a> {
178196
fn parse_local_mk(
179197
&mut self,
180198
lo: Span,
181-
attrs: AttrVec,
199+
attrs: AttrWrapper,
182200
capture_semi: bool,
183201
force_collect: ForceCollect,
184202
) -> PResult<'a, Stmt> {
185-
maybe_collect_tokens!(self, force_collect, &attrs, |this: &mut Parser<'a>| {
203+
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
186204
this.expect_keyword(kw::Let)?;
187205
let local = this.parse_local(attrs.into())?;
188206
let trailing = if capture_semi && this.token.kind == token::Semi {

0 commit comments

Comments
 (0)
Please sign in to comment.