Rollup of 8 pull requests #101733

Closed
wants to merge 22 commits into from
Changes from all commits
22 commits
315d12d
Fix ReErased leaking into typeck due to typeof recovery
compiler-errors Aug 5, 2022
890e759
Move `Spacing` out of `AttrAnnotatedTokenStream`.
nnethercote Sep 9, 2022
a56d345
Rename `AttrAnnotatedToken{Stream,Tree}`.
nnethercote Sep 9, 2022
208ca93
Change return type of `Attribute::tokens`.
nnethercote Sep 9, 2022
81eaf87
Tweak some formatting.
nnethercote Sep 9, 2022
f6c9e1d
Inline and remove `TokenStream::opt_from_ast`.
nnethercote Sep 9, 2022
d2df07c
Rename `{Create,Lazy}TokenStream` as `{To,Lazy}AttrTokenStream`.
nnethercote Sep 9, 2022
2126622
Add test for #101211
winxpqq955 Sep 11, 2022
bd54846
A SubstitutionPart is not a deletion if it replaces nothing with nothing
compiler-errors Sep 11, 2022
3bf33c3
Don't trim substitution if it's only whitespace
compiler-errors Sep 11, 2022
cb02b64
constify `CStr` methods
WaffleLapkin Aug 8, 2022
2b7fb8d
Improve diagnostic for .await-ing non-futures
Sep 12, 2022
9139891
Allow unauthenticated users to add the `const-hack` label
fee1-dead Sep 12, 2022
84ca399
rustdoc: improve rustdoc HTML suggestions handling of nested generics
notriddle Sep 12, 2022
81ea7fe
Rollup merge of #100185 - compiler-errors:issue-100183, r=wesleywiser
GuillaumeGomez Sep 12, 2022
003efd1
Rollup merge of #100291 - WaffleLapkin:cstr_const_methods, r=oli-obk
GuillaumeGomez Sep 12, 2022
84fa199
Rollup merge of #101602 - nnethercote:AttrTokenStream, r=petrochenkov
GuillaumeGomez Sep 12, 2022
ba287f8
Rollup merge of #101677 - winxpqq955:issue-101211, r=fee1-dead
GuillaumeGomez Sep 12, 2022
091e699
Rollup merge of #101700 - compiler-errors:deletion-span, r=davidtwco
GuillaumeGomez Sep 12, 2022
5233f68
Rollup merge of #101723 - lukas-code:await-diag, r=compiler-errors
GuillaumeGomez Sep 12, 2022
c75f513
Rollup merge of #101724 - fee1-dead-contrib:triage-const-hack, r=oli-obk
GuillaumeGomez Sep 12, 2022
c8d5541
Rollup merge of #101731 - notriddle:notriddle/more-improved-html-chec…
GuillaumeGomez Sep 12, 2022
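Several of the commits above are one coordinated renaming of the attribute-token-stream machinery in `rustc_ast`, which the diffs below apply mechanically. As a quick orientation, here is the old-name/new-name mapping restated as hypothetical stub declarations — placeholders that show the mapping only, not the real rustc definitions.

```rust
// Illustrative stubs only (assumed shapes, not the real rustc_ast items);
// the point is the renaming applied throughout the diffs below.
pub struct AttrTokenStream;     // was: AttrAnnotatedTokenStream
pub enum AttrTokenTree {}       // was: AttrAnnotatedTokenTree
pub struct LazyAttrTokenStream; // was: LazyTokenStream
pub trait ToAttrTokenStream {}  // was: CreateTokenStream

impl LazyAttrTokenStream {
    // was: create_token_stream()
    pub fn to_attr_token_stream(&self) -> AttrTokenStream {
        AttrTokenStream
    }
}
```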
28 changes: 14 additions & 14 deletions compiler/rustc_ast/src/ast.rs
@@ -24,7 +24,7 @@ pub use UnsafeSource::*;

use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter};
use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream};
use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, TokenStream};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_data_structures::sync::Lrc;
@@ -91,7 +91,7 @@ pub struct Path {
/// The segments in the path: the things separated by `::`.
/// Global paths begin with `kw::PathRoot`.
pub segments: Vec<PathSegment>,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl PartialEq<Symbol> for Path {
@@ -534,7 +534,7 @@ pub struct Block {
/// Distinguishes between `unsafe { ... }` and `{ ... }`.
pub rules: BlockCheckMode,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
/// The following *isn't* a parse error, but will cause multiple errors in following stages.
/// ```compile_fail
/// let x = {
@@ -553,7 +553,7 @@ pub struct Pat {
pub id: NodeId,
pub kind: PatKind,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl Pat {
@@ -937,8 +937,8 @@ impl Stmt {
/// a trailing semicolon.
///
/// This only modifies the parsed AST struct, not the attached
/// `LazyTokenStream`. The parser is responsible for calling
/// `CreateTokenStream::add_trailing_semi` when there is actually
/// `LazyAttrTokenStream`. The parser is responsible for calling
/// `ToAttrTokenStream::add_trailing_semi` when there is actually
/// a semicolon in the tokenstream.
pub fn add_trailing_semicolon(mut self) -> Self {
self.kind = match self.kind {
@@ -984,7 +984,7 @@ pub struct MacCallStmt {
pub mac: P<MacCall>,
pub style: MacStmtStyle,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)]
@@ -1009,7 +1009,7 @@ pub struct Local {
pub kind: LocalKind,
pub span: Span,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

#[derive(Clone, Encodable, Decodable, Debug)]
@@ -1108,7 +1108,7 @@ pub struct Expr {
pub kind: ExprKind,
pub span: Span,
pub attrs: AttrVec,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl Expr {
@@ -1967,7 +1967,7 @@ pub struct Ty {
pub id: NodeId,
pub kind: TyKind,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl Clone for Ty {
@@ -2532,7 +2532,7 @@ impl<D: Decoder> Decodable<D> for AttrId {
pub struct AttrItem {
pub path: Path,
pub args: MacArgs,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

/// A list of attributes.
@@ -2552,7 +2552,7 @@ pub struct Attribute {
#[derive(Clone, Encodable, Decodable, Debug)]
pub struct NormalAttr {
pub item: AttrItem,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

#[derive(Clone, Encodable, Decodable, Debug)]
@@ -2603,7 +2603,7 @@ impl PolyTraitRef {
pub struct Visibility {
pub kind: VisibilityKind,
pub span: Span,
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

#[derive(Clone, Encodable, Decodable, Debug)]
@@ -2689,7 +2689,7 @@ pub struct Item<K = ItemKind> {
///
/// Note that the tokens here do not include the outer attributes, but will
/// include inner attributes.
pub tokens: Option<LazyTokenStream>,
pub tokens: Option<LazyAttrTokenStream>,
}

impl Item {
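The ast.rs hunks are mechanical: every AST node that carries collected tokens changes its field from `Option<LazyTokenStream>` to `Option<LazyAttrTokenStream>`. The sketch below models that "node plus lazily materialized token stream" shape with toy standalone types — a closure-backed thunk standing in for the real lazy stream — so it illustrates the pattern only, not rustc code.

```rust
use std::rc::Rc;

// Toy stand-ins for the rustc_ast types; only the laziness pattern matters here.
#[derive(Clone, Debug)]
struct AttrTokenStream(Vec<String>);

// A lazily evaluated token stream: tokens are materialized only when requested.
#[derive(Clone)]
struct LazyAttrTokenStream(Rc<dyn Fn() -> AttrTokenStream>);

impl LazyAttrTokenStream {
    fn new(make: impl Fn() -> AttrTokenStream + 'static) -> Self {
        let thunk: Rc<dyn Fn() -> AttrTokenStream> = Rc::new(make);
        LazyAttrTokenStream(thunk)
    }
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        (self.0)()
    }
}

// Mirrors the shape of the changed AST nodes: parsed data plus optional lazy tokens.
struct Expr {
    kind: String,
    tokens: Option<LazyAttrTokenStream>,
}

fn main() {
    let expr = Expr {
        kind: "1 + 2".to_string(),
        tokens: Some(LazyAttrTokenStream::new(|| {
            AttrTokenStream(vec!["1".into(), "+".into(), "2".into()])
        })),
    };
    // Tokens are produced on demand, e.g. when a proc macro needs the original input.
    if let Some(lazy) = &expr.tokens {
        println!("expr `{}` tokens: {:?}", expr.kind, lazy.to_attr_token_stream());
    }
}
```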
38 changes: 19 additions & 19 deletions compiler/rustc_ast/src/ast_traits.rs
@@ -4,7 +4,7 @@

use crate::ptr::P;
use crate::token::Nonterminal;
use crate::tokenstream::LazyTokenStream;
use crate::tokenstream::LazyAttrTokenStream;
use crate::{Arm, Crate, ExprField, FieldDef, GenericParam, Param, PatField, Variant};
use crate::{AssocItem, Expr, ForeignItem, Item, NodeId};
use crate::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};
@@ -124,18 +124,18 @@ impl HasSpan for AttrItem {

/// A trait for AST nodes having (or not having) collected tokens.
pub trait HasTokens {
fn tokens(&self) -> Option<&LazyTokenStream>;
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>>;
fn tokens(&self) -> Option<&LazyAttrTokenStream>;
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>>;
}

macro_rules! impl_has_tokens {
($($T:ty),+ $(,)?) => {
$(
impl HasTokens for $T {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
self.tokens.as_ref()
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
Some(&mut self.tokens)
}
}
@@ -147,10 +147,10 @@ macro_rules! impl_has_tokens_none {
($($T:ty),+ $(,)?) => {
$(
impl HasTokens for $T {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
None
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
None
}
}
@@ -162,25 +162,25 @@ impl_has_tokens!(AssocItem, AttrItem, Block, Expr, ForeignItem, Item, Pat, Path,
impl_has_tokens_none!(Arm, ExprField, FieldDef, GenericParam, Param, PatField, Variant);

impl<T: AstDeref<Target: HasTokens>> HasTokens for T {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
self.ast_deref().tokens()
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
self.ast_deref_mut().tokens_mut()
}
}

impl<T: HasTokens> HasTokens for Option<T> {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
self.as_ref().and_then(|inner| inner.tokens())
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
self.as_mut().and_then(|inner| inner.tokens_mut())
}
}

impl HasTokens for StmtKind {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
match self {
StmtKind::Local(local) => local.tokens.as_ref(),
StmtKind::Item(item) => item.tokens(),
@@ -189,7 +189,7 @@ impl HasTokens for StmtKind {
StmtKind::MacCall(mac) => mac.tokens.as_ref(),
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
match self {
StmtKind::Local(local) => Some(&mut local.tokens),
StmtKind::Item(item) => item.tokens_mut(),
@@ -201,24 +201,24 @@ }
}

impl HasTokens for Stmt {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
self.kind.tokens()
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
self.kind.tokens_mut()
}
}

impl HasTokens for Attribute {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
match &self.kind {
AttrKind::Normal(normal) => normal.tokens.as_ref(),
kind @ AttrKind::DocComment(..) => {
panic!("Called tokens on doc comment attr {:?}", kind)
}
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
Some(match &mut self.kind {
AttrKind::Normal(normal) => &mut normal.tokens,
kind @ AttrKind::DocComment(..) => {
@@ -229,7 +229,7 @@ impl HasTokens for Attribute {
}

impl HasTokens for Nonterminal {
fn tokens(&self) -> Option<&LazyTokenStream> {
fn tokens(&self) -> Option<&LazyAttrTokenStream> {
match self {
Nonterminal::NtItem(item) => item.tokens(),
Nonterminal::NtStmt(stmt) => stmt.tokens(),
@@ -243,7 +243,7 @@ impl HasTokens for Nonterminal {
Nonterminal::NtIdent(..) | Nonterminal::NtLifetime(..) => None,
}
}
fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
match self {
Nonterminal::NtItem(item) => item.tokens_mut(),
Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),
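The ast_traits.rs hunks retarget the `HasTokens` trait at the renamed `LazyAttrTokenStream` and keep the `impl_has_tokens!`/`impl_has_tokens_none!` macros that generate the per-node accessor impls. Here is a self-contained sketch of that macro pattern with toy node types; the macro body mirrors the diff, while the surrounding types are placeholders.

```rust
// Self-contained sketch of the impl_has_tokens! pattern from ast_traits.rs.
// The types are toy placeholders, not the real rustc_ast definitions.
struct LazyAttrTokenStream;

trait HasTokens {
    fn tokens(&self) -> Option<&LazyAttrTokenStream>;
    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>>;
}

// Generates identical accessor impls for every node type that stores `tokens`.
macro_rules! impl_has_tokens {
    ($($T:ty),+ $(,)?) => {
        $(
            impl HasTokens for $T {
                fn tokens(&self) -> Option<&LazyAttrTokenStream> {
                    self.tokens.as_ref()
                }
                fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
                    Some(&mut self.tokens)
                }
            }
        )+
    };
}

struct Expr { tokens: Option<LazyAttrTokenStream> }
struct Pat { tokens: Option<LazyAttrTokenStream> }

impl_has_tokens!(Expr, Pat);

fn main() {
    let mut e = Expr { tokens: None };
    // The slot itself is reachable even when no tokens were collected yet.
    assert!(e.tokens().is_none());
    *e.tokens_mut().unwrap() = Some(LazyAttrTokenStream);
    assert!(e.tokens().is_some());
}
```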
19 changes: 8 additions & 11 deletions compiler/rustc_ast/src/attr/mod.rs
@@ -7,9 +7,8 @@ use crate::ast::{MacArgs, MacArgsEq, MacDelimiter, MetaItem, MetaItemKind, Neste
use crate::ast::{Path, PathSegment};
use crate::ptr::P;
use crate::token::{self, CommentKind, Delimiter, Token};
use crate::tokenstream::{AttrAnnotatedTokenStream, AttrAnnotatedTokenTree};
use crate::tokenstream::{DelimSpan, Spacing, TokenTree};
use crate::tokenstream::{LazyTokenStream, TokenStream};
use crate::tokenstream::{LazyAttrTokenStream, TokenStream};
use crate::util::comments;

use rustc_index::bit_set::GrowableBitSet;
@@ -296,20 +295,18 @@ impl Attribute {
}
}

pub fn tokens(&self) -> AttrAnnotatedTokenStream {
pub fn tokens(&self) -> TokenStream {
match self.kind {
AttrKind::Normal(ref normal) => normal
.tokens
.as_ref()
.unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
.create_token_stream(),
AttrKind::DocComment(comment_kind, data) => AttrAnnotatedTokenStream::from((
AttrAnnotatedTokenTree::Token(Token::new(
token::DocComment(comment_kind, self.style, data),
self.span,
)),
.to_attr_token_stream()
.to_tokenstream(),
AttrKind::DocComment(comment_kind, data) => TokenStream::new(vec![TokenTree::Token(
Token::new(token::DocComment(comment_kind, self.style, data), self.span),
Spacing::Alone,
)),
)]),
}
}
}
@@ -356,7 +353,7 @@ pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attri

pub fn mk_attr_from_item(
item: AttrItem,
tokens: Option<LazyTokenStream>,
tokens: Option<LazyAttrTokenStream>,
style: AttrStyle,
span: Span,
) -> Attribute {
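The attr/mod.rs hunk changes `Attribute::tokens` to return a plain `TokenStream`: the normal-attribute branch now goes through `to_attr_token_stream().to_tokenstream()`, and the doc-comment branch builds the stream directly. The sketch below models that two-step conversion chain with toy types standing in for the rustc ones; the method names follow the diff, everything else is an illustrative assumption.

```rust
// Toy model of the new conversion chain in Attribute::tokens():
// LazyAttrTokenStream -> AttrTokenStream -> TokenStream.
#[derive(Debug)]
struct TokenStream(Vec<String>);

struct AttrTokenStream(Vec<String>);

impl AttrTokenStream {
    // Flattens attribute-annotated trees into plain tokens (greatly simplified).
    fn to_tokenstream(&self) -> TokenStream {
        TokenStream(self.0.clone())
    }
}

struct LazyAttrTokenStream(Vec<String>);

impl LazyAttrTokenStream {
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        AttrTokenStream(self.0.clone())
    }
}

struct NormalAttr {
    tokens: Option<LazyAttrTokenStream>,
}

impl NormalAttr {
    // Mirrors the new shape of Attribute::tokens() for the normal-attribute case.
    fn tokens(&self) -> TokenStream {
        self.tokens
            .as_ref()
            .expect("attribute is missing tokens")
            .to_attr_token_stream()
            .to_tokenstream()
    }
}

fn main() {
    let attr = NormalAttr {
        tokens: Some(LazyAttrTokenStream(vec!["derive".into(), "(Debug)".into()])),
    };
    println!("{:?}", attr.tokens());
}
```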
1 change: 1 addition & 0 deletions compiler/rustc_ast/src/lib.rs
@@ -15,6 +15,7 @@
#![feature(if_let_guard)]
#![cfg_attr(bootstrap, feature(label_break_value))]
#![feature(let_chains)]
#![feature(let_else)]
#![feature(min_specialization)]
#![feature(negative_impls)]
#![feature(slice_internals)]
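The lib.rs hunk only adds the `#![feature(let_else)]` gate, which the refactored code elsewhere in this rollup relies on; let-else was still unstable at this point (it was later stabilized in Rust 1.65). For reference, a minimal example of the syntax that gate enables:

```rust
// Minimal let-else example; on nightly compilers of this era the crate-level
// #![feature(let_else)] attribute was required for this syntax.
fn first_char(s: &str) -> char {
    let Some(c) = s.chars().next() else {
        return '?';
    };
    c
}

fn main() {
    assert_eq!(first_char("rollup"), 'r');
    assert_eq!(first_char(""), '?');
}
```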
30 changes: 15 additions & 15 deletions compiler/rustc_ast/src/mut_visit.rs
@@ -642,17 +642,17 @@ pub fn noop_flat_map_param<T: MutVisitor>(mut param: Param, vis: &mut T) -> Smal
}

// No `noop_` prefix because there isn't a corresponding method in `MutVisitor`.
pub fn visit_attr_annotated_tt<T: MutVisitor>(tt: &mut AttrAnnotatedTokenTree, vis: &mut T) {
pub fn visit_attr_tt<T: MutVisitor>(tt: &mut AttrTokenTree, vis: &mut T) {
match tt {
AttrAnnotatedTokenTree::Token(token) => {
AttrTokenTree::Token(token, _) => {
visit_token(token, vis);
}
AttrAnnotatedTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
AttrTokenTree::Delimited(DelimSpan { open, close }, _delim, tts) => {
vis.visit_span(open);
vis.visit_span(close);
visit_attr_annotated_tts(tts, vis);
visit_attr_tts(tts, vis);
}
AttrAnnotatedTokenTree::Attributes(data) => {
AttrTokenTree::Attributes(data) => {
for attr in &mut *data.attrs {
match &mut attr.kind {
AttrKind::Normal(normal) => {
@@ -690,27 +690,27 @@ pub fn visit_tts<T: MutVisitor>(TokenStream(tts): &mut TokenStream, vis: &mut T)
}
}

pub fn visit_attr_annotated_tts<T: MutVisitor>(
AttrAnnotatedTokenStream(tts): &mut AttrAnnotatedTokenStream,
vis: &mut T,
) {
pub fn visit_attr_tts<T: MutVisitor>(AttrTokenStream(tts): &mut AttrTokenStream, vis: &mut T) {
if T::VISIT_TOKENS && !tts.is_empty() {
let tts = Lrc::make_mut(tts);
visit_vec(tts, |(tree, _is_joint)| visit_attr_annotated_tt(tree, vis));
visit_vec(tts, |tree| visit_attr_tt(tree, vis));
}
}

pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyTokenStream>, vis: &mut T) {
pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(
lazy_tts: Option<&mut LazyAttrTokenStream>,
vis: &mut T,
) {
if T::VISIT_TOKENS {
if let Some(lazy_tts) = lazy_tts {
let mut tts = lazy_tts.create_token_stream();
visit_attr_annotated_tts(&mut tts, vis);
*lazy_tts = LazyTokenStream::new(tts);
let mut tts = lazy_tts.to_attr_token_stream();
visit_attr_tts(&mut tts, vis);
*lazy_tts = LazyAttrTokenStream::new(tts);
}
}
}

pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyAttrTokenStream>, vis: &mut T) {
visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis);
}

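The mut_visit.rs hunks rename the visitor helpers (`visit_attr_tt`, `visit_attr_tts`) and show the round trip used for lazy streams in `visit_lazy_tts_opt_mut`: materialize with `to_attr_token_stream()`, visit the result, then re-wrap with `LazyAttrTokenStream::new`. A standalone sketch of that materialize/mutate/re-wrap pattern, using toy eager types rather than the real lazy ones:

```rust
// Standalone sketch of the materialize -> mutate -> re-wrap pattern in
// visit_lazy_tts_opt_mut. Toy types only; the real wrapper is lazy, this one is eager.
#[derive(Clone, Debug)]
struct AttrTokenStream(Vec<String>);

#[derive(Clone, Debug)]
struct LazyAttrTokenStream(AttrTokenStream);

impl LazyAttrTokenStream {
    fn new(tts: AttrTokenStream) -> Self {
        LazyAttrTokenStream(tts)
    }
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        self.0.clone()
    }
}

// Applies a visitor (here: any string mutation) to every token in an optional
// lazy stream, rebuilding the wrapper afterwards.
fn visit_lazy_tts_opt_mut(
    lazy_tts: Option<&mut LazyAttrTokenStream>,
    visit: impl Fn(&mut String),
) {
    if let Some(lazy_tts) = lazy_tts {
        let mut tts = lazy_tts.to_attr_token_stream(); // materialize
        for tok in &mut tts.0 {
            visit(tok); // mutate
        }
        *lazy_tts = LazyAttrTokenStream::new(tts); // re-wrap
    }
}

fn main() {
    let mut lazy = Some(LazyAttrTokenStream::new(AttrTokenStream(vec!["foo".into()])));
    visit_lazy_tts_opt_mut(lazy.as_mut(), |t| *t = t.to_uppercase());
    println!("{:?}", lazy);
}
```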