diff --git a/Cargo.lock b/Cargo.lock index 470a19bfb00f5..6e1bf249b0ccc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3689,6 +3689,7 @@ dependencies = [ "rustc_lexer", "rustc_lint_defs", "rustc_macros", + "rustc_middle", "rustc_parse", "rustc_serialize", "rustc_session", diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index 43d87b96ead90..3cfd5dc271d6c 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -1,5 +1,6 @@ use std::borrow::Cow; use std::fmt; +use std::hash::Hash; use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::Lrc; @@ -21,7 +22,7 @@ use crate::ast; use crate::ptr::P; use crate::util::case::Case; -#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] +#[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug, HashStable_Generic, Hash)] pub enum CommentKind { Line, Block, diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 057b4455dca89..6af1c98a41c33 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -105,7 +105,6 @@ where } } } - pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync { fn to_attr_token_stream(&self) -> AttrTokenStream; } @@ -140,13 +139,19 @@ impl fmt::Debug for LazyAttrTokenStream { impl Encodable for LazyAttrTokenStream { fn encode(&self, _s: &mut S) { - panic!("Attempted to encode LazyAttrTokenStream"); + tracing::debug!("ENCODING {self:?}"); + self.to_attr_token_stream().encode(_s); + // panic!("Attempted to encode {self:?}"); } } impl Decodable for LazyAttrTokenStream { fn decode(_d: &mut D) -> Self { - panic!("Attempted to decode LazyAttrTokenStream"); + let ats = AttrTokenStream::decode(_d); + let res = LazyAttrTokenStream::new(ats); + tracing::debug!("DECODED {res:?}"); + res + // panic!("Attempted to decode LazyAttrTokenStream"); } } diff --git a/compiler/rustc_expand/Cargo.toml b/compiler/rustc_expand/Cargo.toml index ce014364b0d01..5e9092c84d074 100644 --- a/compiler/rustc_expand/Cargo.toml +++ b/compiler/rustc_expand/Cargo.toml @@ -20,6 +20,7 @@ rustc_fluent_macro = { path = "../rustc_fluent_macro" } rustc_lexer = { path = "../rustc_lexer" } rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } +rustc_middle = { path = "../rustc_middle" } rustc_parse = { path = "../rustc_parse" } rustc_serialize = { path = "../rustc_serialize" } rustc_session = { path = "../rustc_session" } diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs index c195d69258899..da48663faa34a 100644 --- a/compiler/rustc_expand/src/base.rs +++ b/compiler/rustc_expand/src/base.rs @@ -15,6 +15,7 @@ use rustc_data_structures::sync::{self, Lrc}; use rustc_errors::{DiagCtxtHandle, ErrorGuaranteed, PResult}; use rustc_feature::Features; use rustc_lint_defs::{BufferedEarlyLint, RegisteredTools}; +use rustc_middle::expand::TcxMacroExpander; use rustc_parse::parser::Parser; use rustc_parse::MACRO_ARGUMENTS; use rustc_session::config::CollapseMacroDebuginfo; @@ -676,6 +677,11 @@ pub enum SyntaxExtensionKind { Box, ), + TcxLegacyBang( + /// An expander with signature TokenStream -> AST. + Lrc, + ), + /// A token-based attribute macro. Attr( /// An expander with signature (TokenStream, TokenStream) -> TokenStream. @@ -754,7 +760,8 @@ impl SyntaxExtension { match self.kind { SyntaxExtensionKind::Bang(..) | SyntaxExtensionKind::LegacyBang(..) - | SyntaxExtensionKind::GlobDelegation(..) 
=> MacroKind::Bang, + | SyntaxExtensionKind::GlobDelegation(..) + | SyntaxExtensionKind::TcxLegacyBang(..) => MacroKind::Bang, SyntaxExtensionKind::Attr(..) | SyntaxExtensionKind::LegacyAttr(..) | SyntaxExtensionKind::NonMacroAttr => MacroKind::Attr, @@ -1072,6 +1079,12 @@ pub trait ResolverExpand { trait_def_id: DefId, impl_def_id: LocalDefId, ) -> Result)>, Indeterminate>; + + fn expand_legacy_bang( + &self, + invoc_id: LocalExpnId, + current_expansion: LocalExpnId, + ) -> Result<(TokenStream, usize), (Span, ErrorGuaranteed)>; } pub trait LintStoreExpand { diff --git a/compiler/rustc_expand/src/expand.rs b/compiler/rustc_expand/src/expand.rs index d8cb367e3face..b52b145199f2a 100644 --- a/compiler/rustc_expand/src/expand.rs +++ b/compiler/rustc_expand/src/expand.rs @@ -12,13 +12,16 @@ use rustc_ast::visit::{self, try_visit, walk_list, AssocCtxt, Visitor, VisitorRe use rustc_ast::{ AssocItemKind, AstNodeWrapper, AttrArgs, AttrStyle, AttrVec, ExprKind, ForeignItemKind, HasAttrs, HasNodeId, Inline, ItemKind, MacStmtStyle, MetaItemKind, ModKind, NestedMetaItem, - NodeId, PatKind, StmtKind, TyKind, + NodeId, PatKind, StmtKind, TyKind, DUMMY_NODE_ID, }; use rustc_ast_pretty::pprust; +use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::flat_map_in_place::FlatMapInPlace; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::Lrc; use rustc_errors::PResult; use rustc_feature::Features; +use rustc_middle::ty::TyCtxt; use rustc_parse::parser::{ AttemptLocalParseRecovery, CommaRecoveryMode, ForceCollect, Parser, RecoverColon, RecoverComma, }; @@ -40,6 +43,7 @@ use crate::errors::{ WrongFragmentKind, }; use crate::mbe::diagnostics::annotate_err_with_kind; +use crate::mbe::macro_rules::{trace_macros_note, ParserAnyMacro}; use crate::module::{mod_dir_path, parse_external_mod, DirOwnership, ParsedExternalMod}; use crate::placeholders::{placeholder, PlaceholderExpander}; @@ -394,6 +398,37 @@ pub struct MacroExpander<'a, 'b> { monotonic: bool, // cf. `cx.monotonic_expander()` } +#[tracing::instrument(level = "debug", skip(tcx))] +pub fn expand_legacy_bang<'tcx>( + tcx: TyCtxt<'tcx>, + key: (LocalExpnId, LocalExpnId, Fingerprint), +) -> Result<(&'tcx TokenStream, usize), (Span, ErrorGuaranteed)> { + use tracing::debug; + + let (invoc_id, current_expansion, arg_fingerprint) = key; + + let map = tcx.macro_map.borrow(); + let (arg, span, expander) = map.get(&invoc_id).as_ref().unwrap(); + debug!(?arg); + + // this (i.e., debug-printing `span`) somehow made the test pass?? + // tracing::debug!(?span); + + let arg_hash: Fingerprint = tcx.with_stable_hashing_context(|mut hcx| { + let mut hasher = StableHasher::new(); + arg.flattened().hash_stable(&mut hcx, &mut hasher); + hasher.finish() + }); + + // sanity-check, to make sure we're not running for (maybe) old arguments + // that were loaded from the cache. this would certainly be a bug. 
+ assert_eq!(arg_fingerprint, arg_hash); + + expander + .expand(&tcx.sess, *span, arg.clone(), current_expansion) + .map(|(tts, i)| (tcx.arena.alloc(tts) as &TokenStream, i)) +} + impl<'a, 'b> MacroExpander<'a, 'b> { pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self { MacroExpander { cx, monotonic } @@ -679,6 +714,63 @@ impl<'a, 'b> MacroExpander<'a, 'b> { Err(guar) => return ExpandResult::Ready(fragment_kind.dummy(span, guar)), } } + SyntaxExtensionKind::TcxLegacyBang(expander) => { + // Macros defined in the current crate have a real node id, + // whereas macros from an external crate have a dummy id. + if self.cx.trace_macros() { + let msg = format!( + "expanding `{}! {{ {} }}`", + expander.name(), + pprust::tts_to_string(&mac.args.tokens) + ); + trace_macros_note(&mut self.cx.expansions, span, msg); + } + + // Macros defined in the current crate have a real node id, + // whereas macros from an external crate have a dummy id.\ + let tok_result: Box = match self + .cx + .resolver + .expand_legacy_bang(invoc.expansion_data.id, self.cx.current_expansion.id) + { + Ok((tts, i)) => { + if self.cx.trace_macros() { + let msg = format!("to `{}`", pprust::tts_to_string(&tts)); + trace_macros_note(&mut self.cx.expansions, span, msg); + } + let is_local = expander.node_id() != DUMMY_NODE_ID; + if is_local { + self.cx.resolver.record_macro_rule_usage(expander.node_id(), i); + } + + // Let the context choose how to interpret the result. + // Weird, but useful for X-macros. + Box::new(ParserAnyMacro::new( + Parser::new(&self.cx.sess.psess, tts.clone(), None), + // Pass along the original expansion site and the name of the macro, + // so we can print a useful error message if the parse of the expanded + // macro leaves unparsed tokens. + span, + expander.name(), + self.cx.current_expansion.lint_node_id, + self.cx.current_expansion.is_trailing_mac, + expander.arm_span(i), + is_local, + )) + } + Err((span, guar)) => { + self.cx.trace_macros_diag(); + DummyResult::any(span, guar) + } + }; + let result = if let Some(result) = fragment_kind.make_from(tok_result) { + result + } else { + let guar = self.error_wrong_fragment_kind(fragment_kind, &mac, span); + fragment_kind.dummy(span, guar) + }; + result + } SyntaxExtensionKind::LegacyBang(expander) => { let tok_result = match expander.expand(self.cx, span, mac.args.tokens.clone()) { ExpandResult::Ready(tok_result) => tok_result, diff --git a/compiler/rustc_expand/src/lib.rs b/compiler/rustc_expand/src/lib.rs index 4222c9fe90616..be9f15ddfa437 100644 --- a/compiler/rustc_expand/src/lib.rs +++ b/compiler/rustc_expand/src/lib.rs @@ -26,6 +26,7 @@ mod placeholders; mod proc_macro_server; pub use mbe::macro_rules::compile_declarative_macro; +use rustc_middle::query::Providers; pub mod base; pub mod config; pub mod expand; @@ -34,4 +35,8 @@ pub mod module; #[allow(rustc::untranslatable_diagnostic)] pub mod proc_macro; +pub fn provide(providers: &mut Providers) { + providers.expand_legacy_bang = expand::expand_legacy_bang; +} + rustc_fluent_macro::fluent_messages! 
{ "../messages.ftl" } diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs index 6ce9ff18c279f..d541ba687b342 100644 --- a/compiler/rustc_expand/src/mbe/diagnostics.rs +++ b/compiler/rustc_expand/src/mbe/diagnostics.rs @@ -3,34 +3,32 @@ use std::borrow::Cow; use rustc_ast::token::{self, Token, TokenKind}; use rustc_ast::tokenstream::TokenStream; use rustc_ast_pretty::pprust; -use rustc_errors::{Applicability, Diag, DiagMessage}; +use rustc_errors::{Applicability, Diag, DiagCtxtHandle, DiagMessage}; use rustc_macros::Subdiagnostic; use rustc_parse::parser::{Parser, Recovery}; +use rustc_session::parse::ParseSess; use rustc_span::source_map::SourceMap; use rustc_span::symbol::Ident; use rustc_span::{ErrorGuaranteed, Span}; use tracing::debug; use super::macro_rules::{parser_from_cx, NoopTracker}; -use crate::base::{DummyResult, ExtCtxt, MacResult}; use crate::expand::{parse_ast_fragment, AstFragmentKind}; use crate::mbe::macro_parser::ParseResult::*; use crate::mbe::macro_parser::{MatcherLoc, NamedParseResult, TtParser}; use crate::mbe::macro_rules::{try_match_macro, Tracker}; -pub(super) fn failed_to_match_macro<'cx>( - cx: &'cx mut ExtCtxt<'_>, +pub(crate) fn failed_to_match_macro( + psess: &ParseSess, sp: Span, def_span: Span, name: Ident, arg: TokenStream, lhses: &[Vec], -) -> Box { - let psess = &cx.sess.psess; - +) -> (Span, ErrorGuaranteed) { // An error occurred, try the expansion again, tracking the expansion closely for better // diagnostics. - let mut tracker = CollectTrackerAndEmitter::new(cx, sp); + let mut tracker = CollectTrackerAndEmitter::new(psess.dcx(), sp); let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut tracker); @@ -38,7 +36,7 @@ pub(super) fn failed_to_match_macro<'cx>( // Nonterminal parser recovery might turn failed matches into successful ones, // but for that it must have emitted an error already assert!( - tracker.cx.dcx().has_errors().is_some(), + tracker.dcx.has_errors().is_some(), "Macro matching returned a success on the second try" ); } @@ -50,15 +48,15 @@ pub(super) fn failed_to_match_macro<'cx>( let Some(BestFailure { token, msg: label, remaining_matcher, .. 
}) = tracker.best_failure else { - return DummyResult::any(sp, cx.dcx().span_delayed_bug(sp, "failed to match a macro")); + return (sp, psess.dcx().span_delayed_bug(sp, "failed to match a macro")); }; let span = token.span.substitute_dummy(sp); - let mut err = cx.dcx().struct_span_err(span, parse_failure_msg(&token, None)); + let mut err = psess.dcx().struct_span_err(span, parse_failure_msg(&token, None)); err.span_label(span, label); - if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) { - err.span_label(cx.source_map().guess_head_span(def_span), "when calling this macro"); + if !def_span.is_dummy() && !psess.source_map().is_imported(def_span) { + err.span_label(psess.source_map().guess_head_span(def_span), "when calling this macro"); } annotate_doc_comment(&mut err, psess.source_map(), span); @@ -76,7 +74,7 @@ pub(super) fn failed_to_match_macro<'cx>( err.note("captured metavariables except for `:tt`, `:ident` and `:lifetime` cannot be compared to other tokens"); err.note("see for more information"); - if !def_span.is_dummy() && !cx.source_map().is_imported(def_span) { + if !def_span.is_dummy() && !psess.source_map().is_imported(def_span) { err.help("try using `:tt` instead in the macro definition"); } } @@ -104,18 +102,17 @@ pub(super) fn failed_to_match_macro<'cx>( } } let guar = err.emit(); - cx.trace_macros_diag(); - DummyResult::any(sp, guar) + (sp, guar) } /// The tracker used for the slow error path that collects useful info for diagnostics. -struct CollectTrackerAndEmitter<'a, 'cx, 'matcher> { - cx: &'a mut ExtCtxt<'cx>, +struct CollectTrackerAndEmitter<'dcx, 'matcher> { + dcx: DiagCtxtHandle<'dcx>, remaining_matcher: Option<&'matcher MatcherLoc>, /// Which arm's failure should we report? (the one furthest along) best_failure: Option, root_span: Span, - result: Option>, + result: Option<(Span, ErrorGuaranteed)>, } struct BestFailure { @@ -131,7 +128,7 @@ impl BestFailure { } } -impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, 'matcher> { +impl<'dcx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'dcx, 'matcher> { type Failure = (Token, u32, &'static str); fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure { @@ -151,7 +148,7 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, Success(_) => { // Nonterminal parser recovery might turn failed matches into successful ones, // but for that it must have emitted an error already - self.cx.dcx().span_delayed_bug( + self.dcx.span_delayed_bug( self.root_span, "should not collect detailed info for successful macro match", ); @@ -177,10 +174,10 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, } Error(err_sp, msg) => { let span = err_sp.substitute_dummy(self.root_span); - let guar = self.cx.dcx().span_err(span, msg.clone()); - self.result = Some(DummyResult::any(span, guar)); + let guar = self.dcx.span_err(span, msg.clone()); + self.result = Some((span, guar)); } - ErrorReported(guar) => self.result = Some(DummyResult::any(self.root_span, *guar)), + ErrorReported(guar) => self.result = Some((self.root_span, *guar)), } } @@ -193,9 +190,9 @@ impl<'a, 'cx, 'matcher> Tracker<'matcher> for CollectTrackerAndEmitter<'a, 'cx, } } -impl<'a, 'cx> CollectTrackerAndEmitter<'a, 'cx, '_> { - fn new(cx: &'a mut ExtCtxt<'cx>, root_span: Span) -> Self { - Self { cx, remaining_matcher: None, best_failure: None, root_span, result: None } +impl<'dcx> CollectTrackerAndEmitter<'dcx, '_> { + fn new(dcx: 
DiagCtxtHandle<'dcx>, root_span: Span) -> Self { + Self { dcx, remaining_matcher: None, best_failure: None, root_span, result: None } } } diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index 1502177563d9b..1746e59bb1aad 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -2,29 +2,30 @@ use std::borrow::Cow; use std::collections::hash_map::Entry; use std::{mem, slice}; -use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::token::NtPatKind::*; use rustc_ast::token::TokenKind::*; -use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind}; +use rustc_ast::token::{self, Delimiter, IdentIsRaw, NonterminalKind, Token, TokenKind}; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; use rustc_ast::{NodeId, DUMMY_NODE_ID}; use rustc_ast_pretty::pprust; use rustc_attr::{self as attr, TransparencyError}; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; +use rustc_data_structures::sync::Lrc; use rustc_errors::{Applicability, ErrorGuaranteed}; use rustc_feature::Features; use rustc_lint_defs::builtin::{ RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS, }; use rustc_lint_defs::BuiltinLintDiag; +use rustc_middle::expand::TcxMacroExpander; use rustc_parse::parser::{ParseNtResult, Parser, Recovery}; use rustc_session::parse::ParseSess; use rustc_session::Session; use rustc_span::edition::Edition; use rustc_span::hygiene::Transparency; use rustc_span::symbol::{kw, sym, Ident, MacroRulesNormalizedIdent}; -use rustc_span::Span; +use rustc_span::{LocalExpnId, Span}; use tracing::{debug, instrument, trace, trace_span}; use super::diagnostics; @@ -56,6 +57,18 @@ pub(crate) struct ParserAnyMacro<'a> { } impl<'a> ParserAnyMacro<'a> { + pub(crate) fn new( + parser: Parser<'a>, + site_span: Span, + macro_ident: Ident, + lint_node_id: NodeId, + is_trailing_mac: bool, + arm_span: Span, + is_local: bool, + ) -> Self { + Self { parser, site_span, macro_ident, lint_node_id, is_trailing_mac, arm_span, is_local } + } + pub(crate) fn make(mut self: Box>, kind: AstFragmentKind) -> AstFragment { let ParserAnyMacro { site_span, @@ -129,6 +142,68 @@ impl TTMacroExpander for MacroRulesMacroExpander { } } +impl TcxMacroExpander for MacroRulesMacroExpander { + fn expand( + &self, + sess: &Session, + sp: Span, + input: TokenStream, + expand_id: LocalExpnId, + ) -> Result<(TokenStream, usize), (Span, ErrorGuaranteed)> { + // Track nothing for the best performance. + let try_success_result = + try_match_macro(&sess.psess, self.name, &input, &self.lhses, &mut NoopTracker); + + match try_success_result { + Ok((i, named_matches)) => { + let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &self.rhses[i] { + mbe::TokenTree::Delimited(span, _, delimited) => (&delimited, *span), + _ => sess.dcx().span_bug(sp, "malformed macro rhs"), + }; + + // rhs has holes ( `$id` and `$(...)` that need filled) + match transcribe( + &sess.psess, + &named_matches, + rhs, + rhs_span, + self.transparency, + expand_id, + ) { + Ok(tts) => Ok((tts, i)), + Err(err) => { + let arm_span = self.rhses[i].span(); + Err((arm_span, err.emit())) + } + } + } + Err(CanRetry::No(guar)) => Err((sp, guar)), + Err(CanRetry::Yes) => { + // Retry and emit a better error. 
+ Err(crate::mbe::diagnostics::failed_to_match_macro( + &sess.psess, + sp, + self.span, + self.name, + input, + &self.lhses, + )) + } + } + } + + fn name(&self) -> Ident { + self.name + } + + fn arm_span(&self, rhs: usize) -> Span { + self.rhses[rhs].span() + } + + fn node_id(&self) -> NodeId { + self.node_id + } +} struct DummyExpander(ErrorGuaranteed); impl TTMacroExpander for DummyExpander { @@ -142,7 +217,11 @@ impl TTMacroExpander for DummyExpander { } } -fn trace_macros_note(cx_expansions: &mut FxIndexMap>, sp: Span, message: String) { +pub(crate) fn trace_macros_note( + cx_expansions: &mut FxIndexMap>, + sp: Span, + message: String, +) { let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site); cx_expansions.entry(sp).or_default().push(message); } @@ -268,7 +347,10 @@ fn expand_macro<'cx>( } Err(CanRetry::Yes) => { // Retry and emit a better error. - diagnostics::failed_to_match_macro(cx, sp, def_span, name, arg, lhses) + let (span, guar) = + diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, lhses); + cx.trace_macros_diag(); + DummyResult::any(span, guar) } } } @@ -385,6 +467,19 @@ pub fn compile_declarative_macro( ) }; let dummy_syn_ext = |guar| (mk_syn_ext(Box::new(DummyExpander(guar))), Vec::new()); + let mk_tcx_syn_ext = |expander| { + SyntaxExtension::new( + sess, + features, + SyntaxExtensionKind::TcxLegacyBang(expander), + def.span, + Vec::new(), + edition, + def.ident.name, + &def.attrs, + def.id != DUMMY_NODE_ID, + ) + }; let dcx = sess.dcx(); let lhs_nm = Ident::new(sym::lhs, def.span); @@ -598,7 +693,7 @@ pub fn compile_declarative_macro( }) .collect(); - let expander = Box::new(MacroRulesMacroExpander { + let expander = Lrc::new(MacroRulesMacroExpander { name: def.ident, span: def.span, node_id: def.id, @@ -606,7 +701,7 @@ pub fn compile_declarative_macro( lhses, rhses, }); - (mk_syn_ext(expander), rule_spans) + (mk_tcx_syn_ext(expander), rule_spans) } fn check_lhs_nt_follows( diff --git a/compiler/rustc_interface/src/passes.rs b/compiler/rustc_interface/src/passes.rs index 8c99b1f444766..d39139bcae3fb 100644 --- a/compiler/rustc_interface/src/passes.rs +++ b/compiler/rustc_interface/src/passes.rs @@ -632,6 +632,7 @@ pub static DEFAULT_QUERY_PROVIDERS: LazyLock = LazyLock::new(|| { rustc_monomorphize::provide(providers); rustc_privacy::provide(providers); rustc_resolve::provide(providers); + rustc_expand::provide(providers); rustc_hir_analysis::provide(providers); rustc_hir_typeck::provide(providers); ty::provide(providers); diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index e3d7dff3c66bb..d59ce162c9b6e 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -119,6 +119,7 @@ macro_rules! 
arena_types { [decode] specialization_graph: rustc_middle::traits::specialization_graph::Graph, [] crate_inherent_impls: rustc_middle::ty::CrateInherentImpls, [] hir_owner_nodes: rustc_hir::OwnerNodes<'tcx>, + [decode] expand_legacy_bang: rustc_ast::tokenstream::TokenStream, ]); ) } diff --git a/compiler/rustc_middle/src/expand.rs b/compiler/rustc_middle/src/expand.rs new file mode 100644 index 0000000000000..b28b18fa86793 --- /dev/null +++ b/compiler/rustc_middle/src/expand.rs @@ -0,0 +1,21 @@ +use rustc_ast::tokenstream::TokenStream; +use rustc_ast::NodeId; +use rustc_session::Session; +use rustc_span::symbol::Ident; +use rustc_span::{ErrorGuaranteed, LocalExpnId, Span}; + +pub trait TcxMacroExpander { + fn expand( + &self, + _sess: &Session, + _span: Span, + _input: TokenStream, + _expand_id: LocalExpnId, + ) -> Result<(TokenStream, usize), (Span, ErrorGuaranteed)>; + + fn name(&self) -> Ident; + + fn arm_span(&self, rhs: usize) -> Span; + + fn node_id(&self) -> NodeId; +} diff --git a/compiler/rustc_middle/src/lib.rs b/compiler/rustc_middle/src/lib.rs index b499604df87e8..5dd008a7bb6b1 100644 --- a/compiler/rustc_middle/src/lib.rs +++ b/compiler/rustc_middle/src/lib.rs @@ -73,6 +73,7 @@ mod macros; #[macro_use] pub mod arena; pub mod error; +pub mod expand; pub mod hir; pub mod hooks; pub mod infer; diff --git a/compiler/rustc_middle/src/query/erase.rs b/compiler/rustc_middle/src/query/erase.rs index bd20e6aa00537..a628e1f03f309 100644 --- a/compiler/rustc_middle/src/query/erase.rs +++ b/compiler/rustc_middle/src/query/erase.rs @@ -1,6 +1,9 @@ use std::intrinsics::transmute_unchecked; use std::mem::MaybeUninit; +use rustc_ast::tokenstream::TokenStream; +use rustc_span::{ErrorGuaranteed, Span}; + use crate::query::CyclePlaceholder; use crate::ty::adjustment::CoerceUnsizedInfo; use crate::ty::{self, Ty}; @@ -172,6 +175,10 @@ impl EraseType for Result>, CyclePlaceholder> { type Result = [u8; size_of::>, CyclePlaceholder>>()]; } +impl EraseType for Result<(&'_ TokenStream, usize), (Span, ErrorGuaranteed)> { + type Result = [u8; size_of::>()]; +} + impl EraseType for Option<&'_ T> { type Result = [u8; size_of::>()]; } diff --git a/compiler/rustc_middle/src/query/keys.rs b/compiler/rustc_middle/src/query/keys.rs index 6562d46d7b866..fd4c27e9ca5bd 100644 --- a/compiler/rustc_middle/src/query/keys.rs +++ b/compiler/rustc_middle/src/query/keys.rs @@ -1,10 +1,11 @@ //! Defines the set of legal keys that can be used in queries. 
+use rustc_data_structures::fingerprint::Fingerprint; use rustc_hir::def_id::{CrateNum, DefId, LocalDefId, LocalModDefId, ModDefId, LOCAL_CRATE}; use rustc_hir::hir_id::{HirId, OwnerId}; use rustc_query_system::query::{DefIdCache, DefaultCache, SingleCache, VecCache}; use rustc_span::symbol::{Ident, Symbol}; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::{LocalExpnId, Span, DUMMY_SP}; use rustc_target::abi; use crate::infer::canonical::Canonical; @@ -591,3 +592,15 @@ impl<'tcx> Key for (ValidityRequirement, ty::ParamEnvAnd<'tcx, Ty<'tcx>>) { } } } + +impl Key for (LocalExpnId, LocalExpnId, Fingerprint) { + type Cache = DefaultCache; + + fn default_span(&self, _: TyCtxt<'_>) -> Span { + DUMMY_SP + } + + fn ty_def_id(&self) -> Option { + None + } +} diff --git a/compiler/rustc_middle/src/query/mod.rs b/compiler/rustc_middle/src/query/mod.rs index c22c2e985abba..3f050c7ab00ea 100644 --- a/compiler/rustc_middle/src/query/mod.rs +++ b/compiler/rustc_middle/src/query/mod.rs @@ -14,6 +14,7 @@ use std::sync::Arc; use rustc_arena::TypedArena; use rustc_ast::expand::allocator::AllocatorKind; use rustc_ast::expand::StrippedCfgItem; +use rustc_ast::tokenstream::TokenStream; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; use rustc_data_structures::steal::Steal; @@ -39,7 +40,7 @@ use rustc_session::lint::LintExpectationId; use rustc_session::Limits; use rustc_span::def_id::LOCAL_CRATE; use rustc_span::symbol::Symbol; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::{LocalExpnId, Span, DUMMY_SP}; use rustc_target::abi; use rustc_target::spec::PanicStrategy; use {rustc_ast as ast, rustc_attr as attr, rustc_hir as hir}; @@ -109,6 +110,13 @@ rustc_queries! { desc { "triggering a delayed bug for testing incremental" } } + query expand_legacy_bang(key: (LocalExpnId, LocalExpnId, Fingerprint)) -> Result<(&'tcx TokenStream, usize), (Span, ErrorGuaranteed)> { + no_hash + // eval_always + cache_on_disk_if { true } + desc { "expand legacy bang" } + } + /// Collects the list of all tools registered using `#![register_tool]`. query registered_tools(_: ()) -> &'tcx ty::RegisteredTools { arena_cache diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index ca52358218e92..da6bc33feaac0 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -790,6 +790,13 @@ impl<'a, 'tcx> Decodable> } } +impl<'a, 'tcx> Decodable> for &'tcx rustc_ast::tokenstream::TokenStream { + #[inline] + fn decode(d: &mut CacheDecoder<'a, 'tcx>) -> Self { + RefDecodable::decode(d) + } +} + macro_rules! 
impl_ref_decoder { (<$tcx:tt> $($ty:ty,)*) => { $(impl<'a, $tcx> Decodable> for &$tcx [$ty] { diff --git a/compiler/rustc_middle/src/ty/codec.rs b/compiler/rustc_middle/src/ty/codec.rs index 401f6da6526aa..2ff2fcc47345d 100644 --- a/compiler/rustc_middle/src/ty/codec.rs +++ b/compiler/rustc_middle/src/ty/codec.rs @@ -335,6 +335,16 @@ impl<'tcx, D: TyDecoder>> RefDecodable<'tcx, D> for ty::List>> RefDecodable<'tcx, D> +// for rustc_ast::tokenstream::TokenStream +// { +// fn decode(_decoder: &mut D) -> &'tcx Self { +// // use rustc_ast::HasTokens; +// // _decoder.interner().tokens_mut().unwrap().as_ref(); +// todo!("felix foobar next") +// } +// } + impl<'tcx, D: TyDecoder>> RefDecodable<'tcx, D> for ty::List> { diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 8f8fd09c9e4d9..c72607ae2acb4 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -12,6 +12,7 @@ use std::marker::PhantomData; use std::ops::{Bound, Deref}; use std::{fmt, iter, mem}; +use rustc_ast::tokenstream::TokenStream; use rustc_ast::{self as ast, attr}; use rustc_data_structures::defer; use rustc_data_structures::fingerprint::Fingerprint; @@ -37,6 +38,7 @@ use rustc_hir::lang_items::LangItem; use rustc_hir::{HirId, Node, TraitCandidate}; use rustc_index::IndexVec; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; +use rustc_middle::expand::TcxMacroExpander; use rustc_query_system::cache::WithDepNode; use rustc_query_system::dep_graph::DepNodeIndex; use rustc_query_system::ich::StableHashingContext; @@ -47,7 +49,7 @@ use rustc_session::lint::Lint; use rustc_session::{Limit, MetadataKind, Session}; use rustc_span::def_id::{DefPathHash, StableCrateId, CRATE_DEF_ID}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::{LocalExpnId, Span, DUMMY_SP}; use rustc_target::abi::{FieldIdx, Layout, LayoutS, TargetDataLayout, VariantIdx}; use rustc_target::spec::abi; use rustc_type_ir::fold::TypeFoldable; @@ -1303,8 +1305,14 @@ pub struct GlobalCtxt<'tcx> { /// Stores memory for globals (statics/consts). 
pub(crate) alloc_map: Lock>, - current_gcx: CurrentGcx, + + pub macro_map: RwLock< + FxHashMap< + LocalExpnId, + (TokenStream, Span, Lrc), + >, + >, } impl<'tcx> GlobalCtxt<'tcx> { @@ -1534,6 +1542,7 @@ impl<'tcx> TyCtxt<'tcx> { canonical_param_env_cache: Default::default(), data_layout, alloc_map: Lock::new(interpret::AllocMap::new()), + macro_map: RwLock::new(Default::default()), current_gcx, } } diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs index 64ae0d82952d5..add091afcc85c 100644 --- a/compiler/rustc_resolve/src/macros.rs +++ b/compiler/rustc_resolve/src/macros.rs @@ -5,10 +5,13 @@ use std::cell::Cell; use std::mem; use rustc_ast::expand::StrippedCfgItem; +use rustc_ast::tokenstream::TokenStream; use rustc_ast::{self as ast, attr, Crate, Inline, ItemKind, ModKind, NodeId}; use rustc_ast_pretty::pprust; use rustc_attr::StabilityLevel; +use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::intern::Interned; +use rustc_data_structures::stable_hasher::{HashStable, StableHasher}; use rustc_data_structures::sync::Lrc; use rustc_errors::{Applicability, StashKey}; use rustc_expand::base::{ @@ -33,7 +36,7 @@ use rustc_span::edit_distance::edit_distance; use rustc_span::edition::Edition; use rustc_span::hygiene::{self, AstPass, ExpnData, ExpnKind, LocalExpnId, MacroKind}; use rustc_span::symbol::{kw, sym, Ident, Symbol}; -use rustc_span::{Span, DUMMY_SP}; +use rustc_span::{ErrorGuaranteed, Span, DUMMY_SP}; use crate::errors::{ self, AddAsNonDerive, CannotDetermineMacroResolution, CannotFindIdentInThisScope, @@ -333,6 +336,14 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> { ), self.create_stable_hashing_context(), ); + if let SyntaxExtensionKind::TcxLegacyBang(tcx_expander) = &ext.kind { + if let InvocationKind::Bang { ref mac, span } = invoc.kind { + self.tcx + .macro_map + .borrow_mut() + .insert(invoc_id, (mac.args.tokens.clone(), span, tcx_expander.clone())); + } + } Ok(ext) } @@ -524,6 +535,34 @@ impl<'a, 'tcx> ResolverExpand for Resolver<'a, 'tcx> { }); Ok(idents) } + + fn expand_legacy_bang( + &self, + invoc_id: LocalExpnId, + current_expansion: LocalExpnId, + ) -> Result<(TokenStream, usize), (Span, ErrorGuaranteed)> { + use tracing::debug; + + debug!( + invoc_id_hash = ?invoc_id.to_expn_id().expn_hash(), + current_expansion_hash = ?current_expansion.to_expn_id().expn_hash() + ); + + let map = self.tcx().macro_map.borrow(); + let (arg, _span, _expander) = map.get(&invoc_id).as_ref().unwrap(); + + debug!(?arg); + + let arg_hash: Fingerprint = self.tcx.with_stable_hashing_context(|mut hcx| { + let mut hasher = StableHasher::new(); + arg.flattened().hash_stable(&mut hcx, &mut hasher); + hasher.finish() + }); + + self.tcx() + .expand_legacy_bang((invoc_id, current_expansion, arg_hash)) + .map(|(tts, i)| (tts.clone(), i)) + } } impl<'a, 'tcx> Resolver<'a, 'tcx> { diff --git a/compiler/rustc_span/src/hygiene.rs b/compiler/rustc_span/src/hygiene.rs index 434df35a5156f..451ec812760ea 100644 --- a/compiler/rustc_span/src/hygiene.rs +++ b/compiler/rustc_span/src/hygiene.rs @@ -1564,3 +1564,9 @@ impl HashStable for ExpnId { hash.hash_stable(ctx, hasher); } } + +impl HashStable for LocalExpnId { + fn hash_stable(&self, ctx: &mut CTX, hasher: &mut StableHasher) { + self.to_expn_id().hash_stable(ctx, hasher) + } +}
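
Illustrative sketch (editorial addition, not part of the patch and not rustc API): the core pattern in the diff above is to key the new `expand_legacy_bang` query on `(invoc_id, current_expansion, arg_fingerprint)`, where `arg_fingerprint` is a stable hash of the invocation's token stream, and then to recompute the fingerprint inside the provider and `assert_eq!` it against the key so a stale `tcx.macro_map` entry can never be expanded silently. The standalone program below mirrors that shape using only std types; `ExpansionCache`, `macro_map`, and `fingerprint` are made-up stand-ins for `TyCtxt`, `tcx.macro_map`, and `StableHasher`/`HashStable`, and `DefaultHasher` plays the role of rustc's stable hasher.

// Standalone illustration of "fingerprint as part of the query key":
// the cached result is keyed on (invocation id, fingerprint of the args),
// and the provider re-checks the fingerprint before expanding.
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::hash::{Hash, Hasher};

type Fingerprint = u64;

fn fingerprint<T: Hash>(value: &T) -> Fingerprint {
    // Stand-in for StableHasher + HashStable in the real patch.
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

struct ExpansionCache {
    // Mirrors the query key shape: (invocation id, fingerprint of the args).
    results: HashMap<(u32, Fingerprint), String>,
    // Plays the role of `tcx.macro_map`: invocation id -> recorded macro arguments.
    macro_map: HashMap<u32, String>,
}

impl ExpansionCache {
    fn expand(&mut self, invoc_id: u32, arg_fingerprint: Fingerprint) -> &String {
        let args = self.macro_map.get(&invoc_id).expect("args recorded at resolution time");
        // Sanity check, as in the patch's `expand_legacy_bang` provider: the
        // fingerprint carried in the key must match the arguments we are
        // actually about to expand.
        assert_eq!(arg_fingerprint, fingerprint(args));
        self.results
            .entry((invoc_id, arg_fingerprint))
            .or_insert_with(|| format!("expanded({args})"))
    }
}

fn main() {
    let mut cache = ExpansionCache { results: HashMap::new(), macro_map: HashMap::new() };
    cache.macro_map.insert(0, "a + b".to_string());
    let fp = fingerprint(&"a + b".to_string());
    println!("{}", cache.expand(0, fp)); // computed once on the first call
    println!("{}", cache.expand(0, fp)); // served from the cache afterwards
}

The same reasoning applies in the patch itself: because the query key carries the fingerprint, any change to the invocation's token stream changes the key, and the `assert_eq!(arg_fingerprint, arg_hash)` inside `expand_legacy_bang` guards against the key and the contents of `tcx.macro_map` drifting apart (for example via stale entries loaded from the incremental cache).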