@@ -3,12 +3,11 @@ use std::collections::hash_map::Entry;
 use std::{mem, slice};
 
 use ast::token::IdentIsRaw;
-use rustc_ast as ast;
 use rustc_ast::token::NtPatKind::*;
 use rustc_ast::token::TokenKind::*;
 use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
-use rustc_ast::{DUMMY_NODE_ID, NodeId};
+use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
 use rustc_ast_pretty::pprust;
 use rustc_attr::{self as attr, TransparencyError};
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
@@ -370,34 +369,32 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
 pub fn compile_declarative_macro(
     sess: &Session,
     features: &Features,
-    def: &ast::Item,
+    macro_def: &ast::MacroDef,
+    ident: Ident,
+    attrs: &[ast::Attribute],
+    span: Span,
+    node_id: NodeId,
     edition: Edition,
 ) -> (SyntaxExtension, Vec<(usize, Span)>) {
-    debug!("compile_declarative_macro: {:?}", def);
     let mk_syn_ext = |expander| {
         SyntaxExtension::new(
             sess,
             features,
             SyntaxExtensionKind::LegacyBang(expander),
-            def.span,
+            span,
             Vec::new(),
             edition,
-            def.ident.name,
-            &def.attrs,
-            def.id != DUMMY_NODE_ID,
+            ident.name,
+            attrs,
+            node_id != DUMMY_NODE_ID,
         )
     };
     let dummy_syn_ext = |guar| (mk_syn_ext(Box::new(DummyExpander(guar))), Vec::new());
 
     let dcx = sess.dcx();
-    let lhs_nm = Ident::new(sym::lhs, def.span);
-    let rhs_nm = Ident::new(sym::rhs, def.span);
+    let lhs_nm = Ident::new(sym::lhs, span);
+    let rhs_nm = Ident::new(sym::rhs, span);
     let tt_spec = Some(NonterminalKind::TT);
-
-    let macro_def = match &def.kind {
-        ast::ItemKind::MacroDef(def) => def,
-        _ => unreachable!(),
-    };
     let macro_rules = macro_def.macro_rules;
 
     // Parse the macro_rules! invocation
@@ -410,25 +407,22 @@ pub fn compile_declarative_macro(
     let argument_gram = vec![
         mbe::TokenTree::Sequence(DelimSpan::dummy(), mbe::SequenceRepetition {
             tts: vec![
-                mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
-                mbe::TokenTree::token(token::FatArrow, def.span),
-                mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
+                mbe::TokenTree::MetaVarDecl(span, lhs_nm, tt_spec),
+                mbe::TokenTree::token(token::FatArrow, span),
+                mbe::TokenTree::MetaVarDecl(span, rhs_nm, tt_spec),
             ],
-            separator: Some(Token::new(
-                if macro_rules { token::Semi } else { token::Comma },
-                def.span,
-            )),
-            kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
+            separator: Some(Token::new(if macro_rules { token::Semi } else { token::Comma }, span)),
+            kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, span),
             num_captures: 2,
         }),
         // to phase into semicolon-termination instead of semicolon-separation
         mbe::TokenTree::Sequence(DelimSpan::dummy(), mbe::SequenceRepetition {
             tts: vec![mbe::TokenTree::token(
                 if macro_rules { token::Semi } else { token::Comma },
-                def.span,
+                span,
             )],
             separator: None,
-            kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
+            kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, span),
             num_captures: 0,
         }),
     ];
@@ -460,15 +454,15 @@ pub fn compile_declarative_macro(
             };
 
             let s = parse_failure_msg(&token, track.get_expected_token());
-            let sp = token.span.substitute_dummy(def.span);
+            let sp = token.span.substitute_dummy(span);
             let mut err = sess.dcx().struct_span_err(sp, s);
             err.span_label(sp, msg);
             annotate_doc_comment(&mut err, sess.source_map(), sp);
             let guar = err.emit();
             return dummy_syn_ext(guar);
         }
         Error(sp, msg) => {
-            let guar = sess.dcx().span_err(sp.substitute_dummy(def.span), msg);
+            let guar = sess.dcx().span_err(sp.substitute_dummy(span), msg);
             return dummy_syn_ext(guar);
         }
         ErrorReported(guar) => {
@@ -489,21 +483,21 @@ pub fn compile_declarative_macro(
                         &TokenStream::new(vec![tt.clone()]),
                         true,
                         sess,
-                        def.id,
+                        node_id,
                         features,
                         edition,
                     )
                     .pop()
                     .unwrap();
                     // We don't handle errors here, the driver will abort
                     // after parsing/expansion. We can report every error in every macro this way.
-                    check_emission(check_lhs_nt_follows(sess, def, &tt));
+                    check_emission(check_lhs_nt_follows(sess, node_id, &tt));
                     return tt;
                 }
-                sess.dcx().span_bug(def.span, "wrong-structured lhs")
+                sess.dcx().span_bug(span, "wrong-structured lhs")
             })
             .collect::<Vec<mbe::TokenTree>>(),
-        _ => sess.dcx().span_bug(def.span, "wrong-structured lhs"),
+        _ => sess.dcx().span_bug(span, "wrong-structured lhs"),
     };
 
     let rhses = match &argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
@@ -515,17 +509,17 @@ pub fn compile_declarative_macro(
                         &TokenStream::new(vec![tt.clone()]),
                         false,
                         sess,
-                        def.id,
+                        node_id,
                         features,
                         edition,
                     )
                     .pop()
                     .unwrap();
                 }
-                sess.dcx().span_bug(def.span, "wrong-structured rhs")
+                sess.dcx().span_bug(span, "wrong-structured rhs")
             })
             .collect::<Vec<mbe::TokenTree>>(),
-        _ => sess.dcx().span_bug(def.span, "wrong-structured rhs"),
+        _ => sess.dcx().span_bug(span, "wrong-structured rhs"),
     };
 
     for rhs in &rhses {
@@ -537,15 +531,9 @@ pub fn compile_declarative_macro(
         check_emission(check_lhs_no_empty_seq(sess, slice::from_ref(lhs)));
     }
 
-    check_emission(macro_check::check_meta_variables(
-        &sess.psess,
-        def.id,
-        def.span,
-        &lhses,
-        &rhses,
-    ));
+    check_emission(macro_check::check_meta_variables(&sess.psess, node_id, span, &lhses, &rhses));
 
-    let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
+    let (transparency, transparency_error) = attr::find_transparency(attrs, macro_rules);
     match transparency_error {
         Some(TransparencyError::UnknownTransparency(value, span)) => {
             dcx.span_err(span, format!("unknown macro transparency: `{value}`"));
@@ -564,7 +552,7 @@ pub fn compile_declarative_macro(
 
     // Compute the spans of the macro rules for unused rule linting.
     // Also, we are only interested in non-foreign macros.
-    let rule_spans = if def.id != DUMMY_NODE_ID {
+    let rule_spans = if node_id != DUMMY_NODE_ID {
         lhses
             .iter()
             .zip(rhses.iter())
@@ -590,15 +578,15 @@ pub fn compile_declarative_macro(
                 mbe::TokenTree::Delimited(.., delimited) => {
                     mbe::macro_parser::compute_locs(&delimited.tts)
                 }
-                _ => sess.dcx().span_bug(def.span, "malformed macro lhs"),
+                _ => sess.dcx().span_bug(span, "malformed macro lhs"),
             }
         })
         .collect();
 
     let expander = Box::new(MacroRulesMacroExpander {
-        name: def.ident,
-        span: def.span,
-        node_id: def.id,
+        name: ident,
+        span,
+        node_id,
         transparency,
         lhses,
         rhses,
@@ -608,13 +596,13 @@ pub fn compile_declarative_macro(
 
 fn check_lhs_nt_follows(
     sess: &Session,
-    def: &ast::Item,
+    node_id: NodeId,
     lhs: &mbe::TokenTree,
 ) -> Result<(), ErrorGuaranteed> {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
     if let mbe::TokenTree::Delimited(.., delimited) = lhs {
-        check_matcher(sess, def, &delimited.tts)
+        check_matcher(sess, node_id, &delimited.tts)
     } else {
         let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
         Err(sess.dcx().span_err(lhs.span(), msg))
@@ -686,12 +674,12 @@ fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed
 
 fn check_matcher(
     sess: &Session,
-    def: &ast::Item,
+    node_id: NodeId,
     matcher: &[mbe::TokenTree],
 ) -> Result<(), ErrorGuaranteed> {
     let first_sets = FirstSets::new(matcher);
     let empty_suffix = TokenSet::empty();
-    check_matcher_core(sess, def, &first_sets, matcher, &empty_suffix)?;
+    check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
     Ok(())
 }
 
@@ -1028,7 +1016,7 @@ impl<'tt> TokenSet<'tt> {
 // see `FirstSets::new`.
 fn check_matcher_core<'tt>(
     sess: &Session,
-    def: &ast::Item,
+    node_id: NodeId,
     first_sets: &FirstSets<'tt>,
     matcher: &'tt [mbe::TokenTree],
     follow: &TokenSet<'tt>,
@@ -1082,7 +1070,7 @@ fn check_matcher_core<'tt>(
                     token::CloseDelim(d.delim),
                     span.close,
                 ));
-                check_matcher_core(sess, def, first_sets, &d.tts, &my_suffix)?;
+                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
 
@@ -1114,7 +1102,7 @@ fn check_matcher_core<'tt>(
                 // At this point, `suffix_first` is built, and
                 // `my_suffix` is some TokenSet that we can use
                 // for checking the interior of `seq_rep`.
-                let next = check_matcher_core(sess, def, first_sets, &seq_rep.tts, my_suffix)?;
+                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
                 if next.maybe_empty {
                     last.add_all(&next);
                 } else {
@@ -1144,7 +1132,7 @@ fn check_matcher_core<'tt>(
                         // macro. (See #86567.)
                         // Macros defined in the current crate have a real node id,
                         // whereas macros from an external crate have a dummy id.
-                        if def.id != DUMMY_NODE_ID
+                        if node_id != DUMMY_NODE_ID
                             && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                             && matches!(
                                 next_token,
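
For orientation, a hedged sketch of how a call site might adapt to the new signature. It is not part of this diff, and the surrounding names (`item`, `sess`, `features`, `edition`) are assumptions: a caller that previously handed over the whole `&ast::Item` now destructures the item itself, mirroring the `ast::ItemKind::MacroDef` match that this change removes from `compile_declarative_macro`.

// Hypothetical call site (names assumed), shown only to illustrate the new parameter list.
if let ast::ItemKind::MacroDef(macro_def) = &item.kind {
    let (ext, rule_spans) = compile_declarative_macro(
        sess,
        features,
        macro_def,   // was extracted inside the function via `match &def.kind`
        item.ident,  // was `def.ident`
        &item.attrs, // was `&def.attrs`
        item.span,   // was `def.span`
        item.id,     // was `def.id`
        edition,
    );
    // ext: SyntaxExtension, rule_spans: Vec<(usize, Span)>, consumed by the caller as before.
}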