From 6b71399d3850216f9fab05bc99d63854df731e6a Mon Sep 17 00:00:00 2001
From: Josh Triplett
Date: Thu, 3 Jul 2025 21:36:43 -0700
Subject: [PATCH 1/4] mbe: Introduce an enum for which part of a rule we're
 parsing

Rather than a `bool` that's `true` for the LHS and `false` for the RHS,
use a self-documenting enum.
---
 compiler/rustc_expand/src/mbe/macro_rules.rs |  5 +-
 compiler/rustc_expand/src/mbe/quoted.rs      | 48 ++++++++++++++------
 2 files changed, 36 insertions(+), 17 deletions(-)

diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 2ffd4e3cf285e..16b9fef6a21c8 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -36,6 +36,7 @@ use crate::base::{
 };
 use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
 use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
+use crate::mbe::quoted::RulePart;
 use crate::mbe::transcribe::transcribe;
 use crate::mbe::{self, KleeneOp, macro_check};
 
@@ -396,7 +397,7 @@ pub fn compile_declarative_macro(
         let lhs_tt = p.parse_token_tree();
         let lhs_tt = mbe::quoted::parse(
             &TokenStream::new(vec![lhs_tt]),
-            true, // LHS
+            RulePart::Pattern,
             sess,
             node_id,
             features,
@@ -423,7 +424,7 @@ pub fn compile_declarative_macro(
         let rhs_tt = p.parse_token_tree();
         let rhs_tt = mbe::quoted::parse(
             &TokenStream::new(vec![rhs_tt]),
-            false, // RHS
+            RulePart::Body,
             sess,
             node_id,
             features,
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 2daa4e7155843..065ae23a262d0 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -16,6 +16,27 @@ pub(crate) const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are
     `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, `path`, \
     `meta`, `tt`, `item` and `vis`, along with `expr_2021` and `pat_param` for edition compatibility";
 
+/// Which part of a macro rule we're parsing
+#[derive(Copy, Clone)]
+pub(crate) enum RulePart {
+    /// The left-hand side, with patterns and metavar definitions with types
+    Pattern,
+    /// The right-hand side body, with metavar references and metavar expressions
+    Body,
+}
+
+impl RulePart {
+    #[inline(always)]
+    fn is_pattern(&self) -> bool {
+        matches!(self, Self::Pattern)
+    }
+
+    #[inline(always)]
+    fn is_body(&self) -> bool {
+        matches!(self, Self::Body)
+    }
+}
+
 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
 /// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
 /// collection of `TokenTree` for use in parsing a macro.
@@ -23,8 +44,8 @@ pub(crate) const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are
 /// # Parameters
 ///
 /// - `input`: a token stream to read from, the contents of which we are parsing.
-/// - `parsing_patterns`: `parse` can be used to parse either the "patterns" or the "body" of a
-///   macro. Both take roughly the same form _except_ that:
+/// - `part`: whether we're parsing the patterns or the body of a macro. Both take roughly the same
+///   form _except_ that:
 ///   - In a pattern, metavars are declared with their "matcher" type. For example `$var:expr` or
 ///     `$id:ident`. In this example, `expr` and `ident` are "matchers". They are not present in the
 ///     body of a macro rule -- just in the pattern.
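
The hunk above is a small API-design refactor rather than a behavior change: a
`bool` parameter whose meaning is only visible at the definition site becomes
an enum whose variants name the two cases. A minimal standalone sketch of the
same pattern (illustrative names only, not code from this patch):

    // A bare `bool` argument reads as `parse(tts, true, ...)` at call
    // sites, where `true` could plausibly mean either LHS or RHS. An enum
    // makes the call site self-documenting: `parse(tts, RulePart::Pattern, ...)`.
    #[allow(dead_code)]
    #[derive(Copy, Clone)]
    enum RulePart {
        Pattern, // left-hand side: matchers such as `$var:expr`
        Body,    // right-hand side: the tokens the macro expands to
    }

    fn describe(part: RulePart) -> &'static str {
        // A `match` forces both cases to be handled explicitly, unlike an
        // easily-transposed `true`/`false` at the call site.
        match part {
            RulePart::Pattern => "parsing a matcher",
            RulePart::Body => "parsing a transcriber",
        }
    }

    fn main() {
        // Compare with the old call shape: `describe(true)` -- true what?
        println!("{}", describe(RulePart::Pattern));
    }

The `matches!`-based predicates in the patch (`is_pattern`, `is_body`) keep
the existing `if` conditions nearly as terse as the boolean they replace. The
remaining hunks rename `parsing_patterns` to `part` throughout `parse` and
`parse_tree`:
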
@@ -38,7 +59,7 @@ pub(crate) const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are
 /// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
 pub(super) fn parse(
     input: &tokenstream::TokenStream,
-    parsing_patterns: bool,
+    part: RulePart,
     sess: &Session,
     node_id: NodeId,
     features: &Features,
@@ -53,9 +74,9 @@ pub(super) fn parse(
     while let Some(tree) = iter.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
-        let tree = parse_tree(tree, &mut iter, parsing_patterns, sess, node_id, features, edition);
+        let tree = parse_tree(tree, &mut iter, part, sess, node_id, features, edition);
 
-        if !parsing_patterns {
+        if part.is_body() {
             // No matchers allowed, nothing to process here
             result.push(tree);
             continue;
@@ -157,13 +178,13 @@ fn maybe_emit_macro_metavar_expr_concat_feature(features: &Features, sess: &Sess
 /// - `tree`: the tree we wish to convert.
 /// - `outer_iter`: an iterator over trees. We may need to read more tokens from it in order to finish
 ///   converting `tree`
-/// - `parsing_patterns`: same as [parse].
+/// - `part`: same as [parse].
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
 /// - `features`: language features so we can do feature gating.
 fn parse_tree<'a>(
     tree: &'a tokenstream::TokenTree,
     outer_iter: &mut TokenStreamIter<'a>,
-    parsing_patterns: bool,
+    part: RulePart,
     sess: &Session,
     node_id: NodeId,
     features: &Features,
@@ -189,7 +210,7 @@ fn parse_tree<'a>(
         match next {
             // `tree` is followed by a delimited set of token trees.
             Some(&tokenstream::TokenTree::Delimited(delim_span, _, delim, ref tts)) => {
-                if parsing_patterns {
+                if part.is_pattern() {
                     if delim != Delimiter::Parenthesis {
                         span_dollar_dollar_or_metavar_in_the_lhs_err(
                             sess,
@@ -244,13 +265,13 @@ fn parse_tree<'a>(
                         // If we didn't find a metavar expression above, then we must have a
                         // repetition sequence in the macro (e.g. `$(pat)*`). Parse the
                        // contents of the sequence itself
-                        let sequence = parse(tts, parsing_patterns, sess, node_id, features, edition);
+                        let sequence = parse(tts, part, sess, node_id, features, edition);
                         // Get the Kleene operator and optional separator
                         let (separator, kleene) =
                             parse_sep_and_kleene_op(&mut iter, delim_span.entire(), sess);
                         // Count the number of captured "names" (i.e., named metavars)
                         let num_captures =
-                            if parsing_patterns { count_metavar_decls(&sequence) } else { 0 };
+                            if part.is_pattern() { count_metavar_decls(&sequence) } else { 0 };
                         TokenTree::Sequence(
                             delim_span,
                             SequenceRepetition { tts: sequence, separator, kleene, num_captures },
@@ -274,7 +295,7 @@ fn parse_tree<'a>(
                 Token { kind: token::Dollar, span: dollar_span2 },
                 _,
             )) => {
-                if parsing_patterns {
+                if part.is_pattern() {
                     span_dollar_dollar_or_metavar_in_the_lhs_err(
                         sess,
                         &Token { kind: token::Dollar, span: dollar_span2 },
@@ -306,10 +327,7 @@ fn parse_tree<'a>(
         &tokenstream::TokenTree::Delimited(span, spacing, delim, ref tts) => TokenTree::Delimited(
             span,
             spacing,
-            Delimited {
-                delim,
-                tts: parse(tts, parsing_patterns, sess, node_id, features, edition),
-            },
+            Delimited { delim, tts: parse(tts, part, sess, node_id, features, edition) },
         ),
     }
 }

From a7382eae3fccb2db5353640454c5d09840d3089e Mon Sep 17 00:00:00 2001
From: Josh Triplett
Date: Thu, 3 Jul 2025 21:55:12 -0700
Subject: [PATCH 2/4] mbe: Add a helper to parse a single `TokenTree`

The parser repeatedly invokes the `parse` function, constructing a
one-entry vector, and assuming that the return value will be a one-entry
vector. Add a helper for that case.

This will simplify adding additional callers, and put all the logic in
one place to allow potential future simplification of the one-TT case.
---
 compiler/rustc_expand/src/mbe/macro_rules.rs | 24 +++-----------------
 compiler/rustc_expand/src/mbe/quoted.rs      | 18 ++++++++++++++-
 2 files changed, 20 insertions(+), 22 deletions(-)
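
The helper introduced here, `parse_one_tt` (added to quoted.rs, the second
file below), centralizes a wrap-then-unwrap dance. The shape of that dance,
as a standalone sketch with toy types (nothing below is from rustc):

    // Reusing a collection-oriented parser for a single item: wrap the item
    // in a one-element collection, parse, then take the single result.
    fn parse_all(inputs: &[u32]) -> Vec<String> {
        inputs.iter().map(|n| format!("tt#{n}")).collect()
    }

    // One input always yields exactly one output, so `pop().unwrap()`
    // encodes an invariant rather than a hopeful guess -- the same
    // reasoning `parse_one_tt` relies on.
    fn parse_one(input: u32) -> String {
        parse_all(&[input]).pop().unwrap()
    }

    fn main() {
        assert_eq!(parse_one(7), "tt#7");
    }

Keeping the `.pop().unwrap()` in one place also leaves a single spot to
optimize the one-tree case later, as the commit message notes.
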
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 16b9fef6a21c8..44201d48147b7 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -36,7 +36,7 @@ use crate::base::{
 };
 use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
 use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
-use crate::mbe::quoted::RulePart;
+use crate::mbe::quoted::{RulePart, parse_one_tt};
 use crate::mbe::transcribe::transcribe;
 use crate::mbe::{self, KleeneOp, macro_check};
 
@@ -395,16 +395,7 @@ pub fn compile_declarative_macro(
 
     while p.token != token::Eof {
         let lhs_tt = p.parse_token_tree();
-        let lhs_tt = mbe::quoted::parse(
-            &TokenStream::new(vec![lhs_tt]),
-            RulePart::Pattern,
-            sess,
-            node_id,
-            features,
-            edition,
-        )
-        .pop()
-        .unwrap();
+        let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
         // We don't handle errors here, the driver will abort after parsing/expansion. We can
         // report every error in every macro this way.
         check_emission(check_lhs_nt_follows(sess, node_id, &lhs_tt));
@@ -422,16 +413,7 @@ pub fn compile_declarative_macro(
             return dummy_syn_ext(guar);
         }
         let rhs_tt = p.parse_token_tree();
-        let rhs_tt = mbe::quoted::parse(
-            &TokenStream::new(vec![rhs_tt]),
-            RulePart::Body,
-            sess,
-            node_id,
-            features,
-            edition,
-        )
-        .pop()
-        .unwrap();
+        let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
         check_emission(check_rhs(sess, &rhs_tt));
         check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
         lhses.push(lhs_tt);
         rhses.push(rhs_tt);
diff --git a/compiler/rustc_expand/src/mbe/quoted.rs b/compiler/rustc_expand/src/mbe/quoted.rs
index 065ae23a262d0..eb874a27cece5 100644
--- a/compiler/rustc_expand/src/mbe/quoted.rs
+++ b/compiler/rustc_expand/src/mbe/quoted.rs
@@ -57,7 +57,7 @@ impl RulePart {
 /// # Returns
 ///
 /// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
-pub(super) fn parse(
+fn parse(
     input: &tokenstream::TokenStream,
     part: RulePart,
     sess: &Session,
@@ -152,6 +152,22 @@ pub(super) fn parse(
     result
 }
 
+/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Like `parse`, but for a
+/// single token tree. Emits errors to `sess` if needed.
+#[inline]
+pub(super) fn parse_one_tt(
+    input: tokenstream::TokenTree,
+    part: RulePart,
+    sess: &Session,
+    node_id: NodeId,
+    features: &Features,
+    edition: Edition,
+) -> TokenTree {
+    parse(&tokenstream::TokenStream::new(vec![input]), part, sess, node_id, features, edition)
+        .pop()
+        .unwrap()
+}
+
 /// Asks for the `macro_metavar_expr` feature if it is not enabled
 fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &Session, span: Span) {
     if !features.macro_metavar_expr() {

From 0d5ab3e46ce3be1e5e3c45ac0ff7d355303c4353 Mon Sep 17 00:00:00 2001
From: Josh Triplett
Date: Thu, 3 Jul 2025 22:57:06 -0700
Subject: [PATCH 3/4] mbe: Simplify a match to a let-else

---
 compiler/rustc_expand/src/mbe/macro_rules.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 44201d48147b7..731a4a08ab00f 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -213,9 +213,8 @@ fn expand_macro<'cx>(
 
     match try_success_result {
         Ok((i, named_matches)) => {
-            let (rhs, rhs_span): (&mbe::Delimited, DelimSpan) = match &rhses[i] {
-                mbe::TokenTree::Delimited(span, _, delimited) => (&delimited, *span),
-                _ => cx.dcx().span_bug(sp, "malformed macro rhs"),
+            let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rhses[i] else {
+                cx.dcx().span_bug(sp, "malformed macro rhs");
             };
             let arm_span = rhses[i].span();
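
Patch 3 above is a pure readability change: a two-arm `match` whose fallback
arm diverges becomes a `let`-`else`. A standalone sketch of the same
transformation (toy types, not rustc code):

    #[allow(dead_code)]
    enum Tree {
        Leaf(u32),
        Node(Vec<Tree>),
    }

    fn leaf_value(tree: &Tree) -> u32 {
        // Before: match tree { Tree::Leaf(n) => *n, _ => panic!("...") }
        let Tree::Leaf(n) = tree else {
            // The else block must diverge; here that's a panic, and in the
            // patch it's `span_bug`, which also never returns.
            panic!("malformed tree");
        };
        *n
    }

    fn main() {
        assert_eq!(leaf_value(&Tree::Leaf(3)), 3);
    }
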
From 63cfb3af37d74d312829d4e260e03128eb7e3f27 Mon Sep 17 00:00:00 2001
From: Josh Triplett
Date: Fri, 4 Jul 2025 01:22:37 -0700
Subject: [PATCH 4/4] mbe: Defer checks for `compile_error!` until reporting
 an unused macro rule

The MBE parser checks rules at initial parse time to see if their RHS
has `compile_error!` in it, and returns a list of rule indexes and LHS
spans that don't map to `compile_error!`, for use in unused macro rule
checking.

Instead, have the unused macro rule reporting ask the macro for the rule
to report, and let the macro check at that time. That avoids checking
rules unless they're unused.

In the process, refactor the data structure used to store macro rules,
to group the LHS and RHS (and LHS span) of each rule together, and
refactor the unused rule tracking to only track rule indexes.

This ends up being a net simplification, and reduction in code size.
---
 compiler/rustc_expand/src/base.rs            |   4 +
 compiler/rustc_expand/src/mbe/diagnostics.rs |  10 +-
 compiler/rustc_expand/src/mbe/macro_rules.rs | 114 +++++++-----------
 .../rustc_resolve/src/build_reduced_graph.rs |   8 +-
 compiler/rustc_resolve/src/lib.rs            |   6 +-
 compiler/rustc_resolve/src/macros.rs         |  30 +++--
 6 files changed, 80 insertions(+), 92 deletions(-)

diff --git a/compiler/rustc_expand/src/base.rs b/compiler/rustc_expand/src/base.rs
index 80f6e9d9fc487..d6d898088395e 100644
--- a/compiler/rustc_expand/src/base.rs
+++ b/compiler/rustc_expand/src/base.rs
@@ -348,6 +348,10 @@ pub trait TTMacroExpander {
         span: Span,
         input: TokenStream,
     ) -> MacroExpanderResult<'cx>;
+
+    fn get_unused_rule(&self, _rule_i: usize) -> Option<(&Ident, Span)> {
+        None
+    }
 }
 
 pub type MacroExpanderResult<'cx> = ExpandResult<Box<dyn MacResult + 'cx>, ()>;
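
The `get_unused_rule` method just added above is a defaulted trait method, so
only the `macro_rules!` expander has to opt in; every other implementer
inherits the `None` default. A standalone sketch of that pattern (toy trait,
not the rustc one):

    trait Expander {
        // Defaulted: implementers that cannot report unused rules simply
        // don't override this.
        fn get_unused_rule(&self, _rule_i: usize) -> Option<&str> {
            None
        }
    }

    struct DummyExpander;
    impl Expander for DummyExpander {} // inherits the `None` default

    struct RulesExpander {
        rules: Vec<String>,
    }
    impl Expander for RulesExpander {
        // Only this implementer knows its rules and can report one.
        fn get_unused_rule(&self, rule_i: usize) -> Option<&str> {
            self.rules.get(rule_i).map(String::as_str)
        }
    }

    fn main() {
        let r = RulesExpander { rules: vec!["() => {};".to_string()] };
        assert_eq!(r.get_unused_rule(0), Some("() => {};"));
        assert_eq!(DummyExpander.get_unused_rule(0), None);
    }
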
diff --git a/compiler/rustc_expand/src/mbe/diagnostics.rs b/compiler/rustc_expand/src/mbe/diagnostics.rs
index c607a3a3652c9..7a280d671f413 100644
--- a/compiler/rustc_expand/src/mbe/diagnostics.rs
+++ b/compiler/rustc_expand/src/mbe/diagnostics.rs
@@ -10,7 +10,7 @@ use rustc_span::source_map::SourceMap;
 use rustc_span::{ErrorGuaranteed, Ident, Span};
 use tracing::debug;
 
-use super::macro_rules::{NoopTracker, parser_from_cx};
+use super::macro_rules::{MacroRule, NoopTracker, parser_from_cx};
 use crate::expand::{AstFragmentKind, parse_ast_fragment};
 use crate::mbe::macro_parser::ParseResult::*;
 use crate::mbe::macro_parser::{MatcherLoc, NamedParseResult, TtParser};
@@ -22,14 +22,14 @@ pub(super) fn failed_to_match_macro(
     def_span: Span,
     name: Ident,
     arg: TokenStream,
-    lhses: &[Vec<MatcherLoc>],
+    rules: &[MacroRule],
 ) -> (Span, ErrorGuaranteed) {
     debug!("failed to match macro");
 
    // An error occurred, try the expansion again, tracking the expansion closely for better
    // diagnostics.
     let mut tracker = CollectTrackerAndEmitter::new(psess.dcx(), sp);
-    let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut tracker);
+    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut tracker);
 
     if try_success_result.is_ok() {
         // Nonterminal parser recovery might turn failed matches into successful ones,
@@ -80,12 +80,12 @@ pub(super) fn failed_to_match_macro(
 
     // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
     if let Some((arg, comma_span)) = arg.add_comma() {
-        for lhs in lhses {
+        for rule in rules {
             let parser = parser_from_cx(psess, arg.clone(), Recovery::Allowed);
             let mut tt_parser = TtParser::new(name);
 
             if let Success(_) =
-                tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, &mut NoopTracker)
+                tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, &mut NoopTracker)
             {
                 if comma_span.is_dummy() {
                     err.note("you might be missing a comma");
diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs
index 731a4a08ab00f..52cdcc5c747e9 100644
--- a/compiler/rustc_expand/src/mbe/macro_rules.rs
+++ b/compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -98,13 +98,18 @@ impl<'a> ParserAnyMacro<'a> {
     }
 }
 
+pub(super) struct MacroRule {
+    pub(super) lhs: Vec<MatcherLoc>,
+    lhs_span: Span,
+    rhs: mbe::TokenTree,
+}
+
 struct MacroRulesMacroExpander {
     node_id: NodeId,
     name: Ident,
     span: Span,
     transparency: Transparency,
-    lhses: Vec<Vec<MatcherLoc>>,
-    rhses: Vec<mbe::TokenTree>,
+    rules: Vec<MacroRule>,
 }
 
 impl TTMacroExpander for MacroRulesMacroExpander {
@@ -122,10 +127,15 @@ impl TTMacroExpander for MacroRulesMacroExpander {
             self.name,
             self.transparency,
             input,
-            &self.lhses,
-            &self.rhses,
+            &self.rules,
         ))
     }
+
+    fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, Span)> {
+        // If the rhs contains an invocation like `compile_error!`, don't report it as unused.
+        let rule = &self.rules[rule_i];
+        if has_compile_error_macro(&rule.rhs) { None } else { Some((&self.name, rule.lhs_span)) }
+    }
 }
 
 struct DummyExpander(ErrorGuaranteed);
@@ -184,9 +194,8 @@ impl<'matcher> Tracker<'matcher> for NoopTracker {
     }
 }
 
-/// Expands the rules based macro defined by `lhses` and `rhses` for a given
-/// input `arg`.
-#[instrument(skip(cx, transparency, arg, lhses, rhses))]
+/// Expands the rules based macro defined by `rules` for a given input `arg`.
+#[instrument(skip(cx, transparency, arg, rules))]
 fn expand_macro<'cx>(
     cx: &'cx mut ExtCtxt<'_>,
     sp: Span,
@@ -195,8 +204,7 @@ fn expand_macro<'cx>(
     name: Ident,
     transparency: Transparency,
     arg: TokenStream,
-    lhses: &[Vec<MatcherLoc>],
-    rhses: &[mbe::TokenTree],
+    rules: &[MacroRule],
 ) -> Box<dyn MacResult + 'cx> {
     let psess = &cx.sess.psess;
     // Macros defined in the current crate have a real node id,
@@ -209,14 +217,14 @@ fn expand_macro<'cx>(
     }
 
     // Track nothing for the best performance.
-    let try_success_result = try_match_macro(psess, name, &arg, lhses, &mut NoopTracker);
+    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);
 
     match try_success_result {
-        Ok((i, named_matches)) => {
-            let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rhses[i] else {
+        Ok((i, rule, named_matches)) => {
+            let mbe::TokenTree::Delimited(rhs_span, _, ref rhs) = rule.rhs else {
                 cx.dcx().span_bug(sp, "malformed macro rhs");
             };
-            let arm_span = rhses[i].span();
+            let arm_span = rule.rhs.span();
 
             // rhs has holes ( `$id` and `$(...)` that need filled)
             let id = cx.current_expansion.id;
@@ -262,7 +270,7 @@ fn expand_macro<'cx>(
         Err(CanRetry::Yes) => {
             // Retry and emit a better error.
             let (span, guar) =
-                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, lhses);
+                diagnostics::failed_to_match_macro(cx.psess(), sp, def_span, name, arg, rules);
             cx.trace_macros_diag();
             DummyResult::any(span, guar)
         }
@@ -278,14 +286,14 @@ pub(super) enum CanRetry {
 /// Try expanding the macro. Returns the index of the successful arm and its named_matches if it was successful,
 /// and nothing if it failed. On failure, it's the callers job to use `track` accordingly to record all errors
 /// correctly.
-#[instrument(level = "debug", skip(psess, arg, lhses, track), fields(tracking = %T::description()))]
+#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
 pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
     psess: &ParseSess,
     name: Ident,
     arg: &TokenStream,
-    lhses: &'matcher [Vec<MatcherLoc>],
+    rules: &'matcher [MacroRule],
     track: &mut T,
-) -> Result<(usize, NamedMatches), CanRetry> {
+) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
     // We create a base parser that can be used for the "black box" parts.
     // Every iteration needs a fresh copy of that parser. However, the parser
     // is not mutated on many of the iterations, particularly when dealing with
@@ -308,7 +316,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
     let parser = parser_from_cx(psess, arg.clone(), T::recovery());
     // Try each arm's matchers.
     let mut tt_parser = TtParser::new(name);
-    for (i, lhs) in lhses.iter().enumerate() {
+    for (i, rule) in rules.iter().enumerate() {
         let _tracing_span = trace_span!("Matching arm", %i);
 
         // Take a snapshot of the state of pre-expansion gating at this point.
@@ -317,7 +325,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
         // are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
         let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());
 
-        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);
+        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), &rule.lhs, track);
 
         track.after_arm(&result);
 
@@ -328,7 +336,7 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
                 // Merge the gated spans from parsing the matcher with the preexisting ones.
                 psess.gated_spans.merge(gated_spans_snapshot);
 
-                return Ok((i, named_matches));
+                return Ok((i, rule, named_matches));
             }
             Failure(_) => {
                 trace!("Failed to match arm, trying the next one");
@@ -364,7 +372,7 @@ pub fn compile_declarative_macro(
     span: Span,
     node_id: NodeId,
     edition: Edition,
-) -> (SyntaxExtension, Vec<(usize, Span)>) {
+) -> (SyntaxExtension, usize) {
     let mk_syn_ext = |expander| {
         SyntaxExtension::new(
             sess,
@@ -377,7 +385,7 @@ pub fn compile_declarative_macro(
             node_id != DUMMY_NODE_ID,
         )
     };
-    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), Vec::new());
+    let dummy_syn_ext = |guar| (mk_syn_ext(Arc::new(DummyExpander(guar))), 0);
 
     let macro_rules = macro_def.macro_rules;
     let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };
@@ -389,8 +397,7 @@ pub fn compile_declarative_macro(
     let mut guar = None;
     let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());
 
-    let mut lhses = Vec::new();
-    let mut rhses = Vec::new();
+    let mut rules = Vec::new();
 
     while p.token != token::Eof {
         let lhs_tt = p.parse_token_tree();
@@ -415,8 +422,15 @@ pub fn compile_declarative_macro(
         let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
         check_emission(check_rhs(sess, &rhs_tt));
         check_emission(macro_check::check_meta_variables(&sess.psess, node_id, &lhs_tt, &rhs_tt));
-        lhses.push(lhs_tt);
-        rhses.push(rhs_tt);
+        let lhs_span = lhs_tt.span();
+        // Convert the lhs into `MatcherLoc` form, which is better for doing the
+        // actual matching.
+        let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
+            mbe::macro_parser::compute_locs(&delimited.tts)
+        } else {
+            return dummy_syn_ext(guar.unwrap());
+        };
+        rules.push(MacroRule { lhs, lhs_span, rhs: rhs_tt });
         if p.token == token::Eof {
             break;
         }
@@ -425,7 +439,7 @@ pub fn compile_declarative_macro(
         }
     }
 
-    if lhses.is_empty() {
+    if rules.is_empty() {
         let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
         return dummy_syn_ext(guar);
     }
@@ -439,48 +453,12 @@ pub fn compile_declarative_macro(
         return dummy_syn_ext(guar);
     }
 
-    // Compute the spans of the macro rules for unused rule linting.
-    // Also, we are only interested in non-foreign macros.
-    let rule_spans = if node_id != DUMMY_NODE_ID {
-        lhses
-            .iter()
-            .zip(rhses.iter())
-            .enumerate()
-            // If the rhs contains an invocation like compile_error!,
-            // don't consider the rule for the unused rule lint.
-            .filter(|(_idx, (_lhs, rhs))| !has_compile_error_macro(rhs))
-            // We only take the span of the lhs here,
-            // so that the spans of created warnings are smaller.
-            .map(|(idx, (lhs, _rhs))| (idx, lhs.span()))
-            .collect::<Vec<_>>()
-    } else {
-        Vec::new()
-    };
+    // Return the number of rules for unused rule linting, if this is a local macro.
+    let nrules = if node_id != DUMMY_NODE_ID { rules.len() } else { 0 };
 
-    // Convert the lhses into `MatcherLoc` form, which is better for doing the
-    // actual matching.
-    let lhses = lhses
-        .iter()
-        .map(|lhs| {
-            // Ignore the delimiters around the matcher.
-            match lhs {
-                mbe::TokenTree::Delimited(.., delimited) => {
-                    mbe::macro_parser::compute_locs(&delimited.tts)
-                }
-                _ => sess.dcx().span_bug(span, "malformed macro lhs"),
-            }
-        })
-        .collect();
-
-    let expander = Arc::new(MacroRulesMacroExpander {
-        name: ident,
-        span,
-        node_id,
-        transparency,
-        lhses,
-        rhses,
-    });
-    (mk_syn_ext(expander), rule_spans)
+    let expander =
+        Arc::new(MacroRulesMacroExpander { name: ident, span, node_id, transparency, rules });
+    (mk_syn_ext(expander), nrules)
 }
 
 fn check_lhs_nt_follows(
diff --git a/compiler/rustc_resolve/src/build_reduced_graph.rs b/compiler/rustc_resolve/src/build_reduced_graph.rs
index 650a827ba5644..eeb8cb893d75b 100644
--- a/compiler/rustc_resolve/src/build_reduced_graph.rs
+++ b/compiler/rustc_resolve/src/build_reduced_graph.rs
@@ -1202,12 +1202,8 @@ impl<'a, 'ra, 'tcx> BuildReducedGraphVisitor<'a, 'ra, 'tcx> {
     fn insert_unused_macro(&mut self, ident: Ident, def_id: LocalDefId, node_id: NodeId) {
         if !ident.as_str().starts_with('_') {
             self.r.unused_macros.insert(def_id, (node_id, ident));
-            for (rule_i, rule_span) in &self.r.macro_map[&def_id.to_def_id()].rule_spans {
-                self.r
-                    .unused_macro_rules
-                    .entry(node_id)
-                    .or_default()
-                    .insert(*rule_i, (ident, *rule_span));
+            for rule_i in 0..self.r.macro_map[&def_id.to_def_id()].nrules {
+                self.r.unused_macro_rules.entry(node_id).or_default().insert(rule_i);
             }
         }
     }
diff --git a/compiler/rustc_resolve/src/lib.rs b/compiler/rustc_resolve/src/lib.rs
index 7162f3a77d350..3f865d7c2daca 100644
--- a/compiler/rustc_resolve/src/lib.rs
+++ b/compiler/rustc_resolve/src/lib.rs
@@ -1014,13 +1014,13 @@ struct DeriveData {
 
 struct MacroData {
     ext: Arc<SyntaxExtension>,
-    rule_spans: Vec<(usize, Span)>,
+    nrules: usize,
     macro_rules: bool,
 }
 
 impl MacroData {
     fn new(ext: Arc<SyntaxExtension>) -> MacroData {
-        MacroData { ext, rule_spans: Vec::new(), macro_rules: false }
+        MacroData { ext, nrules: 0, macro_rules: false }
     }
 }
 
@@ -1135,7 +1135,7 @@ pub struct Resolver<'ra, 'tcx> {
     ast_transform_scopes: FxHashMap<LocalExpnId, Module<'ra>>,
     unused_macros: FxIndexMap<LocalDefId, (NodeId, Ident)>,
     /// A map from the macro to all its potentially unused arms.
-    unused_macro_rules: FxIndexMap<NodeId, UnordMap<usize, (Ident, Span)>>,
+    unused_macro_rules: FxIndexMap<NodeId, UnordSet<usize>>,
     proc_macro_stubs: FxHashSet<LocalDefId>,
     /// Traces collected during macro resolution and validated when it's complete.
     single_segment_macro_resolutions:
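
With only rule indexes tracked during name resolution, the `compile_error!`
check moves to lint-reporting time, in the final file below. The control
flow, as a standalone sketch (toy data, not rustc code):

    // Instead of precomputing "is this rule lintable?" for every rule of
    // every macro at definition time, ask only when a rule is actually
    // about to be reported as unused.
    struct Macro {
        rules: Vec<&'static str>,
    }

    impl Macro {
        // Analogous to `get_unused_rule`: `None` suppresses the lint.
        fn unused_rule_to_report(&self, rule_i: usize) -> Option<&'static str> {
            let rule = self.rules[rule_i];
            if rule.contains("compile_error!") { None } else { Some(rule) }
        }
    }

    fn main() {
        let m = Macro { rules: vec!["() => { compile_error!(\"boom\") };", "() => { 1 };"] };
        // Rule 0 intentionally expands to an error; don't lint it as unused.
        assert_eq!(m.unused_rule_to_report(0), None);
        assert_eq!(m.unused_rule_to_report(1), Some("() => { 1 };"));
    }

The win is that macros whose rules are all used never pay for the RHS scan at
all.
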
diff --git a/compiler/rustc_resolve/src/macros.rs b/compiler/rustc_resolve/src/macros.rs
index 3d33a02a9c6da..9bc96403559ee 100644
--- a/compiler/rustc_resolve/src/macros.rs
+++ b/compiler/rustc_resolve/src/macros.rs
@@ -351,13 +351,23 @@ impl<'ra, 'tcx> ResolverExpand for Resolver<'ra, 'tcx> {
         }
 
         for (&node_id, unused_arms) in self.unused_macro_rules.iter() {
-            for (&arm_i, &(ident, rule_span)) in unused_arms.to_sorted_stable_ord() {
-                self.lint_buffer.buffer_lint(
-                    UNUSED_MACRO_RULES,
-                    node_id,
-                    rule_span,
-                    BuiltinLintDiag::MacroRuleNeverUsed(arm_i, ident.name),
-                );
+            if unused_arms.is_empty() {
+                continue;
+            }
+            let def_id = self.local_def_id(node_id).to_def_id();
+            let m = &self.macro_map[&def_id];
+            let SyntaxExtensionKind::LegacyBang(ref ext) = m.ext.kind else {
+                continue;
+            };
+            for &arm_i in unused_arms.to_sorted_stable_ord() {
+                if let Some((ident, rule_span)) = ext.get_unused_rule(arm_i) {
+                    self.lint_buffer.buffer_lint(
+                        UNUSED_MACRO_RULES,
+                        node_id,
+                        rule_span,
+                        BuiltinLintDiag::MacroRuleNeverUsed(arm_i, ident.name),
+                    );
+                }
             }
         }
     }
@@ -1146,7 +1156,7 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
         node_id: NodeId,
         edition: Edition,
     ) -> MacroData {
-        let (mut ext, mut rule_spans) = compile_declarative_macro(
+        let (mut ext, mut nrules) = compile_declarative_macro(
             self.tcx.sess,
             self.tcx.features(),
             macro_def,
@@ -1163,13 +1173,13 @@ impl<'ra, 'tcx> Resolver<'ra, 'tcx> {
                 // The macro is a built-in, replace its expander function
                 // while still taking everything else from the source code.
                 ext.kind = builtin_ext_kind.clone();
-                rule_spans = Vec::new();
+                nrules = 0;
             } else {
                 self.dcx().emit_err(errors::CannotFindBuiltinMacroWithName { span, ident });
             }
         }
 
-        MacroData { ext: Arc::new(ext), rule_spans, macro_rules: macro_def.macro_rules }
+        MacroData { ext: Arc::new(ext), nrules, macro_rules: macro_def.macro_rules }
     }
 
     fn path_accessible(