1use std::borrow::Cow;
2use std::collections::hash_map::Entry;
3use std::sync::Arc;
4use std::{mem, slice};
5
6use ast::token::IdentIsRaw;
7use rustc_ast::token::NtPatKind::*;
8use rustc_ast::token::TokenKind::*;
9use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
10use rustc_ast::tokenstream::{self, DelimSpan, TokenStream};
11use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
12use rustc_ast_pretty::pprust;
13use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
14use rustc_errors::{Applicability, Diag, ErrorGuaranteed, MultiSpan};
15use rustc_feature::Features;
16use rustc_hir as hir;
17use rustc_hir::attrs::AttributeKind;
18use rustc_hir::def::MacroKinds;
19use rustc_hir::find_attr;
20use rustc_lint_defs::BuiltinLintDiag;
21use rustc_lint_defs::builtin::{
22 RUST_2021_INCOMPATIBLE_OR_PATTERNS, SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
23};
24use rustc_parse::exp;
25use rustc_parse::parser::{Parser, Recovery};
26use rustc_session::Session;
27use rustc_session::parse::{ParseSess, feature_err};
28use rustc_span::edition::Edition;
29use rustc_span::hygiene::Transparency;
30use rustc_span::{Ident, Span, Symbol, kw, sym};
31use tracing::{debug, instrument, trace, trace_span};
32
33use super::diagnostics::{FailedMacro, failed_to_match_macro};
34use super::macro_parser::{NamedMatches, NamedParseResult};
35use super::{SequenceRepetition, diagnostics};
36use crate::base::{
37 AttrProcMacro, DummyResult, ExpandResult, ExtCtxt, MacResult, MacroExpanderResult,
38 SyntaxExtension, SyntaxExtensionKind, TTMacroExpander,
39};
40use crate::errors;
41use crate::expand::{AstFragment, AstFragmentKind, ensure_complete_parse, parse_ast_fragment};
42use crate::mbe::macro_check::check_meta_variables;
43use crate::mbe::macro_parser::{Error, ErrorReported, Failure, MatcherLoc, Success, TtParser};
44use crate::mbe::quoted::{RulePart, parse_one_tt};
45use crate::mbe::transcribe::transcribe;
46use crate::mbe::{self, KleeneOp};
47
/// A parser primed with the token stream produced by one successful
/// `macro_rules!` arm expansion; `make` drives it to produce the requested
/// AST fragment.
pub(crate) struct ParserAnyMacro<'a> {
    parser: Parser<'a>,

    /// Span of the expansion site of the macro this parser is for.
    site_span: Span,
    /// The ident of the macro we're parsing.
    macro_ident: Ident,
    /// Node to attach buffered lints (e.g. trailing-semicolon) to.
    lint_node_id: NodeId,
    is_trailing_mac: bool,
    /// Span of the macro arm that produced this expansion, for diagnostics.
    arm_span: Span,
    /// Whether this macro is defined in the current crate.
    is_local: bool,
}
61
impl<'a> ParserAnyMacro<'a> {
    /// Parses the expansion's tokens into the requested kind of AST fragment,
    /// returning a dummy fragment if parsing fails (after emitting an error).
    pub(crate) fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
        let ParserAnyMacro {
            site_span,
            macro_ident,
            ref mut parser,
            lint_node_id,
            arm_span,
            is_trailing_mac,
            is_local,
        } = *self;
        // Snapshot taken up-front so the error path can replay the parse.
        let snapshot = &mut parser.create_snapshot_for_diagnostic();
        let fragment = match parse_ast_fragment(parser, kind) {
            Ok(f) => f,
            Err(err) => {
                let guar = diagnostics::emit_frag_parse_err(
                    err, parser, snapshot, site_span, arm_span, kind,
                );
                return kind.dummy(site_span, guar);
            }
        };

        // A macro in expression position expanded to an expression followed by
        // a `;`: consume the semicolon, and buffer a lint for local macros.
        if kind == AstFragmentKind::Expr && parser.token == token::Semi {
            if is_local {
                parser.psess.buffer_lint(
                    SEMICOLON_IN_EXPRESSIONS_FROM_MACROS,
                    parser.token.span,
                    lint_node_id,
                    BuiltinLintDiag::TrailingMacro(is_trailing_mac, macro_ident),
                );
            }
            parser.bump();
        }

        // Make sure we don't have any tokens left to parse so we don't
        // silently drop anything.
        let path = ast::Path::from_ident(macro_ident.with_span_pos(site_span));
        ensure_complete_parse(parser, &path, kind.name(), site_span);
        fragment
    }

    /// Builds a `ParserAnyMacro` over the tokens `tts` produced by one macro
    /// arm, remembering the expansion-site and arm spans for diagnostics.
    #[instrument(skip(cx, tts))]
    pub(crate) fn from_tts<'cx>(
        cx: &'cx mut ExtCtxt<'a>,
        tts: TokenStream,
        site_span: Span,
        arm_span: Span,
        is_local: bool,
        macro_ident: Ident,
    ) -> Self {
        Self {
            parser: Parser::new(&cx.sess.psess, tts, None),

            // Pass along the original expansion site and the name of the
            // macro so we can print a useful error message if the parse of
            // the expanded macro leaves unparsed tokens.
            site_span,
            macro_ident,
            lint_node_id: cx.current_expansion.lint_node_id,
            is_trailing_mac: cx.current_expansion.is_trailing_mac,
            arm_span,
            is_local,
        }
    }
}
129
/// One rule of a declarative macro, with its matcher(s) pre-lowered to
/// `MatcherLoc`s and its transcriber kept as an `mbe::TokenTree`.
pub(super) enum MacroRule {
    /// A function-like (`mac!(...)`) rule: `lhs => rhs`.
    Func { lhs: Vec<MatcherLoc>, lhs_span: Span, rhs: mbe::TokenTree },
    /// An attribute rule: matches both the attribute's args and the
    /// annotated item's body.
    Attr {
        args: Vec<MatcherLoc>,
        args_span: Span,
        body: Vec<MatcherLoc>,
        body_span: Span,
        rhs: mbe::TokenTree,
    },
    /// A derive rule: matches the annotated item's body.
    Derive { body: Vec<MatcherLoc>, body_span: Span, rhs: mbe::TokenTree },
}
144
/// The compiled form of a declarative macro: its identity, hygiene
/// transparency, the kinds of invocation it supports, and its rules.
pub struct MacroRulesMacroExpander {
    node_id: NodeId,
    name: Ident,
    span: Span,
    transparency: Transparency,
    kinds: MacroKinds,
    rules: Vec<MacroRule>,
}
153
impl MacroRulesMacroExpander {
    /// Returns the macro name and the span(s) of rule `rule_i` for the
    /// unused-rule lint, or `None` if the rule's rhs contains
    /// `compile_error!` (such rules exist only to report errors, so being
    /// unused is expected).
    pub fn get_unused_rule(&self, rule_i: usize) -> Option<(&Ident, MultiSpan)> {
        let (span, rhs) = match self.rules[rule_i] {
            MacroRule::Func { lhs_span, ref rhs, .. } => (MultiSpan::from_span(lhs_span), rhs),
            MacroRule::Attr { args_span, body_span, ref rhs, .. } => {
                (MultiSpan::from_spans(vec![args_span, body_span]), rhs)
            }
            MacroRule::Derive { body_span, ref rhs, .. } => (MultiSpan::from_span(body_span), rhs),
        };
        if has_compile_error_macro(rhs) { None } else { Some((&self.name, span)) }
    }

    /// The set of macro kinds (bang/attr/derive) this macro has rules for.
    pub fn kinds(&self) -> MacroKinds {
        self.kinds
    }

    /// Expands this macro as a derive against the annotated item's `body`,
    /// returning the transcribed token stream.
    pub fn expand_derive(
        &self,
        cx: &mut ExtCtxt<'_>,
        sp: Span,
        body: &TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        let Self { name, ref rules, node_id, .. } = *self;
        let psess = &cx.sess.psess;

        if cx.trace_macros() {
            let msg = format!("expanding `#[derive({name})] {}`", pprust::tts_to_string(body));
            trace_macros_note(&mut cx.expansions, sp, msg);
        }

        // Match with the no-op tracker for speed; on failure we re-match with
        // tracking to produce a useful diagnostic.
        match try_match_macro_derive(psess, name, body, rules, &mut NoopTracker) {
            Ok((rule_index, rule, named_matches)) => {
                let MacroRule::Derive { rhs, .. } = rule else {
                    panic!("try_match_macro_derive returned non-derive rule");
                };
                // The rhs was checked to be delimited when the macro was compiled.
                let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                    cx.dcx().span_bug(sp, "malformed macro derive rhs");
                };

                let id = cx.current_expansion.id;
                let tts = transcribe(psess, &named_matches, rhs, *rhs_span, self.transparency, id)
                    .map_err(|e| e.emit())?;

                if cx.trace_macros() {
                    let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                    trace_macros_note(&mut cx.expansions, sp, msg);
                }

                // Record usage for the unused-macro-rule lint, local macros only.
                if is_defined_in_current_crate(node_id) {
                    cx.resolver.record_macro_rule_usage(node_id, rule_index);
                }

                Ok(tts)
            }
            Err(CanRetry::No(guar)) => Err(guar),
            Err(CanRetry::Yes) => {
                // Retry the match with tracking to build the failure diagnostic.
                let (_, guar) = failed_to_match_macro(
                    cx.psess(),
                    sp,
                    self.span,
                    name,
                    FailedMacro::Derive,
                    body,
                    rules,
                );
                cx.macro_error_and_trace_macros_diag();
                Err(guar)
            }
        }
    }
}
228
impl TTMacroExpander for MacroRulesMacroExpander {
    /// Function-like (`mac!(...)`) expansion entry point: delegates to
    /// `expand_macro` with this macro's definition data.
    fn expand<'cx>(
        &self,
        cx: &'cx mut ExtCtxt<'_>,
        sp: Span,
        input: TokenStream,
    ) -> MacroExpanderResult<'cx> {
        ExpandResult::Ready(expand_macro(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            input,
            &self.rules,
        ))
    }
}
248
impl AttrProcMacro for MacroRulesMacroExpander {
    /// Attribute expansion entry point: delegates to `expand_macro_attr`
    /// with this macro's definition data.
    fn expand(
        &self,
        cx: &mut ExtCtxt<'_>,
        sp: Span,
        args: TokenStream,
        body: TokenStream,
    ) -> Result<TokenStream, ErrorGuaranteed> {
        expand_macro_attr(
            cx,
            sp,
            self.span,
            self.node_id,
            self.name,
            self.transparency,
            args,
            body,
            &self.rules,
        )
    }
}
270
/// Stand-in expander installed when compiling a macro definition failed;
/// every invocation expands to a dummy result carrying the original error
/// guarantee.
struct DummyExpander(ErrorGuaranteed);

impl TTMacroExpander for DummyExpander {
    fn expand<'cx>(
        &self,
        _: &'cx mut ExtCtxt<'_>,
        span: Span,
        _: TokenStream,
    ) -> ExpandResult<Box<dyn MacResult + 'cx>, ()> {
        ExpandResult::Ready(DummyResult::any(span, self.0))
    }
}
283
284fn trace_macros_note(cx_expansions: &mut FxIndexMap<Span, Vec<String>>, sp: Span, message: String) {
285 let sp = sp.macro_backtrace().last().map_or(sp, |trace| trace.call_site);
286 cx_expansions.entry(sp).or_default().push(message);
287}
288
/// Observer hooks for macro matching, so diagnostics code can re-run a
/// failed match and record more detail than the fast `NoopTracker` path.
pub(super) trait Tracker<'matcher> {
    /// The contents of `ParseResult::Failure`.
    type Failure;

    /// Builds a failure value from the token that didn't match, its position,
    /// and a message describing what was expected.
    fn build_failure(tok: Token, position: u32, msg: &'static str) -> Self::Failure;

    /// Called before trying to match the next `MatcherLoc` on the current token.
    fn before_match_loc(&mut self, _parser: &TtParser, _matcher: &'matcher MatcherLoc) {}

    /// Called after an arm has been parsed, successfully or not. `_in_body`
    /// distinguishes body matchers from attr-args matchers (see callers).
    fn after_arm(&mut self, _in_body: bool, _result: &NamedParseResult<Self::Failure>) {}

    /// A short name for this tracker, used in tracing output.
    fn description() -> &'static str;

    /// Whether the parser may attempt error recovery while matching.
    fn recovery() -> Recovery {
        Recovery::Forbidden
    }
}
312
/// A tracker that records nothing — the fast path for ordinary expansion.
pub(super) struct NoopTracker;

impl<'matcher> Tracker<'matcher> for NoopTracker {
    type Failure = ();

    fn build_failure(_tok: Token, _position: u32, _msg: &'static str) -> Self::Failure {}

    fn description() -> &'static str {
        "none"
    }
}
326
/// Expands the rules-based macro defined by `rules` for a given input `arg`,
/// returning a parser over the transcribed tokens (or a dummy result after
/// emitting an error).
#[instrument(skip(cx, transparency, arg, rules))]
fn expand_macro<'cx>(
    cx: &'cx mut ExtCtxt<'_>,
    sp: Span,
    def_span: Span,
    node_id: NodeId,
    name: Ident,
    transparency: Transparency,
    arg: TokenStream,
    rules: &[MacroRule],
) -> Box<dyn MacResult + 'cx> {
    let psess = &cx.sess.psess;

    if cx.trace_macros() {
        let msg = format!("expanding `{}! {{ {} }}`", name, pprust::tts_to_string(&arg));
        trace_macros_note(&mut cx.expansions, sp, msg);
    }

    // Track nothing for best performance; failures re-match with tracking.
    let try_success_result = try_match_macro(psess, name, &arg, rules, &mut NoopTracker);

    match try_success_result {
        Ok((rule_index, rule, named_matches)) => {
            let MacroRule::Func { rhs, .. } = rule else {
                panic!("try_match_macro returned non-func rule");
            };
            // The rhs was checked to be delimited when the macro was compiled.
            let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
                cx.dcx().span_bug(sp, "malformed macro rhs");
            };
            let arm_span = rhs_span.entire();

            // Fill the rhs's metavariable holes from the named matches.
            let id = cx.current_expansion.id;
            let tts = match transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id) {
                Ok(tts) => tts,
                Err(err) => {
                    let guar = err.emit();
                    return DummyResult::any(arm_span, guar);
                }
            };

            if cx.trace_macros() {
                let msg = format!("to `{}`", pprust::tts_to_string(&tts));
                trace_macros_note(&mut cx.expansions, sp, msg);
            }

            // Record usage for the unused-macro-rule lint, local macros only.
            let is_local = is_defined_in_current_crate(node_id);
            if is_local {
                cx.resolver.record_macro_rule_usage(node_id, rule_index);
            }

            // The caller parses the tokens into the needed AST fragment via
            // `ParserAnyMacro::make`.
            Box::new(ParserAnyMacro::from_tts(cx, tts, sp, arm_span, is_local, name))
        }
        Err(CanRetry::No(guar)) => {
            debug!("Will not retry matching as an error was emitted already");
            DummyResult::any(sp, guar)
        }
        Err(CanRetry::Yes) => {
            // Re-match with tracking to build a useful failure diagnostic.
            let (span, guar) = failed_to_match_macro(
                cx.psess(),
                sp,
                def_span,
                name,
                FailedMacro::Func,
                &arg,
                rules,
            );
            cx.macro_error_and_trace_macros_diag();
            DummyResult::any(span, guar)
        }
    }
}
402
403#[instrument(skip(cx, transparency, args, body, rules))]
405fn expand_macro_attr(
406 cx: &mut ExtCtxt<'_>,
407 sp: Span,
408 def_span: Span,
409 node_id: NodeId,
410 name: Ident,
411 transparency: Transparency,
412 args: TokenStream,
413 body: TokenStream,
414 rules: &[MacroRule],
415) -> Result<TokenStream, ErrorGuaranteed> {
416 let psess = &cx.sess.psess;
417 let is_local = node_id != DUMMY_NODE_ID;
420
421 if cx.trace_macros() {
422 let msg = format!(
423 "expanding `#[{name}({})] {}`",
424 pprust::tts_to_string(&args),
425 pprust::tts_to_string(&body),
426 );
427 trace_macros_note(&mut cx.expansions, sp, msg);
428 }
429
430 match try_match_macro_attr(psess, name, &args, &body, rules, &mut NoopTracker) {
432 Ok((i, rule, named_matches)) => {
433 let MacroRule::Attr { rhs, .. } = rule else {
434 panic!("try_macro_match_attr returned non-attr rule");
435 };
436 let mbe::TokenTree::Delimited(rhs_span, _, rhs) = rhs else {
437 cx.dcx().span_bug(sp, "malformed macro rhs");
438 };
439
440 let id = cx.current_expansion.id;
441 let tts = transcribe(psess, &named_matches, rhs, *rhs_span, transparency, id)
442 .map_err(|e| e.emit())?;
443
444 if cx.trace_macros() {
445 let msg = format!("to `{}`", pprust::tts_to_string(&tts));
446 trace_macros_note(&mut cx.expansions, sp, msg);
447 }
448
449 if is_local {
450 cx.resolver.record_macro_rule_usage(node_id, i);
451 }
452
453 Ok(tts)
454 }
455 Err(CanRetry::No(guar)) => Err(guar),
456 Err(CanRetry::Yes) => {
457 let (_, guar) = failed_to_match_macro(
459 cx.psess(),
460 sp,
461 def_span,
462 name,
463 FailedMacro::Attr(&args),
464 &body,
465 rules,
466 );
467 cx.trace_macros_diag();
468 Err(guar)
469 }
470 }
471}
472
/// Whether a failed macro match may be retried (with a tracker, to build
/// diagnostics) or an error was already emitted.
pub(super) enum CanRetry {
    Yes,
    /// Retrying is not allowed: a fatal error has already been emitted.
    No(ErrorGuaranteed),
}
478
/// Tries to match the invocation `arg` against each function-like (`Func`)
/// rule in turn, returning the index of the first matching rule together with
/// its named matches. `Err(CanRetry::Yes)` means no rule matched but matching
/// may be retried for diagnostics.
#[instrument(level = "debug", skip(psess, arg, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    arg: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // One parser is shared across arms via `Cow`, so a failed arm doesn't
    // disturb the token cursor for the next attempt.
    let parser = parser_from_cx(psess, arg.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Func { lhs, .. } = rule else { continue };
        let _tracing_span = trace_span!("Matching arm", %i);

        // Snapshot the pre-expansion gating state so a failed arm doesn't
        // pollute the global gated-span set.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs, track);

        track.after_arm(true, &result);

        match result {
            Success(named_matches) => {
                debug!("Parsed arm successfully");
                // Keep the gated spans from this arm together with the
                // pre-existing ones.
                psess.gated_spans.merge(gated_spans_snapshot);

                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                trace!("Failed to match arm, trying the next one");
                // Fall through to restore the snapshot below.
            }
            Error(_, _) => {
                debug!("Fatal error occurred during matching");
                // Retry (with tracking) so a diagnostic can be produced.
                return Err(CanRetry::Yes);
            }
            ErrorReported(guarantee) => {
                debug!("Fatal error occurred and was reported during matching");
                // An error was already emitted: do not retry.
                return Err(CanRetry::No(guarantee));
            }
        }

        // Restore the gated spans present before this failed arm.
        mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
    }

    Err(CanRetry::Yes)
}
558
/// Tries to match an attribute invocation against each `Attr` rule in turn:
/// the attribute's `attr_args` must match the rule's args matcher and the
/// annotated item's `attr_body` must match its body matcher. On success the
/// two sets of named matches are merged.
#[instrument(level = "debug", skip(psess, attr_args, attr_body, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro_attr<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    attr_args: &TokenStream,
    attr_body: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    // Separate parsers for the args and body streams, shared across arms.
    let args_parser = parser_from_cx(psess, attr_args.clone(), T::recovery());
    let body_parser = parser_from_cx(psess, attr_body.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Attr { args, body, .. } = rule else { continue };

        // Snapshot pre-expansion gating state so a failed arm doesn't pollute it.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&args_parser), args, track);
        track.after_arm(false, &result);

        let mut named_matches = match result {
            Success(named_matches) => named_matches,
            Failure(_) => {
                // Args didn't match: restore gating state, try the next rule.
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut());
                continue;
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        };

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track);
        track.after_arm(true, &result);

        match result {
            Success(body_named_matches) => {
                psess.gated_spans.merge(gated_spans_snapshot);
                // Merge the body's matches into the args' matches.
                #[allow(rustc::potential_query_instability)]
                named_matches.extend(body_named_matches);
                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut())
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        }
    }

    Err(CanRetry::Yes)
}
613
/// Tries to match a derive invocation's annotated-item `body` against each
/// `Derive` rule in turn.
#[instrument(level = "debug", skip(psess, body, rules, track), fields(tracking = %T::description()))]
pub(super) fn try_match_macro_derive<'matcher, T: Tracker<'matcher>>(
    psess: &ParseSess,
    name: Ident,
    body: &TokenStream,
    rules: &'matcher [MacroRule],
    track: &mut T,
) -> Result<(usize, &'matcher MacroRule, NamedMatches), CanRetry> {
    let body_parser = parser_from_cx(psess, body.clone(), T::recovery());
    let mut tt_parser = TtParser::new(name);
    for (i, rule) in rules.iter().enumerate() {
        let MacroRule::Derive { body, .. } = rule else { continue };

        // Snapshot pre-expansion gating state so a failed arm doesn't pollute it.
        let mut gated_spans_snapshot = mem::take(&mut *psess.gated_spans.spans.borrow_mut());

        let result = tt_parser.parse_tt(&mut Cow::Borrowed(&body_parser), body, track);
        track.after_arm(true, &result);

        match result {
            Success(named_matches) => {
                psess.gated_spans.merge(gated_spans_snapshot);
                return Ok((i, rule, named_matches));
            }
            Failure(_) => {
                // Restore gating state and try the next rule.
                mem::swap(&mut gated_spans_snapshot, &mut psess.gated_spans.spans.borrow_mut())
            }
            Error(_, _) => return Err(CanRetry::Yes),
            ErrorReported(guar) => return Err(CanRetry::No(guar)),
        }
    }

    Err(CanRetry::Yes)
}
651
652pub fn compile_declarative_macro(
654 sess: &Session,
655 features: &Features,
656 macro_def: &ast::MacroDef,
657 ident: Ident,
658 attrs: &[hir::Attribute],
659 span: Span,
660 node_id: NodeId,
661 edition: Edition,
662) -> (SyntaxExtension, usize) {
663 let mk_syn_ext = |kind| {
664 let is_local = is_defined_in_current_crate(node_id);
665 SyntaxExtension::new(sess, kind, span, Vec::new(), edition, ident.name, attrs, is_local)
666 };
667 let dummy_syn_ext =
668 |guar| (mk_syn_ext(SyntaxExtensionKind::LegacyBang(Arc::new(DummyExpander(guar)))), 0);
669
670 let macro_rules = macro_def.macro_rules;
671 let exp_sep = if macro_rules { exp!(Semi) } else { exp!(Comma) };
672
673 let body = macro_def.body.tokens.clone();
674 let mut p = Parser::new(&sess.psess, body, rustc_parse::MACRO_ARGUMENTS);
675
676 let mut guar = None;
679 let mut check_emission = |ret: Result<(), ErrorGuaranteed>| guar = guar.or(ret.err());
680
681 let mut kinds = MacroKinds::empty();
682 let mut rules = Vec::new();
683
684 while p.token != token::Eof {
685 let (args, is_derive) = if p.eat_keyword_noexpect(sym::attr) {
686 kinds |= MacroKinds::ATTR;
687 if !features.macro_attr() {
688 feature_err(sess, sym::macro_attr, span, "`macro_rules!` attributes are unstable")
689 .emit();
690 }
691 if let Some(guar) = check_no_eof(sess, &p, "expected macro attr args") {
692 return dummy_syn_ext(guar);
693 }
694 let args = p.parse_token_tree();
695 check_args_parens(sess, sym::attr, &args);
696 let args = parse_one_tt(args, RulePart::Pattern, sess, node_id, features, edition);
697 check_emission(check_lhs(sess, node_id, &args));
698 if let Some(guar) = check_no_eof(sess, &p, "expected macro attr body") {
699 return dummy_syn_ext(guar);
700 }
701 (Some(args), false)
702 } else if p.eat_keyword_noexpect(sym::derive) {
703 kinds |= MacroKinds::DERIVE;
704 let derive_keyword_span = p.prev_token.span;
705 if !features.macro_derive() {
706 feature_err(sess, sym::macro_attr, span, "`macro_rules!` derives are unstable")
707 .emit();
708 }
709 if let Some(guar) = check_no_eof(sess, &p, "expected `()` after `derive`") {
710 return dummy_syn_ext(guar);
711 }
712 let args = p.parse_token_tree();
713 check_args_parens(sess, sym::derive, &args);
714 let args_empty_result = check_args_empty(sess, &args);
715 let args_not_empty = args_empty_result.is_err();
716 check_emission(args_empty_result);
717 if let Some(guar) = check_no_eof(sess, &p, "expected macro derive body") {
718 return dummy_syn_ext(guar);
719 }
720 if p.token == token::FatArrow {
723 let mut err = sess
724 .dcx()
725 .struct_span_err(p.token.span, "expected macro derive body, got `=>`");
726 if args_not_empty {
727 err.span_label(derive_keyword_span, "need `()` after this `derive`");
728 }
729 return dummy_syn_ext(err.emit());
730 }
731 (None, true)
732 } else {
733 kinds |= MacroKinds::BANG;
734 (None, false)
735 };
736 let lhs_tt = p.parse_token_tree();
737 let lhs_tt = parse_one_tt(lhs_tt, RulePart::Pattern, sess, node_id, features, edition);
738 check_emission(check_lhs(sess, node_id, &lhs_tt));
739 if let Err(e) = p.expect(exp!(FatArrow)) {
740 return dummy_syn_ext(e.emit());
741 }
742 if let Some(guar) = check_no_eof(sess, &p, "expected right-hand side of macro rule") {
743 return dummy_syn_ext(guar);
744 }
745 let rhs_tt = p.parse_token_tree();
746 let rhs_tt = parse_one_tt(rhs_tt, RulePart::Body, sess, node_id, features, edition);
747 check_emission(check_rhs(sess, &rhs_tt));
748 check_emission(check_meta_variables(&sess.psess, node_id, args.as_ref(), &lhs_tt, &rhs_tt));
749 let lhs_span = lhs_tt.span();
750 let lhs = if let mbe::TokenTree::Delimited(.., delimited) = lhs_tt {
753 mbe::macro_parser::compute_locs(&delimited.tts)
754 } else {
755 return dummy_syn_ext(guar.unwrap());
756 };
757 if let Some(args) = args {
758 let args_span = args.span();
759 let mbe::TokenTree::Delimited(.., delimited) = args else {
760 return dummy_syn_ext(guar.unwrap());
761 };
762 let args = mbe::macro_parser::compute_locs(&delimited.tts);
763 let body_span = lhs_span;
764 rules.push(MacroRule::Attr { args, args_span, body: lhs, body_span, rhs: rhs_tt });
765 } else if is_derive {
766 rules.push(MacroRule::Derive { body: lhs, body_span: lhs_span, rhs: rhs_tt });
767 } else {
768 rules.push(MacroRule::Func { lhs, lhs_span, rhs: rhs_tt });
769 }
770 if p.token == token::Eof {
771 break;
772 }
773 if let Err(e) = p.expect(exp_sep) {
774 return dummy_syn_ext(e.emit());
775 }
776 }
777
778 if rules.is_empty() {
779 let guar = sess.dcx().span_err(span, "macros must contain at least one rule");
780 return dummy_syn_ext(guar);
781 }
782 assert!(!kinds.is_empty());
783
784 let transparency = find_attr!(attrs, AttributeKind::MacroTransparency(x) => *x)
785 .unwrap_or(Transparency::fallback(macro_rules));
786
787 if let Some(guar) = guar {
788 return dummy_syn_ext(guar);
791 }
792
793 let nrules = if is_defined_in_current_crate(node_id) { rules.len() } else { 0 };
795
796 let exp = MacroRulesMacroExpander { name: ident, kinds, span, node_id, transparency, rules };
797 (mk_syn_ext(SyntaxExtensionKind::MacroRules(Arc::new(exp))), nrules)
798}
799
800fn check_no_eof(sess: &Session, p: &Parser<'_>, msg: &'static str) -> Option<ErrorGuaranteed> {
801 if p.token == token::Eof {
802 let err_sp = p.token.span.shrink_to_hi();
803 let guar = sess
804 .dcx()
805 .struct_span_err(err_sp, "macro definition ended unexpectedly")
806 .with_span_label(err_sp, msg)
807 .emit();
808 return Some(guar);
809 }
810 None
811}
812
813fn check_args_parens(sess: &Session, rule_kw: Symbol, args: &tokenstream::TokenTree) {
814 if let tokenstream::TokenTree::Delimited(dspan, _, delim, _) = args
816 && *delim != Delimiter::Parenthesis
817 {
818 sess.dcx().emit_err(errors::MacroArgsBadDelim {
819 span: dspan.entire(),
820 sugg: errors::MacroArgsBadDelimSugg { open: dspan.open, close: dspan.close },
821 rule_kw,
822 });
823 }
824}
825
826fn check_args_empty(sess: &Session, args: &tokenstream::TokenTree) -> Result<(), ErrorGuaranteed> {
827 match args {
828 tokenstream::TokenTree::Delimited(.., delimited) if delimited.is_empty() => Ok(()),
829 _ => {
830 let msg = "`derive` rules do not accept arguments; `derive` must be followed by `()`";
831 Err(sess.dcx().span_err(args.span(), msg))
832 }
833 }
834}
835
836fn check_lhs(sess: &Session, node_id: NodeId, lhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
837 let e1 = check_lhs_nt_follows(sess, node_id, lhs);
838 let e2 = check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
839 e1.and(e2)
840}
841
842fn check_lhs_nt_follows(
843 sess: &Session,
844 node_id: NodeId,
845 lhs: &mbe::TokenTree,
846) -> Result<(), ErrorGuaranteed> {
847 if let mbe::TokenTree::Delimited(.., delimited) = lhs {
850 check_matcher(sess, node_id, &delimited.tts)
851 } else {
852 let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
853 Err(sess.dcx().span_err(lhs.span(), msg))
854 }
855}
856
/// Returns whether a repetition could match the empty token stream: it has no
/// separator and every element is a `vis` fragment, a doc comment (ignored in
/// matcher position — a note is emitted), or a zero-able nested repetition.
fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
    if seq.separator.is_some() {
        false
    } else {
        let mut is_empty = true;
        let mut iter = seq.tts.iter().peekable();
        while let Some(tt) = iter.next() {
            match tt {
                mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. } => {}
                mbe::TokenTree::Token(t @ Token { kind: DocComment(..), .. }) => {
                    // Collapse a run of adjacent doc comments into one note span.
                    let mut now = t;
                    while let Some(&mbe::TokenTree::Token(
                        next @ Token { kind: DocComment(..), .. },
                    )) = iter.peek()
                    {
                        now = next;
                        iter.next();
                    }
                    let span = t.span.to(now.span);
                    sess.dcx().span_note(span, "doc comments are ignored in matcher position");
                }
                mbe::TokenTree::Sequence(_, sub_seq)
                    if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne) => {}
                // Anything else requires at least one token to match.
                _ => is_empty = false,
            }
        }
        is_empty
    }
}
887
888fn check_redundant_vis_repetition(
893 err: &mut Diag<'_>,
894 sess: &Session,
895 seq: &SequenceRepetition,
896 span: &DelimSpan,
897) {
898 let is_zero_or_one: bool = seq.kleene.op == KleeneOp::ZeroOrOne;
899 let is_vis = seq.tts.first().map_or(false, |tt| {
900 matches!(tt, mbe::TokenTree::MetaVarDecl { kind: NonterminalKind::Vis, .. })
901 });
902
903 if is_vis && is_zero_or_one {
904 err.note("a `vis` fragment can already be empty");
905 err.multipart_suggestion(
906 "remove the `$(` and `)?`",
907 vec![
908 (
909 sess.source_map().span_extend_to_prev_char_before(span.open, '$', true),
910 "".to_string(),
911 ),
912 (span.close.with_hi(seq.kleene.span.hi()), "".to_string()),
913 ],
914 Applicability::MaybeIncorrect,
915 );
916 }
917}
918
/// Checks that the lhs contains no repetition which could match the empty
/// token stream, since such a matcher would never make progress.
fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> Result<(), ErrorGuaranteed> {
    use mbe::TokenTree;
    for tt in tts {
        match tt {
            TokenTree::Token(..)
            | TokenTree::MetaVar(..)
            | TokenTree::MetaVarDecl { .. }
            | TokenTree::MetaVarExpr(..) => (),
            TokenTree::Delimited(.., del) => check_lhs_no_empty_seq(sess, &del.tts)?,
            TokenTree::Sequence(span, seq) => {
                if is_empty_token_tree(sess, seq) {
                    let sp = span.entire();
                    let mut err =
                        sess.dcx().struct_span_err(sp, "repetition matches empty token tree");
                    // Add a targeted suggestion for redundant `$( $v:vis )?`.
                    check_redundant_vis_repetition(&mut err, sess, seq, span);
                    return Err(err.emit());
                }
                // Also check nested repetitions.
                check_lhs_no_empty_seq(sess, &seq.tts)?
            }
        }
    }

    Ok(())
}
945
946fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed> {
947 match *rhs {
948 mbe::TokenTree::Delimited(..) => Ok(()),
949 _ => Err(sess.dcx().span_err(rhs.span(), "macro rhs must be delimited")),
950 }
951}
952
953fn check_matcher(
954 sess: &Session,
955 node_id: NodeId,
956 matcher: &[mbe::TokenTree],
957) -> Result<(), ErrorGuaranteed> {
958 let first_sets = FirstSets::new(matcher);
959 let empty_suffix = TokenSet::empty();
960 check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
961 Ok(())
962}
963
964fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
965 match rhs {
966 mbe::TokenTree::Delimited(.., d) => {
967 let has_compile_error = d.tts.array_windows::<3>().any(|[ident, bang, args]| {
968 if let mbe::TokenTree::Token(ident) = ident
969 && let TokenKind::Ident(ident, _) = ident.kind
970 && ident == sym::compile_error
971 && let mbe::TokenTree::Token(bang) = bang
972 && let TokenKind::Bang = bang.kind
973 && let mbe::TokenTree::Delimited(.., del) = args
974 && !del.delim.skip()
975 {
976 true
977 } else {
978 false
979 }
980 });
981 if has_compile_error { true } else { d.tts.iter().any(has_compile_error_macro) }
982 }
983 _ => false,
984 }
985}
986
/// Precomputed table mapping each sequence's span in a matcher to the FIRST
/// set of its contents; `None` marks spans recorded more than once (see
/// `build_recur`), for which `first` recomputes the set on demand.
struct FirstSets<'tt> {
    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
}
1008
impl<'tt> FirstSets<'tt> {
    /// Walks `tts` once, recording the FIRST set of every sequence
    /// encountered, and returns the completed table.
    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
        use mbe::TokenTree;

        let mut sets = FirstSets { first: FxHashMap::default() };
        build_recur(&mut sets, tts);
        return sets;

        // Walks `tts` in reverse, populating `sets` for any sequence it
        // encounters, and returns the FIRST set of `tts` itself.
        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
            let mut first = TokenSet::empty();
            for tt in tts.iter().rev() {
                match tt {
                    TokenTree::Token(..)
                    | TokenTree::MetaVar(..)
                    | TokenTree::MetaVarDecl { .. }
                    | TokenTree::MetaVarExpr(..) => {
                        first.replace_with(TtHandle::TtRef(tt));
                    }
                    TokenTree::Delimited(span, _, delimited) => {
                        build_recur(sets, &delimited.tts);
                        // A delimited group always starts with its open delimiter.
                        first.replace_with(TtHandle::from_token_kind(
                            delimited.delim.as_open_token_kind(),
                            span.open,
                        ));
                    }
                    TokenTree::Sequence(sp, seq_rep) => {
                        let subfirst = build_recur(sets, &seq_rep.tts);

                        match sets.first.entry(sp.entire()) {
                            Entry::Vacant(vac) => {
                                vac.insert(Some(subfirst.clone()));
                            }
                            Entry::Occupied(mut occ) => {
                                // The same span was recorded twice: store
                                // `None` so `first` recomputes on demand.
                                occ.insert(None);
                            }
                        }

                        // If the sequence may match nothing, its separator can
                        // be the first token seen at this position.
                        if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                            first.add_one_maybe(TtHandle::from_token(*sep));
                        }

                        if subfirst.maybe_empty
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                        {
                            // A zero-able sequence contributes its FIRST set
                            // but also lets following tokens show through.
                            first.add_all(&TokenSet { maybe_empty: true, ..subfirst });
                        } else {
                            // Mandatory sequence: it fully determines FIRST here.
                            first = subfirst;
                        }
                    }
                }
            }

            first
        }
    }

    /// Computes the FIRST set for `tts`, using the precomputed per-sequence
    /// entries where available.
    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
        use mbe::TokenTree;

        let mut first = TokenSet::empty();
        for tt in tts.iter() {
            assert!(first.maybe_empty);
            match tt {
                TokenTree::Token(..)
                | TokenTree::MetaVar(..)
                | TokenTree::MetaVarDecl { .. }
                | TokenTree::MetaVarExpr(..) => {
                    first.add_one(TtHandle::TtRef(tt));
                    return first;
                }
                TokenTree::Delimited(span, _, delimited) => {
                    first.add_one(TtHandle::from_token_kind(
                        delimited.delim.as_open_token_kind(),
                        span.open,
                    ));
                    return first;
                }
                TokenTree::Sequence(sp, seq_rep) => {
                    let subfirst_owned;
                    let subfirst = match self.first.get(&sp.entire()) {
                        Some(Some(subfirst)) => subfirst,
                        Some(&None) => {
                            // Ambiguous cached entry: recompute from scratch.
                            subfirst_owned = self.first(&seq_rep.tts);
                            &subfirst_owned
                        }
                        None => {
                            panic!("We missed a sequence during FirstSets construction");
                        }
                    };

                    // A maybe-empty sequence exposes its separator first.
                    if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
                        first.add_one_maybe(TtHandle::from_token(*sep));
                    }

                    assert!(first.maybe_empty);
                    first.add_all(subfirst);
                    if subfirst.maybe_empty
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        // Zero-able sequence: keep scanning past it.
                        first.maybe_empty = true;
                        continue;
                    } else {
                        return first;
                    }
                }
            }
        }

        // We only exhaust `tts` if every element could match nothing.
        assert!(first.maybe_empty);
        first
    }
}
1149
/// A token tree that is either borrowed from the matcher or owned, for
/// tokens that must be synthesized (e.g. a delimiter's open/close token).
#[derive(Debug)]
enum TtHandle<'tt> {
    /// Borrowed from the matcher; the common case.
    TtRef(&'tt mbe::TokenTree),

    /// Owned, synthesized token; only ever holds `TokenTree::Token`
    /// (see `from_token` and the `Clone` impl).
    Token(mbe::TokenTree),
}
1165
impl<'tt> TtHandle<'tt> {
    /// Wraps an owned token in a synthesized `TokenTree`.
    fn from_token(tok: Token) -> Self {
        TtHandle::Token(mbe::TokenTree::Token(tok))
    }

    /// Synthesizes a token of the given kind and span and wraps it.
    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
        TtHandle::from_token(Token::new(kind, span))
    }

    /// Borrows the underlying token tree, whichever variant holds it.
    fn get(&'tt self) -> &'tt mbe::TokenTree {
        match self {
            TtHandle::TtRef(tt) => tt,
            TtHandle::Token(token_tt) => token_tt,
        }
    }
}
1183
impl<'tt> PartialEq for TtHandle<'tt> {
    // Handles compare equal when their underlying token trees do, regardless
    // of which variant (borrowed or owned) holds them.
    fn eq(&self, other: &TtHandle<'tt>) -> bool {
        self.get() == other.get()
    }
}
1189
impl<'tt> Clone for TtHandle<'tt> {
    fn clone(&self) -> Self {
        match self {
            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),

            // The `Token` variant is only ever built from `TokenTree::Token`
            // (see `from_token`), so a plain token copy suffices.
            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
                TtHandle::Token(mbe::TokenTree::Token(*tok))
            }

            _ => unreachable!(),
        }
    }
}
1205
/// A set of token trees (kept as a duplicate-free vector of handles) plus a
/// flag recording whether the matched region may be empty.
#[derive(Clone, Debug)]
struct TokenSet<'tt> {
    tokens: Vec<TtHandle<'tt>>,
    maybe_empty: bool,
}
1221
1222impl<'tt> TokenSet<'tt> {
1223 fn empty() -> Self {
1225 TokenSet { tokens: Vec::new(), maybe_empty: true }
1226 }
1227
1228 fn singleton(tt: TtHandle<'tt>) -> Self {
1231 TokenSet { tokens: vec![tt], maybe_empty: false }
1232 }
1233
1234 fn replace_with(&mut self, tt: TtHandle<'tt>) {
1237 self.tokens.clear();
1238 self.tokens.push(tt);
1239 self.maybe_empty = false;
1240 }
1241
1242 fn replace_with_irrelevant(&mut self) {
1246 self.tokens.clear();
1247 self.maybe_empty = false;
1248 }
1249
1250 fn add_one(&mut self, tt: TtHandle<'tt>) {
1252 if !self.tokens.contains(&tt) {
1253 self.tokens.push(tt);
1254 }
1255 self.maybe_empty = false;
1256 }
1257
1258 fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
1260 if !self.tokens.contains(&tt) {
1261 self.tokens.push(tt);
1262 }
1263 }
1264
1265 fn add_all(&mut self, other: &Self) {
1273 for tt in &other.tokens {
1274 if !self.tokens.contains(tt) {
1275 self.tokens.push(tt.clone());
1276 }
1277 }
1278 if !other.maybe_empty {
1279 self.maybe_empty = false;
1280 }
1281 }
1282}
1283
1284fn check_matcher_core<'tt>(
1296 sess: &Session,
1297 node_id: NodeId,
1298 first_sets: &FirstSets<'tt>,
1299 matcher: &'tt [mbe::TokenTree],
1300 follow: &TokenSet<'tt>,
1301) -> Result<TokenSet<'tt>, ErrorGuaranteed> {
1302 use mbe::TokenTree;
1303
1304 let mut last = TokenSet::empty();
1305
1306 let mut errored = Ok(());
1307
1308 'each_token: for i in 0..matcher.len() {
1312 let token = &matcher[i];
1313 let suffix = &matcher[i + 1..];
1314
1315 let build_suffix_first = || {
1316 let mut s = first_sets.first(suffix);
1317 if s.maybe_empty {
1318 s.add_all(follow);
1319 }
1320 s
1321 };
1322
1323 let suffix_first;
1327
1328 match token {
1331 TokenTree::Token(..)
1332 | TokenTree::MetaVar(..)
1333 | TokenTree::MetaVarDecl { .. }
1334 | TokenTree::MetaVarExpr(..) => {
1335 if token_can_be_followed_by_any(token) {
1336 last.replace_with_irrelevant();
1338 continue 'each_token;
1341 } else {
1342 last.replace_with(TtHandle::TtRef(token));
1343 suffix_first = build_suffix_first();
1344 }
1345 }
1346 TokenTree::Delimited(span, _, d) => {
1347 let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
1348 d.delim.as_close_token_kind(),
1349 span.close,
1350 ));
1351 check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
1352 last.replace_with_irrelevant();
1354
1355 continue 'each_token;
1358 }
1359 TokenTree::Sequence(_, seq_rep) => {
1360 suffix_first = build_suffix_first();
1361 let mut new;
1372 let my_suffix = if let Some(sep) = &seq_rep.separator {
1373 new = suffix_first.clone();
1374 new.add_one_maybe(TtHandle::from_token(*sep));
1375 &new
1376 } else {
1377 &suffix_first
1378 };
1379
1380 let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
1384 if next.maybe_empty {
1385 last.add_all(&next);
1386 } else {
1387 last = next;
1388 }
1389
1390 continue 'each_token;
1393 }
1394 }
1395
1396 for tt in &last.tokens {
1401 if let &TokenTree::MetaVarDecl { span, name, kind } = tt.get() {
1402 for next_token in &suffix_first.tokens {
1403 let next_token = next_token.get();
1404
1405 if is_defined_in_current_crate(node_id)
1412 && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
1413 && matches!(
1414 next_token,
1415 TokenTree::Token(token) if *token == token::Or
1416 )
1417 {
1418 let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
1420 span,
1421 name,
1422 kind: NonterminalKind::Pat(PatParam { inferred: false }),
1423 });
1424 sess.psess.buffer_lint(
1425 RUST_2021_INCOMPATIBLE_OR_PATTERNS,
1426 span,
1427 ast::CRATE_NODE_ID,
1428 BuiltinLintDiag::OrPatternsBackCompat(span, suggestion),
1429 );
1430 }
1431 match is_in_follow(next_token, kind) {
1432 IsInFollow::Yes => {}
1433 IsInFollow::No(possible) => {
1434 let may_be = if last.tokens.len() == 1 && suffix_first.tokens.len() == 1
1435 {
1436 "is"
1437 } else {
1438 "may be"
1439 };
1440
1441 let sp = next_token.span();
1442 let mut err = sess.dcx().struct_span_err(
1443 sp,
1444 format!(
1445 "`${name}:{frag}` {may_be} followed by `{next}`, which \
1446 is not allowed for `{frag}` fragments",
1447 name = name,
1448 frag = kind,
1449 next = quoted_tt_to_string(next_token),
1450 may_be = may_be
1451 ),
1452 );
1453 err.span_label(sp, format!("not allowed after `{kind}` fragments"));
1454
1455 if kind == NonterminalKind::Pat(PatWithOr)
1456 && sess.psess.edition.at_least_rust_2021()
1457 && next_token.is_token(&token::Or)
1458 {
1459 let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl {
1460 span,
1461 name,
1462 kind: NonterminalKind::Pat(PatParam { inferred: false }),
1463 });
1464 err.span_suggestion(
1465 span,
1466 "try a `pat_param` fragment specifier instead",
1467 suggestion,
1468 Applicability::MaybeIncorrect,
1469 );
1470 }
1471
1472 let msg = "allowed there are: ";
1473 match possible {
1474 &[] => {}
1475 &[t] => {
1476 err.note(format!(
1477 "only {t} is allowed after `{kind}` fragments",
1478 ));
1479 }
1480 ts => {
1481 err.note(format!(
1482 "{}{} or {}",
1483 msg,
1484 ts[..ts.len() - 1].to_vec().join(", "),
1485 ts[ts.len() - 1],
1486 ));
1487 }
1488 }
1489 errored = Err(err.emit());
1490 }
1491 }
1492 }
1493 }
1494 }
1495 }
1496 errored?;
1497 Ok(last)
1498}
1499
1500fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
1501 if let mbe::TokenTree::MetaVarDecl { kind, .. } = *tok {
1502 frag_can_be_followed_by_any(kind)
1503 } else {
1504 true
1506 }
1507}
1508
1509fn frag_can_be_followed_by_any(kind: NonterminalKind) -> bool {
1518 matches!(
1519 kind,
1520 NonterminalKind::Item | NonterminalKind::Block | NonterminalKind::Ident | NonterminalKind::Literal | NonterminalKind::Meta | NonterminalKind::Lifetime | NonterminalKind::TT )
1528}
1529
/// Result of a FOLLOW-set membership test (see `is_in_follow`).
enum IsInFollow {
    /// The token is in the fragment's FOLLOW set.
    Yes,
    /// The token is not allowed; the payload lists the tokens that *would*
    /// be allowed, pre-rendered for display in the error message.
    No(&'static [&'static str]),
}
1534
/// Tests whether `tok` may legally follow a fragment of kind `kind` — the
/// FOLLOW relation of the macro follow-set rules. On rejection, the result
/// carries the user-facing list of tokens that would have been allowed.
fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
    use mbe::TokenTree;

    // A closing delimiter is always acceptable: it ends the enclosing
    // delimited group rather than continuing the fragment. (Note: this
    // `kind` binding shadows the `kind` parameter.)
    if let TokenTree::Token(Token { kind, .. }) = tok
        && kind.close_delim().is_some()
    {
        IsInFollow::Yes
    } else {
        match kind {
            NonterminalKind::Item => {
                // Unrestricted (see `frag_can_be_followed_by_any`).
                IsInFollow::Yes
            }
            NonterminalKind::Block => {
                // Unrestricted (see `frag_can_be_followed_by_any`).
                IsInFollow::Yes
            }
            NonterminalKind::Stmt | NonterminalKind::Expr(_) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`;`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Semi => IsInFollow::Yes,
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatParam { .. }) => {
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq | Or => IsInFollow::Yes,
                        // Keyword guards of `match`/`for` arms.
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Pat(PatWithOr) => {
                // Same as `pat_param` above, but `|` is NOT in the FOLLOW
                // set, since this pattern kind consumes `|` itself.
                const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`if`", "`in`"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        FatArrow | Comma | Eq => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Path | NonterminalKind::Ty => {
                const TOKENS: &[&str] = &[
                    "`{`", "`[`", "`=>`", "`,`", "`>`", "`=`", "`:`", "`;`", "`|`", "`as`",
                    "`where`",
                ];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        // `Shr` covers the `>>` that closes two nested
                        // generic argument lists at once.
                        OpenBrace | OpenBracket | Comma | FatArrow | Colon | Eq | Gt | Shr
                        | Semi | Or => IsInFollow::Yes,
                        Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => {
                            IsInFollow::Yes
                        }
                        _ => IsInFollow::No(TOKENS),
                    },
                    // A `$b:block` declaration may directly follow.
                    TokenTree::MetaVarDecl { kind: NonterminalKind::Block, .. } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
            NonterminalKind::Ident | NonterminalKind::Lifetime => {
                // Unrestricted (see `frag_can_be_followed_by_any`).
                IsInFollow::Yes
            }
            NonterminalKind::Literal => {
                // Unrestricted (see `frag_can_be_followed_by_any`).
                IsInFollow::Yes
            }
            NonterminalKind::Meta | NonterminalKind::TT => {
                // Unrestricted (see `frag_can_be_followed_by_any`).
                IsInFollow::Yes
            }
            NonterminalKind::Vis => {
                const TOKENS: &[&str] = &["`,`", "an ident", "a type"];
                match tok {
                    TokenTree::Token(token) => match token.kind {
                        Comma => IsInFollow::Yes,
                        // Any raw identifier, or any non-`priv` identifier.
                        Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes,
                        Ident(name, _) if name != kw::Priv => IsInFollow::Yes,
                        _ => {
                            // Otherwise: allowed only if it can start a type.
                            if token.can_begin_type() {
                                IsInFollow::Yes
                            } else {
                                IsInFollow::No(TOKENS)
                            }
                        }
                    },
                    TokenTree::MetaVarDecl {
                        kind: NonterminalKind::Ident | NonterminalKind::Ty | NonterminalKind::Path,
                        ..
                    } => IsInFollow::Yes,
                    _ => IsInFollow::No(TOKENS),
                }
            }
        }
    }
}
1657
1658fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
1659 match tt {
1660 mbe::TokenTree::Token(token) => pprust::token_to_string(token).into(),
1661 mbe::TokenTree::MetaVar(_, name) => format!("${name}"),
1662 mbe::TokenTree::MetaVarDecl { name, kind, .. } => format!("${name}:{kind}"),
1663 _ => panic!(
1664 "{}",
1665 "unexpected mbe::TokenTree::{Sequence or Delimited} \
1666 in follow set checker"
1667 ),
1668 }
1669}
1670
1671fn is_defined_in_current_crate(node_id: NodeId) -> bool {
1672 node_id != DUMMY_NODE_ID
1675}
1676
1677pub(super) fn parser_from_cx(
1678 psess: &ParseSess,
1679 mut tts: TokenStream,
1680 recovery: Recovery,
1681) -> Parser<'_> {
1682 tts.desugar_doc_comments();
1683 Parser::new(psess, tts, rustc_parse::MACRO_ARGUMENTS).recovery(recovery)
1684}