use std::mem;

use rustc_ast::token::{
    self, Delimiter, IdentIsRaw, InvisibleOrigin, Lit, LitKind, MetaVarKind, Token, TokenKind,
};
use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
use rustc_ast::{ExprKind, StmtKind, TyKind, UnOp};
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{Diag, DiagCtxtHandle, PResult, pluralize};
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::parser::ParseNtResult;
use rustc_session::parse::ParseSess;
use rustc_span::hygiene::{LocalExpnId, Transparency};
use rustc_span::{
    Ident, MacroRulesNormalizedIdent, Span, Symbol, SyntaxContext, sym, with_metavar_spans,
};
use smallvec::{SmallVec, smallvec};

use crate::errors::{
    CountRepetitionMisplaced, MetaVarsDifSeqMatchers, MustRepeatOnce, MveUnrecognizedVar,
    NoSyntaxVarsExprRepeat, VarStillRepeating,
};
use crate::mbe::macro_parser::NamedMatch;
use crate::mbe::macro_parser::NamedMatch::*;
use crate::mbe::metavar_expr::{MetaVarExprConcatElem, RAW_IDENT_ERR};
use crate::mbe::{self, KleeneOp, MetaVarExpr};

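/// Context used while transcribing a `macro_rules!` right-hand side into a `TokenStream`.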
struct TranscrCtx<'psess, 'itp> {
    psess: &'psess ParseSess,

    /// The matched fragments for each metavariable, as produced by the macro matcher.
    interp: &'itp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,

    /// The hygiene marker applied to every span we produce.
    marker: Marker,

    /// The stack of frames (delimited groups and `$(...)` sequences) still being transcribed.
    stack: SmallVec<[Frame<'itp>; 1]>,

    /// For each sequence we are currently inside, the current repetition index and the total
    /// number of repetitions.
    repeats: Vec<(usize, usize)>,

    /// The token trees produced so far for the innermost frame.
    result: Vec<TokenTree>,

    /// Saved `result` buffers of the enclosing delimited frames.
    result_stack: Vec<Vec<TokenTree>>,
}

impl<'psess> TranscrCtx<'psess, '_> {
    /// The entire span of `dspan`, with the hygiene mark applied.
    fn visited_dspan(&mut self, dspan: DelimSpan) -> Span {
        let mut span = dspan.entire();
        self.marker.mark_span(&mut span);
        span
    }
}

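/// Applies a hygiene mark (`expand_id` plus `transparency`) to spans, caching the marked
/// `SyntaxContext` per input context to avoid repeated `apply_mark` calls.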
struct Marker {
    expand_id: LocalExpnId,
    transparency: Transparency,
    cache: FxHashMap<SyntaxContext, SyntaxContext>,
}

impl Marker {
    fn mark_span(&mut self, span: &mut Span) {
        *span = span.map_ctxt(|ctxt| {
            *self
                .cache
                .entry(ctxt)
                .or_insert_with(|| ctxt.apply_mark(self.expand_id.to_expn_id(), self.transparency))
        });
    }
}

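/// One frame of the transcription stack: a slice of RHS token trees and the index of the next
/// tree to process.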
struct Frame<'a> {
    tts: &'a [mbe::TokenTree],
    idx: usize,
    kind: FrameKind,
}

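/// Whether a frame corresponds to a delimited group or to a `$(...)` repetition.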
enum FrameKind {
    Delimited { delim: Delimiter, span: DelimSpan, spacing: DelimSpacing },
    Sequence { sep: Option<Token>, kleene_op: KleeneOp },
}

impl<'a> Frame<'a> {
    fn new_delimited(src: &'a mbe::Delimited, span: DelimSpan, spacing: DelimSpacing) -> Frame<'a> {
        Frame {
            tts: &src.tts,
            idx: 0,
            kind: FrameKind::Delimited { delim: src.delim, span, spacing },
        }
    }

    fn new_sequence(
        src: &'a mbe::SequenceRepetition,
        sep: Option<Token>,
        kleene_op: KleeneOp,
    ) -> Frame<'a> {
        Frame { tts: &src.tts, idx: 0, kind: FrameKind::Sequence { sep, kleene_op } }
    }
}

impl<'a> Iterator for Frame<'a> {
    type Item = &'a mbe::TokenTree;

    fn next(&mut self) -> Option<&'a mbe::TokenTree> {
        let res = self.tts.get(self.idx);
        self.idx += 1;
        res
    }
}

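/// Transcribes the macro RHS `src` into a `TokenStream`, substituting metavariables with the
/// fragments in `interp` and applying the hygiene mark given by `expand_id` and `transparency`.
/// Works iteratively over an explicit stack of frames rather than by recursion. Errors such as
/// a metavariable that is still repeating or mismatched repetition counts are reported through
/// `psess`.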
pub(super) fn transcribe<'a>(
    psess: &'a ParseSess,
    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    src: &mbe::Delimited,
    src_span: DelimSpan,
    transparency: Transparency,
    expand_id: LocalExpnId,
) -> PResult<'a, TokenStream> {
    if src.tts.is_empty() {
        return Ok(TokenStream::default());
    }

    let mut tscx = TranscrCtx {
        psess,
        interp,
        marker: Marker { expand_id, transparency, cache: Default::default() },
        repeats: Vec::new(),
        stack: smallvec![Frame::new_delimited(
            src,
            src_span,
            DelimSpacing::new(Spacing::Alone, Spacing::Alone)
        )],
        result: Vec::new(),
        result_stack: Vec::new(),
    };

    loop {
        let Some(tree) = tscx.stack.last_mut().unwrap().next() else {
            let frame = tscx.stack.last_mut().unwrap();
            if let FrameKind::Sequence { sep, .. } = &frame.kind {
                let (repeat_idx, repeat_len) = tscx.repeats.last_mut().unwrap();
                *repeat_idx += 1;
                if repeat_idx < repeat_len {
                    frame.idx = 0;
                    if let Some(sep) = sep {
                        tscx.result.push(TokenTree::Token(*sep, Spacing::Alone));
                    }
                    continue;
                }
            }

            match tscx.stack.pop().unwrap().kind {
                FrameKind::Sequence { .. } => {
                    tscx.repeats.pop();
                }

                FrameKind::Delimited { delim, span, mut spacing, .. } => {
                    if delim == Delimiter::Bracket {
                        spacing.close = Spacing::Alone;
                    }
                    if tscx.result_stack.is_empty() {
                        return Ok(TokenStream::new(tscx.result));
                    }

                    let tree =
                        TokenTree::Delimited(span, spacing, delim, TokenStream::new(tscx.result));
                    tscx.result = tscx.result_stack.pop().unwrap();
                    tscx.result.push(tree);
                }
            }
            continue;
        };

        match tree {
            seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
                transcribe_sequence(&mut tscx, seq, seq_rep)?;
            }

            &mbe::TokenTree::MetaVar(sp, original_ident) => {
                transcribe_metavar(&mut tscx, sp, original_ident)?;
            }

            mbe::TokenTree::MetaVarExpr(dspan, expr) => {
                transcribe_metavar_expr(&mut tscx, *dspan, expr)?;
            }

            &mbe::TokenTree::Delimited(mut span, ref spacing, ref delimited) => {
                tscx.marker.mark_span(&mut span.open);
                tscx.marker.mark_span(&mut span.close);
                tscx.stack.push(Frame::new_delimited(delimited, span, *spacing));
                tscx.result_stack.push(mem::take(&mut tscx.result));
            }

            &mbe::TokenTree::Token(mut token) => {
                tscx.marker.mark_span(&mut token.span);
                if let token::NtIdent(ident, _) | token::NtLifetime(ident, _) = &mut token.kind {
                    tscx.marker.mark_span(&mut ident.span);
                }
                let tt = TokenTree::Token(token, Spacing::Alone);
                tscx.result.push(tt);
            }

            mbe::TokenTree::MetaVarDecl { .. } => panic!("unexpected `TokenTree::MetaVarDecl`"),
        }
    }
}

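/// Transcribes a `$(...)` sequence. The number of repetitions is determined in lockstep from the
/// metavariables the sequence contains; if it is non-zero, a `Sequence` frame and a repetition
/// counter are pushed so the main loop expands the body that many times.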
fn transcribe_sequence<'tx, 'itp>(
    tscx: &mut TranscrCtx<'tx, 'itp>,
    seq: &mbe::TokenTree,
    seq_rep: &'itp mbe::SequenceRepetition,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();

    match lockstep_iter_size(seq, tscx.interp, &tscx.repeats) {
        LockstepIterSize::Unconstrained => {
            return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
        }

        LockstepIterSize::Contradiction(msg) => {
            return Err(dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
        }

        LockstepIterSize::Constraint(len, _) => {
            let mbe::TokenTree::Sequence(sp, seq) = seq else { unreachable!() };

            if len == 0 {
                if seq.kleene.op == KleeneOp::OneOrMore {
                    return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
                }
            } else {
                tscx.repeats.push((0, len));

                tscx.stack.push(Frame::new_sequence(seq_rep, seq.separator.clone(), seq.kleene.op));
            }
        }
    }

    Ok(())
}

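/// Transcribes a `$var` occurrence. A bound metavariable is replaced by its matched fragment,
/// most fragment kinds being wrapped in invisible delimiters tagged with their `MetaVarKind`;
/// an unbound `$var` is emitted literally (with hygiene-marked spans).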
fn transcribe_metavar<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    mut sp: Span,
    mut original_ident: Ident,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();

    let ident = MacroRulesNormalizedIdent::new(original_ident);
    let Some(cur_matched) = lookup_cur_matched(ident, tscx.interp, &tscx.repeats) else {
        tscx.marker.mark_span(&mut sp);
        tscx.marker.mark_span(&mut original_ident.span);
        tscx.result.push(TokenTree::token_joint_hidden(token::Dollar, sp));
        tscx.result.push(TokenTree::Token(Token::from_ast_ident(original_ident), Spacing::Alone));
        return Ok(());
    };

    let mut mk_delimited = |mk_span, mv_kind, mut stream: TokenStream| {
        if stream.len() == 1 {
            let tree = stream.iter().next().unwrap();
            if let TokenTree::Delimited(_, _, delim, inner) = tree
                && let Delimiter::Invisible(InvisibleOrigin::MetaVar(mvk)) = delim
                && mv_kind == *mvk
            {
                stream = inner.clone();
            }
        }

        tscx.marker.mark_span(&mut sp);
        with_metavar_spans(|mspans| mspans.insert(mk_span, sp));
        TokenTree::Delimited(
            DelimSpan::from_single(sp),
            DelimSpacing::new(Spacing::Alone, Spacing::Alone),
            Delimiter::Invisible(InvisibleOrigin::MetaVar(mv_kind)),
            stream,
        )
    };

    let tt = match cur_matched {
        MatchedSingle(ParseNtResult::Tt(tt)) => {
            maybe_use_metavar_location(tscx.psess, &tscx.stack, sp, tt, &mut tscx.marker)
        }
        MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
            tscx.marker.mark_span(&mut sp);
            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
            let kind = token::NtIdent(*ident, *is_raw);
            TokenTree::token_alone(kind, sp)
        }
        MatchedSingle(ParseNtResult::Lifetime(ident, is_raw)) => {
            tscx.marker.mark_span(&mut sp);
            with_metavar_spans(|mspans| mspans.insert(ident.span, sp));
            let kind = token::NtLifetime(*ident, *is_raw);
            TokenTree::token_alone(kind, sp)
        }
        MatchedSingle(ParseNtResult::Item(item)) => {
            mk_delimited(item.span, MetaVarKind::Item, TokenStream::from_ast(item))
        }
        MatchedSingle(ParseNtResult::Block(block)) => {
            mk_delimited(block.span, MetaVarKind::Block, TokenStream::from_ast(block))
        }
        MatchedSingle(ParseNtResult::Stmt(stmt)) => {
            let stream = if let StmtKind::Empty = stmt.kind {
                TokenStream::token_alone(token::Semi, stmt.span)
            } else {
                TokenStream::from_ast(stmt)
            };
            mk_delimited(stmt.span, MetaVarKind::Stmt, stream)
        }
        MatchedSingle(ParseNtResult::Pat(pat, pat_kind)) => {
            mk_delimited(pat.span, MetaVarKind::Pat(*pat_kind), TokenStream::from_ast(pat))
        }
        MatchedSingle(ParseNtResult::Expr(expr, kind)) => {
            let (can_begin_literal_maybe_minus, can_begin_string_literal) = match &expr.kind {
                ExprKind::Lit(_) => (true, true),
                ExprKind::Unary(UnOp::Neg, e) if matches!(&e.kind, ExprKind::Lit(_)) => {
                    (true, false)
                }
                _ => (false, false),
            };
            mk_delimited(
                expr.span,
                MetaVarKind::Expr {
                    kind: *kind,
                    can_begin_literal_maybe_minus,
                    can_begin_string_literal,
                },
                TokenStream::from_ast(expr),
            )
        }
        MatchedSingle(ParseNtResult::Literal(lit)) => {
            mk_delimited(lit.span, MetaVarKind::Literal, TokenStream::from_ast(lit))
        }
        MatchedSingle(ParseNtResult::Ty(ty)) => {
            let is_path = matches!(&ty.kind, TyKind::Path(None, _path));
            mk_delimited(ty.span, MetaVarKind::Ty { is_path }, TokenStream::from_ast(ty))
        }
        MatchedSingle(ParseNtResult::Meta(attr_item)) => {
            let has_meta_form = attr_item.meta_kind().is_some();
            mk_delimited(
                attr_item.span(),
                MetaVarKind::Meta { has_meta_form },
                TokenStream::from_ast(attr_item),
            )
        }
        MatchedSingle(ParseNtResult::Path(path)) => {
            mk_delimited(path.span, MetaVarKind::Path, TokenStream::from_ast(path))
        }
        MatchedSingle(ParseNtResult::Vis(vis)) => {
            mk_delimited(vis.span, MetaVarKind::Vis, TokenStream::from_ast(vis))
        }
        MatchedSeq(..) => {
            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
        }
    };

    tscx.result.push(tt);
    Ok(())
}

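/// Transcribes a `${...}` metavariable expression: `concat`, `count`, `ignore`, `index`, or `len`.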
fn transcribe_metavar_expr<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    expr: &MetaVarExpr,
) -> PResult<'tx, ()> {
    let dcx = tscx.psess.dcx();
    let tt = match *expr {
        MetaVarExpr::Concat(ref elements) => metavar_expr_concat(tscx, dspan, elements)?,
        MetaVarExpr::Count(original_ident, depth) => {
            let matched = matched_from_ident(dcx, original_ident, tscx.interp)?;
            let count = count_repetitions(dcx, depth, matched, &tscx.repeats, &dspan)?;
            TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(count), None),
                tscx.visited_dspan(dspan),
            )
        }
        MetaVarExpr::Ignore(original_ident) => {
            let _ = matched_from_ident(dcx, original_ident, tscx.interp)?;
            return Ok(());
        }
        MetaVarExpr::Index(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((index, _)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*index), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "index"));
            }
        },
        MetaVarExpr::Len(depth) => match tscx.repeats.iter().nth_back(depth) {
            Some((_, length)) => TokenTree::token_alone(
                TokenKind::lit(token::Integer, sym::integer(*length), None),
                tscx.visited_dspan(dspan),
            ),
            None => {
                return Err(out_of_bounds_err(dcx, tscx.repeats.len(), dspan.entire(), "len"));
            }
        },
    };
    tscx.result.push(tt);
    Ok(())
}

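/// Transcribes `${concat(...)}`: concatenates identifiers, literals, and matched metavariables
/// into a single identifier, NFC-normalizing the result and erroring if it is not a valid
/// identifier.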
fn metavar_expr_concat<'tx>(
    tscx: &mut TranscrCtx<'tx, '_>,
    dspan: DelimSpan,
    elements: &[MetaVarExprConcatElem],
) -> PResult<'tx, TokenTree> {
    let dcx = tscx.psess.dcx();
    let mut concatenated = String::new();
    for element in elements.into_iter() {
        let symbol = match element {
            MetaVarExprConcatElem::Ident(elem) => elem.name,
            MetaVarExprConcatElem::Literal(elem) => *elem,
            MetaVarExprConcatElem::Var(ident) => {
                match matched_from_ident(dcx, *ident, tscx.interp)? {
                    NamedMatch::MatchedSeq(named_matches) => {
                        let Some((curr_idx, _)) = tscx.repeats.last() else {
                            return Err(dcx.struct_span_err(dspan.entire(), "invalid syntax"));
                        };
                        match &named_matches[*curr_idx] {
                            MatchedSeq(_) => {
                                return Err(dcx.struct_span_err(
                                    ident.span,
                                    "nested repetitions with `${concat(...)}` metavariable expressions are not yet supported",
                                ));
                            }
                            MatchedSingle(pnr) => extract_symbol_from_pnr(dcx, pnr, ident.span)?,
                        }
                    }
                    NamedMatch::MatchedSingle(pnr) => {
                        extract_symbol_from_pnr(dcx, pnr, ident.span)?
                    }
                }
            }
        };
        concatenated.push_str(symbol.as_str());
    }
    let symbol = nfc_normalize(&concatenated);
    let concatenated_span = tscx.visited_dspan(dspan);
    if !rustc_lexer::is_ident(symbol.as_str()) {
        return Err(dcx.struct_span_err(
            concatenated_span,
            "`${concat(..)}` is not generating a valid identifier",
        ));
    }
    tscx.psess.symbol_gallery.insert(symbol, concatenated_span);

    Ok(TokenTree::Token(
        Token::from_ast_ident(Ident::new(symbol, concatenated_span)),
        Spacing::Alone,
    ))
}

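/// Chooses the spans for the tokens substituted for a `$tt` metavariable. The original token is
/// kept unchanged inside a bare, separator-less repetition, when its span can be recorded in the
/// metavar-span table without collision, or when the metavariable span is imported; otherwise a
/// copy re-spanned to the (hygiene-marked) metavariable span is returned.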
fn maybe_use_metavar_location(
    psess: &ParseSess,
    stack: &[Frame<'_>],
    mut metavar_span: Span,
    orig_tt: &TokenTree,
    marker: &mut Marker,
) -> TokenTree {
    let undelimited_seq = matches!(
        stack.last(),
        Some(Frame {
            tts: [_],
            kind: FrameKind::Sequence {
                sep: None,
                kleene_op: KleeneOp::ZeroOrMore | KleeneOp::OneOrMore,
                ..
            },
            ..
        })
    );
    if undelimited_seq {
        return orig_tt.clone();
    }

    marker.mark_span(&mut metavar_span);
    let no_collision = match orig_tt {
        TokenTree::Token(token, ..) => {
            with_metavar_spans(|mspans| mspans.insert(token.span, metavar_span))
        }
        TokenTree::Delimited(dspan, ..) => with_metavar_spans(|mspans| {
            mspans.insert(dspan.open, metavar_span)
                && mspans.insert(dspan.close, metavar_span)
                && mspans.insert(dspan.entire(), metavar_span)
        }),
    };
    if no_collision || psess.source_map().is_imported(metavar_span) {
        return orig_tt.clone();
    }

    match orig_tt {
        TokenTree::Token(Token { kind, span }, spacing) => {
            let span = metavar_span.with_ctxt(span.ctxt());
            with_metavar_spans(|mspans| mspans.insert(span, metavar_span));
            TokenTree::Token(Token { kind: kind.clone(), span }, *spacing)
        }
        TokenTree::Delimited(dspan, dspacing, delimiter, tts) => {
            let open = metavar_span.with_ctxt(dspan.open.ctxt());
            let close = metavar_span.with_ctxt(dspan.close.ctxt());
            with_metavar_spans(|mspans| {
                mspans.insert(open, metavar_span) && mspans.insert(close, metavar_span)
            });
            let dspan = DelimSpan::from_pair(open, close);
            TokenTree::Delimited(dspan, *dspacing, *delimiter, tts.clone())
        }
    }
}

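/// Looks up the fragment matched for `ident`, descending through nested `MatchedSeq`s using the
/// current repetition indices in `repeats`.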
fn lookup_cur_matched<'a>(
    ident: MacroRulesNormalizedIdent,
    interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> Option<&'a NamedMatch> {
    interpolations.get(&ident).map(|mut matched| {
        for &(idx, _) in repeats {
            match matched {
                MatchedSingle(_) => break,
                MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
            }
        }

        matched
    })
}

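/// The repetition count that a sequence is allowed to have, as constrained by the metavariables
/// it contains: unconstrained, fixed to a length by some metavariable, or contradictory.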
#[derive(Clone)]
enum LockstepIterSize {
    Unconstrained,
    Constraint(usize, MacroRulesNormalizedIdent),
    Contradiction(String),
}

impl LockstepIterSize {
    fn with(self, other: LockstepIterSize) -> LockstepIterSize {
        match self {
            LockstepIterSize::Unconstrained => other,
            LockstepIterSize::Contradiction(_) => self,
            LockstepIterSize::Constraint(l_len, l_id) => match other {
                LockstepIterSize::Unconstrained => self,
                LockstepIterSize::Contradiction(_) => other,
                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
                LockstepIterSize::Constraint(r_len, r_id) => {
                    let msg = format!(
                        "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
                        l_id,
                        l_len,
                        pluralize!(l_len),
                        r_id,
                        r_len,
                        pluralize!(r_len),
                    );
                    LockstepIterSize::Contradiction(msg)
                }
            },
        }
    }
}

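/// Computes the repetition count for a sequence by combining, in lockstep, the lengths of all
/// matched metavariables appearing in `tree`.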
fn lockstep_iter_size(
    tree: &mbe::TokenTree,
    interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> LockstepIterSize {
    use mbe::TokenTree;
    match tree {
        TokenTree::Delimited(.., delimited) => {
            delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::Sequence(_, seq) => {
            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl { name, .. } => {
            let name = MacroRulesNormalizedIdent::new(*name);
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match matched {
                    MatchedSingle(_) => LockstepIterSize::Unconstrained,
                    MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
                },
                _ => LockstepIterSize::Unconstrained,
            }
        }
        TokenTree::MetaVarExpr(_, expr) => {
            expr.for_each_metavar(LockstepIterSize::Unconstrained, |lis, ident| {
                lis.with(lockstep_iter_size(
                    &TokenTree::MetaVar(ident.span, *ident),
                    interpolations,
                    repeats,
                ))
            })
        }
        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
    }
}

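/// Implements `${count(ident, depth)}`: counts the repetitions of `matched` at the user-supplied
/// depth, relative to the repetitions we are already inside (`repeats`).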
fn count_repetitions<'dx>(
    dcx: DiagCtxtHandle<'dx>,
    depth_user: usize,
    mut matched: &NamedMatch,
    repeats: &[(usize, usize)],
    sp: &DelimSpan,
) -> PResult<'dx, usize> {
    fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> {
        match matched {
            MatchedSingle(_) => Ok(1),
            MatchedSeq(named_matches) => {
                if depth_curr == depth_max {
                    Ok(named_matches.len())
                } else {
                    named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum()
                }
            }
        }
    }

    fn depth(counter: usize, matched: &NamedMatch) -> usize {
        match matched {
            MatchedSingle(_) => counter,
            MatchedSeq(named_matches) => {
                let rslt = counter + 1;
                if let Some(elem) = named_matches.first() { depth(rslt, elem) } else { rslt }
            }
        }
    }

    let depth_max = depth(0, matched)
        .checked_sub(1)
        .and_then(|el| el.checked_sub(repeats.len()))
        .unwrap_or_default();
    if depth_user > depth_max {
        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
    }

    for &(idx, _) in repeats {
        if let MatchedSeq(ads) = matched {
            matched = &ads[idx];
        }
    }

    if let MatchedSingle(_) = matched {
        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
    }

    count(depth_user, depth_max, matched)
}

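/// Resolves `ident` to its matched fragment, or emits an "unrecognized metavariable" error.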
fn matched_from_ident<'ctx, 'interp, 'rslt>(
    dcx: DiagCtxtHandle<'ctx>,
    ident: Ident,
    interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
) -> PResult<'ctx, &'rslt NamedMatch>
where
    'interp: 'rslt,
{
    let span = ident.span;
    let key = MacroRulesNormalizedIdent::new(ident);
    interp.get(&key).ok_or_else(|| dcx.create_err(MveUnrecognizedVar { span, key }))
}

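/// Builds the error for a metavariable expression whose depth parameter is out of bounds; `max`
/// is the exclusive upper bound for the depth (0 means the expression was used outside of any
/// repetition).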
fn out_of_bounds_err<'a>(dcx: DiagCtxtHandle<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> {
    let msg = if max == 0 {
        format!(
            "meta-variable expression `{ty}` with depth parameter \
             must be called inside of a macro repetition"
        )
    } else {
        format!(
            "depth parameter of meta-variable expression `{ty}` \
             must be less than {max}"
        )
    };
    dcx.struct_span_err(span, msg)
}

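/// Extracts the `Symbol` that `${concat(...)}` should use from a matched fragment: non-raw
/// identifiers and unsuffixed string literals are accepted; anything else is an error.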
fn extract_symbol_from_pnr<'a>(
    dcx: DiagCtxtHandle<'a>,
    pnr: &ParseNtResult,
    span_err: Span,
) -> PResult<'a, Symbol> {
    match pnr {
        ParseNtResult::Ident(nt_ident, is_raw) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(nt_ident.name)
            }
        }
        ParseNtResult::Tt(TokenTree::Token(
            Token { kind: TokenKind::Ident(symbol, is_raw), .. },
            _,
        )) => {
            if let IdentIsRaw::Yes = is_raw {
                Err(dcx.struct_span_err(span_err, RAW_IDENT_ERR))
            } else {
                Ok(*symbol)
            }
        }
        ParseNtResult::Tt(TokenTree::Token(
            Token {
                kind: TokenKind::Literal(Lit { kind: LitKind::Str, symbol, suffix: None }),
                ..
            },
            _,
        )) => Ok(*symbol),
        ParseNtResult::Literal(expr)
            if let ExprKind::Lit(Lit { kind: LitKind::Str, symbol, suffix: None }) = &expr.kind =>
        {
            Ok(*symbol)
        }
        _ => Err(dcx
            .struct_err(
                "metavariables of `${concat(..)}` must be of type `ident`, `literal` or `tt`",
            )
            .with_note("currently only string literals are supported")
            .with_span(span_err)),
    }
}