rustc_const_eval/check_consts/check.rs

//! The `Visitor` responsible for actually checking a `mir::Body` for invalid operations.

use std::assert_matches::assert_matches;
use std::borrow::Cow;
use std::mem;
use std::num::NonZero;
use std::ops::Deref;

use rustc_errors::{Diag, ErrorGuaranteed};
use rustc_hir::def::DefKind;
use rustc_hir::def_id::DefId;
use rustc_hir::{self as hir, LangItem};
use rustc_index::bit_set::DenseBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_middle::mir::visit::Visitor;
use rustc_middle::mir::*;
use rustc_middle::span_bug;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::{self, Ty, TypeVisitableExt};
use rustc_mir_dataflow::Analysis;
use rustc_mir_dataflow::impls::{MaybeStorageLive, always_storage_live_locals};
use rustc_span::{Span, Symbol, sym};
use rustc_trait_selection::traits::{
    Obligation, ObligationCause, ObligationCauseCode, ObligationCtxt,
};
use tracing::{instrument, trace};

use super::ops::{self, NonConstOp, Status};
use super::qualifs::{self, HasMutInterior, NeedsDrop, NeedsNonConstDrop};
use super::resolver::FlowSensitiveAnalysis;
use super::{ConstCx, Qualif};
use crate::check_consts::is_fn_or_trait_safe_to_expose_on_stable;
use crate::errors;

type QualifResults<'mir, 'tcx, Q> =
    rustc_mir_dataflow::ResultsCursor<'mir, 'tcx, FlowSensitiveAnalysis<'mir, 'tcx, Q>>;

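/// Whether the const conditions of a conditionally-const callee were found to hold at a
/// call site; produced by `revalidate_conditional_constness`.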
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ConstConditionsHold {
    Yes,
    No,
}

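/// Lazily-initialized cursors for the qualif dataflow analyses. Each analysis runs at
/// most once per body, and only when the cheap type-based check cannot rule out the
/// qualif.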
#[derive(Default)]
pub(crate) struct Qualifs<'mir, 'tcx> {
    has_mut_interior: Option<QualifResults<'mir, 'tcx, HasMutInterior>>,
    needs_drop: Option<QualifResults<'mir, 'tcx, NeedsDrop>>,
    needs_non_const_drop: Option<QualifResults<'mir, 'tcx, NeedsNonConstDrop>>,
}

impl<'mir, 'tcx> Qualifs<'mir, 'tcx> {
    /// Returns `true` if `local` is `NeedsDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    pub(crate) fn needs_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short-circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function.
        if !ty.has_opaque_types() && !NeedsDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let needs_drop = self.needs_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        needs_drop.seek_before_primary_effect(location);
        needs_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `NeedsNonConstDrop` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    pub(crate) fn needs_non_const_drop(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short-circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function.
        if !ty.has_opaque_types() && !NeedsNonConstDrop::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let needs_non_const_drop = self.needs_non_const_drop.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(NeedsNonConstDrop, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        needs_non_const_drop.seek_before_primary_effect(location);
        needs_non_const_drop.get().contains(local)
    }

    /// Returns `true` if `local` is `HasMutInterior` at the given `Location`.
    ///
    /// Only updates the cursor if absolutely necessary.
    fn has_mut_interior(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        local: Local,
        location: Location,
    ) -> bool {
        let ty = ccx.body.local_decls[local].ty;
        // Peeking into opaque types causes cycles if the current function declares said opaque
        // type. Thus we avoid short-circuiting on the type and instead run the more expensive
        // analysis that looks at the actual usage within this function.
        if !ty.has_opaque_types() && !HasMutInterior::in_any_value_of_ty(ccx, ty) {
            return false;
        }

        let has_mut_interior = self.has_mut_interior.get_or_insert_with(|| {
            let ConstCx { tcx, body, .. } = *ccx;

            FlowSensitiveAnalysis::new(HasMutInterior, ccx)
                .iterate_to_fixpoint(tcx, body, None)
                .into_results_cursor(body)
        });

        has_mut_interior.seek_before_primary_effect(location);
        has_mut_interior.get().contains(local)
    }

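    /// Collects the qualifs of the return place at the `Return` terminator, falling back
    /// to the conservative qualifs of the return type if this body never returns.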
    fn in_return_place(
        &mut self,
        ccx: &'mir ConstCx<'mir, 'tcx>,
        tainted_by_errors: Option<ErrorGuaranteed>,
    ) -> ConstQualifs {
        // FIXME(explicit_tail_calls): with tail calls, a body can now return without a
        // `Return` terminator; check whether that changes anything here.

        // Find the `Return` terminator if one exists.
        //
        // If no `Return` terminator exists, this MIR is divergent. Just return the conservative
        // qualifs for the return type.
        let return_block = ccx
            .body
            .basic_blocks
            .iter_enumerated()
            .find(|(_, block)| matches!(block.terminator().kind, TerminatorKind::Return))
            .map(|(bb, _)| bb);

        let Some(return_block) = return_block else {
            return qualifs::in_any_value_of_ty(ccx, ccx.body.return_ty(), tainted_by_errors);
        };

        let return_loc = ccx.body.terminator_loc(return_block);

        ConstQualifs {
            needs_drop: self.needs_drop(ccx, RETURN_PLACE, return_loc),
            needs_non_const_drop: self.needs_non_const_drop(ccx, RETURN_PLACE, return_loc),
            has_mut_interior: self.has_mut_interior(ccx, RETURN_PLACE, return_loc),
            tainted_by_errors,
        }
    }
}

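/// The MIR visitor that checks a single body for operations that are invalid in its
/// const context, emitting "primary" errors eagerly and buffering "secondary" ones.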
pub struct Checker<'mir, 'tcx> {
    ccx: &'mir ConstCx<'mir, 'tcx>,
    qualifs: Qualifs<'mir, 'tcx>,

    /// The span of the current statement.
    span: Span,

    /// A set that stores for each local whether it is "transient", i.e. guaranteed to be dead
    /// when this MIR body returns.
    transient_locals: Option<DenseBitSet<Local>>,

    error_emitted: Option<ErrorGuaranteed>,
    secondary_errors: Vec<Diag<'tcx>>,
}

impl<'mir, 'tcx> Deref for Checker<'mir, 'tcx> {
    type Target = ConstCx<'mir, 'tcx>;

    fn deref(&self) -> &Self::Target {
        self.ccx
    }
}

impl<'mir, 'tcx> Checker<'mir, 'tcx> {
    pub fn new(ccx: &'mir ConstCx<'mir, 'tcx>) -> Self {
        Checker {
            span: ccx.body.span,
            ccx,
            qualifs: Default::default(),
            transient_locals: None,
            error_emitted: None,
            secondary_errors: Vec::new(),
        }
    }

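    /// Const-checks the whole body, then flushes the accumulated "secondary" errors if no
    /// "primary" error was emitted (and cancels them otherwise).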
    pub fn check_body(&mut self) {
        let ConstCx { tcx, body, .. } = *self.ccx;
        let def_id = self.ccx.def_id();

        // `async` functions cannot be `const fn`. This is checked during AST lowering, so there's
        // no need to emit duplicate errors here.
        if self.ccx.is_async() || body.coroutine.is_some() {
            tcx.dcx().span_delayed_bug(body.span, "`async` functions cannot be `const fn`");
            return;
        }

        if !tcx.has_attr(def_id, sym::rustc_do_not_const_check) {
            self.visit_body(body);
        }

        // If we got through const-checking without emitting any "primary" errors, emit any
        // "secondary" errors if they occurred. Otherwise, cancel the "secondary" errors.
        let secondary_errors = mem::take(&mut self.secondary_errors);
        if self.error_emitted.is_none() {
            for error in secondary_errors {
                self.error_emitted = Some(error.emit());
            }
        } else {
            assert!(self.tcx.dcx().has_errors().is_some());
            for error in secondary_errors {
                error.cancel();
            }
        }
    }

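    /// Returns `true` if `local` is "transient", i.e. guaranteed to be storage-dead at
    /// every `Return` terminator. The transient set is computed once and cached.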
    fn local_is_transient(&mut self, local: Local) -> bool {
        let ccx = self.ccx;
        self.transient_locals
            .get_or_insert_with(|| {
                // A local is "transient" if it is guaranteed dead at all `Return` terminators.
                // So first compute the set of "maybe live" locals at each program point.
                let always_live_locals = &always_storage_live_locals(&ccx.body);
                let mut maybe_storage_live =
                    MaybeStorageLive::new(Cow::Borrowed(always_live_locals))
                        .iterate_to_fixpoint(ccx.tcx, &ccx.body, None)
                        .into_results_cursor(&ccx.body);

                // And then check all `Return` terminators in the MIR; if a local is "maybe live"
                // at a `Return` then it is definitely not transient.
                let mut transient = DenseBitSet::new_filled(ccx.body.local_decls.len());
                // Make sure to only visit reachable blocks, the dataflow engine can ICE otherwise.
                for (bb, data) in traversal::reachable(&ccx.body) {
                    if matches!(data.terminator().kind, TerminatorKind::Return) {
                        let location = ccx.body.terminator_loc(bb);
                        maybe_storage_live.seek_after_primary_effect(location);
                        // If a local may be live here, it is definitely not transient.
                        transient.subtract(maybe_storage_live.get());
                    }
                }

                transient
            })
            .contains(local)
    }

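    /// Returns the qualifs of the return place for this body; see `Qualifs::in_return_place`.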
    pub fn qualifs_in_return_place(&mut self) -> ConstQualifs {
        self.qualifs.in_return_place(self.ccx, self.error_emitted)
    }

    /// Emits an error if an expression cannot be evaluated in the current context.
    pub fn check_op(&mut self, op: impl NonConstOp<'tcx>) {
        self.check_op_spanned(op, self.span);
    }

    /// Emits an error at the given `span` if an expression cannot be evaluated in the current
    /// context.
    pub fn check_op_spanned<O: NonConstOp<'tcx>>(&mut self, op: O, span: Span) {
        let gate = match op.status_in_item(self.ccx) {
            Status::Unstable {
                gate,
                safe_to_expose_on_stable,
                is_function_call,
                gate_already_checked,
            } if gate_already_checked || self.tcx.features().enabled(gate) => {
                if gate_already_checked {
                    assert!(
                        !safe_to_expose_on_stable,
                        "setting `gate_already_checked` without `safe_to_expose_on_stable` makes no sense"
                    );
                }
                // Generally this is allowed since the feature gate is enabled -- except
                // if this function wants to be safe-to-expose-on-stable.
                if !safe_to_expose_on_stable
                    && self.enforce_recursive_const_stability()
                    && !super::rustc_allow_const_fn_unstable(self.tcx, self.def_id(), gate)
                {
                    emit_unstable_in_stable_exposed_error(self.ccx, span, gate, is_function_call);
                }

                return;
            }

            Status::Unstable { gate, .. } => Some(gate),
            Status::Forbidden => None,
        };

        if self.tcx.sess.opts.unstable_opts.unleash_the_miri_inside_of_you {
            self.tcx.sess.miri_unleashed_feature(span, gate);
            return;
        }

        let err = op.build_error(self.ccx, span);
        assert!(err.is_error());

        match op.importance() {
            ops::DiagImportance::Primary => {
                let reported = err.emit();
                self.error_emitted = Some(reported);
            }

            ops::DiagImportance::Secondary => {
                self.secondary_errors.push(err);
                self.tcx.dcx().span_delayed_bug(
                    span,
                    "compilation must fail when there is a secondary const checker error",
                );
            }
        }
    }

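    /// Ensures that a static accessed here is well-formed. Thread-local statics are
    /// rejected elsewhere, in `Rvalue::ThreadLocalRef`.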
    fn check_static(&mut self, def_id: DefId, span: Span) {
        if self.tcx.is_thread_local_static(def_id) {
            self.tcx.dcx().span_bug(span, "tls access is checked in `Rvalue::ThreadLocalRef`");
        }
        if let Some(def_id) = def_id.as_local()
            && let Err(guar) = self.tcx.ensure_ok().check_well_formed(hir::OwnerId { def_id })
        {
            self.error_emitted = Some(guar);
        }
    }

    /// Returns whether this place can possibly escape the evaluation of the current const/static
    /// initializer. The check assumes that all already existing pointers and references point to
    /// non-escaping places.
    fn place_may_escape(&mut self, place: &Place<'_>) -> bool {
        let is_transient = match self.const_kind() {
            // In a const fn all borrows are transient or point to the places given via
            // references in the arguments (so we already checked them with
            // TransientMutBorrow/MutBorrow as appropriate).
            // The borrow checker guarantees that no new non-transient borrows are created.
            // NOTE: Once we have heap allocations during CTFE we need to figure out
            // how to prevent `const fn` from creating long-lived allocations that point
            // to mutable memory.
            hir::ConstContext::ConstFn => true,
            _ => {
                // For indirect places, we are not creating a new permanent borrow; it's just as
                // transient as the already existing one.
                // Locals with StorageDead do not live beyond the evaluation and can
                // thus safely be borrowed without being able to be leaked to the final
                // value of the constant.
                // Note: This is only sound if every local that has a `StorageDead` has a
                // `StorageDead` in every control flow path leading to a `return` terminator.
                // If anything slips through, there's no safety net -- safe code can create
                // references to variants of `!Freeze` enums as long as that variant is `Freeze`, so
                // interning can't protect us here. (There *is* a safety net for mutable references
                // though, interning will ICE if we miss something here.)
                place.is_indirect() || self.local_is_transient(place.local)
            }
        };
        // Transient places cannot possibly escape because the place doesn't exist any more at the
        // end of evaluation.
        !is_transient
    }

    /// Revalidates the const conditions of a conditionally-const callee at this call site.
    /// Returns `None` if the callee has no const conditions, and otherwise whether they hold.
    fn revalidate_conditional_constness(
        &mut self,
        callee: DefId,
        callee_args: ty::GenericArgsRef<'tcx>,
        call_span: Span,
    ) -> Option<ConstConditionsHold> {
        let tcx = self.tcx;
        if !tcx.is_conditionally_const(callee) {
            return None;
        }

        let const_conditions = tcx.const_conditions(callee).instantiate(tcx, callee_args);
        if const_conditions.is_empty() {
            return None;
        }

        let (infcx, param_env) = tcx.infer_ctxt().build_with_typing_env(self.body.typing_env(tcx));
        let ocx = ObligationCtxt::new(&infcx);

        let body_id = self.body.source.def_id().expect_local();
        let host_polarity = match self.const_kind() {
            hir::ConstContext::ConstFn => ty::BoundConstness::Maybe,
            hir::ConstContext::Static(_) | hir::ConstContext::Const { .. } => {
                ty::BoundConstness::Const
            }
        };
        let const_conditions =
            ocx.normalize(&ObligationCause::misc(call_span, body_id), param_env, const_conditions);
        ocx.register_obligations(const_conditions.into_iter().map(|(trait_ref, span)| {
            Obligation::new(
                tcx,
                ObligationCause::new(
                    call_span,
                    body_id,
                    ObligationCauseCode::WhereClause(callee, span),
                ),
                param_env,
                trait_ref.to_host_effect_clause(tcx, host_polarity),
            )
        }));

        let errors = ocx.select_all_or_error();
        if errors.is_empty() {
            Some(ConstConditionsHold::Yes)
        } else {
            tcx.dcx()
                .span_delayed_bug(call_span, "this should have reported a [const] error in HIR");
            Some(ConstConditionsHold::No)
        }
    }

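    /// Checks a `Drop` terminator, rejecting live drops of values whose destructors cannot
    /// run in a const context. An illustrative (hypothetical) example of code this rejects:
    ///
    /// ```ignore (illustrative)
    /// const fn f(v: Vec<u8>) {
    ///     let x = v;
    /// } // error: `x` is dropped here, and `Vec<u8>` has a non-const destructor
    /// ```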
    pub fn check_drop_terminator(
        &mut self,
        dropped_place: Place<'tcx>,
        location: Location,
        terminator_span: Span,
    ) {
        let ty_of_dropped_place = dropped_place.ty(self.body, self.tcx).ty;

        let needs_drop = if let Some(local) = dropped_place.as_local() {
            self.qualifs.needs_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };
        // If this type doesn't need a drop at all, then there's nothing to enforce.
        if !needs_drop {
            return;
        }

        let mut err_span = self.span;
        let needs_non_const_drop = if let Some(local) = dropped_place.as_local() {
            // Use the span where the local was declared as the span of the drop error.
            err_span = self.body.local_decls[local].source_info.span;
            self.qualifs.needs_non_const_drop(self.ccx, local, location)
        } else {
            qualifs::NeedsNonConstDrop::in_any_value_of_ty(self.ccx, ty_of_dropped_place)
        };

        self.check_op_spanned(
            ops::LiveDrop {
                dropped_at: terminator_span,
                dropped_ty: ty_of_dropped_place,
                needs_non_const_drop,
            },
            err_span,
        );
    }

    /// Check the const stability of the given item (fn or trait).
    fn check_callee_stability(&mut self, def_id: DefId) {
        match self.tcx.lookup_const_stability(def_id) {
            Some(hir::ConstStability { level: hir::StabilityLevel::Stable { .. }, .. }) => {
                // All good.
            }
            None => {
                // This doesn't need a separate const-stability check -- const-stability equals
                // regular stability, and regular stability is checked separately.
                // However, we *do* have to worry about *recursive* const stability.
                if self.enforce_recursive_const_stability()
                    && !is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id)
                {
                    self.dcx().emit_err(errors::UnmarkedConstItemExposed {
                        span: self.span,
                        def_path: self.tcx.def_path_str(def_id),
                    });
                }
            }
            Some(hir::ConstStability {
                level: hir::StabilityLevel::Unstable { implied_by: implied_feature, issue, .. },
                feature,
                ..
            }) => {
                // An unstable const fn/trait with a feature gate.
                let callee_safe_to_expose_on_stable =
                    is_fn_or_trait_safe_to_expose_on_stable(self.tcx, def_id);

                // We only honor `span.allows_unstable` aka `#[allow_internal_unstable]` if
                // the callee is safe to expose, to avoid bypassing recursive stability.
                // This is not ideal since it means the user sees an error, not the macro
                // author, but that's also the case if one forgets to set
                // `#[allow_internal_unstable]` in the first place. Note that this cannot be
                // integrated in the check below since we want to enforce
                // `callee_safe_to_expose_on_stable` even if
                // `!self.enforce_recursive_const_stability()`.
                if (self.span.allows_unstable(feature)
                    || implied_feature.is_some_and(|f| self.span.allows_unstable(f)))
                    && callee_safe_to_expose_on_stable
                {
                    return;
                }

                // We can't use `check_op` to check whether the feature is enabled because
                // the logic is a bit different than elsewhere: local functions don't need
                // the feature gate, and there might be an "implied" gate that also suffices
                // to allow this.
                let feature_enabled = def_id.is_local()
                    || self.tcx.features().enabled(feature)
                    || implied_feature.is_some_and(|f| self.tcx.features().enabled(f))
                    || {
                        // When we're compiling the compiler itself we may pull in
                        // crates from crates.io, but those crates may depend on other
                        // crates also pulled in from crates.io. We want to ideally be
                        // able to compile everything without requiring upstream
                        // modifications, so in the case that this looks like a
                        // `rustc_private` crate (e.g., a compiler crate) and we also have
                        // the `-Z force-unstable-if-unmarked` flag present (we're
                        // compiling a compiler crate), then let this missing feature
                        // annotation slide.
                        // This matches what we do in `eval_stability_allow_unstable` for
                        // regular stability.
                        feature == sym::rustc_private
                            && issue == NonZero::new(27812)
                            && self.tcx.sess.opts.unstable_opts.force_unstable_if_unmarked
                    };
                // Even if the feature is enabled, we still need `check_op` to double-check
                // this if the callee is not safe to expose on stable.
                if !feature_enabled || !callee_safe_to_expose_on_stable {
                    self.check_op(ops::CallUnstable {
                        def_id,
                        feature,
                        feature_enabled,
                        safe_to_expose_on_stable: callee_safe_to_expose_on_stable,
                        is_function_call: self.tcx.def_kind(def_id) != DefKind::Trait,
                    });
                }
            }
        }
    }
}

impl<'tcx> Visitor<'tcx> for Checker<'_, 'tcx> {
    fn visit_basic_block_data(&mut self, bb: BasicBlock, block: &BasicBlockData<'tcx>) {
        trace!("visit_basic_block_data: bb={:?} is_cleanup={:?}", bb, block.is_cleanup);

        // We don't const-check basic blocks on the cleanup path since we never unwind during
        // const-eval: a panic causes an immediate compile error. In other words, cleanup blocks
        // are unreachable during const-eval.
        //
        // We can't be more conservative (e.g., by const-checking cleanup blocks anyways) because
        // locals that would never be dropped during normal execution are sometimes dropped during
        // unwinding, which means backwards-incompatible live-drop errors.
        if block.is_cleanup {
            return;
        }

        self.super_basic_block_data(bb, block);
    }

    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        trace!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);

        self.super_rvalue(rvalue, location);

        match rvalue {
            Rvalue::ThreadLocalRef(_) => self.check_op(ops::ThreadLocalAccess),

            Rvalue::Use(_)
            | Rvalue::CopyForDeref(..)
            | Rvalue::Repeat(..)
            | Rvalue::Discriminant(..)
            | Rvalue::Len(_) => {}

            Rvalue::Aggregate(kind, ..) => {
                if let AggregateKind::Coroutine(def_id, ..) = kind.as_ref()
                    && let Some(coroutine_kind) = self.tcx.coroutine_kind(def_id)
                {
                    self.check_op(ops::Coroutine(coroutine_kind));
                }
            }

            Rvalue::Ref(_, BorrowKind::Mut { .. }, place)
            | Rvalue::RawPtr(RawPtrKind::Mut, place) => {
                // Inside mutable statics, we allow arbitrary mutable references.
                // We've allowed `static mut FOO = &mut [elements];` for a long time (the exact
                // reasons why are lost to history), and there is no reason to restrict that to
                // arrays and slices.
                let is_allowed =
                    self.const_kind() == hir::ConstContext::Static(hir::Mutability::Mut);

                if !is_allowed && self.place_may_escape(place) {
                    self.check_op(ops::EscapingMutBorrow);
                }
            }

            Rvalue::Ref(_, BorrowKind::Shared | BorrowKind::Fake(_), place)
            | Rvalue::RawPtr(RawPtrKind::Const, place) => {
                let borrowed_place_has_mut_interior = qualifs::in_place::<HasMutInterior, _>(
                    self.ccx,
                    &mut |local| self.qualifs.has_mut_interior(self.ccx, local, location),
                    place.as_ref(),
                );

                if borrowed_place_has_mut_interior && self.place_may_escape(place) {
                    self.check_op(ops::EscapingCellBorrow);
                }
            }

            Rvalue::RawPtr(RawPtrKind::FakeForPtrMetadata, place) => {
                // These are only inserted for slice length, so the place must already be indirect.
                // This implies we do not have to worry about whether the borrow escapes.
                if !place.is_indirect() {
                    self.tcx.dcx().span_delayed_bug(
                        self.body.source_info(location).span,
                        "fake borrows are always indirect",
                    );
                }
            }

            Rvalue::Cast(
                CastKind::PointerCoercion(
                    PointerCoercion::MutToConstPointer
                    | PointerCoercion::ArrayToPointer
                    | PointerCoercion::UnsafeFnPointer
                    | PointerCoercion::ClosureFnPointer(_)
                    | PointerCoercion::ReifyFnPointer,
                    _,
                ),
                _,
                _,
            ) => {
                // These are all okay; they only change the type, not the data.
            }

            Rvalue::Cast(CastKind::PointerExposeProvenance, _, _) => {
                self.check_op(ops::RawPtrToIntCast);
            }
            Rvalue::Cast(CastKind::PointerWithExposedProvenance, _, _) => {
                // Since no pointer can ever get exposed (rejected above), this is easy to support.
            }

            Rvalue::Cast(_, _, _) => {}

            Rvalue::NullaryOp(
                NullOp::SizeOf
                | NullOp::AlignOf
                | NullOp::OffsetOf(_)
                | NullOp::UbChecks
                | NullOp::ContractChecks,
                _,
            ) => {}
            Rvalue::ShallowInitBox(_, _) => {}

            Rvalue::UnaryOp(op, operand) => {
                let ty = operand.ty(self.body, self.tcx);
                match op {
                    UnOp::Not | UnOp::Neg => {
                        if is_int_bool_float_or_char(ty) {
                            // Int, bool, float, and char operations are fine.
                        } else {
                            span_bug!(
                                self.span,
                                "non-primitive type in `Rvalue::UnaryOp{op:?}`: {ty:?}",
                            );
                        }
                    }
                    UnOp::PtrMetadata => {
                        // Getting the metadata from a pointer is always const.
                        // We already validated the type is valid in the validator.
                    }
                }
            }

            Rvalue::BinaryOp(op, box (lhs, rhs)) => {
                let lhs_ty = lhs.ty(self.body, self.tcx);
                let rhs_ty = rhs.ty(self.body, self.tcx);

                if is_int_bool_float_or_char(lhs_ty) && is_int_bool_float_or_char(rhs_ty) {
                    // Int, bool, float, and char operations are fine.
                } else if lhs_ty.is_fn_ptr() || lhs_ty.is_raw_ptr() {
                    assert_matches!(
                        op,
                        BinOp::Eq
                            | BinOp::Ne
                            | BinOp::Le
                            | BinOp::Lt
                            | BinOp::Ge
                            | BinOp::Gt
                            | BinOp::Offset
                    );

                    self.check_op(ops::RawPtrComparison);
                } else {
                    span_bug!(
                        self.span,
                        "non-primitive type in `Rvalue::BinaryOp`: {:?} ⚬ {:?}",
                        lhs_ty,
                        rhs_ty
                    );
                }
            }

            Rvalue::WrapUnsafeBinder(..) => {
                // Unsafe binders are always trivial to create.
            }
        }
    }

713
714    fn visit_operand(&mut self, op: &Operand<'tcx>, location: Location) {
715        self.super_operand(op, location);
716        if let Operand::Constant(c) = op
717            && let Some(def_id) = c.check_static_ptr(self.tcx)
718        {
719            self.check_static(def_id, self.span);
720        }
721    }
722
723    fn visit_source_info(&mut self, source_info: &SourceInfo) {
724        trace!("visit_source_info: source_info={:?}", source_info);
725        self.span = source_info.span;
726    }
727
728    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
729        trace!("visit_statement: statement={:?} location={:?}", statement, location);
730
731        self.super_statement(statement, location);
732
733        match statement.kind {
734            StatementKind::Assign(..)
735            | StatementKind::SetDiscriminant { .. }
736            | StatementKind::Deinit(..)
737            | StatementKind::FakeRead(..)
738            | StatementKind::StorageLive(_)
739            | StatementKind::StorageDead(_)
740            | StatementKind::Retag { .. }
741            | StatementKind::PlaceMention(..)
742            | StatementKind::AscribeUserType(..)
743            | StatementKind::Coverage(..)
744            | StatementKind::Intrinsic(..)
745            | StatementKind::ConstEvalCounter
746            | StatementKind::BackwardIncompatibleDropHint { .. }
747            | StatementKind::Nop => {}
748        }
749    }
750
    #[instrument(level = "debug", skip(self))]
    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        self.super_terminator(terminator, location);

        match &terminator.kind {
            TerminatorKind::Call { func, args, fn_span, .. }
            | TerminatorKind::TailCall { func, args, fn_span, .. } => {
                let call_source = match terminator.kind {
                    TerminatorKind::Call { call_source, .. } => call_source,
                    TerminatorKind::TailCall { .. } => CallSource::Normal,
                    _ => unreachable!(),
                };

                let ConstCx { tcx, body, .. } = *self.ccx;

                let fn_ty = func.ty(body, tcx);

                let (callee, fn_args) = match *fn_ty.kind() {
                    ty::FnDef(def_id, fn_args) => (def_id, fn_args),

                    ty::FnPtr(..) => {
                        self.check_op(ops::FnCallIndirect);
                        // We can get here without an error in miri-unleashed mode... might as well
                        // skip the rest of the checks then.
                        return;
                    }
                    _ => {
                        span_bug!(terminator.source_info.span, "invalid callee of type {:?}", fn_ty)
                    }
                };

                let has_const_conditions =
                    self.revalidate_conditional_constness(callee, fn_args, *fn_span);

                // Attempting to call a trait method?
                if let Some(trait_did) = tcx.trait_of_assoc(callee) {
                    // We can't determine the actual callee here, so we have to do different checks
                    // than usual.

                    trace!("attempting to call a trait method");
                    let trait_is_const = tcx.is_const_trait(trait_did);

                    // Only consider a trait to be const if the const conditions hold.
                    // Otherwise, it's really misleading to call something "conditionally"
                    // const when it's very obviously not conditionally const.
                    if trait_is_const && has_const_conditions == Some(ConstConditionsHold::Yes) {
                        // Trait calls are always conditionally-const.
                        self.check_op(ops::ConditionallyConstCall {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                        self.check_callee_stability(trait_did);
                    } else {
                        // Not a const trait, or the const conditions don't hold.
                        self.check_op(ops::FnCallNonConst {
                            callee,
                            args: fn_args,
                            span: *fn_span,
                            call_source,
                        });
                    }
                    // That's all we can check here.
                    return;
                }

                // Even if we know the callee, ensure we can use conditionally-const calls.
                if has_const_conditions.is_some() {
                    self.check_op(ops::ConditionallyConstCall {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                }

                // At this point, we are calling a function, `callee`, whose `DefId` is known...

                // `begin_panic` and `#[rustc_const_panic_str]` functions accept generic
                // types other than str. Check to enforce that only str can be used in
                // const-eval.

                // const-eval of the `begin_panic` fn assumes the argument is `&str`
                if tcx.is_lang_item(callee, LangItem::BeginPanic) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if ty.is_str() => {}
                        _ => self.check_op(ops::PanicNonStr),
                    }
                    // Allow this call, skip all the checks below.
                    return;
                }

                // const-eval of `#[rustc_const_panic_str]` functions assumes the argument is `&&str`
                if tcx.has_attr(callee, sym::rustc_const_panic_str) {
                    match args[0].node.ty(&self.ccx.body.local_decls, tcx).kind() {
                        ty::Ref(_, ty, _) if matches!(ty.kind(), ty::Ref(_, ty, _) if ty.is_str()) =>
                            {}
                        _ => {
                            self.check_op(ops::PanicNonStr);
                        }
                    }
                    // Allow this call, skip all the checks below.
                    return;
                }

                // This can be called on stable via the `vec!` macro.
                if tcx.is_lang_item(callee, LangItem::ExchangeMalloc) {
                    self.check_op(ops::HeapAllocation);
                    // Allow this call, skip all the checks below.
                    return;
                }

                // Intrinsics are language primitives, not regular calls, so treat them separately.
                if let Some(intrinsic) = tcx.intrinsic(callee) {
                    if !tcx.is_const_fn(callee) {
                        // Non-const intrinsic.
                        self.check_op(ops::IntrinsicNonConst { name: intrinsic.name });
                        // If we allowed this, we're in miri-unleashed mode, so we might
                        // as well skip the remaining checks.
                        return;
                    }
                    // We use `intrinsic.const_stable` to determine if this can be safely exposed to
                    // stable code, rather than `const_stable_indirect`. This is to make
                    // `#[rustc_const_stable_indirect]` an attribute that is always safe to add.
                    // We also ask `is_fn_or_trait_safe_to_expose_on_stable`; this determines
                    // whether the intrinsic's fallback body is safe to expose on stable.
                    let is_const_stable = intrinsic.const_stable
                        || (!intrinsic.must_be_overridden
                            && is_fn_or_trait_safe_to_expose_on_stable(tcx, callee));
                    match tcx.lookup_const_stability(callee) {
                        None => {
                            // This doesn't need a separate const-stability check -- const-stability equals
                            // regular stability, and regular stability is checked separately.
                            // However, we *do* have to worry about *recursive* const stability.
                            if !is_const_stable && self.enforce_recursive_const_stability() {
                                self.dcx().emit_err(errors::UnmarkedIntrinsicExposed {
                                    span: self.span,
                                    def_path: self.tcx.def_path_str(callee),
                                });
                            }
                        }
                        Some(hir::ConstStability {
                            level: hir::StabilityLevel::Unstable { .. },
                            feature,
                            ..
                        }) => {
                            self.check_op(ops::IntrinsicUnstable {
                                name: intrinsic.name,
                                feature,
                                const_stable_indirect: is_const_stable,
                            });
                        }
                        Some(hir::ConstStability {
                            level: hir::StabilityLevel::Stable { .. },
                            ..
                        }) => {
                            // All good. Note that a `#[rustc_const_stable]` intrinsic (meaning it
                            // can be *directly* invoked from stable const code) does not always
                            // have the `#[rustc_intrinsic_const_stable_indirect]` attribute (which controls
                            // exposing an intrinsic indirectly); we accept this call anyway.
                        }
                    }
                    // This completes the checks for intrinsics.
                    return;
                }

                if !tcx.is_const_fn(callee) {
                    self.check_op(ops::FnCallNonConst {
                        callee,
                        args: fn_args,
                        span: *fn_span,
                        call_source,
                    });
                    // If we allowed this, we're in miri-unleashed mode, so we might
                    // as well skip the remaining checks.
                    return;
                }

                // Finally, stability for regular function calls -- this is the big one.
                self.check_callee_stability(callee);
            }

            // Forbid `Drop` terminators unless the place being dropped is known not to be
            // `NeedsNonConstDrop`.
            TerminatorKind::Drop { place: dropped_place, .. } => {
                // If we are checking live drops after drop-elaboration, don't emit duplicate
                // errors here.
                if super::post_drop_elaboration::checking_enabled(self.ccx) {
                    return;
                }

                self.check_drop_terminator(*dropped_place, location, terminator.source_info.span);
            }

            TerminatorKind::InlineAsm { .. } => self.check_op(ops::InlineAsm),

            TerminatorKind::Yield { .. } => {
                self.check_op(ops::Coroutine(
                    self.tcx
                        .coroutine_kind(self.body.source.def_id())
                        .expect("Only expected to have a yield in a coroutine"),
                ));
            }

            TerminatorKind::CoroutineDrop => {
                span_bug!(
                    self.body.source_info(location).span,
                    "We should not encounter TerminatorKind::CoroutineDrop after coroutine transform"
                );
            }

            TerminatorKind::UnwindTerminate(_) => {
                // Cleanup blocks are skipped for const checking (see `visit_basic_block_data`).
                span_bug!(self.span, "`Terminate` terminator outside of cleanup block")
            }

            TerminatorKind::Assert { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::Goto { .. }
            | TerminatorKind::UnwindResume
            | TerminatorKind::Return
            | TerminatorKind::SwitchInt { .. }
            | TerminatorKind::Unreachable => {}
        }
    }
}

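/// Returns `true` if `ty` is a primitive integer, `bool`, float, or `char`: the types
/// whose built-in operators are unconditionally allowed in const contexts.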
fn is_int_bool_float_or_char(ty: Ty<'_>) -> bool {
    ty.is_bool() || ty.is_integral() || ty.is_char() || ty.is_floating_point()
}

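/// Emits the error for a feature-gated const operation used inside a function that is
/// itself exposed to stable const callers.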
fn emit_unstable_in_stable_exposed_error(
    ccx: &ConstCx<'_, '_>,
    span: Span,
    gate: Symbol,
    is_function_call: bool,
) -> ErrorGuaranteed {
    let attr_span = ccx.tcx.def_span(ccx.def_id()).shrink_to_lo();

    ccx.dcx().emit_err(errors::UnstableInStableExposed {
        gate: gate.to_string(),
        span,
        attr_span,
        is_function_call,
        is_function_call2: is_function_call,
    })
}