// rustc_mir_transform/validate.rs

//! Validates the MIR to ensure that invariants are upheld.

use rustc_abi::{ExternAbi, FIRST_VARIANT, Size};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_hir::LangItem;
use rustc_hir::attrs::InlineAttr;
use rustc_index::IndexVec;
use rustc_index::bit_set::DenseBitSet;
use rustc_infer::infer::TyCtxtInferExt;
use rustc_infer::traits::{Obligation, ObligationCause};
use rustc_middle::mir::coverage::CoverageKind;
use rustc_middle::mir::visit::{NonUseContext, PlaceContext, Visitor};
use rustc_middle::mir::*;
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::print::with_no_trimmed_paths;
use rustc_middle::ty::{
    self, CoroutineArgsExt, InstanceKind, ScalarInt, Ty, TyCtxt, TypeVisitableExt, Upcast, Variance,
};
use rustc_middle::{bug, span_bug};
use rustc_trait_selection::traits::ObligationCtxt;

use crate::util::{self, is_within_packed};

#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum EdgeKind {
    Unwind,
    Normal,
}
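
// Illustrative only (not from the original source): in pretty-printed MIR, a call to
// some hypothetical `f` such as
//
//     bb0: { _1 = f() -> [return: bb1, unwind: bb2]; }
//     bb2 (cleanup): { resume; }
//
// has a `Normal` edge bb0 -> bb1 and an `Unwind` edge bb0 -> bb2; only the latter
// participates in the unwind-invariant checks below.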

pub(super) struct Validator {
    /// Describes at which point in the pipeline this validation is happening.
    pub when: String,
}

impl<'tcx> crate::MirPass<'tcx> for Validator {
    fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
        // FIXME(JakobDegen): These bodies are never instantiated in codegen anyway, so it's not
        // terribly important that they pass the validator. However, I think other passes might
        // still see them, in which case they might be surprised. It would probably be better if we
        // didn't put this through the MIR pipeline at all.
        if matches!(body.source.instance, InstanceKind::Intrinsic(..) | InstanceKind::Virtual(..)) {
            return;
        }
        let def_id = body.source.def_id();
        let typing_env = body.typing_env(tcx);
        let can_unwind = if body.phase <= MirPhase::Runtime(RuntimePhase::Initial) {
            // In this case `AbortUnwindingCalls` hasn't yet been executed.
            true
        } else if !tcx.def_kind(def_id).is_fn_like() {
            true
        } else {
            let body_ty = tcx.type_of(def_id).skip_binder();
            let body_abi = match body_ty.kind() {
                ty::FnDef(..) => body_ty.fn_sig(tcx).abi(),
                ty::Closure(..) => ExternAbi::RustCall,
                ty::CoroutineClosure(..) => ExternAbi::RustCall,
                ty::Coroutine(..) => ExternAbi::Rust,
                // No need to do MIR validation on error bodies
                ty::Error(_) => return,
                _ => span_bug!(body.span, "unexpected body ty: {body_ty}"),
            };

            ty::layout::fn_can_unwind(tcx, Some(def_id), body_abi)
        };

        let mut cfg_checker = CfgChecker {
            when: &self.when,
            body,
            tcx,
            unwind_edge_count: 0,
            reachable_blocks: traversal::reachable_as_bitset(body),
            value_cache: FxHashSet::default(),
            can_unwind,
        };
        cfg_checker.visit_body(body);
        cfg_checker.check_cleanup_control_flow();

        // Also run the TypeChecker.
        for (location, msg) in validate_types(tcx, typing_env, body, body) {
            cfg_checker.fail(location, msg);
        }

        if let MirPhase::Runtime(_) = body.phase
            && let ty::InstanceKind::Item(_) = body.source.instance
            && body.has_free_regions()
        {
            cfg_checker.fail(
                Location::START,
                format!("Free regions in optimized {} MIR", body.phase.name()),
            );
        }
    }

    fn is_required(&self) -> bool {
        true
    }
}

/// This checker covers basic properties of the control-flow graph, (dis)allowed statements and terminators.
/// Everything checked here must be stable under substitution of generic parameters. In other words,
/// this is about the *structure* of the MIR, not the *contents*.
///
/// Everything that depends on types, or otherwise can be affected by generic parameters,
/// must be checked in `TypeChecker`.
struct CfgChecker<'a, 'tcx> {
    when: &'a str,
    body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    unwind_edge_count: usize,
    reachable_blocks: DenseBitSet<BasicBlock>,
    value_cache: FxHashSet<u128>,
    // If `false`, then the MIR must not contain `UnwindAction::Continue` or
    // `TerminatorKind::Resume`.
    can_unwind: bool,
}

impl<'a, 'tcx> CfgChecker<'a, 'tcx> {
    #[track_caller]
    fn fail(&self, location: Location, msg: impl AsRef<str>) {
        // We might see broken MIR when other errors have already occurred.
        if self.tcx.dcx().has_errors().is_none() {
            span_bug!(
                self.body.source_info(location).span,
                "broken MIR in {:?} ({}) at {:?}:\n{}",
                self.body.source.instance,
                self.when,
                location,
                msg.as_ref(),
            );
        }
    }

    fn check_edge(&mut self, location: Location, bb: BasicBlock, edge_kind: EdgeKind) {
        if bb == START_BLOCK {
            self.fail(location, "start block must not have predecessors")
        }
        if let Some(bb) = self.body.basic_blocks.get(bb) {
            let src = self.body.basic_blocks.get(location.block).unwrap();
            match (src.is_cleanup, bb.is_cleanup, edge_kind) {
                // Non-cleanup blocks can jump to non-cleanup blocks along non-unwind edges
                (false, false, EdgeKind::Normal)
                // Cleanup blocks can jump to cleanup blocks along non-unwind edges
                | (true, true, EdgeKind::Normal) => {}
                // Non-cleanup blocks can jump to cleanup blocks along unwind edges
                (false, true, EdgeKind::Unwind) => {
                    self.unwind_edge_count += 1;
                }
                // All other jumps are invalid
                _ => {
                    self.fail(
                        location,
                        format!(
                            "{:?} edge to {:?} violates unwind invariants (cleanup {:?} -> {:?})",
                            edge_kind,
                            bb,
                            src.is_cleanup,
                            bb.is_cleanup,
                        )
                    )
                }
            }
        } else {
            self.fail(location, format!("encountered jump to invalid basic block {bb:?}"))
        }
    }

    fn check_cleanup_control_flow(&self) {
        if self.unwind_edge_count <= 1 {
            return;
        }
        let doms = self.body.basic_blocks.dominators();
        let mut post_contract_node = FxHashMap::default();
        // Reusing the allocation across invocations of the closure
        let mut dom_path = vec![];
        let mut get_post_contract_node = |mut bb| {
            let root = loop {
                if let Some(root) = post_contract_node.get(&bb) {
                    break *root;
                }
                let parent = doms.immediate_dominator(bb).unwrap();
                dom_path.push(bb);
                if !self.body.basic_blocks[parent].is_cleanup {
                    break bb;
                }
                bb = parent;
            };
            for bb in dom_path.drain(..) {
                post_contract_node.insert(bb, root);
            }
            root
        };

        let mut parent = IndexVec::from_elem(None, &self.body.basic_blocks);
        for (bb, bb_data) in self.body.basic_blocks.iter_enumerated() {
            if !bb_data.is_cleanup || !self.reachable_blocks.contains(bb) {
                continue;
            }
            let bb = get_post_contract_node(bb);
            for s in bb_data.terminator().successors() {
                let s = get_post_contract_node(s);
                if s == bb {
                    continue;
                }
                let parent = &mut parent[bb];
                match parent {
                    None => {
                        *parent = Some(s);
                    }
                    Some(e) if *e == s => (),
                    Some(e) => self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: The blocks dominated by {:?} have edges to both {:?} and {:?}",
                            bb,
                            s,
                            *e
                        )
                    ),
                }
            }
        }

        // Check for cycles
        let mut stack = FxHashSet::default();
        for (mut bb, parent) in parent.iter_enumerated_mut() {
            stack.clear();
            stack.insert(bb);
            loop {
                let Some(parent) = parent.take() else { break };
                let no_cycle = stack.insert(parent);
                if !no_cycle {
                    self.fail(
                        Location { block: bb, statement_index: 0 },
                        format!(
                            "Cleanup control flow violation: Cycle involving edge {bb:?} -> {parent:?}",
                        ),
                    );
                    break;
                }
                bb = parent;
            }
        }
    }
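
    // Illustrative only (not from the original source): after contracting every cleanup
    // block to its highest dominating cleanup block (the one whose immediate dominator is
    // not a cleanup block), the cleanup subgraph must form a forest. A contracted region
    // with outgoing edges to two different regions triggers the "edges to both" failure
    // above, and a cycle between regions triggers the "Cycle involving edge" failure.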

    fn check_unwind_edge(&mut self, location: Location, unwind: UnwindAction) {
        let is_cleanup = self.body.basic_blocks[location.block].is_cleanup;
        match unwind {
            UnwindAction::Cleanup(unwind) => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Cleanup` in cleanup block");
                }
                self.check_edge(location, unwind, EdgeKind::Unwind);
            }
            UnwindAction::Continue => {
                if is_cleanup {
                    self.fail(location, "`UnwindAction::Continue` in cleanup block");
                }

                if !self.can_unwind {
                    self.fail(location, "`UnwindAction::Continue` in no-unwind function");
                }
            }
            UnwindAction::Terminate(UnwindTerminateReason::InCleanup) => {
                if !is_cleanup {
                    self.fail(
                        location,
                        "`UnwindAction::Terminate(InCleanup)` in a non-cleanup block",
                    );
                }
            }
            // These are allowed everywhere.
            UnwindAction::Unreachable | UnwindAction::Terminate(UnwindTerminateReason::Abi) => (),
        }
    }

    fn is_critical_call_edge(&self, target: Option<BasicBlock>, unwind: UnwindAction) -> bool {
        let Some(target) = target else { return false };
        matches!(unwind, UnwindAction::Cleanup(_) | UnwindAction::Terminate(_))
            && self.body.basic_blocks.predecessors()[target].len() > 1
    }
}
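
// Illustrative only (not from the original source): `is_critical_call_edge` above flags a
// `Call` whose return edge is "critical", i.e. the call has more than one successor (a
// return target plus an unwind action) and the return block has more than one predecessor,
// as in
//
//     bb0: { _1 = f() -> [return: bb2, unwind: bb3]; }
//     bb1: { goto -> bb2; }
//
// Codegen wants to emit return-edge code on bb0 -> bb2, which is only well-defined when
// that edge is not critical.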

impl<'a, 'tcx> Visitor<'tcx> for CfgChecker<'a, 'tcx> {
    fn visit_local(&mut self, local: Local, _context: PlaceContext, location: Location) {
        if self.body.local_decls.get(local).is_none() {
            self.fail(
                location,
                format!("local {local:?} has no corresponding declaration in `body.local_decls`"),
            );
        }
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::AscribeUserType(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::SetDiscriminant { .. } => {
                if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`SetDiscriminant` is not allowed until deaggregation");
                }
            }
            StatementKind::Deinit(..) => {
                if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`Deinit` is not allowed until deaggregation");
                }
            }
            StatementKind::Retag(kind, _) => {
                // FIXME(JakobDegen) The validator should check that `self.body.phase <
                // DropsLowered`. However, this causes ICEs with generation of drop shims, which
                // seem to fail to set their `MirPhase` correctly.
                if matches!(kind, RetagKind::TwoPhase) {
                    self.fail(location, format!("explicit `{kind:?}` is forbidden"));
                }
            }
            StatementKind::Coverage(kind) => {
                if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup)
                    && let CoverageKind::BlockMarker { .. } | CoverageKind::SpanMarker { .. } = kind
                {
                    self.fail(
                        location,
                        format!("{kind:?} should have been removed after analysis"),
                    );
                }
            }
            StatementKind::Assign(..)
            | StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Intrinsic(_)
            | StatementKind::ConstEvalCounter
            | StatementKind::PlaceMention(..)
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::Goto { target } => {
                self.check_edge(location, *target, EdgeKind::Normal);
            }
            TerminatorKind::SwitchInt { targets, discr: _ } => {
                for (_, target) in targets.iter() {
                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_edge(location, targets.otherwise(), EdgeKind::Normal);

                self.value_cache.clear();
                self.value_cache.extend(targets.iter().map(|(value, _)| value));
                let has_duplicates = targets.iter().len() != self.value_cache.len();
                if has_duplicates {
                    self.fail(
                        location,
                        format!(
                            "duplicated values in `SwitchInt` terminator: {:?}",
                            terminator.kind,
                        ),
                    );
                }
            }
            TerminatorKind::Drop { target, unwind, drop, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::Call { func, args, .. }
            | TerminatorKind::TailCall { func, args, .. } => {
                // FIXME(explicit_tail_calls): refactor this & add tail-call specific checks
                if let TerminatorKind::Call { target, unwind, destination, .. } = terminator.kind {
                    if let Some(target) = target {
                        self.check_edge(location, target, EdgeKind::Normal);
                    }
                    self.check_unwind_edge(location, unwind);

                    // The code generation assumes that there are no critical call edges. The
                    // assumption is used to simplify inserting code that should be executed along
                    // the return edge from the call. FIXME(tmiasko): Since this is a strictly code
                    // generation concern, the code generation should be responsible for handling
                    // it.
                    if self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
                        && self.is_critical_call_edge(target, unwind)
                    {
                        self.fail(
                            location,
                            format!(
                                "encountered critical edge in `Call` terminator {:?}",
                                terminator.kind,
                            ),
                        );
                    }

                    // The call destination place and an `Operand::Move` place used as an argument
                    // might be passed by reference to the callee. Consequently they cannot be packed.
                    if is_within_packed(self.tcx, &self.body.local_decls, destination).is_some() {
                        // This is bad! The callee will expect the memory to be aligned.
                        self.fail(
                            location,
                            format!(
                                "encountered packed place in `Call` terminator destination: {:?}",
                                terminator.kind,
                            ),
                        );
                    }
                }

                for arg in args {
                    if let Operand::Move(place) = &arg.node {
                        if is_within_packed(self.tcx, &self.body.local_decls, *place).is_some() {
                            // This is bad! The callee will expect the memory to be aligned.
                            self.fail(
                                location,
                                format!(
                                    "encountered `Move` of a packed place in `Call` terminator: {:?}",
                                    terminator.kind,
                                ),
                            );
                        }
                    }
                }

                if let ty::FnDef(did, ..) = func.ty(&self.body.local_decls, self.tcx).kind()
                    && self.body.phase >= MirPhase::Runtime(RuntimePhase::Optimized)
                    && matches!(self.tcx.codegen_fn_attrs(did).inline, InlineAttr::Force { .. })
                {
                    self.fail(location, "`#[rustc_force_inline]`-annotated function not inlined");
                }
            }
            TerminatorKind::Assert { target, unwind, .. } => {
                self.check_edge(location, *target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::Yield { resume, drop, .. } => {
                if self.body.coroutine.is_none() {
                    self.fail(location, "`Yield` cannot appear outside coroutine bodies");
                }
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`Yield` should have been replaced by coroutine lowering");
                }
                self.check_edge(location, *resume, EdgeKind::Normal);
                if let Some(drop) = drop {
                    self.check_edge(location, *drop, EdgeKind::Normal);
                }
            }
            TerminatorKind::FalseEdge { real_target, imaginary_target } => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FalseEdge` should have been removed after drop elaboration",
                    );
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_edge(location, *imaginary_target, EdgeKind::Normal);
            }
            TerminatorKind::FalseUnwind { real_target, unwind } => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FalseUnwind` should have been removed after drop elaboration",
                    );
                }
                self.check_edge(location, *real_target, EdgeKind::Normal);
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::InlineAsm { targets, unwind, .. } => {
                for &target in targets {
                    self.check_edge(location, target, EdgeKind::Normal);
                }
                self.check_unwind_edge(location, *unwind);
            }
            TerminatorKind::CoroutineDrop => {
                if self.body.coroutine.is_none() {
                    self.fail(location, "`CoroutineDrop` cannot appear outside coroutine bodies");
                }
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`CoroutineDrop` should have been replaced by coroutine lowering",
                    );
                }
            }
            TerminatorKind::UnwindResume => {
                let bb = location.block;
                if !self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location, "Cannot `UnwindResume` from non-cleanup basic block")
                }
                if !self.can_unwind {
                    self.fail(location, "Cannot `UnwindResume` in a function that cannot unwind")
                }
            }
            TerminatorKind::UnwindTerminate(_) => {
                let bb = location.block;
                if !self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location, "Cannot `UnwindTerminate` from non-cleanup basic block")
                }
            }
            TerminatorKind::Return => {
                let bb = location.block;
                if self.body.basic_blocks[bb].is_cleanup {
                    self.fail(location, "Cannot `Return` from cleanup basic block")
                }
            }
            TerminatorKind::Unreachable => {}
        }

        self.super_terminator(terminator, location);
    }

    fn visit_source_scope(&mut self, scope: SourceScope) {
        if self.body.source_scopes.get(scope).is_none() {
            self.tcx.dcx().span_bug(
                self.body.span,
                format!(
                    "broken MIR in {:?} ({}):\ninvalid source scope {:?}",
                    self.body.source.instance, self.when, scope,
                ),
            );
        }
    }
}

/// A faster version of the validation pass that only checks those things which may break when
/// instantiating any generic parameters.
///
/// `caller_body` is used to detect cycles in MIR inlining and MIR validation before
/// `optimized_mir` is available.
pub(super) fn validate_types<'tcx>(
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    body: &Body<'tcx>,
    caller_body: &Body<'tcx>,
) -> Vec<(Location, String)> {
    let mut type_checker = TypeChecker { body, caller_body, tcx, typing_env, failures: Vec::new() };
    // The type checker formats a bunch of strings with type names in them, but these strings
    // are not always going to be encountered on the error path, since the inliner also uses
    // the validator and there are certain kinds of inlining (even for valid code) that
    // can cause validation errors (mostly around where clauses and rigid projections).
    with_no_trimmed_paths!({
        type_checker.visit_body(body);
    });
    type_checker.failures
}
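
// Illustrative only (not from the original source): a caller such as the MIR inliner can
// use the returned failure list to reject a candidate body whose types no longer line up
// after substituting the callee's generic arguments:
//
//     if !validate_types(tcx, typing_env, &callee_body, caller_body).is_empty() {
//         // refuse to inline this callee
//     }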

struct TypeChecker<'a, 'tcx> {
    body: &'a Body<'tcx>,
    caller_body: &'a Body<'tcx>,
    tcx: TyCtxt<'tcx>,
    typing_env: ty::TypingEnv<'tcx>,
    failures: Vec<(Location, String)>,
}

impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
    fn fail(&mut self, location: Location, msg: impl Into<String>) {
        self.failures.push((location, msg.into()));
    }

    /// Check if `src` can be assigned into `dest`.
    /// This is not precise; it will accept some incorrect assignments.
    fn mir_assign_valid_types(&self, src: Ty<'tcx>, dest: Ty<'tcx>) -> bool {
        // Fast path before we normalize.
        if src == dest {
            // Equal types, all is good.
            return true;
        }

        // We sometimes have to use `defining_opaque_types` for subtyping
        // to succeed here and figuring out how exactly that should work
        // is annoying. It is harmless enough to just not validate anything
        // in that case. We still check this after analysis as all opaque
        // types have been revealed at this point.
        if (src, dest).has_opaque_types() {
            return true;
        }

        // After borrowck subtyping should be fully explicit via
        // `Subtype` projections.
        let variance = if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
            Variance::Invariant
        } else {
            Variance::Covariant
        };

        crate::util::relate_types(self.tcx, self.typing_env, variance, src, dest)
    }
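
    // Illustrative only (not from the original source): before runtime MIR, covariance lets
    // an assignment of a `&'static str` value into a place typed `&'a str` pass, while in
    // runtime MIR the two sides must relate invariantly, because by then subtyping has been
    // made explicit through `Subtype` projections.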

    /// Check that the given predicate definitely holds in the param-env of this MIR body.
    fn predicate_must_hold_modulo_regions(
        &self,
        pred: impl Upcast<TyCtxt<'tcx>, ty::Predicate<'tcx>>,
    ) -> bool {
        let pred: ty::Predicate<'tcx> = pred.upcast(self.tcx);

        // We sometimes have to use `defining_opaque_types` for predicates
        // to succeed here and figuring out how exactly that should work
        // is annoying. It is harmless enough to just not validate anything
        // in that case. We still check this after analysis as all opaque
        // types have been revealed at this point.
        if pred.has_opaque_types() {
            return true;
        }

        let (infcx, param_env) = self.tcx.infer_ctxt().build_with_typing_env(self.typing_env);
        let ocx = ObligationCtxt::new(&infcx);
        ocx.register_obligation(Obligation::new(
            self.tcx,
            ObligationCause::dummy(),
            param_env,
            pred,
        ));
        ocx.select_all_or_error().is_empty()
    }
}

impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
    fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
        // This check is somewhat expensive, so only run it when -Zvalidate-mir is passed.
        if self.tcx.sess.opts.unstable_opts.validate_mir
            && self.body.phase < MirPhase::Runtime(RuntimePhase::Initial)
        {
            // `Operand::Copy` is only supposed to be used with `Copy` types.
            if let Operand::Copy(place) = operand {
                let ty = place.ty(&self.body.local_decls, self.tcx).ty;

                if !self.tcx.type_is_copy_modulo_regions(self.typing_env, ty) {
                    self.fail(location, format!("`Operand::Copy` with non-`Copy` type {ty}"));
                }
            }
        }

        self.super_operand(operand, location);
    }
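
    // Illustrative only (not from the original source): under `-Zvalidate-mir`, an
    // `Operand::Copy(_1)` where `_1: String` would be reported here, since `String` is not
    // `Copy`; moves, and copies of `Copy` types such as `u32`, are fine.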

    fn visit_projection_elem(
        &mut self,
        place_ref: PlaceRef<'tcx>,
        elem: PlaceElem<'tcx>,
        context: PlaceContext,
        location: Location,
    ) {
        match elem {
            ProjectionElem::OpaqueCast(ty)
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) =>
            {
                self.fail(
                    location,
                    format!("explicit opaque type cast to `{ty}` after `PostAnalysisNormalize`"),
                )
            }
            ProjectionElem::Index(index) => {
                let index_ty = self.body.local_decls[index].ty;
                if index_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad index ({index_ty} != usize)"))
                }
            }
            ProjectionElem::Deref
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::PostCleanup) =>
            {
                let base_ty = place_ref.ty(&self.body.local_decls, self.tcx).ty;

                if base_ty.is_box() {
                    self.fail(location, format!("{base_ty} dereferenced after ElaborateBoxDerefs"))
                }
            }
            ProjectionElem::Field(f, ty) => {
                let parent_ty = place_ref.ty(&self.body.local_decls, self.tcx);
                let fail_out_of_bounds = |this: &mut Self, location| {
                    this.fail(location, format!("Out of bounds field {f:?} for {parent_ty:?}"));
                };
                let check_equal = |this: &mut Self, location, f_ty| {
                    if !this.mir_assign_valid_types(ty, f_ty) {
                        this.fail(
                            location,
                            format!(
                                "Field projection `{place_ref:?}.{f:?}` specified type `{ty}`, but actual type is `{f_ty}`"
                            )
                        )
                    }
                };

                let kind = match parent_ty.ty.kind() {
                    &ty::Alias(ty::Opaque, ty::AliasTy { def_id, args, .. }) => {
                        self.tcx.type_of(def_id).instantiate(self.tcx, args).kind()
                    }
                    kind => kind,
                };

                match kind {
                    ty::Tuple(fields) => {
                        let Some(f_ty) = fields.get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, *f_ty);
                    }
                    ty::Adt(adt_def, args) => {
                        // see <https://github.com/rust-lang/rust/blob/7601adcc764d42c9f2984082b49948af652df986/compiler/rustc_middle/src/ty/layout.rs#L861-L864>
                        if self.tcx.is_lang_item(adt_def.did(), LangItem::DynMetadata) {
                            self.fail(
                                location,
                                format!(
                                    "You can't project to field {f:?} of `DynMetadata` because \
                                     layout is weird and thinks it doesn't have fields."
                                ),
                            );
                        }

                        if adt_def.repr().simd() {
                            self.fail(
                                location,
                                format!(
                                    "Projecting into SIMD type {adt_def:?} is banned by MCP#838"
                                ),
                            );
                        }

                        let var = parent_ty.variant_index.unwrap_or(FIRST_VARIANT);
                        let Some(field) = adt_def.variant(var).fields.get(f) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, field.ty(self.tcx, args));
                    }
                    ty::Closure(_, args) => {
                        let args = args.as_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    ty::CoroutineClosure(_, args) => {
                        let args = args.as_coroutine_closure();
                        let Some(&f_ty) = args.upvar_tys().get(f.as_usize()) else {
                            fail_out_of_bounds(self, location);
                            return;
                        };
                        check_equal(self, location, f_ty);
                    }
                    &ty::Coroutine(def_id, args) => {
                        let f_ty = if let Some(var) = parent_ty.variant_index {
                            // If we're currently validating an inlined copy of this body,
                            // then it will no longer be parameterized over the original
                            // args of the coroutine. Otherwise, we prefer to use this body
                            // since we may be in the process of computing this MIR in the
                            // first place.
                            let layout = if def_id == self.caller_body.source.def_id() {
                                self.caller_body
                                    .coroutine_layout_raw()
                                    .or_else(|| self.tcx.coroutine_layout(def_id, args).ok())
                            } else if self.tcx.needs_coroutine_by_move_body_def_id(def_id)
                                && let ty::ClosureKind::FnOnce =
                                    args.as_coroutine().kind_ty().to_opt_closure_kind().unwrap()
                                && self.caller_body.source.def_id()
                                    == self.tcx.coroutine_by_move_body_def_id(def_id)
                            {
                                // Same if this is the by-move body of a coroutine-closure.
                                self.caller_body.coroutine_layout_raw()
                            } else {
                                self.tcx.coroutine_layout(def_id, args).ok()
                            };

                            let Some(layout) = layout else {
                                self.fail(
                                    location,
                                    format!("No coroutine layout for {parent_ty:?}"),
                                );
                                return;
                            };

                            let Some(&local) = layout.variant_fields[var].get(f) else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            let Some(f_ty) = layout.field_tys.get(local) else {
                                self.fail(
                                    location,
                                    format!("Out of bounds local {local:?} for {parent_ty:?}"),
                                );
                                return;
                            };

                            ty::EarlyBinder::bind(f_ty.ty).instantiate(self.tcx, args)
                        } else {
                            let Some(&f_ty) = args.as_coroutine().prefix_tys().get(f.index())
                            else {
                                fail_out_of_bounds(self, location);
                                return;
                            };

                            f_ty
                        };

                        check_equal(self, location, f_ty);
                    }
                    _ => {
                        self.fail(location, format!("{:?} does not have fields", parent_ty.ty));
                    }
                }
            }
            ProjectionElem::Subtype(ty) => {
                if !util::sub_types(
                    self.tcx,
                    self.typing_env,
                    ty,
                    place_ref.ty(&self.body.local_decls, self.tcx).ty,
                ) {
                    self.fail(
                        location,
                        format!(
                            "Failed subtyping {ty} and {}",
                            place_ref.ty(&self.body.local_decls, self.tcx).ty
                        ),
                    )
                }
            }
            ProjectionElem::UnwrapUnsafeBinder(unwrapped_ty) => {
                let binder_ty = place_ref.ty(&self.body.local_decls, self.tcx);
                let ty::UnsafeBinder(binder_ty) = *binder_ty.ty.kind() else {
                    self.fail(
                        location,
                        format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
                    );
                    return;
                };
                let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
                if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
                    self.fail(
                        location,
                        format!(
                            "Cannot unwrap unsafe binder {binder_ty:?} into type {unwrapped_ty}"
                        ),
                    );
                }
            }
            _ => {}
        }
        self.super_projection_elem(place_ref, elem, context, location);
    }

    fn visit_var_debug_info(&mut self, debuginfo: &VarDebugInfo<'tcx>) {
        if let Some(box VarDebugInfoFragment { ty, ref projection }) = debuginfo.composite {
            if ty.is_union() || ty.is_enum() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid type {ty} in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.is_empty() {
                self.fail(
                    START_BLOCK.start_location(),
                    format!("invalid empty projection in debuginfo for {:?}", debuginfo.name),
                );
            }
            if projection.iter().any(|p| !matches!(p, PlaceElem::Field(..))) {
                self.fail(
                    START_BLOCK.start_location(),
                    format!(
                        "illegal projection {:?} in debuginfo for {:?}",
                        projection, debuginfo.name
                    ),
                );
            }
        }
        match debuginfo.value {
            VarDebugInfoContents::Const(_) => {}
            VarDebugInfoContents::Place(place) => {
                if place.projection.iter().any(|p| !p.can_use_in_debuginfo()) {
                    self.fail(
                        START_BLOCK.start_location(),
                        format!("illegal place {:?} in debuginfo for {:?}", place, debuginfo.name),
                    );
                }
            }
        }
        self.super_var_debug_info(debuginfo);
    }

    fn visit_place(&mut self, place: &Place<'tcx>, cntxt: PlaceContext, location: Location) {
        // Set off any `bug!`s in the type computation code
        let _ = place.ty(&self.body.local_decls, self.tcx);

        if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial)
            && place.projection.len() > 1
            && cntxt != PlaceContext::NonUse(NonUseContext::VarDebugInfo)
            && place.projection[1..].contains(&ProjectionElem::Deref)
        {
            self.fail(
                location,
                format!("place {place:?} has deref as a later projection (it is only permitted as the first projection)"),
            );
        }

        // Ensure all downcast projections are followed by field projections.
        let mut projections_iter = place.projection.iter();
        while let Some(proj) = projections_iter.next() {
            if matches!(proj, ProjectionElem::Downcast(..)) {
                if !matches!(projections_iter.next(), Some(ProjectionElem::Field(..))) {
                    self.fail(
                        location,
                        format!(
                            "place {place:?} has `Downcast` projection not followed by `Field`"
                        ),
                    );
                }
            }
        }

        self.super_place(place, cntxt, location);
    }
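
    // Illustrative only (not from the original source): in runtime MIR, `(*_1).0` is fine
    // because the `Deref` is the first projection, while `*(_1.0)` has `Deref` as a later
    // projection and is rejected above. Likewise `(_1 as Some).0` is fine, but a `Downcast`
    // that is not immediately followed by a `Field` projection is rejected.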

    fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
        macro_rules! check_kinds {
            ($t:expr, $text:literal, $typat:pat) => {
                if !matches!(($t).kind(), $typat) {
                    self.fail(location, format!($text, $t));
                }
            };
        }
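
        // Illustrative only (not from the original source): an invocation such as
        // `check_kinds!(pty, "Cannot compute length of non-array type {:?}", ty::Array(..) | ty::Slice(..))`
        // records a failure at `location` unless `pty.kind()` matches one of the listed
        // patterns, with `{:?}` filled in by `pty` itself.
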
        match rvalue {
            Rvalue::Use(_) | Rvalue::CopyForDeref(_) => {}
            Rvalue::Aggregate(kind, fields) => match **kind {
                AggregateKind::Tuple => {}
                AggregateKind::Array(dest) => {
                    for src in fields {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "array field has the wrong type");
                        }
                    }
                }
                AggregateKind::Adt(def_id, idx, args, _, Some(field)) => {
                    let adt_def = self.tcx.adt_def(def_id);
                    assert!(adt_def.is_union());
                    assert_eq!(idx, FIRST_VARIANT);
                    let dest_ty = self.tcx.normalize_erasing_regions(
                        self.typing_env,
                        adt_def.non_enum_variant().fields[field].ty(self.tcx, args),
                    );
                    if let [field] = fields.raw.as_slice() {
                        let src_ty = field.ty(self.body, self.tcx);
                        if !self.mir_assign_valid_types(src_ty, dest_ty) {
                            self.fail(location, "union field has the wrong type");
                        }
                    } else {
                        self.fail(location, "unions should have one initialized field");
                    }
                }
                AggregateKind::Adt(def_id, idx, args, _, None) => {
                    let adt_def = self.tcx.adt_def(def_id);
                    assert!(!adt_def.is_union());
                    let variant = &adt_def.variants()[idx];
                    if variant.fields.len() != fields.len() {
                        self.fail(location, "adt has the wrong number of initialized fields");
                    }
                    for (src, dest) in std::iter::zip(fields, &variant.fields) {
                        let dest_ty = self
                            .tcx
                            .normalize_erasing_regions(self.typing_env, dest.ty(self.tcx, args));
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest_ty) {
                            self.fail(location, "adt field has the wrong type");
                        }
                    }
                }
                AggregateKind::Closure(_, args) => {
                    let upvars = args.as_closure().upvar_tys();
                    if upvars.len() != fields.len() {
                        self.fail(location, "closure has the wrong number of initialized fields");
                    }
                    for (src, dest) in std::iter::zip(fields, upvars) {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "closure field has the wrong type");
                        }
                    }
                }
                AggregateKind::Coroutine(_, args) => {
                    let upvars = args.as_coroutine().upvar_tys();
                    if upvars.len() != fields.len() {
                        self.fail(location, "coroutine has the wrong number of initialized fields");
                    }
                    for (src, dest) in std::iter::zip(fields, upvars) {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "coroutine field has the wrong type");
                        }
                    }
                }
                AggregateKind::CoroutineClosure(_, args) => {
                    let upvars = args.as_coroutine_closure().upvar_tys();
                    if upvars.len() != fields.len() {
                        self.fail(
                            location,
                            "coroutine-closure has the wrong number of initialized fields",
                        );
                    }
                    for (src, dest) in std::iter::zip(fields, upvars) {
                        if !self.mir_assign_valid_types(src.ty(self.body, self.tcx), dest) {
                            self.fail(location, "coroutine-closure field has the wrong type");
                        }
                    }
                }
                AggregateKind::RawPtr(pointee_ty, mutability) => {
                    if !matches!(self.body.phase, MirPhase::Runtime(_)) {
                        // It would probably be fine to support this in earlier phases, but at the
                        // time of writing it's only ever introduced from intrinsic lowering, so
                        // earlier things just `bug!` on it.
                        self.fail(location, "RawPtr should be in runtime MIR only");
                    }

                    if let [data_ptr, metadata] = fields.raw.as_slice() {
                        let data_ptr_ty = data_ptr.ty(self.body, self.tcx);
                        let metadata_ty = metadata.ty(self.body, self.tcx);
                        if let ty::RawPtr(in_pointee, in_mut) = data_ptr_ty.kind() {
                            if *in_mut != mutability {
                                self.fail(location, "input and output mutability must match");
                            }

                            // FIXME: check `Thin` instead of `Sized`
                            if !in_pointee.is_sized(self.tcx, self.typing_env) {
                                self.fail(location, "input pointer must be thin");
                            }
                        } else {
                            self.fail(
                                location,
                                "first operand to raw pointer aggregate must be a raw pointer",
                            );
                        }

                        // FIXME: Check metadata more generally
                        if pointee_ty.is_slice() {
                            if !self.mir_assign_valid_types(metadata_ty, self.tcx.types.usize) {
                                self.fail(location, "slice metadata must be usize");
                            }
                        } else if pointee_ty.is_sized(self.tcx, self.typing_env) {
                            if metadata_ty != self.tcx.types.unit {
                                self.fail(location, "metadata for pointer-to-thin must be unit");
                            }
                        }
                    } else {
                        self.fail(location, "raw pointer aggregate must have 2 fields");
                    }
                }
            },
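
            // Illustrative only (not from the original source): a wide `*const [u8]` built
            // by `AggregateKind::RawPtr` takes a thin `*const u8` data pointer plus a
            // `usize` length as metadata; for a `Sized` pointee the metadata operand must
            // instead be `()`.
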
            Rvalue::Ref(_, BorrowKind::Fake(_), _) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`Assign` statement with a `Fake` borrow should have been removed in runtime MIR",
                    );
                }
            }
            Rvalue::Ref(..) => {}
            Rvalue::Len(p) => {
                let pty = p.ty(&self.body.local_decls, self.tcx).ty;
                check_kinds!(
                    pty,
                    "Cannot compute length of non-array type {:?}",
                    ty::Array(..) | ty::Slice(..)
                );
            }
            Rvalue::BinaryOp(op, vals) => {
                use BinOp::*;
                let a = vals.0.ty(&self.body.local_decls, self.tcx);
                let b = vals.1.ty(&self.body.local_decls, self.tcx);
                if crate::util::binop_right_homogeneous(*op) {
                    if let Eq | Lt | Le | Ne | Ge | Gt = op {
                        // The function pointer types can have lifetimes
                        if !self.mir_assign_valid_types(a, b) {
                            self.fail(
                                location,
                                format!("Cannot {op:?} compare incompatible types {a} and {b}"),
                            );
                        }
                    } else if a != b {
                        self.fail(
                            location,
                            format!("Cannot perform binary op {op:?} on unequal types {a} and {b}"),
                        );
                    }
                }

                match op {
                    Offset => {
                        check_kinds!(a, "Cannot offset non-pointer type {:?}", ty::RawPtr(..));
                        if b != self.tcx.types.isize && b != self.tcx.types.usize {
                            self.fail(location, format!("Cannot offset by non-isize type {b}"));
                        }
                    }
                    Eq | Lt | Le | Ne | Ge | Gt => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot {op:?} compare type {:?}",
                                ty::Bool
                                    | ty::Char
                                    | ty::Int(..)
                                    | ty::Uint(..)
                                    | ty::Float(..)
                                    | ty::RawPtr(..)
                                    | ty::FnPtr(..)
                            )
                        }
                    }
                    Cmp => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot three-way compare non-integer type {:?}",
                                ty::Char | ty::Uint(..) | ty::Int(..)
                            )
                        }
                    }
                    AddUnchecked | AddWithOverflow | SubUnchecked | SubWithOverflow
                    | MulUnchecked | MulWithOverflow | Shl | ShlUnchecked | Shr | ShrUnchecked => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot {op:?} non-integer type {:?}",
                                ty::Uint(..) | ty::Int(..)
                            )
                        }
                    }
                    BitAnd | BitOr | BitXor => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot perform bitwise op {op:?} on type {:?}",
                                ty::Uint(..) | ty::Int(..) | ty::Bool
                            )
                        }
                    }
                    Add | Sub | Mul | Div | Rem => {
                        for x in [a, b] {
                            check_kinds!(
                                x,
                                "Cannot perform arithmetic {op:?} on type {:?}",
                                ty::Uint(..) | ty::Int(..) | ty::Float(..)
                            )
                        }
                    }
                }
            }
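
            // Illustrative only (not from the original source): for the ops gated by
            // `binop_right_homogeneous` above, both operands must have one type, e.g.
            // `Add(u32, u32)` but not `Add(u32, u64)` (comparisons only need compatible
            // types). `Offset` is visibly different: it pairs a raw pointer with a `usize`
            // or `isize` offset.
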
1157            Rvalue::UnaryOp(op, operand) => {
1158                let a = operand.ty(&self.body.local_decls, self.tcx);
1159                match op {
1160                    UnOp::Neg => {
1161                        check_kinds!(a, "Cannot negate type {:?}", ty::Int(..) | ty::Float(..))
1162                    }
1163                    UnOp::Not => {
1164                        check_kinds!(
1165                            a,
1166                            "Cannot binary not type {:?}",
1167                            ty::Int(..) | ty::Uint(..) | ty::Bool
1168                        );
1169                    }
1170                    UnOp::PtrMetadata => {
1171                        check_kinds!(
1172                            a,
1173                            "Cannot PtrMetadata non-pointer non-reference type {:?}",
1174                            ty::RawPtr(..) | ty::Ref(..)
1175                        );
1176                    }
1177                }
            }
            Rvalue::ShallowInitBox(operand, _) => {
                let a = operand.ty(&self.body.local_decls, self.tcx);
                check_kinds!(a, "Cannot shallow init type {:?}", ty::RawPtr(..));
            }
            Rvalue::Cast(kind, operand, target_type) => {
                let op_ty = operand.ty(self.body, self.tcx);
                match kind {
                    // FIXME: Add checks for these
                    CastKind::PointerWithExposedProvenance | CastKind::PointerExposeProvenance => {}
                    CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
                        // FIXME: check signature compatibility.
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a fn item, not {:?}",
                            ty::FnDef(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                    }
                    CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
                        // FIXME: check safety and signature compatibility.
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                    }
                    CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(..), _) => {
                        // FIXME: check safety, captures, and signature compatibility.
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a closure, not {:?}",
                            ty::Closure(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                    }
                    CastKind::PointerCoercion(PointerCoercion::MutToConstPointer, _) => {
                        // FIXME: check same pointee?
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a raw mut pointer, not {:?}",
                            ty::RawPtr(_, Mutability::Mut)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a raw const pointer, not {:?}",
                            ty::RawPtr(_, Mutability::Not)
                        );
                        if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
                            self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
                        }
                    }
                    CastKind::PointerCoercion(PointerCoercion::ArrayToPointer, _) => {
                        // FIXME: Check pointee types
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                        if self.body.phase >= MirPhase::Analysis(AnalysisPhase::PostCleanup) {
                            self.fail(location, format!("After borrowck, MIR disallows {kind:?}"));
                        }
                    }
                    CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
                        // Pointers being unsize coerced should at least implement
                        // `CoerceUnsized`.
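                        // Illustrative sketch (not from the upstream comments): unsizing
                        // `&[u8; 4]` to `&[u8]`, or `Box<SomeType>` to `Box<dyn SomeTrait>`
                        // (with `SomeType: SomeTrait`), is backed by a `CoerceUnsized`
                        // impl, which is exactly the obligation checked below.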
                        if !self.predicate_must_hold_modulo_regions(ty::TraitRef::new(
                            self.tcx,
                            self.tcx.require_lang_item(
                                LangItem::CoerceUnsized,
                                self.body.source_info(location).span,
                            ),
                            [op_ty, *target_type],
                        )) {
                            self.fail(
                                location,
                                format!("Unsize coercion, but `{op_ty}` isn't coercible to `{target_type}`"),
                            );
                        }
                    }
                    CastKind::IntToInt | CastKind::IntToFloat => {
                        let input_valid = op_ty.is_integral() || op_ty.is_char() || op_ty.is_bool();
                        let target_valid = target_type.is_numeric() || target_type.is_char();
                        if !input_valid || !target_valid {
                            self.fail(
                                location,
                                format!("Wrong cast kind {kind:?} for the type {op_ty}"),
                            );
                        }
                    }
                    CastKind::FnPtrToPtr => {
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a fn pointer, not {:?}",
                            ty::FnPtr(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                    }
                    CastKind::PtrToPtr => {
                        check_kinds!(
                            op_ty,
                            "CastKind::{kind:?} input must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                        check_kinds!(
                            target_type,
                            "CastKind::{kind:?} output must be a raw pointer, not {:?}",
                            ty::RawPtr(..)
                        );
                    }
                    CastKind::FloatToFloat | CastKind::FloatToInt => {
                        if !op_ty.is_floating_point() || !target_type.is_numeric() {
                            self.fail(
                                location,
                                format!(
                                    "Trying to cast non-`Float` type as {kind:?} into {target_type:?}"
                                ),
                            );
                        }
                    }
                    CastKind::Transmute => {
                        // Unlike `mem::transmute`, a MIR `Transmute` is well-formed
                        // for any two `Sized` types, just potentially UB to run.
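                        // Illustrative sketch (not from the upstream comments): a MIR
                        // `Transmute` from `u32` to `char` validates (both are `Sized`)
                        // even though running it may be UB; transmuting from or to an
                        // unsized type such as `[u8]` or `dyn Trait` is what the two
                        // checks below reject.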

                        if !self
                            .tcx
                            .normalize_erasing_regions(self.typing_env, op_ty)
                            .is_sized(self.tcx, self.typing_env)
                        {
                            self.fail(
                                location,
                                format!("Cannot transmute from non-`Sized` type {op_ty}"),
                            );
                        }
                        if !self
                            .tcx
                            .normalize_erasing_regions(self.typing_env, *target_type)
                            .is_sized(self.tcx, self.typing_env)
                        {
                            self.fail(
                                location,
                                format!("Cannot transmute to non-`Sized` type {target_type:?}"),
                            );
                        }
                    }
                }
            }
            Rvalue::NullaryOp(NullOp::OffsetOf(indices), container) => {
                let fail_out_of_bounds = |this: &mut Self, location, field, ty| {
                    this.fail(location, format!("Out of bounds field {field:?} for {ty}"));
                };

                let mut current_ty = *container;

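                // Illustrative sketch (not from the upstream comments): `indices` encodes a
                // possibly nested field path as (variant, field) pairs; the loop below walks
                // it one step at a time, requiring each step to name an existing field of the
                // current tuple or ADT variant and then descending into that field's type.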
                for (variant, field) in indices.iter() {
                    match current_ty.kind() {
                        ty::Tuple(fields) => {
                            if variant != FIRST_VARIANT {
                                self.fail(
                                    location,
                                    format!("tried to get variant {variant:?} of tuple"),
                                );
                                return;
                            }
                            let Some(&f_ty) = fields.get(field.as_usize()) else {
                                fail_out_of_bounds(self, location, field, current_ty);
                                return;
                            };

                            current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
                        }
                        ty::Adt(adt_def, args) => {
                            let Some(field) = adt_def.variant(variant).fields.get(field) else {
                                fail_out_of_bounds(self, location, field, current_ty);
                                return;
                            };

                            let f_ty = field.ty(self.tcx, args);
                            current_ty = self.tcx.normalize_erasing_regions(self.typing_env, f_ty);
                        }
                        _ => {
                            self.fail(
                                location,
                                format!("Cannot get offset ({variant:?}, {field:?}) from type {current_ty}"),
                            );
                            return;
                        }
                    }
                }
            }
            Rvalue::Repeat(_, _)
            | Rvalue::ThreadLocalRef(_)
            | Rvalue::RawPtr(_, _)
            | Rvalue::NullaryOp(
                NullOp::SizeOf | NullOp::AlignOf | NullOp::UbChecks | NullOp::ContractChecks,
                _,
            )
            | Rvalue::Discriminant(_) => {}

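            // Editorial note (not from the upstream comments): `WrapUnsafeBinder` packs a value
            // into an `unsafe<'a> T` binder type, so the check below requires the target type to
            // be a `ty::UnsafeBinder` whose inner type, with bound regions erased, is compatible
            // with the operand's type under MIR assignment rules.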
            Rvalue::WrapUnsafeBinder(op, ty) => {
                let unwrapped_ty = op.ty(self.body, self.tcx);
                let ty::UnsafeBinder(binder_ty) = *ty.kind() else {
                    self.fail(
                        location,
                        format!("WrapUnsafeBinder does not produce a ty::UnsafeBinder"),
                    );
                    return;
                };
                let binder_inner_ty = self.tcx.instantiate_bound_regions_with_erased(*binder_ty);
                if !self.mir_assign_valid_types(unwrapped_ty, binder_inner_ty) {
                    self.fail(
                        location,
                        format!("Cannot wrap {unwrapped_ty} into unsafe binder {binder_ty:?}"),
                    );
                }
            }
        }
        self.super_rvalue(rvalue, location);
    }

    fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
        match &statement.kind {
            StatementKind::Assign(box (dest, rvalue)) => {
                // LHS and RHS of the assignment must have the same type.
                let left_ty = dest.ty(&self.body.local_decls, self.tcx).ty;
                let right_ty = rvalue.ty(&self.body.local_decls, self.tcx);

                if !self.mir_assign_valid_types(right_ty, left_ty) {
                    self.fail(
                        location,
                        format!(
                            "encountered `{:?}` with incompatible types:\n\
                            left-hand side has type: {}\n\
                            right-hand side has type: {}",
                            statement.kind, left_ty, right_ty,
                        ),
                    );
                }
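                // Editorial note (not from the upstream comments): `CopyForDeref` is only
                // emitted for a place that is about to be dereferenced, so the place's type
                // must itself be dereferenceable (a reference, raw pointer, or `Box`), which
                // is what the `builtin_deref` check below enforces.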
                if let Rvalue::CopyForDeref(place) = rvalue {
                    if place.ty(&self.body.local_decls, self.tcx).ty.builtin_deref(true).is_none() {
                        self.fail(
                            location,
                            "`CopyForDeref` should only be used for dereferenceable types",
                        )
                    }
                }
            }
            StatementKind::AscribeUserType(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`AscribeUserType` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::FakeRead(..) => {
                if self.body.phase >= MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(
                        location,
                        "`FakeRead` should have been removed after drop lowering phase",
                    );
                }
            }
            StatementKind::Intrinsic(box NonDivergingIntrinsic::Assume(op)) => {
                let ty = op.ty(&self.body.local_decls, self.tcx);
                if !ty.is_bool() {
                    self.fail(
                        location,
                        format!("`assume` argument must be `bool`, but got: `{ty}`"),
                    );
                }
            }
            StatementKind::Intrinsic(box NonDivergingIntrinsic::CopyNonOverlapping(
                CopyNonOverlapping { src, dst, count },
            )) => {
                let src_ty = src.ty(&self.body.local_decls, self.tcx);
                let op_src_ty = if let Some(src_deref) = src_ty.builtin_deref(true) {
                    src_deref
                } else {
                    self.fail(
                        location,
                        format!("Expected src to be ptr in copy_nonoverlapping, got: {src_ty}"),
                    );
                    return;
                };
                let dst_ty = dst.ty(&self.body.local_decls, self.tcx);
                let op_dst_ty = if let Some(dst_deref) = dst_ty.builtin_deref(true) {
                    dst_deref
                } else {
                    self.fail(
                        location,
                        format!("Expected dst to be ptr in copy_nonoverlapping, got: {dst_ty}"),
                    );
                    return;
                };
                // Since `CopyNonOverlapping` is parametrized by a single type, we only need to
                // check that the source and destination pointee types are equal, without
                // keeping an extra type parameter around.
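                // Illustrative sketch (not from the upstream comments): this mirrors
                // `ptr::copy_nonoverlapping::<T>(src, dst, count)`, where `src` and `dst`
                // must point to the same `T` and `count` is an element count of type `usize`.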
                if !self.mir_assign_valid_types(op_src_ty, op_dst_ty) {
                    self.fail(location, format!("bad arg ({op_src_ty} != {op_dst_ty})"));
                }

                let op_cnt_ty = count.ty(&self.body.local_decls, self.tcx);
                if op_cnt_ty != self.tcx.types.usize {
                    self.fail(location, format!("bad arg ({op_cnt_ty} != usize)"))
                }
            }
            StatementKind::SetDiscriminant { place, .. } => {
                if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`SetDiscriminant` is not allowed until deaggregation");
                }
                let pty = place.ty(&self.body.local_decls, self.tcx).ty;
                if !matches!(
                    pty.kind(),
                    ty::Adt(..) | ty::Coroutine(..) | ty::Alias(ty::Opaque, ..)
                ) {
                    self.fail(
                        location,
                        format!(
                            "`SetDiscriminant` is only allowed on ADTs and coroutines, not {pty}"
                        ),
                    );
                }
            }
            StatementKind::Deinit(..) => {
                if self.body.phase < MirPhase::Runtime(RuntimePhase::Initial) {
                    self.fail(location, "`Deinit` is not allowed until deaggregation");
                }
            }
            StatementKind::Retag(kind, _) => {
                // FIXME(JakobDegen) The validator should check that `self.body.phase <
                // DropsLowered`. However, this causes ICEs with generation of drop shims, which
                // seem to fail to set their `MirPhase` correctly.
                if matches!(kind, RetagKind::TwoPhase) {
                    self.fail(location, format!("explicit `{kind:?}` is forbidden"));
                }
            }
            StatementKind::StorageLive(_)
            | StatementKind::StorageDead(_)
            | StatementKind::Coverage(_)
            | StatementKind::ConstEvalCounter
            | StatementKind::PlaceMention(..)
            | StatementKind::BackwardIncompatibleDropHint { .. }
            | StatementKind::Nop => {}
        }

        self.super_statement(statement, location);
    }

    fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
        match &terminator.kind {
            TerminatorKind::SwitchInt { targets, discr } => {
                let switch_ty = discr.ty(&self.body.local_decls, self.tcx);

                let target_width = self.tcx.sess.target.pointer_width;

                let size = Size::from_bits(match switch_ty.kind() {
                    ty::Uint(uint) => uint.normalize(target_width).bit_width().unwrap(),
                    ty::Int(int) => int.normalize(target_width).bit_width().unwrap(),
                    ty::Char => 32,
                    ty::Bool => 1,
                    other => bug!("unhandled type: {:?}", other),
                });
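                // Illustrative sketch (not from the upstream comments): when switching on a
                // `u8` discriminant, `size` is one byte, so a switch value such as `0x100`
                // does not fit and `ScalarInt::try_from_uint` returns `None`, triggering the
                // failure below.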

                for (value, _) in targets.iter() {
                    if ScalarInt::try_from_uint(value, size).is_none() {
                        self.fail(
                            location,
                            format!("the value {value:#x} is not a proper {switch_ty}"),
                        )
                    }
                }
            }
            TerminatorKind::Call { func, .. } | TerminatorKind::TailCall { func, .. } => {
                let func_ty = func.ty(&self.body.local_decls, self.tcx);
                match func_ty.kind() {
                    ty::FnPtr(..) | ty::FnDef(..) => {}
                    _ => self.fail(
                        location,
                        format!(
                            "encountered non-callable type {func_ty} in `{}` terminator",
                            terminator.kind.name()
                        ),
                    ),
                }
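                // Editorial note (not from the upstream comments): a direct call lowers to a
                // `FnDef` callee and a call through a function pointer to a `FnPtr`; calling a
                // closure goes through the `Fn*` trait methods, so even then the callee type
                // here is a `FnDef` rather than a `ty::Closure`.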

                if let TerminatorKind::TailCall { .. } = terminator.kind {
                    // FIXME(explicit_tail_calls): implement tail-call specific checks here (such
                    // as signature matching, forbidding closures, etc)
                }
            }
            TerminatorKind::Assert { cond, .. } => {
                let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
                if cond_ty != self.tcx.types.bool {
                    self.fail(
                        location,
                        format!(
                            "encountered non-boolean condition of type {cond_ty} in `Assert` terminator"
                        ),
                    );
                }
            }
            TerminatorKind::Goto { .. }
            | TerminatorKind::Drop { .. }
            | TerminatorKind::Yield { .. }
            | TerminatorKind::FalseEdge { .. }
            | TerminatorKind::FalseUnwind { .. }
            | TerminatorKind::InlineAsm { .. }
            | TerminatorKind::CoroutineDrop
            | TerminatorKind::UnwindResume
            | TerminatorKind::UnwindTerminate(_)
            | TerminatorKind::Return
            | TerminatorKind::Unreachable => {}
        }

        self.super_terminator(terminator, location);
    }
}