use std::assert_matches::assert_matches;

use rustc_abi::{FieldIdx, Integer};
use rustc_apfloat::ieee::{Double, Half, Quad, Single};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::mir::CastKind;
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, FloatTy, Ty};
use rustc_middle::{bug, span_bug};
use tracing::trace;

use super::util::ensure_monomorphic_enough;
use super::{
    FnVal, ImmTy, Immediate, InterpCx, Machine, OpTy, PlaceTy, err_inval, interp_ok, throw_ub,
    throw_ub_custom,
};
use crate::interpret::Writeable;
use crate::{enter_trace_span, fluent_generated as fluent};

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
    pub fn cast(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_kind: CastKind,
        cast_ty: Ty<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        // `cast_ty` is usually the same as `dest.layout.ty`, but not always, so only compute a
        // fresh layout when the two differ.
        let cast_layout =
            if cast_ty == dest.layout.ty { dest.layout } else { self.layout_of(cast_ty)? };
        match cast_kind {
            CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
                self.unsize_into(src, cast_layout, dest)?;
            }

            CastKind::PointerExposeProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_expose_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerWithExposedProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_with_exposed_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::IntToInt | CastKind::IntToFloat => {
                let src = self.read_immediate(src)?;
                let res = self.int_to_int_or_float(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FloatToFloat | CastKind::FloatToInt => {
                let src = self.read_immediate(src)?;
                let res = self.float_to_float_or_int(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                let src = self.read_immediate(src)?;
                let res = self.ptr_to_ptr(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerCoercion(
                PointerCoercion::MutToConstPointer | PointerCoercion::ArrayToPointer,
                _,
            ) => {
                bug!("{cast_kind:?} casts are for borrowck only, not runtime MIR");
            }

            CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
                // Reifying a fn item to a fn pointer requires resolving the concrete instance,
                // so bail out if the type is not monomorphic enough.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                // The src operand does not matter, only its type.
                match *src.layout.ty.kind() {
                    ty::FnDef(def_id, args) => {
                        let instance = {
                            let _trace = enter_trace_span!(M, resolve::resolve_for_fn_ptr, ?def_id);
                            ty::Instance::resolve_for_fn_ptr(
                                *self.tcx,
                                self.typing_env,
                                def_id,
                                args,
                            )
                            .ok_or_else(|| err_inval!(TooGeneric))?
                        };

                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "reify fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
                let src = self.read_immediate(src)?;
                match cast_ty.kind() {
                    ty::FnPtr(..) => {
                        // No change to the value, only to its type.
                        self.write_immediate(*src, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "fn to unsafe fn cast on {}", cast_ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(_), _) => {
                // Turning a closure into a fn pointer also requires a concrete instance.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::Closure(def_id, args) => {
                        let instance = {
                            let _trace = enter_trace_span!(M, resolve::resolve_closure, ?def_id);
                            ty::Instance::resolve_closure(
                                *self.tcx,
                                def_id,
                                args,
                                ty::ClosureKind::FnOnce,
                            )
                        };
                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "closure fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::Transmute => {
                assert!(src.layout.is_sized());
                assert!(dest.layout.is_sized());
                // Beyond this point `cast_ty` is ignored; it must match the destination type.
                assert_eq!(cast_ty, dest.layout.ty);
                if src.layout.size != dest.layout.size {
                    throw_ub_custom!(
                        fluent::const_eval_invalid_transmute,
                        src_bytes = src.layout.size.bytes(),
                        dest_bytes = dest.layout.size.bytes(),
                        src = src.layout.ty,
                        dest = dest.layout.ty,
                    );
                }

                self.copy_op_allow_transmute(src, dest)?;
            }
        }
        interp_ok(())
    }

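    /// Handles the `IntToInt` and `IntToFloat` cast kinds: the source must be an integer-like
    /// scalar (integer, `bool`, or `char`), as checked by the assertions below.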
    pub fn int_to_int_or_float(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral() || src.layout.ty.is_char() || src.layout.ty.is_bool());
        assert!(cast_to.ty.is_floating_point() || cast_to.ty.is_integral() || cast_to.ty.is_char());

        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(src.to_scalar(), src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

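    /// Handles the `FloatToFloat` and `FloatToInt` cast kinds: the source must be a scalar of one
    /// of the four float types; the actual conversion is done by `cast_from_float`.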
    pub fn float_to_float_or_int(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        let ty::Float(fty) = src.layout.ty.kind() else {
            bug!("FloatToFloat/FloatToInt cast: source type {} is not a float type", src.layout.ty)
        };
        let val = match fty {
            FloatTy::F16 => self.cast_from_float(src.to_scalar().to_f16()?, cast_to.ty),
            FloatTy::F32 => self.cast_from_float(src.to_scalar().to_f32()?, cast_to.ty),
            FloatTy::F64 => self.cast_from_float(src.to_scalar().to_f64()?, cast_to.ty),
            FloatTy::F128 => self.cast_from_float(src.to_scalar().to_f128()?, cast_to.ty),
        };
        interp_ok(ImmTy::from_scalar(val, cast_to))
    }

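    /// Handles the `FnPtrToPtr` and `PtrToPtr` cast kinds: pointer-to-pointer casts, including
    /// casting a wide (fat) pointer to a thin one by dropping its metadata.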
    pub fn ptr_to_ptr(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_any_ptr());
        assert!(cast_to.ty.is_raw_ptr());
        if cast_to.size == src.layout.size {
            // Thin-to-thin or fat-to-fat: only the pointer's type changes, not its value.
            return interp_ok(ImmTy::from_immediate(**src, cast_to));
        } else {
            // Casting the metadata away from a fat pointer.
            assert_eq!(src.layout.size, 2 * self.pointer_size());
            assert_eq!(cast_to.size, self.pointer_size());
            assert!(src.layout.ty.is_raw_ptr());
            return match **src {
                Immediate::ScalarPair(data, _) => interp_ok(ImmTy::from_scalar(data, cast_to)),
                Immediate::Scalar(..) => span_bug!(
                    self.cur_span(),
                    "{:?} input to a fat-to-thin cast ({} -> {})",
                    *src,
                    src.layout.ty,
                    cast_to.ty
                ),
                Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
            };
        }
    }

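    /// Handles the `PointerExposeProvenance` cast kind: a ptr-to-int cast that additionally
    /// exposes the pointer's provenance to the machine.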
    pub fn pointer_expose_provenance_cast(
        &mut self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(..));
        assert!(cast_to.ty.is_integral());

        let scalar = src.to_scalar();
        let ptr = scalar.to_pointer(self)?;
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => M::expose_provenance(self, ptr.provenance)?,
            Err(_) => {
                // The pointer has no provenance, so there is nothing to expose.
            }
        };

        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(scalar, src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

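    /// Handles the `PointerWithExposedProvenance` cast kind (int-to-ptr casts); the machine hook
    /// `ptr_from_addr_cast` decides how the address is turned back into a pointer.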
    pub fn pointer_with_exposed_provenance_cast(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral());
        assert_matches!(cast_to.ty.kind(), ty::RawPtr(_, _));

        // First cast to `usize`.
        let scalar = src.to_scalar();
        let addr = self.cast_from_int_like(scalar, src.layout, self.tcx.types.usize)?;
        let addr = addr.to_target_usize(self)?;

        // Then turn the address into a pointer.
        let ptr = M::ptr_from_addr_cast(self, addr)?;
        interp_ok(ImmTy::from_scalar(Scalar::from_maybe_pointer(ptr, self), cast_to))
    }

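    /// Low-level cast helper: converts an integer-like scalar (integer, raw/fn pointer, `bool`,
    /// or `char`) to the given integer, float, or `char` type.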
    fn cast_from_int_like(
        &self,
        scalar: Scalar<M::Provenance>, // input value (there is no ScalarPair)
        src_layout: TyAndLayout<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        // Let's make sure `v` is sign-extended *if* it has a signed type.
        let signed = src_layout.backend_repr.is_signed(); // also asserts that the ABI is `Scalar`

        let v = match src_layout.ty.kind() {
            ty::Uint(_) | ty::RawPtr(..) | ty::FnPtr(..) => scalar.to_uint(src_layout.size)?,
            ty::Int(_) => scalar.to_int(src_layout.size)? as u128, // we cast back to `i128` below if the sign matters
            ty::Bool => scalar.to_bool()?.into(),
            ty::Char => scalar.to_char()?.into(),
            _ => span_bug!(self.cur_span(), "invalid int-like cast from {}", src_layout.ty),
        };

        interp_ok(match *cast_ty.kind() {
            // int -> int
            ty::Int(_) | ty::Uint(_) => {
                let size = match *cast_ty.kind() {
                    ty::Int(t) => Integer::from_int_ty(self, t).size(),
                    ty::Uint(t) => Integer::from_uint_ty(self, t).size(),
                    _ => bug!(),
                };
                let v = size.truncate(v);
                Scalar::from_uint(v, size)
            }

            // signed int -> float
            ty::Float(fty) if signed => {
                let v = v as i128;
                match fty {
                    FloatTy::F16 => Scalar::from_f16(Half::from_i128(v).value),
                    FloatTy::F32 => Scalar::from_f32(Single::from_i128(v).value),
                    FloatTy::F64 => Scalar::from_f64(Double::from_i128(v).value),
                    FloatTy::F128 => Scalar::from_f128(Quad::from_i128(v).value),
                }
            }
            // unsigned int -> float
            ty::Float(fty) => match fty {
                FloatTy::F16 => Scalar::from_f16(Half::from_u128(v).value),
                FloatTy::F32 => Scalar::from_f32(Single::from_u128(v).value),
                FloatTy::F64 => Scalar::from_f64(Double::from_u128(v).value),
                FloatTy::F128 => Scalar::from_f128(Quad::from_u128(v).value),
            },

            // u8 -> char
            ty::Char => Scalar::from_u32(u8::try_from(v).unwrap().into()),

            // Casts to `bool` are not permitted by rustc, so no need to handle them here.
            _ => span_bug!(self.cur_span(), "invalid int to {} cast", cast_ty),
        })
    }

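    /// Low-level cast helper: converts a float value to the given integer or float type. Note
    /// that the generic bound requires `F` to be convertible to all four IEEE float formats.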
    fn cast_from_float<F>(&self, f: F, dest_ty: Ty<'tcx>) -> Scalar<M::Provenance>
    where
        F: Float
            + Into<Scalar<M::Provenance>>
            + FloatConvert<Half>
            + FloatConvert<Single>
            + FloatConvert<Double>
            + FloatConvert<Quad>,
    {
        match *dest_ty.kind() {
            // float -> uint
            ty::Uint(t) => {
                let size = Integer::from_uint_ty(self, t).size();
                // `to_u128` is a saturating cast, which matches the semantics of `as` casts.
                let v = f.to_u128(size.bits_usize()).value;
                // This already fits the target bit width.
                Scalar::from_uint(v, size)
            }
            // float -> int
            ty::Int(t) => {
                let size = Integer::from_int_ty(self, t).size();
                // `to_i128` is a saturating cast, which matches the semantics of `as` casts.
                let v = f.to_i128(size.bits_usize()).value;
                Scalar::from_int(v, size)
            }
            // float -> float
            // (`adjust_nan` lets the machine pick the bit pattern if the result is a NaN)
            ty::Float(fty) => match fty {
                FloatTy::F16 => {
                    Scalar::from_f16(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F32 => {
                    Scalar::from_f32(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F64 => {
                    Scalar::from_f64(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F128 => {
                    Scalar::from_f128(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
            },
            _ => span_bug!(self.cur_span(), "invalid float to {} cast", dest_ty),
        }
    }

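    /// `src` is a pointer (or reference) to data of type `source_ty`; write a pointer to the same
    /// data, unsized to `cast_ty`, into `dest`. This covers array-to-slice coercions,
    /// `dyn` -> `dyn` trait object casts, and concrete-to-`dyn` coercions.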
    fn unsize_into_ptr(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
        // The pointee types:
        source_ty: Ty<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx> {
        // Find the tails of both pointee types, peeling off matching struct wrappers
        // (e.g. `A<Struct>` -> `A<dyn Trait>`).
        let (src_pointee_ty, dest_pointee_ty) =
            self.tcx.struct_lockstep_tails_for_codegen(source_ty, cast_ty, self.typing_env);

        match (src_pointee_ty.kind(), dest_pointee_ty.kind()) {
            (&ty::Array(_, length), &ty::Slice(_)) => {
                // Array-to-slice: the pointer stays the same, we just add the length as metadata.
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_slice(
                    ptr,
                    length
                        .try_to_target_usize(*self.tcx)
                        .expect("expected monomorphic const in const eval"),
                    self,
                );
                self.write_immediate(val, dest)
            }
            (ty::Dynamic(data_a, _, ty::Dyn), ty::Dynamic(data_b, _, ty::Dyn)) => {
                let val = self.read_immediate(src)?;
                // Trivial `dyn` -> `dyn` casts with identical predicates: nothing to do.
                if data_a == data_b {
                    return self.write_immediate(*val, dest);
                }
                // Take apart the old pointer, and find the dynamic type of the pointee.
                let (old_data, old_vptr) = val.to_scalar_pair();
                let old_data = old_data.to_pointer(self)?;
                let old_vptr = old_vptr.to_pointer(self)?;
                let ty = self.get_ptr_vtable_ty(old_vptr, Some(data_a))?;

                // Figure out whether the destination trait has its own vtable pointer embedded in
                // the source vtable, or whether its vtable is a prefix of the source vtable.
                let vptr_entry_idx =
                    self.tcx.supertrait_vtable_slot((src_pointee_ty, dest_pointee_ty));
                let vtable_entries = self.vtable_entries(data_a.principal(), ty);
                if let Some(entry_idx) = vptr_entry_idx {
                    // Sanity-check that the entry at that index really is a vtable pointer for the
                    // destination trait.
                    let Some(&ty::VtblEntry::TraitVPtr(upcast_trait_ref)) =
                        vtable_entries.get(entry_idx)
                    else {
                        span_bug!(
                            self.cur_span(),
                            "invalid vtable entry index in {} -> {} upcast",
                            src_pointee_ty,
                            dest_pointee_ty
                        );
                    };
                    let erased_trait_ref =
                        ty::ExistentialTraitRef::erase_self_ty(*self.tcx, upcast_trait_ref);
                    assert_eq!(
                        data_b.principal().map(|b| {
                            self.tcx.normalize_erasing_late_bound_regions(self.typing_env, b)
                        }),
                        Some(erased_trait_ref),
                    );
                } else {
                    // No separate slot: the destination vtable must be a prefix of the source
                    // vtable, so double-check that.
                    let vtable_entries_b = self.vtable_entries(data_b.principal(), ty);
                    assert!(&vtable_entries[..vtable_entries_b.len()] == vtable_entries_b);
                };

                // Get the destination trait's vtable for the dynamic type and build the new
                // wide pointer from it.
                let new_vptr = self.get_vtable_ptr(ty, data_b)?;
                self.write_immediate(Immediate::new_dyn_trait(old_data, new_vptr, self), dest)
            }
            (_, &ty::Dynamic(data, _, ty::Dyn)) => {
                // Initial unsizing of a concrete pointee into a `dyn` trait object: build the
                // wide pointer from the data pointer and the vtable for the concrete type.
                let vtable = self.get_vtable_ptr(src_pointee_ty, data)?;
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
                self.write_immediate(val, dest)
            }
            _ => {
                // Any other combination is a bug, unless the types are simply too generic to tell.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty)?;

                span_bug!(
                    self.cur_span(),
                    "invalid pointer unsizing {} -> {}",
                    src.layout.ty,
                    cast_ty
                )
            }
        }
    }

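    /// Unsizes `src` (of a pointer or ADT type) into `dest`, which must have the layout of
    /// `cast_ty`. For ADTs such as `Arc<T>` -> `Arc<dyn Trait>`, exactly one non-1-ZST field may
    /// change type (it is unsized recursively); all other fields are copied unchanged.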
    pub fn unsize_into(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_ty: TyAndLayout<'tcx>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        trace!("Unsizing {:?} of type {} into {}", *src, src.layout.ty, cast_ty.ty);
        match (src.layout.ty.kind(), cast_ty.ty.kind()) {
            (&ty::Ref(_, s, _), &ty::Ref(_, c, _) | &ty::RawPtr(c, _))
            | (&ty::RawPtr(s, _), &ty::RawPtr(c, _)) => self.unsize_into_ptr(src, dest, s, c),
            (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
                assert_eq!(def_a, def_b); // implies same number of fields

                // Unsizing a generic struct with pointer fields, like `Arc<T>` -> `Arc<dyn Trait>`.
                // There can be extra fields as long as they don't change, so we only allow a
                // single field that actually needs casting.
                let mut found_cast_field = false;
                for i in 0..src.layout.fields.count() {
                    let cast_ty_field = cast_ty.field(self, i);
                    let i = FieldIdx::from_usize(i);
                    let src_field = self.project_field(src, i)?;
                    let dst_field = self.project_field(dest, i)?;
                    if src_field.layout.is_1zst() && cast_ty_field.is_1zst() {
                        // Skip 1-ZST fields.
                    } else if src_field.layout.ty == cast_ty_field.ty {
                        self.copy_op(&src_field, &dst_field)?;
                    } else {
                        if found_cast_field {
                            span_bug!(self.cur_span(), "unsize_into: more than one field to cast");
                        }
                        found_cast_field = true;
                        self.unsize_into(&src_field, cast_ty_field, &dst_field)?;
                    }
                }
                interp_ok(())
            }
            _ => {
                // Any other combination is a bug, unless the types are simply too generic to tell.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty.ty)?;

                span_bug!(
                    self.cur_span(),
                    "unsize_into: invalid conversion: {:?} -> {:?}",
                    src.layout,
                    dest.layout()
                )
            }
        }
    }
}