use std::assert_matches::assert_matches;

use rustc_abi::{FieldIdx, Integer};
use rustc_apfloat::ieee::{Double, Half, Quad, Single};
use rustc_apfloat::{Float, FloatConvert};
use rustc_middle::mir::CastKind;
use rustc_middle::mir::interpret::{InterpResult, PointerArithmetic, Scalar};
use rustc_middle::ty::adjustment::PointerCoercion;
use rustc_middle::ty::layout::{IntegerExt, TyAndLayout};
use rustc_middle::ty::{self, FloatTy, Ty};
use rustc_middle::{bug, span_bug};
use tracing::trace;

use super::util::ensure_monomorphic_enough;
use super::{
    FnVal, ImmTy, Immediate, InterpCx, Machine, OpTy, PlaceTy, err_inval, interp_ok, throw_ub,
    throw_ub_custom,
};
use crate::fluent_generated as fluent;
use crate::interpret::Writeable;

impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> {
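    /// Performs the cast `cast_kind` from `src` to `cast_ty`, writing the result into `dest`.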
    pub fn cast(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_kind: CastKind,
        cast_ty: Ty<'tcx>,
        dest: &PlaceTy<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
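        // Reuse `dest`'s layout when the cast target is exactly the destination type;
        // otherwise compute the layout of `cast_ty` separately.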
        let cast_layout =
            if cast_ty == dest.layout.ty { dest.layout } else { self.layout_of(cast_ty)? };
        match cast_kind {
            CastKind::PointerCoercion(PointerCoercion::Unsize, _) => {
                self.unsize_into(src, cast_layout, dest)?;
            }

            CastKind::PointerExposeProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_expose_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerWithExposedProvenance => {
                let src = self.read_immediate(src)?;
                let res = self.pointer_with_exposed_provenance_cast(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::IntToInt | CastKind::IntToFloat => {
                let src = self.read_immediate(src)?;
                let res = self.int_to_int_or_float(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FloatToFloat | CastKind::FloatToInt => {
                let src = self.read_immediate(src)?;
                let res = self.float_to_float_or_int(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::FnPtrToPtr | CastKind::PtrToPtr => {
                let src = self.read_immediate(src)?;
                let res = self.ptr_to_ptr(&src, cast_layout)?;
                self.write_immediate(*res, dest)?;
            }

            CastKind::PointerCoercion(
                PointerCoercion::MutToConstPointer | PointerCoercion::ArrayToPointer,
                _,
            ) => {
                bug!("{cast_kind:?} casts are for borrowck only, not runtime MIR");
            }

            CastKind::PointerCoercion(PointerCoercion::ReifyFnPointer, _) => {
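                // All reifications must be monomorphic; bail out with `TooGeneric` otherwise.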
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::FnDef(def_id, args) => {
                        let instance = ty::Instance::resolve_for_fn_ptr(
                            *self.tcx,
                            self.typing_env,
                            def_id,
                            args,
                        )
                        .ok_or_else(|| err_inval!(TooGeneric))?;

                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "reify fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::UnsafeFnPointer, _) => {
                let src = self.read_immediate(src)?;
                match cast_ty.kind() {
                    ty::FnPtr(..) => {
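                        // No change in representation: just forward the fn pointer value.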
                        self.write_immediate(*src, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "fn to unsafe fn cast on {}", cast_ty),
                }
            }

            CastKind::PointerCoercion(PointerCoercion::ClosureFnPointer(_), _) => {
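                // Closure-to-fn-pointer casts require a monomorphic closure type so the
                // closure's instance can be resolved.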
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;

                match *src.layout.ty.kind() {
                    ty::Closure(def_id, args) => {
                        let instance = ty::Instance::resolve_closure(
                            *self.tcx,
                            def_id,
                            args,
                            ty::ClosureKind::FnOnce,
                        );
                        let fn_ptr = self.fn_ptr(FnVal::Instance(instance));
                        self.write_pointer(fn_ptr, dest)?;
                    }
                    _ => span_bug!(self.cur_span(), "closure fn pointer on {}", src.layout.ty),
                }
            }

            CastKind::Transmute => {
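                // `cast_ty` must already match the destination type here; transmutes that
                // change the size are undefined behavior and get rejected below.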
                assert!(src.layout.is_sized());
                assert!(dest.layout.is_sized());
                assert_eq!(cast_ty, dest.layout.ty);
                if src.layout.size != dest.layout.size {
                    throw_ub_custom!(
                        fluent::const_eval_invalid_transmute,
                        src_bytes = src.layout.size.bytes(),
                        dest_bytes = dest.layout.size.bytes(),
                        src = src.layout.ty,
                        dest = dest.layout.ty,
                    );
                }

                self.copy_op_allow_transmute(src, dest)?;
            }
        }
        interp_ok(())
    }

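    /// Handles `IntToInt` and `IntToFloat` casts.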
    pub fn int_to_int_or_float(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral() || src.layout.ty.is_char() || src.layout.ty.is_bool());
        assert!(cast_to.ty.is_floating_point() || cast_to.ty.is_integral() || cast_to.ty.is_char());

        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(src.to_scalar(), src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

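    /// Handles `FloatToFloat` and `FloatToInt` casts.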
    pub fn float_to_float_or_int(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        let ty::Float(fty) = src.layout.ty.kind() else {
            bug!("FloatToFloat/FloatToInt cast: source type {} is not a float type", src.layout.ty)
        };
        let val = match fty {
            FloatTy::F16 => self.cast_from_float(src.to_scalar().to_f16()?, cast_to.ty),
            FloatTy::F32 => self.cast_from_float(src.to_scalar().to_f32()?, cast_to.ty),
            FloatTy::F64 => self.cast_from_float(src.to_scalar().to_f64()?, cast_to.ty),
            FloatTy::F128 => self.cast_from_float(src.to_scalar().to_f128()?, cast_to.ty),
        };
        interp_ok(ImmTy::from_scalar(val, cast_to))
    }

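    /// Handles `FnPtrToPtr` and `PtrToPtr` casts.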
    pub fn ptr_to_ptr(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_any_ptr());
        assert!(cast_to.ty.is_raw_ptr());
        if cast_to.size == src.layout.size {
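            // Same size: thin-to-thin or wide-to-wide cast; the pointer value is unchanged.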
            return interp_ok(ImmTy::from_immediate(**src, cast_to));
        } else {
            // Casting a wide pointer to a thin pointer: drop the metadata, keep the data part.
            assert_eq!(src.layout.size, 2 * self.pointer_size());
            assert_eq!(cast_to.size, self.pointer_size());
            assert!(src.layout.ty.is_raw_ptr());
            return match **src {
                Immediate::ScalarPair(data, _) => interp_ok(ImmTy::from_scalar(data, cast_to)),
                Immediate::Scalar(..) => span_bug!(
                    self.cur_span(),
                    "{:?} input to a fat-to-thin cast ({} -> {})",
                    *src,
                    src.layout.ty,
                    cast_to.ty
                ),
                Immediate::Uninit => throw_ub!(InvalidUninitBytes(None)),
            };
        }
    }

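    /// Handles `PointerExposeProvenance` casts: marks the pointer's provenance as exposed
    /// and returns its address as an integer.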
    pub fn pointer_expose_provenance_cast(
        &mut self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert_matches!(src.layout.ty.kind(), ty::RawPtr(_, _) | ty::FnPtr(..));
        assert!(cast_to.ty.is_integral());

        let scalar = src.to_scalar();
        let ptr = scalar.to_pointer(self)?;
        match ptr.into_pointer_or_addr() {
            Ok(ptr) => M::expose_provenance(self, ptr.provenance)?,
            Err(_) => {} // A pointer with no provenance has nothing to expose.
        };
        interp_ok(ImmTy::from_scalar(
            self.cast_from_int_like(scalar, src.layout, cast_to.ty)?,
            cast_to,
        ))
    }

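    /// Handles `PointerWithExposedProvenance` casts: converts an integer address back into
    /// a pointer, asking the machine for a suitable (exposed) provenance.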
    pub fn pointer_with_exposed_provenance_cast(
        &self,
        src: &ImmTy<'tcx, M::Provenance>,
        cast_to: TyAndLayout<'tcx>,
    ) -> InterpResult<'tcx, ImmTy<'tcx, M::Provenance>> {
        assert!(src.layout.ty.is_integral());
        assert_matches!(cast_to.ty.kind(), ty::RawPtr(_, _));

        let scalar = src.to_scalar();
        // First cast the integer to a target-usize-sized address.
        let addr = self.cast_from_int_like(scalar, src.layout, self.tcx.types.usize)?;
        let addr = addr.to_target_usize(self)?;

        // Then ask the machine to turn that address into a pointer.
        let ptr = M::ptr_from_addr_cast(self, addr)?;
        interp_ok(ImmTy::from_scalar(Scalar::from_maybe_pointer(ptr, self), cast_to))
    }

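    /// Low-level cast helper: casts an "int-like" scalar (integer, bool, char, or thin
    /// pointer) of type `src_layout` to the integer, char, or float type `cast_ty`.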
    fn cast_from_int_like(
        &self,
        scalar: Scalar<M::Provenance>,
        src_layout: TyAndLayout<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx, Scalar<M::Provenance>> {
        // Whether the source value is signed; this decides how `v` is interpreted for the
        // int-to-float cases below.
        let signed = src_layout.backend_repr.is_signed();

        let v = match src_layout.ty.kind() {
            ty::Uint(_) | ty::RawPtr(..) | ty::FnPtr(..) => scalar.to_uint(src_layout.size)?,
            ty::Int(_) => scalar.to_int(src_layout.size)? as u128, // keeps the sign-extended bit pattern
            ty::Bool => scalar.to_bool()?.into(),
            ty::Char => scalar.to_char()?.into(),
            _ => span_bug!(self.cur_span(), "invalid int-like cast from {}", src_layout.ty),
        };

        interp_ok(match *cast_ty.kind() {
            // int-like -> int: truncate to the target width.
            ty::Int(_) | ty::Uint(_) => {
                let size = match *cast_ty.kind() {
                    ty::Int(t) => Integer::from_int_ty(self, t).size(),
                    ty::Uint(t) => Integer::from_uint_ty(self, t).size(),
                    _ => bug!(),
                };
                let v = size.truncate(v);
                Scalar::from_uint(v, size)
            }

            // signed int -> float
            ty::Float(fty) if signed => {
                let v = v as i128;
                match fty {
                    FloatTy::F16 => Scalar::from_f16(Half::from_i128(v).value),
                    FloatTy::F32 => Scalar::from_f32(Single::from_i128(v).value),
                    FloatTy::F64 => Scalar::from_f64(Double::from_i128(v).value),
                    FloatTy::F128 => Scalar::from_f128(Quad::from_i128(v).value),
                }
            }
            // unsigned int -> float
            ty::Float(fty) => match fty {
                FloatTy::F16 => Scalar::from_f16(Half::from_u128(v).value),
                FloatTy::F32 => Scalar::from_f32(Single::from_u128(v).value),
                FloatTy::F64 => Scalar::from_f64(Double::from_u128(v).value),
                FloatTy::F128 => Scalar::from_f128(Quad::from_u128(v).value),
            },

            // u8 -> char: the only valid int-to-char cast, so the value always fits.
            ty::Char => Scalar::from_u32(u8::try_from(v).unwrap().into()),

            _ => span_bug!(self.cur_span(), "invalid int to {} cast", cast_ty),
        })
    }

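    /// Low-level cast helper: converts an apfloat value `f` into an integer or float of
    /// type `dest_ty`.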
    fn cast_from_float<F>(&self, f: F, dest_ty: Ty<'tcx>) -> Scalar<M::Provenance>
    where
        F: Float
            + Into<Scalar<M::Provenance>>
            + FloatConvert<Half>
            + FloatConvert<Single>
            + FloatConvert<Double>
            + FloatConvert<Quad>,
    {
        match *dest_ty.kind() {
            // float -> uint
            ty::Uint(t) => {
                let size = Integer::from_uint_ty(self, t).size();
                let v = f.to_u128(size.bits_usize()).value;
                Scalar::from_uint(v, size)
            }
            // float -> int
            ty::Int(t) => {
                let size = Integer::from_int_ty(self, t).size();
                let v = f.to_i128(size.bits_usize()).value;
                Scalar::from_int(v, size)
            }
            // float -> float
            ty::Float(fty) => match fty {
                FloatTy::F16 => {
                    Scalar::from_f16(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F32 => {
                    Scalar::from_f32(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F64 => {
                    Scalar::from_f64(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
                FloatTy::F128 => {
                    Scalar::from_f128(self.adjust_nan(f.convert(&mut false).value, &[f]))
                }
            },
            _ => span_bug!(self.cur_span(), "invalid float to {} cast", dest_ty),
        }
    }

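    /// `src` is a pointer to some `source_ty`; writes into `dest` a pointer to the same data,
    /// but unsized to `cast_ty` (e.g. `&[T; N]` -> `&[T]`, or `&T` -> `&dyn Trait`).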
    fn unsize_into_ptr(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        dest: &impl Writeable<'tcx, M::Provenance>,
        source_ty: Ty<'tcx>,
        cast_ty: Ty<'tcx>,
    ) -> InterpResult<'tcx> {
        let (src_pointee_ty, dest_pointee_ty) =
            self.tcx.struct_lockstep_tails_for_codegen(source_ty, cast_ty, self.typing_env);

        match (src_pointee_ty.kind(), dest_pointee_ty.kind()) {
            (&ty::Array(_, length), &ty::Slice(_)) => {
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_slice(
                    ptr,
                    length
                        .try_to_target_usize(*self.tcx)
                        .expect("expected monomorphic const in const eval"),
                    self,
                );
                self.write_immediate(val, dest)
            }
            (ty::Dynamic(data_a, _, ty::Dyn), ty::Dynamic(data_b, _, ty::Dyn)) => {
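                // `dyn Trait` -> `dyn Trait` coercion (trait upcasting): the data pointer stays
                // the same, but the vtable pointer may have to change.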
                let val = self.read_immediate(src)?;
                if data_a == data_b {
                    // Identical trait data: the vtable does not need to change.
                    return self.write_immediate(*val, dest);
                }
                let (old_data, old_vptr) = val.to_scalar_pair();
                let old_data = old_data.to_pointer(self)?;
                let old_vptr = old_vptr.to_pointer(self)?;
                let ty = self.get_ptr_vtable_ty(old_vptr, Some(data_a))?;

                // Sanity-check the upcast: if codegen would load the new vtable pointer from a
                // slot of the old vtable, that slot must point to the target trait; otherwise
                // the target vtable must be a prefix of the source vtable.
                let vptr_entry_idx =
                    self.tcx.supertrait_vtable_slot((src_pointee_ty, dest_pointee_ty));
                let vtable_entries = self.vtable_entries(data_a.principal(), ty);
                if let Some(entry_idx) = vptr_entry_idx {
                    let Some(&ty::VtblEntry::TraitVPtr(upcast_trait_ref)) =
                        vtable_entries.get(entry_idx)
                    else {
                        span_bug!(
                            self.cur_span(),
                            "invalid vtable entry index in {} -> {} upcast",
                            src_pointee_ty,
                            dest_pointee_ty
                        );
                    };
                    let erased_trait_ref =
                        ty::ExistentialTraitRef::erase_self_ty(*self.tcx, upcast_trait_ref);
                    assert_eq!(
                        data_b.principal().map(|b| {
                            self.tcx.normalize_erasing_late_bound_regions(self.typing_env, b)
                        }),
                        Some(erased_trait_ref),
                    );
                } else {
                    let vtable_entries_b = self.vtable_entries(data_b.principal(), ty);
                    assert!(&vtable_entries[..vtable_entries_b.len()] == vtable_entries_b);
                };

                // Get the fresh vtable for the target trait object and reassemble the wide pointer.
                let new_vptr = self.get_vtable_ptr(ty, data_b)?;
                self.write_immediate(Immediate::new_dyn_trait(old_data, new_vptr, self), dest)
            }
            (_, &ty::Dynamic(data, _, ty::Dyn)) => {
                // A (sized) pointee is unsized to a trait object: pair the data pointer with a
                // fresh vtable.
                let vtable = self.get_vtable_ptr(src_pointee_ty, data)?;
                let ptr = self.read_pointer(src)?;
                let val = Immediate::new_dyn_trait(ptr, vtable, &*self.tcx);
                self.write_immediate(val, dest)
            }
            _ => {
                // If the types are not monomorphic enough, report `TooGeneric` rather than ICE.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty)?;

                span_bug!(
                    self.cur_span(),
                    "invalid pointer unsizing {} -> {}",
                    src.layout.ty,
                    cast_ty
                )
            }
        }
    }

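    /// Unsizes `src` into `dest`. References and raw pointers are handled by `unsize_into_ptr`;
    /// for ADTs (e.g. `Arc<T>` -> `Arc<dyn Trait>`) the one field that changes type is unsized
    /// recursively.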
    pub fn unsize_into(
        &mut self,
        src: &OpTy<'tcx, M::Provenance>,
        cast_ty: TyAndLayout<'tcx>,
        dest: &impl Writeable<'tcx, M::Provenance>,
    ) -> InterpResult<'tcx> {
        trace!("Unsizing {:?} of type {} into {}", *src, src.layout.ty, cast_ty.ty);
        match (src.layout.ty.kind(), cast_ty.ty.kind()) {
            (&ty::Ref(_, s, _), &ty::Ref(_, c, _) | &ty::RawPtr(c, _))
            | (&ty::RawPtr(s, _), &ty::RawPtr(c, _)) => self.unsize_into_ptr(src, dest, s, c),
            (&ty::Adt(def_a, _), &ty::Adt(def_b, _)) => {
                // Unsizing a struct leaves the ADT unchanged; at most one field may change
                // its type (checked below).
                assert_eq!(def_a, def_b);
                let mut found_cast_field = false;
                for i in 0..src.layout.fields.count() {
                    let cast_ty_field = cast_ty.field(self, i);
                    let i = FieldIdx::from_usize(i);
                    let src_field = self.project_field(src, i)?;
                    let dst_field = self.project_field(dest, i)?;
                    if src_field.layout.is_1zst() && cast_ty_field.is_1zst() {
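                        // Matching 1-ZST fields carry no data; nothing to copy.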
                    } else if src_field.layout.ty == cast_ty_field.ty {
                        // Fields whose type does not change are copied as-is.
                        self.copy_op(&src_field, &dst_field)?;
                    } else {
                        if found_cast_field {
                            span_bug!(self.cur_span(), "unsize_into: more than one field to cast");
                        }
                        found_cast_field = true;
                        self.unsize_into(&src_field, cast_ty_field, &dst_field)?;
                    }
                }
                interp_ok(())
            }
            _ => {
                // If the types are not monomorphic enough, report `TooGeneric` rather than ICE.
                ensure_monomorphic_enough(*self.tcx, src.layout.ty)?;
                ensure_monomorphic_enough(*self.tcx, cast_ty.ty)?;

                span_bug!(
                    self.cur_span(),
                    "unsize_into: invalid conversion: {:?} -> {:?}",
                    src.layout,
                    dest.layout()
                )
            }
        }
    }
}