rustc_const_eval/interpret/visitor.rs
//! Visitor for a run-time value with a given layout: Traverse enums, structs and other compound
//! types until we arrive at the leaves, with custom handling for primitive types.
use std::num::NonZero;
use rustc_index::IndexVec;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::layout::LayoutOf;
use rustc_middle::ty::{self, Ty};
use rustc_target::abi::{FieldIdx, FieldsShape, VariantIdx, Variants};
use tracing::trace;
use super::{InterpCx, MPlaceTy, Machine, Projectable, interp_ok, throw_inval};
/// How to traverse a value and what to do when we are at the leaves.
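///
/// For illustration, an implementation might look roughly like the following sketch
/// (the `TraceVisitor` name and its single `ecx` field are made up for this example;
/// they are not part of this crate):
///
/// ```rust,ignore
/// struct TraceVisitor<'ecx, 'tcx, M: Machine<'tcx>> {
///     ecx: &'ecx InterpCx<'tcx, M>,
/// }
///
/// impl<'ecx, 'tcx, M: Machine<'tcx>> ValueVisitor<'tcx, M> for TraceVisitor<'ecx, 'tcx, M> {
///     // Places work as `V`; `OpTy` would work as well, since both implement `Projectable`
///     // and `From<MPlaceTy>`.
///     type V = MPlaceTy<'tcx, M::Provenance>;
///
///     fn ecx(&self) -> &InterpCx<'tcx, M> {
///         self.ecx
///     }
///
///     fn visit_value(&mut self, v: &Self::V) -> InterpResult<'tcx> {
///         trace!("visiting a value of type {}", v.layout().ty);
///         // Keep recursing into fields and variants.
///         self.walk_value(v)
///     }
/// }
/// ```
///
/// Calling `visit_value` on the root value then drives the whole traversal.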
pub trait ValueVisitor<'tcx, M: Machine<'tcx>>: Sized {
type V: Projectable<'tcx, M::Provenance> + From<MPlaceTy<'tcx, M::Provenance>>;
/// The visitor must have an `InterpCx` in it.
fn ecx(&self) -> &InterpCx<'tcx, M>;
/// `read_discriminant` can be hooked for better error messages.
#[inline(always)]
fn read_discriminant(&mut self, v: &Self::V) -> InterpResult<'tcx, VariantIdx> {
self.ecx().read_discriminant(&v.to_op(self.ecx())?)
}
/// This function provides the chance to change the order in which fields are visited for
/// `FieldsShape::Arbitrary` aggregates.
///
/// The default is to iterate in source declaration order; alternatively, this can use
/// `memory_index` to iterate in memory order instead.
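///
/// For instance, a visitor that wants memory order could sort the field indices by their
/// `memory_index` entry. A rough sketch (not necessarily what any in-tree visitor does):
///
/// ```rust,ignore
/// fn aggregate_field_iter(
///     memory_index: &IndexVec<FieldIdx, u32>,
/// ) -> impl Iterator<Item = FieldIdx> + 'static {
///     // `memory_index[i]` is the position of source-order field `i` in memory,
///     // so sorting the source-order indices by that value yields memory order.
///     let mut fields: Vec<FieldIdx> = memory_index.indices().collect();
///     fields.sort_by_key(|&i| memory_index[i]);
///     fields.into_iter()
/// }
/// ```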
#[inline(always)]
fn aggregate_field_iter(
memory_index: &IndexVec<FieldIdx, u32>,
) -> impl Iterator<Item = FieldIdx> + 'static {
memory_index.indices()
}
// Recursive actions, ready to be overridden.
/// Visits the given value, dispatching as appropriate to more specialized visitors.
#[inline(always)]
fn visit_value(&mut self, v: &Self::V) -> InterpResult<'tcx> {
self.walk_value(v)
}
/// Visits the given value as a union. No automatic recursion can happen here.
#[inline(always)]
fn visit_union(&mut self, _v: &Self::V, _fields: NonZero<usize>) -> InterpResult<'tcx> {
interp_ok(())
}
/// Visits the given value as the pointer of a `Box`. There is nothing to recurse into.
/// The type of `v` will be a raw pointer to `T`, but this is a field of `Box<T>` and the
/// pointee type is the actual `T`. `box_ty` provides the full type of the `Box` itself.
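///
/// For example, a visitor that only cares about logging box pointers might override this
/// as in the following sketch:
///
/// ```rust,ignore
/// fn visit_box(&mut self, box_ty: Ty<'tcx>, v: &Self::V) -> InterpResult<'tcx> {
///     // `v` is typed as a raw pointer to `T`, while `box_ty` is the full `Box` type.
///     trace!("box of type {box_ty}, pointer field of type {}", v.layout().ty);
///     interp_ok(())
/// }
/// ```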
#[inline(always)]
fn visit_box(&mut self, _box_ty: Ty<'tcx>, _v: &Self::V) -> InterpResult<'tcx> {
interp_ok(())
}
/// Called each time we recurse down to a field of a "product-like" aggregate
/// (structs, tuples, arrays, and the like, but not enums), passing in the old (outer)
/// and new (inner) values.
/// This gives the visitor the chance to track the stack of nested fields that
/// we are descending through.
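///
/// For example, a visitor could maintain a path of field indices for error reporting.
/// A sketch, assuming a hypothetical `path: Vec<usize>` field on the visitor:
///
/// ```rust,ignore
/// fn visit_field(
///     &mut self,
///     _old_val: &Self::V,
///     field: usize,
///     new_val: &Self::V,
/// ) -> InterpResult<'tcx> {
///     self.path.push(field); // `path` is a hypothetical field, not part of this trait
///     let res = self.visit_value(new_val);
///     self.path.pop();
///     res
/// }
/// ```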
#[inline(always)]
fn visit_field(
&mut self,
_old_val: &Self::V,
_field: usize,
new_val: &Self::V,
) -> InterpResult<'tcx> {
self.visit_value(new_val)
}
/// Called when recursing into an enum variant.
/// This gives the visitor the chance to track the stack of nested fields that
/// we are descending through.
#[inline(always)]
fn visit_variant(
&mut self,
_old_val: &Self::V,
_variant: VariantIdx,
new_val: &Self::V,
) -> InterpResult<'tcx> {
self.visit_value(new_val)
}
/// Traversal logic; should not be overridden.
fn walk_value(&mut self, v: &Self::V) -> InterpResult<'tcx> {
let ty = v.layout().ty;
trace!("walk_value: type: {ty}");
// Special treatment for special types, where the (static) layout is not sufficient.
match *ty.kind() {
// If it is a trait object, switch to the real type that was used to create it.
ty::Dynamic(data, _, ty::Dyn) => {
// Dyn types. This value is unsized, and the actual dynamic type of the data is given by
// the vtable stored in the place metadata.
// Unsized values are never immediate, so we can use `assert_mem_place` here.
let op = v.to_op(self.ecx())?;
let dest = op.assert_mem_place();
let inner_mplace = self.ecx().unpack_dyn_trait(&dest, data)?;
trace!("walk_value: dyn object layout: {:#?}", inner_mplace.layout);
// recurse with the inner type
return self.visit_field(v, 0, &inner_mplace.into());
}
ty::Dynamic(data, _, ty::DynStar) => {
// DynStar types. Very different from a dyn type (but strangely part of the
// same variant in `TyKind`): these are pairs where the second component is the
// vtable, and the first component is the data (which must be pointer-sized).
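// Conceptually, the assumed shape is a two-field pair (just a sketch, not a real type):
//
//     dyn* Trait  ~  (data: <pointer-sized value>, vtable: <pointer to the vtable>)
//
// so below, field 0 is the data and field 1 is the vtable.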
// First, make sure the vtable can be read at its type. The type of this vtable is fake:
// it claims to be a reference to some actual memory, but that isn't true. So we
// transmute it to a raw pointer.
let raw_ptr_ty = Ty::new_mut_ptr(*self.ecx().tcx, self.ecx().tcx.types.unit);
let raw_ptr_ty = self.ecx().layout_of(raw_ptr_ty)?;
let vtable_field =
self.ecx().project_field(v, 1)?.transmute(raw_ptr_ty, self.ecx())?;
self.visit_field(v, 1, &vtable_field)?;
// Then unpack the first field, and continue.
let data = self.ecx().unpack_dyn_star(v, data)?;
return self.visit_field(v, 0, &data);
}
// Slices do not need special handling here: they have `Array` field
// placement with length 0, so we enter the `Array` case below, which
// indirectly uses the metadata to determine the actual length.
// However, `Box`... let's talk about `Box`.
ty::Adt(def, ..) if def.is_box() => {
// `Box` is a hybrid primitive-library-defined type that on the one hand is
// a dereferenceable pointer, but on the other hand has *basically arbitrary
// user-defined layout*, since the user controls the 'allocator' field. So it
// cannot be treated like a normal pointer, since it does not fit into an
// `Immediate`. Yeah, it is quite terrible. But many visitors want to do
// something with "all boxed pointers", so we handle this mess for them.
//
// When we hit a `Box`, we do not do the usual field recursion; instead,
// we (a) call `visit_box` on the pointer value, and (b) recurse on the
// allocator field. We also assert tons of things to ensure we do not miss
// any other fields.
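// For reference, the field nesting unpacked below (by field index) is:
//
//     Box<T, A>
//       .0: Unique<T>
//         .0: NonNull<T>
//           .0: *const T      <- the raw pointer passed to `visit_box`
//         .1: PhantomData<T>
//       .1: A                 <- the allocator, visited like a regular field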
// `Box` has two fields: the pointer we care about, and the allocator.
assert_eq!(v.layout().fields.count(), 2, "`Box` must have exactly 2 fields");
let (unique_ptr, alloc) =
(self.ecx().project_field(v, 0)?, self.ecx().project_field(v, 1)?);
// Unfortunately there is some type junk in the way here: `unique_ptr` is a `Unique`...
// (which means another 2 fields, the second of which is a `PhantomData`)
assert_eq!(unique_ptr.layout().fields.count(), 2);
let (nonnull_ptr, phantom) = (
self.ecx().project_field(&unique_ptr, 0)?,
self.ecx().project_field(&unique_ptr, 1)?,
);
assert!(
phantom.layout().ty.ty_adt_def().is_some_and(|adt| adt.is_phantom_data()),
"2nd field of `Unique` should be PhantomData but is {:?}",
phantom.layout().ty,
);
// ... that contains a `NonNull`... (luckily, only a single field here)
assert_eq!(nonnull_ptr.layout().fields.count(), 1);
// ... whose only field finally is a raw ptr we can dereference.
let raw_ptr = self.ecx().project_field(&nonnull_ptr, 0)?;
self.visit_box(ty, &raw_ptr)?;
// The second `Box` field is the allocator, which we recursively visit
// like a regular struct field.
self.visit_field(v, 1, &alloc)?;
// We visited all parts of this one.
return interp_ok(());
}
// Non-normalized types should never show up here.
ty::Param(..)
| ty::Alias(..)
| ty::Bound(..)
| ty::Placeholder(..)
| ty::Infer(..)
| ty::Error(..) => throw_inval!(TooGeneric),
// The rest is handled below.
_ => {}
};
// Visit the fields of this value.
match &v.layout().fields {
FieldsShape::Primitive => {}
&FieldsShape::Union(fields) => {
self.visit_union(v, fields)?;
}
FieldsShape::Arbitrary { memory_index, .. } => {
for idx in Self::aggregate_field_iter(memory_index) {
let idx = idx.as_usize();
let field = self.ecx().project_field(v, idx)?;
self.visit_field(v, idx, &field)?;
}
}
FieldsShape::Array { .. } => {
let mut iter = self.ecx().project_array_fields(v)?;
while let Some((idx, field)) = iter.next(self.ecx())? {
self.visit_field(v, idx.try_into().unwrap(), &field)?;
}
}
}
match v.layout().variants {
// If this is a multi-variant layout, find the right variant and proceed
// with *its* fields.
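// For example, for a valid `Option<u32>`, `read_discriminant` tells us whether the
// value is `None` or `Some`; `project_downcast` then yields the value typed at that
// variant, and `visit_variant` recurses into its fields (the `u32`, for `Some`).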
Variants::Multiple { .. } => {
let idx = self.read_discriminant(v)?;
// There are 3 cases where downcasts can turn a Scalar/ScalarPair into a different ABI which
// could be a problem for `ImmTy` (see layout_sanity_check):
// - variant.size == Size::ZERO: works fine because `ImmTy::offset` has a special case for
// zero-sized layouts.
// - variant.fields.count() == 0: works fine because `ImmTy::offset` has a special case for
// zero-field aggregates.
// - variant.abi.is_uninhabited(): triggers UB in `read_discriminant` so we never get here.
let inner = self.ecx().project_downcast(v, idx)?;
trace!("walk_value: variant layout: {:#?}", inner.layout());
// recurse with the inner type
self.visit_variant(v, idx, &inner)?;
}
// For single-variant layouts, we have already done everything there is to do.
Variants::Single { .. } => {}
}
interp_ok(())
}
}