pub trait Machine<'tcx>: Sized {
type MemoryKind: Debug + Display + MayLeak + Eq + 'static;
type Provenance: Provenance + Eq + Hash + 'static;
type ProvenanceExtra: Copy + 'static;
type ExtraFnVal: Debug + Copy;
type FrameExtra;
type AllocExtra: Debug + Clone + 'tcx;
type Bytes: AllocBytes + 'static;
type MemoryMap: AllocMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>)> + Default + Clone;
const GLOBAL_KIND: Option<Self::MemoryKind>;
const PANIC_ON_ALLOC_FAIL: bool;
const ALL_CONSTS_ARE_PRECHECKED: bool = true;
// Required methods
fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool;
fn enforce_validity(
ecx: &InterpCx<'tcx, Self>,
layout: TyAndLayout<'tcx>,
) -> bool;
fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool;
fn find_mir_or_eval_fn(
ecx: &mut InterpCx<'tcx, Self>,
instance: Instance<'tcx>,
abi: CallAbi,
args: &[FnArg<'tcx, Self::Provenance>],
destination: &MPlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
unwind: UnwindAction,
) -> InterpResult<'tcx, Option<(&'tcx Body<'tcx>, Instance<'tcx>)>>;
fn call_extra_fn(
ecx: &mut InterpCx<'tcx, Self>,
fn_val: Self::ExtraFnVal,
abi: CallAbi,
args: &[FnArg<'tcx, Self::Provenance>],
destination: &MPlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
unwind: UnwindAction,
) -> InterpResult<'tcx>;
fn call_intrinsic(
ecx: &mut InterpCx<'tcx, Self>,
instance: Instance<'tcx>,
args: &[OpTy<'tcx, Self::Provenance>],
destination: &MPlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
unwind: UnwindAction,
) -> InterpResult<'tcx, Option<Instance<'tcx>>>;
fn check_fn_target_features(
_ecx: &InterpCx<'tcx, Self>,
_instance: Instance<'tcx>,
) -> InterpResult<'tcx>;
fn assert_panic(
ecx: &mut InterpCx<'tcx, Self>,
msg: &AssertMessage<'tcx>,
unwind: UnwindAction,
) -> InterpResult<'tcx>;
fn panic_nounwind(
_ecx: &mut InterpCx<'tcx, Self>,
msg: &str,
) -> InterpResult<'tcx>;
fn unwind_terminate(
ecx: &mut InterpCx<'tcx, Self>,
reason: UnwindTerminateReason,
) -> InterpResult<'tcx>;
fn binary_ptr_op(
ecx: &InterpCx<'tcx, Self>,
bin_op: BinOp,
left: &ImmTy<'tcx, Self::Provenance>,
right: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>;
fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>;
fn extern_static_pointer(
ecx: &InterpCx<'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
fn ptr_from_addr_cast(
ecx: &InterpCx<'tcx, Self>,
addr: u64,
) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>;
fn expose_ptr(
ecx: &mut InterpCx<'tcx, Self>,
ptr: Pointer<Self::Provenance>,
) -> InterpResult<'tcx>;
fn ptr_get_alloc(
ecx: &InterpCx<'tcx, Self>,
ptr: Pointer<Self::Provenance>,
size: i64,
) -> Option<(AllocId, Size, Self::ProvenanceExtra)>;
fn adjust_global_allocation<'b>(
ecx: &InterpCx<'tcx, Self>,
id: AllocId,
alloc: &'b Allocation,
) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>;
fn init_alloc_extra(
ecx: &InterpCx<'tcx, Self>,
id: AllocId,
kind: MemoryKind<Self::MemoryKind>,
size: Size,
align: Align,
) -> InterpResult<'tcx, Self::AllocExtra>;
fn adjust_alloc_root_pointer(
ecx: &InterpCx<'tcx, Self>,
ptr: Pointer,
kind: Option<MemoryKind<Self::MemoryKind>>,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>;
fn init_frame(
ecx: &mut InterpCx<'tcx, Self>,
frame: Frame<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
fn stack<'a>(
ecx: &'a InterpCx<'tcx, Self>,
) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>];
fn stack_mut<'a>(
ecx: &'a mut InterpCx<'tcx, Self>,
) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>;
fn get_global_alloc_salt(
ecx: &InterpCx<'tcx, Self>,
instance: Option<Instance<'tcx>>,
) -> usize;
// Provided methods
fn alignment_check(
_ecx: &InterpCx<'tcx, Self>,
_alloc_id: AllocId,
_alloc_align: Align,
_alloc_kind: AllocKind,
_offset: Size,
_align: Align,
) -> Option<Misalignment> { ... }
fn enforce_validity_recursively(
_ecx: &InterpCx<'tcx, Self>,
_layout: TyAndLayout<'tcx>,
) -> bool { ... }
fn load_mir(
ecx: &InterpCx<'tcx, Self>,
instance: InstanceKind<'tcx>,
) -> InterpResult<'tcx, &'tcx Body<'tcx>> { ... }
fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
_ecx: &InterpCx<'tcx, Self>,
_inputs: &[F1],
) -> F2 { ... }
fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> { ... }
fn increment_const_eval_counter(
_ecx: &mut InterpCx<'tcx, Self>,
) -> InterpResult<'tcx> { ... }
fn before_access_global(
_tcx: TyCtxtAt<'tcx>,
_machine: &Self,
_alloc_id: AllocId,
_allocation: ConstAllocation<'tcx>,
_static_def_id: Option<DefId>,
_is_write: bool,
) -> InterpResult<'tcx> { ... }
fn thread_local_static_pointer(
_ecx: &mut InterpCx<'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Self::Provenance>> { ... }
fn eval_inline_asm(
_ecx: &mut InterpCx<'tcx, Self>,
_template: &'tcx [InlineAsmTemplatePiece],
_operands: &[InlineAsmOperand<'tcx>],
_options: InlineAsmOptions,
_targets: &[BasicBlock],
) -> InterpResult<'tcx> { ... }
fn before_memory_read(
_tcx: TyCtxtAt<'tcx>,
_machine: &Self,
_alloc_extra: &Self::AllocExtra,
_prov: (AllocId, Self::ProvenanceExtra),
_range: AllocRange,
) -> InterpResult<'tcx> { ... }
fn before_alloc_read(
_ecx: &InterpCx<'tcx, Self>,
_alloc_id: AllocId,
) -> InterpResult<'tcx> { ... }
fn before_memory_write(
_tcx: TyCtxtAt<'tcx>,
_machine: &mut Self,
_alloc_extra: &mut Self::AllocExtra,
_prov: (AllocId, Self::ProvenanceExtra),
_range: AllocRange,
) -> InterpResult<'tcx> { ... }
fn before_memory_deallocation(
_tcx: TyCtxtAt<'tcx>,
_machine: &mut Self,
_alloc_extra: &mut Self::AllocExtra,
_prov: (AllocId, Self::ProvenanceExtra),
_size: Size,
_align: Align,
_kind: MemoryKind<Self::MemoryKind>,
) -> InterpResult<'tcx> { ... }
fn retag_ptr_value(
_ecx: &mut InterpCx<'tcx, Self>,
_kind: RetagKind,
val: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>> { ... }
fn retag_place_contents(
_ecx: &mut InterpCx<'tcx, Self>,
_kind: RetagKind,
_place: &PlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx> { ... }
fn protect_in_place_function_argument(
ecx: &mut InterpCx<'tcx, Self>,
mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx> { ... }
fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> { ... }
fn before_stack_pop(
_ecx: &InterpCx<'tcx, Self>,
_frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
) -> InterpResult<'tcx> { ... }
fn after_stack_pop(
_ecx: &mut InterpCx<'tcx, Self>,
_frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
unwinding: bool,
) -> InterpResult<'tcx, ReturnAction> { ... }
fn after_local_read(
_ecx: &InterpCx<'tcx, Self>,
_local: Local,
) -> InterpResult<'tcx> { ... }
fn after_local_write(
_ecx: &mut InterpCx<'tcx, Self>,
_local: Local,
_storage_live: bool,
) -> InterpResult<'tcx> { ... }
fn after_local_moved_to_memory(
_ecx: &mut InterpCx<'tcx, Self>,
_local: Local,
_mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx> { ... }
fn eval_mir_constant<F>(
ecx: &InterpCx<'tcx, Self>,
val: Const<'tcx>,
span: Span,
layout: Option<TyAndLayout<'tcx>>,
eval: F,
) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>
where F: Fn(&InterpCx<'tcx, Self>, Const<'tcx>, Span, Option<TyAndLayout<'tcx>>) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>> { ... }
fn cached_union_data_range<'e>(
_ecx: &'e mut InterpCx<'tcx, Self>,
_ty: Ty<'tcx>,
compute_range: impl FnOnce() -> RangeSet,
) -> Cow<'e, RangeSet> { ... }
}
Methods of this trait signify a point where CTFE evaluation would fail and some use-case-dependent behaviour can instead be applied.
Required Associated Constants
const GLOBAL_KIND: Option<Self::MemoryKind>
The memory kind to use for copied global memory (held in tcx) – or None if such memory should not be mutated and thus any such attempt will cause a ModifiedStatic error to be raised.
Statics are copied under two circumstances: when they are mutated, and when adjust_global_allocation (see below) returns an owned allocation that is added to the memory so that the work is not done twice.
const PANIC_ON_ALLOC_FAIL: bool
Should the machine panic on allocation failures?
Provided Associated Constants
const ALL_CONSTS_ARE_PRECHECKED: bool = true
Determines whether eval_mir_constant can never fail because all required consts have already been checked before.
Required Associated Types
type MemoryKind: Debug + Display + MayLeak + Eq + 'static
Additional memory kinds a machine wishes to distinguish from the builtin ones
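For illustration, a machine that distinguishes heap allocations from its own runtime tables might declare something like the following (a minimal sketch; the enum and variant names are hypothetical, and it assumes MayLeak is the single-method fn may_leak(self) -> bool trait used by this module):

// Hypothetical machine-specific memory kinds; the names are illustrative only.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum MyMemoryKind {
    // Memory backing a malloc/Box-style heap allocation; must be freed.
    Heap,
    // Machine-managed table that is allowed to outlive the interpreted program.
    RuntimeTable,
}

impl std::fmt::Display for MyMemoryKind {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            MyMemoryKind::Heap => write!(f, "heap allocation"),
            MyMemoryKind::RuntimeTable => write!(f, "runtime table"),
        }
    }
}

impl MayLeak for MyMemoryKind {
    #[inline(always)]
    fn may_leak(self) -> bool {
        // Leak checking should complain about unfreed heap memory, but not about
        // tables the machine keeps alive on purpose.
        matches!(self, MyMemoryKind::RuntimeTable)
    }
}

The machine then sets type MemoryKind = MyMemoryKind;.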
type Provenance: Provenance + Eq + Hash + 'static
Pointers are “tagged” with provenance information; typically the AllocId they belong to.
type ProvenanceExtra: Copy + 'static
When getting the AllocId of a pointer, some extra data is also obtained from the provenance that is passed to memory access hooks so they can do things with it.
type ExtraFnVal: Debug + Copy
Machines can define extra (non-instance) things that represent values of function pointers.
For example, Miri uses this to return a function pointer from dlsym that can later be called to execute the right thing.
type FrameExtra
Extra data stored in every call frame.
type AllocExtra: Debug + Clone + 'tcx
Extra data stored in every allocation.
type Bytes: AllocBytes + 'static
Type for the bytes of the allocation.
type MemoryMap: AllocMap<AllocId, (MemoryKind<Self::MemoryKind>, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>)> + Default + Clone
Memory’s allocation map
Required Methods
fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool
Whether memory accesses should be alignment-checked.
fn enforce_validity(
ecx: &InterpCx<'tcx, Self>,
layout: TyAndLayout<'tcx>,
) -> bool
Whether to enforce the validity invariant for a specific layout.
fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool
Whether Assert(OverflowNeg) and Assert(Overflow) MIR terminators should actually check for overflow.
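Machines often drive these three policy hooks from flags stored on the machine value itself, which is reachable as ecx.machine. A sketch with hypothetical field names:

fn enforce_alignment(ecx: &InterpCx<'tcx, Self>) -> bool {
    // `ecx.machine` is the machine instance; `check_alignment` is a hypothetical flag.
    ecx.machine.check_alignment
}

fn enforce_validity(ecx: &InterpCx<'tcx, Self>, _layout: TyAndLayout<'tcx>) -> bool {
    // Enforce the validity invariant for every type, gated on a hypothetical flag.
    ecx.machine.validate
}

fn ignore_optional_overflow_checks(_ecx: &InterpCx<'tcx, Self>) -> bool {
    // Always execute Assert(Overflow*)/Assert(OverflowNeg) checks.
    false
}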
fn find_mir_or_eval_fn(
ecx: &mut InterpCx<'tcx, Self>,
instance: Instance<'tcx>,
abi: CallAbi,
args: &[FnArg<'tcx, Self::Provenance>],
destination: &MPlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
unwind: UnwindAction,
) -> InterpResult<'tcx, Option<(&'tcx Body<'tcx>, Instance<'tcx>)>>
Entry point to all function calls.
Returns either the mir to use for the call, or None if execution should just proceed (which usually means this hook did all the work that the called function should usually have done). In the latter case, it is this hook’s responsibility to advance the instruction pointer! (This is to support functions like __rust_maybe_catch_panic that neither find a MIR nor just jump to ret, but instead push their own stack frame.)
Passing dest and ret in the same Option proved very annoying when only one of them was used.
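A machine with no special-cased or foreign functions can simply hand every call back to the interpreter. A minimal sketch, assuming InterpResult can be built with Ok on the toolchain in use (wrap the value with interp_ok instead where InterpResult is not a plain Result):

fn find_mir_or_eval_fn(
    ecx: &mut InterpCx<'tcx, Self>,
    instance: Instance<'tcx>,
    _abi: CallAbi,
    _args: &[FnArg<'tcx, Self::Provenance>],
    _destination: &MPlaceTy<'tcx, Self::Provenance>,
    _target: Option<BasicBlock>,
    _unwind: UnwindAction,
) -> InterpResult<'tcx, Option<(&'tcx Body<'tcx>, Instance<'tcx>)>> {
    // No intercepted functions: always return a MIR body so the interpreter
    // pushes an ordinary stack frame for the callee.
    let body = Self::load_mir(ecx, instance.def)?;
    Ok(Some((body, instance)))
}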
fn call_extra_fn(
ecx: &mut InterpCx<'tcx, Self>,
fn_val: Self::ExtraFnVal,
abi: CallAbi,
args: &[FnArg<'tcx, Self::Provenance>],
destination: &MPlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
unwind: UnwindAction,
) -> InterpResult<'tcx>
Execute fn_val. It is the hook’s responsibility to advance the instruction pointer as appropriate.
fn call_intrinsic(
ecx: &mut InterpCx<'tcx, Self>,
instance: Instance<'tcx>,
args: &[OpTy<'tcx, Self::Provenance>],
destination: &MPlaceTy<'tcx, Self::Provenance>,
target: Option<BasicBlock>,
unwind: UnwindAction,
) -> InterpResult<'tcx, Option<Instance<'tcx>>>
Directly process an intrinsic without pushing a stack frame. It is the hook’s responsibility to advance the instruction pointer as appropriate.
Returns None if the intrinsic was fully handled. Otherwise, returns an Instance of the function that implements the intrinsic.
fn check_fn_target_features(
_ecx: &InterpCx<'tcx, Self>,
_instance: Instance<'tcx>,
) -> InterpResult<'tcx>
Check whether the given function may be executed on the current machine, in terms of the target features it requires.
fn assert_panic(
ecx: &mut InterpCx<'tcx, Self>,
msg: &AssertMessage<'tcx>,
unwind: UnwindAction,
) -> InterpResult<'tcx>
Called to evaluate Assert MIR terminators that trigger a panic.
fn panic_nounwind(
_ecx: &mut InterpCx<'tcx, Self>,
msg: &str,
) -> InterpResult<'tcx>
Called to trigger a non-unwinding panic.
fn unwind_terminate(
ecx: &mut InterpCx<'tcx, Self>,
reason: UnwindTerminateReason,
) -> InterpResult<'tcx>
Called when unwinding reached a state where execution should be terminated.
fn binary_ptr_op(
ecx: &InterpCx<'tcx, Self>,
bin_op: BinOp,
left: &ImmTy<'tcx, Self::Provenance>,
right: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>
Called for all binary operations where the LHS has pointer type.
Returns the result of the operation if it succeeded.
fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool>
Determines the result of a NullaryOp::UbChecks invocation.
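For example, a machine that wants interpreted code to behave as if debug assertions were enabled could answer unconditionally. A sketch (use interp_ok instead of Ok on toolchains where InterpResult is not a plain Result):

fn ub_checks(_ecx: &InterpCx<'tcx, Self>) -> InterpResult<'tcx, bool> {
    // Claim that UB checks are enabled, so library code guarded by
    // `ub_checks()` runs its extra checks under this machine.
    Ok(true)
}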
fn extern_static_pointer(
ecx: &InterpCx<'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>
Return the AllocId for the given extern static.
fn ptr_from_addr_cast(
ecx: &InterpCx<'tcx, Self>,
addr: u64,
) -> InterpResult<'tcx, Pointer<Option<Self::Provenance>>>
“Int-to-pointer cast”
fn expose_ptr(
ecx: &mut InterpCx<'tcx, Self>,
ptr: Pointer<Self::Provenance>,
) -> InterpResult<'tcx>
Marks a pointer as exposed, allowing its provenance to be recovered. “Pointer-to-int cast”
fn ptr_get_alloc(
ecx: &InterpCx<'tcx, Self>,
ptr: Pointer<Self::Provenance>,
size: i64,
) -> Option<(AllocId, Size, Self::ProvenanceExtra)>
Convert a pointer with provenance into an allocation-offset pair and extra provenance info.
size says how many bytes of memory are expected at that pointer. The sign of size can be used to disambiguate situations where a wildcard pointer sits right in between two allocations.
If ptr.provenance.get_alloc_id() is Some(p), the returned AllocId must be p.
The resulting AllocId will just be used for that one step and then forgotten again (i.e., we’ll never turn the data returned here back into a Pointer that might be stored in machine state).
When this fails, that means the pointer does not point to a live allocation.
fn adjust_global_allocation<'b>(
ecx: &InterpCx<'tcx, Self>,
id: AllocId,
alloc: &'b Allocation,
) -> InterpResult<'tcx, Cow<'b, Allocation<Self::Provenance, Self::AllocExtra, Self::Bytes>>>
Called to adjust global allocations to the Provenance and AllocExtra of this machine.
If alloc contains pointers, then they are all pointing to globals.
This should avoid copying if no work has to be done! If this returns an owned allocation (because a copy had to be done to adjust things), machine memory will cache the result. (This relies on AllocMap::get_or being able to add the owned allocation to the map even when the map is shared.)
fn init_alloc_extra(
ecx: &InterpCx<'tcx, Self>,
id: AllocId,
kind: MemoryKind<Self::MemoryKind>,
size: Size,
align: Align,
) -> InterpResult<'tcx, Self::AllocExtra>
Initialize the extra state of an allocation.
This is guaranteed to be called exactly once on all allocations that are accessed by the program.
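A machine that needs no per-allocation state can pick type AllocExtra = (); and make this hook trivial. A sketch (same Ok vs. interp_ok caveat as above, depending on the toolchain):

fn init_alloc_extra(
    _ecx: &InterpCx<'tcx, Self>,
    _id: AllocId,
    _kind: MemoryKind<Self::MemoryKind>,
    _size: Size,
    _align: Align,
) -> InterpResult<'tcx, Self::AllocExtra> {
    // With `type AllocExtra = ()` there is nothing to initialize.
    Ok(())
}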
fn adjust_alloc_root_pointer(
ecx: &InterpCx<'tcx, Self>,
ptr: Pointer,
kind: Option<MemoryKind<Self::MemoryKind>>,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>
Return a “root” pointer for the given allocation: the one that is used for direct accesses to this static/const/fn allocation, or the one returned from the heap allocator.
Not called on extern or thread-local statics (those use the methods above).
kind is the kind of the allocation the pointer points to; it can be None when it’s a global and GLOBAL_KIND is None.
fn init_frame(
ecx: &mut InterpCx<'tcx, Self>,
frame: Frame<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, Frame<'tcx, Self::Provenance, Self::FrameExtra>>
Called immediately before a new stack frame gets pushed.
fn stack<'a>(
ecx: &'a InterpCx<'tcx, Self>,
) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>]
Borrow the current thread’s stack.
fn stack_mut<'a>(
ecx: &'a mut InterpCx<'tcx, Self>,
) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>>
Mutably borrow the current thread’s stack.
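The call stack usually lives in the machine itself; with a hypothetical stack: Vec<Frame<...>> field on the machine, both accessors are one-liners:

fn stack<'a>(
    ecx: &'a InterpCx<'tcx, Self>,
) -> &'a [Frame<'tcx, Self::Provenance, Self::FrameExtra>] {
    // `ecx.machine` is the machine value; `stack` is a field this machine defines.
    &ecx.machine.stack
}

fn stack_mut<'a>(
    ecx: &'a mut InterpCx<'tcx, Self>,
) -> &'a mut Vec<Frame<'tcx, Self::Provenance, Self::FrameExtra>> {
    &mut ecx.machine.stack
}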
Provided Methods
fn alignment_check(
_ecx: &InterpCx<'tcx, Self>,
_alloc_id: AllocId,
_alloc_align: Align,
_alloc_kind: AllocKind,
_offset: Size,
_align: Align,
) -> Option<Misalignment>
Gives the machine a chance to detect more misalignment than the built-in checks would catch.
fn enforce_validity_recursively(
_ecx: &InterpCx<'tcx, Self>,
_layout: TyAndLayout<'tcx>,
) -> bool
Whether to enforce the validity invariant recursively.
fn load_mir(
ecx: &InterpCx<'tcx, Self>,
instance: InstanceKind<'tcx>,
) -> InterpResult<'tcx, &'tcx Body<'tcx>>
Entry point for obtaining the MIR of anything that should get evaluated. So not just functions and shims, but also const/static initializers, anonymous constants, …
fn generate_nan<F1: Float + FloatConvert<F2>, F2: Float>(
_ecx: &InterpCx<'tcx, Self>,
_inputs: &[F1],
) -> F2
Generate the NaN returned by a float operation, given the list of inputs. (This is all inputs, not just NaN inputs!)
fn before_terminator(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
Called before a basic block terminator is executed.
fn increment_const_eval_counter(
_ecx: &mut InterpCx<'tcx, Self>,
) -> InterpResult<'tcx>
Called when the interpreter encounters a StatementKind::ConstEvalCounter instruction. You can use this to detect long or endlessly running programs.
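A sketch of a simple step counter (the num_steps field is hypothetical, and a real machine would typically turn an excessive count into an interpreter error or a lint rather than just counting; use interp_ok where InterpResult is not a plain Result):

fn increment_const_eval_counter(ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx> {
    // Count executed `ConstEvalCounter` statements to spot runaway evaluation.
    ecx.machine.num_steps += 1;
    Ok(())
}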
fn before_access_global(
_tcx: TyCtxtAt<'tcx>,
_machine: &Self,
_alloc_id: AllocId,
_allocation: ConstAllocation<'tcx>,
_static_def_id: Option<DefId>,
_is_write: bool,
) -> InterpResult<'tcx>
Called before a global allocation is accessed.
def_id is Some if this is the “lazy” allocation of a static.
fn thread_local_static_pointer(
_ecx: &mut InterpCx<'tcx, Self>,
def_id: DefId,
) -> InterpResult<'tcx, Pointer<Self::Provenance>>
Return the AllocId for the given thread-local static in the current thread.
fn eval_inline_asm(
_ecx: &mut InterpCx<'tcx, Self>,
_template: &'tcx [InlineAsmTemplatePiece],
_operands: &[InlineAsmOperand<'tcx>],
_options: InlineAsmOptions,
_targets: &[BasicBlock],
) -> InterpResult<'tcx>
Evaluate the inline assembly.
This should take care of jumping to the next block (one of targets) when asm goto is triggered, targets[0] when the assembly falls through, or diverge in case of naked_asm! or InlineAsmOptions::NORETURN being set.
fn before_memory_read(
_tcx: TyCtxtAt<'tcx>,
_machine: &Self,
_alloc_extra: &Self::AllocExtra,
_prov: (AllocId, Self::ProvenanceExtra),
_range: AllocRange,
) -> InterpResult<'tcx>
Hook for performing extra checks on a memory read access.
This will not be called during validation!
Takes read-only access to the allocation, so that all memory read operations can keep taking &self. Use a RefCell in AllocExtra if you need to mutate.
This is not invoked for ZST accesses, as no read actually happens.
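Because this hook only receives shared references, per-allocation bookkeeping goes behind interior mutability. A sketch that records every read range (MyAllocExtra is a hypothetical type that would serve as the machine's AllocExtra; the hook itself lives in the impl Machine block):

#[derive(Debug, Clone)]
struct MyAllocExtra {
    // Interior mutability lets the read hook update this through `&Self::AllocExtra`.
    reads: std::cell::RefCell<Vec<AllocRange>>,
}

fn before_memory_read(
    _tcx: TyCtxtAt<'tcx>,
    _machine: &Self,
    alloc_extra: &Self::AllocExtra,
    _prov: (AllocId, Self::ProvenanceExtra),
    range: AllocRange,
) -> InterpResult<'tcx> {
    // Log the accessed range; no mutable access to the allocation is needed.
    alloc_extra.reads.borrow_mut().push(range);
    Ok(())
}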
fn before_alloc_read(
_ecx: &InterpCx<'tcx, Self>,
_alloc_id: AllocId,
) -> InterpResult<'tcx>
Hook for performing extra checks on any memory read access that involves an allocation, even ZST reads.
This will not be called during validation!
Used to prevent statics from self-initializing by reading from their own memory as it is being initialized.
fn before_memory_write(
_tcx: TyCtxtAt<'tcx>,
_machine: &mut Self,
_alloc_extra: &mut Self::AllocExtra,
_prov: (AllocId, Self::ProvenanceExtra),
_range: AllocRange,
) -> InterpResult<'tcx>
Hook for performing extra checks on a memory write access. This is not invoked for ZST accesses, as no write actually happens.
fn before_memory_deallocation(
_tcx: TyCtxtAt<'tcx>,
_machine: &mut Self,
_alloc_extra: &mut Self::AllocExtra,
_prov: (AllocId, Self::ProvenanceExtra),
_size: Size,
_align: Align,
_kind: MemoryKind<Self::MemoryKind>,
) -> InterpResult<'tcx>
Hook for performing extra operations on a memory deallocation.
fn retag_ptr_value(
_ecx: &mut InterpCx<'tcx, Self>,
_kind: RetagKind,
val: &ImmTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx, ImmTy<'tcx, Self::Provenance>>
Executes a retagging operation for a single pointer. Returns the possibly adjusted pointer.
fn retag_place_contents(
_ecx: &mut InterpCx<'tcx, Self>,
_kind: RetagKind,
_place: &PlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx>
Executes a retagging operation on a compound value. Replaces all pointers stored in the given place.
fn protect_in_place_function_argument(
ecx: &mut InterpCx<'tcx, Self>,
mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx>
Called on places used for in-place function argument and return value handling.
These places need to be protected to make sure the program cannot tell whether the argument/return value was actually copied or passed in-place.
fn after_stack_push(_ecx: &mut InterpCx<'tcx, Self>) -> InterpResult<'tcx>
Called immediately after a stack frame got pushed and its locals got initialized.
fn before_stack_pop(
_ecx: &InterpCx<'tcx, Self>,
_frame: &Frame<'tcx, Self::Provenance, Self::FrameExtra>,
) -> InterpResult<'tcx>
Called just before the return value is copied to the caller-provided return place.
fn after_stack_pop(
_ecx: &mut InterpCx<'tcx, Self>,
_frame: Frame<'tcx, Self::Provenance, Self::FrameExtra>,
unwinding: bool,
) -> InterpResult<'tcx, ReturnAction>
Called immediately after a stack frame got popped, but before jumping back to the caller.
The locals have already been destroyed!
fn after_local_read(
_ecx: &InterpCx<'tcx, Self>,
_local: Local,
) -> InterpResult<'tcx>
Called immediately after an “immediate” local variable is read (i.e., this is called for reads that do not end up accessing addressable memory).
fn after_local_write(
_ecx: &mut InterpCx<'tcx, Self>,
_local: Local,
_storage_live: bool,
) -> InterpResult<'tcx>
Called immediately after an “immediate” local variable is assigned a new value (i.e., this is called for writes that do not end up in memory).
storage_live indicates whether this is the initial write upon StorageLive.
fn after_local_moved_to_memory(
_ecx: &mut InterpCx<'tcx, Self>,
_local: Local,
_mplace: &MPlaceTy<'tcx, Self::Provenance>,
) -> InterpResult<'tcx>
Called immediately after actual memory was allocated for a local but before the local’s stack frame is updated to point to that memory.
fn eval_mir_constant<F>(
ecx: &InterpCx<'tcx, Self>,
val: Const<'tcx>,
span: Span,
layout: Option<TyAndLayout<'tcx>>,
eval: F,
) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>
where
F: Fn(&InterpCx<'tcx, Self>, Const<'tcx>, Span, Option<TyAndLayout<'tcx>>) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>,
Evaluate the given constant. The eval function will do all the required evaluation, but this hook has the chance to do some pre/postprocessing.
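The provided default effectively just forwards to eval; a machine that wants, say, to cache or trace constant evaluation would wrap that call. A sketch (with the same Ok vs. interp_ok caveat as the other examples):

fn eval_mir_constant<F>(
    ecx: &InterpCx<'tcx, Self>,
    val: Const<'tcx>,
    span: Span,
    layout: Option<TyAndLayout<'tcx>>,
    eval: F,
) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>
where
    F: Fn(&InterpCx<'tcx, Self>, Const<'tcx>, Span, Option<TyAndLayout<'tcx>>) -> InterpResult<'tcx, OpTy<'tcx, Self::Provenance>>,
{
    // Pre-processing (e.g. logging the constant being evaluated) could go here.
    let op = eval(ecx, val, span, layout)?;
    // Post-processing (e.g. caching `op` in the machine) could go here.
    Ok(op)
}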
fn cached_union_data_range<'e>(
_ecx: &'e mut InterpCx<'tcx, Self>,
_ty: Ty<'tcx>,
compute_range: impl FnOnce() -> RangeSet,
) -> Cow<'e, RangeSet>
Dyn Compatibility
This trait is not dyn compatible.
In older versions of Rust, dyn compatibility was called "object safety", so this trait is not object safe.