Do not allocate a second "background" alloc id for the main allocation of a static.
Instead, we re-use the static's alloc id within the interpreter, so that during evaluation the initializer refers to the `Allocation` that exists only inside the interpreter.
@@ -899,7 +899,19 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 .local_to_op(self.frame(), mir::RETURN_PLACE, None)
                 .expect("return place should always be live");
             let dest = self.frame().return_place.clone();
-            let err = self.copy_op_allow_transmute(&op, &dest);
+            let err = if self.stack().len() == 1 {
+                // The initializer of constants and statics will get validated separately
+                // after the constant has been fully evaluated. While we could fall back to the default
+                // code path, that will cause -Zenforce-validity to cycle on static initializers.
+                // Reading from a static's memory is not allowed during its evaluation, and will always
+                // trigger a cycle error. Validation must read from the memory of the current item.
+                // For Miri this means we do not validate the root frame return value,
+                // but Miri anyway calls `read_target_isize` on that so separate validation
+                // is not needed.
+                self.copy_op_no_dest_validation(&op, &dest)
+            } else {
+                self.copy_op_allow_transmute(&op, &dest)
+            };
             trace!("return value: {:?}", self.dump_place(&dest));
             // We delay actually short-circuiting on this error until *after* the stack frame is
             // popped, since we want this error to be attributed to the caller, whose type defines
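For context on why the root frame is special: a static may legitimately contain a pointer to its own memory, so validating its final value would read the very allocation that is still being initialized. A minimal self-referential static in ordinary Rust (the types here are illustrative, not taken from the diff):

```rust
// A static may refer to its own address; validating its final value follows
// `me` straight back into the allocation that is being initialized.
struct SelfRef {
    me: &'static SelfRef,
}

static FOO: SelfRef = SelfRef { me: &FOO };

fn main() {
    assert!(std::ptr::eq(FOO.me, &FOO));
}
```

By contrast, reading the static's *value* during its own evaluation (e.g. `static BAD: usize = BAD;`) is exactly the cycle the comment above refers to, and is rejected.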
@@ -85,6 +85,8 @@ pub enum InternKind {
 ///
 /// This *cannot raise an interpreter error*. Doing so is left to validation, which
 /// tracks where in the value we are and thus can show much better error messages.
+///
+/// For `InternKind::Static` the root allocation will not be interned, but must be handled by the caller.
 #[instrument(level = "debug", skip(ecx))]
 pub fn intern_const_alloc_recursive<
     'mir,
@@ -97,12 +99,12 @@ pub fn intern_const_alloc_recursive<
 ) -> Result<(), ErrorGuaranteed> {
     // We are interning recursively, and for mutability we are distinguishing the "root" allocation
     // that we are starting in, and all other allocations that we are encountering recursively.
-    let (base_mutability, inner_mutability) = match intern_kind {
+    let (base_mutability, inner_mutability, is_static) = match intern_kind {
         InternKind::Constant | InternKind::Promoted => {
             // Completely immutable. Interning anything mutably here can only lead to unsoundness,
             // since all consts are conceptually independent values but share the same underlying
             // memory.
-            (Mutability::Not, Mutability::Not)
+            (Mutability::Not, Mutability::Not, false)
         }
         InternKind::Static(Mutability::Not) => {
             (
@@ -115,22 +117,31 @@ pub fn intern_const_alloc_recursive<
                 // Inner allocations are never mutable. They can only arise via the "tail
                 // expression" / "outer scope" rule, and we treat them consistently with `const`.
                 Mutability::Not,
+                true,
             )
         }
         InternKind::Static(Mutability::Mut) => {
             // Just make everything mutable. We accept code like
             // `static mut X = &mut [42]`, so even inner allocations need to be mutable.
-            (Mutability::Mut, Mutability::Mut)
+            (Mutability::Mut, Mutability::Mut, true)
         }
     };
 
     // Intern the base allocation, and initialize todo list for recursive interning.
     let base_alloc_id = ret.ptr().provenance.unwrap().alloc_id();
+    trace!(?base_alloc_id, ?base_mutability);
     // First we intern the base allocation, as it requires a different mutability.
     // This gives us the initial set of nested allocations, which will then all be processed
     // recursively in the loop below.
-    let mut todo: Vec<_> =
-        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect();
+    let mut todo: Vec<_> = if is_static {
+        // Do not steal the root allocation, we need it later for `take_static_root_alloc`
+        // But still change its mutability to match the requested one.
+        let alloc = ecx.memory.alloc_map.get_mut(&base_alloc_id).unwrap();
+        alloc.1.mutability = base_mutability;
+        alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
+    } else {
+        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect()
+    };
     // We need to distinguish "has just been interned" from "was already in `tcx`",
     // so we track this in a separate set.
     let mut just_interned: FxHashSet<_> = std::iter::once(base_alloc_id).collect();
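The triple chosen here can be summarized in a standalone model. `Mutability` and `InternKind` below are local stand-ins for the rustc types, and the `Static(Not)` base case is paraphrased: the context elided from this hunk chooses the base mutability by whether the type is `Freeze` (i.e. has no interior mutability):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Mutability {
    Not,
    Mut,
}

enum InternKind {
    Constant,
    Promoted,
    Static(Mutability),
}

// `is_freeze`: whether the static's type has no interior mutability.
fn intern_mutability(kind: InternKind, is_freeze: bool) -> (Mutability, Mutability, bool) {
    match kind {
        // Consts share underlying memory, so everything stays immutable.
        InternKind::Constant | InternKind::Promoted => (Mutability::Not, Mutability::Not, false),
        InternKind::Static(Mutability::Not) => {
            // The root stays mutable when the type has interior mutability;
            // inner allocations are treated like `const`.
            let base = if is_freeze { Mutability::Not } else { Mutability::Mut };
            (base, Mutability::Not, true)
        }
        // `static mut X = &mut [42]` is accepted, so everything is mutable.
        InternKind::Static(Mutability::Mut) => (Mutability::Mut, Mutability::Mut, true),
    }
}

fn main() {
    assert_eq!(
        intern_mutability(InternKind::Static(Mutability::Not), true),
        (Mutability::Not, Mutability::Not, true)
    );
    assert_eq!(
        intern_mutability(InternKind::Promoted, true),
        (Mutability::Not, Mutability::Not, false)
    );
}
```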
@@ -148,7 +159,17 @@ pub fn intern_const_alloc_recursive<
     // before validation, and interning doesn't know the type of anything, this means we can't show
     // better errors. Maybe we should consider doing validation before interning in the future.
     while let Some(prov) = todo.pop() {
+        trace!(?prov);
         let alloc_id = prov.alloc_id();
+
+        if base_alloc_id == alloc_id && is_static {
+            // This is a pointer to the static itself. It's ok for a static to refer to itself,
+            // even mutably. Whether that mutable pointer is legal at all is checked in validation.
+            // See tests/ui/statics/recursive_interior_mut.rs for how such a situation can occur.
+            // We also already collected all the nested allocations, so there's no need to do that again.
+            continue;
+        }
+
         // Crucially, we check this *before* checking whether the `alloc_id`
         // has already been interned. The point of this check is to ensure that when
         // there are multiple pointers to the same allocation, they are *all* immutable.
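The referenced test file is not reproduced in this diff; the following sketch is merely in its spirit, showing how a static can hold an interior-mutable pointer back to itself, which is why the loop must skip the root allocation instead of interning it a second time:

```rust
use std::cell::Cell;

struct Node {
    next: Cell<Option<&'static Node>>,
}

// `Cell` is not `Sync`; this single-threaded sketch opts in manually.
unsafe impl Sync for Node {}

// The initializer stores a pointer into `SELF`'s own, not-yet-interned memory.
static SELF: Node = Node { next: Cell::new(Some(&SELF)) };

fn main() {
    assert!(SELF.next.get().is_some());
}
```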
@@ -176,6 +197,7 @@ pub fn intern_const_alloc_recursive<
             // `&None::<Cell<i32>>` lead to promotion that can produce mutable pointers. We rely
             // on the promotion analysis not screwing up to ensure that it is sound to intern
             // promoteds as immutable.
+            trace!("found bad mutable pointer");
             found_bad_mutable_pointer = true;
         }
         if ecx.tcx.try_get_global_alloc(alloc_id).is_some() {
@@ -388,6 +388,8 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
     /// Takes read-only access to the allocation so we can keep all the memory read
     /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
     /// need to mutate.
+    ///
+    /// This is not invoked for ZST accesses, as no read actually happens.
     #[inline(always)]
     fn before_memory_read(
         _tcx: TyCtxtAt<'tcx>,
@@ -399,7 +401,20 @@ pub trait Machine<'mir, 'tcx: 'mir>: Sized {
         Ok(())
     }
 
+    /// Hook for performing extra checks on any memory read access,
+    /// that involves an allocation, even ZST reads.
+    ///
+    /// Used to prevent statics from self-initializing by reading from their own memory
+    /// as it is being initialized.
+    fn before_alloc_read(
+        _ecx: &InterpCx<'mir, 'tcx, Self>,
+        _alloc_id: AllocId,
+    ) -> InterpResult<'tcx> {
+        Ok(())
+    }
+
     /// Hook for performing extra checks on a memory write access.
     /// This is not invoked for ZST accesses, as no write actually happens.
     #[inline(always)]
     fn before_memory_write(
         _tcx: TyCtxtAt<'tcx>,
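The default implementation is a no-op; the point of the hook is that a const-eval machine can override it to reject reads from the static whose initializer is currently being evaluated. A standalone model of that idea, with stand-in types rather than rustc's (the real machine records a `static_root_alloc_id`, as `create_static_alloc` further down shows):

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
struct AllocId(u64);

#[derive(Debug)]
enum EvalError {
    RecursiveStatic,
}

struct Machine {
    // The alloc id of the static currently being initialized, if any.
    static_root_alloc_id: Option<AllocId>,
}

impl Machine {
    // Mirrors `Machine::before_alloc_read`: called for *every* access that
    // involves an allocation, including zero-sized ones.
    fn before_alloc_read(&self, alloc_id: AllocId) -> Result<(), EvalError> {
        if Some(alloc_id) == self.static_root_alloc_id {
            return Err(EvalError::RecursiveStatic);
        }
        Ok(())
    }
}

fn main() {
    let m = Machine { static_root_alloc_id: Some(AllocId(0)) };
    assert!(m.before_alloc_read(AllocId(0)).is_err()); // self-read: cycle error
    assert!(m.before_alloc_read(AllocId(1)).is_ok()); // other allocations are fine
}
```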
@@ -624,19 +624,20 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             size,
             CheckInAllocMsg::MemoryAccessTest,
             |alloc_id, offset, prov| {
+                // We want to call the hook on *all* accesses that involve an AllocId,
+                // including zero-sized accesses. That means we have to do it here
+                // rather than below in the `Some` branch.
+                M::before_alloc_read(self, alloc_id)?;
                 let alloc = self.get_alloc_raw(alloc_id)?;
                 Ok((alloc.size(), alloc.align, (alloc_id, offset, prov, alloc)))
             },
         )?;
 
         if let Some((alloc_id, offset, prov, alloc)) = ptr_and_alloc {
             let range = alloc_range(offset, size);
             M::before_memory_read(self.tcx, &self.machine, &alloc.extra, (alloc_id, prov), range)?;
             Ok(Some(AllocRef { alloc, range, tcx: *self.tcx, alloc_id }))
         } else {
             // Even in this branch we have to be sure that we actually access the allocation, in
             // order to ensure that `static FOO: Type = FOO;` causes a cycle error instead of
             // magically pulling *any* ZST value from the ether. However, the `get_raw` above is
             // always called when `ptr` has an `AllocId`.
             Ok(None)
         }
     }
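The reason the hook runs inside the `check_and_deref_ptr` closure rather than in the `Some` branch: zero-sized reads end up in the `None` branch, yet they still name an allocation. Spelled out as surface Rust, the comment's example is a program that must fail to compile (the diagnostic wording below is a paraphrase, not taken from this diff):

```rust
// Zero bytes are read here, but the initializer still performs a read of
// FOO's own allocation, so `before_alloc_read` can report the self-reference
// instead of conjuring a `()` value "from the ether".
static FOO: () = FOO; // error: static tries to initialize itself from itself
```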
@@ -855,6 +856,21 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         DumpAllocs { ecx: self, allocs }
     }
 
+    /// Print the allocation's bytes, without any nested allocations.
+    pub fn print_alloc_bytes_for_diagnostics(&self, id: AllocId) -> String {
+        // Using the "raw" access to avoid the `before_alloc_read` hook, we specifically
+        // want to be able to read all memory for diagnostics, even if that is cyclic.
+        let alloc = self.get_alloc_raw(id).unwrap();
+        let mut bytes = String::new();
+        if alloc.size() != Size::ZERO {
+            bytes = "\n".into();
+            // FIXME(translation) there might be pieces that are translatable.
+            rustc_middle::mir::pretty::write_allocation_bytes(*self.tcx, alloc, &mut bytes, " ")
+                .unwrap();
+        }
+        bytes
+    }
+
     /// Find leaked allocations. Allocations reachable from `static_roots` or a `Global` allocation
     /// are not considered leaked, as well as leaks whose kind's `may_leak()` returns true.
     pub fn find_leaked_allocations(
@@ -39,4 +39,5 @@ use self::{
 };
 
 pub(crate) use self::intrinsics::eval_nullary_intrinsic;
+pub(crate) use self::util::{create_static_alloc, take_static_root_alloc};
 use eval_context::{from_known_layout, mir_assign_valid_types};
@@ -758,6 +758,22 @@ where
         Ok(())
     }
 
+    /// Copies the data from an operand to a place.
+    /// The layouts of the `src` and `dest` may disagree.
+    /// Does not perform validation of the destination.
+    /// The only known use case for this function is checking the return
+    /// value of a static during stack frame popping.
+    #[inline(always)]
+    pub(super) fn copy_op_no_dest_validation(
+        &mut self,
+        src: &impl Readable<'tcx, M::Provenance>,
+        dest: &impl Writeable<'tcx, M::Provenance>,
+    ) -> InterpResult<'tcx> {
+        self.copy_op_inner(
+            src, dest, /* allow_transmute */ true, /* validate_dest */ false,
+        )
+    }
+
     /// Copies the data from an operand to a place.
     /// The layouts of the `src` and `dest` may disagree.
     #[inline(always)]
@@ -766,17 +782,22 @@ where
         src: &impl Readable<'tcx, M::Provenance>,
         dest: &impl Writeable<'tcx, M::Provenance>,
     ) -> InterpResult<'tcx> {
-        self.copy_op_inner(src, dest, /* allow_transmute */ true)
+        self.copy_op_inner(
+            src, dest, /* allow_transmute */ true, /* validate_dest */ true,
+        )
     }
 
     /// Copies the data from an operand to a place.
     /// `src` and `dest` must have the same layout and the copied value will be validated.
     #[inline(always)]
     pub fn copy_op(
         &mut self,
         src: &impl Readable<'tcx, M::Provenance>,
         dest: &impl Writeable<'tcx, M::Provenance>,
     ) -> InterpResult<'tcx> {
-        self.copy_op_inner(src, dest, /* allow_transmute */ false)
+        self.copy_op_inner(
+            src, dest, /* allow_transmute */ false, /* validate_dest */ true,
+        )
     }
 
     /// Copies the data from an operand to a place.
@@ -788,6 +809,7 @@ where
         src: &impl Readable<'tcx, M::Provenance>,
         dest: &impl Writeable<'tcx, M::Provenance>,
         allow_transmute: bool,
+        validate_dest: bool,
     ) -> InterpResult<'tcx> {
         // Generally for transmutation, data must be valid both at the old and new type.
         // But if the types are the same, the 2nd validation below suffices.
@@ -798,7 +820,7 @@ where
         // Do the actual copy.
         self.copy_op_no_validate(src, dest, allow_transmute)?;
 
-        if M::enforce_validity(self, dest.layout()) {
+        if validate_dest && M::enforce_validity(self, dest.layout()) {
             // Data got changed, better make sure it matches the type!
             self.validate_operand(&dest.to_op(self)?)?;
         }
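With `validate_dest` threaded through, the `copy_op` family now spans three combinations. A standalone summary model (the names below are local stand-ins for the `InterpCx` methods, not the real API):

```rust
#[derive(Clone, Copy)]
struct CopyFlags {
    allow_transmute: bool,
    validate_dest: bool,
}

// copy_op: layouts must match, destination validated.
const COPY_OP: CopyFlags = CopyFlags { allow_transmute: false, validate_dest: true };
// copy_op_allow_transmute: layouts may differ, destination validated.
const COPY_OP_ALLOW_TRANSMUTE: CopyFlags = CopyFlags { allow_transmute: true, validate_dest: true };
// copy_op_no_dest_validation: layouts may differ, destination NOT validated;
// only used for the root frame's return value of a static.
const COPY_OP_NO_DEST_VALIDATION: CopyFlags =
    CopyFlags { allow_transmute: true, validate_dest: false };

// Mirrors the new gate in `copy_op_inner`: validation now also requires the flag.
fn validates_dest(flags: CopyFlags, machine_enforces_validity: bool) -> bool {
    flags.validate_dest && machine_enforces_validity
}

fn main() {
    assert!(validates_dest(COPY_OP, true));
    assert!(validates_dest(COPY_OP_ALLOW_TRANSMUTE, true));
    // Even under -Zenforce-validity, the static root's return value is skipped.
    assert!(!validates_dest(COPY_OP_NO_DEST_VALIDATION, true));
    assert!(COPY_OP_ALLOW_TRANSMUTE.allow_transmute && !COPY_OP.allow_transmute);
}
```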
@@ -1,9 +1,15 @@
-use rustc_middle::mir::interpret::InterpResult;
+use crate::const_eval::CompileTimeEvalContext;
+use crate::interpret::{MemPlaceMeta, MemoryKind};
+use rustc_middle::mir::interpret::{AllocId, Allocation, InterpResult, Pointer};
+use rustc_middle::ty::layout::TyAndLayout;
 use rustc_middle::ty::{
     self, Ty, TyCtxt, TypeSuperVisitable, TypeVisitable, TypeVisitableExt, TypeVisitor,
 };
+use rustc_span::def_id::DefId;
 use std::ops::ControlFlow;
 
+use super::MPlaceTy;
+
 /// Checks whether a type contains generic parameters which must be instantiated.
 ///
 /// In case it does, returns a `TooGeneric` const eval error. Note that due to polymorphization
@@ -73,3 +79,23 @@ where
         Ok(())
     }
 }
+
+pub(crate) fn take_static_root_alloc<'mir, 'tcx: 'mir>(
+    ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
+    alloc_id: AllocId,
+) -> Allocation {
+    ecx.memory.alloc_map.swap_remove(&alloc_id).unwrap().1
+}
+
+pub(crate) fn create_static_alloc<'mir, 'tcx: 'mir>(
+    ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
+    static_def_id: DefId,
+    layout: TyAndLayout<'tcx>,
+) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
+    let alloc = Allocation::try_uninit(layout.size, layout.align.abi)?;
+    let alloc_id = ecx.tcx.reserve_and_set_static_alloc(static_def_id);
+    assert_eq!(ecx.machine.static_root_alloc_id, None);
+    ecx.machine.static_root_alloc_id = Some(alloc_id);
+    assert!(ecx.memory.alloc_map.insert(alloc_id, (MemoryKind::Stack, alloc)).is_none());
+    Ok(ecx.ptr_with_meta_to_mplace(Pointer::from(alloc_id).into(), MemPlaceMeta::None, layout))
+}
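Taken together, these helpers bracket the evaluation of a static: `create_static_alloc` backs the static's real alloc id with an interpreter-local `Allocation` (the commit's point: no second "background" id), and `take_static_root_alloc` removes it afterwards so the caller can intern the root. A standalone model of that lifecycle, with stand-in types:

```rust
use std::collections::HashMap;

type AllocId = u64;

#[derive(Debug, PartialEq)]
struct Allocation(Vec<u8>);

struct Memory {
    alloc_map: HashMap<AllocId, Allocation>,
}

// Models `create_static_alloc`: the static's own id is backed by an
// interpreter-local allocation; no separate "background" id is created.
fn create_static_alloc(mem: &mut Memory, static_id: AllocId, size: usize) {
    let old = mem.alloc_map.insert(static_id, Allocation(vec![0; size]));
    assert!(old.is_none(), "the root allocation may only be created once");
}

// Models `take_static_root_alloc`: after evaluation, and after interning of
// the nested allocations, the caller takes the root out of the interpreter.
fn take_static_root_alloc(mem: &mut Memory, static_id: AllocId) -> Allocation {
    mem.alloc_map.remove(&static_id).expect("interning must not steal the root")
}

fn main() {
    let mut mem = Memory { alloc_map: HashMap::new() };
    create_static_alloc(&mut mem, 42, 8);
    // ...the initializer is evaluated, writing through alloc id 42...
    let root = take_static_root_alloc(&mut mem, 42);
    assert_eq!(root, Allocation(vec![0; 8]));
}
```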
@@ -27,9 +27,9 @@ use rustc_target::abi::{
 use std::hash::Hash;
 
 use super::{
-    format_interp_error, AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy, Immediate, InterpCx,
-    InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy, Pointer, Projectable, Scalar,
-    ValueVisitor,
+    format_interp_error, machine::AllocMap, AllocId, CheckInAllocMsg, GlobalAlloc, ImmTy,
+    Immediate, InterpCx, InterpResult, MPlaceTy, Machine, MemPlaceMeta, OpTy, Pointer, Projectable,
+    Scalar, ValueVisitor,
 };
 
 // for the validation errors
@@ -712,11 +712,14 @@ impl<'rt, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> ValidityVisitor<'rt, 'mir, '
     fn in_mutable_memory(&self, op: &OpTy<'tcx, M::Provenance>) -> bool {
         if let Some(mplace) = op.as_mplace_or_imm().left() {
             if let Some(alloc_id) = mplace.ptr().provenance.and_then(|p| p.get_alloc_id()) {
-                if self.ecx.tcx.global_alloc(alloc_id).unwrap_memory().inner().mutability
-                    == Mutability::Mut
-                {
-                    return true;
-                }
+                let mutability = match self.ecx.tcx.global_alloc(alloc_id) {
+                    GlobalAlloc::Static(_) => {
+                        self.ecx.memory.alloc_map.get(alloc_id).unwrap().1.mutability
+                    }
+                    GlobalAlloc::Memory(alloc) => alloc.inner().mutability,
+                    _ => span_bug!(self.ecx.tcx.span, "not a memory allocation"),
+                };
+                return mutability == Mutability::Mut;
             }
         }
         false
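The motivation for this last change: while a static is still being evaluated, `tcx.global_alloc` for its id yields `GlobalAlloc::Static`, which carries no memory to call `unwrap_memory` on; the root's bytes and mutability now live in the interpreter's own `alloc_map`. A standalone model of the updated lookup, with stand-in types:

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Debug)]
enum Mutability {
    Not,
    Mut,
}

// Stand-in for `tcx.global_alloc(id)`.
enum GlobalAlloc {
    Static,             // a static; its Allocation is not in `tcx` during evaluation
    Memory(Mutability), // interned memory with a known mutability
}

fn in_mutable_memory(
    alloc_id: u64,
    global: &HashMap<u64, GlobalAlloc>,
    // Stand-in for `ecx.memory.alloc_map`, holding the static root under evaluation.
    local_alloc_map: &HashMap<u64, Mutability>,
) -> bool {
    let mutability = match global.get(&alloc_id).expect("allocation must be known") {
        // The static being evaluated lives in the interpreter's own map.
        GlobalAlloc::Static => local_alloc_map[&alloc_id],
        GlobalAlloc::Memory(m) => *m,
    };
    mutability == Mutability::Mut
}

fn main() {
    let global =
        HashMap::from([(0, GlobalAlloc::Static), (1, GlobalAlloc::Memory(Mutability::Not))]);
    let local = HashMap::from([(0, Mutability::Mut)]);
    assert!(in_mutable_memory(0, &global, &local)); // e.g. a `static mut` root
    assert!(!in_mutable_memory(1, &global, &local)); // interned and immutable
}
```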