Auto merge of #60026 - Aaron1011:feature/miri-unwind, r=RalfJung,oli-obk

Add hooks for Miri panic unwinding

This commits adds in some additional hooks to allow Miri to properly
handle panic unwinding. None of this should have any impact on CTFE mode

This supports https://github.com/rust-lang/miri/pull/693
This commit is contained in:
bors
2019-11-12 21:27:04 +00:00
16 changed files with 306 additions and 113 deletions

View File

@@ -1348,6 +1348,11 @@ extern "rust-intrinsic" {
/// See documentation of `<*const T>::offset_from` for details. /// See documentation of `<*const T>::offset_from` for details.
#[cfg(not(bootstrap))] #[cfg(not(bootstrap))]
pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize; pub fn ptr_offset_from<T>(ptr: *const T, base: *const T) -> isize;
/// Internal hook used by Miri to implement unwinding.
/// Perma-unstable: do not use
#[cfg(not(bootstrap))]
pub fn miri_start_panic(data: *mut (dyn crate::any::Any + crate::marker::Send)) -> !;
} }
// Some functions are defined here because they accidentally got made // Some functions are defined here because they accidentally got made

View File

@@ -36,7 +36,10 @@ use core::raw;
use core::panic::BoxMeUp; use core::panic::BoxMeUp;
cfg_if::cfg_if! { cfg_if::cfg_if! {
if #[cfg(target_os = "emscripten")] { if #[cfg(miri)] {
#[path = "miri.rs"]
mod imp;
} else if #[cfg(target_os = "emscripten")] {
#[path = "emcc.rs"] #[path = "emcc.rs"]
mod imp; mod imp;
} else if #[cfg(target_arch = "wasm32")] { } else if #[cfg(target_arch = "wasm32")] {

View File

@@ -0,0 +1,23 @@
use core::any::Any;
use alloc::boxed::Box;
/// Returns the current panic payload pointer.
/// This Miri stub keeps no thread-local payload state, so the
/// result is always the null pointer.
pub fn payload() -> *mut u8 {
    let no_payload: *mut u8 = core::ptr::null_mut();
    no_payload
}
/// Begin unwinding a panic when running under Miri.
/// Hands the boxed payload to the `miri_start_panic` intrinsic, which the
/// Miri interpreter intercepts to drive unwinding; this never returns.
pub unsafe fn panic(data: Box<dyn Any + Send>) -> ! {
// `Box::into_raw` transfers ownership of the payload to the interpreter;
// it is reclaimed later by `cleanup` via `Box::from_raw`.
core::intrinsics::miri_start_panic(Box::into_raw(data))
}
/// Reclaim the panic payload once unwinding reaches a catch site.
///
/// # Safety
/// `ptr` must be the exact pointer previously produced by
/// `Box::into_raw` in `panic` above, passed through unchanged.
pub unsafe fn cleanup(ptr: *mut u8) -> Box<dyn Any + Send> {
// NOTE(review): this rebuilds a box from a `*mut u8` and relies on the
// return-type coercion to `Box<dyn Any + Send>`; it assumes Miri hands
// back the original payload pointer unmodified — confirm against the
// Miri-side implementation.
Box::from_raw(ptr)
}
// This is required by the compiler to exist (e.g., it's a lang item),
// but is never used by Miri. Therefore, we just use a stub here.
#[lang = "eh_personality"]
#[cfg(not(test))]
fn rust_eh_personality() {
// Reaching this would be a bug: Miri never invokes the personality
// routine, so abort rather than continue in an undefined state.
unsafe { core::intrinsics::abort() }
}

View File

@@ -528,6 +528,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
_ => FnAbi::new(&bx, sig, &extra_args) _ => FnAbi::new(&bx, sig, &extra_args)
}; };
// This should never be reachable at runtime:
// We should only emit a call to this intrinsic in #[cfg(miri)] mode,
// which means that we will never actually use the generated object files
// (we will just be interpreting the MIR)
//
// Note that we still need to be able to codegen *something* for this intrinsic:
// Miri currently uses Xargo to build a special libstd. As a side effect,
// we generate normal object files for libstd - while these are never used,
// we still need to be able to build them.
if intrinsic == Some("miri_start_panic") {
bx.abort();
bx.unreachable();
return;
}
// Emit a panic or a no-op for `panic_if_uninhabited`. // Emit a panic or a no-op for `panic_if_uninhabited`.
if intrinsic == Some("panic_if_uninhabited") { if intrinsic == Some("panic_if_uninhabited") {
let ty = instance.unwrap().substs.type_at(0); let ty = instance.unwrap().substs.type_at(0);

View File

@@ -325,6 +325,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
args: &[OpTy<'tcx>], args: &[OpTy<'tcx>],
dest: Option<PlaceTy<'tcx>>, dest: Option<PlaceTy<'tcx>>,
ret: Option<mir::BasicBlock>, ret: Option<mir::BasicBlock>,
_unwind: Option<mir::BasicBlock> // unwinding is not supported in consts
) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> { ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
debug!("eval_fn_call: {:?}", instance); debug!("eval_fn_call: {:?}", instance);
// Only check non-glue functions // Only check non-glue functions
@@ -336,7 +337,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
// Some functions we support even if they are non-const -- but avoid testing // Some functions we support even if they are non-const -- but avoid testing
// that for const fn! We certainly do *not* want to actually call the fn // that for const fn! We certainly do *not* want to actually call the fn
// though, so be sure we return here. // though, so be sure we return here.
return if ecx.hook_fn(instance, args, dest)? { return if ecx.hook_panic_fn(instance, args, dest)? {
ecx.goto_block(ret)?; // fully evaluated and done ecx.goto_block(ret)?; // fully evaluated and done
Ok(None) Ok(None)
} else { } else {
@@ -374,7 +375,9 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
span: Span, span: Span,
instance: ty::Instance<'tcx>, instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx>], args: &[OpTy<'tcx>],
dest: PlaceTy<'tcx>, dest: Option<PlaceTy<'tcx>>,
_ret: Option<mir::BasicBlock>,
_unwind: Option<mir::BasicBlock>
) -> InterpResult<'tcx> { ) -> InterpResult<'tcx> {
if ecx.emulate_intrinsic(span, instance, args, dest)? { if ecx.emulate_intrinsic(span, instance, args, dest)? {
return Ok(()); return Ok(());
@@ -469,12 +472,6 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
Ok(()) Ok(())
} }
/// Called immediately before a stack frame gets popped.
#[inline(always)]
fn stack_pop(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _extra: ()) -> InterpResult<'tcx> {
Ok(())
}
} }
/// Extracts a field of a (variant of a) const. /// Extracts a field of a (variant of a) const.

View File

@@ -21,7 +21,7 @@ use rustc_data_structures::fx::FxHashMap;
use super::{ use super::{
Immediate, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef, Immediate, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
Memory, Machine Memory, Machine, StackPopInfo
}; };
pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> { pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
@@ -60,6 +60,9 @@ pub struct Frame<'mir, 'tcx, Tag=(), Extra=()> {
/// The span of the call site. /// The span of the call site.
pub span: source_map::Span, pub span: source_map::Span,
/// Extra data for the machine.
pub extra: Extra,
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
// Return place and locals // Return place and locals
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
@@ -82,13 +85,12 @@ pub struct Frame<'mir, 'tcx, Tag=(), Extra=()> {
//////////////////////////////////////////////////////////////////////////////// ////////////////////////////////////////////////////////////////////////////////
/// The block that is currently executed (or will be executed after the above call stacks /// The block that is currently executed (or will be executed after the above call stacks
/// return). /// return).
pub block: mir::BasicBlock, /// If this is `None`, we are unwinding and this function doesn't need any clean-up.
/// Just continue the same as with `Resume`.
pub block: Option<mir::BasicBlock>,
/// The index of the currently evaluated statement. /// The index of the currently evaluated statement.
pub stmt: usize, pub stmt: usize,
/// Extra data for the machine.
pub extra: Extra,
} }
#[derive(Clone, Eq, PartialEq, Debug)] // Miri debug-prints these #[derive(Clone, Eq, PartialEq, Debug)] // Miri debug-prints these
@@ -96,7 +98,9 @@ pub enum StackPopCleanup {
/// Jump to the next block in the caller, or cause UB if None (that's a function /// Jump to the next block in the caller, or cause UB if None (that's a function
/// that may never return). Also store layout of return place so /// that may never return). Also store layout of return place so
/// we can validate it at that layout. /// we can validate it at that layout.
Goto(Option<mir::BasicBlock>), /// `ret` stores the block we jump to on a normal return, while `unwind`
/// stores the block used for cleanup during unwinding
Goto { ret: Option<mir::BasicBlock>, unwind: Option<mir::BasicBlock> },
/// Just do nothing: Used by Main and for the box_alloc hook in miri. /// Just do nothing: Used by Main and for the box_alloc hook in miri.
/// `cleanup` says whether locals are deallocated. Static computation /// `cleanup` says whether locals are deallocated. Static computation
/// wants them leaked to intern what they need (and just throw away /// wants them leaked to intern what they need (and just throw away
@@ -489,7 +493,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let extra = M::stack_push(self)?; let extra = M::stack_push(self)?;
self.stack.push(Frame { self.stack.push(Frame {
body, body,
block: mir::START_BLOCK, block: Some(mir::START_BLOCK),
return_to_block, return_to_block,
return_place, return_place,
// empty local array, we fill it in below, after we are inside the stack frame and // empty local array, we fill it in below, after we are inside the stack frame and
@@ -547,29 +551,87 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} }
} }
pub(super) fn pop_stack_frame(&mut self) -> InterpResult<'tcx> { /// Pops the current frame from the stack, deallocating the
info!("LEAVING({}) {}", self.cur_frame(), self.frame().instance); /// memory for allocated locals.
///
/// If `unwinding` is `false`, then we are performing a normal return
/// from a function. In this case, we jump back into the frame of the caller,
/// and continue execution as normal.
///
/// If `unwinding` is `true`, then we are in the middle of a panic,
/// and need to unwind this frame. In this case, we jump to the
/// `cleanup` block for the function, which is responsible for running
/// `Drop` impls for any locals that have been initialized at this point.
/// The cleanup block ends with a special `Resume` terminator, which will
/// cause us to continue unwinding.
pub(super) fn pop_stack_frame(
&mut self,
unwinding: bool
) -> InterpResult<'tcx> {
info!("LEAVING({}) {} (unwinding = {})",
self.cur_frame(), self.frame().instance, unwinding);
// Sanity check `unwinding`.
assert_eq!(
unwinding,
match self.frame().block {
None => true,
Some(block) => self.body().basic_blocks()[block].is_cleanup
}
);
::log_settings::settings().indentation -= 1; ::log_settings::settings().indentation -= 1;
let frame = self.stack.pop().expect( let frame = self.stack.pop().expect(
"tried to pop a stack frame, but there were none", "tried to pop a stack frame, but there were none",
); );
M::stack_pop(self, frame.extra)?; let stack_pop_info = M::stack_pop(self, frame.extra, unwinding)?;
// Abort early if we do not want to clean up: We also avoid validation in that case, if let (false, StackPopInfo::StopUnwinding) = (unwinding, stack_pop_info) {
bug!("Attempted to stop unwinding while there is no unwinding!");
}
// Now where do we jump next?
// Determine if we leave this function normally or via unwinding.
let cur_unwinding = if let StackPopInfo::StopUnwinding = stack_pop_info {
false
} else {
unwinding
};
// Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
// In that case, we return early. We also avoid validation in that case,
// because this is CTFE and the final value will be thoroughly validated anyway. // because this is CTFE and the final value will be thoroughly validated anyway.
match frame.return_to_block { let (cleanup, next_block) = match frame.return_to_block {
StackPopCleanup::Goto(_) => {}, StackPopCleanup::Goto { ret, unwind } => {
StackPopCleanup::None { cleanup } => { (true, Some(if cur_unwinding { unwind } else { ret }))
},
StackPopCleanup::None { cleanup, .. } => (cleanup, None)
};
if !cleanup { if !cleanup {
assert!(self.stack.is_empty(), "only the topmost frame should ever be leaked"); assert!(self.stack.is_empty(), "only the topmost frame should ever be leaked");
assert!(next_block.is_none(), "tried to skip cleanup when we have a next block!");
// Leak the locals, skip validation. // Leak the locals, skip validation.
return Ok(()); return Ok(());
} }
}
} // Cleanup: deallocate all locals that are backed by an allocation.
// Deallocate all locals that are backed by an allocation.
for local in frame.locals { for local in frame.locals {
self.deallocate_local(local.value)?; self.deallocate_local(local.value)?;
} }
trace!("StackPopCleanup: {:?} StackPopInfo: {:?} cur_unwinding = {:?}",
frame.return_to_block, stack_pop_info, cur_unwinding);
if cur_unwinding {
// Follow the unwind edge.
let unwind = next_block.expect("Encounted StackPopCleanup::None when unwinding!");
let next_frame = self.frame_mut();
// If `unwind` is `None`, we'll leave that function immediately again.
next_frame.block = unwind;
next_frame.stmt = 0;
} else {
// Follow the normal return edge.
// Validate the return value. Do this after deallocating so that we catch dangling // Validate the return value. Do this after deallocating so that we catch dangling
// references. // references.
if let Some(return_place) = frame.return_place { if let Some(return_place) = frame.return_place {
@@ -589,18 +651,18 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} }
} else { } else {
// Uh, that shouldn't happen... the function did not intend to return // Uh, that shouldn't happen... the function did not intend to return
throw_ub!(Unreachable) throw_ub!(Unreachable);
} }
// Jump to new block -- *after* validation so that the spans make more sense. // Jump to new block -- *after* validation so that the spans make more sense.
match frame.return_to_block { if let Some(ret) = next_block {
StackPopCleanup::Goto(block) => { self.goto_block(ret)?;
self.goto_block(block)?;
} }
StackPopCleanup::None { .. } => {}
} }
if self.stack.len() > 0 { if self.stack.len() > 0 {
info!("CONTINUING({}) {}", self.cur_frame(), self.frame().instance); info!("CONTINUING({}) {} (unwinding = {})",
self.cur_frame(), self.frame().instance, cur_unwinding);
} }
Ok(()) Ok(())
@@ -745,16 +807,20 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} else { } else {
last_span = Some(span); last_span = Some(span);
} }
let lint_root = block.and_then(|block| {
let block = &body.basic_blocks()[block]; let block = &body.basic_blocks()[block];
let source_info = if stmt < block.statements.len() { let source_info = if stmt < block.statements.len() {
block.statements[stmt].source_info block.statements[stmt].source_info
} else { } else {
block.terminator().source_info block.terminator().source_info
}; };
let lint_root = match body.source_scope_local_data { match body.source_scope_local_data {
mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root), mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
mir::ClearCrossCrate::Clear => None, mir::ClearCrossCrate::Clear => None,
}; }
});
frames.push(FrameInfo { call_site: span, instance, lint_root }); frames.push(FrameInfo { call_site: span, instance, lint_root });
} }
trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span); trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);

View File

@@ -91,11 +91,17 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
span: Span, span: Span,
instance: ty::Instance<'tcx>, instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, M::PointerTag>], args: &[OpTy<'tcx, M::PointerTag>],
dest: PlaceTy<'tcx, M::PointerTag>, dest: Option<PlaceTy<'tcx, M::PointerTag>>,
) -> InterpResult<'tcx, bool> { ) -> InterpResult<'tcx, bool> {
let substs = instance.substs; let substs = instance.substs;
// We currently do not handle any diverging intrinsics.
let dest = match dest {
Some(dest) => dest,
None => return Ok(false)
};
let intrinsic_name = &*self.tcx.item_name(instance.def_id()).as_str(); let intrinsic_name = &*self.tcx.item_name(instance.def_id()).as_str();
match intrinsic_name { match intrinsic_name {
"caller_location" => { "caller_location" => {
let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span); let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
@@ -347,9 +353,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok(true) Ok(true)
} }
/// "Intercept" a function call because we have something special to do for it. /// "Intercept" a function call to a panic-related function
/// because we have something special to do for it.
/// Returns `true` if an intercept happened. /// Returns `true` if an intercept happened.
pub fn hook_fn( pub fn hook_panic_fn(
&mut self, &mut self,
instance: ty::Instance<'tcx>, instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, M::PointerTag>], args: &[OpTy<'tcx, M::PointerTag>],

View File

@@ -16,6 +16,21 @@ use super::{
Frame, Operand, Frame, Operand,
}; };
/// Data returned by `Machine::stack_pop`,
/// to provide further control over the popping of the stack frame.
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
pub enum StackPopInfo {
/// Indicates that no special handling should be
/// done - we'll either return normally or unwind
/// based on the terminator for the function
/// we're leaving.
Normal,
/// Indicates that we should stop unwinding,
/// as we've reached a catch frame.
StopUnwinding
}
/// Whether this kind of memory is allowed to leak /// Whether this kind of memory is allowed to leak
pub trait MayLeak: Copy { pub trait MayLeak: Copy {
fn may_leak(self) -> bool; fn may_leak(self) -> bool;
@@ -137,6 +152,7 @@ pub trait Machine<'mir, 'tcx>: Sized {
args: &[OpTy<'tcx, Self::PointerTag>], args: &[OpTy<'tcx, Self::PointerTag>],
dest: Option<PlaceTy<'tcx, Self::PointerTag>>, dest: Option<PlaceTy<'tcx, Self::PointerTag>>,
ret: Option<mir::BasicBlock>, ret: Option<mir::BasicBlock>,
unwind: Option<mir::BasicBlock>
) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>>; ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>>;
/// Execute `fn_val`. it is the hook's responsibility to advance the instruction /// Execute `fn_val`. it is the hook's responsibility to advance the instruction
@@ -156,7 +172,9 @@ pub trait Machine<'mir, 'tcx>: Sized {
span: Span, span: Span,
instance: ty::Instance<'tcx>, instance: ty::Instance<'tcx>,
args: &[OpTy<'tcx, Self::PointerTag>], args: &[OpTy<'tcx, Self::PointerTag>],
dest: PlaceTy<'tcx, Self::PointerTag>, dest: Option<PlaceTy<'tcx, Self::PointerTag>>,
ret: Option<mir::BasicBlock>,
unwind: Option<mir::BasicBlock>,
) -> InterpResult<'tcx>; ) -> InterpResult<'tcx>;
/// Called for read access to a foreign static item. /// Called for read access to a foreign static item.
@@ -251,9 +269,13 @@ pub trait Machine<'mir, 'tcx>: Sized {
/// Called immediately after a stack frame gets popped /// Called immediately after a stack frame gets popped
fn stack_pop( fn stack_pop(
ecx: &mut InterpCx<'mir, 'tcx, Self>, _ecx: &mut InterpCx<'mir, 'tcx, Self>,
extra: Self::FrameExtra, _extra: Self::FrameExtra,
) -> InterpResult<'tcx>; _unwinding: bool
) -> InterpResult<'tcx, StackPopInfo> {
// By default, we do not support unwinding from panics
Ok(StackPopInfo::Normal)
}
fn int_to_ptr( fn int_to_ptr(
_mem: &Memory<'mir, 'tcx, Self>, _mem: &Memory<'mir, 'tcx, Self>,

View File

@@ -26,7 +26,7 @@ pub use self::place::{Place, PlaceTy, MemPlace, MPlaceTy};
pub use self::memory::{Memory, MemoryKind, AllocCheck, FnVal}; pub use self::memory::{Memory, MemoryKind, AllocCheck, FnVal};
pub use self::machine::{Machine, AllocMap, MayLeak}; pub use self::machine::{Machine, AllocMap, MayLeak, StackPopInfo};
pub use self::operand::{ScalarMaybeUndef, Immediate, ImmTy, Operand, OpTy}; pub use self::operand::{ScalarMaybeUndef, Immediate, ImmTy, Operand, OpTy};

View File

@@ -315,7 +315,7 @@ impl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation
} }
impl_stable_hash_for!(enum crate::interpret::eval_context::StackPopCleanup { impl_stable_hash_for!(enum crate::interpret::eval_context::StackPopCleanup {
Goto(block), Goto { ret, unwind },
None { cleanup }, None { cleanup },
}); });
@@ -326,7 +326,7 @@ struct FrameSnapshot<'a, 'tcx> {
return_to_block: &'a StackPopCleanup, return_to_block: &'a StackPopCleanup,
return_place: Option<Place<(), AllocIdSnapshot<'a>>>, return_place: Option<Place<(), AllocIdSnapshot<'a>>>,
locals: IndexVec<mir::Local, LocalValue<(), AllocIdSnapshot<'a>>>, locals: IndexVec<mir::Local, LocalValue<(), AllocIdSnapshot<'a>>>,
block: &'a mir::BasicBlock, block: Option<mir::BasicBlock>,
stmt: usize, stmt: usize,
} }
@@ -364,7 +364,7 @@ impl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>
instance: *instance, instance: *instance,
span: *span, span: *span,
return_to_block, return_to_block,
block, block: *block,
stmt: *stmt, stmt: *stmt,
return_place: return_place.map(|r| r.snapshot(ctx)), return_place: return_place.map(|r| r.snapshot(ctx)),
locals: locals.iter().map(|local| local.snapshot(ctx)).collect(), locals: locals.iter().map(|local| local.snapshot(ctx)).collect(),

View File

@@ -49,7 +49,16 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
return Ok(false); return Ok(false);
} }
let block = self.frame().block; let block = match self.frame().block {
Some(block) => block,
None => {
// We are unwinding and this fn has no cleanup code.
// Just go on unwinding.
trace!("unwinding: skipping frame");
self.pop_stack_frame(/* unwinding */ true)?;
return Ok(true)
}
};
let stmt_id = self.frame().stmt; let stmt_id = self.frame().stmt;
let body = self.body(); let body = self.body();
let basic_block = &body.basic_blocks()[block]; let basic_block = &body.basic_blocks()[block];
@@ -290,6 +299,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
let old_stack = self.cur_frame(); let old_stack = self.cur_frame();
let old_bb = self.frame().block; let old_bb = self.frame().block;
self.eval_terminator(terminator)?; self.eval_terminator(terminator)?;
if !self.stack.is_empty() { if !self.stack.is_empty() {
// This should change *something* // This should change *something*

View File

@@ -15,7 +15,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
#[inline] #[inline]
pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> { pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
if let Some(target) = target { if let Some(target) = target {
self.frame_mut().block = target; self.frame_mut().block = Some(target);
self.frame_mut().stmt = 0; self.frame_mut().stmt = 0;
Ok(()) Ok(())
} else { } else {
@@ -31,7 +31,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
match terminator.kind { match terminator.kind {
Return => { Return => {
self.frame().return_place.map(|r| self.dump_place(*r)); self.frame().return_place.map(|r| self.dump_place(*r));
self.pop_stack_frame()? self.pop_stack_frame(/* unwinding */ false)?
} }
Goto { target } => self.goto_block(Some(target))?, Goto { target } => self.goto_block(Some(target))?,
@@ -67,6 +67,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
ref func, ref func,
ref args, ref args,
ref destination, ref destination,
ref cleanup,
.. ..
} => { } => {
let (dest, ret) = match *destination { let (dest, ret) = match *destination {
@@ -98,13 +99,14 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&args[..], &args[..],
dest, dest,
ret, ret,
*cleanup
)?; )?;
} }
Drop { Drop {
ref location, ref location,
target, target,
.. unwind,
} => { } => {
// FIXME(CTFE): forbid drop in const eval // FIXME(CTFE): forbid drop in const eval
let place = self.eval_place(location)?; let place = self.eval_place(location)?;
@@ -117,6 +119,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
instance, instance,
terminator.source_info.span, terminator.source_info.span,
target, target,
unwind
)?; )?;
} }
@@ -160,10 +163,21 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} }
} }
// When we encounter Resume, we've finished unwinding
// cleanup for the current stack frame. We pop it in order
// to continue unwinding the next frame
Resume => {
trace!("unwinding: resuming from cleanup");
// By definition, a Resume terminator means
// that we're unwinding
self.pop_stack_frame(/* unwinding */ true)?;
return Ok(())
},
Yield { .. } | Yield { .. } |
GeneratorDrop | GeneratorDrop |
DropAndReplace { .. } | DropAndReplace { .. } |
Resume |
Abort => unimplemented!("{:#?}", terminator.kind), Abort => unimplemented!("{:#?}", terminator.kind),
FalseEdges { .. } => bug!("should have been eliminated by\ FalseEdges { .. } => bug!("should have been eliminated by\
`simplify_branches` mir pass"), `simplify_branches` mir pass"),
@@ -237,6 +251,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
args: &[OpTy<'tcx, M::PointerTag>], args: &[OpTy<'tcx, M::PointerTag>],
dest: Option<PlaceTy<'tcx, M::PointerTag>>, dest: Option<PlaceTy<'tcx, M::PointerTag>>,
ret: Option<mir::BasicBlock>, ret: Option<mir::BasicBlock>,
unwind: Option<mir::BasicBlock>
) -> InterpResult<'tcx> { ) -> InterpResult<'tcx> {
trace!("eval_fn_call: {:#?}", fn_val); trace!("eval_fn_call: {:#?}", fn_val);
@@ -249,17 +264,23 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
match instance.def { match instance.def {
ty::InstanceDef::Intrinsic(..) => { ty::InstanceDef::Intrinsic(..) => {
// The intrinsic itself cannot diverge, so if we got here without a return let old_stack = self.cur_frame();
// place... (can happen e.g., for transmute returning `!`) let old_bb = self.frame().block;
let dest = match dest { M::call_intrinsic(self, span, instance, args, dest, ret, unwind)?;
Some(dest) => dest,
None => throw_ub!(Unreachable)
};
M::call_intrinsic(self, span, instance, args, dest)?;
// No stack frame gets pushed, the main loop will just act as if the // No stack frame gets pushed, the main loop will just act as if the
// call completed. // call completed.
if ret.is_some() {
self.goto_block(ret)?; self.goto_block(ret)?;
self.dump_place(*dest); } else {
// If this intrinsic call doesn't have a ret block,
// then the intrinsic implementation should have
// changed the stack frame (otherwise, we'll end
// up trying to execute this intrinsic call again)
debug_assert!(self.cur_frame() != old_stack || self.frame().block != old_bb);
}
if let Some(dest) = dest {
self.dump_place(*dest)
}
Ok(()) Ok(())
} }
ty::InstanceDef::VtableShim(..) | ty::InstanceDef::VtableShim(..) |
@@ -294,7 +315,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
} }
// We need MIR for this fn // We need MIR for this fn
let body = match M::find_fn(self, instance, args, dest, ret)? { let body = match M::find_fn(self, instance, args, dest, ret, unwind)? {
Some(body) => body, Some(body) => body,
None => return Ok(()), None => return Ok(()),
}; };
@@ -304,7 +325,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
span, span,
body, body,
dest, dest,
StackPopCleanup::Goto(ret), StackPopCleanup::Goto { ret, unwind }
)?; )?;
// We want to pop this frame again in case there was an error, to put // We want to pop this frame again in case there was an error, to put
@@ -422,7 +443,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
// cannot use the shim here, because that will only result in infinite recursion // cannot use the shim here, because that will only result in infinite recursion
ty::InstanceDef::Virtual(_, idx) => { ty::InstanceDef::Virtual(_, idx) => {
let mut args = args.to_vec(); let mut args = args.to_vec();
let ptr_size = self.pointer_size();
// We have to implement all "object safe receivers". Currently we // We have to implement all "object safe receivers". Currently we
// support built-in pointers (&, &mut, Box) as well as unsized-self. We do // support built-in pointers (&, &mut, Box) as well as unsized-self. We do
// not yet support custom self types. // not yet support custom self types.
@@ -439,15 +459,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}; };
// Find and consult vtable // Find and consult vtable
let vtable = receiver_place.vtable(); let vtable = receiver_place.vtable();
let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?; let drop_fn = self.get_vtable_slot(vtable, idx)?;
let vtable_slot = self.memory.check_ptr_access(
vtable_slot,
ptr_size,
self.tcx.data_layout.pointer_align.abi,
)?.expect("cannot be a ZST");
let fn_ptr = self.memory.get_raw(vtable_slot.alloc_id)?
.read_ptr_sized(self, vtable_slot)?.not_undef()?;
let drop_fn = self.memory.get_fn(fn_ptr)?;
// `*mut receiver_place.layout.ty` is almost the layout that we // `*mut receiver_place.layout.ty` is almost the layout that we
// want for args[0]: We have to project to field 0 because we want // want for args[0]: We have to project to field 0 because we want
@@ -462,7 +474,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
}); });
trace!("Patched self operand to {:#?}", args[0]); trace!("Patched self operand to {:#?}", args[0]);
// recurse with concrete function // recurse with concrete function
self.eval_fn_call(drop_fn, span, caller_abi, &args, dest, ret) self.eval_fn_call(drop_fn, span, caller_abi, &args, dest, ret, unwind)
} }
} }
} }
@@ -473,6 +485,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
instance: ty::Instance<'tcx>, instance: ty::Instance<'tcx>,
span: Span, span: Span,
target: mir::BasicBlock, target: mir::BasicBlock,
unwind: Option<mir::BasicBlock>
) -> InterpResult<'tcx> { ) -> InterpResult<'tcx> {
trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance); trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
// We take the address of the object. This may well be unaligned, which is fine // We take the address of the object. This may well be unaligned, which is fine
@@ -503,6 +516,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
&[arg.into()], &[arg.into()],
Some(dest.into()), Some(dest.into()),
Some(target), Some(target),
unwind
) )
} }
} }

View File

@@ -97,6 +97,27 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
Ok(vtable) Ok(vtable)
} }
/// Resolve the function at the specified slot in the provided
/// vtable. An index of `0` corresponds to the first method
/// declared in the trait of the provided vtable.
pub fn get_vtable_slot(
&self,
vtable: Scalar<M::PointerTag>,
idx: usize
) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
let ptr_size = self.pointer_size();
// Skip over the 'drop_ptr', 'size', and 'align' fields
// (the `+ 3` below) before indexing into the method entries.
let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
// Verify the slot is in-bounds and aligned for a pointer-sized read;
// a vtable entry is never a ZST, hence the `expect`.
let vtable_slot = self.memory.check_ptr_access(
vtable_slot,
ptr_size,
self.tcx.data_layout.pointer_align.abi,
)?.expect("cannot be a ZST");
// Read the raw function pointer out of the vtable allocation,
// rejecting uninitialized memory.
let fn_ptr = self.memory.get_raw(vtable_slot.alloc_id)?
.read_ptr_sized(self, vtable_slot)?.not_undef()?;
// Turn the raw pointer into a callable function value.
Ok(self.memory.get_fn(fn_ptr)?)
}
/// Returns the drop fn instance as well as the actual dynamic type /// Returns the drop fn instance as well as the actual dynamic type
pub fn read_drop_type_from_vtable( pub fn read_drop_type_from_vtable(
&self, &self,

View File

@@ -143,6 +143,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
_args: &[OpTy<'tcx>], _args: &[OpTy<'tcx>],
_dest: Option<PlaceTy<'tcx>>, _dest: Option<PlaceTy<'tcx>>,
_ret: Option<BasicBlock>, _ret: Option<BasicBlock>,
_unwind: Option<BasicBlock>,
) -> InterpResult<'tcx, Option<&'mir Body<'tcx>>> { ) -> InterpResult<'tcx, Option<&'mir Body<'tcx>>> {
Ok(None) Ok(None)
} }
@@ -162,7 +163,9 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
_span: Span, _span: Span,
_instance: ty::Instance<'tcx>, _instance: ty::Instance<'tcx>,
_args: &[OpTy<'tcx>], _args: &[OpTy<'tcx>],
_dest: PlaceTy<'tcx>, _dest: Option<PlaceTy<'tcx>>,
_ret: Option<BasicBlock>,
_unwind: Option<BasicBlock>
) -> InterpResult<'tcx> { ) -> InterpResult<'tcx> {
throw_unsup_format!("calling intrinsics isn't supported in ConstProp"); throw_unsup_format!("calling intrinsics isn't supported in ConstProp");
} }
@@ -251,12 +254,6 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> { fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
Ok(()) Ok(())
} }
/// Called immediately before a stack frame gets popped.
#[inline(always)]
fn stack_pop(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _extra: ()) -> InterpResult<'tcx> {
Ok(())
}
} }
type Const<'tcx> = OpTy<'tcx>; type Const<'tcx> = OpTy<'tcx>;

View File

@@ -384,6 +384,12 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem) {
(1, vec![ tcx.mk_mut_ptr(param(0)), param(0) ], tcx.mk_unit()) (1, vec![ tcx.mk_mut_ptr(param(0)), param(0) ], tcx.mk_unit())
} }
"miri_start_panic" => {
// FIXME - the relevant types aren't lang items,
// so it's not trivial to check this
return;
}
ref other => { ref other => {
struct_span_err!(tcx.sess, it.span, E0093, struct_span_err!(tcx.sess, it.span, E0093,
"unrecognized intrinsic function: `{}`", "unrecognized intrinsic function: `{}`",

View File

@@ -66,7 +66,14 @@ unsafe fn _print(w: &mut dyn Write, format: PrintFmt) -> io::Result<()> {
} }
unsafe fn _print_fmt(fmt: &mut fmt::Formatter<'_>, print_fmt: PrintFmt) -> fmt::Result { unsafe fn _print_fmt(fmt: &mut fmt::Formatter<'_>, print_fmt: PrintFmt) -> fmt::Result {
let cwd = env::current_dir().ok(); // Always 'fail' to get the cwd when running under Miri -
// this allows Miri to display backtraces in isolation mode
let cwd = if !cfg!(miri) {
env::current_dir().ok()
} else {
None
};
let mut print_path = move |fmt: &mut fmt::Formatter<'_>, bows: BytesOrWideString<'_>| { let mut print_path = move |fmt: &mut fmt::Formatter<'_>, bows: BytesOrWideString<'_>| {
output_filename(fmt, bows, print_fmt, cwd.as_ref()) output_filename(fmt, bows, print_fmt, cwd.as_ref())
}; };