diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs
index 3db85d05d7a98..686d0bbefc882 100644
--- a/src/libcore/intrinsics.rs
+++ b/src/libcore/intrinsics.rs
@@ -1348,6 +1348,11 @@ extern "rust-intrinsic" {
     /// See documentation of `<*const T>::offset_from` for details.
     #[cfg(not(bootstrap))]
     pub fn ptr_offset_from(ptr: *const T, base: *const T) -> isize;
+
+    /// Internal hook used by Miri to implement unwinding.
+    /// Perma-unstable: do not use
+    #[cfg(not(bootstrap))]
+    pub fn miri_start_panic(data: *mut (dyn crate::any::Any + crate::marker::Send)) -> !;
 }
 
 // Some functions are defined here because they accidentally got made
diff --git a/src/libpanic_unwind/lib.rs b/src/libpanic_unwind/lib.rs
index 7de347446ada1..4c6c728f6f7d4 100644
--- a/src/libpanic_unwind/lib.rs
+++ b/src/libpanic_unwind/lib.rs
@@ -36,7 +36,10 @@ use core::raw;
 use core::panic::BoxMeUp;
 
 cfg_if::cfg_if! {
-    if #[cfg(target_os = "emscripten")] {
+    if #[cfg(miri)] {
+        #[path = "miri.rs"]
+        mod imp;
+    } else if #[cfg(target_os = "emscripten")] {
         #[path = "emcc.rs"]
         mod imp;
     } else if #[cfg(target_arch = "wasm32")] {
diff --git a/src/libpanic_unwind/miri.rs b/src/libpanic_unwind/miri.rs
new file mode 100644
index 0000000000000..254a9383b4223
--- /dev/null
+++ b/src/libpanic_unwind/miri.rs
@@ -0,0 +1,23 @@
+use core::any::Any;
+use alloc::boxed::Box;
+
+pub fn payload() -> *mut u8 {
+    core::ptr::null_mut()
+}
+
+pub unsafe fn panic(data: Box<dyn Any + Send>) -> ! {
+    core::intrinsics::miri_start_panic(Box::into_raw(data))
+}
+
+pub unsafe fn cleanup(ptr: *mut u8) -> Box<dyn Any + Send> {
+    Box::from_raw(ptr)
+}
+
+
+// This is required by the compiler to exist (e.g., it's a lang item),
+// but is never used by Miri. Therefore, we just use a stub here.
+#[lang = "eh_personality"]
+#[cfg(not(test))]
+fn rust_eh_personality() {
+    unsafe { core::intrinsics::abort() }
+}
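
The new `miri.rs` backend above is intentionally tiny: `panic` hands the boxed payload to the unwinder as a raw pointer (via the `miri_start_panic` intrinsic, which only the Miri engine ever implements), and `cleanup` turns a raw pointer back into a box on the catch side. The standalone sketch below shows only that ownership round trip with plain `Box::into_raw`/`Box::from_raw`; it illustrates the payload flow and is not code from this patch.

    use std::any::Any;

    fn main() {
        // "panic" side: give up ownership of the payload as a raw pointer.
        let payload: Box<dyn Any + Send> = Box::new("boom");
        let raw: *mut (dyn Any + Send) = Box::into_raw(payload);

        // ... unwinding would happen here ...

        // "cleanup" side: reconstitute the box and inspect the payload.
        let caught: Box<dyn Any + Send> = unsafe { Box::from_raw(raw) };
        if let Some(msg) = caught.downcast_ref::<&str>() {
            println!("caught panic payload: {}", msg);
        }
    }
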
diff --git a/src/librustc_codegen_ssa/mir/block.rs b/src/librustc_codegen_ssa/mir/block.rs
index 13cd202158b77..14be0e80fb482 100644
--- a/src/librustc_codegen_ssa/mir/block.rs
+++ b/src/librustc_codegen_ssa/mir/block.rs
@@ -528,6 +528,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> {
             _ => FnAbi::new(&bx, sig, &extra_args)
         };
 
+        // This should never be reachable at runtime:
+        // We should only emit a call to this intrinsic in #[cfg(miri)] mode,
+        // which means that we will never actually use the generated object files
+        // (we will just be interpreting the MIR)
+        //
+        // Note that we still need to be able to codegen *something* for this intrinsic:
+        // Miri currently uses Xargo to build a special libstd. As a side effect,
+        // we generate normal object files for libstd - while these are never used,
+        // we still need to be able to build them.
+        if intrinsic == Some("miri_start_panic") {
+            bx.abort();
+            bx.unreachable();
+            return;
+        }
+
         // Emit a panic or a no-op for `panic_if_uninhabited`.
         if intrinsic == Some("panic_if_uninhabited") {
             let ty = instance.unwrap().substs.type_at(0);
diff --git a/src/librustc_mir/const_eval.rs b/src/librustc_mir/const_eval.rs
index 707ad1511826a..f280ea7aa4669 100644
--- a/src/librustc_mir/const_eval.rs
+++ b/src/librustc_mir/const_eval.rs
@@ -325,6 +325,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
         args: &[OpTy<'tcx>],
         dest: Option<PlaceTy<'tcx>>,
         ret: Option<mir::BasicBlock>,
+        _unwind: Option<mir::BasicBlock> // unwinding is not supported in consts
     ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
         debug!("eval_fn_call: {:?}", instance);
         // Only check non-glue functions
@@ -336,7 +337,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
             // Some functions we support even if they are non-const -- but avoid testing
             // that for const fn! We certainly do *not* want to actually call the fn
             // though, so be sure we return here.
-            return if ecx.hook_fn(instance, args, dest)? {
+            return if ecx.hook_panic_fn(instance, args, dest)? {
                 ecx.goto_block(ret)?; // fully evaluated and done
                 Ok(None)
             } else {
@@ -374,7 +375,9 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
         span: Span,
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx>],
-        dest: PlaceTy<'tcx>,
+        dest: Option<PlaceTy<'tcx>>,
+        _ret: Option<mir::BasicBlock>,
+        _unwind: Option<mir::BasicBlock>
     ) -> InterpResult<'tcx> {
         if ecx.emulate_intrinsic(span, instance, args, dest)? {
             return Ok(());
@@ -469,12 +472,6 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir,
     fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
         Ok(())
     }
-
-    /// Called immediately before a stack frame gets popped.
-    #[inline(always)]
-    fn stack_pop(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _extra: ()) -> InterpResult<'tcx> {
-        Ok(())
-    }
 }
 
 /// Extracts a field of a (variant of a) const.
diff --git a/src/librustc_mir/interpret/eval_context.rs b/src/librustc_mir/interpret/eval_context.rs
index 8e901068a8d26..92358ad247e18 100644
--- a/src/librustc_mir/interpret/eval_context.rs
+++ b/src/librustc_mir/interpret/eval_context.rs
@@ -21,7 +21,7 @@ use rustc_data_structures::fx::FxHashMap;
 
 use super::{
     Immediate, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
-    Memory, Machine
+    Memory, Machine, StackPopInfo
 };
 
 pub struct InterpCx<'mir, 'tcx, M: Machine<'mir, 'tcx>> {
@@ -60,6 +60,9 @@ pub struct Frame<'mir, 'tcx, Tag=(), Extra=()> {
     /// The span of the call site.
     pub span: source_map::Span,
 
+    /// Extra data for the machine.
+    pub extra: Extra,
+
     ////////////////////////////////////////////////////////////////////////////////
     // Return place and locals
     ////////////////////////////////////////////////////////////////////////////////
@@ -82,13 +85,12 @@ pub struct Frame<'mir, 'tcx, Tag=(), Extra=()> {
     ////////////////////////////////////////////////////////////////////////////////
     /// The block that is currently executed (or will be executed after the above call stacks
     /// return).
-    pub block: mir::BasicBlock,
+    /// If this is `None`, we are unwinding and this function doesn't need any clean-up.
+    /// Just continue the same as with `Resume`.
+    pub block: Option<mir::BasicBlock>,
 
     /// The index of the currently evaluated statement.
     pub stmt: usize,
-
-    /// Extra data for the machine.
-    pub extra: Extra,
 }
 
 #[derive(Clone, Eq, PartialEq, Debug)] // Miri debug-prints these
@@ -96,7 +98,9 @@ pub enum StackPopCleanup {
     /// Jump to the next block in the caller, or cause UB if None (that's a function
     /// that may never return). Also store layout of return place so
     /// we can validate it at that layout.
-    Goto(Option<mir::BasicBlock>),
+    /// `ret` stores the block we jump to on a normal return, while `unwind`
+    /// stores the block used for cleanup during unwinding
+    Goto { ret: Option<mir::BasicBlock>, unwind: Option<mir::BasicBlock> },
     /// Just do nothing: Used by Main and for the box_alloc hook in miri.
     /// `cleanup` says whether locals are deallocated. Static computation
     /// wants them leaked to intern what they need (and just throw away
@@ -489,7 +493,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         let extra = M::stack_push(self)?;
         self.stack.push(Frame {
             body,
-            block: mir::START_BLOCK,
+            block: Some(mir::START_BLOCK),
             return_to_block,
             return_place,
             // empty local array, we fill it in below, after we are inside the stack frame and
@@ -547,60 +551,118 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         }
     }
 
-    pub(super) fn pop_stack_frame(&mut self) -> InterpResult<'tcx> {
-        info!("LEAVING({}) {}", self.cur_frame(), self.frame().instance);
+    /// Pops the current frame from the stack, deallocating the
+    /// memory for allocated locals.
+    ///
+    /// If `unwinding` is `false`, then we are performing a normal return
+    /// from a function. In this case, we jump back into the frame of the caller,
+    /// and continue execution as normal.
+    ///
+    /// If `unwinding` is `true`, then we are in the middle of a panic,
+    /// and need to unwind this frame. In this case, we jump to the
+    /// `cleanup` block for the function, which is responsible for running
+    /// `Drop` impls for any locals that have been initialized at this point.
+    /// The cleanup block ends with a special `Resume` terminator, which will
+    /// cause us to continue unwinding.
+    pub(super) fn pop_stack_frame(
+        &mut self,
+        unwinding: bool
+    ) -> InterpResult<'tcx> {
+        info!("LEAVING({}) {} (unwinding = {})",
+            self.cur_frame(), self.frame().instance, unwinding);
+
+        // Sanity check `unwinding`.
+        assert_eq!(
+            unwinding,
+            match self.frame().block {
+                None => true,
+                Some(block) => self.body().basic_blocks()[block].is_cleanup
+            }
+        );
+
         ::log_settings::settings().indentation -= 1;
         let frame = self.stack.pop().expect(
             "tried to pop a stack frame, but there were none",
         );
-        M::stack_pop(self, frame.extra)?;
-        // Abort early if we do not want to clean up: We also avoid validation in that case,
+        let stack_pop_info = M::stack_pop(self, frame.extra, unwinding)?;
+        if let (false, StackPopInfo::StopUnwinding) = (unwinding, stack_pop_info) {
+            bug!("Attempted to stop unwinding while there is no unwinding!");
+        }
+
+        // Now where do we jump next?
+
+        // Determine if we leave this function normally or via unwinding.
+        let cur_unwinding = if let StackPopInfo::StopUnwinding = stack_pop_info {
+            false
+        } else {
+            unwinding
+        };
+
+        // Usually we want to clean up (deallocate locals), but in a few rare cases we don't.
+        // In that case, we return early. We also avoid validation in that case,
         // because this is CTFE and the final value will be thoroughly validated anyway.
-        match frame.return_to_block {
-            StackPopCleanup::Goto(_) => {},
-            StackPopCleanup::None { cleanup } => {
-                if !cleanup {
-                    assert!(self.stack.is_empty(), "only the topmost frame should ever be leaked");
-                    // Leak the locals, skip validation.
-                    return Ok(());
-                }
-            }
+        let (cleanup, next_block) = match frame.return_to_block {
+            StackPopCleanup::Goto { ret, unwind } => {
+                (true, Some(if cur_unwinding { unwind } else { ret }))
+            },
+            StackPopCleanup::None { cleanup, .. } => (cleanup, None)
+        };
} => (cleanup, None) + }; + + if !cleanup { + assert!(self.stack.is_empty(), "only the topmost frame should ever be leaked"); + assert!(next_block.is_none(), "tried to skip cleanup when we have a next block!"); + // Leak the locals, skip validation. + return Ok(()); } - // Deallocate all locals that are backed by an allocation. + + // Cleanup: deallocate all locals that are backed by an allocation. for local in frame.locals { self.deallocate_local(local.value)?; } - // Validate the return value. Do this after deallocating so that we catch dangling - // references. - if let Some(return_place) = frame.return_place { - if M::enforce_validity(self) { - // Data got changed, better make sure it matches the type! - // It is still possible that the return place held invalid data while - // the function is running, but that's okay because nobody could have - // accessed that same data from the "outside" to observe any broken - // invariant -- that is, unless a function somehow has a ptr to - // its return place... but the way MIR is currently generated, the - // return place is always a local and then this cannot happen. - self.validate_operand( - self.place_to_op(return_place)?, - vec![], - None, - )?; - } + + + trace!("StackPopCleanup: {:?} StackPopInfo: {:?} cur_unwinding = {:?}", + frame.return_to_block, stack_pop_info, cur_unwinding); + if cur_unwinding { + // Follow the unwind edge. + let unwind = next_block.expect("Encounted StackPopCleanup::None when unwinding!"); + let next_frame = self.frame_mut(); + // If `unwind` is `None`, we'll leave that function immediately again. + next_frame.block = unwind; + next_frame.stmt = 0; } else { - // Uh, that shouldn't happen... the function did not intend to return - throw_ub!(Unreachable) - } - // Jump to new block -- *after* validation so that the spans make more sense. - match frame.return_to_block { - StackPopCleanup::Goto(block) => { - self.goto_block(block)?; + // Follow the normal return edge. + // Validate the return value. Do this after deallocating so that we catch dangling + // references. + if let Some(return_place) = frame.return_place { + if M::enforce_validity(self) { + // Data got changed, better make sure it matches the type! + // It is still possible that the return place held invalid data while + // the function is running, but that's okay because nobody could have + // accessed that same data from the "outside" to observe any broken + // invariant -- that is, unless a function somehow has a ptr to + // its return place... but the way MIR is currently generated, the + // return place is always a local and then this cannot happen. + self.validate_operand( + self.place_to_op(return_place)?, + vec![], + None, + )?; + } + } else { + // Uh, that shouldn't happen... the function did not intend to return + throw_ub!(Unreachable); + } + + // Jump to new block -- *after* validation so that the spans make more sense. + if let Some(ret) = next_block { + self.goto_block(ret)?; } - StackPopCleanup::None { .. 
             }
-            StackPopCleanup::None { .. } => {}
         }
 
         if self.stack.len() > 0 {
-            info!("CONTINUING({}) {}", self.cur_frame(), self.frame().instance);
+            info!("CONTINUING({}) {} (unwinding = {})",
+                self.cur_frame(), self.frame().instance, cur_unwinding);
         }
 
         Ok(())
@@ -745,16 +807,20 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             } else {
                 last_span = Some(span);
             }
-            let block = &body.basic_blocks()[block];
-            let source_info = if stmt < block.statements.len() {
-                block.statements[stmt].source_info
-            } else {
-                block.terminator().source_info
-            };
-            let lint_root = match body.source_scope_local_data {
-                mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
-                mir::ClearCrossCrate::Clear => None,
-            };
+
+            let lint_root = block.and_then(|block| {
+                let block = &body.basic_blocks()[block];
+                let source_info = if stmt < block.statements.len() {
+                    block.statements[stmt].source_info
+                } else {
+                    block.terminator().source_info
+                };
+                match body.source_scope_local_data {
+                    mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
+                    mir::ClearCrossCrate::Clear => None,
+                }
+            });
+
             frames.push(FrameInfo { call_site: span, instance, lint_root });
         }
         trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
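
The rewritten `pop_stack_frame` above boils down to one decision: combine the `unwinding` flag with the `StackPopInfo` returned by the machine, then continue in either the `ret` or the `unwind` block stored in `StackPopCleanup::Goto`. The toy model below reproduces just that branch structure with plain types; it is a simplified sketch, not interpreter code.

    #[derive(Clone, Copy, Debug, PartialEq)]
    enum StackPopInfo { Normal, StopUnwinding }

    type Block = u32;

    /// Returns (are we still unwinding?, which block to continue in).
    fn next_block(
        unwinding: bool,        // were we unwinding when the frame was popped?
        info: StackPopInfo,     // what did Machine::stack_pop ask for?
        ret: Option<Block>,     // normal return edge from StackPopCleanup::Goto
        unwind: Option<Block>,  // cleanup edge from StackPopCleanup::Goto
    ) -> (bool, Option<Block>) {
        // A machine may stop the unwind here (e.g. at a catch frame).
        let cur_unwinding = unwinding && info != StackPopInfo::StopUnwinding;
        (cur_unwinding, if cur_unwinding { unwind } else { ret })
    }

    fn main() {
        // A normal return continues in the caller's `ret` block.
        assert_eq!(next_block(false, StackPopInfo::Normal, Some(2), Some(7)), (false, Some(2)));
        // While unwinding we follow the caller's cleanup edge instead.
        assert_eq!(next_block(true, StackPopInfo::Normal, Some(2), Some(7)), (true, Some(7)));
        // A catch frame stops the unwind and resumes normal execution.
        assert_eq!(next_block(true, StackPopInfo::StopUnwinding, Some(2), Some(7)), (false, Some(2)));
    }
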
diff --git a/src/librustc_mir/interpret/intrinsics.rs b/src/librustc_mir/interpret/intrinsics.rs
index 39f10d8e6045d..66b6d4ac12c40 100644
--- a/src/librustc_mir/interpret/intrinsics.rs
+++ b/src/librustc_mir/interpret/intrinsics.rs
@@ -91,11 +91,17 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         span: Span,
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx, M::PointerTag>],
-        dest: PlaceTy<'tcx, M::PointerTag>,
+        dest: Option<PlaceTy<'tcx, M::PointerTag>>,
     ) -> InterpResult<'tcx, bool> {
         let substs = instance.substs;
+        // We currently do not handle any diverging intrinsics.
+        let dest = match dest {
+            Some(dest) => dest,
+            None => return Ok(false)
+        };
         let intrinsic_name = &*self.tcx.item_name(instance.def_id()).as_str();
+
         match intrinsic_name {
             "caller_location" => {
                 let topmost = span.ctxt().outer_expn().expansion_cause().unwrap_or(span);
@@ -347,9 +353,10 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         Ok(true)
     }
 
-    /// "Intercept" a function call because we have something special to do for it.
+    /// "Intercept" a function call to a panic-related function
+    /// because we have something special to do for it.
     /// Returns `true` if an intercept happened.
-    pub fn hook_fn(
+    pub fn hook_panic_fn(
         &mut self,
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx, M::PointerTag>],
diff --git a/src/librustc_mir/interpret/machine.rs b/src/librustc_mir/interpret/machine.rs
index 870e50a3cbb9a..0ddd0962ba544 100644
--- a/src/librustc_mir/interpret/machine.rs
+++ b/src/librustc_mir/interpret/machine.rs
@@ -16,6 +16,21 @@ use super::{
     Frame, Operand,
 };
 
+/// Data returned by Machine::stack_pop,
+/// to provide further control over the popping of the stack frame
+#[derive(Eq, PartialEq, Debug, Copy, Clone)]
+pub enum StackPopInfo {
+    /// Indicates that no special handling should be
+    /// done - we'll either return normally or unwind
+    /// based on the terminator for the function
+    /// we're leaving.
+    Normal,
+
+    /// Indicates that we should stop unwinding,
+    /// as we've reached a catch frame
+    StopUnwinding
+}
+
 /// Whether this kind of memory is allowed to leak
 pub trait MayLeak: Copy {
     fn may_leak(self) -> bool;
@@ -137,6 +152,7 @@ pub trait Machine<'mir, 'tcx>: Sized {
         args: &[OpTy<'tcx, Self::PointerTag>],
         dest: Option<PlaceTy<'tcx, Self::PointerTag>>,
         ret: Option<mir::BasicBlock>,
+        unwind: Option<mir::BasicBlock>
     ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>>;
 
     /// Execute `fn_val`. it is the hook's responsibility to advance the instruction
@@ -156,7 +172,9 @@ pub trait Machine<'mir, 'tcx>: Sized {
         span: Span,
         instance: ty::Instance<'tcx>,
         args: &[OpTy<'tcx, Self::PointerTag>],
-        dest: PlaceTy<'tcx, Self::PointerTag>,
+        dest: Option<PlaceTy<'tcx, Self::PointerTag>>,
+        ret: Option<mir::BasicBlock>,
+        unwind: Option<mir::BasicBlock>,
     ) -> InterpResult<'tcx>;
 
     /// Called for read access to a foreign static item.
@@ -251,9 +269,13 @@ pub trait Machine<'mir, 'tcx>: Sized {
 
     /// Called immediately after a stack frame gets popped
    fn stack_pop(
-        ecx: &mut InterpCx<'mir, 'tcx, Self>,
-        extra: Self::FrameExtra,
-    ) -> InterpResult<'tcx>;
+        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
+        _extra: Self::FrameExtra,
+        _unwinding: bool
+    ) -> InterpResult<'tcx, StackPopInfo> {
+        // By default, we do not support unwinding from panics
+        Ok(StackPopInfo::Normal)
+    }
 
     fn int_to_ptr(
         _mem: &Memory<'mir, 'tcx, Self>,
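
`Machine::stack_pop` is now the hook through which a backend can participate in unwinding: returning `StackPopInfo::StopUnwinding` tells the engine that the frame being popped catches the panic, while the new default body keeps the old behaviour for machines that never unwind (const eval, const prop). The sketch below illustrates that default-method pattern with a stripped-down stand-in trait; the names `MiriLike` and `catch_frames` are invented for illustration and are not the real Miri implementation.

    #[derive(Debug, PartialEq)]
    enum StackPopInfo { Normal, StopUnwinding }

    trait Machine {
        type FrameExtra;

        // Default: no special handling, and no support for catching panics.
        // Machines that never unwind simply keep this default.
        fn stack_pop(&mut self, _extra: Self::FrameExtra, _unwinding: bool) -> StackPopInfo {
            StackPopInfo::Normal
        }
    }

    // A backend that marks certain frames as catch frames and stops the
    // unwind when one of them is popped while unwinding.
    struct MiriLike { catch_frames: Vec<u64> }

    impl Machine for MiriLike {
        type FrameExtra = u64; // hypothetical per-frame id

        fn stack_pop(&mut self, extra: u64, unwinding: bool) -> StackPopInfo {
            if unwinding && self.catch_frames.contains(&extra) {
                StackPopInfo::StopUnwinding
            } else {
                StackPopInfo::Normal
            }
        }
    }

    fn main() {
        let mut m = MiriLike { catch_frames: vec![1] };
        assert_eq!(m.stack_pop(1, true), StackPopInfo::StopUnwinding);
        assert_eq!(m.stack_pop(2, true), StackPopInfo::Normal);
    }
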
diff --git a/src/librustc_mir/interpret/mod.rs b/src/librustc_mir/interpret/mod.rs
index 0c61be283dfd0..520bd434faa25 100644
--- a/src/librustc_mir/interpret/mod.rs
+++ b/src/librustc_mir/interpret/mod.rs
@@ -26,7 +26,7 @@ pub use self::place::{Place, PlaceTy, MemPlace, MPlaceTy};
 
 pub use self::memory::{Memory, MemoryKind, AllocCheck, FnVal};
 
-pub use self::machine::{Machine, AllocMap, MayLeak};
+pub use self::machine::{Machine, AllocMap, MayLeak, StackPopInfo};
 
 pub use self::operand::{ScalarMaybeUndef, Immediate, ImmTy, Operand, OpTy};
 
diff --git a/src/librustc_mir/interpret/snapshot.rs b/src/librustc_mir/interpret/snapshot.rs
index 1df98f079cc10..7f3ea0283cda3 100644
--- a/src/librustc_mir/interpret/snapshot.rs
+++ b/src/librustc_mir/interpret/snapshot.rs
@@ -315,7 +315,7 @@ impl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation
 }
 
 impl_stable_hash_for!(enum crate::interpret::eval_context::StackPopCleanup {
-    Goto(block),
+    Goto { ret, unwind },
     None { cleanup },
 });
 
@@ -326,7 +326,7 @@ struct FrameSnapshot<'a, 'tcx> {
     return_to_block: &'a StackPopCleanup,
     return_place: Option>>,
     locals: IndexVec>>,
-    block: &'a mir::BasicBlock,
+    block: Option<mir::BasicBlock>,
     stmt: usize,
 }
 
@@ -364,7 +364,7 @@ impl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>
         instance: *instance,
         span: *span,
         return_to_block,
-        block,
+        block: *block,
         stmt: *stmt,
         return_place: return_place.map(|r| r.snapshot(ctx)),
         locals: locals.iter().map(|local| local.snapshot(ctx)).collect(),
diff --git a/src/librustc_mir/interpret/step.rs b/src/librustc_mir/interpret/step.rs
index daca7a25787ca..d7493338a5352 100644
--- a/src/librustc_mir/interpret/step.rs
+++ b/src/librustc_mir/interpret/step.rs
@@ -49,7 +49,16 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             return Ok(false);
         }
 
-        let block = self.frame().block;
+        let block = match self.frame().block {
+            Some(block) => block,
+            None => {
+                // We are unwinding and this fn has no cleanup code.
+                // Just go on unwinding.
+                trace!("unwinding: skipping frame");
+                self.pop_stack_frame(/* unwinding */ true)?;
+                return Ok(true)
+            }
+        };
         let stmt_id = self.frame().stmt;
         let body = self.body();
         let basic_block = &body.basic_blocks()[block];
@@ -290,6 +299,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         let old_stack = self.cur_frame();
         let old_bb = self.frame().block;
+
         self.eval_terminator(terminator)?;
         if !self.stack.is_empty() {
             // This should change *something*
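
With `Frame::block` now an `Option`, the main loop treats a `None` block as "nothing to run while unwinding": the frame is popped immediately and unwinding continues, and only a frame that still has a cleanup block gets to execute its `Drop` glue before `Resume` hands control back to the unwinder. The toy below models that skip-until-cleanup loop with plain data types; it is not the interpreter's data model.

    struct Frame {
        name: &'static str,
        cleanup_block: Option<&'static str>, // None: nothing to run while unwinding
    }

    fn unwind(stack: &mut Vec<Frame>) {
        while let Some(frame) = stack.pop() {
            match frame.cleanup_block {
                None => println!("{}: no cleanup, keep unwinding", frame.name),
                Some(block) => {
                    // The real interpreter would now execute this block and only
                    // continue popping once its Resume terminator is reached.
                    println!("{}: running cleanup {}", frame.name, block);
                    return;
                }
            }
        }
        println!("unwound past the bottom of the stack");
    }

    fn main() {
        let mut stack = vec![
            Frame { name: "main", cleanup_block: Some("bb7") },
            Frame { name: "helper", cleanup_block: None },
        ];
        unwind(&mut stack); // pops `helper`, stops at `main`'s cleanup block
    }
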
diff --git a/src/librustc_mir/interpret/terminator.rs b/src/librustc_mir/interpret/terminator.rs
index e10bb85d52df8..4f9e404b2c635 100644
--- a/src/librustc_mir/interpret/terminator.rs
+++ b/src/librustc_mir/interpret/terminator.rs
@@ -15,7 +15,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
     #[inline]
     pub fn goto_block(&mut self, target: Option<mir::BasicBlock>) -> InterpResult<'tcx> {
         if let Some(target) = target {
-            self.frame_mut().block = target;
+            self.frame_mut().block = Some(target);
             self.frame_mut().stmt = 0;
             Ok(())
         } else {
@@ -31,7 +31,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         match terminator.kind {
             Return => {
                 self.frame().return_place.map(|r| self.dump_place(*r));
-                self.pop_stack_frame()?
+                self.pop_stack_frame(/* unwinding */ false)?
             }
 
             Goto { target } => self.goto_block(Some(target))?,
@@ -67,6 +67,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 ref func,
                 ref args,
                 ref destination,
+                ref cleanup,
                 ..
             } => {
                 let (dest, ret) = match *destination {
@@ -98,13 +99,14 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     &args[..],
                     dest,
                     ret,
+                    *cleanup
                 )?;
             }
 
             Drop {
                 ref location,
                 target,
-                ..
+                unwind,
             } => {
                 // FIXME(CTFE): forbid drop in const eval
                 let place = self.eval_place(location)?;
@@ -117,6 +119,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     instance,
                     terminator.source_info.span,
                     target,
+                    unwind
                 )?;
             }
 
@@ -160,10 +163,21 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 }
             }
 
+            // When we encounter Resume, we've finished unwinding
+            // cleanup for the current stack frame. We pop it in order
+            // to continue unwinding the next frame
+            Resume => {
+                trace!("unwinding: resuming from cleanup");
+                // By definition, a Resume terminator means
+                // that we're unwinding
+                self.pop_stack_frame(/* unwinding */ true)?;
+                return Ok(())
+            },
+
             Yield { .. } |
             GeneratorDrop |
             DropAndReplace { .. } |
-            Resume |
             Abort => unimplemented!("{:#?}", terminator.kind),
             FalseEdges { .. } => bug!("should have been eliminated by\
                 `simplify_branches` mir pass"),
@@ -237,6 +251,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         args: &[OpTy<'tcx, M::PointerTag>],
         dest: Option<PlaceTy<'tcx, M::PointerTag>>,
         ret: Option<mir::BasicBlock>,
+        unwind: Option<mir::BasicBlock>
     ) -> InterpResult<'tcx> {
         trace!("eval_fn_call: {:#?}", fn_val);
 
@@ -249,17 +264,23 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
 
         match instance.def {
             ty::InstanceDef::Intrinsic(..) => {
-                // The intrinsic itself cannot diverge, so if we got here without a return
-                // place... (can happen e.g., for transmute returning `!`)
-                let dest = match dest {
-                    Some(dest) => dest,
-                    None => throw_ub!(Unreachable)
-                };
-                M::call_intrinsic(self, span, instance, args, dest)?;
+                let old_stack = self.cur_frame();
+                let old_bb = self.frame().block;
+                M::call_intrinsic(self, span, instance, args, dest, ret, unwind)?;
                 // No stack frame gets pushed, the main loop will just act as if the
                 // call completed.
-                self.goto_block(ret)?;
-                self.dump_place(*dest);
+                if ret.is_some() {
+                    self.goto_block(ret)?;
+                } else {
+                    // If this intrinsic call doesn't have a ret block,
+                    // then the intrinsic implementation should have
+                    // changed the stack frame (otherwise, we'll end
+                    // up trying to execute this intrinsic call again)
+                    debug_assert!(self.cur_frame() != old_stack || self.frame().block != old_bb);
+                }
+                if let Some(dest) = dest {
+                    self.dump_place(*dest)
+                }
                 Ok(())
             }
             ty::InstanceDef::VtableShim(..) |
@@ -294,7 +315,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 }
 
                 // We need MIR for this fn
-                let body = match M::find_fn(self, instance, args, dest, ret)? {
+                let body = match M::find_fn(self, instance, args, dest, ret, unwind)? {
                     Some(body) => body,
                     None => return Ok(()),
                 };
@@ -304,7 +325,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                     span,
                     body,
                     dest,
-                    StackPopCleanup::Goto(ret),
+                    StackPopCleanup::Goto { ret, unwind }
                 )?;
 
                 // We want to pop this frame again in case there was an error, to put
@@ -422,7 +443,6 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             // cannot use the shim here, because that will only result in infinite recursion
             ty::InstanceDef::Virtual(_, idx) => {
                 let mut args = args.to_vec();
-                let ptr_size = self.pointer_size();
                 // We have to implement all "object safe receivers". Currently we
                 // support built-in pointers (&, &mut, Box) as well as unsized-self. We do
                 // not yet support custom self types.
@@ -439,15 +459,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 };
                 // Find and consult vtable
                 let vtable = receiver_place.vtable();
-                let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
-                let vtable_slot = self.memory.check_ptr_access(
-                    vtable_slot,
-                    ptr_size,
-                    self.tcx.data_layout.pointer_align.abi,
-                )?.expect("cannot be a ZST");
-                let fn_ptr = self.memory.get_raw(vtable_slot.alloc_id)?
-                    .read_ptr_sized(self, vtable_slot)?.not_undef()?;
-                let drop_fn = self.memory.get_fn(fn_ptr)?;
+                let drop_fn = self.get_vtable_slot(vtable, idx)?;
 
                 // `*mut receiver_place.layout.ty` is almost the layout that we
                 // want for args[0]: We have to project to field 0 because we want
@@ -462,7 +474,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
                 });
                 trace!("Patched self operand to {:#?}", args[0]);
                 // recurse with concrete function
-                self.eval_fn_call(drop_fn, span, caller_abi, &args, dest, ret)
+                self.eval_fn_call(drop_fn, span, caller_abi, &args, dest, ret, unwind)
             }
         }
     }
@@ -473,6 +485,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         instance: ty::Instance<'tcx>,
         span: Span,
         target: mir::BasicBlock,
+        unwind: Option<mir::BasicBlock>
     ) -> InterpResult<'tcx> {
         trace!("drop_in_place: {:?},\n {:?}, {:?}", *place, place.layout.ty, instance);
         // We take the address of the object. This may well be unaligned, which is fine
@@ -503,6 +516,7 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             &[arg.into()],
             Some(dest.into()),
             Some(target),
+            unwind
         )
     }
 }
diff --git a/src/librustc_mir/interpret/traits.rs b/src/librustc_mir/interpret/traits.rs
index c15425321ec01..e3b3df2674f41 100644
--- a/src/librustc_mir/interpret/traits.rs
+++ b/src/librustc_mir/interpret/traits.rs
@@ -97,6 +97,27 @@ impl<'mir, 'tcx, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
         Ok(vtable)
     }
 
+    /// Resolve the function at the specified slot in the provided
+    /// vtable. An index of '0' corresponds to the first method
+    /// declared in the trait of the provided vtable
+    pub fn get_vtable_slot(
+        &self,
+        vtable: Scalar<M::PointerTag>,
+        idx: usize
+    ) -> InterpResult<'tcx, FnVal<'tcx, M::ExtraFnVal>> {
+        let ptr_size = self.pointer_size();
+        // Skip over the 'drop_ptr', 'size', and 'align' fields
+        let vtable_slot = vtable.ptr_offset(ptr_size * (idx as u64 + 3), self)?;
+        let vtable_slot = self.memory.check_ptr_access(
+            vtable_slot,
+            ptr_size,
+            self.tcx.data_layout.pointer_align.abi,
+        )?.expect("cannot be a ZST");
+        let fn_ptr = self.memory.get_raw(vtable_slot.alloc_id)?
+            .read_ptr_sized(self, vtable_slot)?.not_undef()?;
+        Ok(self.memory.get_fn(fn_ptr)?)
+    }
+
     /// Returns the drop fn instance as well as the actual dynamic type
     pub fn read_drop_type_from_vtable(
         &self,
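
The new `get_vtable_slot` helper assumes the vtable layout the interpreter itself emits: three pointer-sized header fields (`drop_in_place`, size, align) followed by one slot per trait method, so method `idx` lives at byte offset `(idx + 3) * pointer_size`. A tiny model of just that arithmetic:

    // Model of the layout assumed above: [drop_in_place, size, align, m0, m1, ...],
    // every field one pointer wide.
    fn vtable_slot_offset(idx: u64, ptr_size: u64) -> u64 {
        (idx + 3) * ptr_size
    }

    fn main() {
        // On a 64-bit target, the third method (index 2) sits at byte offset 40.
        assert_eq!(vtable_slot_offset(2, 8), 40);
    }
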
diff --git a/src/librustc_mir/transform/const_prop.rs b/src/librustc_mir/transform/const_prop.rs
index a0d04bd593212..2ede43e2111ed 100644
--- a/src/librustc_mir/transform/const_prop.rs
+++ b/src/librustc_mir/transform/const_prop.rs
@@ -143,6 +143,7 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
         _args: &[OpTy<'tcx>],
         _dest: Option<PlaceTy<'tcx>>,
         _ret: Option<mir::BasicBlock>,
+        _unwind: Option<mir::BasicBlock>,
     ) -> InterpResult<'tcx, Option<&'mir Body<'tcx>>> {
         Ok(None)
     }
@@ -162,7 +163,9 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
         _span: Span,
         _instance: ty::Instance<'tcx>,
         _args: &[OpTy<'tcx>],
-        _dest: PlaceTy<'tcx>,
+        _dest: Option<PlaceTy<'tcx>>,
+        _ret: Option<mir::BasicBlock>,
+        _unwind: Option<mir::BasicBlock>
     ) -> InterpResult<'tcx> {
         throw_unsup_format!("calling intrinsics isn't supported in ConstProp");
     }
@@ -251,12 +254,6 @@ impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for ConstPropMachine {
     fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
         Ok(())
     }
-
-    /// Called immediately before a stack frame gets popped.
-    #[inline(always)]
-    fn stack_pop(_ecx: &mut InterpCx<'mir, 'tcx, Self>, _extra: ()) -> InterpResult<'tcx> {
-        Ok(())
-    }
 }
 
 type Const<'tcx> = OpTy<'tcx>;
diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs
index 1a1b98f582ff2..84a15c6e4ebe2 100644
--- a/src/librustc_typeck/check/intrinsic.rs
+++ b/src/librustc_typeck/check/intrinsic.rs
@@ -384,6 +384,12 @@ pub fn check_intrinsic_type(tcx: TyCtxt<'_>, it: &hir::ForeignItem) {
             (1, vec![ tcx.mk_mut_ptr(param(0)), param(0) ], tcx.mk_unit())
         }
 
+        "miri_start_panic" => {
+            // FIXME - the relevant types aren't lang items,
+            // so it's not trivial to check this
+            return;
+        }
+
         ref other => {
             struct_span_err!(tcx.sess, it.span, E0093,
                              "unrecognized intrinsic function: `{}`",
diff --git a/src/libstd/sys_common/backtrace.rs b/src/libstd/sys_common/backtrace.rs
index 9c406ec39cc45..d7296b43fbe8a 100644
--- a/src/libstd/sys_common/backtrace.rs
+++ b/src/libstd/sys_common/backtrace.rs
@@ -66,7 +66,14 @@ unsafe fn _print(w: &mut dyn Write, format: PrintFmt) -> io::Result<()> {
 }
 
 unsafe fn _print_fmt(fmt: &mut fmt::Formatter<'_>, print_fmt: PrintFmt) -> fmt::Result {
-    let cwd = env::current_dir().ok();
+    // Always 'fail' to get the cwd when running under Miri -
+    // this allows Miri to display backtraces in isolation mode
+    let cwd = if !cfg!(miri) {
+        env::current_dir().ok()
+    } else {
+        None
+    };
+
     let mut print_path = move |fmt: &mut fmt::Formatter<'_>, bows: BytesOrWideString<'_>| {
         output_filename(fmt, bows, print_fmt, cwd.as_ref())
     };
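
In the `backtrace.rs` change above, `cfg!(miri)` is an ordinary compile-time boolean, so outside of Miri the extra branch folds away and the cwd is fetched exactly as before. A minimal illustration of the same gating idiom (this is not the libstd code, just the pattern):

    use std::path::PathBuf;

    fn cwd_for_backtrace() -> Option<PathBuf> {
        if cfg!(miri) {
            // Pretend the cwd is unknown so no host paths leak in isolation mode.
            None
        } else {
            std::env::current_dir().ok()
        }
    }

    fn main() {
        println!("cwd used for backtraces: {:?}", cwd_for_backtrace());
    }
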