From 3f0a2a29414a67d9c52241a94ce373546ca2ddcf Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 15 Apr 2019 15:36:09 +0200 Subject: [PATCH 01/16] rewrite Stacked Borrows Core. this passes stacked-borrows.rs! --- src/fn_call.rs | 28 +- src/helpers.rs | 24 +- src/intrinsic.rs | 6 +- src/lib.rs | 108 +++-- src/operator.rs | 38 +- src/stacked_borrows.rs | 937 +++++++++++++++++++++-------------------- src/tls.rs | 10 +- 7 files changed, 573 insertions(+), 578 deletions(-) diff --git a/src/fn_call.rs b/src/fn_call.rs index 64dcce161d..d8794fed46 100644 --- a/src/fn_call.rs +++ b/src/fn_call.rs @@ -13,8 +13,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' fn find_fn( &mut self, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: Option>, + args: &[OpTy<'tcx, Tag>], + dest: Option>, ret: Option, ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>> { let this = self.eval_context_mut(); @@ -55,8 +55,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' fn emulate_foreign_item( &mut self, def_id: DefId, - args: &[OpTy<'tcx, Borrow>], - dest: Option>, + args: &[OpTy<'tcx, Tag>], + dest: Option>, ret: Option, ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); @@ -92,7 +92,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } else { let align = this.tcx.data_layout.pointer_align.abi; let ptr = this.memory_mut().allocate(Size::from_bytes(size), align, MiriMemoryKind::C.into()); - this.write_scalar(Scalar::Ptr(ptr.with_default_tag()), dest)?; + this.write_scalar(Scalar::Ptr(ptr), dest)?; } } "calloc" => { @@ -105,7 +105,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } else { let size = Size::from_bytes(bytes); let align = this.tcx.data_layout.pointer_align.abi; - let ptr = this.memory_mut().allocate(size, align, MiriMemoryKind::C.into()).with_default_tag(); + let ptr = this.memory_mut().allocate(size, align, 
MiriMemoryKind::C.into()); this.memory_mut().get_mut(ptr.alloc_id)?.write_repeat(tcx, ptr, 0, size)?; this.write_scalar(Scalar::Ptr(ptr), dest)?; } @@ -132,7 +132,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Align::from_bytes(align).unwrap(), MiriMemoryKind::C.into() ); - this.write_scalar(Scalar::Ptr(ptr.with_default_tag()), ret.into())?; + this.write_scalar(Scalar::Ptr(ptr), ret.into())?; } this.write_null(dest)?; } @@ -162,8 +162,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Size::from_bytes(size), Align::from_bytes(align).unwrap(), MiriMemoryKind::Rust.into() - ) - .with_default_tag(); + ); this.write_scalar(Scalar::Ptr(ptr), dest)?; } "__rust_alloc_zeroed" => { @@ -180,8 +179,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Size::from_bytes(size), Align::from_bytes(align).unwrap(), MiriMemoryKind::Rust.into() - ) - .with_default_tag(); + ); this.memory_mut() .get_mut(ptr.alloc_id)? 
.write_repeat(tcx, ptr, 0, Size::from_bytes(size))?; @@ -222,7 +220,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Align::from_bytes(align).unwrap(), MiriMemoryKind::Rust.into(), )?; - this.write_scalar(Scalar::Ptr(new_ptr.with_default_tag()), dest)?; + this.write_scalar(Scalar::Ptr(new_ptr), dest)?; } "syscall" => { @@ -428,7 +426,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Size::from_bytes((value.len() + 1) as u64), Align::from_bytes(1).unwrap(), MiriMemoryKind::Env.into(), - ).with_default_tag(); + ); { let alloc = this.memory_mut().get_mut(value_copy.alloc_id)?; alloc.write_bytes(tcx, value_copy, &value)?; @@ -798,13 +796,13 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' Ok(()) } - fn write_null(&mut self, dest: PlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> { + fn write_null(&mut self, dest: PlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { self.eval_context_mut().write_scalar(Scalar::from_int(0, dest.layout.size), dest) } /// Evaluates the scalar at the specified path. Returns Some(val) /// if the path could be resolved, and None otherwise - fn eval_path_scalar(&mut self, path: &[&str]) -> EvalResult<'tcx, Option>> { + fn eval_path_scalar(&mut self, path: &[&str]) -> EvalResult<'tcx, Option>> { let this = self.eval_context_mut(); if let Ok(instance) = this.resolve_path(path) { let cid = GlobalId { diff --git a/src/helpers.rs b/src/helpers.rs index 8a4cccc743..f468d25603 100644 --- a/src/helpers.rs +++ b/src/helpers.rs @@ -47,9 +47,9 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' /// will be true if this is frozen, false if this is in an `UnsafeCell`. 
fn visit_freeze_sensitive( &self, - place: MPlaceTy<'tcx, Borrow>, + place: MPlaceTy<'tcx, Tag>, size: Size, - mut action: impl FnMut(Pointer, Size, bool) -> EvalResult<'tcx>, + mut action: impl FnMut(Pointer, Size, bool) -> EvalResult<'tcx>, ) -> EvalResult<'tcx> { let this = self.eval_context_ref(); trace!("visit_frozen(place={:?}, size={:?})", *place, size); @@ -64,7 +64,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' let mut end_ptr = place.ptr; // Called when we detected an `UnsafeCell` at the given offset and size. // Calls `action` and advances `end_ptr`. - let mut unsafe_cell_action = |unsafe_cell_ptr: Scalar, unsafe_cell_size: Size| { + let mut unsafe_cell_action = |unsafe_cell_ptr: Scalar, unsafe_cell_size: Size| { if unsafe_cell_size != Size::ZERO { debug_assert_eq!(unsafe_cell_ptr.to_ptr().unwrap().alloc_id, end_ptr.to_ptr().unwrap().alloc_id); @@ -120,7 +120,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' /// Visiting the memory covered by a `MemPlace`, being aware of /// whether we are inside an `UnsafeCell` or not. struct UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F> - where F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + where F: FnMut(MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { ecx: &'ecx MiriEvalContext<'a, 'mir, 'tcx>, unsafe_cell_action: F, @@ -131,9 +131,9 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' for UnsafeCellVisitor<'ecx, 'a, 'mir, 'tcx, F> where - F: FnMut(MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + F: FnMut(MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { - type V = MPlaceTy<'tcx, Borrow>; + type V = MPlaceTy<'tcx, Tag>; #[inline(always)] fn ecx(&self) -> &MiriEvalContext<'a, 'mir, 'tcx> { @@ -141,7 +141,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } // Hook to detect `UnsafeCell`. 
- fn visit_value(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_value(&mut self, v: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { trace!("UnsafeCellVisitor: {:?} {:?}", *v, v.layout.ty); let is_unsafe_cell = match v.layout.ty.sty { @@ -163,8 +163,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' // Make sure we visit aggregrates in increasing offset order. fn visit_aggregate( &mut self, - place: MPlaceTy<'tcx, Borrow>, - fields: impl Iterator>>, + place: MPlaceTy<'tcx, Tag>, + fields: impl Iterator>>, ) -> EvalResult<'tcx> { match place.layout.fields { layout::FieldPlacement::Array { .. } => { @@ -174,7 +174,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } layout::FieldPlacement::Arbitrary { .. } => { // Gather the subplaces and sort them before visiting. - let mut places = fields.collect::>>>()?; + let mut places = fields.collect::>>>()?; places.sort_by_key(|place| place.ptr.get_ptr_offset(self.ecx())); self.walk_aggregate(place, places.into_iter().map(Ok)) } @@ -186,7 +186,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } // We have to do *something* for unions. - fn visit_union(&mut self, v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_union(&mut self, v: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { // With unions, we fall back to whatever the type says, to hopefully be consistent // with LLVM IR. @@ -200,7 +200,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a + 'mir>: crate::MiriEvalContextExt<' } // We should never get to a primitive, but always short-circuit somewhere above. 
- fn visit_primitive(&mut self, _v: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_primitive(&mut self, _v: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { bug!("we should always short-circuit before coming to a primitive") } diff --git a/src/intrinsic.rs b/src/intrinsic.rs index bb156c95df..a17f576b43 100644 --- a/src/intrinsic.rs +++ b/src/intrinsic.rs @@ -4,7 +4,7 @@ use rustc::ty::layout::{self, LayoutOf, Size}; use rustc::ty; use crate::{ - PlaceTy, OpTy, ImmTy, Immediate, Scalar, ScalarMaybeUndef, Borrow, + PlaceTy, OpTy, ImmTy, Immediate, Scalar, ScalarMaybeUndef, Tag, OperatorEvalContextExt }; @@ -13,8 +13,8 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, fn call_intrinsic( &mut self, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: PlaceTy<'tcx, Borrow>, + args: &[OpTy<'tcx, Tag>], + dest: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); if this.emulate_intrinsic(instance, args, dest)? { diff --git a/src/lib.rs b/src/lib.rs index 541986de55..3dbe922999 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -23,6 +23,7 @@ mod stacked_borrows; use std::collections::HashMap; use std::borrow::Cow; +use std::rc::Rc; use rand::rngs::StdRng; use rand::SeedableRng; @@ -48,7 +49,7 @@ use crate::mono_hash_map::MonoHashMap; pub use crate::stacked_borrows::{EvalContextExt as StackedBorEvalContextExt}; // Used by priroda. -pub use crate::stacked_borrows::{Borrow, Stack, Stacks, BorStackItem}; +pub use crate::stacked_borrows::{Tag, Permission, Stack, Stacks, Item}; /// Insert rustc arguments at the beginning of the argument list that Miri wants to be /// set per default, for maximal validation power. @@ -155,7 +156,7 @@ pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>( // Don't forget `0` terminator. cmd.push(std::char::from_u32(0).unwrap()); // Collect the pointers to the individual strings. 
- let mut argvs = Vec::>::new(); + let mut argvs = Vec::>::new(); for arg in config.args { // Add `0` terminator. let mut arg = arg.into_bytes(); @@ -187,7 +188,7 @@ pub fn create_ecx<'a, 'mir: 'a, 'tcx: 'mir>( Size::from_bytes(cmd_utf16.len() as u64 * 2), Align::from_bytes(2).unwrap(), MiriMemoryKind::Env.into(), - ).with_default_tag(); + ); ecx.machine.cmd_line = Some(cmd_ptr); // Store the UTF-16 string. let char_size = Size::from_bytes(2); @@ -214,7 +215,13 @@ pub fn eval_main<'a, 'tcx: 'a>( main_id: DefId, config: MiriConfig, ) { - let mut ecx = create_ecx(tcx, main_id, config).expect("couldn't create ecx"); + let mut ecx = match create_ecx(tcx, main_id, config) { + Ok(ecx) => ecx, + Err(mut err) => { + err.print_backtrace(); + panic!("Miri initialziation error: {}", err.kind) + } + }; // Perform the main execution. let res: EvalResult = (|| { @@ -310,14 +317,14 @@ impl MayLeak for MiriMemoryKind { pub struct Evaluator<'tcx> { /// Environment variables set by `setenv`. /// Miri does not expose env vars from the host to the emulated program. - pub(crate) env_vars: HashMap, Pointer>, + pub(crate) env_vars: HashMap, Pointer>, /// Program arguments (`Option` because we can only initialize them after creating the ecx). /// These are *pointers* to argc/argv because macOS. /// We also need the full command line as one string because of Windows. - pub(crate) argc: Option>, - pub(crate) argv: Option>, - pub(crate) cmd_line: Option>, + pub(crate) argc: Option>, + pub(crate) argv: Option>, + pub(crate) cmd_line: Option>, /// Last OS error. pub(crate) last_error: u32, @@ -328,9 +335,6 @@ pub struct Evaluator<'tcx> { /// Whether to enforce the validity invariant. pub(crate) validate: bool, - /// Stacked Borrows state. 
- pub(crate) stacked_borrows: stacked_borrows::State, - /// The random number generator to use if Miri /// is running in non-deterministic mode pub(crate) rng: Option @@ -346,7 +350,6 @@ impl<'tcx> Evaluator<'tcx> { last_error: 0, tls: TlsData::default(), validate, - stacked_borrows: stacked_borrows::State::default(), rng: seed.map(|s| StdRng::seed_from_u64(s)) } } @@ -378,9 +381,9 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { type FrameExtra = stacked_borrows::CallId; type MemoryExtra = stacked_borrows::MemoryState; type AllocExtra = stacked_borrows::Stacks; - type PointerTag = Borrow; + type PointerTag = Tag; - type MemoryMap = MonoHashMap, Allocation)>; + type MemoryMap = MonoHashMap, Allocation)>; const STATIC_KIND: Option = Some(MiriMemoryKind::MutStatic); @@ -394,8 +397,8 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { fn find_fn( ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: Option>, + args: &[OpTy<'tcx, Tag>], + dest: Option>, ret: Option, ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>> { ecx.find_fn(instance, args, dest, ret) @@ -405,8 +408,8 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { fn call_intrinsic( ecx: &mut rustc_mir::interpret::InterpretCx<'a, 'mir, 'tcx, Self>, instance: ty::Instance<'tcx>, - args: &[OpTy<'tcx, Borrow>], - dest: PlaceTy<'tcx, Borrow>, + args: &[OpTy<'tcx, Tag>], + dest: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { ecx.call_intrinsic(instance, args, dest) } @@ -415,15 +418,15 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { fn ptr_op( ecx: &rustc_mir::interpret::InterpretCx<'a, 'mir, 'tcx, Self>, bin_op: mir::BinOp, - left: ImmTy<'tcx, Borrow>, - right: ImmTy<'tcx, Borrow>, - ) -> EvalResult<'tcx, (Scalar, bool)> { + left: ImmTy<'tcx, Tag>, + right: ImmTy<'tcx, Tag>, + ) -> EvalResult<'tcx, (Scalar, bool)> { ecx.ptr_op(bin_op, left, right) } fn box_alloc( ecx: &mut 
InterpretCx<'a, 'mir, 'tcx, Self>, - dest: PlaceTy<'tcx, Borrow>, + dest: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { trace!("box_alloc for {:?}", dest.layout.ty); // Call the `exchange_malloc` lang item. @@ -467,7 +470,7 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { def_id: DefId, tcx: TyCtxtAt<'a, 'tcx, 'tcx>, memory_extra: &Self::MemoryExtra, - ) -> EvalResult<'tcx, Cow<'tcx, Allocation>> { + ) -> EvalResult<'tcx, Cow<'tcx, Allocation>> { let attrs = tcx.get_attrs(def_id); let link_name = match attr::first_attr_value_str_by_name(&attrs, "link_name") { Some(name) => name.as_str(), @@ -479,7 +482,7 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { // This should be all-zero, pointer-sized. let size = tcx.data_layout.pointer_size; let data = vec![0; size.bytes() as usize]; - let extra = AllocationExtra::memory_allocated(size, memory_extra); + let extra = Stacks::new(size, Tag::default(), Rc::clone(memory_extra)); Allocation::from_bytes(&data, tcx.data_layout.pointer_align.abi, extra) } _ => return err!(Unimplemented( @@ -499,16 +502,17 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { fn adjust_static_allocation<'b>( alloc: &'b Allocation, memory_extra: &Self::MemoryExtra, - ) -> Cow<'b, Allocation> { - let extra = AllocationExtra::memory_allocated( + ) -> Cow<'b, Allocation> { + let extra = Stacks::new( Size::from_bytes(alloc.bytes.len() as u64), - memory_extra, + Tag::default(), + Rc::clone(memory_extra), ); - let alloc: Allocation = Allocation { + let alloc: Allocation = Allocation { bytes: alloc.bytes.clone(), relocations: Relocations::from_presorted( alloc.relocations.iter() - .map(|&(offset, ((), alloc))| (offset, (Borrow::default(), alloc))) + .map(|&(offset, ((), alloc))| (offset, (Tag::default(), alloc))) .collect() ), undef_mask: alloc.undef_mask.clone(), @@ -519,46 +523,30 @@ impl<'a, 'mir, 'tcx> Machine<'a, 'mir, 'tcx> for Evaluator<'tcx> { Cow::Owned(alloc) } - fn tag_dereference( - ecx: 
&InterpretCx<'a, 'mir, 'tcx, Self>, - place: MPlaceTy<'tcx, Borrow>, - mutability: Option, - ) -> EvalResult<'tcx, Scalar> { - let size = ecx.size_and_align_of_mplace(place)?.map(|(size, _)| size) - // For extern types, just cover what we can. - .unwrap_or_else(|| place.layout.size); - if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || - !Self::enforce_validity(ecx) || size == Size::ZERO - { - // No tracking. - Ok(place.ptr) - } else { - ecx.ptr_dereference(place, size, mutability.into())?; - // We never change the pointer. - Ok(place.ptr) - } + #[inline(always)] + fn new_allocation( + size: Size, + extra: &Self::MemoryExtra, + kind: MemoryKind, + ) -> (Self::AllocExtra, Self::PointerTag) { + Stacks::new_allocation(size, extra, kind) } #[inline(always)] - fn tag_new_allocation( - ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, - ptr: Pointer, - kind: MemoryKind, - ) -> Pointer { - if !ecx.machine.validate { - // No tracking. - ptr.with_default_tag() - } else { - let tag = ecx.tag_new_allocation(ptr.alloc_id, kind); - Pointer::new_with_tag(ptr.alloc_id, ptr.offset, tag) - } + fn tag_dereference( + _ecx: &InterpretCx<'a, 'mir, 'tcx, Self>, + place: MPlaceTy<'tcx, Tag>, + _mutability: Option, + ) -> EvalResult<'tcx, Scalar> { + // Nothing happens. + Ok(place.ptr) } #[inline(always)] fn retag( ecx: &mut InterpretCx<'a, 'mir, 'tcx, Self>, kind: mir::RetagKind, - place: PlaceTy<'tcx, Borrow>, + place: PlaceTy<'tcx, Tag>, ) -> EvalResult<'tcx> { if !ecx.tcx.sess.opts.debugging_opts.mir_emit_retag || !Self::enforce_validity(ecx) { // No tracking, or no retagging. 
The latter is possible because a dependency of ours diff --git a/src/operator.rs b/src/operator.rs index 45c0e63542..386fc4307b 100644 --- a/src/operator.rs +++ b/src/operator.rs @@ -7,39 +7,39 @@ pub trait EvalContextExt<'tcx> { fn ptr_op( &self, bin_op: mir::BinOp, - left: ImmTy<'tcx, Borrow>, - right: ImmTy<'tcx, Borrow>, - ) -> EvalResult<'tcx, (Scalar, bool)>; + left: ImmTy<'tcx, Tag>, + right: ImmTy<'tcx, Tag>, + ) -> EvalResult<'tcx, (Scalar, bool)>; fn ptr_int_arithmetic( &self, bin_op: mir::BinOp, - left: Pointer, + left: Pointer, right: u128, signed: bool, - ) -> EvalResult<'tcx, (Scalar, bool)>; + ) -> EvalResult<'tcx, (Scalar, bool)>; fn ptr_eq( &self, - left: Scalar, - right: Scalar, + left: Scalar, + right: Scalar, ) -> EvalResult<'tcx, bool>; fn pointer_offset_inbounds( &self, - ptr: Scalar, + ptr: Scalar, pointee_ty: Ty<'tcx>, offset: i64, - ) -> EvalResult<'tcx, Scalar>; + ) -> EvalResult<'tcx, Scalar>; } impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, 'tcx> { fn ptr_op( &self, bin_op: mir::BinOp, - left: ImmTy<'tcx, Borrow>, - right: ImmTy<'tcx, Borrow>, - ) -> EvalResult<'tcx, (Scalar, bool)> { + left: ImmTy<'tcx, Tag>, + right: ImmTy<'tcx, Tag>, + ) -> EvalResult<'tcx, (Scalar, bool)> { use rustc::mir::BinOp::*; trace!("ptr_op: {:?} {:?} {:?}", *left, bin_op, *right); @@ -136,8 +136,8 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' fn ptr_eq( &self, - left: Scalar, - right: Scalar, + left: Scalar, + right: Scalar, ) -> EvalResult<'tcx, bool> { let size = self.pointer_size(); Ok(match (left, right) { @@ -233,13 +233,13 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' fn ptr_int_arithmetic( &self, bin_op: mir::BinOp, - left: Pointer, + left: Pointer, right: u128, signed: bool, - ) -> EvalResult<'tcx, (Scalar, bool)> { + ) -> EvalResult<'tcx, (Scalar, bool)> { use rustc::mir::BinOp::*; - fn map_to_primval((res, over): (Pointer, bool)) -> (Scalar, 
bool) { + fn map_to_primval((res, over): (Pointer, bool)) -> (Scalar, bool) { (Scalar::Ptr(res), over) } @@ -327,10 +327,10 @@ impl<'a, 'mir, 'tcx> EvalContextExt<'tcx> for super::MiriEvalContext<'a, 'mir, ' /// allocation, and all the remaining integers pointers their own allocation. fn pointer_offset_inbounds( &self, - ptr: Scalar, + ptr: Scalar, pointee_ty: Ty<'tcx>, offset: i64, - ) -> EvalResult<'tcx, Scalar> { + ) -> EvalResult<'tcx, Scalar> { // FIXME: assuming here that type size is less than `i64::max_value()`. let pointee_size = self.layout_of(pointee_ty)?.size.bytes() as i64; let offset = offset diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index bea6aaf9cf..080200b12a 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -1,6 +1,8 @@ use std::cell::RefCell; use std::collections::HashSet; use std::rc::Rc; +use std::fmt; +use std::num::NonZeroU64; use rustc::ty::{self, layout::Size}; use rustc::hir::{Mutability, MutMutable, MutImmutable}; @@ -8,120 +10,163 @@ use rustc::mir::RetagKind; use crate::{ EvalResult, InterpError, MiriEvalContext, HelpersEvalContextExt, Evaluator, MutValueVisitor, - MemoryKind, MiriMemoryKind, RangeMap, AllocId, Allocation, AllocationExtra, + MemoryKind, MiriMemoryKind, RangeMap, Allocation, AllocationExtra, Pointer, Immediate, ImmTy, PlaceTy, MPlaceTy, }; -pub type Timestamp = u64; +pub type PtrId = NonZeroU64; pub type CallId = u64; -/// Information about which kind of borrow was used to create the reference this is tagged with. +/// Tracking pointer provenance #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub enum Borrow { - /// A unique (mutable) reference. - Uniq(Timestamp), - /// An aliasing reference. This is also used by raw pointers, which do not track details - /// of how or when they were created, hence the timestamp is optional. - /// `Shr(Some(_))` does *not* mean that the destination of this reference is frozen; - /// that depends on the type! 
Only those parts outside of an `UnsafeCell` are actually - /// frozen. - Alias(Option), +pub enum Tag { + Tagged(PtrId), + Untagged, } -impl Borrow { - #[inline(always)] - pub fn is_aliasing(self) -> bool { - match self { - Borrow::Alias(_) => true, - _ => false, - } - } - - #[inline(always)] - pub fn is_unique(self) -> bool { +impl fmt::Display for Tag { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - Borrow::Uniq(_) => true, - _ => false, + Tag::Tagged(id) => write!(f, "{}", id), + Tag::Untagged => write!(f, ""), } } } -impl Default for Borrow { - fn default() -> Self { - Borrow::Alias(None) - } +/// Indicates which permission is granted (by this item to some pointers) +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum Permission { + /// Grants unique mutable access. + Unique, + /// Grants shared mutable access. + SharedReadWrite, + /// Greants shared read-only access. + SharedReadOnly, } /// An item in the per-location borrow stack. #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub enum BorStackItem { - /// Indicates the unique reference that may mutate. - Uniq(Timestamp), - /// Indicates that the location has been mutably shared. Used for raw pointers as - /// well as for unfrozen shared references. - Raw, +pub enum Item { + /// Grants the given permission for pointers with this tag. + Permission(Permission, Tag), /// A barrier, tracking the function it belongs to by its index on the call stack. - FnBarrier(CallId) + FnBarrier(CallId), +} + +impl fmt::Display for Item { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + Item::Permission(perm, tag) => write!(f, "[{:?} for {}]", perm, tag), + Item::FnBarrier(call) => write!(f, "[barrier {}]", call), + } + } } /// Extra per-location state. #[derive(Clone, Debug, PartialEq, Eq)] pub struct Stack { - /// Used as the stack; never empty. - borrows: Vec, - /// A virtual frozen "item" on top of the stack. 
- frozen_since: Option, + /// Used *mostly* as a stack; never empty. + /// We sometimes push into the middle but never remove from the middle. + /// The same tag may occur multiple times, e.g. from a two-phase borrow. + /// Invariants: + /// * Above a `SharedReadOnly` there can only be barriers and more `SharedReadOnly`. + borrows: Vec, } -impl Stack { - #[inline(always)] - pub fn is_frozen(&self) -> bool { - self.frozen_since.is_some() - } + +/// Extra per-allocation state. +#[derive(Clone, Debug)] +pub struct Stacks { + // Even reading memory can have effects on the stack, so we need a `RefCell` here. + stacks: RefCell>, + // Pointer to global state + global: MemoryState, } -/// Indicates which kind of reference is being used. -#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub enum RefKind { - /// `&mut`. - Unique, - /// `&` without interior mutability. - Frozen, - /// `*` (raw pointer) or `&` to `UnsafeCell`. - Raw, +/// Extra global state, available to the memory access hooks. +#[derive(Debug)] +pub struct GlobalState { + next_ptr_id: PtrId, + next_call_id: CallId, + active_calls: HashSet, } +pub type MemoryState = Rc>; /// Indicates which kind of access is being performed. #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub enum AccessKind { Read, - Write, - Dealloc, + Write { dealloc: bool }, } -/// Extra global state in the memory, available to the memory access hooks. 
-#[derive(Debug)] -pub struct BarrierTracking { - next_id: CallId, - active_calls: HashSet, +// "Fake" constructors +impl AccessKind { + fn write() -> AccessKind { + AccessKind::Write { dealloc: false } + } + + fn dealloc() -> AccessKind { + AccessKind::Write { dealloc: true } + } +} + +impl fmt::Display for AccessKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + AccessKind::Read => write!(f, "read"), + AccessKind::Write { dealloc: false } => write!(f, "write"), + AccessKind::Write { dealloc: true } => write!(f, "deallocation"), + } + } +} + +/// Indicates which kind of reference is being created. +/// Used by `reborrow` to compute which permissions to grant to the +/// new pointer. +#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] +pub enum RefKind { + /// `&mut`. + Mutable, + /// `&` with or without interior mutability. + Shared { frozen: bool }, + /// `*` (raw pointer). + Raw, +} + +impl fmt::Display for RefKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match self { + RefKind::Mutable => write!(f, "mutable"), + RefKind::Shared { frozen: true } => write!(f, "shared (frozen)"), + RefKind::Shared { frozen: false } => write!(f, "shared (mutable)"), + RefKind::Raw => write!(f, "raw"), + } + } } -pub type MemoryState = Rc>; -impl Default for BarrierTracking { +/// Utilities for initialization and ID generation +impl Default for GlobalState { fn default() -> Self { - BarrierTracking { - next_id: 0, + GlobalState { + next_ptr_id: NonZeroU64::new(1).unwrap(), + next_call_id: 0, active_calls: HashSet::default(), } } } -impl BarrierTracking { +impl GlobalState { + pub fn new_ptr(&mut self) -> PtrId { + let id = self.next_ptr_id; + self.next_ptr_id = NonZeroU64::new(id.get() + 1).unwrap(); + id + } + pub fn new_call(&mut self) -> CallId { - let id = self.next_id; + let id = self.next_call_id; trace!("new_call: Assigning ID {}", id); self.active_calls.insert(id); - self.next_id += 1; + self.next_call_id = id+1; id } @@ 
-134,272 +179,354 @@ impl BarrierTracking { } } -/// Extra global machine state. -#[derive(Clone, Debug)] -pub struct State { - clock: Timestamp -} - -impl Default for State { - fn default() -> Self { - State { clock: 0 } - } -} +// # Stacked Borrows Core Begin -impl State { - fn increment_clock(&mut self) -> Timestamp { - let val = self.clock; - self.clock = val + 1; - val - } -} - -/// Extra per-allocation state. -#[derive(Clone, Debug)] -pub struct Stacks { - // Even reading memory can have effects on the stack, so we need a `RefCell` here. - stacks: RefCell>, - barrier_tracking: MemoryState, -} - -/// Core per-location operations: deref, access, create. /// We need to make at least the following things true: /// /// U1: After creating a `Uniq`, it is at the top (and unfrozen). /// U2: If the top is `Uniq` (and unfrozen), accesses must be through that `Uniq` or pop it. -/// U3: If an access (deref sufficient?) happens with a `Uniq`, it requires the `Uniq` to be in the stack. +/// U3: If an access happens with a `Uniq`, it requires the `Uniq` to be in the stack. /// /// F1: After creating a `&`, the parts outside `UnsafeCell` are frozen. /// F2: If a write access happens, it unfreezes. -/// F3: If an access (well, a deref) happens with an `&` outside `UnsafeCell`, +/// F3: If an access happens with an `&` outside `UnsafeCell`, /// it requires the location to still be frozen. -impl<'tcx> Stack { - /// Deref `bor`: check if the location is frozen and the tag in the stack. - /// This dos *not* constitute an access! "Deref" refers to the `*` operator - /// in Rust, and includs cases like `&*x` or `(*x).foo` where no or only part - /// of the memory actually gets accessed. Also we cannot know if we are - /// going to read or write. - /// Returns the index of the item we matched, `None` if it was the frozen one. - /// `kind` indicates which kind of reference is being dereferenced. 
- fn deref( - &self, - bor: Borrow, - kind: RefKind, - ) -> Result, String> { - // Exclude unique ref with frozen tag. - if let (RefKind::Unique, Borrow::Alias(Some(_))) = (kind, bor) { - return Err(format!("encountered mutable reference with frozen tag ({:?})", bor)); + +impl Default for Tag { + #[inline(always)] + fn default() -> Tag { + Tag::Untagged + } +} + +/// Core relations on `Permission` define which accesses are allowed: +/// On every access, we try to find a *granting* item, and then we remove all +/// *incompatible* items above it. +impl Permission { + /// This defines for a given permission, whether it permits the given kind of access. + fn grants(self, access: AccessKind) -> bool { + match (self, access) { + // Unique and SharedReadWrite allow any kind of access. + (Permission::Unique, _) | + (Permission::SharedReadWrite, _) => + true, + // SharedReadOnly only permits read access. + (Permission::SharedReadOnly, AccessKind::Read) => + true, + (Permission::SharedReadOnly, AccessKind::Write { .. }) => + false, } - // Checks related to freezing. - match bor { - Borrow::Alias(Some(bor_t)) if kind == RefKind::Frozen => { - // We need the location to be frozen. This ensures F3. - let frozen = self.frozen_since.map_or(false, |itm_t| itm_t <= bor_t); - return if frozen { Ok(None) } else { - Err(format!("location is not frozen long enough")) - } - } - Borrow::Alias(_) if self.frozen_since.is_some() => { - // Shared deref to frozen location; looking good. - return Ok(None) - } - // Not sufficient; go on looking. - _ => {} + } + + /// This defines for a given permission, which other items it can tolerate "above" itself + /// for which kinds of accesses. + /// If true, then `other` is allowed to remain on top of `self` when `access` happens. 
+ fn compatible_with(self, access: AccessKind, other: Item) -> bool { + use self::Permission::*; + + let other = match other { + Item::Permission(perm, _) => perm, + Item::FnBarrier(_) => return false, // Remove all barriers -- if they are active, cause UB. + }; + + match (self, access, other) { + // Some cases are impossible. + (SharedReadOnly, _, SharedReadWrite) | + (SharedReadOnly, _, Unique) => + bug!("There can never be a SharedReadWrite or a Unique on top of a SharedReadOnly"), + // When `other` is `SharedReadOnly`, that is NEVER compatible with + // write accesses. + // This makes sure read-only pointers become invalid on write accesses. + (_, AccessKind::Write { .. }, SharedReadOnly) => + false, + // When `other` is `Unique`, that is compatible with nothing. + // This makes sure unique pointers become invalid on incompatible accesses (ensures U2). + (_, _, Unique) => + false, + // When we are unique and this is a write/dealloc, we tolerate nothing. + // This makes sure we re-assert uniqueness on write accesses. + // (This is particularily important such that when a new mutable ref gets created, it gets + // pushed into the right item -- this behaves like a write and we assert uniqueness of the + // pointer from which this comes, *if* it was a unique pointer.) + (Unique, AccessKind::Write { .. }, _) => + false, + // `SharedReadWrite` items can tolerate any other akin items for any kind of access. + (SharedReadWrite, _, SharedReadWrite) => + true, + // Any item can tolerate read accesses for shared items. + // This includes unique items! Reads from unique pointers do not invalidate + // other pointers. + (_, AccessKind::Read, SharedReadWrite) | + (_, AccessKind::Read, SharedReadOnly) => + true, + // That's it. } - // If we got here, we have to look for our item in the stack. 
- for (idx, &itm) in self.borrows.iter().enumerate().rev() { - match (itm, bor) { - (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { - // Found matching unique item. This satisfies U3. - return Ok(Some(idx)) - } - (BorStackItem::Raw, Borrow::Alias(_)) => { - // Found matching aliasing/raw item. - return Ok(Some(idx)) - } - // Go on looking. We ignore barriers! When an `&mut` and an `&` alias, - // dereferencing the `&` is still possible (to reborrow), but doing - // an access is not. - _ => {} - } + } +} + +impl<'tcx> RefKind { + /// Defines which kind of access the "parent" must grant to create this reference. + fn access(self) -> AccessKind { + match self { + RefKind::Mutable | RefKind::Shared { frozen: false } => AccessKind::write(), + RefKind::Raw | RefKind::Shared { frozen: true } => AccessKind::Read, + // FIXME: Just requiring read-only access for raw means that a raw ptr might not be writeable + // even when we think it should be! Think about this some more. } - // If we got here, we did not find our item. We have to error to satisfy U3. - Err(format!("Borrow being dereferenced ({:?}) does not exist on the borrow stack", bor)) } - /// Performs an actual memory access using `bor`. We do not know any types here - /// or whether things should be frozen, but we *do* know if this is reading - /// or writing. + /// This defines the new permission used when a pointer gets created: For raw pointers, whether these are read-only + /// or read-write depends on the permission from which they derive. + fn new_perm(self, derived_from: Permission) -> EvalResult<'tcx, Permission> { + Ok(match (self, derived_from) { + // Do not derive writable safe pointer from read-only pointer! 
+ (RefKind::Mutable, Permission::SharedReadOnly) => + return err!(MachineError(format!( + "deriving mutable reference from read-only pointer" + ))), + (RefKind::Shared { frozen: false }, Permission::SharedReadOnly) => + return err!(MachineError(format!( + "deriving shared reference with interior mutability from read-only pointer" + ))), + // Safe pointer cases. + (RefKind::Mutable, _) => Permission::Unique, + (RefKind::Shared { frozen: true }, _) => Permission::SharedReadOnly, + (RefKind::Shared { frozen: false }, _) => Permission::SharedReadWrite, + // Raw pointer cases. + (RefKind::Raw, Permission::SharedReadOnly) => Permission::SharedReadOnly, + (RefKind::Raw, _) => Permission::SharedReadWrite, + }) + } +} + +/// Core per-location operations: access, create. +impl<'tcx> Stack { + /// Find the item granting the given kind of access to the given tag, and where that item is in the stack. + fn find_granting(&self, access: AccessKind, tag: Tag) -> Option<(usize, Permission)> { + self.borrows.iter() + .enumerate() // we also need to know *where* in the stack + .rev() // search top-to-bottom + // Return permission of first item that grants access. + .filter_map(|(idx, item)| match item { + &Item::Permission(perm, item_tag) if perm.grants(access) && tag == item_tag => + Some((idx, perm)), + _ => None, + }) + .next() + } + + /// Test if a memory `access` using pointer tagged `tag` is granted. + /// If yes, return the index of the item that granted it. fn access( &mut self, - bor: Borrow, - kind: AccessKind, - barrier_tracking: &BarrierTracking, - ) -> EvalResult<'tcx> { - // Check if we can match the frozen "item". - // Not possible on writes! - if self.is_frozen() { - if kind == AccessKind::Read { - // When we are frozen, we just accept all reads. No harm in this. - // The deref already checked that `Uniq` items are in the stack, and that - // the location is frozen if it should be. - return Ok(()); - } - trace!("access: unfreezing"); - } - // Unfreeze on writes. 
This ensures F2. - self.frozen_since = None; - // Pop the stack until we have something matching. - while let Some(&itm) = self.borrows.last() { - match (itm, bor) { - (BorStackItem::FnBarrier(call), _) if barrier_tracking.is_active(call) => { - return err!(MachineError(format!( - "stopping looking for borrow being accessed ({:?}) because of barrier ({})", - bor, call - ))) - } - (BorStackItem::Uniq(itm_t), Borrow::Uniq(bor_t)) if itm_t == bor_t => { - // Found matching unique item. Continue after the match. - } - (BorStackItem::Raw, _) if kind == AccessKind::Read => { - // When reading, everything can use a raw item! - // We do not want to do this when writing: Writing to an `&mut` - // should reaffirm its exclusivity (i.e., make sure it is - // on top of the stack). Continue after the match. - } - (BorStackItem::Raw, Borrow::Alias(_)) => { - // Found matching raw item. Continue after the match. - } - _ => { - // Pop this, go on. This ensures U2. - let itm = self.borrows.pop().unwrap(); - trace!("access: Popping {:?}", itm); - continue - } - } - // If we got here, we found a matching item. Congratulations! - // However, we are not done yet: If this access is deallocating, we must make sure - // there are no active barriers remaining on the stack. - if kind == AccessKind::Dealloc { - for &itm in self.borrows.iter().rev() { - match itm { - BorStackItem::FnBarrier(call) if barrier_tracking.is_active(call) => { + access: AccessKind, + tag: Tag, + global: &GlobalState, + ) -> EvalResult<'tcx, usize> { + // Two main steps: Find granting item, remove all incompatible items above. + // Afterwards we just do some post-processing for deallocation accesses. + + // Step 1: Find granting item. + let (granting_idx, granting_perm) = self.find_granting(access, tag) + .ok_or_else(|| InterpError::MachineError(format!( + "no item granting {} access to tag {} found in borrow stack", + access, tag, + )))?; + + // Step 2: Remove everything incompatible above them. 
+ // Implemented with indices because there does not seem to be a nice iterator and range-based + // API for this. + { + let mut cur = granting_idx + 1; + while let Some(item) = self.borrows.get(cur) { + if granting_perm.compatible_with(access, *item) { + // Keep this, check next. + cur += 1; + } else { + // Aha! This is a bad one, remove it, and if it is an *active* barrier + // we have a problem. + match self.borrows.remove(cur) { + Item::FnBarrier(call) if global.is_active(call) => { return err!(MachineError(format!( - "deallocating with active barrier ({})", call - ))) + "not granting access because of barrier ({})", call + ))); } - _ => {}, + _ => {} } } } - // Now we are done. - return Ok(()) } - // If we got here, we did not find our item. - err!(MachineError(format!( - "borrow being accessed ({:?}) does not exist on the borrow stack", - bor - ))) - } - - /// Initiate `bor`; mostly this means pushing. - /// This operation cannot fail; it is up to the caller to ensure that the precondition - /// is met: We cannot push `Uniq` onto frozen stacks. - /// `kind` indicates which kind of reference is being created. - fn create(&mut self, bor: Borrow, kind: RefKind) { - // When creating a frozen reference, freeze. This ensures F1. - // We also do *not* push anything else to the stack, making sure that no nother kind - // of access (like writing through raw pointers) is permitted. - if kind == RefKind::Frozen { - let bor_t = match bor { - Borrow::Alias(Some(t)) => t, - _ => bug!("Creating illegal borrow {:?} for frozen ref", bor), - }; - // It is possible that we already are frozen (e.g., if we just pushed a barrier, - // the redundancy check would not have kicked in). - match self.frozen_since { - Some(loc_t) => assert!( - loc_t <= bor_t, - "trying to freeze location for longer than it was already frozen" - ), - None => { - trace!("create: Freezing"); - self.frozen_since = Some(bor_t); + + // Post-processing. + // If we got here, we found a matching item. 
Congratulations! + // However, we are not done yet: If this access is deallocating, we must make sure + // there are no active barriers remaining on the stack. + if access == AccessKind::dealloc() { + for &itm in self.borrows.iter().rev() { + match itm { + Item::FnBarrier(call) if global.is_active(call) => { + return err!(MachineError(format!( + "deallocating with active barrier ({})", call + ))) + } + _ => {}, } } - return; } - assert!( - self.frozen_since.is_none(), - "trying to create non-frozen reference to frozen location" - ); - // Push new item to the stack. - let itm = match bor { - Borrow::Uniq(t) => BorStackItem::Uniq(t), - Borrow::Alias(_) => BorStackItem::Raw, - }; - if *self.borrows.last().unwrap() == itm { - // This is just an optimization, no functional change: Avoid stacking - // multiple `Shr` on top of each other. - assert!(bor.is_aliasing()); - trace!("create: sharing a shared location is a NOP"); - } else { - // This ensures U1. - trace!("create: pushing {:?}", itm); - self.borrows.push(itm); + // Done. + return Ok(granting_idx); + } + + /// `reborrow` helper function. + /// Grant `permisson` to new pointer tagged `tag`, added at `position` in the stack. + fn grant(&mut self, perm: Permission, tag: Tag, position: usize) { + // Simply add it to the "stack" -- this might add in the middle. + // As an optimization, do nothing if the new item is identical to one of its neighbors. + let item = Item::Permission(perm, tag); + if self.borrows[position-1] == item || self.borrows.get(position) == Some(&item) { + // Optimization applies, done. + trace!("reborrow: avoiding redundant item {}", item); + return; } + trace!("reborrow: pushing item {}", item); + self.borrows.insert(position, item); } + /// `reborrow` helper function. /// Adds a barrier. 
fn barrier(&mut self, call: CallId) { - let itm = BorStackItem::FnBarrier(call); + let itm = Item::FnBarrier(call); if *self.borrows.last().unwrap() == itm { // This is just an optimization, no functional change: Avoid stacking // multiple identical barriers on top of each other. // This can happen when a function receives several shared references // that overlap. - trace!("barrier: avoiding redundant extra barrier"); + trace!("reborrow: avoiding redundant extra barrier"); } else { - trace!("barrier: pushing barrier for call {}", call); + trace!("reborrow: pushing barrier for call {}", call); self.borrows.push(itm); } } + + /// `reborrow` helper function: test that the stack invariants are still maintained. + fn test_invariants(&self) { + let mut saw_shared_read_only = false; + for item in self.borrows.iter() { + match item { + Item::Permission(Permission::SharedReadOnly, _) => { + saw_shared_read_only = true; + } + Item::Permission(perm, _) if saw_shared_read_only => { + panic!("Found {:?} on top of a SharedReadOnly!", perm); + } + _ => {} + } + } + } + + /// Derived a new pointer from one with the given tag . + fn reborrow( + &mut self, + derived_from: Tag, + barrier: Option, + new_kind: RefKind, + new_tag: Tag, + global: &GlobalState, + ) -> EvalResult<'tcx> { + // Find the permission "from which we derive". To this end we first have to decide + // if we derive from a permission that grants writes or just reads. + let access = new_kind.access(); + let (derived_from_idx, derived_from_perm) = self.find_granting(access, derived_from) + .ok_or_else(|| InterpError::MachineError(format!( + "no item to reborrow as {} from tag {} found in borrow stack", new_kind, derived_from, + )))?; + // With this we can compute the permission for the new pointer. 
+ let new_perm = new_kind.new_perm(derived_from_perm)?; + + // We behave very differently for the "unsafe" case of a shared-read-write pointer + // ("unsafe" because this also applies to shared references with interior mutability). + // This is because such pointers may be reborrowed to unique pointers that actually + // remain valid when their "parents" get further reborrows! + if new_perm == Permission::SharedReadWrite { + // A very liberal reborrow because the new pointer does not expect any kind of aliasing guarantee. + // Just insert new permission as child of old permission, and maintain everything else. + // This inserts "as far down as possible", which is good because it makes this pointer as + // long-lived as possible *and* we want all the items that are incompatible with this + // to actually get removed from the stack. If we pushed a `SharedReadWrite` on top of + // a `SharedReadOnly`, we'd violate the invariant that `SaredReadOnly` are at the top + // and we'd allow write access without invalidating frozen shared references! + self.grant(new_perm, new_tag, derived_from_idx+1); + + // No barrier. They can rightfully alias with `&mut`. + // FIXME: This means that the `dereferencable` attribute on non-frozen shared references + // is incorrect! They are dereferencable when the function is called, but might become + // non-dereferencable during the course of execution. + // Also see [1], [2]. + // + // [1]: , + // [2]: + } else { + // A "safe" reborrow for a pointer that actually expects some aliasing guarantees. + // Here, creating a reference actually counts as an access, and pops incompatible + // stuff off the stack. + let check_idx = self.access(access, derived_from, global)?; + assert_eq!(check_idx, derived_from_idx, "somehow we saw different items??"); + + // Now is a good time to add the barrier. 
+ if let Some(call) = barrier { + self.barrier(call); + } + + // We insert "as far up as possible": We know only compatible items are remaining + // on top of `derived_from`, and we want the new item at the top so that we + // get the strongest possible guarantees. + self.grant(new_perm, new_tag, self.borrows.len()); + } + + // Make sure that after all this, the stack's invariant is still maintained. + if cfg!(debug_assertions) { + self.test_invariants(); + } + + Ok(()) + } } /// Higher-level per-location operations: deref, access, reborrow. impl<'tcx> Stacks { - /// Checks that this stack is fine with being dereferenced. - fn deref( - &self, - ptr: Pointer, + /// Creates new stack with initial tag. + pub(crate) fn new( size: Size, - kind: RefKind, - ) -> EvalResult<'tcx> { - trace!("deref for tag {:?} as {:?}: {:?}, size {}", - ptr.tag, kind, ptr, size.bytes()); - let stacks = self.stacks.borrow(); - for stack in stacks.iter(ptr.offset, size) { - stack.deref(ptr.tag, kind).map_err(InterpError::MachineError)?; + tag: Tag, + extra: MemoryState, + ) -> Self { + let item = Item::Permission(Permission::Unique, tag); + let stack = Stack { + borrows: vec![item], + }; + Stacks { + stacks: RefCell::new(RangeMap::new(size, stack)), + global: extra, } - Ok(()) } /// `ptr` got used, reflect that in the stack. fn access( &self, - ptr: Pointer, + ptr: Pointer, size: Size, kind: AccessKind, ) -> EvalResult<'tcx> { - trace!("{:?} access of tag {:?}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes()); + trace!("{} access of tag {}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes()); // Even reads can have a side-effect, by invalidating other references. // This is fundamentally necessary since `&mut` asserts that there // are no accesses through other references, not even reads. 
- let barrier_tracking = self.barrier_tracking.borrow(); + let global = self.global.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.access(ptr.tag, kind, &*barrier_tracking)?; + stack.access(kind, ptr.tag, &*global)?; } Ok(()) } @@ -408,86 +535,61 @@ impl<'tcx> Stacks { /// This works on `&self` because we might encounter references to constant memory. fn reborrow( &self, - ptr: Pointer, + ptr: Pointer, size: Size, - mut barrier: Option, - new_bor: Borrow, + barrier: Option, new_kind: RefKind, + new_tag: Tag, ) -> EvalResult<'tcx> { - assert_eq!(new_bor.is_unique(), new_kind == RefKind::Unique); trace!( - "reborrow for tag {:?} to {:?} as {:?}: {:?}, size {}", - ptr.tag, new_bor, new_kind, ptr, size.bytes(), + "{} reborrow for tag {} to {}: {:?}, size {}", + new_kind, ptr.tag, new_tag, ptr, size.bytes(), ); - if new_kind == RefKind::Raw { - // No barrier for raw, including `&UnsafeCell`. They can rightfully alias with `&mut`. - // FIXME: This means that the `dereferencable` attribute on non-frozen shared references - // is incorrect! They are dereferencable when the function is called, but might become - // non-dereferencable during the course of execution. - // Also see [1], [2]. - // - // [1]: , - // [2]: - barrier = None; - } - let barrier_tracking = self.barrier_tracking.borrow(); + let global = self.global.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - // Access source `ptr`, create new ref. - let ptr_idx = stack.deref(ptr.tag, new_kind).map_err(InterpError::MachineError)?; - // If we can deref the new tag already, and if that tag lives higher on - // the stack than the one we come from, just use that. - // That is, we check if `new_bor` *already* is "derived from" `ptr.tag`. - // This also checks frozenness, if required. 
- let bor_redundant = barrier.is_none() && - match (ptr_idx, stack.deref(new_bor, new_kind)) { - // If the new borrow works with the frozen item, or else if it lives - // above the old one in the stack, our job here is done. - (_, Ok(None)) => true, - (Some(ptr_idx), Ok(Some(new_idx))) if new_idx >= ptr_idx => true, - // Otherwise, we need to create a new borrow. - _ => false, - }; - if bor_redundant { - assert!(new_bor.is_aliasing(), "a unique reborrow can never be redundant"); - trace!("reborrow is redundant"); - continue; - } - // We need to do some actual work. - let access_kind = if new_kind == RefKind::Unique { - AccessKind::Write - } else { - AccessKind::Read - }; - stack.access(ptr.tag, access_kind, &*barrier_tracking)?; - if let Some(call) = barrier { - stack.barrier(call); - } - stack.create(new_bor, new_kind); + stack.reborrow(ptr.tag, barrier, new_kind, new_tag, &*global)?; } Ok(()) } } -/// Hooks and glue. -impl AllocationExtra for Stacks { - #[inline(always)] - fn memory_allocated<'tcx>(size: Size, extra: &MemoryState) -> Self { - let stack = Stack { - borrows: vec![BorStackItem::Raw], - frozen_since: None, +// # Stacked Borrows Core End + +// Glue code to connect with Miri Machine Hooks + +impl Stacks { + pub fn new_allocation( + size: Size, + extra: &MemoryState, + kind: MemoryKind, + ) -> (Self, Tag) { + let tag = match kind { + MemoryKind::Stack => { + // New unique borrow. This `Uniq` is not accessible by the program, + // so it will only ever be used when using the local directly (i.e., + // not through a pointer). That is, whenever we directly use a local, this will pop + // everything else off the stack, invalidating all previous pointers, + // and in particular, *all* raw pointers. This subsumes the explicit + // `reset` which the blog post [1] says to perform when accessing a local. 
+ // + // [1]: + Tag::Tagged(extra.borrow_mut().new_ptr()) + } + _ => { + Tag::Untagged + } }; - Stacks { - stacks: RefCell::new(RangeMap::new(size, stack)), - barrier_tracking: Rc::clone(extra), - } + let stack = Stacks::new(size, tag, Rc::clone(extra)); + (stack, tag) } +} +impl AllocationExtra for Stacks { #[inline(always)] fn memory_read<'tcx>( - alloc: &Allocation, - ptr: Pointer, + alloc: &Allocation, + ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { alloc.extra.access(ptr, size, AccessKind::Read) @@ -495,35 +597,20 @@ impl AllocationExtra for Stacks { #[inline(always)] fn memory_written<'tcx>( - alloc: &mut Allocation, - ptr: Pointer, + alloc: &mut Allocation, + ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::Write) + alloc.extra.access(ptr, size, AccessKind::write()) } #[inline(always)] fn memory_deallocated<'tcx>( - alloc: &mut Allocation, - ptr: Pointer, + alloc: &mut Allocation, + ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::Dealloc) - } -} - -impl<'tcx> Stacks { - /// Pushes the first item to the stacks. 
- pub(crate) fn first_item( - &mut self, - itm: BorStackItem, - size: Size - ) { - for stack in self.stacks.get_mut().iter_mut(Size::ZERO, size) { - assert!(stack.borrows.len() == 1); - assert_eq!(stack.borrows.pop().unwrap(), BorStackItem::Raw); - stack.borrows.push(itm); - } + alloc.extra.access(ptr, size, AccessKind::dealloc()) } } @@ -531,31 +618,32 @@ impl<'a, 'mir, 'tcx> EvalContextPrivExt<'a, 'mir, 'tcx> for crate::MiriEvalConte trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> { fn reborrow( &mut self, - place: MPlaceTy<'tcx, Borrow>, + place: MPlaceTy<'tcx, Tag>, size: Size, + mutbl: Option, + new_tag: Tag, fn_barrier: bool, - new_bor: Borrow ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); - let ptr = place.ptr.to_ptr()?; let barrier = if fn_barrier { Some(this.frame().extra) } else { None }; + let ptr = place.ptr.to_ptr()?; trace!("reborrow: creating new reference for {:?} (pointee {}): {:?}", - ptr, place.layout.ty, new_bor); + ptr, place.layout.ty, new_tag); // Get the allocation. It might not be mutable, so we cannot use `get_mut`. let alloc = this.memory().get(ptr.alloc_id)?; alloc.check_bounds(this, ptr, size)?; // Update the stacks. - if let Borrow::Alias(Some(_)) = new_bor { + if mutbl == Some(MutImmutable) { // Reference that cares about freezing. We need a frozen-sensitive reborrow. this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { - let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; - alloc.extra.reborrow(cur_ptr, size, barrier, new_bor, kind) + let new_kind = RefKind::Shared { frozen }; + alloc.extra.reborrow(cur_ptr, size, barrier, new_kind, new_tag) })?; } else { // Just treat this as one big chunk. 
- let kind = if new_bor.is_unique() { RefKind::Unique } else { RefKind::Raw }; - alloc.extra.reborrow(ptr, size, barrier, new_bor, kind)?; + let new_kind = if mutbl == Some(MutMutable) { RefKind::Mutable } else { RefKind::Raw }; + alloc.extra.reborrow(ptr, size, barrier, new_kind, new_tag)?; } Ok(()) } @@ -564,11 +652,11 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, /// `mutbl` can be `None` to make this a raw pointer. fn retag_reference( &mut self, - val: ImmTy<'tcx, Borrow>, + val: ImmTy<'tcx, Tag>, mutbl: Option, fn_barrier: bool, two_phase: bool, - ) -> EvalResult<'tcx, Immediate> { + ) -> EvalResult<'tcx, Immediate> { let this = self.eval_context_mut(); // We want a place for where the ptr *points to*, so we get one. let place = this.ref_to_mplace(val)?; @@ -581,23 +669,24 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, } // Compute new borrow. - let time = this.machine.stacked_borrows.increment_clock(); - let new_bor = match mutbl { - Some(MutMutable) => Borrow::Uniq(time), - Some(MutImmutable) => Borrow::Alias(Some(time)), - None => Borrow::default(), + let new_tag = match mutbl { + Some(_) => Tag::Tagged(this.memory().extra.borrow_mut().new_ptr()), + None => Tag::Untagged, }; // Reborrow. - this.reborrow(place, size, fn_barrier, new_bor)?; - let new_place = place.with_tag(new_bor); + this.reborrow(place, size, mutbl, new_tag, fn_barrier)?; + let new_place = place.replace_tag(new_tag); // Handle two-phase borrows. if two_phase { assert!(mutbl == Some(MutMutable), "two-phase shared borrows make no sense"); - // We immediately share it, to allow read accesses - let two_phase_time = this.machine.stacked_borrows.increment_clock(); - let two_phase_bor = Borrow::Alias(Some(two_phase_time)); - this.reborrow(new_place, size, false /* fn_barrier */, two_phase_bor)?; + // Grant read access *to the parent pointer* with the old tag. 
This means the same pointer + // has multiple items in the stack now! + // FIXME: Think about this some more, in particular about the interaction with cast-to-raw. + // Maybe find a better way to express 2-phase, now that we have a "more expressive language" + // in the stack. + let old_tag = place.ptr.to_ptr().unwrap().tag; + this.reborrow(new_place, size, Some(MutImmutable), old_tag, /* fn_barrier: */ false)?; } // Return new pointer. @@ -607,90 +696,10 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, impl<'a, 'mir, 'tcx> EvalContextExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {} pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> { - fn tag_new_allocation( - &mut self, - id: AllocId, - kind: MemoryKind, - ) -> Borrow { - let this = self.eval_context_mut(); - let time = match kind { - MemoryKind::Stack => { - // New unique borrow. This `Uniq` is not accessible by the program, - // so it will only ever be used when using the local directly (i.e., - // not through a pointer). That is, whenever we directly use a local, this will pop - // everything else off the stack, invalidating all previous pointers, - // and in particular, *all* raw pointers. This subsumes the explicit - // `reset` which the blog post [1] says to perform when accessing a local. - // - // [1]: - this.machine.stacked_borrows.increment_clock() - } - _ => { - // Nothing to do for everything else. - return Borrow::default() - } - }; - // Make this the active borrow for this allocation. - let alloc = this - .memory_mut() - .get_mut(id) - .expect("this is a new allocation; it must still exist"); - let size = Size::from_bytes(alloc.bytes.len() as u64); - alloc.extra.first_item(BorStackItem::Uniq(time), size); - Borrow::Uniq(time) - } - - /// Called for value-to-place conversion. `mutability` is `None` for raw pointers. 
- /// - /// Note that this does *not* mean that all this memory will actually get accessed/referenced! - /// We could be in the middle of `&(*var).1`. - fn ptr_dereference( - &self, - place: MPlaceTy<'tcx, Borrow>, - size: Size, - mutability: Option, - ) -> EvalResult<'tcx> { - let this = self.eval_context_ref(); - trace!( - "ptr_dereference: Accessing {} reference for {:?} (pointee {})", - if let Some(mutability) = mutability { - format!("{:?}", mutability) - } else { - format!("raw") - }, - place.ptr, place.layout.ty - ); - let ptr = place.ptr.to_ptr()?; - if mutability.is_none() { - // No further checks on raw derefs -- only the access itself will be checked. - return Ok(()); - } - - // Get the allocation - let alloc = this.memory().get(ptr.alloc_id)?; - alloc.check_bounds(this, ptr, size)?; - // If we got here, we do some checking, *but* we leave the tag unchanged. - if let Borrow::Alias(Some(_)) = ptr.tag { - assert_eq!(mutability, Some(MutImmutable)); - // We need a frozen-sensitive check. - this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { - let kind = if frozen { RefKind::Frozen } else { RefKind::Raw }; - alloc.extra.deref(cur_ptr, size, kind) - })?; - } else { - // Just treat this as one big chunk. - let kind = if mutability == Some(MutMutable) { RefKind::Unique } else { RefKind::Raw }; - alloc.extra.deref(ptr, size, kind)?; - } - - // All is good. - Ok(()) - } - fn retag( &mut self, kind: RetagKind, - place: PlaceTy<'tcx, Borrow> + place: PlaceTy<'tcx, Tag> ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); // Determine mutability and whether to add a barrier. 
@@ -734,7 +743,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, for RetagVisitor<'ecx, 'a, 'mir, 'tcx> { - type V = MPlaceTy<'tcx, Borrow>; + type V = MPlaceTy<'tcx, Tag>; #[inline(always)] fn ecx(&mut self) -> &mut MiriEvalContext<'a, 'mir, 'tcx> { @@ -742,7 +751,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, } // Primitives of reference type, that is the one thing we are interested in. - fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Borrow>) -> EvalResult<'tcx> + fn visit_primitive(&mut self, place: MPlaceTy<'tcx, Tag>) -> EvalResult<'tcx> { // Cannot use `builtin_deref` because that reports *immutable* for `Box`, // making it useless. diff --git a/src/tls.rs b/src/tls.rs index 992e4fd056..9346fba0dc 100644 --- a/src/tls.rs +++ b/src/tls.rs @@ -5,14 +5,14 @@ use rustc::{ty, ty::layout::HasDataLayout, mir}; use crate::{ EvalResult, InterpError, StackPopCleanup, - MPlaceTy, Scalar, Borrow, + MPlaceTy, Scalar, Tag, }; pub type TlsKey = u128; #[derive(Copy, Clone, Debug)] pub struct TlsEntry<'tcx> { - pub(crate) data: Scalar, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread. + pub(crate) data: Scalar, // Will eventually become a map from thread IDs to `Scalar`s, if we ever support more than one thread. pub(crate) dtor: Option>, } @@ -63,7 +63,7 @@ impl<'tcx> TlsData<'tcx> { } } - pub fn load_tls(&mut self, key: TlsKey) -> EvalResult<'tcx, Scalar> { + pub fn load_tls(&mut self, key: TlsKey) -> EvalResult<'tcx, Scalar> { match self.keys.get(&key) { Some(&TlsEntry { data, .. }) => { trace!("TLS key {} loaded: {:?}", key, data); @@ -73,7 +73,7 @@ impl<'tcx> TlsData<'tcx> { } } - pub fn store_tls(&mut self, key: TlsKey, new_data: Scalar) -> EvalResult<'tcx> { + pub fn store_tls(&mut self, key: TlsKey, new_data: Scalar) -> EvalResult<'tcx> { match self.keys.get_mut(&key) { Some(&mut TlsEntry { ref mut data, .. 
}) => { trace!("TLS key {} stored: {:?}", key, new_data); @@ -106,7 +106,7 @@ impl<'tcx> TlsData<'tcx> { &mut self, key: Option, cx: &impl HasDataLayout, - ) -> Option<(ty::Instance<'tcx>, Scalar, TlsKey)> { + ) -> Option<(ty::Instance<'tcx>, Scalar, TlsKey)> { use std::collections::Bound::*; let thread_local = &mut self.keys; From 966d638760d391db49e691479cb06062a19add0e Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 15 Apr 2019 17:06:42 +0200 Subject: [PATCH 02/16] make run-pass tests pass. tweak how we remove barriers. --- src/stacked_borrows.rs | 85 ++++++++++++++++++---------- tests/run-pass/2phase.rs | 7 +-- tests/run-pass/ptr_arith_offset.rs | 2 +- tests/run-pass/ptr_offset.rs | 2 +- tests/run-pass/regions-mock-trans.rs | 4 +- 5 files changed, 61 insertions(+), 39 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 080200b12a..c55ebecea7 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -183,14 +183,17 @@ impl GlobalState { /// We need to make at least the following things true: /// -/// U1: After creating a `Uniq`, it is at the top (and unfrozen). -/// U2: If the top is `Uniq` (and unfrozen), accesses must be through that `Uniq` or pop it. +/// U1: After creating a `Uniq`, it is at the top. +/// U2: If the top is `Uniq`, accesses must be through that `Uniq` or remove it it. /// U3: If an access happens with a `Uniq`, it requires the `Uniq` to be in the stack. /// -/// F1: After creating a `&`, the parts outside `UnsafeCell` are frozen. -/// F2: If a write access happens, it unfreezes. +/// F1: After creating a `&`, the parts outside `UnsafeCell` have our `SharedReadOnly` on top. +/// F2: If a write access happens, it pops the `SharedReadOnly`. This has three pieces: +/// F2a: If a write happens granted by an item below our `SharedReadOnly`, the `SharedReadOnly` +/// gets popped. +/// F2b: No `SharedReadWrite` or `Unique` will ever be added on top of our `SharedReadOnly`. 
/// F3: If an access happens with an `&` outside `UnsafeCell`, -/// it requires the location to still be frozen. +/// it requires the `SharedReadOnly` to still be in the stack. impl Default for Tag { #[inline(always)] @@ -218,17 +221,12 @@ impl Permission { } } - /// This defines for a given permission, which other items it can tolerate "above" itself + /// This defines for a given permission, which other permissions it can tolerate "above" itself /// for which kinds of accesses. /// If true, then `other` is allowed to remain on top of `self` when `access` happens. - fn compatible_with(self, access: AccessKind, other: Item) -> bool { + fn compatible_with(self, access: AccessKind, other: Permission) -> bool { use self::Permission::*; - let other = match other { - Item::Permission(perm, _) => perm, - Item::FnBarrier(_) => return false, // Remove all barriers -- if they are active, cause UB. - }; - match (self, access, other) { // Some cases are impossible. (SharedReadOnly, _, SharedReadWrite) | @@ -236,7 +234,7 @@ impl Permission { bug!("There can never be a SharedReadWrite or a Unique on top of a SharedReadOnly"), // When `other` is `SharedReadOnly`, that is NEVER compatible with // write accesses. - // This makes sure read-only pointers become invalid on write accesses. + // This makes sure read-only pointers become invalid on write accesses (ensures F2a). (_, AccessKind::Write { .. }, SharedReadOnly) => false, // When `other` is `Unique`, that is compatible with nothing. @@ -244,7 +242,7 @@ impl Permission { (_, _, Unique) => false, // When we are unique and this is a write/dealloc, we tolerate nothing. - // This makes sure we re-assert uniqueness on write accesses. + // This makes sure we re-assert uniqueness ("being on top") on write accesses. 
// (This is particularly important such that when a new mutable ref gets created, it gets // pushed into the right item -- this behaves like a write and we assert uniqueness of the // pointer from which this comes, *if* it was a unique pointer.) @@ -307,6 +305,7 @@ impl<'tcx> Stack { .enumerate() // we also need to know *where* in the stack .rev() // search top-to-bottom // Return permission of first item that grants access. + // We require a permission with the right tag, ensuring U3 and F3. .filter_map(|(idx, item)| match item { &Item::Permission(perm, item_tag) if perm.grants(access) && tag == item_tag => Some((idx, perm)), @@ -324,6 +323,9 @@ impl<'tcx> Stack { global: &GlobalState, ) -> EvalResult<'tcx, usize> { // Two main steps: Find granting item, remove all incompatible items above. + // The second step is where barriers get implemented: they "protect" the items + // below them, meaning that if we remove an item and then further up encounter a barrier, + // we raise an error. // Afterwards we just do some post-processing for deallocation accesses. // Step 1: Find granting item. @@ -338,20 +340,35 @@ impl<'tcx> Stack { // API for this. { let mut cur = granting_idx + 1; + let mut removed_item = None; while let Some(item) = self.borrows.get(cur) { - if granting_perm.compatible_with(access, *item) { - // Keep this, check next. - cur += 1; - } else { - // Aha! This is a bad one, remove it, and if it is an *active* barrier - // we have a problem. - match self.borrows.remove(cur) { - Item::FnBarrier(call) if global.is_active(call) => { + match *item { + Item::Permission(perm, _) => { + if granting_perm.compatible_with(access, perm) { + // Keep this, check next. + cur += 1; + } else { + // Aha! This is a bad one, remove it. + let item = self.borrows.remove(cur); + trace!("access: popping item {}", item); + removed_item = Some(item); + } + } + Item::FnBarrier(call) if !global.is_active(call) => { + // An inactive barrier, just get rid of it. (Housekeeping.)
+ self.borrows.remove(cur); + } + Item::FnBarrier(call) => { + // We hit an active barrier! If we have already removed an item, + // we got a problem! The barrier was supposed to protect this item. + if let Some(removed_item) = removed_item { return err!(MachineError(format!( - "not granting access because of barrier ({})", call - ))); + "not granting {} access to tag {} because barrier ({}) protects incompatible item {}", + access, tag, call, removed_item + ))); } - _ => {} + // Keep this, check next. + cur += 1; } } } @@ -425,7 +442,7 @@ impl<'tcx> Stack { } } - /// Derived a new pointer from one with the given tag . + /// Derive a new pointer from one with the given tag. fn reborrow( &mut self, derived_from: Tag, @@ -448,6 +465,8 @@ impl<'tcx> Stack { // ("unsafe" because this also applies to shared references with interior mutability). // This is because such pointers may be reborrowed to unique pointers that actually // remain valid when their "parents" get further reborrows! + // However, either way, we ensure that we insert the new item in a way that between + // `derived_from` and the new one, there are only items *compatible with* `derived_from`. if new_perm == Permission::SharedReadWrite { // A very liberal reborrow because the new pointer does not expect any kind of aliasing guarantee. // Just insert new permission as child of old permission, and maintain everything else. @@ -456,6 +475,8 @@ impl<'tcx> Stack { // to actually get removed from the stack. If we pushed a `SharedReadWrite` on top of // a `SharedReadOnly`, we'd violate the invariant that `SharedReadOnly` are at the top // and we'd allow write access without invalidating frozen shared references! + // This ensures F2b for `SharedReadWrite` by adding the new item below any + // potentially existing `SharedReadOnly`. self.grant(new_perm, new_tag, derived_from_idx+1); // No barrier. They can rightfully alias with `&mut`.
@@ -471,18 +492,20 @@ impl<'tcx> Stack { // A "safe" reborrow for a pointer that actually expects some aliasing guarantees. // Here, creating a reference actually counts as an access, and pops incompatible // stuff off the stack. + // This ensures F2b for `Unique`, by removing offending `SharedReadOnly`. let check_idx = self.access(access, derived_from, global)?; assert_eq!(check_idx, derived_from_idx, "somehow we saw different items??"); - // Now is a good time to add the barrier. - if let Some(call) = barrier { - self.barrier(call); - } - // We insert "as far up as possible": We know only compatible items are remaining // on top of `derived_from`, and we want the new item at the top so that we // get the strongest possible guarantees. + // This ensures U1 and F1. self.grant(new_perm, new_tag, self.borrows.len()); + + // Now is a good time to add the barrier, protecting the item we just added. + if let Some(call) = barrier { + self.barrier(call); + } } // Make sure that after all this, the stack's invariant is still maintained. diff --git a/tests/run-pass/2phase.rs b/tests/run-pass/2phase.rs index 57f3631143..78ddf566a9 100644 --- a/tests/run-pass/2phase.rs +++ b/tests/run-pass/2phase.rs @@ -51,14 +51,13 @@ fn with_interior_mutability() { impl Thing for Cell {} let mut x = Cell::new(1); - let l = &x; + //let l = &x; - #[allow(unknown_lints, mutable_borrow_reservation_conflict)] x .do_the_thing({ x.set(3); - l.set(4); - x.get() + l.get() + // l.set(4); // FIXME: Enable this as an example of overlapping 2PB! 
+ x.get() // FIXME same: + l.get() }) ; } diff --git a/tests/run-pass/ptr_arith_offset.rs b/tests/run-pass/ptr_arith_offset.rs index 7912da9fd4..a6ee151e3e 100644 --- a/tests/run-pass/ptr_arith_offset.rs +++ b/tests/run-pass/ptr_arith_offset.rs @@ -1,6 +1,6 @@ fn main() { let v = [1i16, 2]; - let x = &v as *const i16; + let x = &v as *const [i16] as *const i16; let x = x.wrapping_offset(1); assert_eq!(unsafe { *x }, 2); } diff --git a/tests/run-pass/ptr_offset.rs b/tests/run-pass/ptr_offset.rs index 9e2e26fad3..1c7f0eb717 100644 --- a/tests/run-pass/ptr_offset.rs +++ b/tests/run-pass/ptr_offset.rs @@ -2,7 +2,7 @@ fn f() -> i32 { 42 } fn main() { let v = [1i16, 2]; - let x = &v as *const i16; + let x = &v as *const [i16; 2] as *const i16; let x = unsafe { x.offset(1) }; assert_eq!(unsafe { *x }, 2); diff --git a/tests/run-pass/regions-mock-trans.rs b/tests/run-pass/regions-mock-trans.rs index ac8a1c04fb..020ed4927a 100644 --- a/tests/run-pass/regions-mock-trans.rs +++ b/tests/run-pass/regions-mock-trans.rs @@ -22,14 +22,14 @@ struct Ccx { x: isize } -fn alloc<'a>(_bcx : &'a Arena) -> &'a Bcx<'a> { +fn alloc<'a>(_bcx : &'a Arena) -> &'a mut Bcx<'a> { unsafe { mem::transmute(libc::malloc(mem::size_of::>() as libc::size_t)) } } -fn h<'a>(bcx : &'a Bcx<'a>) -> &'a Bcx<'a> { +fn h<'a>(bcx : &'a Bcx<'a>) -> &'a mut Bcx<'a> { return alloc(bcx.fcx.arena); } From a6d377ca0b883355b40a2aff9d29fde195dd0a20 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 16 Apr 2019 15:26:21 +0200 Subject: [PATCH 03/16] more comments --- src/stacked_borrows.rs | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index c55ebecea7..40cbf92e96 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -334,11 +334,17 @@ impl<'tcx> Stack { "no item granting {} access to tag {} found in borrow stack", access, tag, )))?; - + // Step 2: Remove everything incompatible above them. 
- // Implemented with indices because there does not seem to be a nice iterator and range-based - // API for this. + // Items below an active barrier however may not be removed, so we check that as well. + // We do *not* maintain a stack discipline here. We could, in principle, decide to only + // keep the items immediately above `granting_idx` that are compatible, and then pop the rest. + // However, that kills off entire "branches" of pointer derivation too easily: + // in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement would pop the `Unique` + // from the reborrow of the first statement, and subsequently also pop the `SharedReadWrite` for `raw`. { + // Implemented with indices because there does not seem to be a nice iterator and range-based + // API for this. let mut cur = granting_idx + 1; let mut removed_item = None; while let Some(item) = self.borrows.get(cur) { @@ -454,6 +460,14 @@ impl<'tcx> Stack { // Find the permission "from which we derive". To this end we first have to decide // if we derive from a permission that grants writes or just reads. let access = new_kind.access(); + // Now we figure out which item grants our parent (`derived_from`) permission. + // We use that to determine (a) where to put the new item, and for raw pointers + // (b) whether to give read-only or read-write access. + // FIXME: This handling of raw pointers is fragile, very fragile. What if we do + // not get "the right one", like when there are multiple items granting `derived_from` + // and we accidentally create a read-only pointer? This can happen for two-phase borrows + // (then there's a `Unique` and a `SharedReadOnly` for the same tag), and for raw pointers + // (which currently all are `Untagged`).
let (derived_from_idx, derived_from_perm) = self.find_granting(access, derived_from) .ok_or_else(|| InterpError::MachineError(format!( "no item to reborrow as {} from tag {} found in borrow stack", new_kind, derived_from, From ef52be031ca7a75863b937e68799de66557563ce Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 16 Apr 2019 17:17:28 +0200 Subject: [PATCH 04/16] adjust compile-fail error messages This also passes miri-test-libstd! --- .../compile-fail/stacked_borrows/alias_through_mutation.rs | 2 +- tests/compile-fail/stacked_borrows/aliasing_mut3.rs | 2 +- .../stacked_borrows/box_exclusive_violation1.rs | 2 +- tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs | 2 +- tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_read1.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_read2.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_read3.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_read4.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_read5.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_write1.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_write2.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_write3.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_write4.rs | 2 +- tests/compile-fail/stacked_borrows/illegal_write5.rs | 2 +- tests/compile-fail/stacked_borrows/load_invalid_mut.rs | 2 +- tests/compile-fail/stacked_borrows/load_invalid_shr.rs | 2 +- .../stacked_borrows/mut_exclusive_violation1.rs | 2 +- tests/compile-fail/stacked_borrows/outdated_local.rs | 2 +- tests/compile-fail/stacked_borrows/pass_invalid_mut.rs | 2 +- tests/compile-fail/stacked_borrows/pass_invalid_shr.rs | 2 +- tests/compile-fail/stacked_borrows/pointer_smuggling.rs | 2 +- tests/compile-fail/stacked_borrows/return_invalid_mut.rs | 2 +- .../stacked_borrows/return_invalid_mut_option.rs | 2 +- .../stacked_borrows/return_invalid_mut_tuple.rs | 2 +- 
tests/compile-fail/stacked_borrows/return_invalid_shr.rs | 2 +- .../stacked_borrows/return_invalid_shr_option.rs | 2 +- .../stacked_borrows/return_invalid_shr_tuple.rs | 2 +- .../compile-fail/stacked_borrows/shr_frozen_violation1.rs | 7 ++----- .../stacked_borrows/static_memory_modification.rs | 2 +- .../compile-fail/stacked_borrows/transmute-is-no-escape.rs | 2 +- tests/compile-fail/stacked_borrows/unescaped_local.rs | 2 +- 32 files changed, 33 insertions(+), 36 deletions(-) diff --git a/tests/compile-fail/stacked_borrows/alias_through_mutation.rs b/tests/compile-fail/stacked_borrows/alias_through_mutation.rs index 30f5921202..4a153d74ff 100644 --- a/tests/compile-fail/stacked_borrows/alias_through_mutation.rs +++ b/tests/compile-fail/stacked_borrows/alias_through_mutation.rs @@ -9,5 +9,5 @@ fn main() { retarget(&mut target_alias, target); // now `target_alias` points to the same thing as `target` *target = 13; - let _val = *target_alias; //~ ERROR does not exist on the borrow stack + let _val = *target_alias; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut3.rs b/tests/compile-fail/stacked_borrows/aliasing_mut3.rs index e3c59d1566..3943e95761 100644 --- a/tests/compile-fail/stacked_borrows/aliasing_mut3.rs +++ b/tests/compile-fail/stacked_borrows/aliasing_mut3.rs @@ -1,6 +1,6 @@ use std::mem; -pub fn safe(_x: &mut i32, _y: &i32) {} //~ ERROR does not exist on the borrow stack +pub fn safe(_x: &mut i32, _y: &i32) {} //~ ERROR borrow stack fn main() { let mut x = 0; diff --git a/tests/compile-fail/stacked_borrows/box_exclusive_violation1.rs b/tests/compile-fail/stacked_borrows/box_exclusive_violation1.rs index 481915faed..7d7f5e24e2 100644 --- a/tests/compile-fail/stacked_borrows/box_exclusive_violation1.rs +++ b/tests/compile-fail/stacked_borrows/box_exclusive_violation1.rs @@ -8,7 +8,7 @@ fn demo_mut_advanced_unique(mut our: Box) -> i32 { unknown_code_2(); // We know this will return 5 - *our //~ ERROR does not exist on the 
borrow stack + *our //~ ERROR borrow stack } // Now comes the evil context diff --git a/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs b/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs index 98d4e6f229..9ff67ae354 100644 --- a/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs +++ b/tests/compile-fail/stacked_borrows/buggy_as_mut_slice.rs @@ -13,5 +13,5 @@ fn main() { let v1 = safe::as_mut_slice(&v); let _v2 = safe::as_mut_slice(&v); v1[1] = 5; - //~^ ERROR does not exist on the borrow stack + //~^ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs b/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs index 42f345f551..812dd47ef1 100644 --- a/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs +++ b/tests/compile-fail/stacked_borrows/buggy_split_at_mut.rs @@ -9,7 +9,7 @@ mod safe { assert!(mid <= len); (from_raw_parts_mut(ptr, len - mid), // BUG: should be "mid" instead of "len - mid" - //~^ ERROR does not exist on the borrow stack + //~^ ERROR borrow stack from_raw_parts_mut(ptr.offset(mid as isize), len - mid)) } } diff --git a/tests/compile-fail/stacked_borrows/illegal_read1.rs b/tests/compile-fail/stacked_borrows/illegal_read1.rs index 3fb38abefd..d942d2b27b 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read1.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read1.rs @@ -7,7 +7,7 @@ fn main() { let xref = unsafe { &mut *xraw }; // derived from raw, so using raw is still ok... callee(xraw); let _val = *xref; // ...but any use of raw will invalidate our ref. 
- //~^ ERROR: does not exist on the borrow stack + //~^ ERROR: borrow stack } fn callee(xraw: *mut i32) { diff --git a/tests/compile-fail/stacked_borrows/illegal_read2.rs b/tests/compile-fail/stacked_borrows/illegal_read2.rs index e43340f0b8..c50c88d48f 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read2.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read2.rs @@ -7,7 +7,7 @@ fn main() { let xref = unsafe { &mut *xraw }; // derived from raw, so using raw is still ok... callee(xraw); let _val = *xref; // ...but any use of raw will invalidate our ref. - //~^ ERROR: does not exist on the borrow stack + //~^ ERROR: borrow stack } fn callee(xraw: *mut i32) { diff --git a/tests/compile-fail/stacked_borrows/illegal_read3.rs b/tests/compile-fail/stacked_borrows/illegal_read3.rs index b4abbb4a1a..09fd5d534c 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read3.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read3.rs @@ -15,7 +15,7 @@ fn main() { callee(xref1_sneaky); // ... though any use of it will invalidate our ref. 
let _val = *xref2; - //~^ ERROR: does not exist on the borrow stack + //~^ ERROR: borrow stack } fn callee(xref1: usize) { diff --git a/tests/compile-fail/stacked_borrows/illegal_read4.rs b/tests/compile-fail/stacked_borrows/illegal_read4.rs index bb889de8f8..d7e281e3ff 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read4.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read4.rs @@ -5,5 +5,5 @@ fn main() { let xraw = xref1 as *mut _; let xref2 = unsafe { &mut *xraw }; let _val = unsafe { *xraw }; // use the raw again, this invalidates xref2 *even* with the special read except for uniq refs - let _illegal = *xref2; //~ ERROR does not exist on the borrow stack + let _illegal = *xref2; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/illegal_read5.rs b/tests/compile-fail/stacked_borrows/illegal_read5.rs index 0f4737f16e..d6120cd64a 100644 --- a/tests/compile-fail/stacked_borrows/illegal_read5.rs +++ b/tests/compile-fail/stacked_borrows/illegal_read5.rs @@ -12,5 +12,5 @@ fn main() { let _val = *xref; // we can even still use our mutable reference mem::forget(unsafe { ptr::read(xshr) }); // but after reading through the shared ref let _val = *xref; // the mutable one is dead and gone - //~^ ERROR does not exist on the borrow stack + //~^ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/illegal_write1.rs b/tests/compile-fail/stacked_borrows/illegal_write1.rs index d0a23cb444..dd262a341e 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write1.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write1.rs @@ -5,5 +5,5 @@ fn main() { let x : *mut u32 = xref as *const _ as *mut _; unsafe { *x = 42; } // invalidates shared ref, activates raw } - let _x = *xref; //~ ERROR is not frozen + let _x = *xref; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/illegal_write2.rs b/tests/compile-fail/stacked_borrows/illegal_write2.rs index affa21c762..62ea05e181 100644 --- 
a/tests/compile-fail/stacked_borrows/illegal_write2.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write2.rs @@ -3,6 +3,6 @@ fn main() { let target2 = target as *mut _; drop(&mut *target); // reborrow // Now make sure our ref is still the only one. - unsafe { *target2 = 13; } //~ ERROR does not exist on the borrow stack + unsafe { *target2 = 13; } //~ ERROR borrow stack let _val = *target; } diff --git a/tests/compile-fail/stacked_borrows/illegal_write3.rs b/tests/compile-fail/stacked_borrows/illegal_write3.rs index dc4edcc3a5..d2d8528d90 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write3.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write3.rs @@ -3,6 +3,6 @@ fn main() { // Make sure raw ptr with raw tag cannot mutate frozen location without breaking the shared ref. let r#ref = ⌖ // freeze let ptr = r#ref as *const _ as *mut _; // raw ptr, with raw tag - unsafe { *ptr = 42; } //~ ERROR does not exist on the borrow stack + unsafe { *ptr = 42; } //~ ERROR borrow stack let _val = *r#ref; } diff --git a/tests/compile-fail/stacked_borrows/illegal_write4.rs b/tests/compile-fail/stacked_borrows/illegal_write4.rs index 37ae0f055f..be4f89ba28 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write4.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write4.rs @@ -9,5 +9,5 @@ fn main() { let ptr = reference as *const _ as *mut i32; // raw ptr, with raw tag let _mut_ref: &mut i32 = unsafe { mem::transmute(ptr) }; // &mut, with raw tag // Now we retag, making our ref top-of-stack -- and, in particular, unfreezing. - let _val = *reference; //~ ERROR is not frozen + let _val = *reference; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/illegal_write5.rs b/tests/compile-fail/stacked_borrows/illegal_write5.rs index 3a0738bfd0..c60fe90fe0 100644 --- a/tests/compile-fail/stacked_borrows/illegal_write5.rs +++ b/tests/compile-fail/stacked_borrows/illegal_write5.rs @@ -8,7 +8,7 @@ fn main() { callee(xraw); // ... 
though any use of raw value will invalidate our ref. let _val = *xref; - //~^ ERROR: does not exist on the borrow stack + //~^ ERROR: borrow stack } fn callee(xraw: *mut i32) { diff --git a/tests/compile-fail/stacked_borrows/load_invalid_mut.rs b/tests/compile-fail/stacked_borrows/load_invalid_mut.rs index f2e4b36f85..1704b7fe19 100644 --- a/tests/compile-fail/stacked_borrows/load_invalid_mut.rs +++ b/tests/compile-fail/stacked_borrows/load_invalid_mut.rs @@ -5,5 +5,5 @@ fn main() { let xref = unsafe { &mut *xraw }; let xref_in_mem = Box::new(xref); let _val = unsafe { *xraw }; // invalidate xref - let _val = *xref_in_mem; //~ ERROR does not exist on the borrow stack + let _val = *xref_in_mem; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/load_invalid_shr.rs b/tests/compile-fail/stacked_borrows/load_invalid_shr.rs index 6599924f0f..4757a2c1e5 100644 --- a/tests/compile-fail/stacked_borrows/load_invalid_shr.rs +++ b/tests/compile-fail/stacked_borrows/load_invalid_shr.rs @@ -5,5 +5,5 @@ fn main() { let xref = unsafe { &*xraw }; let xref_in_mem = Box::new(xref); unsafe { *xraw = 42 }; // unfreeze - let _val = *xref_in_mem; //~ ERROR is not frozen + let _val = *xref_in_mem; //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs b/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs index 3fe6b65674..03343b985a 100644 --- a/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs +++ b/tests/compile-fail/stacked_borrows/mut_exclusive_violation1.rs @@ -21,7 +21,7 @@ fn unknown_code_1(x: &i32) { unsafe { } } fn unknown_code_2() { unsafe { - *LEAK = 7; //~ ERROR barrier + *LEAK = 7; //~ ERROR borrow stack } } fn main() { diff --git a/tests/compile-fail/stacked_borrows/outdated_local.rs b/tests/compile-fail/stacked_borrows/outdated_local.rs index ba36e43e0c..4cb655366e 100644 --- a/tests/compile-fail/stacked_borrows/outdated_local.rs +++ 
b/tests/compile-fail/stacked_borrows/outdated_local.rs @@ -3,7 +3,7 @@ fn main() { let y: *const i32 = &x; x = 1; // this invalidates y by reactivating the lowermost uniq borrow for this local - assert_eq!(unsafe { *y }, 1); //~ ERROR does not exist on the borrow stack + assert_eq!(unsafe { *y }, 1); //~ ERROR borrow stack assert_eq!(x, 1); } diff --git a/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs b/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs index b239237f01..d8a53b7a96 100644 --- a/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs +++ b/tests/compile-fail/stacked_borrows/pass_invalid_mut.rs @@ -6,5 +6,5 @@ fn main() { let xraw = x as *mut _; let xref = unsafe { &mut *xraw }; let _val = unsafe { *xraw }; // invalidate xref - foo(xref); //~ ERROR does not exist on the borrow stack + foo(xref); //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs b/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs index 22a80e2710..091604a283 100644 --- a/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs +++ b/tests/compile-fail/stacked_borrows/pass_invalid_shr.rs @@ -6,5 +6,5 @@ fn main() { let xraw = x as *mut _; let xref = unsafe { &*xraw }; unsafe { *xraw = 42 }; // unfreeze - foo(xref); //~ ERROR is not frozen + foo(xref); //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/pointer_smuggling.rs b/tests/compile-fail/stacked_borrows/pointer_smuggling.rs index a8207d58e9..f724cdd2a7 100644 --- a/tests/compile-fail/stacked_borrows/pointer_smuggling.rs +++ b/tests/compile-fail/stacked_borrows/pointer_smuggling.rs @@ -8,7 +8,7 @@ fn fun1(x: &mut u8) { fn fun2() { // Now we use a pointer we are not allowed to use - let _x = unsafe { *PTR }; //~ ERROR does not exist on the borrow stack + let _x = unsafe { *PTR }; //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_mut.rs b/tests/compile-fail/stacked_borrows/return_invalid_mut.rs index 
31f8a4e33a..54004ec438 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_mut.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_mut.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> &mut i32 { let xraw = x as *mut (i32, i32); let ret = unsafe { &mut (*xraw).1 }; let _val = unsafe { *xraw }; // invalidate xref - ret //~ ERROR does not exist on the borrow stack + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_mut_option.rs b/tests/compile-fail/stacked_borrows/return_invalid_mut_option.rs index 750d507d6f..2eb2df81f5 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_mut_option.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_mut_option.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> Option<&mut i32> { let xraw = x as *mut (i32, i32); let ret = Some(unsafe { &mut (*xraw).1 }); let _val = unsafe { *xraw }; // invalidate xref - ret //~ ERROR does not exist on the borrow stack + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_mut_tuple.rs b/tests/compile-fail/stacked_borrows/return_invalid_mut_tuple.rs index bb712e9e48..8b73df4bd1 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_mut_tuple.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_mut_tuple.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> (&mut i32,) { let xraw = x as *mut (i32, i32); let ret = (unsafe { &mut (*xraw).1 },); let _val = unsafe { *xraw }; // invalidate xref - ret //~ ERROR does not exist on the borrow stack + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_shr.rs b/tests/compile-fail/stacked_borrows/return_invalid_shr.rs index 986dd18b2e..eab026f9a4 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_shr.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_shr.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> &i32 { let xraw = x as *mut (i32, i32); let 
ret = unsafe { &(*xraw).1 }; unsafe { *xraw = (42, 23) }; // unfreeze - ret //~ ERROR is not frozen + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_shr_option.rs b/tests/compile-fail/stacked_borrows/return_invalid_shr_option.rs index 9d220991c3..f3a35ca266 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_shr_option.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_shr_option.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> Option<&i32> { let xraw = x as *mut (i32, i32); let ret = Some(unsafe { &(*xraw).1 }); unsafe { *xraw = (42, 23) }; // unfreeze - ret //~ ERROR is not frozen + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/return_invalid_shr_tuple.rs b/tests/compile-fail/stacked_borrows/return_invalid_shr_tuple.rs index 060fa25c23..82723bade2 100644 --- a/tests/compile-fail/stacked_borrows/return_invalid_shr_tuple.rs +++ b/tests/compile-fail/stacked_borrows/return_invalid_shr_tuple.rs @@ -3,7 +3,7 @@ fn foo(x: &mut (i32, i32)) -> (&i32,) { let xraw = x as *mut (i32, i32); let ret = (unsafe { &(*xraw).1 },); unsafe { *xraw = (42, 23) }; // unfreeze - ret //~ ERROR is not frozen + ret //~ ERROR borrow stack } fn main() { diff --git a/tests/compile-fail/stacked_borrows/shr_frozen_violation1.rs b/tests/compile-fail/stacked_borrows/shr_frozen_violation1.rs index 560c9dfb66..5031210c54 100644 --- a/tests/compile-fail/stacked_borrows/shr_frozen_violation1.rs +++ b/tests/compile-fail/stacked_borrows/shr_frozen_violation1.rs @@ -8,9 +8,6 @@ fn main() { println!("{}", foo(&mut 0)); } -// If we replace the `*const` by `&`, my current dev version of miri -// *does* find the problem, but not for a good reason: It finds it because -// of barriers, and we shouldn't rely on unknown code using barriers. 
-fn unknown_code(x: *const i32) { - unsafe { *(x as *mut i32) = 7; } //~ ERROR barrier +fn unknown_code(x: &i32) { + unsafe { *(x as *const i32 as *mut i32) = 7; } //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/static_memory_modification.rs b/tests/compile-fail/stacked_borrows/static_memory_modification.rs index c092cbfe50..88ac164947 100644 --- a/tests/compile-fail/stacked_borrows/static_memory_modification.rs +++ b/tests/compile-fail/stacked_borrows/static_memory_modification.rs @@ -3,6 +3,6 @@ static X: usize = 5; #[allow(mutable_transmutes)] fn main() { let _x = unsafe { - std::mem::transmute::<&usize, &mut usize>(&X) //~ ERROR mutable reference with frozen tag + std::mem::transmute::<&usize, &mut usize>(&X) //~ ERROR borrow stack }; } diff --git a/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs b/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs index 45ada88977..e9282c5ba8 100644 --- a/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs +++ b/tests/compile-fail/stacked_borrows/transmute-is-no-escape.rs @@ -10,5 +10,5 @@ fn main() { let _raw: *mut i32 = unsafe { mem::transmute(&mut x[0]) }; // `raw` still carries a tag, so we get another pointer to the same location that does not carry a tag let raw = (&mut x[1] as *mut i32).wrapping_offset(-1); - unsafe { *raw = 13; } //~ ERROR does not exist on the borrow stack + unsafe { *raw = 13; } //~ ERROR borrow stack } diff --git a/tests/compile-fail/stacked_borrows/unescaped_local.rs b/tests/compile-fail/stacked_borrows/unescaped_local.rs index 1db14ea7ed..b49e6cce63 100644 --- a/tests/compile-fail/stacked_borrows/unescaped_local.rs +++ b/tests/compile-fail/stacked_borrows/unescaped_local.rs @@ -4,5 +4,5 @@ fn main() { let mut x = 42; let raw = &mut x as *mut i32 as usize as *mut i32; let _ptr = &mut x; - unsafe { *raw = 13; } //~ ERROR does not exist on the borrow stack + unsafe { *raw = 13; } //~ ERROR borrow stack } From 
924624f810bd70078074c62ea94061927d2516bd Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 16 Apr 2019 23:37:37 +0200 Subject: [PATCH 05/16] some failures are impossible --- src/stacked_borrows.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 40cbf92e96..152171aed0 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -473,7 +473,7 @@ impl<'tcx> Stack { "no item to reborrow as {} from tag {} found in borrow stack", new_kind, derived_from, )))?; // With this we can compute the permission for the new pointer. - let new_perm = new_kind.new_perm(derived_from_perm)?; + let new_perm = new_kind.new_perm(derived_from_perm).expect("this should never fail"); // We behave very differently for the "unsafe" case of a shared-read-write pointer // ("unsafe" because this also applies to shared references with interior mutability). From 97c34c266f492ff548ca1584ccb7f1c3c49dcf82 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 08:25:21 +0200 Subject: [PATCH 06/16] try to test the problematic cast-to-raw case... unfortunately with the implicit reborrow that's not currently possible --- tests/run-pass/2phase.rs | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/tests/run-pass/2phase.rs b/tests/run-pass/2phase.rs index 78ddf566a9..87ecb4aef0 100644 --- a/tests/run-pass/2phase.rs +++ b/tests/run-pass/2phase.rs @@ -26,6 +26,21 @@ fn two_phase3(b: bool) { )); } +#[allow(unreachable_code)] +fn two_phase_raw() { + let x: &mut Vec = &mut vec![]; + x.push( + { + // Unfortunately this does not trigger the problem of creating a + // raw ponter from a pointer that had a two-phase borrow derived from + // it because of the implicit &mut reborrow. 
+ let raw = x as *mut _; + unsafe { *raw = vec![1]; } + return + } + ); +} + /* fn two_phase_overlapping1() { let mut x = vec![]; @@ -67,6 +82,7 @@ fn main() { two_phase2(); two_phase3(false); two_phase3(true); + two_phase_raw(); with_interior_mutability(); //FIXME: enable these, or remove them, depending on how https://github.com/rust-lang/rust/issues/56254 gets resolved //two_phase_overlapping1(); From a503259d8b6437550fd1563368cfae618f8ab426 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 08:35:06 +0200 Subject: [PATCH 07/16] organize stacked borrows run-pass tests --- tests/run-pass/refcell.rs | 46 +------------------ .../run-pass/{ => stacked-borrows}/2phase.rs | 0 tests/run-pass/stacked-borrows/refcell.rs | 44 ++++++++++++++++++ .../{ => stacked-borrows}/stacked-borrows.rs | 0 4 files changed, 45 insertions(+), 45 deletions(-) rename tests/run-pass/{ => stacked-borrows}/2phase.rs (100%) create mode 100644 tests/run-pass/stacked-borrows/refcell.rs rename tests/run-pass/{ => stacked-borrows}/stacked-borrows.rs (100%) diff --git a/tests/run-pass/refcell.rs b/tests/run-pass/refcell.rs index 0bc8b15c5f..93cef1572a 100644 --- a/tests/run-pass/refcell.rs +++ b/tests/run-pass/refcell.rs @@ -1,6 +1,6 @@ use std::cell::RefCell; -fn lots_of_funny_borrows() { +fn main() { let c = RefCell::new(42); { let s1 = c.borrow(); @@ -31,47 +31,3 @@ fn lots_of_funny_borrows() { let _y: i32 = *s2; } } - -fn aliasing_mut_and_shr() { - fn inner(rc: &RefCell, aliasing: &mut i32) { - *aliasing += 4; - let _escape_to_raw = rc as *const _; - *aliasing += 4; - let _shr = &*rc; - *aliasing += 4; - // also turning this into a frozen ref now must work - let aliasing = &*aliasing; - let _val = *aliasing; - let _escape_to_raw = rc as *const _; // this must NOT unfreeze - let _val = *aliasing; - let _shr = &*rc; // this must NOT unfreeze - let _val = *aliasing; - } - - let rc = RefCell::new(23); - let mut bmut = rc.borrow_mut(); - inner(&rc, &mut *bmut); - drop(bmut); - 
assert_eq!(*rc.borrow(), 23+12); -} - -fn aliasing_frz_and_shr() { - fn inner(rc: &RefCell, aliasing: &i32) { - let _val = *aliasing; - let _escape_to_raw = rc as *const _; // this must NOT unfreeze - let _val = *aliasing; - let _shr = &*rc; // this must NOT unfreeze - let _val = *aliasing; - } - - let rc = RefCell::new(23); - let bshr = rc.borrow(); - inner(&rc, &*bshr); - assert_eq!(*rc.borrow(), 23); -} - -fn main() { - lots_of_funny_borrows(); - aliasing_mut_and_shr(); - aliasing_frz_and_shr(); -} diff --git a/tests/run-pass/2phase.rs b/tests/run-pass/stacked-borrows/2phase.rs similarity index 100% rename from tests/run-pass/2phase.rs rename to tests/run-pass/stacked-borrows/2phase.rs diff --git a/tests/run-pass/stacked-borrows/refcell.rs b/tests/run-pass/stacked-borrows/refcell.rs new file mode 100644 index 0000000000..dddc7089d0 --- /dev/null +++ b/tests/run-pass/stacked-borrows/refcell.rs @@ -0,0 +1,44 @@ +use std::cell::RefCell; + +fn main() { + aliasing_mut_and_shr(); + aliasing_frz_and_shr(); +} + +fn aliasing_mut_and_shr() { + fn inner(rc: &RefCell, aliasing: &mut i32) { + *aliasing += 4; + let _escape_to_raw = rc as *const _; + *aliasing += 4; + let _shr = &*rc; + *aliasing += 4; + // also turning this into a frozen ref now must work + let aliasing = &*aliasing; + let _val = *aliasing; + let _escape_to_raw = rc as *const _; // this must NOT unfreeze + let _val = *aliasing; + let _shr = &*rc; // this must NOT unfreeze + let _val = *aliasing; + } + + let rc = RefCell::new(23); + let mut bmut = rc.borrow_mut(); + inner(&rc, &mut *bmut); + drop(bmut); + assert_eq!(*rc.borrow(), 23+12); +} + +fn aliasing_frz_and_shr() { + fn inner(rc: &RefCell, aliasing: &i32) { + let _val = *aliasing; + let _escape_to_raw = rc as *const _; // this must NOT unfreeze + let _val = *aliasing; + let _shr = &*rc; // this must NOT unfreeze + let _val = *aliasing; + } + + let rc = RefCell::new(23); + let bshr = rc.borrow(); + inner(&rc, &*bshr); + assert_eq!(*rc.borrow(), 23); +} 
diff --git a/tests/run-pass/stacked-borrows.rs b/tests/run-pass/stacked-borrows/stacked-borrows.rs similarity index 100% rename from tests/run-pass/stacked-borrows.rs rename to tests/run-pass/stacked-borrows/stacked-borrows.rs From 7b7fef1b53dab72a6a61951e166851ffc7d4bc82 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 08:42:41 +0200 Subject: [PATCH 08/16] let the permission of a new pointer depend on the type only --- src/stacked_borrows.rs | 264 +++++++++++++++++++---------------------- 1 file changed, 125 insertions(+), 139 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 152171aed0..ca257aaf1f 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -5,7 +5,7 @@ use std::fmt; use std::num::NonZeroU64; use rustc::ty::{self, layout::Size}; -use rustc::hir::{Mutability, MutMutable, MutImmutable}; +use rustc::hir::{MutMutable, MutImmutable}; use rustc::mir::RetagKind; use crate::{ @@ -96,50 +96,38 @@ pub type MemoryState = Rc>; #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub enum AccessKind { Read, - Write { dealloc: bool }, -} - -// "Fake" constructors -impl AccessKind { - fn write() -> AccessKind { - AccessKind::Write { dealloc: false } - } - - fn dealloc() -> AccessKind { - AccessKind::Write { dealloc: true } - } + Write, } impl fmt::Display for AccessKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { AccessKind::Read => write!(f, "read"), - AccessKind::Write { dealloc: false } => write!(f, "write"), - AccessKind::Write { dealloc: true } => write!(f, "deallocation"), + AccessKind::Write => write!(f, "write"), } } } /// Indicates which kind of reference is being created. -/// Used by `reborrow` to compute which permissions to grant to the +/// Used by high-level `reborrow` to compute which permissions to grant to the /// new pointer. #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] pub enum RefKind { - /// `&mut`. - Mutable, + /// `&mut` and `Box`. 
+ Unique, /// `&` with or without interior mutability. - Shared { frozen: bool }, - /// `*` (raw pointer). - Raw, + Shared, + /// `*mut`/`*const` (raw pointers). + Raw { mutable: bool }, } impl fmt::Display for RefKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match self { - RefKind::Mutable => write!(f, "mutable"), - RefKind::Shared { frozen: true } => write!(f, "shared (frozen)"), - RefKind::Shared { frozen: false } => write!(f, "shared (mutable)"), - RefKind::Raw => write!(f, "raw"), + RefKind::Unique => write!(f, "unique"), + RefKind::Shared => write!(f, "shared"), + RefKind::Raw { mutable: true } => write!(f, "raw (mutable)"), + RefKind::Raw { mutable: false } => write!(f, "raw (constant)"), } } } @@ -216,7 +204,7 @@ impl Permission { // SharedReadOnly only permits read access. (Permission::SharedReadOnly, AccessKind::Read) => true, - (Permission::SharedReadOnly, AccessKind::Write { .. }) => + (Permission::SharedReadOnly, AccessKind::Write) => false, } } @@ -235,7 +223,7 @@ impl Permission { // When `other` is `SharedReadOnly`, that is NEVER compatible with // write accesses. // This makes sure read-only pointers become invalid on write accesses (ensures F2a). - (_, AccessKind::Write { .. }, SharedReadOnly) => + (_, AccessKind::Write, SharedReadOnly) => false, // When `other` is `Unique`, that is compatible with nothing. // This makes sure unique pointers become invalid on incompatible accesses (ensures U2). @@ -246,7 +234,7 @@ impl Permission { // (This is particularily important such that when a new mutable ref gets created, it gets // pushed into the right item -- this behaves like a write and we assert uniqueness of the // pointer from which this comes, *if* it was a unique pointer.) - (Unique, AccessKind::Write { .. }, _) => + (Unique, AccessKind::Write, _) => false, // `SharedReadWrite` items can tolerate any other akin items for any kind of access. 
(SharedReadWrite, _, SharedReadWrite) => @@ -262,42 +250,7 @@ impl Permission { } } -impl<'tcx> RefKind { - /// Defines which kind of access the "parent" must grant to create this reference. - fn access(self) -> AccessKind { - match self { - RefKind::Mutable | RefKind::Shared { frozen: false } => AccessKind::write(), - RefKind::Raw | RefKind::Shared { frozen: true } => AccessKind::Read, - // FIXME: Just requiring read-only access for raw means that a raw ptr might not be writeable - // even when we think it should be! Think about this some more. - } - } - - /// This defines the new permission used when a pointer gets created: For raw pointers, whether these are read-only - /// or read-write depends on the permission from which they derive. - fn new_perm(self, derived_from: Permission) -> EvalResult<'tcx, Permission> { - Ok(match (self, derived_from) { - // Do not derive writable safe pointer from read-only pointer! - (RefKind::Mutable, Permission::SharedReadOnly) => - return err!(MachineError(format!( - "deriving mutable reference from read-only pointer" - ))), - (RefKind::Shared { frozen: false }, Permission::SharedReadOnly) => - return err!(MachineError(format!( - "deriving shared reference with interior mutability from read-only pointer" - ))), - // Safe pointer cases. - (RefKind::Mutable, _) => Permission::Unique, - (RefKind::Shared { frozen: true }, _) => Permission::SharedReadOnly, - (RefKind::Shared { frozen: false }, _) => Permission::SharedReadWrite, - // Raw pointer cases. - (RefKind::Raw, Permission::SharedReadOnly) => Permission::SharedReadOnly, - (RefKind::Raw, _) => Permission::SharedReadWrite, - }) - } -} - -/// Core per-location operations: access, create. +/// Core per-location operations: access, dealloc, reborrow. impl<'tcx> Stack { /// Find the item granting the given kind of access to the given tag, and where that item is in the stack. 
fn find_granting(&self, access: AccessKind, tag: Tag) -> Option<(usize, Permission)> { @@ -326,7 +279,6 @@ impl<'tcx> Stack { // The second step is where barriers get implemented: they "protect" the items // below them, meaning that if we remove an item and then further up encounter a barrier, // we raise an error. - // Afterwards we just do some post-processing for deallocation accesses. // Step 1: Find granting item. let (granting_idx, granting_perm) = self.find_granting(access, tag) @@ -356,7 +308,7 @@ impl<'tcx> Stack { } else { // Aha! This is a bad one, remove it. let item = self.borrows.remove(cur); - trace!("access: popping item {}", item); + trace!("access: removing item {}", item); removed_item = Some(item); } } @@ -380,25 +332,38 @@ impl<'tcx> Stack { } } - // Post-processing. - // If we got here, we found a matching item. Congratulations! - // However, we are not done yet: If this access is deallocating, we must make sure - // there are no active barriers remaining on the stack. - if access == AccessKind::dealloc() { - for &itm in self.borrows.iter().rev() { - match itm { - Item::FnBarrier(call) if global.is_active(call) => { - return err!(MachineError(format!( - "deallocating with active barrier ({})", call - ))) - } - _ => {}, + // Done. + return Ok(granting_idx); + } + + /// Deallocate a location: Like a write access, but also there must be no + /// barriers at all. + fn dealloc( + &mut self, + tag: Tag, + global: &GlobalState, + ) -> EvalResult<'tcx> { + // Step 1: Find granting item. + self.find_granting(AccessKind::Write, tag) + .ok_or_else(|| InterpError::MachineError(format!( + "no item granting write access for deallocation to tag {} found in borrow stack", + tag, + )))?; + + // We must make sure there are no active barriers remaining on the stack. + // Also clear the stack, no more accesses are possible. 
+ while let Some(itm) = self.borrows.pop() { + match itm { + Item::FnBarrier(call) if global.is_active(call) => { + return err!(MachineError(format!( + "deallocating with active barrier ({})", call + ))) } + _ => {}, } } - // Done. - return Ok(granting_idx); + Ok(()) } /// `reborrow` helper function. @@ -412,7 +377,7 @@ impl<'tcx> Stack { trace!("reborrow: avoiding redundant item {}", item); return; } - trace!("reborrow: pushing item {}", item); + trace!("reborrow: adding item {}", item); self.borrows.insert(position, item); } @@ -427,7 +392,7 @@ impl<'tcx> Stack { // that overlap. trace!("reborrow: avoiding redundant extra barrier"); } else { - trace!("reborrow: pushing barrier for call {}", call); + trace!("reborrow: adding barrier for call {}", call); self.borrows.push(itm); } } @@ -453,27 +418,22 @@ impl<'tcx> Stack { &mut self, derived_from: Tag, barrier: Option, - new_kind: RefKind, + new_perm: Permission, new_tag: Tag, global: &GlobalState, ) -> EvalResult<'tcx> { - // Find the permission "from which we derive". To this end we first have to decide - // if we derive from a permission that grants writes or just reads. - let access = new_kind.access(); - // Now we figure out which item grants our parent (`derived_from`) permission. - // We use that to determine (a) where to put the new item, and for raw pointers - // (b) whether to given read-only or read-write access. - // FIXME: This handling of raw pointers is fragile, very fragile. What if we do - // not get "the right one", like when there are multiple items granting `derived_from` - // and we accidentally create a read-only pointer? This can happen for two-phase borrows - // (then there's a `Unique` and a `SharedReadOnly` for the same tag), and for raw pointers - // (which currently all are `Untagged`). - let (derived_from_idx, derived_from_perm) = self.find_granting(access, derived_from) + // Figure out which access `perm` corresponds to. 
+ let access = if new_perm.grants(AccessKind::Write) { + AccessKind::Write + } else { + AccessKind::Read + }; + // Now we figure out which item grants our parent (`derived_from`) this kind of access. + // We use that to determine where to put the new item. + let (derived_from_idx, _) = self.find_granting(access, derived_from) .ok_or_else(|| InterpError::MachineError(format!( - "no item to reborrow as {} from tag {} found in borrow stack", new_kind, derived_from, + "no item to reborrow for {:?} from tag {} found in borrow stack", new_perm, derived_from, )))?; - // With this we can compute the permission for the new pointer. - let new_perm = new_kind.new_perm(derived_from_perm).expect("this should never fail"); // We behave very differently for the "unsafe" case of a shared-read-write pointer // ("unsafe" because this also applies to shared references with interior mutability). @@ -530,8 +490,9 @@ impl<'tcx> Stack { Ok(()) } } +// # Stacked Borrows Core End -/// Higher-level per-location operations: deref, access, reborrow. +/// Higher-level per-location operations: deref, access, dealloc, reborrow. impl<'tcx> Stacks { /// Creates new stack with initial tag. pub(crate) fn new( @@ -554,16 +515,34 @@ impl<'tcx> Stacks { &self, ptr: Pointer, size: Size, - kind: AccessKind, + access: AccessKind, + ) -> EvalResult<'tcx> { + trace!("{} access of tag {}: {:?}, size {}", access, ptr.tag, ptr, size.bytes()); + // Even reads can have a side-effect, by invalidating other references. + // This is fundamentally necessary since `&mut` asserts that there + // are no accesses through other references, not even reads. + let global = self.global.borrow(); + let mut stacks = self.stacks.borrow_mut(); + for stack in stacks.iter_mut(ptr.offset, size) { + stack.access(access, ptr.tag, &*global)?; + } + Ok(()) + } + + /// `ptr` is used to deallocate. 
+ fn dealloc( + &self, + ptr: Pointer, + size: Size, ) -> EvalResult<'tcx> { - trace!("{} access of tag {}: {:?}, size {}", kind, ptr.tag, ptr, size.bytes()); + trace!("deallocation with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); // Even reads can have a side-effect, by invalidating other references. // This is fundamentally necessary since `&mut` asserts that there // are no accesses through other references, not even reads. let global = self.global.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.access(kind, ptr.tag, &*global)?; + stack.dealloc(ptr.tag, &*global)?; } Ok(()) } @@ -575,26 +554,23 @@ impl<'tcx> Stacks { ptr: Pointer, size: Size, barrier: Option, - new_kind: RefKind, + new_perm: Permission, new_tag: Tag, ) -> EvalResult<'tcx> { trace!( - "{} reborrow for tag {} to {}: {:?}, size {}", - new_kind, ptr.tag, new_tag, ptr, size.bytes(), + "reborrow tag {} as {:?} {}: {:?}, size {}", + ptr.tag, new_perm, new_tag, ptr, size.bytes(), ); let global = self.global.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.reborrow(ptr.tag, barrier, new_kind, new_tag, &*global)?; + stack.reborrow(ptr.tag, barrier, new_perm, new_tag, &*global)?; } Ok(()) } } -// # Stacked Borrows Core End - -// Glue code to connect with Miri Machine Hooks - +/// Glue code to connect with Miri Machine Hooks impl Stacks { pub fn new_allocation( size: Size, @@ -638,7 +614,7 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::write()) + alloc.extra.access(ptr, size, AccessKind::Write) } #[inline(always)] @@ -647,42 +623,48 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::dealloc()) + alloc.extra.dealloc(ptr, size) } } impl<'a, 'mir, 'tcx> EvalContextPrivExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 
'mir, 'tcx> {} trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> { + /// High-level `reborrow` operation. This decides which reference gets which kind + /// of permission! fn reborrow( &mut self, place: MPlaceTy<'tcx, Tag>, size: Size, - mutbl: Option, + kind: RefKind, new_tag: Tag, fn_barrier: bool, ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); let barrier = if fn_barrier { Some(this.frame().extra) } else { None }; let ptr = place.ptr.to_ptr()?; - trace!("reborrow: creating new reference for {:?} (pointee {}): {:?}", - ptr, place.layout.ty, new_tag); + trace!("reborrow: creating new reference for {:?} (pointee {}): {}, {:?}", + ptr, place.layout.ty, kind, new_tag); // Get the allocation. It might not be mutable, so we cannot use `get_mut`. let alloc = this.memory().get(ptr.alloc_id)?; alloc.check_bounds(this, ptr, size)?; // Update the stacks. - if mutbl == Some(MutImmutable) { - // Reference that cares about freezing. We need a frozen-sensitive reborrow. - this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { - let new_kind = RefKind::Shared { frozen }; - alloc.extra.reborrow(cur_ptr, size, barrier, new_kind, new_tag) - })?; - } else { - // Just treat this as one big chunk. - let new_kind = if mutbl == Some(MutMutable) { RefKind::Mutable } else { RefKind::Raw }; - alloc.extra.reborrow(ptr, size, barrier, new_kind, new_tag)?; - } - Ok(()) + let perm = match kind { + RefKind::Unique => Permission::Unique, + RefKind::Raw { mutable: true } => Permission::SharedReadWrite, + RefKind::Shared | RefKind::Raw { mutable: false } => { + // Shared references and *const are a whole different kind of game, the + // permission is not uniform across the entire range! + // We need a frozen-sensitive reborrow. + return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { + // We are only ever `SharedReadOnly` inside the frozen bits. 
+ let perm = if frozen { Permission::SharedReadOnly } else { Permission::SharedReadWrite }; + alloc.extra.reborrow(cur_ptr, size, barrier, perm, new_tag) + }); + } + }; + debug_assert_ne!(perm, Permission::SharedReadOnly, "SharedReadOnly must be used frozen-sensitive"); + return alloc.extra.reborrow(ptr, size, barrier, perm, new_tag); } /// Retags an indidual pointer, returning the retagged version. @@ -690,7 +672,7 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, fn retag_reference( &mut self, val: ImmTy<'tcx, Tag>, - mutbl: Option, + kind: RefKind, fn_barrier: bool, two_phase: bool, ) -> EvalResult<'tcx, Immediate> { @@ -706,24 +688,24 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, } // Compute new borrow. - let new_tag = match mutbl { - Some(_) => Tag::Tagged(this.memory().extra.borrow_mut().new_ptr()), - None => Tag::Untagged, + let new_tag = match kind { + RefKind::Raw { .. } => Tag::Untagged, + _ => Tag::Tagged(this.memory().extra.borrow_mut().new_ptr()), }; // Reborrow. - this.reborrow(place, size, mutbl, new_tag, fn_barrier)?; + this.reborrow(place, size, kind, new_tag, fn_barrier)?; let new_place = place.replace_tag(new_tag); // Handle two-phase borrows. if two_phase { - assert!(mutbl == Some(MutMutable), "two-phase shared borrows make no sense"); + assert!(kind == RefKind::Unique, "two-phase shared borrows make no sense"); // Grant read access *to the parent pointer* with the old tag. This means the same pointer // has multiple items in the stack now! // FIXME: Think about this some more, in particular about the interaction with cast-to-raw. // Maybe find a better way to express 2-phase, now that we have a "more expressive language" // in the stack. 
let old_tag = place.ptr.to_ptr().unwrap().tag; - this.reborrow(new_place, size, Some(MutImmutable), old_tag, /* fn_barrier: */ false)?; + this.reborrow(new_place, size, RefKind::Shared, old_tag, /* fn_barrier: */ false)?; } // Return new pointer. @@ -742,15 +724,19 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, // Determine mutability and whether to add a barrier. // Cannot use `builtin_deref` because that reports *immutable* for `Box`, // making it useless. - fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(Option, bool)> { + fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(RefKind, bool)> { match ty.sty { // References are simple. - ty::Ref(_, _, mutbl) => Some((Some(mutbl), kind == RetagKind::FnEntry)), + ty::Ref(_, _, MutMutable) => + Some((RefKind::Unique, kind == RetagKind::FnEntry)), + ty::Ref(_, _, MutImmutable) => + Some((RefKind::Shared, kind == RetagKind::FnEntry)), // Raw pointers need to be enabled. - ty::RawPtr(..) if kind == RetagKind::Raw => Some((None, false)), + ty::RawPtr(tym) if kind == RetagKind::Raw => + Some((RefKind::Raw { mutable: tym.mutbl == MutMutable }, false)), // Boxes do not get a barrier: barriers reflect that references outlive the call // they were passed in to; that's just not the case for boxes. - ty::Adt(..) if ty.is_box() => Some((Some(MutMutable), false)), + ty::Adt(..) 
if ty.is_box() => Some((RefKind::Unique, false)), _ => None, } } From 14e701f7d8008656327094056e800af506b0f6a4 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 14:23:21 +0200 Subject: [PATCH 09/16] abstract mapping over all the stacks in some memory range --- src/stacked_borrows.rs | 83 ++++++++++++++---------------------------- 1 file changed, 27 insertions(+), 56 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index ca257aaf1f..7c0a72cde3 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -492,7 +492,7 @@ impl<'tcx> Stack { } // # Stacked Borrows Core End -/// Higher-level per-location operations: deref, access, dealloc, reborrow. +/// Map per-stack operations to higher-level per-location-range operations. impl<'tcx> Stacks { /// Creates new stack with initial tag. pub(crate) fn new( @@ -510,61 +510,17 @@ impl<'tcx> Stacks { } } - /// `ptr` got used, reflect that in the stack. - fn access( - &self, - ptr: Pointer, - size: Size, - access: AccessKind, - ) -> EvalResult<'tcx> { - trace!("{} access of tag {}: {:?}, size {}", access, ptr.tag, ptr, size.bytes()); - // Even reads can have a side-effect, by invalidating other references. - // This is fundamentally necessary since `&mut` asserts that there - // are no accesses through other references, not even reads. - let global = self.global.borrow(); - let mut stacks = self.stacks.borrow_mut(); - for stack in stacks.iter_mut(ptr.offset, size) { - stack.access(access, ptr.tag, &*global)?; - } - Ok(()) - } - - /// `ptr` is used to deallocate. - fn dealloc( + /// Call `f` on every stack in the range. + fn for_each( &self, ptr: Pointer, size: Size, + f: impl Fn(&mut Stack, Tag, &GlobalState) -> EvalResult<'tcx>, ) -> EvalResult<'tcx> { - trace!("deallocation with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); - // Even reads can have a side-effect, by invalidating other references. 
- // This is fundamentally necessary since `&mut` asserts that there - // are no accesses through other references, not even reads. let global = self.global.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - stack.dealloc(ptr.tag, &*global)?; - } - Ok(()) - } - - /// Reborrow the given pointer to the new tag for the given kind of reference. - /// This works on `&self` because we might encounter references to constant memory. - fn reborrow( - &self, - ptr: Pointer, - size: Size, - barrier: Option, - new_perm: Permission, - new_tag: Tag, - ) -> EvalResult<'tcx> { - trace!( - "reborrow tag {} as {:?} {}: {:?}, size {}", - ptr.tag, new_perm, new_tag, ptr, size.bytes(), - ); - let global = self.global.borrow(); - let mut stacks = self.stacks.borrow_mut(); - for stack in stacks.iter_mut(ptr.offset, size) { - stack.reborrow(ptr.tag, barrier, new_perm, new_tag, &*global)?; + f(stack, ptr.tag, &*global)?; } Ok(()) } @@ -605,7 +561,11 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::Read) + trace!("read access with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); + alloc.extra.for_each(ptr, size, |stack, tag, global| { + stack.access(AccessKind::Read, tag, global)?; + Ok(()) + }) } #[inline(always)] @@ -614,7 +574,11 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.access(ptr, size, AccessKind::Write) + trace!("write access with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); + alloc.extra.for_each(ptr, size, |stack, tag, global| { + stack.access(AccessKind::Write, tag, global)?; + Ok(()) + }) } #[inline(always)] @@ -623,7 +587,10 @@ impl AllocationExtra for Stacks { ptr: Pointer, size: Size, ) -> EvalResult<'tcx> { - alloc.extra.dealloc(ptr, size) + trace!("deallocation with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); + alloc.extra.for_each(ptr, size, |stack, tag, global| 
{ + stack.dealloc(tag, global) + }) } } @@ -642,8 +609,8 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, let this = self.eval_context_mut(); let barrier = if fn_barrier { Some(this.frame().extra) } else { None }; let ptr = place.ptr.to_ptr()?; - trace!("reborrow: creating new reference for {:?} (pointee {}): {}, {:?}", - ptr, place.layout.ty, kind, new_tag); + trace!("reborrow: {:?} reference {} derived from {} (pointee {}): {:?}, size {}", + kind, new_tag, ptr.tag, place.layout.ty, ptr, size.bytes()); // Get the allocation. It might not be mutable, so we cannot use `get_mut`. let alloc = this.memory().get(ptr.alloc_id)?; @@ -659,12 +626,16 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { // We are only ever `SharedReadOnly` inside the frozen bits. let perm = if frozen { Permission::SharedReadOnly } else { Permission::SharedReadWrite }; - alloc.extra.reborrow(cur_ptr, size, barrier, perm, new_tag) + alloc.extra.for_each(cur_ptr, size, |stack, tag, global| { + stack.reborrow(tag, barrier, perm, new_tag, global) + }) }); } }; debug_assert_ne!(perm, Permission::SharedReadOnly, "SharedReadOnly must be used frozen-sensitive"); - return alloc.extra.reborrow(ptr, size, barrier, perm, new_tag); + alloc.extra.for_each(ptr, size, |stack, tag, global| { + stack.reborrow(tag, barrier, perm, new_tag, global) + }) } /// Retags an indidual pointer, returning the retagged version. 
From e7a500b7e112be64e90517316a527e3a4a239437 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 14:28:45 +0200 Subject: [PATCH 10/16] test creating two raw pointers from the same mutable ref --- tests/run-pass/stacked-borrows/stacked-borrows.rs | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/tests/run-pass/stacked-borrows/stacked-borrows.rs b/tests/run-pass/stacked-borrows/stacked-borrows.rs index 711026c02d..791e97f0eb 100644 --- a/tests/run-pass/stacked-borrows/stacked-borrows.rs +++ b/tests/run-pass/stacked-borrows/stacked-borrows.rs @@ -10,6 +10,7 @@ fn main() { partially_invalidate_mut(); drop_after_sharing(); direct_mut_to_const_raw(); + two_raw(); } // Deref a raw ptr to access a field of a large struct, where the field @@ -123,3 +124,15 @@ fn direct_mut_to_const_raw() { assert_eq!(*x, 1); */ } + +// Make sure that we can create two raw pointers from a mutable reference and use them both. +fn two_raw() { unsafe { + let x = &mut 0; + // Given the implicit reborrows, the only reason this currently works is that we + // do not track raw pointers: The creation of `y2` reborrows `x` and thus pops + // `y1` off the stack. + let y1 = x as *mut _; + let y2 = x as *mut _; + *y1 += 2; + *y2 += 1; +} } From 46d5fd848773dcb3699344a8927a21e285d9207a Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 14:57:13 +0200 Subject: [PATCH 11/16] barriers are dead, long live protectors -- this enables overlapping two-phase borrows! 
--- src/stacked_borrows.rs | 255 ++++++++---------- .../stacked_borrows/aliasing_mut1.rs | 2 +- .../stacked_borrows/aliasing_mut2.rs | 2 +- .../stacked_borrows/aliasing_mut4.rs | 2 +- .../deallocate_against_barrier.rs | 2 +- .../invalidate_against_barrier1.rs | 2 +- .../invalidate_against_barrier2.rs | 2 +- tests/run-pass/stacked-borrows/2phase.rs | 15 +- 8 files changed, 120 insertions(+), 162 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 7c0a72cde3..98b6ca6543 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -15,7 +15,7 @@ use crate::{ }; pub type PtrId = NonZeroU64; -pub type CallId = u64; +pub type CallId = NonZeroU64; /// Tracking pointer provenance #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] @@ -46,19 +46,23 @@ pub enum Permission { /// An item in the per-location borrow stack. #[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)] -pub enum Item { - /// Grants the given permission for pointers with this tag. - Permission(Permission, Tag), - /// A barrier, tracking the function it belongs to by its index on the call stack. - FnBarrier(CallId), +pub struct Item { + /// The permission this item grants. + perm: Permission, + /// The pointers the permission is granted to. + tag: Tag, + /// An optional protector, ensuring the item cannot get popped until `CallId` is over. + protector: Option, } impl fmt::Display for Item { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match self { - Item::Permission(perm, tag) => write!(f, "[{:?} for {}]", perm, tag), - Item::FnBarrier(call) => write!(f, "[barrier {}]", call), + write!(f, "[{:?} for {}", self.perm, self.tag)?; + if let Some(call) = self.protector { + write!(f, " (call {})", call)?; } + write!(f, "]")?; + Ok(()) } } @@ -69,7 +73,7 @@ pub struct Stack { /// We sometimes push into the middle but never remove from the middle. /// The same tag may occur multiple times, e.g. from a two-phase borrow. 
/// Invariants: - /// * Above a `SharedReadOnly` there can only be barriers and more `SharedReadOnly`. + /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`. borrows: Vec, } @@ -137,7 +141,7 @@ impl Default for GlobalState { fn default() -> Self { GlobalState { next_ptr_id: NonZeroU64::new(1).unwrap(), - next_call_id: 0, + next_call_id: NonZeroU64::new(1).unwrap(), active_calls: HashSet::default(), } } @@ -154,7 +158,7 @@ impl GlobalState { let id = self.next_call_id; trace!("new_call: Assigning ID {}", id); self.active_calls.insert(id); - self.next_call_id = id+1; + self.next_call_id = NonZeroU64::new(id.get() + 1).unwrap(); id } @@ -232,7 +236,7 @@ impl Permission { // When we are unique and this is a write/dealloc, we tolerate nothing. // This makes sure we re-assert uniqueness ("being on top") on write accesses. // (This is particularily important such that when a new mutable ref gets created, it gets - // pushed into the right item -- this behaves like a write and we assert uniqueness of the + // pushed onto the right item -- this behaves like a write and we assert uniqueness of the // pointer from which this comes, *if* it was a unique pointer.) (Unique, AccessKind::Write, _) => false, @@ -259,11 +263,13 @@ impl<'tcx> Stack { .rev() // search top-to-bottom // Return permission of first item that grants access. // We require a permission with the right tag, ensuring U3 and F3. - .filter_map(|(idx, item)| match item { - &Item::Permission(perm, item_tag) if perm.grants(access) && tag == item_tag => - Some((idx, perm)), - _ => None, - }) + .filter_map(|(idx, item)| + if item.perm.grants(access) && tag == item.tag { + Some((idx, item.perm)) + } else { + None + } + ) .next() } @@ -276,9 +282,6 @@ impl<'tcx> Stack { global: &GlobalState, ) -> EvalResult<'tcx, usize> { // Two main steps: Find granting item, remove all incompatible items above. 
- // The second step is where barriers get implemented: they "protect" the items - // below them, meaning that if we remove an item and then further up encounter a barrier, - // we raise an error. // Step 1: Find granting item. let (granting_idx, granting_perm) = self.find_granting(access, tag) @@ -287,47 +290,35 @@ impl<'tcx> Stack { access, tag, )))?; - // Step 2: Remove everything incompatible above them. - // Items below an active barrier however may not be removed, so we check that as well. + // Step 2: Remove everything incompatible above them. Make sure we do not remove protected + // items. // We do *not* maintain a stack discipline here. We could, in principle, decide to only // keep the items immediately above `granting_idx` that are compatible, and then pop the rest. // However, that kills off entire "branches" of pointer derivation too easily: // in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement would pop the `Unique` // from the reborrow of the first statement, and subequently also pop the `SharedReadWrite` for `raw`. + // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared + // reference and use that. { // Implemented with indices because there does not seem to be a nice iterator and range-based // API for this. let mut cur = granting_idx + 1; - let mut removed_item = None; while let Some(item) = self.borrows.get(cur) { - match *item { - Item::Permission(perm, _) => { - if granting_perm.compatible_with(access, perm) { - // Keep this, check next. - cur += 1; - } else { - // Aha! This is a bad one, remove it. - let item = self.borrows.remove(cur); - trace!("access: removing item {}", item); - removed_item = Some(item); - } - } - Item::FnBarrier(call) if !global.is_active(call) => { - // An inactive barrier, just get rid of it. (Housekeeping.) - self.borrows.remove(cur); - } - Item::FnBarrier(call) => { - // We hit an active barrier! 
If we have already removed an item, - // we got a problem! The barrier was supposed to protect this item. - if let Some(removed_item) = removed_item { + if granting_perm.compatible_with(access, item.perm) { + // Keep this, check next. + cur += 1; + } else { + // Aha! This is a bad one, remove it, and make sure it is not protected. + let item = self.borrows.remove(cur); + if let Some(call) = item.protector { + if global.is_active(call) { return err!(MachineError(format!( - "not granting {} access to tag {} because barrier ({}) protects incompatible item {}", - access, tag, call, removed_item + "not granting {} access to tag {} because incompatible item {} is protected", + access, tag, item ))); } - // Keep this, check next. - cur += 1; } + trace!("access: removing item {}", item); } } } @@ -337,7 +328,7 @@ impl<'tcx> Stack { } /// Deallocate a location: Like a write access, but also there must be no - /// barriers at all. + /// active protectors at all. fn dealloc( &mut self, tag: Tag, @@ -350,63 +341,32 @@ impl<'tcx> Stack { tag, )))?; - // We must make sure there are no active barriers remaining on the stack. + // We must make sure there are no protected items remaining on the stack. // Also clear the stack, no more accesses are possible. - while let Some(itm) = self.borrows.pop() { - match itm { - Item::FnBarrier(call) if global.is_active(call) => { + while let Some(item) = self.borrows.pop() { + if let Some(call) = item.protector { + if global.is_active(call) { return err!(MachineError(format!( - "deallocating with active barrier ({})", call + "deallocating with active protector ({})", call ))) } - _ => {}, } } Ok(()) } - /// `reborrow` helper function. - /// Grant `permisson` to new pointer tagged `tag`, added at `position` in the stack. - fn grant(&mut self, perm: Permission, tag: Tag, position: usize) { - // Simply add it to the "stack" -- this might add in the middle. - // As an optimization, do nothing if the new item is identical to one of its neighbors. 
- let item = Item::Permission(perm, tag); - if self.borrows[position-1] == item || self.borrows.get(position) == Some(&item) { - // Optimization applies, done. - trace!("reborrow: avoiding redundant item {}", item); - return; - } - trace!("reborrow: adding item {}", item); - self.borrows.insert(position, item); - } - - /// `reborrow` helper function. - /// Adds a barrier. - fn barrier(&mut self, call: CallId) { - let itm = Item::FnBarrier(call); - if *self.borrows.last().unwrap() == itm { - // This is just an optimization, no functional change: Avoid stacking - // multiple identical barriers on top of each other. - // This can happen when a function receives several shared references - // that overlap. - trace!("reborrow: avoiding redundant extra barrier"); - } else { - trace!("reborrow: adding barrier for call {}", call); - self.borrows.push(itm); - } - } - /// `reborrow` helper function: test that the stack invariants are still maintained. fn test_invariants(&self) { let mut saw_shared_read_only = false; for item in self.borrows.iter() { - match item { - Item::Permission(Permission::SharedReadOnly, _) => { + match item.perm { + Permission::SharedReadOnly => { saw_shared_read_only = true; } - Item::Permission(perm, _) if saw_shared_read_only => { - panic!("Found {:?} on top of a SharedReadOnly!", perm); + // Otherwise, if we saw one before, that's a bug. + perm if saw_shared_read_only => { + bug!("Found {:?} on top of a SharedReadOnly!", perm); } _ => {} } @@ -414,16 +374,18 @@ impl<'tcx> Stack { } /// Derived a new pointer from one with the given tag. + /// `weak` controls whether this is a weak reborrow: weak reborrows do not act as + /// accesses, and they add the new item directly on top of the one it is derived + /// from instead of all the way at the top of the stack. 
fn reborrow( &mut self, derived_from: Tag, - barrier: Option, - new_perm: Permission, - new_tag: Tag, + weak: bool, + new: Item, global: &GlobalState, ) -> EvalResult<'tcx> { // Figure out which access `perm` corresponds to. - let access = if new_perm.grants(AccessKind::Write) { + let access = if new.perm.grants(AccessKind::Write) { AccessKind::Write } else { AccessKind::Read @@ -432,16 +394,13 @@ impl<'tcx> Stack { // We use that to determine where to put the new item. let (derived_from_idx, _) = self.find_granting(access, derived_from) .ok_or_else(|| InterpError::MachineError(format!( - "no item to reborrow for {:?} from tag {} found in borrow stack", new_perm, derived_from, + "no item to reborrow for {:?} from tag {} found in borrow stack", new.perm, derived_from, )))?; - // We behave very differently for the "unsafe" case of a shared-read-write pointer - // ("unsafe" because this also applies to shared references with interior mutability). - // This is because such pointers may be reborrowed to unique pointers that actually - // remain valid when their "parents" get further reborrows! - // However, either way, we ensure that we insert the new item in a way that between + // Compute where to put the new item. + // Either way, we ensure that we insert the new item in a way that between // `derived_from` and the new one, there are only items *compatible with* `derived_from`. - if new_perm == Permission::SharedReadWrite { + let new_idx = if weak { // A very liberal reborrow because the new pointer does not expect any kind of aliasing guarantee. // Just insert new permission as child of old permission, and maintain everything else. // This inserts "as far down as possible", which is good because it makes this pointer as @@ -451,17 +410,7 @@ impl<'tcx> Stack { // and we'd allow write access without invalidating frozen shared references! // This ensures F2b for `SharedReadWrite` by adding the new item below any // potentially existing `SharedReadOnly`. 
- self.grant(new_perm, new_tag, derived_from_idx+1); - - // No barrier. They can rightfully alias with `&mut`. - // FIXME: This means that the `dereferencable` attribute on non-frozen shared references - // is incorrect! They are dereferencable when the function is called, but might become - // non-dereferencable during the course of execution. - // Also see [1], [2]. - // - // [1]: , - // [2]: + derived_from_idx+1 } else { // A "safe" reborrow for a pointer that actually expects some aliasing guarantees. // Here, creating a reference actually counts as an access, and pops incompatible @@ -474,12 +423,16 @@ impl<'tcx> Stack { // on top of `derived_from`, and we want the new item at the top so that we // get the strongest possible guarantees. // This ensures U1 and F1. - self.grant(new_perm, new_tag, self.borrows.len()); + self.borrows.len() + }; - // Now is a good time to add the barrier, protecting the item we just added. - if let Some(call) = barrier { - self.barrier(call); - } + // Put the new item there. As an optimization, deduplicate if it is equal to one of its new neighbors. + if self.borrows[new_idx-1] == new || self.borrows.get(new_idx) == Some(&new) { + // Optimization applies, done. + trace!("reborrow: avoiding adding redundant item {}", new); + } else { + trace!("reborrow: adding item {}", new); + self.borrows.insert(new_idx, new); } // Make sure that after all this, the stack's invariant is still maintained. 
@@ -500,7 +453,7 @@ impl<'tcx> Stacks { tag: Tag, extra: MemoryState, ) -> Self { - let item = Item::Permission(Permission::Unique, tag); + let item = Item { perm: Permission::Unique, tag, protector: None }; let stack = Stack { borrows: vec![item], }; @@ -515,12 +468,12 @@ impl<'tcx> Stacks { &self, ptr: Pointer, size: Size, - f: impl Fn(&mut Stack, Tag, &GlobalState) -> EvalResult<'tcx>, + f: impl Fn(&mut Stack, &GlobalState) -> EvalResult<'tcx>, ) -> EvalResult<'tcx> { let global = self.global.borrow(); let mut stacks = self.stacks.borrow_mut(); for stack in stacks.iter_mut(ptr.offset, size) { - f(stack, ptr.tag, &*global)?; + f(stack, &*global)?; } Ok(()) } @@ -562,8 +515,8 @@ impl AllocationExtra for Stacks { size: Size, ) -> EvalResult<'tcx> { trace!("read access with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); - alloc.extra.for_each(ptr, size, |stack, tag, global| { - stack.access(AccessKind::Read, tag, global)?; + alloc.extra.for_each(ptr, size, |stack, global| { + stack.access(AccessKind::Read, ptr.tag, global)?; Ok(()) }) } @@ -575,8 +528,8 @@ impl AllocationExtra for Stacks { size: Size, ) -> EvalResult<'tcx> { trace!("write access with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); - alloc.extra.for_each(ptr, size, |stack, tag, global| { - stack.access(AccessKind::Write, tag, global)?; + alloc.extra.for_each(ptr, size, |stack, global| { + stack.access(AccessKind::Write, ptr.tag, global)?; Ok(()) }) } @@ -588,26 +541,28 @@ impl AllocationExtra for Stacks { size: Size, ) -> EvalResult<'tcx> { trace!("deallocation with tag {}: {:?}, size {}", ptr.tag, ptr, size.bytes()); - alloc.extra.for_each(ptr, size, |stack, tag, global| { - stack.dealloc(tag, global) + alloc.extra.for_each(ptr, size, |stack, global| { + stack.dealloc(ptr.tag, global) }) } } +/// Retagging/reborrowing. 
There is some policy in here, such as which permissions +/// to grant for which references, when to add protectors, and how to realize two-phase +/// borrows in terms of the primitives above. impl<'a, 'mir, 'tcx> EvalContextPrivExt<'a, 'mir, 'tcx> for crate::MiriEvalContext<'a, 'mir, 'tcx> {} trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, 'mir, 'tcx> { - /// High-level `reborrow` operation. This decides which reference gets which kind - /// of permission! fn reborrow( &mut self, place: MPlaceTy<'tcx, Tag>, size: Size, kind: RefKind, new_tag: Tag, - fn_barrier: bool, + force_weak: bool, + protect: bool, ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); - let barrier = if fn_barrier { Some(this.frame().extra) } else { None }; + let protector = if protect { Some(this.frame().extra) } else { None }; let ptr = place.ptr.to_ptr()?; trace!("reborrow: {:?} reference {} derived from {} (pointee {}): {:?}, size {}", kind, new_tag, ptr.tag, place.layout.ty, ptr, size.bytes()); @@ -616,6 +571,8 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, let alloc = this.memory().get(ptr.alloc_id)?; alloc.check_bounds(this, ptr, size)?; // Update the stacks. + // Make sure that raw pointers and mutable shared references are reborrowed "weak": + // There could be existing unique pointers reborrowed from them that should remain valid! let perm = match kind { RefKind::Unique => Permission::Unique, RefKind::Raw { mutable: true } => Permission::SharedReadWrite, @@ -625,16 +582,20 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, // We need a frozen-sensitive reborrow. return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| { // We are only ever `SharedReadOnly` inside the frozen bits. + let weak = !frozen || kind != RefKind::Shared; // `RefKind::Raw` is always weak, as is `SharedReadWrite`. 
let perm = if frozen { Permission::SharedReadOnly } else { Permission::SharedReadWrite }; - alloc.extra.for_each(cur_ptr, size, |stack, tag, global| { - stack.reborrow(tag, barrier, perm, new_tag, global) + let item = Item { perm, tag: new_tag, protector }; + alloc.extra.for_each(cur_ptr, size, |stack, global| { + stack.reborrow(cur_ptr.tag, force_weak || weak, item, global) }) }); } }; debug_assert_ne!(perm, Permission::SharedReadOnly, "SharedReadOnly must be used frozen-sensitive"); - alloc.extra.for_each(ptr, size, |stack, tag, global| { - stack.reborrow(tag, barrier, perm, new_tag, global) + let weak = perm == Permission::SharedReadWrite; + let item = Item { perm, tag: new_tag, protector }; + alloc.extra.for_each(ptr, size, |stack, global| { + stack.reborrow(ptr.tag, force_weak || weak, item, global) }) } @@ -644,7 +605,7 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, &mut self, val: ImmTy<'tcx, Tag>, kind: RefKind, - fn_barrier: bool, + protect: bool, two_phase: bool, ) -> EvalResult<'tcx, Immediate> { let this = self.eval_context_mut(); @@ -665,18 +626,16 @@ trait EvalContextPrivExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, }; // Reborrow. - this.reborrow(place, size, kind, new_tag, fn_barrier)?; + this.reborrow(place, size, kind, new_tag, /*force_weak:*/ two_phase, protect)?; let new_place = place.replace_tag(new_tag); // Handle two-phase borrows. if two_phase { assert!(kind == RefKind::Unique, "two-phase shared borrows make no sense"); - // Grant read access *to the parent pointer* with the old tag. This means the same pointer - // has multiple items in the stack now! - // FIXME: Think about this some more, in particular about the interaction with cast-to-raw. - // Maybe find a better way to express 2-phase, now that we have a "more expressive language" - // in the stack. + // Grant read access *to the parent pointer* with the old tag *derived from the new tag* (`new_place`). 
+ // This means the old pointer has multiple items in the stack now, which otherwise cannot happen + // for unique references -- but in this case it precisely expresses the semantics we want. let old_tag = place.ptr.to_ptr().unwrap().tag; - this.reborrow(new_place, size, RefKind::Shared, old_tag, /* fn_barrier: */ false)?; + this.reborrow(new_place, size, RefKind::Shared, old_tag, /*force_weak:*/ false, /*protect:*/ false)?; } // Return new pointer. @@ -692,7 +651,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, place: PlaceTy<'tcx, Tag> ) -> EvalResult<'tcx> { let this = self.eval_context_mut(); - // Determine mutability and whether to add a barrier. + // Determine mutability and whether to add a protector. // Cannot use `builtin_deref` because that reports *immutable* for `Box`, // making it useless. fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(RefKind, bool)> { @@ -705,7 +664,7 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, // Raw pointers need to be enabled. ty::RawPtr(tym) if kind == RetagKind::Raw => Some((RefKind::Raw { mutable: tym.mutbl == MutMutable }, false)), - // Boxes do not get a barrier: barriers reflect that references outlive the call + // Boxes do not get a protector: protectors reflect that references outlive the call // they were passed in to; that's just not the case for boxes. ty::Adt(..) if ty.is_box() => Some((RefKind::Unique, false)), _ => None, @@ -715,10 +674,10 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, // We need a visitor to visit all references. However, that requires // a `MemPlace`, so we have a fast path for reference types that // avoids allocating. - if let Some((mutbl, barrier)) = qualify(place.layout.ty, kind) { + if let Some((mutbl, protector)) = qualify(place.layout.ty, kind) { // Fast path. 
let val = this.read_immediate(this.place_to_op(place)?)?; - let val = this.retag_reference(val, mutbl, barrier, kind == RetagKind::TwoPhase)?; + let val = this.retag_reference(val, mutbl, protector, kind == RetagKind::TwoPhase)?; this.write_immediate(val, place)?; return Ok(()); } @@ -749,12 +708,12 @@ pub trait EvalContextExt<'a, 'mir, 'tcx: 'a+'mir>: crate::MiriEvalContextExt<'a, { // Cannot use `builtin_deref` because that reports *immutable* for `Box`, // making it useless. - if let Some((mutbl, barrier)) = qualify(place.layout.ty, self.kind) { + if let Some((mutbl, protector)) = qualify(place.layout.ty, self.kind) { let val = self.ecx.read_immediate(place.into())?; let val = self.ecx.retag_reference( val, mutbl, - barrier, + protector, self.kind == RetagKind::TwoPhase )?; self.ecx.write_immediate(val, place.into())?; diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut1.rs b/tests/compile-fail/stacked_borrows/aliasing_mut1.rs index 9bced43f6e..d047925163 100644 --- a/tests/compile-fail/stacked_borrows/aliasing_mut1.rs +++ b/tests/compile-fail/stacked_borrows/aliasing_mut1.rs @@ -1,6 +1,6 @@ use std::mem; -pub fn safe(_x: &mut i32, _y: &mut i32) {} //~ ERROR barrier +pub fn safe(_x: &mut i32, _y: &mut i32) {} //~ ERROR protect fn main() { let mut x = 0; diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut2.rs b/tests/compile-fail/stacked_borrows/aliasing_mut2.rs index ea24f1bd27..c679e01677 100644 --- a/tests/compile-fail/stacked_borrows/aliasing_mut2.rs +++ b/tests/compile-fail/stacked_borrows/aliasing_mut2.rs @@ -1,6 +1,6 @@ use std::mem; -pub fn safe(_x: &i32, _y: &mut i32) {} //~ ERROR barrier +pub fn safe(_x: &i32, _y: &mut i32) {} //~ ERROR protect fn main() { let mut x = 0; diff --git a/tests/compile-fail/stacked_borrows/aliasing_mut4.rs b/tests/compile-fail/stacked_borrows/aliasing_mut4.rs index 15f67d0f87..778935a6d0 100644 --- a/tests/compile-fail/stacked_borrows/aliasing_mut4.rs +++ 
b/tests/compile-fail/stacked_borrows/aliasing_mut4.rs @@ -2,7 +2,7 @@ use std::mem; use std::cell::Cell; // Make sure &mut UnsafeCell also is exclusive -pub fn safe(_x: &i32, _y: &mut Cell) {} //~ ERROR barrier +pub fn safe(_x: &i32, _y: &mut Cell) {} //~ ERROR protect fn main() { let mut x = 0; diff --git a/tests/compile-fail/stacked_borrows/deallocate_against_barrier.rs b/tests/compile-fail/stacked_borrows/deallocate_against_barrier.rs index b2f1c824f1..49e376c028 100644 --- a/tests/compile-fail/stacked_borrows/deallocate_against_barrier.rs +++ b/tests/compile-fail/stacked_borrows/deallocate_against_barrier.rs @@ -1,4 +1,4 @@ -// error-pattern: deallocating with active barrier +// error-pattern: deallocating with active protect fn inner(x: &mut i32, f: fn(&mut i32)) { // `f` may mutate, but it may not deallocate! diff --git a/tests/compile-fail/stacked_borrows/invalidate_against_barrier1.rs b/tests/compile-fail/stacked_borrows/invalidate_against_barrier1.rs index fc0dbb9e13..3a214a75b5 100644 --- a/tests/compile-fail/stacked_borrows/invalidate_against_barrier1.rs +++ b/tests/compile-fail/stacked_borrows/invalidate_against_barrier1.rs @@ -2,7 +2,7 @@ fn inner(x: *mut i32, _y: &mut i32) { // If `x` and `y` alias, retagging is fine with this... but we really // shouldn't be allowed to use `x` at all because `y` was assumed to be // unique for the duration of this call. - let _val = unsafe { *x }; //~ ERROR barrier + let _val = unsafe { *x }; //~ ERROR protect } fn main() { diff --git a/tests/compile-fail/stacked_borrows/invalidate_against_barrier2.rs b/tests/compile-fail/stacked_borrows/invalidate_against_barrier2.rs index a080c0958e..86e4a84287 100644 --- a/tests/compile-fail/stacked_borrows/invalidate_against_barrier2.rs +++ b/tests/compile-fail/stacked_borrows/invalidate_against_barrier2.rs @@ -2,7 +2,7 @@ fn inner(x: *mut i32, _y: &i32) { // If `x` and `y` alias, retagging is fine with this... 
but we really // shouldn't be allowed to write to `x` at all because `y` was assumed to be // immutable for the duration of this call. - unsafe { *x = 0 }; //~ ERROR barrier + unsafe { *x = 0 }; //~ ERROR protect } fn main() { diff --git a/tests/run-pass/stacked-borrows/2phase.rs b/tests/run-pass/stacked-borrows/2phase.rs index 87ecb4aef0..97f435472e 100644 --- a/tests/run-pass/stacked-borrows/2phase.rs +++ b/tests/run-pass/stacked-borrows/2phase.rs @@ -1,3 +1,5 @@ +#![allow(mutable_borrow_reservation_conflict)] + trait S: Sized { fn tpb(&mut self, _s: Self) {} } @@ -41,7 +43,6 @@ fn two_phase_raw() { ); } -/* fn two_phase_overlapping1() { let mut x = vec![]; let p = &x; @@ -54,7 +55,6 @@ fn two_phase_overlapping2() { let l = &x; x.add_assign(x + *l); } -*/ fn with_interior_mutability() { use std::cell::Cell; @@ -66,13 +66,13 @@ fn with_interior_mutability() { impl Thing for Cell {} let mut x = Cell::new(1); - //let l = &x; + let l = &x; x .do_the_thing({ x.set(3); - // l.set(4); // FIXME: Enable this as an example of overlapping 2PB! 
- x.get() // FIXME same: + l.get() + l.set(4); + x.get() + l.get() }) ; } @@ -84,7 +84,6 @@ fn main() { two_phase3(true); two_phase_raw(); with_interior_mutability(); - //FIXME: enable these, or remove them, depending on how https://github.com/rust-lang/rust/issues/56254 gets resolved - //two_phase_overlapping1(); - //two_phase_overlapping2(); + two_phase_overlapping1(); + two_phase_overlapping2(); } From 72cec0562cf3ff5a50f4318e316e188325cda54c Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 15:20:33 +0200 Subject: [PATCH 12/16] add tests for fixes: sharing no longer leaks, and we can handle entering interior mutability --- .../compile-fail/stacked_borrows/illegal_read6.rs | 8 ++++++++ .../{refcell.rs => interior_mutability.rs} | 15 +++++++++++++++ 2 files changed, 23 insertions(+) create mode 100644 tests/compile-fail/stacked_borrows/illegal_read6.rs rename tests/run-pass/stacked-borrows/{refcell.rs => interior_mutability.rs} (70%) diff --git a/tests/compile-fail/stacked_borrows/illegal_read6.rs b/tests/compile-fail/stacked_borrows/illegal_read6.rs new file mode 100644 index 0000000000..dc37814729 --- /dev/null +++ b/tests/compile-fail/stacked_borrows/illegal_read6.rs @@ -0,0 +1,8 @@ +// Creating a shared reference does not leak the data to raw pointers. 
+fn main() { unsafe { + let x = &mut 0; + let raw = x as *mut _; + let x = &mut *x; // kill `raw` + let _y = &*x; // this should not activate `raw` again + let _val = *raw; //~ ERROR borrow stack +} } diff --git a/tests/run-pass/stacked-borrows/refcell.rs b/tests/run-pass/stacked-borrows/interior_mutability.rs similarity index 70% rename from tests/run-pass/stacked-borrows/refcell.rs rename to tests/run-pass/stacked-borrows/interior_mutability.rs index dddc7089d0..33f44d0093 100644 --- a/tests/run-pass/stacked-borrows/refcell.rs +++ b/tests/run-pass/stacked-borrows/interior_mutability.rs @@ -1,8 +1,12 @@ +#![feature(maybe_uninit, maybe_uninit_ref)] +use std::mem::MaybeUninit; +use std::cell::Cell; use std::cell::RefCell; fn main() { aliasing_mut_and_shr(); aliasing_frz_and_shr(); + into_interior_mutability(); } fn aliasing_mut_and_shr() { @@ -42,3 +46,14 @@ fn aliasing_frz_and_shr() { inner(&rc, &*bshr); assert_eq!(*rc.borrow(), 23); } + +// Getting a pointer into a union with interior mutability used to be tricky +// business (https://github.com/rust-lang/miri/issues/615), but it should work +// now. 
+fn into_interior_mutability() { + let mut x: MaybeUninit<(Cell, u32)> = MaybeUninit::uninit(); + x.as_ptr(); + x.write((Cell::new(0), 1)); + let ptr = unsafe { x.get_ref() }; + assert_eq!(ptr.1, 1); +} From 0a313183b1bfe6f2324ae2dc30673e1649cf3859 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 15:23:20 +0200 Subject: [PATCH 13/16] bump Rust --- rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rust-version b/rust-version index dcee3ec5da..40e42e6886 100644 --- a/rust-version +++ b/rust-version @@ -1 +1 @@ -ee621f42329069c296b4c2066b3743cc4ff0f369 +efe2f32a6b8217425f361ec7c206910c611c03ee From e1ed855a441dc6f7234104b4c3a6751b23ae588d Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 16:02:42 +0200 Subject: [PATCH 14/16] more tests -- also one showing why we are not done yet --- .../stacked_borrows/shared_rw_borrows_are_weak1.rs | 14 ++++++++++++++ .../stacked_borrows/shared_rw_borrows_are_weak2.rs | 14 ++++++++++++++ tests/run-pass/stacked-borrows/stacked-borrows.rs | 13 +++++++++++++ 3 files changed, 41 insertions(+) create mode 100644 tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak1.rs create mode 100644 tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak2.rs diff --git a/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak1.rs b/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak1.rs new file mode 100644 index 0000000000..d734caf1d9 --- /dev/null +++ b/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak1.rs @@ -0,0 +1,14 @@ +// We want to test that granting a SharedReadWrite will be added +// *below* an already granted Unique -- so writing to +// the SharedReadWrite will invalidate the Unique. 
+ +use std::mem; +use std::cell::Cell; + +fn main() { unsafe { + let x = &mut Cell::new(0); + let y: &mut Cell = mem::transmute(&mut *x); // launder lifetime + let shr_rw = &*x; // thanks to interior mutability this will be a SharedReadWrite + shr_rw.set(1); + y.get_mut(); //~ ERROR borrow stack +} } diff --git a/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak2.rs b/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak2.rs new file mode 100644 index 0000000000..942bb503db --- /dev/null +++ b/tests/compile-fail/stacked_borrows/shared_rw_borrows_are_weak2.rs @@ -0,0 +1,14 @@ +// We want to test that granting a SharedReadWrite will be added +// *below* an already granted SharedReadWrite -- so writing to +// the SharedReadWrite will invalidate the SharedReadWrite. + +use std::mem; +use std::cell::RefCell; + +fn main() { unsafe { + let x = &mut RefCell::new(0); + let y: &i32 = mem::transmute(&*x.borrow()); // launder lifetime + let shr_rw = &*x; // thanks to interior mutability this will be a SharedReadWrite + shr_rw.replace(1); + let _val = *y; //~ ERROR borrow stack +} } diff --git a/tests/run-pass/stacked-borrows/stacked-borrows.rs b/tests/run-pass/stacked-borrows/stacked-borrows.rs index 791e97f0eb..7d84e33b3d 100644 --- a/tests/run-pass/stacked-borrows/stacked-borrows.rs +++ b/tests/run-pass/stacked-borrows/stacked-borrows.rs @@ -11,6 +11,7 @@ fn main() { drop_after_sharing(); direct_mut_to_const_raw(); two_raw(); + shr_and_raw(); } // Deref a raw ptr to access a field of a large struct, where the field @@ -136,3 +137,15 @@ fn two_raw() { unsafe { *y1 += 2; *y2 += 1; } } + +// Make sure that creating a *mut does not invalidate existing shared references. +fn shr_and_raw() { /* unsafe { + use std::mem; + // FIXME: This is currently disabled because "as *mut _" incurs a reborrow. 
+ let x = &mut 0; + let y1: &i32 = mem::transmute(&*x); // launder lifetimes + let y2 = x as *mut _; + let _val = *y1; + *y2 += 1; + // TODO: Once this works, add compile-fail test that tries to read from y1 again. +} */ } From abe8959339c7fafbc4cba85bda1f64a59f88a88e Mon Sep 17 00:00:00 2001 From: Oliver Scherer Date: Wed, 17 Apr 2019 16:22:33 +0200 Subject: [PATCH 15/16] Apply suggestions from code review Co-Authored-By: RalfJung --- src/stacked_borrows.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index 98b6ca6543..dc2a78d296 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -296,7 +296,7 @@ impl<'tcx> Stack { // keep the items immediately above `granting_idx` that are compatible, and then pop the rest. // However, that kills off entire "branches" of pointer derivation too easily: // in `let raw = &mut *x as *mut _; let _val = *x;`, the second statement would pop the `Unique` - // from the reborrow of the first statement, and subequently also pop the `SharedReadWrite` for `raw`. + // from the reborrow of the first statement, and subsequently also pop the `SharedReadWrite` for `raw`. // This pattern occurs a lot in the standard library: create a raw pointer, then also create a shared // reference and use that. { @@ -343,7 +343,7 @@ impl<'tcx> Stack { // We must make sure there are no protected items remaining on the stack. // Also clear the stack, no more accesses are possible. - while let Some(item) = self.borrows.pop() { + for item in self.borrows.drain(..) { if let Some(call) = item.protector { if global.is_active(call) { return err!(MachineError(format!( @@ -394,7 +394,7 @@ impl<'tcx> Stack { // We use that to determine where to put the new item. 
let (derived_from_idx, _) = self.find_granting(access, derived_from) .ok_or_else(|| InterpError::MachineError(format!( - "no item to reborrow for {:?} from tag {} found in borrow stack", new.perm, derived_from, + "no item to reborrow for {:?} from tag {} found in borrow stack", new.perm, derived_from, )))?; // Compute where to put the new item. @@ -410,7 +410,7 @@ impl<'tcx> Stack { // and we'd allow write access without invalidating frozen shared references! // This ensures F2b for `SharedReadWrite` by adding the new item below any // potentially existing `SharedReadOnly`. - derived_from_idx+1 + derived_from_idx + 1 } else { // A "safe" reborrow for a pointer that actually expects some aliasing guarantees. // Here, creating a reference actually counts as an access, and pops incompatible From 39ecd05c46e96e606561d22163ea05be5b1fa6e2 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Wed, 17 Apr 2019 16:25:38 +0200 Subject: [PATCH 16/16] embrace find_map and some whitespace changes --- src/stacked_borrows.rs | 25 ++++++++++++------------- 1 file changed, 12 insertions(+), 13 deletions(-) diff --git a/src/stacked_borrows.rs b/src/stacked_borrows.rs index dc2a78d296..250def0c7c 100644 --- a/src/stacked_borrows.rs +++ b/src/stacked_borrows.rs @@ -263,14 +263,13 @@ impl<'tcx> Stack { .rev() // search top-to-bottom // Return permission of first item that grants access. // We require a permission with the right tag, ensuring U3 and F3. - .filter_map(|(idx, item)| + .find_map(|(idx, item)| if item.perm.grants(access) && tag == item.tag { Some((idx, item.perm)) } else { None } ) - .next() } /// Test if a memory `access` using pointer tagged `tag` is granted. @@ -286,8 +285,8 @@ impl<'tcx> Stack { // Step 1: Find granting item. 
let (granting_idx, granting_perm) = self.find_granting(access, tag) .ok_or_else(|| InterpError::MachineError(format!( - "no item granting {} access to tag {} found in borrow stack", - access, tag, + "no item granting {} access to tag {} found in borrow stack", + access, tag, )))?; // Step 2: Remove everything incompatible above them. Make sure we do not remove protected @@ -313,9 +312,9 @@ impl<'tcx> Stack { if let Some(call) = item.protector { if global.is_active(call) { return err!(MachineError(format!( - "not granting {} access to tag {} because incompatible item {} is protected", - access, tag, item - ))); + "not granting {} access to tag {} because incompatible item {} is protected", + access, tag, item + ))); } } trace!("access: removing item {}", item); @@ -337,8 +336,8 @@ impl<'tcx> Stack { // Step 1: Find granting item. self.find_granting(AccessKind::Write, tag) .ok_or_else(|| InterpError::MachineError(format!( - "no item granting write access for deallocation to tag {} found in borrow stack", - tag, + "no item granting write access for deallocation to tag {} found in borrow stack", + tag, )))?; // We must make sure there are no protected items remaining on the stack. @@ -386,10 +385,10 @@ impl<'tcx> Stack { ) -> EvalResult<'tcx> { // Figure out which access `perm` corresponds to. let access = if new.perm.grants(AccessKind::Write) { - AccessKind::Write - } else { - AccessKind::Read - }; + AccessKind::Write + } else { + AccessKind::Read + }; // Now we figure out which item grants our parent (`derived_from`) this kind of access. // We use that to determine where to put the new item. let (derived_from_idx, _) = self.find_granting(access, derived_from)