Use SortedMap instead of BTreeMap for relocations in MIRI.
michaelwoerister committed May 18, 2018
1 parent cd44d47 commit 34b4f03
Showing 3 changed files with 56 additions and 26 deletions.
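Background: `rustc_data_structures::sorted_map::SortedMap` keeps its entries in a single vector sorted by key instead of in B-tree nodes. The sketch below is illustrative only (`SortedVecMap` and its methods are invented for this note, not rustc's actual API), but it shows the core idea the commit relies on: point lookups are binary searches, and a range query can hand back a contiguous slice rather than a tree-walking iterator.

    use std::mem;

    #[derive(Debug)]
    struct SortedVecMap<K: Ord, V> {
        data: Vec<(K, V)>,
    }

    impl<K: Ord, V> SortedVecMap<K, V> {
        fn new() -> Self {
            SortedVecMap { data: Vec::new() }
        }

        // Mirrors the contract of `from_presorted_elements`: the caller
        // guarantees the input is sorted by key and duplicate-free.
        fn from_presorted(elements: Vec<(K, V)>) -> Self {
            debug_assert!(elements.windows(2).all(|w| w[0].0 < w[1].0));
            SortedVecMap { data: elements }
        }

        // Point insert: one binary search, then a shift. O(n) worst case, but
        // relocation maps are small and written mostly in address order.
        fn insert(&mut self, key: K, value: V) -> Option<V> {
            match self.data.binary_search_by(|(k, _)| k.cmp(&key)) {
                Ok(i) => Some(mem::replace(&mut self.data[i].1, value)),
                Err(i) => {
                    self.data.insert(i, (key, value));
                    None
                }
            }
        }

        // A half-open range query is two binary searches plus a subslice,
        // which is what lets `relocations()` below return `&[(u64, AllocId)]`.
        fn range(&self, start: &K, end: &K) -> &[(K, V)] {
            let lo = self.data.partition_point(|(k, _)| k < start);
            let hi = self.data.partition_point(|(k, _)| k < end);
            &self.data[lo..hi]
        }
    }

    fn main() {
        let mut map = SortedVecMap::new();
        map.insert(8u64, "ptr to alloc B");
        map.insert(0u64, "ptr to alloc A");
        map.insert(16u64, "ptr to alloc C");
        assert_eq!(map.range(&0, &16).len(), 2); // half-open: excludes offset 16
        println!("{:?}", map.range(&0, &16));
    }

The contiguous layout is also what makes the bulk operations this commit starts using (`insert_presorted`, `remove_range`) natural and cheap.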
src/librustc/mir/interpret/mod.rs: 34 additions & 4 deletions
@@ -12,7 +12,6 @@ pub use self::error::{EvalError, EvalResult, EvalErrorKind, AssertMessage};
 
 pub use self::value::{PrimVal, PrimValKind, Value, Pointer, ConstValue};
 
-use std::collections::BTreeMap;
 use std::fmt;
 use mir;
 use hir::def_id::DefId;
@@ -21,8 +20,10 @@ use ty::layout::{self, Align, HasDataLayout};
 use middle::region;
 use std::iter;
 use std::io;
+use std::ops::{Deref, DerefMut};
 use syntax::ast::Mutability;
 use rustc_serialize::{Encoder, Decoder, Decodable, Encodable};
+use rustc_data_structures::sorted_map::SortedMap;
 use byteorder::{WriteBytesExt, ReadBytesExt, LittleEndian, BigEndian};
 
 #[derive(Clone, Debug, PartialEq, RustcEncodable, RustcDecodable)]
@@ -244,7 +245,7 @@ pub struct Allocation {
     pub bytes: Vec<u8>,
     /// Maps from byte addresses to allocations.
     /// Only the first byte of a pointer is inserted into the map.
-    pub relocations: BTreeMap<u64, AllocId>,
+    pub relocations: Relocations,
     /// Denotes undefined memory. Reading from undefined memory is forbidden in miri
     pub undef_mask: UndefMask,
     /// The alignment of the allocation to detect unaligned reads.
@@ -261,7 +262,7 @@ impl Allocation {
         undef_mask.grow(slice.len() as u64, true);
         Self {
             bytes: slice.to_owned(),
-            relocations: BTreeMap::new(),
+            relocations: Relocations::new(),
             undef_mask,
             align,
             runtime_mutability: Mutability::Immutable,
@@ -276,7 +277,7 @@ impl Allocation {
         assert_eq!(size as usize as u64, size);
         Allocation {
             bytes: vec![0; size as usize],
-            relocations: BTreeMap::new(),
+            relocations: Relocations::new(),
             undef_mask: UndefMask::new(size),
             align,
             runtime_mutability: Mutability::Immutable,
@@ -286,6 +287,35 @@ impl Allocation {
 
 impl<'tcx> ::serialize::UseSpecializedDecodable for &'tcx Allocation {}
 
+#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
+pub struct Relocations(SortedMap<u64, AllocId>);
+
+impl Relocations {
+    pub fn new() -> Relocations {
+        Relocations(SortedMap::new())
+    }
+
+    // The caller must guarantee that the given relocations are already sorted
+    // by address and contain no duplicates.
+    pub fn from_presorted(r: Vec<(u64, AllocId)>) -> Relocations {
+        Relocations(SortedMap::from_presorted_elements(r))
+    }
+}
+
+impl Deref for Relocations {
+    type Target = SortedMap<u64, AllocId>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl DerefMut for Relocations {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.0
+    }
+}
+
 ////////////////////////////////////////////////////////////////////////////////
 // Methods to access integers in the target endianness
 ////////////////////////////////////////////////////////////////////////////////
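`Relocations` is a newtype around `SortedMap<u64, AllocId>`: construction is funneled through `new` and `from_presorted` (which documents the sortedness contract), while the `Deref`/`DerefMut` impls forward the inner map's entire API, so call sites such as `alloc.relocations.range(..)` keep working unchanged. A minimal standalone sketch of the same pattern, with illustrative names and a plain `Vec` as the inner type to keep it self-contained:

    use std::ops::{Deref, DerefMut};

    // `Relocs` stands in for `Relocations` above.
    struct Relocs(Vec<(u64, u32)>);

    impl Relocs {
        // Construction is the only place the sortedness invariant is checked.
        fn from_presorted(v: Vec<(u64, u32)>) -> Relocs {
            debug_assert!(v.windows(2).all(|w| w[0].0 < w[1].0));
            Relocs(v)
        }
    }

    // Deref/DerefMut forward the whole read/write API of the inner container.
    // (With a bare Vec inside, DerefMut also exposes order-breaking methods
    // like `push`; the real SortedMap only offers order-preserving mutation,
    // which is what makes forwarding DerefMut safe there.)
    impl Deref for Relocs {
        type Target = Vec<(u64, u32)>;
        fn deref(&self) -> &Self::Target {
            &self.0
        }
    }

    impl DerefMut for Relocs {
        fn deref_mut(&mut self) -> &mut Self::Target {
            &mut self.0
        }
    }

    fn main() {
        let mut r = Relocs::from_presorted(vec![(0, 1), (8, 2)]);
        assert_eq!(r.len(), 2); // Vec::len reached through Deref
        r.push((16, 3));        // Vec::push reached through DerefMut
        assert_eq!(r.last(), Some(&(16, 3)));
    }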
src/librustc_codegen_llvm/mir/constant.rs: 1 addition & 1 deletion
@@ -130,7 +130,7 @@ pub fn const_alloc_to_llvm(cx: &CodegenCx, alloc: &Allocation) -> ValueRef {
     let pointer_size = layout.pointer_size.bytes() as usize;
 
     let mut next_offset = 0;
-    for (&offset, &alloc_id) in &alloc.relocations {
+    for &(offset, alloc_id) in alloc.relocations.iter() {
         assert_eq!(offset as usize as u64, offset);
         let offset = offset as usize;
         if offset > next_offset {
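The loop change reflects the new container's iteration style: `SortedMap::iter` yields `&(u64, AllocId)` tuples rather than the `(&K, &V)` pairs a `BTreeMap` produces, but both iterate in ascending key order, which this function depends on. A rough sketch of why that ordering matters here (`walk_allocation` is a hypothetical stand-in, not the real codegen):

    // Walking relocations in ascending offset order lets the loop emit the
    // plain bytes between pointers exactly once, tracking `next_offset`.
    fn walk_allocation(bytes: &[u8], relocations: &[(u64, u32)], pointer_size: usize) {
        let mut next_offset = 0usize;
        for &(offset, alloc_id) in relocations {
            let offset = offset as usize;
            if offset > next_offset {
                // Bytes with no relocation in them: emitted as raw data.
                println!("raw bytes {:?}", &bytes[next_offset..offset]);
            }
            // `pointer_size` bytes at `offset` encode a pointer into `alloc_id`.
            println!("pointer to allocation {} at offset {}", alloc_id, offset);
            next_offset = offset + pointer_size;
        }
        if next_offset < bytes.len() {
            println!("trailing raw bytes {:?}", &bytes[next_offset..]);
        }
    }

    fn main() {
        walk_allocation(&[0; 24], &[(0, 7), (16, 9)], 8);
    }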
src/librustc_mir/interpret/memory.rs: 21 additions & 21 deletions
@@ -1,4 +1,4 @@
-use std::collections::{btree_map, VecDeque};
+use std::collections::VecDeque;
 use std::ptr;
 
 use rustc::hir::def_id::DefId;
@@ -515,7 +515,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
 
     fn get_bytes(&self, ptr: MemoryPointer, size: u64, align: Align) -> EvalResult<'tcx, &[u8]> {
         assert_ne!(size, 0);
-        if self.relocations(ptr, size)?.count() != 0 {
+        if self.relocations(ptr, size)?.len() != 0 {
             return err!(ReadPointerAsBytes);
         }
         self.check_defined(ptr, size)?;
@@ -610,9 +610,9 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         // first copy the relocations to a temporary buffer, because
         // `get_bytes_mut` will clear the relocations, which is correct,
         // since we don't want to keep any relocations at the target.
-
         let relocations: Vec<_> = self.relocations(src, size)?
-            .map(|(&offset, &alloc_id)| {
+            .iter()
+            .map(|&(offset, alloc_id)| {
                 // Update relocation offsets for the new positions in the destination allocation.
                 (offset + dest.offset - src.offset, alloc_id)
             })
@@ -644,7 +644,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
 
         self.copy_undef_mask(src, dest, size)?;
         // copy back the relocations
-        self.get_mut(dest.alloc_id)?.relocations.extend(relocations);
+        self.get_mut(dest.alloc_id)?.relocations.insert_presorted(relocations);
 
         Ok(())
     }
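Every copied relocation is shifted by the same constant `dest.offset - src.offset`, so the temporary buffer is still sorted by address; `insert_presorted` can therefore splice the whole batch into the destination map instead of inserting element by element as `extend` did on the `BTreeMap`. A sketch of what such a bulk insert can look like on a sorted vector (simplified, and not the actual SortedMap implementation; it assumes the batch does not interleave with existing keys, which holds here because the destination range was just cleared):

    fn insert_presorted(data: &mut Vec<(u64, u32)>, new: Vec<(u64, u32)>) {
        if new.is_empty() {
            return;
        }
        debug_assert!(new.windows(2).all(|w| w[0].0 < w[1].0));
        // Where the batch starts relative to the existing keys: one binary
        // search for the whole batch, not one per element.
        let idx = data.partition_point(|&(k, _)| k < new[0].0);
        // Simplifying assumption for this sketch: no interleaving, so the
        // entire batch can be spliced in at a single position.
        debug_assert!(data[idx..].iter().all(|&(k, _)| k > new.last().unwrap().0));
        data.splice(idx..idx, new);
    }

    fn main() {
        let mut m = vec![(0u64, 1u32), (32, 4)];
        insert_presorted(&mut m, vec![(8, 2), (16, 3)]);
        assert_eq!(m, vec![(0, 1), (8, 2), (16, 3), (32, 4)]);
    }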
@@ -655,7 +655,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         let offset = ptr.offset as usize;
         match alloc.bytes[offset..].iter().position(|&c| c == 0) {
             Some(size) => {
-                if self.relocations(ptr, (size + 1) as u64)?.count() != 0 {
+                if self.relocations(ptr, (size + 1) as u64)?.len() != 0 {
                     return err!(ReadPointerAsBytes);
                 }
                 self.check_defined(ptr, (size + 1) as u64)?;
@@ -715,7 +715,7 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         let bytes = read_target_uint(endianness, bytes).unwrap();
         // See if we got a pointer
         if size != self.pointer_size() {
-            if self.relocations(ptr, size)?.count() != 0 {
+            if self.relocations(ptr, size)?.len() != 0 {
                 return err!(ReadPointerAsBytes);
             }
         } else {
@@ -803,24 +803,26 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         &self,
         ptr: MemoryPointer,
         size: u64,
-    ) -> EvalResult<'tcx, btree_map::Range<u64, AllocId>> {
+    ) -> EvalResult<'tcx, &[(u64, AllocId)]> {
         let start = ptr.offset.saturating_sub(self.pointer_size() - 1);
         let end = ptr.offset + size;
         Ok(self.get(ptr.alloc_id)?.relocations.range(start..end))
     }
 
     fn clear_relocations(&mut self, ptr: MemoryPointer, size: u64) -> EvalResult<'tcx> {
-        // Find all relocations overlapping the given range.
-        let keys: Vec<_> = self.relocations(ptr, size)?.map(|(&k, _)| k).collect();
-        if keys.is_empty() {
-            return Ok(());
-        }
-
         // Find the start and end of the given range and its outermost relocations.
+        let (first, last) = {
+            // Find all relocations overlapping the given range.
+            let relocations = self.relocations(ptr, size)?;
+            if relocations.is_empty() {
+                return Ok(());
+            }
+
+            (relocations.first().unwrap().0,
+             relocations.last().unwrap().0 + self.pointer_size())
+        };
        let start = ptr.offset;
        let end = start + size;
-        let first = *keys.first().unwrap();
-        let last = *keys.last().unwrap() + self.pointer_size();
 
        let alloc = self.get_mut(ptr.alloc_id)?;
 
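Note the restructuring: `relocations()` now hands back a slice borrowed from `self`, so the lookup is wrapped in a block, ending the shared borrow before `self.get_mut(...)` needs a mutable one (this code predates non-lexical lifetimes, so the borrow had to end lexically). The old code achieved the same thing by copying the keys out into a `Vec`. A toy illustration of the idiom, with a hypothetical `demo` function:

    // The shared borrow produced by the lookup is confined to the block, so a
    // mutable borrow of the same container is legal afterwards.
    fn demo(data: &mut Vec<(u64, u32)>) {
        let (first, last) = {
            let overlapping: &[(u64, u32)] = &data[..]; // shared borrow starts
            if overlapping.is_empty() {
                return;
            }
            (overlapping.first().unwrap().0, overlapping.last().unwrap().0)
        }; // shared borrow ends here
        // Now a mutable borrow is fine.
        data.retain(|&(k, _)| k < first || k > last);
    }

    fn main() {
        let mut m = vec![(0u64, 1u32), (8, 2), (16, 3)];
        demo(&mut m);
        assert!(m.is_empty());
    }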
@@ -834,16 +836,14 @@ impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
         }
 
         // Forget all the relocations.
-        for k in keys {
-            alloc.relocations.remove(&k);
-        }
+        alloc.relocations.remove_range(first ..= last);
 
         Ok(())
     }
 
     fn check_relocation_edges(&self, ptr: MemoryPointer, size: u64) -> EvalResult<'tcx> {
-        let overlapping_start = self.relocations(ptr, 0)?.count();
-        let overlapping_end = self.relocations(ptr.offset(size, self)?, 0)?.count();
+        let overlapping_start = self.relocations(ptr, 0)?.len();
+        let overlapping_end = self.relocations(ptr.offset(size, self)?, 0)?.len();
         if overlapping_start + overlapping_end != 0 {
             return err!(ReadPointerAsBytes);
         }
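`remove_range` with an inclusive range replaces the per-key `remove` loop: on a sorted vector, the keys in `first ..= last` occupy one contiguous run, so two binary searches bound them and a single drain removes them, instead of one tree removal per key. An illustrative stand-alone version (not the actual SortedMap implementation):

    fn remove_range(data: &mut Vec<(u64, u32)>, first: u64, last: u64) {
        let lo = data.partition_point(|&(k, _)| k < first);
        let hi = data.partition_point(|&(k, _)| k <= last); // inclusive upper bound
        // One contiguous removal; the tail shifts down exactly once.
        data.drain(lo..hi);
    }

    fn main() {
        let mut m = vec![(0u64, 1u32), (8, 2), (16, 3), (24, 4)];
        remove_range(&mut m, 8, 16);
        assert_eq!(m, vec![(0, 1), (24, 4)]);
    }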