Implement -Zmiri-tag-gc, a garbage collector for tags
saethlin committed Aug 26, 2022
1 parent 101c4f2 commit 664c741
Showing 10 changed files with 246 additions and 7 deletions.
4 changes: 4 additions & 0 deletions README.md
@@ -323,6 +323,10 @@ environment variable. We first document the most relevant and most commonly used
ensure alignment. (The standard library `align_to` method works fine in both modes; under
symbolic alignment it only fills the middle slice when the allocation guarantees sufficient
alignment.)
* `-Zmiri-tag-gc=<blocks>` configures how often the pointer tag garbage collector runs. The default
is to search for and remove unreachable tags once every `10,000` basic blocks. Setting this to
`0` disables the garbage collector, which causes some programs to have explosive memory usage
and/or super-linear runtime.
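
As an illustration of the failure mode the collector addresses, consider a loop that keeps retagging one location. This is a hypothetical stress case, not part of this commit:

    fn main() {
        let mut x = 0u32;
        for _ in 0..1_000_000 {
            // Each `&mut x` retag pushes a fresh Unique item onto the borrow
            // stack of `x`, and that tag is dead by the next iteration. With
            // the GC disabled, the dead items accumulate, so memory use and
            // stack-search time grow with the iteration count.
            let r = &mut x;
            *r += 1;
        }
    }

Running such a program with `MIRIFLAGS=-Zmiri-tag-gc=0 cargo miri run` should exhibit the blow-up; the default interval of 10,000 basic blocks keeps the stacks compact.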

The remaining flags are for advanced use only, and more likely to change or be removed.
Some of these are **unsound**, which means they can lead
6 changes: 6 additions & 0 deletions src/bin/miri.rs
@@ -521,6 +521,12 @@ fn main() {
Err(err) => show_error!("-Zmiri-report-progress requires a `u32`: {}", err),
};
miri_config.report_progress = Some(interval);
} else if let Some(param) = arg.strip_prefix("-Zmiri-tag-gc=") {
let interval = match param.parse::<u32>() {
Ok(i) => i,
Err(err) => show_error!("-Zmiri-tag-gc requires a `u32`: {}", err),
};
miri_config.gc_interval = interval;
} else if let Some(param) = arg.strip_prefix("-Zmiri-measureme=") {
miri_config.measureme_out = Some(param.to_string());
} else if let Some(param) = arg.strip_prefix("-Zmiri-backtrace=") {
4 changes: 4 additions & 0 deletions src/concurrency/thread.rs
@@ -289,6 +289,10 @@ impl<'mir, 'tcx: 'mir> ThreadManager<'mir, 'tcx> {
&mut self.threads[self.active_thread].stack
}

pub fn iter(&self) -> impl Iterator<Item = &Thread<'mir, 'tcx>> {
self.threads.iter()
}

pub fn all_stacks(
&self,
) -> impl Iterator<Item = &[Frame<'mir, 'tcx, Provenance, FrameData<'tcx>>]> {
3 changes: 3 additions & 0 deletions src/eval.rs
@@ -132,6 +132,8 @@ pub struct MiriConfig {
/// The location of a shared object file to load when calling external functions
/// FIXME! consider allowing users to specify paths to multiple SO files, or to a directory
pub external_so_file: Option<PathBuf>,
/// Run a garbage collector for SbTags every N basic blocks.
pub gc_interval: u32,
}

impl Default for MiriConfig {
@@ -164,6 +166,7 @@ impl Default for MiriConfig {
report_progress: None,
retag_fields: false,
external_so_file: None,
gc_interval: 10_000,
}
}
}
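
For anyone embedding Miri as a library rather than using the CLI flag, the interval is just a field on the config; a minimal sketch, assuming only the `MiriConfig` API shown in this diff:

    fn configure_gc() -> MiriConfig {
        let mut config = MiriConfig::default(); // gc_interval defaults to 10_000
        config.gc_interval = 0; // 0 disables the tag GC, mirroring -Zmiri-tag-gc=0
        config
    }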
2 changes: 2 additions & 0 deletions src/lib.rs
@@ -62,6 +62,7 @@ mod operator;
mod range_map;
mod shims;
mod stacked_borrows;
mod tag_gc;

// Establish a "crate-wide prelude": we often import `crate::*`.

@@ -110,6 +111,7 @@ pub use crate::range_map::RangeMap;
pub use crate::stacked_borrows::{
CallId, EvalContextExt as StackedBorEvalContextExt, Item, Permission, SbTag, Stack, Stacks,
};
pub use crate::tag_gc::EvalContextExt as _;

/// Insert rustc arguments at the beginning of the argument list that Miri wants to be
/// set per default, for maximal validation power.
21 changes: 21 additions & 0 deletions src/machine.rs
@@ -361,6 +361,11 @@ pub struct Evaluator<'mir, 'tcx> {

/// Handle of the optional shared object file for external functions.
pub external_so_lib: Option<(libloading::Library, std::path::PathBuf)>,

/// Run a garbage collector for SbTags every N basic blocks.
pub(crate) gc_interval: u32,
/// The number of blocks that have passed since the last SbTag GC pass.
pub(crate) since_gc: u32,
}

impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
@@ -436,6 +441,8 @@ impl<'mir, 'tcx> Evaluator<'mir, 'tcx> {
lib_file_path.clone(),
)
}),
gc_interval: config.gc_interval,
since_gc: 0,
}
}

@@ -983,6 +990,20 @@ impl<'mir, 'tcx> Machine<'mir, 'tcx> for Evaluator<'mir, 'tcx> {
// Cannot overflow, since it is strictly less than `report_progress`.
ecx.machine.since_progress_report += 1;
}

// Scan everything that can hold a live pointer to collect the reachable SbTags,
// then remove all other tags from borrow stacks.
// When debug assertions are enabled, run the GC as often as possible so that any cases
// where it mistakenly removes an important tag become visible.
if cfg!(debug_assertions)
|| (ecx.machine.gc_interval > 0 && ecx.machine.since_gc >= ecx.machine.gc_interval)
{
ecx.machine.since_gc = 0;
ecx.garbage_collect_tags()?;
} else {
ecx.machine.since_gc += 1;
}

// These are our preemption points.
ecx.maybe_preempt_active_thread();
Ok(())
6 changes: 6 additions & 0 deletions src/shims/tls.rs
@@ -233,6 +233,12 @@ impl<'tcx> TlsData<'tcx> {
data.remove(&thread_id);
}
}

pub fn iter(&self, mut visitor: impl FnMut(&Scalar<Provenance>)) {
for scalar in self.keys.values().flat_map(|v| v.data.values()) {
visitor(scalar);
}
}
}

impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
19 changes: 19 additions & 0 deletions src/stacked_borrows/mod.rs
@@ -80,6 +80,8 @@ pub struct Stacks {
history: AllocHistory,
/// The set of tags that have been exposed inside this allocation.
exposed_tags: FxHashSet<SbTag>,
/// Whether this memory has been modified since the last time the tag GC ran
modified_since_last_gc: bool,
}

/// Extra global state, available to the memory access hooks.
@@ -422,6 +424,7 @@ impl<'tcx> Stack {
let item = self.get(idx).unwrap();
Stack::item_popped(&item, global, dcx)?;
}

Ok(())
}

@@ -496,6 +499,20 @@
}
// # Stacked Borrows Core End

/// Integration with the SbTag garbage collector
impl Stacks {
pub fn remove_unreachable_tags(&mut self, live_tags: &FxHashSet<SbTag>) {
if self.modified_since_last_gc {
for stack in self.stacks.iter_mut_all() {
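// Only bother compacting stacks that have grown large; for short stacks the
// scan would likely cost more than the memory it reclaims.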
if stack.len() > 64 {
stack.retain(live_tags);
}
}
self.modified_since_last_gc = false;
}
}
}

/// Map per-stack operations to higher-level per-location-range operations.
impl<'tcx> Stacks {
/// Creates a new stack with an initial tag. For diagnostic purposes, we also need to know
@@ -508,6 +525,7 @@ impl<'tcx> Stacks {
stacks: RangeMap::new(size, stack),
history: AllocHistory::new(id),
exposed_tags: FxHashSet::default(),
modified_since_last_gc: false,
}
}

@@ -522,6 +540,7 @@
&mut FxHashSet<SbTag>,
) -> InterpResult<'tcx>,
) -> InterpResult<'tcx> {
self.modified_since_last_gc = true;
for (offset, stack) in self.stacks.iter_mut(range.start, range.size) {
let mut dcx = dcx_builder.build(&mut self.history, offset);
f(stack, &mut dcx, &mut self.exposed_tags)?;
66 changes: 59 additions & 7 deletions src/stacked_borrows/stack.rs
@@ -39,6 +39,61 @@ pub struct Stack {
unique_range: Range<usize>,
}

impl Stack {
pub fn retain(&mut self, tags: &FxHashSet<SbTag>) {
let mut first_removed = None;

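// Index 0 holds the base item, which is never collected, so both cursors start at 1.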
let mut read_idx = 1;
let mut write_idx = 1;
while read_idx < self.borrows.len() {
let left = self.borrows[read_idx - 1];
let this = self.borrows[read_idx];
let should_keep = match this.perm() {
// Always destroy adjacent Disabled tags: they only exist to separate SRW blocks,
// and two adjacent separators are no better than one.
Permission::Disabled => left.perm() != Permission::Disabled,
// SRW and SRO don't do anything special, so we keep them if they are in use
Permission::SharedReadWrite | Permission::SharedReadOnly =>
tags.contains(&this.tag()),
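// A Unique is kept if its tag is live, or if it begins a run of Uniques
// (its left neighbor is not Unique); dead Uniques inside a run are dropped.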
Permission::Unique =>
left.perm() != Permission::Unique || tags.contains(&this.tag()),
};

if should_keep {
if read_idx != write_idx {
self.borrows[write_idx] = self.borrows[read_idx];
}
write_idx += 1;
} else if first_removed.is_none() {
first_removed = Some(read_idx);
}

read_idx += 1;
}
self.borrows.truncate(write_idx);

#[cfg(not(feature = "stack-cache"))]
drop(first_removed); // This is only needed for the stack-cache

#[cfg(feature = "stack-cache")]
if let Some(first_removed) = first_removed {
// Either end of unique_range may have shifted, all we really know is that we can't
// have introduced a new Unique.
if !self.unique_range.is_empty() {
self.unique_range = 0..self.len();
}

// Replace any Items which have been collected with the base item, a known-good value.
for i in 0..CACHE_LEN {
if self.cache.idx[i] >= first_removed {
self.cache.items[i] = self.borrows[0];
self.cache.idx[i] = 0;
}
}
}
}
}

/// A very small cache of searches of a borrow stack, mapping `Item`s to their position in said stack.
///
/// It may seem like maintaining this cache is a waste for small stacks, but
@@ -105,14 +160,11 @@ impl<'tcx> Stack {

// Check that the unique_range is a valid index into the borrow stack.
// This asserts that the unique_range's start <= end.
- let uniques = &self.borrows[self.unique_range.clone()];
+ let _uniques = &self.borrows[self.unique_range.clone()];

- // Check that the start of the unique_range is precise.
- if let Some(first_unique) = uniques.first() {
- assert_eq!(first_unique.perm(), Permission::Unique);
- }
- // We cannot assert that the unique range is exact on the upper end.
- // When we pop items within the unique range, setting the end of the range precisely
+ // We cannot assert that the unique range is precise.
+ // Both ends may shift around when `Stack::retain` is called. Additionally,
+ // when we pop items within the unique range, setting the end of the range precisely
// requires doing a linear search of the borrow stack, which is exactly the kind of
// operation that all this caching exists to avoid.
}
122 changes: 122 additions & 0 deletions src/tag_gc.rs
@@ -0,0 +1,122 @@
use crate::*;
use rustc_data_structures::fx::FxHashSet;

impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: MiriEvalContextExt<'mir, 'tcx> {
fn garbage_collect_tags(&mut self) -> InterpResult<'tcx> {
let this = self.eval_context_mut();
// No reason to do anything at all if stacked borrows is off.
if this.machine.stacked_borrows.is_none() {
return Ok(());
}

let mut tags = FxHashSet::default();

for thread in this.machine.threads.iter() {
if let Some(Scalar::Ptr(
Pointer { provenance: Provenance::Concrete { sb, .. }, .. },
_,
)) = thread.panic_payload
{
tags.insert(sb);
}
}

self.find_tags_in_tls(&mut tags);
self.find_tags_in_memory(&mut tags);
self.find_tags_in_locals(&mut tags)?;

self.remove_unreachable_tags(tags);

Ok(())
}

fn find_tags_in_tls(&mut self, tags: &mut FxHashSet<SbTag>) {
let this = self.eval_context_mut();
this.machine.tls.iter(|scalar| {
if let Scalar::Ptr(Pointer { provenance: Provenance::Concrete { sb, .. }, .. }, _) =
scalar
{
tags.insert(*sb);
}
});
}

fn find_tags_in_memory(&mut self, tags: &mut FxHashSet<SbTag>) {
let this = self.eval_context_mut();
this.memory.alloc_map().iter(|it| {
for (_id, (_kind, alloc)) in it {
for (_size, prov) in alloc.relocations().iter() {
if let Provenance::Concrete { sb, .. } = prov {
tags.insert(*sb);
}
}
}
});
}

fn find_tags_in_locals(&mut self, tags: &mut FxHashSet<SbTag>) -> InterpResult<'tcx> {
let this = self.eval_context_mut();
for frame in this.machine.threads.all_stacks().flatten() {
// Handle the return place of each frame
if let Ok(return_place) = frame.return_place.try_as_mplace() {
if let Some(Provenance::Concrete { sb, .. }) = return_place.ptr.provenance {
tags.insert(sb);
}
}

for local in frame.locals.iter() {
let LocalValue::Live(value) = local.value else {
continue;
};
match value {
Operand::Immediate(Immediate::Scalar(ScalarMaybeUninit::Scalar(
Scalar::Ptr(ptr, _),
))) =>
if let Provenance::Concrete { sb, .. } = ptr.provenance {
tags.insert(sb);
},
Operand::Immediate(Immediate::ScalarPair(s1, s2)) => {
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _)) = s1 {
if let Provenance::Concrete { sb, .. } = ptr.provenance {
tags.insert(sb);
}
}
if let ScalarMaybeUninit::Scalar(Scalar::Ptr(ptr, _)) = s2 {
if let Provenance::Concrete { sb, .. } = ptr.provenance {
tags.insert(sb);
}
}
}
Operand::Indirect(MemPlace { ptr, .. }) => {
if let Some(Provenance::Concrete { sb, .. }) = ptr.provenance {
tags.insert(sb);
}
}
Operand::Immediate(Immediate::Uninit)
| Operand::Immediate(Immediate::Scalar(ScalarMaybeUninit::Uninit))
| Operand::Immediate(Immediate::Scalar(ScalarMaybeUninit::Scalar(
Scalar::Int(_),
))) => {}
}
}
}

Ok(())
}

fn remove_unreachable_tags(&mut self, tags: FxHashSet<SbTag>) {
let this = self.eval_context_mut();
this.memory.alloc_map().iter(|it| {
for (_id, (_kind, alloc)) in it {
alloc
.extra
.stacked_borrows
.as_ref()
.unwrap()
.borrow_mut()
.remove_unreachable_tags(&tags);
}
});
}
}
