Commit 32158cd

* Introduced CommonPlan
* Introduced HeapMeta
* Introduced SpaceDescriptor
* Sanity code is conditionally compiled.
Yi Lin committed Feb 3, 2020
1 parent 3149c86 commit 32158cd
Showing 25 changed files with 510 additions and 384 deletions.
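The diffs below reach the new types through call sites such as `SINGLETON.plan.common()`, `common.heap.total_pages`, and `heap.get_discontig_start()`. The following is a rough, self-contained sketch of what CommonPlan and HeapMeta might look like, inferred from those call sites rather than copied from the commit; the stand-in Address type, field set, and method bodies are placeholders.

```rust
use std::sync::atomic::{AtomicBool, AtomicUsize, Ordering};

// Stand-in for the crate's Address type, only so the sketch compiles alone.
#[derive(Clone, Copy, Debug)]
pub struct Address(pub usize);

// Heap layout/accounting shared by spaces (sketch). The diff shows
// HeapMeta::new(HEAP_START, HEAP_END), heap.total_pages.store(..),
// and heap.get_discontig_start()/get_discontig_end().
pub struct HeapMeta {
    heap_start: Address,
    heap_end: Address,
    pub total_pages: AtomicUsize,
}

impl HeapMeta {
    pub fn new(start: Address, end: Address) -> Self {
        HeapMeta { heap_start: start, heap_end: end, total_pages: AtomicUsize::new(0) }
    }
    // Placeholder bodies: the real methods presumably return the
    // discontiguous range worked out while spaces are created.
    pub fn get_discontig_start(&self) -> Address { self.heap_start }
    pub fn get_discontig_end(&self) -> Address { self.heap_end }
}

// State every plan shares (sketch). The diff reaches it via plan.common():
// .initialized, .control_collector_context, .is_in_sanity(), .set_gc_status(),
// .heap, .mmapper, .options; only a subset is modelled here.
pub struct CommonPlan {
    pub initialized: AtomicBool, // replaces the old global plan::plan::INITIALIZED
    pub heap: HeapMeta,
    in_sanity: AtomicBool,
}

impl CommonPlan {
    pub fn is_in_sanity(&self) -> bool {
        self.in_sanity.load(Ordering::Relaxed)
    }
}

fn main() {
    // Addresses are arbitrary placeholders, not the real HEAP_START/HEAP_END.
    let heap = HeapMeta::new(Address(0x1000_0000), Address(0x8000_0000));
    let common = CommonPlan {
        initialized: AtomicBool::new(false),
        heap,
        in_sanity: AtomicBool::new(false),
    };
    common.heap.total_pages.store(1024, Ordering::Relaxed);
    common.initialized.store(true, Ordering::SeqCst);
    assert!(!common.is_in_sanity());
}
```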
1 change: 1 addition & 0 deletions Cargo.toml
@@ -16,6 +16,7 @@ lazy_static = "1.1"
log = {version = "0.4", features = ["max_level_trace", "release_max_level_off"] }
crossbeam-deque = "0.6"
num_cpus = "1.8"
derivative = "1.0.3"

[features]
default = ["nogc", "openjdk"]
2 changes: 2 additions & 0 deletions src/lib.rs
@@ -17,6 +17,8 @@ extern crate lazy_static;
extern crate log;
extern crate crossbeam_deque;
extern crate num_cpus;
#[macro_use]
extern crate derivative;

#[macro_use]
pub mod util;
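The commit does not say why `derivative` was added; a common use of that crate is deriving traits such as Debug while skipping individual fields, which is handy for structs holding non-Debug state (plausibly the case for the new plan types). A generic example of the crate's usage, not code from this repository:

```rust
#[macro_use]
extern crate derivative;

struct Opaque; // deliberately has no Debug impl

#[derive(Derivative)]
#[derivative(Debug)]
struct PlanState {
    pages: usize,
    #[derivative(Debug = "ignore")] // leave this field out of the derived Debug
    opaque: Opaque,
}

fn main() {
    let s = PlanState { pages: 42, opaque: Opaque };
    println!("{:?}", s); // prints: PlanState { pages: 42 }
}
```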
90 changes: 58 additions & 32 deletions src/mm/memory_manager.rs
@@ -3,7 +3,7 @@ use libc::c_void;
use libc::c_char;

use std::ffi::CStr;
use std::str;
use std::{str, thread};

use std::sync::atomic::Ordering;

@@ -12,7 +12,6 @@ use ::plan::MutatorContext;
use ::plan::TraceLocal;
use ::plan::CollectorContext;
use ::plan::ParallelCollectorGroup;
use ::plan::plan::CONTROL_COLLECTOR_CONTEXT;

use ::vm::{Collection, VMCollection};

@@ -31,7 +30,6 @@ use ::plan::Allocator;
use util::constants::LOG_BYTES_IN_PAGE;
use util::heap::layout::vm_layout_constants::HEAP_START;
use util::heap::layout::vm_layout_constants::HEAP_END;
use ::util::sanity::sanity_checker::{INSIDE_SANITY, SanityChecker};
use util::OpaquePointer;
use crate::mmtk::SINGLETON;

@@ -77,7 +75,7 @@ pub unsafe extern fn openjdk_gc_init(calls: *const OpenJDK_Upcalls, heap_size: u
#[no_mangle]
#[cfg(any(feature = "jikesrvm", feature = "openjdk"))]
pub extern fn start_control_collector(tls: OpaquePointer) {
CONTROL_COLLECTOR_CONTEXT.run(tls);
SINGLETON.plan.common().control_collector_context.run(tls);
}

#[no_mangle]
@@ -96,7 +94,10 @@ pub unsafe extern fn gc_init(heap_size: usize) {
}
::util::logger::init().unwrap();
SINGLETON.plan.gc_init(heap_size, &SINGLETON.vm_map);
::plan::plan::INITIALIZED.store(true, Ordering::SeqCst);
SINGLETON.plan.common().initialized.store(true, Ordering::SeqCst);
thread::spawn(|| {
SINGLETON.plan.common().control_collector_context.run(OpaquePointer::UNINITIALIZED)
});
}

#[no_mangle]
@@ -145,48 +146,72 @@ pub unsafe extern fn is_valid_ref(val: ObjectReference) -> bool {
}

#[no_mangle]
#[cfg(feature = "sanity")]
pub unsafe extern fn report_delayed_root_edge(trace_local: *mut c_void, addr: *mut c_void) {
trace!("JikesRVM called report_delayed_root_edge with trace_local={:?}", trace_local);
if cfg!(feature = "sanity") && INSIDE_SANITY.load(Ordering::Relaxed) {
let local = &mut *(trace_local as *mut SanityChecker);
local.report_delayed_root_edge(Address::from_usize(addr as usize));
use ::util::sanity::sanity_checker::SanityChecker;
if SINGLETON.plan.common().is_in_sanity() {
report_delayed_root_edge_inner::<SanityChecker>(trace_local, addr)
} else {
let local = &mut *(trace_local as *mut <SelectedPlan as Plan>::TraceLocalT);
local.report_delayed_root_edge(Address::from_usize(addr as usize));
report_delayed_root_edge_inner::<<SelectedPlan as Plan>::TraceLocalT>(trace_local, addr)
}
}
#[no_mangle]
#[cfg(not(feature = "sanity"))]
pub unsafe extern fn report_delayed_root_edge(trace_local: *mut c_void, addr: *mut c_void) {
report_delayed_root_edge_inner::<<SelectedPlan as Plan>::TraceLocalT>(trace_local, addr)
}
unsafe fn report_delayed_root_edge_inner<T: TraceLocal>(trace_local: *mut c_void, addr: *mut c_void) {
trace!("report_delayed_root_edge with trace_local={:?}", trace_local);
let local = &mut *(trace_local as *mut T);
local.report_delayed_root_edge(Address::from_usize(addr as usize));
trace!("report_delayed_root_edge returned with trace_local={:?}", trace_local);
}

#[no_mangle]
#[cfg(feature = "sanity")]
pub unsafe extern fn will_not_move_in_current_collection(trace_local: *mut c_void, obj: *mut c_void) -> bool {
trace!("will_not_move_in_current_collection({:?}, {:?})", trace_local, obj);
if cfg!(feature = "sanity") && INSIDE_SANITY.load(Ordering::Relaxed) {
let local = &mut *(trace_local as *mut SanityChecker);
let ret = local.will_not_move_in_current_collection(Address::from_usize(obj as usize).to_object_reference());
trace!("will_not_move_in_current_collection returned with trace_local={:?}", trace_local);
ret
use ::util::sanity::sanity_checker::SanityChecker;
if SINGLETON.plan.common().is_in_sanity() {
will_not_move_in_current_collection_inner::<SanityChecker>(trace_local, obj)
} else {
let local = &mut *(trace_local as *mut <SelectedPlan as Plan>::TraceLocalT);
let ret = local.will_not_move_in_current_collection(Address::from_usize(obj as usize).to_object_reference());
trace!("will_not_move_in_current_collection returned with trace_local={:?}", trace_local);
ret
will_not_move_in_current_collection_inner::<<SelectedPlan as Plan>::TraceLocalT>(trace_local, obj)
}
}
#[no_mangle]
#[cfg(not(feature = "sanity"))]
pub unsafe extern fn will_not_move_in_current_collection(trace_local: *mut c_void, obj: *mut c_void) -> bool {
will_not_move_in_current_collection_inner::<<SelectedPlan as Plan>::TraceLocalT>(trace_local, obj)
}
unsafe fn will_not_move_in_current_collection_inner<T: TraceLocal>(trace_local: *mut c_void, obj: *mut c_void) -> bool {
trace!("will_not_move_in_current_collection({:?}, {:?})", trace_local, obj);
let local = &mut *(trace_local as *mut T);
let ret = local.will_not_move_in_current_collection(Address::from_usize(obj as usize).to_object_reference());
trace!("will_not_move_in_current_collection returned with trace_local={:?}", trace_local);
ret
}

#[no_mangle]
#[cfg(feature = "sanity")]
pub unsafe extern fn process_interior_edge(trace_local: *mut c_void, target: *mut c_void, slot: *mut c_void, root: bool) {
trace!("JikesRVM called process_interior_edge with trace_local={:?}", trace_local);
if cfg!(feature = "sanity") && INSIDE_SANITY.load(Ordering::Relaxed) {
let local = &mut *(trace_local as *mut SanityChecker);
local.process_interior_edge(Address::from_usize(target as usize).to_object_reference(),
Address::from_usize(slot as usize), root);
use ::util::sanity::sanity_checker::SanityChecker;
if SINGLETON.plan.common().is_in_sanity() {
process_interior_edge_inner::<SanityChecker>(trace_local, target, slot, root)
} else {
let local = &mut *(trace_local as *mut <SelectedPlan as Plan>::TraceLocalT);
local.process_interior_edge(Address::from_usize(target as usize).to_object_reference(),
Address::from_usize(slot as usize), root);
process_interior_edge_inner::<<SelectedPlan as Plan>::TraceLocalT>(trace_local, target, slot, root)
}
trace!("process_interior_root_edge returned with trace_local={:?}", trace_local);

}
#[no_mangle]
#[cfg(not(feature = "sanity"))]
pub unsafe extern fn process_interior_edge(trace_local: *mut c_void, target: *mut c_void, slot: *mut c_void, root: bool) {
process_interior_edge_inner::<<SelectedPlan as Plan>::TraceLocalT>(trace_local, target, slot, root)
}
unsafe fn process_interior_edge_inner<T: TraceLocal>(trace_local: *mut c_void, target: *mut c_void, slot: *mut c_void, root: bool) {
trace!("process_interior_edge with trace_local={:?}", trace_local);
let local = &mut *(trace_local as *mut T);
local.process_interior_edge(Address::from_usize(target as usize).to_object_reference(),
Address::from_usize(slot as usize), root);
trace!("process_interior_root_edge returned with trace_local={:?}", trace_local);
}

#[no_mangle]
@@ -199,9 +224,9 @@ pub unsafe extern fn start_worker(tls: OpaquePointer, worker: *mut c_void) {
#[no_mangle]
#[cfg(feature = "jikesrvm")]
pub unsafe extern fn enable_collection(tls: OpaquePointer) {
(&mut *CONTROL_COLLECTOR_CONTEXT.workers.get()).init_group(&SINGLETON, tls);
(&mut *SINGLETON.plan.common().control_collector_context.workers.get()).init_group(&SINGLETON, tls);
VMCollection::spawn_worker_thread::<<SelectedPlan as Plan>::CollectorT>(tls, null_mut()); // spawn controller thread
::plan::plan::INITIALIZED.store(true, Ordering::SeqCst);
SINGLETON.plan.common().initialized.store(true, Ordering::SeqCst);
}

#[no_mangle]
@@ -279,6 +304,7 @@ pub extern fn executable() -> bool {
}

#[no_mangle]
#[cfg(feature = "sanity")]
pub unsafe extern fn scan_region(){
::util::sanity::memory_scan::scan_region(&SINGLETON.plan);
}
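The three tracing entry points above (report_delayed_root_edge, will_not_move_in_current_collection, process_interior_edge) follow the same refactor: the old runtime check of `cfg!(feature = "sanity")` plus the INSIDE_SANITY global is replaced by two `#[cfg]`-gated externs that both forward to one generic `_inner` helper, so the sanity path is only compiled when the feature is enabled. A stripped-down, self-contained illustration of that pattern; the trait and names here are invented, not the crate's TraceLocal API:

```rust
#![allow(dead_code)]
use std::os::raw::c_void;

// Invented trait standing in for TraceLocal, so the example is self-contained.
trait Tracer {
    fn report_edge(&mut self, addr: usize);
}

struct SanityTracer;
impl Tracer for SanityTracer {
    fn report_edge(&mut self, addr: usize) { println!("sanity-check edge {:#x}", addr); }
}

struct PlanTracer;
impl Tracer for PlanTracer {
    fn report_edge(&mut self, addr: usize) { println!("trace edge {:#x}", addr); }
}

// One generic worker shared by every configuration.
unsafe fn report_edge_inner<T: Tracer>(tracer: *mut c_void, addr: usize) {
    let tracer = &mut *(tracer as *mut T);
    tracer.report_edge(addr);
}

// With the "sanity" feature, the checker is chosen at runtime...
#[cfg(feature = "sanity")]
unsafe fn report_edge(tracer: *mut c_void, addr: usize, in_sanity: bool) {
    if in_sanity {
        report_edge_inner::<SanityTracer>(tracer, addr)
    } else {
        report_edge_inner::<PlanTracer>(tracer, addr)
    }
}

// ...without it, the sanity branch is not compiled at all.
#[cfg(not(feature = "sanity"))]
unsafe fn report_edge(tracer: *mut c_void, addr: usize, _in_sanity: bool) {
    report_edge_inner::<PlanTracer>(tracer, addr)
}

fn main() {
    let mut plan_tracer = PlanTracer;
    unsafe { report_edge(&mut plan_tracer as *mut PlanTracer as *mut c_void, 0x1000, false) };
}
```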
4 changes: 3 additions & 1 deletion src/plan/controller_collector_context.rs
@@ -60,7 +60,9 @@ impl ControllerCollectorContext {
VMCollection::stop_all_mutators(tls);

// For heap growth logic
let user_triggered_collection: bool = SelectedPlan::is_user_triggered_collection();
// FIXME: This is not used. However, we probably want to set a 'user_triggered' flag
// when GC is requested.
// let user_triggered_collection: bool = SelectedPlan::is_user_triggered_collection();

self.clear_request();

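The FIXME above suggests recording whether a collection was requested by the user so the heap-growth logic can consult it later. Purely as an illustration of that idea (none of these names exist in the repository), such a flag could sit next to the existing request flag and be reset when the request is cleared:

```rust
use std::sync::atomic::{AtomicBool, Ordering};

// Illustrative only: pairs a "GC requested" flag with the user-triggered
// flag the FIXME mentions. Names and structure are invented.
pub struct CollectionRequest {
    request_flag: AtomicBool,
    user_triggered: AtomicBool,
}

impl CollectionRequest {
    pub fn new() -> Self {
        CollectionRequest {
            request_flag: AtomicBool::new(false),
            user_triggered: AtomicBool::new(false),
        }
    }

    // Called by a mutator (user_triggered = true) or by the allocator on
    // heap exhaustion (user_triggered = false).
    pub fn request(&self, user_triggered: bool) {
        self.user_triggered.store(user_triggered, Ordering::Relaxed);
        self.request_flag.store(true, Ordering::SeqCst);
    }

    // Called by the controller once the collection starts; returns whether
    // this cycle was user-triggered and resets both flags.
    pub fn clear_request(&self) -> bool {
        self.request_flag.store(false, Ordering::SeqCst);
        self.user_triggered.swap(false, Ordering::Relaxed)
    }
}
```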
74 changes: 32 additions & 42 deletions src/plan/nogc/nogc.rs
@@ -19,17 +19,20 @@ use super::NoGCTraceLocal;
use super::NoGCMutator;
use super::NoGCCollector;
use util::conversions::bytes_to_pages;
use plan::plan::create_vm_space;
use plan::plan::{create_vm_space, CommonPlan};
use util::heap::layout::heap_layout::VMMap;
use util::heap::layout::heap_layout::Mmapper;
use util::options::{Options, UnsafeOptionsWrapper};
use std::sync::Arc;
use util::heap::HeapMeta;
use util::heap::layout::vm_layout_constants::{HEAP_START, HEAP_END};
use std::sync::atomic::Ordering;

pub type SelectedPlan = NoGC;

pub struct NoGC {
pub control_collector_context: ControllerCollectorContext,
pub unsync: UnsafeCell<NoGCUnsync>,
pub common: CommonPlan,
}

unsafe impl Sync for NoGC {}
@@ -38,9 +41,6 @@ pub struct NoGCUnsync {
vm_space: ImmortalSpace,
pub space: ImmortalSpace,
pub los: LargeObjectSpace,
pub mmapper: &'static Mmapper,
pub options: Arc<UnsafeOptionsWrapper>,
pub total_pages: usize,
}

impl Plan for NoGC {
@@ -49,53 +49,36 @@ impl Plan for NoGC {
type CollectorT = NoGCCollector;

fn new(vm_map: &'static VMMap, mmapper: &'static Mmapper, options: Arc<UnsafeOptionsWrapper>) -> Self {
let mut heap = HeapMeta::new(HEAP_START, HEAP_END);

NoGC {
control_collector_context: ControllerCollectorContext::new(),
unsync: UnsafeCell::new(NoGCUnsync {
vm_space: create_vm_space(vm_map, mmapper),
vm_space: create_vm_space(vm_map, mmapper, &mut heap),
space: ImmortalSpace::new("nogc_space", true,
VMRequest::discontiguous(), vm_map, mmapper),
los: LargeObjectSpace::new("los", true, VMRequest::discontiguous(), vm_map, mmapper),
mmapper,
options,
total_pages: 0,
}
),
VMRequest::discontiguous(), vm_map, mmapper, &mut heap),
los: LargeObjectSpace::new("los", true, VMRequest::discontiguous(), vm_map, mmapper, &mut heap),
}),
common: CommonPlan::new(vm_map, mmapper, options, heap),
}
}

unsafe fn gc_init(&self, heap_size: usize, vm_map: &'static VMMap) {
vm_map.finalize_static_space_map();
vm_map.finalize_static_space_map(self.common.heap.get_discontig_start(), self.common.heap.get_discontig_end());

let unsync = &mut *self.unsync.get();
unsync.total_pages = bytes_to_pages(heap_size);
self.common.heap.total_pages.store(bytes_to_pages(heap_size), Ordering::Relaxed);
// FIXME correctly initialize spaces based on options
unsync.vm_space.init(vm_map);
unsync.space.init(vm_map);
unsync.los.init(vm_map);

// These VMs require that the controller thread is started by the VM itself.
// (Usually because it calls into VM code that accesses the TLS.)
if !(cfg!(feature = "jikesrvm") || cfg!(feature = "openjdk")) {
thread::spawn(|| {
::plan::plan::CONTROL_COLLECTOR_CONTEXT.run(OpaquePointer::UNINITIALIZED )
});
}
}

fn mmapper(&self) -> &'static Mmapper {
let unsync = unsafe { &*self.unsync.get() };
unsync.mmapper
}

fn options(&self) -> &Options {
let unsync = unsafe { &*self.unsync.get() };
&unsync.options
fn common(&self) -> &CommonPlan {
&self.common
}

fn bind_mutator(&self, tls: OpaquePointer) -> *mut c_void {
let unsync = unsafe { &*self.unsync.get() };
Box::into_raw(Box::new(NoGCMutator::new(
tls, &unsync.space, &unsync.los))) as *mut c_void
fn bind_mutator(&'static self, tls: OpaquePointer) -> *mut c_void {
Box::into_raw(Box::new(NoGCMutator::new(tls, self))) as *mut c_void
}

fn will_never_move(&self, object: ObjectReference) -> bool {
@@ -104,11 +87,6 @@ impl Plan for NoGC {

unsafe fn collection_phase(&self, tls: OpaquePointer, phase: &Phase) {}

fn get_total_pages(&self) -> usize {
let unsync = unsafe { &*self.unsync.get() };
unsync.total_pages
}

fn get_pages_used(&self) -> usize {
let unsync = unsafe { &*self.unsync.get() };
unsync.space.reserved_pages() + unsync.los.reserved_pages()
@@ -139,7 +117,7 @@ impl Plan for NoGC {
unsync.vm_space.in_space(address.to_object_reference()) ||
unsync.los.in_space(address.to_object_reference())
} {
return unsync.mmapper.address_is_mapped(address);
return self.common.mmapper.address_is_mapped(address);
} else {
return false;
}
@@ -158,4 +136,16 @@
}
return true;
}
}

impl NoGC {
pub fn get_immortal_space(&self) -> &'static ImmortalSpace {
let unsync = unsafe { &*self.unsync.get() };
&unsync.space
}

pub fn get_los(&self) -> &'static LargeObjectSpace {
let unsync = unsafe { &*self.unsync.get() };
&unsync.los
}
}
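With every plan now exposing common(), the boilerplate accessors NoGC just dropped (mmapper(), options(), get_total_pages()) can plausibly become default methods on the Plan trait. A self-contained sketch of that shape, under that assumption; the real trait and types are much richer:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

// Minimal stand-ins so the sketch compiles on its own.
pub struct Mmapper;
pub struct HeapMeta { pub total_pages: AtomicUsize }
pub struct CommonPlan { pub mmapper: &'static Mmapper, pub heap: HeapMeta }

pub trait Plan {
    fn common(&self) -> &CommonPlan;

    // Default implementations shared by every plan instead of being
    // re-implemented in NoGC, SemiSpace, and so on.
    fn mmapper(&self) -> &'static Mmapper {
        self.common().mmapper
    }

    fn get_total_pages(&self) -> usize {
        self.common().heap.total_pages.load(Ordering::Relaxed)
    }
}
```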
9 changes: 5 additions & 4 deletions src/plan/nogc/nogcmutator.rs
@@ -10,12 +10,13 @@ use ::util::heap::MonotonePageResource;
use ::mmtk::SINGLETON;
use ::util::OpaquePointer;
use libc::c_void;
use plan::nogc::NoGC;

#[repr(C)]
pub struct NoGCMutator {
// ImmortalLocal
nogc: BumpAllocator<MonotonePageResource<ImmortalSpace>>,
los: LargeObjectAllocator
los: LargeObjectAllocator,
}

impl MutatorContext for NoGCMutator {
@@ -56,10 +57,10 @@
}

impl NoGCMutator {
pub fn new(tls: OpaquePointer, space: &'static ImmortalSpace, los: &'static LargeObjectSpace) -> Self {
pub fn new(tls: OpaquePointer, plan: &'static NoGC) -> Self {
NoGCMutator {
nogc: BumpAllocator::new(tls, Some(space)),
los: LargeObjectAllocator::new(tls, Some(los))
nogc: BumpAllocator::new(tls, Some(plan.get_immortal_space()), plan),
los: LargeObjectAllocator::new(tls, Some(plan.get_los()), plan),
}
}
}
2 changes: 1 addition & 1 deletion src/plan/phase.rs
@@ -144,7 +144,7 @@ impl PhaseManager {
let order = collector.rendezvous();
let primary = order == 0;
if primary && resume {
plan::plan::set_gc_status(plan::plan::GcStatus::GcProper);
plan.common().set_gc_status(plan::plan::GcStatus::GcProper);
}
let mut is_even_phase = true;
if primary {