Implement transparent hugepage support (#905)
This PR adds a new MMTk option, `transparent_hugepages`. When it is set to true (only supported on Linux), `madvise` is called after `mmap` so that the OS can back MMTk's mappings with transparent hugepages.

This PR also refactors the mmapper code so that future changes to mmap behavior will be less invasive.
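
For readers unfamiliar with the mechanism: on Linux, `madvise(2)` with `MADV_HUGEPAGE` marks a mapped range as eligible for transparent hugepages. The sketch below illustrates the idea using the `libc` crate; `apply_mmap_strategy` is a hypothetical helper name, and the real `MmapStrategy` enum and mmap wrappers live in `src/util/memory.rs`, whose diff is not rendered on this page.

```rust
/// Mirrors the strategy enum this PR introduces (a sketch, not the exact code).
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub enum MmapStrategy {
    Normal,
    TransparentHugePages,
}

/// Hypothetical helper: apply the strategy to a freshly mmapped range.
#[cfg(target_os = "linux")]
pub fn apply_mmap_strategy(start: *mut libc::c_void, size: usize, strategy: MmapStrategy) {
    if strategy == MmapStrategy::TransparentHugePages {
        // MADV_HUGEPAGE only marks the range as THP-eligible; the kernel
        // decides when (and whether) to back it with huge pages.
        unsafe {
            libc::madvise(start, size, libc::MADV_HUGEPAGE);
        }
    }
}
```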
caizixian authored Aug 17, 2023
1 parent 7882280 commit 4873b4a
Showing 11 changed files with 117 additions and 32 deletions.
src/mmtk.rs (4 additions, 0 deletions)
@@ -123,6 +123,10 @@ impl<VM: VMBinding> MMTK<VM> {
             plan.base().heap.get_discontig_end(),
         );
 
+        if *options.transparent_hugepages {
+            MMAPPER.set_mmap_strategy(crate::util::memory::MmapStrategy::TransparentHugePages);
+        }
+
         MMTK {
             options,
             plan,
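
For context, nothing else is needed to opt in: a binding only has to set the option before the `MMTK` instance is constructed. A minimal sketch, assuming the `MMTK_`-prefixed environment-variable path for options (bindings may instead set the option through the options API directly):

```rust
fn main() {
    // Assumption: MMTk picks up options from MMTK_*-prefixed environment
    // variables when its options are built, so setting this before the
    // MMTK instance is constructed enables the branch shown above.
    std::env::set_var("MMTK_TRANSPARENT_HUGEPAGES", "true");

    // ... build the MMTK instance through the VM binding as usual ...
}
```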
src/policy/lockfreeimmortalspace.rs (7 additions, 1 deletion)
@@ -12,6 +12,7 @@ use crate::util::address::Address;
 use crate::util::conversions;
 use crate::util::heap::layout::vm_layout_constants::{AVAILABLE_BYTES, AVAILABLE_START};
 use crate::util::heap::PageResource;
+use crate::util::memory::MmapStrategy;
 use crate::util::metadata::side_metadata::SideMetadataContext;
 use crate::util::metadata::side_metadata::SideMetadataSanity;
 use crate::util::opaque_pointer::*;
@@ -200,7 +201,12 @@ impl<VM: VMBinding> LockFreeImmortalSpace<VM> {
         };
 
         // Eagerly memory map the entire heap (also zero all the memory)
-        crate::util::memory::dzmmap_noreplace(AVAILABLE_START, total_bytes).unwrap();
+        let strategy = if *args.options.transparent_hugepages {
+            MmapStrategy::TransparentHugePages
+        } else {
+            MmapStrategy::Normal
+        };
+        crate::util::memory::dzmmap_noreplace(AVAILABLE_START, total_bytes, strategy).unwrap();
         if space
             .metadata
             .try_map_metadata_space(AVAILABLE_START, total_bytes)
src/util/heap/layout/byte_map_mmapper.rs (19 additions, 2 deletions)
@@ -5,6 +5,7 @@ use crate::util::Address;
 use crate::util::constants::*;
 use crate::util::conversions::pages_to_bytes;
 use crate::util::heap::layout::vm_layout_constants::*;
+use crate::util::memory::MmapStrategy;
 use std::fmt;
 use std::sync::atomic::Ordering;
 use std::sync::Mutex;
@@ -23,6 +24,7 @@ pub const VERBOSE: bool = true;
 pub struct ByteMapMmapper {
     lock: Mutex<()>,
     mapped: [Atomic<MapState>; MMAP_NUM_CHUNKS],
+    strategy: Atomic<MmapStrategy>,
 }
 
 impl fmt::Debug for ByteMapMmapper {
@@ -32,6 +34,10 @@ impl fmt::Debug for ByteMapMmapper {
 }
 
 impl Mmapper for ByteMapMmapper {
+    fn set_mmap_strategy(&self, strategy: MmapStrategy) {
+        self.strategy.store(strategy, Ordering::Relaxed);
+    }
+
     fn eagerly_mmap_all_spaces(&self, _space_map: &[Address]) {
         unimplemented!()
     }
@@ -62,7 +68,12 @@
 
             let mmap_start = Self::mmap_chunks_to_address(chunk);
             let _guard = self.lock.lock().unwrap();
-            MapState::transition_to_mapped(&self.mapped[chunk], mmap_start).unwrap();
+            MapState::transition_to_mapped(
+                &self.mapped[chunk],
+                mmap_start,
+                self.strategy.load(Ordering::Relaxed),
+            )
+            .unwrap();
         }
 
         Ok(())
@@ -86,7 +97,12 @@
 
             let mmap_start = Self::mmap_chunks_to_address(chunk);
             let _guard = self.lock.lock().unwrap();
-            MapState::transition_to_quarantined(&self.mapped[chunk], mmap_start).unwrap();
+            MapState::transition_to_quarantined(
+                &self.mapped[chunk],
+                mmap_start,
+                self.strategy.load(Ordering::Relaxed),
+            )
+            .unwrap();
         }
 
         Ok(())
@@ -123,6 +139,7 @@ impl ByteMapMmapper {
         ByteMapMmapper {
             lock: Mutex::new(()),
             mapped: unsafe { transmute([MapState::Unmapped; MMAP_NUM_CHUNKS]) },
+            strategy: Atomic::new(MmapStrategy::Normal),
         }
     }
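
A design point visible in the struct above: keeping the strategy in an `Atomic<MmapStrategy>` lets `set_mmap_strategy` take `&self` on a globally shared mmapper, and `Ordering::Relaxed` suffices because the strategy is written once during initialization (see the `MMTK::new` hunk above) before any concurrent mapping. A standalone sketch of the pattern with the `atomic` crate; `SharedConfig` is a made-up name:

```rust
use atomic::{Atomic, Ordering};

#[derive(Copy, Clone)]
enum MmapStrategy {
    Normal,
    TransparentHugePages,
}

struct SharedConfig {
    strategy: Atomic<MmapStrategy>,
}

impl SharedConfig {
    fn set_strategy(&self, strategy: MmapStrategy) {
        // Written once at startup; no other data is published through
        // this store, so Relaxed ordering is enough.
        self.strategy.store(strategy, Ordering::Relaxed);
    }

    fn strategy(&self) -> MmapStrategy {
        self.strategy.load(Ordering::Relaxed)
    }
}
```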
src/util/heap/layout/fragmented_mapper.rs (17 additions, 2 deletions)
@@ -3,6 +3,7 @@ use super::Mmapper;
 use crate::util::constants::BYTES_IN_PAGE;
 use crate::util::conversions;
 use crate::util::heap::layout::vm_layout_constants::*;
+use crate::util::memory::MmapStrategy;
 use crate::util::Address;
 use atomic::{Atomic, Ordering};
 use std::fmt;
@@ -49,6 +50,7 @@ pub struct FragmentedMapper {
     free_slabs: Vec<Option<Box<Slab>>>,
     slab_table: Vec<Option<Box<Slab>>>,
     slab_map: Vec<Address>,
+    strategy: Atomic<MmapStrategy>,
 }
 
 impl fmt::Debug for FragmentedMapper {
@@ -58,6 +60,10 @@ impl fmt::Debug for FragmentedMapper {
 }
 
 impl Mmapper for FragmentedMapper {
+    fn set_mmap_strategy(&self, strategy: MmapStrategy) {
+        self.strategy.store(strategy, Ordering::Relaxed);
+    }
+
     fn eagerly_mmap_all_spaces(&self, _space_map: &[Address]) {}
 
     fn mark_as_mapped(&self, mut start: Address, bytes: usize) {
@@ -123,7 +129,11 @@
         // Transition the chunks in bulk.
         {
             let _guard = self.lock.lock().unwrap();
-            MapState::bulk_transition_to_quarantined(state_slices.as_slice(), mmap_start)?;
+            MapState::bulk_transition_to_quarantined(
+                state_slices.as_slice(),
+                mmap_start,
+                self.strategy.load(Ordering::Relaxed),
+            )?;
         }
 
         Ok(())
@@ -154,7 +164,11 @@
 
                 let mmap_start = Self::chunk_index_to_address(base, chunk);
                 let _guard = self.lock.lock().unwrap();
-                MapState::transition_to_mapped(entry, mmap_start)?;
+                MapState::transition_to_mapped(
+                    entry,
+                    mmap_start,
+                    self.strategy.load(Ordering::Relaxed),
+                )?;
             }
             start = high;
         }
@@ -213,6 +227,7 @@ impl FragmentedMapper {
             free_slabs: (0..MAX_SLABS).map(|_| Some(Self::new_slab())).collect(),
             slab_table: (0..SLAB_TABLE_SIZE).map(|_| None).collect(),
             slab_map: vec![SENTINEL; SLAB_TABLE_SIZE],
+            strategy: Atomic::new(MmapStrategy::Normal),
         }
     }
src/util/heap/layout/map64.rs (2 additions, 0 deletions)
@@ -6,6 +6,7 @@ use crate::util::heap::freelistpageresource::CommonFreeListPageResource;
 use crate::util::heap::layout::heap_parameters::*;
 use crate::util::heap::layout::vm_layout_constants::*;
 use crate::util::heap::space_descriptor::SpaceDescriptor;
+use crate::util::memory::MmapStrategy;
 use crate::util::raw_memory_freelist::RawMemoryFreeList;
 use crate::util::rust_util::zeroed_alloc::new_zeroed_vec;
 use crate::util::Address;
@@ -95,6 +96,7 @@ impl VMMap for Map64 {
                 units as _,
                 grain,
                 heads,
+                MmapStrategy::Normal,
             ));
 
             self_mut.fl_map[index] =
src/util/heap/layout/mmapper.rs (10 additions, 4 deletions)
@@ -7,6 +7,9 @@ use std::io::Result;
 
 /// Generic mmap and protection functionality
 pub trait Mmapper: Sync {
+    /// Set mmap strategy
+    fn set_mmap_strategy(&self, strategy: MmapStrategy);
+
     /// Given an address array describing the regions of virtual memory to be used
     /// by MMTk, demand zero map all of them if they are not already mapped.
     ///
@@ -81,16 +84,17 @@ impl MapState {
     pub(super) fn transition_to_mapped(
         state: &Atomic<MapState>,
         mmap_start: Address,
+        strategy: MmapStrategy,
     ) -> Result<()> {
         trace!(
             "Trying to map {} - {}",
             mmap_start,
             mmap_start + MMAP_CHUNK_BYTES
         );
         let res = match state.load(Ordering::Relaxed) {
-            MapState::Unmapped => dzmmap_noreplace(mmap_start, MMAP_CHUNK_BYTES),
+            MapState::Unmapped => dzmmap_noreplace(mmap_start, MMAP_CHUNK_BYTES, strategy),
             MapState::Protected => munprotect(mmap_start, MMAP_CHUNK_BYTES),
-            MapState::Quarantined => unsafe { dzmmap(mmap_start, MMAP_CHUNK_BYTES) },
+            MapState::Quarantined => unsafe { dzmmap(mmap_start, MMAP_CHUNK_BYTES, strategy) },
             // might have become MapState::Mapped here
             MapState::Mapped => Ok(()),
         };
@@ -105,14 +109,15 @@
     pub(super) fn transition_to_quarantined(
         state: &Atomic<MapState>,
         mmap_start: Address,
+        strategy: MmapStrategy,
     ) -> Result<()> {
         trace!(
             "Trying to quarantine {} - {}",
             mmap_start,
             mmap_start + MMAP_CHUNK_BYTES
         );
         let res = match state.load(Ordering::Relaxed) {
-            MapState::Unmapped => mmap_noreserve(mmap_start, MMAP_CHUNK_BYTES),
+            MapState::Unmapped => mmap_noreserve(mmap_start, MMAP_CHUNK_BYTES, strategy),
             MapState::Quarantined => Ok(()),
             MapState::Mapped => {
                 // If a chunk is mapped by us and we try to quarantine it, we simply don't do anything.
@@ -148,6 +153,7 @@ impl MapState {
     pub(super) fn bulk_transition_to_quarantined(
         state_slices: &[&[Atomic<MapState>]],
         mmap_start: Address,
+        strategy: MmapStrategy,
     ) -> Result<()> {
         trace!(
             "Trying to bulk-quarantine {} - {}",
@@ -170,7 +176,7 @@
             match group.key {
                 MapState::Unmapped => {
                     trace!("Trying to quarantine {} - {}", start_addr, end_addr);
-                    mmap_noreserve(start_addr, end_addr - start_addr)?;
+                    mmap_noreserve(start_addr, end_addr - start_addr, strategy)?;
 
                     for state in group {
                         state.store(MapState::Quarantined, Ordering::Relaxed);
(The remaining five changed files, including src/util/memory.rs where `MmapStrategy` and the mmap wrappers are defined, are not rendered on this page.)
