Skip to content

Commit

Permalink
Remove the MemoryStyle enum
Browse files Browse the repository at this point in the history
There are no more major users left, only a few minor pieces here and
there. This should complete the refactoring to delete the "static" and
"dynamic" terminology from Wasmtime and all we're left with are linear
memories with a few knobs in `Tunables`.
  • Loading branch information
alexcrichton committed Nov 7, 2024
1 parent f50d8d9 commit 95367fd
Show file tree
Hide file tree
Showing 7 changed files with 18 additions and 115 deletions.
23 changes: 0 additions & 23 deletions crates/cranelift/src/translate/heap.rs
Original file line number Diff line number Diff line change
Expand Up @@ -35,29 +35,6 @@ entity_impl!(Heap, "heap");
/// to always cause a trap when accessed. It is used to optimize bounds
/// checking for heap accesses with a shared base pointer. They are
/// addressable but not accessible.
///
/// The *heap bound* is the total size of the mapped and unmapped pages. This is
/// the bound that `heap_addr` checks against. Memory accesses inside the heap
/// bounds can trap if they hit an unmapped page (which is not accessible).
///
/// Two styles of heaps are supported, *static* and *dynamic*. They behave
/// differently when resized.
///
/// #### Static heaps
///
/// A *static heap* starts out with all the address space it will ever need, so
/// it never moves to a different address. At the base address is a number of
/// mapped pages corresponding to the heap's current size. Then follows a number
/// of unmapped pages where the heap can grow up to its maximum size. After the
/// unmapped pages follow the offset-guard pages which are also guaranteed to
/// generate a trap when accessed.
///
/// #### Dynamic heaps
///
/// A *dynamic heap* can be relocated to a different base address when it is
/// resized, and its bound can move dynamically. The offset-guard pages move
/// when the heap is resized. The bound of a dynamic heap is stored in a global
/// value.
#[derive(Clone, PartialEq, Hash)]
pub struct HeapData {
/// The address of the start of the heap's storage.
Expand Down
60 changes: 0 additions & 60 deletions crates/environ/src/module.rs
Original file line number Diff line number Diff line change
Expand Up @@ -7,66 +7,6 @@ use core::ops::Range;
use cranelift_entity::{packed_option::ReservedValue, EntityRef};
use serde_derive::{Deserialize, Serialize};

/// Implementation styles for WebAssembly linear memory.
///
/// A style is chosen per-memory by [`MemoryStyle::for_memory`] from the
/// memory's type plus the engine's `Tunables`.
#[derive(Debug, Clone, Hash, Serialize, Deserialize)]
pub enum MemoryStyle {
    /// The actual memory can be resized and moved.
    Dynamic {
        /// Extra space to reserve when a memory must be moved due to growth.
        ///
        /// Populated from `Tunables::memory_reservation_for_growth` in
        /// `for_memory`.
        reserve: u64,
    },
    /// Address space is allocated up front.
    Static {
        /// The number of bytes which are reserved for this linear memory. Only
        /// the lower bytes which represent the actual linear memory need be
        /// mapped, but other bytes must be guaranteed to be unmapped.
        ///
        /// Populated from `Tunables::memory_reservation` in `for_memory`.
        byte_reservation: u64,
    },
}

impl MemoryStyle {
    /// Decide on an implementation style for the given `Memory`.
    ///
    /// Returns `Static` (reserving `tunables.memory_reservation` bytes of
    /// address space up front) only when the memory's bounds provably fit in
    /// that reservation and signals-based traps are available; otherwise the
    /// memory is `Dynamic` with `tunables.memory_reservation_for_growth`
    /// reserved for in-place growth.
    pub fn for_memory(memory: Memory, tunables: &Tunables) -> Self {
        let dynamic = Self::Dynamic {
            reserve: tunables.memory_reservation_for_growth,
        };

        // A static memory requires guard-page-based bounds checking, which in
        // turn requires signals-based traps, and a page size at least as large
        // as the default wasm page size. Ideally we would compare against (an
        // upper bound on) the target's page size, but unfortunately that is a
        // little hard to plumb through here.
        if memory.page_size_log2 < Memory::DEFAULT_PAGE_SIZE_LOG2
            || !tunables.signals_based_traps
        {
            return dynamic;
        }

        let fits_in_reservation = match memory.maximum_byte_size() {
            Ok(declared_max) => {
                // When the memory cannot move, growth is capped at the
                // reservation regardless of the declared maximum.
                let effective_max = if tunables.memory_may_move {
                    declared_max
                } else {
                    declared_max.min(tunables.memory_reservation)
                };

                // Ensure the minimum is less than the maximum; the minimum
                // might exceed the maximum when the memory is artificially
                // bounded via `memory_may_move` above.
                memory.minimum_byte_size().unwrap() <= effective_max
                    && effective_max <= tunables.memory_reservation
            }

            // If the maximum size of this memory is not representable with
            // `u64` then use `memory_may_move` to indicate whether it's a
            // static memory or not. It should be ok to discard the linear
            // memory's maximum size here as growth to the maximum is always
            // fallible and never guaranteed.
            Err(_) => !tunables.memory_may_move,
        };

        if fits_in_reservation {
            Self::Static {
                byte_reservation: tunables.memory_reservation,
            }
        } else {
            dynamic
        }
    }
}

/// A WebAssembly linear memory initializer.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct MemoryInitializer {
Expand Down
13 changes: 3 additions & 10 deletions crates/wasmtime/src/runtime/memory.rs
Original file line number Diff line number Diff line change
Expand Up @@ -1048,16 +1048,9 @@ mod tests {
let ty = MemoryType::new(1, None);
let mem = Memory::new(&mut store, ty).unwrap();
let store = store.as_context();
let style = wasmtime_environ::MemoryStyle::for_memory(
store[mem.0].memory,
store.engine().tunables(),
);

assert_eq!(store.engine().tunables().memory_guard_size, 0);
match style {
wasmtime_environ::MemoryStyle::Dynamic { .. } => {}
other => panic!("unexpected style {other:?}"),
}
let tunables = store.engine().tunables();
assert_eq!(tunables.guard_size, 0);
assert!(!store[mem.0].memory.can_elide_bounds_check(tunables, 12));
}

#[test]
Expand Down
11 changes: 2 additions & 9 deletions crates/wasmtime/src/runtime/trampoline/memory.rs
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,7 @@ use crate::store::{InstanceId, StoreOpaque};
use crate::MemoryType;
use alloc::sync::Arc;
use wasmtime_environ::{
DefinedMemoryIndex, DefinedTableIndex, EntityIndex, HostPtr, MemoryStyle, Module, Tunables,
VMOffsets,
DefinedMemoryIndex, DefinedTableIndex, EntityIndex, HostPtr, Module, Tunables, VMOffsets,
};

#[cfg(feature = "component-model")]
Expand Down Expand Up @@ -106,13 +105,7 @@ impl RuntimeMemoryCreator for MemoryCreatorProxy {
minimum: usize,
maximum: Option<usize>,
) -> Result<Box<dyn RuntimeLinearMemory>> {
let style = MemoryStyle::for_memory(*ty, tunables);
let reserved_size_in_bytes = match style {
MemoryStyle::Static { byte_reservation } => {
Some(usize::try_from(byte_reservation).unwrap())
}
MemoryStyle::Dynamic { .. } => None,
};
let reserved_size_in_bytes = Some(tunables.memory_reservation.try_into().unwrap());
self.0
.new_memory(
MemoryType::from_wasmtime_memory(ty),
Expand Down
20 changes: 10 additions & 10 deletions crates/wasmtime/src/runtime/vm/cow.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,15 +9,14 @@ use super::sys::DecommitBehavior;
use crate::prelude::*;
use crate::runtime::vm::sys::vm::{self, MemoryImageSource};
use crate::runtime::vm::{
round_usize_up_to_host_pages, usize_is_multiple_of_host_page_size, MmapVec, SendSyncPtr,
host_page_size, round_usize_up_to_host_pages, usize_is_multiple_of_host_page_size, MmapVec,
SendSyncPtr,
};
use alloc::sync::Arc;
use core::ffi::c_void;
use core::ops::Range;
use core::ptr::{self, NonNull};
use wasmtime_environ::{
DefinedMemoryIndex, MemoryInitialization, MemoryStyle, Module, PrimaryMap, Tunables,
};
use wasmtime_environ::{DefinedMemoryIndex, MemoryInitialization, Module, PrimaryMap, Tunables};

/// Backing images for memories in a module.
///
Expand Down Expand Up @@ -430,13 +429,14 @@ impl MemoryImageSlot {
}

// If (1) the accessible region is not in its initial state, and (2) the
// memory relies on virtual memory at all (i.e. has offset guard pages
// and/or is static), then we need to reset memory protections. Put
// another way, the only time it is safe to not reset protections is
// when we are using dynamic memory without any guard pages.
let style = MemoryStyle::for_memory(*ty, tunables);
// memory relies on virtual memory at all (i.e. has offset guard
// pages), then we need to reset memory protections. Put another way,
// the only time it is safe to not reset protections is when we are
// using dynamic memory without any guard pages.
let host_page_size_log2 = u8::try_from(host_page_size().ilog2()).unwrap();
if initial_size_bytes_page_aligned < self.accessible
&& (tunables.memory_guard_size > 0 || matches!(style, MemoryStyle::Static { .. }))
&& (tunables.memory_guard_size > 0
|| ty.can_elide_bounds_check(tunables, host_page_size_log2))
{
self.set_protection(initial_size_bytes_page_aligned..self.accessible, false)?;
self.accessible = initial_size_bytes_page_aligned;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -547,11 +547,11 @@ impl SlabConstraints {
// static memory slot (see `Config::memory_reservation`); even
// if the memory never grows to this size (e.g., it has a lower memory
// maximum), codegen will assume that this unused memory is mapped
// `PROT_NONE`. Typically `static_memory_bound` is 4GiB which helps
// `PROT_NONE`. Typically `memory_reservation` is 4GiB which helps
// elide most bounds checks. `MemoryPool` must respect this bound,
// though not explicitly: if we can achieve the same effect via
// MPK-protected stripes, the slot size can be lower than the
// `static_memory_bound`.
// `memory_reservation`.
let expected_slot_bytes: usize = tunables
.memory_reservation
.try_into()
Expand Down
2 changes: 1 addition & 1 deletion tests/all/memory_creator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ mod not_for_windows {
guard_size: usize,
) -> Result<Box<dyn LinearMemory>, String> {
assert_eq!(guard_size, 0);
assert!(reserved_size.is_none());
assert_eq!(reserved_size, Some(0));
assert!(!ty.is_64());
unsafe {
let mem = Box::new(CustomMemory::new(
Expand Down

0 comments on commit 95367fd

Please sign in to comment.