Add Suballocator::suballocations #2499

Merged · 4 commits · Mar 16, 2024

Changes from all commits
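For orientation before the per-file diffs, here is the shape of what the PR adds, pieced together from the hunks below. This is a hedged sketch, not vulkano's actual source: `DeviceSize` is `u64` and the `SuballocationNode` fields appear verbatim in the diff, but the iterator bound, the `where Self: 'a` clause, and any `SuballocationType` variant other than `Free` are assumptions.

```rust
// Sketch of the new `Suballocator` surface added by this PR (see caveats above).
type DeviceSize = u64;

// A node reported by the new iterator: one currently allocated or free range.
#[derive(Clone, Copy, Debug)]
pub enum SuballocationType {
    Free,
    // ...further variants mirroring `AllocationType` (assumed)
}

#[derive(Clone, Copy, Debug)]
pub struct SuballocationNode {
    pub offset: DeviceSize,
    pub size: DeviceSize,
    pub allocation_type: SuballocationType,
}

pub unsafe trait Suballocator {
    /// The iterator type returned by `suballocations`, new in this PR.
    type Suballocations<'a>: Iterator<Item = SuballocationNode>
    where
        Self: 'a;

    /// Returns an iterator over the current suballocations, new in this PR.
    fn suballocations(&self) -> Self::Suballocations<'_>;
}
```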
56 changes: 26 additions & 30 deletions vulkano/src/memory/allocator/suballocator/buddy.rs
@@ -1,15 +1,14 @@
-use super::{AllocationType, Region, Suballocation, Suballocator, SuballocatorError};
+use super::{
+    AllocationType, Region, Suballocation, SuballocationNode, Suballocator, SuballocatorError,
+};
 use crate::{
     memory::{
         allocator::{align_up, array_vec::ArrayVec, AllocationHandle, DeviceLayout},
         is_aligned, DeviceAlignment,
     },
     DeviceSize, NonZeroDeviceSize,
 };
-use std::{
-    cell::{Cell, UnsafeCell},
-    cmp,
-};
+use std::cmp;

 /// A [suballocator] whose structure forms a binary tree of power-of-two-sized suballocations.
 ///
@@ -62,8 +61,11 @@ use std::{
 pub struct BuddyAllocator {
     region_offset: DeviceSize,
     // Total memory remaining in the region.
-    free_size: Cell<DeviceSize>,
-    state: UnsafeCell<BuddyAllocatorState>,
+    free_size: DeviceSize,
+    // Every order has its own free-list for convenience, so that we don't have to traverse a tree.
+    // Each free-list is sorted by offset because we want to find the first-fit as this strategy
+    // minimizes external fragmentation.
+    free_list: ArrayVec<Vec<DeviceSize>, { Self::MAX_ORDERS }>,
 }

 impl BuddyAllocator {
@@ -75,6 +77,8 @@ impl BuddyAllocator {
 }

 unsafe impl Suballocator for BuddyAllocator {
+    type Suballocations<'a> = std::iter::Empty<SuballocationNode>;
+
     /// Creates a new `BuddyAllocator` for the given [region].
     ///
     /// # Panics
@@ -93,24 +97,21 @@

         assert!(max_order < BuddyAllocator::MAX_ORDERS);

-        let free_size = Cell::new(region.size());
-
         let mut free_list =
             ArrayVec::new(max_order + 1, [EMPTY_FREE_LIST; BuddyAllocator::MAX_ORDERS]);
         // The root node has the lowest offset and highest order, so it's the whole region.
         free_list[max_order].push(region.offset());
-        let state = UnsafeCell::new(BuddyAllocatorState { free_list });

         BuddyAllocator {
             region_offset: region.offset(),
-            free_size,
-            state,
+            free_size: region.size(),
+            free_list,
         }
     }

     #[inline]
     fn allocate(
-        &self,
+        &mut self,
         layout: DeviceLayout,
         allocation_type: AllocationType,
         buffer_image_granularity: DeviceAlignment,
@@ -150,17 +151,16 @@
         let size = cmp::max(size, BuddyAllocator::MIN_NODE_SIZE).next_power_of_two();

         let min_order = (size / BuddyAllocator::MIN_NODE_SIZE).trailing_zeros() as usize;
-        let state = unsafe { &mut *self.state.get() };

         // Start searching at the lowest possible order going up.
-        for (order, free_list) in state.free_list.iter_mut().enumerate().skip(min_order) {
+        for (order, free_list) in self.free_list.iter_mut().enumerate().skip(min_order) {
             for (index, &offset) in free_list.iter().enumerate() {
                 if is_aligned(offset, alignment) {
                     free_list.remove(index);

                     // Go in the opposite direction, splitting nodes from higher orders. The lowest
                     // order doesn't need any splitting.
-                    for (order, free_list) in state
+                    for (order, free_list) in self
                         .free_list
                         .iter_mut()
                         .enumerate()
@@ -185,7 +185,7 @@

                     // This can't overflow because suballocation sizes in the free-list are
                     // constrained by the remaining size of the region.
-                    self.free_size.set(self.free_size.get() - size);
+                    self.free_size -= size;

                     return Ok(Suballocation {
                         offset,
@@ -206,17 +206,16 @@
     }

     #[inline]
-    unsafe fn deallocate(&self, suballocation: Suballocation) {
+    unsafe fn deallocate(&mut self, suballocation: Suballocation) {
         let mut offset = suballocation.offset;
         let order = suballocation.handle.as_index();

         let min_order = order;
-        let state = unsafe { &mut *self.state.get() };

-        debug_assert!(!state.free_list[order].contains(&offset));
+        debug_assert!(!self.free_list[order].contains(&offset));

         // Try to coalesce nodes while incrementing the order.
-        for (order, free_list) in state.free_list.iter_mut().enumerate().skip(min_order) {
+        for (order, free_list) in self.free_list.iter_mut().enumerate().skip(min_order) {
             // This can't discard any bits because `order` is confined to the range
             // [0, log(region.size / BuddyAllocator::MIN_NODE_SIZE)].
             let size = BuddyAllocator::MIN_NODE_SIZE << order;
@@ -241,7 +240,7 @@

                 // The sizes of suballocations allocated by `self` are constrained by that of
                 // its region, so they can't possibly overflow when added up.
-                self.free_size.set(self.free_size.get() + size);
+                self.free_size += size;

                 break;
             }
@@ -256,17 +255,14 @@
     /// [internal fragmentation]: super#internal-fragmentation
     #[inline]
     fn free_size(&self) -> DeviceSize {
-        self.free_size.get()
+        self.free_size
     }

     #[inline]
     fn cleanup(&mut self) {}
-}

-#[derive(Debug)]
-struct BuddyAllocatorState {
-    // Every order has its own free-list for convenience, so that we don't have to traverse a tree.
-    // Each free-list is sorted by offset because we want to find the first-fit as this strategy
-    // minimizes external fragmentation.
-    free_list: ArrayVec<Vec<DeviceSize>, { BuddyAllocator::MAX_ORDERS }>,
+
+    #[inline]
+    fn suballocations(&self) -> Self::Suballocations<'_> {
+        todo!()
+    }
 }
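The struct comments above state the design: one free-list per order so no tree traversal is needed, each list sorted by offset so a linear scan yields the first fit, which minimizes external fragmentation. As a self-contained illustration of that search plus the split-down loop in `allocate`, here is a minimal sketch using plain integers; `MIN_NODE_SIZE`, the modulo alignment test, and all names are illustrative rather than vulkano's.

```rust
// First-fit over per-order free-lists, with the split-down loop from `allocate`.
// Each list holds offsets of free nodes of size MIN_NODE_SIZE << order, kept
// sorted so that scanning front-to-back finds the lowest-offset (first) fit.
const MIN_NODE_SIZE: u64 = 16;

fn allocate(free_lists: &mut [Vec<u64>], min_order: usize, alignment: u64) -> Option<u64> {
    // Start searching at the lowest order that can hold the request, going up.
    for order in min_order..free_lists.len() {
        let Some(index) = free_lists[order]
            .iter()
            .position(|&offset| offset % alignment == 0)
        else {
            continue;
        };
        let offset = free_lists[order].remove(index);

        // Split nodes from higher orders back down; the buddy (upper half) of
        // each split is returned to its order's free-list, kept sorted.
        for order in (min_order..order).rev() {
            let buddy = offset + (MIN_NODE_SIZE << order);
            let pos = free_lists[order].partition_point(|&o| o < buddy);
            free_lists[order].insert(pos, buddy);
        }

        return Some(offset);
    }

    None
}

fn main() {
    // A 64-byte region: orders 0..=2, one free root node of order 2 at offset 0.
    let mut free_lists = vec![vec![], vec![], vec![0u64]];
    // Request an order-0 (16-byte) node: the root splits twice.
    assert_eq!(allocate(&mut free_lists, 0, 1), Some(0));
    assert_eq!(free_lists, vec![vec![16], vec![32], vec![]]);
}
```

Deallocation runs the same walk in reverse, merging a freed node with its buddy and pushing the coalesced node one order up, as the `deallocate` hunk above does.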
131 changes: 113 additions & 18 deletions vulkano/src/memory/allocator/suballocator/bump.rs
@@ -1,14 +1,15 @@
-use super::{AllocationType, Region, Suballocation, Suballocator, SuballocatorError};
+use super::{
+    are_blocks_on_same_page, AllocationType, Region, Suballocation, SuballocationNode,
+    SuballocationType, Suballocator, SuballocatorError,
+};
 use crate::{
     memory::{
-        allocator::{
-            align_up, suballocator::are_blocks_on_same_page, AllocationHandle, DeviceLayout,
-        },
+        allocator::{align_up, AllocationHandle, DeviceLayout},
         DeviceAlignment,
     },
     DeviceSize,
 };
-use std::cell::Cell;
+use std::iter::FusedIterator;

 /// A [suballocator] which can allocate dynamically, but can only free all allocations at once.
 ///
@@ -53,8 +54,8 @@ use std::cell::Cell;
 #[derive(Debug)]
 pub struct BumpAllocator {
     region: Region,
-    free_start: Cell<DeviceSize>,
-    prev_allocation_type: Cell<AllocationType>,
+    free_start: DeviceSize,
+    prev_allocation_type: AllocationType,
 }

 impl BumpAllocator {
@@ -63,26 +64,46 @@ impl BumpAllocator {
     /// [region]: Suballocator#regions
     #[inline]
     pub fn reset(&mut self) {
-        *self.free_start.get_mut() = 0;
-        *self.prev_allocation_type.get_mut() = AllocationType::Unknown;
+        self.free_start = 0;
+        self.prev_allocation_type = AllocationType::Unknown;
     }
+
+    fn suballocation_node(&self, part: usize) -> SuballocationNode {
+        if part == 0 {
+            SuballocationNode {
+                offset: self.region.offset(),
+                size: self.free_start,
+                allocation_type: self.prev_allocation_type.into(),
+            }
+        } else {
+            debug_assert_eq!(part, 1);
+
+            SuballocationNode {
+                offset: self.region.offset() + self.free_start,
+                size: self.free_size(),
+                allocation_type: SuballocationType::Free,
+            }
+        }
+    }
 }

 unsafe impl Suballocator for BumpAllocator {
+    type Suballocations<'a> = Suballocations<'a>;
+
     /// Creates a new `BumpAllocator` for the given [region].
     ///
     /// [region]: Suballocator#regions
     fn new(region: Region) -> Self {
         BumpAllocator {
             region,
-            free_start: Cell::new(0),
-            prev_allocation_type: Cell::new(AllocationType::Unknown),
+            free_start: 0,
+            prev_allocation_type: AllocationType::Unknown,
         }
     }

     #[inline]
     fn allocate(
-        &self,
+        &mut self,
         layout: DeviceLayout,
         allocation_type: AllocationType,
         buffer_image_granularity: DeviceAlignment,
@@ -96,13 +117,13 @@

         // These can't overflow because suballocation offsets are bounded by the region, whose end
         // can itself not exceed `DeviceLayout::MAX_SIZE`.
-        let prev_end = self.region.offset() + self.free_start.get();
+        let prev_end = self.region.offset() + self.free_start;
         let mut offset = align_up(prev_end, alignment);

         if buffer_image_granularity != DeviceAlignment::MIN
             && prev_end > 0
             && are_blocks_on_same_page(0, prev_end, offset, buffer_image_granularity)
-            && has_granularity_conflict(self.prev_allocation_type.get(), allocation_type)
+            && has_granularity_conflict(self.prev_allocation_type, allocation_type)
         {
             offset = align_up(offset, buffer_image_granularity);
         }
@@ -115,8 +136,8 @@
             return Err(SuballocatorError::OutOfRegionMemory);
         }

-        self.free_start.set(free_start);
-        self.prev_allocation_type.set(allocation_type);
+        self.free_start = free_start;
+        self.prev_allocation_type = allocation_type;

         Ok(Suballocation {
             offset,
@@ -127,17 +148,91 @@
     }

     #[inline]
-    unsafe fn deallocate(&self, _suballocation: Suballocation) {
+    unsafe fn deallocate(&mut self, _suballocation: Suballocation) {
         // such complex, very wow
     }

     #[inline]
     fn free_size(&self) -> DeviceSize {
-        self.region.size() - self.free_start.get()
+        self.region.size() - self.free_start
     }

     #[inline]
     fn cleanup(&mut self) {
         self.reset();
     }
+
+    #[inline]
+    fn suballocations(&self) -> Self::Suballocations<'_> {
+        let start = if self.free_start == 0 { 1 } else { 0 };
+        let end = if self.free_start == self.region.size() {
+            1
+        } else {
+            2
+        };
+
+        Suballocations {
+            allocator: self,
+            start,
+            end,
+        }
+    }
 }
+
+#[derive(Clone)]
+pub struct Suballocations<'a> {
+    allocator: &'a BumpAllocator,
+    start: usize,
+    end: usize,
+}
+
+impl Iterator for Suballocations<'_> {
+    type Item = SuballocationNode;
+
+    #[inline]
+    fn next(&mut self) -> Option<Self::Item> {
+        if self.len() != 0 {
+            let node = self.allocator.suballocation_node(self.start);
+            self.start += 1;
+
+            Some(node)
+        } else {
+            None
+        }
+    }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let len = self.len();
+
+        (len, Some(len))
+    }
+
+    #[inline]
+    fn last(mut self) -> Option<Self::Item> {
+        self.next_back()
+    }
+}
+
+impl DoubleEndedIterator for Suballocations<'_> {
+    #[inline]
+    fn next_back(&mut self) -> Option<Self::Item> {
+        if self.len() != 0 {
+            self.end -= 1;
+            let node = self.allocator.suballocation_node(self.end);
+
+            Some(node)
+        } else {
+            None
+        }
+    }
+}
+
+impl ExactSizeIterator for Suballocations<'_> {
+    #[inline]
+    fn len(&self) -> usize {
+        self.end - self.start
+    }
+}
+
+impl FusedIterator for Suballocations<'_> {}
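To make the `start`/`end` bookkeeping concrete: a `BumpAllocator` reports at most two nodes, the allocated prefix (part 0) and the free suffix (part 1), and `suballocations` skips whichever part is empty. Here is a standalone model of that rule, with vulkano's types swapped for plain integers and all names illustrative:

```rust
// Model of `BumpAllocator::suballocations`: part 0 = allocated prefix,
// part 1 = free suffix; empty parts are skipped via `start`/`end`.
#[derive(Debug, PartialEq)]
enum Node {
    Used { offset: u64, size: u64 },
    Free { offset: u64, size: u64 },
}

fn suballocations(region_offset: u64, region_size: u64, free_start: u64) -> Vec<Node> {
    let start = if free_start == 0 { 1 } else { 0 }; // no allocated prefix yet
    let end = if free_start == region_size { 1 } else { 2 }; // no free suffix left

    (start..end)
        .map(|part| {
            if part == 0 {
                Node::Used { offset: region_offset, size: free_start }
            } else {
                Node::Free {
                    offset: region_offset + free_start,
                    size: region_size - free_start,
                }
            }
        })
        .collect()
}

fn main() {
    // Fresh allocator: the whole region is a single free node.
    assert_eq!(
        suballocations(0, 128, 0),
        vec![Node::Free { offset: 0, size: 128 }]
    );
    // After bumping 48 bytes: allocated prefix plus free suffix.
    assert_eq!(
        suballocations(0, 128, 48),
        vec![
            Node::Used { offset: 0, size: 48 },
            Node::Free { offset: 48, size: 80 },
        ]
    );
}
```

Because the length is simply `end - start`, the real iterator can implement `ExactSizeIterator` and `DoubleEndedIterator` trivially, which is what the impls above do.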