From 78945d00ceb91813cc94ac32f5189f1c5b68a5b0 Mon Sep 17 00:00:00 2001
From: "Hans Elias B. Josephsen"
Date: Mon, 9 Dec 2019 16:02:22 +0100
Subject: [PATCH 01/13] Parametrize `RawTable` over an allocator

* `RawTable` has a new type parameter, `A: Alloc + Clone`
* When the `nightly` flag is passed, `Alloc` will be the trait in
  `alloc::Alloc`.
* On stable, a minimal shim implementation is provided, along with an
  implementation for the global allocator.
* No public APIs changed.
* For `HashMap`, everything is monomorphized to the global allocator,
  and there should be no performance or size overhead.
---
 src/map.rs       |  16 +++---
 src/raw/alloc.rs |  29 ++++++++++++
 src/raw/mod.rs   | 113 +++++++++++++++++++++++++++--------------------
 3 files changed, 101 insertions(+), 57 deletions(-)
 create mode 100644 src/raw/alloc.rs

diff --git a/src/map.rs b/src/map.rs
index 29d1949b02..fe93ce2cc1 100644
--- a/src/map.rs
+++ b/src/map.rs
@@ -1,4 +1,4 @@
-use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable};
+use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable, Global};
 use crate::CollectionAllocErr;
 use core::borrow::Borrow;
 use core::fmt::{self, Debug};
@@ -193,7 +193,7 @@ pub enum DefaultHashBuilder {}
 #[derive(Clone)]
 pub struct HashMap<K, V, S = DefaultHashBuilder> {
     pub(crate) hash_builder: S,
-    pub(crate) table: RawTable<(K, V)>,
+    pub(crate) table: RawTable<Global, (K, V)>,
 }
 
 #[cfg_attr(feature = "inline-more", inline)]
@@ -263,7 +263,7 @@ impl<K, V, S> HashMap<K, V, S> {
     pub fn with_hasher(hash_builder: S) -> Self {
         Self {
             hash_builder,
-            table: RawTable::new(),
+            table: RawTable::new(Global),
         }
     }
 
@@ -292,7 +292,7 @@ impl<K, V, S> HashMap<K, V, S> {
     pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> Self {
         Self {
             hash_builder,
-            table: RawTable::with_capacity(capacity),
+            table: RawTable::with_capacity(Global, capacity),
         }
     }
 
@@ -1146,7 +1146,7 @@ impl<K, V> IterMut<'_, K, V> {
 /// [`into_iter`]: struct.HashMap.html#method.into_iter
 /// [`HashMap`]: struct.HashMap.html
 pub struct IntoIter<K, V> {
-    inner: RawIntoIter<(K, V)>,
+    inner: RawIntoIter<Global, (K, V)>,
 }
 
 impl<K, V> IntoIter<K, V> {
@@ -1222,7 +1222,7 @@ impl<K, V: Debug> fmt::Debug for Values<'_, K, V> {
 /// [`drain`]: struct.HashMap.html#method.drain
 /// [`HashMap`]: struct.HashMap.html
 pub struct Drain<'a, K, V> {
-    inner: RawDrain<'a, (K, V)>,
+    inner: RawDrain<'a, Global, (K, V)>,
 }
 
 impl<K, V> Drain<'_, K, V> {
@@ -1280,7 +1280,7 @@ pub enum RawEntryMut<'a, K, V, S> {
 /// [`RawEntryMut`]: enum.RawEntryMut.html
 pub struct RawOccupiedEntryMut<'a, K, V> {
     elem: Bucket<(K, V)>,
-    table: &'a mut RawTable<(K, V)>,
+    table: &'a mut RawTable<Global, (K, V)>,
 }
 
 unsafe impl<K, V> Send for RawOccupiedEntryMut<'_, K, V>
 where
@@ -1301,7 +1301,7 @@ where
 ///
 /// [`RawEntryMut`]: enum.RawEntryMut.html
 pub struct RawVacantEntryMut<'a, K, V, S> {
-    table: &'a mut RawTable<(K, V)>,
+    table: &'a mut RawTable<Global, (K, V)>,
     hash_builder: &'a S,
 }
 
diff --git a/src/raw/alloc.rs b/src/raw/alloc.rs
new file mode 100644
index 0000000000..be562256cb
--- /dev/null
+++ b/src/raw/alloc.rs
@@ -0,0 +1,29 @@
+pub use self::inner::*;
+
+#[cfg(feature = "nightly")]
+mod inner {
+    pub use crate::alloc::alloc::{Alloc, Global};
+}
+
+#[cfg(not(feature = "nightly"))]
+mod inner {
+    use core::ptr::NonNull;
+    use crate::alloc::alloc::{Layout, alloc, dealloc};
+
+    pub trait Alloc {
+        unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()>;
+        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);
+    }
+
+    #[derive(Copy, Clone)]
+    pub struct Global;
+    impl Alloc for Global {
+        unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
+            Ok(NonNull::new_unchecked(alloc(layout)))
+        }
+        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
+            dealloc(ptr.as_ptr(), layout)
+        }
+    }
+}
+
diff --git a/src/raw/mod.rs b/src/raw/mod.rs
index 1a04b9ac1f..91462256f1 100644
--- a/src/raw/mod.rs
+++ b/src/raw/mod.rs
@@ -1,7 +1,6 @@
-use crate::alloc::alloc::{alloc, dealloc, handle_alloc_error};
+use crate::alloc::alloc::{Layout, handle_alloc_error};
 use crate::scopeguard::guard;
 use crate::CollectionAllocErr;
-use core::alloc::Layout;
 use core::hint;
 use core::iter::FusedIterator;
 use core::marker::PhantomData;
@@ -32,6 +31,10 @@ cfg_if! {
     }
 }
 
+mod alloc;
+pub use self::alloc::Global;
+use self::alloc::Alloc;
+
 mod bitmask;
 
 use self::bitmask::BitMask;
@@ -329,7 +332,7 @@ impl<T> Bucket<T> {
 }
 
 /// A raw hash table with an unsafe API.
-pub struct RawTable<T> {
+pub struct RawTable<A: Alloc + Clone, T> {
     // Mask to get an index from a hash value. The value is one less than the
     // number of buckets in the table.
     bucket_mask: usize,
@@ -348,16 +351,20 @@ pub struct RawTable<T> {
 
     // Tell dropck that we own instances of T.
     marker: PhantomData<T>,
+
+    alloc: A,
 }
 
-impl<T> RawTable<T> {
+impl<A: Alloc + Clone, T> RawTable<A, T> {
     /// Creates a new empty hash table without allocating any memory.
     ///
     /// In effect this returns a table with exactly 1 bucket. However we can
     /// leave the data pointer dangling since that bucket is never written to
     /// due to our load factor forcing us to always have at least 1 free bucket.
     #[cfg_attr(feature = "inline-more", inline)]
-    pub fn new() -> Self {
+    pub fn new(
+        alloc: A,
+    ) -> Self {
         Self {
             data: NonNull::dangling(),
             // Be careful to cast the entire slice to a raw pointer.
@@ -366,6 +373,7 @@ impl<T> RawTable<T> {
             items: 0,
             growth_left: 0,
             marker: PhantomData,
+            alloc,
         }
     }
 
@@ -374,13 +382,15 @@ impl<T> RawTable<T> {
     /// The control bytes are left uninitialized.
     #[cfg_attr(feature = "inline-more", inline)]
     unsafe fn new_uninitialized(
+        mut alloc: A,
         buckets: usize,
         fallability: Fallibility,
     ) -> Result<Self, CollectionAllocErr> {
         debug_assert!(buckets.is_power_of_two());
         let (layout, data_offset) =
             calculate_layout::<T>(buckets).ok_or_else(|| fallability.capacity_overflow())?;
-        let ctrl = NonNull::new(alloc(layout)).ok_or_else(|| fallability.alloc_err(layout))?;
+        let ctrl = alloc.alloc(layout)
+            .map_err(|_| fallability.alloc_err(layout))?;
         let data = NonNull::new_unchecked(ctrl.as_ptr().add(data_offset) as *mut T);
         Ok(Self {
             data,
@@ -389,22 +399,24 @@ impl<T> RawTable<T> {
             items: 0,
             growth_left: bucket_mask_to_capacity(buckets - 1),
             marker: PhantomData,
+            alloc,
         })
     }
 
     /// Attempts to allocate a new hash table with at least enough capacity
     /// for inserting the given number of elements without reallocating.
     fn try_with_capacity(
+        alloc: A,
        capacity: usize,
         fallability: Fallibility,
     ) -> Result<Self, CollectionAllocErr> {
         if capacity == 0 {
-            Ok(Self::new())
+            Ok(Self::new(alloc))
         } else {
             unsafe {
                 let buckets =
                     capacity_to_buckets(capacity).ok_or_else(|| fallability.capacity_overflow())?;
-                let result = Self::new_uninitialized(buckets, fallability)?;
+                let result = Self::new_uninitialized(alloc, buckets, fallability)?;
                 result.ctrl(0).write_bytes(EMPTY, result.num_ctrl_bytes());
 
                 Ok(result)
@@ -414,8 +426,8 @@ impl<T> RawTable<T> {
 
     /// Allocates a new hash table with at least enough capacity for inserting
     /// the given number of elements without reallocating.
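
For illustration (not part of the patch): a minimal sketch of a custom allocator
written against the stable shim trait added in `src/raw/alloc.rs` above.
`CountingAlloc` is hypothetical; only the `Alloc` trait signatures are taken
from the patch.

use core::ptr::NonNull;
use std::alloc::{alloc, dealloc, Layout};
use std::sync::atomic::{AtomicUsize, Ordering};
use std::sync::Arc;

// A hypothetical allocator that forwards to the global allocator while
// tracking live bytes. `Clone` matters because `RawTable` stores and
// clones its allocator handle.
#[derive(Clone)]
struct CountingAlloc {
    live: Arc<AtomicUsize>,
}

impl Alloc for CountingAlloc {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
        // A null return from the global allocator is surfaced as Err(()).
        let ptr = NonNull::new(alloc(layout)).ok_or(())?;
        self.live.fetch_add(layout.size(), Ordering::Relaxed);
        Ok(ptr)
    }

    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        self.live.fetch_sub(layout.size(), Ordering::Relaxed);
        dealloc(ptr.as_ptr(), layout);
    }
}

// A table placed in the custom allocator (type-parameter order as of this
// patch; patch 02 below swaps it to `RawTable<T, A>`):
// let table: RawTable<CountingAlloc, (u32, u32)> = RawTable::new(counting);
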
- pub fn with_capacity(capacity: usize) -> Self { - Self::try_with_capacity(capacity, Fallibility::Infallible) + pub fn with_capacity(alloc: A, capacity: usize) -> Self { + Self::try_with_capacity(alloc, capacity, Fallibility::Infallible) .unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() }) } @@ -424,7 +436,7 @@ impl RawTable { unsafe fn free_buckets(&mut self) { let (layout, _) = calculate_layout::(self.buckets()).unwrap_or_else(|| hint::unreachable_unchecked()); - dealloc(self.ctrl.as_ptr(), layout); + self.alloc.dealloc(NonNull::new_unchecked(self.ctrl.as_ptr()), layout); } /// Returns the index of a bucket from a `Bucket`. @@ -593,7 +605,7 @@ impl RawTable { // space for. let min_size = usize::max(self.items, min_size); if min_size == 0 { - *self = Self::new(); + *self = Self::new(self.alloc); return; } @@ -610,7 +622,7 @@ impl RawTable { if min_buckets < self.buckets() { // Fast path if the table is empty if self.items == 0 { - *self = Self::with_capacity(min_size) + *self = Self::with_capacity(self.alloc, min_size) } else { self.resize(min_size, hasher, Fallibility::Infallible) .unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() }); @@ -785,7 +797,7 @@ impl RawTable { debug_assert!(self.items <= capacity); // Allocate and initialize the new table. - let mut new_table = Self::try_with_capacity(capacity, fallability)?; + let mut new_table = Self::try_with_capacity(self.alloc, capacity, fallability)?; new_table.growth_left -= self.items; new_table.items = self.items; @@ -950,10 +962,10 @@ impl RawTable { /// outlives the `RawDrain`. Because we cannot make the `next` method unsafe /// on the `RawDrain`, we have to make the `drain` method unsafe. #[cfg_attr(feature = "inline-more", inline)] - pub unsafe fn drain(&mut self) -> RawDrain<'_, T> { + pub unsafe fn drain(&mut self) -> RawDrain<'_, A, T> { RawDrain { iter: self.iter(), - table: ManuallyDrop::new(mem::replace(self, Self::new())), + table: ManuallyDrop::new(mem::replace(self, Self::new(self.alloc))), orig_table: NonNull::from(self), marker: PhantomData, } @@ -975,17 +987,17 @@ impl RawTable { } } -unsafe impl Send for RawTable where T: Send {} -unsafe impl Sync for RawTable where T: Sync {} +unsafe impl Send for RawTable where T: Send {} +unsafe impl Sync for RawTable where T: Sync {} -impl Clone for RawTable { +impl Clone for RawTable { fn clone(&self) -> Self { if self.is_empty_singleton() { - Self::new() + Self::new(self.alloc) } else { unsafe { let mut new_table = ManuallyDrop::new( - Self::new_uninitialized(self.buckets(), Fallibility::Infallible) + Self::new_uninitialized(self.alloc, self.buckets(), Fallibility::Infallible) .unwrap_or_else(|_| hint::unreachable_unchecked()), ); @@ -1031,7 +1043,7 @@ impl Clone for RawTable { } #[cfg(feature = "nightly")] -unsafe impl<#[may_dangle] T> Drop for RawTable { +unsafe impl Drop for RawTable { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { if !self.is_empty_singleton() { @@ -1047,7 +1059,7 @@ unsafe impl<#[may_dangle] T> Drop for RawTable { } } #[cfg(not(feature = "nightly"))] -impl Drop for RawTable { +impl Drop for RawTable { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { if !self.is_empty_singleton() { @@ -1063,19 +1075,21 @@ impl Drop for RawTable { } } -impl IntoIterator for RawTable { +impl IntoIterator for RawTable { type Item = T; - type IntoIter = RawIntoIter; + type IntoIter = RawIntoIter; #[cfg_attr(feature = "inline-more", inline)] - fn into_iter(self) -> RawIntoIter { + fn into_iter(self) -> RawIntoIter { unsafe { + 
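            // (Illustrative note, not in the patch: the allocator handle is
            // copied out before `into_alloc` consumes the table, so the
            // returned RawIntoIter can free the buffer in its Drop impl;
            // patch 02 below turns this copy into an explicit clone.)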
let allocator = self.alloc; let iter = self.iter(); let alloc = self.into_alloc(); RawIntoIter { iter, alloc, marker: PhantomData, + allocator, } } } @@ -1263,24 +1277,25 @@ impl ExactSizeIterator for RawIter {} impl FusedIterator for RawIter {} /// Iterator which consumes a table and returns elements. -pub struct RawIntoIter { +pub struct RawIntoIter { iter: RawIter, alloc: Option<(NonNull, Layout)>, marker: PhantomData, + allocator: A, } -impl RawIntoIter { +impl RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] pub fn iter(&self) -> RawIter { self.iter.clone() } } -unsafe impl Send for RawIntoIter where T: Send {} -unsafe impl Sync for RawIntoIter where T: Sync {} +unsafe impl Send for RawIntoIter where T: Send {} +unsafe impl Sync for RawIntoIter where T: Sync {} #[cfg(feature = "nightly")] -unsafe impl<#[may_dangle] T> Drop for RawIntoIter { +unsafe impl Drop for RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -1293,13 +1308,13 @@ unsafe impl<#[may_dangle] T> Drop for RawIntoIter { // Free the table if let Some((ptr, layout)) = self.alloc { - dealloc(ptr.as_ptr(), layout); + self.allocator.dealloc(ptr, layout); } } } } #[cfg(not(feature = "nightly"))] -impl Drop for RawIntoIter { +impl Drop for RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -1312,13 +1327,13 @@ impl Drop for RawIntoIter { // Free the table if let Some((ptr, layout)) = self.alloc { - dealloc(ptr.as_ptr(), layout); + self.allocator.dealloc(NonNull::new_unchecked(ptr.as_ptr()), layout); } } } } -impl Iterator for RawIntoIter { +impl Iterator for RawIntoIter { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -1332,35 +1347,35 @@ impl Iterator for RawIntoIter { } } -impl ExactSizeIterator for RawIntoIter {} -impl FusedIterator for RawIntoIter {} +impl ExactSizeIterator for RawIntoIter {} +impl FusedIterator for RawIntoIter {} /// Iterator which consumes elements without freeing the table storage. -pub struct RawDrain<'a, T> { +pub struct RawDrain<'a, A: Alloc + Copy, T> { iter: RawIter, // The table is moved into the iterator for the duration of the drain. This // ensures that an empty table is left if the drain iterator is leaked // without dropping. - table: ManuallyDrop>, - orig_table: NonNull>, + table: ManuallyDrop>, + orig_table: NonNull>, // We don't use a &'a mut RawTable because we want RawDrain to be // covariant over T. 
- marker: PhantomData<&'a RawTable>, + marker: PhantomData<&'a RawTable>, } -impl RawDrain<'_, T> { +impl RawDrain<'_, A, T> { #[cfg_attr(feature = "inline-more", inline)] - pub fn iter(&self) -> RawIter { + pub fn iter(&self) -> RawIter< T> { self.iter.clone() } } -unsafe impl Send for RawDrain<'_, T> where T: Send {} -unsafe impl Sync for RawDrain<'_, T> where T: Sync {} +unsafe impl Send for RawDrain<'_, A, T> where T: Send {} +unsafe impl Sync for RawDrain<'_, A, T> where T: Sync {} -impl Drop for RawDrain<'_, T> { +impl Drop for RawDrain<'_, A, T> { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -1383,7 +1398,7 @@ impl Drop for RawDrain<'_, T> { } } -impl Iterator for RawDrain<'_, T> { +impl Iterator for RawDrain<'_, A, T> { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -1400,5 +1415,5 @@ impl Iterator for RawDrain<'_, T> { } } -impl ExactSizeIterator for RawDrain<'_, T> {} -impl FusedIterator for RawDrain<'_, T> {} +impl ExactSizeIterator for RawDrain<'_, A, T> {} +impl FusedIterator for RawDrain<'_, A, T> {} From aaa79643025f383dadb19e0ef6d5ac650c63f5da Mon Sep 17 00:00:00 2001 From: "Hans Elias B. Josephsen" Date: Mon, 9 Dec 2019 17:37:09 +0100 Subject: [PATCH 02/13] Fix issues pointed out in PR * Change order of type parameters * Handle null case for `alloc` * Run rustfmt --- src/map.rs | 12 +++--- src/raw/alloc.rs | 5 +-- src/raw/mod.rs | 101 +++++++++++++++++++++++++---------------------- 3 files changed, 61 insertions(+), 57 deletions(-) diff --git a/src/map.rs b/src/map.rs index fe93ce2cc1..5af4b4d639 100644 --- a/src/map.rs +++ b/src/map.rs @@ -1,4 +1,4 @@ -use crate::raw::{Bucket, RawDrain, RawIntoIter, RawIter, RawTable, Global}; +use crate::raw::{Bucket, Global, RawDrain, RawIntoIter, RawIter, RawTable}; use crate::CollectionAllocErr; use core::borrow::Borrow; use core::fmt::{self, Debug}; @@ -193,7 +193,7 @@ pub enum DefaultHashBuilder {} #[derive(Clone)] pub struct HashMap { pub(crate) hash_builder: S, - pub(crate) table: RawTable, + pub(crate) table: RawTable<(K, V), Global>, } #[cfg_attr(feature = "inline-more", inline)] @@ -1146,7 +1146,7 @@ impl IterMut<'_, K, V> { /// [`into_iter`]: struct.HashMap.html#method.into_iter /// [`HashMap`]: struct.HashMap.html pub struct IntoIter { - inner: RawIntoIter, + inner: RawIntoIter<(K, V), Global>, } impl IntoIter { @@ -1222,7 +1222,7 @@ impl fmt::Debug for Values<'_, K, V> { /// [`drain`]: struct.HashMap.html#method.drain /// [`HashMap`]: struct.HashMap.html pub struct Drain<'a, K, V> { - inner: RawDrain<'a, Global, (K, V)>, + inner: RawDrain<'a, (K, V), Global>, } impl Drain<'_, K, V> { @@ -1280,7 +1280,7 @@ pub enum RawEntryMut<'a, K, V, S> { /// [`RawEntryMut`]: enum.RawEntryMut.html pub struct RawOccupiedEntryMut<'a, K, V> { elem: Bucket<(K, V)>, - table: &'a mut RawTable, + table: &'a mut RawTable<(K, V), Global>, } unsafe impl Send for RawOccupiedEntryMut<'_, K, V> @@ -1301,7 +1301,7 @@ where /// /// [`RawEntryMut`]: enum.RawEntryMut.html pub struct RawVacantEntryMut<'a, K, V, S> { - table: &'a mut RawTable, + table: &'a mut RawTable<(K, V), Global>, hash_builder: &'a S, } diff --git a/src/raw/alloc.rs b/src/raw/alloc.rs index be562256cb..d0291e713a 100644 --- a/src/raw/alloc.rs +++ b/src/raw/alloc.rs @@ -7,8 +7,8 @@ mod inner { #[cfg(not(feature = "nightly"))] mod inner { + use crate::alloc::alloc::{alloc, dealloc, Layout}; use core::ptr::NonNull; - use crate::alloc::alloc::{Layout, alloc, dealloc}; pub trait Alloc { unsafe fn alloc(&mut self, layout: Layout) -> Result, 
()>; @@ -19,11 +19,10 @@ mod inner { pub struct Global; impl Alloc for Global { unsafe fn alloc(&mut self, layout: Layout) -> Result, ()> { - Ok(NonNull::new_unchecked(alloc(layout))) + NonNull::new(alloc(layout)).ok_or(()) } unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout) { dealloc(ptr.as_ptr(), layout) } } } - diff --git a/src/raw/mod.rs b/src/raw/mod.rs index 91462256f1..0b9374d9c6 100644 --- a/src/raw/mod.rs +++ b/src/raw/mod.rs @@ -1,4 +1,4 @@ -use crate::alloc::alloc::{Layout, handle_alloc_error}; +use crate::alloc::alloc::{handle_alloc_error, Layout}; use crate::scopeguard::guard; use crate::CollectionAllocErr; use core::hint; @@ -32,8 +32,8 @@ cfg_if! { } mod alloc; -pub use self::alloc::Global; use self::alloc::Alloc; +pub use self::alloc::Global; mod bitmask; @@ -332,7 +332,7 @@ impl Bucket { } /// A raw hash table with an unsafe API. -pub struct RawTable { +pub struct RawTable { // Mask to get an index from a hash value. The value is one less than the // number of buckets in the table. bucket_mask: usize, @@ -355,16 +355,14 @@ pub struct RawTable { alloc: A, } -impl RawTable { +impl RawTable { /// Creates a new empty hash table without allocating any memory. /// /// In effect this returns a table with exactly 1 bucket. However we can /// leave the data pointer dangling since that bucket is never written to /// due to our load factor forcing us to always have at least 1 free bucket. #[cfg_attr(feature = "inline-more", inline)] - pub fn new( - alloc: A, - ) -> Self { + pub fn new(alloc: A) -> Self { Self { data: NonNull::dangling(), // Be careful to cast the entire slice to a raw pointer. @@ -389,7 +387,8 @@ impl RawTable { debug_assert!(buckets.is_power_of_two()); let (layout, data_offset) = calculate_layout::(buckets).ok_or_else(|| fallability.capacity_overflow())?; - let ctrl = alloc.alloc(layout) + let ctrl = alloc + .alloc(layout) .map_err(|_| fallability.alloc_err(layout))?; let data = NonNull::new_unchecked(ctrl.as_ptr().add(data_offset) as *mut T); Ok(Self { @@ -436,7 +435,8 @@ impl RawTable { unsafe fn free_buckets(&mut self) { let (layout, _) = calculate_layout::(self.buckets()).unwrap_or_else(|| hint::unreachable_unchecked()); - self.alloc.dealloc(NonNull::new_unchecked(self.ctrl.as_ptr()), layout); + self.alloc + .dealloc(NonNull::new_unchecked(self.ctrl.as_ptr()), layout); } /// Returns the index of a bucket from a `Bucket`. @@ -605,7 +605,7 @@ impl RawTable { // space for. let min_size = usize::max(self.items, min_size); if min_size == 0 { - *self = Self::new(self.alloc); + *self = Self::new(self.alloc.clone()); return; } @@ -622,7 +622,7 @@ impl RawTable { if min_buckets < self.buckets() { // Fast path if the table is empty if self.items == 0 { - *self = Self::with_capacity(self.alloc, min_size) + *self = Self::with_capacity(self.alloc.clone(), min_size) } else { self.resize(min_size, hasher, Fallibility::Infallible) .unwrap_or_else(|_| unsafe { hint::unreachable_unchecked() }); @@ -797,7 +797,7 @@ impl RawTable { debug_assert!(self.items <= capacity); // Allocate and initialize the new table. - let mut new_table = Self::try_with_capacity(self.alloc, capacity, fallability)?; + let mut new_table = Self::try_with_capacity(self.alloc.clone(), capacity, fallability)?; new_table.growth_left -= self.items; new_table.items = self.items; @@ -962,10 +962,10 @@ impl RawTable { /// outlives the `RawDrain`. Because we cannot make the `next` method unsafe /// on the `RawDrain`, we have to make the `drain` method unsafe. 
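
A self-contained sketch of the null-handling fix above
(`NonNull::new(alloc(layout)).ok_or(())`): the global allocator signals
failure by returning a null pointer, and wrapping null in
`NonNull::new_unchecked` is undefined behavior, so the shim must check.
`shim_alloc` is a hypothetical free function mirroring the checked path:

use core::ptr::NonNull;
use std::alloc::{alloc, Layout};

// A null return (allocation failure) becomes Err(()), which RawTable's
// new_uninitialized turns into a CollectionAllocErr instead of instant UB.
unsafe fn shim_alloc(layout: Layout) -> Result<NonNull<u8>, ()> {
    NonNull::new(alloc(layout)).ok_or(())
}
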
#[cfg_attr(feature = "inline-more", inline)] - pub unsafe fn drain(&mut self) -> RawDrain<'_, A, T> { + pub unsafe fn drain(&mut self) -> RawDrain<'_, T, A> { RawDrain { iter: self.iter(), - table: ManuallyDrop::new(mem::replace(self, Self::new(self.alloc))), + table: ManuallyDrop::new(mem::replace(self, Self::new(self.alloc.clone()))), orig_table: NonNull::from(self), marker: PhantomData, } @@ -987,18 +987,22 @@ impl RawTable { } } -unsafe impl Send for RawTable where T: Send {} -unsafe impl Sync for RawTable where T: Sync {} +unsafe impl Send for RawTable where T: Send {} +unsafe impl Sync for RawTable where T: Sync {} -impl Clone for RawTable { +impl Clone for RawTable { fn clone(&self) -> Self { if self.is_empty_singleton() { - Self::new(self.alloc) + Self::new(self.alloc.clone()) } else { unsafe { let mut new_table = ManuallyDrop::new( - Self::new_uninitialized(self.alloc, self.buckets(), Fallibility::Infallible) - .unwrap_or_else(|_| hint::unreachable_unchecked()), + Self::new_uninitialized( + self.alloc.clone(), + self.buckets(), + Fallibility::Infallible, + ) + .unwrap_or_else(|_| hint::unreachable_unchecked()), ); // Copy the control bytes unchanged. We do this in a single pass @@ -1043,7 +1047,7 @@ impl Clone for RawTable { } #[cfg(feature = "nightly")] -unsafe impl Drop for RawTable { +unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for RawTable { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { if !self.is_empty_singleton() { @@ -1059,7 +1063,7 @@ unsafe impl Drop for RawTable { } } #[cfg(not(feature = "nightly"))] -impl Drop for RawTable { +impl Drop for RawTable { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { if !self.is_empty_singleton() { @@ -1075,14 +1079,14 @@ impl Drop for RawTable { } } -impl IntoIterator for RawTable { +impl IntoIterator for RawTable { type Item = T; - type IntoIter = RawIntoIter; + type IntoIter = RawIntoIter; #[cfg_attr(feature = "inline-more", inline)] - fn into_iter(self) -> RawIntoIter { + fn into_iter(self) -> RawIntoIter { unsafe { - let allocator = self.alloc; + let allocator = self.alloc.clone(); let iter = self.iter(); let alloc = self.into_alloc(); RawIntoIter { @@ -1277,25 +1281,25 @@ impl ExactSizeIterator for RawIter {} impl FusedIterator for RawIter {} /// Iterator which consumes a table and returns elements. 
-pub struct RawIntoIter { +pub struct RawIntoIter { iter: RawIter, alloc: Option<(NonNull, Layout)>, marker: PhantomData, allocator: A, } -impl RawIntoIter { +impl RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] pub fn iter(&self) -> RawIter { self.iter.clone() } } -unsafe impl Send for RawIntoIter where T: Send {} -unsafe impl Sync for RawIntoIter where T: Sync {} +unsafe impl Send for RawIntoIter where T: Send {} +unsafe impl Sync for RawIntoIter where T: Sync {} #[cfg(feature = "nightly")] -unsafe impl Drop for RawIntoIter { +unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -1314,7 +1318,7 @@ unsafe impl Drop for RawIntoIter { } } #[cfg(not(feature = "nightly"))] -impl Drop for RawIntoIter { +impl Drop for RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -1327,13 +1331,14 @@ impl Drop for RawIntoIter { // Free the table if let Some((ptr, layout)) = self.alloc { - self.allocator.dealloc(NonNull::new_unchecked(ptr.as_ptr()), layout); + self.allocator + .dealloc(NonNull::new_unchecked(ptr.as_ptr()), layout); } } } } -impl Iterator for RawIntoIter { +impl Iterator for RawIntoIter { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -1347,35 +1352,35 @@ impl Iterator for RawIntoIter { } } -impl ExactSizeIterator for RawIntoIter {} -impl FusedIterator for RawIntoIter {} +impl ExactSizeIterator for RawIntoIter {} +impl FusedIterator for RawIntoIter {} /// Iterator which consumes elements without freeing the table storage. -pub struct RawDrain<'a, A: Alloc + Copy, T> { +pub struct RawDrain<'a, T, A: Alloc + Clone> { iter: RawIter, // The table is moved into the iterator for the duration of the drain. This // ensures that an empty table is left if the drain iterator is leaked // without dropping. - table: ManuallyDrop>, - orig_table: NonNull>, + table: ManuallyDrop>, + orig_table: NonNull>, // We don't use a &'a mut RawTable because we want RawDrain to be // covariant over T. - marker: PhantomData<&'a RawTable>, + marker: PhantomData<&'a RawTable>, } -impl RawDrain<'_, A, T> { +impl RawDrain<'_, T, A> { #[cfg_attr(feature = "inline-more", inline)] - pub fn iter(&self) -> RawIter< T> { + pub fn iter(&self) -> RawIter { self.iter.clone() } } -unsafe impl Send for RawDrain<'_, A, T> where T: Send {} -unsafe impl Sync for RawDrain<'_, A, T> where T: Sync {} +unsafe impl Send for RawDrain<'_, T, A> where T: Send {} +unsafe impl Sync for RawDrain<'_, T, A> where T: Sync {} -impl Drop for RawDrain<'_, A, T> { +impl Drop for RawDrain<'_, T, A> { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -1398,7 +1403,7 @@ impl Drop for RawDrain<'_, A, T> { } } -impl Iterator for RawDrain<'_, A, T> { +impl Iterator for RawDrain<'_, T, A> { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -1415,5 +1420,5 @@ impl Iterator for RawDrain<'_, A, T> { } } -impl ExactSizeIterator for RawDrain<'_, A, T> {} -impl FusedIterator for RawDrain<'_, A, T> {} +impl ExactSizeIterator for RawDrain<'_, T, A> {} +impl FusedIterator for RawDrain<'_, T, A> {} From 8e39062a3327b8a99bd6f6e32ae405bca5c798ce Mon Sep 17 00:00:00 2001 From: "Hans Elias B. 
Josephsen" Date: Mon, 9 Dec 2019 18:00:05 +0100 Subject: [PATCH 03/13] Fix rayon implementations --- src/external_trait_impls/rayon/raw.rs | 26 +++++++++++++------------- src/raw/mod.rs | 3 +-- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/src/external_trait_impls/rayon/raw.rs b/src/external_trait_impls/rayon/raw.rs index c6fe09dff1..e00e6a404f 100644 --- a/src/external_trait_impls/rayon/raw.rs +++ b/src/external_trait_impls/rayon/raw.rs @@ -1,5 +1,5 @@ use crate::raw::Bucket; -use crate::raw::{RawIterRange, RawTable}; +use crate::raw::{Alloc, RawIterRange, RawTable}; use crate::scopeguard::guard; use alloc::alloc::dealloc; use core::marker::PhantomData; @@ -54,11 +54,11 @@ impl UnindexedProducer for ParIterProducer { } /// Parallel iterator which consumes a table and returns elements. -pub struct RawIntoParIter { - table: RawTable, +pub struct RawIntoParIter { + table: RawTable, } -impl ParallelIterator for RawIntoParIter { +impl ParallelIterator for RawIntoParIter { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -80,16 +80,16 @@ impl ParallelIterator for RawIntoParIter { } /// Parallel iterator which consumes elements without freeing the table storage. -pub struct RawParDrain<'a, T> { +pub struct RawParDrain<'a, T, A: Alloc + Clone> { // We don't use a &'a mut RawTable because we want RawParDrain to be // covariant over T. - table: NonNull>, - marker: PhantomData<&'a RawTable>, + table: NonNull>, + marker: PhantomData<&'a RawTable>, } -unsafe impl Send for RawParDrain<'_, T> {} +unsafe impl Send for RawParDrain<'_, T, A> {} -impl ParallelIterator for RawParDrain<'_, T> { +impl ParallelIterator for RawParDrain<'_, T, A> { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -107,7 +107,7 @@ impl ParallelIterator for RawParDrain<'_, T> { } } -impl Drop for RawParDrain<'_, T> { +impl Drop for RawParDrain<'_, T, A> { fn drop(&mut self) { // If drive_unindexed is not called then simply clear the table. unsafe { self.table.as_mut().clear() } @@ -166,7 +166,7 @@ impl Drop for ParDrainProducer { } } -impl RawTable { +impl RawTable { /// Returns a parallel iterator over the elements in a `RawTable`. #[cfg_attr(feature = "inline-more", inline)] pub fn par_iter(&self) -> RawParIter { @@ -177,14 +177,14 @@ impl RawTable { /// Returns a parallel iterator over the elements in a `RawTable`. #[cfg_attr(feature = "inline-more", inline)] - pub fn into_par_iter(self) -> RawIntoParIter { + pub fn into_par_iter(self) -> RawIntoParIter { RawIntoParIter { table: self } } /// Returns a parallel iterator which consumes all elements of a `RawTable` /// without freeing its memory allocation. #[cfg_attr(feature = "inline-more", inline)] - pub fn par_drain(&mut self) -> RawParDrain<'_, T> { + pub fn par_drain(&mut self) -> RawParDrain<'_, T, A> { RawParDrain { table: NonNull::from(self), marker: PhantomData, diff --git a/src/raw/mod.rs b/src/raw/mod.rs index 0b9374d9c6..ec9b8c1f6e 100644 --- a/src/raw/mod.rs +++ b/src/raw/mod.rs @@ -32,8 +32,7 @@ cfg_if! { } mod alloc; -use self::alloc::Alloc; -pub use self::alloc::Global; +pub use self::alloc::{Alloc, Global}; mod bitmask; From c9cc19449f9f413d54fbedd78e9f139fb8116949 Mon Sep 17 00:00:00 2001 From: "Hans Elias B. 
Josephsen" Date: Mon, 9 Dec 2019 18:19:00 +0100 Subject: [PATCH 04/13] Fix the `rustc-internal-api` feature --- src/rustc_entry.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/rustc_entry.rs b/src/rustc_entry.rs index 39dc51aa27..8d4cc8d202 100644 --- a/src/rustc_entry.rs +++ b/src/rustc_entry.rs @@ -1,6 +1,6 @@ use self::RustcEntry::*; use crate::map::{make_hash, Drain, HashMap, IntoIter, Iter, IterMut}; -use crate::raw::{Bucket, RawTable}; +use crate::raw::{Bucket, Global, RawTable}; use core::fmt::{self, Debug}; use core::hash::{BuildHasher, Hash}; use core::mem; @@ -83,7 +83,7 @@ impl Debug for RustcEntry<'_, K, V> { pub struct RustcOccupiedEntry<'a, K, V> { key: Option, elem: Bucket<(K, V)>, - table: &'a mut RawTable<(K, V)>, + table: &'a mut RawTable<(K, V), Global>, } unsafe impl Send for RustcOccupiedEntry<'_, K, V> @@ -115,7 +115,7 @@ impl Debug for RustcOccupiedEntry<'_, K, V> { pub struct RustcVacantEntry<'a, K, V> { hash: u64, key: K, - table: &'a mut RawTable<(K, V)>, + table: &'a mut RawTable<(K, V), Global>, } impl Debug for RustcVacantEntry<'_, K, V> { From 90e4d3b29d0c2ce3c2af8404117dc9716530266a Mon Sep 17 00:00:00 2001 From: "Hans Elias B. Josephsen" Date: Fri, 28 Feb 2020 12:05:09 +0100 Subject: [PATCH 05/13] Parametrize map and set over allocator --- src/external_trait_impls/rayon/map.rs | 107 +++++----- src/external_trait_impls/rayon/raw.rs | 16 +- src/external_trait_impls/rayon/set.rs | 101 +++++---- src/map.rs | 291 +++++++++++++++++--------- src/raw/alloc.rs | 11 +- src/raw/mod.rs | 52 ++--- src/set.rs | 272 +++++++++++++++++------- 7 files changed, 548 insertions(+), 302 deletions(-) diff --git a/src/external_trait_impls/rayon/map.rs b/src/external_trait_impls/rayon/map.rs index 022e23e0f3..23c82e0908 100644 --- a/src/external_trait_impls/rayon/map.rs +++ b/src/external_trait_impls/rayon/map.rs @@ -1,6 +1,7 @@ //! Rayon extensions for `HashMap`. 
use crate::hash_map::HashMap; +use crate::raw::{AllocRef, Global}; use core::fmt; use core::hash::{BuildHasher, Hash}; use rayon::iter::plumbing::UnindexedConsumer; @@ -15,11 +16,11 @@ use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, Pa /// [`par_iter`]: /hashbrown/struct.HashMap.html#method.par_iter /// [`HashMap`]: /hashbrown/struct.HashMap.html /// [`IntoParallelRefIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelRefIterator.html -pub struct ParIter<'a, K, V, S> { - map: &'a HashMap, +pub struct ParIter<'a, K, V, S, A: AllocRef + Clone = Global> { + map: &'a HashMap, } -impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParIter<'a, K, V, S> { +impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator for ParIter<'a, K, V, S, A> { type Item = (&'a K, &'a V); #[cfg_attr(feature = "inline-more", inline)] @@ -38,14 +39,14 @@ impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParIter<'a, K, V, S> { } } -impl Clone for ParIter<'_, K, V, S> { +impl Clone for ParIter<'_, K, V, S, A> { #[cfg_attr(feature = "inline-more", inline)] fn clone(&self) -> Self { ParIter { map: self.map } } } -impl fmt::Debug for ParIter<'_, K, V, S> { +impl fmt::Debug for ParIter<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.iter().fmt(f) } @@ -58,11 +59,11 @@ impl fmt::Debug for Pa /// /// [`par_keys`]: /hashbrown/struct.HashMap.html#method.par_keys /// [`HashMap`]: /hashbrown/struct.HashMap.html -pub struct ParKeys<'a, K, V, S> { - map: &'a HashMap, +pub struct ParKeys<'a, K, V, S, A: AllocRef + Clone> { + map: &'a HashMap, } -impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParKeys<'a, K, V, S> { +impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator for ParKeys<'a, K, V, S, A> { type Item = &'a K; #[cfg_attr(feature = "inline-more", inline)] @@ -78,14 +79,14 @@ impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParKeys<'a, K, V, S> { } } -impl Clone for ParKeys<'_, K, V, S> { +impl Clone for ParKeys<'_, K, V, S, A> { #[cfg_attr(feature = "inline-more", inline)] fn clone(&self) -> Self { ParKeys { map: self.map } } } -impl fmt::Debug for ParKeys<'_, K, V, S> { +impl fmt::Debug for ParKeys<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.keys().fmt(f) } @@ -98,11 +99,11 @@ impl fmt::Debug for ParKeys<'_, K, /// /// [`par_values`]: /hashbrown/struct.HashMap.html#method.par_values /// [`HashMap`]: /hashbrown/struct.HashMap.html -pub struct ParValues<'a, K, V, S> { - map: &'a HashMap, +pub struct ParValues<'a, K, V, S, A: AllocRef + Clone = Global> { + map: &'a HashMap, } -impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParValues<'a, K, V, S> { +impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator for ParValues<'a, K, V, S, A> { type Item = &'a V; #[cfg_attr(feature = "inline-more", inline)] @@ -118,14 +119,14 @@ impl<'a, K: Sync, V: Sync, S: Sync> ParallelIterator for ParValues<'a, K, V, S> } } -impl Clone for ParValues<'_, K, V, S> { +impl Clone for ParValues<'_, K, V, S, A> { #[cfg_attr(feature = "inline-more", inline)] fn clone(&self) -> Self { ParValues { map: self.map } } } -impl fmt::Debug for ParValues<'_, K, V, S> { +impl fmt::Debug for ParValues<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.values().fmt(f) } @@ -140,11 +141,11 @@ impl fmt::Debug for ParValues<'_, K /// [`par_iter_mut`]: /hashbrown/struct.HashMap.html#method.par_iter_mut 
/// [`HashMap`]: /hashbrown/struct.HashMap.html /// [`IntoParallelRefMutIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelRefMutIterator.html -pub struct ParIterMut<'a, K, V, S> { - map: &'a mut HashMap, +pub struct ParIterMut<'a, K, V, S, A: AllocRef + Clone> { + map: &'a mut HashMap, } -impl<'a, K: Send + Sync, V: Send, S: Send> ParallelIterator for ParIterMut<'a, K, V, S> { +impl<'a, K: Send + Sync, V: Send, S: Send, A: AllocRef + Clone + Sync> ParallelIterator for ParIterMut<'a, K, V, S, A> { type Item = (&'a K, &'a mut V); #[cfg_attr(feature = "inline-more", inline)] @@ -163,8 +164,8 @@ impl<'a, K: Send + Sync, V: Send, S: Send> ParallelIterator for ParIterMut<'a, K } } -impl fmt::Debug - for ParIterMut<'_, K, V, S> +impl fmt::Debug + for ParIterMut<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.iter().fmt(f) @@ -178,11 +179,11 @@ impl fmt::Debug /// /// [`par_values_mut`]: /hashbrown/struct.HashMap.html#method.par_values_mut /// [`HashMap`]: /hashbrown/struct.HashMap.html -pub struct ParValuesMut<'a, K, V, S> { - map: &'a mut HashMap, +pub struct ParValuesMut<'a, K, V, S, A: AllocRef + Clone = Global> { + map: &'a mut HashMap, } -impl<'a, K: Send, V: Send, S: Send> ParallelIterator for ParValuesMut<'a, K, V, S> { +impl<'a, K: Send, V: Send, S: Send, A: AllocRef + Clone + Send> ParallelIterator for ParValuesMut<'a, K, V, S, A> { type Item = &'a mut V; #[cfg_attr(feature = "inline-more", inline)] @@ -198,7 +199,7 @@ impl<'a, K: Send, V: Send, S: Send> ParallelIterator for ParValuesMut<'a, K, V, } } -impl fmt::Debug for ParValuesMut<'_, K, V, S> { +impl fmt::Debug for ParValuesMut<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.values().fmt(f) } @@ -213,11 +214,11 @@ impl fmt::Debug for ParValuesMut<'_ /// [`into_par_iter`]: /hashbrown/struct.HashMap.html#method.into_par_iter /// [`HashMap`]: /hashbrown/struct.HashMap.html /// [`IntoParallelIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelIterator.html -pub struct IntoParIter { - map: HashMap, +pub struct IntoParIter { + map: HashMap, } -impl ParallelIterator for IntoParIter { +impl ParallelIterator for IntoParIter { type Item = (K, V); #[cfg_attr(feature = "inline-more", inline)] @@ -229,7 +230,7 @@ impl ParallelIterator for IntoParIter { } } -impl fmt::Debug for IntoParIter { +impl fmt::Debug for IntoParIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.iter().fmt(f) } @@ -242,11 +243,11 @@ impl fmt::Debug for In /// /// [`par_drain`]: /hashbrown/struct.HashMap.html#method.par_drain /// [`HashMap`]: /hashbrown/struct.HashMap.html -pub struct ParDrain<'a, K, V, S> { - map: &'a mut HashMap, +pub struct ParDrain<'a, K, V, S, A: AllocRef + Clone = Global> { + map: &'a mut HashMap, } -impl ParallelIterator for ParDrain<'_, K, V, S> { +impl ParallelIterator for ParDrain<'_, K, V, S, A> { type Item = (K, V); #[cfg_attr(feature = "inline-more", inline)] @@ -258,48 +259,49 @@ impl ParallelIterator for ParDrain<'_, K, V, S> { } } -impl fmt::Debug - for ParDrain<'_, K, V, S> +impl fmt::Debug + for ParDrain<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.iter().fmt(f) } } -impl HashMap { +impl HashMap { /// Visits (potentially in parallel) immutably borrowed keys in an arbitrary order. 
#[cfg_attr(feature = "inline-more", inline)] - pub fn par_keys(&self) -> ParKeys<'_, K, V, S> { + pub fn par_keys(&self) -> ParKeys<'_, K, V, S, A> { ParKeys { map: self } } /// Visits (potentially in parallel) immutably borrowed values in an arbitrary order. #[cfg_attr(feature = "inline-more", inline)] - pub fn par_values(&self) -> ParValues<'_, K, V, S> { + pub fn par_values(&self) -> ParValues<'_, K, V, S, A> { ParValues { map: self } } } -impl HashMap { +impl HashMap { /// Visits (potentially in parallel) mutably borrowed values in an arbitrary order. #[cfg_attr(feature = "inline-more", inline)] - pub fn par_values_mut(&mut self) -> ParValuesMut<'_, K, V, S> { + pub fn par_values_mut(&mut self) -> ParValuesMut<'_, K, V, S, A> { ParValuesMut { map: self } } /// Consumes (potentially in parallel) all values in an arbitrary order, /// while preserving the map's allocated memory for reuse. #[cfg_attr(feature = "inline-more", inline)] - pub fn par_drain(&mut self) -> ParDrain<'_, K, V, S> { + pub fn par_drain(&mut self) -> ParDrain<'_, K, V, S, A> { ParDrain { map: self } } } -impl HashMap +impl HashMap where K: Eq + Hash + Sync, V: PartialEq + Sync, S: BuildHasher + Sync, + A: AllocRef + Clone + Sync, { /// Returns `true` if the map is equal to another, /// i.e. both maps contain the same keys mapped to the same values. @@ -313,9 +315,9 @@ where } } -impl IntoParallelIterator for HashMap { +impl IntoParallelIterator for HashMap { type Item = (K, V); - type Iter = IntoParIter; + type Iter = IntoParIter; #[cfg_attr(feature = "inline-more", inline)] fn into_par_iter(self) -> Self::Iter { @@ -323,9 +325,9 @@ impl IntoParallelIterator for HashMap { } } -impl<'a, K: Sync, V: Sync, S: Sync> IntoParallelIterator for &'a HashMap { +impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> IntoParallelIterator for &'a HashMap { type Item = (&'a K, &'a V); - type Iter = ParIter<'a, K, V, S>; + type Iter = ParIter<'a, K, V, S, A>; #[cfg_attr(feature = "inline-more", inline)] fn into_par_iter(self) -> Self::Iter { @@ -333,9 +335,9 @@ impl<'a, K: Sync, V: Sync, S: Sync> IntoParallelIterator for &'a HashMap IntoParallelIterator for &'a mut HashMap { +impl<'a, K: Send + Sync, V: Send, S: Send, A: AllocRef + Clone + Sync> IntoParallelIterator for &'a mut HashMap { type Item = (&'a K, &'a mut V); - type Iter = ParIterMut<'a, K, V, S>; + type Iter = ParIterMut<'a, K, V, S, A>; #[cfg_attr(feature = "inline-more", inline)] fn into_par_iter(self) -> Self::Iter { @@ -347,7 +349,7 @@ impl<'a, K: Send + Sync, V: Send, S: Send> IntoParallelIterator for &'a mut Hash /// hashmap. If multiple pairs correspond to the same key, then the /// ones produced earlier in the parallel iterator will be /// overwritten, just as with a sequential iterator. -impl FromParallelIterator<(K, V)> for HashMap +impl FromParallelIterator<(K, V)> for HashMap where K: Eq + Hash + Send, V: Send, @@ -364,11 +366,12 @@ where } /// Extend a hash map with items from a parallel iterator. -impl ParallelExtend<(K, V)> for HashMap +impl ParallelExtend<(K, V)> for HashMap where K: Eq + Hash + Send, V: Send, S: BuildHasher, + A: AllocRef + Clone, { fn par_extend(&mut self, par_iter: I) where @@ -379,11 +382,12 @@ where } /// Extend a hash map with copied items from a parallel iterator. 
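
For context, a usage sketch of the parallel map APIs touched here; with the
defaulted `Global` allocator this is the same code as before the patch
(requires the crate's `rayon` feature):

use hashbrown::HashMap;
use rayon::prelude::*;

fn squares(keys: &[u64]) -> HashMap<u64, u64> {
    // FromParallelIterator: collect key/value pairs built in parallel.
    let mut map: HashMap<u64, u64> = keys.par_iter().map(|&k| (k, k * k)).collect();
    // ParallelExtend: extend the same map from another parallel iterator.
    map.par_extend(keys.par_iter().map(|&k| (k + 1, (k + 1) * (k + 1))));
    map
}
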
-impl<'a, K, V, S> ParallelExtend<(&'a K, &'a V)> for HashMap +impl<'a, K, V, S, A> ParallelExtend<(&'a K, &'a V)> for HashMap where K: Copy + Eq + Hash + Sync, V: Copy + Sync, S: BuildHasher, + A: AllocRef + Clone, { fn par_extend(&mut self, par_iter: I) where @@ -394,12 +398,13 @@ where } // This is equal to the normal `HashMap` -- no custom advantage. -fn extend(map: &mut HashMap, par_iter: I) +fn extend(map: &mut HashMap, par_iter: I) where K: Eq + Hash, S: BuildHasher, I: IntoParallelIterator, - HashMap: Extend, + A: AllocRef + Clone, + HashMap: Extend, { let (list, len) = super::helpers::collect(par_iter); diff --git a/src/external_trait_impls/rayon/raw.rs b/src/external_trait_impls/rayon/raw.rs index e00e6a404f..f557e9c09c 100644 --- a/src/external_trait_impls/rayon/raw.rs +++ b/src/external_trait_impls/rayon/raw.rs @@ -1,5 +1,5 @@ use crate::raw::Bucket; -use crate::raw::{Alloc, RawIterRange, RawTable}; +use crate::raw::{AllocRef, RawIterRange, RawTable}; use crate::scopeguard::guard; use alloc::alloc::dealloc; use core::marker::PhantomData; @@ -54,11 +54,11 @@ impl UnindexedProducer for ParIterProducer { } /// Parallel iterator which consumes a table and returns elements. -pub struct RawIntoParIter { +pub struct RawIntoParIter { table: RawTable, } -impl ParallelIterator for RawIntoParIter { +impl ParallelIterator for RawIntoParIter { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -80,16 +80,16 @@ impl ParallelIterator for RawIntoParIter { } /// Parallel iterator which consumes elements without freeing the table storage. -pub struct RawParDrain<'a, T, A: Alloc + Clone> { +pub struct RawParDrain<'a, T, A: AllocRef + Clone> { // We don't use a &'a mut RawTable because we want RawParDrain to be // covariant over T. table: NonNull>, marker: PhantomData<&'a RawTable>, } -unsafe impl Send for RawParDrain<'_, T, A> {} +unsafe impl Send for RawParDrain<'_, T, A> {} -impl ParallelIterator for RawParDrain<'_, T, A> { +impl ParallelIterator for RawParDrain<'_, T, A> { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -107,7 +107,7 @@ impl ParallelIterator for RawParDrain<'_, T, A> { } } -impl Drop for RawParDrain<'_, T, A> { +impl Drop for RawParDrain<'_, T, A> { fn drop(&mut self) { // If drive_unindexed is not called then simply clear the table. unsafe { self.table.as_mut().clear() } @@ -166,7 +166,7 @@ impl Drop for ParDrainProducer { } } -impl RawTable { +impl RawTable { /// Returns a parallel iterator over the elements in a `RawTable`. #[cfg_attr(feature = "inline-more", inline)] pub fn par_iter(&self) -> RawParIter { diff --git a/src/external_trait_impls/rayon/set.rs b/src/external_trait_impls/rayon/set.rs index 53d2660d58..3bfbbf97e8 100644 --- a/src/external_trait_impls/rayon/set.rs +++ b/src/external_trait_impls/rayon/set.rs @@ -1,6 +1,7 @@ //! Rayon extensions for `HashSet`. 
use crate::hash_set::HashSet; +use crate::raw::{AllocRef, Global}; use core::hash::{BuildHasher, Hash}; use rayon::iter::plumbing::UnindexedConsumer; use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, ParallelIterator}; @@ -14,11 +15,11 @@ use rayon::iter::{FromParallelIterator, IntoParallelIterator, ParallelExtend, Pa /// [`into_par_iter`]: /hashbrown/struct.HashSet.html#method.into_par_iter /// [`HashSet`]: /hashbrown/struct.HashSet.html /// [`IntoParallelIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelIterator.html -pub struct IntoParIter { - set: HashSet, +pub struct IntoParIter { + set: HashSet, } -impl ParallelIterator for IntoParIter { +impl ParallelIterator for IntoParIter { type Item = T; fn drive_unindexed(self, consumer: C) -> C::Result @@ -40,11 +41,11 @@ impl ParallelIterator for IntoParIter { /// /// [`par_drain`]: /hashbrown/struct.HashSet.html#method.par_drain /// [`HashSet`]: /hashbrown/struct.HashSet.html -pub struct ParDrain<'a, T, S> { - set: &'a mut HashSet, +pub struct ParDrain<'a, T, S, A: AllocRef + Clone = Global> { + set: &'a mut HashSet, } -impl ParallelIterator for ParDrain<'_, T, S> { +impl ParallelIterator for ParDrain<'_, T, S, A> { type Item = T; fn drive_unindexed(self, consumer: C) -> C::Result @@ -68,11 +69,11 @@ impl ParallelIterator for ParDrain<'_, T, S> { /// [`par_iter`]: /hashbrown/struct.HashSet.html#method.par_iter /// [`HashSet`]: /hashbrown/struct.HashSet.html /// [`IntoParallelRefIterator`]: https://docs.rs/rayon/1.0/rayon/iter/trait.IntoParallelRefIterator.html -pub struct ParIter<'a, T, S> { - set: &'a HashSet, +pub struct ParIter<'a, T, S, A: AllocRef + Clone = Global> { + set: &'a HashSet, } -impl<'a, T: Sync, S: Sync> ParallelIterator for ParIter<'a, T, S> { +impl<'a, T: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator for ParIter<'a, T, S, A> { type Item = &'a T; fn drive_unindexed(self, consumer: C) -> C::Result @@ -91,15 +92,16 @@ impl<'a, T: Sync, S: Sync> ParallelIterator for ParIter<'a, T, S> { /// /// [`par_difference`]: /hashbrown/struct.HashSet.html#method.par_difference /// [`HashSet`]: /hashbrown/struct.HashSet.html -pub struct ParDifference<'a, T, S> { - a: &'a HashSet, - b: &'a HashSet, +pub struct ParDifference<'a, T, S, A: AllocRef + Clone = Global> { + a: &'a HashSet, + b: &'a HashSet, } -impl<'a, T, S> ParallelIterator for ParDifference<'a, T, S> +impl<'a, T, S, A> ParallelIterator for ParDifference<'a, T, S, A> where T: Eq + Hash + Sync, S: BuildHasher + Sync, + A: AllocRef + Clone + Sync, { type Item = &'a T; @@ -123,15 +125,16 @@ where /// /// [`par_symmetric_difference`]: /hashbrown/struct.HashSet.html#method.par_symmetric_difference /// [`HashSet`]: /hashbrown/struct.HashSet.html -pub struct ParSymmetricDifference<'a, T, S> { - a: &'a HashSet, - b: &'a HashSet, +pub struct ParSymmetricDifference<'a, T, S, A: AllocRef + Clone = Global> { + a: &'a HashSet, + b: &'a HashSet, } -impl<'a, T, S> ParallelIterator for ParSymmetricDifference<'a, T, S> +impl<'a, T, S, A> ParallelIterator for ParSymmetricDifference<'a, T, S, A> where T: Eq + Hash + Sync, S: BuildHasher + Sync, + A: AllocRef + Clone + Sync, { type Item = &'a T; @@ -154,15 +157,16 @@ where /// /// [`par_intersection`]: /hashbrown/struct.HashSet.html#method.par_intersection /// [`HashSet`]: /hashbrown/struct.HashSet.html -pub struct ParIntersection<'a, T, S> { - a: &'a HashSet, - b: &'a HashSet, +pub struct ParIntersection<'a, T, S, A: AllocRef + Clone = Global> { + a: &'a HashSet, + b: &'a HashSet, } 
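
A usage sketch of these set adaptors for `Global`-allocated sets (again
behind the `rayon` feature):

use hashbrown::HashSet;
use rayon::prelude::*;

fn overlap(a: &HashSet<u32>, b: &HashSet<u32>) -> (usize, usize) {
    // Values in both sets, and values in exactly one of them.
    let both = a.par_intersection(b).count();
    let one = a.par_symmetric_difference(b).count();
    (both, one)
}
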
-impl<'a, T, S> ParallelIterator for ParIntersection<'a, T, S> +impl<'a, T, S, A> ParallelIterator for ParIntersection<'a, T, S, A> where T: Eq + Hash + Sync, S: BuildHasher + Sync, + A: AllocRef + Clone + Sync, { type Item = &'a T; @@ -207,15 +211,31 @@ where } } -impl HashSet +impl HashSet where T: Eq + Hash + Sync, S: BuildHasher + Sync, { + + /// Visits (potentially in parallel) the values representing the union, + /// i.e. all the values in `self` or `other`, without duplicates. + #[cfg_attr(feature = "inline-more", inline)] + pub fn par_union<'a>(&'a self, other: &'a Self) -> ParUnion<'a, T, S> { + ParUnion { a: self, b: other } + } + +} + +impl HashSet +where + T: Eq + Hash + Sync, + S: BuildHasher + Sync, + A: AllocRef + Clone + Sync, +{ /// Visits (potentially in parallel) the values representing the difference, /// i.e. the values that are in `self` but not in `other`. #[cfg_attr(feature = "inline-more", inline)] - pub fn par_difference<'a>(&'a self, other: &'a Self) -> ParDifference<'a, T, S> { + pub fn par_difference<'a>(&'a self, other: &'a Self) -> ParDifference<'a, T, S, A> { ParDifference { a: self, b: other } } @@ -225,24 +245,17 @@ where pub fn par_symmetric_difference<'a>( &'a self, other: &'a Self, - ) -> ParSymmetricDifference<'a, T, S> { + ) -> ParSymmetricDifference<'a, T, S, A> { ParSymmetricDifference { a: self, b: other } } /// Visits (potentially in parallel) the values representing the /// intersection, i.e. the values that are both in `self` and `other`. #[cfg_attr(feature = "inline-more", inline)] - pub fn par_intersection<'a>(&'a self, other: &'a Self) -> ParIntersection<'a, T, S> { + pub fn par_intersection<'a>(&'a self, other: &'a Self) -> ParIntersection<'a, T, S, A> { ParIntersection { a: self, b: other } } - /// Visits (potentially in parallel) the values representing the union, - /// i.e. all the values in `self` or `other`, without duplicates. - #[cfg_attr(feature = "inline-more", inline)] - pub fn par_union<'a>(&'a self, other: &'a Self) -> ParUnion<'a, T, S> { - ParUnion { a: self, b: other } - } - /// Returns `true` if `self` has no elements in common with `other`. /// This is equivalent to checking for an empty intersection. /// @@ -280,22 +293,23 @@ where } } -impl HashSet +impl HashSet where T: Eq + Hash + Send, S: BuildHasher + Send, + A: AllocRef + Clone + Send, { /// Consumes (potentially in parallel) all values in an arbitrary order, /// while preserving the set's allocated memory for reuse. #[cfg_attr(feature = "inline-more", inline)] - pub fn par_drain(&mut self) -> ParDrain<'_, T, S> { + pub fn par_drain(&mut self) -> ParDrain<'_, T, S, A> { ParDrain { set: self } } } -impl IntoParallelIterator for HashSet { +impl IntoParallelIterator for HashSet { type Item = T; - type Iter = IntoParIter; + type Iter = IntoParIter; #[cfg_attr(feature = "inline-more", inline)] fn into_par_iter(self) -> Self::Iter { @@ -303,9 +317,9 @@ impl IntoParallelIterator for HashSet { } } -impl<'a, T: Sync, S: Sync> IntoParallelIterator for &'a HashSet { +impl<'a, T: Sync, S: Sync, A: AllocRef + Clone + Sync> IntoParallelIterator for &'a HashSet { type Item = &'a T; - type Iter = ParIter<'a, T, S>; + type Iter = ParIter<'a, T, S, A>; #[cfg_attr(feature = "inline-more", inline)] fn into_par_iter(self) -> Self::Iter { @@ -314,7 +328,7 @@ impl<'a, T: Sync, S: Sync> IntoParallelIterator for &'a HashSet { } /// Collect values from a parallel iterator into a hashset. 
-impl FromParallelIterator for HashSet +impl FromParallelIterator for HashSet where T: Eq + Hash + Send, S: BuildHasher + Default, @@ -330,7 +344,7 @@ where } /// Extend a hash set with items from a parallel iterator. -impl ParallelExtend for HashSet +impl ParallelExtend for HashSet where T: Eq + Hash + Send, S: BuildHasher, @@ -344,7 +358,7 @@ where } /// Extend a hash set with copied items from a parallel iterator. -impl<'a, T, S> ParallelExtend<&'a T> for HashSet +impl<'a, T, S> ParallelExtend<&'a T> for HashSet where T: 'a + Copy + Eq + Hash + Sync, S: BuildHasher, @@ -358,12 +372,13 @@ where } // This is equal to the normal `HashSet` -- no custom advantage. -fn extend(set: &mut HashSet, par_iter: I) +fn extend(set: &mut HashSet, par_iter: I) where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, I: IntoParallelIterator, - HashSet: Extend, + HashSet: Extend, { let (list, len) = super::helpers::collect(par_iter); diff --git a/src/map.rs b/src/map.rs index 0a996ce4e2..e5d7bd84d2 100644 --- a/src/map.rs +++ b/src/map.rs @@ -1,4 +1,4 @@ -use crate::raw::{Bucket, Global, RawDrain, RawIntoIter, RawIter, RawTable}; +use crate::raw::{Bucket, AllocRef, Global, RawDrain, RawIntoIter, RawIter, RawTable}; use crate::CollectionAllocErr; use core::borrow::Borrow; use core::fmt::{self, Debug}; @@ -191,9 +191,9 @@ pub enum DefaultHashBuilder {} /// } /// ``` #[derive(Clone)] -pub struct HashMap { +pub struct HashMap { pub(crate) hash_builder: S, - pub(crate) table: RawTable<(K, V), Global>, + pub(crate) table: RawTable<(K, V), A>, } #[cfg_attr(feature = "inline-more", inline)] @@ -238,6 +238,27 @@ impl HashMap { } } +#[cfg(feature = "ahash")] +impl HashMap { + /// Creates an empty `HashMap` using the given allocator. + /// + /// The hash map is initially created with a capacity of 0, so it will not allocate until it + /// is first inserted into. + #[cfg_attr(feature = "inline-more", inline)] + pub fn new_in(alloc: A) -> Self { + Self::with_hasher_in(Default::default(), alloc) + } + + /// Creates an empty `HashMap` with the specified capacity using the given allocator. + /// + /// The hash map will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash map will not allocate. + #[cfg_attr(feature = "inline-more", inline)] + pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { + Self::with_capacity_and_hasher_in(capacity, DefaultHashBuilder::default(), alloc) + } +} + impl HashMap { /// Creates an empty `HashMap` which will use the given hash builder to hash /// keys. @@ -295,6 +316,65 @@ impl HashMap { table: RawTable::with_capacity(Global, capacity), } } +} + +impl HashMap { + /// Creates an empty `HashMap` which will use the given hash builder to hash + /// keys. It will be allocated with the given allocator. + /// + /// The created map has the default initial capacity. + /// + /// Warning: `hash_builder` is normally randomly generated, and + /// is designed to allow HashMaps to be resistant to attacks that + /// cause many collisions and very poor performance. Setting it + /// manually using this function can expose a DoS attack vector. 
+ /// + /// # Examples + /// + /// ``` + /// use hashbrown::HashMap; + /// use hashbrown::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut map = HashMap::with_hasher(s); + /// map.insert(1, 2); + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn with_hasher_in(hash_builder: S, alloc: A) -> Self { + Self { + hash_builder, + table: RawTable::new(alloc), + } + } + + /// Creates an empty `HashMap` with the specified capacity, using `hash_builder` + /// to hash the keys. It will be allocated with the given allocator. + /// + /// The hash map will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash map will not allocate. + /// + /// Warning: `hash_builder` is normally randomly generated, and + /// is designed to allow HashMaps to be resistant to attacks that + /// cause many collisions and very poor performance. Setting it + /// manually using this function can expose a DoS attack vector. + /// + /// # Examples + /// + /// ``` + /// use hashbrown::HashMap; + /// use hashbrown::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut map = HashMap::with_capacity_and_hasher(10, s); + /// map.insert(1, 2); + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn with_capacity_and_hasher_in(capacity: usize, hash_builder: S, alloc: A) -> Self { + Self { + hash_builder, + table: RawTable::with_capacity(alloc, capacity), + } + } /// Returns a reference to the map's [`BuildHasher`]. /// @@ -527,7 +607,7 @@ impl HashMap { /// assert!(a.is_empty()); /// ``` #[cfg_attr(feature = "inline-more", inline)] - pub fn drain(&mut self) -> Drain<'_, K, V> { + pub fn drain(&mut self) -> Drain<'_, K, V, A> { // Here we tie the lifetime of self to the iter. unsafe { Drain { @@ -555,10 +635,11 @@ impl HashMap { } } -impl HashMap +impl HashMap where K: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { /// Reserves capacity for at least `additional` more elements to be inserted /// in the `HashMap`. The collection may reserve more space to avoid @@ -679,7 +760,7 @@ where /// assert_eq!(letters.get(&'y'), None); /// ``` #[cfg_attr(feature = "inline-more", inline)] - pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S> { + pub fn entry(&mut self, key: K) -> Entry<'_, K, V, S, A> { let hash = make_hash(&self.hash_builder, &key); if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) { Entry::Occupied(OccupiedEntry { @@ -975,7 +1056,7 @@ where /// assert_eq!(drained.count(), 4); /// assert_eq!(map.len(), 4); /// ``` - pub fn drain_filter(&mut self, f: F) -> DrainFilter<'_, K, V, F> + pub fn drain_filter(&mut self, f: F) -> DrainFilter<'_, K, V, F, A> where F: FnMut(&K, &mut V) -> bool, { @@ -987,7 +1068,7 @@ where } } -impl HashMap { +impl HashMap { /// Creates a raw entry builder for the HashMap. /// /// Raw entries provide the lowest level of control for searching and @@ -1020,7 +1101,7 @@ impl HashMap { /// acting erratically, with two keys randomly masking each other. Implementations /// are free to assume this doesn't happen (within the limits of memory-safety). #[cfg_attr(feature = "inline-more", inline)] - pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S> { + pub fn raw_entry_mut(&mut self) -> RawEntryBuilderMut<'_, K, V, S, A> { RawEntryBuilderMut { map: self } } @@ -1040,16 +1121,17 @@ impl HashMap { /// /// Immutable raw entries have very limited use; you might instead want `raw_entry_mut`. 
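
Stepping back, a hypothetical usage sketch of the `_in` constructors added
above. `BumpAlloc` is invented for illustration; any type implementing the
crate's `AllocRef + Clone` works:

use hashbrown::hash_map::{DefaultHashBuilder, HashMap};

fn demo(bump: BumpAlloc) {
    // Explicit hasher plus the new allocator argument:
    let mut map = HashMap::with_capacity_and_hasher_in(
        16,
        DefaultHashBuilder::default(),
        bump.clone(),
    );
    map.insert("key", 1);

    // With the "ahash" feature, the hasher can be defaulted too, per the
    // cfg(feature = "ahash") gate above:
    let mut map2 = HashMap::new_in(bump);
    map2.insert(1u32, "one");
}
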
#[cfg_attr(feature = "inline-more", inline)] - pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S> { + pub fn raw_entry(&self) -> RawEntryBuilder<'_, K, V, S, A> { RawEntryBuilder { map: self } } } -impl PartialEq for HashMap +impl PartialEq for HashMap where K: Eq + Hash, V: PartialEq, S: BuildHasher, + A: AllocRef + Clone, { fn eq(&self, other: &Self) -> bool { if self.len() != other.len() { @@ -1061,32 +1143,35 @@ where } } -impl Eq for HashMap +impl Eq for HashMap where K: Eq + Hash, V: Eq, S: BuildHasher, + A: AllocRef + Clone, { } -impl Debug for HashMap +impl Debug for HashMap where K: Debug, V: Debug, + A: AllocRef + Clone, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_map().entries(self.iter()).finish() } } -impl Default for HashMap +impl Default for HashMap where S: Default, + A: Default + AllocRef + Clone, { - /// Creates an empty `HashMap`, with the `Default` value for the hasher. + /// Creates an empty `HashMap`, with the `Default` value for the hasher and allocator. #[cfg_attr(feature = "inline-more", inline)] fn default() -> Self { - Self::with_hasher(Default::default()) + Self::with_hasher_in(Default::default(), Default::default()) } } @@ -1174,11 +1259,11 @@ impl IterMut<'_, K, V> { /// /// [`into_iter`]: struct.HashMap.html#method.into_iter /// [`HashMap`]: struct.HashMap.html -pub struct IntoIter { - inner: RawIntoIter<(K, V), Global>, +pub struct IntoIter { + inner: RawIntoIter<(K, V), A>, } -impl IntoIter { +impl IntoIter { /// Returns a iterator of references over the remaining items. #[cfg_attr(feature = "inline-more", inline)] pub(super) fn iter(&self) -> Iter<'_, K, V> { @@ -1196,7 +1281,7 @@ impl IntoIter { /// /// [`keys`]: struct.HashMap.html#method.keys /// [`HashMap`]: struct.HashMap.html -pub struct Keys<'a, K, V> { +pub struct Keys<'a, K, V = Global> { inner: Iter<'a, K, V>, } @@ -1250,11 +1335,11 @@ impl fmt::Debug for Values<'_, K, V> { /// /// [`drain`]: struct.HashMap.html#method.drain /// [`HashMap`]: struct.HashMap.html -pub struct Drain<'a, K, V> { - inner: RawDrain<'a, (K, V), Global>, +pub struct Drain<'a, K, V, A: AllocRef + Clone = Global> { + inner: RawDrain<'a, (K, V), A>, } -impl Drain<'_, K, V> { +impl Drain<'_, K, V, A> { /// Returns a iterator of references over the remaining items. 
#[cfg_attr(feature = "inline-more", inline)] pub(super) fn iter(&self) -> Iter<'_, K, V> { @@ -1272,27 +1357,30 @@ impl Drain<'_, K, V> { /// /// [`drain_filter`]: struct.HashMap.html#method.drain_filter /// [`HashMap`]: struct.HashMap.html -pub struct DrainFilter<'a, K, V, F> +pub struct DrainFilter<'a, K, V, F, A: AllocRef + Clone = Global> where F: FnMut(&K, &mut V) -> bool, { f: F, iter: RawIter<(K, V)>, - table: &'a mut RawTable<(K, V)>, + table: &'a mut RawTable<(K, V), A>, } -impl<'a, K, V, F> Drop for DrainFilter<'a, K, V, F> +impl<'a, K, V, F, A> Drop for DrainFilter<'a, K, V, F, A> where F: FnMut(&K, &mut V) -> bool, + A: AllocRef + Clone, { fn drop(&mut self) { - struct DropGuard<'r, 'a, K, V, F>(&'r mut DrainFilter<'a, K, V, F>) + struct DropGuard<'r, 'a, K, V, F, A>(&'r mut DrainFilter<'a, K, V, F, A>) where - F: FnMut(&K, &mut V) -> bool; + F: FnMut(&K, &mut V) -> bool, + A: AllocRef + Clone; - impl<'r, 'a, K, V, F> Drop for DropGuard<'r, 'a, K, V, F> + impl<'r, 'a, K, V, F, A> Drop for DropGuard<'r, 'a, K, V, F, A> where F: FnMut(&K, &mut V) -> bool, + A: AllocRef + Clone, { fn drop(&mut self) { while let Some(_) = self.0.next() {} @@ -1306,9 +1394,10 @@ where } } -impl Iterator for DrainFilter<'_, K, V, F> +impl Iterator for DrainFilter<'_, K, V, F, A> where F: FnMut(&K, &mut V) -> bool, + A: AllocRef + Clone, { type Item = (K, V); fn next(&mut self) -> Option { @@ -1341,8 +1430,8 @@ pub struct ValuesMut<'a, K, V> { /// See the [`HashMap::raw_entry_mut`] docs for usage examples. /// /// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut -pub struct RawEntryBuilderMut<'a, K, V, S> { - map: &'a mut HashMap, +pub struct RawEntryBuilderMut<'a, K, V, S, A: AllocRef + Clone = Global> { + map: &'a mut HashMap, } /// A view into a single entry in a map, which may either be vacant or occupied. @@ -1356,32 +1445,34 @@ pub struct RawEntryBuilderMut<'a, K, V, S> { /// [`Entry`]: enum.Entry.html /// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut /// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html -pub enum RawEntryMut<'a, K, V, S> { +pub enum RawEntryMut<'a, K, V, S, A: AllocRef + Clone> { /// An occupied entry. - Occupied(RawOccupiedEntryMut<'a, K, V>), + Occupied(RawOccupiedEntryMut<'a, K, V, A>), /// A vacant entry. - Vacant(RawVacantEntryMut<'a, K, V, S>), + Vacant(RawVacantEntryMut<'a, K, V, S, A>), } /// A view into an occupied entry in a `HashMap`. /// It is part of the [`RawEntryMut`] enum. /// /// [`RawEntryMut`]: enum.RawEntryMut.html -pub struct RawOccupiedEntryMut<'a, K, V> { +pub struct RawOccupiedEntryMut<'a, K, V, A: AllocRef + Clone = Global> { elem: Bucket<(K, V)>, - table: &'a mut RawTable<(K, V), Global>, + table: &'a mut RawTable<(K, V), A>, } -unsafe impl Send for RawOccupiedEntryMut<'_, K, V> +unsafe impl Send for RawOccupiedEntryMut<'_, K, V, A> where K: Send, V: Send, + A: Send + AllocRef + Clone, { } -unsafe impl Sync for RawOccupiedEntryMut<'_, K, V> +unsafe impl Sync for RawOccupiedEntryMut<'_, K, V, A> where K: Sync, V: Sync, + A: Send + AllocRef + Clone, { } @@ -1389,8 +1480,8 @@ where /// It is part of the [`RawEntryMut`] enum. /// /// [`RawEntryMut`]: enum.RawEntryMut.html -pub struct RawVacantEntryMut<'a, K, V, S> { - table: &'a mut RawTable<(K, V), Global>, +pub struct RawVacantEntryMut<'a, K, V, S, A: AllocRef + Clone = Global> { + table: &'a mut RawTable<(K, V), A>, hash_builder: &'a S, } @@ -1399,15 +1490,15 @@ pub struct RawVacantEntryMut<'a, K, V, S> { /// See the [`HashMap::raw_entry`] docs for usage examples. 
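The nested `DropGuard` above is the usual exception-safety idiom for drains: if dropping a yielded element panics, the guard's own `Drop` finishes consuming the iterator, so the remaining elements are still removed from the table. A self-contained sketch of the idiom (the names here are illustrative, not part of the crate):

```
use core::mem;

fn exhaust<I: Iterator>(iter: &mut I) {
    struct Guard<'r, I: Iterator>(&'r mut I);
    impl<I: Iterator> Drop for Guard<'_, I> {
        fn drop(&mut self) {
            // Runs during unwinding: keep draining whatever is left.
            while let Some(_) = self.0.next() {}
        }
    }
    while let Some(item) = iter.next() {
        let guard = Guard(&mut *iter);
        drop(item); // the element's destructor may panic here
        mem::forget(guard); // normal path: disarm the guard
    }
}
```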
/// /// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry -pub struct RawEntryBuilder<'a, K, V, S> { - map: &'a HashMap, +pub struct RawEntryBuilder<'a, K, V, S, A: AllocRef + Clone = Global> { + map: &'a HashMap, } -impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> { +impl<'a, K, V, S, A: AllocRef + Clone> RawEntryBuilderMut<'a, K, V, S, A> { /// Creates a `RawEntryMut` from the given key. #[cfg_attr(feature = "inline-more", inline)] #[allow(clippy::wrong_self_convention)] - pub fn from_key(self, k: &Q) -> RawEntryMut<'a, K, V, S> + pub fn from_key(self, k: &Q) -> RawEntryMut<'a, K, V, S, A> where S: BuildHasher, K: Borrow, @@ -1421,7 +1512,7 @@ impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> { /// Creates a `RawEntryMut` from the given key and its hash. #[inline] #[allow(clippy::wrong_self_convention)] - pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S> + pub fn from_key_hashed_nocheck(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S, A> where K: Borrow, Q: Eq, @@ -1430,11 +1521,11 @@ impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> { } } -impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> { +impl<'a, K, V, S, A: AllocRef + Clone> RawEntryBuilderMut<'a, K, V, S, A> { /// Creates a `RawEntryMut` from the given hash. #[cfg_attr(feature = "inline-more", inline)] #[allow(clippy::wrong_self_convention)] - pub fn from_hash(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S> + pub fn from_hash(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S, A> where for<'b> F: FnMut(&'b K) -> bool, { @@ -1442,7 +1533,7 @@ impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> { } #[cfg_attr(feature = "inline-more", inline)] - fn search(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S> + fn search(self, hash: u64, mut is_match: F) -> RawEntryMut<'a, K, V, S, A> where for<'b> F: FnMut(&'b K) -> bool, { @@ -1459,7 +1550,7 @@ impl<'a, K, V, S> RawEntryBuilderMut<'a, K, V, S> { } } -impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> { +impl<'a, K, V, S, A: AllocRef + Clone> RawEntryBuilder<'a, K, V, S, A> { /// Access an entry by key. #[cfg_attr(feature = "inline-more", inline)] #[allow(clippy::wrong_self_convention)] @@ -1510,7 +1601,7 @@ impl<'a, K, V, S> RawEntryBuilder<'a, K, V, S> { } } -impl<'a, K, V, S> RawEntryMut<'a, K, V, S> { +impl<'a, K, V, S, A: AllocRef + Clone> RawEntryMut<'a, K, V, S, A> { /// Sets the value of the entry, and returns a RawOccupiedEntryMut. /// /// # Examples @@ -1524,7 +1615,7 @@ impl<'a, K, V, S> RawEntryMut<'a, K, V, S> { /// assert_eq!(entry.remove_entry(), ("horseyland", 37)); /// ``` #[cfg_attr(feature = "inline-more", inline)] - pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V> + pub fn insert(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, A> where K: Hash, S: BuildHasher, @@ -1638,7 +1729,7 @@ impl<'a, K, V, S> RawEntryMut<'a, K, V, S> { } } -impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> { +impl<'a, K, V, A: AllocRef + Clone> RawOccupiedEntryMut<'a, K, V, A> { /// Gets a reference to the key in the entry. #[cfg_attr(feature = "inline-more", inline)] pub fn key(&self) -> &K { @@ -1733,7 +1824,7 @@ impl<'a, K, V> RawOccupiedEntryMut<'a, K, V> { } } -impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> { +impl<'a, K, V, S, A: AllocRef + Clone> RawVacantEntryMut<'a, K, V, S, A> { /// Sets the value of the entry with the VacantEntry's key, /// and returns a mutable reference to it. 
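The main use of the raw-entry builders above is avoiding repeated hashing. A sketch of a lookup that computes the hash once and hands it back in via `from_key_hashed_nocheck` (the caller is responsible for the hash actually matching the key):

```
use core::hash::{BuildHasher, Hash, Hasher};
use hashbrown::hash_map::RawEntryMut;
use hashbrown::HashMap;

let mut map: HashMap<&str, u32> = HashMap::new();
map.insert("a", 1);

// Hash the key once with the map's own hasher...
let mut state = map.hasher().build_hasher();
"a".hash(&mut state);
let hash = state.finish();

// ...then reuse that hash for the mutable lookup.
match map.raw_entry_mut().from_key_hashed_nocheck(hash, "a") {
    RawEntryMut::Occupied(mut entry) => *entry.get_mut() += 1,
    RawEntryMut::Vacant(_) => unreachable!("key was inserted above"),
}
assert_eq!(map["a"], 2);
```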
#[cfg_attr(feature = "inline-more", inline)] @@ -1780,7 +1871,7 @@ impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> { } #[cfg_attr(feature = "inline-more", inline)] - fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V> + fn insert_entry(self, key: K, value: V) -> RawOccupiedEntryMut<'a, K, V, A> where K: Hash, S: BuildHasher, @@ -1799,13 +1890,13 @@ impl<'a, K, V, S> RawVacantEntryMut<'a, K, V, S> { } } -impl Debug for RawEntryBuilderMut<'_, K, V, S> { +impl Debug for RawEntryBuilderMut<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RawEntryBuilder").finish() } } -impl Debug for RawEntryMut<'_, K, V, S> { +impl Debug for RawEntryMut<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { RawEntryMut::Vacant(ref v) => f.debug_tuple("RawEntry").field(v).finish(), @@ -1814,7 +1905,7 @@ impl Debug for RawEntryMut<'_, K, V, S> { } } -impl Debug for RawOccupiedEntryMut<'_, K, V> { +impl Debug for RawOccupiedEntryMut<'_, K, V, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RawOccupiedEntryMut") .field("key", self.key()) @@ -1823,13 +1914,13 @@ impl Debug for RawOccupiedEntryMut<'_, K, V> { } } -impl Debug for RawVacantEntryMut<'_, K, V, S> { +impl Debug for RawVacantEntryMut<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RawVacantEntryMut").finish() } } -impl Debug for RawEntryBuilder<'_, K, V, S> { +impl Debug for RawEntryBuilder<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("RawEntryBuilder").finish() } @@ -1841,15 +1932,15 @@ impl Debug for RawEntryBuilder<'_, K, V, S> { /// /// [`HashMap`]: struct.HashMap.html /// [`entry`]: struct.HashMap.html#method.entry -pub enum Entry<'a, K, V, S> { +pub enum Entry<'a, K, V, S, A> where A: AllocRef + Clone { /// An occupied entry. - Occupied(OccupiedEntry<'a, K, V, S>), + Occupied(OccupiedEntry<'a, K, V, S, A>), /// A vacant entry. - Vacant(VacantEntry<'a, K, V, S>), + Vacant(VacantEntry<'a, K, V, S, A>), } -impl Debug for Entry<'_, K, V, S> { +impl Debug for Entry<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Entry::Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(), @@ -1862,28 +1953,30 @@ impl Debug for Entry<'_, K, V, S> { /// It is part of the [`Entry`] enum. /// /// [`Entry`]: enum.Entry.html -pub struct OccupiedEntry<'a, K, V, S> { +pub struct OccupiedEntry<'a, K, V, S, A: AllocRef + Clone = Global> { key: Option, elem: Bucket<(K, V)>, - table: &'a mut HashMap, + table: &'a mut HashMap, } -unsafe impl Send for OccupiedEntry<'_, K, V, S> +unsafe impl Send for OccupiedEntry<'_, K, V, S, A> where K: Send, V: Send, S: Send, + A: Send + AllocRef + Clone, { } -unsafe impl Sync for OccupiedEntry<'_, K, V, S> +unsafe impl Sync for OccupiedEntry<'_, K, V, S, A> where K: Sync, V: Sync, S: Sync, + A: Sync + AllocRef + Clone, { } -impl Debug for OccupiedEntry<'_, K, V, S> { +impl Debug for OccupiedEntry<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("OccupiedEntry") .field("key", self.key()) @@ -1896,19 +1989,19 @@ impl Debug for OccupiedEntry<'_, K, V, S> { /// It is part of the [`Entry`] enum. 
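`Entry` picking up the `A` parameter changes nothing for callers on the default allocator; the familiar counting pattern still reads as before:

```
use hashbrown::HashMap;

let mut counts: HashMap<char, u32> = HashMap::new();
for c in "hello".chars() {
    // `entry` yields the `Entry` enum above; `or_insert` handles both arms.
    *counts.entry(c).or_insert(0) += 1;
}
assert_eq!(counts[&'l'], 2);
```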
/// /// [`Entry`]: enum.Entry.html -pub struct VacantEntry<'a, K, V, S> { +pub struct VacantEntry<'a, K, V, S, A: AllocRef + Clone = Global> { hash: u64, key: K, - table: &'a mut HashMap, + table: &'a mut HashMap, } -impl Debug for VacantEntry<'_, K, V, S> { +impl Debug for VacantEntry<'_, K, V, S, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("VacantEntry").field(self.key()).finish() } } -impl<'a, K, V, S> IntoIterator for &'a HashMap { +impl<'a, K, V, S, A: AllocRef + Clone> IntoIterator for &'a HashMap { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; @@ -1918,7 +2011,7 @@ impl<'a, K, V, S> IntoIterator for &'a HashMap { } } -impl<'a, K, V, S> IntoIterator for &'a mut HashMap { +impl<'a, K, V, S, A: AllocRef + Clone> IntoIterator for &'a mut HashMap { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; @@ -1928,9 +2021,9 @@ impl<'a, K, V, S> IntoIterator for &'a mut HashMap { } } -impl IntoIterator for HashMap { +impl IntoIterator for HashMap { type Item = (K, V); - type IntoIter = IntoIter; + type IntoIter = IntoIter; /// Creates a consuming iterator, that is, one that moves each key-value /// pair out of the map in arbitrary order. The map cannot be used after @@ -1950,7 +2043,7 @@ impl IntoIterator for HashMap { /// let vec: Vec<(&str, i32)> = map.into_iter().collect(); /// ``` #[cfg_attr(feature = "inline-more", inline)] - fn into_iter(self) -> IntoIter { + fn into_iter(self) -> IntoIter { IntoIter { inner: self.table.into_iter(), } @@ -2014,7 +2107,7 @@ where } } -impl Iterator for IntoIter { +impl Iterator for IntoIter { type Item = (K, V); #[cfg_attr(feature = "inline-more", inline)] @@ -2026,15 +2119,15 @@ impl Iterator for IntoIter { self.inner.size_hint() } } -impl ExactSizeIterator for IntoIter { +impl ExactSizeIterator for IntoIter { #[cfg_attr(feature = "inline-more", inline)] fn len(&self) -> usize { self.inner.len() } } -impl FusedIterator for IntoIter {} +impl FusedIterator for IntoIter {} -impl fmt::Debug for IntoIter { +impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } @@ -2110,7 +2203,7 @@ where } } -impl<'a, K, V> Iterator for Drain<'a, K, V> { +impl<'a, K, V, A: AllocRef + Clone> Iterator for Drain<'a, K, V, A> { type Item = (K, V); #[cfg_attr(feature = "inline-more", inline)] @@ -2122,25 +2215,26 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> { self.inner.size_hint() } } -impl ExactSizeIterator for Drain<'_, K, V> { +impl ExactSizeIterator for Drain<'_, K, V, A> { #[cfg_attr(feature = "inline-more", inline)] fn len(&self) -> usize { self.inner.len() } } -impl FusedIterator for Drain<'_, K, V> {} +impl FusedIterator for Drain<'_, K, V, A> {} -impl fmt::Debug for Drain<'_, K, V> +impl fmt::Debug for Drain<'_, K, V, A> where K: fmt::Debug, V: fmt::Debug, + A: AllocRef + Clone, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } } -impl<'a, K, V, S> Entry<'a, K, V, S> { +impl<'a, K, V, S, A: AllocRef + Clone> Entry<'a, K, V, S, A> { /// Sets the value of the entry, and returns an OccupiedEntry. 
/// /// # Examples @@ -2154,7 +2248,7 @@ impl<'a, K, V, S> Entry<'a, K, V, S> { /// assert_eq!(entry.key(), &"horseyland"); /// ``` #[cfg_attr(feature = "inline-more", inline)] - pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S> + pub fn insert(self, value: V) -> OccupiedEntry<'a, K, V, S, A> where K: Hash, S: BuildHasher, @@ -2276,7 +2370,7 @@ impl<'a, K, V, S> Entry<'a, K, V, S> { } } -impl<'a, K, V: Default, S> Entry<'a, K, V, S> { +impl<'a, K, V: Default, S, A: AllocRef + Clone> Entry<'a, K, V, S, A> { /// Ensures a value is in the entry by inserting the default value if empty, /// and returns a mutable reference to the value in the entry. /// @@ -2305,7 +2399,7 @@ impl<'a, K, V: Default, S> Entry<'a, K, V, S> { } } -impl<'a, K, V, S> OccupiedEntry<'a, K, V, S> { +impl<'a, K, V, S, A: AllocRef + Clone> OccupiedEntry<'a, K, V, S, A> { /// Gets a reference to the key in the entry. /// /// # Examples @@ -2535,7 +2629,7 @@ impl<'a, K, V, S> OccupiedEntry<'a, K, V, S> { } } -impl<'a, K, V, S> VacantEntry<'a, K, V, S> { +impl<'a, K, V, S, A: AllocRef + Clone> VacantEntry<'a, K, V, S, A> { /// Gets a reference to the key that would be used when inserting a value /// through the `VacantEntry`. /// @@ -2601,7 +2695,7 @@ impl<'a, K, V, S> VacantEntry<'a, K, V, S> { } #[cfg_attr(feature = "inline-more", inline)] - fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S> + fn insert_entry(self, value: V) -> OccupiedEntry<'a, K, V, S, A> where K: Hash, S: BuildHasher, @@ -2618,15 +2712,16 @@ impl<'a, K, V, S> VacantEntry<'a, K, V, S> { } } -impl FromIterator<(K, V)> for HashMap +impl FromIterator<(K, V)> for HashMap where K: Eq + Hash, S: BuildHasher + Default, + A: Default + AllocRef + Clone, { #[cfg_attr(feature = "inline-more", inline)] fn from_iter>(iter: T) -> Self { let iter = iter.into_iter(); - let mut map = Self::with_capacity_and_hasher(iter.size_hint().0, S::default()); + let mut map = Self::with_capacity_and_hasher_in(iter.size_hint().0, S::default(), A::default()); iter.for_each(|(k, v)| { map.insert(k, v); }); @@ -2634,10 +2729,11 @@ where } } -impl Extend<(K, V)> for HashMap +impl Extend<(K, V)> for HashMap where K: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { #[cfg_attr(feature = "inline-more", inline)] fn extend>(&mut self, iter: T) { @@ -2658,11 +2754,12 @@ where } } -impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap +impl<'a, K, V, S, A> Extend<(&'a K, &'a V)> for HashMap where K: Eq + Hash + Copy, V: Copy, S: BuildHasher, + A: AllocRef + Clone, { #[cfg_attr(feature = "inline-more", inline)] fn extend>(&mut self, iter: T) { @@ -2684,10 +2781,10 @@ fn assert_covariance() { fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> { v } - fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> { + fn into_iter_key<'new, A: AllocRef + Clone>(v: IntoIter<&'static str, u8, A>) -> IntoIter<&'new str, u8, A> { v } - fn into_iter_val<'new>(v: IntoIter) -> IntoIter { + fn into_iter_val<'new, A: AllocRef + Clone>(v: IntoIter) -> IntoIter { v } fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> { diff --git a/src/raw/alloc.rs b/src/raw/alloc.rs index d0291e713a..72401f9b7f 100644 --- a/src/raw/alloc.rs +++ b/src/raw/alloc.rs @@ -2,7 +2,7 @@ pub use self::inner::*; #[cfg(feature = "nightly")] mod inner { - pub use crate::alloc::alloc::{Alloc, Global}; + pub use crate::alloc::alloc::{AllocRef, Global}; } #[cfg(not(feature = "nightly"))] @@ -10,14 +10,14 @@ mod inner { use 
crate::alloc::alloc::{alloc, dealloc, Layout}; use core::ptr::NonNull; - pub trait Alloc { + pub trait AllocRef { unsafe fn alloc(&mut self, layout: Layout) -> Result, ()>; unsafe fn dealloc(&mut self, ptr: NonNull, layout: Layout); } #[derive(Copy, Clone)] pub struct Global; - impl Alloc for Global { + impl AllocRef for Global { unsafe fn alloc(&mut self, layout: Layout) -> Result, ()> { NonNull::new(alloc(layout)).ok_or(()) } @@ -25,4 +25,9 @@ mod inner { dealloc(ptr.as_ptr(), layout) } } + impl Default for Global { + fn default() -> Self { + Global + } + } } diff --git a/src/raw/mod.rs b/src/raw/mod.rs index ec9b8c1f6e..99411a0b68 100644 --- a/src/raw/mod.rs +++ b/src/raw/mod.rs @@ -32,7 +32,7 @@ cfg_if! { } mod alloc; -pub use self::alloc::{Alloc, Global}; +pub use self::alloc::{AllocRef, Global}; mod bitmask; @@ -331,7 +331,7 @@ impl Bucket { } /// A raw hash table with an unsafe API. -pub struct RawTable { +pub struct RawTable { // Mask to get an index from a hash value. The value is one less than the // number of buckets in the table. bucket_mask: usize, @@ -354,7 +354,7 @@ pub struct RawTable { alloc: A, } -impl RawTable { +impl RawTable { /// Creates a new empty hash table without allocating any memory. /// /// In effect this returns a table with exactly 1 bucket. However we can @@ -986,10 +986,10 @@ impl RawTable { } } -unsafe impl Send for RawTable where T: Send {} -unsafe impl Sync for RawTable where T: Sync {} +unsafe impl Send for RawTable where T: Send {} +unsafe impl Sync for RawTable where T: Sync {} -impl Clone for RawTable { +impl Clone for RawTable { fn clone(&self) -> Self { if self.is_empty_singleton() { Self::new(self.alloc.clone()) @@ -1046,7 +1046,7 @@ impl Clone for RawTable { } #[cfg(feature = "nightly")] -unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for RawTable { +unsafe impl<#[may_dangle] T, A: AllocRef + Clone> Drop for RawTable { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { if !self.is_empty_singleton() { @@ -1062,7 +1062,7 @@ unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for RawTable { } } #[cfg(not(feature = "nightly"))] -impl Drop for RawTable { +impl Drop for RawTable { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { if !self.is_empty_singleton() { @@ -1078,7 +1078,7 @@ impl Drop for RawTable { } } -impl IntoIterator for RawTable { +impl IntoIterator for RawTable { type Item = T; type IntoIter = RawIntoIter; @@ -1280,25 +1280,25 @@ impl ExactSizeIterator for RawIter {} impl FusedIterator for RawIter {} /// Iterator which consumes a table and returns elements. 
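To see what the renamed stable shim asks of a caller-supplied allocator, here is a sketch implementing it for a hypothetical counting allocator. The type, the counter, and reaching the trait through `hashbrown::raw` under the `raw` feature are all assumptions; with the `nightly` feature the real `alloc::alloc::AllocRef` trait would have to be implemented instead.

```
use core::ptr::NonNull;
use core::sync::atomic::{AtomicUsize, Ordering};
use std::alloc::{alloc, dealloc, Layout};

use hashbrown::raw::AllocRef;

static LIVE: AtomicUsize = AtomicUsize::new(0);

// Hypothetical allocator: delegates to the global allocator while counting
// live allocations, so a test can assert the table frees what it grabs.
#[derive(Copy, Clone, Default)]
struct CountingAlloc;

impl AllocRef for CountingAlloc {
    unsafe fn alloc(&mut self, layout: Layout) -> Result<NonNull<u8>, ()> {
        LIVE.fetch_add(1, Ordering::Relaxed);
        NonNull::new(alloc(layout)).ok_or(())
    }
    unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
        LIVE.fetch_sub(1, Ordering::Relaxed);
        dealloc(ptr.as_ptr(), layout);
    }
}
```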
-pub struct RawIntoIter { +pub struct RawIntoIter { iter: RawIter, alloc: Option<(NonNull, Layout)>, marker: PhantomData, allocator: A, } -impl RawIntoIter { +impl RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] pub fn iter(&self) -> RawIter { self.iter.clone() } } -unsafe impl Send for RawIntoIter where T: Send {} -unsafe impl Sync for RawIntoIter where T: Sync {} +unsafe impl Send for RawIntoIter where T: Send {} +unsafe impl Sync for RawIntoIter where T: Sync {} #[cfg(feature = "nightly")] -unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for RawIntoIter { +unsafe impl<#[may_dangle] T, A: AllocRef + Clone> Drop for RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -1317,7 +1317,7 @@ unsafe impl<#[may_dangle] T, A: Alloc + Clone> Drop for RawIntoIter { } } #[cfg(not(feature = "nightly"))] -impl Drop for RawIntoIter { +impl Drop for RawIntoIter { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -1337,7 +1337,7 @@ impl Drop for RawIntoIter { } } -impl Iterator for RawIntoIter { +impl Iterator for RawIntoIter { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -1351,11 +1351,11 @@ impl Iterator for RawIntoIter { } } -impl ExactSizeIterator for RawIntoIter {} -impl FusedIterator for RawIntoIter {} +impl ExactSizeIterator for RawIntoIter {} +impl FusedIterator for RawIntoIter {} /// Iterator which consumes elements without freeing the table storage. -pub struct RawDrain<'a, T, A: Alloc + Clone> { +pub struct RawDrain<'a, T, A: AllocRef + Clone> { iter: RawIter, // The table is moved into the iterator for the duration of the drain. This @@ -1369,17 +1369,17 @@ pub struct RawDrain<'a, T, A: Alloc + Clone> { marker: PhantomData<&'a RawTable>, } -impl RawDrain<'_, T, A> { +impl RawDrain<'_, T, A> { #[cfg_attr(feature = "inline-more", inline)] pub fn iter(&self) -> RawIter { self.iter.clone() } } -unsafe impl Send for RawDrain<'_, T, A> where T: Send {} -unsafe impl Sync for RawDrain<'_, T, A> where T: Sync {} +unsafe impl Send for RawDrain<'_, T, A> where T: Send {} +unsafe impl Sync for RawDrain<'_, T, A> where T: Sync {} -impl Drop for RawDrain<'_, T, A> { +impl Drop for RawDrain<'_, T, A> { #[cfg_attr(feature = "inline-more", inline)] fn drop(&mut self) { unsafe { @@ -1402,7 +1402,7 @@ impl Drop for RawDrain<'_, T, A> { } } -impl Iterator for RawDrain<'_, T, A> { +impl Iterator for RawDrain<'_, T, A> { type Item = T; #[cfg_attr(feature = "inline-more", inline)] @@ -1419,5 +1419,5 @@ impl Iterator for RawDrain<'_, T, A> { } } -impl ExactSizeIterator for RawDrain<'_, T, A> {} -impl FusedIterator for RawDrain<'_, T, A> {} +impl ExactSizeIterator for RawDrain<'_, T, A> {} +impl FusedIterator for RawDrain<'_, T, A> {} diff --git a/src/set.rs b/src/set.rs index 537a10d770..bae13ba7df 100644 --- a/src/set.rs +++ b/src/set.rs @@ -5,6 +5,7 @@ use core::hash::{BuildHasher, Hash}; use core::iter::{Chain, FromIterator, FusedIterator}; use core::ops::{BitAnd, BitOr, BitXor, Sub}; +use crate::raw::{AllocRef, Global}; use super::map::{self, DefaultHashBuilder, HashMap, Keys}; // Future Optimization (FIXME!) 
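`RawDrain` moves the table into the iterator for the duration of the drain and restores the now-empty storage on drop, so draining removes the elements without releasing the buckets. That is observable through the public API:

```
use hashbrown::HashMap;

let mut map = HashMap::with_capacity(16);
map.insert(1, "a");
map.insert(2, "b");

let drained: Vec<(i32, &str)> = map.drain().collect();
assert_eq!(drained.len(), 2);

// The elements are gone, but the bucket storage is retained for reuse.
assert!(map.is_empty());
assert!(map.capacity() >= 16);
```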
@@ -112,8 +113,8 @@ use super::map::{self, DefaultHashBuilder, HashMap, Keys}; /// [`PartialEq`]: https://doc.rust-lang.org/std/cmp/trait.PartialEq.html /// [`RefCell`]: https://doc.rust-lang.org/std/cell/struct.RefCell.html #[derive(Clone)] -pub struct HashSet { - pub(crate) map: HashMap, +pub struct HashSet { + pub(crate) map: HashMap, } #[cfg(feature = "ahash")] @@ -156,7 +157,47 @@ impl HashSet { } } -impl HashSet { +#[cfg(feature = "ahash")] +impl HashSet { + /// Creates an empty `HashSet`. + /// + /// The hash set is initially created with a capacity of 0, so it will not allocate until it + /// is first inserted into. + /// + /// # Examples + /// + /// ``` + /// use hashbrown::HashSet; + /// let set: HashSet = HashSet::new(); + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn new_in(alloc: A) -> Self { + Self { + map: HashMap::new_in(alloc), + } + } + + /// Creates an empty `HashSet` with the specified capacity. + /// + /// The hash set will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash set will not allocate. + /// + /// # Examples + /// + /// ``` + /// use hashbrown::HashSet; + /// let set: HashSet = HashSet::with_capacity(10); + /// assert!(set.capacity() >= 10); + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn with_capacity_in(capacity: usize, alloc: A) -> Self { + Self { + map: HashMap::with_capacity_in(capacity, alloc), + } + } +} + +impl HashSet { /// Returns the number of elements the set can hold without reallocating. /// /// # Examples @@ -246,7 +287,7 @@ impl HashSet { /// assert!(set.is_empty()); /// ``` #[cfg_attr(feature = "inline-more", inline)] - pub fn drain(&mut self) -> Drain<'_, T> { + pub fn drain(&mut self) -> Drain<'_, T, A> { Drain { iter: self.map.drain(), } @@ -329,6 +370,68 @@ where map: HashMap::with_capacity_and_hasher(capacity, hasher), } } +} + +impl HashSet +where + T: Eq + Hash, + S: BuildHasher, + A: AllocRef + Clone, +{ + /// Creates a new empty hash set which will use the given hasher to hash + /// keys. + /// + /// The hash set is also created with the default initial capacity. + /// + /// Warning: `hasher` is normally randomly generated, and + /// is designed to allow `HashSet`s to be resistant to attacks that + /// cause many collisions and very poor performance. Setting it + /// manually using this function can expose a DoS attack vector. + /// + /// # Examples + /// + /// ``` + /// use hashbrown::HashSet; + /// use hashbrown::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut set = HashSet::with_hasher(s); + /// set.insert(2); + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn with_hasher_in(hasher: S, alloc: A) -> Self { + Self { + map: HashMap::with_hasher_in(hasher, alloc), + } + } + + /// Creates an empty `HashSet` with the specified capacity, using + /// `hasher` to hash the keys. + /// + /// The hash set will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash set will not allocate. + /// + /// Warning: `hasher` is normally randomly generated, and + /// is designed to allow `HashSet`s to be resistant to attacks that + /// cause many collisions and very poor performance. Setting it + /// manually using this function can expose a DoS attack vector. 
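As with the map, the set's `_in` constructors only thread the allocator through to the underlying `HashMap<T, (), S, A>`; a sketch with `Global` (again assuming the `raw` feature for the re-export):

```
use hashbrown::raw::Global;
use hashbrown::HashSet;

// Same behaviour as `HashSet::with_capacity(10)`, allocator made explicit.
let mut set = HashSet::with_capacity_in(10, Global);
set.insert(1);
assert!(set.capacity() >= 10);
```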
+ /// + /// # Examples + /// + /// ``` + /// use hashbrown::HashSet; + /// use hashbrown::hash_map::DefaultHashBuilder; + /// + /// let s = DefaultHashBuilder::default(); + /// let mut set = HashSet::with_capacity_and_hasher(10, s); + /// set.insert(1); + /// ``` + #[cfg_attr(feature = "inline-more", inline)] + pub fn with_capacity_and_hasher_in(capacity: usize, hasher: S, alloc: A) -> Self { + Self { + map: HashMap::with_capacity_and_hasher_in(capacity, hasher, alloc), + } + } /// Returns a reference to the set's [`BuildHasher`]. /// @@ -462,7 +565,7 @@ where /// assert_eq!(diff, [4].iter().collect()); /// ``` #[cfg_attr(feature = "inline-more", inline)] - pub fn difference<'a>(&'a self, other: &'a Self) -> Difference<'a, T, S> { + pub fn difference<'a>(&'a self, other: &'a Self) -> Difference<'a, T, S, A> { Difference { iter: self.iter(), other, @@ -491,7 +594,7 @@ where /// assert_eq!(diff1, [1, 4].iter().collect()); /// ``` #[cfg_attr(feature = "inline-more", inline)] - pub fn symmetric_difference<'a>(&'a self, other: &'a Self) -> SymmetricDifference<'a, T, S> { + pub fn symmetric_difference<'a>(&'a self, other: &'a Self) -> SymmetricDifference<'a, T, S, A> { SymmetricDifference { iter: self.difference(other).chain(other.difference(self)), } @@ -516,7 +619,7 @@ where /// assert_eq!(intersection, [2, 3].iter().collect()); /// ``` #[cfg_attr(feature = "inline-more", inline)] - pub fn intersection<'a>(&'a self, other: &'a Self) -> Intersection<'a, T, S> { + pub fn intersection<'a>(&'a self, other: &'a Self) -> Intersection<'a, T, S, A> { let (smaller, larger) = if self.len() <= other.len() { (self, other) } else { @@ -547,7 +650,7 @@ where /// assert_eq!(union, [1, 2, 3, 4].iter().collect()); /// ``` #[cfg_attr(feature = "inline-more", inline)] - pub fn union<'a>(&'a self, other: &'a Self) -> Union<'a, T, S> { + pub fn union<'a>(&'a self, other: &'a HashSet) -> Union<'a, T, S, A> { let (smaller, larger) = if self.len() <= other.len() { (self, other) } else { @@ -865,10 +968,11 @@ where } } -impl PartialEq for HashSet +impl PartialEq for HashSet where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { fn eq(&self, other: &Self) -> bool { if self.len() != other.len() { @@ -879,40 +983,44 @@ where } } -impl Eq for HashSet +impl Eq for HashSet where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { } -impl fmt::Debug for HashSet +impl fmt::Debug for HashSet where T: Eq + Hash + fmt::Debug, S: BuildHasher, + A: AllocRef + Clone, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_set().entries(self.iter()).finish() } } -impl FromIterator for HashSet +impl FromIterator for HashSet where T: Eq + Hash, S: BuildHasher + Default, + A: Default + AllocRef + Clone, { #[cfg_attr(feature = "inline-more", inline)] fn from_iter>(iter: I) -> Self { - let mut set = Self::with_hasher(Default::default()); + let mut set = Self::with_hasher_in(Default::default(), Default::default()); set.extend(iter); set } } -impl Extend for HashSet +impl Extend for HashSet where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { #[cfg_attr(feature = "inline-more", inline)] fn extend>(&mut self, iter: I) { @@ -920,10 +1028,11 @@ where } } -impl<'a, T, S> Extend<&'a T> for HashSet +impl<'a, T, S, A> Extend<&'a T> for HashSet where T: 'a + Eq + Hash + Copy, S: BuildHasher, + A: AllocRef + Clone, { #[cfg_attr(feature = "inline-more", inline)] fn extend>(&mut self, iter: I) { @@ -931,10 +1040,11 @@ where } } -impl Default for HashSet +impl Default for HashSet where T: Eq + Hash, S: BuildHasher + 
Default, + A: Default + AllocRef + Clone, { /// Creates an empty `HashSet` with the `Default` value for the hasher. #[cfg_attr(feature = "inline-more", inline)] @@ -945,10 +1055,11 @@ where } } -impl BitOr<&HashSet> for &HashSet +impl BitOr<&HashSet> for &HashSet where T: Eq + Hash + Clone, S: BuildHasher + Default, + A: AllocRef + Clone, { type Output = HashSet; @@ -972,15 +1083,16 @@ where /// } /// assert_eq!(i, expected.len()); /// ``` - fn bitor(self, rhs: &HashSet) -> HashSet { + fn bitor(self, rhs: &HashSet) -> HashSet { self.union(rhs).cloned().collect() } } -impl BitAnd<&HashSet> for &HashSet +impl BitAnd<&HashSet> for &HashSet where T: Eq + Hash + Clone, S: BuildHasher + Default, + A: AllocRef + Clone, { type Output = HashSet; @@ -1004,7 +1116,7 @@ where /// } /// assert_eq!(i, expected.len()); /// ``` - fn bitand(self, rhs: &HashSet) -> HashSet { + fn bitand(self, rhs: &HashSet) -> HashSet { self.intersection(rhs).cloned().collect() } } @@ -1091,8 +1203,8 @@ pub struct Iter<'a, K> { /// /// [`HashSet`]: struct.HashSet.html /// [`into_iter`]: struct.HashSet.html#method.into_iter -pub struct IntoIter { - iter: map::IntoIter, +pub struct IntoIter { + iter: map::IntoIter, } /// A draining iterator over the items of a `HashSet`. @@ -1102,8 +1214,8 @@ pub struct IntoIter { /// /// [`HashSet`]: struct.HashSet.html /// [`drain`]: struct.HashSet.html#method.drain -pub struct Drain<'a, K> { - iter: map::Drain<'a, K, ()>, +pub struct Drain<'a, K, A: AllocRef + Clone = Global> { + iter: map::Drain<'a, K, (), A>, } /// A lazy iterator producing elements in the intersection of `HashSet`s. @@ -1113,11 +1225,11 @@ pub struct Drain<'a, K> { /// /// [`HashSet`]: struct.HashSet.html /// [`intersection`]: struct.HashSet.html#method.intersection -pub struct Intersection<'a, T, S> { +pub struct Intersection<'a, T, S, A: AllocRef + Clone = Global> { // iterator of the first set iter: Iter<'a, T>, // the second set - other: &'a HashSet, + other: &'a HashSet, } /// A lazy iterator producing elements in the difference of `HashSet`s. @@ -1127,11 +1239,11 @@ pub struct Intersection<'a, T, S> { /// /// [`HashSet`]: struct.HashSet.html /// [`difference`]: struct.HashSet.html#method.difference -pub struct Difference<'a, T, S> { +pub struct Difference<'a, T, S, A: AllocRef + Clone = Global> { // iterator of the first set iter: Iter<'a, T>, // the second set - other: &'a HashSet, + other: &'a HashSet, } /// A lazy iterator producing elements in the symmetric difference of `HashSet`s. @@ -1141,8 +1253,8 @@ pub struct Difference<'a, T, S> { /// /// [`HashSet`]: struct.HashSet.html /// [`symmetric_difference`]: struct.HashSet.html#method.symmetric_difference -pub struct SymmetricDifference<'a, T, S> { - iter: Chain, Difference<'a, T, S>>, +pub struct SymmetricDifference<'a, T, S, A: AllocRef + Clone = Global> { + iter: Chain, Difference<'a, T, S, A>>, } /// A lazy iterator producing elements in the union of `HashSet`s. 
@@ -1152,11 +1264,11 @@ pub struct SymmetricDifference<'a, T, S> { /// /// [`HashSet`]: struct.HashSet.html /// [`union`]: struct.HashSet.html#method.union -pub struct Union<'a, T, S> { - iter: Chain, Difference<'a, T, S>>, +pub struct Union<'a, T, S, A: AllocRef + Clone = Global> { + iter: Chain, Difference<'a, T, S, A>>, } -impl<'a, T, S> IntoIterator for &'a HashSet { +impl<'a, T, S, A: AllocRef + Clone> IntoIterator for &'a HashSet { type Item = &'a T; type IntoIter = Iter<'a, T>; @@ -1166,9 +1278,9 @@ impl<'a, T, S> IntoIterator for &'a HashSet { } } -impl IntoIterator for HashSet { +impl IntoIterator for HashSet { type Item = T; - type IntoIter = IntoIter; + type IntoIter = IntoIter; /// Creates a consuming iterator, that is, one that moves each value out /// of the set in arbitrary order. The set cannot be used after calling @@ -1191,7 +1303,7 @@ impl IntoIterator for HashSet { /// } /// ``` #[cfg_attr(feature = "inline-more", inline)] - fn into_iter(self) -> IntoIter { + fn into_iter(self) -> IntoIter { IntoIter { iter: self.map.into_iter(), } @@ -1232,7 +1344,7 @@ impl fmt::Debug for Iter<'_, K> { } } -impl Iterator for IntoIter { +impl Iterator for IntoIter { type Item = K; #[cfg_attr(feature = "inline-more", inline)] @@ -1244,22 +1356,22 @@ impl Iterator for IntoIter { self.iter.size_hint() } } -impl ExactSizeIterator for IntoIter { +impl ExactSizeIterator for IntoIter { #[cfg_attr(feature = "inline-more", inline)] fn len(&self) -> usize { self.iter.len() } } -impl FusedIterator for IntoIter {} +impl FusedIterator for IntoIter {} -impl fmt::Debug for IntoIter { +impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let entries_iter = self.iter.iter().map(|(k, _)| k); f.debug_list().entries(entries_iter).finish() } } -impl Iterator for Drain<'_, K> { +impl Iterator for Drain<'_, K, A> { type Item = K; #[cfg_attr(feature = "inline-more", inline)] @@ -1271,22 +1383,22 @@ impl Iterator for Drain<'_, K> { self.iter.size_hint() } } -impl ExactSizeIterator for Drain<'_, K> { +impl ExactSizeIterator for Drain<'_, K, A> { #[cfg_attr(feature = "inline-more", inline)] fn len(&self) -> usize { self.iter.len() } } -impl FusedIterator for Drain<'_, K> {} +impl FusedIterator for Drain<'_, K, A> {} -impl fmt::Debug for Drain<'_, K> { +impl fmt::Debug for Drain<'_, K, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let entries_iter = self.iter.iter().map(|(k, _)| k); f.debug_list().entries(entries_iter).finish() } } -impl Clone for Intersection<'_, T, S> { +impl Clone for Intersection<'_, T, S, A> { #[cfg_attr(feature = "inline-more", inline)] fn clone(&self) -> Self { Intersection { @@ -1296,10 +1408,11 @@ impl Clone for Intersection<'_, T, S> { } } -impl<'a, T, S> Iterator for Intersection<'a, T, S> +impl<'a, T, S, A> Iterator for Intersection<'a, T, S, A> where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { type Item = &'a T; @@ -1320,24 +1433,26 @@ where } } -impl fmt::Debug for Intersection<'_, T, S> +impl fmt::Debug for Intersection<'_, T, S, A> where T: fmt::Debug + Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } -impl FusedIterator for Intersection<'_, T, S> +impl FusedIterator for Intersection<'_, T, S, A> where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { } -impl Clone for Difference<'_, T, S> { +impl Clone for Difference<'_, T, S, A> { #[cfg_attr(feature = "inline-more", inline)] fn clone(&self) -> Self { 
Difference { @@ -1347,10 +1462,11 @@ impl Clone for Difference<'_, T, S> { } } -impl<'a, T, S> Iterator for Difference<'a, T, S> +impl<'a, T, S, A> Iterator for Difference<'a, T, S, A> where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { type Item = &'a T; @@ -1371,24 +1487,26 @@ where } } -impl FusedIterator for Difference<'_, T, S> +impl FusedIterator for Difference<'_, T, S, A> where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { } -impl fmt::Debug for Difference<'_, T, S> +impl fmt::Debug for Difference<'_, T, S, A> where T: fmt::Debug + Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } -impl Clone for SymmetricDifference<'_, T, S> { +impl Clone for SymmetricDifference<'_, T, S, A> { #[cfg_attr(feature = "inline-more", inline)] fn clone(&self) -> Self { SymmetricDifference { @@ -1397,10 +1515,11 @@ impl Clone for SymmetricDifference<'_, T, S> { } } -impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S> +impl<'a, T, S, A> Iterator for SymmetricDifference<'a, T, S, A> where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { type Item = &'a T; @@ -1414,24 +1533,26 @@ where } } -impl FusedIterator for SymmetricDifference<'_, T, S> +impl FusedIterator for SymmetricDifference<'_, T, S, A> where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { } -impl fmt::Debug for SymmetricDifference<'_, T, S> +impl fmt::Debug for SymmetricDifference<'_, T, S, A> where T: fmt::Debug + Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } -impl Clone for Union<'_, T, S> { +impl Clone for Union<'_, T, S, A> { #[cfg_attr(feature = "inline-more", inline)] fn clone(&self) -> Self { Union { @@ -1440,27 +1561,30 @@ impl Clone for Union<'_, T, S> { } } -impl FusedIterator for Union<'_, T, S> +impl FusedIterator for Union<'_, T, S, A> where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { } -impl fmt::Debug for Union<'_, T, S> +impl fmt::Debug for Union<'_, T, S, A> where T: fmt::Debug + Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.clone()).finish() } } -impl<'a, T, S> Iterator for Union<'a, T, S> +impl<'a, T, S, A> Iterator for Union<'a, T, S, A> where T: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { type Item = &'a T; @@ -1482,30 +1606,30 @@ fn assert_covariance() { fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> { v } - fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> { + fn into_iter<'new, A: AllocRef + Clone>(v: IntoIter<&'static str, A>) -> IntoIter<&'new str, A> { v } - fn difference<'a, 'new>( - v: Difference<'a, &'static str, DefaultHashBuilder>, - ) -> Difference<'a, &'new str, DefaultHashBuilder> { + fn difference<'a, 'new, A: AllocRef + Clone>( + v: Difference<'a, &'static str, DefaultHashBuilder, A>, + ) -> Difference<'a, &'new str, DefaultHashBuilder, A> { v } - fn symmetric_difference<'a, 'new>( - v: SymmetricDifference<'a, &'static str, DefaultHashBuilder>, - ) -> SymmetricDifference<'a, &'new str, DefaultHashBuilder> { + fn symmetric_difference<'a, 'new, A: AllocRef + Clone>( + v: SymmetricDifference<'a, &'static str, DefaultHashBuilder, A>, + ) -> SymmetricDifference<'a, &'new str, DefaultHashBuilder, A> { v } - fn intersection<'a, 'new>( - v: Intersection<'a, &'static str, DefaultHashBuilder>, - ) -> 
Intersection<'a, &'new str, DefaultHashBuilder> { + fn intersection<'a, 'new, A: AllocRef + Clone>( + v: Intersection<'a, &'static str, DefaultHashBuilder, A>, + ) -> Intersection<'a, &'new str, DefaultHashBuilder, A> { v } - fn union<'a, 'new>( - v: Union<'a, &'static str, DefaultHashBuilder>, - ) -> Union<'a, &'new str, DefaultHashBuilder> { + fn union<'a, 'new, A: AllocRef + Clone>( + v: Union<'a, &'static str, DefaultHashBuilder, A>, + ) -> Union<'a, &'new str, DefaultHashBuilder, A> { v } - fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { + fn drain<'new, A: AllocRef + Clone>(d: Drain<'static, &'static str, A>) -> Drain<'new, &'new str, A> { d } } From 746f06e2fa11751f47c3248903157a87e77885c0 Mon Sep 17 00:00:00 2001 From: "Hans Elias B. Josephsen" Date: Fri, 28 Feb 2020 13:58:42 +0100 Subject: [PATCH 06/13] cargo fmt --- src/external_trait_impls/rayon/map.rs | 60 ++++++++++++++++++++------- src/external_trait_impls/rayon/set.rs | 10 +++-- src/map.rs | 18 +++++--- src/set.rs | 10 +++-- 4 files changed, 71 insertions(+), 27 deletions(-) diff --git a/src/external_trait_impls/rayon/map.rs b/src/external_trait_impls/rayon/map.rs index 23c82e0908..1824bd93aa 100644 --- a/src/external_trait_impls/rayon/map.rs +++ b/src/external_trait_impls/rayon/map.rs @@ -20,7 +20,9 @@ pub struct ParIter<'a, K, V, S, A: AllocRef + Clone = Global> { map: &'a HashMap, } -impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator for ParIter<'a, K, V, S, A> { +impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator + for ParIter<'a, K, V, S, A> +{ type Item = (&'a K, &'a V); #[cfg_attr(feature = "inline-more", inline)] @@ -46,7 +48,9 @@ impl Clone for ParIter<'_, K, V, S, A> { } } -impl fmt::Debug for ParIter<'_, K, V, S, A> { +impl fmt::Debug + for ParIter<'_, K, V, S, A> +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.iter().fmt(f) } @@ -63,7 +67,9 @@ pub struct ParKeys<'a, K, V, S, A: AllocRef + Clone> { map: &'a HashMap, } -impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator for ParKeys<'a, K, V, S, A> { +impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator + for ParKeys<'a, K, V, S, A> +{ type Item = &'a K; #[cfg_attr(feature = "inline-more", inline)] @@ -86,7 +92,9 @@ impl Clone for ParKeys<'_, K, V, S, A> { } } -impl fmt::Debug for ParKeys<'_, K, V, S, A> { +impl fmt::Debug + for ParKeys<'_, K, V, S, A> +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.keys().fmt(f) } @@ -103,7 +111,9 @@ pub struct ParValues<'a, K, V, S, A: AllocRef + Clone = Global> { map: &'a HashMap, } -impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator for ParValues<'a, K, V, S, A> { +impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> ParallelIterator + for ParValues<'a, K, V, S, A> +{ type Item = &'a V; #[cfg_attr(feature = "inline-more", inline)] @@ -126,7 +136,9 @@ impl Clone for ParValues<'_, K, V, S, A> { } } -impl fmt::Debug for ParValues<'_, K, V, S, A> { +impl fmt::Debug + for ParValues<'_, K, V, S, A> +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.values().fmt(f) } @@ -145,7 +157,9 @@ pub struct ParIterMut<'a, K, V, S, A: AllocRef + Clone> { map: &'a mut HashMap, } -impl<'a, K: Send + Sync, V: Send, S: Send, A: AllocRef + Clone + Sync> ParallelIterator for ParIterMut<'a, K, V, S, A> { +impl<'a, K: Send + Sync, V: Send, S: Send, A: AllocRef + Clone + Sync> ParallelIterator + for 
ParIterMut<'a, K, V, S, A> +{ type Item = (&'a K, &'a mut V); #[cfg_attr(feature = "inline-more", inline)] @@ -183,7 +197,9 @@ pub struct ParValuesMut<'a, K, V, S, A: AllocRef + Clone = Global> { map: &'a mut HashMap, } -impl<'a, K: Send, V: Send, S: Send, A: AllocRef + Clone + Send> ParallelIterator for ParValuesMut<'a, K, V, S, A> { +impl<'a, K: Send, V: Send, S: Send, A: AllocRef + Clone + Send> ParallelIterator + for ParValuesMut<'a, K, V, S, A> +{ type Item = &'a mut V; #[cfg_attr(feature = "inline-more", inline)] @@ -199,7 +215,9 @@ impl<'a, K: Send, V: Send, S: Send, A: AllocRef + Clone + Send> ParallelIterator } } -impl fmt::Debug for ParValuesMut<'_, K, V, S, A> { +impl fmt::Debug + for ParValuesMut<'_, K, V, S, A> +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.values().fmt(f) } @@ -218,7 +236,9 @@ pub struct IntoParIter { map: HashMap, } -impl ParallelIterator for IntoParIter { +impl ParallelIterator + for IntoParIter +{ type Item = (K, V); #[cfg_attr(feature = "inline-more", inline)] @@ -230,7 +250,9 @@ impl ParallelIterator for } } -impl fmt::Debug for IntoParIter { +impl fmt::Debug + for IntoParIter +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.map.iter().fmt(f) } @@ -247,7 +269,9 @@ pub struct ParDrain<'a, K, V, S, A: AllocRef + Clone = Global> { map: &'a mut HashMap, } -impl ParallelIterator for ParDrain<'_, K, V, S, A> { +impl ParallelIterator + for ParDrain<'_, K, V, S, A> +{ type Item = (K, V); #[cfg_attr(feature = "inline-more", inline)] @@ -315,7 +339,9 @@ where } } -impl IntoParallelIterator for HashMap { +impl IntoParallelIterator + for HashMap +{ type Item = (K, V); type Iter = IntoParIter; @@ -325,7 +351,9 @@ impl IntoParallelIterator } } -impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> IntoParallelIterator for &'a HashMap { +impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> IntoParallelIterator + for &'a HashMap +{ type Item = (&'a K, &'a V); type Iter = ParIter<'a, K, V, S, A>; @@ -335,7 +363,9 @@ impl<'a, K: Sync, V: Sync, S: Sync, A: AllocRef + Clone + Sync> IntoParallelIter } } -impl<'a, K: Send + Sync, V: Send, S: Send, A: AllocRef + Clone + Sync> IntoParallelIterator for &'a mut HashMap { +impl<'a, K: Send + Sync, V: Send, S: Send, A: AllocRef + Clone + Sync> IntoParallelIterator + for &'a mut HashMap +{ type Item = (&'a K, &'a mut V); type Iter = ParIterMut<'a, K, V, S, A>; diff --git a/src/external_trait_impls/rayon/set.rs b/src/external_trait_impls/rayon/set.rs index 3bfbbf97e8..6865815406 100644 --- a/src/external_trait_impls/rayon/set.rs +++ b/src/external_trait_impls/rayon/set.rs @@ -45,7 +45,9 @@ pub struct ParDrain<'a, T, S, A: AllocRef + Clone = Global> { set: &'a mut HashSet, } -impl ParallelIterator for ParDrain<'_, T, S, A> { +impl ParallelIterator + for ParDrain<'_, T, S, A> +{ type Item = T; fn drive_unindexed(self, consumer: C) -> C::Result @@ -216,14 +218,12 @@ where T: Eq + Hash + Sync, S: BuildHasher + Sync, { - /// Visits (potentially in parallel) the values representing the union, /// i.e. all the values in `self` or `other`, without duplicates. 
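With hashbrown's `rayon` feature enabled, these impls make allocator-generic maps usable from rayon directly. A minimal sketch, assuming the `rayon` crate as a dependency:

```
use hashbrown::HashMap;
use rayon::iter::{IntoParallelIterator, ParallelIterator};

let map: HashMap<u32, u32> = (0..1000).map(|i| (i, i * 2)).collect();

// `&HashMap` is `IntoParallelIterator` for any `A: AllocRef + Clone + Sync`.
let sum: u32 = (&map).into_par_iter().map(|(_, v)| *v).sum();
assert_eq!(sum, 999 * 1000); // 2 * (0 + 1 + ... + 999)
```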
#[cfg_attr(feature = "inline-more", inline)] pub fn par_union<'a>(&'a self, other: &'a Self) -> ParUnion<'a, T, S> { ParUnion { a: self, b: other } } - } impl HashSet @@ -317,7 +317,9 @@ impl IntoParallelIterator for Hash } } -impl<'a, T: Sync, S: Sync, A: AllocRef + Clone + Sync> IntoParallelIterator for &'a HashSet { +impl<'a, T: Sync, S: Sync, A: AllocRef + Clone + Sync> IntoParallelIterator + for &'a HashSet +{ type Item = &'a T; type Iter = ParIter<'a, T, S, A>; diff --git a/src/map.rs b/src/map.rs index e5d7bd84d2..bc6fa8dc36 100644 --- a/src/map.rs +++ b/src/map.rs @@ -1,4 +1,4 @@ -use crate::raw::{Bucket, AllocRef, Global, RawDrain, RawIntoIter, RawIter, RawTable}; +use crate::raw::{AllocRef, Bucket, Global, RawDrain, RawIntoIter, RawIter, RawTable}; use crate::CollectionAllocErr; use core::borrow::Borrow; use core::fmt::{self, Debug}; @@ -1932,7 +1932,10 @@ impl Debug for RawEntryBuilder<'_, K, V, S, A> { /// /// [`HashMap`]: struct.HashMap.html /// [`entry`]: struct.HashMap.html#method.entry -pub enum Entry<'a, K, V, S, A> where A: AllocRef + Clone { +pub enum Entry<'a, K, V, S, A> +where + A: AllocRef + Clone, +{ /// An occupied entry. Occupied(OccupiedEntry<'a, K, V, S, A>), @@ -2721,7 +2724,8 @@ where #[cfg_attr(feature = "inline-more", inline)] fn from_iter>(iter: T) -> Self { let iter = iter.into_iter(); - let mut map = Self::with_capacity_and_hasher_in(iter.size_hint().0, S::default(), A::default()); + let mut map = + Self::with_capacity_and_hasher_in(iter.size_hint().0, S::default(), A::default()); iter.for_each(|(k, v)| { map.insert(k, v); }); @@ -2781,10 +2785,14 @@ fn assert_covariance() { fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> { v } - fn into_iter_key<'new, A: AllocRef + Clone>(v: IntoIter<&'static str, u8, A>) -> IntoIter<&'new str, u8, A> { + fn into_iter_key<'new, A: AllocRef + Clone>( + v: IntoIter<&'static str, u8, A>, + ) -> IntoIter<&'new str, u8, A> { v } - fn into_iter_val<'new, A: AllocRef + Clone>(v: IntoIter) -> IntoIter { + fn into_iter_val<'new, A: AllocRef + Clone>( + v: IntoIter, + ) -> IntoIter { v } fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> { diff --git a/src/set.rs b/src/set.rs index bae13ba7df..25a1c9df70 100644 --- a/src/set.rs +++ b/src/set.rs @@ -5,8 +5,8 @@ use core::hash::{BuildHasher, Hash}; use core::iter::{Chain, FromIterator, FusedIterator}; use core::ops::{BitAnd, BitOr, BitXor, Sub}; -use crate::raw::{AllocRef, Global}; use super::map::{self, DefaultHashBuilder, HashMap, Keys}; +use crate::raw::{AllocRef, Global}; // Future Optimization (FIXME!) // ============================= @@ -1606,7 +1606,9 @@ fn assert_covariance() { fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> { v } - fn into_iter<'new, A: AllocRef + Clone>(v: IntoIter<&'static str, A>) -> IntoIter<&'new str, A> { + fn into_iter<'new, A: AllocRef + Clone>( + v: IntoIter<&'static str, A>, + ) -> IntoIter<&'new str, A> { v } fn difference<'a, 'new, A: AllocRef + Clone>( @@ -1629,7 +1631,9 @@ fn assert_covariance() { ) -> Union<'a, &'new str, DefaultHashBuilder, A> { v } - fn drain<'new, A: AllocRef + Clone>(d: Drain<'static, &'static str, A>) -> Drain<'new, &'new str, A> { + fn drain<'new, A: AllocRef + Clone>( + d: Drain<'static, &'static str, A>, + ) -> Drain<'new, &'new str, A> { d } } From a96b8df62bfa1cf6a0be99c1858d6cdd22722097 Mon Sep 17 00:00:00 2001 From: "Hans Elias B. 
Josephsen" Date: Fri, 28 Feb 2020 14:58:08 +0100 Subject: [PATCH 07/13] fix clippy errors --- src/map.rs | 2 +- src/raw/mod.rs | 10 ++++------ 2 files changed, 5 insertions(+), 7 deletions(-) diff --git a/src/map.rs b/src/map.rs index bc6fa8dc36..58b3548528 100644 --- a/src/map.rs +++ b/src/map.rs @@ -246,7 +246,7 @@ impl HashMap { /// is first inserted into. #[cfg_attr(feature = "inline-more", inline)] pub fn new_in(alloc: A) -> Self { - Self::with_hasher_in(Default::default(), alloc) + Self::with_hasher_in(DefaultHashBuilder::default(), alloc) } /// Creates an empty `HashMap` with the specified capacity using the given allocator. diff --git a/src/raw/mod.rs b/src/raw/mod.rs index 99411a0b68..d5fe49e8c6 100644 --- a/src/raw/mod.rs +++ b/src/raw/mod.rs @@ -76,20 +76,18 @@ impl Fallibility { /// Error to return on capacity overflow. #[cfg_attr(feature = "inline-more", inline)] fn capacity_overflow(self) -> CollectionAllocErr { - use Fallibility::*; match self { - Fallible => CollectionAllocErr::CapacityOverflow, - Infallible => panic!("Hash table capacity overflow"), + Fallibility::Fallible => CollectionAllocErr::CapacityOverflow, + Fallibility::Infallible => panic!("Hash table capacity overflow"), } } /// Error to return on allocation error. #[cfg_attr(feature = "inline-more", inline)] fn alloc_err(self, layout: Layout) -> CollectionAllocErr { - use Fallibility::*; match self { - Fallible => CollectionAllocErr::AllocErr { layout }, - Infallible => handle_alloc_error(layout), + Fallibility::Fallible => CollectionAllocErr::AllocErr { layout }, + Fallibility::Infallible => handle_alloc_error(layout), } } } From 0b7e111d207d2cab22429772c59335082c6b488a Mon Sep 17 00:00:00 2001 From: "Hans Elias B. Josephsen" Date: Sun, 1 Mar 2020 00:26:49 +0100 Subject: [PATCH 08/13] Parametrize missing index implementation --- src/map.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/map.rs b/src/map.rs index 58b3548528..15db599fd1 100644 --- a/src/map.rs +++ b/src/map.rs @@ -1175,11 +1175,12 @@ where } } -impl Index<&Q> for HashMap +impl Index<&Q> for HashMap where K: Eq + Hash + Borrow, Q: Eq + Hash, S: BuildHasher, + A: AllocRef + Clone, { type Output = V; From b61d9c2acaf0ff93afbe7b0d284449d6f3ca57da Mon Sep 17 00:00:00 2001 From: "Hans Elias B. Josephsen" Date: Mon, 12 Oct 2020 12:23:17 +0200 Subject: [PATCH 09/13] Fix nightly feature --- src/raw/alloc.rs | 350 +++-------------------------------------------- src/raw/mod.rs | 2 +- 2 files changed, 17 insertions(+), 335 deletions(-) diff --git a/src/raw/alloc.rs b/src/raw/alloc.rs index 26753e7b99..4055dad64d 100644 --- a/src/raw/alloc.rs +++ b/src/raw/alloc.rs @@ -2,354 +2,36 @@ pub use self::inner::*; #[cfg(feature = "nightly")] mod inner { - pub use crate::alloc::alloc::{AllocRef, Global}; + pub use crate::alloc::alloc::{AllocError, AllocRef, Global}; } #[cfg(not(feature = "nightly"))] mod inner { - use crate::alloc::alloc::{alloc, dealloc, AllocErr, Layout}; + use crate::alloc::alloc::{alloc, dealloc, Layout}; + use core::ptr; use core::ptr::NonNull; - #[derive(Debug, Copy, Clone)] - pub enum AllocInit { - Uninitialized, - } - - #[derive(Debug, Copy, Clone)] - pub struct MemoryBlock { - pub ptr: NonNull, - pub size: usize, - } + pub struct AllocError; - /// An implementation of `AllocRef` can allocate, grow, shrink, and deallocate arbitrary blocks of - /// data described via [`Layout`][]. 
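The `Fallibility` enum rewritten above is what routes an allocation failure either into a panic or into `CollectionAllocErr`; from the public side the fallible path is reachable through `try_reserve`, as the crate exports it at this point in the series:

```
use hashbrown::{CollectionAllocErr, HashMap};

let mut map: HashMap<u64, u64> = HashMap::new();

match map.try_reserve(1024) {
    Ok(()) => assert!(map.capacity() >= 1024),
    Err(CollectionAllocErr::CapacityOverflow) => unreachable!("1024 fits"),
    Err(CollectionAllocErr::AllocErr { layout }) => {
        // The error carries the layout that could not be satisfied.
        eprintln!("allocation of {} bytes failed", layout.size());
    }
}
```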
- /// - /// `AllocRef` is designed to be implemented on ZSTs, references, or smart pointers because having - /// an allocator like `MyAlloc([u8; N])` cannot be moved, without updating the pointers to the - /// allocated memory. - /// - /// Unlike [`GlobalAlloc`][], zero-sized allocations are allowed in `AllocRef`. If an underlying - /// allocator does not support this (like jemalloc) or return a null pointer (such as - /// `libc::malloc`), this must be caught by the implementation. - /// - /// ### Currently allocated memory - /// - /// Some of the methods require that a memory block be *currently allocated* via an allocator. This - /// means that: - /// - /// * the starting address for that memory block was previously returned by [`alloc`], [`grow`], or - /// [`shrink`], and - /// - /// * the memory block has not been subsequently deallocated, where blocks are either deallocated - /// directly by being passed to [`dealloc`] or were changed by being passed to [`grow`] or - /// [`shrink`] that returns `Ok`. If `grow` or `shrink` have returned `Err`, the passed pointer - /// remains valid. - /// - /// [`alloc`]: AllocRef::alloc - /// [`grow`]: AllocRef::grow - /// [`shrink`]: AllocRef::shrink - /// [`dealloc`]: AllocRef::dealloc - /// - /// ### Memory fitting - /// - /// Some of the methods require that a layout *fit* a memory block. What it means for a layout to - /// "fit" a memory block means (or equivalently, for a memory block to "fit" a layout) is that the - /// following conditions must hold: - /// - /// * The block must be allocated with the same alignment as [`layout.align()`], and - /// - /// * The provided [`layout.size()`] must fall in the range `min ..= max`, where: - /// - `min` is the size of the layout most recently used to allocate the block, and - /// - `max` is the latest actual size returned from [`alloc`], [`grow`], or [`shrink`]. - /// - /// [`layout.align()`]: Layout::align - /// [`layout.size()`]: Layout::size - /// - /// # Safety - /// - /// * Memory blocks returned from an allocator must point to valid memory and retain their validity - /// until the instance and all of its clones are dropped, - /// - /// * cloning or moving the allocator must not invalidate memory blocks returned from this - /// allocator. A cloned allocator must behave like the same allocator, and - /// - /// * any pointer to a memory block which is [*currently allocated*] may be passed to any other - /// method of the allocator. - /// - /// [*currently allocated*]: #currently-allocated-memory - #[unstable(feature = "allocator_api", issue = "32838")] pub unsafe trait AllocRef { - /// Attempts to allocate a block of memory. - /// - /// On success, returns a [`NonNull<[u8]>`] meeting the size and alignment guarantees of `layout`. - /// - /// The returned block may have a larger size than specified by `layout.size()`, and may or may - /// not have its contents initialized. - /// - /// [`NonNull<[u8]>`]: NonNull - /// - /// # Errors - /// - /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet - /// allocator's size or alignment constraints. - /// - /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or - /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement - /// this trait atop an underlying native allocation library that aborts on memory exhaustion.) 
-        ///
-        /// Clients wishing to abort computation in response to an allocation error are encouraged to
-        /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
-        ///
-        /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-        fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr>;
-
-        /// Behaves like `alloc`, but also ensures that the returned memory is zero-initialized.
-        ///
-        /// # Errors
-        ///
-        /// Returning `Err` indicates that either memory is exhausted or `layout` does not meet
-        /// allocator's size or alignment constraints.
-        ///
-        /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
-        /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
-        /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
-        ///
-        /// Clients wishing to abort computation in response to an allocation error are encouraged to
-        /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
-        ///
-        /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-        fn alloc_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocErr> {
-            let ptr = self.alloc(layout)?;
-            // SAFETY: `alloc` returns a valid memory block
-            unsafe { ptr.as_non_null_ptr().as_ptr().write_bytes(0, ptr.len()) }
-            Ok(ptr)
-        }
-
-        /// Deallocates the memory referenced by `ptr`.
-        ///
-        /// # Safety
-        ///
-        /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator, and
-        /// * `layout` must [*fit*] that block of memory.
-        ///
-        /// [*currently allocated*]: #currently-allocated-memory
-        /// [*fit*]: #memory-fitting
+        fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>;
         unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout);
-
-        /// Attempts to extend the memory block.
-        ///
-        /// Returns a new [`NonNull<[u8]>`] containing a pointer and the actual size of the allocated
-        /// memory. The pointer is suitable for holding data described by `new_layout`. To accomplish
-        /// this, the allocator may extend the allocation referenced by `ptr` to fit the new layout.
-        ///
-        /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
-        /// transferred to this allocator. The memory may or may not have been freed, and should be
-        /// considered unusable unless it was transferred back to the caller again via the return value
-        /// of this method.
-        ///
-        /// If this method returns `Err`, then ownership of the memory block has not been transferred to
-        /// this allocator, and the contents of the memory block are unaltered.
-        ///
-        /// [`NonNull<[u8]>`]: NonNull
-        ///
-        /// # Safety
-        ///
-        /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator.
-        /// * `old_layout` must [*fit*] that block of memory (The `new_layout` argument need not fit it.).
-        /// * `new_layout.size()` must be greater than or equal to `old_layout.size()`.
-        ///
-        /// [*currently allocated*]: #currently-allocated-memory
-        /// [*fit*]: #memory-fitting
-        ///
-        /// # Errors
-        ///
-        /// Returns `Err` if the new layout does not meet the allocator's size and alignment
-        /// constraints of the allocator, or if growing otherwise fails.
-        ///
-        /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
-        /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
-        /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
-        ///
-        /// Clients wishing to abort computation in response to an allocation error are encouraged to
-        /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
-        ///
-        /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-        unsafe fn grow(
-            &self,
-            ptr: NonNull<u8>,
-            old_layout: Layout,
-            new_layout: Layout,
-        ) -> Result<NonNull<[u8]>, AllocErr> {
-            debug_assert!(
-                new_layout.size() >= old_layout.size(),
-                "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
-            );
-
-            let new_ptr = self.alloc(new_layout)?;
-
-            // SAFETY: because `new_layout.size()` must be greater than or equal to
-            // `old_layout.size()`, both the old and new memory allocation are valid for reads and
-            // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet
-            // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
-            // safe. The safety contract for `dealloc` must be upheld by the caller.
-            unsafe {
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_layout.size());
-                self.dealloc(ptr, old_layout);
-            }
-
-            Ok(new_ptr)
-        }
-
-        /// Behaves like `grow`, but also ensures that the new contents are set to zero before being
-        /// returned.
-        ///
-        /// The memory block will contain the following contents after a successful call to
-        /// `grow_zeroed`:
-        ///   * Bytes `0..old_layout.size()` are preserved from the original allocation.
-        ///   * Bytes `old_layout.size()..old_size` will either be preserved or zeroed, depending on
-        ///     the allocator implementation. `old_size` refers to the size of the memory block prior
-        ///     to the `grow_zeroed` call, which may be larger than the size that was originally
-        ///     requested when it was allocated.
-        ///   * Bytes `old_size..new_size` are zeroed. `new_size` refers to the size of the memory
-        ///     block returned by the `grow_zeroed` call.
-        ///
-        /// # Safety
-        ///
-        /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator.
-        /// * `old_layout` must [*fit*] that block of memory (The `new_layout` argument need not fit it.).
-        /// * `new_layout.size()` must be greater than or equal to `old_layout.size()`.
-        ///
-        /// [*currently allocated*]: #currently-allocated-memory
-        /// [*fit*]: #memory-fitting
-        ///
-        /// # Errors
-        ///
-        /// Returns `Err` if the new layout does not meet the allocator's size and alignment
-        /// constraints of the allocator, or if growing otherwise fails.
-        ///
-        /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
-        /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
-        /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
-        ///
-        /// Clients wishing to abort computation in response to an allocation error are encouraged to
-        /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
-        ///
-        /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-        unsafe fn grow_zeroed(
-            &self,
-            ptr: NonNull<u8>,
-            old_layout: Layout,
-            new_layout: Layout,
-        ) -> Result<NonNull<[u8]>, AllocErr> {
-            debug_assert!(
-                new_layout.size() >= old_layout.size(),
-                "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
-            );
-
-            let new_ptr = self.alloc_zeroed(new_layout)?;
-
-            // SAFETY: because `new_layout.size()` must be greater than or equal to
-            // `old_layout.size()`, both the old and new memory allocation are valid for reads and
-            // writes for `old_layout.size()` bytes. Also, because the old allocation wasn't yet
-            // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
-            // safe. The safety contract for `dealloc` must be upheld by the caller.
-            unsafe {
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_layout.size());
-                self.dealloc(ptr, old_layout);
-            }
-
-            Ok(new_ptr)
-        }
-
-        /// Attempts to shrink the memory block.
-        ///
-        /// Returns a new [`NonNull<[u8]>`] containing a pointer and the actual size of the allocated
-        /// memory. The pointer is suitable for holding data described by `new_layout`. To accomplish
-        /// this, the allocator may shrink the allocation referenced by `ptr` to fit the new layout.
-        ///
-        /// If this returns `Ok`, then ownership of the memory block referenced by `ptr` has been
-        /// transferred to this allocator. The memory may or may not have been freed, and should be
-        /// considered unusable unless it was transferred back to the caller again via the return value
-        /// of this method.
-        ///
-        /// If this method returns `Err`, then ownership of the memory block has not been transferred to
-        /// this allocator, and the contents of the memory block are unaltered.
-        ///
-        /// [`NonNull<[u8]>`]: NonNull
-        ///
-        /// # Safety
-        ///
-        /// * `ptr` must denote a block of memory [*currently allocated*] via this allocator.
-        /// * `old_layout` must [*fit*] that block of memory (The `new_layout` argument need not fit it.).
-        /// * `new_layout.size()` must be smaller than or equal to `old_layout.size()`.
-        ///
-        /// [*currently allocated*]: #currently-allocated-memory
-        /// [*fit*]: #memory-fitting
-        ///
-        /// # Errors
-        ///
-        /// Returns `Err` if the new layout does not meet the allocator's size and alignment
-        /// constraints of the allocator, or if shrinking otherwise fails.
-        ///
-        /// Implementations are encouraged to return `Err` on memory exhaustion rather than panicking or
-        /// aborting, but this is not a strict requirement. (Specifically: it is *legal* to implement
-        /// this trait atop an underlying native allocation library that aborts on memory exhaustion.)
-        ///
-        /// Clients wishing to abort computation in response to an allocation error are encouraged to
-        /// call the [`handle_alloc_error`] function, rather than directly invoking `panic!` or similar.
-        ///
-        /// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
-        unsafe fn shrink(
-            &self,
-            ptr: NonNull<u8>,
-            old_layout: Layout,
-            new_layout: Layout,
-        ) -> Result<NonNull<[u8]>, AllocErr> {
-            debug_assert!(
-                new_layout.size() <= old_layout.size(),
-                "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
-            );
-
-            let new_ptr = self.alloc(new_layout)?;
-
-            // SAFETY: because `new_layout.size()` must be lower than or equal to
-            // `old_layout.size()`, both the old and new memory allocation are valid for reads and
-            // writes for `new_layout.size()` bytes. Also, because the old allocation wasn't yet
-            // deallocated, it cannot overlap `new_ptr`. Thus, the call to `copy_nonoverlapping` is
-            // safe. The safety contract for `dealloc` must be upheld by the caller.
-            unsafe {
-                ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_layout.size());
-                self.dealloc(ptr, old_layout);
-            }
-
-            Ok(new_ptr)
-        }
-
-        /// Creates a "by reference" adaptor for this instance of `AllocRef`.
-        ///
-        /// The returned adaptor also implements `AllocRef` and will simply borrow this.
-        #[inline(always)]
-        fn by_ref(&self) -> &Self {
-            self
-        }
-    }
-
-    pub trait AllocRef {
-        unsafe fn alloc(&mut self, layout: Layout, init: AllocInit) -> Result<MemoryBlock, ()>;
-        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout);
     }

     #[derive(Copy, Clone)]
     pub struct Global;
-    impl AllocRef for Global {
-        unsafe fn alloc(&mut self, layout: Layout, _init: AllocInit) -> Result<MemoryBlock, ()> {
-            let ptr = NonNull::new(alloc(layout)).ok_or(())?;
-            Ok(MemoryBlock {
-                ptr,
-                size: layout.size(),
-            })
+    unsafe impl AllocRef for Global {
+        fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+            unsafe {
+                let ptr = alloc(layout);
+                if ptr.is_null() {
+                    return Err(AllocError);
+                }
+                let slice = ptr::slice_from_raw_parts_mut(ptr, layout.size());
+                Ok(NonNull::new_unchecked(slice))
+            }
         }
-        unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
+        unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
             dealloc(ptr.as_ptr(), layout)
         }
     }
diff --git a/src/raw/mod.rs b/src/raw/mod.rs
index 40e11c57c9..0269816191 100644
--- a/src/raw/mod.rs
+++ b/src/raw/mod.rs
@@ -424,7 +424,7 @@ impl<T, A: AllocRef + Clone> RawTable<T, A> {
     /// The control bytes are left uninitialized.
     #[cfg_attr(feature = "inline-more", inline)]
     unsafe fn new_uninitialized(
-        mut alloc: A,
+        alloc: A,
         buckets: usize,
         fallability: Fallibility,
     ) -> Result<Self, CollectionAllocErr> {

From ca14ee7e5a7c91ee17f798eb0a1e8f18e66ba5b8 Mon Sep 17 00:00:00 2001
From: "Hans Elias B. Josephsen"
Date: Mon, 12 Oct 2020 14:28:55 +0200
Subject: [PATCH 10/13] Fix for rust 1.36.0

---
 src/lib.rs       |  3 ++-
 src/raw/alloc.rs | 29 +++++++++++++++++------------
 src/raw/mod.rs   |  4 ++--
 3 files changed, 21 insertions(+), 15 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index a020637f35..8e65a50fb0 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -18,7 +18,8 @@
         dropck_eyepatch,
         min_specialization,
         extend_one,
-        allocator_api
+        allocator_api,
+        slice_ptr_get
     )
 )]
 #![allow(
diff --git a/src/raw/alloc.rs b/src/raw/alloc.rs
index 4055dad64d..0073d1d0f8 100644
--- a/src/raw/alloc.rs
+++ b/src/raw/alloc.rs
@@ -2,34 +2,35 @@ pub use self::inner::*;

 #[cfg(feature = "nightly")]
 mod inner {
-    pub use crate::alloc::alloc::{AllocError, AllocRef, Global};
+    use crate::alloc::alloc::Layout;
+    pub use crate::alloc::alloc::{AllocRef, Global};
+    use core::ptr::NonNull;
+
+    pub fn do_alloc<A: AllocRef>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
+        alloc
+            .alloc(layout)
+            .map(|ptr| ptr.as_non_null_ptr())
+            .map_err(|_| ())
+    }
 }

 #[cfg(not(feature = "nightly"))]
 mod inner {
     use crate::alloc::alloc::{alloc, dealloc, Layout};
-    use core::ptr;
     use core::ptr::NonNull;

     pub struct AllocError;

     pub unsafe trait AllocRef {
-        fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError>;
+        fn alloc(&self, layout: Layout) -> Result<NonNull<u8>, AllocError>;
         unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout);
     }

     #[derive(Copy, Clone)]
     pub struct Global;
     unsafe impl AllocRef for Global {
-        fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
-            unsafe {
-                let ptr = alloc(layout);
-                if ptr.is_null() {
-                    return Err(AllocError);
-                }
-                let slice = ptr::slice_from_raw_parts_mut(ptr, layout.size());
-                Ok(NonNull::new_unchecked(slice))
-            }
+        fn alloc(&self, layout: Layout) -> Result<NonNull<u8>, AllocError> {
+            unsafe { NonNull::new(alloc(layout)).ok_or(AllocError) }
         }
         unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
             dealloc(ptr.as_ptr(), layout)
@@ -40,4 +41,8 @@ mod inner {
             Global
         }
     }
+
+    pub fn do_alloc<A: AllocRef>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
+        alloc.alloc(layout).map_err(|_| ())
+    }
 }
diff --git a/src/raw/mod.rs b/src/raw/mod.rs
index 0269816191..846dd82685 100644
--- a/src/raw/mod.rs
+++ b/src/raw/mod.rs
@@ -32,7 +32,7 @@ cfg_if! {
 }

 mod alloc;
-pub use self::alloc::{AllocRef, Global};
+pub use self::alloc::{do_alloc, AllocRef, Global};

 mod bitmask;

@@ -435,7 +435,7 @@ impl<T, A: AllocRef + Clone> RawTable<T, A> {
             Some(lco) => lco,
             None => return Err(fallability.capacity_overflow()),
         };
-        let ptr: NonNull<u8> = match alloc.alloc(layout) {
+        let ptr: NonNull<u8> = match do_alloc(&alloc, layout) {
             Ok(block) => block.cast(),
             Err(_) => return Err(fallability.alloc_err(layout)),
         };

From e6b71794cd9b9d56a8d9026f93a2654694d24560 Mon Sep 17 00:00:00 2001
From: "Hans Elias B. Josephsen"
Date: Mon, 12 Oct 2020 17:40:40 +0200
Subject: [PATCH 11/13] ignore clippy warnings

---
 src/raw/alloc.rs | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/raw/alloc.rs b/src/raw/alloc.rs
index 0073d1d0f8..dcc45649fc 100644
--- a/src/raw/alloc.rs
+++ b/src/raw/alloc.rs
@@ -6,6 +6,7 @@ mod inner {
     pub use crate::alloc::alloc::{AllocRef, Global};
     use core::ptr::NonNull;

+    #[allow(clippy::map_err_ignore)]
     pub fn do_alloc<A: AllocRef>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
         alloc
             .alloc(layout)
@@ -42,6 +43,7 @@ mod inner {
         }
     }

+    #[allow(clippy::map_err_ignore)]
     pub fn do_alloc<A: AllocRef>(alloc: &A, layout: Layout) -> Result<NonNull<u8>, ()> {
         alloc.alloc(layout).map_err(|_| ())
     }

From 3a97a8a72f2a5cd0c4e6e95f4cf99c66fe68e07c Mon Sep 17 00:00:00 2001
From: "Hans Elias B. Josephsen"
Date: Thu, 22 Oct 2020 11:15:57 +0200
Subject: [PATCH 12/13] Improve field naming in `RawIntoIter`

Rename `alloc` to `allocation` and `allocator` to `alloc` in
`RawIntoIter`
---
 src/external_trait_impls/rayon/raw.rs |  2 +-
 src/raw/mod.rs                        | 22 +++++++++++-----------
 2 files changed, 12 insertions(+), 12 deletions(-)

diff --git a/src/external_trait_impls/rayon/raw.rs b/src/external_trait_impls/rayon/raw.rs
index 9183cce156..5a2ac62342 100644
--- a/src/external_trait_impls/rayon/raw.rs
+++ b/src/external_trait_impls/rayon/raw.rs
@@ -73,7 +73,7 @@ impl<T: Send> ParallelIterator for RawIntoParIter<T> {
         C: UnindexedConsumer<Self::Item>,
     {
         let iter = unsafe { self.table.iter().iter };
-        let _guard = guard(self.table.into_alloc(), |alloc| {
+        let _guard = guard(self.table.into_allocation(), |alloc| {
             if let Some((ptr, layout)) = *alloc {
                 unsafe {
                     dealloc(ptr.as_ptr(), layout);
diff --git a/src/raw/mod.rs b/src/raw/mod.rs
index 846dd82685..716b1aea7a 100644
--- a/src/raw/mod.rs
+++ b/src/raw/mod.rs
@@ -1182,20 +1182,20 @@ impl<T, A: AllocRef + Clone> RawTable<T, A> {
     pub unsafe fn into_iter_from(self, iter: RawIter<T>) -> RawIntoIter<T, A> {
         debug_assert_eq!(iter.len(), self.len());

-        let allocator = self.alloc.clone();
-        let alloc = self.into_alloc();
+        let alloc = self.alloc.clone();
+        let allocation = self.into_allocation();
         RawIntoIter {
             iter,
-            alloc,
+            allocation,
             marker: PhantomData,
-            allocator,
+            alloc,
         }
     }

     /// Converts the table into a raw allocation. The contents of the table
     /// should be dropped using a `RawIter` before freeing the allocation.
     #[cfg_attr(feature = "inline-more", inline)]
-    pub(crate) fn into_alloc(self) -> Option<(NonNull<u8>, Layout)> {
+    pub(crate) fn into_allocation(self) -> Option<(NonNull<u8>, Layout)> {
         let alloc = if self.is_empty_singleton() {
             None
         } else {
@@ -1765,9 +1765,9 @@ impl<T> FusedIterator for RawIter<T> {}
 /// Iterator which consumes a table and returns elements.
 pub struct RawIntoIter<T, A: AllocRef + Clone = Global> {
     iter: RawIter<T>,
-    alloc: Option<(NonNull<u8>, Layout)>,
+    allocation: Option<(NonNull<u8>, Layout)>,
     marker: PhantomData<T>,
-    allocator: A,
+    alloc: A,
 }

 impl<T, A: AllocRef + Clone> RawIntoIter<T, A> {
@@ -1793,8 +1793,8 @@ unsafe impl<#[may_dangle] T, A: AllocRef + Clone> Drop for RawIntoIter<T, A> {
         }

         // Free the table
-        if let Some((ptr, layout)) = self.alloc {
-            self.allocator.dealloc(ptr, layout);
+        if let Some((ptr, layout)) = self.allocation {
+            self.alloc.dealloc(ptr, layout);
         }
     }
 }
@@ -1812,8 +1812,8 @@ impl<T, A: AllocRef + Clone> Drop for RawIntoIter<T, A> {
         }

         // Free the table
-        if let Some((ptr, layout)) = self.alloc {
-            self.allocator
+        if let Some((ptr, layout)) = self.allocation {
+            self.alloc
                 .dealloc(NonNull::new_unchecked(ptr.as_ptr()), layout);
         }
     }

From 89613b09535fb7ee46b3c60c85ba5641882f0f08 Mon Sep 17 00:00:00 2001
From: "Hans Elias B. Josephsen"
Date: Thu, 22 Oct 2020 11:41:16 +0200
Subject: [PATCH 13/13] Add allocator parameter to `rustc_entry`

---
 src/rustc_entry.rs | 54 ++++++++++++++++++++++++++++------------------
 1 file changed, 33 insertions(+), 21 deletions(-)

diff --git a/src/rustc_entry.rs b/src/rustc_entry.rs
index 2ec9920cfd..7290f9c484 100644
--- a/src/rustc_entry.rs
+++ b/src/rustc_entry.rs
@@ -1,14 +1,15 @@
 use self::RustcEntry::*;
 use crate::map::{make_hash, Drain, HashMap, IntoIter, Iter, IterMut};
-use crate::raw::{Bucket, Global, RawTable};
+use crate::raw::{AllocRef, Bucket, Global, RawTable};
 use core::fmt::{self, Debug};
 use core::hash::{BuildHasher, Hash};
 use core::mem;

-impl<K, V, S> HashMap<K, V, S>
+impl<K, V, S, A> HashMap<K, V, S, A>
 where
     K: Eq + Hash,
     S: BuildHasher,
+    A: AllocRef + Clone,
 {
     /// Gets the given key's corresponding entry in the map for in-place manipulation.
     ///
@@ -30,7 +31,7 @@ where
     ///     assert_eq!(letters.get(&'y'), None);
     /// ```
     #[cfg_attr(feature = "inline-more", inline)]
-    pub fn rustc_entry(&mut self, key: K) -> RustcEntry<'_, K, V> {
+    pub fn rustc_entry(&mut self, key: K) -> RustcEntry<'_, K, V, A> {
         let hash = make_hash(&self.hash_builder, &key);
         if let Some(elem) = self.table.find(hash, |q| q.0.eq(&key)) {
             RustcEntry::Occupied(RustcOccupiedEntry {
@@ -59,15 +60,18 @@ where
 ///
 /// [`HashMap`]: struct.HashMap.html
 /// [`entry`]: struct.HashMap.html#method.rustc_entry
-pub enum RustcEntry<'a, K, V> {
+pub enum RustcEntry<'a, K, V, A = Global>
+where
+    A: AllocRef + Clone,
+{
     /// An occupied entry.
-    Occupied(RustcOccupiedEntry<'a, K, V>),
+    Occupied(RustcOccupiedEntry<'a, K, V, A>),

     /// A vacant entry.
-    Vacant(RustcVacantEntry<'a, K, V>),
+    Vacant(RustcVacantEntry<'a, K, V, A>),
 }

-impl<K: Debug, V: Debug> Debug for RustcEntry<'_, K, V> {
+impl<K: Debug, V: Debug, A: AllocRef + Clone> Debug for RustcEntry<'_, K, V, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match *self {
             Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(),
@@ -80,26 +84,31 @@ impl<K: Debug, V: Debug> Debug for RustcEntry<'_, K, V> {
 /// It is part of the [`RustcEntry`] enum.
 ///
 /// [`RustcEntry`]: enum.RustcEntry.html
-pub struct RustcOccupiedEntry<'a, K, V> {
+pub struct RustcOccupiedEntry<'a, K, V, A = Global>
+where
+    A: AllocRef + Clone,
+{
     key: Option<K>,
     elem: Bucket<(K, V)>,
-    table: &'a mut RawTable<(K, V), Global>,
+    table: &'a mut RawTable<(K, V), A>,
 }

-unsafe impl<K, V> Send for RustcOccupiedEntry<'_, K, V>
+unsafe impl<K, V, A> Send for RustcOccupiedEntry<'_, K, V, A>
 where
     K: Send,
     V: Send,
+    A: AllocRef + Clone + Send,
 {
 }
-unsafe impl<K, V> Sync for RustcOccupiedEntry<'_, K, V>
+unsafe impl<K, V, A> Sync for RustcOccupiedEntry<'_, K, V, A>
 where
     K: Sync,
     V: Sync,
+    A: AllocRef + Clone + Sync,
 {
 }

-impl<K: Debug, V: Debug> Debug for RustcOccupiedEntry<'_, K, V> {
+impl<K: Debug, V: Debug, A: AllocRef + Clone> Debug for RustcOccupiedEntry<'_, K, V, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_struct("OccupiedEntry")
             .field("key", self.key())
@@ -112,19 +121,22 @@ impl<K: Debug, V: Debug> Debug for RustcOccupiedEntry<'_, K, V> {
 /// It is part of the [`RustcEntry`] enum.
 ///
 /// [`RustcEntry`]: enum.RustcEntry.html
-pub struct RustcVacantEntry<'a, K, V> {
+pub struct RustcVacantEntry<'a, K, V, A = Global>
+where
+    A: AllocRef + Clone,
+{
     hash: u64,
     key: K,
-    table: &'a mut RawTable<(K, V), Global>,
+    table: &'a mut RawTable<(K, V), A>,
 }

-impl<K: Debug, V> Debug for RustcVacantEntry<'_, K, V> {
+impl<K: Debug, V, A: AllocRef + Clone> Debug for RustcVacantEntry<'_, K, V, A> {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         f.debug_tuple("VacantEntry").field(self.key()).finish()
     }
 }

-impl<'a, K, V> RustcEntry<'a, K, V> {
+impl<'a, K, V, A: AllocRef + Clone> RustcEntry<'a, K, V, A> {
     /// Sets the value of the entry, and returns a RustcOccupiedEntry.
     ///
     /// # Examples
     ///
     /// ```
     /// use hashbrown::HashMap;
     /// let mut map: HashMap<&str, u32> = HashMap::new();
     /// let entry = map.rustc_entry("horseyland").insert(37);
     ///
     /// assert_eq!(entry.key(), &"horseyland");
     /// ```
-    pub fn insert(self, value: V) -> RustcOccupiedEntry<'a, K, V> {
+    pub fn insert(self, value: V) -> RustcOccupiedEntry<'a, K, V, A> {
         match self {
             Vacant(entry) => entry.insert_entry(value),
             Occupied(mut entry) => {
@@ -253,7 +265,7 @@ impl<'a, K, V> RustcEntry<'a, K, V> {
     }
 }

-impl<'a, K, V: Default> RustcEntry<'a, K, V> {
+impl<'a, K, V: Default, A: AllocRef + Clone> RustcEntry<'a, K, V, A> {
     /// Ensures a value is in the entry by inserting the default value if empty,
     /// and returns a mutable reference to the value in the entry.
     ///
@@ -281,7 +293,7 @@ impl<'a, K, V: Default> RustcEntry<'a, K, V> {
     }
 }

-impl<'a, K, V> RustcOccupiedEntry<'a, K, V> {
+impl<'a, K, V, A: AllocRef + Clone> RustcOccupiedEntry<'a, K, V, A> {
     /// Gets a reference to the key in the entry.
     ///
     /// # Examples
@@ -508,7 +520,7 @@ impl<'a, K, V> RustcOccupiedEntry<'a, K, V> {
     }
 }

-impl<'a, K, V> RustcVacantEntry<'a, K, V> {
+impl<'a, K, V, A: AllocRef + Clone> RustcVacantEntry<'a, K, V, A> {
     /// Gets a reference to the key that would be used when inserting a value
     /// through the `RustcVacantEntry`.
     ///
@@ -583,7 +595,7 @@ impl<'a, K, V> RustcVacantEntry<'a, K, V> {
     ///     }
     /// ```
     #[cfg_attr(feature = "inline-more", inline)]
-    pub fn insert_entry(self, value: V) -> RustcOccupiedEntry<'a, K, V> {
+    pub fn insert_entry(self, value: V) -> RustcOccupiedEntry<'a, K, V, A> {
         let bucket = self.table.insert_no_grow(self.hash, (self.key, value));
         RustcOccupiedEntry {
             key: None,
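
A minimal usage sketch of the allocator parameter this series introduces, assuming the stable (non-nightly) `AllocRef` shim as it stands after PATCH 10 and the `new_in` constructor from PATCH 07. The `CountingAlloc` type below is hypothetical, and the `hashbrown::raw::{AllocError, AllocRef}` import path is an assumption: the patches re-export `AllocRef` and `Global` from `src/raw/mod.rs`, but an `AllocError` re-export is not shown.

    use core::ptr::NonNull;
    use core::sync::atomic::{AtomicUsize, Ordering};
    use std::alloc::{alloc, dealloc, Layout};

    use hashbrown::raw::{AllocError, AllocRef}; // assumed re-export path
    use hashbrown::HashMap;

    // Bytes currently held by tables using this allocator.
    static LIVE: AtomicUsize = AtomicUsize::new(0);

    // Hypothetical allocator: delegates to the global allocator and counts live bytes.
    #[derive(Copy, Clone)]
    struct CountingAlloc;

    unsafe impl AllocRef for CountingAlloc {
        fn alloc(&self, layout: Layout) -> Result<NonNull<u8>, AllocError> {
            // Count only successful allocations; hashbrown never requests a zero-size layout.
            let ptr = NonNull::new(unsafe { alloc(layout) }).ok_or(AllocError)?;
            LIVE.fetch_add(layout.size(), Ordering::Relaxed);
            Ok(ptr)
        }

        unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
            LIVE.fetch_sub(layout.size(), Ordering::Relaxed);
            dealloc(ptr.as_ptr(), layout);
        }
    }

    fn main() {
        // PATCH 07's `new_in` picks `DefaultHashBuilder` for `S` and threads the
        // allocator through as the `A` parameter of the underlying `RawTable`.
        let mut map = HashMap::new_in(CountingAlloc);
        map.insert("key", 1u32);
        assert!(LIVE.load(Ordering::Relaxed) > 0);
        drop(map); // the table's single allocation comes back through `dealloc`
        assert_eq!(LIVE.load(Ordering::Relaxed), 0);
    }

With the `nightly` feature the shim module instead re-exports the standard `AllocRef` trait, so the same map would be driven by an implementation of that trait rather than the sketch above.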