diff --git a/library/alloc/src/collections/btree/append.rs b/library/alloc/src/collections/btree/append.rs index 5434ffbc3cb6..b6989afb6255 100644 --- a/library/alloc/src/collections/btree/append.rs +++ b/library/alloc/src/collections/btree/append.rs @@ -15,12 +15,12 @@ impl Root { /// a `BTreeMap`, both iterators should produce keys in strictly ascending /// order, each greater than all keys in the tree, including any keys /// already in the tree upon entry. - pub fn append_from_sorted_iters( + pub fn append_from_sorted_iters( &mut self, left: I, right: I, length: &mut usize, - alloc: &A, + alloc: A, ) where K: Ord, I: Iterator + FusedIterator, @@ -35,7 +35,7 @@ impl Root { /// Pushes all key-value pairs to the end of the tree, incrementing a /// `length` variable along the way. The latter makes it easier for the /// caller to avoid a leak when the iterator panicks. - pub fn bulk_push(&mut self, iter: I, length: &mut usize, alloc: &A) + pub fn bulk_push(&mut self, iter: I, length: &mut usize, alloc: A) where I: Iterator, { @@ -64,7 +64,7 @@ impl Root { } Err(_) => { // We are at the top, create a new root node and push there. - open_node = self.push_internal_level(alloc); + open_node = self.push_internal_level(alloc.clone()); break; } } @@ -72,9 +72,9 @@ impl Root { // Push key-value pair and new right subtree. let tree_height = open_node.height() - 1; - let mut right_tree = Root::new(alloc); + let mut right_tree = Root::new(alloc.clone()); for _ in 0..tree_height { - right_tree.push_internal_level(alloc); + right_tree.push_internal_level(alloc.clone()); } open_node.push(key, value, right_tree); diff --git a/library/alloc/src/collections/btree/fix.rs b/library/alloc/src/collections/btree/fix.rs index f139ab10f2c4..91b61218005a 100644 --- a/library/alloc/src/collections/btree/fix.rs +++ b/library/alloc/src/collections/btree/fix.rs @@ -7,9 +7,9 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { /// sibling. If successful but at the cost of shrinking the parent node, /// returns that shrunk parent node. Returns an `Err` if the node is /// an empty root. - fn fix_node_through_parent( + fn fix_node_through_parent( self, - alloc: &A, + alloc: A, ) -> Result, K, V, marker::Internal>>, Self> { let len = self.len(); if len >= MIN_LEN { @@ -54,9 +54,9 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { /// /// This method does not expect ancestors to already be underfull upon entry /// and panics if it encounters an empty ancestor. - pub fn fix_node_and_affected_ancestors(mut self, alloc: &A) -> bool { + pub fn fix_node_and_affected_ancestors(mut self, alloc: A) -> bool { loop { - match self.fix_node_through_parent(alloc) { + match self.fix_node_through_parent(alloc.clone()) { Ok(Some(parent)) => self = parent.forget_type(), Ok(None) => return true, Err(_) => return false, @@ -67,28 +67,28 @@ impl<'a, K: 'a, V: 'a> NodeRef, K, V, marker::LeafOrInternal> { impl Root { /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty. - pub fn fix_top(&mut self, alloc: &A) { + pub fn fix_top(&mut self, alloc: A) { while self.height() > 0 && self.len() == 0 { - self.pop_internal_level(alloc); + self.pop_internal_level(alloc.clone()); } } /// Stocks up or merge away any underfull nodes on the right border of the /// tree. The other nodes, those that are not the root nor a rightmost edge, /// must already have at least MIN_LEN elements. 
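Reviewer note: the pattern above recurs through the whole patch. Allocator parameters change from `&A` to `A` under an `A: Allocator + Clone` bound, and call sites such as `bulk_push` and `push_internal_level` clone the handle once per additional owner. A minimal sketch of the new calling convention (nightly `allocator_api`; `build_two` is a hypothetical helper, not from the patch):

    #![feature(allocator_api)]
    use std::alloc::{Allocator, Global};

    // Hypothetical helper: takes the allocator by value, cloning the handle
    // once per extra owner, which is exactly the convention the patch adopts.
    fn build_two<A: Allocator + Clone>(alloc: A) -> (Vec<u8, A>, Vec<u8, A>) {
        let left = Vec::new_in(alloc.clone()); // one owner keeps a clone...
        let right = Vec::new_in(alloc);        // ...the last use moves the handle
        (left, right)
    }

    fn main() {
        let (l, r) = build_two(Global); // `Global` is zero-sized, so cloning is free
        assert!(l.is_empty() && r.is_empty());
    }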
- pub fn fix_right_border(&mut self, alloc: &A) { - self.fix_top(alloc); + pub fn fix_right_border(&mut self, alloc: A) { + self.fix_top(alloc.clone()); if self.len() > 0 { - self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc); + self.borrow_mut().last_kv().fix_right_border_of_right_edge(alloc.clone()); self.fix_top(alloc); } } /// The symmetric clone of `fix_right_border`. - pub fn fix_left_border(&mut self, alloc: &A) { - self.fix_top(alloc); + pub fn fix_left_border(&mut self, alloc: A) { + self.fix_top(alloc.clone()); if self.len() > 0 { - self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc); + self.borrow_mut().first_kv().fix_left_border_of_left_edge(alloc.clone()); self.fix_top(alloc); } } @@ -115,16 +115,16 @@ impl Root { } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::LeafOrInternal>, marker::KV> { - fn fix_left_border_of_left_edge(mut self, alloc: &A) { + fn fix_left_border_of_left_edge(mut self, alloc: A) { while let Internal(internal_kv) = self.force() { - self = internal_kv.fix_left_child(alloc).first_kv(); + self = internal_kv.fix_left_child(alloc.clone()).first_kv(); debug_assert!(self.reborrow().into_node().len() > MIN_LEN); } } - fn fix_right_border_of_right_edge(mut self, alloc: &A) { + fn fix_right_border_of_right_edge(mut self, alloc: A) { while let Internal(internal_kv) = self.force() { - self = internal_kv.fix_right_child(alloc).last_kv(); + self = internal_kv.fix_right_child(alloc.clone()).last_kv(); debug_assert!(self.reborrow().into_node().len() > MIN_LEN); } } @@ -135,9 +135,9 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, /// provisions an extra element to allow merging its children in turn /// without becoming underfull. /// Returns the left child. - fn fix_left_child( + fn fix_left_child( self, - alloc: &A, + alloc: A, ) -> NodeRef, K, V, marker::LeafOrInternal> { let mut internal_kv = self.consider_for_balancing(); let left_len = internal_kv.left_child_len(); @@ -158,9 +158,9 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, /// provisions an extra element to allow merging its children in turn /// without becoming underfull. /// Returns wherever the right child ended up. - fn fix_right_child( + fn fix_right_child( self, - alloc: &A, + alloc: A, ) -> NodeRef, K, V, marker::LeafOrInternal> { let mut internal_kv = self.consider_for_balancing(); let right_len = internal_kv.right_child_len(); diff --git a/library/alloc/src/collections/btree/map.rs b/library/alloc/src/collections/btree/map.rs index f23980faa041..e1124a68750a 100644 --- a/library/alloc/src/collections/btree/map.rs +++ b/library/alloc/src/collections/btree/map.rs @@ -171,26 +171,27 @@ pub(super) const MIN_LEN: usize = node::MIN_LEN_AFTER_SPLIT; pub struct BTreeMap< K, V, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { root: Option>, length: usize, + /// `ManuallyDrop` to control drop order (needs to be dropped after all the nodes). 
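Reviewer note: the new doc comment above introduces the `ManuallyDrop<A>` field that follows. A toy sketch of the drop-order hazard it guards against (types are illustrative): struct fields drop in declaration order, so a plain `alloc: A` could be released before the nodes carved out of it, while `ManuallyDrop` defers the allocator to an explicit drop at the very end.

    use std::mem::ManuallyDrop;

    struct Tree<A> {
        nodes: Vec<u8>,         // stands in for heap nodes carved out of `alloc`
        alloc: ManuallyDrop<A>, // the compiler will not drop this field implicitly
    }

    impl<A> Drop for Tree<A> {
        fn drop(&mut self) {
            self.nodes = Vec::new(); // step 1: release everything allocated from `alloc`
            unsafe { ManuallyDrop::drop(&mut self.alloc) }; // step 2: only now drop the allocator
        }
    }

    fn main() {
        drop(Tree { nodes: vec![1, 2, 3], alloc: ManuallyDrop::new(()) });
    }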
pub(super) alloc: ManuallyDrop, } #[stable(feature = "btree_drop", since = "1.7.0")] -unsafe impl<#[may_dangle] K, #[may_dangle] V, A: Allocator> Drop for BTreeMap { +unsafe impl<#[may_dangle] K, #[may_dangle] V, A: Allocator + Clone> Drop for BTreeMap { fn drop(&mut self) { drop(unsafe { ptr::read(self) }.into_iter()) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Clone for BTreeMap { +impl Clone for BTreeMap { fn clone(&self) -> BTreeMap { - fn clone_subtree<'a, K: Clone, V: Clone, A: Clone + Allocator>( + fn clone_subtree<'a, K: Clone, V: Clone, A: Allocator + Clone>( node: NodeRef, K, V, marker::LeafOrInternal>, - alloc: &A, + alloc: A, ) -> BTreeMap where K: 'a, @@ -199,9 +200,9 @@ impl Clone for BTreeMap { match node.force() { Leaf(leaf) => { let mut out_tree = BTreeMap { - root: Some(Root::new(alloc)), + root: Some(Root::new(alloc.clone())), length: 0, - alloc: ManuallyDrop::new((*alloc).clone()), + alloc: ManuallyDrop::new(alloc), }; { @@ -224,11 +225,12 @@ impl Clone for BTreeMap { out_tree } Internal(internal) => { - let mut out_tree = clone_subtree(internal.first_edge().descend(), alloc); + let mut out_tree = + clone_subtree(internal.first_edge().descend(), alloc.clone()); { let out_root = out_tree.root.as_mut().unwrap(); - let mut out_node = out_root.push_internal_level(alloc); + let mut out_node = out_root.push_internal_level(alloc.clone()); let mut in_edge = internal.first_edge(); while let Ok(kv) = in_edge.right_kv() { let (k, v) = kv.into_kv(); @@ -236,7 +238,7 @@ impl Clone for BTreeMap { let k = (*k).clone(); let v = (*v).clone(); - let subtree = clone_subtree(in_edge.descend(), alloc); + let subtree = clone_subtree(in_edge.descend(), alloc.clone()); // We can't destructure subtree directly // because BTreeMap implements Drop @@ -247,7 +249,11 @@ impl Clone for BTreeMap { (root, length) }; - out_node.push(k, v, subroot.unwrap_or_else(|| Root::new(alloc))); + out_node.push( + k, + v, + subroot.unwrap_or_else(|| Root::new(alloc.clone())), + ); out_tree.length += 1 + sublength; } } @@ -258,14 +264,14 @@ impl Clone for BTreeMap { } if self.is_empty() { - BTreeMap::new_in(ManuallyDrop::into_inner(self.alloc.clone())) + BTreeMap::new_in((*self.alloc).clone()) } else { - clone_subtree(self.root.as_ref().unwrap().reborrow(), &*self.alloc) // unwrap succeeds because not empty + clone_subtree(self.root.as_ref().unwrap().reborrow(), (*self.alloc).clone()) // unwrap succeeds because not empty } } } -impl super::Recover for BTreeMap +impl super::Recover for BTreeMap where K: Borrow + Ord, Q: Ord, @@ -285,9 +291,14 @@ where let root_node = map.root.as_mut()?.borrow_mut(); match root_node.search_tree(key) { Found(handle) => Some( - OccupiedEntry { handle, dormant_map, alloc: &*map.alloc, _marker: PhantomData } - .remove_kv() - .0, + OccupiedEntry { + handle, + dormant_map, + alloc: (*map.alloc).clone(), + _marker: PhantomData, + } + .remove_kv() + .0, ), GoDown(_) => None, } @@ -295,7 +306,8 @@ where fn replace(&mut self, key: K) -> Option { let (map, dormant_map) = DormantMutRef::new(self); - let root_node = map.root.get_or_insert_with(|| Root::new(&*map.alloc)).borrow_mut(); + let root_node = + map.root.get_or_insert_with(|| Root::new((*map.alloc).clone())).borrow_mut(); match root_node.search_tree::(&key) { Found(mut kv) => Some(mem::replace(kv.key_mut(), key)), GoDown(handle) => { @@ -303,7 +315,7 @@ where key, handle: Some(handle), dormant_map, - alloc: &*map.alloc, + alloc: (*map.alloc).clone(), _marker: PhantomData, } .insert(()); @@ -369,14 +381,15 @@ impl fmt::Debug 
for IterMut<'_, K, V> { pub struct IntoIter< K, V, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { range: LazyLeafRange, length: usize, + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. alloc: A, } -impl IntoIter { +impl IntoIter { /// Returns an iterator of references over the remaining items. #[inline] pub(super) fn iter(&self) -> Iter<'_, K, V> { @@ -385,7 +398,7 @@ impl IntoIter { } #[stable(feature = "collection_debug", since = "1.17.0")] -impl Debug for IntoIter { +impl Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.iter()).finish() } @@ -456,12 +469,12 @@ impl fmt::Debug for ValuesMut<'_, K, V> { /// [`into_keys`]: BTreeMap::into_keys #[must_use = "iterators are lazy and do nothing unless consumed"] #[stable(feature = "map_into_keys_values", since = "1.54.0")] -pub struct IntoKeys { +pub struct IntoKeys { inner: IntoIter, } #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl fmt::Debug for IntoKeys { +impl fmt::Debug for IntoKeys { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.inner.iter().map(|(key, _)| key)).finish() } @@ -478,13 +491,13 @@ impl fmt::Debug for IntoKeys { pub struct IntoValues< K, V, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { inner: IntoIter, } #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl fmt::Debug for IntoValues { +impl fmt::Debug for IntoValues { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_list().entries(self.inner.iter().map(|(_, val)| val)).finish() } @@ -557,7 +570,7 @@ impl BTreeMap { } } -impl BTreeMap { +impl BTreeMap { /// Clears the map, removing all elements. /// /// # Examples @@ -578,7 +591,7 @@ impl BTreeMap { mem::drop(BTreeMap { root: mem::replace(&mut self.root, None), length: mem::replace(&mut self.length, 0), - alloc: ManuallyDrop::new(&*self.alloc), + alloc: self.alloc.clone(), }); } @@ -605,7 +618,7 @@ impl BTreeMap { } } -impl BTreeMap { +impl BTreeMap { /// Returns a reference to the value corresponding to the key. 
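Reviewer note: the `clear` hunk above steals the tree into a temporary map that now carries `self.alloc.clone()` rather than a borrowed handle. A toy version of the steal-into-a-temporary idiom (simplified field types, allocator elided):

    use std::mem;

    struct MapSketch {
        root: Option<Vec<u8>>, // placeholder for the real root node
        length: usize,
    }

    impl MapSketch {
        fn clear(&mut self) {
            // Steal the contents into a temporary that is dropped immediately;
            // the real code also hands it `self.alloc.clone()` so the temporary
            // can deallocate the nodes it now owns.
            drop(MapSketch {
                root: mem::replace(&mut self.root, None),
                length: mem::replace(&mut self.length, 0),
            });
        }
    }

    fn main() {
        let mut m = MapSketch { root: Some(vec![0u8; 4]), length: 4 };
        m.clear();
        assert!(m.root.is_none() && m.length == 0);
    }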
/// /// The key may be any borrowed form of the map's key type, but the ordering @@ -721,7 +734,7 @@ impl BTreeMap { Some(OccupiedEntry { handle: kv.forget_node_type(), dormant_map, - alloc: &*map.alloc, + alloc: (*map.alloc).clone(), _marker: PhantomData, }) } @@ -809,7 +822,7 @@ impl BTreeMap { Some(OccupiedEntry { handle: kv.forget_node_type(), dormant_map, - alloc: &*map.alloc, + alloc: (*map.alloc).clone(), _marker: PhantomData, }) } @@ -1029,8 +1042,13 @@ impl BTreeMap { let root_node = map.root.as_mut()?.borrow_mut(); match root_node.search_tree(key) { Found(handle) => Some( - OccupiedEntry { handle, dormant_map, alloc: &*map.alloc, _marker: PhantomData } - .remove_entry(), + OccupiedEntry { + handle, + dormant_map, + alloc: (*map.alloc).clone(), + _marker: PhantomData, + } + .remove_entry(), ), GoDown(_) => None, } @@ -1106,14 +1124,15 @@ impl BTreeMap { return; } - let self_iter = - mem::replace(self, Self::new_in(ManuallyDrop::into_inner(self.alloc.clone()))) - .into_iter(); - let other_iter = - mem::replace(other, Self::new_in(ManuallyDrop::into_inner(self.alloc.clone()))) - .into_iter(); - let root = self.root.get_or_insert_with(|| Root::new(&*self.alloc)); - root.append_from_sorted_iters(self_iter, other_iter, &mut self.length, &*self.alloc) + let self_iter = mem::replace(self, Self::new_in((*self.alloc).clone())).into_iter(); + let other_iter = mem::replace(other, Self::new_in((*self.alloc).clone())).into_iter(); + let root = self.root.get_or_insert_with(|| Root::new((*self.alloc).clone())); + root.append_from_sorted_iters( + self_iter, + other_iter, + &mut self.length, + (*self.alloc).clone(), + ) } /// Constructs a double-ended iterator over a sub-range of elements in the map. @@ -1232,21 +1251,21 @@ impl BTreeMap { key, handle: None, dormant_map, - alloc: &*map.alloc, + alloc: (*map.alloc).clone(), _marker: PhantomData, }), Some(ref mut root) => match root.borrow_mut().search_tree(&key) { Found(handle) => Occupied(OccupiedEntry { handle, dormant_map, - alloc: &*map.alloc, + alloc: (*map.alloc).clone(), _marker: PhantomData, }), GoDown(handle) => Vacant(VacantEntry { key, handle: Some(handle), dormant_map, - alloc: &*map.alloc, + alloc: (*map.alloc).clone(), _marker: PhantomData, }), }, @@ -1289,22 +1308,18 @@ impl BTreeMap { A: Clone, { if self.is_empty() { - return Self::new_in(ManuallyDrop::into_inner(self.alloc.clone())); + return Self::new_in((*self.alloc).clone()); } let total_num = self.len(); let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty - let right_root = left_root.split_off(key, &*self.alloc); + let right_root = left_root.split_off(key, (*self.alloc).clone()); let (new_left_len, right_len) = Root::calc_split_length(total_num, &left_root, &right_root); self.length = new_left_len; - BTreeMap { - root: Some(right_root), - length: right_len, - alloc: ManuallyDrop::new((*self.alloc).clone()), - } + BTreeMap { root: Some(right_root), length: right_len, alloc: self.alloc.clone() } } /// Creates an iterator that visits all elements (key-value pairs) in @@ -1340,7 +1355,7 @@ impl BTreeMap { /// assert_eq!(odds.keys().copied().collect::>(), [1, 3, 5, 7]); /// ``` #[unstable(feature = "btree_drain_filter", issue = "70530")] - pub fn drain_filter(&mut self, pred: F) -> DrainFilter<'_, K, V, F, &A> + pub fn drain_filter(&mut self, pred: F) -> DrainFilter<'_, K, V, F, A> where K: Ord, F: FnMut(&K, &mut V) -> bool, @@ -1349,7 +1364,7 @@ impl BTreeMap { DrainFilter { pred, inner, alloc } } - pub(super) fn drain_filter_inner(&mut self) -> 
(DrainFilterInner<'_, K, V>, &A) + pub(super) fn drain_filter_inner(&mut self) -> (DrainFilterInner<'_, K, V>, A) where K: Ord, { @@ -1362,7 +1377,7 @@ impl BTreeMap { dormant_root: Some(dormant_root), cur_leaf_edge: Some(front), }, - &*self.alloc, + (*self.alloc).clone(), ) } else { ( @@ -1371,7 +1386,7 @@ impl BTreeMap { dormant_root: None, cur_leaf_edge: None, }, - &*self.alloc, + (*self.alloc).clone(), ) } } @@ -1426,15 +1441,15 @@ impl BTreeMap { K: Ord, I: IntoIterator, { - let mut root = Root::new(&alloc); + let mut root = Root::new(alloc.clone()); let mut length = 0; - root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, &alloc); + root.bulk_push(DedupSortedIter::new(iter.into_iter()), &mut length, alloc.clone()); BTreeMap { root: Some(root), length, alloc: ManuallyDrop::new(alloc) } } } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, K, V, A: Allocator> IntoIterator for &'a BTreeMap { +impl<'a, K, V, A: Allocator + Clone> IntoIterator for &'a BTreeMap { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; @@ -1503,7 +1518,7 @@ impl Clone for Iter<'_, K, V> { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, K, V, A: Allocator> IntoIterator for &'a mut BTreeMap { +impl<'a, K, V, A: Allocator + Clone> IntoIterator for &'a mut BTreeMap { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; @@ -1573,7 +1588,7 @@ impl<'a, K, V> IterMut<'a, K, V> { } #[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for BTreeMap { +impl IntoIterator for BTreeMap { type Item = (K, V); type IntoIter = IntoIter; @@ -1598,11 +1613,11 @@ impl IntoIterator for BTreeMap { } #[stable(feature = "btree_drop", since = "1.7.0")] -impl Drop for IntoIter { +impl Drop for IntoIter { fn drop(&mut self) { - struct DropGuard<'a, K, V, A: Allocator>(&'a mut IntoIter); + struct DropGuard<'a, K, V, A: Allocator + Clone>(&'a mut IntoIter); - impl<'a, K, V, A: Allocator> Drop for DropGuard<'a, K, V, A> { + impl<'a, K, V, A: Allocator + Clone> Drop for DropGuard<'a, K, V, A> { fn drop(&mut self) { // Continue the same loop we perform below. This only runs when unwinding, so we // don't have to care about panics this time (they'll abort). @@ -1622,7 +1637,7 @@ impl Drop for IntoIter { } } -impl IntoIter { +impl IntoIter { /// Core of a `next` method returning a dying KV handle, /// invalidated by further calls to this function and some others. fn dying_next( @@ -1653,7 +1668,7 @@ impl IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter { +impl Iterator for IntoIter { type Item = (K, V); fn next(&mut self) -> Option<(K, V)> { @@ -1667,7 +1682,7 @@ impl Iterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for IntoIter { +impl DoubleEndedIterator for IntoIter { fn next_back(&mut self) -> Option<(K, V)> { // SAFETY: we consume the dying handle immediately. 
self.dying_next_back().map(unsafe { |kv| kv.into_key_val() }) @@ -1675,14 +1690,14 @@ impl DoubleEndedIterator for IntoIter { } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter { +impl ExactSizeIterator for IntoIter { fn len(&self) -> usize { self.length } } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter {} +impl FusedIterator for IntoIter {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Iterator for Keys<'a, K, V> { @@ -1781,12 +1796,13 @@ pub struct DrainFilter< K, V, F, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > where F: 'a + FnMut(&K, &mut V) -> bool, { pred: F, inner: DrainFilterInner<'a, K, V>, + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. alloc: A, } /// Most of the implementation of DrainFilter are generic over the type @@ -1804,7 +1820,7 @@ pub(super) struct DrainFilterInner<'a, K, V> { } #[unstable(feature = "btree_drain_filter", issue = "70530")] -impl Drop for DrainFilter<'_, K, V, F, A> +impl Drop for DrainFilter<'_, K, V, F, A> where F: FnMut(&K, &mut V) -> bool, { @@ -1826,7 +1842,7 @@ where } #[unstable(feature = "btree_drain_filter", issue = "70530")] -impl Iterator for DrainFilter<'_, K, V, F, A> +impl Iterator for DrainFilter<'_, K, V, F, A> where F: FnMut(&K, &mut V) -> bool, { @@ -1849,7 +1865,7 @@ impl<'a, K, V> DrainFilterInner<'a, K, V> { } /// Implementation of a typical `DrainFilter::next` method, given the predicate. - pub(super) fn next(&mut self, pred: &mut F, alloc: &A) -> Option<(K, V)> + pub(super) fn next(&mut self, pred: &mut F, alloc: A) -> Option<(K, V)> where F: FnMut(&K, &mut V) -> bool, { @@ -1862,10 +1878,10 @@ impl<'a, K, V> DrainFilterInner<'a, K, V> { // SAFETY: we will touch the root in a way that will not // invalidate the position returned. 
let root = unsafe { self.dormant_root.take().unwrap().awaken() }; - root.pop_internal_level(alloc); + root.pop_internal_level(alloc.clone()); self.dormant_root = Some(DormantMutRef::new(root).1); }, - alloc, + alloc.clone(), ); self.cur_leaf_edge = Some(pos); return Some(kv); @@ -1944,7 +1960,7 @@ impl ExactSizeIterator for ValuesMut<'_, K, V> { impl FusedIterator for ValuesMut<'_, K, V> {} #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl Iterator for IntoKeys { +impl Iterator for IntoKeys { type Item = K; fn next(&mut self) -> Option { @@ -1969,24 +1985,24 @@ impl Iterator for IntoKeys { } #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl DoubleEndedIterator for IntoKeys { +impl DoubleEndedIterator for IntoKeys { fn next_back(&mut self) -> Option { self.inner.next_back().map(|(k, _)| k) } } #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl ExactSizeIterator for IntoKeys { +impl ExactSizeIterator for IntoKeys { fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl FusedIterator for IntoKeys {} +impl FusedIterator for IntoKeys {} #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl Iterator for IntoValues { +impl Iterator for IntoValues { type Item = V; fn next(&mut self) -> Option { @@ -2003,21 +2019,21 @@ impl Iterator for IntoValues { } #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl DoubleEndedIterator for IntoValues { +impl DoubleEndedIterator for IntoValues { fn next_back(&mut self) -> Option { self.inner.next_back().map(|(_, v)| v) } } #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl ExactSizeIterator for IntoValues { +impl ExactSizeIterator for IntoValues { fn len(&self) -> usize { self.inner.len() } } #[stable(feature = "map_into_keys_values", since = "1.54.0")] -impl FusedIterator for IntoValues {} +impl FusedIterator for IntoValues {} #[stable(feature = "btree_range", since = "1.17.0")] impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> { @@ -2083,7 +2099,7 @@ impl FromIterator<(K, V)> for BTreeMap { } #[stable(feature = "rust1", since = "1.0.0")] -impl Extend<(K, V)> for BTreeMap { +impl Extend<(K, V)> for BTreeMap { #[inline] fn extend>(&mut self, iter: T) { iter.into_iter().for_each(move |(k, v)| { @@ -2098,7 +2114,9 @@ impl Extend<(K, V)> for BTreeMap { } #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, K: Ord + Copy, V: Copy, A: Allocator> Extend<(&'a K, &'a V)> for BTreeMap { +impl<'a, K: Ord + Copy, V: Copy, A: Allocator + Clone> Extend<(&'a K, &'a V)> + for BTreeMap +{ fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().map(|(&key, &value)| (key, value))); } @@ -2110,7 +2128,7 @@ impl<'a, K: Ord + Copy, V: Copy, A: Allocator> Extend<(&'a K, &'a V)> for BTreeM } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for BTreeMap { +impl Hash for BTreeMap { fn hash(&self, state: &mut H) { state.write_length_prefix(self.len()); for elt in self { @@ -2128,17 +2146,17 @@ impl Default for BTreeMap { } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for BTreeMap { +impl PartialEq for BTreeMap { fn eq(&self, other: &BTreeMap) -> bool { self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for BTreeMap {} +impl Eq for BTreeMap {} #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for BTreeMap { +impl PartialOrd for BTreeMap { #[inline] fn partial_cmp(&self, other: &BTreeMap) -> Option 
{ self.iter().partial_cmp(other.iter()) @@ -2146,7 +2164,7 @@ impl PartialOrd for BTreeMap Ord for BTreeMap { +impl Ord for BTreeMap { #[inline] fn cmp(&self, other: &BTreeMap) -> Ordering { self.iter().cmp(other.iter()) @@ -2154,14 +2172,14 @@ impl Ord for BTreeMap { } #[stable(feature = "rust1", since = "1.0.0")] -impl Debug for BTreeMap { +impl Debug for BTreeMap { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_map().entries(self.iter()).finish() } } #[stable(feature = "rust1", since = "1.0.0")] -impl Index<&Q> for BTreeMap +impl Index<&Q> for BTreeMap where K: Borrow + Ord, Q: Ord, @@ -2201,7 +2219,7 @@ impl From<[(K, V); N]> for BTreeMap { } } -impl BTreeMap { +impl BTreeMap { /// Gets an iterator over the entries of the map, sorted by key. /// /// # Examples diff --git a/library/alloc/src/collections/btree/map/entry.rs b/library/alloc/src/collections/btree/map/entry.rs index 61b2f89100d4..b6eecf9b0e95 100644 --- a/library/alloc/src/collections/btree/map/entry.rs +++ b/library/alloc/src/collections/btree/map/entry.rs @@ -21,7 +21,7 @@ pub enum Entry< 'a, K: 'a, V: 'a, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { /// A vacant entry. #[stable(feature = "rust1", since = "1.0.0")] @@ -33,7 +33,7 @@ pub enum Entry< } #[stable(feature = "debug_btree_map", since = "1.12.0")] -impl Debug for Entry<'_, K, V, A> { +impl Debug for Entry<'_, K, V, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match *self { Vacant(ref v) => f.debug_tuple("Entry").field(v).finish(), @@ -49,21 +49,22 @@ pub struct VacantEntry< 'a, K, V, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { pub(super) key: K, /// `None` for a (empty) map without root pub(super) handle: Option, K, V, marker::Leaf>, marker::Edge>>, pub(super) dormant_map: DormantMutRef<'a, BTreeMap>, - pub(super) alloc: &'a A, + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. + pub(super) alloc: A, // Be invariant in `K` and `V` pub(super) _marker: PhantomData<&'a mut (K, V)>, } #[stable(feature = "debug_btree_map", since = "1.12.0")] -impl Debug for VacantEntry<'_, K, V, A> { +impl Debug for VacantEntry<'_, K, V, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("VacantEntry").field(self.key()).finish() } @@ -76,19 +77,20 @@ pub struct OccupiedEntry< 'a, K, V, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { pub(super) handle: Handle, K, V, marker::LeafOrInternal>, marker::KV>, pub(super) dormant_map: DormantMutRef<'a, BTreeMap>, - pub(super) alloc: &'a A, + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. + pub(super) alloc: A, // Be invariant in `K` and `V` pub(super) _marker: PhantomData<&'a mut (K, V)>, } #[stable(feature = "debug_btree_map", since = "1.12.0")] -impl Debug for OccupiedEntry<'_, K, V, A> { +impl Debug for OccupiedEntry<'_, K, V, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("OccupiedEntry").field("key", self.key()).field("value", self.get()).finish() } @@ -98,7 +100,7 @@ impl Debug for OccupiedEntry<'_, K, V, A /// /// Contains the occupied entry, and the value that was not inserted. 
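Reviewer note: `VacantEntry` and `OccupiedEntry` above now own `alloc: A` instead of borrowing `&'a A` from the map (the copied doc comment still says "IntoIter", presumably a paste-over). The `Clone` bound is only reasonable if cloning is cheap; for a stateful allocator that usually means a shared handle, sketched here (the `Shared` wrapper is hypothetical, nightly `allocator_api`):

    #![feature(allocator_api)]
    use std::alloc::{AllocError, Allocator, Layout, System};
    use std::ptr::NonNull;
    use std::sync::Arc;

    // A stateful allocator behind a shared handle: every `alloc.clone()` in
    // the patch is then just a refcount bump.
    #[derive(Clone)]
    struct Shared(Arc<System>);

    unsafe impl Allocator for Shared {
        fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            self.0.allocate(layout)
        }
        unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
            unsafe { self.0.deallocate(ptr, layout) }
        }
    }

    fn main() {
        let a = Shared(Arc::new(System));
        let v: Vec<u8, Shared> = Vec::new_in(a.clone()); // cost of one clone: an Arc bump
        drop((a, v));
    }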
#[unstable(feature = "map_try_insert", issue = "82766")] -pub struct OccupiedError<'a, K: 'a, V: 'a, A: Allocator = Global> { +pub struct OccupiedError<'a, K: 'a, V: 'a, A: Allocator + Clone = Global> { /// The entry in the map that was already occupied. pub entry: OccupiedEntry<'a, K, V, A>, /// The value which was not inserted, because the entry was already occupied. @@ -106,7 +108,7 @@ pub struct OccupiedError<'a, K: 'a, V: 'a, A: Allocator = Global> { } #[unstable(feature = "map_try_insert", issue = "82766")] -impl Debug for OccupiedError<'_, K, V, A> { +impl Debug for OccupiedError<'_, K, V, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_struct("OccupiedError") .field("key", self.entry.key()) @@ -117,7 +119,9 @@ impl Debug for OccupiedError<'_, K, V, A } #[unstable(feature = "map_try_insert", issue = "82766")] -impl<'a, K: Debug + Ord, V: Debug, A: Allocator> fmt::Display for OccupiedError<'a, K, V, A> { +impl<'a, K: Debug + Ord, V: Debug, A: Allocator + Clone> fmt::Display + for OccupiedError<'a, K, V, A> +{ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, @@ -129,7 +133,7 @@ impl<'a, K: Debug + Ord, V: Debug, A: Allocator> fmt::Display for OccupiedError< } } -impl<'a, K: Ord, V, A: Allocator> Entry<'a, K, V, A> { +impl<'a, K: Ord, V, A: Allocator + Clone> Entry<'a, K, V, A> { /// Ensures a value is in the entry by inserting the default if empty, and returns /// a mutable reference to the value in the entry. /// @@ -257,7 +261,7 @@ impl<'a, K: Ord, V, A: Allocator> Entry<'a, K, V, A> { } } -impl<'a, K: Ord, V: Default, A: Allocator> Entry<'a, K, V, A> { +impl<'a, K: Ord, V: Default, A: Allocator + Clone> Entry<'a, K, V, A> { #[stable(feature = "entry_or_default", since = "1.28.0")] /// Ensures a value is in the entry by inserting the default value if empty, /// and returns a mutable reference to the value in the entry. @@ -280,7 +284,7 @@ impl<'a, K: Ord, V: Default, A: Allocator> Entry<'a, K, V, A> { } } -impl<'a, K: Ord, V, A: Allocator> VacantEntry<'a, K, V, A> { +impl<'a, K: Ord, V, A: Allocator + Clone> VacantEntry<'a, K, V, A> { /// Gets a reference to the key that would be used when inserting a value /// through the VacantEntry. /// @@ -338,13 +342,13 @@ impl<'a, K: Ord, V, A: Allocator> VacantEntry<'a, K, V, A> { None => { // SAFETY: There is no tree yet so no reference to it exists. let map = unsafe { self.dormant_map.awaken() }; - let mut root = NodeRef::new_leaf(self.alloc); + let mut root = NodeRef::new_leaf(self.alloc.clone()); let val_ptr = root.borrow_mut().push(self.key, value) as *mut V; map.root = Some(root.forget_type()); map.length = 1; val_ptr } - Some(handle) => match handle.insert_recursing(self.key, value, self.alloc) { + Some(handle) => match handle.insert_recursing(self.key, value, self.alloc.clone()) { (None, val_ptr) => { // SAFETY: We have consumed self.handle. let map = unsafe { self.dormant_map.awaken() }; @@ -369,7 +373,7 @@ impl<'a, K: Ord, V, A: Allocator> VacantEntry<'a, K, V, A> { } } -impl<'a, K: Ord, V, A: Allocator> OccupiedEntry<'a, K, V, A> { +impl<'a, K: Ord, V, A: Allocator + Clone> OccupiedEntry<'a, K, V, A> { /// Gets a reference to the key in the entry. 
/// /// # Examples @@ -538,13 +542,13 @@ impl<'a, K: Ord, V, A: Allocator> OccupiedEntry<'a, K, V, A> { pub(super) fn remove_kv(self) -> (K, V) { let mut emptied_internal_root = false; let (old_kv, _) = - self.handle.remove_kv_tracking(|| emptied_internal_root = true, self.alloc); + self.handle.remove_kv_tracking(|| emptied_internal_root = true, self.alloc.clone()); // SAFETY: we consumed the intermediate root borrow, `self.handle`. let map = unsafe { self.dormant_map.awaken() }; map.length -= 1; if emptied_internal_root { let root = map.root.as_mut().unwrap(); - root.pop_internal_level(&*self.alloc); + root.pop_internal_level(self.alloc); } old_kv } diff --git a/library/alloc/src/collections/btree/map/tests.rs b/library/alloc/src/collections/btree/map/tests.rs index a4c24cd4593b..5504959c34d8 100644 --- a/library/alloc/src/collections/btree/map/tests.rs +++ b/library/alloc/src/collections/btree/map/tests.rs @@ -116,11 +116,7 @@ impl BTreeMap { { let iter = mem::take(self).into_iter(); if !iter.is_empty() { - self.root.insert(Root::new(&*self.alloc)).bulk_push( - iter, - &mut self.length, - &*self.alloc, - ); + self.root.insert(Root::new(*self.alloc)).bulk_push(iter, &mut self.length, *self.alloc); } } } diff --git a/library/alloc/src/collections/btree/navigate.rs b/library/alloc/src/collections/btree/navigate.rs index d44cb57618df..1e33c1e64d66 100644 --- a/library/alloc/src/collections/btree/navigate.rs +++ b/library/alloc/src/collections/btree/navigate.rs @@ -178,9 +178,9 @@ impl LazyLeafRange { } #[inline] - pub unsafe fn deallocating_next_unchecked( + pub unsafe fn deallocating_next_unchecked( &mut self, - alloc: &A, + alloc: A, ) -> Handle, marker::KV> { debug_assert!(self.front.is_some()); let front = self.init_front().unwrap(); @@ -188,9 +188,9 @@ impl LazyLeafRange { } #[inline] - pub unsafe fn deallocating_next_back_unchecked( + pub unsafe fn deallocating_next_back_unchecked( &mut self, - alloc: &A, + alloc: A, ) -> Handle, marker::KV> { debug_assert!(self.back.is_some()); let back = self.init_back().unwrap(); @@ -198,7 +198,7 @@ impl LazyLeafRange { } #[inline] - pub fn deallocating_end(&mut self, alloc: &A) { + pub fn deallocating_end(&mut self, alloc: A) { if let Some(front) = self.take_front() { front.deallocating_end(alloc) } @@ -444,9 +444,9 @@ impl Handle, marker::Edge> { /// `deallocating_next_back`. /// - The returned KV handle is only valid to access the key and value, /// and only valid until the next call to a `deallocating_` method. - unsafe fn deallocating_next( + unsafe fn deallocating_next( self, - alloc: &A, + alloc: A, ) -> Option<(Self, Handle, marker::KV>)> { let mut edge = self.forget_node_type(); @@ -454,7 +454,7 @@ impl Handle, marker::Edge> { edge = match edge.right_kv() { Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_leaf_edge(), kv)), Err(last_edge) => { - match unsafe { last_edge.into_node().deallocate_and_ascend(alloc) } { + match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } { Some(parent_edge) => parent_edge.forget_node_type(), None => return None, } @@ -476,9 +476,9 @@ impl Handle, marker::Edge> { /// `deallocating_next`. /// - The returned KV handle is only valid to access the key and value, /// and only valid until the next call to a `deallocating_` method. 
- unsafe fn deallocating_next_back( + unsafe fn deallocating_next_back( self, - alloc: &A, + alloc: A, ) -> Option<(Self, Handle, marker::KV>)> { let mut edge = self.forget_node_type(); @@ -486,7 +486,7 @@ impl Handle, marker::Edge> { edge = match edge.left_kv() { Ok(kv) => return Some((unsafe { ptr::read(&kv) }.next_back_leaf_edge(), kv)), Err(last_edge) => { - match unsafe { last_edge.into_node().deallocate_and_ascend(alloc) } { + match unsafe { last_edge.into_node().deallocate_and_ascend(alloc.clone()) } { Some(parent_edge) => parent_edge.forget_node_type(), None => return None, } @@ -501,9 +501,11 @@ impl Handle, marker::Edge> { /// both sides of the tree, and have hit the same edge. As it is intended /// only to be called when all keys and values have been returned, /// no cleanup is done on any of the keys or values. - fn deallocating_end(self, alloc: &A) { + fn deallocating_end(self, alloc: A) { let mut edge = self.forget_node_type(); - while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend(alloc) } { + while let Some(parent_edge) = + unsafe { edge.into_node().deallocate_and_ascend(alloc.clone()) } + { edge = parent_edge.forget_node_type(); } } @@ -578,9 +580,9 @@ impl Handle, marker::Edge> { /// /// The only safe way to proceed with the updated handle is to compare it, drop it, /// or call this method or counterpart `deallocating_next_back_unchecked` again. - unsafe fn deallocating_next_unchecked( + unsafe fn deallocating_next_unchecked( &mut self, - alloc: &A, + alloc: A, ) -> Handle, marker::KV> { super::mem::replace(self, |leaf_edge| unsafe { leaf_edge.deallocating_next(alloc).unwrap() @@ -599,9 +601,9 @@ impl Handle, marker::Edge> { /// /// The only safe way to proceed with the updated handle is to compare it, drop it, /// or call this method or counterpart `deallocating_next_unchecked` again. - unsafe fn deallocating_next_back_unchecked( + unsafe fn deallocating_next_back_unchecked( &mut self, - alloc: &A, + alloc: A, ) -> Handle, marker::KV> { super::mem::replace(self, |leaf_edge| unsafe { leaf_edge.deallocating_next_back(alloc).unwrap() diff --git a/library/alloc/src/collections/btree/node.rs b/library/alloc/src/collections/btree/node.rs index 5ae0a554aeea..d831161bcb68 100644 --- a/library/alloc/src/collections/btree/node.rs +++ b/library/alloc/src/collections/btree/node.rs @@ -78,7 +78,7 @@ impl LeafNode { } /// Creates a new boxed `LeafNode`. - fn new(alloc: &A) -> Box { + fn new(alloc: A) -> Box { unsafe { let mut leaf = Box::new_uninit_in(alloc); LeafNode::init(leaf.as_mut_ptr()); @@ -110,7 +110,7 @@ impl InternalNode { /// An invariant of internal nodes is that they have at least one /// initialized and valid edge. This function does not set up /// such an edge. - unsafe fn new(alloc: &A) -> Box { + unsafe fn new(alloc: A) -> Box { unsafe { let mut node = Box::::new_uninit_in(alloc); // We only need to initialize the data; the edges are MaybeUninit. 
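Reviewer note: `LeafNode::new` and `InternalNode::new` above now hand the allocator by value straight to `Box::new_uninit_in`. A compilable sketch of that allocation step (nightly `allocator_api` plus `new_uninit`; `new_node_in` is a hypothetical stand-in):

    #![feature(allocator_api, new_uninit)]
    use std::alloc::{Allocator, Global};
    use std::mem::MaybeUninit;

    // Stand-in for `LeafNode::new`: the by-value handle is consumed by
    // `Box::new_uninit_in`, then the node is initialized in place.
    fn new_node_in<A: Allocator>(alloc: A) -> Box<[u8; 64], A> {
        let mut node: Box<MaybeUninit<[u8; 64]>, A> = Box::new_uninit_in(alloc);
        unsafe {
            node.as_mut_ptr().write([0u8; 64]); // like `LeafNode::init`
            node.assume_init()
        }
    }

    fn main() {
        assert_eq!(new_node_in(Global)[0], 0);
    }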
@@ -213,17 +213,17 @@ unsafe impl Send for NodeRef unsafe impl Send for NodeRef {} impl NodeRef { - pub fn new_leaf(alloc: &A) -> Self { + pub fn new_leaf(alloc: A) -> Self { Self::from_new_leaf(LeafNode::new(alloc)) } - fn from_new_leaf(leaf: Box, A>) -> Self { + fn from_new_leaf(leaf: Box, A>) -> Self { NodeRef { height: 0, node: NonNull::from(Box::leak(leaf)), _marker: PhantomData } } } impl NodeRef { - fn new_internal(child: Root, alloc: &A) -> Self { + fn new_internal(child: Root, alloc: A) -> Self { let mut new_node = unsafe { InternalNode::new(alloc) }; new_node.edges[0].write(child.node); unsafe { NodeRef::from_new_internal(new_node, child.height + 1) } @@ -231,7 +231,7 @@ impl NodeRef { /// # Safety /// `height` must not be zero. - unsafe fn from_new_internal( + unsafe fn from_new_internal( internal: Box, A>, height: usize, ) -> Self { @@ -390,9 +390,9 @@ impl NodeRef { /// Similar to `ascend`, gets a reference to a node's parent node, but also /// deallocates the current node in the process. This is unsafe because the /// current node will still be accessible despite being deallocated. - pub unsafe fn deallocate_and_ascend( + pub unsafe fn deallocate_and_ascend( self, - alloc: &A, + alloc: A, ) -> Option, marker::Edge>> { let height = self.height; let node = self.node; @@ -559,16 +559,16 @@ impl NodeRef { impl NodeRef { /// Returns a new owned tree, with its own root node that is initially empty. - pub fn new(alloc: &A) -> Self { + pub fn new(alloc: A) -> Self { NodeRef::new_leaf(alloc).forget_type() } /// Adds a new internal node with a single edge pointing to the previous root node, /// make that new node the root node, and return it. This increases the height by 1 /// and is the opposite of `pop_internal_level`. - pub fn push_internal_level( + pub fn push_internal_level( &mut self, - alloc: &A, + alloc: A, ) -> NodeRef, K, V, marker::Internal> { super::mem::take_mut(self, |old_root| NodeRef::new_internal(old_root, alloc).forget_type()); @@ -585,7 +585,7 @@ impl NodeRef { /// it will not invalidate other handles or references to the root node. /// /// Panics if there is no internal level, i.e., if the root node is a leaf. - pub fn pop_internal_level(&mut self, alloc: &A) { + pub fn pop_internal_level(&mut self, alloc: A) { assert!(self.height > 0); let top = self.node; @@ -869,11 +869,11 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark /// this edge. This method splits the node if there isn't enough room. /// /// The returned pointer points to the inserted value. - fn insert( + fn insert( mut self, key: K, val: V, - alloc: &A, + alloc: A, ) -> (Option>, *mut V) { if self.node.len() < CAPACITY { let val_ptr = self.insert_fit(key, val); @@ -930,12 +930,12 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, /// Inserts a new key-value pair and an edge that will go to the right of that new pair /// between this edge and the key-value pair to the right of this edge. This method splits /// the node if there isn't enough room. - fn insert( + fn insert( mut self, key: K, val: V, edge: Root, - alloc: &A, + alloc: A, ) -> Option> { assert!(edge.height == self.node.height - 1); @@ -968,23 +968,25 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark /// If the returned result is some `SplitResult`, the `left` field will be the root node. /// The returned pointer points to the inserted value, which in the case of `SplitResult` /// is in the `left` or `right` tree. 
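Reviewer note: `push_internal_level` above grows the tree by one level by wrapping the old root in a fresh internal node; `pop_internal_level` is its inverse. The height-raising move on a toy tree (names illustrative):

    use std::mem;

    // Toy tree: wrap the old root in a new internal node, raising height by one.
    enum Node {
        Leaf(Vec<i32>),
        Internal(Vec<Node>),
    }

    fn push_internal_level(root: &mut Node) {
        let old = mem::replace(root, Node::Leaf(Vec::new()));
        *root = Node::Internal(vec![old]);
    }

    fn height(node: &Node) -> usize {
        match node {
            Node::Leaf(_) => 0,
            Node::Internal(children) => 1 + height(&children[0]),
        }
    }

    fn main() {
        let mut root = Node::Leaf(vec![1, 2, 3]);
        push_internal_level(&mut root);
        assert_eq!(height(&root), 1);
    }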
- pub fn insert_recursing( + pub fn insert_recursing( self, key: K, value: V, - alloc: &A, + alloc: A, ) -> (Option>, *mut V) { - let (mut split, val_ptr) = match self.insert(key, value, alloc) { + let (mut split, val_ptr) = match self.insert(key, value, alloc.clone()) { (None, val_ptr) => return (None, val_ptr), (Some(split), val_ptr) => (split.forget_node_type(), val_ptr), }; loop { split = match split.left.ascend() { - Ok(parent) => match parent.insert(split.kv.0, split.kv.1, split.right, alloc) { - None => return (None, val_ptr), - Some(split) => split.forget_node_type(), - }, + Ok(parent) => { + match parent.insert(split.kv.0, split.kv.1, split.right, alloc.clone()) { + None => return (None, val_ptr), + Some(split) => split.forget_node_type(), + } + } Err(root) => return (Some(SplitResult { left: root, ..split }), val_ptr), }; } @@ -1126,7 +1128,7 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark /// - The key and value pointed to by this handle are extracted. /// - All the key-value pairs to the right of this handle are put into a newly /// allocated node. - pub fn split(mut self, alloc: &A) -> SplitResult<'a, K, V, marker::Leaf> { + pub fn split(mut self, alloc: A) -> SplitResult<'a, K, V, marker::Leaf> { let mut new_node = LeafNode::new(alloc); let kv = self.split_leaf_data(&mut new_node); @@ -1158,7 +1160,10 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, /// - The key and value pointed to by this handle are extracted. /// - All the edges and key-value pairs to the right of this handle are put into /// a newly allocated node. - pub fn split(mut self, alloc: &A) -> SplitResult<'a, K, V, marker::Internal> { + pub fn split( + mut self, + alloc: A, + ) -> SplitResult<'a, K, V, marker::Internal> { let old_len = self.node.len(); unsafe { let mut new_node = InternalNode::new(alloc); @@ -1270,7 +1275,7 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { >( self, result: F, - alloc: &A, + alloc: A, ) -> R { let Handle { node: mut parent_node, idx: parent_idx, _marker } = self.parent; let old_parent_len = parent_node.len(); @@ -1327,9 +1332,9 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { /// the left child node and returns the shrunk parent node. /// /// Panics unless we `.can_merge()`. - pub fn merge_tracking_parent( + pub fn merge_tracking_parent( self, - alloc: &A, + alloc: A, ) -> NodeRef, K, V, marker::Internal> { self.do_merge(|parent, _child| parent, alloc) } @@ -1338,9 +1343,9 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { /// the left child node and returns that child node. /// /// Panics unless we `.can_merge()`. - pub fn merge_tracking_child( + pub fn merge_tracking_child( self, - alloc: &A, + alloc: A, ) -> NodeRef, K, V, marker::LeafOrInternal> { self.do_merge(|_parent, child| child, alloc) } @@ -1350,10 +1355,10 @@ impl<'a, K: 'a, V: 'a> BalancingContext<'a, K, V> { /// where the tracked child edge ended up, /// /// Panics unless we `.can_merge()`. 
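Reviewer note: both `split` methods above extract a separator pair and move everything to its right into a newly allocated node, which is why they now need an owned allocator handle for that new node. The arithmetic on a toy leaf:

    // Toy leaf split: keep the left half, extract the middle key as separator,
    // move everything to its right into the "newly allocated" node.
    fn split_leaf(keys: &mut Vec<i32>) -> (i32, Vec<i32>) {
        let mid = keys.len() / 2;
        let right = keys.split_off(mid + 1);
        let sep = keys.pop().unwrap();
        (sep, right)
    }

    fn main() {
        let mut node = vec![1, 2, 3, 4, 5];
        let (sep, right) = split_leaf(&mut node);
        assert_eq!((node, sep, right), (vec![1, 2], 3, vec![4, 5]));
    }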
- pub fn merge_tracking_child_edge( + pub fn merge_tracking_child_edge( self, track_edge_idx: LeftOrRight, - alloc: &A, + alloc: A, ) -> Handle, K, V, marker::LeafOrInternal>, marker::Edge> { let old_left_len = self.left_child.len(); let right_len = self.right_child.len(); diff --git a/library/alloc/src/collections/btree/remove.rs b/library/alloc/src/collections/btree/remove.rs index 693efd176546..0904299254f0 100644 --- a/library/alloc/src/collections/btree/remove.rs +++ b/library/alloc/src/collections/btree/remove.rs @@ -7,10 +7,10 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::LeafOrInter /// the leaf edge corresponding to that former pair. It's possible this empties /// a root node that is internal, which the caller should pop from the map /// holding the tree. The caller should also decrement the map's length. - pub fn remove_kv_tracking( + pub fn remove_kv_tracking( self, handle_emptied_internal_root: F, - alloc: &A, + alloc: A, ) -> ((K, V), Handle, K, V, marker::Leaf>, marker::Edge>) { match self.force() { Leaf(node) => node.remove_leaf_kv(handle_emptied_internal_root, alloc), @@ -20,10 +20,10 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::LeafOrInter } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, marker::KV> { - fn remove_leaf_kv( + fn remove_leaf_kv( self, handle_emptied_internal_root: F, - alloc: &A, + alloc: A, ) -> ((K, V), Handle, K, V, marker::Leaf>, marker::Edge>) { let (old_kv, mut pos) = self.remove(); let len = pos.reborrow().into_node().len(); @@ -35,7 +35,7 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark Ok(Left(left_parent_kv)) => { debug_assert!(left_parent_kv.right_child_len() == MIN_LEN - 1); if left_parent_kv.can_merge() { - left_parent_kv.merge_tracking_child_edge(Right(idx), alloc) + left_parent_kv.merge_tracking_child_edge(Right(idx), alloc.clone()) } else { debug_assert!(left_parent_kv.left_child_len() > MIN_LEN); left_parent_kv.steal_left(idx) @@ -44,7 +44,7 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark Ok(Right(right_parent_kv)) => { debug_assert!(right_parent_kv.left_child_len() == MIN_LEN - 1); if right_parent_kv.can_merge() { - right_parent_kv.merge_tracking_child_edge(Left(idx), alloc) + right_parent_kv.merge_tracking_child_edge(Left(idx), alloc.clone()) } else { debug_assert!(right_parent_kv.right_child_len() > MIN_LEN); right_parent_kv.steal_right(idx) @@ -73,10 +73,10 @@ impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Leaf>, mark } impl<'a, K: 'a, V: 'a> Handle, K, V, marker::Internal>, marker::KV> { - fn remove_internal_kv( + fn remove_internal_kv( self, handle_emptied_internal_root: F, - alloc: &A, + alloc: A, ) -> ((K, V), Handle, K, V, marker::Leaf>, marker::Edge>) { // Remove an adjacent KV from its leaf and then put it back in place of // the element we were asked to remove. 
Prefer the left adjacent KV, diff --git a/library/alloc/src/collections/btree/set.rs b/library/alloc/src/collections/btree/set.rs index aeb5c30dba34..bec3b9675254 100644 --- a/library/alloc/src/collections/btree/set.rs +++ b/library/alloc/src/collections/btree/set.rs @@ -79,37 +79,37 @@ use crate::alloc::{Allocator, Global}; #[cfg_attr(not(test), rustc_diagnostic_item = "BTreeSet")] pub struct BTreeSet< T, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { map: BTreeMap, } #[stable(feature = "rust1", since = "1.0.0")] -impl Hash for BTreeSet { +impl Hash for BTreeSet { fn hash(&self, state: &mut H) { self.map.hash(state) } } #[stable(feature = "rust1", since = "1.0.0")] -impl PartialEq for BTreeSet { +impl PartialEq for BTreeSet { fn eq(&self, other: &BTreeSet) -> bool { self.map.eq(&other.map) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Eq for BTreeSet {} +impl Eq for BTreeSet {} #[stable(feature = "rust1", since = "1.0.0")] -impl PartialOrd for BTreeSet { +impl PartialOrd for BTreeSet { fn partial_cmp(&self, other: &BTreeSet) -> Option { self.map.partial_cmp(&other.map) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Ord for BTreeSet { +impl Ord for BTreeSet { fn cmp(&self, other: &BTreeSet) -> Ordering { self.map.cmp(&other.map) } @@ -156,7 +156,7 @@ impl fmt::Debug for Iter<'_, T> { #[derive(Debug)] pub struct IntoIter< T, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { iter: super::map::IntoIter, } @@ -186,11 +186,11 @@ pub struct Range<'a, T: 'a> { pub struct Difference< 'a, T: 'a, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { inner: DifferenceInner<'a, T, A>, } -enum DifferenceInner<'a, T: 'a, A: Allocator> { +enum DifferenceInner<'a, T: 'a, A: Allocator + Clone> { Stitch { // iterate all of `self` and some of `other`, spotting matches along the way self_iter: Iter<'a, T>, @@ -205,7 +205,7 @@ enum DifferenceInner<'a, T: 'a, A: Allocator> { } // Explicit Debug impl necessary because of issue #26925 -impl Debug for DifferenceInner<'_, T, A> { +impl Debug for DifferenceInner<'_, T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { DifferenceInner::Stitch { self_iter, other_iter } => f @@ -224,7 +224,7 @@ impl Debug for DifferenceInner<'_, T, A> { } #[stable(feature = "collection_debug", since = "1.17.0")] -impl fmt::Debug for Difference<'_, T, A> { +impl fmt::Debug for Difference<'_, T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Difference").field(&self.inner).finish() } @@ -260,11 +260,11 @@ impl fmt::Debug for SymmetricDifference<'_, T> { pub struct Intersection< 'a, T: 'a, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > { inner: IntersectionInner<'a, T, A>, } -enum IntersectionInner<'a, T: 'a, A: Allocator> { +enum IntersectionInner<'a, T: 'a, A: Allocator + Clone> { Stitch { // iterate similarly sized sets jointly, spotting matches along the way a: Iter<'a, T>, @@ -279,7 +279,7 @@ enum IntersectionInner<'a, T: 'a, A: Allocator> { } // Explicit Debug impl necessary because of issue #26925 -impl Debug for 
IntersectionInner<'_, T, A> { +impl Debug for IntersectionInner<'_, T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { IntersectionInner::Stitch { a, b } => { @@ -296,7 +296,7 @@ impl Debug for IntersectionInner<'_, T, A> { } #[stable(feature = "collection_debug", since = "1.17.0")] -impl Debug for Intersection<'_, T, A> { +impl Debug for Intersection<'_, T, A> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_tuple("Intersection").field(&self.inner).finish() } @@ -349,7 +349,7 @@ impl BTreeSet { } } -impl BTreeSet { +impl BTreeSet { /// Makes a new `BTreeSet` with a reasonable choice of B. /// /// # Examples @@ -1208,7 +1208,7 @@ impl FromIterator for BTreeSet { } } -impl BTreeSet { +impl BTreeSet { fn from_sorted_iter>(iter: I, alloc: A) -> BTreeSet { let iter = iter.map(|k| (k, ())); let map = BTreeMap::bulk_build_from_sorted_iter(iter, alloc); @@ -1241,7 +1241,7 @@ impl From<[T; N]> for BTreeSet { } #[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for BTreeSet { +impl IntoIterator for BTreeSet { type Item = T; type IntoIter = IntoIter; @@ -1263,7 +1263,7 @@ impl IntoIterator for BTreeSet { } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T, A: Allocator> IntoIterator for &'a BTreeSet { +impl<'a, T, A: Allocator + Clone> IntoIterator for &'a BTreeSet { type Item = &'a T; type IntoIter = Iter<'a, T>; @@ -1278,18 +1278,19 @@ pub struct DrainFilter< 'a, T, F, - #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator = Global, + #[unstable(feature = "allocator_api", issue = "32838")] A: Allocator + Clone = Global, > where T: 'a, F: 'a + FnMut(&T) -> bool, { pred: F, inner: super::map::DrainFilterInner<'a, T, ()>, - alloc: &'a A, + /// The BTreeMap will outlive this IntoIter so we don't care about drop order for `alloc`. 
+ alloc: A, } #[unstable(feature = "btree_drain_filter", issue = "70530")] -impl Drop for DrainFilter<'_, T, F, A> +impl Drop for DrainFilter<'_, T, F, A> where F: FnMut(&T) -> bool, { @@ -1299,7 +1300,7 @@ where } #[unstable(feature = "btree_drain_filter", issue = "70530")] -impl fmt::Debug for DrainFilter<'_, T, F, A> +impl fmt::Debug for DrainFilter<'_, T, F, A> where T: fmt::Debug, F: FnMut(&T) -> bool, @@ -1310,7 +1311,7 @@ where } #[unstable(feature = "btree_drain_filter", issue = "70530")] -impl<'a, T, F, A: Allocator> Iterator for DrainFilter<'_, T, F, A> +impl<'a, T, F, A: Allocator + Clone> Iterator for DrainFilter<'_, T, F, A> where F: 'a + FnMut(&T) -> bool, { @@ -1328,10 +1329,13 @@ where } #[unstable(feature = "btree_drain_filter", issue = "70530")] -impl FusedIterator for DrainFilter<'_, T, F, A> where F: FnMut(&T) -> bool {} +impl FusedIterator for DrainFilter<'_, T, F, A> where + F: FnMut(&T) -> bool +{ +} #[stable(feature = "rust1", since = "1.0.0")] -impl Extend for BTreeSet { +impl Extend for BTreeSet { #[inline] fn extend>(&mut self, iter: Iter) { iter.into_iter().for_each(move |elem| { @@ -1346,7 +1350,7 @@ impl Extend for BTreeSet { } #[stable(feature = "extend_ref", since = "1.2.0")] -impl<'a, T: 'a + Ord + Copy, A: Allocator> Extend<&'a T> for BTreeSet { +impl<'a, T: 'a + Ord + Copy, A: Allocator + Clone> Extend<&'a T> for BTreeSet { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } @@ -1466,7 +1470,7 @@ impl BitOr<&BTreeSet> for &BTreeSet< } #[stable(feature = "rust1", since = "1.0.0")] -impl Debug for BTreeSet { +impl Debug for BTreeSet { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.debug_set().entries(self.iter()).finish() } @@ -1519,7 +1523,7 @@ impl ExactSizeIterator for Iter<'_, T> { impl FusedIterator for Iter<'_, T> {} #[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for IntoIter { +impl Iterator for IntoIter { type Item = T; fn next(&mut self) -> Option { @@ -1531,20 +1535,20 @@ impl Iterator for IntoIter { } } #[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for IntoIter { +impl DoubleEndedIterator for IntoIter { fn next_back(&mut self) -> Option { self.iter.next_back().map(|(k, _)| k) } } #[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for IntoIter { +impl ExactSizeIterator for IntoIter { fn len(&self) -> usize { self.iter.len() } } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for IntoIter {} +impl FusedIterator for IntoIter {} #[stable(feature = "btree_range", since = "1.17.0")] impl Clone for Range<'_, T> { @@ -1602,7 +1606,7 @@ impl Clone for Difference<'_, T, A> { } } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T: Ord, A: Allocator> Iterator for Difference<'a, T, A> { +impl<'a, T: Ord, A: Allocator + Clone> Iterator for Difference<'a, T, A> { type Item = &'a T; fn next(&mut self) -> Option<&'a T> { @@ -1649,7 +1653,7 @@ impl<'a, T: Ord, A: Allocator> Iterator for Difference<'a, T, A> { } #[stable(feature = "fused", since = "1.26.0")] -impl FusedIterator for Difference<'_, T, A> {} +impl FusedIterator for Difference<'_, T, A> {} #[stable(feature = "rust1", since = "1.0.0")] impl Clone for SymmetricDifference<'_, T> { @@ -1703,7 +1707,7 @@ impl Clone for Intersection<'_, T, A> { } } #[stable(feature = "rust1", since = "1.0.0")] -impl<'a, T: Ord, A: Allocator> Iterator for Intersection<'a, T, A> { +impl<'a, T: Ord, A: Allocator + Clone> Iterator for Intersection<'a, T, A> { type Item = &'a T; fn next(&mut self) -> 
Option<&'a T> {
@@ -1744,7 +1748,7 @@ impl<'a, T: Ord, A: Allocator> Iterator for Intersection<'a, T, A> {
 }
 
 #[stable(feature = "fused", since = "1.26.0")]
-impl<T: Ord, A: Allocator> FusedIterator for Intersection<'_, T, A> {}
+impl<T: Ord, A: Allocator + Clone> FusedIterator for Intersection<'_, T, A> {}
 
 #[stable(feature = "rust1", since = "1.0.0")]
 impl<T> Clone for Union<'_, T> {
diff --git a/library/alloc/src/collections/btree/split.rs b/library/alloc/src/collections/btree/split.rs
index 3ccd1d1d8619..638dc98fc3e4 100644
--- a/library/alloc/src/collections/btree/split.rs
+++ b/library/alloc/src/collections/btree/split.rs
@@ -29,12 +29,12 @@ impl<K, V> Root<K, V> {
     /// and if the ordering of `Q` corresponds to that of `K`.
     /// If `self` respects all `BTreeMap` tree invariants, then both
     /// `self` and the returned tree will respect those invariants.
-    pub fn split_off<Q: ?Sized + Ord, A: Allocator>(&mut self, key: &Q, alloc: &A) -> Self
+    pub fn split_off<Q: ?Sized + Ord, A: Allocator + Clone>(&mut self, key: &Q, alloc: A) -> Self
     where
         K: Borrow<Q>,
     {
         let left_root = self;
-        let mut right_root = Root::new_pillar(left_root.height(), alloc);
+        let mut right_root = Root::new_pillar(left_root.height(), alloc.clone());
         let mut left_node = left_root.borrow_mut();
         let mut right_node = right_root.borrow_mut();
 
@@ -57,16 +57,16 @@ impl<K, V> Root<K, V> {
             }
         }
 
-        left_root.fix_right_border(alloc);
+        left_root.fix_right_border(alloc.clone());
         right_root.fix_left_border(alloc);
         right_root
     }
 
     /// Creates a tree consisting of empty nodes.
-    fn new_pillar<A: Allocator>(height: usize, alloc: &A) -> Self {
-        let mut root = Root::new(alloc);
+    fn new_pillar<A: Allocator + Clone>(height: usize, alloc: A) -> Self {
+        let mut root = Root::new(alloc.clone());
        for _ in 0..height {
-            root.push_internal_level(alloc);
+            root.push_internal_level(alloc.clone());
         }
         root
     }
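Reviewer note: the net effect at the API surface is that any allocator used with `BTreeMap` or `BTreeSet` must now be `Clone`, which holds trivially for the default `Global`. End to end on nightly (`btreemap_alloc` gates `new_in`; nothing here is stabilized by this patch):

    #![feature(allocator_api, btreemap_alloc)]
    use std::alloc::Global;
    use std::collections::BTreeMap;

    fn main() {
        let mut map = BTreeMap::new_in(Global); // any `A: Allocator + Clone` works
        map.extend([(1, "a"), (2, "b"), (3, "c")]);
        let right = map.split_off(&2); // clones the allocator handle internally
        assert_eq!(map.len(), 1);
        assert_eq!(right.len(), 2);
    }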