Skip to content

Commit a118ee2

Browse files
committed
Auto merge of rust-lang#81486 - ssomers:btree_separate_drop, r=Mark-Simulacrum
BTreeMap: disentangle Drop implementation from IntoIter No longer require every `BTreeMap` to dig up its last leaf edge before dying. This speeds up the `clone_` benchmarks by 25% for normal keys and values (far less for huge values). r? `@Mark-Simulacrum`
2 parents e9920ef + 3045b75 commit a118ee2

File tree

2 files changed

+106
-65
lines changed

2 files changed

+106
-65
lines changed

library/alloc/src/collections/btree/map.rs

+34-26
Original file line number | Diff line number | Diff line change
@@ -145,8 +145,8 @@ pub struct BTreeMap<K, V> {
145145
#[stable(feature = "btree_drop", since = "1.7.0")]
146146
unsafe impl<#[may_dangle] K, #[may_dangle] V> Drop for BTreeMap<K, V> {
147147
fn drop(&mut self) {
148-
unsafe {
149-
drop(ptr::read(self).into_iter());
148+
if let Some(root) = self.root.take() {
149+
Dropper { front: root.into_dying().first_leaf_edge(), remaining_length: self.length };
150150
}
151151
}
152152
}
@@ -332,6 +332,14 @@ impl<K: fmt::Debug, V: fmt::Debug> fmt::Debug for IntoIter<K, V> {
332332
}
333333
}
334334

335+
/// A simplified version of `IntoIter` that is not double-ended and has only one
336+
/// purpose: to drop the remainder of an `IntoIter`. Therefore it also serves to
337+
/// drop an entire tree without the need to first look up a `back` leaf edge.
338+
struct Dropper<K, V> {
339+
front: Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>,
340+
remaining_length: usize,
341+
}
342+
335343
/// An iterator over the keys of a `BTreeMap`.
336344
///
337345
/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
@@ -1410,42 +1418,42 @@ impl<K, V> IntoIterator for BTreeMap<K, V> {
14101418
}
14111419
}
14121420

1413-
#[stable(feature = "btree_drop", since = "1.7.0")]
1414-
impl<K, V> Drop for IntoIter<K, V> {
1421+
impl<K, V> Drop for Dropper<K, V> {
14151422
fn drop(&mut self) {
1416-
struct DropGuard<'a, K, V>(&'a mut IntoIter<K, V>);
1423+
// Similar to advancing a non-fusing iterator.
1424+
fn next_or_end<K, V>(this: &mut Dropper<K, V>) -> Option<(K, V)> {
1425+
if this.remaining_length == 0 {
1426+
unsafe { ptr::read(&this.front).deallocating_end() }
1427+
None
1428+
} else {
1429+
this.remaining_length -= 1;
1430+
Some(unsafe { this.front.deallocating_next_unchecked() })
1431+
}
1432+
}
1433+
1434+
struct DropGuard<'a, K, V>(&'a mut Dropper<K, V>);
14171435

14181436
impl<'a, K, V> Drop for DropGuard<'a, K, V> {
14191437
fn drop(&mut self) {
14201438
// Continue the same loop we perform below. This only runs when unwinding, so we
14211439
// don't have to care about panics this time (they'll abort).
1422-
while let Some(_) = self.0.next() {}
1423-
1424-
unsafe {
1425-
let mut node =
1426-
ptr::read(&self.0.front).unwrap_unchecked().into_node().forget_type();
1427-
while let Some(parent) = node.deallocate_and_ascend() {
1428-
node = parent.into_node().forget_type();
1429-
}
1430-
}
1440+
while let Some(_pair) = next_or_end(&mut self.0) {}
14311441
}
14321442
}
14331443

1434-
while let Some(pair) = self.next() {
1444+
while let Some(pair) = next_or_end(self) {
14351445
let guard = DropGuard(self);
14361446
drop(pair);
14371447
mem::forget(guard);
14381448
}
1449+
}
1450+
}
14391451

1440-
unsafe {
1441-
if let Some(front) = ptr::read(&self.front) {
1442-
let mut node = front.into_node().forget_type();
1443-
// Most of the nodes have been deallocated while traversing
1444-
// but one pile from a leaf up to the root is left standing.
1445-
while let Some(parent) = node.deallocate_and_ascend() {
1446-
node = parent.into_node().forget_type();
1447-
}
1448-
}
1452+
#[stable(feature = "btree_drop", since = "1.7.0")]
1453+
impl<K, V> Drop for IntoIter<K, V> {
1454+
fn drop(&mut self) {
1455+
if let Some(front) = self.front.take() {
1456+
Dropper { front, remaining_length: self.length };
14491457
}
14501458
}
14511459
}
@@ -1459,7 +1467,7 @@ impl<K, V> Iterator for IntoIter<K, V> {
14591467
None
14601468
} else {
14611469
self.length -= 1;
1462-
Some(unsafe { self.front.as_mut().unwrap().next_unchecked() })
1470+
Some(unsafe { self.front.as_mut().unwrap().deallocating_next_unchecked() })
14631471
}
14641472
}
14651473

@@ -1475,7 +1483,7 @@ impl<K, V> DoubleEndedIterator for IntoIter<K, V> {
14751483
None
14761484
} else {
14771485
self.length -= 1;
1478-
Some(unsafe { self.back.as_mut().unwrap().next_back_unchecked() })
1486+
Some(unsafe { self.back.as_mut().unwrap().deallocating_next_back_unchecked() })
14791487
}
14801488
}
14811489
}

library/alloc/src/collections/btree/navigate.rs

+72-39
Original file line number | Diff line number | Diff line change
@@ -289,37 +289,76 @@ impl<BorrowType: marker::BorrowType, K, V>
289289
}
290290
}
291291

292-
macro_rules! def_next_kv_uncheched_dealloc {
293-
{ unsafe fn $name:ident : $adjacent_kv:ident } => {
294-
/// Given a leaf edge handle into an owned tree, returns a handle to the next KV,
295-
/// while deallocating any node left behind yet leaving the corresponding edge
296-
/// in its parent node dangling.
297-
///
298-
/// # Safety
299-
/// - The leaf edge must not be the last one in the direction travelled.
300-
/// - The node carrying the next KV returned must not have been deallocated by a
301-
/// previous call on any handle obtained for this tree.
302-
unsafe fn $name <K, V>(
303-
leaf_edge: Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge>,
304-
) -> Handle<NodeRef<marker::Dying, K, V, marker::LeafOrInternal>, marker::KV> {
305-
let mut edge = leaf_edge.forget_node_type();
306-
loop {
307-
edge = match edge.$adjacent_kv() {
308-
Ok(internal_kv) => return internal_kv,
309-
Err(last_edge) => {
310-
unsafe {
311-
let parent_edge = last_edge.into_node().deallocate_and_ascend();
312-
parent_edge.unwrap_unchecked().forget_node_type()
313-
}
314-
}
292+
impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
293+
/// Given a leaf edge handle into a dying tree, returns the next leaf edge
294+
/// on the right side, and the key-value pair in between, which is either
295+
/// in the same leaf node, in an ancestor node, or non-existent.
296+
///
297+
/// This method also deallocates any node(s) it reaches the end of. This
298+
/// implies that if no more key-value pair exists, the entire remainder of
299+
/// the tree will have been deallocated and there is nothing left to return.
300+
///
301+
/// # Safety
302+
/// The given edge must not have been previously returned by counterpart
303+
/// `deallocating_next_back`.
304+
unsafe fn deallocating_next(self) -> Option<(Self, (K, V))> {
305+
let mut edge = self.forget_node_type();
306+
loop {
307+
edge = match edge.right_kv() {
308+
Ok(kv) => {
309+
let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
310+
let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
311+
return Some((kv.next_leaf_edge(), (k, v)));
315312
}
313+
Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
314+
Some(parent_edge) => parent_edge.forget_node_type(),
315+
None => return None,
316+
},
316317
}
317318
}
318-
};
319-
}
319+
}
320320

321-
def_next_kv_uncheched_dealloc! {unsafe fn next_kv_unchecked_dealloc: right_kv}
322-
def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
321+
/// Given a leaf edge handle into a dying tree, returns the next leaf edge
322+
/// on the left side, and the key-value pair in between, which is either
323+
/// in the same leaf node, in an ancestor node, or non-existent.
324+
///
325+
/// This method also deallocates any node(s) it reaches the end of. This
326+
/// implies that if no more key-value pair exists, the entire remainder of
327+
/// the tree will have been deallocated and there is nothing left to return.
328+
///
329+
/// # Safety
330+
/// The given edge must not have been previously returned by counterpart
331+
/// `deallocating_next`.
332+
unsafe fn deallocating_next_back(self) -> Option<(Self, (K, V))> {
333+
let mut edge = self.forget_node_type();
334+
loop {
335+
edge = match edge.left_kv() {
336+
Ok(kv) => {
337+
let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
338+
let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
339+
return Some((kv.next_back_leaf_edge(), (k, v)));
340+
}
341+
Err(last_edge) => match unsafe { last_edge.into_node().deallocate_and_ascend() } {
342+
Some(parent_edge) => parent_edge.forget_node_type(),
343+
None => return None,
344+
},
345+
}
346+
}
347+
}
348+
349+
/// Deallocates a pile of nodes from the leaf up to the root.
350+
/// This is the only way to deallocate the remainder of a tree after
351+
/// `deallocating_next` and `deallocating_next_back` have been nibbling at
352+
/// both sides of the tree, and have hit the same edge. As it is intended
353+
/// only to be called when all keys and values have been returned,
354+
/// no cleanup is done on any of the keys or values.
355+
pub fn deallocating_end(self) {
356+
let mut edge = self.forget_node_type();
357+
while let Some(parent_edge) = unsafe { edge.into_node().deallocate_and_ascend() } {
358+
edge = parent_edge.forget_node_type();
359+
}
360+
}
361+
}
323362

324363
impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
325364
/// Moves the leaf edge handle to the next leaf edge and returns references to the
@@ -394,12 +433,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
394433
/// The only safe way to proceed with the updated handle is to compare it, drop it,
395434
/// call this method again subject to its safety conditions, or call counterpart
396435
/// `next_back_unchecked` subject to its safety conditions.
397-
pub unsafe fn next_unchecked(&mut self) -> (K, V) {
398-
super::mem::replace(self, |leaf_edge| {
399-
let kv = unsafe { next_kv_unchecked_dealloc(leaf_edge) };
400-
let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
401-
let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
402-
(kv.next_leaf_edge(), (k, v))
436+
pub unsafe fn deallocating_next_unchecked(&mut self) -> (K, V) {
437+
super::mem::replace(self, |leaf_edge| unsafe {
438+
leaf_edge.deallocating_next().unwrap_unchecked()
403439
})
404440
}
405441

@@ -415,12 +451,9 @@ impl<K, V> Handle<NodeRef<marker::Dying, K, V, marker::Leaf>, marker::Edge> {
415451
/// The only safe way to proceed with the updated handle is to compare it, drop it,
416452
/// call this method again subject to its safety conditions, or call counterpart
417453
/// `next_unchecked` subject to its safety conditions.
418-
pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
419-
super::mem::replace(self, |leaf_edge| {
420-
let kv = unsafe { next_back_kv_unchecked_dealloc(leaf_edge) };
421-
let k = unsafe { ptr::read(kv.reborrow().into_kv().0) };
422-
let v = unsafe { ptr::read(kv.reborrow().into_kv().1) };
423-
(kv.next_back_leaf_edge(), (k, v))
454+
pub unsafe fn deallocating_next_back_unchecked(&mut self) -> (K, V) {
455+
super::mem::replace(self, |leaf_edge| unsafe {
456+
leaf_edge.deallocating_next_back().unwrap_unchecked()
424457
})
425458
}
426459
}

0 commit comments

Comments (0)