diff --git a/src/etc/gdb_rust_pretty_printing.py b/src/etc/gdb_rust_pretty_printing.py index 0914c22eb13f0..d32a6bfd3f270 100755 --- a/src/etc/gdb_rust_pretty_printing.py +++ b/src/etc/gdb_rust_pretty_printing.py @@ -370,12 +370,25 @@ def to_string(self): ("(len: %i)" % self.__val.get_wrapped_value()['map']['length'])) def children(self): - root = self.__val.get_wrapped_value()['map']['root'] - node_ptr = root['node'] - i = 0 - for child in children_of_node(node_ptr, root['height'], False): - yield (str(i), child) - i = i + 1 + if self.__val.get_wrapped_value()['map']['length'] > 0: + root = self.__val.get_wrapped_value()['map']['root'] + # get at `Option` innards + root = GdbValue(root).get_child_at_index(0).get_wrapped_value() + # pull out the `Some` variant of the enum + try: + root = root['Some'] + except: + # Do nothing, we just want to pull out Some if it exists. + # If it didn't, then it seems at least on some versions of gdb + # we don't actually need to do the above line, so just skip it. + pass + # And now the value of the Some variant + root = GdbValue(root).get_child_at_index(0).get_wrapped_value() + node_ptr = root['node'] + i = 0 + for child in children_of_node(node_ptr, root['height'], False): + yield (str(i), child) + i = i + 1 class RustStdBTreeMapPrinter(object): @@ -391,13 +404,26 @@ def to_string(self): ("(len: %i)" % self.__val.get_wrapped_value()['length'])) def children(self): - root = self.__val.get_wrapped_value()['root'] - node_ptr = root['node'] - i = 0 - for child in children_of_node(node_ptr, root['height'], True): - yield (str(i), child[0]) - yield (str(i), child[1]) - i = i + 1 + if self.__val.get_wrapped_value()['length'] > 0: + root = self.__val.get_wrapped_value()['root'] + # get at `Option` innards + root = GdbValue(root).get_child_at_index(0).get_wrapped_value() + # pull out the `Some` variant of the enum + try: + root = root['Some'] + except: + # Do nothing, we just want to pull out Some if it exists. + # If it didn't, then it seems at least on some versions of gdb + # we don't actually need to do the above line, so just skip it. 
+ pass + # And now the value of the Some variant + root = GdbValue(root).get_child_at_index(0).get_wrapped_value() + node_ptr = root['node'] + i = 0 + for child in children_of_node(node_ptr, root['height'], True): + yield (str(i), child[0]) + yield (str(i), child[1]) + i = i + 1 class RustStdStringPrinter(object): diff --git a/src/liballoc/collections/btree/map.rs b/src/liballoc/collections/btree/map.rs index 9da324ba2d4f1..3ba7befc04609 100644 --- a/src/liballoc/collections/btree/map.rs +++ b/src/liballoc/collections/btree/map.rs @@ -122,7 +122,7 @@ use UnderflowResult::*; /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub struct BTreeMap { - root: node::Root, + root: Option>, length: usize, } @@ -147,10 +147,11 @@ impl Clone for BTreeMap { { match node.force() { Leaf(leaf) => { - let mut out_tree = BTreeMap { root: node::Root::new_leaf(), length: 0 }; + let mut out_tree = BTreeMap { root: Some(node::Root::new_leaf()), length: 0 }; { - let mut out_node = match out_tree.root.as_mut().force() { + let root = out_tree.root.as_mut().unwrap(); + let mut out_node = match root.as_mut().force() { Leaf(leaf) => leaf, Internal(_) => unreachable!(), }; @@ -169,9 +170,14 @@ impl Clone for BTreeMap { } Internal(internal) => { let mut out_tree = clone_subtree(internal.first_edge().descend()); + out_tree.ensure_root_is_owned(); { - let mut out_node = out_tree.root.push_level(); + // Ideally we'd use the return of ensure_root_is_owned + // instead of re-unwrapping here but unfortunately that + // borrows all of out_tree and we need access to the + // length below. + let mut out_node = out_tree.root.as_mut().unwrap().push_level(); let mut in_edge = internal.first_edge(); while let Ok(kv) = in_edge.right_kv() { let (k, v) = kv.into_kv(); @@ -190,7 +196,7 @@ impl Clone for BTreeMap { (root, length) }; - out_node.push(k, v, subroot); + out_node.push(k, v, subroot.unwrap_or_else(|| node::Root::new_leaf())); out_tree.length += 1 + sublength; } } @@ -203,9 +209,9 @@ impl Clone for BTreeMap { if self.is_empty() { // Ideally we'd call `BTreeMap::new` here, but that has the `K: // Ord` constraint, which this method lacks. 
- BTreeMap { root: node::Root::shared_empty_root(), length: 0 } + BTreeMap { root: None, length: 0 } } else { - clone_subtree(self.root.as_ref()) + clone_subtree(self.root.as_ref().unwrap().as_ref()) } } @@ -271,14 +277,14 @@ where type Key = K; fn get(&self, key: &Q) -> Option<&K> { - match search::search_tree(self.root.as_ref(), key) { + match search::search_tree(self.root.as_ref()?.as_ref(), key) { Found(handle) => Some(handle.into_kv().0), GoDown(_) => None, } } fn take(&mut self, key: &Q) -> Option { - match search::search_tree(self.root.as_mut(), key) { + match search::search_tree(self.root.as_mut()?.as_mut(), key) { Found(handle) => Some( OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData } .remove_kv() @@ -290,7 +296,7 @@ where fn replace(&mut self, key: K) -> Option { self.ensure_root_is_owned(); - match search::search_tree::, K, (), K>(self.root.as_mut(), &key) { + match search::search_tree::, K, (), K>(self.root.as_mut()?.as_mut(), &key) { Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)), GoDown(handle) => { VacantEntry { key, handle, length: &mut self.length, _marker: PhantomData } @@ -344,15 +350,18 @@ pub struct IterMut<'a, K: 'a, V: 'a> { /// [`BTreeMap`]: struct.BTreeMap.html #[stable(feature = "rust1", since = "1.0.0")] pub struct IntoIter { - front: Handle, marker::Edge>, - back: Handle, marker::Edge>, + front: Option, marker::Edge>>, + back: Option, marker::Edge>>, length: usize, } #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let range = Range { front: self.front.reborrow(), back: self.back.reborrow() }; + let range = Range { + front: self.front.as_ref().map(|f| f.reborrow()), + back: self.back.as_ref().map(|b| b.reborrow()), + }; f.debug_list().entries(range).finish() } } @@ -417,8 +426,8 @@ pub struct ValuesMut<'a, K: 'a, V: 'a> { /// [`BTreeMap`]: struct.BTreeMap.html #[stable(feature = "btree_range", since = "1.17.0")] pub struct Range<'a, K: 'a, V: 'a> { - front: Handle, K, V, marker::Leaf>, marker::Edge>, - back: Handle, K, V, marker::Leaf>, marker::Edge>, + front: Option, K, V, marker::Leaf>, marker::Edge>>, + back: Option, K, V, marker::Leaf>, marker::Edge>>, } #[stable(feature = "collection_debug", since = "1.17.0")] @@ -437,8 +446,8 @@ impl fmt::Debug for Range<'_, K, V> { /// [`BTreeMap`]: struct.BTreeMap.html #[stable(feature = "btree_range", since = "1.17.0")] pub struct RangeMut<'a, K: 'a, V: 'a> { - front: Handle, K, V, marker::Leaf>, marker::Edge>, - back: Handle, K, V, marker::Leaf>, marker::Edge>, + front: Option, K, V, marker::Leaf>, marker::Edge>>, + back: Option, K, V, marker::Leaf>, marker::Edge>>, // Be invariant in `K` and `V` _marker: PhantomData<&'a mut (K, V)>, @@ -447,7 +456,10 @@ pub struct RangeMut<'a, K: 'a, V: 'a> { #[stable(feature = "collection_debug", since = "1.17.0")] impl fmt::Debug for RangeMut<'_, K, V> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - let range = Range { front: self.front.reborrow(), back: self.back.reborrow() }; + let range = Range { + front: self.front.as_ref().map(|f| f.reborrow()), + back: self.back.as_ref().map(|b| b.reborrow()), + }; f.debug_list().entries(range).finish() } } @@ -544,7 +556,7 @@ impl BTreeMap { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> BTreeMap { - BTreeMap { root: node::Root::shared_empty_root(), length: 0 } + BTreeMap { root: None, length: 0 } } /// Clears the map, removing all elements. 
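The hunks above replace the shared empty root with `root: None`, allocating a root node only on first use. A minimal standalone sketch of that lazy-root pattern, assuming illustrative `Tree`/`Node` types rather than the liballoc internals, with `Option::get_or_insert_with` playing the role it has in the new `ensure_root_is_owned`:

```rust
// Sketch only: the "allocate the root lazily" pattern adopted by this patch.
struct Node {
    keys: Vec<i32>,
}

struct Tree {
    root: Option<Node>, // `None` means "empty, nothing allocated yet"
    length: usize,
}

impl Tree {
    fn new() -> Tree {
        // Mirrors `BTreeMap { root: None, length: 0 }`: creating an empty
        // tree performs no allocation.
        Tree { root: None, length: 0 }
    }

    fn ensure_root_is_owned(&mut self) -> &mut Node {
        // Allocate the root only when it is first needed, as the patched
        // `ensure_root_is_owned` does with `node::Root::new_leaf()`.
        self.root.get_or_insert_with(|| Node { keys: Vec::new() })
    }

    fn insert(&mut self, key: i32) {
        self.ensure_root_is_owned().keys.push(key);
        self.length += 1;
    }
}

fn main() {
    let mut tree = Tree::new();
    assert!(tree.root.is_none()); // an empty tree owns no node
    tree.insert(7);
    assert_eq!(tree.length, 1);
}
```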
@@ -589,7 +601,7 @@ impl BTreeMap { K: Borrow, Q: Ord, { - match search::search_tree(self.root.as_ref(), key) { + match search::search_tree(self.root.as_ref()?.as_ref(), key) { Found(handle) => Some(handle.into_kv().1), GoDown(_) => None, } @@ -616,7 +628,7 @@ impl BTreeMap { K: Borrow, Q: Ord, { - match search::search_tree(self.root.as_ref(), k) { + match search::search_tree(self.root.as_ref()?.as_ref(), k) { Found(handle) => Some(handle.into_kv()), GoDown(_) => None, } @@ -645,7 +657,7 @@ impl BTreeMap { T: Ord, K: Borrow, { - let front = self.root.as_ref().first_leaf_edge(); + let front = self.root.as_ref()?.as_ref().first_leaf_edge(); front.right_kv().ok().map(Handle::into_kv) } @@ -674,7 +686,7 @@ impl BTreeMap { T: Ord, K: Borrow, { - let front = self.root.as_mut().first_leaf_edge(); + let front = self.root.as_mut()?.as_mut().first_leaf_edge(); if let Ok(kv) = front.right_kv() { Some(OccupiedEntry { handle: kv.forget_node_type(), @@ -708,7 +720,7 @@ impl BTreeMap { T: Ord, K: Borrow, { - let back = self.root.as_ref().last_leaf_edge(); + let back = self.root.as_ref()?.as_ref().last_leaf_edge(); back.left_kv().ok().map(Handle::into_kv) } @@ -737,7 +749,7 @@ impl BTreeMap { T: Ord, K: Borrow, { - let back = self.root.as_mut().last_leaf_edge(); + let back = self.root.as_mut()?.as_mut().last_leaf_edge(); if let Ok(kv) = back.left_kv() { Some(OccupiedEntry { handle: kv.forget_node_type(), @@ -801,7 +813,7 @@ impl BTreeMap { K: Borrow, Q: Ord, { - match search::search_tree(self.root.as_mut(), key) { + match search::search_tree(self.root.as_mut()?.as_mut(), key) { Found(handle) => Some(handle.into_kv_mut().1), GoDown(_) => None, } @@ -896,7 +908,7 @@ impl BTreeMap { K: Borrow, Q: Ord, { - match search::search_tree(self.root.as_mut(), key) { + match search::search_tree(self.root.as_mut()?.as_mut(), key) { Found(handle) => Some( OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData } .remove_entry(), @@ -992,11 +1004,15 @@ impl BTreeMap { K: Borrow, R: RangeBounds, { - let root1 = self.root.as_ref(); - let root2 = self.root.as_ref(); - let (f, b) = range_search(root1, root2, range); + if let Some(root) = &self.root { + let root1 = root.as_ref(); + let root2 = root.as_ref(); + let (f, b) = range_search(root1, root2, range); - Range { front: f, back: b } + Range { front: Some(f), back: Some(b) } + } else { + Range { front: None, back: None } + } } /// Constructs a mutable double-ended iterator over a sub-range of elements in the map. @@ -1036,11 +1052,15 @@ impl BTreeMap { K: Borrow, R: RangeBounds, { - let root1 = self.root.as_mut(); - let root2 = unsafe { ptr::read(&root1) }; - let (f, b) = range_search(root1, root2, range); + if let Some(root) = &mut self.root { + let root1 = root.as_mut(); + let root2 = unsafe { ptr::read(&root1) }; + let (f, b) = range_search(root1, root2, range); - RangeMut { front: f, back: b, _marker: PhantomData } + RangeMut { front: Some(f), back: Some(b), _marker: PhantomData } + } else { + RangeMut { front: None, back: None, _marker: PhantomData } + } } /// Gets the given key's corresponding entry in the map for in-place manipulation. 
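The lookup methods above now apply `?` to `self.root`, so queries against an empty map return `None` without touching any node. A minimal sketch of that early-return shape, assuming toy `Tree`/`Node` types rather than the real node API:

```rust
// Sketch only: `?` on `Option` short-circuits lookups on a root-less tree,
// replacing the old "search the shared empty root" path.
struct Node {
    key: i32,
    value: &'static str,
}

struct Tree {
    root: Option<Node>,
}

impl Tree {
    fn get(&self, key: i32) -> Option<&'static str> {
        // For an empty tree, `self.root.as_ref()?` returns `None` right here,
        // just like `self.root.as_ref()?.as_ref()` in the patched methods.
        let root = self.root.as_ref()?;
        if root.key == key { Some(root.value) } else { None }
    }
}

fn main() {
    let empty = Tree { root: None };
    assert_eq!(empty.get(1), None); // no allocation, no traversal

    let tree = Tree { root: Some(Node { key: 1, value: "one" }) };
    assert_eq!(tree.get(1), Some("one"));
}
```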
@@ -1065,7 +1085,7 @@ impl BTreeMap { pub fn entry(&mut self, key: K) -> Entry<'_, K, V> { // FIXME(@porglezomp) Avoid allocating if we don't insert self.ensure_root_is_owned(); - match search::search_tree(self.root.as_mut(), &key) { + match search::search_tree(self.root.as_mut().unwrap().as_mut(), &key) { Found(handle) => { Occupied(OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData }) } @@ -1077,7 +1097,7 @@ impl BTreeMap { fn from_sorted_iter>(&mut self, iter: I) { self.ensure_root_is_owned(); - let mut cur_node = self.root.as_mut().last_leaf_edge().into_node(); + let mut cur_node = self.root.as_mut().unwrap().as_mut().last_leaf_edge().into_node(); // Iterate through all key-value pairs, pushing them into nodes at the right level. for (key, value) in iter { // Try to push key-value pair into the current leaf node. @@ -1126,7 +1146,7 @@ impl BTreeMap { fn fix_right_edge(&mut self) { // Handle underfull nodes, start from the top. - let mut cur_node = self.root.as_mut(); + let mut cur_node = self.root.as_mut().unwrap().as_mut(); while let Internal(internal) = cur_node.force() { // Check if right-most child is underfull. let mut last_edge = internal.last_edge(); @@ -1187,14 +1207,14 @@ impl BTreeMap { let total_num = self.len(); let mut right = Self::new(); - right.root = node::Root::new_leaf(); - for _ in 0..(self.root.as_ref().height()) { - right.root.push_level(); + let right_root = right.ensure_root_is_owned(); + for _ in 0..(self.root.as_ref().unwrap().as_ref().height()) { + right_root.push_level(); } { - let mut left_node = self.root.as_mut(); - let mut right_node = right.root.as_mut(); + let mut left_node = self.root.as_mut().unwrap().as_mut(); + let mut right_node = right.root.as_mut().unwrap().as_mut(); loop { let mut split_edge = match search::search_node(left_node, key) { @@ -1223,7 +1243,9 @@ impl BTreeMap { self.fix_right_border(); right.fix_left_border(); - if self.root.as_ref().height() < right.root.as_ref().height() { + if self.root.as_ref().unwrap().as_ref().height() + < right.root.as_ref().unwrap().as_ref().height() + { self.recalc_length(); right.length = total_num - self.len(); } else { @@ -1261,19 +1283,19 @@ impl BTreeMap { res } - self.length = dfs(self.root.as_ref()); + self.length = dfs(self.root.as_ref().unwrap().as_ref()); } /// Removes empty levels on the top. fn fix_top(&mut self) { loop { { - let node = self.root.as_ref(); + let node = self.root.as_ref().unwrap().as_ref(); if node.height() == 0 || node.len() > 0 { break; } } - self.root.pop_level(); + self.root.as_mut().unwrap().pop_level(); } } @@ -1281,7 +1303,7 @@ impl BTreeMap { self.fix_top(); { - let mut cur_node = self.root.as_mut(); + let mut cur_node = self.root.as_mut().unwrap().as_mut(); while let Internal(node) = cur_node.force() { let mut last_kv = node.last_kv(); @@ -1307,7 +1329,7 @@ impl BTreeMap { self.fix_top(); { - let mut cur_node = self.root.as_mut(); + let mut cur_node = self.root.as_mut().unwrap().as_mut(); while let Internal(node) = cur_node.force() { let mut first_kv = node.first_kv(); @@ -1326,13 +1348,6 @@ impl BTreeMap { self.fix_top(); } - - /// If the root node is the shared root node, allocate our own node. 
- fn ensure_root_is_owned(&mut self) { - if self.root.is_shared_root() { - self.root = node::Root::new_leaf(); - } - } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1458,12 +1473,21 @@ impl IntoIterator for BTreeMap { type IntoIter = IntoIter; fn into_iter(self) -> IntoIter { - let root1 = unsafe { ptr::read(&self.root).into_ref() }; - let root2 = unsafe { ptr::read(&self.root).into_ref() }; + if self.root.is_none() { + mem::forget(self); + return IntoIter { front: None, back: None, length: 0 }; + } + + let root1 = unsafe { unwrap_unchecked(ptr::read(&self.root)).into_ref() }; + let root2 = unsafe { unwrap_unchecked(ptr::read(&self.root)).into_ref() }; let len = self.length; mem::forget(self); - IntoIter { front: root1.first_leaf_edge(), back: root2.last_leaf_edge(), length: len } + IntoIter { + front: Some(root1.first_leaf_edge()), + back: Some(root2.last_leaf_edge()), + length: len, + } } } @@ -1478,9 +1502,9 @@ impl Drop for IntoIter { // don't have to care about panics this time (they'll abort). while let Some(_) = self.0.next() {} - // No need to avoid the shared root, because the tree was definitely not empty. unsafe { - let mut node = ptr::read(&self.0.front).into_node().forget_type(); + let mut node = + unwrap_unchecked(ptr::read(&self.0.front)).into_node().forget_type(); while let Some(parent) = node.deallocate_and_ascend() { node = parent.into_node().forget_type(); } @@ -1495,14 +1519,13 @@ impl Drop for IntoIter { } unsafe { - let mut node = ptr::read(&self.front).into_node().forget_type(); - if node.is_shared_root() { - return; - } - // Most of the nodes have been deallocated while traversing - // but one pile from a leaf up to the root is left standing. - while let Some(parent) = node.deallocate_and_ascend() { - node = parent.into_node().forget_type(); + if let Some(front) = ptr::read(&self.front) { + let mut node = front.into_node().forget_type(); + // Most of the nodes have been deallocated while traversing + // but one pile from a leaf up to the root is left standing. 
+ while let Some(parent) = node.deallocate_and_ascend() { + node = parent.into_node().forget_type(); + } } } } @@ -1517,7 +1540,7 @@ impl Iterator for IntoIter { None } else { self.length -= 1; - Some(unsafe { self.front.next_unchecked() }) + Some(unsafe { self.front.as_mut().unwrap().next_unchecked() }) } } @@ -1533,7 +1556,7 @@ impl DoubleEndedIterator for IntoIter { None } else { self.length -= 1; - Some(unsafe { self.back.next_back_unchecked() }) + Some(unsafe { self.back.as_mut().unwrap().next_back_unchecked() }) } } } @@ -1683,7 +1706,7 @@ impl<'a, K, V> Range<'a, K, V> { } unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) { - self.front.next_unchecked() + unwrap_unchecked(self.front.as_mut()).next_unchecked() } } @@ -1696,7 +1719,7 @@ impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> { impl<'a, K, V> Range<'a, K, V> { unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) { - self.back.next_back_unchecked() + unwrap_unchecked(self.back.as_mut()).next_back_unchecked() } } @@ -1734,7 +1757,7 @@ impl<'a, K, V> RangeMut<'a, K, V> { } unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) { - self.front.next_unchecked() + unwrap_unchecked(self.front.as_mut()).next_unchecked() } } @@ -1755,7 +1778,7 @@ impl FusedIterator for RangeMut<'_, K, V> {} impl<'a, K, V> RangeMut<'a, K, V> { unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) { - self.back.next_back_unchecked() + unwrap_unchecked(self.back.as_mut()).next_back_unchecked() } } @@ -1969,8 +1992,8 @@ impl BTreeMap { pub fn iter(&self) -> Iter<'_, K, V> { Iter { range: Range { - front: self.root.as_ref().first_leaf_edge(), - back: self.root.as_ref().last_leaf_edge(), + front: self.root.as_ref().map(|r| r.as_ref().first_leaf_edge()), + back: self.root.as_ref().map(|r| r.as_ref().last_leaf_edge()), }, length: self.length, } @@ -1999,13 +2022,17 @@ impl BTreeMap { /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn iter_mut(&mut self) -> IterMut<'_, K, V> { - let root1 = self.root.as_mut(); - let root2 = unsafe { ptr::read(&root1) }; IterMut { - range: RangeMut { - front: root1.first_leaf_edge(), - back: root2.last_leaf_edge(), - _marker: PhantomData, + range: if let Some(root) = &mut self.root { + let root1 = root.as_mut(); + let root2 = unsafe { ptr::read(&root1) }; + RangeMut { + front: Some(root1.first_leaf_edge()), + back: Some(root2.last_leaf_edge()), + _marker: PhantomData, + } + } else { + RangeMut { front: None, back: None, _marker: PhantomData } }, length: self.length, } @@ -2116,6 +2143,12 @@ impl BTreeMap { pub fn is_empty(&self) -> bool { self.len() == 0 } + + /// If the root node is the empty (non-allocated) root node, allocate our + /// own node. + fn ensure_root_is_owned(&mut self) -> &mut node::Root { + self.root.get_or_insert_with(|| node::Root::new_leaf()) + } } impl<'a, K: Ord, V> Entry<'a, K, V> { diff --git a/src/liballoc/collections/btree/node.rs b/src/liballoc/collections/btree/node.rs index 1132ffdaf8005..6ebb98c42cd4f 100644 --- a/src/liballoc/collections/btree/node.rs +++ b/src/liballoc/collections/btree/node.rs @@ -44,34 +44,7 @@ const B: usize = 6; pub const MIN_LEN: usize = B - 1; pub const CAPACITY: usize = 2 * B - 1; -/// The underlying representation of leaf nodes. Note that it is often unsafe to actually store -/// these, since only the first `len` keys and values are assumed to be initialized. As such, -/// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned -/// case. 
-/// -/// We have a separate type for the header and rely on it matching the prefix of `LeafNode`, in -/// order to statically allocate a single dummy node to avoid allocations. This struct is -/// `repr(C)` to prevent them from being reordered. `LeafNode` does not just contain a -/// `NodeHeader` because we do not want unnecessary padding between `len` and the keys. -/// Crucially, `NodeHeader` can be safely transmuted to different K and V. (This is exploited -/// by `as_header`.) -#[repr(C)] -struct NodeHeader { - /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`. - /// This either points to an actual node or is null. - parent: *const InternalNode, - - /// This node's index into the parent node's `edges` array. - /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`. - /// This is only guaranteed to be initialized when `parent` is non-null. - parent_idx: MaybeUninit, - - /// The number of keys and values this node stores. - /// - /// This next to `parent_idx` to encourage the compiler to join `len` and - /// `parent_idx` into the same 32-bit word, reducing space overhead. - len: u16, -} +/// The underlying representation of leaf nodes. #[repr(C)] struct LeafNode { /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`. @@ -111,21 +84,6 @@ impl LeafNode { } } -impl NodeHeader { - fn is_shared_root(&self) -> bool { - ptr::eq(self, &EMPTY_ROOT_NODE as *const _ as *const _) - } -} - -// We need to implement Sync here in order to make a static instance. -unsafe impl Sync for NodeHeader<(), ()> {} - -// An empty node used as a placeholder for the root node, to avoid allocations. -// We use just a header in order to save space, since no operation on an empty tree will -// ever take a pointer past the first key. -static EMPTY_ROOT_NODE: NodeHeader<(), ()> = - NodeHeader { parent: ptr::null(), parent_idx: MaybeUninit::uninit(), len: 0 }; - /// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden /// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an /// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the @@ -154,12 +112,9 @@ impl InternalNode { } /// A managed, non-null pointer to a node. This is either an owned pointer to -/// `LeafNode`, an owned pointer to `InternalNode`, or a (not owned) -/// pointer to `NodeHeader<(), ()` (more specifically, the pointer to EMPTY_ROOT_NODE). -/// All of these types have a `NodeHeader` prefix, meaning that they have at -/// least the same size as `NodeHeader` and store the same kinds of data at the same -/// offsets; and they have a pointer alignment at least as large as `NodeHeader`'s. -/// However, `BoxedNode` contains no information as to which of the three types +/// `LeafNode` or an owned pointer to `InternalNode`. +/// +/// However, `BoxedNode` contains no information as to which of the two types /// of nodes it actually contains, and, partially due to this lack of information, /// has no destructor. struct BoxedNode { @@ -184,8 +139,9 @@ impl BoxedNode { } } -/// Either an owned tree or a shared, empty tree. Note that this does not have a destructor, -/// and must be cleaned up manually if it is an owned tree. +/// An owned tree. +/// +/// Note that this does not have a destructor, and must be cleaned up manually. pub struct Root { node: BoxedNode, /// The number of levels below the root node. 
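The rewritten `Root` documentation above stresses that the type has no destructor and must be cleaned up manually. A minimal sketch of that contract, assuming an invented `RawRoot` type (a `NonNull<i32>` stands in for `BoxedNode`; none of these names exist in node.rs):

```rust
// Sketch only: an owning handle without a `Drop` impl, which its owner must
// tear down explicitly (as `BTreeMap`'s `Drop` and `IntoIter` do for `Root`).
use std::ptr::NonNull;

struct RawRoot {
    node: NonNull<i32>, // always points at a live allocation
}

impl RawRoot {
    fn new_leaf(value: i32) -> RawRoot {
        // Allocate the node and give up the `Box`, so nothing frees it implicitly.
        let ptr = Box::into_raw(Box::new(value));
        RawRoot { node: unsafe { NonNull::new_unchecked(ptr) } }
    }

    fn deallocate(self) {
        // No destructor runs on its own: the owner must call this exactly once,
        // otherwise the node leaks.
        unsafe { drop(Box::from_raw(self.node.as_ptr())) };
    }
}

fn main() {
    let root = RawRoot::new_leaf(42);
    // ... the tree would be used here ...
    root.deallocate(); // forgetting this call leaks the allocation
}
```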
@@ -196,20 +152,6 @@ unsafe impl Sync for Root {} unsafe impl Send for Root {} impl Root { - /// Whether the instance of `Root` wraps a shared, empty root node. If not, - /// the entire tree is uniquely owned by the owner of the `Root` instance. - pub fn is_shared_root(&self) -> bool { - self.as_ref().is_shared_root() - } - - /// Returns a shared tree, wrapping a shared root node that is eternally empty. - pub fn shared_empty_root() -> Self { - Root { - node: unsafe { BoxedNode::from_ptr(NonNull::from(&EMPTY_ROOT_NODE).cast()) }, - height: 0, - } - } - /// Returns a new owned tree, with its own root node that is initially empty. pub fn new_leaf() -> Self { Root { node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })), height: 0 } @@ -245,7 +187,6 @@ impl Root { /// Adds a new internal node with a single edge, pointing to the previous root, and make that /// new node the root. This increases the height by 1 and is the opposite of `pop_level`. pub fn push_level(&mut self) -> NodeRef, K, V, marker::Internal> { - debug_assert!(!self.is_shared_root()); let mut new_node = Box::new(unsafe { InternalNode::new() }); new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) }); @@ -308,11 +249,6 @@ impl Root { /// `Leaf`, the `NodeRef` points to a leaf node, when this is `Internal` the /// `NodeRef` points to an internal node, and when this is `LeafOrInternal` the /// `NodeRef` could be pointing to either type of node. -/// Note that in case of a leaf node, this might still be the shared root! -/// Only turn this into a `LeafNode` reference if you know it is not the shared root! -/// Shared references must be dereferenceable *for the entire size of their pointee*, -/// so '&LeafNode` or `&InternalNode` pointing to the shared root is undefined behavior. -/// Turning this into a `NodeHeader` reference is always safe. pub struct NodeRef { /// The number of levels below the node. height: usize, @@ -354,7 +290,7 @@ impl NodeRef { /// Note that, despite being safe, calling this function can have the side effect /// of invalidating mutable references that unsafe code has created. pub fn len(&self) -> usize { - self.as_header().len as usize + self.as_leaf().len as usize } /// Returns the height of this node in the whole tree. Zero height denotes the @@ -374,35 +310,24 @@ impl NodeRef { NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData } } - /// Exposes the leaf "portion" of any leaf or internal node that is not the shared root. + /// Exposes the leaf "portion" of any leaf or internal node. /// If the node is a leaf, this function simply opens up its data. /// If the node is an internal node, so not a leaf, it does have all the data a leaf has /// (header, keys and values), and this function exposes that. - /// Unsafe because the node must not be the shared root. For more information, - /// see the `NodeRef` comments. - unsafe fn as_leaf(&self) -> &LeafNode { - debug_assert!(!self.is_shared_root()); - self.node.as_ref() - } - - fn as_header(&self) -> &NodeHeader { - unsafe { &*(self.node.as_ptr() as *const NodeHeader) } - } - - /// Returns whether the node is the shared, empty root. - pub fn is_shared_root(&self) -> bool { - self.as_header().is_shared_root() + fn as_leaf(&self) -> &LeafNode { + // The node must be valid for at least the LeafNode portion. + // This is not a reference in the NodeRef type because we don't know if + // it should be unique or shared. + unsafe { self.node.as_ref() } } /// Borrows a view into the keys stored in the node. 
- /// Unsafe because the caller must ensure that the node is not the shared root. - pub unsafe fn keys(&self) -> &[K] { + pub fn keys(&self) -> &[K] { self.reborrow().into_key_slice() } /// Borrows a view into the values stored in the node. - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn vals(&self) -> &[V] { + fn vals(&self) -> &[V] { self.reborrow().into_val_slice() } @@ -416,7 +341,7 @@ impl NodeRef { pub fn ascend( self, ) -> Result, marker::Edge>, Self> { - let parent_as_leaf = self.as_header().parent as *const LeafNode; + let parent_as_leaf = self.as_leaf().parent as *const LeafNode; if let Some(non_zero) = NonNull::new(parent_as_leaf as *mut _) { Ok(Handle { node: NodeRef { @@ -425,7 +350,7 @@ impl NodeRef { root: self.root, _marker: PhantomData, }, - idx: unsafe { usize::from(*self.as_header().parent_idx.as_ptr()) }, + idx: unsafe { usize::from(*self.as_leaf().parent_idx.as_ptr()) }, _marker: PhantomData, }) } else { @@ -464,7 +389,6 @@ impl NodeRef { pub unsafe fn deallocate_and_ascend( self, ) -> Option, marker::Edge>> { - assert!(!self.is_shared_root()); let height = self.height; let node = self.node; let ret = self.ascend().ok(); @@ -507,41 +431,37 @@ impl<'a, K, V, Type> NodeRef, K, V, Type> { /// (header, keys and values), and this function exposes that. /// /// Returns a raw ptr to avoid asserting exclusive access to the entire node. - /// This also implies you can invoke this member on the shared root, but the resulting pointer - /// might not be properly aligned and definitely would not allow accessing keys and values. fn as_leaf_mut(&mut self) -> *mut LeafNode { self.node.as_ptr() } - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn keys_mut(&mut self) -> &mut [K] { - self.reborrow_mut().into_key_slice_mut() + fn keys_mut(&mut self) -> &mut [K] { + // SAFETY: the caller will not be able to call further methods on self + // until the key slice reference is dropped, as we have unique access + // for the lifetime of the borrow. + unsafe { self.reborrow_mut().into_key_slice_mut() } } - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn vals_mut(&mut self) -> &mut [V] { - self.reborrow_mut().into_val_slice_mut() + fn vals_mut(&mut self) -> &mut [V] { + // SAFETY: the caller will not be able to call further methods on self + // until the value slice reference is dropped, as we have unique access + // for the lifetime of the borrow. + unsafe { self.reborrow_mut().into_val_slice_mut() } } } impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn into_key_slice(self) -> &'a [K] { - debug_assert!(!self.is_shared_root()); - // We cannot be the shared root, so `as_leaf` is okay. - slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().keys), self.len()) + fn into_key_slice(self) -> &'a [K] { + unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().keys), self.len()) } } - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn into_val_slice(self) -> &'a [V] { - debug_assert!(!self.is_shared_root()); - // We cannot be the shared root, so `as_leaf` is okay. 
- slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().vals), self.len()) + fn into_val_slice(self) -> &'a [V] { + unsafe { slice::from_raw_parts(MaybeUninit::first_ptr(&self.as_leaf().vals), self.len()) } } - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn into_slices(self) -> (&'a [K], &'a [V]) { - let k = ptr::read(&self); + fn into_slices(self) -> (&'a [K], &'a [V]) { + // SAFETY: equivalent to reborrow() except not requiring Type: 'a + let k = unsafe { ptr::read(&self) }; (k.into_key_slice(), self.into_val_slice()) } } @@ -553,28 +473,27 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { unsafe { &mut *(self.root as *mut Root) } } - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn into_key_slice_mut(mut self) -> &'a mut [K] { - debug_assert!(!self.is_shared_root()); - // We cannot be the shared root, so `as_leaf_mut` is okay. - slice::from_raw_parts_mut( - MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).keys), - self.len(), - ) + fn into_key_slice_mut(mut self) -> &'a mut [K] { + // SAFETY: The keys of a node must always be initialized up to length. + unsafe { + slice::from_raw_parts_mut( + MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).keys), + self.len(), + ) + } } - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn into_val_slice_mut(mut self) -> &'a mut [V] { - debug_assert!(!self.is_shared_root()); - slice::from_raw_parts_mut( - MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).vals), - self.len(), - ) + fn into_val_slice_mut(mut self) -> &'a mut [V] { + // SAFETY: The values of a node must always be initialized up to length. + unsafe { + slice::from_raw_parts_mut( + MaybeUninit::first_ptr_mut(&mut (*self.as_leaf_mut()).vals), + self.len(), + ) + } } - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) { - debug_assert!(!self.is_shared_root()); + fn into_slices_mut(mut self) -> (&'a mut [K], &'a mut [V]) { // We cannot use the getters here, because calling the second one // invalidates the reference returned by the first. // More precisely, it is the call to `len` that is the culprit, @@ -582,8 +501,13 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { // overlap with the keys (and even the values, for ZST keys). let len = self.len(); let leaf = self.as_leaf_mut(); - let keys = slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).keys), len); - let vals = slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).vals), len); + // SAFETY: The keys and values of a node must always be initialized up to length. + let keys = unsafe { + slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).keys), len) + }; + let vals = unsafe { + slice::from_raw_parts_mut(MaybeUninit::first_ptr_mut(&mut (*leaf).vals), len) + }; (keys, vals) } } @@ -592,7 +516,6 @@ impl<'a, K, V> NodeRef, K, V, marker::Leaf> { /// Adds a key/value pair the end of the node. pub fn push(&mut self, key: K, val: V) { assert!(self.len() < CAPACITY); - debug_assert!(!self.is_shared_root()); let idx = self.len(); @@ -607,7 +530,6 @@ impl<'a, K, V> NodeRef, K, V, marker::Leaf> { /// Adds a key/value pair to the beginning of the node. 
pub fn push_front(&mut self, key: K, val: V) { assert!(self.len() < CAPACITY); - debug_assert!(!self.is_shared_root()); unsafe { slice_insert(self.keys_mut(), 0, key); @@ -624,7 +546,6 @@ impl<'a, K, V> NodeRef, K, V, marker::Internal> { pub fn push(&mut self, key: K, val: V, edge: Root) { assert!(edge.height == self.height - 1); assert!(self.len() < CAPACITY); - debug_assert!(!self.is_shared_root()); let idx = self.len(); @@ -658,7 +579,6 @@ impl<'a, K, V> NodeRef, K, V, marker::Internal> { pub fn push_front(&mut self, key: K, val: V, edge: Root) { assert!(edge.height == self.height - 1); assert!(self.len() < CAPACITY); - debug_assert!(!self.is_shared_root()); unsafe { slice_insert(self.keys_mut(), 0, key); @@ -744,8 +664,7 @@ impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { } } - /// Unsafe because the caller must ensure that the node is not the shared root. - unsafe fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) { + fn into_kv_pointers_mut(mut self) -> (*mut K, *mut V) { (self.keys_mut().as_mut_ptr(), self.vals_mut().as_mut_ptr()) } } @@ -904,7 +823,6 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge fn insert_fit(&mut self, key: K, val: V) -> *mut V { // Necessary for correctness, but in a private module debug_assert!(self.node.len() < CAPACITY); - debug_assert!(!self.node.is_shared_root()); unsafe { slice_insert(self.node.keys_mut(), self.idx, key); @@ -1081,7 +999,6 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::KV> /// - All the key/value pairs to the right of this handle are put into a newly /// allocated node. pub fn split(mut self) -> (NodeRef, K, V, marker::Leaf>, K, V, Root) { - assert!(!self.node.is_shared_root()); unsafe { let mut new_node = Box::new(LeafNode::new()); @@ -1113,7 +1030,6 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::KV> pub fn remove( mut self, ) -> (Handle, K, V, marker::Leaf>, marker::Edge>, K, V) { - assert!(!self.node.is_shared_root()); unsafe { let k = slice_remove(self.node.keys_mut(), self.idx); let v = slice_remove(self.node.vals_mut(), self.idx); diff --git a/src/liballoc/collections/btree/search.rs b/src/liballoc/collections/btree/search.rs index 2ba5cebbdee74..c7cae019a26ca 100644 --- a/src/liballoc/collections/btree/search.rs +++ b/src/liballoc/collections/btree/search.rs @@ -67,13 +67,12 @@ where Q: Ord, K: Borrow, { - // This function is defined over all borrow types (immutable, mutable, owned), - // and may be called on the shared root in each case. + // This function is defined over all borrow types (immutable, mutable, owned). // Using `keys()` is fine here even if BorrowType is mutable, as all we return // is an index -- not a reference. let len = node.len(); if len > 0 { - let keys = unsafe { node.keys() }; // safe because a non-empty node cannot be the shared root + let keys = node.keys(); for (i, k) in keys.iter().enumerate() { match key.cmp(k.borrow()) { Ordering::Greater => {} diff --git a/src/libcore/mem/mod.rs b/src/libcore/mem/mod.rs index 1cf2b40e93068..dac9ee6a5d91e 100644 --- a/src/libcore/mem/mod.rs +++ b/src/libcore/mem/mod.rs @@ -58,7 +58,9 @@ pub use crate::intrinsics::transmute; /// /// # Examples /// -/// Leak an I/O object, never closing the file: +/// The canonical safe use of `mem::forget` is to circumvent a value's destructor +/// implemented by the `Drop` trait. For example, this will leak a `File`, i.e. 
reclaim +/// the space taken by the variable but never close the underlying system resource: /// /// ```no_run /// use std::mem; @@ -68,9 +70,40 @@ pub use crate::intrinsics::transmute; /// mem::forget(file); /// ``` /// -/// The practical use cases for `forget` are rather specialized and mainly come -/// up in unsafe or FFI code. However, [`ManuallyDrop`] is usually preferred -/// for such cases, e.g.: +/// This is useful when the ownership of the underlying resource was previously +/// transferred to code outside of Rust, for example by transmitting the raw +/// file descriptor to C code. +/// +/// # Relationship with `ManuallyDrop` +/// +/// While `mem::forget` can also be used to transfer *memory* ownership, doing so is error-prone. +/// [`ManuallyDrop`] should be used instead. Consider, for example, this code: +/// +/// ``` +/// use std::mem; +/// +/// let mut v = vec![65, 122]; +/// // Build a `String` using the contents of `v` +/// let s = unsafe { String::from_raw_parts(v.as_mut_ptr(), v.len(), v.capacity()) }; +/// // leak `v` because its memory is now managed by `s` +/// mem::forget(v); // ERROR - v is invalid and must not be passed to a function +/// assert_eq!(s, "Az"); +/// // `s` is implicitly dropped and its memory deallocated. +/// ``` +/// +/// There are two issues with the above example: +/// +/// * If more code were added between the construction of `String` and the invocation of +/// `mem::forget()`, a panic within it would cause a double free because the same memory +/// is handled by both `v` and `s`. +/// * After calling `v.as_mut_ptr()` and transmitting the ownership of the data to `s`, +/// the `v` value is invalid. Even when a value is just moved to `mem::forget` (which won't +/// inspect it), some types have strict requirements on their values that +/// make them invalid when dangling or no longer owned. Using invalid values in any +/// way, including passing them to or returning them from functions, constitutes +/// undefined behavior and may break the assumptions made by the compiler. +/// +/// Switching to `ManuallyDrop` avoids both issues: /// /// ``` /// use std::mem::ManuallyDrop; @@ -80,24 +113,24 @@ pub use crate::intrinsics::transmute; /// // does not get dropped! /// let mut v = ManuallyDrop::new(v); /// // Now disassemble `v`. These operations cannot panic, so there cannot be a leak. -/// let ptr = v.as_mut_ptr(); -/// let cap = v.capacity(); +/// let (ptr, len, cap) = (v.as_mut_ptr(), v.len(), v.capacity()); /// // Finally, build a `String`. -/// let s = unsafe { String::from_raw_parts(ptr, 2, cap) }; +/// let s = unsafe { String::from_raw_parts(ptr, len, cap) }; /// assert_eq!(s, "Az"); /// // `s` is implicitly dropped and its memory deallocated. /// ``` /// -/// Using `ManuallyDrop` here has two advantages: +/// `ManuallyDrop` robustly prevents double-free because we disable `v`'s destructor +/// before doing anything else. `mem::forget()` doesn't allow this because it consumes its +/// argument, forcing us to call it only after extracting anything we need from `v`. Even +/// if a panic were introduced between construction of `ManuallyDrop` and building the +/// string (which cannot happen in the code as shown), it would result in a leak and not a +/// double free. In other words, `ManuallyDrop` errs on the side of leaking instead of +/// erring on the side of (double-)dropping. /// -/// * We do not "touch" `v` after disassembling it. 
For some types, operations -/// such as passing ownership (to a function like `mem::forget`) requires them to actually -/// be fully owned right now; that is a promise we do not want to make here as we are -/// in the process of transferring ownership to the new `String` we are building. -/// * In case of an unexpected panic, `ManuallyDrop` is not dropped, but if the panic -/// occurs before `mem::forget` was called we might end up dropping invalid data, -/// or double-dropping. In other words, `ManuallyDrop` errs on the side of leaking -/// instead of erring on the side of dropping. +/// Also, `ManuallyDrop` prevents us from having to "touch" `v` after transferring the +/// ownership to `s` - the final step of interacting with `v` to dispose of it without +/// running its destructor is entirely avoided. /// /// [drop]: fn.drop.html /// [uninit]: fn.uninitialized.html diff --git a/src/libpanic_unwind/hermit.rs b/src/libpanic_unwind/hermit.rs index 6bded4dd499bd..69b9edb77c564 100644 --- a/src/libpanic_unwind/hermit.rs +++ b/src/libpanic_unwind/hermit.rs @@ -4,7 +4,6 @@ use alloc::boxed::Box; use core::any::Any; -use core::ptr; pub unsafe fn cleanup(_ptr: *mut u8) -> Box { extern "C" { diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 742d57fb58a51..c6b43022b775d 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -1323,7 +1323,7 @@ impl<'tcx> TyCtxt<'tcx> { } pub fn encode_metadata(self) -> EncodedMetadata { - let _prof_timer = self.prof.generic_activity("generate_crate_metadata"); + let _prof_timer = self.prof.verbose_generic_activity("generate_crate_metadata"); self.cstore.encode_metadata(self) } diff --git a/src/librustc_error_codes/error_codes/E0437.md b/src/librustc_error_codes/error_codes/E0437.md index 834cf33dbc7f0..0f924ba692064 100644 --- a/src/librustc_error_codes/error_codes/E0437.md +++ b/src/librustc_error_codes/error_codes/E0437.md @@ -1,7 +1,5 @@ -Trait implementations can only implement associated types that are members of -the trait in question. This error indicates that you attempted to implement -an associated type whose name does not match the name of any associated type -in the trait. +An associated type whose name does not match any of the associated types +in the trait was used when implementing the trait. Erroneous code example: ``` impl Foo for i32 { } ``` +Trait implementations can only implement associated types that are members of +the trait in question. The solution to this problem is to remove the extraneous associated type: ``` diff --git a/src/librustc_error_codes/error_codes/E0438.md b/src/librustc_error_codes/error_codes/E0438.md index cb141a5d24aed..13723bc30090e 100644 --- a/src/librustc_error_codes/error_codes/E0438.md +++ b/src/librustc_error_codes/error_codes/E0438.md @@ -1,7 +1,5 @@ -Trait implementations can only implement associated constants that are -members of the trait in question. This error indicates that you -attempted to implement an associated constant whose name does not -match the name of any associated constant in the trait. +An associated constant whose name does not match any of the associated constants +in the trait was used when implementing the trait. Erroneous code example: ``` impl Foo for i32 { } ``` +Trait implementations can only implement associated constants that are +members of the trait in question.
+ The solution to this problem is to remove the extraneous associated constant: ``` diff --git a/src/librustc_metadata/rmeta/encoder.rs b/src/librustc_metadata/rmeta/encoder.rs index 6280fd62de9a8..5963047fc760d 100644 --- a/src/librustc_metadata/rmeta/encoder.rs +++ b/src/librustc_metadata/rmeta/encoder.rs @@ -18,12 +18,13 @@ use rustc_ast::attr; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::stable_hasher::StableHasher; -use rustc_data_structures::sync::Lrc; +use rustc_data_structures::sync::{join, Lrc}; use rustc_hir as hir; use rustc_hir::def::CtorKind; +use rustc_hir::def_id::DefIdSet; use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor}; -use rustc_hir::itemlikevisit::ItemLikeVisitor; +use rustc_hir::itemlikevisit::{ItemLikeVisitor, ParItemLikeVisitor}; use rustc_hir::{AnonConst, GenericParamKind}; use rustc_index::vec::Idx; use rustc_serialize::{opaque, Encodable, Encoder, SpecializedEncoder}; @@ -467,12 +468,6 @@ impl<'tcx> EncodeContext<'tcx> { let impls = self.encode_impls(); let impl_bytes = self.position() - i; - // Encode exported symbols info. - i = self.position(); - let exported_symbols = self.tcx.exported_symbols(LOCAL_CRATE); - let exported_symbols = self.encode_exported_symbols(&exported_symbols); - let exported_symbols_bytes = self.position() - i; - let tcx = self.tcx; // Encode the items. @@ -513,6 +508,13 @@ impl<'tcx> EncodeContext<'tcx> { let proc_macro_data = self.encode_proc_macros(); let proc_macro_data_bytes = self.position() - i; + // Encode exported symbols info. This is prefetched in `encode_metadata` so we encode + // this last to give the prefetching as much time as possible to complete. + i = self.position(); + let exported_symbols = self.tcx.exported_symbols(LOCAL_CRATE); + let exported_symbols = self.encode_exported_symbols(&exported_symbols); + let exported_symbols_bytes = self.position() - i; + let attrs = tcx.hir().krate_attrs(); let has_default_lib_allocator = attr::contains_name(&attrs, sym::default_lib_allocator); @@ -888,6 +890,8 @@ impl EncodeContext<'tcx> { self.encode_generics(def_id); self.encode_explicit_predicates(def_id); self.encode_inferred_outlives(def_id); + + // This should be kept in sync with `PrefetchVisitor.visit_trait_item`. self.encode_optimized_mir(def_id); self.encode_promoted_mir(def_id); } @@ -959,6 +963,9 @@ impl EncodeContext<'tcx> { self.encode_generics(def_id); self.encode_explicit_predicates(def_id); self.encode_inferred_outlives(def_id); + + // The following part should be kept in sync with `PrefetchVisitor.visit_impl_item`. + let mir = match ast_item.kind { hir::ImplItemKind::Const(..) => true, hir::ImplItemKind::Fn(ref sig, _) => { @@ -1250,6 +1257,8 @@ impl EncodeContext<'tcx> { _ => {} } + // The following part should be kept in sync with `PrefetchVisitor.visit_item`. + let mir = match item.kind { hir::ItemKind::Static(..) | hir::ItemKind::Const(..) => true, hir::ItemKind::Fn(ref sig, ..) => { @@ -1697,6 +1706,70 @@ impl<'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'tcx> { } } +/// Used to prefetch queries which will be needed later by metadata encoding. +/// Only a subset of the queries are actually prefetched to keep this code smaller. 
+struct PrefetchVisitor<'tcx> { + tcx: TyCtxt<'tcx>, + mir_keys: &'tcx DefIdSet, +} + +impl<'tcx> PrefetchVisitor<'tcx> { + fn prefetch_mir(&self, def_id: DefId) { + if self.mir_keys.contains(&def_id) { + self.tcx.optimized_mir(def_id); + self.tcx.promoted_mir(def_id); + } + } +} + +impl<'tcx, 'v> ParItemLikeVisitor<'v> for PrefetchVisitor<'tcx> { + fn visit_item(&self, item: &hir::Item<'_>) { + // This should be kept in sync with `encode_info_for_item`. + let tcx = self.tcx; + match item.kind { + hir::ItemKind::Static(..) | hir::ItemKind::Const(..) => { + self.prefetch_mir(tcx.hir().local_def_id(item.hir_id)) + } + hir::ItemKind::Fn(ref sig, ..) => { + let def_id = tcx.hir().local_def_id(item.hir_id); + let generics = tcx.generics_of(def_id); + let needs_inline = generics.requires_monomorphization(tcx) + || tcx.codegen_fn_attrs(def_id).requests_inline(); + if needs_inline || sig.header.constness == hir::Constness::Const { + self.prefetch_mir(def_id) + } + } + _ => (), + } + } + + fn visit_trait_item(&self, trait_item: &'v hir::TraitItem<'v>) { + // This should be kept in sync with `encode_info_for_trait_item`. + self.prefetch_mir(self.tcx.hir().local_def_id(trait_item.hir_id)); + } + + fn visit_impl_item(&self, impl_item: &'v hir::ImplItem<'v>) { + // This should be kept in sync with `encode_info_for_impl_item`. + let tcx = self.tcx; + match impl_item.kind { + hir::ImplItemKind::Const(..) => { + self.prefetch_mir(tcx.hir().local_def_id(impl_item.hir_id)) + } + hir::ImplItemKind::Fn(ref sig, _) => { + let def_id = tcx.hir().local_def_id(impl_item.hir_id); + let generics = tcx.generics_of(def_id); + let needs_inline = generics.requires_monomorphization(tcx) + || tcx.codegen_fn_attrs(def_id).requests_inline(); + let is_const_fn = sig.header.constness == hir::Constness::Const; + if needs_inline || is_const_fn { + self.prefetch_mir(def_id) + } + } + hir::ImplItemKind::OpaqueTy(..) | hir::ImplItemKind::TyAlias(..) => (), + } + } +} + // NOTE(eddyb) The following comment was preserved for posterity, even // though it's no longer relevant as EBML (which uses nested & tagged // "documents") was replaced with a scheme that can't go out of bounds. @@ -1721,35 +1794,64 @@ impl<'tcx, 'v> ItemLikeVisitor<'v> for ImplVisitor<'tcx> { // generated regardless of trailing bytes that end up in it. pub(super) fn encode_metadata(tcx: TyCtxt<'_>) -> EncodedMetadata { + // Since encoding metadata is not in a query, and nothing is cached, + // there's no need to do dep-graph tracking for any of it. + tcx.dep_graph.assert_ignored(); + + join( + || encode_metadata_impl(tcx), + || { + if tcx.sess.threads() == 1 { + return; + } + // Prefetch some queries used by metadata encoding. + // This is not necessary for correctness, but is only done for performance reasons. + // It can be removed if it turns out to cause trouble or be detrimental to performance. + join( + || { + if !tcx.sess.opts.output_types.should_codegen() { + // We won't emit MIR, so don't prefetch it. + return; + } + tcx.hir().krate().par_visit_all_item_likes(&PrefetchVisitor { + tcx, + mir_keys: tcx.mir_keys(LOCAL_CRATE), + }); + }, + || tcx.exported_symbols(LOCAL_CRATE), + ); + }, + ) + .0 +} + +fn encode_metadata_impl(tcx: TyCtxt<'_>) -> EncodedMetadata { let mut encoder = opaque::Encoder::new(vec![]); encoder.emit_raw_bytes(METADATA_HEADER); // Will be filled with the root position after encoding everything. 
encoder.emit_raw_bytes(&[0, 0, 0, 0]); - // Since encoding metadata is not in a query, and nothing is cached, - // there's no need to do dep-graph tracking for any of it. - let (root, mut result) = tcx.dep_graph.with_ignore(move || { - let mut ecx = EncodeContext { - opaque: encoder, - tcx, - per_def: Default::default(), - lazy_state: LazyState::NoNode, - type_shorthands: Default::default(), - predicate_shorthands: Default::default(), - source_file_cache: tcx.sess.source_map().files()[0].clone(), - interpret_allocs: Default::default(), - interpret_allocs_inverse: Default::default(), - }; - - // Encode the rustc version string in a predictable location. - rustc_version().encode(&mut ecx).unwrap(); - - // Encode all the entries and extra information in the crate, - // culminating in the `CrateRoot` which points to all of it. - let root = ecx.encode_crate_root(); - (root, ecx.opaque.into_inner()) - }); + let mut ecx = EncodeContext { + opaque: encoder, + tcx, + per_def: Default::default(), + lazy_state: LazyState::NoNode, + type_shorthands: Default::default(), + predicate_shorthands: Default::default(), + source_file_cache: tcx.sess.source_map().files()[0].clone(), + interpret_allocs: Default::default(), + interpret_allocs_inverse: Default::default(), + }; + + // Encode the rustc version string in a predictable location. + rustc_version().encode(&mut ecx).unwrap(); + + // Encode all the entries and extra information in the crate, + // culminating in the `CrateRoot` which points to all of it. + let root = ecx.encode_crate_root(); + + let mut result = ecx.opaque.into_inner(); // Encode the root position. let header = METADATA_HEADER.len(); diff --git a/src/librustc_metadata/rmeta/mod.rs b/src/librustc_metadata/rmeta/mod.rs index 05d834e5dee12..448c1610c1368 100644 --- a/src/librustc_metadata/rmeta/mod.rs +++ b/src/librustc_metadata/rmeta/mod.rs @@ -196,7 +196,6 @@ crate struct CrateRoot<'tcx> { source_map: Lazy<[rustc_span::SourceFile]>, def_path_table: Lazy, impls: Lazy<[TraitImpls]>, - exported_symbols: Lazy!([(ExportedSymbol<'tcx>, SymbolExportLevel)]), interpret_alloc_index: Lazy<[u32]>, per_def: LazyPerDefTables<'tcx>, @@ -204,6 +203,8 @@ crate struct CrateRoot<'tcx> { /// The DefIndex's of any proc macros declared by this crate. proc_macro_data: Option>, + exported_symbols: Lazy!([(ExportedSymbol<'tcx>, SymbolExportLevel)]), + compiler_builtins: bool, needs_allocator: bool, needs_panic_runtime: bool, diff --git a/src/libstd/env.rs b/src/libstd/env.rs index af35a5d9b7c47..6aad082a97f9a 100644 --- a/src/libstd/env.rs +++ b/src/libstd/env.rs @@ -723,8 +723,8 @@ pub struct ArgsOs { /// (such as `*` and `?`). On Windows this is not done, and such arguments are /// passed as-is. /// -/// On glibc Linux, arguments are retrieved by placing a function in .init_array. -/// glibc passes argc, argv, and envp to functions in .init_array, as a non-standard extension. +/// On glibc Linux systems, arguments are retrieved by placing a function in ".init_array". +/// Glibc passes argc, argv, and envp to functions in ".init_array", as a non-standard extension. /// This allows `std::env::args` to work even in a `cdylib` or `staticlib`, as it does on macOS /// and Windows. /// @@ -758,8 +758,8 @@ pub fn args() -> Args { /// set to arbitrary text, and it may not even exist, so this property should /// not be relied upon for security purposes. /// -/// On glibc Linux, arguments are retrieved by placing a function in .init_array. 
-/// glibc passes argc, argv, and envp to functions in .init_array, as a non-standard extension. +/// On glibc Linux systems, arguments are retrieved by placing a function in ".init_array". +/// Glibc passes argc, argv, and envp to functions in ".init_array", as a non-standard extension. /// This allows `std::env::args` to work even in a `cdylib` or `staticlib`, as it does on macOS /// and Windows. /// diff --git a/src/test/debuginfo/pretty-std-collections.rs b/src/test/debuginfo/pretty-std-collections.rs index f8997fad9a53f..3d2d88a676d0d 100644 --- a/src/test/debuginfo/pretty-std-collections.rs +++ b/src/test/debuginfo/pretty-std-collections.rs @@ -17,35 +17,43 @@ // gdb-command: print btree_set // gdb-check:$1 = BTreeSet(len: 15) = {0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14} +// gdb-command: print empty_btree_set +// gdb-check:$2 = BTreeSet(len: 0) + // gdb-command: print btree_map -// gdb-check:$2 = BTreeMap(len: 15) = {[0] = 0, [1] = 1, [2] = 2, [3] = 3, [4] = 4, [5] = 5, [6] = 6, [7] = 7, [8] = 8, [9] = 9, [10] = 10, [11] = 11, [12] = 12, [13] = 13, [14] = 14} +// gdb-check:$3 = BTreeMap(len: 15) = {[0] = 0, [1] = 1, [2] = 2, [3] = 3, [4] = 4, [5] = 5, [6] = 6, [7] = 7, [8] = 8, [9] = 9, [10] = 10, [11] = 11, [12] = 12, [13] = 13, [14] = 14} + +// gdb-command: print empty_btree_map +// gdb-check:$4 = BTreeMap(len: 0) // gdb-command: print vec_deque -// gdb-check:$3 = VecDeque(len: 3, cap: 8) = {5, 3, 7} +// gdb-check:$5 = VecDeque(len: 3, cap: 8) = {5, 3, 7} // gdb-command: print vec_deque2 -// gdb-check:$4 = VecDeque(len: 7, cap: 8) = {2, 3, 4, 5, 6, 7, 8} +// gdb-check:$6 = VecDeque(len: 7, cap: 8) = {2, 3, 4, 5, 6, 7, 8} #![allow(unused_variables)] -use std::collections::BTreeSet; use std::collections::BTreeMap; +use std::collections::BTreeSet; use std::collections::VecDeque; - fn main() { - // BTreeSet let mut btree_set = BTreeSet::new(); for i in 0..15 { btree_set.insert(i); } + let mut empty_btree_set: BTreeSet = BTreeSet::new(); + // BTreeMap let mut btree_map = BTreeMap::new(); for i in 0..15 { btree_map.insert(i, i); } + let mut empty_btree_map: BTreeMap = BTreeMap::new(); + // VecDeque let mut vec_deque = VecDeque::new(); vec_deque.push_back(5); @@ -63,4 +71,6 @@ fn main() { zzz(); // #break } -fn zzz() { () } +fn zzz() { + () +} diff --git a/src/test/ui/type-alias-impl-trait/issue-55099-lifetime-inference.rs b/src/test/ui/type-alias-impl-trait/issue-55099-lifetime-inference.rs new file mode 100644 index 0000000000000..8e8508cdd6f30 --- /dev/null +++ b/src/test/ui/type-alias-impl-trait/issue-55099-lifetime-inference.rs @@ -0,0 +1,28 @@ +// check-pass +// Regression test for issue #55099 +// Tests that we don't incorrectly consider a lifetime to be part +// of the concrete type + +#![feature(type_alias_impl_trait)] + +trait Future { +} + +struct AndThen(F); + +impl Future for AndThen { +} + +struct Foo<'a> { + x: &'a mut (), +} + +type F = impl Future; + +impl<'a> Foo<'a> { + fn reply(&mut self) -> F { + AndThen(|| ()) + } +} + +fn main() {} diff --git a/triagebot.toml b/triagebot.toml index ec32771583334..2476f414e0e03 100644 --- a/triagebot.toml +++ b/triagebot.toml @@ -23,7 +23,7 @@ Thanks! <3 label = "ICEBreaker-LLVM" [ping.icebreakers-cleanup-crew] -alias = ["cleanup", "cleanups", "shrink", "reduce", "bisect"] +alias = ["cleanup", "cleanups", "cleanup-crew", "shrink", "reduce", "bisect"] message = """\ Hey Cleanup Crew ICE-breakers! This bug has been identified as a good "Cleanup ICE-breaking candidate". In case it's useful, here are some