Rollup merge of rust-lang#72709 - LeSeulArtichaut:unsafe-liballoc, r=nikomatsakis

`#[deny(unsafe_op_in_unsafe_fn)]` in liballoc

This PR proposes to make use of the new `unsafe_op_in_unsafe_fn` lint, i.e. to no longer consider the body of an unsafe function as an implicit unsafe block, and to require an explicit `unsafe` block to perform unsafe operations.
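For illustration, here is a minimal sketch of what the lint enforces. The crate attribute placement and the `read_byte` function are hypothetical examples, not code from this PR, and at the time of this commit the lint was still feature-gated:

```rust
// Hypothetical example, not taken from this PR.
#![deny(unsafe_op_in_unsafe_fn)]

/// Reads the byte that `ptr` points to.
///
/// # Safety
/// `ptr` must be non-null, properly aligned, and valid for reads.
pub unsafe fn read_byte(ptr: *const u8) -> u8 {
    // Previously the entire body of an `unsafe fn` counted as an unsafe
    // block, so a bare `*ptr` compiled. With the lint denied, the unsafe
    // operation must be wrapped in its own explicit block.
    // SAFETY: the caller upholds the preconditions documented above.
    unsafe { *ptr }
}
```

The diff below applies this pattern throughout liballoc: each unsafe operation inside an `unsafe fn` body is wrapped in an explicit `unsafe { ... }` block, while the function signatures themselves are unchanged.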

This was first (partly) suggested by @Mark-Simulacrum in rust-lang#69245 (comment).

Tracking issue for the feature: rust-lang#71668.
~~Blocked on rust-lang#71862.~~
r? @Mark-Simulacrum cc @nikomatsakis: can you confirm that these changes are desirable? Should I restrict it to only BTree for the moment?
Manishearth authored Jun 19, 2020
2 parents 85e1c3b + 7b63986, commit 55479de
Showing 19 changed files with 387 additions and 257 deletions.
45 changes: 28 additions & 17 deletions src/liballoc/alloc.rs
@@ -77,7 +77,7 @@ pub struct Global;
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn alloc(layout: Layout) -> *mut u8 {
__rust_alloc(layout.size(), layout.align())
unsafe { __rust_alloc(layout.size(), layout.align()) }
}

/// Deallocate memory with the global allocator.
@@ -99,7 +99,7 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 {
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
__rust_dealloc(ptr, layout.size(), layout.align())
unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
}

/// Reallocate memory with the global allocator.
@@ -121,7 +121,7 @@ pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
__rust_realloc(ptr, layout.size(), layout.align(), new_size)
unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
}

/// Allocate zero-initialized memory with the global allocator.
@@ -158,7 +158,7 @@ pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8
#[stable(feature = "global_alloc", since = "1.28.0")]
#[inline]
pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
__rust_alloc_zeroed(layout.size(), layout.align())
unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
}

#[unstable(feature = "allocator_api", issue = "32838")]
@@ -183,7 +183,7 @@ unsafe impl AllocRef for Global {
#[inline]
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
dealloc(ptr.as_ptr(), layout)
unsafe { dealloc(ptr.as_ptr(), layout) }
}
}

@@ -209,16 +209,21 @@ unsafe impl AllocRef for Global {
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if layout.size() == 0 => {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let new_layout =
unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
self.alloc(new_layout, init)
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size > size` or something similar.
intrinsics::assume(new_size > size);
let ptr = realloc(ptr.as_ptr(), layout, new_size);
let ptr = unsafe {
intrinsics::assume(new_size > size);
realloc(ptr.as_ptr(), layout, new_size)
};
let memory =
MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
init.init_offset(memory, size);
unsafe {
init.init_offset(memory, size);
}
Ok(memory)
}
}
@@ -245,13 +250,17 @@ unsafe impl AllocRef for Global {
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if new_size == 0 => {
self.dealloc(ptr, layout);
unsafe {
self.dealloc(ptr, layout);
}
Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
}
ReallocPlacement::MayMove => {
// `realloc` probably checks for `new_size < size` or something similar.
intrinsics::assume(new_size < size);
let ptr = realloc(ptr.as_ptr(), layout, new_size);
let ptr = unsafe {
intrinsics::assume(new_size < size);
realloc(ptr.as_ptr(), layout, new_size)
};
Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
}
}
@@ -264,7 +273,7 @@ unsafe impl AllocRef for Global {
#[lang = "exchange_malloc"]
#[inline]
unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
let layout = Layout::from_size_align_unchecked(size, align);
let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
match Global.alloc(layout, AllocInit::Uninitialized) {
Ok(memory) => memory.ptr.as_ptr(),
Err(_) => handle_alloc_error(layout),
@@ -279,10 +288,12 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
// For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
let layout = Layout::from_size_align_unchecked(size, align);
Global.dealloc(ptr.cast().into(), layout)
unsafe {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
let layout = Layout::from_size_align_unchecked(size, align);
Global.dealloc(ptr.cast().into(), layout)
}
}

/// Abort on memory allocation error or failure.
6 changes: 3 additions & 3 deletions src/liballoc/boxed.rs
@@ -311,7 +311,7 @@ impl<T> Box<mem::MaybeUninit<T>> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Box<T> {
Box::from_raw(Box::into_raw(self) as *mut T)
unsafe { Box::from_raw(Box::into_raw(self) as *mut T) }
}
}

@@ -349,7 +349,7 @@ impl<T> Box<[mem::MaybeUninit<T>]> {
#[unstable(feature = "new_uninit", issue = "63291")]
#[inline]
pub unsafe fn assume_init(self) -> Box<[T]> {
Box::from_raw(Box::into_raw(self) as *mut [T])
unsafe { Box::from_raw(Box::into_raw(self) as *mut [T]) }
}
}

@@ -393,7 +393,7 @@ impl<T: ?Sized> Box<T> {
#[stable(feature = "box_raw", since = "1.4.0")]
#[inline]
pub unsafe fn from_raw(raw: *mut T) -> Self {
Box(Unique::new_unchecked(raw))
Box(unsafe { Unique::new_unchecked(raw) })
}

/// Consumes the `Box`, returning a wrapped raw pointer.
12 changes: 7 additions & 5 deletions src/liballoc/collections/binary_heap.rs
@@ -1003,7 +1003,7 @@ impl<'a, T> Hole<'a, T> {
unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
debug_assert!(pos < data.len());
// SAFE: pos should be inside the slice
let elt = ptr::read(data.get_unchecked(pos));
let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
Hole { data, elt: ManuallyDrop::new(elt), pos }
}

@@ -1025,7 +1025,7 @@ impl<'a, T> Hole<'a, T> {
unsafe fn get(&self, index: usize) -> &T {
debug_assert!(index != self.pos);
debug_assert!(index < self.data.len());
self.data.get_unchecked(index)
unsafe { self.data.get_unchecked(index) }
}

/// Move hole to new location
@@ -1035,9 +1035,11 @@ impl<'a, T> Hole<'a, T> {
unsafe fn move_to(&mut self, index: usize) {
debug_assert!(index != self.pos);
debug_assert!(index < self.data.len());
let index_ptr: *const _ = self.data.get_unchecked(index);
let hole_ptr = self.data.get_unchecked_mut(self.pos);
ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
unsafe {
let index_ptr: *const _ = self.data.get_unchecked(index);
let hole_ptr = self.data.get_unchecked_mut(self.pos);
ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
}
self.pos = index;
}
}
10 changes: 5 additions & 5 deletions src/liballoc/collections/btree/map.rs
@@ -1725,7 +1725,7 @@ impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
&mut self,
) -> Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>> {
let edge = self.cur_leaf_edge.as_ref()?;
ptr::read(edge).next_kv().ok()
unsafe { ptr::read(edge).next_kv().ok() }
}

/// Implementation of a typical `DrainFilter::next` method, given the predicate.
@@ -1808,7 +1808,7 @@ impl<'a, K, V> Range<'a, K, V> {
}

unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
unwrap_unchecked(self.front.as_mut()).next_unchecked()
unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
}
}

@@ -1821,7 +1821,7 @@ impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {

impl<'a, K, V> Range<'a, K, V> {
unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
}
}

@@ -1859,7 +1859,7 @@ impl<'a, K, V> RangeMut<'a, K, V> {
}

unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
unwrap_unchecked(self.front.as_mut()).next_unchecked()
unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
}
}

@@ -1880,7 +1880,7 @@ impl<K, V> FusedIterator for RangeMut<'_, K, V> {}

impl<'a, K, V> RangeMut<'a, K, V> {
unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
}
}

4 changes: 3 additions & 1 deletion src/liballoc/collections/btree/mod.rs
@@ -19,7 +19,9 @@ pub unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
if cfg!(debug_assertions) {
panic!("'unchecked' unwrap on None in BTreeMap");
} else {
core::intrinsics::unreachable();
unsafe {
core::intrinsics::unreachable();
}
}
})
}
100 changes: 58 additions & 42 deletions src/liballoc/collections/btree/navigate.rs
@@ -64,8 +64,10 @@ macro_rules! def_next_kv_uncheched_dealloc {
edge = match edge.$adjacent_kv() {
Ok(internal_kv) => return internal_kv,
Err(last_edge) => {
let parent_edge = last_edge.into_node().deallocate_and_ascend();
unwrap_unchecked(parent_edge).forget_node_type()
unsafe {
let parent_edge = last_edge.into_node().deallocate_and_ascend();
unwrap_unchecked(parent_edge).forget_node_type()
}
}
}
}
@@ -82,9 +84,11 @@ def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_k
/// Safety: The change closure must not panic.
#[inline]
unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
let value = ptr::read(v);
let value = unsafe { ptr::read(v) };
let (new_value, ret) = change(value);
ptr::write(v, new_value);
unsafe {
ptr::write(v, new_value);
}
ret
}

@@ -93,22 +97,26 @@ impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Ed
/// key and value in between.
/// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_leaf_edge(), kv.into_kv())
})
unsafe {
replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_leaf_edge(), kv.into_kv())
})
}
}

/// Moves the leaf edge handle to the previous leaf edge and returns references to the
/// key and value in between.
/// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_back_leaf_edge(), kv.into_kv())
})
unsafe {
replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(kv.next_back_leaf_edge(), kv.into_kv())
})
}
}
}

@@ -119,14 +127,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
/// - The caller must ensure that the leaf edge is not the last one in the tree.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
unsafe {
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
}
}

/// Moves the leaf edge handle to the previous leaf and returns references to the
@@ -135,14 +145,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge
/// - The caller must ensure that the leaf edge is not the first one in the tree.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_back_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
unsafe {
let kv = replace(self, |leaf_edge| {
let kv = leaf_edge.next_back_kv();
let kv = unwrap_unchecked(kv.ok());
(ptr::read(&kv).next_back_leaf_edge(), kv)
});
// Doing the descend (and perhaps another move) invalidates the references
// returned by `into_kv_mut`, so we have to do this last.
kv.into_kv_mut()
}
}
}

@@ -159,12 +171,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
/// if the two preconditions above hold.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_unchecked(&mut self) -> (K, V) {
replace(self, |leaf_edge| {
let kv = next_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_leaf_edge(), (k, v))
})
unsafe {
replace(self, |leaf_edge| {
let kv = next_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_leaf_edge(), (k, v))
})
}
}

/// Moves the leaf edge handle to the previous leaf edge and returns the key
@@ -179,12 +193,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
/// if the two preconditions above hold.
/// - Using the updated handle may well invalidate the returned references.
pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
replace(self, |leaf_edge| {
let kv = next_back_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_back_leaf_edge(), (k, v))
})
unsafe {
replace(self, |leaf_edge| {
let kv = next_back_kv_unchecked_dealloc(leaf_edge);
let k = ptr::read(kv.reborrow().into_kv().0);
let v = ptr::read(kv.reborrow().into_kv().1);
(kv.next_back_leaf_edge(), (k, v))
})
}
}
}

(Diffs for the remaining 13 changed files are not shown here.)
