Commit 55479de

Rollup merge of #72709 - LeSeulArtichaut:unsafe-liballoc, r=nikomatsakis
`#[deny(unsafe_op_in_unsafe_fn)]` in liballoc

This PR makes use of the new `unsafe_op_in_unsafe_fn` lint: the body of an unsafe function is no longer treated as one implicit unsafe block, so each unsafe operation inside it requires an explicit unsafe block. This was first (partly) suggested by @Mark-Simulacrum in #69245 (comment). Tracking issue for the feature: #71668. ~~Blocked on #71862.~~

r? @Mark-Simulacrum
cc @nikomatsakis: can you confirm that these changes are desirable? Should I restrict this to only BTree for the moment?
2 parents 85e1c3b + 7b63986

19 files changed: 387 additions (+), 257 deletions (-)
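For context, a minimal sketch, not part of this commit, of what the lint changes (it works on current compilers; at the time it additionally required the nightly feature tracked in #71668):

```rust
#![deny(unsafe_op_in_unsafe_fn)]

// Under this lint, the body of an `unsafe fn` is no longer an implicit
// unsafe block: each unsafe operation must be wrapped explicitly.
unsafe fn first_byte(ptr: *const u8) -> u8 {
    // `*ptr` alone would now be rejected: dereferencing a raw pointer
    // is an unsafe operation and requires an unsafe block.
    unsafe { *ptr }
}

fn main() {
    let x = 42u8;
    // Calling an `unsafe fn` is itself an unsafe operation.
    let y = unsafe { first_byte(&x) };
    assert_eq!(y, 42);
}
```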

src/liballoc/alloc.rs (+28 -17)

@@ -77,7 +77,7 @@ pub struct Global;
 #[stable(feature = "global_alloc", since = "1.28.0")]
 #[inline]
 pub unsafe fn alloc(layout: Layout) -> *mut u8 {
-    __rust_alloc(layout.size(), layout.align())
+    unsafe { __rust_alloc(layout.size(), layout.align()) }
 }
 
 /// Deallocate memory with the global allocator.
@@ -99,7 +99,7 @@ pub unsafe fn alloc(layout: Layout) -> *mut u8 {
 #[stable(feature = "global_alloc", since = "1.28.0")]
 #[inline]
 pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
-    __rust_dealloc(ptr, layout.size(), layout.align())
+    unsafe { __rust_dealloc(ptr, layout.size(), layout.align()) }
 }
 
 /// Reallocate memory with the global allocator.
@@ -121,7 +121,7 @@ pub unsafe fn dealloc(ptr: *mut u8, layout: Layout) {
 #[stable(feature = "global_alloc", since = "1.28.0")]
 #[inline]
 pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
-    __rust_realloc(ptr, layout.size(), layout.align(), new_size)
+    unsafe { __rust_realloc(ptr, layout.size(), layout.align(), new_size) }
 }
 
 /// Allocate zero-initialized memory with the global allocator.
@@ -158,7 +158,7 @@ pub unsafe fn realloc(ptr: *mut u8, layout: Layout, new_size: usize) -> *mut u8 {
 #[stable(feature = "global_alloc", since = "1.28.0")]
 #[inline]
 pub unsafe fn alloc_zeroed(layout: Layout) -> *mut u8 {
-    __rust_alloc_zeroed(layout.size(), layout.align())
+    unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
 }
 
 #[unstable(feature = "allocator_api", issue = "32838")]
@@ -183,7 +183,7 @@ unsafe impl AllocRef for Global {
     #[inline]
     unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
         if layout.size() != 0 {
-            dealloc(ptr.as_ptr(), layout)
+            unsafe { dealloc(ptr.as_ptr(), layout) }
         }
     }
 
@@ -209,16 +209,21 @@ unsafe impl AllocRef for Global {
         match placement {
             ReallocPlacement::InPlace => Err(AllocErr),
             ReallocPlacement::MayMove if layout.size() == 0 => {
-                let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+                let new_layout =
+                    unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
                 self.alloc(new_layout, init)
             }
             ReallocPlacement::MayMove => {
                 // `realloc` probably checks for `new_size > size` or something similar.
-                intrinsics::assume(new_size > size);
-                let ptr = realloc(ptr.as_ptr(), layout, new_size);
+                let ptr = unsafe {
+                    intrinsics::assume(new_size > size);
+                    realloc(ptr.as_ptr(), layout, new_size)
+                };
                 let memory =
                     MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
-                init.init_offset(memory, size);
+                unsafe {
+                    init.init_offset(memory, size);
+                }
                 Ok(memory)
             }
         }
@@ -245,13 +250,17 @@ unsafe impl AllocRef for Global {
         match placement {
             ReallocPlacement::InPlace => Err(AllocErr),
             ReallocPlacement::MayMove if new_size == 0 => {
-                self.dealloc(ptr, layout);
+                unsafe {
+                    self.dealloc(ptr, layout);
+                }
                 Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
             }
             ReallocPlacement::MayMove => {
                 // `realloc` probably checks for `new_size < size` or something similar.
-                intrinsics::assume(new_size < size);
-                let ptr = realloc(ptr.as_ptr(), layout, new_size);
+                let ptr = unsafe {
+                    intrinsics::assume(new_size < size);
+                    realloc(ptr.as_ptr(), layout, new_size)
+                };
                 Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
             }
         }
@@ -264,7 +273,7 @@ unsafe impl AllocRef for Global {
 #[lang = "exchange_malloc"]
 #[inline]
 unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
-    let layout = Layout::from_size_align_unchecked(size, align);
+    let layout = unsafe { Layout::from_size_align_unchecked(size, align) };
     match Global.alloc(layout, AllocInit::Uninitialized) {
         Ok(memory) => memory.ptr.as_ptr(),
         Err(_) => handle_alloc_error(layout),
@@ -279,10 +288,12 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
 // For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
 // this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
 pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
-    let size = size_of_val(ptr.as_ref());
-    let align = min_align_of_val(ptr.as_ref());
-    let layout = Layout::from_size_align_unchecked(size, align);
-    Global.dealloc(ptr.cast().into(), layout)
+    unsafe {
+        let size = size_of_val(ptr.as_ref());
+        let align = min_align_of_val(ptr.as_ref());
+        let layout = Layout::from_size_align_unchecked(size, align);
+        Global.dealloc(ptr.cast().into(), layout)
+    }
 }
 
 /// Abort on memory allocation error or failure.
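One pattern worth noting in this file: where several unsafe operations share a single safety justification, as in `box_free`, they are grouped under one `unsafe` block instead of being wrapped one by one. A stand-alone sketch of the same shape; `free_value` is a hypothetical helper, not part of the commit:

```rust
#![deny(unsafe_op_in_unsafe_fn)]

use std::alloc::{dealloc, Layout};
use std::ptr;

/// Hypothetical analogue of `box_free`: the operations share one contract,
/// so a single block covers them all.
///
/// SAFETY: `ptr` must point to a live, non-zero-sized `T` allocated by the
/// global allocator with `Layout::new::<T>()`, and must not be used again.
unsafe fn free_value<T>(ptr: *mut T) {
    unsafe {
        ptr::drop_in_place(ptr);
        dealloc(ptr as *mut u8, Layout::new::<T>());
    }
}

fn main() {
    let raw = Box::into_raw(Box::new(String::from("hello")));
    // SAFETY: `raw` came from `Box::into_raw`, so the contract above holds.
    unsafe { free_value(raw) };
}
```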

src/liballoc/boxed.rs (+3 -3)

@@ -311,7 +311,7 @@ impl<T> Box<mem::MaybeUninit<T>> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Box<T> {
-        Box::from_raw(Box::into_raw(self) as *mut T)
+        unsafe { Box::from_raw(Box::into_raw(self) as *mut T) }
     }
 }
 
@@ -349,7 +349,7 @@ impl<T> Box<[mem::MaybeUninit<T>]> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[inline]
     pub unsafe fn assume_init(self) -> Box<[T]> {
-        Box::from_raw(Box::into_raw(self) as *mut [T])
+        unsafe { Box::from_raw(Box::into_raw(self) as *mut [T]) }
     }
 }
 
@@ -393,7 +393,7 @@ impl<T: ?Sized> Box<T> {
     #[stable(feature = "box_raw", since = "1.4.0")]
     #[inline]
     pub unsafe fn from_raw(raw: *mut T) -> Self {
-        Box(Unique::new_unchecked(raw))
+        Box(unsafe { Unique::new_unchecked(raw) })
     }
 
     /// Consumes the `Box`, returning a wrapped raw pointer.
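For reference, the `assume_init` shown above is used like this (essentially the example from its documentation; at the time this required the nightly `new_uninit` feature, issue #63291):

```rust
#![feature(new_uninit)]

fn main() {
    let mut five = Box::<u32>::new_uninit();
    let five = unsafe {
        // Deferred initialization: write through the raw pointer first...
        five.as_mut_ptr().write(5);
        // ...and only then assert that the contents are initialized.
        five.assume_init()
    };
    assert_eq!(*five, 5);
}
```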

src/liballoc/collections/binary_heap.rs (+7 -5)

@@ -1003,7 +1003,7 @@ impl<'a, T> Hole<'a, T> {
     unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
         debug_assert!(pos < data.len());
         // SAFE: pos should be inside the slice
-        let elt = ptr::read(data.get_unchecked(pos));
+        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
         Hole { data, elt: ManuallyDrop::new(elt), pos }
     }
 
@@ -1025,7 +1025,7 @@ impl<'a, T> Hole<'a, T> {
     unsafe fn get(&self, index: usize) -> &T {
         debug_assert!(index != self.pos);
         debug_assert!(index < self.data.len());
-        self.data.get_unchecked(index)
+        unsafe { self.data.get_unchecked(index) }
     }
 
     /// Move hole to new location
@@ -1035,9 +1035,11 @@ impl<'a, T> Hole<'a, T> {
     unsafe fn move_to(&mut self, index: usize) {
         debug_assert!(index != self.pos);
         debug_assert!(index < self.data.len());
-        let index_ptr: *const _ = self.data.get_unchecked(index);
-        let hole_ptr = self.data.get_unchecked_mut(self.pos);
-        ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
+        unsafe {
+            let index_ptr: *const _ = self.data.get_unchecked(index);
+            let hole_ptr = self.data.get_unchecked_mut(self.pos);
+            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
+        }
         self.pos = index;
     }
 }
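`Hole` is the sift-loop optimization behind `BinaryHeap`: it reads one element out of the slice, moves the resulting hole around with raw copies, and writes the element back on drop. A compact stand-alone sketch of the idea, simplified from the code above:

```rust
#![deny(unsafe_op_in_unsafe_fn)]

use std::mem::ManuallyDrop;
use std::ptr;

struct Hole<'a, T> {
    data: &'a mut [T],
    elt: ManuallyDrop<T>,
    pos: usize,
}

impl<'a, T> Hole<'a, T> {
    /// SAFETY: `pos` must be in bounds for `data`.
    unsafe fn new(data: &'a mut [T], pos: usize) -> Self {
        debug_assert!(pos < data.len());
        let elt = unsafe { ptr::read(data.get_unchecked(pos)) };
        Hole { data, elt: ManuallyDrop::new(elt), pos }
    }

    /// SAFETY: `index` must be in bounds and differ from the hole's position.
    unsafe fn move_to(&mut self, index: usize) {
        debug_assert!(index != self.pos);
        debug_assert!(index < self.data.len());
        unsafe {
            let index_ptr: *const _ = self.data.get_unchecked(index);
            let hole_ptr = self.data.get_unchecked_mut(self.pos);
            ptr::copy_nonoverlapping(index_ptr, hole_ptr, 1);
        }
        self.pos = index;
    }
}

impl<T> Drop for Hole<'_, T> {
    fn drop(&mut self) {
        // Fill the hole back in with the element taken out in `new`.
        unsafe {
            let pos = self.pos;
            ptr::copy_nonoverlapping(&*self.elt, self.data.get_unchecked_mut(pos), 1);
        }
    }
}

fn main() {
    let mut v = [3, 1, 2];
    // SAFETY: indices 2 and 0 are in bounds and distinct.
    unsafe {
        let mut hole = Hole::new(&mut v, 2);
        hole.move_to(0); // copies v[0] into the old hole at index 2
    } // on drop, the element taken from index 2 fills the hole at index 0
    assert_eq!(v, [2, 1, 3]);
}
```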

src/liballoc/collections/btree/map.rs (+5 -5)

@@ -1725,7 +1725,7 @@ impl<'a, K: 'a, V: 'a> DrainFilterInner<'a, K, V> {
         &mut self,
     ) -> Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV>> {
         let edge = self.cur_leaf_edge.as_ref()?;
-        ptr::read(edge).next_kv().ok()
+        unsafe { ptr::read(edge).next_kv().ok() }
     }
 
     /// Implementation of a typical `DrainFilter::next` method, given the predicate.
@@ -1808,7 +1808,7 @@ impl<'a, K, V> Range<'a, K, V> {
     }
 
     unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
-        unwrap_unchecked(self.front.as_mut()).next_unchecked()
+        unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
     }
 }
 
@@ -1821,7 +1821,7 @@ impl<'a, K, V> DoubleEndedIterator for Range<'a, K, V> {
 
 impl<'a, K, V> Range<'a, K, V> {
     unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
-        unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
+        unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
     }
 }
 
@@ -1859,7 +1859,7 @@ impl<'a, K, V> RangeMut<'a, K, V> {
     }
 
     unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
-        unwrap_unchecked(self.front.as_mut()).next_unchecked()
+        unsafe { unwrap_unchecked(self.front.as_mut()).next_unchecked() }
     }
 }
 
@@ -1880,7 +1880,7 @@ impl<K, V> FusedIterator for RangeMut<'_, K, V> {}
 
 impl<'a, K, V> RangeMut<'a, K, V> {
     unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
-        unwrap_unchecked(self.back.as_mut()).next_back_unchecked()
+        unsafe { unwrap_unchecked(self.back.as_mut()).next_back_unchecked() }
     }
 }

src/liballoc/collections/btree/mod.rs (+3 -1)

@@ -19,7 +19,9 @@ pub unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
         if cfg!(debug_assertions) {
             panic!("'unchecked' unwrap on None in BTreeMap");
         } else {
-            core::intrinsics::unreachable();
+            unsafe {
+                core::intrinsics::unreachable();
+            }
         }
     })
 }
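`core::intrinsics::unreachable` is perma-unstable, so outside the standard library the same helper would typically use `std::hint::unreachable_unchecked` instead. A hypothetical stand-alone version:

```rust
#![deny(unsafe_op_in_unsafe_fn)]

use std::hint::unreachable_unchecked;

/// SAFETY: the caller must guarantee that `val` is `Some`.
unsafe fn unwrap_unchecked<T>(val: Option<T>) -> T {
    val.unwrap_or_else(|| {
        if cfg!(debug_assertions) {
            panic!("'unchecked' unwrap on None");
        } else {
            // SAFETY: guaranteed by the caller. The explicit block is
            // required here regardless of the lint, because a closure body
            // is never an implicit unsafe context.
            unsafe { unreachable_unchecked() }
        }
    })
}

fn main() {
    let v = Some(7);
    // SAFETY: `v` is `Some`.
    assert_eq!(unsafe { unwrap_unchecked(v) }, 7);
}
```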

src/liballoc/collections/btree/navigate.rs (+58 -42)

@@ -64,8 +64,10 @@ macro_rules! def_next_kv_uncheched_dealloc {
            edge = match edge.$adjacent_kv() {
                Ok(internal_kv) => return internal_kv,
                Err(last_edge) => {
-                    let parent_edge = last_edge.into_node().deallocate_and_ascend();
-                    unwrap_unchecked(parent_edge).forget_node_type()
+                    unsafe {
+                        let parent_edge = last_edge.into_node().deallocate_and_ascend();
+                        unwrap_unchecked(parent_edge).forget_node_type()
+                    }
                }
            }
        }
@@ -82,9 +84,11 @@ def_next_kv_uncheched_dealloc! {unsafe fn next_back_kv_unchecked_dealloc: left_kv}
 /// Safety: The change closure must not panic.
 #[inline]
 unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
-    let value = ptr::read(v);
+    let value = unsafe { ptr::read(v) };
     let (new_value, ret) = change(value);
-    ptr::write(v, new_value);
+    unsafe {
+        ptr::write(v, new_value);
+    }
     ret
 }
 
@@ -93,22 +97,26 @@ impl<'a, K, V> Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge> {
     /// key and value in between.
     /// Unsafe because the caller must ensure that the leaf edge is not the last one in the tree.
     pub unsafe fn next_unchecked(&mut self) -> (&'a K, &'a V) {
-        replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_kv();
-            let kv = unwrap_unchecked(kv.ok());
-            (kv.next_leaf_edge(), kv.into_kv())
-        })
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (kv.next_leaf_edge(), kv.into_kv())
+            })
+        }
     }
 
     /// Moves the leaf edge handle to the previous leaf edge and returns references to the
     /// key and value in between.
     /// Unsafe because the caller must ensure that the leaf edge is not the first one in the tree.
     pub unsafe fn next_back_unchecked(&mut self) -> (&'a K, &'a V) {
-        replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_back_kv();
-            let kv = unwrap_unchecked(kv.ok());
-            (kv.next_back_leaf_edge(), kv.into_kv())
-        })
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_back_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (kv.next_back_leaf_edge(), kv.into_kv())
+            })
+        }
     }
 }
 
@@ -119,14 +127,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
     /// - The caller must ensure that the leaf edge is not the last one in the tree.
     /// - Using the updated handle may well invalidate the returned references.
     pub unsafe fn next_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
-        let kv = replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_kv();
-            let kv = unwrap_unchecked(kv.ok());
-            (ptr::read(&kv).next_leaf_edge(), kv)
-        });
-        // Doing the descend (and perhaps another move) invalidates the references
-        // returned by `into_kv_mut`, so we have to do this last.
-        kv.into_kv_mut()
+        unsafe {
+            let kv = replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (ptr::read(&kv).next_leaf_edge(), kv)
+            });
+            // Doing the descend (and perhaps another move) invalidates the references
+            // returned by `into_kv_mut`, so we have to do this last.
+            kv.into_kv_mut()
+        }
     }
 
     /// Moves the leaf edge handle to the previous leaf and returns references to the
@@ -135,14 +145,16 @@ impl<'a, K, V> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
     /// - The caller must ensure that the leaf edge is not the first one in the tree.
     /// - Using the updated handle may well invalidate the returned references.
     pub unsafe fn next_back_unchecked(&mut self) -> (&'a mut K, &'a mut V) {
-        let kv = replace(self, |leaf_edge| {
-            let kv = leaf_edge.next_back_kv();
-            let kv = unwrap_unchecked(kv.ok());
-            (ptr::read(&kv).next_back_leaf_edge(), kv)
-        });
-        // Doing the descend (and perhaps another move) invalidates the references
-        // returned by `into_kv_mut`, so we have to do this last.
-        kv.into_kv_mut()
+        unsafe {
+            let kv = replace(self, |leaf_edge| {
+                let kv = leaf_edge.next_back_kv();
+                let kv = unwrap_unchecked(kv.ok());
+                (ptr::read(&kv).next_back_leaf_edge(), kv)
+            });
+            // Doing the descend (and perhaps another move) invalidates the references
+            // returned by `into_kv_mut`, so we have to do this last.
+            kv.into_kv_mut()
+        }
     }
 }
 
@@ -159,12 +171,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
     /// if the two preconditions above hold.
     /// - Using the updated handle may well invalidate the returned references.
     pub unsafe fn next_unchecked(&mut self) -> (K, V) {
-        replace(self, |leaf_edge| {
-            let kv = next_kv_unchecked_dealloc(leaf_edge);
-            let k = ptr::read(kv.reborrow().into_kv().0);
-            let v = ptr::read(kv.reborrow().into_kv().1);
-            (kv.next_leaf_edge(), (k, v))
-        })
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = next_kv_unchecked_dealloc(leaf_edge);
+                let k = ptr::read(kv.reborrow().into_kv().0);
+                let v = ptr::read(kv.reborrow().into_kv().1);
+                (kv.next_leaf_edge(), (k, v))
+            })
+        }
     }
 
     /// Moves the leaf edge handle to the previous leaf edge and returns the key
@@ -179,12 +193,14 @@ impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
     /// if the two preconditions above hold.
     /// - Using the updated handle may well invalidate the returned references.
     pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
-        replace(self, |leaf_edge| {
-            let kv = next_back_kv_unchecked_dealloc(leaf_edge);
-            let k = ptr::read(kv.reborrow().into_kv().0);
-            let v = ptr::read(kv.reborrow().into_kv().1);
-            (kv.next_back_leaf_edge(), (k, v))
-        })
+        unsafe {
+            replace(self, |leaf_edge| {
+                let kv = next_back_kv_unchecked_dealloc(leaf_edge);
+                let k = ptr::read(kv.reborrow().into_kv().0);
+                let v = ptr::read(kv.reborrow().into_kv().1);
+                (kv.next_back_leaf_edge(), (k, v))
+            })
+        }
     }
 }
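The `replace` helper wrapped above is a by-value update of `&mut T` built on `ptr::read`/`ptr::write`; the no-panic requirement exists because a panic inside `change` would drop the moved-out value during unwinding and then drop `*v` again later. A stand-alone copy of the helper with an illustrative usage (the `main` is not from the commit):

```rust
#![deny(unsafe_op_in_unsafe_fn)]

use std::ptr;

/// Safety: the `change` closure must not panic, or `*v` would be dropped
/// twice: once as the moved-out value during unwinding, once as `*v` itself.
unsafe fn replace<T, R>(v: &mut T, change: impl FnOnce(T) -> (T, R)) -> R {
    let value = unsafe { ptr::read(v) };
    let (new_value, ret) = change(value);
    unsafe {
        ptr::write(v, new_value);
    }
    ret
}

fn main() {
    let mut s = String::from("hello");
    // SAFETY: the closure performs no panicking operations.
    let old_len = unsafe {
        replace(&mut s, |old| {
            let n = old.len();
            (old + "!", n)
        })
    };
    assert_eq!(old_len, 5);
    assert_eq!(s, "hello!");
}
```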