
Commit 1891b62

Unrolled build for rust-lang#120445
Rollup merge of rust-lang#120445 - Nemo157:arc-plug, r=Mark-Simulacrum

Fix some `Arc` allocator leaks

This doesn't matter for the stable `Global` allocator as it is a ZST singleton, but other allocators may rely on all instances being dropped.
2 parents 5ad7454 + 6837b81 commit 1891b62
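
The mechanics of the leak, distilled from the diff below: the old code cloned the allocator out of the `Arc` and then called `mem::forget` on the `Arc` itself, so the allocator instance still stored inside the forgotten `Arc` never had its destructor run. The fix moves the allocator out by value instead. A minimal sketch of the two patterns, where `Holder` is an illustrative stand-in for `Arc`'s fields, not the real type:

use std::mem::{self, ManuallyDrop};
use std::ptr;

struct Holder<A> {
    alloc: A,
}

// Old pattern: clone the allocator, then forget the holder. The `alloc`
// field inside the forgotten value is never dropped, so one instance of
// `A` leaks every time this runs.
fn take_alloc_leaky<A: Clone>(holder: Holder<A>) -> A {
    let alloc = holder.alloc.clone();
    mem::forget(holder);
    alloc
}

// New pattern (what `internal_into_inner_with_allocator` does below):
// suppress the destructor with `ManuallyDrop`, then move the allocator
// out by value with `ptr::read`. Exactly one instance of `A` survives,
// and no `Clone` bound is needed.
fn take_alloc<A>(holder: Holder<A>) -> A {
    let holder = ManuallyDrop::new(holder);
    unsafe { ptr::read(&holder.alloc) }
}

In the real code the helper also reads out the `ArcInner` pointer the same way, which is why the `A: Clone` bounds disappear from `assume_init`, `downcast`, and the `TryFrom` impl below.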

2 files changed: +80 -29 lines

library/alloc/src/sync.rs (+20 -24)
@@ -279,6 +279,12 @@ impl<T: ?Sized> Arc<T> {
 }
 
 impl<T: ?Sized, A: Allocator> Arc<T, A> {
+    #[inline]
+    fn internal_into_inner_with_allocator(self) -> (NonNull<ArcInner<T>>, A) {
+        let this = mem::ManuallyDrop::new(self);
+        (this.ptr, unsafe { ptr::read(&this.alloc) })
+    }
+
     #[inline]
     unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self {
         Self { ptr, phantom: PhantomData, alloc }
@@ -1275,12 +1281,9 @@ impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
-    pub unsafe fn assume_init(self) -> Arc<T, A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Arc<T, A> {
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_inner_in(ptr.cast(), alloc) }
     }
 }
 
@@ -1320,12 +1323,9 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
     #[unstable(feature = "new_uninit", issue = "63291")]
     #[must_use = "`self` will be dropped if the result is not used"]
     #[inline]
-    pub unsafe fn assume_init(self) -> Arc<[T], A>
-    where
-        A: Clone,
-    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
+    pub unsafe fn assume_init(self) -> Arc<[T], A> {
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_ptr_in(ptr.as_ptr() as _, alloc) }
     }
 }
 
@@ -2413,7 +2413,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Arc<T, A> {
     }
 }
 
-impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
+impl<A: Allocator> Arc<dyn Any + Send + Sync, A> {
     /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
     ///
     /// # Examples
@@ -2440,10 +2440,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
     {
         if (*self).is::<T>() {
             unsafe {
-                let ptr = self.ptr.cast::<ArcInner<T>>();
-                let alloc = self.alloc.clone();
-                mem::forget(self);
-                Ok(Arc::from_inner_in(ptr, alloc))
+                let (ptr, alloc) = self.internal_into_inner_with_allocator();
+                Ok(Arc::from_inner_in(ptr.cast(), alloc))
             }
         } else {
             Err(self)
@@ -2483,10 +2481,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
         T: Any + Send + Sync,
     {
         unsafe {
-            let ptr = self.ptr.cast::<ArcInner<T>>();
-            let alloc = self.alloc.clone();
-            mem::forget(self);
-            Arc::from_inner_in(ptr, alloc)
+            let (ptr, alloc) = self.internal_into_inner_with_allocator();
+            Arc::from_inner_in(ptr.cast(), alloc)
        }
     }
 }
@@ -3442,13 +3438,13 @@ impl From<Arc<str>> for Arc<[u8]> {
 }
 
 #[stable(feature = "boxed_slice_try_from", since = "1.43.0")]
-impl<T, A: Allocator + Clone, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
+impl<T, A: Allocator, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N], A> {
     type Error = Arc<[T], A>;
 
     fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> {
         if boxed_slice.len() == N {
-            let alloc = boxed_slice.alloc.clone();
-            Ok(unsafe { Arc::from_raw_in(Arc::into_raw(boxed_slice) as *mut [T; N], alloc) })
+            let (ptr, alloc) = boxed_slice.internal_into_inner_with_allocator();
+            Ok(unsafe { Arc::from_inner_in(ptr.cast(), alloc) })
         } else {
             Err(boxed_slice)
         }
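
A user-visible consequence of the refactor: because the allocator is now moved rather than cloned, these impls no longer require `A: Clone`. A hedged sketch of what that enables, on nightly with the `allocator_api` feature; `NoClone` is a made-up allocator for illustration, not part of the change:

#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Layout, System};
use std::any::Any;
use std::ptr::NonNull;
use std::sync::Arc;

// Deliberately not Clone: before this commit, `Arc::downcast` was only
// provided for `A: Allocator + Clone`, so this type was rejected.
struct NoClone;

unsafe impl Allocator for NoClone {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        System.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe { System.deallocate(ptr, layout) }
    }
}

fn main() {
    let arc: Arc<dyn Any + Send + Sync, NoClone> = Arc::new_in(5usize, NoClone);
    // The single NoClone instance moves into the downcast Arc instead of
    // being cloned while the original is forgotten.
    let n = arc.downcast::<usize>().unwrap();
    assert_eq!(*n, 5);
}

The new regression test below checks exactly this balance with a counting allocator.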

library/alloc/src/sync/tests.rs (+60 -5)
@@ -1,13 +1,15 @@
 use super::*;
 
 use std::clone::Clone;
+use std::mem::MaybeUninit;
 use std::option::Option::None;
+use std::sync::atomic::AtomicUsize;
 use std::sync::atomic::Ordering::SeqCst;
 use std::sync::mpsc::channel;
 use std::sync::Mutex;
 use std::thread;
 
-struct Canary(*mut atomic::AtomicUsize);
+struct Canary(*mut AtomicUsize);
 
 impl Drop for Canary {
     fn drop(&mut self) {
@@ -21,6 +23,37 @@ impl Drop for Canary {
     }
 }
 
+struct AllocCanary<'a>(&'a AtomicUsize);
+
+impl<'a> AllocCanary<'a> {
+    fn new(counter: &'a AtomicUsize) -> Self {
+        counter.fetch_add(1, SeqCst);
+        Self(counter)
+    }
+}
+
+unsafe impl Allocator for AllocCanary<'_> {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        std::alloc::Global.allocate(layout)
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        unsafe { std::alloc::Global.deallocate(ptr, layout) }
+    }
+}
+
+impl Clone for AllocCanary<'_> {
+    fn clone(&self) -> Self {
+        Self::new(self.0)
+    }
+}
+
+impl Drop for AllocCanary<'_> {
+    fn drop(&mut self) {
+        self.0.fetch_sub(1, SeqCst);
+    }
+}
+
 #[test]
 #[cfg_attr(target_os = "emscripten", ignore)]
 fn manually_share_arc() {
@@ -295,16 +328,16 @@ fn weak_self_cyclic() {
 
 #[test]
 fn drop_arc() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let x = Arc::new(Canary(&mut canary as *mut AtomicUsize));
     drop(x);
     assert!(canary.load(Acquire) == 1);
 }
 
 #[test]
 fn drop_arc_weak() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let arc = Arc::new(Canary(&mut canary as *mut AtomicUsize));
     let arc_weak = Arc::downgrade(&arc);
     assert!(canary.load(Acquire) == 0);
     drop(arc);
@@ -660,3 +693,25 @@ fn arc_drop_dereferenceable_race() {
         thread.join().unwrap();
     }
 }
+
+#[test]
+fn arc_doesnt_leak_allocator() {
+    let counter = AtomicUsize::new(0);
+
+    {
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(arc.downcast::<usize>().unwrap());
+
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(unsafe { arc.downcast_unchecked::<usize>() });
+
+        let arc = Arc::new_in(MaybeUninit::<usize>::new(5usize), AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+
+        let arc: Arc<[MaybeUninit<usize>], _> =
+            Arc::new_zeroed_slice_in(5, AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+    }
+
+    assert_eq!(counter.load(SeqCst), 0);
+}
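
The accounting behind the final assertion: every `AllocCanary::new` and `clone` adds one to the shared counter and every drop subtracts one, so any path that clones the allocator and forgets the original leaves a net +1. A minimal sketch of how the canary exposes such a leak, assuming the `AllocCanary` type and imports from the diff above are in scope:

#[test]
fn alloc_canary_counts_leaks() {
    let counter = AtomicUsize::new(0);
    {
        let a = AllocCanary::new(&counter); // counter: 0 -> 1
        let b = a.clone();                  // counter: 1 -> 2
        std::mem::forget(b);                // the -1 for `b` never happens
    }                                       // drop(a): counter: 2 -> 1
    // Net +1 == one leaked allocator instance, which is what the pre-fix
    // `downcast`/`assume_init` paths produced.
    assert_eq!(counter.load(SeqCst), 1);
}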
