Fix some Arc allocator leaks
This doesn't matter for the stable `Global` allocator as it is a ZST
singleton, but other allocators may rely on all instances being dropped.
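
To illustrate the concern, here is a minimal sketch (not part of the commit) of an allocator that relies on every instance being dropped: each instance bumps a shared live-count on creation and decrements it on drop, so a leaked clone is directly observable. The `CountingAlloc` name is hypothetical, and the snippet assumes the unstable `allocator_api` feature (`Allocator`, `AllocError`, `Global`); the commit's new tests add a very similar `AllocCanary` helper.

```rust
#![feature(allocator_api)]

use std::alloc::{AllocError, Allocator, Global, Layout};
use std::ptr::NonNull;
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};

/// Hypothetical allocator: creation bumps a shared live-count, drop decrements it.
/// If any instance is leaked (e.g. via `mem::forget`), the count never returns to zero.
struct CountingAlloc<'a>(&'a AtomicUsize);

impl<'a> CountingAlloc<'a> {
    fn new(live: &'a AtomicUsize) -> Self {
        live.fetch_add(1, SeqCst);
        Self(live)
    }
}

impl Clone for CountingAlloc<'_> {
    fn clone(&self) -> Self {
        Self::new(self.0)
    }
}

impl Drop for CountingAlloc<'_> {
    fn drop(&mut self) {
        self.0.fetch_sub(1, SeqCst);
    }
}

unsafe impl Allocator for CountingAlloc<'_> {
    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
        // Delegate the actual memory management to the global allocator.
        Global.allocate(layout)
    }

    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
        unsafe { Global.deallocate(ptr, layout) }
    }
}
```

Under such an allocator, the old `downcast` path (`self.alloc.clone()` followed by `mem::forget(self)`) leaves one increment unbalanced; moving the allocator out with `ptr::read` inside `ManuallyDrop`, as the new `internal_into_inner_with_allocator` helper does, keeps the count exact.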
Nemo157 committed Jan 28, 2024
1 parent 6351247 commit 5e08d64
Showing 2 changed files with 77 additions and 20 deletions.
library/alloc/src/sync.rs: 32 changes (17 additions, 15 deletions)
@@ -279,6 +279,12 @@ impl<T: ?Sized> Arc<T> {
}

impl<T: ?Sized, A: Allocator> Arc<T, A> {
+    #[inline]
+    fn internal_into_inner_with_allocator(self) -> (NonNull<ArcInner<T>>, A) {
+        let this = mem::ManuallyDrop::new(self);
+        (this.ptr, unsafe { ptr::read(&this.alloc) })
+    }
+
    #[inline]
    unsafe fn from_inner_in(ptr: NonNull<ArcInner<T>>, alloc: A) -> Self {
        Self { ptr, phantom: PhantomData, alloc }
@@ -1275,8 +1281,8 @@ impl<T, A: Allocator> Arc<mem::MaybeUninit<T>, A> {
    where
        A: Clone,
    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_inner_in(md_self.ptr.cast(), md_self.alloc.clone()) }
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_inner_in(ptr.cast(), alloc) }
    }
}

@@ -1320,8 +1326,8 @@ impl<T, A: Allocator> Arc<[mem::MaybeUninit<T>], A> {
    where
        A: Clone,
    {
-        let md_self = mem::ManuallyDrop::new(self);
-        unsafe { Arc::from_ptr_in(md_self.ptr.as_ptr() as _, md_self.alloc.clone()) }
+        let (ptr, alloc) = self.internal_into_inner_with_allocator();
+        unsafe { Arc::from_ptr_in(ptr.as_ptr() as _, alloc) }
    }
}

@@ -2409,7 +2415,7 @@ unsafe impl<#[may_dangle] T: ?Sized, A: Allocator> Drop for Arc<T, A> {
    }
}

-impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
+impl<A: Allocator> Arc<dyn Any + Send + Sync, A> {
    /// Attempt to downcast the `Arc<dyn Any + Send + Sync>` to a concrete type.
    ///
    /// # Examples
@@ -2436,10 +2442,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
    {
        if (*self).is::<T>() {
            unsafe {
-                let ptr = self.ptr.cast::<ArcInner<T>>();
-                let alloc = self.alloc.clone();
-                mem::forget(self);
-                Ok(Arc::from_inner_in(ptr, alloc))
+                let (ptr, alloc) = self.internal_into_inner_with_allocator();
+                Ok(Arc::from_inner_in(ptr.cast(), alloc))
            }
        } else {
            Err(self)
@@ -2479,10 +2483,8 @@ impl<A: Allocator + Clone> Arc<dyn Any + Send + Sync, A> {
        T: Any + Send + Sync,
    {
        unsafe {
-            let ptr = self.ptr.cast::<ArcInner<T>>();
-            let alloc = self.alloc.clone();
-            mem::forget(self);
-            Arc::from_inner_in(ptr, alloc)
+            let (ptr, alloc) = self.internal_into_inner_with_allocator();
+            Arc::from_inner_in(ptr.cast(), alloc)
        }
    }
}
@@ -3443,8 +3445,8 @@ impl<T, A: Allocator + Clone, const N: usize> TryFrom<Arc<[T], A>> for Arc<[T; N

    fn try_from(boxed_slice: Arc<[T], A>) -> Result<Self, Self::Error> {
        if boxed_slice.len() == N {
-            let alloc = boxed_slice.alloc.clone();
-            Ok(unsafe { Arc::from_raw_in(Arc::into_raw(boxed_slice) as *mut [T; N], alloc) })
+            let (ptr, alloc) = boxed_slice.internal_into_inner_with_allocator();
+            Ok(unsafe { Arc::from_inner_in(ptr.cast(), alloc) })
        } else {
            Err(boxed_slice)
        }
library/alloc/src/sync/tests.rs: 65 changes (60 additions, 5 deletions)
@@ -1,13 +1,15 @@
use super::*;

use std::clone::Clone;
+use std::mem::MaybeUninit;
use std::option::Option::None;
+use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst;
use std::sync::mpsc::channel;
use std::sync::Mutex;
use std::thread;

-struct Canary(*mut atomic::AtomicUsize);
+struct Canary(*mut AtomicUsize);

impl Drop for Canary {
    fn drop(&mut self) {
@@ -21,6 +23,37 @@ impl Drop for Canary {
    }
}

+struct AllocCanary<'a>(&'a AtomicUsize);
+
+impl<'a> AllocCanary<'a> {
+    fn new(counter: &'a AtomicUsize) -> Self {
+        counter.fetch_add(1, SeqCst);
+        Self(counter)
+    }
+}
+
+unsafe impl Allocator for AllocCanary<'_> {
+    fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
+        std::alloc::Global.allocate(layout)
+    }
+
+    unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
+        unsafe { std::alloc::Global.deallocate(ptr, layout) }
+    }
+}
+
+impl Clone for AllocCanary<'_> {
+    fn clone(&self) -> Self {
+        Self::new(self.0)
+    }
+}
+
+impl Drop for AllocCanary<'_> {
+    fn drop(&mut self) {
+        self.0.fetch_sub(1, SeqCst);
+    }
+}
+
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
fn manually_share_arc() {
@@ -295,16 +328,16 @@ fn weak_self_cyclic() {

#[test]
fn drop_arc() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let x = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let x = Arc::new(Canary(&mut canary as *mut AtomicUsize));
    drop(x);
    assert!(canary.load(Acquire) == 1);
}

#[test]
fn drop_arc_weak() {
-    let mut canary = atomic::AtomicUsize::new(0);
-    let arc = Arc::new(Canary(&mut canary as *mut atomic::AtomicUsize));
+    let mut canary = AtomicUsize::new(0);
+    let arc = Arc::new(Canary(&mut canary as *mut AtomicUsize));
    let arc_weak = Arc::downgrade(&arc);
    assert!(canary.load(Acquire) == 0);
    drop(arc);
@@ -660,3 +693,25 @@ fn arc_drop_dereferenceable_race() {
        thread.join().unwrap();
    }
}
+
+#[test]
+fn arc_doesnt_leak_allocator() {
+    let counter = AtomicUsize::new(0);
+
+    {
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(arc.downcast::<usize>().unwrap());
+
+        let arc: Arc<dyn Any + Send + Sync, _> = Arc::new_in(5usize, AllocCanary::new(&counter));
+        drop(unsafe { arc.downcast_unchecked::<usize>() });
+
+        let arc = Arc::new_in(MaybeUninit::<usize>::new(5usize), AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+
+        let arc: Arc<[MaybeUninit<usize>], _> =
+            Arc::new_zeroed_slice_in(5, AllocCanary::new(&counter));
+        drop(unsafe { arc.assume_init() });
+    }
+
+    assert_eq!(counter.load(SeqCst), 0);
+}
