Commit ecf46d1

Add Atomic*::get_mut_slice
Just as `get_mut` is the inverse of `from_mut`, `get_mut_slice` is the inverse of `from_mut_slice`.
1 parent: ba14a83
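
To make that symmetry concrete, here is a minimal round-trip sketch (not part of this commit; it assumes a nightly toolchain with the `atomic_from_mut` feature from tracking issue #76314 enabled):

```rust
#![feature(atomic_from_mut)]
use std::sync::atomic::{AtomicBool, Ordering};

fn main() {
    let mut flags = [false; 4];
    // `from_mut_slice` views plain data as atomics...
    let atomics: &mut [AtomicBool] = AtomicBool::from_mut_slice(&mut flags);
    atomics[0].store(true, Ordering::Relaxed);
    // ...and the new `get_mut_slice` recovers the plain view.
    let plain: &mut [bool] = AtomicBool::get_mut_slice(atomics);
    plain[1] = true;
    assert_eq!(flags, [true, true, false, false]);
}
```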

File tree

1 file changed (+110, -0)

library/core/src/sync/atomic.rs (+110)
@@ -340,6 +340,40 @@ impl AtomicBool {
         unsafe { &mut *(v as *mut bool as *mut Self) }
     }
 
+    /// Get non-atomic access to a `&mut [AtomicBool]` slice.
+    ///
+    /// This is safe because the mutable reference guarantees that no other threads are
+    /// concurrently accessing the atomic data.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(atomic_from_mut, inline_const, scoped_threads)]
+    /// use std::sync::atomic::{AtomicBool, Ordering};
+    ///
+    /// let mut some_bools = [const { AtomicBool::new(false) }; 10];
+    ///
+    /// let view: &mut [bool] = AtomicBool::get_mut_slice(&mut some_bools);
+    /// assert_eq!(view, [false; 10]);
+    /// view[..5].copy_from_slice(&[true; 5]);
+    ///
+    /// std::thread::scope(|s| {
+    ///     for t in &some_bools[..5] {
+    ///         s.spawn(move || assert_eq!(t.load(Ordering::Relaxed), true));
+    ///     }
+    ///
+    ///     for f in &some_bools[5..] {
+    ///         s.spawn(move || assert_eq!(f.load(Ordering::Relaxed), false));
+    ///     }
+    /// });
+    /// ```
+    #[inline]
+    #[unstable(feature = "atomic_from_mut", issue = "76314")]
+    pub fn get_mut_slice(this: &mut [Self]) -> &mut [bool] {
+        // SAFETY: the mutable reference guarantees unique ownership.
+        unsafe { &mut *(this as *mut [Self] as *mut [bool]) }
+    }
+
     /// Get atomic access to a `&mut [bool]` slice.
     ///
     /// # Examples
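
The `as`-cast in the body is the interesting part: it is sound only because `AtomicBool` is documented to have the same in-memory representation as `bool`, so a slice of one can be reinterpreted as a slice of the other. A quick sanity check of that layout assumption (not part of the commit):

```rust
use std::mem::{align_of, size_of};
use std::sync::atomic::AtomicBool;

fn main() {
    // `AtomicBool` documents that it has the same in-memory representation
    // as `bool`, which is what makes the slice-level pointer cast legal.
    assert_eq!(size_of::<AtomicBool>(), size_of::<bool>());
    assert_eq!(align_of::<AtomicBool>(), align_of::<bool>());
}
```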
@@ -971,6 +1005,46 @@ impl<T> AtomicPtr<T> {
         unsafe { &mut *(v as *mut *mut T as *mut Self) }
     }
 
+    /// Get non-atomic access to a `&mut [AtomicPtr]` slice.
+    ///
+    /// This is safe because the mutable reference guarantees that no other threads are
+    /// concurrently accessing the atomic data.
+    ///
+    /// # Examples
+    ///
+    /// ```
+    /// #![feature(atomic_from_mut, inline_const, scoped_threads)]
+    /// use std::ptr::null_mut;
+    /// use std::sync::atomic::{AtomicPtr, Ordering};
+    ///
+    /// let mut some_ptrs = [const { AtomicPtr::new(null_mut::<String>()) }; 10];
+    ///
+    /// let view: &mut [*mut String] = AtomicPtr::get_mut_slice(&mut some_ptrs);
+    /// assert_eq!(view, [null_mut::<String>(); 10]);
+    /// view
+    ///     .iter_mut()
+    ///     .enumerate()
+    ///     .for_each(|(i, ptr)| *ptr = Box::into_raw(Box::new(format!("iteration#{i}"))));
+    ///
+    /// std::thread::scope(|s| {
+    ///     for ptr in &some_ptrs {
+    ///         s.spawn(move || {
+    ///             let ptr = ptr.load(Ordering::Relaxed);
+    ///             assert!(!ptr.is_null());
+    ///
+    ///             let name = unsafe { Box::from_raw(ptr) };
+    ///             println!("Hello, {name}!");
+    ///         });
+    ///     }
+    /// });
+    /// ```
+    #[inline]
+    #[unstable(feature = "atomic_from_mut", issue = "76314")]
+    pub fn get_mut_slice(this: &mut [Self]) -> &mut [*mut T] {
+        // SAFETY: the mutable reference guarantees unique ownership.
+        unsafe { &mut *(this as *mut [Self] as *mut [*mut T]) }
+    }
+
     /// Get atomic access to a slice of pointers.
     ///
     /// # Examples
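
A complementary use of `AtomicPtr::get_mut_slice` is the cleanup phase after all threads have finished: once you hold `&mut` again, the remaining pointers can be reclaimed with plain reads and writes. A sketch, again assuming nightly with `atomic_from_mut` (the `slots` array and its contents are illustrative, not from the commit):

```rust
#![feature(atomic_from_mut)]
use std::ptr::null_mut;
use std::sync::atomic::{AtomicPtr, Ordering};

fn main() {
    let mut slots: [AtomicPtr<String>; 3] = Default::default(); // all null
    // Imagine worker threads published results into some of the slots:
    slots[1].store(Box::into_raw(Box::new("result".to_string())), Ordering::Release);

    // All threads are done and we hold `&mut`, so no atomic loads are needed:
    for p in AtomicPtr::get_mut_slice(&mut slots) {
        if !p.is_null() {
            // SAFETY: every non-null slot holds exactly one `Box::into_raw` pointer.
            drop(unsafe { Box::from_raw(*p) });
            *p = null_mut();
        }
    }
}
```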
@@ -1521,6 +1595,42 @@ macro_rules! atomic_int {
             unsafe { &mut *(v as *mut $int_type as *mut Self) }
         }
 
+        #[doc = concat!("Get non-atomic access to a `&mut [", stringify!($atomic_type), "]` slice.")]
+        ///
+        /// This is safe because the mutable reference guarantees that no other threads are
+        /// concurrently accessing the atomic data.
+        ///
+        /// # Examples
+        ///
+        /// ```
+        /// #![feature(atomic_from_mut, inline_const, scoped_threads)]
+        #[doc = concat!($extra_feature, "use std::sync::atomic::{", stringify!($atomic_type), ", Ordering};")]
+        ///
+        #[doc = concat!("let mut some_ints = [const { ", stringify!($atomic_type), "::new(0) }; 10];")]
+        ///
+        #[doc = concat!("let view: &mut [", stringify!($int_type), "] = ", stringify!($atomic_type), "::get_mut_slice(&mut some_ints);")]
+        /// assert_eq!(view, [0; 10]);
+        /// view
+        ///     .iter_mut()
+        ///     .enumerate()
+        ///     .for_each(|(idx, int)| *int = idx as _);
+        ///
+        /// std::thread::scope(|s| {
+        ///     some_ints
+        ///         .iter()
+        ///         .enumerate()
+        ///         .for_each(|(idx, int)| {
+        ///             s.spawn(move || assert_eq!(int.load(Ordering::Relaxed), idx as _));
+        ///         })
+        /// });
+        /// ```
+        #[inline]
+        #[unstable(feature = "atomic_from_mut", issue = "76314")]
+        pub fn get_mut_slice(this: &mut [Self]) -> &mut [$int_type] {
+            // SAFETY: the mutable reference guarantees unique ownership.
+            unsafe { &mut *(this as *mut [Self] as *mut [$int_type]) }
+        }
+
         #[doc = concat!("Get atomic access to a `&mut [", stringify!($int_type), "]` slice.")]
         ///
         /// # Examples
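
The macro stamps this method out once per integer atomic. A short runnable sketch for one concrete instantiation, `AtomicUsize` (again nightly-only behind `atomic_from_mut`):

```rust
#![feature(atomic_from_mut)]
use std::sync::atomic::{AtomicUsize, Ordering};

fn main() {
    let mut counters: [AtomicUsize; 4] = Default::default(); // all zero
    // While we hold `&mut`, initialize with plain writes instead of atomic RMWs.
    for (i, c) in AtomicUsize::get_mut_slice(&mut counters).iter_mut().enumerate() {
        *c = i;
    }
    assert_eq!(counters[3].load(Ordering::Relaxed), 3);
}
```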
