Merge #191
191: simplify strict-provenance polyfill r=matklad a=matklad

bors r+

Co-authored-by: Aleksey Kladov <aleksey.kladov@gmail.com>
bors[bot] and matklad authored Aug 16, 2022
2 parents f4645cb + d997723 commit 865ab57
Showing 3 changed files with 26 additions and 30 deletions.
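
Before the per-file diffs, the shape of the change: the old polyfill copied the unstable `strict_provenance` pointer methods onto `*mut T` through an extension trait, which is why `imp_std.rs` carried `#![allow(unstable_name_collisions)]`, while the new one exposes the same operations as free functions in a private `strict` module, so call sites go from `queue.addr()` to `strict::addr(queue)`. A minimal, self-contained sketch of the two shapes (illustrative only, not once_cell's public API; bodies reduced to the pointer-to-integer transmute the polyfill uses):

```rust
// Old shape: an extension trait on raw pointers. Method-style calls such as
// `p.addr()` collide with the unstable `strict_provenance` inherent methods,
// hence the `#![allow(unstable_name_collisions)]` this commit removes.
trait Strict {
    fn addr(self) -> usize;
}

impl<T> Strict for *mut T {
    fn addr(self) -> usize {
        // Pointer-to-integer transmute: reads the address, discards provenance.
        unsafe { core::mem::transmute(self) }
    }
}

// New shape: the same operation as a free function in a private module.
// Nothing is added to `*mut T`, so nothing can collide with future std APIs.
mod strict {
    pub(crate) fn addr<T>(ptr: *mut T) -> usize {
        unsafe { core::mem::transmute(ptr) }
    }
}

fn main() {
    let x = 0u32;
    let p = &x as *const u32 as *mut u32;
    // Identical result; only the call syntax differs.
    assert_eq!(Strict::addr(p), strict::addr(p));
}
```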
5 changes: 5 additions & 0 deletions CHANGELOG.md
@@ -4,6 +4,11 @@
 
 -
 
+## 1.13.1
+
+- Make implementation compliant with [strict provenance](https://github.com/rust-lang/rust/issues/95228).
+- Upgrade `atomic-polyfill` to `1.0`
+
 ## 1.13.0
 
 - Add `Lazy::get`, similar to `OnceCell::get`.
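
What "compliant with strict provenance" amounts to here, as a small self-contained sketch (hypothetical pointer and tag values, not code from the crate): reading a pointer's address is fine, but a usable pointer should not be re-materialized from a bare `usize`; a tagged pointer is instead derived from the original pointer by a byte offset, which is what the `with_addr`/`map_addr` polyfill in `src/imp_std.rs` below desugars to.

```rust
fn main() {
    let mut value = 42u64;
    let p: *mut u64 = &mut value;

    // Pattern the changelog entry moves away from: build the tagged pointer
    // from a bare integer. The address is right, but under the strict
    // provenance rules the result has no provenance to back a later read.
    let tag_via_int = ((p as usize) | 1) as *mut u64;

    // Strict-provenance-friendly pattern: derive the tagged pointer from `p`
    // itself via a byte offset (u64 is 8-aligned, so `| 1` adds exactly 1).
    let want = (p as usize) | 1;
    let offset = (want as isize).wrapping_sub(p as usize as isize);
    let tag_via_offset = (p as *mut u8).wrapping_offset(offset) as *mut u64;

    // Same address either way; only the second keeps `p`'s provenance.
    // (The real polyfill also reads the address with a transmute, `strict::addr`,
    // rather than an `as usize` cast.)
    assert_eq!(tag_via_offset as usize, tag_via_int as usize);

    // Strip the tag by offsetting again, then read through the result.
    let untagged = (tag_via_offset as *mut u8).wrapping_offset(-1) as *mut u64;
    unsafe { assert_eq!(*untagged, 42) };
}
```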
2 changes: 1 addition & 1 deletion Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "once_cell"
-version = "1.13.0"
+version = "1.13.1"
 authors = ["Aleksey Kladov <aleksey.kladov@gmail.com>"]
 license = "MIT OR Apache-2.0"
 edition = "2018"
49 changes: 20 additions & 29 deletions src/imp_std.rs
@@ -3,9 +3,6 @@
 // * no poisoning
 // * init function can fail
 
-// Our polyfills collide with the strict_provenance feature
-#![allow(unstable_name_collisions)]
-
 use std::{
     cell::{Cell, UnsafeCell},
     hint::unreachable_unchecked,
@@ -174,11 +171,11 @@ impl Drop for Guard<'_> {
     fn drop(&mut self) {
         let queue = self.queue.swap(self.new_queue, Ordering::AcqRel);
 
-        let state = queue.addr() & STATE_MASK;
+        let state = strict::addr(queue) & STATE_MASK;
         assert_eq!(state, RUNNING);
 
         unsafe {
-            let mut waiter = queue.map_addr(|q| q & !STATE_MASK);
+            let mut waiter = strict::map_addr(queue, |q| q & !STATE_MASK);
             while !waiter.is_null() {
                 let next = (*waiter).next;
                 let thread = (*waiter).thread.take().unwrap();
@@ -201,13 +198,13 @@ fn initialize_or_wait(queue: &AtomicPtr<Waiter>, mut init: Option<&mut dyn FnMut
     let mut curr_queue = queue.load(Ordering::Acquire);
 
     loop {
-        let curr_state = curr_queue.addr() & STATE_MASK;
+        let curr_state = strict::addr(curr_queue) & STATE_MASK;
         match (curr_state, &mut init) {
             (COMPLETE, _) => return,
             (INCOMPLETE, Some(init)) => {
                 let exchange = queue.compare_exchange(
                     curr_queue,
-                    curr_queue.map_addr(|q| (q & !STATE_MASK) | RUNNING),
+                    strict::map_addr(curr_queue, |q| (q & !STATE_MASK) | RUNNING),
                     Ordering::Acquire,
                     Ordering::Acquire,
                 );
@@ -231,23 +228,23 @@ }
 }
 
 fn wait(queue: &AtomicPtr<Waiter>, mut curr_queue: *mut Waiter) {
-    let curr_state = curr_queue.addr() & STATE_MASK;
+    let curr_state = strict::addr(curr_queue) & STATE_MASK;
     loop {
         let node = Waiter {
             thread: Cell::new(Some(thread::current())),
             signaled: AtomicBool::new(false),
-            next: curr_queue.map_addr(|q| q & !STATE_MASK),
+            next: strict::map_addr(curr_queue, |q| q & !STATE_MASK),
         };
         let me = &node as *const Waiter as *mut Waiter;
 
         let exchange = queue.compare_exchange(
             curr_queue,
-            me.map_addr(|q| q | curr_state),
+            strict::map_addr(me, |q| q | curr_state),
             Ordering::Release,
             Ordering::Relaxed,
         );
         if let Err(new_queue) = exchange {
-            if new_queue.addr() & STATE_MASK != curr_state {
+            if strict::addr(new_queue) & STATE_MASK != curr_state {
                 return;
             }
             curr_queue = new_queue;
@@ -261,32 +258,26 @@ fn wait(queue: &AtomicPtr<Waiter>, mut curr_queue: *mut Waiter) {
     }
 }
 
-// This trait is copied directly from the implementation of https://crates.io/crates/sptr
-trait Strict {
-    type Pointee;
-    fn addr(self) -> usize;
-    fn with_addr(self, addr: usize) -> Self;
-    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self;
-}
-
-impl<T> Strict for *mut T {
-    type Pointee = T;
-
+// Polyfill of strict provenance from https://crates.io/crates/sptr.
+//
+// Use free-standing function rather than a trait to keep things simple and
+// avoid any potential conflicts with future stabile std API.
+mod strict {
     #[must_use]
     #[inline]
-    fn addr(self) -> usize
+    pub(crate) fn addr<T>(ptr: *mut T) -> usize
     where
         T: Sized,
     {
         // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
         // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
         // provenance).
-        unsafe { core::mem::transmute(self) }
+        unsafe { core::mem::transmute(ptr) }
     }
 
     #[must_use]
     #[inline]
-    fn with_addr(self, addr: usize) -> Self
+    pub(crate) fn with_addr<T>(ptr: *mut T, addr: usize) -> *mut T
     where
         T: Sized,
     {
@@ -295,23 +286,23 @@ impl<T> Strict for *mut T {
         // In the mean-time, this operation is defined to be "as if" it was
         // a wrapping_offset, so we can emulate it as such. This should properly
         // restore pointer provenance even under today's compiler.
-        let self_addr = self.addr() as isize;
+        let self_addr = self::addr(ptr) as isize;
         let dest_addr = addr as isize;
         let offset = dest_addr.wrapping_sub(self_addr);
 
         // This is the canonical desugarring of this operation,
         // but `pointer::cast` was only stabilized in 1.38.
         // self.cast::<u8>().wrapping_offset(offset).cast::<T>()
-        (self as *mut u8).wrapping_offset(offset) as *mut T
+        (ptr as *mut u8).wrapping_offset(offset) as *mut T
     }
 
     #[must_use]
     #[inline]
-    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
+    pub(crate) fn map_addr<T>(ptr: *mut T, f: impl FnOnce(usize) -> usize) -> *mut T
     where
         T: Sized,
     {
-        self.with_addr(f(self.addr()))
+        self::with_addr(ptr, f(addr(ptr)))
     }
 }
 
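
The comment on the new `strict` module explains the shape; the pattern it supports throughout `imp_std.rs` is packing the cell's initialization state into the low bits of the waiter-queue pointer (`STATE_MASK`, `RUNNING`). A self-contained usage sketch: `addr`/`with_addr`/`map_addr` are copied from the diff above, while `Waiter` and the constants are simplified stand-ins for the ones defined elsewhere in `imp_std.rs`.

```rust
mod strict {
    pub(crate) fn addr<T>(ptr: *mut T) -> usize {
        // Same body as the polyfill above: a pointer-to-integer transmute.
        unsafe { core::mem::transmute(ptr) }
    }

    pub(crate) fn with_addr<T>(ptr: *mut T, addr: usize) -> *mut T {
        // Emulate `with_addr` as a wrapping byte offset from `ptr`.
        let offset = (addr as isize).wrapping_sub(self::addr(ptr) as isize);
        (ptr as *mut u8).wrapping_offset(offset) as *mut T
    }

    pub(crate) fn map_addr<T>(ptr: *mut T, f: impl FnOnce(usize) -> usize) -> *mut T {
        self::with_addr(ptr, f(self::addr(ptr)))
    }
}

// Simplified stand-ins: the real Waiter also carries `thread` and `signaled`.
struct Waiter {
    next: *mut Waiter,
}

const RUNNING: usize = 0x1;
const STATE_MASK: usize = 0b11;

fn main() {
    // `Waiter` is at least 4-byte aligned, so the two low bits of its address
    // are free to carry the state, as in imp_std.rs.
    let mut node = Waiter { next: std::ptr::null_mut() };
    let head: *mut Waiter = &mut node;

    // Publish the node with the state packed into the low bits, the way
    // `wait`/`initialize_or_wait` build the value they compare_exchange in.
    let tagged = strict::map_addr(head, |q| q | RUNNING);

    // Reading the state back is a pure address operation ...
    assert_eq!(strict::addr(tagged) & STATE_MASK, RUNNING);

    // ... and stripping the tag yields a pointer that is still valid to use,
    // because it was derived from `head` rather than from a bare integer.
    let untagged = strict::map_addr(tagged, |q| q & !STATE_MASK);
    assert!(std::ptr::eq(untagged, head));
    unsafe { assert!((*untagged).next.is_null()) };
}
```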
