From dd413a9bb6d0b298d84305effb3a8393bd1e0f9e Mon Sep 17 00:00:00 2001
From: Ben Kimock
Date: Mon, 4 Jul 2022 12:44:26 -0400
Subject: [PATCH] Paste in enough of sptr to have a ptr.map_addr

---
 src/imp_std.rs | 80 ++++++++++++++++++++++++++++++++++++++++++--------
 1 file changed, 68 insertions(+), 12 deletions(-)

diff --git a/src/imp_std.rs b/src/imp_std.rs
index f023c69..31c4555 100644
--- a/src/imp_std.rs
+++ b/src/imp_std.rs
@@ -3,6 +3,9 @@
 //   * no poisoning
 //   * init function can fail
 
+// Our polyfills collide with the strict_provenance feature
+#![allow(unstable_name_collisions)]
+
 use std::{
     cell::{Cell, UnsafeCell},
     hint::unreachable_unchecked,
@@ -145,8 +148,8 @@ impl<T> OnceCell<T> {
 const INCOMPLETE: usize = 0x0;
 const RUNNING: usize = 0x1;
 const COMPLETE: usize = 0x2;
-const INCOMPLETE_PTR: *mut Waiter = 0x0 as *mut Waiter;
-const COMPLETE_PTR: *mut Waiter = 0x2 as *mut Waiter;
+const INCOMPLETE_PTR: *mut Waiter = INCOMPLETE as *mut Waiter;
+const COMPLETE_PTR: *mut Waiter = COMPLETE as *mut Waiter;
 
 // Mask to learn about the state. All other bits are the queue of waiters if
 // this is in the RUNNING state.
@@ -171,11 +174,11 @@ impl Drop for Guard<'_> {
     fn drop(&mut self) {
         let queue = self.queue.swap(self.new_queue, Ordering::AcqRel);
 
-        let state = queue as usize & STATE_MASK;
-        assert_eq!(state, RUNNING as usize);
+        let state = queue.addr() & STATE_MASK;
+        assert_eq!(state, RUNNING);
 
         unsafe {
-            let mut waiter = (queue as *mut u8).wrapping_sub(state) as *mut Waiter;
+            let mut waiter = queue.map_addr(|q| q & !STATE_MASK);
             while !waiter.is_null() {
                 let next = (*waiter).next;
                 let thread = (*waiter).thread.take().unwrap();
@@ -198,14 +201,13 @@ fn initialize_or_wait(queue: &AtomicPtr<Waiter>, mut init: Option<&mut dyn FnMut
     let mut curr_queue = queue.load(Ordering::Acquire);
 
     loop {
-        let curr_state = curr_queue as usize & STATE_MASK;
+        let curr_state = curr_queue.addr() & STATE_MASK;
         match (curr_state, &mut init) {
             (COMPLETE, _) => return,
             (INCOMPLETE, Some(init)) => {
                 let exchange = queue.compare_exchange(
                     curr_queue,
-                    (curr_queue as *mut u8).wrapping_sub(curr_state).wrapping_add(RUNNING)
-                        as *mut Waiter,
+                    curr_queue.map_addr(|q| (q & !STATE_MASK) | RUNNING),
                     Ordering::Acquire,
                     Ordering::Acquire,
                 );
@@ -229,23 +231,23 @@ fn initialize_or_wait(queue: &AtomicPtr<Waiter>, mut init: Option<&mut dyn FnMut
 }
 
 fn wait(queue: &AtomicPtr<Waiter>, mut curr_queue: *mut Waiter) {
-    let curr_state = curr_queue as usize & STATE_MASK;
+    let curr_state = curr_queue.addr() & STATE_MASK;
     loop {
         let node = Waiter {
             thread: Cell::new(Some(thread::current())),
             signaled: AtomicBool::new(false),
-            next: (curr_queue as *mut u8).wrapping_sub(curr_state) as *mut Waiter,
+            next: curr_queue.map_addr(|q| q & !STATE_MASK),
         };
         let me = &node as *const Waiter as *mut Waiter;
 
         let exchange = queue.compare_exchange(
             curr_queue,
-            (me as *mut u8).wrapping_add(curr_state) as *mut Waiter,
+            me.map_addr(|q| q | curr_state),
             Ordering::Release,
             Ordering::Relaxed,
         );
         if let Err(new_queue) = exchange {
-            if new_queue as usize & STATE_MASK != curr_state {
+            if new_queue.addr() & STATE_MASK != curr_state {
                 return;
             }
             curr_queue = new_queue;
@@ -259,6 +261,60 @@ fn wait(queue: &AtomicPtr<Waiter>, mut curr_queue: *mut Waiter) {
     }
 }
 
+// This trait is copied directly from the implementation of https://crates.io/crates/sptr
+trait Strict {
+    type Pointee;
+    fn addr(self) -> usize;
+    fn with_addr(self, addr: usize) -> Self;
+    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self;
+}
+
+impl<T> Strict for *mut T {
+    type Pointee = T;
+
+    #[must_use]
+    #[inline]
+    fn addr(self) -> usize
+    where
+        T: Sized,
+    {
+        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
+        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
+        // provenance).
+        unsafe { core::mem::transmute(self) }
+    }
+
+    #[must_use]
+    #[inline]
+    fn with_addr(self, addr: usize) -> Self
+    where
+        T: Sized,
+    {
+        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
+        //
+        // In the mean-time, this operation is defined to be "as if" it was
+        // a wrapping_offset, so we can emulate it as such. This should properly
+        // restore pointer provenance even under today's compiler.
+        let self_addr = self.addr() as isize;
+        let dest_addr = addr as isize;
+        let offset = dest_addr.wrapping_sub(self_addr);
+
+        // This is the canonical desugarring of this operation,
+        // but `pointer::cast` was only stabilized in 1.38.
+        // self.cast::<u8>().wrapping_offset(offset).cast::<T>()
+        (self as *mut u8).wrapping_offset(offset) as *mut T
+    }
+
+    #[must_use]
+    #[inline]
+    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
+    where
+        T: Sized,
+    {
+        self.with_addr(f(self.addr()))
+    }
+}
+
 // These test are snatched from std as well.
 #[cfg(test)]
 mod tests {
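
The conversions above exist because once_cell packs the RUNNING/COMPLETE state into the low bits of its waiter-queue pointer, and `map_addr` lets those bits be set and cleared without casting the pointer through `usize`, a round trip that discards provenance under strict-provenance tooling such as Miri. Below is a rough, self-contained sketch of that low-bit tagging pattern. It is not part of the commit: `STATE_MASK`, `RUNNING`, and `Node` are stand-in names, and it assumes a toolchain where the strict-provenance pointer methods `addr`/`map_addr` are available (recent stable Rust, or the `Strict` polyfill above on older compilers).

// Illustrative sketch only; names are stand-ins, not once_cell's actual items.
const STATE_MASK: usize = 0b11;
const RUNNING: usize = 0b01;

// Any type with alignment >= 4 leaves the two low address bits free for tags.
#[repr(align(4))]
struct Node {
    value: u32,
}

fn main() {
    let node = Node { value: 42 };
    let ptr: *mut Node = &node as *const Node as *mut Node;

    // Tag the pointer with the RUNNING state in its low bits, keeping the
    // original provenance (no pointer -> usize -> pointer round trip).
    let tagged = ptr.map_addr(|a| a | RUNNING);

    // Later, recover both the state and a dereferenceable pointer.
    let state = tagged.addr() & STATE_MASK;
    let untagged = tagged.map_addr(|a| a & !STATE_MASK);

    assert_eq!(state, RUNNING);
    assert_eq!(untagged, ptr);
    assert_eq!(unsafe { (*untagged).value }, 42);
}

Compared with `ptr as usize` arithmetic, `map_addr` keeps the resulting pointer tied to its original allocation, which is what allows the patched queue manipulation to pass strict-provenance checking while compiling unchanged on older toolchains via the polyfill.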