diff --git a/src/imp_std.rs b/src/imp_std.rs
index f023c69..286b42b 100644
--- a/src/imp_std.rs
+++ b/src/imp_std.rs
@@ -175,7 +175,7 @@ impl Drop for Guard<'_> {
         assert_eq!(state, RUNNING as usize);
 
         unsafe {
-            let mut waiter = (queue as *mut u8).wrapping_sub(state) as *mut Waiter;
+            let mut waiter = queue.map_addr(|q| q & !STATE_MASK);
             while !waiter.is_null() {
                 let next = (*waiter).next;
                 let thread = (*waiter).thread.take().unwrap();
@@ -204,8 +204,7 @@ fn initialize_or_wait(queue: &AtomicPtr<Waiter>, mut init: Option<&mut dyn FnMut
             (INCOMPLETE, Some(init)) => {
                 let exchange = queue.compare_exchange(
                     curr_queue,
-                    (curr_queue as *mut u8).wrapping_sub(curr_state).wrapping_add(RUNNING)
-                        as *mut Waiter,
+                    curr_queue.map_addr(|q| (q & !STATE_MASK) | RUNNING),
                     Ordering::Acquire,
                     Ordering::Acquire,
                 );
@@ -234,13 +233,13 @@ fn wait(queue: &AtomicPtr<Waiter>, mut curr_queue: *mut Waiter) {
         let node = Waiter {
             thread: Cell::new(Some(thread::current())),
             signaled: AtomicBool::new(false),
-            next: (curr_queue as *mut u8).wrapping_sub(curr_state) as *mut Waiter,
+            next: curr_queue.map_addr(|q| q & !STATE_MASK),
         };
         let me = &node as *const Waiter as *mut Waiter;
 
         let exchange = queue.compare_exchange(
             curr_queue,
-            (me as *mut u8).wrapping_add(curr_state) as *mut Waiter,
+            me.map_addr(|q| q | curr_state),
             Ordering::Release,
             Ordering::Relaxed,
         );
@@ -259,6 +258,60 @@ fn wait(queue: &AtomicPtr<Waiter>, mut curr_queue: *mut Waiter) {
     }
 }
 
+// This trait is copied directly from the implementation of https://crates.io/crates/sptr
+trait Strict {
+    type Pointee;
+    fn addr(self) -> usize;
+    fn with_addr(self, addr: usize) -> Self;
+    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self;
+}
+
+impl<T> Strict for *mut T {
+    type Pointee = T;
+
+    #[must_use]
+    #[inline]
+    fn addr(self) -> usize
+    where
+        T: Sized,
+    {
+        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
+        // SAFETY: Pointer-to-integer transmutes are valid (if you are okay with losing the
+        // provenance).
+        unsafe { core::mem::transmute(self) }
+    }
+
+    #[must_use]
+    #[inline]
+    fn with_addr(self, addr: usize) -> Self
+    where
+        T: Sized,
+    {
+        // FIXME(strict_provenance_magic): I am magic and should be a compiler intrinsic.
+        //
+        // In the mean-time, this operation is defined to be "as if" it was
+        // a wrapping_offset, so we can emulate it as such. This should properly
+        // restore pointer provenance even under today's compiler.
+        let self_addr = self.addr() as isize;
+        let dest_addr = addr as isize;
+        let offset = dest_addr.wrapping_sub(self_addr);
+
+        // This is the canonical desugarring of this operation,
+        // but `pointer::cast` was only stabilized in 1.38.
+        // self.cast::<u8>().wrapping_offset(offset).cast::<T>()
+        (self as *mut u8).wrapping_offset(offset) as *mut T
+    }
+
+    #[must_use]
+    #[inline]
+    fn map_addr(self, f: impl FnOnce(usize) -> usize) -> Self
+    where
+        T: Sized,
+    {
+        self.with_addr(f(self.addr()))
+    }
+}
+
 // These test are snatched from std as well.
 #[cfg(test)]
 mod tests {
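
Background for the change above: the waiter queue packs the cell's state into the low bits of the queue head pointer (a real `Waiter` address is sufficiently aligned, so those bits are otherwise zero). The old code converted between the tagged and untagged forms with `wrapping_add`/`wrapping_sub` byte offsets; the new code expresses the same thing as an address mask through `map_addr`, so the result is always derived from the original pointer and its provenance is preserved, as the `with_addr` comment in the diff notes. The snippet below is a minimal, self-contained sketch of this low-bit-tagging pattern, not part of the diff: `Node`, the local `map_addr` helper, and the concrete mask and tag values are illustrative stand-ins for `Waiter`, the `Strict` trait, and the crate's `STATE_MASK`/`RUNNING` constants.

    // Illustrative constants; the real values live in imp_std.rs.
    const STATE_MASK: usize = 0b11; // assumes the node type is at least 4-byte aligned
    const RUNNING: usize = 0b01;    // illustrative tag value

    #[repr(align(4))]
    struct Node {
        value: u32,
    }

    // Free-function version of the `map_addr` added in the diff: the new pointer is
    // produced as a wrapping byte offset from the old one, so it inherits its provenance.
    fn map_addr<T>(ptr: *mut T, f: impl FnOnce(usize) -> usize) -> *mut T {
        let old_addr = ptr as usize;
        let new_addr = f(old_addr);
        let offset = (new_addr as isize).wrapping_sub(old_addr as isize);
        (ptr as *mut u8).wrapping_offset(offset) as *mut T
    }

    fn main() {
        let raw = Box::into_raw(Box::new(Node { value: 42 }));

        // Tag: stash a state value in the (known-zero) low bits of the pointer.
        let tagged = map_addr(raw, |a| a | RUNNING);
        assert_eq!(tagged as usize & STATE_MASK, RUNNING);

        // Untag: mask the state back out before using the pointer as a pointer.
        let untagged = map_addr(tagged, |a| a & !STATE_MASK);
        assert_eq!(unsafe { (*untagged).value }, 42);

        // Clean up the allocation.
        drop(unsafe { Box::from_raw(untagged) });
    }

The point of the `map_addr` formulation is that it never materializes a pointer from a bare integer: the tagged and untagged pointers are always offsets from the pointer they came from, which is exactly what the `wrapping_offset` emulation in the diff guarantees on today's compilers.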