From 4793409e09a867501ac84b0df947bb572ba63d32 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 11 Oct 2022 19:36:46 +0200 Subject: [PATCH 1/4] Re-enable stacked borrows in CI Also improve performance of some tests in Miri --- .github/workflows/test.yaml | 3 +-- src/lib.rs | 13 ++++++++++--- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/.github/workflows/test.yaml b/.github/workflows/test.yaml index 843e325..bde4450 100644 --- a/.github/workflows/test.yaml +++ b/.github/workflows/test.yaml @@ -231,8 +231,7 @@ jobs: - name: Run miri env: PROPTEST_CASES: "10" - # TODO: Do something with the stacked borrows. Figure out what it means. - MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-disable-stacked-borrows" + MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-permissive-provenance" run: cargo miri test --all-features thread_sanitizer-MacOS: diff --git a/src/lib.rs b/src/lib.rs index 7e2e6c2..c9526a3 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -883,7 +883,7 @@ macro_rules! t { const WRITER_CNT: usize = 2; const READER_CNT: usize = 3; #[cfg(miri)] - const ITERATIONS: usize = 10; + const ITERATIONS: usize = 5; #[cfg(not(miri))] const ITERATIONS: usize = 100; const SEQ: usize = 50; @@ -1160,6 +1160,9 @@ macro_rules! t { #[test] /// Make sure the reference count and compare_and_swap works as expected. fn cas_ref_cnt() { + #[cfg(miri)] + const ITERATIONS: usize = 10; + #[cfg(not(miri))] const ITERATIONS: usize = 50; let shared = ArcSwap::from(Arc::new(0)); for i in 0..ITERATIONS { @@ -1173,7 +1176,7 @@ macro_rules! t { // Fill up the slots sometimes let fillup = || { if i % 2 == 0 { - Some((0..50).map(|_| shared.load()).collect::<Vec<_>>()) + Some((0..ITERATIONS).map(|_| shared.load()).collect::<Vec<_>>()) } else { None } @@ -1272,10 +1275,14 @@ mod tests { /// created, but contain full Arcs. 
#[test] fn lease_overflow() { + #[cfg(miri)] + const GUARD_COUNT: usize = 100; + #[cfg(not(miri))] + const GUARD_COUNT: usize = 1000; let a = Arc::new(0); let shared = ArcSwap::from(Arc::clone(&a)); assert_eq!(2, Arc::strong_count(&a)); - let mut guards = (0..1000).map(|_| shared.load()).collect::<Vec<_>>(); + let mut guards = (0..GUARD_COUNT).map(|_| shared.load()).collect::<Vec<_>>(); let count = Arc::strong_count(&a); assert!(count > 2); let guard = shared.load(); From eaeae27d72510d9ecc2e620c06c7b42059eea90d Mon Sep 17 00:00:00 2001 From: clubby789 Date: Wed, 19 Oct 2022 21:11:33 +0100 Subject: [PATCH 2/4] Avoid stacked-borrows violations Turn into a pointer without re-borrowing. --- src/ref_cnt.rs | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/ref_cnt.rs b/src/ref_cnt.rs index 5128f5c..2ea4ec6 100644 --- a/src/ref_cnt.rs +++ b/src/ref_cnt.rs @@ -92,7 +92,9 @@ unsafe impl<T> RefCnt for Arc<T> { Arc::into_raw(me) as *mut T } fn as_ptr(me: &Arc<T>) -> *mut T { - me as &T as *const T as *mut T + // SAFETY: &T cast to *const T will always be aligned, initialised and valid for reads + let ptr = Arc::into_raw(unsafe { std::ptr::read(me) }); + ptr as *mut T } unsafe fn from_ptr(ptr: *const T) -> Arc<T> { Arc::from_raw(ptr) @@ -105,7 +107,9 @@ unsafe impl<T> RefCnt for Rc<T> { Rc::into_raw(me) as *mut T } fn as_ptr(me: &Rc<T>) -> *mut T { - me as &T as *const T as *mut T + // SAFETY: &T cast to *const T will always be aligned, initialised and valid for reads + let ptr = Rc::into_raw(unsafe { std::ptr::read(me) }); + ptr as *mut T } unsafe fn from_ptr(ptr: *const T) -> Rc<T> { Rc::from_raw(ptr) From fd04e208d52331563b5f5d38a7b021f936c36587 Mon Sep 17 00:00:00 2001 From: Michal 'vorner' Vaner Date: Sun, 25 Dec 2022 13:26:36 +0100 Subject: [PATCH 3/4] Future-proof as_ptr impls Introduce a bit of future proof code to properly "close the brackets" around some operations (today these are no-ops). Provide a bit of documentation for it, explaining the gymnastics there. 
--- CHANGELOG.md | 2 ++ src/ref_cnt.rs | 23 ++++++++++++++++++++++- 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b41d0c6..ed9669a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,5 @@ +* Avoid violating stacked borrows (AFAIK these are still experimental and not + normative, but better safe than sorry). (#80). * The `AccessConvert` wrapper is needed less often in practice (#77). # 1.5.1 diff --git a/src/ref_cnt.rs b/src/ref_cnt.rs index 2ea4ec6..4c31739 100644 --- a/src/ref_cnt.rs +++ b/src/ref_cnt.rs @@ -1,3 +1,4 @@ +use std::mem; use std::ptr; use std::rc::Rc; use std::sync::Arc; @@ -92,9 +93,29 @@ unsafe impl<T> RefCnt for Arc<T> { Arc::into_raw(me) as *mut T } fn as_ptr(me: &Arc<T>) -> *mut T { + // Slightly convoluted way to do this, but this avoids stacked borrows violations. The same + // intention as + // + // me as &T as *const T as *mut T + // + // We first create a "shallow copy" of me - one that doesn't really own its ref count + // (that's OK, me _does_ own it, so it can't be destroyed in the meantime). + // Then we can use into_raw (which preserves not having the ref count). + // + // We need to "revert" the changes we did. In current std implementation, the combination + // of from_raw and forget is no-op. But formally, into_raw shall be paired with from_raw + // and that read shall be paired with forget to properly "close the brackets". In future + // versions of STD, these may become something else that's not really no-op (unlikely, but + // possible), so we future-proof it a bit. 
+ // SAFETY: &T cast to *const T will always be aligned, initialised and valid for reads let ptr = Arc::into_raw(unsafe { std::ptr::read(me) }); - ptr as *mut T + let ptr = ptr as *mut T; + + // SAFETY: We got the pointer from into_raw just above + mem::forget(unsafe { Arc::from_raw(ptr) }); + + ptr } unsafe fn from_ptr(ptr: *const T) -> Arc<T> { Arc::from_raw(ptr) From 97a65cd1e99739e0683700719fd33afa6865c2b2 Mon Sep 17 00:00:00 2001 From: Michal 'vorner' Vaner Date: Sun, 25 Dec 2022 13:37:53 +0100 Subject: [PATCH 4/4] Store the debt list as pointers, not refs The idea is that the whole list (tail of the list) is synchronized by the access of the head. Nevertheless, we fill in the next pointer before that and having a reference is formally considered a read of the destination. Even though we _use_ the reference only after the synchronization point, we have it before and MIRI considers it a data race. Using a pointer is fine, as dereferencing is manual and considered read only at that point. --- CHANGELOG.md | 1 + src/debt/list.rs | 13 +++++++++---- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ed9669a..0dea87a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,4 @@ +* Fix a data race reported by MIRI. * Avoid violating stacked borrows (AFAIK these are still experimental and not normative, but better safe than sorry). (#80). * The `AccessConvert` wrapper is needed less often in practice (#77). diff --git a/src/debt/list.rs b/src/debt/list.rs index a98a02e..3d17388 100644 --- a/src/debt/list.rs +++ b/src/debt/list.rs @@ -58,7 +58,12 @@ pub(crate) struct Node { fast: FastSlots, helping: HelpingSlots, in_use: AtomicUsize, - next: Option<&'static Node>, + // Next node in the list. + // + // It is a pointer because we touch it before synchronization (we don't _dereference_ it before + // synchronization, only manipulate the pointer itself). 
That is illegal according to strict + // interpretation of the rules by MIRI on references. + next: *const Node, active_writers: AtomicUsize, } @@ -68,7 +73,7 @@ impl Default for Node { fast: FastSlots::default(), helping: HelpingSlots::default(), in_use: AtomicUsize::new(NODE_USED), - next: None, + next: ptr::null(), active_writers: AtomicUsize::new(0), } } @@ -94,7 +99,7 @@ impl Node { if result.is_some() { return result; } - current = node.next; + current = unsafe { node.next.as_ref() }; } None } @@ -165,7 +170,7 @@ impl Node { // compare_exchange below. let mut head = LIST_HEAD.load(Relaxed); loop { - node.next = unsafe { head.as_ref() }; + node.next = head; if let Err(old) = LIST_HEAD.compare_exchange_weak( head, node, // We need to release *the whole chain* here. For that, we need to