UB fixes #86

Merged: 4 commits, Dec 25, 2022
3 changes: 1 addition & 2 deletions .github/workflows/test.yaml
@@ -231,8 +231,7 @@ jobs:
- name: Run miri
env:
PROPTEST_CASES: "10"
-# TODO: Do something with the stacked borrows. Figure out what it means.
-MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-disable-stacked-borrows"
+MIRIFLAGS: "-Zmiri-disable-isolation -Zmiri-permissive-provenance"
run: cargo miri test --all-features

thread_sanitizer-MacOS:
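The replacement flag keeps Miri's checker running and only relaxes pointer provenance, rather than switching stacked borrows off wholesale. As a rough illustration (our own sketch, not code from this PR) of what the two modes differ on: an int-to-pointer round trip like the one below is rejected under -Zmiri-strict-provenance but explicitly permitted under -Zmiri-permissive-provenance.

fn main() {
    let x = 42u8;
    // Casting a pointer to an integer erases its provenance...
    let addr = &x as *const u8 as usize;
    // ...and casting back has to guess one. Miri rejects this cast under
    // -Zmiri-strict-provenance and accepts it under -Zmiri-permissive-provenance.
    let p = addr as *const u8;
    assert_eq!(unsafe { *p }, 42);
}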
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,6 @@
+* Fix a data race reported by MIRI.
+* Avoid violating stacked borrows (AFAIK these are still experimental and not
+  normative, but better safe than sorry). (#80).
* The `AccessConvert` wrapper is needed less often in practice (#77).

# 1.5.1
13 changes: 9 additions & 4 deletions src/debt/list.rs
@@ -58,7 +58,12 @@ pub(crate) struct Node {
fast: FastSlots,
helping: HelpingSlots,
in_use: AtomicUsize,
-next: Option<&'static Node>,
+// Next node in the list.
+//
+// It is a pointer because we touch it before synchronization (we don't _dereference_ it before
+// synchronization, only manipulate the pointer itself). That is illegal according to strict
+// interpretation of the rules by MIRI on references.
+next: *const Node,
active_writers: AtomicUsize,
}

@@ -68,7 +73,7 @@ impl Default for Node {
fast: FastSlots::default(),
helping: HelpingSlots::default(),
in_use: AtomicUsize::new(NODE_USED),
-next: None,
+next: ptr::null(),
active_writers: AtomicUsize::new(0),
}
}
@@ -94,7 +99,7 @@ impl Node {
if result.is_some() {
return result;
}
-current = node.next;
+current = unsafe { node.next.as_ref() };
}
None
}
@@ -165,7 +170,7 @@ impl Node {
// compare_exchange below.
let mut head = LIST_HEAD.load(Relaxed);
loop {
-node.next = unsafe { head.as_ref() };
+node.next = head;
if let Err(old) = LIST_HEAD.compare_exchange_weak(
head, node,
// We need to release *the whole chain* here. For that, we need to
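Putting the list changes together: `next` stays a raw pointer for its entire life, the CAS publishes it with Release ordering, and a reference is only materialised after the corresponding Acquire load. A condensed, self-contained sketch of that pattern (field and function names are ours, simplified from the crate's actual list):

use std::ptr;
use std::sync::atomic::{AtomicPtr, Ordering};

struct Node {
    value: usize,
    // Raw pointer, not Option<&'static Node>: it is written before the node
    // is published, so no reference should be asserted yet.
    next: *const Node,
}

static HEAD: AtomicPtr<Node> = AtomicPtr::new(ptr::null_mut());

/// Prepend a heap-allocated (and thereafter leaked) node to the list.
fn push(node: *mut Node) {
    let mut head = HEAD.load(Ordering::Relaxed);
    loop {
        // Only the pointer value is manipulated here; nothing another thread
        // could observe is dereferenced.
        unsafe { (*node).next = head };
        match HEAD.compare_exchange_weak(head, node, Ordering::Release, Ordering::Relaxed) {
            Ok(_) => return,
            Err(actual) => head = actual,
        }
    }
}

fn traverse(mut f: impl FnMut(usize)) {
    // References are created only after the Acquire load synchronizes with
    // the Release CAS in push.
    let mut current = unsafe { HEAD.load(Ordering::Acquire).as_ref() };
    while let Some(node) = current {
        f(node.value);
        current = unsafe { node.next.as_ref() };
    }
}

fn main() {
    for i in 0..3 {
        push(Box::into_raw(Box::new(Node { value: i, next: ptr::null() })));
    }
    traverse(|v| println!("{v}")); // prints 2, 1, 0
}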
13 changes: 10 additions & 3 deletions src/lib.rs
@@ -883,7 +883,7 @@ macro_rules! t {
const WRITER_CNT: usize = 2;
const READER_CNT: usize = 3;
#[cfg(miri)]
-const ITERATIONS: usize = 10;
+const ITERATIONS: usize = 5;
#[cfg(not(miri))]
const ITERATIONS: usize = 100;
const SEQ: usize = 50;
@@ -1160,6 +1160,9 @@ macro_rules! t {
#[test]
/// Make sure the reference count and compare_and_swap works as expected.
fn cas_ref_cnt() {
+#[cfg(miri)]
+const ITERATIONS: usize = 10;
+#[cfg(not(miri))]
const ITERATIONS: usize = 50;
let shared = ArcSwap::from(Arc::new(0));
for i in 0..ITERATIONS {
@@ -1173,7 +1176,7 @@ macro_rules! t {
// Fill up the slots sometimes
let fillup = || {
if i % 2 == 0 {
-Some((0..50).map(|_| shared.load()).collect::<Vec<_>>())
+Some((0..ITERATIONS).map(|_| shared.load()).collect::<Vec<_>>())
} else {
None
}
@@ -1272,10 +1275,14 @@ mod tests {
/// created, but contain full Arcs.
#[test]
fn lease_overflow() {
+#[cfg(miri)]
+const GUARD_COUNT: usize = 100;
+#[cfg(not(miri))]
+const GUARD_COUNT: usize = 1000;
let a = Arc::new(0);
let shared = ArcSwap::from(Arc::clone(&a));
assert_eq!(2, Arc::strong_count(&a));
-let mut guards = (0..1000).map(|_| shared.load()).collect::<Vec<_>>();
+let mut guards = (0..GUARD_COUNT).map(|_| shared.load()).collect::<Vec<_>>();
let count = Arc::strong_count(&a);
assert!(count > 2);
let guard = shared.load();
29 changes: 27 additions & 2 deletions src/ref_cnt.rs
@@ -1,3 +1,4 @@
+use std::mem;
use std::ptr;
use std::rc::Rc;
use std::sync::Arc;
@@ -92,7 +93,29 @@ unsafe impl<T> RefCnt for Arc<T> {
Arc::into_raw(me) as *mut T
}
fn as_ptr(me: &Arc<T>) -> *mut T {
-me as &T as *const T as *mut T
+// Slightly convoluted way to do this, but this avoids stacked borrows violations. The same
+// intention as
+//
+// me as &T as *const T as *mut T
+//
+// We first create a "shallow copy" of me - one that doesn't really own its ref count
+// (that's OK, me _does_ own it, so it can't be destroyed in the meantime).
+// Then we can use into_raw (which preserves not having the ref count).
+//
+// We need to "revert" the changes we did. In current std implementation, the combination
+// of from_raw and forget is no-op. But formally, into_raw shall be paired with from_raw
+// and that read shall be paired with forget to properly "close the brackets". In future
+// versions of STD, these may become something else that's not really no-op (unlikely, but
+// possible), so we future-proof it a bit.
+
+// SAFETY: &T cast to *const T will always be aligned, initialised and valid for reads
+let ptr = Arc::into_raw(unsafe { std::ptr::read(me) });
+let ptr = ptr as *mut T;
+
+// SAFETY: We got the pointer from into_raw just above
+mem::forget(unsafe { Arc::from_raw(ptr) });
+
+ptr
}
unsafe fn from_ptr(ptr: *const T) -> Arc<T> {
Arc::from_raw(ptr)
@@ -105,7 +128,9 @@ unsafe impl<T> RefCnt for Rc<T> {
Rc::into_raw(me) as *mut T
}
fn as_ptr(me: &Rc<T>) -> *mut T {
-me as &T as *const T as *mut T
+// SAFETY: &T cast to *const T will always be aligned, initialised and valid for reads
+let ptr = Rc::into_raw(unsafe { std::ptr::read(me) });
+ptr as *mut T
}
unsafe fn from_ptr(ptr: *const T) -> Rc<T> {
Rc::from_raw(ptr)
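For reference, the Arc dance extracted into a standalone, runnable form (the free function and its name are ours; the body mirrors the implementation above): a shallow copy via ptr::read, into_raw to get the data pointer, then from_raw plus forget to close the brackets without touching the reference count.

use std::mem;
use std::ptr;
use std::sync::Arc;

// Hypothetical free-standing version of the as_ptr above.
fn arc_data_ptr<T>(me: &Arc<T>) -> *mut T {
    // Shallow, non-owning copy; `me` still owns the ref count, so the
    // allocation cannot go away under us.
    let shallow = unsafe { ptr::read(me) };
    let ptr = Arc::into_raw(shallow) as *mut T;
    // Pair into_raw with from_raw, and the ptr::read with forget, so that
    // formally everything is balanced (currently a no-op in std).
    mem::forget(unsafe { Arc::from_raw(ptr) });
    ptr
}

fn main() {
    let a = Arc::new(7);
    let p = arc_data_ptr(&a);
    assert_eq!(unsafe { *p }, 7);
    // The reference count is untouched.
    assert_eq!(Arc::strong_count(&a), 1);
}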