Merge pull request #127 from saethlin/master
Remove ptr-int transmute by converting UnsafeWeakPointer from usize to *mut T
tatsuya6502 committed May 19, 2022
2 parents 5a18eb4 + 41e85dd commit 90db6a8
Showing 3 changed files with 23 additions and 28 deletions.
32 changes: 12 additions & 20 deletions src/common/unsafe_weak_pointer.rs
@@ -3,39 +3,31 @@ use std::sync::{Arc, Weak};
 /// WARNING: Do not use this struct unless you are absolutely sure what you are
 /// doing. Using this struct is unsafe and may cause memory related crashes and/or
 /// security vulnerabilities.
-///
-/// This struct exists with the sole purpose of avoiding compile errors relevant to
-/// the thread pool usages. The thread pool requires that the generic parameters on
-/// the `Cache` and `Inner` structs to have trait bounds `Send`, `Sync` and
-/// `'static`. This will be unacceptable for many cache usages.
-///
-/// This struct avoids the trait bounds by transmuting a pointer between
-/// `std::sync::Weak<Inner<K, V, S>>` and `usize`.
-///
-/// If you know a better solution than this, we would love te hear it.
-pub(crate) struct UnsafeWeakPointer {
+pub(crate) struct UnsafeWeakPointer<T> {
     // This is a std::sync::Weak pointer to Inner<K, V, S>.
-    raw_ptr: usize,
+    raw_ptr: *mut T,
 }
 
-impl UnsafeWeakPointer {
-    pub(crate) fn from_weak_arc<T>(p: Weak<T>) -> Self {
+unsafe impl<T> Send for UnsafeWeakPointer<T> {}
+
+impl<T> UnsafeWeakPointer<T> {
+    pub(crate) fn from_weak_arc(p: Weak<T>) -> Self {
         Self {
-            raw_ptr: unsafe { std::mem::transmute(p) },
+            raw_ptr: p.into_raw() as *mut T,
         }
     }
 
-    pub(crate) unsafe fn as_weak_arc<T>(&self) -> Weak<T> {
-        std::mem::transmute(self.raw_ptr)
+    pub(crate) unsafe fn as_weak_arc(&self) -> Weak<T> {
+        Weak::from_raw(self.raw_ptr.cast())
     }
 
-    pub(crate) fn forget_arc<T>(p: Arc<T>) {
+    pub(crate) fn forget_arc(p: Arc<T>) {
         // Downgrade the Arc to Weak, then forget.
         let weak = Arc::downgrade(&p);
         std::mem::forget(weak);
     }
 
-    pub(crate) fn forget_weak_arc<T>(p: Weak<T>) {
+    pub(crate) fn forget_weak_arc(p: Weak<T>) {
         std::mem::forget(p);
     }
 }
@@ -46,7 +38,7 @@ impl UnsafeWeakPointer {
 ///
 /// When you want to drop the Weak pointer, ensure that you drop it only once for the
 /// same `raw_ptr` across clones.
-impl Clone for UnsafeWeakPointer {
+impl<T> Clone for UnsafeWeakPointer<T> {
     fn clone(&self) -> Self {
         Self {
             raw_ptr: self.raw_ptr,
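
The core of the change is visible in `from_weak_arc` and `as_weak_arc`: instead of transmuting a `Weak<T>` to and from a `usize`, the pointer is now kept as a real `*mut T` via `Weak::into_raw` and `Weak::from_raw`. Below is a minimal, self-contained sketch of that round trip; the `RawWeak` type and the `main` driver are made up for illustration and are not the crate's code.

use std::sync::{Arc, Weak};

// Hypothetical stand-in for UnsafeWeakPointer<T>, reduced to the raw-pointer
// round trip that the new implementation relies on.
struct RawWeak<T> {
    ptr: *mut T,
}

impl<T> RawWeak<T> {
    fn from_weak(w: Weak<T>) -> Self {
        // Weak::into_raw consumes the Weak without decrementing the weak count,
        // so the weak reference stays alive behind the raw pointer.
        Self { ptr: Weak::into_raw(w) as *mut T }
    }

    // SAFETY: `ptr` must have come from `Weak::into_raw`, and each restored
    // Weak must be forgotten again (or be the final one that is dropped).
    unsafe fn to_weak(&self) -> Weak<T> {
        Weak::from_raw(self.ptr as *const T)
    }
}

fn main() {
    let strong = Arc::new(42_u32);
    let raw = RawWeak::from_weak(Arc::downgrade(&strong));

    // Restore the Weak, use it, then forget it so the weak count stays
    // balanced and the raw pointer remains valid for the next restore.
    let weak = unsafe { raw.to_weak() };
    assert_eq!(weak.upgrade().as_deref(), Some(&42));
    std::mem::forget(weak);

    // Final cleanup: restore once more and let it drop for real.
    drop(unsafe { raw.to_weak() });
}

Note that switching the field from `usize` to `*mut T` also loses the automatic `Send` implementation (raw pointers are neither `Send` nor `Sync`), which is why the diff adds the explicit `unsafe impl<T> Send for UnsafeWeakPointer<T> {}`.
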
15 changes: 9 additions & 6 deletions src/sync/housekeeper.rs
@@ -39,7 +39,7 @@ pub(crate) trait InnerSync {
 }
 
 pub(crate) struct Housekeeper<T> {
-    inner: Arc<Mutex<UnsafeWeakPointer>>,
+    inner: Arc<Mutex<UnsafeWeakPointer<T>>>,
     thread_pool: Arc<ThreadPool>,
     is_shutting_down: Arc<AtomicBool>,
     periodical_sync_job: Mutex<Option<JobHandle>>,
@@ -73,12 +73,15 @@ impl<T> Drop for Housekeeper<T> {
 
         // All sync jobs should have been finished by now. Clean other stuff up.
         ThreadPoolRegistry::release_pool(&self.thread_pool);
-        std::mem::drop(unsafe { self.inner.lock().as_weak_arc::<T>() });
+        std::mem::drop(unsafe { self.inner.lock().as_weak_arc() });
     }
 }
 
 // functions/methods used by Cache
-impl<T: InnerSync> Housekeeper<T> {
+impl<T: InnerSync> Housekeeper<T>
+where
+    T: 'static,
+{
     pub(crate) fn new(inner: Weak<T>) -> Self {
         use crate::common::thread_pool::PoolName;
 
@@ -107,7 +110,7 @@ impl<T: InnerSync> Housekeeper<T> {
 
     fn start_periodical_sync_job(
         thread_pool: &Arc<ThreadPool>,
-        unsafe_weak_ptr: Arc<Mutex<UnsafeWeakPointer>>,
+        unsafe_weak_ptr: Arc<Mutex<UnsafeWeakPointer<T>>>,
         is_shutting_down: Arc<AtomicBool>,
         periodical_sync_running: Arc<Mutex<()>>,
     ) -> JobHandle {
@@ -173,10 +176,10 @@ impl<T: InnerSync> Housekeeper<T> {
 
 // private functions/methods
 impl<T: InnerSync> Housekeeper<T> {
-    fn call_sync(unsafe_weak_ptr: &Arc<Mutex<UnsafeWeakPointer>>) -> Option<SyncPace> {
+    fn call_sync(unsafe_weak_ptr: &Arc<Mutex<UnsafeWeakPointer<T>>>) -> Option<SyncPace> {
         let lock = unsafe_weak_ptr.lock();
         // Restore the Weak pointer to Inner<K, V, S>.
-        let weak = unsafe { lock.as_weak_arc::<T>() };
+        let weak = unsafe { lock.as_weak_arc() };
         if let Some(inner) = weak.upgrade() {
             // TODO: Protect this call with catch_unwind().
             let sync_pace = inner.sync(MAX_SYNC_REPEATS);
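
The housekeeper changes are mostly mechanical: the pointee type is now carried by `UnsafeWeakPointer<T>` itself, so call sites no longer need the `as_weak_arc::<T>()` turbofish, and `T: 'static` is spelled out on the impl. The sketch below illustrates the access pattern of `call_sync` under assumptions that differ from the crate: hypothetical stand-in types, a std `Mutex` instead of the crate's lock, and the `SyncPace` return value omitted.

use std::sync::{Arc, Mutex, Weak};

struct UnsafeWeakPointer<T> {
    raw_ptr: *mut T,
}

unsafe impl<T> Send for UnsafeWeakPointer<T> {}

impl<T> UnsafeWeakPointer<T> {
    // SAFETY: `raw_ptr` must originate from `Weak::into_raw`.
    unsafe fn as_weak_arc(&self) -> Weak<T> {
        Weak::from_raw(self.raw_ptr as *const T)
    }
}

trait InnerSync {
    fn sync(&self, max_repeats: usize);
}

fn call_sync<T: InnerSync>(unsafe_weak_ptr: &Arc<Mutex<UnsafeWeakPointer<T>>>) {
    let lock = unsafe_weak_ptr.lock().unwrap();
    // Restore the Weak pointer while holding the lock.
    let weak = unsafe { lock.as_weak_arc() };
    if let Some(inner) = weak.upgrade() {
        // The cache is still alive; run one maintenance pass.
        inner.sync(10);
    }
    // Hand the Weak back by forgetting it, so the weak count is not
    // decremented and the raw pointer stays valid for the next call.
    std::mem::forget(weak);
}

struct DummyInner;
impl InnerSync for DummyInner {
    fn sync(&self, _max_repeats: usize) { /* no-op for the sketch */ }
}

fn main() {
    let inner = Arc::new(DummyInner);
    let raw_ptr = Weak::into_raw(Arc::downgrade(&inner)) as *mut DummyInner;
    let shared = Arc::new(Mutex::new(UnsafeWeakPointer { raw_ptr }));
    call_sync(&shared);

    // Final cleanup: restore the Weak one last time and let it drop.
    drop(unsafe { shared.lock().unwrap().as_weak_arc() });
}
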
4 changes: 2 additions & 2 deletions src/sync/invalidator.rs
@@ -288,7 +288,7 @@ impl<K, V, S> Invalidator<K, V, S> {
 
 struct ScanContext<K, V, S> {
     predicates: Mutex<Vec<Predicate<K, V>>>,
-    cache: Mutex<UnsafeWeakPointer>,
+    cache: Mutex<UnsafeWeakPointer<Inner<K, V, S>>>,
     result: Mutex<Option<ScanResult<K, V>>>,
     is_running: AtomicBool,
     is_shutting_down: AtomicBool,
@@ -373,7 +373,7 @@ where
         let cache_lock = self.scan_context.cache.lock();
 
         // Restore the Weak pointer to Inner<K, V, S>.
-        let weak = unsafe { cache_lock.as_weak_arc::<Inner<K, V, S>>() };
+        let weak = unsafe { cache_lock.as_weak_arc() };
         if let Some(inner_cache) = weak.upgrade() {
             // TODO: Protect this call with catch_unwind().
             *self.scan_context.result.lock() = Some(self.do_execute(&inner_cache));
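
One more point worth noting: `ScanContext` and the housekeeper state are handed to thread-pool workers, and a struct containing a `*mut T` field is not `Send` by default, whereas the old `usize` field was. The sketch below shows that constraint with made-up names, using a plain `std::thread` in place of the crate's thread pool.

use std::sync::{Arc, Weak};
use std::thread;

// Made-up wrapper mirroring UnsafeWeakPointer<T>'s raw-pointer field.
struct RawWeak<T>(*mut T);

// Without this opt-in, the `thread::spawn` below fails to compile, because
// `*mut T` is neither Send nor Sync by default.
unsafe impl<T: Send> Send for RawWeak<T> {}

fn main() {
    let strong = Arc::new(1_u32);
    let raw = RawWeak(Weak::into_raw(Arc::downgrade(&strong)) as *mut u32);

    let handle = thread::spawn(move || {
        // SAFETY: the pointer came from Weak::into_raw and is restored exactly once.
        let weak = unsafe { Weak::from_raw(raw.0 as *const u32) };
        weak.upgrade().map(|v| *v)
    });

    // `strong` is still alive here, so the upgrade on the worker thread succeeds.
    assert_eq!(handle.join().unwrap(), Some(1));
}
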
