From 7fc6d2bb7ab2a76697e1c15f7c947df7194b5067 Mon Sep 17 00:00:00 2001
From: Emil Hofstetter
Date: Tue, 29 Nov 2022 14:23:05 -0500
Subject: [PATCH 1/7] Remove unnecessary use of unsafe block

---
 src/lib.rs | 5 ++---
 1 file changed, 2 insertions(+), 3 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 2d39b74..df94216 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -354,10 +354,9 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
             None => {
                 let (replaced, mut node) = self.replace_or_create_node(k, v);
 
-                let node_ptr: *mut LruEntry<K, V> = &mut *node;
-                self.attach(node_ptr);
+                self.attach(&mut *node);
 
-                let keyref = unsafe { (*node_ptr).key.as_ptr() };
+                let keyref = node.key.as_ptr();
                 self.map.insert(KeyRef { k: keyref }, node);
 
                 replaced.filter(|_| capture)

From d82a0da7562dd12a25d20c8d02652da430ef0834 Mon Sep 17 00:00:00 2001
From: Emil Hofstetter
Date: Tue, 29 Nov 2022 22:49:53 -0500
Subject: [PATCH 2/7] Change map to hold a pointer to, rather than an owned,
 LruEntry

---
 src/lib.rs | 105 +++++++++++++++++++++++++++++------------------
 1 file changed, 57 insertions(+), 48 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index df94216..8f3ff25 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -189,7 +189,7 @@ pub type DefaultHasher = std::collections::hash_map::RandomState;
 
 /// An LRU Cache
 pub struct LruCache<K, V, S = DefaultHasher> {
-    map: HashMap<KeyRef<K>, Box<LruEntry<K, V>>, S>,
+    map: HashMap<KeyRef<K>, *mut LruEntry<K, V>, S>,
     cap: NonZeroUsize,
 
     // head and tail are sigil nodes to facilitate inserting entries
@@ -266,7 +266,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     /// Creates a new LRU Cache with the given capacity.
     fn construct(
         cap: NonZeroUsize,
-        map: HashMap<KeyRef<K>, Box<LruEntry<K, V>>, S>,
+        map: HashMap<KeyRef<K>, *mut LruEntry<K, V>, S>,
     ) -> LruCache<K, V, S> {
         // NB: The compiler warns that cache does not need to be marked as mutable if we
         // declare it as such since we only mutate it inside the unsafe block.
@@ -342,7 +342,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
 
         match node_ref {
             Some(node_ref) => {
-                let node_ptr: *mut LruEntry<K, V> = &mut **node_ref;
+                let node_ptr: *mut LruEntry<K, V> = *node_ref;
 
                 // if the key is already in the cache just update its value and move it to the
                 // front of the list
@@ -352,11 +352,11 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
                 Some((k, v))
             }
             None => {
-                let (replaced, mut node) = self.replace_or_create_node(k, v);
+                let (replaced, node) = self.replace_or_create_node(k, v);
 
-                self.attach(&mut *node);
+                self.attach(node);
 
-                let keyref = node.key.as_ptr();
+                let keyref = unsafe { (*node).key.as_ptr() };
                 self.map.insert(KeyRef { k: keyref }, node);
 
                 replaced.filter(|_| capture)
@@ -367,27 +367,29 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
 
     // Used internally to swap out a node if the cache is full or to create a new node if space
     // is available. Shared between `put`, `push`, `get_or_insert`, and `get_or_insert_mut`.
     #[allow(clippy::type_complexity)]
-    fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, Box<LruEntry<K, V>>) {
+    fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, *mut LruEntry<K, V>) {
         if self.len() == self.cap().get() {
             // if the cache is full, remove the last entry so we can use it for the new key
             let old_key = KeyRef {
                 k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
             };
-            let mut old_node = self.map.remove(&old_key).unwrap();
+            let old_node = self.map.remove(&old_key).unwrap();
 
             // read out the node's old key and value and then replace it
-            let replaced = unsafe { (old_node.key.assume_init(), old_node.val.assume_init()) };
-
-            old_node.key = mem::MaybeUninit::new(k);
-            old_node.val = mem::MaybeUninit::new(v);
+            let replaced = unsafe {
+                (
+                    mem::replace(&mut (*old_node).key, mem::MaybeUninit::new(k)).assume_init(),
+                    mem::replace(&mut (*old_node).val, mem::MaybeUninit::new(v)).assume_init(),
+                )
+            };
 
-            let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
+            let node_ptr: *mut LruEntry<K, V> = old_node;
             self.detach(node_ptr);
 
             (Some(replaced), old_node)
         } else {
             // if the cache is not full allocate a new LruEntry
-            (None, Box::new(LruEntry::new(k, v)))
+            (None, Box::into_raw(Box::new(LruEntry::new(k, v))))
         }
     }
@@ -416,7 +418,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = *node;
 
             self.detach(node_ptr);
             self.attach(node_ptr);
@@ -452,7 +454,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = *node;
 
             self.detach(node_ptr);
             self.attach(node_ptr);
@@ -490,7 +492,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         F: FnOnce() -> V,
     {
         if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = *node;
 
             self.detach(node_ptr);
             self.attach(node_ptr);
@@ -498,14 +500,13 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
             unsafe { &(*(*node_ptr).val.as_ptr()) as &V }
         } else {
             let v = f();
-            let (_, mut node) = self.replace_or_create_node(k, v);
+            let (_, node) = self.replace_or_create_node(k, v);
 
-            let node_ptr: *mut LruEntry<K, V> = &mut *node;
-            self.attach(node_ptr);
+            self.attach(node);
 
-            let keyref = unsafe { (*node_ptr).key.as_ptr() };
+            let keyref = unsafe { (*node).key.as_ptr() };
             self.map.insert(KeyRef { k: keyref }, node);
-            unsafe { &(*(*node_ptr).val.as_ptr()) as &V }
+            unsafe { &(*(*node).val.as_ptr()) as &V }
         }
     }
@@ -536,7 +537,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         F: FnOnce() -> V,
     {
         if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = *node;
 
             self.detach(node_ptr);
             self.attach(node_ptr);
@@ -544,14 +545,13 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
             unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V }
         } else {
             let v = f();
-            let (_, mut node) = self.replace_or_create_node(k, v);
+            let (_, node) = self.replace_or_create_node(k, v);
 
-            let node_ptr: *mut LruEntry<K, V> = &mut *node;
-            self.attach(node_ptr);
+            self.attach(node);
 
-            let keyref = unsafe { (*node_ptr).key.as_ptr() };
+            let keyref = unsafe { (*node).key.as_ptr() };
             self.map.insert(KeyRef { k: keyref }, node);
-            unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V }
+            unsafe { &mut (*(*node).val.as_mut_ptr()) as &mut V }
         }
     }
@@ -579,7 +579,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         self.map
             .get(k)
-            .map(|node| unsafe { &(*node.val.as_ptr()) as &V })
+            .map(|node| unsafe { &(*(**node).val.as_ptr()) as &V })
     }
 
     /// Returns a mutable reference to the value corresponding to the key in the cache or `None`
@@ -606,7 +606,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         match self.map.get_mut(k) {
             None => None,
-            Some(node) => Some(unsafe { &mut (*node.val.as_mut_ptr()) as &mut V }),
+            Some(node) => Some(unsafe { &mut (*(**node).val.as_mut_ptr()) as &mut V }),
         }
     }
@@ -691,13 +691,18 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         match self.map.remove(k) {
             None => None,
-            Some(mut old_node) => {
-                unsafe {
+            Some(old_node) => {
+                let mut old_node = unsafe {
+                    let mut old_node = *Box::from_raw(old_node);
                     ptr::drop_in_place(old_node.key.as_mut_ptr());
-                }
-                let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
-                self.detach(node_ptr);
-                unsafe { Some(old_node.val.assume_init()) }
+
+                    old_node
+                };
+
+                self.detach(&mut old_node);
+
+                let LruEntry { key: _, val, .. } = old_node;
+                unsafe { Some(val.assume_init()) }
             }
         }
     }
@@ -728,10 +733,13 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         match self.map.remove(k) {
             None => None,
-            Some(mut old_node) => {
-                let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
-                self.detach(node_ptr);
-                unsafe { Some((old_node.key.assume_init(), old_node.val.assume_init())) }
+            Some(old_node) => {
+                let mut old_node = unsafe { *Box::from_raw(old_node) };
+
+                self.detach(&mut old_node);
+
+                let LruEntry { key, val, .. } = old_node;
+                unsafe { Some((key.assume_init(), val.assume_init())) }
             }
         }
     }
@@ -792,7 +800,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = *node;
             self.detach(node_ptr);
             self.attach(node_ptr);
         }
@@ -828,7 +836,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            let node_ptr: *mut LruEntry<K, V> = *node;
             self.detach(node_ptr);
             self.attach_last(node_ptr);
         }
@@ -1017,10 +1025,10 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
             let old_key = KeyRef {
                 k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
             };
-            let mut old_node = self.map.remove(&old_key).unwrap();
-            let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
+            let old_node = self.map.remove(&old_key).unwrap();
+            let node_ptr: *mut LruEntry<K, V> = old_node;
             self.detach(node_ptr);
-            Some(old_node)
+            unsafe { Some(Box::from_raw(old_node)) }
         } else {
             None
         }
@@ -1056,9 +1064,10 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
 
 impl<K, V, S> Drop for LruCache<K, V, S> {
     fn drop(&mut self) {
-        self.map.values_mut().for_each(|e| unsafe {
-            ptr::drop_in_place(e.key.as_mut_ptr());
-            ptr::drop_in_place(e.val.as_mut_ptr());
+        self.map.drain().for_each(|(_, node)| unsafe {
+            let mut node = *Box::from_raw(node);
+            ptr::drop_in_place((node).key.as_mut_ptr());
+            ptr::drop_in_place((node).val.as_mut_ptr());
         });
         // We rebox the head/tail, and because these are maybe-uninit
         // they do not have the absent k/v dropped.

From 4b37e6dddb3a85f38f0179e2b2c8e2397d5de061 Mon Sep 17 00:00:00 2001
From: Emil Hofstetter
Date: Wed, 30 Nov 2022 09:14:38 -0500
Subject: [PATCH 3/7] add bench

---
 src/lib.rs | 31 +++++++++++++++++++++++++++++++
 1 file changed, 31 insertions(+)

diff --git a/src/lib.rs b/src/lib.rs
index 8f3ff25..ae0954c 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -58,6 +58,7 @@
 //! ```
 
 #![no_std]
+#![feature(test)]
 #![cfg_attr(feature = "nightly", feature(negative_impls, auto_traits))]
 
 #[cfg(feature = "hashbrown")]
@@ -66,6 +67,8 @@ extern crate hashbrown;
 #[cfg(test)]
 extern crate scoped_threadpool;
 
+extern crate test;
+
 use alloc::borrow::Borrow;
 use alloc::boxed::Box;
 use core::fmt;
@@ -2059,6 +2062,34 @@ mod tests {
         assert_eq!(cache.pop_lru(), Some((0, 0)));
         assert_eq!(cache.pop_lru(), None);
     }
+
+    use test::Bencher;
+
+    #[bench]
+    fn bench_pop_lru(b: &mut Bencher) {
+        b.iter(|| {
+            static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);
+
+            struct DropCounter;
+
+            impl Drop for DropCounter {
+                fn drop(&mut self) {
+                    DROP_COUNT.fetch_add(1, Ordering::SeqCst);
+                }
+            }
+
+            let n = test::black_box(100);
+
+            for _ in 0..n {
+                let mut cache = LruCache::new(NonZeroUsize::new(1).unwrap());
+
+                let n = test::black_box(100);
+                for i in 0..n {
+                    cache.put(i, DropCounter {});
+                }
+            }
+        });
+    }
 }
 
 /// Doctests for what should *not* compile

From 8c59bd3af393ff62d68c66160c426cd571f9360e Mon Sep 17 00:00:00 2001
From: Emil Hofstetter
Date: Wed, 30 Nov 2022 09:19:47 -0500
Subject: [PATCH 4/7] rename bench function

---
 src/lib.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/lib.rs b/src/lib.rs
index ae0954c..63436da 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -2066,7 +2066,7 @@ mod tests {
     use test::Bencher;
 
     #[bench]
-    fn bench_pop_lru(b: &mut Bencher) {
+    fn bench_pointer_impl(b: &mut Bencher) {
         b.iter(|| {
             static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);
 

From c559309049d803067399b06ab977bb323c2cd98a Mon Sep 17 00:00:00 2001
From: Emil Hofstetter
Date: Wed, 30 Nov 2022 20:18:00 -0500
Subject: [PATCH 5/7] change *mut to NonNull

---
 src/lib.rs | 127 +++++++++++++++++++++--------------------------
 1 file changed, 51 insertions(+), 76 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 63436da..38edcd7 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -58,7 +58,6 @@
 //! ```
 
 #![no_std]
-#![feature(test)]
 #![cfg_attr(feature = "nightly", feature(negative_impls, auto_traits))]
 
 #[cfg(feature = "hashbrown")]
@@ -67,8 +66,6 @@ extern crate hashbrown;
 #[cfg(test)]
 extern crate scoped_threadpool;
 
-extern crate test;
-
 use alloc::borrow::Borrow;
 use alloc::boxed::Box;
 use core::fmt;
@@ -77,7 +74,7 @@ use core::iter::FusedIterator;
 use core::marker::PhantomData;
 use core::mem;
 use core::num::NonZeroUsize;
-use core::ptr;
+use core::ptr::{self, NonNull};
 use core::usize;
 
 #[cfg(any(test, not(feature = "hashbrown")))]
@@ -192,7 +189,7 @@ pub type DefaultHasher = std::collections::hash_map::RandomState;
 
 /// An LRU Cache
 pub struct LruCache<K, V, S = DefaultHasher> {
-    map: HashMap<KeyRef<K>, *mut LruEntry<K, V>, S>,
+    map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
     cap: NonZeroUsize,
 
     // head and tail are sigil nodes to facilitate inserting entries
@@ -269,7 +266,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     /// Creates a new LRU Cache with the given capacity.
     fn construct(
         cap: NonZeroUsize,
-        map: HashMap<KeyRef<K>, *mut LruEntry<K, V>, S>,
+        map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
     ) -> LruCache<K, V, S> {
         // NB: The compiler warns that cache does not need to be marked as mutable if we
         // declare it as such since we only mutate it inside the unsafe block.
@@ -345,21 +342,22 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
 
         match node_ref {
             Some(node_ref) => {
-                let node_ptr: *mut LruEntry<K, V> = *node_ref;
+                let node_ref: &mut LruEntry<K, V> = unsafe { node_ref.as_mut() };
 
                 // if the key is already in the cache just update its value and move it to the
                 // front of the list
-                unsafe { mem::swap(&mut v, &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V) }
-                self.detach(node_ptr);
-                self.attach(node_ptr);
+                unsafe { mem::swap(&mut v, &mut *node_ref.val.as_mut_ptr()) }
+                self.detach(node_ref);
+                self.attach(node_ref);
                 Some((k, v))
             }
             None => {
                 let (replaced, node) = self.replace_or_create_node(k, v);
+                let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
 
-                self.attach(node);
+                self.attach(node_ptr);
 
-                let keyref = unsafe { (*node).key.as_ptr() };
+                let keyref = unsafe { (*node_ptr).key.as_ptr() };
                 self.map.insert(KeyRef { k: keyref }, node);
 
                 replaced.filter(|_| capture)
@@ -370,29 +368,32 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     // Used internally to swap out a node if the cache is full or to create a new node if space
     // is available. Shared between `put`, `push`, `get_or_insert`, and `get_or_insert_mut`.
     #[allow(clippy::type_complexity)]
-    fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, *mut LruEntry<K, V>) {
+    fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, NonNull<LruEntry<K, V>>) {
         if self.len() == self.cap().get() {
             // if the cache is full, remove the last entry so we can use it for the new key
             let old_key = KeyRef {
                 k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
             };
             let old_node = self.map.remove(&old_key).unwrap();
+            let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr();
 
             // read out the node's old key and value and then replace it
             let replaced = unsafe {
                 (
-                    mem::replace(&mut (*old_node).key, mem::MaybeUninit::new(k)).assume_init(),
-                    mem::replace(&mut (*old_node).val, mem::MaybeUninit::new(v)).assume_init(),
+                    mem::replace(&mut (*node_ptr).key, mem::MaybeUninit::new(k)).assume_init(),
+                    mem::replace(&mut (*node_ptr).val, mem::MaybeUninit::new(v)).assume_init(),
                 )
             };
 
-            let node_ptr: *mut LruEntry<K, V> = old_node;
             self.detach(node_ptr);
 
             (Some(replaced), old_node)
         } else {
             // if the cache is not full allocate a new LruEntry
-            (None, Box::into_raw(Box::new(LruEntry::new(k, v))))
+            // Safety: We allocate, turn into raw, and get NonNull all in one step.
+            (None, unsafe {
+                NonNull::new_unchecked(Box::into_raw(Box::new(LruEntry::new(k, v))))
+            })
         }
     }
@@ -421,12 +422,12 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = *node;
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
 
             self.detach(node_ptr);
             self.attach(node_ptr);
 
-            Some(unsafe { &(*(*node_ptr).val.as_ptr()) as &V })
+            Some(unsafe { &*(*node_ptr).val.as_ptr() })
         } else {
             None
         }
@@ -457,12 +458,12 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = *node;
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
 
             self.detach(node_ptr);
             self.attach(node_ptr);
 
-            Some(unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V })
+            Some(unsafe { &mut *(*node_ptr).val.as_mut_ptr() })
         } else {
             None
         }
@@ -495,21 +496,22 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         F: FnOnce() -> V,
     {
         if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
-            let node_ptr: *mut LruEntry<K, V> = *node;
+            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
 
-            self.detach(node_ptr);
-            self.attach(node_ptr);
+            self.detach(node_ref);
+            self.attach(node_ref);
 
-            unsafe { &(*(*node_ptr).val.as_ptr()) as &V }
+            unsafe { &*node_ref.val.as_ptr() }
         } else {
             let v = f();
-            let (_, node) = self.replace_or_create_node(k, v);
+            let (_, mut node) = self.replace_or_create_node(k, v);
+            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
 
-            self.attach(node);
+            self.attach(node_ref);
 
-            let keyref = unsafe { (*node).key.as_ptr() };
+            let keyref = node_ref.key.as_ptr();
             self.map.insert(KeyRef { k: keyref }, node);
-            unsafe { &(*(*node).val.as_ptr()) as &V }
+            unsafe { &*node_ref.val.as_ptr() }
         }
     }
@@ -540,21 +542,22 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         F: FnOnce() -> V,
     {
         if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
-            let node_ptr: *mut LruEntry<K, V> = *node;
+            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
 
-            self.detach(node_ptr);
-            self.attach(node_ptr);
+            self.detach(node_ref);
+            self.attach(node_ref);
 
-            unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V }
+            unsafe { &mut *node_ref.val.as_mut_ptr() }
         } else {
             let v = f();
-            let (_, node) = self.replace_or_create_node(k, v);
+            let (_, mut node) = self.replace_or_create_node(k, v);
+            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
 
-            self.attach(node);
+            self.attach(node_ref);
 
-            let keyref = unsafe { (*node).key.as_ptr() };
+            let keyref = node_ref.key.as_ptr();
             self.map.insert(KeyRef { k: keyref }, node);
-            unsafe { &mut (*(*node).val.as_mut_ptr()) as &mut V }
+            unsafe { &mut *node_ref.val.as_mut_ptr() }
         }
     }
@@ -582,7 +585,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         self.map
             .get(k)
-            .map(|node| unsafe { &(*(**node).val.as_ptr()) as &V })
+            .map(|node| unsafe { &*node.as_ref().val.as_ptr() })
     }
 
     /// Returns a mutable reference to the value corresponding to the key in the cache or `None`
@@ -609,7 +612,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
     {
         match self.map.get_mut(k) {
             None => None,
-            Some(node) => Some(unsafe { &mut (*(**node).val.as_mut_ptr()) as &mut V }),
+            Some(node) => Some(unsafe { &mut *(*node.as_ptr()).val.as_mut_ptr() }),
         }
     }
@@ -696,7 +699,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
             None => None,
             Some(old_node) => {
                 let mut old_node = unsafe {
-                    let mut old_node = *Box::from_raw(old_node);
+                    let mut old_node = *Box::from_raw(old_node.as_ptr());
                     ptr::drop_in_place(old_node.key.as_mut_ptr());
 
                     old_node
@@ -737,7 +740,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         match self.map.remove(k) {
             None => None,
             Some(old_node) => {
-                let mut old_node = unsafe { *Box::from_raw(old_node) };
+                let mut old_node = unsafe { *Box::from_raw(old_node.as_ptr()) };
 
                 self.detach(&mut old_node);
 
@@ -803,9 +806,9 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = *node;
-            self.detach(node_ptr);
-            self.attach(node_ptr);
+            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
+            self.detach(node_ref);
+            self.attach(node_ref);
         }
     }
@@ -839,7 +842,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ptr: *mut LruEntry<K, V> = *node;
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
             self.detach(node_ptr);
             self.attach_last(node_ptr);
         }
@@ -1029,9 +1032,9 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
             k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
         };
         let old_node = self.map.remove(&old_key).unwrap();
-        let node_ptr: *mut LruEntry<K, V> = old_node;
+        let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr();
         self.detach(node_ptr);
-        unsafe { Some(Box::from_raw(old_node)) }
+        unsafe { Some(Box::from_raw(node_ptr)) }
     } else {
         None
     }
@@ -1068,7 +1071,7 @@ impl<K, V, S> Drop for LruCache<K, V, S> {
     fn drop(&mut self) {
         self.map.drain().for_each(|(_, node)| unsafe {
-            let mut node = *Box::from_raw(node);
+            let mut node = *Box::from_raw(node.as_ptr());
             ptr::drop_in_place((node).key.as_mut_ptr());
             ptr::drop_in_place((node).val.as_mut_ptr());
         });
@@ -2062,34 +2065,6 @@ mod tests {
         assert_eq!(cache.pop_lru(), Some((0, 0)));
         assert_eq!(cache.pop_lru(), None);
     }
-
-    use test::Bencher;
-
-    #[bench]
-    fn bench_pointer_impl(b: &mut Bencher) {
-        b.iter(|| {
-            static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);
-
-            struct DropCounter;
-
-            impl Drop for DropCounter {
-                fn drop(&mut self) {
-                    DROP_COUNT.fetch_add(1, Ordering::SeqCst);
-                }
-            }
-
-            let n = test::black_box(100);
-
-            for _ in 0..n {
-                let mut cache = LruCache::new(NonZeroUsize::new(1).unwrap());
-
-                let n = test::black_box(100);
-                for i in 0..n {
-                    cache.put(i, DropCounter {});
-                }
-            }
-        });
-    }
 }
 
 /// Doctests for what should *not* compile

From 133f76b402e8429ccffce50fc508d2690c38c9c1 Mon Sep 17 00:00:00 2001
From: Emil Hofstetter
Date: Wed, 30 Nov 2022 23:12:21 -0500
Subject: [PATCH 6/7] Change some mut borrows to mut pointers

---
 src/lib.rs | 50 +++++++++++++++++++++++++-------------------------
 1 file changed, 25 insertions(+), 25 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 38edcd7..0a96da6 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -342,13 +342,13 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
 
         match node_ref {
             Some(node_ref) => {
-                let node_ref: &mut LruEntry<K, V> = unsafe { node_ref.as_mut() };
+                let node_ptr: *mut LruEntry<K, V> = node_ref.as_ptr();
 
                 // if the key is already in the cache just update its value and move it to the
                 // front of the list
-                unsafe { mem::swap(&mut v, &mut *node_ref.val.as_mut_ptr()) }
-                self.detach(node_ref);
-                self.attach(node_ref);
+                unsafe { mem::swap(&mut v, &mut *(*node_ptr).val.as_mut_ptr()) }
+                self.detach(node_ptr);
+                self.attach(node_ptr);
                 Some((k, v))
             }
             None => {
@@ -496,22 +496,22 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         F: FnOnce() -> V,
     {
         if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
-            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
 
-            self.detach(node_ref);
-            self.attach(node_ref);
+            self.detach(node_ptr);
+            self.attach(node_ptr);
 
-            unsafe { &*node_ref.val.as_ptr() }
+            unsafe { &*(*node_ptr).val.as_ptr() }
         } else {
             let v = f();
-            let (_, mut node) = self.replace_or_create_node(k, v);
-            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
+            let (_, node) = self.replace_or_create_node(k, v);
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
 
-            self.attach(node_ref);
+            self.attach(node_ptr);
 
-            let keyref = node_ref.key.as_ptr();
+            let keyref = unsafe { (*node_ptr).key.as_ptr() };
             self.map.insert(KeyRef { k: keyref }, node);
-            unsafe { &*node_ref.val.as_ptr() }
+            unsafe { &*(*node_ptr).val.as_ptr() }
         }
     }
@@ -542,22 +542,22 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         F: FnOnce() -> V,
     {
         if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
-            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
 
-            self.detach(node_ref);
-            self.attach(node_ref);
+            self.detach(node_ptr);
+            self.attach(node_ptr);
 
-            unsafe { &mut *node_ref.val.as_mut_ptr() }
+            unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
         } else {
             let v = f();
-            let (_, mut node) = self.replace_or_create_node(k, v);
-            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
+            let (_, node) = self.replace_or_create_node(k, v);
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
 
-            self.attach(node_ref);
+            self.attach(node_ptr);
 
-            let keyref = node_ref.key.as_ptr();
+            let keyref = unsafe { (*node_ptr).key.as_ptr() };
             self.map.insert(KeyRef { k: keyref }, node);
-            unsafe { &mut *node_ref.val.as_mut_ptr() }
+            unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
         }
     }
@@ -806,9 +806,9 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         Q: Hash + Eq + ?Sized,
     {
         if let Some(node) = self.map.get_mut(k) {
-            let node_ref: &mut LruEntry<K, V> = unsafe { node.as_mut() };
-            self.detach(node_ref);
-            self.attach(node_ref);
+            let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
+            self.detach(node_ptr);
+            self.attach(node_ptr);
         }
     }

From b1d858a61f332424a666959a7a5e604cc62c91d1 Mon Sep 17 00:00:00 2001
From: Emil Hofstetter
Date: Mon, 12 Dec 2022 09:37:52 -0500
Subject: [PATCH 7/7] fix clippy lint by dereferencing ptr explicitly before
 swapping mem

---
 src/lib.rs | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/lib.rs b/src/lib.rs
index 0a96da6..07cdca6 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -342,11 +342,15 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
 
         match node_ref {
             Some(node_ref) => {
-                let node_ptr: *mut LruEntry<K, V> = node_ref.as_ptr();
-
                 // if the key is already in the cache just update its value and move it to the
                 // front of the list
-                unsafe { mem::swap(&mut v, &mut *(*node_ptr).val.as_mut_ptr()) }
+                let node_ptr: *mut LruEntry<K, V> = node_ref.as_ptr();
+
+                // gets a reference to the node to perform a swap and drops it right after
+                let node_ref = unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) };
+                mem::swap(&mut v, node_ref);
+                let _ = node_ref;
+
                 self.detach(node_ptr);
                 self.attach(node_ptr);
                 Some((k, v))
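
Taken together, patches 2 and 5 land on a common pattern for pointer-backed containers: each entry is heap-allocated exactly once via Box::into_raw, handled as a NonNull<LruEntry<K, V>> while it lives in the map, and reclaimed exactly once via Box::from_raw (in pop, pop_entry, remove_last, or Drop). The sketch below shows that allocation round-trip in isolation; the Node type and the functions around it are illustrative stand-ins, not code from the patches.

    use core::ptr::NonNull;

    struct Node {
        val: i32,
    }

    fn alloc_node(val: i32) -> NonNull<Node> {
        // Box::into_raw never returns null, so new_unchecked is sound here;
        // this mirrors the Safety comment introduced in PATCH 5/7.
        unsafe { NonNull::new_unchecked(Box::into_raw(Box::new(Node { val }))) }
    }

    fn free_node(node: NonNull<Node>) -> i32 {
        // Reclaim ownership; dropping the Box frees the allocation exactly once.
        let boxed: Box<Node> = unsafe { Box::from_raw(node.as_ptr()) };
        boxed.val
    }

    fn main() {
        let node = alloc_node(7);
        // NonNull is Copy, so nothing in the type system prevents a double
        // free; the cache upholds "rebox exactly once" by construction.
        assert_eq!(free_node(node), 7);
    }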
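Patch 2 also changes how a recycled node's key and value are read out. With the map handing out raw pointers rather than owned boxes, the old key and value can no longer be partially moved out of the node, so mem::replace moves the new value in and the old value out through the pointer in a single step. A minimal sketch of that idiom; the recycle function and String payload are illustrative stand-ins for the cache's MaybeUninit fields.

    use core::mem::{self, MaybeUninit};

    // Swap a new value into an always-initialized slot and move the old
    // value out, without the slot ever being observable as uninitialized.
    fn recycle(slot: &mut MaybeUninit<String>, new_val: String) -> String {
        let old = mem::replace(slot, MaybeUninit::new(new_val));
        // Safety (assumed): the caller guarantees `slot` was initialized.
        unsafe { old.assume_init() }
    }

    fn main() {
        let mut slot = MaybeUninit::new(String::from("old"));
        assert_eq!(recycle(&mut slot, String::from("new")), "old");
        // Move the final value out so the String is not leaked.
        assert_eq!(unsafe { slot.assume_init() }, "new");
    }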
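With raw pointers in the map, nothing frees a node automatically anymore: pop, pop_entry, and Drop must each rebox the pointer, which is why the Drop impl switches from dropping keys and values in place to draining the map and calling Box::from_raw per node. A drop-counting test in the spirit of the nightly-only bench from patch 3 (removed again in patch 5) can check this on stable; the test name, DROP_COUNT, DropCounter, and the expected count of 100 are assumptions about eviction behavior, not assertions taken from the patches.

    use std::num::NonZeroUsize;
    use std::sync::atomic::{AtomicUsize, Ordering};

    use lru::LruCache;

    static DROP_COUNT: AtomicUsize = AtomicUsize::new(0);

    struct DropCounter;

    impl Drop for DropCounter {
        fn drop(&mut self) {
            DROP_COUNT.fetch_add(1, Ordering::SeqCst);
        }
    }

    #[test]
    fn every_entry_dropped_exactly_once() {
        let mut cache = LruCache::new(NonZeroUsize::new(1).unwrap());
        for i in 0..100 {
            // Capacity is 1, so each put after the first evicts (and should
            // drop) the previous DropCounter.
            cache.put(i, DropCounter {});
        }
        drop(cache);
        // 99 evictions plus the final resident entry dropped with the cache.
        assert_eq!(DROP_COUNT.load(Ordering::SeqCst), 100);
    }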