Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Change LruCache.map to hold a pointer, rather than owned LruEntry. #161

Merged
merged 10 commits on Dec 19, 2022
118 changes: 68 additions & 50 deletions src/lib.rs
Expand Up @@ -74,7 +74,7 @@ use core::iter::FusedIterator;
use core::marker::PhantomData;
use core::mem;
use core::num::NonZeroUsize;
use core::ptr;
use core::ptr::{self, NonNull};
use core::usize;

#[cfg(any(test, not(feature = "hashbrown")))]
Expand Down Expand Up @@ -189,7 +189,7 @@ pub type DefaultHasher = std::collections::hash_map::RandomState;

/// An LRU Cache
pub struct LruCache<K, V, S = DefaultHasher> {
map: HashMap<KeyRef<K>, Box<LruEntry<K, V>>, S>,
map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
cap: NonZeroUsize,

// head and tail are sigil nodes to facilitate inserting entries
Expand Down Expand Up @@ -266,7 +266,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
/// Creates a new LRU Cache with the given capacity.
fn construct(
cap: NonZeroUsize,
map: HashMap<KeyRef<K>, Box<LruEntry<K, V>>, S>,
map: HashMap<KeyRef<K>, NonNull<LruEntry<K, V>>, S>,
) -> LruCache<K, V, S> {
// NB: The compiler warns that cache does not need to be marked as mutable if we
// declare it as such since we only mutate it inside the unsafe block.
Expand Down Expand Up @@ -342,19 +342,23 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {

match node_ref {
Some(node_ref) => {
let node_ptr: *mut LruEntry<K, V> = &mut **node_ref;

// if the key is already in the cache just update its value and move it to the
// front of the list
unsafe { mem::swap(&mut v, &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V) }
let node_ptr: *mut LruEntry<K, V> = node_ref.as_ptr();

// gets a reference to the node to perform a swap and drops it right after
let node_ref = unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) };
mem::swap(&mut v, node_ref);
let _ = node_ref;

self.detach(node_ptr);
self.attach(node_ptr);
Some((k, v))
}
None => {
let (replaced, mut node) = self.replace_or_create_node(k, v);
let (replaced, node) = self.replace_or_create_node(k, v);
let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

let node_ptr: *mut LruEntry<K, V> = &mut *node;
self.attach(node_ptr);

let keyref = unsafe { (*node_ptr).key.as_ptr() };
Expand All @@ -368,27 +372,32 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
// Used internally to swap out a node if the cache is full or to create a new node if space
// is available. Shared between `put`, `push`, `get_or_insert`, and `get_or_insert_mut`.
#[allow(clippy::type_complexity)]
fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, Box<LruEntry<K, V>>) {
fn replace_or_create_node(&mut self, k: K, v: V) -> (Option<(K, V)>, NonNull<LruEntry<K, V>>) {
if self.len() == self.cap().get() {
// if the cache is full, remove the last entry so we can use it for the new key
let old_key = KeyRef {
k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
};
let mut old_node = self.map.remove(&old_key).unwrap();
let old_node = self.map.remove(&old_key).unwrap();
let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr();

// read out the node's old key and value and then replace it
let replaced = unsafe { (old_node.key.assume_init(), old_node.val.assume_init()) };

old_node.key = mem::MaybeUninit::new(k);
old_node.val = mem::MaybeUninit::new(v);
let replaced = unsafe {
(
mem::replace(&mut (*node_ptr).key, mem::MaybeUninit::new(k)).assume_init(),
mem::replace(&mut (*node_ptr).val, mem::MaybeUninit::new(v)).assume_init(),
)
};

let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
self.detach(node_ptr);

(Some(replaced), old_node)
} else {
// if the cache is not full allocate a new LruEntry
(None, Box::new(LruEntry::new(k, v)))
// Safety: We allocate, turn into raw, and get NonNull all in one step.
(None, unsafe {
NonNull::new_unchecked(Box::into_raw(Box::new(LruEntry::new(k, v))))
})
}
}

Expand Down Expand Up @@ -417,12 +426,12 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
Q: Hash + Eq + ?Sized,
{
if let Some(node) = self.map.get_mut(k) {
let node_ptr: *mut LruEntry<K, V> = &mut **node;
let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

self.detach(node_ptr);
self.attach(node_ptr);

Some(unsafe { &(*(*node_ptr).val.as_ptr()) as &V })
Some(unsafe { &*(*node_ptr).val.as_ptr() })
} else {
None
}
Expand Down Expand Up @@ -453,12 +462,12 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
Q: Hash + Eq + ?Sized,
{
if let Some(node) = self.map.get_mut(k) {
let node_ptr: *mut LruEntry<K, V> = &mut **node;
let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

self.detach(node_ptr);
self.attach(node_ptr);

Some(unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V })
Some(unsafe { &mut *(*node_ptr).val.as_mut_ptr() })
} else {
None
}
Expand Down Expand Up @@ -491,22 +500,22 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
F: FnOnce() -> V,
{
if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
let node_ptr: *mut LruEntry<K, V> = &mut **node;
let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

self.detach(node_ptr);
self.attach(node_ptr);

unsafe { &(*(*node_ptr).val.as_ptr()) as &V }
unsafe { &*(*node_ptr).val.as_ptr() }
} else {
let v = f();
let (_, mut node) = self.replace_or_create_node(k, v);
let (_, node) = self.replace_or_create_node(k, v);
let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

let node_ptr: *mut LruEntry<K, V> = &mut *node;
self.attach(node_ptr);

let keyref = unsafe { (*node_ptr).key.as_ptr() };
self.map.insert(KeyRef { k: keyref }, node);
unsafe { &(*(*node_ptr).val.as_ptr()) as &V }
unsafe { &*(*node_ptr).val.as_ptr() }
}
}

Expand Down Expand Up @@ -537,22 +546,22 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
F: FnOnce() -> V,
{
if let Some(node) = self.map.get_mut(&KeyRef { k: &k }) {
let node_ptr: *mut LruEntry<K, V> = &mut **node;
let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

self.detach(node_ptr);
self.attach(node_ptr);

unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V }
unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
} else {
let v = f();
let (_, mut node) = self.replace_or_create_node(k, v);
let (_, node) = self.replace_or_create_node(k, v);
let node_ptr: *mut LruEntry<K, V> = node.as_ptr();

let node_ptr: *mut LruEntry<K, V> = &mut *node;
self.attach(node_ptr);

let keyref = unsafe { (*node_ptr).key.as_ptr() };
self.map.insert(KeyRef { k: keyref }, node);
unsafe { &mut (*(*node_ptr).val.as_mut_ptr()) as &mut V }
unsafe { &mut *(*node_ptr).val.as_mut_ptr() }
}
}

Expand Down Expand Up @@ -580,7 +589,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
{
self.map
.get(k)
.map(|node| unsafe { &(*node.val.as_ptr()) as &V })
.map(|node| unsafe { &*node.as_ref().val.as_ptr() })
}

/// Returns a mutable reference to the value corresponding to the key in the cache or `None`
Expand All @@ -607,7 +616,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
{
match self.map.get_mut(k) {
None => None,
Some(node) => Some(unsafe { &mut (*node.val.as_mut_ptr()) as &mut V }),
Some(node) => Some(unsafe { &mut *(*node.as_ptr()).val.as_mut_ptr() }),
}
}

Expand Down Expand Up @@ -692,13 +701,18 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
{
match self.map.remove(k) {
None => None,
Some(mut old_node) => {
unsafe {
Some(old_node) => {
let mut old_node = unsafe {
let mut old_node = *Box::from_raw(old_node.as_ptr());
ptr::drop_in_place(old_node.key.as_mut_ptr());
}
let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
self.detach(node_ptr);
unsafe { Some(old_node.val.assume_init()) }

old_node
};

self.detach(&mut old_node);

let LruEntry { key: _, val, .. } = old_node;
unsafe { Some(val.assume_init()) }
}
}
}
Expand Down Expand Up @@ -729,10 +743,13 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
{
match self.map.remove(k) {
None => None,
Some(mut old_node) => {
let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
self.detach(node_ptr);
unsafe { Some((old_node.key.assume_init(), old_node.val.assume_init())) }
Some(old_node) => {
let mut old_node = unsafe { *Box::from_raw(old_node.as_ptr()) };

self.detach(&mut old_node);

let LruEntry { key, val, .. } = old_node;
unsafe { Some((key.assume_init(), val.assume_init())) }
}
}
}
Expand Down Expand Up @@ -793,7 +810,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
Q: Hash + Eq + ?Sized,
{
if let Some(node) = self.map.get_mut(k) {
let node_ptr: *mut LruEntry<K, V> = &mut **node;
let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
self.detach(node_ptr);
self.attach(node_ptr);
}
Expand Down Expand Up @@ -829,7 +846,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
Q: Hash + Eq + ?Sized,
{
if let Some(node) = self.map.get_mut(k) {
let node_ptr: *mut LruEntry<K, V> = &mut **node;
let node_ptr: *mut LruEntry<K, V> = node.as_ptr();
self.detach(node_ptr);
self.attach_last(node_ptr);
}
Expand Down Expand Up @@ -1018,10 +1035,10 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
let old_key = KeyRef {
k: unsafe { &(*(*(*self.tail).prev).key.as_ptr()) },
};
let mut old_node = self.map.remove(&old_key).unwrap();
let node_ptr: *mut LruEntry<K, V> = &mut *old_node;
let old_node = self.map.remove(&old_key).unwrap();
let node_ptr: *mut LruEntry<K, V> = old_node.as_ptr();
self.detach(node_ptr);
Some(old_node)
unsafe { Some(Box::from_raw(node_ptr)) }
} else {
None
}
Expand Down Expand Up @@ -1057,9 +1074,10 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {

impl<K, V, S> Drop for LruCache<K, V, S> {
fn drop(&mut self) {
self.map.values_mut().for_each(|e| unsafe {
ptr::drop_in_place(e.key.as_mut_ptr());
ptr::drop_in_place(e.val.as_mut_ptr());
self.map.drain().for_each(|(_, node)| unsafe {
let mut node = *Box::from_raw(node.as_ptr());
ptr::drop_in_place((node).key.as_mut_ptr());
ptr::drop_in_place((node).val.as_mut_ptr());
});
// We rebox the head/tail, and because these are maybe-uninit
// they do not have the absent k/v dropped.
Expand Down