diff --git a/CHANGELOG.md b/CHANGELOG.md
index 491beb9..d369b24 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
 # Changelog
 
+## Unreleased
+
+- Add `LruCache::promote` and `LruCache::demote` API to manipulate LRU order of the entry directly.
+
 ## [v0.7.8](https://github.com/jeromefroe/lru-rs/tree/0.7.8) - 2022-07-19
 
 - Update dependency on hashbrown to 0.12.
diff --git a/src/lib.rs b/src/lib.rs
index 61924f5..2d39b74 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -765,6 +765,76 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         unsafe { Some((key.assume_init(), val.assume_init())) }
     }
 
+    /// Marks the key as the most recently used one.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use lru::LruCache;
+    /// use std::num::NonZeroUsize;
+    /// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
+    ///
+    /// cache.put(1, "a");
+    /// cache.put(2, "b");
+    /// cache.put(3, "c");
+    /// cache.get(&1);
+    /// cache.get(&2);
+    ///
+    /// // If we do `pop_lru` now, we would pop 3.
+    /// // assert_eq!(cache.pop_lru(), Some((3, "c")));
+    ///
+    /// // By promoting 3, we make sure it isn't popped.
+    /// cache.promote(&3);
+    /// assert_eq!(cache.pop_lru(), Some((1, "a")));
+    /// ```
+    pub fn promote<'a, Q>(&'a mut self, k: &Q)
+    where
+        KeyRef<K>: Borrow<Q>,
+        Q: Hash + Eq + ?Sized,
+    {
+        if let Some(node) = self.map.get_mut(k) {
+            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            self.detach(node_ptr);
+            self.attach(node_ptr);
+        }
+    }
+
+    /// Marks the key as the least recently used one.
+    ///
+    /// # Example
+    ///
+    /// ```
+    /// use lru::LruCache;
+    /// use std::num::NonZeroUsize;
+    /// let mut cache = LruCache::new(NonZeroUsize::new(3).unwrap());
+    ///
+    /// cache.put(1, "a");
+    /// cache.put(2, "b");
+    /// cache.put(3, "c");
+    /// cache.get(&1);
+    /// cache.get(&2);
+    ///
+    /// // If we do `pop_lru` now, we would pop 3.
+    /// // assert_eq!(cache.pop_lru(), Some((3, "c")));
+    ///
+    /// // By demoting 1 and 2, we make sure those are popped first.
+    /// cache.demote(&2);
+    /// cache.demote(&1);
+    /// assert_eq!(cache.pop_lru(), Some((1, "a")));
+    /// assert_eq!(cache.pop_lru(), Some((2, "b")));
+    /// ```
+    pub fn demote<'a, Q>(&'a mut self, k: &Q)
+    where
+        KeyRef<K>: Borrow<Q>,
+        Q: Hash + Eq + ?Sized,
+    {
+        if let Some(node) = self.map.get_mut(k) {
+            let node_ptr: *mut LruEntry<K, V> = &mut **node;
+            self.detach(node_ptr);
+            self.attach_last(node_ptr);
+        }
+    }
+
     /// Returns the number of key-value pairs that are currently in the the cache.
     ///
     /// # Example
@@ -964,6 +1034,7 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
         }
     }
 
+    // Attaches `node` after the sigil `self.head` node.
     fn attach(&mut self, node: *mut LruEntry<K, V>) {
         unsafe {
             (*node).next = (*self.head).next;
@@ -972,6 +1043,16 @@ impl<K: Hash + Eq, V, S: BuildHasher> LruCache<K, V, S> {
             (*(*node).next).prev = node;
         }
     }
+
+    // Attaches `node` before the sigil `self.tail` node.
+    fn attach_last(&mut self, node: *mut LruEntry<K, V>) {
+        unsafe {
+            (*node).next = self.tail;
+            (*node).prev = (*self.tail).prev;
+            (*self.tail).prev = node;
+            (*(*node).prev).next = node;
+        }
+    }
 }
 
 impl<K, V, S> Drop for LruCache<K, V, S> {
@@ -1952,6 +2033,24 @@ mod tests {
         }
         assert_eq!(DROP_COUNT.load(Ordering::SeqCst), n * n * 2);
     }
+
+    #[test]
+    fn test_promote_and_demote() {
+        let mut cache = LruCache::new(NonZeroUsize::new(5).unwrap());
+        for i in 0..5 {
+            cache.push(i, i);
+        }
+        cache.promote(&1);
+        cache.promote(&0);
+        cache.demote(&3);
+        cache.demote(&4);
+        assert_eq!(cache.pop_lru(), Some((4, 4)));
+        assert_eq!(cache.pop_lru(), Some((3, 3)));
+        assert_eq!(cache.pop_lru(), Some((2, 2)));
+        assert_eq!(cache.pop_lru(), Some((1, 1)));
+        assert_eq!(cache.pop_lru(), Some((0, 0)));
+        assert_eq!(cache.pop_lru(), None);
+    }
 }
 
 /// Doctests for what should *not* compile