WIP: DO-NOT-MERGE: NCE experiments: Better LRU cache implementation

It replaces the initial, basic LRU functions with a more modern implementation to improve performance.
This commit is contained in:
MrPurple666 2025-04-12 17:13:24 -03:00
parent ce1a1912ae
commit 739e5c47ea

View file

@ -2,12 +2,16 @@
#include <list> #include <list>
#include <unordered_map> #include <unordered_map>
#include <optional>
template<typename KeyType, typename ValueType> template<typename KeyType, typename ValueType>
class LRUCache { class LRUCache {
public: public:
explicit LRUCache(size_t capacity) : capacity(capacity) {} explicit LRUCache(size_t capacity) : capacity(capacity) {
cache_map.reserve(capacity);
}
// Returns pointer to value if found, nullptr otherwise
ValueType* get(const KeyType& key) { ValueType* get(const KeyType& key) {
auto it = cache_map.find(key); auto it = cache_map.find(key);
if (it == cache_map.end()) { if (it == cache_map.end()) {
@ -19,6 +23,13 @@ public:
return &(it->second.second); return &(it->second.second);
} }
// Returns pointer to value if found (without promoting it), nullptr otherwise.
// Unlike get(), the entry's position in the recency list is left untouched.
ValueType* peek(const KeyType& key) {
    auto it = cache_map.find(key);
    return it != cache_map.end() ? &(it->second.second) : nullptr;
}
// Const-correct overload for read-only callers.
// NOTE(review): the original declared `ValueType* peek(...) const`, but in a
// const member function cache_map.find() yields a const_iterator, so
// &(it->second.second) is `const ValueType*` — the method failed to compile
// once instantiated. Splitting into a mutable overload plus this const one
// keeps the non-const signature callers expect while making the const path legal.
const ValueType* peek(const KeyType& key) const {
    auto it = cache_map.find(key);
    return it != cache_map.end() ? &(it->second.second) : nullptr;
}
// Inserts or updates a key-value pair
void put(const KeyType& key, const ValueType& value) { void put(const KeyType& key, const ValueType& value) {
auto it = cache_map.find(key); auto it = cache_map.find(key);
@ -41,15 +52,56 @@ public:
cache_map[key] = {cache_list.begin(), value}; cache_map[key] = {cache_list.begin(), value};
} }
// Attempts to get value, returns std::nullopt if not found
std::optional<ValueType> try_get(const KeyType& key) {
auto* val = get(key);
return val ? std::optional<ValueType>(*val) : std::nullopt;
}
// Reports whether the key is currently cached; does not affect recency order.
bool contains(const KeyType& key) const {
    return cache_map.count(key) != 0;
}
// Removes the entry for `key` if present.
// Returns true when something was erased, false when the key was not cached.
bool erase(const KeyType& key) {
    const auto entry = cache_map.find(key);
    if (entry != cache_map.end()) {
        // Drop the recency-list node first, then the map record that owns
        // the iterator pointing at it.
        cache_list.erase(entry->second.first);
        cache_map.erase(entry);
        return true;
    }
    return false;
}
// Removes all elements from the cache
void clear() { void clear() {
cache_map.clear(); cache_map.clear();
cache_list.clear(); cache_list.clear();
} }
// Returns current number of elements in cache
size_t size() const { size_t size() const {
return cache_map.size(); return cache_map.size();
} }
// Accessor for the configured maximum number of cached entries.
size_t get_capacity() const { return capacity; }
// Changes the cache capacity. When shrinking, least-recently-used entries
// (at the back of cache_list) are evicted until the cache fits.
void resize(size_t new_capacity) {
    capacity = new_capacity;
    while (cache_map.size() > capacity) {
        // back() is the coldest key; remove its map record, then the node.
        cache_map.erase(cache_list.back());
        cache_list.pop_back();
    }
    // Re-reserve so the hash table has buckets for the new capacity.
    cache_map.reserve(capacity);
}
private: private:
size_t capacity; size_t capacity;
std::list<KeyType> cache_list; std::list<KeyType> cache_list;