I’m implementing a templated LRU cache in C++17 and C++20.
High-level design:
Recency: std::list<Key> (most-recent at the front).
Lookup: std::unordered_map<Key, std::pair<Value, std::list<Key>::const_iterator>>.
Behavior: promote on access/update; evict from tail on insert when full. Target: O(1) average for all ops.
Public API (simplified):
insertKeyValuePair(Key key, Value value) — insert or update, promote to most-recent
getValueFromKey(const Key& key) — return the value and promote on hit
getMostRecentKey() const — peek the most-recent key (no reordering)
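To make the intended behaviour concrete, here is a minimal usage sketch (capacity 2, int keys and std::string values; it assumes the class below plus <cassert> and <string>):

LRUCache<int, std::string> cache(2);
cache.insertKeyValuePair(1, "one");
cache.insertKeyValuePair(2, "two");        // recency (MRU..LRU): 2, 1
auto v = cache.getValueFromKey(1);         // hit promotes key 1 -> 1, 2
assert(v == "one");
cache.insertKeyValuePair(3, "three");      // full: evicts LRU key 2 -> 3, 1
assert(cache.getMostRecentKey() == 3);
assert(cache.size() == 2);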
Could you please review this code for simplicity and efficiency, and for the best way to signal “empty / key not found” without unnecessary copies? I’d also appreciate guidance on alternative return mechanisms (e.g., exceptions vs. std::optional vs. a pointer); the #if 0 block near the end of the class sketches a few such variants.
https://godbolt.org/z/aE7EE3j5W
#include <cassert>
#include <list>
#include <cstddef>
#include <stdexcept>
#include <iostream>
#include <optional>
#include <unordered_map>
#include <print>
template <typename Key, typename Value, typename Hash = std::hash<Key>>
class LRUCache {
public:
    using size_type = std::size_t;
    using key_type = Key;
    using value_type = Value;

    explicit LRUCache(size_type capacity)
        : m_capacity(capacity > 0 ? capacity : throw std::invalid_argument("capacity must be > 0"))
    {
        assert(m_capacity > 0 && "capacity must be > 0");
        m_lookup.reserve(m_capacity);
    }
    /************************insertKeyValuePair*************************************/
    // Q: are forwarding ("universal") references a better choice here?
    void insertKeyValuePair(key_type key, value_type value) {
        if (auto it = m_lookup.find(key); it != m_lookup.end()) {
            // Key already present: promote it to most-recent and update its value.
            m_cache.splice(m_cache.begin(), m_cache, it->second.second);
            it->second.first = std::move(value);
            return;
        }
        else if (!m_lookup.empty() && m_lookup.size() == m_capacity) {
            // Cache is full: evict the least-recently-used entry (the tail).
            m_lookup.erase(m_cache.back());
            m_cache.pop_back();
        }
        m_cache.push_front(key);
        // Q: emplace or insert?
        m_lookup.emplace(std::move(key), std::make_pair(std::move(value), m_cache.begin()));
    }
    /************************getMostRecentKey****************************************/
    [[nodiscard("use the key or handle the exception")]] const key_type&
    getMostRecentKey() const {
        if (m_cache.empty()) {
            throw std::runtime_error("Cache is empty.");
        }
        return m_cache.front();
    }
    /************************getValueFromKey****************************************/
    [[nodiscard]] decltype(auto)
    getValueFromKey(const key_type& key) {
        if (m_lookup.empty()) {
            throw std::runtime_error("Cache is empty.");
        }
        if (auto it = m_lookup.find(key); it != m_lookup.end()) {
            m_cache.splice(m_cache.begin(), m_cache, it->second.second);
            // Parenthesised so decltype(auto) deduces value_type& rather than a by-value copy.
            return (it->second.first);
        }
        throw std::runtime_error("Key not found.");
    }
/************************size()****************************************/
size_type size() const noexcept {
return m_lookup.size();
}
size_type capacity() const noexcept {
return m_capacity;
}
#if 0 // Alternatives
    [[nodiscard]] std::optional<key_type>
    getMostRecentKey_() const noexcept {
        if (m_cache.empty()) {
            return std::nullopt;
        }
        return m_cache.front();
    }
    [[nodiscard]] const key_type*
    getMostRecentKey__() const noexcept {
        return m_cache.empty() ? nullptr : &m_cache.front();
    }
#endif
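#if 0 // A further sketch for the return-mechanism question above (not part of the
      // implementation under review): a pointer-returning lookup, so "key not found"
      // (and an empty cache) become nullptr instead of an exception. It reuses only
      // the members shown below (m_lookup, m_cache) and still promotes on a hit.
    [[nodiscard]] value_type*
    getValueFromKey_(const key_type& key) {
        if (auto it = m_lookup.find(key); it != m_lookup.end()) {
            m_cache.splice(m_cache.begin(), m_cache, it->second.second);
            return &it->second.first;   // pointer stays valid until the entry is evicted
        }
        return nullptr;                 // empty cache and missing key both end up here
    }
#endif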
    // Data--------------------------------------------------------------
private:
    size_type m_capacity;

    using Cache = std::list<key_type>;
    using NodeIt = Cache::const_iterator; // relies on C++20 implicit typename
    using Node = std::pair<value_type, NodeIt>;
    using Lookup = std::unordered_map<key_type, Node, Hash>;

    Lookup m_lookup;
    Cache m_cache;
};
Note: <print> is a C++23 library header, the annotated [[nodiscard(string_literal)]] is a C++20 feature, and contextual implicit typename is a C++20 feature.