Here is a modern implementation of an LRUCache<TKey, TValue>
collection, for .NET 6 and later. The main feature is the method GetOrAdd
. This method either returns an existing value, or invokes the valueFactory
and returns a new value. Each time a new value is added, the boundedCapacity
policy is enforced by evicting the least recently used value from the collection. The valueFactory
is invoked lazily, so that multiple concurrent GetOrAdd
calls for the same key receive the same value.
/// <summary>
/// A thread-safe cache with a bounded capacity and least-recently-used eviction.
/// Values are created lazily, so concurrent <see cref="GetOrAdd"/> calls for the
/// same key observe a single invocation of the value factory.
/// </summary>
public class LRUCache<TKey, TValue> : IEnumerable<KeyValuePair<TKey, TValue>>
{
    // Each cached value is stored once, wrapped in a list node. The list records
    // usage order: head = least recently used (next to evict), tail = most recent.
    private readonly record struct Entry(TKey Key, Lazy<TValue> Lazy);

    private readonly Dictionary<TKey, LinkedListNode<Entry>> _map;
    private readonly LinkedList<Entry> _order;
    private readonly int _capacity;

    /// <summary>Creates a cache that holds at most <paramref name="boundedCapacity"/> values.</summary>
    /// <param name="boundedCapacity">Maximum number of cached values; must be non-negative.</param>
    /// <param name="comparer">Optional key comparer; <c>null</c> selects the default comparer.</param>
    /// <exception cref="ArgumentOutOfRangeException">The capacity is negative.</exception>
    public LRUCache(int boundedCapacity, IEqualityComparer<TKey> comparer = default)
    {
        if (boundedCapacity < 0)
            throw new ArgumentOutOfRangeException(nameof(boundedCapacity));
        _capacity = boundedCapacity;
        _order = new();
        // +1 because GetOrAdd inserts first and evicts afterwards, so the
        // dictionary momentarily holds one entry more than the capacity.
        _map = new(boundedCapacity + 1, comparer);
    }

    // The dictionary doubles as the synchronization object; it never escapes this class.
    private object Locker => _map;

    /// <summary>Gets the number of values currently cached.</summary>
    public int Count
    {
        get { lock (Locker) return _map.Count; }
    }

    /// <summary>
    /// Returns the cached value for <paramref name="key"/>, or creates, caches and
    /// returns a new one produced by <paramref name="valueFactory"/>. Adding a value
    /// beyond the capacity evicts the least recently used entry.
    /// </summary>
    public TValue GetOrAdd(TKey key, Func<TKey, TValue> valueFactory)
    {
        ArgumentNullException.ThrowIfNull(valueFactory);
        Lazy<TValue> cached;
        lock (Locker)
        {
            // Single dictionary lookup: the ref either exposes the existing
            // node or a fresh default slot that is filled below.
            ref LinkedListNode<Entry> slot = ref CollectionsMarshal
                .GetValueRefOrAddDefault(_map, key, out bool found);
            if (!found)
            {
                cached = new(() => valueFactory(key));
                slot = new(new Entry(key, cached));
                _order.AddLast(slot);
                if (_map.Count > _capacity)
                {
                    // Evict the head of the list (least recently used).
                    bool evicted = _map.Remove(_order.First.Value.Key);
                    Debug.Assert(evicted);
                    _order.RemoveFirst();
                }
            }
            else
            {
                cached = slot.Value.Lazy;
                // Mark the entry as most recently used, unless it already is.
                if (!ReferenceEquals(slot, _order.Last))
                {
                    _order.Remove(slot);
                    _order.AddLast(slot);
                }
            }
            Debug.Assert(_map.Count == _order.Count);
        }
        // The factory runs outside the lock; Lazy<T> guarantees a single invocation.
        return cached.Value;
    }

    /// <summary>Removes all values from the cache.</summary>
    public void Clear()
    {
        lock (Locker)
        {
            _map.Clear();
            _order.Clear();
        }
    }

    /// <summary>
    /// Enumerates the cached key/value pairs from least to most recently used.
    /// The snapshot is taken eagerly under the lock; values are materialized
    /// lazily during enumeration, outside the lock.
    /// </summary>
    public IEnumerator<KeyValuePair<TKey, TValue>> GetEnumerator()
    {
        Entry[] snapshot;
        lock (Locker) snapshot = _order.ToArray();
        return snapshot
            .Select(entry => KeyValuePair.Create(entry.Key, entry.Lazy.Value))
            .GetEnumerator();
    }

    IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
}
Usage example:
LRUCache<string, object> cache = new(30);
object value = cache.GetOrAdd("SomeKey", key => GetObject(key));
The advanced API CollectionsMarshal.GetValueRefOrAddDefault
is used so that the key
is hashed only once per GetOrAdd
call.
In case the valueFactory
fails, the behavior of the Lazy<T>
class is to cache the exception permanently. This behavior might not be suitable for a caching system, so you may want to replace the Lazy<T>
with the simple LazyWithRetry<T>
implementation that I have posted here.
In case you would like to use an asynchronous valueFactory
, there are AsyncLazy<T>
implementations in this question.
The LRUCache<TKey, TValue>
class is thread-safe.