In-Memory Cache with LRU Eviction
July 12, 2025
Our cache implements an LRU (Least Recently Used) eviction policy with TTL support.
Implementation
// shreder/cache.go
package shreder

import (
	"container/list"
	"sync"
	"time"
)

// CacheItem holds a cached value together with its absolute expiry time.
type CacheItem struct {
	Value      string
	TimeToLive time.Time
}

// entry is what we store in the eviction list, so a list element can be
// mapped back to its key when it is evicted.
type entry struct {
	key   string
	value CacheItem
}

type Cache struct {
	mu       sync.RWMutex
	Items    map[string]*list.Element
	eviction *list.List
	capacity int
}

// Get returns the value for key if it exists and has not expired.
// It takes the write lock because a hit moves the element to the front
// of the eviction list, and a stale hit removes it.
func (c *Cache) Get(key string) (string, bool) {
	c.mu.Lock()
	defer c.mu.Unlock()

	item, found := c.Items[key]
	if !found || time.Now().After(item.Value.(entry).value.TimeToLive) {
		if found {
			// Lazily drop the expired entry.
			c.eviction.Remove(item)
			delete(c.Items, key)
		}
		return "", false
	}

	c.eviction.MoveToFront(item)
	return item.Value.(entry).value.Value, true
}

// Set inserts or replaces key, evicting the least recently used entry
// if the cache is at capacity.
func (c *Cache) Set(key string, value string, ttl time.Duration) {
	c.mu.Lock()
	defer c.mu.Unlock()

	if item, found := c.Items[key]; found {
		c.eviction.Remove(item)
		delete(c.Items, key)
	}

	if c.eviction.Len() >= c.capacity {
		c.evictLRU()
	}

	item := CacheItem{
		Value:      value,
		TimeToLive: time.Now().Add(ttl),
	}
	elem := c.eviction.PushFront(entry{key, item})
	c.Items[key] = elem
}

// evictLRU removes the least recently used entry, which sits at the back
// of the eviction list. Not shown in the original listing; the caller must
// hold the write lock.
func (c *Cache) evictLRU() {
	if elem := c.eviction.Back(); elem != nil {
		c.eviction.Remove(elem)
		delete(c.Items, elem.Value.(entry).key)
	}
}
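For reference, here is a minimal usage sketch. NewCache is an assumed constructor that the listing above does not include; it simply wires up the key map and the eviction list for a fixed capacity.

// NewCache is an assumed helper, not part of the listing above.
func NewCache(capacity int) *Cache {
	return &Cache{
		Items:    make(map[string]*list.Element),
		eviction: list.New(),
		capacity: capacity,
	}
}

c := NewCache(2)
c.Set("a", "1", time.Minute)
c.Set("b", "2", time.Minute)

c.Get("a")                   // touching "a" makes "b" the least recently used entry
c.Set("c", "3", time.Minute) // at capacity, so "b" is evicted

_, ok := c.Get("b") // ok == false

Because Get promotes an entry to the front of the list on every hit, the back of the list is always the least recently used item, which is exactly what evictLRU removes.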
Key Features
- Thread-safe: A sync.RWMutex guards the key map and the eviction list; Get takes the write lock because a cache hit reorders the list
- LRU eviction: Automatically removes least recently used items when capacity is reached
- TTL support: Items expire after a specified time duration
- Background cleanup: Expired items are cleaned up automatically (one possible cleanup loop is sketched after this list)
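The listing above only drops expired entries lazily when they are looked up. One plausible shape for the background sweep, assuming a janitor goroutine with a stop channel (startJanitor and removeExpired are illustrative names, not part of the original code):

// startJanitor is an assumed helper that sweeps expired entries on a
// fixed interval until stop is closed.
func (c *Cache) startJanitor(interval time.Duration, stop <-chan struct{}) {
	go func() {
		ticker := time.NewTicker(interval)
		defer ticker.Stop()
		for {
			select {
			case <-ticker.C:
				c.removeExpired()
			case <-stop:
				return
			}
		}
	}()
}

// removeExpired walks the eviction list under the write lock and drops
// every entry whose TimeToLive has passed.
func (c *Cache) removeExpired() {
	c.mu.Lock()
	defer c.mu.Unlock()
	now := time.Now()
	for elem := c.eviction.Back(); elem != nil; {
		prev := elem.Prev()
		if now.After(elem.Value.(entry).value.TimeToLive) {
			c.eviction.Remove(elem)
			delete(c.Items, elem.Value.(entry).key)
		}
		elem = prev
	}
}

Closing the stop channel on shutdown stops the ticker and lets the goroutine exit cleanly.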