Get Going: Mini LRU Cache
The core requirements of this data structure:
The cache serves as a key-value store with a fixed capacity
We can put items into the cache until it reaches that capacity
When capacity is reached, the cache evicts the "least recently used" item based on its internal order
We can efficiently get an item by its key
To achieve this, we need two data structures:
Doubly Linked List
Allows efficient reordering of items. A new item is always pushed to the front; when capacity is reached, the item at the back of the list is evicted.
Hash Map or Dictionary
Allows efficient retrieval by key. When a key is hit, the associated item also needs to be moved to the front of the linked list, marking it as the most recently used.
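In Go, the standard library's container/list package already provides these doubly-linked-list operations. Below is a minimal sketch of how the two structures cooperate; the key and value literals are purely illustrative, and the real cache (shown next) additionally stores the key inside each list element so the map entry can be deleted on eviction.
package main

import (
	"container/list"
	"fmt"
)

func main() {
	l := list.New()
	index := map[int]*list.Element{} // key -> element in the list

	// Put: push the new value to the front and index it by key.
	index[1] = l.PushFront("value-1")
	index[2] = l.PushFront("value-2")

	// Get: look up the element by key and mark it as most recently used.
	if e, ok := index[1]; ok {
		l.MoveToFront(e)
	}

	// Evict: the least recently used value sits at the back.
	back := l.Back()
	l.Remove(back)
	fmt.Println("evicted:", back.Value) // evicted: value-2
}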
The implementation below provides a minimal API with Put and Get methods.
import "container/list"

type LRUCache struct {
	capacity int
	list     *list.List
	dict     map[int]*list.Element // key -> list element holding an *LRUEntry
}
// LRUEntry keeps the key next to the value so the map entry can be deleted on eviction.
type LRUEntry struct {
	K int
	V string
}
func New(cap int) *LRUCache {
	return &LRUCache{
		capacity: cap,
		list:     list.New(),
		dict:     make(map[int]*list.Element),
	}
}
func (t *LRUCache) Put(k int, v string) {
	if node, ok := t.dict[k]; ok {
		node.Value.(*LRUEntry).V = v // update existing entry with the new value
		t.list.MoveToFront(node)     // mark it as most recently used
		return
	}
	// For a new key, check capacity before pushing.
	if len(t.dict) == t.capacity {
		node := t.list.Back() // the least recently used entry sits at the back
		t.list.Remove(node)
		delete(t.dict, node.Value.(*LRUEntry).K) // drop the evicted key from the map
	}
	node := t.list.PushFront(&LRUEntry{k, v})
	t.dict[k] = node
}
func (t *LRUCache) Get(k int) (v string) {
	node, ok := t.dict[k]
	if !ok {
		return ""
	}
	t.list.MoveToFront(node) // mark the hit entry as most recently used
	return node.Value.(*LRUEntry).V
}