LRU Cache — Design with O(1) Get and Put

Sanjeev Sharma
4 min read

Advertisement

Problem

Design a data structure that follows the Least Recently Used (LRU) cache policy.

Implement LRUCache(capacity), get(key), and put(key, value):

  • get(key) — return value if key exists (and mark as recently used), else -1
  • put(key, value) — insert/update key. If over capacity, evict the LRU item.

Both operations must run in O(1) average time.

Example:

LRUCache(2)
put(1,1), put(2,2)
get(1) → 1   (order: 2,1)
put(3,3)     → evicts key 2
get(2) → -1  (evicted)

Key Insight — DLL + HashMap

  • HashMap → O(1) lookup of any node by key
  • DLL → O(1) move-to-front and remove-last

The most recently used node lives at the head of the DLL. The LRU node sits at the tail.

head ↔ [MRU]...[LRU] ↔ tail

Solutions

Python

class Node:
    """Doubly linked list entry holding one cache slot.

    The key is stored on the node so that evicting the tail entry can
    also delete its hash-map slot in O(1).
    """

    def __init__(self, key=0, val=0):
        self.key = key
        self.val = val
        self.prev = None
        self.next = None


class LRUCache:
    """Least-recently-used cache with O(1) get and put.

    A dict gives O(1) key -> node lookup; a doubly linked list with
    sentinel head/tail nodes keeps entries in recency order: the most
    recently used node sits right after head, the LRU node right
    before tail.
    """

    def __init__(self, capacity: int):
        self.cap = capacity
        self.cache = {}  # key -> Node
        # Sentinel nodes avoid None checks when linking/unlinking.
        self.head = Node()
        self.tail = Node()
        self.head.next = self.tail
        self.tail.prev = self.head

    def _remove(self, node):
        """Unlink *node* from wherever it sits in the list."""
        before, after = node.prev, node.next
        before.next = after
        after.prev = before

    def _add_front(self, node):
        """Link *node* right after the head sentinel (most-recent spot)."""
        first = self.head.next
        node.prev, node.next = self.head, first
        first.prev = node
        self.head.next = node

    def get(self, key: int) -> int:
        """Return the value for *key* (marking it most recent), or -1."""
        node = self.cache.get(key)
        if node is None:
            return -1
        self._remove(node)
        self._add_front(node)
        return node.val

    def put(self, key: int, value: int) -> None:
        """Insert or update *key*; evict the LRU entry when over capacity."""
        if key in self.cache:
            self._remove(self.cache[key])
        fresh = Node(key, value)
        self._add_front(fresh)
        self.cache[key] = fresh
        if len(self.cache) > self.cap:
            oldest = self.tail.prev
            self._remove(oldest)
            del self.cache[oldest.key]

JavaScript

class Node {
    constructor(key = 0, val = 0) {
        this.key = key; this.val = val;
        this.prev = this.next = null;
    }
}
class LRUCache {
    constructor(capacity) {
        this.cap = capacity;
        this.cache = new Map();
        this.head = new Node(); this.tail = new Node();
        this.head.next = this.tail; this.tail.prev = this.head;
    }
    _remove(node) {
        node.prev.next = node.next;
        node.next.prev = node.prev;
    }
    _addFront(node) {
        node.next = this.head.next; node.prev = this.head;
        this.head.next.prev = node; this.head.next = node;
    }
    get(key) {
        if (!this.cache.has(key)) return -1;
        const node = this.cache.get(key);
        this._remove(node); this._addFront(node);
        return node.val;
    }
    put(key, value) {
        if (this.cache.has(key)) this._remove(this.cache.get(key));
        const node = new Node(key, value);
        this._addFront(node); this.cache.set(key, node);
        if (this.cache.size > this.cap) {
            const lru = this.tail.prev;
            this._remove(lru); this.cache.delete(lru.key);
        }
    }
}

Java

import java.util.*;
/**
 * LRU cache backed by a HashMap (O(1) key lookup) and a doubly linked
 * list with sentinel head/tail nodes (O(1) recency updates). The most
 * recently used entry sits right after head; the LRU entry right
 * before tail.
 */
class LRUCache {
    /** List entry; the key is stored so eviction can clear the map slot in O(1). */
    class Node {
        int key, val;
        Node prev, next;
        Node(int k, int v) {
            key = k;
            val = v;
        }
    }

    int cap;
    Map<Integer, Node> cache = new HashMap<>();
    // Sentinels avoid null checks while linking/unlinking.
    Node head = new Node(0, 0), tail = new Node(0, 0);

    public LRUCache(int capacity) {
        cap = capacity;
        head.next = tail;
        tail.prev = head;
    }

    /** Unlink n from wherever it sits in the list. */
    void remove(Node n) {
        Node before = n.prev, after = n.next;
        before.next = after;
        after.prev = before;
    }

    /** Link n right after the head sentinel (most-recent spot). */
    void addFront(Node n) {
        Node first = head.next;
        n.prev = head;
        n.next = first;
        first.prev = n;
        head.next = n;
    }

    /** Return the value for key (marking it most recent), or -1. */
    public int get(int key) {
        Node n = cache.get(key);
        if (n == null) return -1;
        remove(n);
        addFront(n);
        return n.val;
    }

    /** Insert/update key; evict the LRU entry when over capacity. */
    public void put(int key, int value) {
        Node existing = cache.get(key);
        if (existing != null) remove(existing);
        Node fresh = new Node(key, value);
        addFront(fresh);
        cache.put(key, fresh);
        if (cache.size() > cap) {
            Node lru = tail.prev;
            remove(lru);
            cache.remove(lru.key);
        }
    }
}

C++

#include <unordered_map>
#include <list>
using namespace std;
// LRU cache using std::list for recency order (front = most recent,
// back = least recent) plus a hash map from key to the node's list
// iterator, so both operations run in O(1). std::list::splice relinks
// a node in place without invalidating iterators, which is what keeps
// the map entries valid across move-to-front.
class LRUCache {
    int cap;
    list<pair<int,int>> dll;                                  // (key, value)
    unordered_map<int, list<pair<int,int>>::iterator> cache;  // key -> node
public:
    LRUCache(int capacity) : cap(capacity) {}

    // Return the value for key (marking it most recent), or -1.
    int get(int key) {
        auto it = cache.find(key);
        if (it == cache.end()) return -1;
        // Move the node to the front; its iterator stays valid.
        dll.splice(dll.begin(), dll, it->second);
        return it->second->second;
    }

    // Insert/update key; evict the least recently used entry when full.
    void put(int key, int value) {
        auto it = cache.find(key);
        if (it != cache.end()) dll.erase(it->second);
        dll.push_front({key, value});
        cache[key] = dll.begin();
        if ((int)cache.size() > cap) {
            // The back of the list is the LRU entry; drop both halves.
            cache.erase(dll.back().first);
            dll.pop_back();
        }
    }
};

C

#include <stdlib.h>
#include <string.h>
/* Doubly linked list entry for one cache slot; the key is stored on
 * the node so eviction can also clear the matching hash-table slot in
 * O(1) (same idea as the Python version above). */
typedef struct Node {
    int key, val;
    struct Node *prev, *next;
} Node;

/* Cache state: a DLL for recency order plus a hash table mapping keys
 * to nodes. NOTE(review): the operations are not shown in this
 * excerpt, so the exact roles below are inferred from the fields —
 * confirm against the full implementation. */
typedef struct {
    int cap, size;       /* capacity limit and current entry count */
    Node *head, *tail;   /* list endpoints — presumably sentinels; TODO confirm */
    Node **table;        /* hash table of node pointers (tsize buckets) */
    int tsize;
} LRUCache;
/* Hash table backed DLL — core idea same as Python above */
/* Full C implementation uses open-addressing hash + sentinel DLL */

Complexity

Operation | Time | Space
get       | O(1) | O(n)
put       | O(1) | O(n)

Python Shortcut — OrderedDict

from collections import OrderedDict


class LRUCache:
    """LRU cache built on OrderedDict, which already maintains insertion
    order and offers O(1) move_to_end and popitem from either end."""

    def __init__(self, capacity):
        self.cap = capacity
        self.cache = OrderedDict()

    def get(self, key):
        """Return the value for *key* (marking it most recent), or -1."""
        if key not in self.cache:
            return -1
        self.cache.move_to_end(key)
        return self.cache[key]

    def put(self, key, value):
        """Insert or update *key*; evict the oldest entry when over capacity."""
        self.cache[key] = value
        # Assigning to an existing key does not reorder it, so always
        # promote the key to the most-recent (end) position; for a new
        # key this is a no-op since insertion already appends at the end.
        self.cache.move_to_end(key)
        if len(self.cache) > self.cap:
            # last=False pops from the front, i.e. the LRU entry.
            self.cache.popitem(last=False)

Use this in interviews when asked for a quick solution — but know the DLL version for follow-ups.

Advertisement

Sanjeev Sharma

Written by

Sanjeev Sharma

Full Stack Engineer · E-mopro