Skip to content

LRU Cache

Problem

Design a data structure that follows the Least Recently Used (LRU) eviction policy, supporting get and put in O(1) time.

Approach

OrderedDict version: move accessed keys to end; pop from front on evict. Manual version: doubly linked list + hash map for O(1) removal/insertion.

When to Use

O(1) cache with eviction — "design LRU/LFU cache", bounded-memory caching with recency tracking. Pattern: hash map + doubly linked list. Aviation: caching decoded METAR/TAF data, recent flight plan lookups.

Complexity

Time O(1) for both get and put
Space O(capacity)

Implementation

lru_cache

LRU Cache implementation.

Problem

Design a data structure that follows the Least Recently Used (LRU) eviction policy, supporting get and put in O(1) time.

Approach

OrderedDict version: move accessed keys to end; pop from front on evict. Manual version: doubly linked list + hash map for O(1) removal/insertion.

When to Use

O(1) cache with eviction — "design LRU/LFU cache", bounded-memory caching with recency tracking. Pattern: hash map + doubly linked list. Aviation: caching decoded METAR/TAF data, recent flight plan lookups.

Complexity

Time: O(1) for both get and put. Space: O(capacity).

LRUCache

LRU Cache using OrderedDict.

Source code in src/algo/linked_lists/lru_cache.py
class LRUCache:
    """Least-recently-used cache backed by an ``OrderedDict``.

    The dict's insertion order doubles as the recency list: the front
    holds the least recently used key, the back the most recently used.
    """

    def __init__(self, capacity: int) -> None:
        """Create an empty cache holding at most *capacity* entries.

        Raises:
            ValueError: if *capacity* is not positive.
        """
        if capacity <= 0:
            msg = f"capacity must be positive, got {capacity}"
            raise ValueError(msg)
        self._cache: OrderedDict[int, int] = OrderedDict()
        self._capacity = capacity

    def get(self, key: int) -> int:
        """Return value for key, or -1 if not found. Marks key as recently used."""
        try:
            value = self._cache[key]
        except KeyError:
            return -1
        self._cache.move_to_end(key)
        return value

    def put(self, key: int, value: int) -> None:
        """Insert or update key-value pair. Evicts LRU entry if at capacity."""
        if key in self._cache:
            # Refresh recency before overwriting the value.
            self._cache.move_to_end(key)
        self._cache[key] = value
        while len(self._cache) > self._capacity:
            self._cache.popitem(last=False)  # drop least recently used

get

get(key: int) -> int

Return value for key, or -1 if not found. Marks key as recently used.

Source code in src/algo/linked_lists/lru_cache.py
def get(self, key: int) -> int:
    """Return value for key, or -1 if not found. Marks key as recently used."""
    try:
        value = self._cache[key]
    except KeyError:
        return -1
    # A hit moves the key to the back of the OrderedDict (most recent).
    self._cache.move_to_end(key)
    return value

put

put(key: int, value: int) -> None

Insert or update key-value pair. Evicts LRU entry if at capacity.

Source code in src/algo/linked_lists/lru_cache.py
def put(self, key: int, value: int) -> None:
    """Insert or update key-value pair. Evicts LRU entry if at capacity."""
    if key in self._cache:
        # Refresh recency before overwriting the value.
        self._cache.move_to_end(key)
    self._cache[key] = value
    while len(self._cache) > self._capacity:
        self._cache.popitem(last=False)  # drop least recently used

LRUCacheManual

LRU Cache using a doubly linked list + hash map.

Source code in src/algo/linked_lists/lru_cache.py
class LRUCacheManual:
    """LRU Cache using a doubly linked list + hash map.

    The hash map gives O(1) key lookup; the list keeps nodes in recency
    order, most recent at the front, so eviction pops from the back.
    """

    def __init__(self, capacity: int) -> None:
        """Create an empty cache; raises ValueError for non-positive capacity."""
        if capacity <= 0:
            msg = f"capacity must be positive, got {capacity}"
            raise ValueError(msg)
        self._capacity = capacity
        self._cache: dict[int, _DNode] = {}
        # Head/tail sentinels: real nodes always have live neighbours,
        # so _remove/_add_to_front never special-case the list ends.
        self._head = _DNode()
        self._tail = _DNode()
        self._head.next = self._tail
        self._tail.prev = self._head

    def _remove(self, node: _DNode) -> None:
        """Unlink a node from the doubly linked list."""
        before, after = node.prev, node.next
        assert before is not None
        assert after is not None
        before.next = after
        after.prev = before

    def _add_to_front(self, node: _DNode) -> None:
        """Insert a node right after the head sentinel (most recent)."""
        first = self._head.next
        assert first is not None
        node.prev = self._head
        node.next = first
        first.prev = node
        self._head.next = node

    def get(self, key: int) -> int:
        """Return value for key, or -1 if not found."""
        entry = self._cache.get(key)
        if entry is None:
            return -1
        # A hit re-links the node at the front (most recently used).
        self._remove(entry)
        self._add_to_front(entry)
        return entry.val

    def put(self, key: int, value: int) -> None:
        """Insert or update key-value pair. Evicts LRU entry if at capacity."""
        stale = self._cache.get(key)
        if stale is not None:
            self._remove(stale)
        entry = _DNode(key, value)
        self._cache[key] = entry
        self._add_to_front(entry)
        if len(self._cache) > self._capacity:
            victim = self._tail.prev  # node just before the tail sentinel is the LRU
            assert victim is not None
            self._remove(victim)
            del self._cache[victim.key]

get

get(key: int) -> int

Return value for key, or -1 if not found.

Source code in src/algo/linked_lists/lru_cache.py
def get(self, key: int) -> int:
    """Return value for key, or -1 if not found."""
    entry = self._cache.get(key)
    if entry is None:
        return -1
    # A hit re-links the node at the front (most recently used).
    self._remove(entry)
    self._add_to_front(entry)
    return entry.val

put

put(key: int, value: int) -> None

Insert or update key-value pair. Evicts LRU entry if at capacity.

Source code in src/algo/linked_lists/lru_cache.py
def put(self, key: int, value: int) -> None:
    """Insert or update key-value pair. Evicts LRU entry if at capacity."""
    stale = self._cache.get(key)
    if stale is not None:
        # Updating an existing key: unlink its old node first.
        self._remove(stale)
    entry = _DNode(key, value)
    self._cache[key] = entry
    self._add_to_front(entry)
    if len(self._cache) > self._capacity:
        victim = self._tail.prev  # node just before the tail sentinel is the LRU
        assert victim is not None
        self._remove(victim)
        del self._cache[victim.key]
tests/linked_lists/test_lru_cache.py
"""Tests for LRU Cache."""

import pytest

from algo.linked_lists.lru_cache import LRUCache, LRUCacheManual


class TestLRUCache:
    def test_basic_get_put(self) -> None:
        lru = LRUCache(2)
        lru.put(1, 1)
        lru.put(2, 2)
        assert lru.get(1) == 1
        lru.put(3, 3)  # over capacity: key 2 (least recent) is evicted
        assert lru.get(2) == -1

    def test_update_existing_key(self) -> None:
        lru = LRUCache(2)
        lru.put(1, 1)
        lru.put(2, 2)
        lru.put(1, 10)  # overwrite refreshes key 1's recency
        assert lru.get(1) == 10
        lru.put(3, 3)  # key 2 is now the LRU and gets evicted
        assert lru.get(2) == -1
        assert lru.get(1) == 10

    def test_get_missing_key(self) -> None:
        assert LRUCache(1).get(99) == -1

    def test_capacity_one(self) -> None:
        lru = LRUCache(1)
        lru.put(1, 1)
        lru.put(2, 2)  # displaces key 1
        assert lru.get(1) == -1
        assert lru.get(2) == 2

    def test_eviction_order(self) -> None:
        lru = LRUCache(3)
        for k in (1, 2, 3):
            lru.put(k, k)
        lru.get(1)  # touch key 1 so it is most recently used
        lru.put(4, 4)  # key 2 is now the oldest and gets evicted
        assert lru.get(2) == -1
        assert lru.get(1) == 1
        assert lru.get(3) == 3
        assert lru.get(4) == 4

    def test_zero_capacity_raises(self) -> None:
        with pytest.raises(ValueError):
            LRUCache(0)


class TestLRUCacheManual:
    def test_basic_get_put(self) -> None:
        lru = LRUCacheManual(2)
        lru.put(1, 1)
        lru.put(2, 2)
        assert lru.get(1) == 1
        lru.put(3, 3)  # over capacity: key 2 (least recent) is evicted
        assert lru.get(2) == -1

    def test_update_existing_key(self) -> None:
        lru = LRUCacheManual(2)
        lru.put(1, 1)
        lru.put(2, 2)
        lru.put(1, 10)  # overwrite refreshes key 1's recency
        assert lru.get(1) == 10
        lru.put(3, 3)  # key 2 is now the LRU and gets evicted
        assert lru.get(2) == -1
        assert lru.get(1) == 10

    def test_get_missing_key(self) -> None:
        assert LRUCacheManual(1).get(99) == -1

    def test_capacity_one(self) -> None:
        lru = LRUCacheManual(1)
        lru.put(1, 1)
        lru.put(2, 2)  # displaces key 1
        assert lru.get(1) == -1
        assert lru.get(2) == 2

    def test_eviction_order(self) -> None:
        lru = LRUCacheManual(3)
        for k in (1, 2, 3):
            lru.put(k, k)
        lru.get(1)  # touch key 1 so it is most recently used
        lru.put(4, 4)  # key 2 is now the oldest and gets evicted
        assert lru.get(2) == -1
        assert lru.get(1) == 1
        assert lru.get(3) == 3
        assert lru.get(4) == 4

    def test_zero_capacity_raises(self) -> None:
        with pytest.raises(ValueError):
            LRUCacheManual(0)

Implement it yourself

Run: just challenge linked_lists lru_cache

Then implement the functions to make all tests pass. Use just study linked_lists for watch mode.

Reveal Solution

lru_cache

LRU Cache implementation.

Problem

Design a data structure that follows the Least Recently Used (LRU) eviction policy, supporting get and put in O(1) time.

Approach

OrderedDict version: move accessed keys to end; pop from front on evict. Manual version: doubly linked list + hash map for O(1) removal/insertion.

When to Use

O(1) cache with eviction — "design LRU/LFU cache", bounded-memory caching with recency tracking. Pattern: hash map + doubly linked list. Aviation: caching decoded METAR/TAF data, recent flight plan lookups.

Complexity

Time: O(1) for both get and put. Space: O(capacity).

LRUCache

LRU Cache using OrderedDict.

Source code in src/algo/linked_lists/lru_cache.py
class LRUCache:
    """Least-recently-used cache backed by an ``OrderedDict``.

    The dict's insertion order doubles as the recency list: the front
    holds the least recently used key, the back the most recently used.
    """

    def __init__(self, capacity: int) -> None:
        """Create an empty cache holding at most *capacity* entries.

        Raises:
            ValueError: if *capacity* is not positive.
        """
        if capacity <= 0:
            msg = f"capacity must be positive, got {capacity}"
            raise ValueError(msg)
        self._cache: OrderedDict[int, int] = OrderedDict()
        self._capacity = capacity

    def get(self, key: int) -> int:
        """Return value for key, or -1 if not found. Marks key as recently used."""
        try:
            value = self._cache[key]
        except KeyError:
            return -1
        self._cache.move_to_end(key)
        return value

    def put(self, key: int, value: int) -> None:
        """Insert or update key-value pair. Evicts LRU entry if at capacity."""
        if key in self._cache:
            # Refresh recency before overwriting the value.
            self._cache.move_to_end(key)
        self._cache[key] = value
        while len(self._cache) > self._capacity:
            self._cache.popitem(last=False)  # drop least recently used

get

get(key: int) -> int

Return value for key, or -1 if not found. Marks key as recently used.

Source code in src/algo/linked_lists/lru_cache.py
def get(self, key: int) -> int:
    """Return value for key, or -1 if not found. Marks key as recently used."""
    try:
        value = self._cache[key]
    except KeyError:
        return -1
    # A hit moves the key to the back of the OrderedDict (most recent).
    self._cache.move_to_end(key)
    return value

put

put(key: int, value: int) -> None

Insert or update key-value pair. Evicts LRU entry if at capacity.

Source code in src/algo/linked_lists/lru_cache.py
def put(self, key: int, value: int) -> None:
    """Insert or update key-value pair. Evicts LRU entry if at capacity."""
    if key in self._cache:
        # Refresh recency before overwriting the value.
        self._cache.move_to_end(key)
    self._cache[key] = value
    while len(self._cache) > self._capacity:
        self._cache.popitem(last=False)  # drop least recently used

LRUCacheManual

LRU Cache using a doubly linked list + hash map.

Source code in src/algo/linked_lists/lru_cache.py
class LRUCacheManual:
    """LRU Cache using a doubly linked list + hash map.

    The hash map gives O(1) key lookup; the list keeps nodes in recency
    order, most recent at the front, so eviction pops from the back.
    """

    def __init__(self, capacity: int) -> None:
        """Create an empty cache; raises ValueError for non-positive capacity."""
        if capacity <= 0:
            msg = f"capacity must be positive, got {capacity}"
            raise ValueError(msg)
        self._capacity = capacity
        self._cache: dict[int, _DNode] = {}
        # Head/tail sentinels: real nodes always have live neighbours,
        # so _remove/_add_to_front never special-case the list ends.
        self._head = _DNode()
        self._tail = _DNode()
        self._head.next = self._tail
        self._tail.prev = self._head

    def _remove(self, node: _DNode) -> None:
        """Unlink a node from the doubly linked list."""
        before, after = node.prev, node.next
        assert before is not None
        assert after is not None
        before.next = after
        after.prev = before

    def _add_to_front(self, node: _DNode) -> None:
        """Insert a node right after the head sentinel (most recent)."""
        first = self._head.next
        assert first is not None
        node.prev = self._head
        node.next = first
        first.prev = node
        self._head.next = node

    def get(self, key: int) -> int:
        """Return value for key, or -1 if not found."""
        entry = self._cache.get(key)
        if entry is None:
            return -1
        # A hit re-links the node at the front (most recently used).
        self._remove(entry)
        self._add_to_front(entry)
        return entry.val

    def put(self, key: int, value: int) -> None:
        """Insert or update key-value pair. Evicts LRU entry if at capacity."""
        stale = self._cache.get(key)
        if stale is not None:
            self._remove(stale)
        entry = _DNode(key, value)
        self._cache[key] = entry
        self._add_to_front(entry)
        if len(self._cache) > self._capacity:
            victim = self._tail.prev  # node just before the tail sentinel is the LRU
            assert victim is not None
            self._remove(victim)
            del self._cache[victim.key]

get

get(key: int) -> int

Return value for key, or -1 if not found.

Source code in src/algo/linked_lists/lru_cache.py
def get(self, key: int) -> int:
    """Return value for key, or -1 if not found."""
    entry = self._cache.get(key)
    if entry is None:
        return -1
    # A hit re-links the node at the front (most recently used).
    self._remove(entry)
    self._add_to_front(entry)
    return entry.val

put

put(key: int, value: int) -> None

Insert or update key-value pair. Evicts LRU entry if at capacity.

Source code in src/algo/linked_lists/lru_cache.py
def put(self, key: int, value: int) -> None:
    """Insert or update key-value pair. Evicts LRU entry if at capacity."""
    stale = self._cache.get(key)
    if stale is not None:
        # Updating an existing key: unlink its old node first.
        self._remove(stale)
    entry = _DNode(key, value)
    self._cache[key] = entry
    self._add_to_front(entry)
    if len(self._cache) > self._capacity:
        victim = self._tail.prev  # node just before the tail sentinel is the LRU
        assert victim is not None
        self._remove(victim)
        del self._cache[victim.key]