146. LRU Cache
Description
Design a data structure that follows the constraints of a Least Recently Used (LRU) cache.
Implement the LRUCache class:

- LRUCache(int capacity) Initialize the LRU cache with positive size capacity.
- int get(int key) Return the value of the key if the key exists, otherwise return -1.
- void put(int key, int value) Update the value of the key if the key exists. Otherwise, add the key-value pair to the cache. If the number of keys exceeds the capacity from this operation, evict the least recently used key.

The functions get and put must each run in O(1) average time complexity.
Example 1:
Input ["LRUCache", "put", "put", "get", "put", "get", "put", "get", "get", "get"] [[2], [1, 1], [2, 2], [1], [3, 3], [2], [4, 4], [1], [3], [4]] Output [null, null, null, 1, null, -1, null, -1, 3, 4] Explanation LRUCache lRUCache = new LRUCache(2); lRUCache.put(1, 1); // cache is {1=1} lRUCache.put(2, 2); // cache is {1=1, 2=2} lRUCache.get(1); // return 1 lRUCache.put(3, 3); // LRU key was 2, evicts key 2, cache is {1=1, 3=3} lRUCache.get(2); // returns -1 (not found) lRUCache.put(4, 4); // LRU key was 1, evicts key 1, cache is {4=4, 3=3} lRUCache.get(1); // return -1 (not found) lRUCache.get(3); // return 3 lRUCache.get(4); // return 4
Constraints:
- 1 <= capacity <= 3000
- 0 <= key <= 10^4
- 0 <= value <= 10^5
- At most 2 * 10^5 calls will be made to get and put.
Solutions
Create a class MyNode, which contains data fields int key, int value, MyNode prev, and MyNode next. That is, each node of type MyNode has a key and a value, and holds references to its previous node and its next node.
In class LRUCache, the data fields include int capacity, which stores the capacity of the cache, Map<Integer, MyNode> map, which maps each key to its node, and MyNode head and MyNode tail, which reference the head node and the tail node respectively. For a Least Recently Used cache, the most recently used node is the head node and the least recently used node is the tail node.

In the constructor, initialize capacity with the given capacity.
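As a rough illustration, here is a minimal Java sketch of this layout, following the MyNode design described above (note that the complete solutions further below use dummy head/tail sentinel nodes instead, which is an equivalent variant):

import java.util.HashMap;
import java.util.Map;

class MyNode {
    int key;
    int value;
    MyNode prev;
    MyNode next;

    MyNode(int key, int value) {
        this.key = key;
        this.value = value;
    }
}

class LRUCache {
    private int capacity;                                // maximum number of entries
    private Map<Integer, MyNode> map = new HashMap<>();  // key -> node
    private MyNode head;                                 // most recently used node
    private MyNode tail;                                 // least recently used node

    public LRUCache(int capacity) {
        this.capacity = capacity;  // head and tail start as null (empty list)
    }

    // get, put, remove and setHead are sketched in the sections below.
}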
In get(key), if key is not in map, then key is not in the cache, so return -1. If key is in map, obtain the node and its value, remove the node from its current position, set the node to be the head, and return value.
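Following these steps, get could look roughly like the sketch below (a method of the LRUCache sketch above, assuming the remove and setHead helpers described later):

public int get(int key) {
    MyNode node = map.get(key);
    if (node == null) {
        return -1;        // key is not in the cache
    }
    remove(node);         // unlink the node from its current position
    setHead(node);        // the most recently used node becomes the head
    return node.value;
}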
In put(key, value), if map contains key, then obtain the node, update its value, remove the node from its current position, and set the node to be the head. If map does not contain key, first check the size: if the size of map has reached capacity, remove the tail node from the list and remove its corresponding entry from map. Then create a new node using key and value, add an entry for the new node into map, and set the new node to be the head.
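A corresponding sketch of put (again as a method of the LRUCache sketch above, using the same helpers, and assuming capacity >= 1 as in the constraints) might look like this:

public void put(int key, int value) {
    MyNode node = map.get(key);
    if (node != null) {            // key already cached: update value and refresh position
        node.value = value;
        remove(node);
        setHead(node);
        return;
    }
    if (map.size() >= capacity) {  // cache full: evict the least recently used node
        map.remove(tail.key);      // drop its map entry first, while tail still points at it
        remove(tail);              // then unlink it, which moves tail to its predecessor
    }
    node = new MyNode(key, value);
    map.put(key, node);            // add the new entry to the map
    setHead(node);                 // the new node becomes the most recently used
}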
Two supplementary methods are needed.
- Method remove(MyNode node): obtain node's previous node and next node, and update their references accordingly so that node is unlinked from the list. If node is head or tail, update head or tail accordingly.
- Method setHead(MyNode node): set node to be the new head and update the previous head's reference accordingly. If tail is null, update tail as well (see the Java sketch below).
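One possible Java sketch of these two helpers for the LRUCache sketch above (the null checks handle the cases where node is the head, the tail, or the only node):

private void remove(MyNode node) {
    if (node.prev != null) {
        node.prev.next = node.next;
    } else {
        head = node.next;          // node was the head
    }
    if (node.next != null) {
        node.next.prev = node.prev;
    } else {
        tail = node.prev;          // node was the tail
    }
    node.prev = null;
    node.next = null;
}

private void setHead(MyNode node) {
    node.next = head;              // link in front of the current head
    node.prev = null;
    if (head != null) {
        head.prev = node;
    }
    head = node;
    if (tail == null) {            // the list was empty, so node is also the tail
        tail = node;
    }
}

The full solutions below take a slightly different route: they keep permanent dummy head and tail sentinel nodes (see moveToHead, addToHead, and removeTail), which avoids these null checks at the cost of two extra nodes.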
- Java
class Node {
    int key;
    int val;
    Node prev;
    Node next;

    Node() {
    }

    Node(int key, int val) {
        this.key = key;
        this.val = val;
    }
}

class LRUCache {
    private Map<Integer, Node> cache = new HashMap<>();
    private Node head = new Node();
    private Node tail = new Node();
    private int capacity;
    private int size;

    public LRUCache(int capacity) {
        this.capacity = capacity;
        head.next = tail;
        tail.prev = head;
    }

    public int get(int key) {
        if (!cache.containsKey(key)) {
            return -1;
        }
        Node node = cache.get(key);
        moveToHead(node);
        return node.val;
    }

    public void put(int key, int value) {
        if (cache.containsKey(key)) {
            Node node = cache.get(key);
            node.val = value;
            moveToHead(node);
        } else {
            Node node = new Node(key, value);
            cache.put(key, node);
            addToHead(node);
            ++size;
            if (size > capacity) {
                node = removeTail();
                cache.remove(node.key);
                --size;
            }
        }
    }

    private void moveToHead(Node node) {
        removeNode(node);
        addToHead(node);
    }

    private void removeNode(Node node) {
        node.prev.next = node.next;
        node.next.prev = node.prev;
    }

    private void addToHead(Node node) {
        node.next = head.next;
        node.prev = head;
        head.next = node;
        node.next.prev = node;
    }

    private Node removeTail() {
        Node node = tail.prev;
        removeNode(node);
        return node;
    }
}

/**
 * Your LRUCache object will be instantiated and called as such:
 * LRUCache obj = new LRUCache(capacity);
 * int param_1 = obj.get(key);
 * obj.put(key,value);
 */
- C++
struct Node {
    int k;
    int v;
    Node* prev;
    Node* next;

    Node()
        : k(0)
        , v(0)
        , prev(nullptr)
        , next(nullptr) {}

    Node(int key, int val)
        : k(key)
        , v(val)
        , prev(nullptr)
        , next(nullptr) {}
};

class LRUCache {
public:
    LRUCache(int capacity)
        : cap(capacity)
        , size(0) {
        head = new Node();
        tail = new Node();
        head->next = tail;
        tail->prev = head;
    }

    int get(int key) {
        if (!cache.count(key)) return -1;
        Node* node = cache[key];
        moveToHead(node);
        return node->v;
    }

    void put(int key, int value) {
        if (cache.count(key)) {
            Node* node = cache[key];
            node->v = value;
            moveToHead(node);
        } else {
            Node* node = new Node(key, value);
            cache[key] = node;
            addToHead(node);
            ++size;
            if (size > cap) {
                node = removeTail();
                cache.erase(node->k);
                --size;
            }
        }
    }

private:
    unordered_map<int, Node*> cache;
    Node* head;
    Node* tail;
    int cap;
    int size;

    void moveToHead(Node* node) {
        removeNode(node);
        addToHead(node);
    }

    void removeNode(Node* node) {
        node->prev->next = node->next;
        node->next->prev = node->prev;
    }

    void addToHead(Node* node) {
        node->next = head->next;
        node->prev = head;
        head->next = node;
        node->next->prev = node;
    }

    Node* removeTail() {
        Node* node = tail->prev;
        removeNode(node);
        return node;
    }
};

/**
 * Your LRUCache object will be instantiated and called as such:
 * LRUCache* obj = new LRUCache(capacity);
 * int param_1 = obj->get(key);
 * obj->put(key,value);
 */
- Python3
class Node:
    def __init__(self, key=0, val=0):
        self.key = key
        self.val = val
        self.prev = None
        self.next = None


class LRUCache:
    def __init__(self, capacity: int):
        self.cache = {}  # key ==> Node(val)
        self.head = Node()  # dummy node
        self.tail = Node()  # dummy node
        self.capacity = capacity
        self.size = 0
        self.head.next = self.tail  # note: key setup
        self.tail.prev = self.head

    def get(self, key: int) -> int:
        if key not in self.cache:
            return -1
        node = self.cache[key]
        self.move_to_head(node)
        return node.val

    def put(self, key: int, value: int) -> None:
        if key in self.cache:
            node = self.cache[key]
            node.val = value
            self.move_to_head(node)
        else:
            node = Node(key, value)
            self.cache[key] = node
            self.add_to_head(node)
            self.size += 1
            if self.size > self.capacity:
                tail = self.remove_tail()
                self.cache.pop(tail.key)
                self.size -= 1

    def move_to_head(self, node):
        self.remove_node(node)
        self.add_to_head(node)

    def remove_node(self, node):
        node.prev.next = node.next
        node.next.prev = node.prev

    def add_to_head(self, node):
        node.next = self.head.next
        node.prev = self.head
        self.head.next = node
        node.next.prev = node

    def remove_tail(self):
        node = self.tail.prev
        self.remove_node(node)
        return node


# Your LRUCache object will be instantiated and called as such:
# obj = LRUCache(capacity)
# param_1 = obj.get(key)
# obj.put(key,value)

############

'''
example:
>>> od = collections.OrderedDict()
>>>
>>> od[1]=1
>>> od[2]=2
>>> od[3]=3
>>>
>>> od
OrderedDict([(1, 1), (2, 2), (3, 3)])
>>> od.move_to_end(1)
>>> od
OrderedDict([(2, 2), (3, 3), (1, 1)])
>>>
>>> od.get(1)
1
>>> od.popitem()
(1, 1)
>>> od
OrderedDict([(2, 2), (3, 3)])
>>>
>>> od[1]=1
>>> od
OrderedDict([(2, 2), (3, 3), (1, 1)])
>>>
>>> od.popitem(last=False)
(2, 2)
>>> od
OrderedDict([(3, 3), (1, 1)])
'''

import collections


class LRUCache:
    def __init__(self, capacity: 'int'):
        self.cache = collections.OrderedDict()
        self.remain = capacity

    def get(self, key: 'int') -> 'int':
        if key not in self.cache:
            return -1
        self.cache.move_to_end(key)  # meaning end is the most recently used
        return self.cache.get(key)

    def put(self, key: 'int', value: 'int') -> 'None':
        if key not in self.cache:
            if self.remain > 0:
                self.remain -= 1
            else:
                self.cache.popitem(last=False)  # pop start position
        else:
            self.cache.pop(key)
        self.cache[key] = value  # add to end of dict, meaning most recently used

############
### below solution with no ordered-dict


class DLinkedNode:
    def __init__(self, key=0, value=0):
        self.key = key
        self.val = value
        self.next = None
        self.prev = None


class DLinkedList:
    def __init__(self):
        self.head = DLinkedNode()  # dummy head, its next is real head
        self.tail = DLinkedNode()  # dummy tail, its prev is real tail
        self.head.next, self.tail.prev = self.tail, self.head

    def add_node(self, node):  # add to head
        node.prev, node.next = self.head, self.head.next
        node.next.prev, node.prev.next = node, node

    def remove_node(self, node):
        node.prev.next, node.next.prev = node.next, node.prev
        return node.key

    def remove_tail(self):
        return self.remove_node(self.tail.prev)  # dummy tail's prev is real tail

    def move_to_head(self, node):
        self.remove_node(node)
        self.add_node(node)

    # def __repr__(self):
    #     ans = []
    #     h = self.head
    #     while h:
    #         ans.append(str(h.val))
    #         h = h.next
    #     return '<DLinkedList: {}>'.format('->'.join(ans))


class LRUCache:
    def __init__(self, capacity: int):
        self.capacity = capacity
        self.nodes_map = {}
        self.cache_list = DLinkedList()

    def get(self, key: int) -> int:
        node = self.nodes_map.get(key, None)
        if node:
            self.cache_list.move_to_head(node)
            return node.val
        else:
            return -1

    def put(self, key: int, value: int) -> None:
        node = self.nodes_map.get(key, None)
        if not node:
            if self.capacity > 0:
                self.capacity -= 1
            else:
                rm_key = self.cache_list.remove_tail()
                self.nodes_map.pop(rm_key)  # note api
            new_node = DLinkedNode(key, value)
            self.nodes_map[key] = new_node
            self.cache_list.add_node(new_node)
        else:
            self.cache_list.move_to_head(node)
            node.val = value
- Go
type node struct {
    key, val   int
    prev, next *node
}

type LRUCache struct {
    capacity   int
    cache      map[int]*node
    head, tail *node
}

func Constructor(capacity int) LRUCache {
    head := new(node)
    tail := new(node)
    head.next = tail
    tail.prev = head
    return LRUCache{
        capacity: capacity,
        cache:    make(map[int]*node, capacity),
        head:     head,
        tail:     tail,
    }
}

func (this *LRUCache) Get(key int) int {
    n, ok := this.cache[key]
    if !ok {
        return -1
    }
    this.moveToFront(n)
    return n.val
}

func (this *LRUCache) Put(key int, value int) {
    n, ok := this.cache[key]
    if ok {
        n.val = value
        this.moveToFront(n)
        return
    }
    if len(this.cache) == this.capacity {
        back := this.tail.prev
        this.remove(back)
        delete(this.cache, back.key)
    }
    n = &node{key: key, val: value}
    this.pushFront(n)
    this.cache[key] = n
}

func (this *LRUCache) moveToFront(n *node) {
    this.remove(n)
    this.pushFront(n)
}

func (this *LRUCache) remove(n *node) {
    n.prev.next = n.next
    n.next.prev = n.prev
    n.prev = nil
    n.next = nil
}

func (this *LRUCache) pushFront(n *node) {
    n.prev = this.head
    n.next = this.head.next
    this.head.next.prev = n
    this.head.next = n
}
- TypeScript
class LRUCache {
    capacity: number;
    map: Map<number, number>;

    constructor(capacity: number) {
        this.capacity = capacity;
        this.map = new Map();
    }

    get(key: number): number {
        if (this.map.has(key)) {
            const val = this.map.get(key)!;
            this.map.delete(key);
            this.map.set(key, val);
            return val;
        }
        return -1;
    }

    put(key: number, value: number): void {
        this.map.delete(key);
        this.map.set(key, value);
        if (this.map.size > this.capacity) {
            this.map.delete(this.map.keys().next().value);
        }
    }
}

/**
 * Your LRUCache object will be instantiated and called as such:
 * var obj = new LRUCache(capacity)
 * var param_1 = obj.get(key)
 * obj.put(key,value)
 */
- C#
public class LRUCache {
    class Node {
        public Node Prev;
        public Node Next;
        public int Key;
        public int Val;
    }

    private Node head = new Node();
    private Node tail = new Node();
    private Dictionary<int, Node> cache = new Dictionary<int, Node>();
    private readonly int capacity;
    private int size;

    public LRUCache(int capacity) {
        this.capacity = capacity;
        head.Next = tail;
        tail.Prev = head;
    }

    public int Get(int key) {
        Node node;
        if (cache.TryGetValue(key, out node)) {
            moveToHead(node);
            return node.Val;
        }
        return -1;
    }

    public void Put(int key, int Val) {
        Node node;
        if (cache.TryGetValue(key, out node)) {
            moveToHead(node);
            node.Val = Val;
        } else {
            node = new Node() { Key = key, Val = Val };
            cache.Add(key, node);
            addToHead(node);
            if (++size > capacity) {
                node = removeTail();
                cache.Remove(node.Key);
                --size;
            }
        }
    }

    private void moveToHead(Node node) {
        removeNode(node);
        addToHead(node);
    }

    private void removeNode(Node node) {
        node.Prev.Next = node.Next;
        node.Next.Prev = node.Prev;
    }

    private void addToHead(Node node) {
        node.Next = head.Next;
        node.Prev = head;
        head.Next = node;
        node.Next.Prev = node;
    }

    private Node removeTail() {
        Node node = tail.Prev;
        removeNode(node);
        return node;
    }
}

/**
 * Your LRUCache object will be instantiated and called as such:
 * LRUCache obj = new LRUCache(capacity);
 * int param_1 = obj.Get(key);
 * obj.Put(key,Val);
 */
- Rust
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

struct Node {
    key: i32,
    value: i32,
    prev: Option<Rc<RefCell<Node>>>,
    next: Option<Rc<RefCell<Node>>>,
}

impl Node {
    #[inline]
    fn new(key: i32, value: i32) -> Self {
        Self {
            key,
            value,
            prev: None,
            next: None,
        }
    }
}

struct LRUCache {
    capacity: usize,
    cache: HashMap<i32, Rc<RefCell<Node>>>,
    head: Option<Rc<RefCell<Node>>>,
    tail: Option<Rc<RefCell<Node>>>,
}

/**
 * `&self` means the method takes an immutable reference.
 * If you need a mutable reference, change it to `&mut self` instead.
 */
impl LRUCache {
    fn new(capacity: i32) -> Self {
        Self {
            capacity: capacity as usize,
            cache: HashMap::new(),
            head: None,
            tail: None,
        }
    }

    fn get(&mut self, key: i32) -> i32 {
        match self.cache.get(&key) {
            Some(node) => {
                let node = Rc::clone(node);
                self.remove(&node);
                self.push_front(&node);
                let value = node.borrow().value;
                value
            }
            None => -1,
        }
    }

    fn put(&mut self, key: i32, value: i32) {
        match self.cache.get(&key) {
            Some(node) => {
                let node = Rc::clone(node);
                node.borrow_mut().value = value;
                self.remove(&node);
                self.push_front(&node);
            }
            None => {
                let node = Rc::new(RefCell::new(Node::new(key, value)));
                self.cache.insert(key, Rc::clone(&node));
                self.push_front(&node);
                if self.cache.len() > self.capacity {
                    let back_key = self.pop_back().unwrap().borrow().key;
                    self.cache.remove(&back_key);
                }
            }
        };
    }

    fn push_front(&mut self, node: &Rc<RefCell<Node>>) {
        match self.head.take() {
            Some(head) => {
                head.borrow_mut().prev = Some(Rc::clone(node));
                node.borrow_mut().prev = None;
                node.borrow_mut().next = Some(head);
                self.head = Some(Rc::clone(node));
            }
            None => {
                self.head = Some(Rc::clone(node));
                self.tail = Some(Rc::clone(node));
            }
        };
    }

    fn remove(&mut self, node: &Rc<RefCell<Node>>) {
        match (node.borrow().prev.as_ref(), node.borrow().next.as_ref()) {
            (None, None) => {
                self.head = None;
                self.tail = None;
            }
            (None, Some(next)) => {
                self.head = Some(Rc::clone(next));
                next.borrow_mut().prev = None;
            }
            (Some(prev), None) => {
                self.tail = Some(Rc::clone(prev));
                prev.borrow_mut().next = None;
            }
            (Some(prev), Some(next)) => {
                next.borrow_mut().prev = Some(Rc::clone(prev));
                prev.borrow_mut().next = Some(Rc::clone(next));
            }
        };
    }

    fn pop_back(&mut self) -> Option<Rc<RefCell<Node>>> {
        match self.tail.take() {
            Some(tail) => {
                self.remove(&tail);
                Some(tail)
            }
            None => None,
        }
    }
}

/**
 * Your LRUCache object will be instantiated and called as such:
 * let obj = LRUCache::new(capacity);
 * let ret_1: i32 = obj.get(key);
 * obj.put(key, value);
 */