Design and implement a data structure for Least Recently Used (LRU) cache. It should support the following operations: get and put.
get(key) - Get the value (will always be positive) of the key if the key exists in the cache, otherwise return -1.
put(key, value) - Set or insert the value if the key is not already present. When the cache reaches its capacity, it should invalidate the least recently used item before inserting a new item.
Follow up:
Could you do both operations in O(1) time complexity?
Example:
LRUCache cache = new LRUCache( 2 /* capacity */ );
cache.put(1, 1);
cache.put(2, 2);
cache.get(1); // returns 1
cache.put(3, 3); // evicts key 2
cache.get(2); // returns -1 (not found)
cache.put(4, 4); // evicts key 1
cache.get(1); // returns -1 (not found)
cache.get(3); // returns 3
cache.get(4); // returns 4
My code:
class DLN():
    """Doubly linked list node holding one cache (key, value) pair."""

    def __init__(self, key, val):
        self.key, self.val = key, val
        # Neighbor links; wired up later by the list that owns the node.
        self.prev = self.next = None
class LRUCache(object):
    """LRU cache with O(1) get and put.

    self.memo maps each key to its DLN node; the nodes form a doubly
    linked list ordered by recency between two sentinel nodes:
    head.next is the most recently used entry, tail.prev the least
    recently used (the eviction candidate).

    Fix vs. original: a zero-capacity cache no longer raises KeyError
    on put (it would previously try to evict the tail sentinel, whose
    key is not in the dict); it now simply stores nothing.
    """

    def __init__(self, capacity):
        self.N = capacity
        # Sentinel head/tail avoid None checks when linking/unlinking.
        self.head = DLN(float('Inf'), float('Inf'))
        self.tail = DLN(float('Inf'), float('Inf'))
        self.head.next = self.tail
        self.tail.prev = self.head
        self.memo = {}

    def get(self, key):
        """Return the value for key (or -1) and mark it most recently used."""
        if key not in self.memo:
            return -1
        node = self.memo[key]
        # Refresh: move the node to the MRU position (head.next).
        self.remove(node)
        self.add(node)
        return node.val

    def put(self, key, value):
        """Insert or update key; evict the LRU entry when at capacity."""
        if key not in self.memo:
            node = DLN(key, value)
            # Evict only when inserting a brand-new key into a full,
            # non-empty cache (N > 0 guards the sentinel from removal).
            if len(self.memo) >= self.N > 0:
                self.remove(self.tail.prev)
        else:
            node = self.memo[key]
            node.val = value
            self.remove(node)
        if self.N > 0:  # a zero-capacity cache stores nothing
            self.add(node)

    def remove(self, node):
        """Unlink node from the recency list and drop it from the dict."""
        node.prev.next = node.next
        node.next.prev = node.prev
        del self.memo[node.key]

    def add(self, node):
        """Link node in at the MRU position (immediately after head)."""
        postnode = self.head.next
        self.head.next = node
        node.prev = self.head
        node.next = postnode
        postnode.prev = node
        self.memo[node.key] = node
Solution: https://leetcode.com/problems/lru-cache/