146. LRU Cache
# Medium
Solution 1 (a very naive approach):
hashMap = {key: [value, level]}, where a low level means least recently used.
Update the levels on every get/put operation. Complicated, and slow: each get/put scans the whole dictionary.
class LRUCache:

    def __init__(self, capacity: int):
        self.cap = capacity
        # {key: [value, level]}; a higher level means used more recently
        self.cache = {}

    def get(self, key: int) -> int:
        if key in self.cache:
            # update levels: shift down every key that was more recent
            for k, v in self.cache.items():
                if v[1] > self.cache[key][1]:
                    v[1] -= 1
            self.cache[key][1] = len(self.cache)  # cache may not be full
            return self.cache[key][0]
        else:
            return -1

    def put(self, key: int, value: int) -> None:
        if key in self.cache:
            # update levels: shift down every key that was more recent
            for k, v in self.cache.items():
                if v[1] > self.cache[key][1]:
                    v[1] -= 1
            self.cache[key] = [value, len(self.cache)]  # cache may not be full
        else:
            n = len(self.cache)
            if n == self.cap:  # cache storage is full
                # remove the key with the lowest level (least recently used)
                for k, v in self.cache.items():
                    if v[1] == 1:
                        self.cache.pop(k)
                        break
                # update levels
                for k in self.cache:
                    self.cache[k][1] -= 1
                # add the new key at the highest level
                self.cache[key] = [value, self.cap]
            else:  # cache still has space
                self.cache[key] = [value, n + 1]
# Your LRUCache object will be instantiated and called as such:
# obj = LRUCache(capacity)
# param_1 = obj.get(key)
# obj.put(key,value)
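Not in the original notes: a short trace, assuming the Solution 1 class above, showing how the levels move and which key gets evicted.

```python
cache = LRUCache(2)
cache.put(1, 1)        # cache: {1: [1, 1]}
cache.put(2, 2)        # cache: {1: [1, 1], 2: [2, 2]}
cache.get(1)           # returns 1; levels shift: {1: [1, 2], 2: [2, 1]}
cache.put(3, 3)        # full: key 2 holds level 1, so it is evicted
print(cache.get(2))    # -1
print(cache.get(3))    # 3
```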
Solution 2 (a queue manages the priority; faster than Solution 1, but still not fast enough):
hashMap = {key: value}, and a queue (a plain list) represents the cache space, holding the keys in usage order: the front of the queue is the least recently used key. Adjust a key's position on every get/put. Simply put, when a key is accessed, do queue.remove(key) followed by queue.append(key); to evict the least recently used key, use queue.pop(0).
class LRUCache:

    def __init__(self, capacity: int):
        self.cap = capacity
        # {key: value}; the queue keeps keys ordered from least to most recently used
        self.cache = {}
        self.queue = []

    def get(self, key: int) -> int:
        if key in self.cache:
            # update priority in queue
            self.queue.remove(key)
            self.queue.append(key)
            return self.cache[key]
        else:
            return -1

    def put(self, key: int, value: int) -> None:
        if key in self.cache:
            self.cache[key] = value
            # update priority in queue
            self.queue.remove(key)
            self.queue.append(key)
        else:
            self.cache[key] = value
            n = len(self.queue)
            if n == self.cap:  # cache is full
                k = self.queue.pop(0)  # evict the least recently used key
                self.cache.pop(k)
            self.queue.append(key)
# Your LRUCache object will be instantiated and called as such:
# obj = LRUCache(capacity)
# param_1 = obj.get(key)
# obj.put(key,value)
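Again not in the original notes: the same call sequence run against the Solution 2 class above, this time watching the queue; the front of the queue is always the least recently used key.

```python
cache = LRUCache(2)
cache.put(1, 1)        # queue: [1]
cache.put(2, 2)        # queue: [1, 2]
cache.get(1)           # returns 1; queue becomes [2, 1]
cache.put(3, 3)        # full: queue.pop(0) evicts key 2; queue: [1, 3]
print(cache.get(2))    # -1, key 2 is gone
print(cache.get(1))    # 1; queue becomes [3, 1]
print(cache.queue)     # [3, 1]
```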
To summarize: the step that updates the least-recently-used order can still be made faster; a linked list (combined with the hash map) is the best approach, but I did not try it. A rough sketch follows.
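Since the linked-list version was not attempted here, the code below is only a sketch of the standard hash map + doubly linked list design, written for these notes rather than taken from them; the Node class and the helper names _remove and _add_front are my own. Each key maps to a list node, the list keeps nodes in usage order between two dummy sentinels, and splicing a node in or out takes O(1), so both get and put become O(1).

```python
class Node:
    def __init__(self, key=0, value=0):
        self.key = key
        self.value = value
        self.prev = None
        self.next = None


class LRUCache:
    def __init__(self, capacity: int):
        self.cap = capacity
        self.nodes = {}        # key -> Node, O(1) lookup
        self.head = Node()     # dummy head: most recently used side
        self.tail = Node()     # dummy tail: least recently used side
        self.head.next = self.tail
        self.tail.prev = self.head

    def _remove(self, node):
        # unlink node from the list in O(1)
        node.prev.next = node.next
        node.next.prev = node.prev

    def _add_front(self, node):
        # insert node right after the dummy head (most recently used position)
        node.prev = self.head
        node.next = self.head.next
        self.head.next.prev = node
        self.head.next = node

    def get(self, key: int) -> int:
        if key not in self.nodes:
            return -1
        node = self.nodes[key]
        self._remove(node)      # refresh: move the node to the front
        self._add_front(node)
        return node.value

    def put(self, key: int, value: int) -> None:
        if key in self.nodes:
            # key already cached: drop the old node, a fresh one goes to the front
            self._remove(self.nodes[key])
        elif len(self.nodes) == self.cap:
            # cache full: evict the node just before the dummy tail (the LRU key)
            lru = self.tail.prev
            self._remove(lru)
            del self.nodes[lru.key]
        node = Node(key, value)
        self.nodes[key] = node
        self._add_front(node)
```

In practice, collections.OrderedDict gives the same O(1) behavior with far less code: move_to_end(key) refreshes a key and popitem(last=False) evicts the oldest one.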
Python 3 notes (see the short demo after this list):
dictionary:
- for key in dic: iterate over the keys
- for key, value in dic.items(): iterate over key/value pairs
list:
- list.remove(value): removes the first occurrence of value in the list (no return value)
- list.pop(index): returns list[index] and removes that element (the return value can be ignored)
- list.append(value): adds value at the end of the list
- list.extend(list2): flat-adds the elements of list2 to the list
- list.append(subList[:]): appends a copy of the whole sublist, giving a nested list
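A tiny demo of the operations above; the concrete values are arbitrary, just for illustration.

```python
dic = {'a': 1, 'b': 2}
for key in dic:                   # iterates over the keys: 'a', 'b'
    print(key)
for key, value in dic.items():    # iterates over key/value pairs
    print(key, value)

lst = [10, 20, 30, 20]
lst.remove(20)         # drops the first 20 -> [10, 30, 20]; returns None
x = lst.pop(1)         # returns lst[1] (30) and removes it -> [10, 20]
lst.append(40)         # [10, 20, 40]
lst.extend([50, 60])   # flat add -> [10, 20, 40, 50, 60]

nested = []
sub = [1, 2]
nested.append(sub[:])  # appends a copy of the whole sublist -> [[1, 2]]
```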