146. LRU Cache
基本思路是用一个双向链表加上一个 hash 表(key → node),节点的 val 等于相应 key 的 value。当 get 一个 key 的时候,把相应的节点从链表中删除,并且重新放到链表的最后去。当 put 一个 (key, value) 的时候,如果 key 已在 hash 表里,就先删除该 key 和相应的 node,然后创建新的 node 并放到链表的最后;如果此时链表的大小超过了 capacity,就把链表的第一个节点 pop 出去。虽然在 Python 里用 OrderedDict 实现更加快捷,但下面的解法更能体现这个思想。
class Node:
    """Doubly linked list node holding one cached (key, value) pair."""

    def __init__(self, k, v):
        self.key = k
        self.val = v
        # Neighbours are wired up later by the list operations.
        self.prev = None
        self.next = None


class LRUCache:
    """Least-recently-used cache: dict for O(1) lookup, linked list for O(1) recency.

    The list runs from least recently used (right after the ``head``
    sentinel) to most recently used (right before the ``tail`` sentinel).
    ``self.dic`` maps each key to its list node, so get, set, and
    eviction are all O(1).
    """

    def __init__(self, capacity):
        """Create an empty cache that evicts once it grows past *capacity* entries."""
        self.capacity = capacity
        self.dic = dict()
        # Sentinel nodes: real entries always sit strictly between head
        # and tail, so _add/_remove never special-case an empty list.
        self.head = Node(0, 0)
        self.tail = Node(0, 0)
        self.head.next = self.tail
        self.tail.prev = self.head

    def get(self, key):
        """Return the value for *key* and mark it most recently used; -1 if absent."""
        node = self.dic.get(key)
        if node is None:
            return -1
        # Touch: unlink and re-append so the node moves to the MRU end.
        self._remove(node)
        self._add(node)
        return node.val

    def set(self, key, value):
        """Store *key* -> *value*, evicting the least recently used entry if full."""
        if key in self.dic:
            # Overwrite: drop the stale node first; the dict slot is
            # replaced below, so only the list link needs removing here.
            self._remove(self.dic[key])
        node = Node(key, value)
        self._add(node)
        self.dic[key] = node
        if len(self.dic) > self.capacity:
            # The node right after head has been idle longest: evict it.
            victim = self.head.next
            self._remove(victim)
            del self.dic[victim.key]

    def _remove(self, node):
        """Unlink *node* from the list without touching the dict."""
        before, after = node.prev, node.next
        before.next = after
        after.prev = before

    def _add(self, node):
        """Splice *node* in just before the tail sentinel (the MRU slot)."""
        last = self.tail.prev
        last.next = node
        node.prev = last
        node.next = self.tail
        self.tail.prev = node