1. Introduction to LRU
LRU (Least Recently Used) is an eviction policy that decides which entries to discard based on their access history. Its core idea: if a piece of data was accessed recently, it is likely to be accessed again soon, so when the cache is full, the entry that has gone unused for the longest time is evicted first. For example, in a cache of capacity 2 holding keys 1 and 2, inserting key 3 evicts whichever of those two keys was touched least recently.
See LeetCode problem 146: LRU Cache.
2. Application in iOS
Implement a memory cache class LRUCache that uses an LRU eviction policy and enforces a capacity limit, capacity. It should expose the following interface:
- (id)initWithCapacity:(NSInteger)capacity;
- (void)setItem:(id)item forKey:(NSString *)key;
- (id)getItemForKey:(NSString *)key;
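For reference, imported into Swift these Objective-C methods would correspond roughly to the signatures below. This is only an illustrative sketch; the protocol name MemoryCaching is an assumption, not part of the original requirement.
protocol MemoryCaching {
    // Mirrors -initWithCapacity:
    init(capacity: Int)
    // Mirrors -setItem:forKey:
    func setItem(_ item: Any, forKey key: String)
    // Mirrors -getItemForKey:
    func getItem(forKey key: String) -> Any?
}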
Analysis:
Store the nodes in a hash map for O(1) lookup by key, and link them in a doubly linked list ordered by how recently each key was used. Because each node holds pointers to both of its neighbors, a node can be unlinked and re-inserted at the head in O(1) time, without traversing the list.
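To see why the doubly linked list gives O(1) removal, note that a node can reach both neighbors directly. A minimal illustration, using a throwaway Node type rather than the ListNode defined later:
final class Node {
    var value: Int
    var prev: Node?
    var next: Node?
    init(_ value: Int) { self.value = value }
}

// Unlinking a node only rewires its two neighbors: O(1), no traversal.
func unlink(_ node: Node) {
    node.prev?.next = node.next
    node.next?.prev = node.prev
    node.prev = nil
    node.next = nil
}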
Template code:
class LRUCache {

    init(_ capacity: Int) {
    }

    func get(_ key: Int) -> Int {
    }

    func put(_ key: Int, _ value: Int) {
    }
}
3. Implementation
1. First, the linked-list node:
class ListNode {
    var key: Int
    var value: Int
    var next: ListNode?
    // `prev` is weak so that adjacent nodes do not retain each other
    // and leak when the cache is deallocated.
    weak var prev: ListNode?

    init(key: Int, value: Int) {
        self.key = key
        self.value = value
    }
}
2. The LRUCache class:
class LRUCache {
    // Hash map for O(1) lookup of nodes by key
    private var cache = [Int: ListNode]()
    // Maximum capacity
    private var max_size = 0
    // Current number of entries
    private var cur_size = 0
    // Head of the list: the most recently used entry
    private var head: ListNode?
    // Tail of the list: the least recently used entry, evicted first
    private var tail: ListNode?

    init(_ capacity: Int) {
        max_size = capacity
    }

    public func get(_ key: Int) -> Int {
        if let node = cache[key] {
            // Accessing a key makes it the most recently used
            moveToHead(node: node)
            return node.value
        }
        return -1
    }

    public func put(_ key: Int, _ value: Int) {
        if let node = cache[key] {
            // Key already cached: update the value and refresh its recency
            node.value = value
            moveToHead(node: node)
        } else {
            let node = ListNode(key: key, value: value)
            addNode(node: node)
            cache[key] = node
            cur_size += 1
            // Over capacity: evict the least recently used entry
            if cur_size > max_size {
                removeTail()
                cur_size -= 1
            }
        }
    }

    /// Insert a new node at the head of the list
    private func addNode(node: ListNode) {
        if self.head == nil {
            self.head = node
            self.tail = node
        } else {
            let temp = self.head!
            self.head = node
            self.head?.next = temp
            temp.prev = self.head
        }
    }

    /// Move an existing node to the head of the list
    private func moveToHead(node: ListNode) {
        if node === self.head {
            return
        }
        // Unlink the node from its current position
        let prev = node.prev
        let next = node.next
        prev?.next = next
        if next != nil {
            next!.prev = prev
        } else {
            // The node was the tail, so the tail moves back one step
            self.tail = prev
        }
        // Re-insert the node at the head
        let origin = self.head
        self.head = node
        self.head?.prev = nil
        self.head?.next = origin
        origin?.prev = self.head
    }

    /// Remove the tail node (the least recently used entry)
    @discardableResult
    private func removeTail() -> ListNode? {
        if let tail = self.tail {
            cache.removeValue(forKey: tail.key)
            self.tail = tail.prev
            self.tail?.next = nil
            if self.tail == nil {
                // The list is now empty; clear head as well
                self.head = nil
            }
            return tail
        }
        return nil
    }
}
4. Test cases
// Test 1: capacity 2
let cache1 = LRUCache(2)
cache1.put(1, 1)
cache1.put(2, 2)
print(cache1.get(1))   // 1 — key 1 becomes the most recently used
cache1.put(3, 3)       // exceeds capacity, evicts key 2 (least recently used)
print(cache1.get(2))   // -1
// ["LRUCache","put","put","put","put","get","get","get","get","put","get","get","get","get","get"]
// [[3],[1,1],[2,2],[3,3],[4,4],[4],[3],[2],[1],[5,5],[1],[2],[3],[4],[5]]
let cache = LRUCache(3)
cache.put(1, 1)
cache.put(2, 2)
cache.put(3, 3)
cache.put(4, 4)
print(cache.get(4))
print(cache.get(3))
print(cache.get(2))
print(cache.get(1))
cache.put(5, 5)
print(cache.get(1))
print(cache.get(2))
print(cache.get(3))
print(cache.get(4))
print(cache.get(5))
// ["LRUCache","put","get","put","get","get"]
// [[1],[2,1],[2],[3,2],[2],[3]]
let cache = LRUCache(1)
cache.put(2, 1)
print(cache.get(2))
cache.put(3, 2)
print(cache.get(2))
print(cache.get(3))
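Back to the Memory Cache interface from section 2: the LeetCode-style class above is fixed to Int keys and values, but the same dictionary-plus-doubly-linked-list technique generalizes directly. Below is a minimal sketch of a generic variant; the class name LRUMemoryCache and the Swift method names are assumptions chosen to mirror the Objective-C interface, not the original author's code.
final class LRUMemoryCache<Key: Hashable, Value> {
    private final class Node {
        let key: Key
        var value: Value
        var next: Node?
        weak var prev: Node?   // weak to avoid retain cycles between neighbors
        init(key: Key, value: Value) {
            self.key = key
            self.value = value
        }
    }

    private var map = [Key: Node]()
    private var head: Node?   // most recently used
    private var tail: Node?   // least recently used
    private let capacity: Int

    init(capacity: Int) {
        self.capacity = max(1, capacity)
    }

    func setItem(_ item: Value, forKey key: Key) {
        if let node = map[key] {
            node.value = item
            moveToHead(node)
        } else {
            let node = Node(key: key, value: item)
            addToHead(node)
            map[key] = node
            if map.count > capacity {
                removeTail()
            }
        }
    }

    func getItem(forKey key: Key) -> Value? {
        guard let node = map[key] else { return nil }
        moveToHead(node)
        return node.value
    }

    private func addToHead(_ node: Node) {
        node.next = head
        head?.prev = node
        head = node
        if tail == nil { tail = node }
    }

    private func moveToHead(_ node: Node) {
        guard node !== head else { return }
        // Unlink the node, fixing the tail if necessary
        node.prev?.next = node.next
        node.next?.prev = node.prev
        if node === tail { tail = node.prev }
        // Re-insert it at the head
        node.prev = nil
        node.next = head
        head?.prev = node
        head = node
    }

    private func removeTail() {
        guard let last = tail else { return }
        map.removeValue(forKey: last.key)
        tail = last.prev
        tail?.next = nil
        if tail == nil { head = nil }
    }
}
Example usage of the sketch (the keys and values here are arbitrary):
let memoryCache = LRUMemoryCache<String, String>(capacity: 2)
memoryCache.setItem("A", forKey: "a")
memoryCache.setItem("B", forKey: "b")
_ = memoryCache.getItem(forKey: "a")            // "a" becomes the most recently used
memoryCache.setItem("C", forKey: "c")           // evicts "b"
print(memoryCache.getItem(forKey: "b") as Any)  // nil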