Implementing LRU/LFU in Python

Self-referencing implementation

Instead of defining a Node class, each node of the doubly linked list is a plain Python list laid out as [prev, next, key, value]. The sentinel (root) node stores references to itself in its prev and next slots, so an empty cache is a circular list containing only the sentinel.
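To see the self-reference in isolation (a standalone sketch, separate from the solutions below):

# a node is just a fixed-layout list: [prev, next, key, value]
root = [None, None, None, None]
root[:2] = [root, root]  # the sentinel's prev and next slots point back at itself
assert root[0] is root and root[1] is root  # an empty circular list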

146. LRU Cache

class LRUCache:
    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache = {}  # key -> node
        self.root = [None, None, None, None]  # sentinel node, laid out as [prev, next, key, value]
        self.root[:2] = [self.root, self.root]  # the sentinel points to itself, forming an empty circular list

    def _remove_node(self, node):
        prev, nxt = node[0], node[1]
        prev[1], nxt[0] = nxt, prev

    def _add_to_front(self, node):
        node[0] = self.root  # node.prev = root
        node[1] = self.root[1]  # node.next = root.next
        self.root[1][0] = node  # root.next.prev = node
        self.root[1] = node  # root.next = node

    def _move_to_front(self, node):
        self._remove_node(node)
        self._add_to_front(node)

    def _pop_tail(self):
        tail = self.root[0]  # root.prev is the least recently used node
        self._remove_node(tail)
        return tail

    def get(self, key: int) -> int:
        node = self.cache.get(key)
        if node is None:
            return -1
        self._move_to_front(node)
        return node[3]

    def put(self, key: int, value: int) -> None:
        node = self.cache.get(key)
        if node is None:
            new_node = [None, None, key, value]  # [prev, next, key, value]
            self.cache[key] = new_node
            self._add_to_front(new_node)
            if len(self.cache) > self.capacity:
                # evict the least recently used node from the tail
                tail = self._pop_tail()
                del self.cache[tail[2]]  # tail[2] is the evicted key
        else:
            # key already cached: update its value and refresh its recency
            node[3] = value
            self._move_to_front(node)
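As a quick sanity check, here is the access pattern from the LeetCode 146 example run against the class above:

cache = LRUCache(2)
cache.put(1, 1)
cache.put(2, 2)
print(cache.get(1))  # 1; key 1 becomes the most recently used
cache.put(3, 3)      # capacity exceeded: evicts key 2 (least recently used)
print(cache.get(2))  # -1
cache.put(4, 4)      # evicts key 1
print(cache.get(1))  # -1
print(cache.get(3))  # 3
print(cache.get(4))  # 4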

460. LFU Cache

class DoublyLink:
    def __init__(self):
        self.root = [None] * 5  # sentinel node, laid out as [prev, next, key, value, freq]
        self.root[:2] = [self.root, self.root]  # the sentinel points to itself, forming an empty circular list
        self.size = 0

    def append(self, node):
        # insert the node right after the sentinel (the front of the list)
        node[0], node[1] = self.root, self.root[1]
        self.root[1][0] = node
        self.root[1] = node
        self.size += 1

    def remove(self, node):
        prev, nxt = node[:2]
        prev[1], nxt[0] = nxt, prev
        self.size -= 1

    def pop(self):
        # remove and return the node at the tail (the least recently used in this bucket)
        if self.size == 0:
            return None
        node = self.root[0]
        self.remove(node)
        return node

class LFUCache:
    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache = {}     # key -> node, where node is [prev, next, key, value, freq]
        self.freq_map = {}  # freq -> DoublyLink of all nodes with that access frequency
        self.min_freq = 0

    def get(self, key: int) -> int:
        node = self.cache.get(key)
        if node is None:
            return -1
        self.update(node)
        return node[3]

    def put(self, key: int, value: int) -> None:
        if self.capacity == 0:  # a zero-capacity cache can never store anything
            return
        node = self.cache.get(key)
        if node is not None:
            node[3] = value
            self.update(node)
        else:
            node = [None, None, key, value, 1]  # [prev, next, key, value, freq]
            if len(self.cache) == self.capacity:
                self.evict()
            self.cache[key] = node
            self.min_freq = 1  # a brand-new key always starts at frequency 1
            if 1 not in self.freq_map:
                self.freq_map[1] = DoublyLink()
            self.freq_map[1].append(node)

    def update(self, node):
        # move the node from its current frequency bucket into the freq+1 bucket
        freq = node[4]
        self.freq_map[freq].remove(node)
        if self.freq_map[freq].size == 0:
            if self.min_freq == freq:
                self.min_freq += 1
            del self.freq_map[freq]
        if freq + 1 not in self.freq_map:
            self.freq_map[freq + 1] = DoublyLink()
        self.freq_map[freq + 1].append(node)
        node[4] += 1

    def evict(self):
        # drop the least recently used node from the lowest-frequency bucket
        node = self.freq_map[self.min_freq].pop()
        del self.cache[node[2]]
        if self.freq_map[self.min_freq].size == 0:
            del self.freq_map[self.min_freq]


# Your LFUCache object will be instantiated and called as such:
# obj = LFUCache(capacity)
# param_1 = obj.get(key)
# obj.put(key,value)
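As a quick trace of both eviction rules (least-frequent first, ties broken by least-recently-used), here is the LeetCode 460 example run against the class above:

cache = LFUCache(2)
cache.put(1, 1)
cache.put(2, 2)
print(cache.get(1))  # 1; key 1 now has frequency 2
cache.put(3, 3)      # evicts key 2: the only key with the minimum frequency (1)
print(cache.get(2))  # -1
print(cache.get(3))  # 3; key 3 now has frequency 2
cache.put(4, 4)      # keys 1 and 3 both have frequency 2; key 1 is least recently used, so it is evicted
print(cache.get(1))  # -1
print(cache.get(3))  # 3
print(cache.get(4))  # 4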