• Implementing an LRUCache in Python


    1. I took a look at how lru_cache is implemented; below is my own version.

    # lru.py
    import weakref
    
    
    class LinkNode:
        __slots__ = ["value", "prev", "next", "__weakref__"]
        def __init__(self, value=None):
            self.value = value
            self.prev = None
            self.next = None
    
    
    class LRUCache:
        def __init__(self, max_size=1024):
            # values are LinkNode objects that are only strongly referenced by
            # the linked list, so evicted nodes disappear from the dict once
            # they are garbage collected
            self._dict = weakref.WeakValueDictionary()
            # head/tail are sentinels: head.next is the most recently used
            # node, tail.prev the least recently used one
            self.head = LinkNode()
            self.tail = LinkNode()
            self.head.next = self.tail
            self.tail.prev = self.head
            self.max_size = max_size
            self.node_cls = LinkNode
    
        @property
        def size(self):
            return len(self._dict)
    
        def move_node_to_head_next(self, node, is_old_node=True):  # pylint:disable=inconsistent-return-statements
            # promote node to the MRU position right after head; an existing
            # node is first unlinked from its current position
            if node.prev is self.head:
                return
            if is_old_node:
                node.prev.next = node.next
                node.next.prev = node.prev
            self.head.next.prev = node
            node.next = self.head.next
            self.head.next = node
            node.prev = self.head
    
        def get(self, key):
            node = self._dict.get(key)
            if node is None:
                return object  # `object` marks a missing key, so that None can still be cached as a value
            self.move_node_to_head_next(node)
            return node.value
    
        def set(self, key, value):
            node = self._dict.get(key)
            if node is None:
                node = self.node_cls(value)
                is_old_node = False
            else:
                node.value = value
                is_old_node = True
            # only evict when a brand-new key would push the cache over capacity
            if not is_old_node and self.size >= self.max_size:
                self.delete_tail_prev_node()
            self.move_node_to_head_next(node, is_old_node)
            self._dict[key] = node
    
        def delete_tail_prev_node(self):
            # unlink the least recently used node; once its last strong
            # reference is gone, the WeakValueDictionary entry is removed
            # automatically by garbage collection
            node = self.tail.prev
            node.prev.next = self.tail
            self.tail.prev = node.prev
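
    A minimal usage sketch of the class above; prompt cleanup of evicted entries assumes CPython's reference counting, since the WeakValueDictionary only drops an entry once the unlinked node has been garbage collected:

    # usage sketch, assuming lru.py is importable
    from lru import LRUCache

    cache = LRUCache(max_size=2)
    cache.set("a", 1)
    cache.set("b", None)                    # None is a perfectly valid value
    assert cache.get("b") is None           # ...and is returned as-is
    assert cache.get("missing") is object   # sentinel meaning "key not present"
    cache.set("c", 3)                       # at capacity: evicts the LRU key "a"
    assert cache.get("a") is object
    assert cache.size == 2                  # immediate on CPython (refcounting)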
    

    2. The tests are as follows

    # test_lru.py
    from unittest import TestCase
    
    from lru import LRUCache
    
    
    class TestLRUCache(TestCase):
    
        def test_get(self):
            lru_cache = LRUCache(2)
            for i in range(3):
                lru_cache.set(i, i)
            assert lru_cache.size == 2
            assert lru_cache.head.next.value == 2
            assert lru_cache.tail.prev.value == 1
            assert lru_cache.get(0) is object
            assert lru_cache.get(1) == 1
    
        
        def test_set(self):
            lru_cache = LRUCache(2)
            lru_cache.set(1, 1)
            assert lru_cache.head.next.value == 1
            assert lru_cache.tail.prev.value == 1
            lru_cache.set(2, 2)
            assert lru_cache.head.next.value == 2
            assert lru_cache.tail.prev.value == 1
            lru_cache.set(3, 3)
            assert lru_cache.head.next.value == 3
            assert lru_cache.tail.prev.value == 2
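
    A further test that could be added to TestLRUCache to pin down the None-value handling (get returns the object sentinel only for missing keys, never for a cached None):

        def test_none_value(self):
            lru_cache = LRUCache(2)
            lru_cache.set("k", None)
            # a cached None is returned as-is...
            assert lru_cache.get("k") is None
            # ...while a missing key yields the `object` sentinel
            assert lru_cache.get("absent") is object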
    

    3. References

    1. Caching in Python Using the LRU Cache Strategy
    2. Data structure analysis: Python hash map + doubly linked list implementation (heavily commented)
    3. CPython functools source code (see the short lru_cache example below)
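
    For comparison, the standard library already ships an LRU cache as a decorator; a minimal example of functools.lru_cache, whose source reference 3 points to:

    from functools import lru_cache

    @lru_cache(maxsize=1024)
    def fib(n):
        # results are cached per argument, so the recursion runs in linear time
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    fib(30)
    print(fib.cache_info())  # hit/miss/size statistics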
  • Original post: https://www.cnblogs.com/lyg-blog/p/15027306.html