|
1 |
| -from __future__ import annotations |
2 |
| - |
3 |
| -from collections.abc import Callable |
4 |
| -from functools import wraps |
5 |
| -from typing import Any, ParamSpec, TypeVar, cast |
6 |
| - |
7 |
| -P = ParamSpec("P") |
8 |
| -R = TypeVar("R") |
9 |
| - |
10 |
| - |
11 |
class DoubleLinkedListNode:
    """A single node of the doubly linked list backing the LRU cache.

    Holds a key/value pair plus ``prev``/``next`` links.  Sentinel nodes
    are created with ``None`` for both key and value.
    """

    __slots__ = ("key", "next", "prev", "val")

    def __init__(self, key: Any, val: Any) -> None:
        self.key = key
        self.val = val
        # Links are populated by DoubleLinkedList.add(); a node with either
        # link set to None is considered "not in a list".
        self.next: DoubleLinkedListNode | None = None
        self.prev: DoubleLinkedListNode | None = None

    def __repr__(self) -> str:
        return f"Node(key={self.key}, val={self.val})"
25 |
| - |
26 |
class DoubleLinkedList:
    """Doubly linked list with head/rear sentinels for the LRU cache.

    The most recently used node sits just before ``rear``; the least
    recently used node is ``head.next``.
    """

    def __init__(self) -> None:
        # Sentinel nodes remove None checks at both ends of the list.
        self.head = DoubleLinkedListNode(None, None)
        self.rear = DoubleLinkedListNode(None, None)
        self.head.next = self.rear
        self.rear.prev = self.head

    def __repr__(self) -> str:
        nodes = []
        current: DoubleLinkedListNode | None = self.head
        while current:
            nodes.append(repr(current))
            current = current.next
        return f"LinkedList({nodes})"

    def add(self, node: DoubleLinkedListNode) -> None:
        """Insert ``node`` immediately before the rear sentinel (MRU slot)."""
        prev = self.rear.prev
        if prev is None:
            # Defensive: sentinels are always linked, so this should not occur.
            return

        # Splice node between prev and rear.
        prev.next = node
        node.prev = prev
        self.rear.prev = node
        node.next = self.rear

    def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode | None:
        """Unlink ``node`` and return it, or ``None`` if it is not linked."""
        if node.prev is None or node.next is None:
            return None

        # Bypass node in both directions.
        node.prev.next = node.next
        node.next.prev = node.prev

        # Clear links so a second remove() is a no-op returning None.
        node.prev = None
        node.next = None
        return node
70 |
| - |
71 |
| - |
72 |
class LRUCache:
    """Least-recently-used cache backed by a dict plus a doubly linked list.

    The dict gives O(1) key lookup; the list keeps recency order so the
    LRU entry (``list.head.next``) can be evicted in O(1).
    """

    def __init__(self, capacity: int) -> None:
        self.list = DoubleLinkedList()
        self.capacity = capacity
        self.size = 0
        self.hits = 0
        self.misses = 0
        self.cache: dict[Any, DoubleLinkedListNode] = {}

    def __repr__(self) -> str:
        return (
            f"Cache(hits={self.hits}, misses={self.misses}, "
            f"cap={self.capacity}, size={self.size})"
        )

    def get(self, key: Any) -> Any | None:
        """Return the cached value for ``key`` (marking it MRU), else None."""
        if key in self.cache:
            self.hits += 1
            node = self.cache[key]
            # Move to the MRU position: unlink, then re-insert before rear.
            if self.list.remove(node):
                self.list.add(node)
            return node.val
        self.misses += 1
        return None
| 1 | +from future import annotations |
| 2 | + |
| 3 | +from collections.abc import Callable from functools import wraps from typing import Any, ParamSpec, TypeVar, cast |
| 4 | + |
| 5 | +P = ParamSpec("P") R = TypeVar("R") |
| 6 | + |
class DoubleLinkedListNode:
    """A single node of the doubly linked list backing the LRU cache.

    Holds a key/value pair plus ``prev``/``next`` links.  Sentinel nodes
    are created with ``None`` for both key and value.
    """

    __slots__ = ("key", "next", "prev", "val")

    def __init__(self, key: Any, val: Any) -> None:
        self.key = key
        self.val = val
        # Links are populated by DoubleLinkedList.add(); a node with either
        # link set to None is considered "not in a list".
        self.next: DoubleLinkedListNode | None = None
        self.prev: DoubleLinkedListNode | None = None

    def __repr__(self) -> str:
        return f"Node(key={self.key}, val={self.val})"
| 19 | + |
class DoubleLinkedList:
    """Doubly linked list with head/rear sentinels for the LRU cache.

    The most recently used node sits just before ``rear``; the least
    recently used node is ``head.next``.
    """

    def __init__(self) -> None:
        # Sentinel nodes remove None checks at both ends of the list.
        self.head = DoubleLinkedListNode(None, None)
        self.rear = DoubleLinkedListNode(None, None)
        self.head.next = self.rear
        self.rear.prev = self.head

    def __repr__(self) -> str:
        nodes = []
        current: DoubleLinkedListNode | None = self.head
        while current:
            nodes.append(repr(current))
            current = current.next
        return f"LinkedList({nodes})"

    def add(self, node: DoubleLinkedListNode) -> None:
        """Insert ``node`` immediately before the rear sentinel (MRU slot)."""
        prev = self.rear.prev
        if prev is None:
            # Defensive: sentinels are always linked, so this should not occur.
            return

        # Splice node between prev and rear.
        prev.next = node
        node.prev = prev
        self.rear.prev = node
        node.next = self.rear

    def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode | None:
        """Unlink ``node`` and return it, or ``None`` if it is not linked."""
        if node.prev is None or node.next is None:
            return None

        # Bypass node in both directions.
        node.prev.next = node.next
        node.next.prev = node.prev

        # Clear links so a second remove() is a no-op returning None.
        node.prev = None
        node.next = None
        return node
| 63 | + |
class LRUCache:
    """Least-recently-used cache backed by a dict plus a doubly linked list.

    The dict gives O(1) key lookup; the list keeps recency order so the
    LRU entry (``list.head.next``) can be evicted in O(1).
    """

    def __init__(self, capacity: int) -> None:
        self.list = DoubleLinkedList()
        self.capacity = capacity
        self.size = 0
        self.hits = 0
        self.misses = 0
        self.cache: dict[Any, DoubleLinkedListNode] = {}

    def __repr__(self) -> str:
        return (
            f"Cache(hits={self.hits}, misses={self.misses}, "
            f"cap={self.capacity}, size={self.size})"
        )

    def get(self, key: Any) -> Any | None:
        """Return the cached value for ``key`` (marking it MRU), else None."""
        if key in self.cache:
            self.hits += 1
            node = self.cache[key]
            # Move to the MRU position: unlink, then re-insert before rear.
            if self.list.remove(node):
                self.list.add(node)
            return node.val
        self.misses += 1
        return None

    def put(self, key: Any, value: Any) -> None:
        """Insert or update ``key`` -> ``value``, evicting the LRU entry if full."""
        if key in self.cache:
            # Update existing node in place and mark it MRU.
            node = self.cache[key]
            if self.list.remove(node):
                node.val = value
                self.list.add(node)
            return

        # Evict the least recently used entry (head.next) when at capacity.
        if self.size >= self.capacity:
            first_node = self.list.head.next
            if (
                first_node is not None
                and first_node.key is not None
                and first_node is not self.list.rear
                and self.list.remove(first_node)
            ):
                del self.cache[first_node.key]
                self.size -= 1

        # Insert the new node at the MRU position.
        new_node = DoubleLinkedListNode(key, value)
        self.cache[key] = new_node
        self.list.add(new_node)
        self.size += 1

    def cache_info(self) -> dict[str, Any]:
        """Return hit/miss/capacity/size statistics as a dict."""
        return {
            "hits": self.hits,
            "misses": self.misses,
            "capacity": self.capacity,
            "size": self.size,
        }
| 128 | + |
def lru_cache(maxsize: int = 128) -> Callable[[Callable[P, R]], Callable[P, R]]:
    """Decorator caching a function's results in an ``LRUCache``.

    NOTE: the miss check is ``is not None``, so a cached result of ``None``
    is indistinguishable from a miss and such calls are recomputed.
    """

    def decorator(func: Callable[P, R]) -> Callable[P, R]:
        cache = LRUCache(maxsize)

        @wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            # Normalized key: kwargs sorted so keyword order does not matter.
            key = (args, tuple(sorted(kwargs.items())))

            if (cached := cache.get(key)) is not None:
                return cast(R, cached)

            # Miss: compute, store, return.
            result = func(*args, **kwargs)
            cache.put(key, result)
            return result

        # Expose statistics, mirroring functools.lru_cache's cache_info().
        wrapper.cache_info = cache.cache_info  # type: ignore[attr-defined]
        return wrapper

    return decorator
| 151 | + |
if __name__ == "__main__":
    import doctest

    doctest.testmod()
99 | 153 |
|
100 |
| - def put(self, key: Any, value: Any) -> None: |
101 |
| - """Set value for key""" |
102 |
| - if key in self.cache: |
103 |
| - # Update existing node |
104 |
| - node = self.cache[key] |
105 |
| - if self.list.remove(node): |
106 |
| - node.val = value |
107 |
| - self.list.add(node) |
108 |
| - return |
109 |
| - |
110 |
| - # Evict LRU item if at capacity |
111 |
| - if self.size >= self.capacity: |
112 |
| - first_node = self.list.head.next |
113 |
| - if ( |
114 |
| - first_node is not None |
115 |
| - and first_node.key is not None |
116 |
| - and first_node != self.list.rear |
117 |
| - and self.list.remove(first_node) |
118 |
| - ): |
119 |
| - del self.cache[first_node.key] |
120 |
| - self.size -= 1 |
121 |
| - |
122 |
| - # Add new node |
123 |
| - new_node = DoubleLinkedListNode(key, value) |
124 |
| - self.cache[key] = new_node |
125 |
| - self.list.add(new_node) |
126 |
| - self.size += 1 |
127 |
| - |
128 |
| - def cache_info(self) -> dict[str, Any]: |
129 |
| - """Get cache statistics""" |
130 |
| - return { |
131 |
| - "hits": self.hits, |
132 |
| - "misses": self.misses, |
133 |
| - "capacity": self.capacity, |
134 |
| - "size": self.size, |
135 |
| - } |
136 |
| - |
137 |
| - |
138 |
def lru_cache(maxsize: int = 128) -> Callable[[Callable[P, R]], Callable[P, R]]:
    """Decorator caching a function's results in an ``LRUCache``.

    NOTE: the miss check is ``is not None``, so a cached result of ``None``
    is indistinguishable from a miss and such calls are recomputed.
    """

    def decorator(func: Callable[P, R]) -> Callable[P, R]:
        cache = LRUCache(maxsize)

        @wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            # Normalized key: kwargs sorted so keyword order does not matter.
            key = (args, tuple(sorted(kwargs.items())))

            if (cached := cache.get(key)) is not None:
                return cast(R, cached)

            # Miss: compute, store, return.
            result = func(*args, **kwargs)
            cache.put(key, result)
            return result

        # Expose statistics, mirroring functools.lru_cache's cache_info().
        wrapper.cache_info = cache.cache_info  # type: ignore[attr-defined]
        return wrapper

    return decorator
163 |
| - |
164 |
| - |
165 |
if __name__ == "__main__":
    import doctest

    doctest.testmod()
0 commit comments