Skip to content

Commit c940373

Browse files
committed
Fix #334: Drop MRUCache.
1 parent 8aedea5 commit c940373

File tree

5 files changed

+3
-161
lines changed

5 files changed

+3
-161
lines changed

docs/index.rst

+1-25
Original file line numberDiff line numberDiff line change
@@ -98,18 +98,6 @@ inserted into the cache.
9898
This class discards the least recently used items first to make
9999
space when necessary.
100100

101-
.. autoclass:: MRUCache(maxsize, getsizeof=None)
102-
:members: popitem
103-
104-
This class discards the most recently used items first to make
105-
space when necessary.
106-
107-
.. deprecated:: 5.4
108-
109-
`MRUCache` has been deprecated due to lack of use, to reduce
110-
maintenance. Please choose another cache implementation that suits
111-
your needs.
112-
113101
.. autoclass:: RRCache(maxsize, choice=random.choice, getsizeof=None)
114102
:members: choice, popitem
115103

@@ -174,7 +162,7 @@ inserted into the cache.
174162
value of `timer()`.
175163

176164
.. testcode::
177-
165+
178166
def my_ttu(_key, value, now):
179167
# assume value.ttu contains the item's time-to-use in seconds
180168
# note that the _key argument is ignored in this example
@@ -688,18 +676,6 @@ all the decorators in this module are thread-safe by default.
688676
saves up to `maxsize` results based on a Least Recently Used (LRU)
689677
algorithm.
690678

691-
.. decorator:: mru_cache(user_function)
692-
mru_cache(maxsize=128, typed=False)
693-
694-
Decorator that wraps a function with a memoizing callable that
695-
saves up to `maxsize` results based on a Most Recently Used (MRU)
696-
algorithm.
697-
698-
.. deprecated:: 5.4
699-
700-
The `mru_cache` decorator has been deprecated due to lack of use.
701-
Please choose a decorator based on some other algorithm.
702-
703679
.. decorator:: rr_cache(user_function)
704680
rr_cache(maxsize=128, choice=random.choice, typed=False)
705681

src/cachetools/__init__.py

-42
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,6 @@
55
"FIFOCache",
66
"LFUCache",
77
"LRUCache",
8-
"MRUCache",
98
"RRCache",
109
"TLRUCache",
1110
"TTLCache",
@@ -237,47 +236,6 @@ def __update(self, key):
237236
self.__order[key] = None
238237

239238

240-
class MRUCache(Cache):
241-
"""Most Recently Used (MRU) cache implementation."""
242-
243-
def __init__(self, maxsize, getsizeof=None):
244-
from warnings import warn
245-
246-
warn("MRUCache is deprecated", DeprecationWarning, stacklevel=2)
247-
248-
Cache.__init__(self, maxsize, getsizeof)
249-
self.__order = collections.OrderedDict()
250-
251-
def __getitem__(self, key, cache_getitem=Cache.__getitem__):
252-
value = cache_getitem(self, key)
253-
if key in self: # __missing__ may not store item
254-
self.__update(key)
255-
return value
256-
257-
def __setitem__(self, key, value, cache_setitem=Cache.__setitem__):
258-
cache_setitem(self, key, value)
259-
self.__update(key)
260-
261-
def __delitem__(self, key, cache_delitem=Cache.__delitem__):
262-
cache_delitem(self, key)
263-
del self.__order[key]
264-
265-
def popitem(self):
266-
"""Remove and return the `(key, value)` pair most recently used."""
267-
try:
268-
key = next(iter(self.__order))
269-
except StopIteration:
270-
raise KeyError("%s is empty" % type(self).__name__) from None
271-
else:
272-
return (key, self.pop(key))
273-
274-
def __update(self, key):
275-
try:
276-
self.__order.move_to_end(key, last=False)
277-
except KeyError:
278-
self.__order[key] = None
279-
280-
281239
class RRCache(Cache):
282240
"""Random Replacement (RR) cache implementation."""
283241

src/cachetools/func.py

+2-19
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
"""`functools.lru_cache` compatible memoizing function decorators."""
22

3-
__all__ = ("fifo_cache", "lfu_cache", "lru_cache", "mru_cache", "rr_cache", "ttl_cache")
3+
__all__ = ("fifo_cache", "lfu_cache", "lru_cache", "rr_cache", "ttl_cache")
44

55
import math
66
import random
@@ -11,7 +11,7 @@
1111
except ImportError: # pragma: no cover
1212
from dummy_threading import RLock
1313

14-
from . import FIFOCache, LFUCache, LRUCache, MRUCache, RRCache, TTLCache
14+
from . import FIFOCache, LFUCache, LRUCache, RRCache, TTLCache
1515
from . import cached
1616
from . import keys
1717

@@ -77,23 +77,6 @@ def lru_cache(maxsize=128, typed=False):
7777
return _cache(LRUCache(maxsize), maxsize, typed)
7878

7979

80-
def mru_cache(maxsize=128, typed=False):
81-
"""Decorator to wrap a function with a memoizing callable that saves
82-
up to `maxsize` results based on a Most Recently Used (MRU)
83-
algorithm.
84-
"""
85-
from warnings import warn
86-
87-
warn("@mru_cache is deprecated", DeprecationWarning, stacklevel=2)
88-
89-
if maxsize is None:
90-
return _cache({}, None, typed)
91-
elif callable(maxsize):
92-
return _cache(MRUCache(128), 128, typed)(maxsize)
93-
else:
94-
return _cache(MRUCache(maxsize), maxsize, typed)
95-
96-
9780
def rr_cache(maxsize=128, choice=random.choice, typed=False):
9881
"""Decorator to wrap a function with a memoizing callable that saves
9982
up to `maxsize` results based on a Random Replacement (RR)

tests/test_func.py

-12
Original file line numberDiff line numberDiff line change
@@ -111,18 +111,6 @@ class LRUDecoratorTest(unittest.TestCase, DecoratorTestMixin):
111111
DECORATOR = staticmethod(cachetools.func.lru_cache)
112112

113113

114-
class MRUDecoratorTest(unittest.TestCase, DecoratorTestMixin):
115-
def decorator(self, maxsize, **kwargs):
116-
import warnings
117-
118-
with warnings.catch_warnings(record=True) as w:
119-
warnings.simplefilter("always")
120-
d = cachetools.func.mru_cache(maxsize, **kwargs)
121-
self.assertNotEqual(len(w), 0)
122-
self.assertIs(w[0].category, DeprecationWarning)
123-
return d
124-
125-
126114
class RRDecoratorTest(unittest.TestCase, DecoratorTestMixin):
127115
DECORATOR = staticmethod(cachetools.func.rr_cache)
128116

tests/test_mru.py

-63
This file was deleted.

0 commit comments

Comments (0)