cacheable
The library is considered production ready.
from pykit import cacheable
# create a `LRU`, capacity:10 timeout:60
c = cacheable.LRU(10, 60)
# set value like the `dict`
c['key'] = 'val'
# get value like the `dict`
# if the item has timed out, delete it and raise `KeyError`
# if item not exist, raise `KeyError`
try:
val = c['key']
except KeyError:
print('key error')
import time
from pykit import cacheable
cache_data = {
'key1': 'val_1',
'key2': 'val_2',
}
# define the function with a decorator
@cacheable.cache('cache_name', capacity=100, timeout=60,
is_deepcopy=False, mutex_update=False)
def get_data(param):
return cache_data.get(param, '')
# call `get_data`, if item has not been cached, cache the return value
data = get_data('key1')
# call `get_data` use the same param, data will be got from cache
time.sleep(30)
data = get_data('key1')
# if item timeout, when call `get_data`, cache again
time.sleep(70)
data = get_data('key1')
# define a method with a decorator
class MethodCache(object):
@cacheable.cache('method_cache_name', capacity=100, timeout=60,
is_deepcopy=False, mutex_update=False)
def get_data(self, param):
return cache_data.get(param, '')
mm = MethodCache()
data = mm.get_data('key2')
Cache data which is accessed frequently.
syntax:
cacheable.LRU(capacity, timeout=60)
Least Recently Used Cache.
arguments:
-
capacity
: capacity of `LRU`.
When the size of `LRU`
is greater than `capacity`
* 1.5, old items are cleaned until the size is equal to `capacity`.
-
timeout
: max cache time of item, unit is second, default is 60
syntax:
cacheable.Cacheable(capacity=1024 * 4, timeout=60, is_deepcopy=True, is_pack=False, mutex_update=False)
Create an `LRU`
object; all items will be cached in it.
arguments:
-
capacity
: used to create the `LRU`
object, default is 1024 * 4 -
timeout
: used to create the `LRU`
object, default is 60, unit is second -
is_deepcopy
: `cacheable.cache`
returns a decorator that uses `is_deepcopy`
to decide whether to return a deepcopy or a reference of the cached item. -
True
: return a deepcopy of the cached item -
False
: return a reference of the cached item
-
-
is_pack
: `cacheable.cache`
returns a decorator that uses `is_pack`
to decide whether to return the `msgpack.pack`-ed
form of the item. -
True
: return `msgpack.pack`
of the cached item -
False
: return a reference of the cached item
-
-
mutex_update
: allow only one thread at a time to update the cache item. Default is `False`.
-
True
: update with a mutex held -
False
: update concurrently
-
LRU
contain __getitem__
and __setitem__
,
so can get value and set value like dict
-
LRU[key]
: return the item of `LRU`
with `key`.
If the item exists, move it to the tail to avoid its being cleaned.
Raise a
KeyError
if `key`
is not in `LRU`
or has timed out.

import time
from pykit import cacheable

# create `LRU`, capacity:10, timeout:60
lru = cacheable.LRU(10, 60)

# set `lru['a']` to 'val_a'
lru['a'] = 'val_a'

sleep_time = 30
try:
    time.sleep(sleep_time)
    val = lru['a']
    # if sleep_time <= timeout of LRU, return the value
    # if sleep_time > timeout of LRU, delete it and raise a `KeyError`
except KeyError as e:
    print('key not in lru')

try:
    # if the item is not in lru, raise a `KeyError`
    val = lru['b']
except KeyError as e:
    print('key not in lru')
-
LRU[key] = value
: set `LRU[key]`
to `value`
and move it to the tail of `LRU`
to avoid its being cleaned.
If the size of
`LRU`
is greater than `capacity`
* 1.5, clean items from the head until the size is equal to `capacity`.

from pykit import cacheable

# create a `LRU`, capacity:2 timeout:60
c = cacheable.LRU(2, 60)

# insert new items at the tail of `LRU`
c['a'] = 'val_a'
c['b'] = 'val_b'
c['c'] = 'val_c'

# after inserting `d`, `a` and `b` will be cleaned
c['d'] = 'val_d'
syntax:
cacheable.cache(name, capacity=1024 * 4, timeout=60, is_deepcopy=True, is_pack=False, mutex_update=False)
If it does not exist, create a `cacheable.Cacheable`
and save it; otherwise use the existing one.
from pykit import cacheable
need_cache_data_aa = {'key': 'val_aa'}
need_cache_data_bb = {'key': 'val_bb'}
# use different `name`s to create two objects; they are independent of each other.
@cacheable.cache('name_aa', capacity=100, timeout=60, is_deepcopy=False, mutex_update=False)
def cache_aa(param):
return need_cache_data_aa.get(param, '')
@cacheable.cache('name_bb', capacity=100, timeout=60, is_deepcopy=False, mutex_update=False)
def cache_bb(param):
return need_cache_data_bb.get(param, '')
arguments:
-
name
: for distinguishing different `cacheable.Cacheable`
-
capacity
: used as `capacity`
of `cacheable.Cacheable`
-
timeout
: used as `timeout`
of `cacheable.Cacheable`
-
is_deepcopy
: used as `is_deepcopy`
of `cacheable.Cacheable`
-
mutex_update
: used as `mutex_update`
of `cacheable.Cacheable`
return: A decorator function that checks whether the data has been cached; if not, or if it has timed out, it caches and returns the data.
from pykit import cacheable
need_cache_data = {
'key1': 'val_1',
'key2': 'val_2',
}
@cacheable.cache('cache', capacity=100, timeout=60, is_deepcopy=False, mutex_update=False)
def get_data(key):
return need_cache_data.get(key, '')
# params of `get_data` are used to generate key of LRU
# if params are different, cache them as different items
get_data('key1')
get_data('key2')
Baohai Liu(刘保海) [email protected]
The MIT License (MIT)
Copyright (c) 2017 Baohai Liu(刘保海) [email protected]