Consider the following:

@property
def name(self):
    if not hasattr(self, '_name'):
        # expensive calculation
        self._name = 1 + 1
    return self._name

I'm new here, but I think the caching could be factored out into a decorator. Only I didn't find one like that ;)

PS, the real calculation doesn't depend on mutable values.
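For reference, a minimal sketch of what such a decorator could look like (the name lazy_property is purely illustrative; since Python 3.8 the standard library also ships functools.cached_property for exactly this pattern):

from functools import wraps

def lazy_property(fn):
    """Cache the result of a no-argument method as an instance attribute."""
    attr_name = '_' + fn.__name__

    @property
    @wraps(fn)
    def wrapper(self):
        # run the expensive calculation only once per instance
        if not hasattr(self, attr_name):
            setattr(self, attr_name, fn(self))
        return getattr(self, attr_name)

    return wrapper

class Example:
    @lazy_property
    def name(self):
        return 1 + 1  # stands in for the expensive calculation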
Current answer
functools.cache was released in Python 3.9 (docs):

from functools import cache

@cache
def factorial(n):
    return n * factorial(n-1) if n else 1
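For instance (an illustrative check using the cache_info() helper that cache-wrapped functions expose):

factorial(10)                  # computes and caches factorial(0) through factorial(10)
factorial(5)                   # answered straight from the cache, no recursion
print(factorial.cache_info())  # e.g. CacheInfo(hits=1, misses=11, maxsize=None, currsize=11)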
In earlier Python versions, one of the other answers is still a valid solution: use lru_cache as an ordinary cache, without the size limit and without the LRU feature. (docs)

If maxsize is set to None, the LRU feature is disabled and the cache can grow without bound.

Here is a prettier version of it:

from functools import lru_cache

cache = lru_cache(maxsize=None)

@cache
def func(param1):
    pass
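A quick sanity check of that pattern, reusing the cache alias defined above (the function square is illustrative):

@cache
def square(x):
    print('computing', x)
    return x * x

square(4)                   # prints "computing 4"
square(4)                   # nothing printed, served from the cache
print(square.cache_info())  # hits=1, misses=1, maxsize=None, currsize=1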
Other answers
A simple solution for function caching

with TTL (time to live) and max_entries

- doesn't work when the decorated function takes unhashable types as input (e.g. dicts)
- optional parameter: ttl (time to live for every entry)
- optional parameter: max_entries (so the storage doesn't get cluttered if too many argument combinations are cached)
- make sure the function has no important side effects
Example usage
import time
from datetime import timedelta

@cache(ttl=timedelta(minutes=3), max_entries=300)
def add(a, b):
    time.sleep(2)
    return a + b

@cache()
def substract(a, b):
    time.sleep(2)
    return a - b

a = 5
# function is called with argument combinations the first time -> it takes some time
for i in range(5):
    print(add(a, i))

# function is called with the same arguments again -> will be answered from cache
for i in range(5):
    print(add(a, i))
Decorator code to copy and paste
from datetime import datetime, timedelta

def cache(**kwargs):
    def decorator(function):
        # static function variable for cache, lazy initialization
        try:
            function.cache
        except AttributeError:
            function.cache = {}

        def wrapper(*args):
            # if nothing valid in cache, insert something
            if args not in function.cache or datetime.now() > function.cache[args]['expiry']:
                if 'max_entries' in kwargs:
                    max_entries = kwargs['max_entries']
                    if max_entries is not None and len(function.cache) >= max_entries:
                        now = datetime.now()
                        # delete the first expired entry that can be found (lazy deletion)
                        for key in function.cache:
                            if function.cache[key]['expiry'] < now:
                                del function.cache[key]
                                break
                        # if nothing expired is deletable, delete the first entry
                        if len(function.cache) >= max_entries:
                            del function.cache[next(iter(function.cache))]
                function.cache[args] = {'result': function(*args),
                                        'expiry': datetime.max if 'ttl' not in kwargs else datetime.now() + kwargs['ttl']}
            # answer from cache
            return function.cache[args]['result']
        return wrapper
    return decorator
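One possible way around the unhashable-argument limitation mentioned above (a sketch, not part of the original answer): convert dict arguments into a hashable frozenset of items before they reach the cached function.

from datetime import timedelta

@cache(ttl=timedelta(minutes=3))
def lookup(settings_key):
    # settings_key is a frozenset of (key, value) pairs, so it is hashable
    settings = dict(settings_key)
    return sum(settings.values())

print(lookup(frozenset({'a': 1, 'b': 2}.items())))  # computed on the first call
print(lookup(frozenset({'a': 1, 'b': 2}.items())))  # answered from the cache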
Ah, just needed to find the right name for this: "lazy property evaluation".

I do this a lot too; maybe I'll use that recipe in my code someday.
from functools import wraps

def cache(maxsize=128):
    cache = {}
    def decorator(func):
        @wraps(func)
        def inner(*args, no_cache=False, **kwargs):
            if no_cache:
                return func(*args, **kwargs)

            key_base = "_".join(str(x) for x in args)
            key_end = "_".join(f"{k}:{v}" for k, v in kwargs.items())
            key = f"{key_base}-{key_end}"

            if key in cache:
                return cache[key]

            res = func(*args, **kwargs)

            if len(cache) > maxsize:
                del cache[list(cache.keys())[0]]
            cache[key] = res

            return res
        return inner
    return decorator

def async_cache(maxsize=128):
    cache = {}
    def decorator(func):
        @wraps(func)
        async def inner(*args, no_cache=False, **kwargs):
            if no_cache:
                return await func(*args, **kwargs)

            key_base = "_".join(str(x) for x in args)
            key_end = "_".join(f"{k}:{v}" for k, v in kwargs.items())
            key = f"{key_base}-{key_end}"

            if key in cache:
                return cache[key]

            res = await func(*args, **kwargs)

            if len(cache) > maxsize:
                del cache[list(cache.keys())[0]]
            cache[key] = res

            return res
        return inner
    return decorator
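Before the aiohttp example below, a minimal synchronous usage sketch (slow_add is illustrative; note the no_cache keyword that bypasses the cache):

@cache(maxsize=2)
def slow_add(a, b):
    print('computing')
    return a + b

slow_add(1, 2)                 # prints "computing"
slow_add(1, 2)                 # served from the cache, nothing printed
slow_add(1, 2, no_cache=True)  # forces a fresh call, prints "computing" again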
Example usage
import asyncio
import aiohttp

# Removes the aiohttp ClientSession instance warning.
class HTTPSession(aiohttp.ClientSession):
    """ Abstract class for aiohttp. """

    def __init__(self, loop=None) -> None:
        super().__init__(loop=loop or asyncio.get_event_loop())

    def __del__(self) -> None:
        if not self.closed:
            self.loop.run_until_complete(self.close())
            self.loop.close()
        return

session = HTTPSession()

@async_cache()
async def query(url, method="get", res_method="text", *args, **kwargs):
    async with getattr(session, method.lower())(url, *args, **kwargs) as res:
        return await getattr(res, res_method)()

async def get(url, *args, **kwargs):
    return await query(url, "get", *args, **kwargs)

async def post(url, *args, **kwargs):
    return await query(url, "post", *args, **kwargs)

async def delete(url, *args, **kwargs):
    return await query(url, "delete", *args, **kwargs)
There is fastcache, which is "a C implementation of Python 3 functools.lru_cache. Provides speedup of 10-30x over the standard library."

Same as the chosen answer, just a different import:
from fastcache import lru_cache

@lru_cache(maxsize=128, typed=False)
def f(a, b):
    pass
Also, it comes installed in Anaconda, unlike functools, which needs to be installed.
Try joblib: https://joblib.readthedocs.io/en/latest/memory.html

from joblib import Memory

cachedir = './joblib_cache'  # illustrative path; any writable directory works
memory = Memory(cachedir, verbose=0)

@memory.cache
def f(x):
    print('Running f(%s)' % x)
    return x
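A quick illustration of the effect (assuming the definitions above): the second call with the same argument is answered from the on-disk cache, so the print inside f does not run again.

print(f(1))  # prints "Running f(1)" and then 1
print(f(1))  # only prints 1; the result is read back from the cache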