Detailed Explanation of Cached Data by Python Annotation
- 2021-12-09 09:23:56
- OfStack
Background
Previously the data had to be reloaded from scratch on every run. I wanted a caching mechanism driven by an annotation (decorator), so that no extra caching code has to be written each time.
Limitation: at present only a single return value is supported. Since that value can be a dictionary, this already meets my personal needs, so I have no motivation to change it (dog-head emoji).
Bring it and use it
New File
Cache.py
class Cache:
    """Decorator class that caches a function's pickled return value on disk.

    The cache key is an MD5 digest of the wrapped function's source file,
    its name, and the call arguments (the first positional argument --
    conventionally ``self`` -- is excluded), so different arguments map to
    different cache files.

    Parameters
    ----------
    cache_path : str
        Directory for cache files; created on demand. Defaults to ``'.'``.
    nocache : bool
        When True the cache is never read: the wrapped function always runs
        and the cache file is refreshed with the fresh result.
    """

    def __init__(self, cache_path='.', nocache=False):
        self.cache_path = cache_path
        self.cache = not nocache

    def __call__(self, func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # str() on each positional argument lets non-string values
            # (ints, Paths, ...) take part in the key instead of raising
            # TypeError inside str.join.
            key = f'{func.__code__.co_filename}.{func.__name__}'
            key += ','.join([str(a) for a in args[1:]]
                            + [f'{k}={v}' for k, v in kwargs.items()])
            digest = hashlib.md5(key.encode('utf-8')).hexdigest()
            cache_file = f'{self.cache_path}/{digest}'
            if self.cache and os.path.exists(cache_file):
                print('Loading from cache')
                # Context manager closes the handle (the original leaked it).
                with open(cache_file, 'rb') as fh:
                    return pickle.load(fh)
            # exist_ok avoids the check-then-create race of the original.
            os.makedirs(self.cache_path, exist_ok=True)
            data = func(*args, **kwargs)
            with open(cache_file, 'wb') as fh:
                pickle.dump(data, fh)
            print(f'Dump finished {cache_file}')
            return data
        return wrapper
# Usage example: import the decorator from the new Cache.py module.
from .Cache import Cache

# nocache=True forces a refresh: the wrapped call always re-runs and
# re-writes the cache file instead of reading an existing one.
# NOTE(review): `root_path` must already be defined in the importing
# module -- it is not provided by this snippet.
@Cache(root_path, nocache=True)
def load_data(self, inpath):
    return 'Wula~a~a~!'
Practice process
First attempt: a simple base class that subclasses inherit from
class Cache(object):
    """Base class that persists ``self.data`` to a single pickle file.

    On construction the cache file (``<cache_path>/cache``) is loaded if it
    exists; otherwise ``self.data`` is None and subclasses are expected to
    populate it and call :meth:`save_cache`.

    Parameters
    ----------
    cache_path : str or None
        Directory holding the cache file; defaults to the current directory.
    """

    def __init__(self, cache_path=None):
        # Fall back to '.' when no directory is supplied.
        self.cache_path = cache_path if cache_path else '.'
        self.cache_path = f'{self.cache_path}/cache'
        self.data = self.load_cache()

    def load_cache(self):
        """Return the unpickled cached data, or None on a cache miss."""
        if os.path.exists(self.cache_path):
            print('Loading from cache')
            # Context manager closes the handle (the original leaked it).
            with open(self.cache_path, 'rb') as fh:
                return pickle.load(fh)
        return None

    def save_cache(self):
        """Pickle ``self.data`` to the cache file."""
        with open(self.cache_path, 'wb') as fh:
            pickle.dump(self.data, fh)
        print(f'Dump finished {self.cache_path}')
class Filter4Analyzer(Cache):
    """Example consumer of the Cache base class.

    Loads its data once via ``load_data`` (provided elsewhere) and persists
    it, so subsequent constructions hit the pickle cache instead.
    """

    def __init__(self, rootpath, datapath):
        super().__init__(rootpath)
        self.root_path = rootpath
        # A cache miss leaves self.data as None; compute and persist it.
        cache_missed = self.data is None
        if cache_missed:
            self.data = self.load_data(datapath)
            self.save_cache()
Simply inheriting the Cache class works, but it has many limitations: for example, only one designated attribute can be cached, and the saving code must still be written inside Filter4Analyzer itself.
As the next step, a nested Python decorator improves on these problems.
from functools import wraps
import hashlib
def cached(cache_path):
    """Decorator factory: cache a function's pickled return value on disk.

    The cache key is an MD5 digest of the function's source file, its name,
    and the call arguments (the first positional argument -- conventionally
    ``self`` -- is excluded).

    Parameters
    ----------
    cache_path : str
        Directory where cache files are written; created on demand. A falsy
        value falls back to the current directory. (The original mapped a
        falsy path to one shared './cache' file -- colliding all keys --
        and then crashed in ``os.makedirs(cache_path)``.)
    """
    cache_dir = cache_path if cache_path else '.'

    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # str() on each positional argument lets non-string values
            # (ints, Paths, ...) take part in the key instead of raising
            # TypeError inside str.join. The original also joined args[1:]
            # twice (once unconverted), which duplicated key material.
            key = f'{func.__code__.co_filename}.{func.__name__}'
            key += ','.join([str(a) for a in args[1:]]
                            + [f'{k}={v}' for k, v in kwargs.items()])
            digest = hashlib.md5(key.encode('utf-8')).hexdigest()
            cache_file = f'{cache_dir}/{digest}'
            if os.path.exists(cache_file):
                print('Loading from cache')
                # Context manager closes the handle (the original leaked it).
                with open(cache_file, 'rb') as fh:
                    return pickle.load(fh)
            os.makedirs(cache_dir, exist_ok=True)
            data = func(*args, **kwargs)
            with open(cache_file, 'wb') as fh:
                pickle.dump(data, fh)
            print(f'Dump finished {cache_file}')
            return data
        return wrapper
    return decorator
class Tester:
    """Demonstrates the `cached` decorator on an instance method."""

    @cached(cache_path='./workpath_test')
    def test(self, data_path):
        # Stand-in payload; a real loader would read from data_path.
        payload = ['hiahia']
        return payload
Simplify code through decorator classes
class Cache:
    """Decorator class that caches a function's pickled return value on disk.

    The cache key is an MD5 digest of the wrapped function's source file,
    its name, and the call arguments (the first positional argument --
    conventionally ``self`` -- is excluded).

    Parameters
    ----------
    cache_path : str
        Directory for cache files; created on demand. Defaults to ``'.'``.
    nocache : bool
        When True the cache is never read: the wrapped function always runs
        and the cache file is refreshed with the fresh result.
    """

    def __init__(self, cache_path='.', nocache=False):
        self.cache_path = cache_path
        self.cache = not nocache

    def __call__(self, func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            # str() on each positional argument lets non-string values
            # (ints, Paths, ...) take part in the key instead of raising
            # TypeError inside str.join.
            key = f'{func.__code__.co_filename}.{func.__name__}'
            key += ','.join([str(a) for a in args[1:]]
                            + [f'{k}={v}' for k, v in kwargs.items()])
            digest = hashlib.md5(key.encode('utf-8')).hexdigest()
            cache_file = f'{self.cache_path}/{digest}'
            if self.cache and os.path.exists(cache_file):
                print('Loading from cache')
                # Context manager closes the handle (the original leaked it).
                with open(cache_file, 'rb') as fh:
                    return pickle.load(fh)
            # exist_ok avoids the check-then-create race of the original.
            os.makedirs(self.cache_path, exist_ok=True)
            data = func(*args, **kwargs)
            with open(cache_file, 'wb') as fh:
                pickle.dump(data, fh)
            print(f'Dump finished {cache_file}')
            return data
        return wrapper
Reference:
Python Function Decorator
Python Function Properties and PyCodeObject
Summarize
That concludes this article. I hope it helps you, and I also hope you will keep an eye on this site for more content!