cachier 2.0.2__tar.gz → 2.1.0__tar.gz
This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- {cachier-2.0.2 → cachier-2.1.0}/PKG-INFO +53 -7
- {cachier-2.0.2 → cachier-2.1.0}/README.rst +52 -6
- {cachier-2.0.2 → cachier-2.1.0}/cachier/_version.py +2 -2
- {cachier-2.0.2 → cachier-2.1.0}/cachier/base_core.py +20 -9
- {cachier-2.0.2 → cachier-2.1.0}/cachier/core.py +27 -11
- {cachier-2.0.2 → cachier-2.1.0}/cachier/memory_core.py +2 -2
- {cachier-2.0.2 → cachier-2.1.0}/cachier/mongo_core.py +2 -4
- {cachier-2.0.2 → cachier-2.1.0}/cachier/pickle_core.py +3 -7
- {cachier-2.0.2 → cachier-2.1.0}/cachier.egg-info/PKG-INFO +53 -7
- {cachier-2.0.2 → cachier-2.1.0}/cachier.egg-info/SOURCES.txt +1 -0
- cachier-2.1.0/tests/standalone_script.py +11 -0
- {cachier-2.0.2 → cachier-2.1.0}/tests/test_general.py +90 -5
- {cachier-2.0.2 → cachier-2.1.0}/tests/test_memory_core.py +20 -8
- {cachier-2.0.2 → cachier-2.1.0}/tests/test_mongo_core.py +9 -5
- {cachier-2.0.2 → cachier-2.1.0}/tests/test_pickle_core.py +14 -6
- {cachier-2.0.2 → cachier-2.1.0}/.codecov.yml +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.coveragerc +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.deepsource.toml +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.fdignore +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.flake8 +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.gitattributes +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.github/workflows/checkdocs.yml +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.github/workflows/lint.yml +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.github/workflows/test.yml +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.gitignore +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.ignore +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/.pylintrc +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/LICENSE +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/MANIFEST.in +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/cachier/__init__.py +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/cachier/scripts/__init__.py +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/cachier/scripts/cli.py +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/cachier.egg-info/dependency_links.txt +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/cachier.egg-info/entry_points.txt +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/cachier.egg-info/requires.txt +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/cachier.egg-info/top_level.txt +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/pytest.ini +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/setup.cfg +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/setup.py +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/tests/__init__.py +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/tests/speed_eval.py +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/tests/test_core_lookup.py +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/tests/test_quality.py +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/tests/test_security.py +0 -0
- {cachier-2.0.2 → cachier-2.1.0}/versioneer.py +0 -0
{cachier-2.0.2 → cachier-2.1.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: cachier
-Version: 2.0.2
+Version: 2.1.0
 Summary: Persistent, stale-free, local and cross-machine caching for Python functions.
 Home-page: https://github.com/python-cachier/cachier
 Author: Shay Palachy
@@ -111,6 +111,33 @@ You can add a default, pickle-based, persistent cache to your function - meaning
     """Your function now has a persistent cache mapped by argument values!"""
     return {'arg1': arg1, 'arg2': arg2}

+Class and object methods can also be cached. Cachier will automatically ignore the ``self`` parameter when determining the cache key for an object method. **This means that methods will be cached across all instances of an object, which may not be what you want.**
+
+.. code-block:: python
+
+    from cachier import cachier
+
+    class Foo():
+        @staticmethod
+        @cachier()
+        def good_static_usage(arg_1, arg_2):
+            return arg_1 + arg_2
+
+        # Instance method does not depend on the object's internal state, so good to cache
+        @cachier()
+        def good_usage_1(self, arg_1, arg_2):
+            return arg_1 + arg_2
+
+        # Instance method is calling an external service, probably okay to cache
+        @cachier()
+        def good_usage_2(self, arg_1, arg_2):
+            result = self.call_api(arg_1, arg_2)
+            return result
+
+        # Instance method relies on an object attribute, NOT good to cache
+        @cachier()
+        def bad_usage(self, arg_1, arg_2):
+            return arg_1 + arg_2 + self.arg_3


 Resetting a Cache
@@ -121,7 +148,7 @@ The Cachier wrapper adds a ``clear_cache()`` function to each wrapped function.

     foo.clear_cache()

-
+General Configuration
 ----------------------

 Threads Limit
@@ -163,11 +190,15 @@ Further function calls made while the calculation is being performed will not tr
 Working with unhashable arguments
 ---------------------------------

-As mentioned above, the positional and keyword arguments to the wrapped function must be hashable (i.e. Python's immutable built-in objects, not mutable containers). To get around this limitation the ``
+As mentioned above, the positional and keyword arguments to the wrapped function must be hashable (i.e. Python's immutable built-in objects, not mutable containers). To get around this limitation, the ``hash_func`` parameter of the ``cachier`` decorator can be provided with a callable that gets the args and kwargs from the decorated function and returns a hash key for them.

 .. code-block:: python

-
+    def calculate_hash(args, kwds):
+        key = ...  # compute a hash key here based on arguments
+        return key
+
+    @cachier(hash_func=calculate_hash)
     def calculate_super_complex_stuff(custom_obj):
         # amazing code goes here

@@ -176,6 +207,23 @@ See here for an example:
 `Question: How to work with unhashable arguments <https://github.com/python-cachier/cachier/issues/91>`_


+Precaching values
+---------------------------------
+
+If you want to load a value into the cache without calling the underlying function, this can be done with the ``precache_value`` function.
+
+.. code-block:: python
+
+    @cachier()
+    def add(arg1, arg2):
+        return arg1 + arg2
+
+    add.precache_value(2, 2, value_to_cache=5)
+
+    result = add(2, 2)
+    print(result)  # prints 5
+
+
 Per-function call arguments
 ---------------------------

@@ -365,7 +413,7 @@ Other major contributors:

 * `cthoyt <https://github.com/cthoyt>`_ - Base memory core implementation.

-* `amarczew <https://github.com/amarczew>`_ - The ``hash_params`` kwarg.
+* `amarczew <https://github.com/amarczew>`_ - The ``hash_func`` kwarg.

 * `non-senses <https://github.com/non-senses>`_ - The ``wait_for_calc_timeout`` kwarg.

@@ -411,5 +459,3 @@ Notable bugfixers:
 .. _watchdog: https://github.com/gorakhargosh/watchdog


-
-
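The method-caching hunk above notes that cachier ignores ``self`` when computing the cache key, so entries are shared across all instances of a class. The following standalone sketch (illustrative only, not code from the package; it assumes cachier 2.1.0 is installed and uses the in-memory backend, and the class name is made up) shows that sharing in action:

    from cachier import cachier

    class Multiplier:
        @cachier(backend='memory')
        def mul(self, a, b):
            print('computing...')
            return a * b

    m1, m2 = Multiplier(), Multiplier()
    m1.mul.clear_cache()
    print(m1.mul(3, 4))  # prints 'computing...' and then 12
    print(m2.mul(3, 4))  # cache hit from m1's call: prints only 12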
{cachier-2.0.2 → cachier-2.1.0}/README.rst

@@ -83,6 +83,33 @@ You can add a default, pickle-based, persistent cache to your function - meaning
     """Your function now has a persistent cache mapped by argument values!"""
     return {'arg1': arg1, 'arg2': arg2}

+Class and object methods can also be cached. Cachier will automatically ignore the ``self`` parameter when determining the cache key for an object method. **This means that methods will be cached across all instances of an object, which may not be what you want.**
+
+.. code-block:: python
+
+    from cachier import cachier
+
+    class Foo():
+        @staticmethod
+        @cachier()
+        def good_static_usage(arg_1, arg_2):
+            return arg_1 + arg_2
+
+        # Instance method does not depend on the object's internal state, so good to cache
+        @cachier()
+        def good_usage_1(self, arg_1, arg_2):
+            return arg_1 + arg_2
+
+        # Instance method is calling an external service, probably okay to cache
+        @cachier()
+        def good_usage_2(self, arg_1, arg_2):
+            result = self.call_api(arg_1, arg_2)
+            return result
+
+        # Instance method relies on an object attribute, NOT good to cache
+        @cachier()
+        def bad_usage(self, arg_1, arg_2):
+            return arg_1 + arg_2 + self.arg_3


 Resetting a Cache
@@ -93,7 +120,7 @@ The Cachier wrapper adds a ``clear_cache()`` function to each wrapped function.

     foo.clear_cache()

-
+General Configuration
 ----------------------

 Threads Limit
@@ -135,11 +162,15 @@ Further function calls made while the calculation is being performed will not tr
 Working with unhashable arguments
 ---------------------------------

-As mentioned above, the positional and keyword arguments to the wrapped function must be hashable (i.e. Python's immutable built-in objects, not mutable containers). To get around this limitation the ``
+As mentioned above, the positional and keyword arguments to the wrapped function must be hashable (i.e. Python's immutable built-in objects, not mutable containers). To get around this limitation, the ``hash_func`` parameter of the ``cachier`` decorator can be provided with a callable that gets the args and kwargs from the decorated function and returns a hash key for them.

 .. code-block:: python

-
+    def calculate_hash(args, kwds):
+        key = ...  # compute a hash key here based on arguments
+        return key
+
+    @cachier(hash_func=calculate_hash)
     def calculate_super_complex_stuff(custom_obj):
         # amazing code goes here

@@ -148,6 +179,23 @@ See here for an example:
 `Question: How to work with unhashable arguments <https://github.com/python-cachier/cachier/issues/91>`_


+Precaching values
+---------------------------------
+
+If you want to load a value into the cache without calling the underlying function, this can be done with the ``precache_value`` function.
+
+.. code-block:: python
+
+    @cachier()
+    def add(arg1, arg2):
+        return arg1 + arg2
+
+    add.precache_value(2, 2, value_to_cache=5)
+
+    result = add(2, 2)
+    print(result)  # prints 5
+
+
 Per-function call arguments
 ---------------------------

@@ -337,7 +385,7 @@ Other major contributors:

 * `cthoyt <https://github.com/cthoyt>`_ - Base memory core implementation.

-* `amarczew <https://github.com/amarczew>`_ - The ``hash_params`` kwarg.
+* `amarczew <https://github.com/amarczew>`_ - The ``hash_func`` kwarg.

 * `non-senses <https://github.com/non-senses>`_ - The ``wait_for_calc_timeout`` kwarg.

@@ -381,5 +429,3 @@ Notable bugfixers:
 .. links:
 .. _pymongo: https://api.mongodb.com/python/current/
 .. _watchdog: https://github.com/gorakhargosh/watchdog
-
-
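The ``hash_func`` hunks above only say that the callable receives the decorated function's args and kwargs and must return a hash key. As a concrete illustration (a hypothetical example, not code from the package; it assumes cachier 2.1.0 is installed), a pickle-based key allows caching calls whose arguments are unhashable, such as lists of dicts:

    import hashlib
    import pickle

    from cachier import cachier

    def hash_by_pickle(args, kwds):
        # Serialize the whole argument set and hash it; assumes every argument is picklable.
        payload = pickle.dumps((args, sorted(kwds.items())))
        return hashlib.sha256(payload).hexdigest()

    @cachier(hash_func=hash_by_pickle)
    def summarize(records):
        # 'records' is a list of dicts, which the default key derivation cannot hash
        return sum(r['value'] for r in records)

    print(summarize([{'value': 1}, {'value': 2}]))  # computed and cached
    print(summarize([{'value': 1}, {'value': 2}]))  # served from the cache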
{cachier-2.0.2 → cachier-2.1.0}/cachier/_version.py

@@ -11,8 +11,8 @@ version_json = '''
 {
  "dirty": false,
  "error": null,
- "full-revisionid": "
- "version": "2.0.2"
+ "full-revisionid": "7be769b78acf3a0e0aff8d528476913571d79749",
+ "version": "2.1.0"
 }
 ''' # END VERSION_JSON

{cachier-2.0.2 → cachier-2.1.0}/cachier/base_core.py

@@ -9,26 +9,31 @@
 import abc  # for the _BaseCore abstract base class
 import functools
 import hashlib
+import inspect
+import pickle  # nosec: B403


-def _default_hash_params(args, kwds):
+def _default_hash_func(args, kwds):
     # pylint: disable-next=protected-access
-    key = functools._make_key(args, kwds, typed=
-
+    key = functools._make_key(args, kwds, typed=True)
+    hash = hashlib.sha256()
+    for item in key:
+        hash.update(pickle.dumps(item))
+    return hash.hexdigest()


 class _BaseCore():
     __metaclass__ = abc.ABCMeta

-    def __init__(self,
-        self.
-        self.next_time = next_time
-        self.hash_func = hash_params if hash_params else _default_hash_params
+    def __init__(self, hash_func):
+        self.hash_func = hash_func if hash_func else _default_hash_func
         self.func = None

     def set_func(self, func):
-        """Sets the function this core will use. This has to be set before
-
+        """Sets the function this core will use. This has to be set before any
+        method is called. Also determine if the function is an object method."""
+        func_params = list(inspect.signature(func).parameters)
+        self.func_is_method = func_params and func_params[0] == 'self'
         self.func = func

     def get_entry(self, args, kwds):
@@ -37,6 +42,12 @@ class _BaseCore():
         key = self.hash_func(args, kwds)
         return self.get_entry_by_key(key)

+    def precache_value(self, args, kwds, value_to_cache):
+        """Writes a precomputed value into the cache."""
+        key = self.hash_func(args, kwds)
+        self.set_entry(key, value_to_cache)
+        return value_to_cache
+
     @abc.abstractmethod
     def get_entry_by_key(self, key):
         """Returns the result mapped to the given key in this core's cache,
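For reference, the key derivation introduced above can be exercised on its own. The following is a standalone sketch that mirrors the new ``_default_hash_func`` (it is not cachier's public API), and it leans on ``functools._make_key``, a private CPython helper that the diff itself uses:

    import functools
    import hashlib
    import pickle

    def default_hash_func(args, kwds):
        # Same scheme as the added _default_hash_func: sha256 over the pickled
        # items of the lru_cache-style argument key.
        key = functools._make_key(args, kwds, typed=True)
        digest = hashlib.sha256()
        for item in key:
            digest.update(pickle.dumps(item))
        return digest.hexdigest()

    print(default_hash_func((1, 2), {'x': 3}))
    print(default_hash_func((1, 2), {'x': 3}) == default_hash_func((1, 2), {'x': 3}))  # True: stable key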
{cachier-2.0.2 → cachier-2.1.0}/cachier/core.py

@@ -14,6 +14,7 @@ from __future__ import print_function

 import os
 from functools import wraps
+from warnings import warn

 import datetime
 from concurrent.futures import ThreadPoolExecutor
@@ -85,6 +86,7 @@ def cachier(
         backend=None,
         mongetter=None,
         cache_dir=None,
+        hash_func=None,
         hash_params=None,
         wait_for_calc_timeout=0,
         separate_files=False,
@@ -125,7 +127,7 @@ def cachier(
         A fully qualified path to a file directory to be used for cache files.
         The running process must have running permissions to this folder. If
         not provided, a default directory at `~/.cachier/` is used.
-    hash_params : callable, optional
+    hash_func : callable, optional
         A callable that gets the args and kwargs from the decorated function
         and returns a hash key for them. This parameter can be used to enable
         the use of cachier with functions that get arguments that are not
@@ -141,15 +143,19 @@ def cachier(
        split between several files, one for each argument set. This can help
        if you per-function cache files become too large.
    """
+    # Check for deprecated parameters
+    if hash_params is not None:
+        message = 'hash_params will be removed in a future release, ' \
+                  'please use hash_func instead'
+        warn(message, DeprecationWarning, stacklevel=2)
+        hash_func = hash_params
     # The default is calculated dynamically to maintain previous behavior
     # to default to pickle unless the ``mongetter`` argument is given.
     if backend is None:
         backend = 'pickle' if mongetter is None else 'mongo'
     if backend == 'pickle':
         core = _PickleCore(  # pylint: disable=R0204
-
-            next_time=next_time,
-            hash_params=hash_params,
+            hash_func=hash_func,
             reload=pickle_reload,
             cache_dir=cache_dir,
             separate_files=separate_files,
@@ -161,16 +167,12 @@ def cachier(
             'must specify ``mongetter`` when using the mongo core')
         core = _MongoCore(
             mongetter=mongetter,
-
-            next_time=next_time,
-            hash_params=hash_params,
+            hash_func=hash_func,
             wait_for_calc_timeout=wait_for_calc_timeout,
         )
     elif backend == 'memory':
         core = _MemoryCore(
-
-            next_time=next_time,
-            hash_params=hash_params,
+            hash_func=hash_func,
         )
     elif backend == 'redis':
         raise NotImplementedError(
@@ -194,7 +196,10 @@ def cachier(
            _print = print
            if ignore_cache:
                return func(*args, **kwds)
-           key, entry = core.get_entry(args, kwds)
+           if core.func_is_method:
+               key, entry = core.get_entry(args[1:], kwds)
+           else:
+               key, entry = core.get_entry(args, kwds)
            if overwrite_cache:
                return _calc_entry(core, key, func, args, kwds)
            if entry is not None:  # pylint: disable=R0101
@@ -259,9 +264,20 @@ def cachier(
            except AttributeError:
                return None

+        def precache_value(*args, value_to_cache, **kwds):
+            """Add an initial value to the cache.
+
+            Arguments
+            ---------
+            value_to_cache : any
+                entry to be written into the cache
+            """
+            return core.precache_value(args, kwds, value_to_cache)
+
        func_wrapper.clear_cache = clear_cache
        func_wrapper.clear_being_calculated = clear_being_calculated
        func_wrapper.cache_dpath = cache_dpath
+       func_wrapper.precache_value = precache_value
        return func_wrapper

    return _cachier_decorator
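A short usage sketch of the deprecation shim added above: the old ``hash_params`` spelling is still accepted, but it now emits a ``DeprecationWarning`` and is forwarded to ``hash_func``. The sketch assumes cachier 2.1.0 is installed; the function names are invented for illustration:

    import warnings

    from cachier import cachier

    def my_hash(args, kwds):
        # toy key function, just for the example
        return repr((args, sorted(kwds.items())))

    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')

        @cachier(hash_params=my_hash)  # deprecated spelling, still works
        def add(a, b):
            return a + b

    print(any(issubclass(w.category, DeprecationWarning) for w in caught))  # True

    @cachier(hash_func=my_hash)  # the spelling introduced in 2.1.0
    def mul(a, b):
        return a * b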
{cachier-2.0.2 → cachier-2.1.0}/cachier/memory_core.py

@@ -17,8 +17,8 @@ class _MemoryCore(_BaseCore):
     See :class:`_BaseCore` documentation.
     """

-    def __init__(self,
-        super().__init__(
+    def __init__(self, hash_func):
+        super().__init__(hash_func)
         self.cache = {}
         self.lock = threading.RLock()

{cachier-2.0.2 → cachier-2.1.0}/cachier/mongo_core.py

@@ -37,14 +37,12 @@ class _MongoCore(_BaseCore):

     _INDEX_NAME = 'func_1_key_1'

-    def __init__(
-            self, mongetter, stale_after, next_time,
-            hash_params, wait_for_calc_timeout):
+    def __init__(self, mongetter, hash_func, wait_for_calc_timeout):
         if 'pymongo' not in sys.modules:
             warnings.warn((
                 "Cachier warning: pymongo was not found. "
                 "MongoDB cores will not function."))
-        super().__init__(
+        super().__init__(hash_func)
         self.mongetter = mongetter
         self.mongo_collection = self.mongetter()
         self.wait_for_calc_timeout = wait_for_calc_timeout
{cachier-2.0.2 → cachier-2.1.0}/cachier/pickle_core.py

@@ -29,10 +29,6 @@ class _PickleCore(_BaseCore):

     Parameters
     ----------
-    stale_after : datetime.timedelta, optional
-        See _BaseCore documentation.
-    next_time : bool, optional
-        See _BaseCore documentation.
     pickle_reload : bool, optional
         See core.cachier() documentation.
     cache_dir : str, optional.
@@ -84,10 +80,10 @@ class _PickleCore(_BaseCore):
             self._check_calculation()

     def __init__(
-            self,
-
+            self, hash_func, reload, cache_dir,
+            separate_files, wait_for_calc_timeout,
     ):
-        super().__init__(
+        super().__init__(hash_func)
         self.cache = None
         self.reload = reload
         self.cache_dir = DEF_CACHIER_DIR
{cachier-2.0.2 → cachier-2.1.0}/cachier.egg-info/PKG-INFO

Identical to the {cachier-2.0.2 → cachier-2.1.0}/PKG-INFO diff above.
{cachier-2.0.2 → cachier-2.1.0}/tests/test_general.py

@@ -1,11 +1,13 @@
 """Non-core-specific tests for cachier."""

 from __future__ import print_function
+import functools
 import os
 import queue
+import subprocess  # nosec: B404
 import threading
 from random import random
-from time import sleep
+from time import sleep, time
 import pytest
 import cachier as cachier_dir
 from cachier import cachier
@@ -84,10 +86,12 @@ def test_wait_for_calc_timeout_ok(mongetter, stale_after, separate_files):
     res_queue = queue.Queue()
     thread1 = threading.Thread(
         target=_calls_wait_for_calc_timeout_fast,
-        kwargs={'res_queue': res_queue}
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread2 = threading.Thread(
         target=_calls_wait_for_calc_timeout_fast,
-        kwargs={'res_queue': res_queue}
+        kwargs={'res_queue': res_queue},
+        daemon=True)

     thread1.start()
     thread2.start()
@@ -122,10 +126,12 @@ def test_wait_for_calc_timeout_slow(mongetter, stale_after, separate_files):
     res_queue = queue.Queue()
     thread1 = threading.Thread(
         target=_calls_wait_for_calc_timeout_slow,
-        kwargs={'res_queue': res_queue}
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread2 = threading.Thread(
         target=_calls_wait_for_calc_timeout_slow,
-        kwargs={'res_queue': res_queue}
+        kwargs={'res_queue': res_queue},
+        daemon=True)

     thread1.start()
     thread2.start()
@@ -141,3 +147,82 @@ def test_wait_for_calc_timeout_slow(mongetter, stale_after, separate_files):
     res4 = _wait_for_calc_timeout_slow(1, 2)
     # One of the cached values is returned
     assert res1 == res4 or res2 == res4 or res3 == res4
+
+
+@pytest.mark.parametrize(
+    'mongetter,backend',
+    [
+        (_test_mongetter, 'mongo'),
+        (None, 'memory'),
+        (None, 'pickle'),
+    ]
+)
+def test_precache_value(mongetter, backend):
+
+    @cachier(backend=backend, mongetter=mongetter)
+    def func(arg_1, arg_2):
+        """Some function."""
+        return arg_1 + arg_2
+
+    result = func.precache_value(2, 2, value_to_cache=5)
+    assert result == 5
+    result = func(2, 2)
+    assert result == 5
+    func.clear_cache()
+    result = func(2, 2)
+    assert result == 4
+    result = func.precache_value(2, arg_2=2, value_to_cache=5)
+    assert result == 5
+    result = func(2, arg_2=2)
+    assert result == 5
+
+
+@pytest.mark.parametrize(
+    'mongetter,backend',
+    [
+        (_test_mongetter, 'mongo'),
+        (None, 'memory'),
+        (None, 'pickle'),
+    ]
+)
+def test_ignore_self_in_methods(mongetter, backend):
+
+    class TestClass():
+        @cachier(backend=backend, mongetter=mongetter)
+        def takes_2_seconds(self, arg_1, arg_2):
+            """Some function."""
+            sleep(2)
+            return arg_1 + arg_2
+
+    test_object_1 = TestClass()
+    test_object_2 = TestClass()
+    test_object_1.takes_2_seconds.clear_cache()
+    test_object_2.takes_2_seconds.clear_cache()
+    result_1 = test_object_1.takes_2_seconds(1, 2)
+    assert result_1 == 3
+    start = time()
+    result_2 = test_object_2.takes_2_seconds(1, 2)
+    end = time()
+    assert result_2 == 3
+    assert end - start < 1
+
+
+def test_hash_params_deprecation():
+    with pytest.deprecated_call(match='hash_params will be removed'):
+        @cachier(hash_params=lambda a, k: 'key')
+        def test():
+            return 'value'
+    assert test() == 'value'
+
+
+def test_separate_processes():
+    test_args = ('python', 'tests/standalone_script.py')
+    run_params = {'args': test_args, 'capture_output': True, 'text': True}
+    run_process = functools.partial(subprocess.run, **run_params)
+    result = run_process()
+    assert result.stdout.strip() == 'two 2'
+    start = time()
+    result = run_process()
+    end = time()
+    assert result.stdout.strip() == 'two 2'
+    assert end - start < 3
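Note how ``test_precache_value`` above clears the cache before precaching with the keyword form ``func.precache_value(2, arg_2=2, ...)``: the default key derivation, like ``functools``' own key making, distinguishes positional from keyword arguments, so a value precached positionally is not found by a keyword-style call. A small illustrative sketch (assumes cachier 2.1.0 is installed):

    from cachier import cachier

    @cachier(backend='memory')
    def add(arg_1, arg_2):
        return arg_1 + arg_2

    add.clear_cache()
    add.precache_value(2, 2, value_to_cache=5)
    print(add(2, 2))        # 5 -- served from the precached entry
    print(add(2, arg_2=2))  # 4 -- the keyword form hashes to a different key, so it is computed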
{cachier-2.0.2 → cachier-2.1.0}/tests/test_memory_core.py

@@ -172,9 +172,13 @@ def test_memory_being_calculated():
     _takes_time.clear_cache()
     res_queue = queue.Queue()
     thread1 = threading.Thread(
-
+        target=_calls_takes_time,
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread2 = threading.Thread(
-
+        target=_calls_takes_time,
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread1.start()
     sleep(0.5)
     thread2.start()
@@ -206,9 +210,13 @@ def test_being_calc_next_time():
     sleep(1.1)
     res_queue = queue.Queue()
     thread1 = threading.Thread(
-        target=_calls_being_calc_next_time,
+        target=_calls_being_calc_next_time,
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread2 = threading.Thread(
-        target=_calls_being_calc_next_time,
+        target=_calls_being_calc_next_time,
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread1.start()
     sleep(0.5)
     thread2.start()
@@ -240,9 +248,13 @@ def test_clear_being_calculated():
     _takes_time.clear_cache()
     res_queue = queue.Queue()
     thread1 = threading.Thread(
-
+        target=_calls_takes_time,
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread2 = threading.Thread(
-
+        target=_calls_takes_time,
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread1.start()
     _takes_time.clear_being_calculated()
     sleep(0.5)
@@ -283,7 +295,7 @@ def test_error_throwing_func():

 @pytest.mark.memory
 def test_callable_hash_param():
-    def
+    def _hash_func(args, kwargs):
         def _hash(obj):
             if isinstance(obj, pd.core.frame.DataFrame):
                 return hashlib.sha256(
@@ -295,7 +307,7 @@ def test_callable_hash_param():
             {k: _hash(v) for k, v in kwargs.items()}.items()))
         return k_args + k_kwargs

-    @cachier(backend='memory',
+    @cachier(backend='memory', hash_func=_hash_func)
     def _params_with_dataframe(*args, **kwargs):
         """Some function."""
         return random()
{cachier-2.0.2 → cachier-2.1.0}/tests/test_mongo_core.py

@@ -144,9 +144,13 @@ def test_mongo_being_calculated():
     _takes_time.clear_cache()
     res_queue = queue.Queue()
     thread1 = threading.Thread(
-        target=_calls_takes_time,
+        target=_calls_takes_time,
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread2 = threading.Thread(
-        target=_calls_takes_time,
+        target=_calls_takes_time,
+        kwargs={'res_queue': res_queue},
+        daemon=True)
     thread1.start()
     sleep(1)
     thread2.start()
@@ -209,7 +213,7 @@ def test_stalled_mongo_db_cache():
     @cachier(mongetter=_test_mongetter)
     def _stalled_func():
         return 1
-    core = _MongoCore(_test_mongetter, None,
+    core = _MongoCore(_test_mongetter, None, 0)
     core.set_func(_stalled_func)
     core.clear_cache()
     with pytest.raises(RecalculationNeeded):
@@ -262,7 +266,7 @@ def test_stalled_mong_db_core(monkeypatch):
 @pytest.mark.mongo
 def test_callable_hash_param():

-    def
+    def _hash_func(args, kwargs):
         def _hash(obj):
             if isinstance(obj, pd.core.frame.DataFrame):
                 return hashlib.sha256(
@@ -275,7 +279,7 @@ def test_callable_hash_param():
                 k: _hash(v) for k, v in kwargs.items()}.items()))
         return k_args + k_kwargs

-    @cachier(mongetter=_test_mongetter,
+    @cachier(mongetter=_test_mongetter, hash_func=_hash_func)
     def _params_with_dataframe(*args, **kwargs):
         """Some function."""
         return random()
{cachier-2.0.2 → cachier-2.1.0}/tests/test_pickle_core.py

@@ -222,14 +222,16 @@ def test_pickle_being_calculated(separate_files):
         kwargs={
             'takes_time_func': _takes_time_decorated,
             'res_queue': res_queue,
-        }
+        },
+        daemon=True,
     )
     thread2 = threading.Thread(
         target=_calls_takes_time,
         kwargs={
             'takes_time_func': _takes_time_decorated,
             'res_queue': res_queue,
-        }
+        },
+        daemon=True,
     )
     thread1.start()
     sleep(0.5)
@@ -271,14 +273,16 @@ def test_being_calc_next_time(separate_files):
         kwargs={
             'being_calc_func': _being_calc_next_time_decorated,
             'res_queue': res_queue,
-        }
+        },
+        daemon=True,
     )
     thread2 = threading.Thread(
         target=_calls_being_calc_next_time,
         kwargs={
             'being_calc_func': _being_calc_next_time_decorated,
             'res_queue': res_queue,
-        }
+        },
+        daemon=True,
     )
     thread1.start()
     sleep(0.5)
@@ -339,6 +343,7 @@ def _helper_bad_cache_file(sleeptime, separate_files):
             'trash_cache': True,
             'separate_files': separate_files,
         },
+        daemon=True,
     )
     thread2 = threading.Thread(
         target=_calls_bad_cache,
@@ -348,6 +353,7 @@ def _helper_bad_cache_file(sleeptime, separate_files):
             'trash_cache': False,
             'separate_files': separate_files,
         },
+        daemon=True,
     )
     thread1.start()
     sleep(sleeptime)
@@ -427,6 +433,7 @@ def _helper_delete_cache_file(sleeptime, separate_files):
             'del_cache': True,
             'separate_files': separate_files,
         },
+        daemon=True,
     )
     thread2 = threading.Thread(
         target=_calls_delete_cache,
@@ -436,6 +443,7 @@ def _helper_delete_cache_file(sleeptime, separate_files):
             'del_cache': False,
             'separate_files': separate_files,
         },
+        daemon=True,
     )
     thread1.start()
     sleep(sleeptime)
@@ -534,7 +542,7 @@ def test_pickle_core_custom_cache_dir(separate_files):

 @pytest.mark.parametrize('separate_files', [True, False])
 def test_callable_hash_param(separate_files):
-    def
+    def _hash_func(args, kwargs):
         def _hash(obj):
             if isinstance(obj, pd.core.frame.DataFrame):
                 return hashlib.sha256(
@@ -550,7 +558,7 @@ def test_callable_hash_param(separate_files):
         )
         return k_args + k_kwargs

-    @cachier(
+    @cachier(hash_func=_hash_func, separate_files=separate_files)
     def _params_with_dataframe(*args, **kwargs):
         """Some function."""
         return random()
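A recurring change across the test hunks above is passing ``daemon=True`` to the worker threads. Daemon threads do not keep the interpreter alive, so a worker stuck behind a cache lock or a slow backend cannot hang the test run at exit. A minimal illustration of the flag itself, using plain ``threading`` and nothing from cachier:

    import threading
    import time

    def wait_forever():
        # stands in for a worker stuck on a lock or a slow backend
        time.sleep(10_000)

    t = threading.Thread(target=wait_forever, daemon=True)
    t.start()
    print('main thread exits without joining the daemon thread')
    # With daemon=False the interpreter would wait for wait_forever to return.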