cachier-3.1.1-py3-none-any.whl → cachier-3.2.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cachier/__init__.py +2 -1
- cachier/_version.py +2 -1
- cachier/config.py +6 -6
- cachier/core.py +16 -5
- cachier/cores/base.py +6 -1
- cachier/cores/memory.py +11 -3
- cachier/cores/mongo.py +9 -5
- cachier/cores/pickle.py +84 -51
- cachier/cores/sql.py +288 -0
- cachier/version.info +1 -1
- {cachier-3.1.1.dist-info → cachier-3.2.1.dist-info}/METADATA +65 -6
- cachier-3.2.1.dist-info/RECORD +20 -0
- {cachier-3.1.1.dist-info → cachier-3.2.1.dist-info}/WHEEL +1 -1
- cachier-3.1.1.dist-info/RECORD +0 -19
- {cachier-3.1.1.dist-info → cachier-3.2.1.dist-info}/entry_points.txt +0 -0
- {cachier-3.1.1.dist-info → cachier-3.2.1.dist-info/licenses}/LICENSE +0 -0
- {cachier-3.1.1.dist-info → cachier-3.2.1.dist-info}/top_level.txt +0 -0
cachier/__init__.py
CHANGED
cachier/_version.py
CHANGED
@@ -18,7 +18,8 @@ with open(_PATH_VERSION) as fopen:
 def _get_git_sha() -> str:
     from subprocess import DEVNULL, check_output

-    …
+    args = ["git", "rev-parse", "--short", "HEAD"]
+    out = check_output(args, stderr=DEVNULL)  # noqa: S603
     return out.decode("utf-8").strip()

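The pattern the new lines use, an explicit argv list instead of splitting a shell-style string (which is what lint rules like ruff's S603 push toward), can be exercised on its own; a minimal sketch:

    from subprocess import DEVNULL, check_output

    # Build the command as an explicit argv list.
    args = ["git", "rev-parse", "--short", "HEAD"]
    # Prints e.g. "1a2b3c4" inside a git checkout; raises CalledProcessError otherwise.
    print(check_output(args, stderr=DEVNULL).decode("utf-8").strip())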
cachier/config.py
CHANGED
@@ -1,10 +1,9 @@
-import datetime
 import hashlib
 import os
 import pickle
 import threading
-from collections.abc import Mapping
 from dataclasses import dataclass, replace
+from datetime import datetime, timedelta
 from typing import Any, Optional, Union

 from ._types import Backend, HashFunc, Mongetter
@@ -27,7 +26,7 @@ class Params:
     hash_func: HashFunc = _default_hash_func
     backend: Backend = "pickle"
     mongetter: Optional[Mongetter] = None
-    stale_after: datetime.timedelta = datetime.timedelta.max
+    stale_after: timedelta = timedelta.max
     next_time: bool = False
     cache_dir: Union[str, os.PathLike] = "~/.cachier/"
     pickle_reload: bool = True
@@ -65,7 +64,7 @@ def _update_with_defaults(
     return param


-def set_default_params(**params: Mapping) -> None:
+def set_default_params(**params: Any) -> None:
     """Configure default parameters applicable to all memoized functions."""
     # It is kept for backwards compatibility with deprecation warning
     import warnings
@@ -79,7 +78,7 @@ def set_default_params(**params: Mapping) -> None:
     set_global_params(**params)


-def set_global_params(**params: Mapping) -> None:
+def set_global_params(**params: Any) -> None:
     """Configure global parameters applicable to all memoized functions.

     This function takes the same keyword parameters as the ones defined in the
@@ -100,7 +99,8 @@ def set_global_params(**params: Mapping) -> None:
         if hasattr(cachier.config._global_params, k)
     }
     cachier.config._global_params = replace(
-        cachier.config._global_params, **valid_params
+        cachier.config._global_params,
+        **valid_params,  # type: ignore[arg-type]
     )
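With the signature loosened from ``Mapping`` to ``Any``, calls like the following remain valid; a minimal sketch using parameter names taken from the ``Params`` dataclass above:

    from datetime import timedelta

    from cachier.config import set_global_params

    # Any Params field can be overridden globally; unknown keys are filtered
    # out by the hasattr() check shown in the diff above.
    set_global_params(stale_after=timedelta(hours=12), next_time=True)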
cachier/core.py
CHANGED
@@ -7,14 +7,14 @@
 # http://www.opensource.org/licenses/MIT-license
 # Copyright (c) 2016, Shay Palachy <shaypal5@gmail.com>

-import datetime
 import inspect
 import os
 import warnings
 from collections import OrderedDict
 from concurrent.futures import ThreadPoolExecutor
+from datetime import datetime, timedelta
 from functools import wraps
-from typing import Any, Optional, Union
+from typing import Any, Callable, Optional, Union
 from warnings import warn

 from .config import (
@@ -27,6 +27,7 @@ from .cores.base import RecalculationNeeded, _BaseCore
 from .cores.memory import _MemoryCore
 from .cores.mongo import _MongoCore
 from .cores.pickle import _PickleCore
+from .cores.sql import _SQLCore

 MAX_WORKERS_ENVAR_NAME = "CACHIER_MAX_WORKERS"
 DEFAULT_MAX_WORKERS = 8
@@ -107,7 +108,8 @@ def cachier(
     hash_params: Optional[HashFunc] = None,
     backend: Optional[Backend] = None,
     mongetter: Optional[Mongetter] = None,
-    stale_after: Optional[datetime.timedelta] = None,
+    sql_engine: Optional[Union[str, Any, Callable[[], Any]]] = None,
+    stale_after: Optional[timedelta] = None,
     next_time: Optional[bool] = None,
     cache_dir: Optional[Union[str, os.PathLike]] = None,
     pickle_reload: Optional[bool] = None,
@@ -134,13 +136,16 @@ def cachier(
     hash_params : callable, optional
     backend : str, optional
         The name of the backend to use. Valid options currently include
-        'pickle', 'mongo' and 'memory'. If not provided, defaults to
+        'pickle', 'mongo', 'memory', and 'sql'. If not provided, defaults to
         'pickle' unless the 'mongetter' argument is passed, in which
         case the mongo backend is automatically selected.
     mongetter : callable, optional
         A callable that takes no arguments and returns a pymongo.Collection
         object with writing permissions. If unset a local pickle cache is used
         instead.
+    sql_engine : str, Engine, or callable, optional
+        SQLAlchemy connection string, Engine, or callable returning an Engine.
+        Used for the SQL backend.
     stale_after : datetime.timedelta, optional
         The time delta after which a cached result is considered stale. Calls
         made after the result goes stale will trigger a recalculation of the
@@ -208,6 +213,12 @@ def cachier(
         core = _MemoryCore(
             hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout
         )
+    elif backend == "sql":
+        core = _SQLCore(
+            hash_func=hash_func,
+            sql_engine=sql_engine,
+            wait_for_calc_timeout=wait_for_calc_timeout,
+        )
     else:
         raise ValueError("specified an invalid core: %s" % backend)

@@ -259,7 +270,7 @@ def cachier(
             _print("Entry found.")
             if _allow_none or entry.value is not None:
                 _print("Cached result found.")
-                now = datetime.datetime.now()
+                now = datetime.now()
                 if now - entry.time <= _stale_after:
                     _print("And it is fresh!")
                     return entry.value
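The new ``elif backend == "sql"`` branch is reachable directly from the decorator; a minimal sketch (the database file name is illustrative):

    from cachier import cachier

    # backend="sql" routes to the new _SQLCore; sql_engine accepts a
    # connection string, an Engine, or a zero-argument callable returning one.
    @cachier(backend="sql", sql_engine="sqlite:///cachier_cache.db")
    def heavy(x: int) -> int:
        return x**2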
cachier/cores/base.py
CHANGED
@@ -1,4 +1,5 @@
 """Defines the interface of a cachier caching core."""
+
 # This file is part of Cachier.
 # https://github.com/python-cachier/cachier

@@ -28,7 +29,11 @@ def _get_func_str(func: Callable) -> str:
 class _BaseCore:
     __metaclass__ = abc.ABCMeta

-    def __init__(self, hash_func: HashFunc, wait_for_calc_timeout: int):
+    def __init__(
+        self,
+        hash_func: Optional[HashFunc],
+        wait_for_calc_timeout: Optional[int],
+    ):
         self.hash_func = _update_with_defaults(hash_func, "hash_func")
         self.wait_for_calc_timeout = wait_for_calc_timeout
         self.lock = threading.RLock()
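Both constructor arguments now accept ``None`` and fall back to the global defaults via ``_update_with_defaults``; ``hash_func`` itself stays pluggable. A hedged sketch of a custom hasher, assuming the ``(args, kwds) -> str`` contract of ``HashFunc`` and the decorator's ``hash_func`` keyword (only its ``hash_params`` predecessor is visible in the signature above):

    import hashlib
    import pickle

    from cachier import cachier

    # Called with the positional-args tuple and the kwargs dict; must return
    # a string key (assumed HashFunc contract).
    def stable_hash(args, kwds) -> str:
        return hashlib.sha256(
            pickle.dumps((args, sorted(kwds.items())))
        ).hexdigest()

    @cachier(hash_func=stable_hash)
    def lookup(record_id: str) -> dict:
        return {"id": record_id}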
cachier/cores/memory.py
CHANGED
@@ -2,7 +2,7 @@

 import threading
 from datetime import datetime
-from typing import Any, Optional, Tuple
+from typing import Any, Dict, Optional, Tuple

 from .._types import HashFunc
 from ..config import CacheEntry
@@ -12,9 +12,13 @@ from .base import _BaseCore, _get_func_str
 class _MemoryCore(_BaseCore):
     """The memory core class for cachier."""

-    def __init__(self, hash_func: HashFunc, wait_for_calc_timeout: int):
+    def __init__(
+        self,
+        hash_func: Optional[HashFunc],
+        wait_for_calc_timeout: Optional[int],
+    ):
         super().__init__(hash_func, wait_for_calc_timeout)
-        self.cache = {}
+        self.cache: Dict[str, CacheEntry] = {}

     def _hash_func_key(self, key: str) -> str:
         return f"{_get_func_str(self.func)}:{key}"
@@ -79,8 +83,12 @@ class _MemoryCore(_BaseCore):
         hash_key = self._hash_func_key(key)
         with self.lock:  # pragma: no cover
             entry = self.cache[hash_key]
+            if entry is None:
+                return None
             if not entry._processing:
                 return entry.value
+            if entry._condition is None:
+                raise RuntimeError("No condition set for entry")
             entry._condition.acquire()
             entry._condition.wait()
             entry._condition.release()
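For reference, the memory core is selected the same way as the other backends; a minimal sketch:

    from cachier import cachier

    # In-memory cache: per-process, with no size cap (see the README note in
    # the METADATA diff below recommending functools.lru_cache for most
    # stand-alone uses).
    @cachier(backend="memory")
    def fib(n: int) -> int:
        return n if n < 2 else fib(n - 1) + fib(n - 2)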
cachier/cores/mongo.py
CHANGED
@@ -37,9 +37,9 @@ class _MongoCore(_BaseCore):

     def __init__(
         self,
-        hash_func: HashFunc,
-        mongetter: Mongetter,
-        wait_for_calc_timeout: int,
+        hash_func: Optional[HashFunc],
+        mongetter: Optional[Mongetter],
+        wait_for_calc_timeout: Optional[int],
     ):
         if "pymongo" not in sys.modules:
             warnings.warn(
@@ -48,7 +48,9 @@ class _MongoCore(_BaseCore):
             stacklevel=2,
         )  # pragma: no cover

-        super().__init__(hash_func, wait_for_calc_timeout)
+        super().__init__(
+            hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout
+        )
         if mongetter is None:
             raise MissingMongetter(
                 "must specify ``mongetter`` when using the mongo core"
@@ -73,7 +75,9 @@ class _MongoCore(_BaseCore):
         )
         if not res:
             return key, None
-        val = pickle.loads(res["value"]) if "value" in res else None
+        val = None
+        if "value" in res:
+            val = pickle.loads(res["value"])  # noqa: S301
         entry = CacheEntry(
             value=val,
             time=res.get("time", None),
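The ``mongetter`` contract (a zero-argument callable returning a writable ``pymongo.Collection``, per the docstring in the core.py diff above) can be satisfied like this; a hedged sketch with an illustrative connection string and database/collection names:

    from pymongo import MongoClient

    from cachier import cachier

    def my_mongetter():
        # Illustrative URI and names; any writable collection works.
        client = MongoClient("mongodb://localhost:27017")
        return client["cachier_db"]["cachier_collection"]

    @cachier(mongetter=my_mongetter)  # the mongo backend is auto-selected
    def slow_query(q: str) -> list:
        return [q]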
cachier/cores/pickle.py
CHANGED
@@ -9,7 +9,7 @@
 import os
 import pickle  # for local caching
 from datetime import datetime
-from typing import Any, Dict, Optional, Tuple
+from typing import Any, Dict, Optional, Tuple, Union

 import portalocker  # to lock on pickle cache IO
 from watchdog.events import PatternMatchingEventHandler
@@ -54,7 +54,7 @@ class _PickleCore(_BaseCore):
                 self.observer.stop()
             # else:
             #     print('NOT stopping observer... :(')
-            except …
+            except AttributeError:  # catching entry being None
                 self.value = None
                 self.observer.stop()
@@ -68,14 +68,14 @@ class _PickleCore(_BaseCore):

     def __init__(
         self,
-        hash_func: HashFunc,
-        pickle_reload: bool,
-        cache_dir: str,
-        separate_files: bool,
-        wait_for_calc_timeout: int,
+        hash_func: Optional[HashFunc],
+        pickle_reload: Optional[bool],
+        cache_dir: Optional[Union[str, os.PathLike]],
+        separate_files: Optional[bool],
+        wait_for_calc_timeout: Optional[int],
     ):
         super().__init__(hash_func, wait_for_calc_timeout)
-        self.cache = …
+        self._cache_dict: Dict[str, CacheEntry] = {}
         self.reload = _update_with_defaults(pickle_reload, "pickle_reload")
         self.cache_dir = os.path.expanduser(
             _update_with_defaults(cache_dir, "cache_dir")
@@ -83,6 +83,7 @@ class _PickleCore(_BaseCore):
         self.separate_files = _update_with_defaults(
             separate_files, "separate_files"
         )
+        self._cache_used_fpath = ""

     @property
     def cache_fname(self) -> str:
@@ -96,28 +97,52 @@ class _PickleCore(_BaseCore):
             os.path.join(os.path.realpath(self.cache_dir), self.cache_fname)
         )

-    …
-    …
-    …
-    …
-    …
-    …
-    …
+    @staticmethod
+    def _convert_legacy_cache_entry(
+        entry: Union[dict, CacheEntry],
+    ) -> CacheEntry:
+        if isinstance(entry, CacheEntry):
+            return entry
+        return CacheEntry(
+            value=entry["value"],
+            time=entry["time"],
+            stale=entry["stale"],
+            _processing=entry["being_calculated"],
+            _condition=entry.get("condition", None),
+        )

-    def _get_cache(…
+    def _load_cache_dict(self) -> Dict[str, CacheEntry]:
+        try:
+            with portalocker.Lock(self.cache_fpath, mode="rb") as cf:
+                cache = pickle.load(cf)  # noqa: S301
+                self._cache_used_fpath = str(self.cache_fpath)
+        except (FileNotFoundError, EOFError):
+            cache = {}
+        return {
+            k: _PickleCore._convert_legacy_cache_entry(v)
+            for k, v in cache.items()
+        }
+
+    def get_cache_dict(self, reload: bool = False) -> Dict[str, CacheEntry]:
+        if self._cache_used_fpath != self.cache_fpath:
+            # force reload if the cache file has changed
+            # this change is due to using a different wrapped function
+            reload = True
+        if self._cache_dict and not (self.reload or reload):
+            return self._cache_dict
         with self.lock:
-            …
-            …
-            return self.cache
+            self._cache_dict = self._load_cache_dict()
+        return self._cache_dict

     def _load_cache_by_key(
         self, key=None, hash_str=None
-    ) -> Optional[…
+    ) -> Optional[CacheEntry]:
         fpath = self.cache_fpath
         fpath += f"_{hash_str or key}"
         try:
             with portalocker.Lock(fpath, mode="rb") as cache_file:
-                …
+                entry = pickle.load(cache_file)  # noqa: S301
+                return _PickleCore._convert_legacy_cache_entry(entry)
         except (FileNotFoundError, EOFError):
             return None
@@ -131,35 +156,42 @@ class _PickleCore(_BaseCore):
         path, name = os.path.split(self.cache_fpath)
         for subpath in os.listdir(path):
             if subpath.startswith(name):
-                entry = self.…
+                entry = self._load_cache_by_key(
+                    hash_str=subpath.split("_")[-1]
+                )
                 if entry is not None:
-                    entry.…
+                    entry._processing = False
                     self._save_cache(entry, hash_str=subpath.split("_")[-1])

     def _save_cache(
-        self, …
+        self,
+        cache: Union[Dict[str, CacheEntry], CacheEntry],
+        separate_file_key: Optional[str] = None,
+        hash_str: Optional[str] = None,
     ) -> None:
+        if separate_file_key and not isinstance(cache, CacheEntry):
+            raise ValueError(
+                "`separate_file_key` should only be used with a CacheEntry"
+            )
         fpath = self.cache_fpath
-        if …
-            fpath += f"_{…
+        if separate_file_key is not None:
+            fpath += f"_{separate_file_key}"
         elif hash_str is not None:
             fpath += f"_{hash_str}"
         with self.lock:
-            …
-            …
-            …
-            if …
-                self.…
+            with portalocker.Lock(fpath, mode="wb") as cf:
+                pickle.dump(cache, cf, protocol=4)
+            # the same as check for separate_file, but changed for typing
+            if isinstance(cache, dict):
+                self._cache_dict = cache
+                self._cache_used_fpath = str(self.cache_fpath)

     def get_entry_by_key(
         self, key: str, reload: bool = False
-    ) -> Tuple[str, CacheEntry]:
-        …
-        …
-        …
-        if self.reload or reload:
-            self._reload_cache()
-        return key, self._get_cache().get(key, None)
+    ) -> Tuple[str, Optional[CacheEntry]]:
+        if self.separate_files:
+            return key, self._load_cache_by_key(key)
+        return key, self.get_cache_dict(reload).get(key)

     def set_entry(self, key: str, func_res: Any) -> None:
         key_data = CacheEntry(
@@ -174,7 +206,7 @@ class _PickleCore(_BaseCore):
             return  # pragma: no cover

         with self.lock:
-            cache = self._get_cache()
+            cache = self.get_cache_dict()
             cache[key] = key_data
             self._save_cache(cache)

@@ -186,13 +218,15 @@ class _PickleCore(_BaseCore):
                 stale=False,
                 _processing=True,
             ),
-            …
+            separate_file_key=key,
         )

-    def …
+    def _mark_entry_not_calculated_separate_files(self, key: str) -> None:
         _, entry = self.get_entry_by_key(key)
+        if entry is None:
+            return  # that's ok, we don't need an entry in that case
         entry._processing = False
-        self._save_cache(entry, …
+        self._save_cache(entry, separate_file_key=key)

     def mark_entry_being_calculated(self, key: str) -> None:
         if self.separate_files:
@@ -200,7 +234,7 @@ class _PickleCore(_BaseCore):
             return  # pragma: no cover

         with self.lock:
-            cache = self._get_cache()
+            cache = self.get_cache_dict()
             if key in cache:
                 cache[key]._processing = True
             else:
@@ -214,9 +248,9 @@ class _PickleCore(_BaseCore):

     def mark_entry_not_calculated(self, key: str) -> None:
         if self.separate_files:
-            self.…
+            self._mark_entry_not_calculated_separate_files(key)
         with self.lock:
-            cache = self._get_cache()
+            cache = self.get_cache_dict()
             # that's ok, we don't need an entry in that case
             if isinstance(cache, dict) and key in cache:
                 cache[key]._processing = False
@@ -224,14 +258,13 @@ class _PickleCore(_BaseCore):

     def wait_on_entry_calc(self, key: str) -> Any:
         if self.separate_files:
-            entry = self.…
+            entry = self._load_cache_by_key(key)
             filename = f"{self.cache_fname}_{key}"
         else:
             with self.lock:
-                self._reload_cache()
-                entry = self._get_cache()[key]
+                entry = self.get_cache_dict()[key]
             filename = self.cache_fname
-        if not entry._processing:
+        if entry and not entry._processing:
             return entry.value
         event_handler = _PickleCore.CacheChangeHandler(
             filename=filename, core=self, key=key
@@ -259,7 +292,7 @@ class _PickleCore(_BaseCore):
             return  # pragma: no cover

         with self.lock:
-            cache = self._get_cache()
+            cache = self.get_cache_dict()
             for key in cache:
                 cache[key]._processing = False
             self._save_cache(cache)
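The pickle core's constructor parameters map directly to decorator arguments; a minimal sketch (the directory path is illustrative):

    from cachier import cachier

    # separate_files=True stores each entry as its own pickle file, which the
    # per-key loading above (_load_cache_by_key) reads back.
    @cachier(cache_dir="~/.my_app_cache", separate_files=True)
    def render(page: str) -> str:
        return page.upper()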
cachier/cores/sql.py
ADDED
@@ -0,0 +1,288 @@
+"""A SQLAlchemy-based caching core for cachier."""
+
+import pickle
+import threading
+from datetime import datetime
+from typing import Any, Callable, Optional, Tuple, Union
+
+try:
+    from sqlalchemy import (
+        Boolean,
+        Column,
+        DateTime,
+        Index,
+        LargeBinary,
+        String,
+        and_,
+        create_engine,
+        delete,
+        insert,
+        select,
+        update,
+    )
+    from sqlalchemy.engine import Engine
+    from sqlalchemy.orm import declarative_base, sessionmaker
+
+    SQLALCHEMY_AVAILABLE = True
+except ImportError:
+    SQLALCHEMY_AVAILABLE = False
+
+from .._types import HashFunc
+from ..config import CacheEntry
+from .base import RecalculationNeeded, _BaseCore, _get_func_str
+
+if SQLALCHEMY_AVAILABLE:
+    Base = declarative_base()
+
+    class CacheTable(Base):  # type: ignore[misc, valid-type]
+        """SQLAlchemy model for cachier cache entries."""
+
+        __tablename__ = "cachier_cache"
+        id = Column(String, primary_key=True)
+        function_id = Column(String, index=True, nullable=False)
+        key = Column(String, index=True, nullable=False)
+        value = Column(LargeBinary, nullable=True)
+        timestamp = Column(DateTime, nullable=False)
+        stale = Column(Boolean, default=False)
+        processing = Column(Boolean, default=False)
+        completed = Column(Boolean, default=False)
+        __table_args__ = (
+            Index("ix_func_key", "function_id", "key", unique=True),
+        )
+
+
+class _SQLCore(_BaseCore):
+    """SQLAlchemy-based core for Cachier, supporting SQL-based backends.
+
+    This should work with SQLite, PostgreSQL and so on.
+
+    """
+
+    def __init__(
+        self,
+        hash_func: Optional[HashFunc],
+        sql_engine: Optional[Union[str, "Engine", Callable[[], "Engine"]]],
+        wait_for_calc_timeout: Optional[int] = None,
+    ):
+        if not SQLALCHEMY_AVAILABLE:
+            raise ImportError(
+                "SQLAlchemy is required for the SQL core. "
+                "Install with `pip install SQLAlchemy`."
+            )
+        super().__init__(
+            hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout
+        )
+        self._engine = self._resolve_engine(sql_engine)
+        self._Session = sessionmaker(bind=self._engine)
+        Base.metadata.create_all(self._engine)
+        self._lock = threading.RLock()
+        self._func_str = None
+
+    def _resolve_engine(self, sql_engine):
+        if isinstance(sql_engine, Engine):
+            return sql_engine
+        if isinstance(sql_engine, str):
+            return create_engine(sql_engine, future=True)
+        if callable(sql_engine):
+            return sql_engine()
+        raise ValueError(
+            "sql_engine must be a SQLAlchemy Engine, connection string, "
+            "or callable returning an Engine."
+        )
+
+    def set_func(self, func):
+        super().set_func(func)
+        self._func_str = _get_func_str(func)
+
+    def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]:
+        with self._lock, self._Session() as session:
+            row = session.execute(
+                select(CacheTable).where(
+                    and_(
+                        CacheTable.function_id == self._func_str,
+                        CacheTable.key == key,
+                    )
+                )
+            ).scalar_one_or_none()
+            if not row:
+                return key, None
+            value = pickle.loads(row.value) if row.value is not None else None
+            entry = CacheEntry(
+                value=value,
+                time=row.timestamp,
+                stale=row.stale,
+                _processing=row.processing,
+                _completed=row.completed,
+            )
+            return key, entry
+
+    def set_entry(self, key: str, func_res: Any) -> None:
+        with self._lock, self._Session() as session:
+            thebytes = pickle.dumps(func_res)
+            now = datetime.now()
+            base_insert = insert(CacheTable)
+            stmt = (
+                base_insert.values(
+                    id=f"{self._func_str}:{key}",
+                    function_id=self._func_str,
+                    key=key,
+                    value=thebytes,
+                    timestamp=now,
+                    stale=False,
+                    processing=False,
+                    completed=True,
+                ).on_conflict_do_update(
+                    index_elements=[CacheTable.function_id, CacheTable.key],
+                    set_={
+                        "value": thebytes,
+                        "timestamp": now,
+                        "stale": False,
+                        "processing": False,
+                        "completed": True,
+                    },
+                )
+                if hasattr(base_insert, "on_conflict_do_update")
+                else None
+            )
+            # Fallback for non-SQLite/Postgres: try update, else insert
+            if stmt:
+                session.execute(stmt)
+            else:
+                row = session.execute(
+                    select(CacheTable).where(
+                        and_(
+                            CacheTable.function_id == self._func_str,
+                            CacheTable.key == key,
+                        )
+                    )
+                ).scalar_one_or_none()
+                if row:
+                    session.execute(
+                        update(CacheTable)
+                        .where(
+                            and_(
+                                CacheTable.function_id == self._func_str,
+                                CacheTable.key == key,
+                            )
+                        )
+                        .values(
+                            value=thebytes,
+                            timestamp=now,
+                            stale=False,
+                            processing=False,
+                            completed=True,
+                        )
+                    )
+                else:
+                    session.add(
+                        CacheTable(
+                            id=f"{self._func_str}:{key}",
+                            function_id=self._func_str,
+                            key=key,
+                            value=thebytes,
+                            timestamp=now,
+                            stale=False,
+                            processing=False,
+                            completed=True,
+                        )
+                    )
+            session.commit()
+
+    def mark_entry_being_calculated(self, key: str) -> None:
+        with self._lock, self._Session() as session:
+            row = session.execute(
+                select(CacheTable).where(
+                    and_(
+                        CacheTable.function_id == self._func_str,
+                        CacheTable.key == key,
+                    )
+                )
+            ).scalar_one_or_none()
+            if row:
+                session.execute(
+                    update(CacheTable)
+                    .where(
+                        and_(
+                            CacheTable.function_id == self._func_str,
+                            CacheTable.key == key,
+                        )
+                    )
+                    .values(processing=True)
+                )
+            else:
+                session.add(
+                    CacheTable(
+                        id=f"{self._func_str}:{key}",
+                        function_id=self._func_str,
+                        key=key,
+                        value=None,
+                        timestamp=datetime.now(),
+                        stale=False,
+                        processing=True,
+                        completed=False,
+                    )
+                )
+            session.commit()
+
+    def mark_entry_not_calculated(self, key: str) -> None:
+        with self._lock, self._Session() as session:
+            session.execute(
+                update(CacheTable)
+                .where(
+                    and_(
+                        CacheTable.function_id == self._func_str,
+                        CacheTable.key == key,
+                    )
+                )
+                .values(processing=False)
+            )
+            session.commit()
+
+    def wait_on_entry_calc(self, key: str) -> Any:
+        import time
+
+        time_spent = 0
+        while True:
+            with self._lock, self._Session() as session:
+                row = session.execute(
+                    select(CacheTable).where(
+                        and_(
+                            CacheTable.function_id == self._func_str,
+                            CacheTable.key == key,
+                        )
+                    )
+                ).scalar_one_or_none()
+                if not row:
+                    raise RecalculationNeeded()
+                if not row.processing:
+                    return (
+                        pickle.loads(row.value)
+                        if row.value is not None
+                        else None
+                    )
+            time.sleep(1)
+            time_spent += 1
+            self.check_calc_timeout(time_spent)
+
+    def clear_cache(self) -> None:
+        with self._lock, self._Session() as session:
+            session.execute(
+                delete(CacheTable).where(
+                    CacheTable.function_id == self._func_str
+                )
+            )
+            session.commit()
+
+    def clear_being_calculated(self) -> None:
+        with self._lock, self._Session() as session:
+            session.execute(
+                update(CacheTable)
+                .where(
+                    and_(
+                        CacheTable.function_id == self._func_str,
+                        CacheTable.processing,
+                    )
+                )
+                .values(processing=False)
+            )
+            session.commit()
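Since ``_resolve_engine`` accepts an Engine-returning callable, the engine can also be constructed lazily; a minimal sketch (the database file name is illustrative):

    from sqlalchemy import create_engine

    from cachier import cachier

    def make_engine():
        # Invoked once by _SQLCore._resolve_engine; future=True matches the
        # style the core uses for string URLs above.
        return create_engine("sqlite:///cachier_cache.db", future=True)

    @cachier(backend="sql", sql_engine=make_engine)
    def fetch(url: str) -> str:
        return url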
cachier/version.info
CHANGED
@@ -1 +1 @@
-3.1.1
+3.2.1
{cachier-3.1.1.dist-info → cachier-3.2.1.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
-Metadata-Version: 2.1
+Metadata-Version: 2.4
 Name: cachier
-Version: 3.1.1
+Version: 3.2.1
 Summary: Persistent, stale-free, local and cross-machine caching for Python functions.
 Author-email: Shay Palachy Affek <shay.palachy@gmail.com>
 License: MIT License
@@ -32,11 +32,11 @@ Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3 :: Only
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Other/Nonlisted Topic
 Classifier: Topic :: Software Development :: Libraries
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
@@ -45,6 +45,7 @@ Description-Content-Type: text/x-rst
 License-File: LICENSE
 Requires-Dist: portalocker>=2.3.2
 Requires-Dist: watchdog>=2.3.1
+Dynamic: license-file

 Cachier
 #######
@@ -92,7 +93,7 @@ Features
 ========

 * Pure Python.
-* Compatible with Python 3.8+ (Python 2.7 was discontinued in version 1.2.8).
+* Compatible with Python 3.9+ (Python 2.7 was discontinued in version 1.2.8).
 * Supported and `tested on Linux, OS X and Windows <https://travis-ci.org/shaypal5/cachier>`_.
 * A simple interface.
 * Defining "shelf life" for cached values.
@@ -317,7 +318,7 @@ Cachier Cores
 Pickle Core
 -----------

-The default core for Cachier is pickle based, meaning each function will store its cache …
+The default core for Cachier is pickle based, meaning each function will store its cache in a separate pickle file in the ``~/.cachier`` directory. Naturally, this kind of cache is both machine-specific and user-specific.

 You can configure ``cachier`` to use another directory by providing the ``cache_dir`` parameter with the path to that directory:

@@ -390,11 +391,69 @@ You can set an in-memory cache by assigning the ``backend`` parameter with ``'memory'``:

 Note, however, that ``cachier``'s in-memory core is simple, and has no monitoring or cap on cache size, and can thus lead to memory errors on large return values - it is mainly intended to be used with future multi-core functionality. As a rule, Python's built-in ``lru_cache`` is a much better stand-alone solution.

+SQLAlchemy (SQL) Core
+---------------------
+
+**Note:** The SQL core requires SQLAlchemy to be installed. It is not installed by default with cachier. To use the SQL backend, run::
+
+    pip install SQLAlchemy
+
+Cachier supports a generic SQL backend via SQLAlchemy, allowing you to use SQLite, PostgreSQL, MySQL, and other databases.
+
+**Usage Example (SQLite in-memory):**
+
+.. code-block:: python
+
+    from cachier import cachier
+
+    @cachier(backend="sql", sql_engine="sqlite:///:memory:")
+    def my_func(x):
+        return x * 2
+
+**Usage Example (PostgreSQL):**
+
+.. code-block:: python
+
+    @cachier(backend="sql", sql_engine="postgresql://user:pass@localhost/dbname")
+    def my_func(x):
+        return x * 2
+
+**Usage Example (MySQL):**
+
+.. code-block:: python
+
+    @cachier(backend="sql", sql_engine="mysql+pymysql://user:pass@localhost/dbname")
+    def my_func(x):
+        return x * 2
+
+**Configuration Options:**
+
+- ``sql_engine``: SQLAlchemy connection string, Engine, or callable returning an Engine.
+- All other standard cachier options are supported.
+
+**Table Schema:**
+
+- ``function_id``: Unique identifier for the cached function
+- ``key``: Cache key
+- ``value``: Pickled result
+- ``timestamp``: Datetime of cache entry
+- ``stale``: Boolean, is value stale
+- ``processing``: Boolean, is value being calculated
+- ``completed``: Boolean, is value calculation completed
+
+**Limitations & Notes:**
+
+- Requires SQLAlchemy (install with ``pip install SQLAlchemy``)
+- For production, use a persistent database (not ``:memory:``)
+- Thread/process safety is handled via transactions and row-level locks
+- Value serialization uses ``pickle``. **Warning:** ``pickle`` can execute arbitrary code during deserialization if the cache database is compromised. Ensure the cache is stored securely and consider using safer serialization methods like ``json`` if security is a concern.
+- For best performance, ensure your DB supports row-level locking
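For quick verification, the table described above can be queried directly; a minimal sketch (assuming the SQL backend wrote to an illustrative ``sqlite:///cachier.db``):

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite:///cachier.db")
    with engine.connect() as conn:
        rows = conn.execute(
            text("SELECT function_id, key, timestamp, completed FROM cachier_cache")
        )
        for row in rows:  # one row per cached call
            print(row)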

 Contributing
 ============

-Current maintainers are Shay Palachy Affek (`shay.palachy@gmail.com <mailto:shay.palachy@gmail.com>`_, `@shaypal5 <https://github.com/shaypal5>`_) and …
+Current maintainers are Shay Palachy Affek (`shay.palachy@gmail.com <mailto:shay.palachy@gmail.com>`_, `@shaypal5 <https://github.com/shaypal5>`_) and `Jirka Borovec <https://github.com/Borda>`_ (`@Borda <https://github.com/Borda>`_ on GitHub); You are more than welcome to approach them for help. Contributions are very welcomed! :)

 Installing for development
 --------------------------
cachier-3.2.1.dist-info/RECORD
ADDED
@@ -0,0 +1,20 @@
+cachier/__init__.py,sha256=1isxXaP2l6Vq7gC1Gob6hduRLC07dHfWze1-oCjwSP0,415
+cachier/__main__.py,sha256=upg-TlHs1vngKYvkjoPpl3Pvl6xOx4ut-M1mElMiAo0,443
+cachier/_types.py,sha256=EGJMiw-oCIC_cDLyzw7YC40lfo8jnD3zMmoJpA9Y8Iw,238
+cachier/_version.py,sha256=jnPPRn_qmjNi-qmQjlHnzNGf3LSBTYkMmJdGjxMTOBM,1089
+cachier/config.py,sha256=6hyQtn9T6UXu2UQhKJltWT0Nu4OBS4ION1x7Lt1i8Og,3838
+cachier/core.py,sha256=7pqf_EGvGXu5WWtC5MeY0tVW4M59XljZI9_2R4RVfRU,13627
+cachier/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cachier/version.info,sha256=OCJyh4MEn8LyKlbR7nTWI1LEn7-BxrVLUIMNb2HDryI,6
+cachier/cores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cachier/cores/base.py,sha256=s7qgmDJA4LGub6ydGfMk9vVJW4fgeU0EXl-9gmpuh28,3683
+cachier/cores/memory.py,sha256=fsvqq9rwwmAaMBvYo-oUNAxB6UOyfBpuf8ACW_XTaU0,3572
+cachier/cores/mongo.py,sha256=pCBrxLsmGr68Q50JVD_CUPAYwhaLDrJUQs_6A-_GYLA,4993
+cachier/cores/pickle.py,sha256=FgfvZWAFdWQPOo3G-L57iEV2ujEkIDH8TyGzbarsZeE,10678
+cachier/cores/sql.py,sha256=nuf2-Szo7VTPRa7IC3JGWEtGsBtdkIrx0bhOm3U0mfE,9895
+cachier-3.2.1.dist-info/licenses/LICENSE,sha256=-2WrMJkIa0gVP6YQHXXDT7ws-S3M2NEVEF4XF3K8qrY,1069
+cachier-3.2.1.dist-info/METADATA,sha256=xowxZuYhc4r5f4pKxrhROtHGT7FYObRJ4ULWFDXwA-Q,22101
+cachier-3.2.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+cachier-3.2.1.dist-info/entry_points.txt,sha256=x4Y7t6Y0Qev_3fgG-Jv7TrsvVdJty3FnGAdkT8-_5mY,49
+cachier-3.2.1.dist-info/top_level.txt,sha256=_rW_HiJumDCch67YT-WAgzcyvKg5RiYDMZq9d-0ZpaE,8
+cachier-3.2.1.dist-info/RECORD,,
cachier-3.1.1.dist-info/RECORD
DELETED
@@ -1,19 +0,0 @@
-cachier/__init__.py,sha256=GZeDebG0EgWIYBmRgPhO19dMiiaam8f9Pu7cWLv3ywY,400
-cachier/__main__.py,sha256=upg-TlHs1vngKYvkjoPpl3Pvl6xOx4ut-M1mElMiAo0,443
-cachier/_types.py,sha256=EGJMiw-oCIC_cDLyzw7YC40lfo8jnD3zMmoJpA9Y8Iw,238
-cachier/_version.py,sha256=yE6UYwvdoIRpw3HmNifiGwV3fqVea5PwZj_EvyosiZ8,1079
-cachier/config.py,sha256=KOGaXkBRgv66BexENrTMtrC_TYCeV1fA5v8l6Vj2CYI,3840
-cachier/core.py,sha256=qQa_GT8WQYD-VFcTS8a2v-Hys4_A1no-aM-d3lw1AFY,13149
-cachier/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cachier/version.info,sha256=bcOy3DE5t9Qhtvwmp_hTOJqbA16k8ukEe5UWBQ3WrHM,6
-cachier/cores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cachier/cores/base.py,sha256=jo69c2RMOXbTzDvqRV0UGa5UvyToipv3f62bICIII1k,3631
-cachier/cores/memory.py,sha256=SSa7qlSU_54YjNYEWrq9rxXozkMYXr5hadAZ3sz62l4,3336
-cachier/cores/mongo.py,sha256=eRG6XP55G4IcWnoMl5xtDufM1szf8FVbOIBbDH_r-Po,4887
-cachier/cores/pickle.py,sha256=20c5pg2CS6wAX1PdefCOjl-orec5w7tqEHVqNbZZv0s,9074
-cachier-3.1.1.dist-info/LICENSE,sha256=-2WrMJkIa0gVP6YQHXXDT7ws-S3M2NEVEF4XF3K8qrY,1069
-cachier-3.1.1.dist-info/METADATA,sha256=QQxy_bvI6YCcmhvOj0GqWR4Ni31hCEYnDpTJegprWj4,20102
-cachier-3.1.1.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
-cachier-3.1.1.dist-info/entry_points.txt,sha256=x4Y7t6Y0Qev_3fgG-Jv7TrsvVdJty3FnGAdkT8-_5mY,49
-cachier-3.1.1.dist-info/top_level.txt,sha256=_rW_HiJumDCch67YT-WAgzcyvKg5RiYDMZq9d-0ZpaE,8
-cachier-3.1.1.dist-info/RECORD,,
{cachier-3.1.1.dist-info → cachier-3.2.1.dist-info}/entry_points.txt
File without changes
{cachier-3.1.1.dist-info → cachier-3.2.1.dist-info/licenses}/LICENSE
File without changes
{cachier-3.1.1.dist-info → cachier-3.2.1.dist-info}/top_level.txt
File without changes