cachier 3.3.0__py3-none-any.whl → 3.4.0__py3-none-any.whl
This diff shows the changes between publicly available package versions as they appear in their respective public registries. It is provided for informational purposes only.
- cachier/_types.py +4 -2
- cachier/core.py +15 -3
- cachier/cores/mongo.py +1 -1
- cachier/cores/pickle.py +2 -2
- cachier/cores/redis.py +225 -0
- cachier/version.info +1 -1
- {cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/METADATA +52 -1
- cachier-3.4.0.dist-info/RECORD +21 -0
- cachier-3.3.0.dist-info/RECORD +0 -20
- {cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/WHEEL +0 -0
- {cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/entry_points.txt +0 -0
- {cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/licenses/LICENSE +0 -0
- {cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/top_level.txt +0 -0
cachier/_types.py
CHANGED
@@ -1,9 +1,11 @@
-from typing import TYPE_CHECKING, Callable, Literal
+from typing import TYPE_CHECKING, Callable, Literal, Union
 
 if TYPE_CHECKING:
     import pymongo.collection
+    import redis
 
 
 HashFunc = Callable[..., str]
 Mongetter = Callable[[], "pymongo.collection.Collection"]
-
+RedisClient = Union["redis.Redis", Callable[[], "redis.Redis"]]
+Backend = Literal["pickle", "mongo", "memory", "redis"]
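For illustration, a minimal sketch (not part of the package, assuming the redis package is installed) of what the new RedisClient alias expresses: either a ready redis.Redis instance or a zero-argument factory is acceptable. The resolve_client helper below is hypothetical and simply mirrors the resolution logic the new Redis core uses internally.

    from typing import Callable, Union

    import redis

    # Mirrors cachier._types.RedisClient: a client, or a factory producing one.
    RedisClient = Union[redis.Redis, Callable[[], redis.Redis]]


    def resolve_client(client: RedisClient) -> redis.Redis:
        """Return a concrete client, calling the factory if one was given."""
        return client() if callable(client) else client


    # Constructing a Redis object does not connect yet, so this runs offline.
    direct = redis.Redis(host="localhost", port=6379, db=0)


    def factory() -> redis.Redis:
        return redis.Redis(host="localhost", port=6379, db=0)


    assert isinstance(resolve_client(direct), redis.Redis)
    assert isinstance(resolve_client(factory), redis.Redis)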
cachier/core.py
CHANGED
@@ -17,6 +17,7 @@ from functools import wraps
 from typing import Any, Callable, Optional, Union
 from warnings import warn
 
+from ._types import RedisClient
 from .config import (
     Backend,
     HashFunc,
@@ -27,6 +28,7 @@ from .cores.base import RecalculationNeeded, _BaseCore
 from .cores.memory import _MemoryCore
 from .cores.mongo import _MongoCore
 from .cores.pickle import _PickleCore
+from .cores.redis import _RedisCore
 from .cores.sql import _SQLCore
 
 MAX_WORKERS_ENVAR_NAME = "CACHIER_MAX_WORKERS"
@@ -110,6 +112,7 @@ def cachier(
     backend: Optional[Backend] = None,
     mongetter: Optional[Mongetter] = None,
     sql_engine: Optional[Union[str, Any, Callable[[], Any]]] = None,
+    redis_client: Optional["RedisClient"] = None,
     stale_after: Optional[timedelta] = None,
     next_time: Optional[bool] = None,
     cache_dir: Optional[Union[str, os.PathLike]] = None,
@@ -137,9 +140,9 @@ def cachier(
     hash_params : callable, optional
     backend : str, optional
         The name of the backend to use. Valid options currently include
-        'pickle', 'mongo', 'memory', and '
-        'pickle' unless
-
+        'pickle', 'mongo', 'memory', 'sql', and 'redis'. If not provided,
+        defaults to 'pickle', unless a core-associated parameter is provided
+
     mongetter : callable, optional
         A callable that takes no arguments and returns a pymongo.Collection
         object with writing permissions. If unset a local pickle cache is used
@@ -147,6 +150,9 @@ def cachier(
     sql_engine : str, Engine, or callable, optional
         SQLAlchemy connection string, Engine, or callable returning an Engine.
         Used for the SQL backend.
+    redis_client : redis.Redis or callable, optional
+        Redis client instance or callable returning a Redis client.
+        Used for the Redis backend.
     stale_after : datetime.timedelta, optional
         The time delta after which a cached result is considered stale. Calls
         made after the result goes stale will trigger a recalculation of the
@@ -220,6 +226,12 @@ def cachier(
             sql_engine=sql_engine,
             wait_for_calc_timeout=wait_for_calc_timeout,
         )
+    elif backend == "redis":
+        core = _RedisCore(
+            hash_func=hash_func,
+            redis_client=redis_client,
+            wait_for_calc_timeout=wait_for_calc_timeout,
+        )
     else:
         raise ValueError("specified an invalid core: %s" % backend)
 
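Taken together with the documentation added further down in this wheel's METADATA, the new parameter is used as follows. A minimal sketch, assuming the redis package is installed and a Redis server is reachable on localhost:6379; slow_add is a placeholder function.

    import redis

    from cachier import cachier

    # Either a client instance or a zero-argument callable returning one is accepted.
    redis_client = redis.Redis(host="localhost", port=6379, db=0)


    @cachier(backend="redis", redis_client=redis_client)
    def slow_add(a, b):
        return a + b


    slow_add(1, 2)  # computed and stored in Redis
    slow_add(1, 2)  # served from the Redis cache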
cachier/cores/mongo.py
CHANGED
cachier/cores/pickle.py
CHANGED
@@ -114,7 +114,7 @@ class _PickleCore(_BaseCore):
     def _load_cache_dict(self) -> Dict[str, CacheEntry]:
         try:
             with portalocker.Lock(self.cache_fpath, mode="rb") as cf:
-                cache = pickle.load(cf)
+                cache = pickle.load(cf)
             self._cache_used_fpath = str(self.cache_fpath)
         except (FileNotFoundError, EOFError):
             cache = {}
@@ -141,7 +141,7 @@ class _PickleCore(_BaseCore):
         fpath += f"_{hash_str or key}"
         try:
             with portalocker.Lock(fpath, mode="rb") as cache_file:
-                entry = pickle.load(cache_file)
+                entry = pickle.load(cache_file)
             return _PickleCore._convert_legacy_cache_entry(entry)
         except (FileNotFoundError, EOFError):
             return None
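Both touched lines sit inside the pickle core's locked-read path: the cache file is opened under a portalocker file lock and unpickled while the lock is held. A minimal standalone sketch of that pattern (hypothetical file path, assuming portalocker is installed):

    import pickle

    import portalocker  # cross-process file locking, as used by the pickle core

    CACHE_FPATH = "/tmp/example_cachier_cache.pkl"  # hypothetical path


    def locked_read(fpath):
        """Read a pickled cache dict while holding the file lock."""
        try:
            with portalocker.Lock(fpath, mode="rb") as cache_file:
                return pickle.load(cache_file)
        except (FileNotFoundError, EOFError):
            # Missing or empty cache file: treat as an empty cache.
            return {}


    print(locked_read(CACHE_FPATH))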
cachier/cores/redis.py
ADDED
@@ -0,0 +1,225 @@
+"""A Redis-based caching core for cachier."""
+
+import pickle
+import time
+import warnings
+from datetime import datetime
+from typing import Any, Callable, Optional, Tuple, Union
+
+try:
+    import redis
+
+    REDIS_AVAILABLE = True
+except ImportError:
+    REDIS_AVAILABLE = False
+
+from .._types import HashFunc
+from ..config import CacheEntry
+from .base import RecalculationNeeded, _BaseCore, _get_func_str
+
+REDIS_SLEEP_DURATION_IN_SEC = 1
+
+
+class MissingRedisClient(ValueError):
+    """Thrown when the redis_client keyword argument is missing."""
+
+
+class _RedisCore(_BaseCore):
+    """Redis-based core for Cachier, supporting Redis backends."""
+
+    def __init__(
+        self,
+        hash_func: Optional[HashFunc],
+        redis_client: Optional[
+            Union["redis.Redis", Callable[[], "redis.Redis"]]
+        ],
+        wait_for_calc_timeout: Optional[int] = None,
+        key_prefix: str = "cachier",
+    ):
+        if not REDIS_AVAILABLE:
+            warnings.warn(
+                "`redis` was not found. Redis cores will not function. "
+                "Install with `pip install redis`.",
+                ImportWarning,
+                stacklevel=2,
+            )
+
+        super().__init__(
+            hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout
+        )
+        if redis_client is None:
+            raise MissingRedisClient(
+                "must specify ``redis_client`` when using the redis core"
+            )
+        self.redis_client = redis_client
+        self.key_prefix = key_prefix
+        self._func_str = None
+
+    def _resolve_redis_client(self):
+        """Resolve the Redis client from the provided parameter."""
+        if callable(self.redis_client):
+            return self.redis_client()
+        return self.redis_client
+
+    def _get_redis_key(self, key: str) -> str:
+        """Generate a Redis key for the given cache key."""
+        return f"{self.key_prefix}:{self._func_str}:{key}"
+
+    def set_func(self, func):
+        """Set the function this core will use."""
+        super().set_func(func)
+        self._func_str = _get_func_str(func)
+
+    def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]:
+        """Get entry based on given key from Redis."""
+        redis_client = self._resolve_redis_client()
+        redis_key = self._get_redis_key(key)
+
+        try:
+            # Get the cached data from Redis
+            cached_data = redis_client.hgetall(redis_key)
+            if not cached_data:
+                return key, None
+
+            # Deserialize the value
+            value = None
+            if cached_data.get(b"value"):
+                value = pickle.loads(cached_data[b"value"])
+
+            # Parse timestamp
+            timestamp_str = cached_data.get(b"timestamp", b"").decode("utf-8")
+            timestamp = (
+                datetime.fromisoformat(timestamp_str)
+                if timestamp_str
+                else datetime.now()
+            )
+
+            # Parse boolean fields
+            stale = (
+                cached_data.get(b"stale", b"false").decode("utf-8").lower()
+                == "true"
+            )
+            processing = (
+                cached_data.get(b"processing", b"false")
+                .decode("utf-8")
+                .lower()
+                == "true"
+            )
+            completed = (
+                cached_data.get(b"completed", b"false").decode("utf-8").lower()
+                == "true"
+            )
+
+            entry = CacheEntry(
+                value=value,
+                time=timestamp,
+                stale=stale,
+                _processing=processing,
+                _completed=completed,
+            )
+            return key, entry
+        except Exception as e:
+            warnings.warn(f"Redis get_entry_by_key failed: {e}", stacklevel=2)
+            return key, None
+
+    def set_entry(self, key: str, func_res: Any) -> None:
+        """Map the given result to the given key in Redis."""
+        redis_client = self._resolve_redis_client()
+        redis_key = self._get_redis_key(key)
+
+        try:
+            # Serialize the value
+            value_bytes = pickle.dumps(func_res)
+            now = datetime.now()
+
+            # Store in Redis using hash
+            redis_client.hset(
+                redis_key,
+                mapping={
+                    "value": value_bytes,
+                    "timestamp": now.isoformat(),
+                    "stale": "false",
+                    "processing": "false",
+                    "completed": "true",
+                },
+            )
+        except Exception as e:
+            warnings.warn(f"Redis set_entry failed: {e}", stacklevel=2)
+
+    def mark_entry_being_calculated(self, key: str) -> None:
+        """Mark the entry mapped by the given key as being calculated."""
+        redis_client = self._resolve_redis_client()
+        redis_key = self._get_redis_key(key)
+
+        try:
+            now = datetime.now()
+            redis_client.hset(
+                redis_key,
+                mapping={
+                    "timestamp": now.isoformat(),
+                    "stale": "false",
+                    "processing": "true",
+                    "completed": "false",
+                },
+            )
+        except Exception as e:
+            warnings.warn(
+                f"Redis mark_entry_being_calculated failed: {e}", stacklevel=2
+            )
+
+    def mark_entry_not_calculated(self, key: str) -> None:
+        """Mark the entry mapped by the given key as not being calculated."""
+        redis_client = self._resolve_redis_client()
+        redis_key = self._get_redis_key(key)
+
+        try:
+            redis_client.hset(redis_key, "processing", "false")
+        except Exception as e:
+            warnings.warn(
+                f"Redis mark_entry_not_calculated failed: {e}", stacklevel=2
+            )
+
+    def wait_on_entry_calc(self, key: str) -> Any:
+        """Wait on the entry with keys being calculated and returns result."""
+        time_spent = 0
+        while True:
+            time.sleep(REDIS_SLEEP_DURATION_IN_SEC)
+            time_spent += REDIS_SLEEP_DURATION_IN_SEC
+            key, entry = self.get_entry_by_key(key)
+            if entry is None:
+                raise RecalculationNeeded()
+            if not entry._processing:
+                return entry.value
+            self.check_calc_timeout(time_spent)
+
+    def clear_cache(self) -> None:
+        """Clear the cache of this core."""
+        redis_client = self._resolve_redis_client()
+        pattern = f"{self.key_prefix}:{self._func_str}:*"
+
+        try:
+            # Find all keys matching the pattern
+            keys = redis_client.keys(pattern)
+            if keys:
+                redis_client.delete(*keys)
+        except Exception as e:
+            warnings.warn(f"Redis clear_cache failed: {e}", stacklevel=2)
+
+    def clear_being_calculated(self) -> None:
+        """Mark all entries in this cache as not being calculated."""
+        redis_client = self._resolve_redis_client()
+        pattern = f"{self.key_prefix}:{self._func_str}:*"
+
+        try:
+            # Find all keys matching the pattern
+            keys = redis_client.keys(pattern)
+            if keys:
+                # Use pipeline for efficiency
+                pipe = redis_client.pipeline()
+                for key in keys:
+                    pipe.hset(key, "processing", "false")
+                pipe.execute()
+        except Exception as e:
+            warnings.warn(
+                f"Redis clear_being_calculated failed: {e}", stacklevel=2
+            )
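To make the storage layout concrete: the new core keeps one Redis hash per cached call, keyed "<key_prefix>:<function>:<argument hash>", with the fields value (pickled), timestamp, stale, processing, and completed. A minimal sketch (not part of the package) that caches a call and then inspects the raw hashes; it assumes the redis package is installed and a local Redis server on the default port, and that no other cachier-prefixed keys are present:

    import pickle

    import redis

    from cachier import cachier

    r = redis.Redis(host="localhost", port=6379, db=0)


    @cachier(backend="redis", redis_client=r)
    def square(x):
        return x * x


    square(3)  # first call computes the value and writes the hash
    square(3)  # second call is answered from Redis

    # Field names mirror CacheEntry; the value field holds a pickled object.
    for key in r.keys("cachier:*"):
        fields = r.hgetall(key)
        print(key, pickle.loads(fields[b"value"]), fields[b"completed"])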
cachier/version.info
CHANGED
@@ -1 +1 @@
-3.3.0
+3.4.0
{cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cachier
-Version: 3.3.0
+Version: 3.4.0
 Summary: Persistent, stale-free, local and cross-machine caching for Python functions.
 Author-email: Shay Palachy Affek <shay.palachy@gmail.com>
 License: MIT License
@@ -99,6 +99,7 @@ Features
 * Defining "shelf life" for cached values.
 * Local caching using pickle files.
 * Cross-machine caching using MongoDB.
+* Redis-based caching for high-performance scenarios.
 * Thread-safety.
 * **Per-call max age:** Specify a maximum age for cached values per call.
 
@@ -448,6 +449,56 @@ Cachier supports a generic SQL backend via SQLAlchemy, allowing you to use SQLit
     def my_func(x):
         return x * 2
 
+Redis Core
+----------
+
+**Note:** The Redis core requires the redis package to be installed. It is not installed by default with cachier. To use the Redis backend, run::
+
+    pip install redis
+
+Cachier supports Redis-based caching for high-performance scenarios. Redis provides fast in-memory storage with optional persistence.
+
+**Usage Example (Local Redis):**
+
+.. code-block:: python
+
+    import redis
+    from cachier import cachier
+
+    # Create Redis client
+    redis_client = redis.Redis(host='localhost', port=6379, db=0)
+
+    @cachier(backend="redis", redis_client=redis_client)
+    def my_func(x):
+        return x * 2
+
+**Usage Example (Redis with custom key prefix):**
+
+.. code-block:: python
+
+    import redis
+    from cachier import cachier
+
+    redis_client = redis.Redis(host='localhost', port=6379, db=0)
+
+    @cachier(backend="redis", redis_client=redis_client, key_prefix="myapp")
+    def my_func(x):
+        return x * 2
+
+**Usage Example (Redis with callable client):**
+
+.. code-block:: python
+
+    import redis
+    from cachier import cachier

+    def get_redis_client():
+        return redis.Redis(host='localhost', port=6379, db=0)
+
+    @cachier(backend="redis", redis_client=get_redis_client)
+    def my_func(x):
+        return x * 2
+
 **Configuration Options:**
 
 - ``sql_engine``: SQLAlchemy connection string, Engine, or callable returning an Engine.
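The examples above cover how the client is supplied; beyond that, the Redis backend composes with the decorator's existing arguments, such as stale_after, which this diff's core.py documents. A short sketch under the same assumptions (redis package installed, local Redis server on the default port):

    import datetime

    import redis

    from cachier import cachier

    redis_client = redis.Redis(host="localhost", port=6379, db=0)


    # Entries older than five minutes are treated as stale and recomputed,
    # exactly as with the other backends.
    @cachier(
        backend="redis",
        redis_client=redis_client,
        stale_after=datetime.timedelta(minutes=5),
    )
    def fetch_report(day):
        return {"day": day}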
cachier-3.4.0.dist-info/RECORD
ADDED
@@ -0,0 +1,21 @@
+cachier/__init__.py,sha256=1isxXaP2l6Vq7gC1Gob6hduRLC07dHfWze1-oCjwSP0,415
+cachier/__main__.py,sha256=upg-TlHs1vngKYvkjoPpl3Pvl6xOx4ut-M1mElMiAo0,443
+cachier/_types.py,sha256=u-1NWfZqcvwLa5qDmiTxIN4btM6NiWta212u8yPo02o,335
+cachier/_version.py,sha256=jnPPRn_qmjNi-qmQjlHnzNGf3LSBTYkMmJdGjxMTOBM,1089
+cachier/config.py,sha256=6hyQtn9T6UXu2UQhKJltWT0Nu4OBS4ION1x7Lt1i8Og,3838
+cachier/core.py,sha256=W1O_a9rkgsErG3spVPZpnfRu9k0MbV94rGkv79KDgBA,16243
+cachier/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cachier/version.info,sha256=9ePpPlj_wqIKPKU-gCW8UNfB3MvvSzTiOIqpu_hnhGY,6
+cachier/cores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cachier/cores/base.py,sha256=s7qgmDJA4LGub6ydGfMk9vVJW4fgeU0EXl-9gmpuh28,3683
+cachier/cores/memory.py,sha256=fsvqq9rwwmAaMBvYo-oUNAxB6UOyfBpuf8ACW_XTaU0,3572
+cachier/cores/mongo.py,sha256=wVit36jQeNkP1KyxMtm6jDglazpdORYzTOPJ0q10BWI,4979
+cachier/cores/pickle.py,sha256=FuEl2i_U4Q3k9JUs4ylJqM3n7BVY6cXEqKmdW56AEDg,10650
+cachier/cores/redis.py,sha256=rWrkEwWPzfFvH1eLkgBdhzxqPtJtx6N_sYcqRPy_P9Y,7553
+cachier/cores/sql.py,sha256=nuf2-Szo7VTPRa7IC3JGWEtGsBtdkIrx0bhOm3U0mfE,9895
+cachier-3.4.0.dist-info/licenses/LICENSE,sha256=-2WrMJkIa0gVP6YQHXXDT7ws-S3M2NEVEF4XF3K8qrY,1069
+cachier-3.4.0.dist-info/METADATA,sha256=V409D2EKlPVNAEf_uAzJDIJCQQJjzyoE_g1FnjaSnzs,24446
+cachier-3.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+cachier-3.4.0.dist-info/entry_points.txt,sha256=x4Y7t6Y0Qev_3fgG-Jv7TrsvVdJty3FnGAdkT8-_5mY,49
+cachier-3.4.0.dist-info/top_level.txt,sha256=_rW_HiJumDCch67YT-WAgzcyvKg5RiYDMZq9d-0ZpaE,8
+cachier-3.4.0.dist-info/RECORD,,
cachier-3.3.0.dist-info/RECORD
DELETED
@@ -1,20 +0,0 @@
-cachier/__init__.py,sha256=1isxXaP2l6Vq7gC1Gob6hduRLC07dHfWze1-oCjwSP0,415
-cachier/__main__.py,sha256=upg-TlHs1vngKYvkjoPpl3Pvl6xOx4ut-M1mElMiAo0,443
-cachier/_types.py,sha256=EGJMiw-oCIC_cDLyzw7YC40lfo8jnD3zMmoJpA9Y8Iw,238
-cachier/_version.py,sha256=jnPPRn_qmjNi-qmQjlHnzNGf3LSBTYkMmJdGjxMTOBM,1089
-cachier/config.py,sha256=6hyQtn9T6UXu2UQhKJltWT0Nu4OBS4ION1x7Lt1i8Og,3838
-cachier/core.py,sha256=PHLDA6Mabih-mi4y3CBsb8_vBIpvYQt547_Y57fo8uI,15825
-cachier/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cachier/version.info,sha256=Xu5sDFxpAfXlI_CkNLykEG4f9xNt60UfmCsLsaexNxY,6
-cachier/cores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cachier/cores/base.py,sha256=s7qgmDJA4LGub6ydGfMk9vVJW4fgeU0EXl-9gmpuh28,3683
-cachier/cores/memory.py,sha256=fsvqq9rwwmAaMBvYo-oUNAxB6UOyfBpuf8ACW_XTaU0,3572
-cachier/cores/mongo.py,sha256=pCBrxLsmGr68Q50JVD_CUPAYwhaLDrJUQs_6A-_GYLA,4993
-cachier/cores/pickle.py,sha256=FgfvZWAFdWQPOo3G-L57iEV2ujEkIDH8TyGzbarsZeE,10678
-cachier/cores/sql.py,sha256=nuf2-Szo7VTPRa7IC3JGWEtGsBtdkIrx0bhOm3U0mfE,9895
-cachier-3.3.0.dist-info/licenses/LICENSE,sha256=-2WrMJkIa0gVP6YQHXXDT7ws-S3M2NEVEF4XF3K8qrY,1069
-cachier-3.3.0.dist-info/METADATA,sha256=P8GoF2LSZKA5Tp8NunrJvS_f1XlytO4HsCmDsUy4oxQ,23136
-cachier-3.3.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-cachier-3.3.0.dist-info/entry_points.txt,sha256=x4Y7t6Y0Qev_3fgG-Jv7TrsvVdJty3FnGAdkT8-_5mY,49
-cachier-3.3.0.dist-info/top_level.txt,sha256=_rW_HiJumDCch67YT-WAgzcyvKg5RiYDMZq9d-0ZpaE,8
-cachier-3.3.0.dist-info/RECORD,,
{cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/WHEEL: file without changes
{cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/entry_points.txt: file without changes
{cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/licenses/LICENSE: file without changes
{cachier-3.3.0.dist-info → cachier-3.4.0.dist-info}/top_level.txt: file without changes