cachier 3.2.1__py3-none-any.whl → 3.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cachier/_types.py CHANGED
@@ -1,9 +1,11 @@
1
- from typing import TYPE_CHECKING, Callable, Literal
1
+ from typing import TYPE_CHECKING, Callable, Literal, Union
2
2
 
3
3
  if TYPE_CHECKING:
4
4
  import pymongo.collection
5
+ import redis
5
6
 
6
7
 
7
8
  HashFunc = Callable[..., str]
8
9
  Mongetter = Callable[[], "pymongo.collection.Collection"]
9
- Backend = Literal["pickle", "mongo", "memory"]
10
+ RedisClient = Union["redis.Redis", Callable[[], "redis.Redis"]]
11
+ Backend = Literal["pickle", "mongo", "memory", "redis"]
cachier/core.py CHANGED
@@ -17,6 +17,7 @@ from functools import wraps
17
17
  from typing import Any, Callable, Optional, Union
18
18
  from warnings import warn
19
19
 
20
+ from ._types import RedisClient
20
21
  from .config import (
21
22
  Backend,
22
23
  HashFunc,
@@ -27,10 +28,12 @@ from .cores.base import RecalculationNeeded, _BaseCore
27
28
  from .cores.memory import _MemoryCore
28
29
  from .cores.mongo import _MongoCore
29
30
  from .cores.pickle import _PickleCore
31
+ from .cores.redis import _RedisCore
30
32
  from .cores.sql import _SQLCore
31
33
 
32
34
  MAX_WORKERS_ENVAR_NAME = "CACHIER_MAX_WORKERS"
33
35
  DEFAULT_MAX_WORKERS = 8
36
+ ZERO_TIMEDELTA = timedelta(seconds=0)
34
37
 
35
38
 
36
39
  def _max_workers():
@@ -109,6 +112,7 @@ def cachier(
109
112
  backend: Optional[Backend] = None,
110
113
  mongetter: Optional[Mongetter] = None,
111
114
  sql_engine: Optional[Union[str, Any, Callable[[], Any]]] = None,
115
+ redis_client: Optional["RedisClient"] = None,
112
116
  stale_after: Optional[timedelta] = None,
113
117
  next_time: Optional[bool] = None,
114
118
  cache_dir: Optional[Union[str, os.PathLike]] = None,
@@ -136,9 +140,9 @@ def cachier(
136
140
  hash_params : callable, optional
137
141
  backend : str, optional
138
142
  The name of the backend to use. Valid options currently include
139
- 'pickle', 'mongo', 'memory', and 'sql'. If not provided, defaults to
140
- 'pickle' unless the 'mongetter' argument is passed, in which
141
- case the mongo backend is automatically selected.
143
+ 'pickle', 'mongo', 'memory', 'sql', and 'redis'. If not provided,
144
+ defaults to 'pickle', unless a core-associated parameter is provided,
145
+ in which case the corresponding backend is automatically selected.
142
146
  mongetter : callable, optional
143
147
  A callable that takes no arguments and returns a pymongo.Collection
144
148
  object with writing permissions. If unset a local pickle cache is used
@@ -146,6 +150,9 @@ def cachier(
146
150
  sql_engine : str, Engine, or callable, optional
147
151
  SQLAlchemy connection string, Engine, or callable returning an Engine.
148
152
  Used for the SQL backend.
153
+ redis_client : redis.Redis or callable, optional
154
+ Redis client instance or callable returning a Redis client.
155
+ Used for the Redis backend.
149
156
  stale_after : datetime.timedelta, optional
150
157
  The time delta after which a cached result is considered stale. Calls
151
158
  made after the result goes stale will trigger a recalculation of the
@@ -219,14 +226,43 @@ def cachier(
219
226
  sql_engine=sql_engine,
220
227
  wait_for_calc_timeout=wait_for_calc_timeout,
221
228
  )
229
+ elif backend == "redis":
230
+ core = _RedisCore(
231
+ hash_func=hash_func,
232
+ redis_client=redis_client,
233
+ wait_for_calc_timeout=wait_for_calc_timeout,
234
+ )
222
235
  else:
223
236
  raise ValueError("specified an invalid core: %s" % backend)
224
237
 
225
238
  def _cachier_decorator(func):
226
239
  core.set_func(func)
227
240
 
228
- @wraps(func)
229
- def func_wrapper(*args, **kwds):
241
+ # ---
242
+ # MAINTAINER NOTE: max_age parameter
243
+ #
244
+ # The _call function below supports a per-call 'max_age' parameter,
245
+ # allowing users to specify a maximum allowed age for a cached value.
246
+ # If the cached value is older than 'max_age',
247
+ # a recalculation is triggered. This is in addition to the
248
+ # per-decorator 'stale_after' parameter.
249
+ #
250
+ # The effective staleness threshold is the minimum of 'stale_after'
251
+ # and 'max_age' (if provided).
252
+ # This ensures that the strictest max age requirement is enforced.
253
+ #
254
+ # The main function wrapper is a standard function that passes
255
+ # *args and **kwargs to _call. By default, max_age is None,
256
+ # so only 'stale_after' is considered unless overridden.
257
+ #
258
+ # The user-facing API exposes:
259
+ # - Per-call: myfunc(..., max_age=timedelta(...))
260
+ #
261
+ # This design allows both one-off (per-call) and default
262
+ # (per-decorator) max age constraints.
263
+ # ---
264
+
265
+ def _call(*args, max_age: Optional[timedelta] = None, **kwds):
230
266
  nonlocal allow_none
231
267
  _allow_none = _update_with_defaults(allow_none, "allow_none", kwds)
232
268
  # print('Inside general wrapper for {}.'.format(func.__name__))
@@ -271,7 +307,23 @@ def cachier(
271
307
  if _allow_none or entry.value is not None:
272
308
  _print("Cached result found.")
273
309
  now = datetime.now()
274
- if now - entry.time <= _stale_after:
310
+ max_allowed_age = _stale_after
311
+ nonneg_max_age = True
312
+ if max_age is not None:
313
+ if max_age < ZERO_TIMEDELTA:
314
+ _print(
315
+ "max_age is negative. "
316
+ "Cached result considered stale."
317
+ )
318
+ nonneg_max_age = False
319
+ else:
320
+ max_allowed_age = (
321
+ min(_stale_after, max_age)
322
+ if max_age is not None
323
+ else _stale_after
324
+ )
325
+ # note: if max_age < 0, we always consider a value stale
326
+ if nonneg_max_age and (now - entry.time <= max_allowed_age):
275
327
  _print("And it is fresh!")
276
328
  return entry.value
277
329
  _print("But it is stale... :(")
@@ -305,6 +357,14 @@ def cachier(
305
357
  _print("No entry found. No current calc. Calling like a boss.")
306
358
  return _calc_entry(core, key, func, args, kwds)
307
359
 
360
+ # MAINTAINER NOTE: The main function wrapper is now a standard function
361
+ # that passes *args and **kwargs to _call. This ensures that user
362
+ # arguments are not shifted, and max_age is only settable via keyword
363
+ # argument.
364
+ @wraps(func)
365
+ def func_wrapper(*args, **kwargs):
366
+ return _call(*args, **kwargs)
367
+
308
368
  def _clear_cache():
309
369
  """Clear the cache."""
310
370
  core.clear_cache()
cachier/cores/mongo.py CHANGED
@@ -77,7 +77,7 @@ class _MongoCore(_BaseCore):
77
77
  return key, None
78
78
  val = None
79
79
  if "value" in res:
80
- val = pickle.loads(res["value"]) # noqa: S301
80
+ val = pickle.loads(res["value"])
81
81
  entry = CacheEntry(
82
82
  value=val,
83
83
  time=res.get("time", None),
cachier/cores/pickle.py CHANGED
@@ -114,7 +114,7 @@ class _PickleCore(_BaseCore):
114
114
  def _load_cache_dict(self) -> Dict[str, CacheEntry]:
115
115
  try:
116
116
  with portalocker.Lock(self.cache_fpath, mode="rb") as cf:
117
- cache = pickle.load(cf) # noqa: S301
117
+ cache = pickle.load(cf)
118
118
  self._cache_used_fpath = str(self.cache_fpath)
119
119
  except (FileNotFoundError, EOFError):
120
120
  cache = {}
@@ -141,7 +141,7 @@ class _PickleCore(_BaseCore):
141
141
  fpath += f"_{hash_str or key}"
142
142
  try:
143
143
  with portalocker.Lock(fpath, mode="rb") as cache_file:
144
- entry = pickle.load(cache_file) # noqa: S301
144
+ entry = pickle.load(cache_file)
145
145
  return _PickleCore._convert_legacy_cache_entry(entry)
146
146
  except (FileNotFoundError, EOFError):
147
147
  return None
cachier/cores/redis.py ADDED
@@ -0,0 +1,225 @@
1
+ """A Redis-based caching core for cachier."""
2
+
3
+ import pickle
4
+ import time
5
+ import warnings
6
+ from datetime import datetime
7
+ from typing import Any, Callable, Optional, Tuple, Union
8
+
9
+ try:
10
+ import redis
11
+
12
+ REDIS_AVAILABLE = True
13
+ except ImportError:
14
+ REDIS_AVAILABLE = False
15
+
16
+ from .._types import HashFunc
17
+ from ..config import CacheEntry
18
+ from .base import RecalculationNeeded, _BaseCore, _get_func_str
19
+
20
+ REDIS_SLEEP_DURATION_IN_SEC = 1
21
+
22
+
23
+ class MissingRedisClient(ValueError):
24
+ """Thrown when the redis_client keyword argument is missing."""
25
+
26
+
27
+ class _RedisCore(_BaseCore):
28
+ """Redis-based core for Cachier, supporting Redis backends."""
29
+
30
+ def __init__(
31
+ self,
32
+ hash_func: Optional[HashFunc],
33
+ redis_client: Optional[
34
+ Union["redis.Redis", Callable[[], "redis.Redis"]]
35
+ ],
36
+ wait_for_calc_timeout: Optional[int] = None,
37
+ key_prefix: str = "cachier",
38
+ ):
39
+ if not REDIS_AVAILABLE:
40
+ warnings.warn(
41
+ "`redis` was not found. Redis cores will not function. "
42
+ "Install with `pip install redis`.",
43
+ ImportWarning,
44
+ stacklevel=2,
45
+ )
46
+
47
+ super().__init__(
48
+ hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout
49
+ )
50
+ if redis_client is None:
51
+ raise MissingRedisClient(
52
+ "must specify ``redis_client`` when using the redis core"
53
+ )
54
+ self.redis_client = redis_client
55
+ self.key_prefix = key_prefix
56
+ self._func_str = None
57
+
58
+ def _resolve_redis_client(self):
59
+ """Resolve the Redis client from the provided parameter."""
60
+ if callable(self.redis_client):
61
+ return self.redis_client()
62
+ return self.redis_client
63
+
64
+ def _get_redis_key(self, key: str) -> str:
65
+ """Generate a Redis key for the given cache key."""
66
+ return f"{self.key_prefix}:{self._func_str}:{key}"
67
+
68
+ def set_func(self, func):
69
+ """Set the function this core will use."""
70
+ super().set_func(func)
71
+ self._func_str = _get_func_str(func)
72
+
73
+ def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]:
74
+ """Get entry based on given key from Redis."""
75
+ redis_client = self._resolve_redis_client()
76
+ redis_key = self._get_redis_key(key)
77
+
78
+ try:
79
+ # Get the cached data from Redis
80
+ cached_data = redis_client.hgetall(redis_key)
81
+ if not cached_data:
82
+ return key, None
83
+
84
+ # Deserialize the value
85
+ value = None
86
+ if cached_data.get(b"value"):
87
+ value = pickle.loads(cached_data[b"value"])
88
+
89
+ # Parse timestamp
90
+ timestamp_str = cached_data.get(b"timestamp", b"").decode("utf-8")
91
+ timestamp = (
92
+ datetime.fromisoformat(timestamp_str)
93
+ if timestamp_str
94
+ else datetime.now()
95
+ )
96
+
97
+ # Parse boolean fields
98
+ stale = (
99
+ cached_data.get(b"stale", b"false").decode("utf-8").lower()
100
+ == "true"
101
+ )
102
+ processing = (
103
+ cached_data.get(b"processing", b"false")
104
+ .decode("utf-8")
105
+ .lower()
106
+ == "true"
107
+ )
108
+ completed = (
109
+ cached_data.get(b"completed", b"false").decode("utf-8").lower()
110
+ == "true"
111
+ )
112
+
113
+ entry = CacheEntry(
114
+ value=value,
115
+ time=timestamp,
116
+ stale=stale,
117
+ _processing=processing,
118
+ _completed=completed,
119
+ )
120
+ return key, entry
121
+ except Exception as e:
122
+ warnings.warn(f"Redis get_entry_by_key failed: {e}", stacklevel=2)
123
+ return key, None
124
+
125
+ def set_entry(self, key: str, func_res: Any) -> None:
126
+ """Map the given result to the given key in Redis."""
127
+ redis_client = self._resolve_redis_client()
128
+ redis_key = self._get_redis_key(key)
129
+
130
+ try:
131
+ # Serialize the value
132
+ value_bytes = pickle.dumps(func_res)
133
+ now = datetime.now()
134
+
135
+ # Store in Redis using hash
136
+ redis_client.hset(
137
+ redis_key,
138
+ mapping={
139
+ "value": value_bytes,
140
+ "timestamp": now.isoformat(),
141
+ "stale": "false",
142
+ "processing": "false",
143
+ "completed": "true",
144
+ },
145
+ )
146
+ except Exception as e:
147
+ warnings.warn(f"Redis set_entry failed: {e}", stacklevel=2)
148
+
149
+ def mark_entry_being_calculated(self, key: str) -> None:
150
+ """Mark the entry mapped by the given key as being calculated."""
151
+ redis_client = self._resolve_redis_client()
152
+ redis_key = self._get_redis_key(key)
153
+
154
+ try:
155
+ now = datetime.now()
156
+ redis_client.hset(
157
+ redis_key,
158
+ mapping={
159
+ "timestamp": now.isoformat(),
160
+ "stale": "false",
161
+ "processing": "true",
162
+ "completed": "false",
163
+ },
164
+ )
165
+ except Exception as e:
166
+ warnings.warn(
167
+ f"Redis mark_entry_being_calculated failed: {e}", stacklevel=2
168
+ )
169
+
170
+ def mark_entry_not_calculated(self, key: str) -> None:
171
+ """Mark the entry mapped by the given key as not being calculated."""
172
+ redis_client = self._resolve_redis_client()
173
+ redis_key = self._get_redis_key(key)
174
+
175
+ try:
176
+ redis_client.hset(redis_key, "processing", "false")
177
+ except Exception as e:
178
+ warnings.warn(
179
+ f"Redis mark_entry_not_calculated failed: {e}", stacklevel=2
180
+ )
181
+
182
+ def wait_on_entry_calc(self, key: str) -> Any:
183
+ """Wait on the entry with keys being calculated and returns result."""
184
+ time_spent = 0
185
+ while True:
186
+ time.sleep(REDIS_SLEEP_DURATION_IN_SEC)
187
+ time_spent += REDIS_SLEEP_DURATION_IN_SEC
188
+ key, entry = self.get_entry_by_key(key)
189
+ if entry is None:
190
+ raise RecalculationNeeded()
191
+ if not entry._processing:
192
+ return entry.value
193
+ self.check_calc_timeout(time_spent)
194
+
195
+ def clear_cache(self) -> None:
196
+ """Clear the cache of this core."""
197
+ redis_client = self._resolve_redis_client()
198
+ pattern = f"{self.key_prefix}:{self._func_str}:*"
199
+
200
+ try:
201
+ # Find all keys matching the pattern
202
+ keys = redis_client.keys(pattern)
203
+ if keys:
204
+ redis_client.delete(*keys)
205
+ except Exception as e:
206
+ warnings.warn(f"Redis clear_cache failed: {e}", stacklevel=2)
207
+
208
+ def clear_being_calculated(self) -> None:
209
+ """Mark all entries in this cache as not being calculated."""
210
+ redis_client = self._resolve_redis_client()
211
+ pattern = f"{self.key_prefix}:{self._func_str}:*"
212
+
213
+ try:
214
+ # Find all keys matching the pattern
215
+ keys = redis_client.keys(pattern)
216
+ if keys:
217
+ # Use pipeline for efficiency
218
+ pipe = redis_client.pipeline()
219
+ for key in keys:
220
+ pipe.hset(key, "processing", "false")
221
+ pipe.execute()
222
+ except Exception as e:
223
+ warnings.warn(
224
+ f"Redis clear_being_calculated failed: {e}", stacklevel=2
225
+ )
cachier/version.info CHANGED
@@ -1 +1 @@
1
- 3.2.1
1
+ 3.4.0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: cachier
3
- Version: 3.2.1
3
+ Version: 3.4.0
4
4
  Summary: Persistent, stale-free, local and cross-machine caching for Python functions.
5
5
  Author-email: Shay Palachy Affek <shay.palachy@gmail.com>
6
6
  License: MIT License
@@ -99,7 +99,9 @@ Features
99
99
  * Defining "shelf life" for cached values.
100
100
  * Local caching using pickle files.
101
101
  * Cross-machine caching using MongoDB.
102
+ * Redis-based caching for high-performance scenarios.
102
103
  * Thread-safety.
104
+ * **Per-call max age:** Specify a maximum age for cached values per call.
103
105
 
104
106
  Cachier is **NOT**:
105
107
 
@@ -282,6 +284,27 @@ Per-function call arguments
282
284
 
283
285
  Cachier also accepts several keyword arguments in the calls of the function it wraps rather than in the decorator call, allowing you to modify its behaviour for a specific function call.
284
286
 
287
+ **Max Age (max_age)**
288
+ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
289
+ You can specify a maximum allowed age for a cached value on a per-call basis using the `max_age` keyword argument. If the cached value is older than this threshold, a recalculation is triggered. This is in addition to the `stale_after` parameter set at the decorator level; the strictest (smallest) threshold is enforced.
290
+
291
+ .. code-block:: python
292
+
293
+ from datetime import timedelta
294
+ from cachier import cachier
295
+
296
+ @cachier(stale_after=timedelta(days=3))
297
+ def add(a, b):
298
+ return a + b
299
+
300
+ # Use a per-call max age:
301
+ result = add(1, 2, max_age=timedelta(seconds=10)) # Only use cache if value is <10s old
302
+
303
+ **How it works:**
304
+ - The effective max age threshold is the minimum of `stale_after` (from the decorator) and `max_age` (from the call).
305
+ - If the cached value is older than this threshold, a new calculation is triggered and the cache is updated.
306
+ - If not, the cached value is returned as usual.
307
+
285
308
  Ignore Cache
286
309
  ~~~~~~~~~~~~
287
310
 
@@ -426,6 +449,56 @@ Cachier supports a generic SQL backend via SQLAlchemy, allowing you to use SQLit
426
449
  def my_func(x):
427
450
  return x * 2
428
451
 
452
+ Redis Core
453
+ ----------
454
+
455
+ **Note:** The Redis core requires the ``redis`` package to be installed. It is not installed by default with cachier. To use the Redis backend, run::
456
+
457
+ pip install redis
458
+
459
+ Cachier supports Redis-based caching for high-performance scenarios. Redis provides fast in-memory storage with optional persistence.
460
+
461
+ **Usage Example (Local Redis):**
462
+
463
+ .. code-block:: python
464
+
465
+ import redis
466
+ from cachier import cachier
467
+
468
+ # Create Redis client
469
+ redis_client = redis.Redis(host='localhost', port=6379, db=0)
470
+
471
+ @cachier(backend="redis", redis_client=redis_client)
472
+ def my_func(x):
473
+ return x * 2
474
+
475
+ **Usage Example (Redis with custom key prefix):**
476
+
477
+ .. code-block:: python
478
+
479
+ import redis
480
+ from cachier import cachier
481
+
482
+ redis_client = redis.Redis(host='localhost', port=6379, db=0)
483
+
484
+ @cachier(backend="redis", redis_client=redis_client, key_prefix="myapp")
485
+ def my_func(x):
486
+ return x * 2
487
+
488
+ **Usage Example (Redis with callable client):**
489
+
490
+ .. code-block:: python
491
+
492
+ import redis
493
+ from cachier import cachier
494
+
495
+ def get_redis_client():
496
+ return redis.Redis(host='localhost', port=6379, db=0)
497
+
498
+ @cachier(backend="redis", redis_client=get_redis_client)
499
+ def my_func(x):
500
+ return x * 2
501
+
429
502
  **Configuration Options:**
430
503
 
431
504
  - ``sql_engine``: SQLAlchemy connection string, Engine, or callable returning an Engine.
@@ -0,0 +1,21 @@
1
+ cachier/__init__.py,sha256=1isxXaP2l6Vq7gC1Gob6hduRLC07dHfWze1-oCjwSP0,415
2
+ cachier/__main__.py,sha256=upg-TlHs1vngKYvkjoPpl3Pvl6xOx4ut-M1mElMiAo0,443
3
+ cachier/_types.py,sha256=u-1NWfZqcvwLa5qDmiTxIN4btM6NiWta212u8yPo02o,335
4
+ cachier/_version.py,sha256=jnPPRn_qmjNi-qmQjlHnzNGf3LSBTYkMmJdGjxMTOBM,1089
5
+ cachier/config.py,sha256=6hyQtn9T6UXu2UQhKJltWT0Nu4OBS4ION1x7Lt1i8Og,3838
6
+ cachier/core.py,sha256=W1O_a9rkgsErG3spVPZpnfRu9k0MbV94rGkv79KDgBA,16243
7
+ cachier/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
+ cachier/version.info,sha256=9ePpPlj_wqIKPKU-gCW8UNfB3MvvSzTiOIqpu_hnhGY,6
9
+ cachier/cores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ cachier/cores/base.py,sha256=s7qgmDJA4LGub6ydGfMk9vVJW4fgeU0EXl-9gmpuh28,3683
11
+ cachier/cores/memory.py,sha256=fsvqq9rwwmAaMBvYo-oUNAxB6UOyfBpuf8ACW_XTaU0,3572
12
+ cachier/cores/mongo.py,sha256=wVit36jQeNkP1KyxMtm6jDglazpdORYzTOPJ0q10BWI,4979
13
+ cachier/cores/pickle.py,sha256=FuEl2i_U4Q3k9JUs4ylJqM3n7BVY6cXEqKmdW56AEDg,10650
14
+ cachier/cores/redis.py,sha256=rWrkEwWPzfFvH1eLkgBdhzxqPtJtx6N_sYcqRPy_P9Y,7553
15
+ cachier/cores/sql.py,sha256=nuf2-Szo7VTPRa7IC3JGWEtGsBtdkIrx0bhOm3U0mfE,9895
16
+ cachier-3.4.0.dist-info/licenses/LICENSE,sha256=-2WrMJkIa0gVP6YQHXXDT7ws-S3M2NEVEF4XF3K8qrY,1069
17
+ cachier-3.4.0.dist-info/METADATA,sha256=V409D2EKlPVNAEf_uAzJDIJCQQJjzyoE_g1FnjaSnzs,24446
18
+ cachier-3.4.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
19
+ cachier-3.4.0.dist-info/entry_points.txt,sha256=x4Y7t6Y0Qev_3fgG-Jv7TrsvVdJty3FnGAdkT8-_5mY,49
20
+ cachier-3.4.0.dist-info/top_level.txt,sha256=_rW_HiJumDCch67YT-WAgzcyvKg5RiYDMZq9d-0ZpaE,8
21
+ cachier-3.4.0.dist-info/RECORD,,
@@ -1,20 +0,0 @@
1
- cachier/__init__.py,sha256=1isxXaP2l6Vq7gC1Gob6hduRLC07dHfWze1-oCjwSP0,415
2
- cachier/__main__.py,sha256=upg-TlHs1vngKYvkjoPpl3Pvl6xOx4ut-M1mElMiAo0,443
3
- cachier/_types.py,sha256=EGJMiw-oCIC_cDLyzw7YC40lfo8jnD3zMmoJpA9Y8Iw,238
4
- cachier/_version.py,sha256=jnPPRn_qmjNi-qmQjlHnzNGf3LSBTYkMmJdGjxMTOBM,1089
5
- cachier/config.py,sha256=6hyQtn9T6UXu2UQhKJltWT0Nu4OBS4ION1x7Lt1i8Og,3838
6
- cachier/core.py,sha256=7pqf_EGvGXu5WWtC5MeY0tVW4M59XljZI9_2R4RVfRU,13627
7
- cachier/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
- cachier/version.info,sha256=OCJyh4MEn8LyKlbR7nTWI1LEn7-BxrVLUIMNb2HDryI,6
9
- cachier/cores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
- cachier/cores/base.py,sha256=s7qgmDJA4LGub6ydGfMk9vVJW4fgeU0EXl-9gmpuh28,3683
11
- cachier/cores/memory.py,sha256=fsvqq9rwwmAaMBvYo-oUNAxB6UOyfBpuf8ACW_XTaU0,3572
12
- cachier/cores/mongo.py,sha256=pCBrxLsmGr68Q50JVD_CUPAYwhaLDrJUQs_6A-_GYLA,4993
13
- cachier/cores/pickle.py,sha256=FgfvZWAFdWQPOo3G-L57iEV2ujEkIDH8TyGzbarsZeE,10678
14
- cachier/cores/sql.py,sha256=nuf2-Szo7VTPRa7IC3JGWEtGsBtdkIrx0bhOm3U0mfE,9895
15
- cachier-3.2.1.dist-info/licenses/LICENSE,sha256=-2WrMJkIa0gVP6YQHXXDT7ws-S3M2NEVEF4XF3K8qrY,1069
16
- cachier-3.2.1.dist-info/METADATA,sha256=xowxZuYhc4r5f4pKxrhROtHGT7FYObRJ4ULWFDXwA-Q,22101
17
- cachier-3.2.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
18
- cachier-3.2.1.dist-info/entry_points.txt,sha256=x4Y7t6Y0Qev_3fgG-Jv7TrsvVdJty3FnGAdkT8-_5mY,49
19
- cachier-3.2.1.dist-info/top_level.txt,sha256=_rW_HiJumDCch67YT-WAgzcyvKg5RiYDMZq9d-0ZpaE,8
20
- cachier-3.2.1.dist-info/RECORD,,