cachier 3.0.0__py3-none-any.whl → 3.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cachier/__init__.py CHANGED
@@ -2,15 +2,15 @@ from ._version import *  # noqa: F403
 from .config import (
     disable_caching,
     enable_caching,
-    get_default_params,
-    set_default_params,
+    get_global_params,
+    set_global_params,
 )
 from .core import cachier
 
 __all__ = [
     "cachier",
-    "set_default_params",
-    "get_default_params",
+    "set_global_params",
+    "get_global_params",
     "enable_caching",
     "disable_caching",
 ]
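The export renames above change what is importable from the package root: in 3.1.0 only the get_global_params/set_global_params names are re-exported by cachier/__init__.py, while the old names survive only as deprecated shims in cachier.config (see the config.py diff below). A minimal illustration of the import change, assuming no other module re-exports the old names:

    # cachier 3.0.0
    from cachier import get_default_params, set_default_params

    # cachier 3.1.0: the renamed functions are exported from the package root
    from cachier import get_global_params, set_global_params

    # the old names remain importable from cachier.config, where they emit a
    # DeprecationWarning and delegate to the *_global_params functions
    from cachier.config import get_default_params, set_default_params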
cachier/__main__.py CHANGED
@@ -7,7 +7,7 @@ from cachier.core import _set_max_workers
 
 @click.group()
 def cli():
-    """A command-line interface for cachier."""
+    """A command-line interface for cachier."""  # noqa: D401
 
 
 @cli.command("Limits the number of worker threads used by cachier.")
cachier/config.py CHANGED
@@ -2,7 +2,10 @@ import datetime
 import hashlib
 import os
 import pickle
-from typing import Optional, TypedDict, Union
+import threading
+from collections.abc import Mapping
+from dataclasses import dataclass, replace
+from typing import Any, Optional, Union
 
 from ._types import Backend, HashFunc, Mongetter
 
@@ -16,33 +19,36 @@ def _default_hash_func(args, kwds):
     return hashlib.sha256(serialized).hexdigest()
 
 
-class Params(TypedDict):
-    caching_enabled: bool
-    hash_func: HashFunc
-    backend: Backend
-    mongetter: Optional[Mongetter]
-    stale_after: datetime.timedelta
-    next_time: bool
-    cache_dir: Union[str, os.PathLike]
-    pickle_reload: bool
-    separate_files: bool
-    wait_for_calc_timeout: int
-    allow_none: bool
-
-
-_default_params: Params = {
-    "caching_enabled": True,
-    "hash_func": _default_hash_func,
-    "backend": "pickle",
-    "mongetter": None,
-    "stale_after": datetime.timedelta.max,
-    "next_time": False,
-    "cache_dir": "~/.cachier/",
-    "pickle_reload": True,
-    "separate_files": False,
-    "wait_for_calc_timeout": 0,
-    "allow_none": False,
-}
+@dataclass
+class Params:
+    """Default definition for cachier parameters."""
+
+    caching_enabled: bool = True
+    hash_func: HashFunc = _default_hash_func
+    backend: Backend = "pickle"
+    mongetter: Optional[Mongetter] = None
+    stale_after: datetime.timedelta = datetime.timedelta.max
+    next_time: bool = False
+    cache_dir: Union[str, os.PathLike] = "~/.cachier/"
+    pickle_reload: bool = True
+    separate_files: bool = False
+    wait_for_calc_timeout: int = 0
+    allow_none: bool = False
+
+
+_global_params = Params()
+
+
+@dataclass
+class CacheEntry:
+    """Data class for cache entries."""
+
+    value: Any
+    time: datetime
+    stale: bool
+    _processing: bool
+    _condition: Optional[threading.Condition] = None
+    _completed: bool = False
 
 
 def _update_with_defaults(
@@ -55,11 +61,25 @@ def _update_with_defaults(
     if kw_name in func_kwargs:
         return func_kwargs.pop(kw_name)
     if param is None:
-        return cachier.config._default_params[name]
+        return getattr(cachier.config._global_params, name)
     return param
 
 
-def set_default_params(**params):
+def set_default_params(**params: Mapping) -> None:
+    """Configure default parameters applicable to all memoized functions."""
+    # It is kept for backwards compatibility with desperation warning
+    import warnings
+
+    warnings.warn(
+        "Called `set_default_params` is deprecated and will be removed."
+        " Please use `set_global_params` instead.",
+        DeprecationWarning,
+        stacklevel=2,
+    )
+    set_global_params(**params)
+
+
+def set_global_params(**params: Mapping) -> None:
     """Configure global parameters applicable to all memoized functions.
 
     This function takes the same keyword parameters as the ones defined in the
@@ -74,28 +94,46 @@ def set_default_params(**params):
     """
     import cachier
 
-    valid_params = (
-        p for p in params.items() if p[0] in cachier.config._default_params
+    valid_params = {
+        k: v
+        for k, v in params.items()
+        if hasattr(cachier.config._global_params, k)
+    }
+    cachier.config._global_params = replace(
+        cachier.config._global_params, **valid_params
+    )
+
+
+def get_default_params() -> Params:
+    """Get current set of default parameters."""
+    # It is kept for backwards compatibility with desperation warning
+    import warnings
+
+    warnings.warn(
+        "Called `get_default_params` is deprecated and will be removed."
+        " Please use `get_global_params` instead.",
+        DeprecationWarning,
+        stacklevel=2,
    )
-    _default_params.update(valid_params)
+    return get_global_params()
 
 
-def get_default_params():
+def get_global_params() -> Params:
     """Get current set of default parameters."""
     import cachier
 
-    return cachier.config._default_params
+    return cachier.config._global_params
 
 
 def enable_caching():
     """Enable caching globally."""
     import cachier
 
-    cachier.config._default_params["caching_enabled"] = True
+    cachier.config._global_params.caching_enabled = True
 
 
 def disable_caching():
     """Disable caching globally."""
     import cachier
 
-    cachier.config._default_params["caching_enabled"] = False
+    cachier.config._global_params.caching_enabled = False
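A short usage sketch of the reworked configuration API, based on the Params dataclass and functions shown above; the concrete values passed here are illustrative only:

    import datetime
    import cachier

    # update one or more global defaults; keys that do not exist on Params are
    # silently dropped, since set_global_params filters with hasattr()
    cachier.set_global_params(
        caching_enabled=True,
        stale_after=datetime.timedelta(hours=1),
        separate_files=True,
    )

    params = cachier.get_global_params()  # now returns a Params dataclass
    print(params.stale_after, params.backend)

    # the 3.0.0 functions still work, but emit a DeprecationWarning and
    # forward to their *_global_params replacements
    from cachier.config import set_default_params
    set_default_params(next_time=True)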
cachier/core.py CHANGED
@@ -14,14 +14,13 @@ import warnings
 from collections import OrderedDict
 from concurrent.futures import ThreadPoolExecutor
 from functools import wraps
-from typing import Optional, Union
+from typing import Any, Optional, Union
 from warnings import warn
 
 from .config import (
     Backend,
     HashFunc,
     Mongetter,
-    _default_params,
     _update_with_defaults,
 )
 from .cores.base import RecalculationNeeded, _BaseCore
@@ -56,13 +55,11 @@ def _function_thread(core, key, func, args, kwds):
         print(f"Function call failed with the following exception:\n{exc}")
 
 
-def _calc_entry(core, key, func, args, kwds):
+def _calc_entry(core, key, func, args, kwds) -> Optional[Any]:
+    core.mark_entry_being_calculated(key)
     try:
-        core.mark_entry_being_calculated(key)
-        # _get_executor().submit(core.mark_entry_being_calculated, key)
         func_res = func(*args, **kwds)
         core.set_entry(key, func_res)
-        # _get_executor().submit(core.set_entry, key, func_res)
         return func_res
     finally:
         core.mark_entry_not_calculated(key)
@@ -118,7 +115,7 @@ def cachier(
     wait_for_calc_timeout: Optional[int] = None,
     allow_none: Optional[bool] = None,
 ):
-    """A persistent, stale-free memoization decorator.
+    """Wrap as a persistent, stale-free memoization decorator.
 
     The positional and keyword arguments to the wrapped function must be
     hashable (i.e. Python's immutable built-in objects, not mutable
@@ -127,13 +124,14 @@ def cachier(
     value is their id), equal objects across different sessions will not yield
     identical keys.
 
-    Arguments
+    Arguments:
     ---------
     hash_func : callable, optional
         A callable that gets the args and kwargs from the decorated function
        and returns a hash key for them. This parameter can be used to enable
         the use of cachier with functions that get arguments that are not
         automatically hashable by Python.
+    hash_params : callable, optional
     backend : str, optional
         The name of the backend to use. Valid options currently include
         'pickle', 'mongo' and 'memory'. If not provided, defaults to
@@ -175,6 +173,8 @@ def cachier(
         None will not be cached and are recalculated every call.
 
     """
+    from .config import _global_params
+
     # Check for deprecated parameters
     if hash_params is not None:
         message = (
@@ -240,29 +240,34 @@ def cachier(
                 func, _is_method=core.func_is_method, args=args, kwds=kwds
             )
 
-            _print = lambda x: None  # noqa: E731
-            if verbose:
-                _print = print
-            if ignore_cache or not _default_params["caching_enabled"]:
-                return func(**kwargs)
-            key, entry = core.get_entry(tuple(), kwargs)
+            _print = print if verbose else lambda x: None
+
+            if ignore_cache or not _global_params.caching_enabled:
+                return (
+                    func(args[0], **kwargs)
+                    if core.func_is_method
+                    else func(**kwargs)
+                )
+            key, entry = core.get_entry((), kwargs)
             if overwrite_cache:
                 return _calc_entry(core, key, func, args, kwds)
-            if entry is None:
+            if entry is None or (
+                not entry._completed and not entry._processing
+            ):
                 _print("No entry found. No current calc. Calling like a boss.")
                 return _calc_entry(core, key, func, args, kwds)
             _print("Entry found.")
-            if _allow_none or entry.get("value", None) is not None:
+            if _allow_none or entry.value is not None:
                 _print("Cached result found.")
                 now = datetime.datetime.now()
-                if now - entry["time"] <= _stale_after:
+                if now - entry.time <= _stale_after:
                     _print("And it is fresh!")
-                    return entry["value"]
+                    return entry.value
                 _print("But it is stale... :(")
-                if entry["being_calculated"]:
+                if entry._processing:
                     if _next_time:
                         _print("Returning stale.")
-                        return entry["value"]  # return stale val
+                        return entry.value  # return stale val
                     _print("Already calc. Waiting on change.")
                     try:
                         return core.wait_on_entry_calc(key)
@@ -270,17 +275,17 @@ def cachier(
                         return _calc_entry(core, key, func, args, kwds)
                 if _next_time:
                     _print("Async calc and return stale")
+                    core.mark_entry_being_calculated(key)
                     try:
-                        core.mark_entry_being_calculated(key)
                         _get_executor().submit(
                             _function_thread, core, key, func, args, kwds
                         )
                     finally:
                         core.mark_entry_not_calculated(key)
-                    return entry["value"]
+                    return entry.value
                 _print("Calling decorated function and waiting")
                 return _calc_entry(core, key, func, args, kwds)
-            if entry["being_calculated"]:
+            if entry._processing:
                 _print("No value but being calculated. Waiting.")
                 try:
                     return core.wait_on_entry_calc(key)
@@ -294,17 +299,17 @@ def cachier(
             core.clear_cache()
 
         def _clear_being_calculated():
-            """Marks all entries in this cache as not being calculated."""
+            """Mark all entries in this cache as not being calculated."""
             core.clear_being_calculated()
 
         def _cache_dpath():
-            """Returns the path to the cache dir, if exists; None if not."""
+            """Return the path to the cache dir, if exists; None if not."""
             return getattr(core, "cache_dir", None)
 
-        def _precache_value(*args, value_to_cache, **kwds):
+        def _precache_value(*args, value_to_cache, **kwds):  # noqa: D417
             """Add an initial value to the cache.
 
-            Arguments
+            Arguments:
             ---------
             value_to_cache : any
                 entry to be written into the cache
@@ -314,7 +319,7 @@ def cachier(
             kwargs = _convert_args_kwargs(
                 func, _is_method=core.func_is_method, args=args, kwds=kwds
             )
-            return core.precache_value(tuple(), kwargs, value_to_cache)
+            return core.precache_value((), kwargs, value_to_cache)
 
         func_wrapper.clear_cache = _clear_cache
         func_wrapper.clear_being_calculated = _clear_being_calculated
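For orientation, a hedged sketch of applying the decorator with parameters suggested by the signature, docstring, and Params defaults shown above (backend, stale_after, next_time, allow_none); the function body and values are illustrative, and call-time cache-control keywords are omitted because they are not shown in this diff:

    import datetime
    from cachier import cachier

    @cachier(
        backend="memory",  # 'pickle' (default), 'mongo' or 'memory'
        stale_after=datetime.timedelta(minutes=5),
        next_time=True,    # on a stale hit, return the stale value and refresh in the background
        allow_none=False,  # None results are not cached and are recalculated every call
    )
    def heavy_query(user_id):
        # placeholder for an expensive computation
        return {"user": user_id}

    heavy_query(42)  # computed and cached
    heavy_query(42)  # served from the cache while the entry is fresh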
cachier/cores/base.py CHANGED
@@ -9,13 +9,15 @@
 import abc  # for the _BaseCore abstract base class
 import inspect
 import threading
-from typing import Callable
+from typing import Callable, Optional, Tuple
 
 from .._types import HashFunc
-from ..config import _update_with_defaults
+from ..config import CacheEntry, _update_with_defaults
 
 
 class RecalculationNeeded(Exception):
+    """Exception raised when a recalculation is needed."""
+
     pass
 
 
@@ -32,7 +34,7 @@ class _BaseCore:
         self.lock = threading.RLock()
 
     def set_func(self, func):
-        """Sets the function this core will use.
+        """Set the function this core will use.
 
         This has to be set before any method is called. Also determine if the
         function is an object method.
@@ -46,17 +48,21 @@ class _BaseCore:
         self.func = func
 
     def get_key(self, args, kwds):
-        """Returns a unique key based on the arguments provided."""
+        """Return a unique key based on the arguments provided."""
         return self.hash_func(args, kwds)
 
-    def get_entry(self, args, kwds):
-        """Returns the result mapped to the given arguments in this core's
-        cache, if such a mapping exists."""
+    def get_entry(self, args, kwds) -> Tuple[str, Optional[CacheEntry]]:
+        """Get entry based on given arguments.
+
+        Return the result mapped to the given arguments in this core's cache,
+        if such a mapping exists.
+
+        """
         key = self.get_key(args, kwds)
         return self.get_entry_by_key(key)
 
     def precache_value(self, args, kwds, value_to_cache):
-        """Writes a precomputed value into the cache."""
+        """Write a precomputed value into the cache."""
         key = self.get_key(args, kwds)
         self.set_entry(key, value_to_cache)
         return value_to_cache
@@ -70,31 +76,34 @@ class _BaseCore:
             raise RecalculationNeeded()
 
     @abc.abstractmethod
-    def get_entry_by_key(self, key):
-        """Returns the result mapped to the given key in this core's cache, if
-        such a mapping exists."""
+    def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]:
+        """Get entry based on given key.
+
+        Return the result mapped to the given key in this core's cache, if such
+        a mapping exists.
+
+        """
 
     @abc.abstractmethod
-    def set_entry(self, key, func_res):
-        """Maps the given result to the given key in this core's cache."""
+    def set_entry(self, key: str, func_res):
+        """Map the given result to the given key in this core's cache."""
 
     @abc.abstractmethod
-    def mark_entry_being_calculated(self, key):
-        """Marks the entry mapped by the given key as being calculated."""
+    def mark_entry_being_calculated(self, key: str) -> None:
+        """Mark the entry mapped by the given key as being calculated."""
 
     @abc.abstractmethod
-    def mark_entry_not_calculated(self, key):
-        """Marks the entry mapped by the given key as not being calculated."""
+    def mark_entry_not_calculated(self, key: str) -> None:
+        """Mark the entry mapped by the given key as not being calculated."""
 
     @abc.abstractmethod
-    def wait_on_entry_calc(self, key):
-        """Waits on the entry mapped by key being calculated and returns
-        result."""
+    def wait_on_entry_calc(self, key: str) -> None:
+        """Wait on the entry with keys being calculated and returns result."""
 
     @abc.abstractmethod
-    def clear_cache(self):
-        """Clears the cache of this core."""
+    def clear_cache(self) -> None:
+        """Clear the cache of this core."""
 
     @abc.abstractmethod
-    def clear_being_calculated(self):
-        """Marks all entries in this cache as not being calculated."""
+    def clear_being_calculated(self) -> None:
+        """Mark all entries in this cache as not being calculated."""
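The practical effect of the base-core changes is that get_entry and get_entry_by_key now hand back CacheEntry dataclass instances (defined in cachier/config.py above) instead of plain dicts, so code inspecting entries switches from key lookups to attribute access. A before/after sketch, where core and kwargs stand in for a configured core instance and the hashed call arguments as in core.py:

    # cachier 3.0.0: entries were plain dicts
    key, entry = core.get_entry(tuple(), kwargs)
    if entry is not None and not entry["being_calculated"]:
        value = entry["value"]

    # cachier 3.1.0: entries are CacheEntry dataclasses
    key, entry = core.get_entry((), kwargs)
    if entry is not None and not entry._processing:
        value = entry.value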
cachier/cores/memory.py CHANGED
@@ -2,8 +2,10 @@
 
 import threading
 from datetime import datetime
+from typing import Any, Optional, Tuple
 
 from .._types import HashFunc
+from ..config import CacheEntry
 from .base import _BaseCore, _get_func_str
 
 
@@ -14,76 +16,82 @@ class _MemoryCore(_BaseCore):
         super().__init__(hash_func, wait_for_calc_timeout)
         self.cache = {}
 
-    def _hash_func_key(self, key):
+    def _hash_func_key(self, key: str) -> str:
         return f"{_get_func_str(self.func)}:{key}"
 
-    def get_entry_by_key(self, key, reload=False):
+    def get_entry_by_key(
+        self, key: str, reload=False
+    ) -> Tuple[str, Optional[CacheEntry]]:
         with self.lock:
             return key, self.cache.get(self._hash_func_key(key), None)
 
-    def set_entry(self, key, func_res):
+    def set_entry(self, key: str, func_res: Any) -> None:
+        hash_key = self._hash_func_key(key)
         with self.lock:
             try:
                 # we need to retain the existing condition so that
                 # mark_entry_not_calculated can notify all possibly-waiting
                 # threads about it
-                cond = self.cache[self._hash_func_key(key)]["condition"]
+                cond = self.cache[hash_key]._condition
             except KeyError:  # pragma: no cover
                 cond = None
-            self.cache[self._hash_func_key(key)] = {
-                "value": func_res,
-                "time": datetime.now(),
-                "stale": False,
-                "being_calculated": False,
-                "condition": cond,
-            }
+            self.cache[hash_key] = CacheEntry(
+                value=func_res,
+                time=datetime.now(),
+                stale=False,
+                _processing=False,
+                _condition=cond,
+                _completed=True,
+            )
 
-    def mark_entry_being_calculated(self, key):
+    def mark_entry_being_calculated(self, key: str) -> None:
         with self.lock:
             condition = threading.Condition()
+            hash_key = self._hash_func_key(key)
+            if hash_key in self.cache:
+                self.cache[hash_key]._processing = True
+                self.cache[hash_key]._condition = condition
             # condition.acquire()
-            try:
-                self.cache[self._hash_func_key(key)]["being_calculated"] = True
-                self.cache[self._hash_func_key(key)]["condition"] = condition
-            except KeyError:
-                self.cache[self._hash_func_key(key)] = {
-                    "value": None,
-                    "time": datetime.now(),
-                    "stale": False,
-                    "being_calculated": True,
-                    "condition": condition,
-                }
+            else:
+                self.cache[hash_key] = CacheEntry(
+                    value=None,
+                    time=datetime.now(),
+                    stale=False,
+                    _processing=True,
+                    _condition=condition,
+                )
 
-    def mark_entry_not_calculated(self, key):
+    def mark_entry_not_calculated(self, key: str) -> None:
+        hash_key = self._hash_func_key(key)
         with self.lock:
-            try:
-                entry = self.cache[self._hash_func_key(key)]
-            except KeyError:  # pragma: no cover
+            if hash_key not in self.cache:
                 return  # that's ok, we don't need an entry in that case
-            entry["being_calculated"] = False
-            cond = entry["condition"]
+            entry = self.cache[hash_key]
+            entry._processing = False
+            cond = entry._condition
             if cond:
                 cond.acquire()
                 cond.notify_all()
                 cond.release()
-                entry["condition"] = None
+                entry._condition = None
 
-    def wait_on_entry_calc(self, key):
+    def wait_on_entry_calc(self, key: str) -> Any:
+        hash_key = self._hash_func_key(key)
         with self.lock:  # pragma: no cover
-            entry = self.cache[self._hash_func_key(key)]
-            if not entry["being_calculated"]:
-                return entry["value"]
-        entry["condition"].acquire()
-        entry["condition"].wait()
-        entry["condition"].release()
-        return self.cache[self._hash_func_key(key)]["value"]
+            entry = self.cache[hash_key]
+            if not entry._processing:
+                return entry.value
+        entry._condition.acquire()
+        entry._condition.wait()
+        entry._condition.release()
+        return self.cache[hash_key].value
 
-    def clear_cache(self):
+    def clear_cache(self) -> None:
         with self.lock:
             self.cache.clear()
 
-    def clear_being_calculated(self):
+    def clear_being_calculated(self) -> None:
         with self.lock:
             for entry in self.cache.values():
-                entry["being_calculated"] = False
-                entry["condition"] = None
+                entry._processing = False
+                entry._condition = None
cachier/cores/mongo.py CHANGED
@@ -13,8 +13,10 @@ import time  # to sleep when waiting on Mongo cache\
 import warnings  # to warn if pymongo is missing
 from contextlib import suppress
 from datetime import datetime
+from typing import Any, Optional, Tuple
 
 from .._types import HashFunc, Mongetter
+from ..config import CacheEntry
 
 with suppress(ImportError):
     from bson.binary import Binary  # to save binary data to mongodb
@@ -41,8 +43,9 @@ class _MongoCore(_BaseCore):
     ):
         if "pymongo" not in sys.modules:
             warnings.warn(
-                "Cachier warning: pymongo was not found. "
-                "MongoDB cores will not function."
+                "`pymongo` was not found. MongoDB cores will not function.",
+                ImportWarning,
+                stacklevel=2,
             )  # pragma: no cover
 
         super().__init__(hash_func, wait_for_calc_timeout)
@@ -64,29 +67,23 @@ class _MongoCore(_BaseCore):
     def _func_str(self) -> str:
         return _get_func_str(self.func)
 
-    def get_entry_by_key(self, key):
+    def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]:
         res = self.mongo_collection.find_one(
             {"func": self._func_str, "key": key}
         )
         if not res:
             return key, None
-        try:
-            entry = {
-                "value": pickle.loads(res["value"]),  # noqa: S301
-                "time": res.get("time", None),
-                "stale": res.get("stale", False),
-                "being_calculated": res.get("being_calculated", False),
-            }
-        except KeyError:
-            entry = {
-                "value": None,
-                "time": res.get("time", None),
-                "stale": res.get("stale", False),
-                "being_calculated": res.get("being_calculated", False),
-            }
+        val = pickle.loads(res["value"]) if "value" in res else None  # noqa: S301
+        entry = CacheEntry(
+            value=val,
+            time=res.get("time", None),
+            stale=res.get("stale", False),
+            _processing=res.get("processing", False),
+            _completed=res.get("completed", False),
+        )
         return key, entry
 
-    def set_entry(self, key, func_res):
+    def set_entry(self, key: str, func_res: Any) -> None:
         thebytes = pickle.dumps(func_res)
         self.mongo_collection.update_one(
             filter={"func": self._func_str, "key": key},
@@ -97,31 +94,32 @@ class _MongoCore(_BaseCore):
                     "value": Binary(thebytes),
                     "time": datetime.now(),
                     "stale": False,
-                    "being_calculated": False,
+                    "processing": False,
+                    "completed": True,
                 }
             },
             upsert=True,
         )
 
-    def mark_entry_being_calculated(self, key):
+    def mark_entry_being_calculated(self, key: str) -> None:
         self.mongo_collection.update_one(
             filter={"func": self._func_str, "key": key},
-            update={"$set": {"being_calculated": True}},
+            update={"$set": {"processing": True}},
             upsert=True,
         )
 
-    def mark_entry_not_calculated(self, key):
+    def mark_entry_not_calculated(self, key: str) -> None:
         with suppress(OperationFailure):  # don't care in this case
             self.mongo_collection.update_one(
                 filter={
                     "func": self._func_str,
                     "key": key,
                 },
-                update={"$set": {"being_calculated": False}},
+                update={"$set": {"processing": False}},
                 upsert=False,  # should not insert in this case
             )
 
-    def wait_on_entry_calc(self, key):
+    def wait_on_entry_calc(self, key: str) -> Any:
         time_spent = 0
         while True:
             time.sleep(MONGO_SLEEP_DURATION_IN_SEC)
@@ -129,18 +127,18 @@ class _MongoCore(_BaseCore):
             key, entry = self.get_entry_by_key(key)
             if entry is None:
                 raise RecalculationNeeded()
-            if not entry["being_calculated"]:
-                return entry["value"]
+            if not entry._processing:
+                return entry.value
             self.check_calc_timeout(time_spent)
 
-    def clear_cache(self):
+    def clear_cache(self) -> None:
         self.mongo_collection.delete_many(filter={"func": self._func_str})
 
-    def clear_being_calculated(self):
+    def clear_being_calculated(self) -> None:
         self.mongo_collection.update_many(
             filter={
                 "func": self._func_str,
-                "being_calculated": True,
+                "processing": True,
             },
-            update={"$set": {"being_calculated": False}},
+            update={"$set": {"processing": False}},
         )
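The renamed MongoDB document fields (processing and completed in place of being_calculated) are internal to the core, so decorated functions do not change; for reference, a hedged sketch of wiring the Mongo backend through the mongetter parameter listed in Params above, with the connection details and names below being illustrative assumptions rather than anything mandated by cachier:

    import pymongo
    from cachier import cachier

    def my_mongetter():
        # illustrative: any callable returning a writable pymongo collection
        client = pymongo.MongoClient("mongodb://localhost:27017")
        return client["cachier_db"]["cachier_cache"]

    @cachier(backend="mongo", mongetter=my_mongetter)
    def fetch_report(report_id):
        # placeholder for an expensive remote call
        return {"report": report_id}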
cachier/cores/pickle.py CHANGED
@@ -8,15 +8,15 @@
 # Copyright (c) 2016, Shay Palachy <shaypal5@gmail.com>
 import os
 import pickle  # for local caching
-from contextlib import suppress
 from datetime import datetime
+from typing import Any, Dict, Optional, Tuple
 
 import portalocker  # to lock on pickle cache IO
 from watchdog.events import PatternMatchingEventHandler
 from watchdog.observers import Observer
 
 from .._types import HashFunc
-from ..config import _update_with_defaults
+from ..config import CacheEntry, _update_with_defaults
 
 # Alternative: https://github.com/WoLpH/portalocker
 from .base import _BaseCore
@@ -41,32 +41,29 @@ class _PickleCore(_BaseCore):
             self.observer = None
             self.value = None
 
-        def inject_observer(self, observer):
+        def inject_observer(self, observer) -> None:
             """Inject the observer running this handler."""
             self.observer = observer
 
-        def _check_calculation(self):
-            # print('checking calc')
+        def _check_calculation(self) -> None:
             entry = self.core.get_entry_by_key(self.key, True)[1]
-            # print(self.key)
-            # print(entry)
             try:
-                if not entry["being_calculated"]:
+                if not entry._processing:
                     # print('stopping observer!')
-                    self.value = entry["value"]
+                    self.value = entry.value
                     self.observer.stop()
                 # else:
-                # print('NOT stopping observer... :(')
+                #     print('NOT stopping observer... :(')
             except TypeError:
                 self.value = None
                 self.observer.stop()
 
-        def on_created(self, event):
-            """A Watchdog Event Handler method."""
+        def on_created(self, event) -> None:
+            """A Watchdog Event Handler method."""  # noqa: D401
            self._check_calculation()  # pragma: no cover
 
-        def on_modified(self, event):
-            """A Watchdog Event Handler method."""
+        def on_modified(self, event) -> None:
+            """A Watchdog Event Handler method."""  # noqa: D401
             self._check_calculation()
 
     def __init__(
@@ -99,52 +96,54 @@ class _PickleCore(_BaseCore):
             os.path.join(os.path.realpath(self.cache_dir), self.cache_fname)
         )
 
-    def _reload_cache(self):
+    def _reload_cache(self) -> None:
         with self.lock:
             try:
-                with portalocker.Lock(
-                    self.cache_fpath, mode="rb"
-                ) as cache_file:
-                    self.cache = pickle.load(cache_file)  # noqa: S301
+                with portalocker.Lock(self.cache_fpath, mode="rb") as cf:
+                    self.cache = pickle.load(cf)  # noqa: S301
             except (FileNotFoundError, EOFError):
                 self.cache = {}
 
-    def _get_cache(self):
+    def _get_cache(self) -> Dict[str, CacheEntry]:
         with self.lock:
             if not self.cache:
                 self._reload_cache()
             return self.cache
 
-    def _get_cache_by_key(self, key=None, hash=None):
+    def _get_cache_by_key(
+        self, key=None, hash_str=None
+    ) -> Optional[Dict[str, CacheEntry]]:
         fpath = self.cache_fpath
-        fpath += f"_{key}" if hash is None else f"_{hash}"
+        fpath += f"_{hash_str or key}"
         try:
             with portalocker.Lock(fpath, mode="rb") as cache_file:
                 return pickle.load(cache_file)  # noqa: S301
         except (FileNotFoundError, EOFError):
             return None
 
-    def _clear_all_cache_files(self):
+    def _clear_all_cache_files(self) -> None:
         path, name = os.path.split(self.cache_fpath)
         for subpath in os.listdir(path):
             if subpath.startswith(f"{name}_"):
                 os.remove(os.path.join(path, subpath))
 
-    def _clear_being_calculated_all_cache_files(self):
+    def _clear_being_calculated_all_cache_files(self) -> None:
         path, name = os.path.split(self.cache_fpath)
         for subpath in os.listdir(path):
             if subpath.startswith(name):
-                entry = self._get_cache_by_key(hash=subpath.split("_")[-1])
+                entry = self._get_cache_by_key(hash_str=subpath.split("_")[-1])
                 if entry is not None:
-                    entry["being_calculated"] = False
-                    self._save_cache(entry, hash=subpath.split("_")[-1])
+                    entry.being_calculated = False
+                    self._save_cache(entry, hash_str=subpath.split("_")[-1])
 
-    def _save_cache(self, cache, key=None, hash=None):
+    def _save_cache(
+        self, cache, key: str = None, hash_str: str = None
+    ) -> None:
         fpath = self.cache_fpath
         if key is not None:
             fpath += f"_{key}"
-        elif hash is not None:
-            fpath += f"_{hash}"
+        elif hash_str is not None:
+            fpath += f"_{hash_str}"
         with self.lock:
             self.cache = cache
             with portalocker.Lock(fpath, mode="wb") as cache_file:
@@ -152,7 +151,9 @@ class _PickleCore(_BaseCore):
         if key is None:
             self._reload_cache()
 
-    def get_entry_by_key(self, key, reload=False):
+    def get_entry_by_key(
+        self, key: str, reload: bool = False
+    ) -> Tuple[str, CacheEntry]:
         with self.lock:
             if self.separate_files:
                 return key, self._get_cache_by_key(key)
@@ -160,13 +161,14 @@ class _PickleCore(_BaseCore):
                 self._reload_cache()
             return key, self._get_cache().get(key, None)
 
-    def set_entry(self, key, func_res):
-        key_data = {
-            "value": func_res,
-            "time": datetime.now(),
-            "stale": False,
-            "being_calculated": False,
-        }
+    def set_entry(self, key: str, func_res: Any) -> None:
+        key_data = CacheEntry(
+            value=func_res,
+            time=datetime.now(),
+            stale=False,
+            _processing=False,
+            _completed=True,
+        )
         if self.separate_files:
             self._save_cache(key_data, key)
             return  # pragma: no cover
@@ -176,51 +178,51 @@ class _PickleCore(_BaseCore):
             cache[key] = key_data
             self._save_cache(cache)
 
-    def mark_entry_being_calculated_separate_files(self, key):
+    def mark_entry_being_calculated_separate_files(self, key: str) -> None:
         self._save_cache(
-            {
-                "value": None,
-                "time": datetime.now(),
-                "stale": False,
-                "being_calculated": True,
-            },
+            CacheEntry(
+                value=None,
+                time=datetime.now(),
+                stale=False,
+                _processing=True,
+            ),
             key=key,
         )
 
-    def mark_entry_not_calculated_separate_files(self, key):
+    def mark_entry_not_calculated_separate_files(self, key: str) -> None:
         _, entry = self.get_entry_by_key(key)
-        entry["being_calculated"] = False
+        entry._processing = False
         self._save_cache(entry, key=key)
 
-    def mark_entry_being_calculated(self, key):
+    def mark_entry_being_calculated(self, key: str) -> None:
         if self.separate_files:
             self.mark_entry_being_calculated_separate_files(key)
             return  # pragma: no cover
 
         with self.lock:
             cache = self._get_cache()
-            try:
-                cache[key]["being_calculated"] = True
-            except KeyError:
-                cache[key] = {
-                    "value": None,
-                    "time": datetime.now(),
-                    "stale": False,
-                    "being_calculated": True,
-                }
+            if key in cache:
+                cache[key]._processing = True
+            else:
+                cache[key] = CacheEntry(
+                    value=None,
+                    time=datetime.now(),
+                    stale=False,
+                    _processing=True,
+                )
             self._save_cache(cache)
 
-    def mark_entry_not_calculated(self, key):
+    def mark_entry_not_calculated(self, key: str) -> None:
         if self.separate_files:
             self.mark_entry_not_calculated_separate_files(key)
         with self.lock:
             cache = self._get_cache()
             # that's ok, we don't need an entry in that case
-            with suppress(KeyError):
-                cache[key]["being_calculated"] = False
+            if isinstance(cache, dict) and key in cache:
+                cache[key]._processing = False
             self._save_cache(cache)
 
-    def wait_on_entry_calc(self, key):
+    def wait_on_entry_calc(self, key: str) -> Any:
         if self.separate_files:
             entry = self._get_cache_by_key(key)
             filename = f"{self.cache_fname}_{key}"
@@ -229,8 +231,8 @@ class _PickleCore(_BaseCore):
             self._reload_cache()
             entry = self._get_cache()[key]
             filename = self.cache_fname
-        if not entry["being_calculated"]:
-            return entry["value"]
+        if not entry._processing:
+            return entry.value
         event_handler = _PickleCore.CacheChangeHandler(
             filename=filename, core=self, key=key
         )
@@ -245,13 +247,13 @@ class _PickleCore(_BaseCore):
             self.check_calc_timeout(time_spent)
         return event_handler.value
 
-    def clear_cache(self):
+    def clear_cache(self) -> None:
         if self.separate_files:
             self._clear_all_cache_files()
         else:
             self._save_cache({})
 
-    def clear_being_calculated(self):
+    def clear_being_calculated(self) -> None:
         if self.separate_files:
             self._clear_being_calculated_all_cache_files()
             return  # pragma: no cover
@@ -259,5 +261,5 @@ class _PickleCore(_BaseCore):
         with self.lock:
             cache = self._get_cache()
             for key in cache:
-                cache[key]["being_calculated"] = False
+                cache[key]._processing = False
             self._save_cache(cache)
cachier/version.info CHANGED
@@ -1 +1 @@
-3.0.0
+3.1.0
@@ -1,33 +1,50 @@
 Metadata-Version: 2.1
 Name: cachier
-Version: 3.0.0
+Version: 3.1.0
 Summary: Persistent, stale-free, local and cross-machine caching for Python functions.
-Home-page: https://github.com/python-cachier/cachier
-Author: Shay Palachy & al.
-Author-email: shay.palachy@gmail.com
-License: MIT
-Keywords: cache,persistence,mongo,memoization,decorator
-Platform: linux
-Platform: osx
-Platform: windows
+Author-email: Shay Palachy Affek <shay.palachy@gmail.com>
+License: MIT License
+
+        Copyright (c) 2016 Shay Palachy
+
+        Permission is hereby granted, free of charge, to any person obtaining a copy
+        of this software and associated documentation files (the "Software"), to deal
+        in the Software without restriction, including without limitation the rights
+        to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+        copies of the Software, and to permit persons to whom the Software is
+        furnished to do so, subject to the following conditions:
+
+        The above copyright notice and this permission notice shall be included in all
+        copies or substantial portions of the Software.
+
+        THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+        IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+        FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+        AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+        LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+        OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+        SOFTWARE.
+
+Project-URL: Source, https://github.com/python-cachier/cachier
+Keywords: cache,caching,cross-machine,decorator,local,memoization,mongo,persistent
 Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3 :: Only
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Other/Nonlisted Topic
 Classifier: Topic :: Software Development :: Libraries
 Classifier: Topic :: Software Development :: Libraries :: Python Modules
 Classifier: Topic :: Utilities
-Classifier: Topic :: Other/Nonlisted Topic
-Classifier: Intended Audience :: Developers
+Description-Content-Type: text/x-rst
 License-File: LICENSE
-Requires-Dist: watchdog >=2.3.1
-Requires-Dist: portalocker >=2.3.2
-Requires-Dist: setuptools >=67.6.0
+Requires-Dist: portalocker>=2.3.2
+Requires-Dist: watchdog>=2.3.1
 
 Cachier
 #######
@@ -492,8 +509,8 @@ Notable bugfixers:
 .. |PyPI-Versions| image:: https://img.shields.io/pypi/pyversions/cachier.svg
    :target: https://pypi.python.org/pypi/cachier
 
-.. |Build-Status| image:: https://github.com/python-cachier/cachier/actions/workflows/test.yml/badge.svg
-   :target: https://github.com/python-cachier/cachier/actions/workflows/test.yml
+.. |Build-Status| image:: https://github.com/python-cachier/cachier/actions/workflows/ci-test.yml/badge.svg
+   :target: https://github.com/python-cachier/cachier/actions/workflows/ci-test.yml
 
 .. |LICENCE| image:: https://img.shields.io/pypi/l/cachier.svg
    :target: https://pypi.python.org/pypi/cachier
@@ -0,0 +1,19 @@
+cachier/__init__.py,sha256=ePVg3SxZQqCXDBSaNVziv5rK8ZjxWsUEAecjvP87-BY,300
+cachier/__main__.py,sha256=upg-TlHs1vngKYvkjoPpl3Pvl6xOx4ut-M1mElMiAo0,443
+cachier/_types.py,sha256=EGJMiw-oCIC_cDLyzw7YC40lfo8jnD3zMmoJpA9Y8Iw,238
+cachier/_version.py,sha256=yE6UYwvdoIRpw3HmNifiGwV3fqVea5PwZj_EvyosiZ8,1079
+cachier/config.py,sha256=KOGaXkBRgv66BexENrTMtrC_TYCeV1fA5v8l6Vj2CYI,3840
+cachier/core.py,sha256=qQa_GT8WQYD-VFcTS8a2v-Hys4_A1no-aM-d3lw1AFY,13149
+cachier/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cachier/version.info,sha256=svRNO24p-LG3PqRzXwBq_8TRmOH9nH1Q5zYVmx72NsY,6
+cachier/cores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+cachier/cores/base.py,sha256=jo69c2RMOXbTzDvqRV0UGa5UvyToipv3f62bICIII1k,3631
+cachier/cores/memory.py,sha256=SSa7qlSU_54YjNYEWrq9rxXozkMYXr5hadAZ3sz62l4,3336
+cachier/cores/mongo.py,sha256=eRG6XP55G4IcWnoMl5xtDufM1szf8FVbOIBbDH_r-Po,4887
+cachier/cores/pickle.py,sha256=20c5pg2CS6wAX1PdefCOjl-orec5w7tqEHVqNbZZv0s,9074
+cachier-3.1.0.dist-info/LICENSE,sha256=-2WrMJkIa0gVP6YQHXXDT7ws-S3M2NEVEF4XF3K8qrY,1069
+cachier-3.1.0.dist-info/METADATA,sha256=pDqPoWSFSkwdCHUQ-kvEzf517DippyuSaf9kDWkUtas,20102
+cachier-3.1.0.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+cachier-3.1.0.dist-info/entry_points.txt,sha256=x4Y7t6Y0Qev_3fgG-Jv7TrsvVdJty3FnGAdkT8-_5mY,49
+cachier-3.1.0.dist-info/top_level.txt,sha256=_rW_HiJumDCch67YT-WAgzcyvKg5RiYDMZq9d-0ZpaE,8
+cachier-3.1.0.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: bdist_wheel (0.42.0)
+Generator: setuptools (75.2.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
@@ -0,0 +1,2 @@
+[console_scripts]
+cachier = cachier.__main__:cli
@@ -1,19 +0,0 @@
-cachier/__init__.py,sha256=6i43oFM_ZTp47b6R8Ws2vvf2JXLvbn-TKzWESwBDb-M,304
-cachier/__main__.py,sha256=g8dgovKdDgOMgNIpnmzVRuI5Dj5xODTav45TpZlH_iA,429
-cachier/_types.py,sha256=EGJMiw-oCIC_cDLyzw7YC40lfo8jnD3zMmoJpA9Y8Iw,238
-cachier/_version.py,sha256=yE6UYwvdoIRpw3HmNifiGwV3fqVea5PwZj_EvyosiZ8,1079
-cachier/config.py,sha256=xEWpt_BeaqtrdDdZPnvZN_O3Wp4h6X9_6FfBpMeQZPw,2703
-cachier/core.py,sha256=IdLT2sfguDSvtCyX1EJfgCQnbEDECil1U2JhO5wBP5U,13080
-cachier/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cachier/version.info,sha256=KYW-iyjTrehY6Nj7S8IvVlsb9gIN_5gtzhQfdyG5mZw,6
-cachier/cores/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cachier/cores/base.py,sha256=cxtcDfRttGU6YBtq7aO9mAqzEza2y7KhHDZhRbv-aOU,3324
-cachier/cores/memory.py,sha256=JFXnXBAeP0nOXOf_vNgpcEuRNjxre8WQjiUBa7yEYmY,3128
-cachier/cores/mongo.py,sha256=jOcqxIU8wqF2jS2lOfp-VFk5iyVMKHm6AzhFGkxGm0k,4889
-cachier/cores/pickle.py,sha256=dZXUBv7xZKg39IaUERnjPEAcjpDS8sBuObrnxMTqMmk,8871
-cachier-3.0.0.dist-info/LICENSE,sha256=-2WrMJkIa0gVP6YQHXXDT7ws-S3M2NEVEF4XF3K8qrY,1069
-cachier-3.0.0.dist-info/METADATA,sha256=-hfKZSKXrKC7m102SRmJjpcelAZDtD6DoS0gSgm_wv8,18871
-cachier-3.0.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
-cachier-3.0.0.dist-info/entry_points.txt,sha256=16JU8wc6a62BLuOXVNjAiXJRp0AB5LtEwiKGYMjZjv0,49
-cachier-3.0.0.dist-info/top_level.txt,sha256=_rW_HiJumDCch67YT-WAgzcyvKg5RiYDMZq9d-0ZpaE,8
-cachier-3.0.0.dist-info/RECORD,,
@@ -1,2 +0,0 @@
-[console_scripts]
-cachier = cachier.__naim__:cli