checkpointer 2.5.0__py3-none-any.whl → 2.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checkpointer/checkpoint.py +6 -10
- checkpointer/fn_ident.py +1 -1
- checkpointer/object_hash.py +46 -40
- checkpointer/test_checkpointer.py +0 -11
- {checkpointer-2.5.0.dist-info → checkpointer-2.6.0.dist-info}/METADATA +24 -23
- {checkpointer-2.5.0.dist-info → checkpointer-2.6.0.dist-info}/RECORD +8 -8
- {checkpointer-2.5.0.dist-info → checkpointer-2.6.0.dist-info}/WHEEL +0 -0
- {checkpointer-2.5.0.dist-info → checkpointer-2.6.0.dist-info}/licenses/LICENSE +0 -0
checkpointer/checkpoint.py
CHANGED
@@ -23,7 +23,7 @@ class CheckpointerOpts(TypedDict, total=False):
   root_path: Path | str | None
   when: bool
   verbosity: Literal[0, 1]
-
+  hash_by: Callable | None
   should_expire: Callable[[datetime], bool] | None
   capture: bool

@@ -33,7 +33,7 @@ class Checkpointer:
     self.root_path = Path(opts.get("root_path", DEFAULT_DIR) or ".")
     self.when = opts.get("when", True)
     self.verbosity = opts.get("verbosity", 1)
-    self.
+    self.hash_by = opts.get("hash_by")
     self.should_expire = opts.get("should_expire")
     self.capture = opts.get("capture", False)

@@ -66,7 +66,7 @@ class CheckpointFn(Generic[Fn]):
     store_format = self.checkpointer.format
     Storage = STORAGE_MAP[store_format] if isinstance(store_format, str) else store_format
     deep_hashes = [child._set_ident().fn_hash_raw for child in iterate_checkpoint_fns(self)]
-    self.fn_hash = str(ObjectHash().
+    self.fn_hash = str(ObjectHash().write_text(self.fn_hash_raw, iter=deep_hashes))
     self.fn_subdir = f"{fn_file}/{fn_name}/{self.fn_hash[:16]}"
     self.is_async = inspect.iscoroutinefunction(wrapped)
     self.storage = Storage(self)

@@ -88,13 +88,9 @@ class CheckpointFn(Generic[Fn]):
     pointfn._lazyinit()

   def get_checkpoint_id(self, args: tuple, kw: dict) -> str:
-
-
-
-    checkpoint_id = self.checkpointer.path(*args, **kw)
-    if not isinstance(checkpoint_id, str):
-      raise CheckpointError(f"path function must return a string, got {type(checkpoint_id)}")
-    return checkpoint_id
+    hash_params = [self.checkpointer.hash_by(*args, **kw)] if self.checkpointer.hash_by else (args, kw)
+    call_hash = ObjectHash(self.fn_hash, *hash_params, digest_size=16)
+    return f"{self.fn_subdir}/{call_hash}"

   async def _store_on_demand(self, args: tuple, kw: dict, rerun: bool):
     checkpoint_id = self.get_checkpoint_id(args, kw)
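The change to `get_checkpoint_id` above drops the old `path` callable and instead hashes either the raw arguments or the value returned by the new `hash_by` option. A minimal sketch of how `hash_by` might be used, based only on the changed lines (the decorated function and the lambda are illustrative, not from the package):

```python
from checkpointer import checkpoint

# `hash_by` receives the same arguments as the decorated function and returns the
# value that gets hashed, so an expensive-to-hash argument can be reduced to a cheap key.
@checkpoint(hash_by=lambda rows, column: (len(rows), column))
def column_mean(rows: list[dict], column: str) -> float:
  return sum(row[column] for row in rows) / len(rows)
```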
checkpointer/fn_ident.py
CHANGED
@@ -11,7 +11,7 @@ from .utils import AttrDict, distinct, get_cell_contents, iterate_and_upcoming,
 cwd = Path.cwd()

 def is_class(obj) -> TypeGuard[Type]:
-  # isinstance works too, but needlessly triggers
+  # isinstance works too, but needlessly triggers _lazyinit()
   return issubclass(type(obj), type)

 def extract_classvars(code: CodeType, scope_vars: AttrDict) -> dict[str, dict[str, Type]]:
checkpointer/object_hash.py
CHANGED
@@ -3,7 +3,7 @@ import hashlib
 import io
 import re
 from collections.abc import Iterable
-from contextlib import nullcontext
+from contextlib import nullcontext, suppress
 from decimal import Decimal
 from itertools import chain
 from pickle import HIGHEST_PROTOCOL as PROTOCOL

@@ -11,19 +11,18 @@ from types import BuiltinFunctionType, FunctionType, GeneratorType, MethodType,
 from typing import Any, TypeAliasType, TypeVar
 from .utils import ContextVar, get_fn_body

-
+np, torch = None, None
+
+with suppress(Exception):
   import numpy as np
-
-
-try:
+
+with suppress(Exception):
   import torch
-except:
-  torch = None

 def encode_type(t: type | FunctionType) -> str:
   return f"{t.__module__}:{t.__qualname__}"

-def
+def encode_type_of(v: Any) -> str:
   return encode_type(type(v))

 class ObjectHashError(Exception):

@@ -32,11 +31,11 @@ class ObjectHashError(Exception):
     self.obj = obj

 class ObjectHash:
-  def __init__(self, *
+  def __init__(self, *objs: Any, iter: Iterable[Any] = (), digest_size=64, tolerate_errors=False) -> None:
     self.hash = hashlib.blake2b(digest_size=digest_size)
     self.current: dict[int, int] = {}
     self.tolerate_errors = ContextVar(tolerate_errors)
-    self.update(iter=chain(
+    self.update(iter=chain(objs, iter))

   def copy(self) -> "ObjectHash":
     new = ObjectHash(tolerate_errors=self.tolerate_errors.value)

@@ -48,15 +47,21 @@ class ObjectHash:

   __str__ = hexdigest

-  def
+  def nested_hash(self, *objs: Any) -> str:
+    return ObjectHash(iter=objs, tolerate_errors=self.tolerate_errors.value).hexdigest()
+
+  def write_bytes(self, *data: bytes, iter: Iterable[bytes] = ()) -> "ObjectHash":
     for d in chain(data, iter):
-      self.hash.update(d
+      self.hash.update(d)
     return self

+  def write_text(self, *data: str, iter: Iterable[str] = ()) -> "ObjectHash":
+    return self.write_bytes(iter=(d.encode() for d in chain(data, iter)))
+
   def header(self, *args: Any) -> "ObjectHash":
-    return self.
+    return self.write_bytes(":".join(map(str, args)).encode())

-  def update(self, *objs: Any, iter: Iterable[Any] =
+  def update(self, *objs: Any, iter: Iterable[Any] = (), tolerate_errors: bool | None=None) -> "ObjectHash":
     with nullcontext() if tolerate_errors is None else self.tolerate_errors.set(tolerate_errors):
       for obj in chain(objs, iter):
         try:

@@ -74,19 +79,20 @@ class ObjectHash:
        self.header("null")

      case bool() | int() | float() | complex() | Decimal() | ObjectHash():
-        self.header("number",
+        self.header("number", encode_type_of(obj), obj)

      case str() | bytes() | bytearray() | memoryview():
-
+        b = obj.encode() if isinstance(obj, str) else obj
+        self.header("bytes", encode_type_of(obj), len(b)).write_bytes(b)

      case set() | frozenset():
-        self.header("set", encode_val(obj), len(obj))
        try:
          items = sorted(obj)
+          header = "set"
        except:
-          self.
-
-          self.update(iter=items)
+          items = sorted(map(self.nested_hash, obj))
+          header = "set-unsortable"
+        self.header(header, encode_type_of(obj), len(obj)).update(iter=items)

      case TypeVar():
        self.header("TypeVar").update(obj.__name__, obj.__bound__, obj.__constraints__, obj.__contravariant__, obj.__covariant__)

@@ -113,7 +119,7 @@ class ObjectHash:
        self.header("generator", obj.__qualname__)._update_iterator(obj)

      case io.TextIOWrapper() | io.FileIO() | io.BufferedRandom() | io.BufferedWriter() | io.BufferedReader():
-        self.header("file",
+        self.header("file", encode_type_of(obj)).update(obj.name, obj.mode, obj.tell())

      case type():
        self.header("type", encode_type(obj))

@@ -122,20 +128,20 @@ class ObjectHash:
        self.header("dtype").update(obj.__class__, obj.descr)

      case _ if np and isinstance(obj, np.ndarray):
-        self.header("ndarray",
+        self.header("ndarray", encode_type_of(obj), obj.shape, obj.strides).update(obj.dtype)
        if obj.dtype.hasobject:
          self.update(obj.__reduce_ex__(PROTOCOL))
        else:
          array = np.ascontiguousarray(obj if obj.base is None else obj.base).view(np.uint8)
-          self.
+          self.write_bytes(array.data)

      case _ if torch and isinstance(obj, torch.Tensor):
-        self.header("tensor",
+        self.header("tensor", encode_type_of(obj), obj.dtype, tuple(obj.shape), obj.stride(), obj.device)
        if obj.device.type != "cpu":
          obj = obj.cpu()
        storage = obj.storage()
-        buffer = (ctypes.c_ubyte *
-        self.
+        buffer = (ctypes.c_ubyte * storage.nbytes()).from_address(storage.data_ptr())
+        self.write_bytes(memoryview(buffer))

      case _ if id(obj) in self.current:
        self.header("circular", self.current[id(obj)])

@@ -145,36 +151,36 @@ class ObjectHash:
       self.current[id(obj)] = len(self.current)
       match obj:
         case list() | tuple():
-          self.header("list",
+          self.header("list", encode_type_of(obj), len(obj)).update(iter=obj)
         case dict():
           try:
             items = sorted(obj.items())
+            header = "dict"
           except:
-            items = sorted((
-
+            items = sorted((self.nested_hash(key), val) for key, val in obj.items())
+            header = "dict-unsortable"
+          self.header(header, encode_type_of(obj), len(obj)).update(iter=chain.from_iterable(items))
         case _:
           self._update_object(obj)
     finally:
       del self.current[id(obj)]

-  def _update_iterator(self, obj: Iterable) ->
-    self.header("iterator",
+  def _update_iterator(self, obj: Iterable) -> "ObjectHash":
+    return self.header("iterator", encode_type_of(obj)).update(iter=obj).header("iterator-end")

   def _update_object(self, obj: object) -> "ObjectHash":
-    self.header("instance",
-
-
-
-
+    self.header("instance", encode_type_of(obj))
+    reduced = None
+    with suppress(Exception):
+      reduced = obj.__reduce_ex__(PROTOCOL)
+    with suppress(Exception):
+      reduced = reduced or obj.__reduce__()
     if isinstance(reduced, str):
       return self.header("reduce-str").update(reduced)
     if reduced:
       reduced = list(reduced)
       it = reduced.pop(3) if len(reduced) >= 4 else None
-      self.header("reduce").update(reduced)
-      if it is not None:
-        self._update_iterator(it)
-      return self
+      return self.header("reduce").update(reduced)._update_iterator(it or ())
     if state := hasattr(obj, "__getstate__") and obj.__getstate__():
       return self.header("getstate").update(state)
     if len(getattr(obj, "__slots__", [])):
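Taken together, the new `write_bytes`/`write_text` helpers and the variadic constructor give `ObjectHash` a small builder-style surface. A rough sketch of how the rest of the package appears to use it, pieced together from the changed lines in checkpoint.py and object_hash.py (an illustration, not documented API; the string values are placeholders):

```python
from checkpointer.object_hash import ObjectHash

# Function hash: feed source-derived strings through write_text (it returns self, so str() works).
fn_hash = str(ObjectHash().write_text("fn_hash_raw", iter=["child_hash_1", "child_hash_2"]))

# Call hash: positional objects passed to the constructor are hashed via update();
# digest_size=16 matches what get_checkpoint_id uses for the per-call id.
call_hash = ObjectHash(fn_hash, (1, 2), {"x": 3}, digest_size=16)
print(call_hash)  # blake2b hex digest (__str__ is aliased to hexdigest)
```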
checkpointer/test_checkpointer.py
CHANGED
@@ -83,17 +83,6 @@ async def test_async_caching():

   assert result1 == result2 == 9

-def test_custom_path_caching():
-  def custom_path(a, b):
-    return f"add/{a}-{b}"
-
-  @checkpoint(path=custom_path)
-  def add(a, b):
-    return a + b
-
-  add(3, 4)
-  assert (checkpoint.root_path / "add/3-4.pkl").exists()
-
 def test_force_recalculation():
   @checkpoint
   def square(x: int) -> int:
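The deleted `test_custom_path_caching` exercised the removed `path` option. A hypothetical equivalent under the new `hash_by` option (not part of the package's test suite) could look like this:

```python
from checkpointer import checkpoint

def test_hash_by_caching():
  @checkpoint(hash_by=lambda a, b: frozenset([a, b]))
  def add(a, b):
    return a + b

  assert add(3, 4) == 7
  # (4, 3) produces the same frozenset key as (3, 4), so this call reuses the stored result.
  assert add(4, 3) == 7
```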
{checkpointer-2.5.0.dist-info → checkpointer-2.6.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: checkpointer
-Version: 2.5.0
+Version: 2.6.0
 Summary: A Python library for memoizing function results with support for multiple storage backends, async runtimes, and automatic cache invalidation
 Project-URL: Repository, https://github.com/Reddan/checkpointer.git
 Author: Hampus Hallman

@@ -12,6 +12,8 @@ License: Copyright 2018-2025 Hampus Hallman

 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 License-File: LICENSE
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Requires-Python: >=3.12
 Description-Content-Type: text/markdown

@@ -28,6 +30,7 @@ Adding or removing `@checkpoint` doesn't change how your code works. You can app
 - ⏲️ **Custom Expiration Logic**: Automatically invalidate old checkpoints.
 - 📂 **Flexible Path Configuration**: Control where checkpoints are stored.
 - 📦 **Captured Variables Handling**: Optionally include captured variables in cache invalidation.
+- ⚡ **Custom Argument Hashing**: Override argument hashing for speed or specialized hashing logic.

 ---

@@ -61,9 +64,9 @@ When you use `@checkpoint`, the function's **arguments** (`args`, `kwargs`) are

 Additionally, `checkpointer` ensures that caches are invalidated when a function's implementation or any of its dependencies change. Each function is assigned a hash based on:

-1. **
-2. **
-3. **
+1. **Function Code**: The hash updates when the function’s own source code changes.
+2. **Dependencies**: If the function calls other user-defined functions, changes in those dependencies also update the hash.
+3. **External Variables** *(with `capture=True`)*: Any global or closure-based variables used by the function are included in its hash, so changes to those variables also trigger cache invalidation.

 ### Example: Cache Invalidation

@@ -155,21 +158,19 @@ stored_result = expensive_function.get(4)

 ### Refresh Function Hash

-
-
-Use the `reinit` method to manually refresh the function's hash within the same session:
+If `capture=True`, you might need to re-hash a function during the same Python session. For that, call `reinit`:

 ```python
 expensive_function.reinit()
 ```

-This
+This tells `checkpointer` to recalculate the function hash, reflecting changes in captured variables.

 ---

 ## Storage Backends

-`checkpointer` works with
+`checkpointer` works with built-in and custom storage backends, so you can use what's provided or roll your own as needed.

 ### Built-In Backends

@@ -201,10 +202,10 @@ from checkpointer import checkpoint, Storage
 from datetime import datetime

 class CustomStorage(Storage):
-  def
-  def
-  def
-  def load(self, path): ... #
+  def exists(self, path) -> bool: ... # Check if a checkpoint exists
+  def checkpoint_date(self, path) -> datetime: ... # Get the checkpoint's timestamp
+  def store(self, path, data): ... # Save data to the checkpoint
+  def load(self, path): ... # Load data from the checkpoint
   def delete(self, path): ... # Delete the checkpoint

 @checkpoint(format=CustomStorage)

@@ -212,21 +213,21 @@ def custom_cached(x: int):
   return x ** 2
 ```

-
+Use a custom backend to integrate with databases, cloud storage, or specialized file formats.

 ---

 ## Configuration Options ⚙️

-| Option | Type
-|
-| `capture` | `bool`
-| `format` | `"pickle"`, `"memory"`, `Storage`
-| `root_path` | `Path`, `str`, or `None`
-| `when` | `bool`
-| `verbosity` | `0` or `1`
-| `
-| `
+| Option          | Type                              | Default              | Description                                                |
+|-----------------|-----------------------------------|----------------------|------------------------------------------------------------|
+| `capture`       | `bool`                            | `False`              | Include captured variables in function hashes.             |
+| `format`        | `"pickle"`, `"memory"`, `Storage` | `"pickle"`           | Storage backend format.                                    |
+| `root_path`     | `Path`, `str`, or `None`          | ~/.cache/checkpoints | Root directory for storing checkpoints.                    |
+| `when`          | `bool`                            | `True`               | Enable or disable checkpointing.                           |
+| `verbosity`     | `0` or `1`                        | `1`                  | Logging verbosity.                                         |
+| `should_expire` | `Callable[[datetime], bool]`      | `None`               | Custom expiration logic.                                   |
+| `hash_by`       | `Callable[..., Any]`              | `None`               | Custom function that transforms arguments before hashing.  |

 ---

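The reworked options table above pairs each setting with its default. A small illustrative combination of the documented options (the decorated function is hypothetical, and this assumes `should_expire` receives the checkpoint's stored timestamp):

```python
from datetime import datetime, timedelta
from checkpointer import checkpoint

# Quiet logging, custom checkpoint directory, and weekly expiry.
@checkpoint(
  verbosity=0,
  root_path="artifacts/checkpoints",
  should_expire=lambda stored_at: datetime.now() - stored_at > timedelta(days=7),
)
def fetch_report(day: str) -> dict:
  return {"day": day}
```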
{checkpointer-2.5.0.dist-info → checkpointer-2.6.0.dist-info}/RECORD
CHANGED
@@ -1,16 +1,16 @@
 checkpointer/__init__.py,sha256=ZJ6frUNgkklUi85b5uXTyTfRzMvZgQOJY-ZOnu7jh78,777
-checkpointer/checkpoint.py,sha256=
-checkpointer/fn_ident.py,sha256=
-checkpointer/object_hash.py,sha256=
+checkpointer/checkpoint.py,sha256=2CAlNPnMLeou8SwHWeIjJ4kQhSqnpM9EBP_f-cYdSlE,6123
+checkpointer/fn_ident.py,sha256=TEM_SdjzQ5OgRJnqNgyYZZKONx7tM7Dk4bNM5TB_RyY,4311
+checkpointer/object_hash.py,sha256=o7Qr9VViWPmG1Fy1S3RediCagRydEVgTVZrZ9nRN54E,7215
 checkpointer/print_checkpoint.py,sha256=21aeqgM9CMjNAJyScqFmXCWWfh3jBIn7o7i5zJkZGaA,1369
-checkpointer/test_checkpointer.py,sha256=
+checkpointer/test_checkpointer.py,sha256=uJ2Pg9Miq1W0l28eNlRhMjuT_R8c-ygYwp3KP3VW8Os,3600
 checkpointer/utils.py,sha256=Rvm2NaJHtPTusM7fyHz_w9HUy_fqQfx8S1fr5CBWGL0,3047
 checkpointer/storages/__init__.py,sha256=Kl4Og5jhYxn6m3tB_kTMsabf4_eWVLmFVAoC-pikNQE,301
 checkpointer/storages/bcolz_storage.py,sha256=3QkSUSeG5s2kFuVV_LZpzMn1A5E7kqC7jk7w35c0NyQ,2314
 checkpointer/storages/memory_storage.py,sha256=S5ayOZE_CyaFQJ-vSgObTanldPzG3gh3NksjNAc7vsk,1282
 checkpointer/storages/pickle_storage.py,sha256=lJ0ton9ib3eifiny8XtPSNsx-w4Cm8oYUlbmKob34xU,1554
 checkpointer/storages/storage.py,sha256=_m18Z8TKrdAbi6YYYQmuNOnhna4RB2sJDn1v3liaU3U,721
-checkpointer-2.
-checkpointer-2.
-checkpointer-2.
-checkpointer-2.
+checkpointer-2.6.0.dist-info/METADATA,sha256=RWzn1RNHN76iF4hNkomVLouWvtjHtOody5doC4ajRk8,10556
+checkpointer-2.6.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+checkpointer-2.6.0.dist-info/licenses/LICENSE,sha256=9xVsdtv_-uSyY9Xl9yujwAPm4-mjcCLeVy-ljwXEWbo,1059
+checkpointer-2.6.0.dist-info/RECORD,,
{checkpointer-2.5.0.dist-info → checkpointer-2.6.0.dist-info}/WHEEL
File without changes

{checkpointer-2.5.0.dist-info → checkpointer-2.6.0.dist-info}/licenses/LICENSE
File without changes