checkpointer-1.1.0-py3-none-any.whl → checkpointer-2.0.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checkpointer/__init__.py +7 -8
- checkpointer/checkpoint.py +120 -65
- checkpointer/function_body.py +34 -40
- checkpointer/print_checkpoint.py +52 -0
- checkpointer/storages/bcolz_storage.py +78 -72
- checkpointer/storages/memory_storage.py +21 -10
- checkpointer/storages/pickle_storage.py +43 -37
- checkpointer/types.py +19 -0
- checkpointer/utils.py +4 -5
- checkpointer-2.0.0.dist-info/METADATA +212 -0
- checkpointer-2.0.0.dist-info/RECORD +13 -0
- {checkpointer-1.1.0.dist-info → checkpointer-2.0.0.dist-info}/WHEEL +1 -2
- checkpointer-1.1.0.dist-info/LICENSE.txt → checkpointer-2.0.0.dist-info/licenses/LICENSE +1 -1
- checkpointer/storage.py +0 -60
- checkpointer-1.1.0.dist-info/METADATA +0 -12
- checkpointer-1.1.0.dist-info/RECORD +0 -13
- checkpointer-1.1.0.dist-info/top_level.txt +0 -1
checkpointer/__init__.py
CHANGED
```diff
@@ -1,10 +1,9 @@
-import …
-from . …
-from .storage import store_on_demand, read_from_store
+from .checkpoint import Checkpointer, CheckpointFn
+from .checkpoint import CheckpointError, CheckpointReadFail
+from .types import Storage
 from .function_body import get_function_hash
 
+create_checkpointer = Checkpointer
+checkpoint = Checkpointer()
+memory_checkpoint = Checkpointer(format="memory")
+tmp_checkpoint = Checkpointer(root_path="/tmp/checkpoints")
```
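The package root now exports ready-made `Checkpointer` instances (`checkpoint`, `memory_checkpoint`, `tmp_checkpoint`). A minimal usage sketch based on the README further down in this diff; the decorated functions are made-up examples, not part of the package:

```python
from checkpointer import checkpoint, memory_checkpoint

@checkpoint                  # pickled under ~/.cache/checkpoints by default
def slow_square(n: int) -> int:
  return n ** 2

@memory_checkpoint           # kept only in process memory
def cheap_lookup(key: str) -> str:
  return key.upper()

slow_square(4)  # computes and stores
slow_square(4)  # loads the stored result
```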
checkpointer/checkpoint.py
CHANGED
```diff
@@ -1,68 +1,123 @@
 import inspect
-from collections import namedtuple
-from pathlib import Path
-from functools import wraps
 import relib.hashing as hashing
-from …
+from typing import Generic, TypeVar, TypedDict, Unpack, Literal, Union, Any, cast, overload
+from collections.abc import Callable
+from datetime import datetime
+from pathlib import Path
+from functools import update_wrapper
+from .types import Storage
 from .function_body import get_function_hash
-from .utils import …
+from .utils import unwrap_fn, sync_resolve_coroutine
+from .storages.pickle_storage import PickleStorage
+from .storages.memory_storage import MemoryStorage
+from .storages.bcolz_storage import BcolzStorage
+from .print_checkpoint import print_checkpoint
+
+Fn = TypeVar("Fn", bound=Callable)
+
+DEFAULT_DIR = Path.home() / ".cache/checkpoints"
+STORAGE_MAP = {"memory": MemoryStorage, "pickle": PickleStorage, "bcolz": BcolzStorage}
+
+class CheckpointError(Exception):
+  pass
+
+class CheckpointReadFail(CheckpointError):
+  pass
+
+StorageType = Literal["pickle", "memory", "bcolz"] | Storage
+CheckpointPath = str | Callable[..., str] | None
+ShouldExpire = Callable[[datetime], bool]
+
+class CheckpointerOpts(TypedDict, total=False):
+  format: StorageType
+  root_path: Path | str | None
+  when: bool
+  verbosity: Literal[0, 1]
+  path: CheckpointPath
+  should_expire: ShouldExpire
+
+class Checkpointer:
+  def __init__(self, **opts: Unpack[CheckpointerOpts]):
+    self.format = opts.get("format", "pickle")
+    self.root_path = Path(opts.get("root_path", DEFAULT_DIR) or ".")
+    self.when = opts.get("when", True)
+    self.verbosity = opts.get("verbosity", 1)
+    self.path = opts.get("path")
+    self.should_expire = opts.get("should_expire")
+
+  def get_storage(self) -> Storage:
+    return STORAGE_MAP[self.format] if isinstance(self.format, str) else self.format
+
+  @overload
+  def __call__(self, fn: Fn, **override_opts: Unpack[CheckpointerOpts]) -> "CheckpointFn[Fn]": ...
+  @overload
+  def __call__(self, fn=None, **override_opts: Unpack[CheckpointerOpts]) -> "Checkpointer": ...
+  def __call__(self, fn: Fn | None=None, **override_opts: Unpack[CheckpointerOpts]) -> Union["Checkpointer", "CheckpointFn[Fn]"]:
+    if override_opts:
+      opts = CheckpointerOpts(**{**self.__dict__, **override_opts})
+      return Checkpointer(**opts)(fn)
+
+    return CheckpointFn(self, fn) if callable(fn) else self
+
+class CheckpointFn(Generic[Fn]):
+  def __init__(self, checkpointer: Checkpointer, fn: Fn):
+    wrapped = unwrap_fn(fn)
+    file_name = Path(wrapped.__code__.co_filename).name
+    update_wrapper(cast(Callable, self), wrapped)
+    self.checkpointer = checkpointer
+    self.fn = fn
+    self.fn_hash = get_function_hash(wrapped)
+    self.fn_id = f"{file_name}/{wrapped.__name__}"
+    self.is_async = inspect.iscoroutinefunction(fn)
+
+  def get_checkpoint_id(self, args: tuple, kw: dict) -> str:
+    match self.checkpointer.path:
+      case str() as path:
+        return path
+      case Callable() as path:
+        p = path(*args, **kw)
+        assert isinstance(p, str), "path function must return a string"
+        return p
+      case _:
+        return f"{self.fn_id}/{hashing.hash([self.fn_hash, args, kw or 0])}"
+
+  async def _store_on_demand(self, args: tuple, kw: dict, force: bool):
+    checkpoint_id = self.get_checkpoint_id(args, kw)
+    checkpoint_path = self.checkpointer.root_path / checkpoint_id
+    storage = self.checkpointer.get_storage()
+    should_log = storage is not MemoryStorage and self.checkpointer.verbosity > 0
+    refresh = force \
+      or storage.is_expired(checkpoint_path) \
+      or (self.checkpointer.should_expire and storage.should_expire(checkpoint_path, self.checkpointer.should_expire))
+
+    if refresh:
+      print_checkpoint(should_log, "MEMORIZING", checkpoint_id, "blue")
+      data = self.fn(*args, **kw)
+      if inspect.iscoroutine(data):
+        data = await data
+      return storage.store_data(checkpoint_path, data)
+
+    try:
+      data = storage.load_data(checkpoint_path)
+      print_checkpoint(should_log, "REMEMBERED", checkpoint_id, "green")
+      return data
+    except (EOFError, FileNotFoundError):
+      print_checkpoint(should_log, "CORRUPTED", checkpoint_id, "yellow")
+      storage.delete_data(checkpoint_path)
+      return await self._store_on_demand(args, kw, force)
+
+  def _call(self, args: tuple, kw: dict, force=False):
+    if not self.checkpointer.when:
+      return self.fn(*args, **kw)
+    coroutine = self._store_on_demand(args, kw, force)
+    return coroutine if self.is_async else sync_resolve_coroutine(coroutine)
+
+  __call__: Fn = cast(Fn, lambda self, *args, **kw: self._call(args, kw))
+  rerun: Fn = cast(Fn, lambda self, *args, **kw: self._call(args, kw, True))
+
+  def get(self, *args, **kw) -> Any:
+    checkpoint_path = self.checkpointer.root_path / self.get_checkpoint_id(args, kw)
+    try:
+      return self.checkpointer.get_storage().load_data(checkpoint_path)
+    except:
+      raise CheckpointReadFail()
```
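The new `CheckpointerOpts` surface adds per-function `path` and `should_expire` hooks (typed above as `CheckpointPath` and `ShouldExpire`). A hedged sketch of how they would be wired up; `fetch_weather` and the one-hour policy are illustrative, not part of the package:

```python
from datetime import datetime, timedelta
from checkpointer import checkpoint

@checkpoint(
  # path may be a fixed string or a callable over the call arguments; it must return a str
  path=lambda city: f"weather/{city}",
  # should_expire receives the datetime at which the checkpoint was created
  should_expire=lambda created: datetime.now() - created > timedelta(hours=1),
)
def fetch_weather(city: str) -> dict:
  ...
```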
checkpointer/function_body.py
CHANGED
```diff
@@ -1,52 +1,46 @@
 import inspect
-from types import FunctionType, CodeType
 import relib.hashing as hashing
+from collections.abc import Callable
+from types import FunctionType, CodeType
 from pathlib import Path
-from .utils import …
+from .utils import unwrap_fn
 
 cwd = Path.cwd()
 
-def …
-  return Path(inspect.getfile(…
+def get_fn_path(fn: Callable) -> Path:
+  return Path(inspect.getfile(fn)).resolve()
 
-def get_function_body(…
+def get_function_body(fn: Callable) -> str:
   # TODO: Strip comments
-  lines = inspect.getsourcelines(…
+  lines = inspect.getsourcelines(fn)[0]
   lines = [line.rstrip() for line in lines]
   lines = [line for line in lines if line]
-  return …
+  return "\n".join(lines)
 
-def get_code_children(…
-  consts = [const for const in …
+def get_code_children(code: CodeType) -> list[str]:
+  consts = [const for const in code.co_consts if isinstance(const, CodeType)]
   children = [child for const in consts for child in get_code_children(const)]
-  return list(…
-
-def …
-  return funcs
-
-def get_function_hash(func):
-  funcs = [func] + get_func_children(func)
-  function_bodies = list(map(get_function_body, funcs))
-  function_bodies_hash = hashing.hash(function_bodies)
-  return function_bodies_hash
+  return list(code.co_names) + children
+
+def is_user_fn(candidate_fn, cleared_fns: set[Callable]) -> bool:
+  return isinstance(candidate_fn, FunctionType) \
+    and candidate_fn not in cleared_fns \
+    and cwd in get_fn_path(candidate_fn).parents
+
+def append_fn_children(cleared_fns: set[Callable], fn: Callable) -> None:
+  code_children = get_code_children(fn.__code__)
+  fn_children = [unwrap_fn(fn.__globals__.get(co_name, None)) for co_name in code_children]
+  fn_children = [child for child in fn_children if is_user_fn(child, cleared_fns)]
+  cleared_fns.update(fn_children)
+  for child_fn in fn_children:
+    append_fn_children(cleared_fns, child_fn)
+
+def get_fn_children(fn: Callable) -> list[Callable]:
+  cleared_fns: set[Callable] = set()
+  append_fn_children(cleared_fns, fn)
+  return sorted(cleared_fns, key=lambda fn: fn.__name__)
+
+def get_function_hash(fn: Callable) -> str:
+  fns = [fn] + get_fn_children(fn)
+  fn_bodies = list(map(get_function_body, fns))
+  return hashing.hash(fn_bodies)
```
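`get_code_children` collects the global names referenced by a function's bytecode, and `append_fn_children` resolves those names through `fn.__globals__` to user-defined functions under the current working directory, whose bodies are then hashed together. A standalone sketch of the CPython attributes this relies on (not package code):

```python
def multiply(a, b):
  return a * b

def helper(x):
  return multiply(x + 1, 2)

# co_names lists global names referenced by the bytecode; checkpointer looks
# them up in __globals__ to collect the dependencies whose source gets hashed.
print(helper.__code__.co_names)                    # ('multiply',)
print(helper.__globals__["multiply"] is multiply)  # True
```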
checkpointer/print_checkpoint.py
ADDED
```python
import io
import os
import sys
from typing import Literal

Color = Literal[
  "black", "grey", "red", "green", "yellow", "blue", "magenta",
  "cyan", "light_grey", "dark_grey", "light_red", "light_green",
  "light_yellow", "light_blue", "light_magenta", "light_cyan", "white",
]

COLOR_MAP: dict[Color, int] = {
  "black": 30,
  "grey": 30,
  "red": 31,
  "green": 32,
  "yellow": 33,
  "blue": 34,
  "magenta": 35,
  "cyan": 36,
  "light_grey": 37,
  "dark_grey": 90,
  "light_red": 91,
  "light_green": 92,
  "light_yellow": 93,
  "light_blue": 94,
  "light_magenta": 95,
  "light_cyan": 96,
  "white": 97,
}

def allow_color() -> bool:
  if "NO_COLOR" in os.environ or os.environ.get("TERM") == "dumb" or not hasattr(sys.stdout, "fileno"):
    return False
  try:
    return os.isatty(sys.stdout.fileno())
  except io.UnsupportedOperation:
    return sys.stdout.isatty()

def colored_(text: str, color: Color | None = None, on_color: Color | None = None) -> str:
  if color:
    text = f"\033[{COLOR_MAP[color]}m{text}"
  if on_color:
    text = f"\033[{COLOR_MAP[on_color] + 10}m{text}"
  return text + "\033[0m"

noop = lambda *args, **_: args[0]
colored = colored_ if allow_color() else noop

def print_checkpoint(should_log: bool, title: str, text: str, color: Color):
  if should_log:
    print(f"{colored(f" {title} ", "grey", color)} {colored(text, color)}")
```
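For reference, a small sketch of how `CheckpointFn` calls this logger (the checkpoint id shown is made up); output is colored only when stdout is a TTY and `NO_COLOR` is unset:

```python
from checkpointer.print_checkpoint import print_checkpoint

print_checkpoint(True, "MEMORIZING", "example.py/my_fn/abc123", "blue")
print_checkpoint(False, "REMEMBERED", "example.py/my_fn/abc123", "green")  # suppressed
```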
checkpointer/storages/bcolz_storage.py
CHANGED
```diff
@@ -1,92 +1,98 @@
 import shutil
 from pathlib import Path
 from datetime import datetime
+from ..types import Storage
 
 def get_data_type_str(x):
   if isinstance(x, tuple):
-    return …
+    return "tuple"
  elif isinstance(x, dict):
-    return …
+    return "dict"
  elif isinstance(x, list):
-    return …
-  elif isinstance(x, str) or not hasattr(x, …
-    return …
+    return "list"
+  elif isinstance(x, str) or not hasattr(x, "__len__"):
+    return "other"
  else:
-    return …
+    return "ndarray"
 
-def …
-  meta_full_path = full_path.with_name(full_path.name + '_meta')
-  return full_path, meta_full_path
+def get_metapath(path: Path):
+  return path.with_name(f"{path.name}_meta")
 
-def get_collection_timestamp(…
+def get_collection_timestamp(path: Path):
   import bcolz
-  meta_data = bcolz.open(…
-  return meta_data[…
+  metapath = get_metapath(path)
+  meta_data = bcolz.open(metapath)[:][0]
+  return meta_data["created"]
 
-def …
-  try:
-    get_collection_timestamp(config, path)
-    return False
-  except (FileNotFoundError, EOFError):
-    return True
-
-def should_expire(config, path, expire_fn):
-  return expire_fn(get_collection_timestamp(config, path))
-
-def insert_data(path, data):
+def insert_data(path: Path, data):
   import bcolz
-  c = bcolz.carray(data, rootdir=path, mode=…
+  c = bcolz.carray(data, rootdir=path, mode="w")
   c.flush()
 
-    fields = sorted(data.keys())
-  else:
-    fields = []
-  meta_data = {'created': created, 'data_type_str': data_type_str, 'fields': fields}
-  insert_data(meta_full_path, meta_data)
-  if data_type_str in ['tuple', 'dict']:
-    for i in range(len(fields)):
-      sub_path = f"{path} ({i})"
-      store_data(config, sub_path, data[fields[i]])
-  else:
-    insert_data(full_path, data)
-  return data
+class BcolzStorage(Storage):
+  @staticmethod
+  def is_expired(path):
+    try:
+      get_collection_timestamp(path)
+      return False
+    except (FileNotFoundError, EOFError):
+      return True
+
+  @staticmethod
+  def should_expire(path, expire_fn):
+    return expire_fn(get_collection_timestamp(path))
+
+  @staticmethod
+  def store_data(path, data):
+    metapath = get_metapath(path)
+    path.parent.mkdir(parents=True, exist_ok=True)
+    created = datetime.now()
+    data_type_str = get_data_type_str(data)
+    if data_type_str == "tuple":
+      fields = list(range(len(data)))
+    elif data_type_str == "dict":
+      fields = sorted(data.keys())
+    else:
+      fields = []
+    meta_data = {"created": created, "data_type_str": data_type_str, "fields": fields}
+    insert_data(metapath, meta_data)
+    if data_type_str in ["tuple", "dict"]:
+      for i in range(len(fields)):
+        child_path = Path(f"{path} ({i})")
+        BcolzStorage.store_data(child_path, data[fields[i]])
+    else:
+      insert_data(path, data)
+    return data
+
+  @staticmethod
+  def load_data(path):
+    import bcolz
+    metapath = get_metapath(path)
+    meta_data = bcolz.open(metapath)[:][0]
+    data_type_str = meta_data["data_type_str"]
+    if data_type_str in ["tuple", "dict"]:
+      fields = meta_data["fields"]
+      partitions = range(len(fields))
+      data = [BcolzStorage.load_data(Path(f"{path} ({i})")) for i in partitions]
+      if data_type_str == "tuple":
+        return tuple(data)
+      else:
+        return dict(zip(fields, data))
+    else:
+      data = bcolz.open(path)
+      if data_type_str == "list":
+        return list(data)
+      elif data_type_str == "other":
+        return data[0]
+      else:
+        return data[:]
+
+  @staticmethod
+  def delete_data(path):
+    # NOTE: Not recursive
+    metapath = get_metapath(path)
+    try:
+      shutil.rmtree(metapath)
+      shutil.rmtree(path)
+    except FileNotFoundError:
+      pass
```
checkpointer/storages/memory_storage.py
CHANGED
```diff
@@ -1,18 +1,29 @@
 from datetime import datetime
+from ..types import Storage
 
 store = {}
 date_stored = {}
 
+class MemoryStorage(Storage):
+  @staticmethod
+  def is_expired(path):
+    return path not in store
+
+  @staticmethod
+  def should_expire(path, expire_fn):
+    return expire_fn(date_stored[path])
+
+  @staticmethod
+  def store_data(path, data):
+    store[path] = data
+    date_stored[path] = datetime.now()
+    return data
+
+  @staticmethod
+  def load_data(path):
+    return store[path]
+
+  @staticmethod
+  def delete_data(path):
+    del store[path]
+    del date_stored[path]
```
checkpointer/storages/pickle_storage.py
CHANGED
```diff
@@ -1,49 +1,55 @@
 import pickle
 from pathlib import Path
 from datetime import datetime
+from ..types import Storage
 
-def get_paths(…
-  pkl_full_path = p.with_name(p.name + '.pkl')
+def get_paths(path: Path):
+  meta_full_path = path.with_name(f"{path.name}_meta.pkl")
+  pkl_full_path = path.with_name(f"{path.name}.pkl")
   return meta_full_path, pkl_full_path
 
-def get_collection_timestamp(…
-  meta_full_path, …
-  with meta_full_path.open(…
+def get_collection_timestamp(path: Path):
+  meta_full_path, _ = get_paths(path)
+  with meta_full_path.open("rb") as file:
     meta_data = pickle.load(file)
-  return meta_data[…
+  return meta_data["created"]
 
+class PickleStorage(Storage):
+  @staticmethod
+  def is_expired(path):
+    try:
+      get_collection_timestamp(path)
+      return False
+    except (FileNotFoundError, EOFError):
+      return True
+
+  @staticmethod
+  def should_expire(path, expire_fn):
+    return expire_fn(get_collection_timestamp(path))
+
+  @staticmethod
+  def store_data(path, data):
+    created = datetime.now()
+    meta_data = {"created": created}  # TODO: this should just be a JSON or binary dump of the unix timestamp and other metadata - not pickle
+    meta_full_path, pkl_full_path = get_paths(path)
+    pkl_full_path.parent.mkdir(parents=True, exist_ok=True)
+    with pkl_full_path.open("wb") as file:
+      pickle.dump(data, file, -1)
+    with meta_full_path.open("wb") as file:
+      pickle.dump(meta_data, file, -1)
+    return data
+
+  @staticmethod
+  def load_data(path):
+    _, full_path = get_paths(path)
+    with full_path.open("rb") as file:
+      return pickle.load(file)
+
+  @staticmethod
+  def delete_data(path):
+    meta_full_path, pkl_full_path = get_paths(path)
+    try:
+      meta_full_path.unlink()
+      pkl_full_path.unlink()
+    except FileNotFoundError:
+      pass
```
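A hedged sketch of using `PickleStorage` directly (normally `CheckpointFn` drives it): for a checkpoint path `<dir>/<name>` it writes `<name>.pkl` next to `<name>_meta.pkl`. The `/tmp/checkpoints/demo/example` path is arbitrary:

```python
from pathlib import Path
from checkpointer.storages.pickle_storage import PickleStorage

path = Path("/tmp/checkpoints/demo/example")
PickleStorage.store_data(path, {"answer": 42})  # writes example.pkl and example_meta.pkl
print(PickleStorage.load_data(path))            # {'answer': 42}
print(PickleStorage.is_expired(path))           # False
PickleStorage.delete_data(path)
```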
checkpointer/types.py
ADDED
```python
from typing import Callable, Protocol, Any
from pathlib import Path
from datetime import datetime

class Storage(Protocol):
  @staticmethod
  def is_expired(path: Path) -> bool: ...

  @staticmethod
  def should_expire(path: Path, expire_fn: Callable[[datetime], bool]) -> bool: ...

  @staticmethod
  def store_data(path: Path, data: Any) -> Any: ...

  @staticmethod
  def load_data(path: Path) -> Any: ...

  @staticmethod
  def delete_data(path: Path) -> None: ...
```
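Because `Storage` is a `Protocol` of five static methods, any class with the same shape can be passed as `format=`. An illustrative custom backend (JSON on disk; not part of the package, and it only handles JSON-serializable return values):

```python
import json
from datetime import datetime
from pathlib import Path
from checkpointer import checkpoint

class JsonStorage:
  @staticmethod
  def is_expired(path: Path) -> bool:
    return not path.with_suffix(".json").exists()

  @staticmethod
  def should_expire(path: Path, expire_fn) -> bool:
    created = datetime.fromtimestamp(path.with_suffix(".json").stat().st_mtime)
    return expire_fn(created)

  @staticmethod
  def store_data(path: Path, data):
    path.parent.mkdir(parents=True, exist_ok=True)
    path.with_suffix(".json").write_text(json.dumps(data))
    return data

  @staticmethod
  def load_data(path: Path):
    return json.loads(path.with_suffix(".json").read_text())

  @staticmethod
  def delete_data(path: Path) -> None:
    path.with_suffix(".json").unlink(missing_ok=True)

@checkpoint(format=JsonStorage)
def add(a: int, b: int) -> int:
  return a + b
```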
checkpointer/utils.py
CHANGED
```diff
@@ -1,10 +1,9 @@
 import types
 
-def …
-  return func
+def unwrap_fn[T](fn: T) -> T:
+  while hasattr(fn, "__wrapped__"):
+    fn = getattr(fn, "__wrapped__")
+  return fn
 
 @types.coroutine
 def coroutine_as_generator(coroutine):
```
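`unwrap_fn` follows the `__wrapped__` chain that `functools.wraps`/`update_wrapper` leave behind, so the innermost function is what gets hashed. A standalone sketch of that behavior (the `log_calls` decorator is made up):

```python
from functools import wraps
from checkpointer.utils import unwrap_fn

def log_calls(fn):
  @wraps(fn)  # sets wrapper.__wrapped__ = fn
  def wrapper(*args, **kw):
    print("calling", fn.__name__)
    return fn(*args, **kw)
  return wrapper

@log_calls
def greet(name: str) -> str:
  return f"hello {name}"

assert unwrap_fn(greet) is greet.__wrapped__  # stacked decorators don't change the hash source
```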
checkpointer-2.0.0.dist-info/METADATA
ADDED

Metadata-Version: 2.3
Name: checkpointer
Version: 2.0.0
Summary: A Python library for memoizing function results with support for multiple storage backends, async runtimes, and automatic cache invalidation
Project-URL: Repository, https://github.com/Reddan/checkpointer.git
Author: Hampus Hallman
License: Copyright 2024 Hampus Hallman

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
Requires-Python: >=3.12
Requires-Dist: relib
Description-Content-Type: text/markdown

# checkpointer · [](https://github.com/Reddan/checkpointer/blob/master/LICENSE) [](https://pypi.org/project/checkpointer/) [](https://pypi.org/project/checkpointer/)

`checkpointer` is a Python library for memoizing function results. It simplifies caching by providing a decorator-based API and supports various storage backends. It's designed for computationally expensive operations where caching can save time, or during development to avoid waiting for redundant computations. 🚀

Adding or removing `@checkpoint` doesn't change how your code works, and it can be applied to any function, including ones you've already written, without altering their behavior or introducing side effects. The original function remains unchanged and can still be called directly when needed.

### Key Features:
- **Multiple Storage Backends**: Supports in-memory, pickle, or your own custom storage.
- **Simple Decorator API**: Apply `@checkpoint` to functions.
- **Async and Sync Compatibility**: Works with synchronous functions and any Python async runtime (e.g., `asyncio`, `Trio`, `Curio`).
- **Custom Expiration Logic**: Automatically invalidate old checkpoints.
- **Flexible Path Configuration**: Control where checkpoints are stored.

### How It Works

When you use `@checkpoint`, the function's **arguments** (`args`, `kwargs`) are hashed to create a unique identifier for each call. This identifier is used to store and retrieve cached results. If the same arguments are passed again, `checkpointer` will return the cached result instead of recomputing.

Additionally, `checkpointer` ensures that caches are invalidated when a function's implementation or any of its dependencies change. Each function is assigned a hash based on:
1. **Its source code**: Changes to the function's code update its hash.
2. **Dependent functions**: If a function calls others, changes to those will also update the hash.

### Example: Cache Invalidation by Function Dependencies

```python
def multiply(a, b):
  return a * b

@checkpoint
def helper(x):
  return multiply(x + 1, 2)

@checkpoint
def compute(a, b):
  return helper(a) + helper(b)
```

If you change `multiply`, the checkpoints for both `helper` and `compute` will be invalidated and recomputed.

---

## Installation

```bash
pip install checkpointer
```

---

## Quick Start

```python
from checkpointer import checkpoint

@checkpoint
def expensive_function(x: int) -> int:
  print("Computing...")
  return x ** 2

result = expensive_function(4)  # Computes and stores result
result = expensive_function(4)  # Loads from checkpoint
```

---

## Parameterization

### Global Configuration

You can configure a custom `Checkpointer`:

```python
from checkpointer import checkpoint

checkpoint = checkpoint(format="memory", root_path="/tmp/checkpoints")
```

Extend this configuration by calling itself again:

```python
extended_checkpoint = checkpoint(format="pickle", verbosity=0)
```

### Per-Function Customization

```python
@checkpoint(format="pickle", verbosity=0)
def my_function(x, y):
  return x + y
```

### Combining Configurations

```python
checkpoint = checkpoint(format="memory", verbosity=1)
quiet_checkpoint = checkpoint(verbosity=0)
pickle_checkpoint = checkpoint(format="pickle", root_path="/tmp/pickle_checkpoints")

@checkpoint
def compute_square(n: int) -> int:
  return n ** 2

@quiet_checkpoint
def compute_quietly(n: int) -> int:
  return n ** 3

@pickle_checkpoint
def compute_sum(a: int, b: int) -> int:
  return a + b
```

### Layered Caching

```python
IS_DEVELOPMENT = True  # Toggle based on environment

dev_checkpoint = checkpoint(when=IS_DEVELOPMENT)

@checkpoint(format="memory")
@dev_checkpoint
def some_expensive_function():
  print("Performing a time-consuming operation...")
  return sum(i * i for i in range(10**6))
```

- In development: Both `dev_checkpoint` and `memory` caches are active.
- In production: Only the `memory` cache is active.

---

## Usage

### Force Recalculation
Use `rerun` to force a recalculation and overwrite the stored checkpoint:

```python
result = expensive_function.rerun(4)
```

### Bypass Checkpointer
Use `fn` to directly call the original, undecorated function:

```python
result = expensive_function.fn(4)
```

### Retrieve Stored Checkpoints
Access stored results without recalculating:

```python
stored_result = expensive_function.get(4)
```

---

## Configuration Options

| Option          | Type                              | Default    | Description                              |
|-----------------|-----------------------------------|------------|------------------------------------------|
| `format`        | `"pickle"`, `"memory"`, `Storage` | `"pickle"` | Storage backend format.                  |
| `root_path`     | `Path`, `str`, or `None`          | User Cache | Root directory for storing checkpoints.  |
| `when`          | `bool`                            | `True`     | Enable or disable checkpointing.         |
| `verbosity`     | `0` or `1`                        | `1`        | Logging verbosity.                       |
| `path`          | `str` or `Callable[..., str]`     | `None`     | Custom path for checkpoint storage.      |
| `should_expire` | `Callable[[datetime], bool]`      | `None`     | Custom expiration logic.                 |

---

## Full Example

```python
import asyncio
from checkpointer import checkpoint

@checkpoint
def compute_square(n: int) -> int:
  print(f"Computing {n}^2...")
  return n ** 2

@checkpoint(format="memory")
async def async_compute_sum(a: int, b: int) -> int:
  await asyncio.sleep(1)
  return a + b

async def main():
  result1 = compute_square(5)
  print(result1)

  result2 = await async_compute_sum(3, 7)
  print(result2)

  result3 = async_compute_sum.get(3, 7)
  print(result3)

asyncio.run(main())
```
checkpointer-2.0.0.dist-info/RECORD
ADDED
```
checkpointer/__init__.py,sha256=TODGBGbZYBJ5LIpz5t6tnQNJ7ODPRUvXjv3Ooqb8-cc,357
checkpointer/checkpoint.py,sha256=V8JL8ibmMeqZjLjaigeAWa-8c948VfIiYqO8t5OFk48,4812
checkpointer/function_body.py,sha256=92mnTY9d_JhKnKugeySYRP6qhU4fH6F6zesb7h2pEi0,1720
checkpointer/print_checkpoint.py,sha256=wHC2xWNwNfFhRHyhrmLkadYoyThRTJWiox3NjgE9Ubc,1369
checkpointer/types.py,sha256=yoNPnN_QJHfyK_Gs8c0SoywHHDUlU7uhKqPPTTWjRTE,469
checkpointer/utils.py,sha256=UrQt689UHUjl7kXpTbUCGkHUgQZllByX2rbuvZdt9vk,368
checkpointer/storages/bcolz_storage.py,sha256=5hbJB0VJ2k-FHf7rItywMXP74WT-JTqeNK5N8yftcnw,2647
checkpointer/storages/memory_storage.py,sha256=5ITKjh_bVNfj1C6pcyMgB4YU4sy6jOLlvH0_3Pl1Elo,558
checkpointer/storages/pickle_storage.py,sha256=ipXG2dht8YQAXJFEK5-OwOb8xP8ij_v7K0Qu5Xz9aVE,1622
checkpointer-2.0.0.dist-info/METADATA,sha256=5DZmJ0rMnPeRZ9b5REXv1h7z8tbj0llgTO9yp8xEbnQ,7561
checkpointer-2.0.0.dist-info/WHEEL,sha256=C2FUgwZgiLbznR-k0b_5k3Ai_1aASOXDss3lzCUsUug,87
checkpointer-2.0.0.dist-info/licenses/LICENSE,sha256=0cmUKqBotzbBcysIexd52AhjwbphhlGYiWbvg5l2QAU,1054
checkpointer-2.0.0.dist-info/RECORD,,
```
checkpointer-1.1.0.dist-info/LICENSE.txt → checkpointer-2.0.0.dist-info/licenses/LICENSE
RENAMED
```diff
@@ -1,4 +1,4 @@
-Copyright …
+Copyright 2024 Hampus Hallman
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
 
```
checkpointer/storage.py
DELETED
```python
import inspect
from termcolor import colored
from .storages import memory_storage, pickle_storage, bcolz_storage

storages = {
  'memory': memory_storage,
  'pickle': pickle_storage,
  'bcolz': bcolz_storage,
}

initialized_storages = set()

def create_logger(should_log):
  def log(color, title, text):
    if should_log:
      title_log = colored(f' {title} ', 'grey', 'on_' + color)
      rest_log = colored(text, color)
      print(title_log + ' ' + rest_log)
  return log

def get_storage(storage):
  if type(storage) == str:
    storage = storages[storage]
  if storage not in initialized_storages:
    if hasattr(storage, 'initialize'):
      storage.initialize()
    initialized_storages.add(storage)
  return storage

async def store_on_demand(get_data, name, config, force=False, should_expire=None):
  storage = get_storage(config.format)
  should_log = storage != memory_storage and config.verbosity != 0
  log = create_logger(should_log)
  refresh = force \
    or storage.get_is_expired(config, name) \
    or (should_expire and storage.should_expire(config, name, should_expire))

  if refresh:
    log('blue', 'MEMORIZING', name)
    data = get_data()
    if inspect.iscoroutine(data):
      data = await data
    return storage.store_data(config, name, data)
  else:
    try:
      data = storage.load_data(config, name)
      log('green', 'REMEMBERED', name)
      return data
    except (EOFError, FileNotFoundError):
      log('yellow', 'CORRUPTED', name)
      storage.delete_data(config, name)
      result = await store_on_demand(get_data, name, config, force, should_expire)
      return result

def read_from_store(name, config, storage='pickle'):
  storage = get_storage(storage)
  try:
    return storage.load_data(config, name)
  except:
    return None
```
checkpointer-1.1.0.dist-info/METADATA
DELETED
```
Metadata-Version: 2.1
Name: checkpointer
Version: 1.1.0
Home-page: https://github.com/Reddan/checkpointer
Author: Hampus Hallman
Author-email: me@hampushallman.com
License: MIT
Requires-Python: ~=3.5
License-File: LICENSE.txt
Requires-Dist: relib
Requires-Dist: termcolor
```
checkpointer-1.1.0.dist-info/RECORD
DELETED
```
checkpointer/__init__.py,sha256=_RYcKsZbeUf08KZ-DcXlNn4eAMWh2LN9o-KvchYVmmk,380
checkpointer/checkpoint.py,sha256=FyL78HvAvPtgl-esiAkt-CdekT18J2Sh0SriMtX4QLc,2367
checkpointer/function_body.py,sha256=aitzBFoEIo_E-aL2oe4Nr_T_Oy8ehKInGb1XYjeLzX8,1703
checkpointer/storage.py,sha256=Ofuh0dKF5vk4_B4djt3Q6qyZhIO5f59uCNCZjMrto0U,1782
checkpointer/utils.py,sha256=Kzh2qXxq2Lgjiqfv6KcOwXdw7sh70uCn4a_uXYwLhiM,377
checkpointer/storages/bcolz_storage.py,sha256=Yk7FI75noe9hZBWVFIRetiFSR7tkzbryYlBmxX-lVlw,2728
checkpointer/storages/memory_storage.py,sha256=S4SgKSApbQE-pxxKRWLNJqyZMRQwaw5-N0DOIsZM7mE,364
checkpointer/storages/pickle_storage.py,sha256=zcnX1GG6XPHvVxi7gCab5oFxKoz5E7LZHYH74VL1hkY,1542
checkpointer-1.1.0.dist-info/LICENSE.txt,sha256=2c7g4mni-RUemFGkk6GnoFwknh-leF04BF_J_3gp4sg,1054
checkpointer-1.1.0.dist-info/METADATA,sha256=_isCKu9zdEI1Z-umSB6mr-X7lEVBVjlU_QvRNYolUNQ,273
checkpointer-1.1.0.dist-info/WHEEL,sha256=oiQVh_5PnQM0E3gPdiz09WCNmwiHDMaGer_elqB3coM,92
checkpointer-1.1.0.dist-info/top_level.txt,sha256=uF0eyHpShnsHI3sobErnhQ8LWCT8DPViqAznKeTaZlw,13
checkpointer-1.1.0.dist-info/RECORD,,
```
checkpointer-1.1.0.dist-info/top_level.txt
DELETED
```
checkpointer
```