hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a190__py3-none-any.whl
This diff compares the contents of two publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
- hpcflow/__pyinstaller/hook-hpcflow.py +8 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/sdk/__init__.py +21 -15
- hpcflow/sdk/app.py +2133 -770
- hpcflow/sdk/cli.py +281 -250
- hpcflow/sdk/cli_common.py +6 -2
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +77 -42
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +578 -311
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +112 -85
- hpcflow/sdk/config/types.py +145 -0
- hpcflow/sdk/core/actions.py +1054 -994
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +81 -63
- hpcflow/sdk/core/command_files.py +275 -185
- hpcflow/sdk/core/commands.py +111 -107
- hpcflow/sdk/core/element.py +724 -503
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +398 -51
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +380 -334
- hpcflow/sdk/core/loop_cache.py +160 -43
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +728 -600
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +33 -22
- hpcflow/sdk/core/task.py +1546 -1325
- hpcflow/sdk/core/task_schema.py +240 -196
- hpcflow/sdk/core/test_utils.py +126 -88
- hpcflow/sdk/core/types.py +387 -0
- hpcflow/sdk/core/utils.py +410 -305
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +1192 -1028
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/demo/cli.py +46 -33
- hpcflow/sdk/helper/cli.py +18 -16
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +83 -59
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +988 -586
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +408 -153
- hpcflow/sdk/persistence/pending.py +158 -123
- hpcflow/sdk/persistence/store_resource.py +37 -22
- hpcflow/sdk/persistence/types.py +307 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +477 -420
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +444 -404
- hpcflow/sdk/submission/schedulers/__init__.py +133 -40
- hpcflow/sdk/submission/schedulers/direct.py +97 -71
- hpcflow/sdk/submission/schedulers/sge.py +132 -126
- hpcflow/sdk/submission/schedulers/slurm.py +263 -268
- hpcflow/sdk/submission/schedulers/utils.py +7 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +102 -29
- hpcflow/sdk/submission/shells/bash.py +72 -55
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +37 -29
- hpcflow/sdk/submission/submission.py +203 -257
- hpcflow/sdk/submission/types.py +143 -0
- hpcflow/sdk/typing.py +163 -12
- hpcflow/tests/conftest.py +8 -6
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_main_scripts.py +60 -30
- hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -4
- hpcflow/tests/unit/test_action.py +86 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +13 -6
- hpcflow/tests/unit/test_cli.py +1 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +20 -15
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +3 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +65 -58
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +16 -7
- hpcflow/tests/unit/test_persistence.py +48 -35
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +8 -3
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +3 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +39 -19
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/workflows/test_jobscript.py +2 -1
- hpcflow/tests/workflows/test_workflows.py +18 -13
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/METADATA +2 -1
- hpcflow_new2-0.2.0a190.dist-info/RECORD +165 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/persistence/json.py
CHANGED
@@ -3,21 +3,20 @@ Persistence model based on writing JSON documents.
 """
 
 from __future__ import annotations
+
 from contextlib import contextmanager
 import copy
-from datetime import datetime
 import json
 from pathlib import Path
+from typing import cast, TYPE_CHECKING
+from typing_extensions import override
 
-from
-
-from fsspec import filesystem
+from fsspec import filesystem, AbstractFileSystem  # type: ignore
 from hpcflow.sdk.core.errors import (
     MissingParameterData,
     MissingStoreEARError,
     MissingStoreElementError,
     MissingStoreElementIterationError,
-    MissingStoreTaskError,
 )
 from hpcflow.sdk.persistence.base import (
     PersistentStoreFeatures,
@@ -27,19 +26,164 @@ from hpcflow.sdk.persistence.base import (
     StoreElementIter,
     StoreParameter,
     StoreTask,
+    update_param_source_dict,
 )
 from hpcflow.sdk.persistence.pending import CommitResourceMap
 from hpcflow.sdk.persistence.store_resource import JSONFileStoreResource
-from hpcflow.sdk.persistence.base import update_param_source_dict
 
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Iterator, Mapping, Sequence
+    from datetime import datetime
+    from typing import Any, ClassVar
+    from typing_extensions import Self
+    from ..app import BaseApp
+    from ..core.json_like import JSONed, JSONDocument
+    from ..core.workflow import Workflow
+    from ..typing import ParamSource
+    from .types import (
+        ElemMeta,
+        IterMeta,
+        LoopDescriptor,
+        Metadata,
+        RunMeta,
+        StoreCreationInfo,
+        TaskMeta,
+        TemplateMeta,
+    )
+
+
+class JsonStoreTask(StoreTask["TaskMeta"]):
+    """
+    Persisted task that is serialized using JSON.
+    """
+
+    @override
+    def encode(self) -> tuple[int, TaskMeta, dict[str, Any]]:
+        """Prepare store task data for the persistent store."""
+        assert self.task_template is not None
+        wk_task: TaskMeta = {
+            "id_": self.id_,
+            "element_IDs": self.element_IDs,
+            "index": self.index,
+        }
+        task = {"id_": self.id_, **self.task_template}
+        return self.index, wk_task, task
+
+    @override
+    @classmethod
+    def decode(cls, task_dat: TaskMeta) -> Self:
+        """Initialise a `StoreTask` from store task data
+
+        Note: the `task_template` is only needed for encoding because it is retrieved as
+        part of the `WorkflowTemplate` so we don't need to load it when decoding.
+        """
+        return cls(is_pending=False, **task_dat)
+
+
+class JsonStoreElement(StoreElement["ElemMeta", None]):
+    """
+    Persisted element that is serialized using JSON.
+    """
+
+    @override
+    def encode(self, context: None) -> ElemMeta:
+        """Prepare store element data for the persistent store."""
+        dct = self.__dict__
+        del dct["is_pending"]
+        return cast("ElemMeta", dct)
+
+    @override
+    @classmethod
+    def decode(cls, elem_dat: ElemMeta, context: None) -> Self:
+        """Initialise a `JsonStoreElement` from store element data"""
+        return cls(is_pending=False, **elem_dat)
+
+
+class JsonStoreElementIter(StoreElementIter["IterMeta", None]):
+    """
+    Persisted element iteration that is serialized using JSON.
+    """
+
+    @override
+    def encode(self, context: None) -> IterMeta:
+        """Prepare store element iteration data for the persistent store."""
+        dct = self.__dict__
+        del dct["is_pending"]
+        return cast("IterMeta", dct)
+
+    @override
+    @classmethod
+    def decode(cls, iter_dat: IterMeta, context: None) -> Self:
+        """Initialise a `JsonStoreElementIter` from persistent store element iteration data"""
+
+        iter_dat = copy.deepcopy(iter_dat)  # to avoid mutating; can we avoid this?
+
+        # cast JSON string keys to integers:
+        if EAR_IDs := iter_dat["EAR_IDs"]:
+            for act_idx in list(EAR_IDs):
+                EAR_IDs[int(act_idx)] = EAR_IDs.pop(act_idx)
 
-
+        return cls(is_pending=False, **cast("dict", iter_dat))
+
+
+class JsonStoreEAR(StoreEAR["RunMeta", None]):
+    """
+    Persisted element action run that is serialized using JSON.
+    """
+
+    @override
+    def encode(self, ts_fmt: str, context: None) -> RunMeta:
+        """Prepare store EAR data for the persistent store."""
+        return {
+            "id_": self.id_,
+            "elem_iter_ID": self.elem_iter_ID,
+            "action_idx": self.action_idx,
+            "commands_idx": self.commands_idx,
+            "data_idx": self.data_idx,
+            "submission_idx": self.submission_idx,
+            "success": self.success,
+            "skip": self.skip,
+            "start_time": self._encode_datetime(self.start_time, ts_fmt),
+            "end_time": self._encode_datetime(self.end_time, ts_fmt),
+            "snapshot_start": self.snapshot_start,
+            "snapshot_end": self.snapshot_end,
+            "exit_code": self.exit_code,
+            "metadata": self.metadata,
+            "run_hostname": self.run_hostname,
+        }
+
+    @override
+    @classmethod
+    def decode(cls, EAR_dat: RunMeta, ts_fmt: str, context: None) -> Self:
+        """Initialise a `JsonStoreEAR` from persistent store EAR data"""
+        # don't want to mutate EAR_dat:
+        EAR_dat = copy.deepcopy(EAR_dat)
+        start_time = cls._decode_datetime(EAR_dat.pop("start_time"), ts_fmt)
+        end_time = cls._decode_datetime(EAR_dat.pop("end_time"), ts_fmt)
+        return cls(
+            is_pending=False,
+            **cast("dict", EAR_dat),
+            start_time=start_time,
+            end_time=end_time,
+        )
+
+
+class JSONPersistentStore(
+    PersistentStore[
+        JsonStoreTask,
+        JsonStoreElement,
+        JsonStoreElementIter,
+        JsonStoreEAR,
+        StoreParameter,
+    ]
+):
     """
     A store that writes JSON files for all its state serialization.
     """
 
-    _name = "json"
-    _features = PersistentStoreFeatures(
+    _name: ClassVar[str] = "json"
+    _features: ClassVar[PersistentStoreFeatures] = PersistentStoreFeatures(
         create=True,
         edit=True,
         jobscript_parallelism=False,
@@ -48,17 +192,17 @@ class JSONPersistentStore(PersistentStore):
         submission=True,
     )
 
-    _meta_res = "metadata"
-    _params_res = "parameters"
-    _subs_res = "submissions"
+    _meta_res: ClassVar[str] = "metadata"
+    _params_res: ClassVar[str] = "parameters"
+    _subs_res: ClassVar[str] = "submissions"
 
-    _res_file_names = {
+    _res_file_names: ClassVar[Mapping[str, str]] = {
        _meta_res: "metadata.json",
        _params_res: "parameters.json",
        _subs_res: "submissions.json",
     }
 
-    _res_map = CommitResourceMap(
+    _res_map: ClassVar[CommitResourceMap] = CommitResourceMap(
         commit_tasks=(_meta_res,),
         commit_loops=(_meta_res,),
         commit_loop_num_iters=(_meta_res,),
@@ -83,7 +227,29 @@ class JSONPersistentStore(PersistentStore):
         commit_param_sources=(_params_res,),
     )
 
-    def __init__(self, app, workflow, path, fs):
+    @classmethod
+    def _store_task_cls(cls) -> type[JsonStoreTask]:
+        return JsonStoreTask
+
+    @classmethod
+    def _store_elem_cls(cls) -> type[JsonStoreElement]:
+        return JsonStoreElement
+
+    @classmethod
+    def _store_iter_cls(cls) -> type[JsonStoreElementIter]:
+        return JsonStoreElementIter
+
+    @classmethod
+    def _store_EAR_cls(cls) -> type[JsonStoreEAR]:
+        return JsonStoreEAR
+
+    @classmethod
+    def _store_param_cls(cls) -> type[StoreParameter]:
+        return StoreParameter
+
+    def __init__(
+        self, app, workflow: Workflow | None, path: Path, fs: AbstractFileSystem
+    ):
         self._resources = {
             self._meta_res: self._get_store_resource(app, "metadata", path, fs),
             self._params_res: self._get_store_resource(app, "parameters", path, fs),
@@ -92,10 +258,10 @@ class JSONPersistentStore(PersistentStore):
         super().__init__(app, workflow, path, fs)
 
     @contextmanager
-    def cached_load(self) -> Iterator[
+    def cached_load(self) -> Iterator[None]:
         """Context manager to cache the metadata."""
-        with self.using_resource("metadata", "read")
-            yield
+        with self.using_resource("metadata", "read"):
+            yield
 
     def remove_replaced_dir(self) -> None:
         """
@@ -103,9 +269,10 @@ class JSONPersistentStore(PersistentStore):
         """
         with self.using_resource("metadata", "update") as md:
             if "replaced_workflow" in md:
-
+                assert self.fs is not None
+                self.remove_path(md["replaced_workflow"])
                 self.logger.debug("removing temporarily renamed pre-existing workflow.")
-                md["replaced_workflow"]
+                del md["replaced_workflow"]
 
     def reinstate_replaced_dir(self) -> None:
         """
@@ -113,13 +280,16 @@ class JSONPersistentStore(PersistentStore):
         """
         with self.using_resource("metadata", "read") as md:
             if "replaced_workflow" in md:
+                assert self.fs is not None
                 self.logger.debug(
                     "reinstating temporarily renamed pre-existing workflow."
                 )
-                self.rename_path(md["replaced_workflow"], self.path
+                self.rename_path(md["replaced_workflow"], self.path)
 
     @classmethod
-    def _get_store_resource(
+    def _get_store_resource(
+        cls, app: BaseApp, name: str, path: str | Path, fs: AbstractFileSystem
+    ) -> JSONFileStoreResource:
         return JSONFileStoreResource(
             app=app,
             name=name,
@@ -131,14 +301,15 @@ class JSONPersistentStore(PersistentStore):
     @classmethod
     def write_empty_workflow(
         cls,
-        app,
-        template_js,
-        template_components_js,
+        app: BaseApp,
+        *,
+        template_js: TemplateMeta,
+        template_components_js: dict[str, Any],
         wk_path: str,
-        fs,
+        fs: AbstractFileSystem,
         name: str,
-        replaced_wk: str,
-        creation_info:
+        replaced_wk: str | None,
+        creation_info: StoreCreationInfo,
         ts_fmt: str,
         ts_name_fmt: str,
     ) -> None:
@@ -146,12 +317,12 @@ class JSONPersistentStore(PersistentStore):
         Write an empty persistent workflow.
         """
         fs.mkdir(wk_path)
-        submissions = []
-        parameters = {
+        submissions: list[None] = []
+        parameters: dict[str, dict[None, None]] = {
            "data": {},
            "sources": {},
        }
-        metadata = {
+        metadata: Metadata = {
            "name": name,
            "ts_fmt": ts_fmt,
            "ts_name_fmt": ts_name_fmt,
@@ -172,17 +343,19 @@ class JSONPersistentStore(PersistentStore):
         cls._get_store_resource(app, "parameters", wk_path, fs)._dump(parameters)
         cls._get_store_resource(app, "submissions", wk_path, fs)._dump(submissions)
 
-    def _append_tasks(self, tasks:
+    def _append_tasks(self, tasks: Iterable[StoreTask]):
         with self.using_resource("metadata", action="update") as md:
-            for task in tasks:
-                idx, wk_task_i, task_i = task.encode()
-                md["tasks"].insert(idx, wk_task_i)
+            assert "tasks" in md and "template" in md and "num_added_tasks" in md
+            for task in tasks:
+                idx, wk_task_i, task_i = task.encode()
+                md["tasks"].insert(idx, cast("TaskMeta", wk_task_i))
                 md["template"]["tasks"].insert(idx, task_i)
                 md["num_added_tasks"] += 1
 
-    def _append_loops(self, loops:
+    def _append_loops(self, loops: dict[int, LoopDescriptor]):
         with self.using_resource("metadata", action="update") as md:
-
+            assert "loops" in md
+            for _, loop in loops.items():
                 md["loops"].append(
                     {
                         "num_added_iterations": loop["num_added_iterations"],
@@ -192,81 +365,103 @@ class JSONPersistentStore(PersistentStore):
                 )
                 md["template"]["loops"].append(loop["loop_template"])
 
-    def _append_submissions(self, subs:
+    def _append_submissions(self, subs: dict[int, JSONDocument]):
         with self.using_resource("submissions", action="update") as subs_res:
-
-                subs_res.append(sub_i)
+            subs_res.extend(subs.values())
 
-    def _append_task_element_IDs(self, task_ID: int, elem_IDs:
+    def _append_task_element_IDs(self, task_ID: int, elem_IDs: list[int]):
         with self.using_resource("metadata", action="update") as md:
+            assert "tasks" in md
             md["tasks"][task_ID]["element_IDs"].extend(elem_IDs)
 
-    def _append_elements(self, elems:
+    def _append_elements(self, elems: Sequence[JsonStoreElement]):
         with self.using_resource("metadata", action="update") as md:
-
+            assert "elements" in md
+            md["elements"].extend(elem.encode(None) for elem in elems)
 
-    def _append_element_sets(self, task_id: int, es_js:
+    def _append_element_sets(self, task_id: int, es_js: Sequence[Mapping]):
         task_idx = self._get_task_id_to_idx_map()[task_id]
         with self.using_resource("metadata", "update") as md:
+            assert "template" in md
             md["template"]["tasks"][task_idx]["element_sets"].extend(es_js)
 
-    def _append_elem_iter_IDs(self, elem_ID: int, iter_IDs:
+    def _append_elem_iter_IDs(self, elem_ID: int, iter_IDs: Iterable[int]):
         with self.using_resource("metadata", action="update") as md:
+            assert "elements" in md
            md["elements"][elem_ID]["iteration_IDs"].extend(iter_IDs)
 
-    def _append_elem_iters(self, iters:
+    def _append_elem_iters(self, iters: Sequence[JsonStoreElementIter]):
         with self.using_resource("metadata", action="update") as md:
-
+            assert "iters" in md
+            md["iters"].extend(it.encode(None) for it in iters)
 
-    def _append_elem_iter_EAR_IDs(
+    def _append_elem_iter_EAR_IDs(
+        self, iter_ID: int, act_idx: int, EAR_IDs: Sequence[int]
+    ):
         with self.using_resource("metadata", action="update") as md:
-
-
-
-
-            md["iters"][iter_ID]["EAR_IDs"][act_idx].extend(EAR_IDs)
+            assert "iters" in md
+            md["iters"][iter_ID].setdefault("EAR_IDs", {}).setdefault(act_idx, []).extend(
+                EAR_IDs
+            )
 
     def _update_elem_iter_EARs_initialised(self, iter_ID: int):
         with self.using_resource("metadata", action="update") as md:
+            assert "iters" in md
             md["iters"][iter_ID]["EARs_initialised"] = True
 
-    def _append_submission_parts(self, sub_parts:
+    def _append_submission_parts(self, sub_parts: dict[int, dict[str, list[int]]]):
         with self.using_resource("submissions", action="update") as subs_res:
             for sub_idx, sub_i_parts in sub_parts.items():
+                sub = subs_res[sub_idx]
+                assert isinstance(sub, dict)
                 for dt_str, parts_j in sub_i_parts.items():
-
+                    sub["submission_parts"][dt_str] = parts_j
 
-    def _update_loop_index(self, iter_ID: int, loop_idx:
+    def _update_loop_index(self, iter_ID: int, loop_idx: Mapping[str, int]):
         with self.using_resource("metadata", action="update") as md:
+            assert "iters" in md
             md["iters"][iter_ID]["loop_idx"].update(loop_idx)
 
-    def _update_loop_num_iters(self, index: int, num_iters: int):
+    def _update_loop_num_iters(self, index: int, num_iters: list[list[list[int] | int]]):
         with self.using_resource("metadata", action="update") as md:
+            assert "loops" in md
             md["loops"][index]["num_added_iterations"] = num_iters
 
-    def _update_loop_parents(self, index: int, parents:
+    def _update_loop_parents(self, index: int, parents: list[str]):
         with self.using_resource("metadata", action="update") as md:
+            assert "loops" in md
             md["loops"][index]["parents"] = parents
 
-    def _append_EARs(self, EARs:
+    def _append_EARs(self, EARs: Sequence[JsonStoreEAR]):
         with self.using_resource("metadata", action="update") as md:
-
+            assert "runs" in md
+            md["runs"].extend(ear.encode(self.ts_fmt, None) for ear in EARs)
 
-    def _update_EAR_submission_indices(self, sub_indices:
+    def _update_EAR_submission_indices(self, sub_indices: Mapping[int, int]):
         with self.using_resource("metadata", action="update") as md:
+            assert "runs" in md
             for EAR_ID_i, sub_idx_i in sub_indices.items():
                 md["runs"][EAR_ID_i]["submission_idx"] = sub_idx_i
 
-    def _update_EAR_start(
+    def _update_EAR_start(
+        self, EAR_id: int, s_time: datetime, s_snap: dict[str, Any], s_hn: str
+    ):
         with self.using_resource("metadata", action="update") as md:
+            assert "runs" in md
             md["runs"][EAR_id]["start_time"] = s_time.strftime(self.ts_fmt)
             md["runs"][EAR_id]["snapshot_start"] = s_snap
             md["runs"][EAR_id]["run_hostname"] = s_hn
 
     def _update_EAR_end(
-        self,
+        self,
+        EAR_id: int,
+        e_time: datetime,
+        e_snap: dict[str, Any],
+        ext_code: int,
+        success: bool,
     ):
         with self.using_resource("metadata", action="update") as md:
+            assert "runs" in md
             md["runs"][EAR_id]["end_time"] = e_time.strftime(self.ts_fmt)
             md["runs"][EAR_id]["snapshot_end"] = e_snap
             md["runs"][EAR_id]["exit_code"] = ext_code
@@ -274,24 +469,26 @@ class JSONPersistentStore(PersistentStore):
 
     def _update_EAR_skip(self, EAR_id: int):
         with self.using_resource("metadata", action="update") as md:
+            assert "runs" in md
             md["runs"][EAR_id]["skip"] = True
 
-    def _update_js_metadata(self, js_meta:
+    def _update_js_metadata(self, js_meta: dict[int, dict[int, dict[str, Any]]]):
         with self.using_resource("submissions", action="update") as sub_res:
             for sub_idx, all_js_md in js_meta.items():
+                sub = cast("dict[str, list[dict[str, Any]]]", sub_res[sub_idx])
                 for js_idx, js_meta_i in all_js_md.items():
-
+                    sub_i = sub["jobscripts"][js_idx]
+                    sub_i.update(**js_meta_i)
 
-    def _append_parameters(self,
-        with self.using_resource("parameters", "update") as
-            for param_i in
-
-
+    def _append_parameters(self, params: Sequence[StoreParameter]):
+        with self.using_resource("parameters", "update") as params_u:
+            for param_i in params:
+                params_u["data"][str(param_i.id_)] = param_i.encode()
+                params_u["sources"][str(param_i.id_)] = param_i.source
 
-    def _set_parameter_values(self, set_parameters:
+    def _set_parameter_values(self, set_parameters: dict[int, tuple[Any, bool]]):
         """Set multiple unset persistent parameters."""
-
-        param_objs = self._get_persistent_parameters(param_ids)
+        param_objs = self._get_persistent_parameters(set_parameters)
         with self.using_resource("parameters", "update") as params:
             for param_id, (value, is_file) in set_parameters.items():
                 param_i = param_objs[param_id]
@@ -301,12 +498,9 @@ class JSONPersistentStore(PersistentStore):
                 param_i = param_i.set_data(value)
                 params["data"][str(param_id)] = param_i.encode()
 
-    def _update_parameter_sources(self, sources:
+    def _update_parameter_sources(self, sources: Mapping[int, ParamSource]):
         """Update the sources of multiple persistent parameters."""
-
-        param_ids = list(sources.keys())
-        param_objs = self._get_persistent_parameters(param_ids)
-
+        param_objs = self._get_persistent_parameters(sources)
         with self.using_resource("parameters", "update") as params:
             # no need to update data array:
             for p_id, src_i in sources.items():
@@ -314,7 +508,7 @@ class JSONPersistentStore(PersistentStore):
                 new_src_i = update_param_source_dict(param_i.source, src_i)
                 params["sources"][str(p_id)] = new_src_i
 
-    def _update_template_components(self, tc:
+    def _update_template_components(self, tc: dict[str, Any]):
         with self.using_resource("metadata", "update") as md:
             md["template_components"] = tc
 
@@ -324,6 +518,7 @@ class JSONPersistentStore(PersistentStore):
             num = self.num_tasks_cache
         else:
             with self.using_resource("metadata", action="read") as md:
+                assert "tasks" in md
                 num = len(md["tasks"])
         if self.use_cache and self.num_tasks_cache is None:
             self.num_tasks_cache = num
@@ -332,6 +527,7 @@ class JSONPersistentStore(PersistentStore):
     def _get_num_persistent_loops(self) -> int:
         """Get the number of persistent loops."""
         with self.using_resource("metadata", action="read") as md:
+            assert "loops" in md
             return len(md["loops"])
 
     def _get_num_persistent_submissions(self) -> int:
@@ -342,11 +538,13 @@ class JSONPersistentStore(PersistentStore):
     def _get_num_persistent_elements(self) -> int:
         """Get the number of persistent elements."""
         with self.using_resource("metadata", action="read") as md:
+            assert "elements" in md
             return len(md["elements"])
 
     def _get_num_persistent_elem_iters(self) -> int:
         """Get the number of persistent element iterations."""
         with self.using_resource("metadata", action="read") as md:
+            assert "iters" in md
             return len(md["iters"])
 
     def _get_num_persistent_EARs(self) -> int:
@@ -355,65 +553,73 @@ class JSONPersistentStore(PersistentStore):
             num = self.num_EARs_cache
         else:
             with self.using_resource("metadata", action="read") as md:
+                assert "runs" in md
                 num = len(md["runs"])
         if self.use_cache and self.num_EARs_cache is None:
             self.num_EARs_cache = num
         return num
 
-    def _get_num_persistent_parameters(self):
+    def _get_num_persistent_parameters(self) -> int:
         with self.using_resource("parameters", "read") as params:
             return len(params["data"])
 
-    def _get_num_persistent_added_tasks(self):
+    def _get_num_persistent_added_tasks(self) -> int:
         with self.using_resource("metadata", "read") as md:
+            assert "num_added_tasks" in md
             return md["num_added_tasks"]
 
     @classmethod
     def make_test_store_from_spec(
         cls,
-        app,
+        app: BaseApp,
         spec,
         dir=None,
         path="test_store.json",
         overwrite=False,
+        ts_fmt="%d/%m/%Y, %H:%M:%S",  # FIXME: use the right default timestamp format
     ):
         """Generate an store for testing purposes."""
 
-
+        tasks_, elems, elem_iters, EARs = super().prepare_test_store_from_spec(spec)
 
-
-        tasks = [
-
-
-
+        path_ = Path(path).resolve()
+        tasks = [JsonStoreTask(**task_info).encode() for task_info in tasks_]
+        elements_ = [JsonStoreElement(**elem_info).encode(None) for elem_info in elems]
+        elem_iters_ = [
+            JsonStoreElementIter(**it_info).encode(None) for it_info in elem_iters
+        ]
+        EARs_ = [JsonStoreEAR(**ear_info).encode(ts_fmt, None) for ear_info in EARs]
 
         persistent_data = {
             "tasks": tasks,
-            "elements":
-            "iters":
-            "runs":
+            "elements": elements_,
+            "iters": elem_iters_,
+            "runs": EARs_,
         }
 
-
-        with
+        path_ = Path(dir or "", path_)
+        with path_.open("wt") as fp:
             json.dump(persistent_data, fp, indent=2)
 
-        return cls(app=app, workflow=None, path=
+        return cls(app=app, workflow=None, path=path_, fs=filesystem("file"))
 
-    def _get_persistent_template_components(self):
+    def _get_persistent_template_components(self) -> dict[str, Any]:
         with self.using_resource("metadata", "read") as md:
+            assert "template_components" in md
            return md["template_components"]
 
-    def _get_persistent_template(self) ->
+    def _get_persistent_template(self) -> dict[str, JSONed]:
         with self.using_resource("metadata", "read") as md:
-
+            assert "template" in md
+            return cast("dict[str, JSONed]", md["template"])
 
-    def _get_persistent_tasks(self, id_lst: Iterable[int]) ->
+    def _get_persistent_tasks(self, id_lst: Iterable[int]) -> dict[int, JsonStoreTask]:
         tasks, id_lst = self._get_cached_persistent_tasks(id_lst)
         if id_lst:
             with self.using_resource("metadata", action="read") as md:
+                assert "tasks" in md
                 new_tasks = {
-                    i["id_"]:
+                    i["id_"]: JsonStoreTask.decode({**i, "index": idx})
                     for idx, i in enumerate(md["tasks"])
                     if id_lst is None or i["id_"] in id_lst
                 }
@@ -421,16 +627,20 @@ class JSONPersistentStore(PersistentStore):
             tasks.update(new_tasks)
         return tasks
 
-    def _get_persistent_loops(
+    def _get_persistent_loops(
+        self, id_lst: Iterable[int] | None = None
+    ) -> dict[int, LoopDescriptor]:
         with self.using_resource("metadata", "read") as md:
-
-
+            assert "loops" in md
+            return {
+                idx: cast("LoopDescriptor", i)
                 for idx, i in enumerate(md["loops"])
                 if id_lst is None or idx in id_lst
             }
-        return loop_dat
 
-    def _get_persistent_submissions(
+    def _get_persistent_submissions(
+        self, id_lst: Iterable[int] | None = None
+    ) -> dict[int, JSONDocument]:
         with self.using_resource("submissions", "read") as sub_res:
             subs_dat = copy.deepcopy(
                 {
@@ -440,73 +650,83 @@ class JSONPersistentStore(PersistentStore):
                 }
             )
             # cast jobscript submit-times and jobscript `task_elements` keys:
-            for
-
-
-
-
-            ] =
-                key
-            )
+            for sub in subs_dat.values():
+                js: dict[str, dict[str | int, Any]]
+                assert isinstance(sub, dict)
+                for js in sub["jobscripts"]:
+                    for key in list(te := js["task_elements"]):
+                        te[int(key)] = te.pop(key)
 
         return subs_dat
 
-    def _get_persistent_elements(
-
-
+    def _get_persistent_elements(
+        self, id_lst: Iterable[int]
+    ) -> dict[int, JsonStoreElement]:
+        elems, id_lst_ = self._get_cached_persistent_elements(id_lst)
+        if id_lst_:
             # could convert `id_lst` to e.g. slices if more efficient for a given store
             with self.using_resource("metadata", action="read") as md:
                 try:
-
+                    if "elements" not in md:
+                        raise KeyError
+                    elem_dat = {id_: md["elements"][id_] for id_ in id_lst_}
                 except KeyError:
-                    raise MissingStoreElementError(
-        new_elems = {
+                    raise MissingStoreElementError(id_lst_)
+            new_elems = {
+                k: JsonStoreElement.decode(v, None) for k, v in elem_dat.items()
+            }
             self.element_cache.update(new_elems)
             elems.update(new_elems)
         return elems
 
     def _get_persistent_element_iters(
         self, id_lst: Iterable[int]
-    ) ->
-        iters,
-        if
+    ) -> dict[int, JsonStoreElementIter]:
+        iters, id_lst_ = self._get_cached_persistent_element_iters(id_lst)
+        if id_lst_:
             with self.using_resource("metadata", action="read") as md:
                 try:
-
+                    if "iters" not in md:
+                        raise KeyError
+                    iter_dat = {id_: md["iters"][id_] for id_ in id_lst_}
                 except KeyError:
-                    raise MissingStoreElementIterationError(
-        new_iters = {
+                    raise MissingStoreElementIterationError(id_lst_)
+            new_iters = {
+                k: JsonStoreElementIter.decode(v, None) for k, v in iter_dat.items()
+            }
            self.element_iter_cache.update(new_iters)
            iters.update(new_iters)
        return iters
 
-    def _get_persistent_EARs(self, id_lst: Iterable[int]) ->
-        runs,
-        if
+    def _get_persistent_EARs(self, id_lst: Iterable[int]) -> dict[int, JsonStoreEAR]:
+        runs, id_lst_ = self._get_cached_persistent_EARs(id_lst)
+        if id_lst_:
             with self.using_resource("metadata", action="read") as md:
                 try:
-
+                    if "runs" not in md:
+                        raise KeyError
+                    EAR_dat = {id_: md["runs"][id_] for id_ in id_lst_}
                 except KeyError:
-                    raise MissingStoreEARError(
+                    raise MissingStoreEARError(id_lst_)
             new_runs = {
-                k:
+                k: JsonStoreEAR.decode(v, self.ts_fmt, None)
+                for k, v in EAR_dat.items()
             }
             self.EAR_cache.update(new_runs)
             runs.update(new_runs)
         return runs
 
     def _get_persistent_parameters(
-        self,
-
-
-
-        with self.using_resource("parameters", "read") as params:
+        self, id_lst: Iterable[int], **kwargs
+    ) -> Mapping[int, StoreParameter]:
+        params, id_lst_ = self._get_cached_persistent_parameters(id_lst)
+        if id_lst_:
+            with self.using_resource("parameters", "read") as params_:
                 try:
-                    param_dat = {
-                    src_dat = {
+                    param_dat = {id_: params_["data"][str(id_)] for id_ in id_lst_}
+                    src_dat = {id_: params_["sources"][str(id_)] for id_ in id_lst_}
                 except KeyError:
-                    raise MissingParameterData(
+                    raise MissingParameterData(id_lst_)
 
                 new_params = {
                     k: StoreParameter.decode(id_=k, data=v, source=src_dat[k])
@@ -516,56 +736,91 @@ class JSONPersistentStore(PersistentStore):
            params.update(new_params)
        return params
 
-    def _get_persistent_param_sources(
-
-
+    def _get_persistent_param_sources(
+        self, id_lst: Iterable[int]
+    ) -> dict[int, ParamSource]:
+        sources, id_lst_ = self._get_cached_persistent_param_sources(id_lst)
+        if id_lst_:
            with self.using_resource("parameters", "read") as params:
                try:
-                    new_sources = {
+                    new_sources = {id_: params["sources"][str(id_)] for id_ in id_lst_}
                except KeyError:
-                    raise MissingParameterData(
+                    raise MissingParameterData(id_lst_)
            self.param_sources_cache.update(new_sources)
            sources.update(new_sources)
        return sources
 
     def _get_persistent_parameter_set_status(
         self, id_lst: Iterable[int]
-    ) ->
+    ) -> dict[int, bool]:
         with self.using_resource("parameters", "read") as params:
             try:
-                param_dat = {
+                param_dat = {id_: params["data"][str(id_)] for id_ in id_lst}
             except KeyError:
-                raise MissingParameterData(id_lst)
+                raise MissingParameterData(id_lst)
        return {k: v is not None for k, v in param_dat.items()}
 
-    def _get_persistent_parameter_IDs(self) ->
+    def _get_persistent_parameter_IDs(self) -> list[int]:
         with self.using_resource("parameters", "read") as params:
-            return
+            return [int(i) for i in params["data"]]
 
-    def get_ts_fmt(self):
+    def get_ts_fmt(self) -> str:
         """
         Get the format for timestamps.
         """
         with self.using_resource("metadata", action="read") as md:
+            assert "ts_fmt" in md
            return md["ts_fmt"]
 
-    def get_ts_name_fmt(self):
+    def get_ts_name_fmt(self) -> str:
        """
        Get the format for timestamps to use in names.
        """
        with self.using_resource("metadata", action="read") as md:
+            assert "ts_name_fmt" in md
            return md["ts_name_fmt"]
 
-    def get_creation_info(self):
+    def get_creation_info(self) -> StoreCreationInfo:
        """
        Get information about the creation of the workflow.
        """
        with self.using_resource("metadata", action="read") as md:
+            assert "creation_info" in md
            return copy.deepcopy(md["creation_info"])
 
-    def get_name(self):
+    def get_name(self) -> str:
        """
        Get the name of the workflow.
        """
        with self.using_resource("metadata", action="read") as md:
+            assert "name" in md
            return md["name"]
+
+    def zip(
+        self,
+        path: str = ".",
+        log: str | None = None,
+        overwrite=False,
+        include_execute=False,
+        include_rechunk_backups=False,
+    ) -> str:
+        raise TypeError("unsupported operation: zipping-json")
+
+    def unzip(self, path: str = ".", log: str | None = None) -> str:
+        raise TypeError("unsupported operation: unzipping-json")
+
+    def rechunk_parameter_base(
+        self,
+        chunk_size: int | None = None,
+        backup: bool = True,
+        status: bool = True,
+    ) -> Any:
+        raise TypeError("unsupported operation: rechunk-json")
+
+    def rechunk_runs(
+        self,
+        chunk_size: int | None = None,
+        backup: bool = True,
+        status: bool = True,
+    ) -> Any:
        raise TypeError("unsupported operation: rechunk-json")