hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a190__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__pyinstaller/hook-hpcflow.py +8 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/sdk/__init__.py +21 -15
- hpcflow/sdk/app.py +2133 -770
- hpcflow/sdk/cli.py +281 -250
- hpcflow/sdk/cli_common.py +6 -2
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +77 -42
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +578 -311
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +112 -85
- hpcflow/sdk/config/types.py +145 -0
- hpcflow/sdk/core/actions.py +1054 -994
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +81 -63
- hpcflow/sdk/core/command_files.py +275 -185
- hpcflow/sdk/core/commands.py +111 -107
- hpcflow/sdk/core/element.py +724 -503
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +398 -51
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +380 -334
- hpcflow/sdk/core/loop_cache.py +160 -43
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +728 -600
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +33 -22
- hpcflow/sdk/core/task.py +1546 -1325
- hpcflow/sdk/core/task_schema.py +240 -196
- hpcflow/sdk/core/test_utils.py +126 -88
- hpcflow/sdk/core/types.py +387 -0
- hpcflow/sdk/core/utils.py +410 -305
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +1192 -1028
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/demo/cli.py +46 -33
- hpcflow/sdk/helper/cli.py +18 -16
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +83 -59
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +988 -586
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +408 -153
- hpcflow/sdk/persistence/pending.py +158 -123
- hpcflow/sdk/persistence/store_resource.py +37 -22
- hpcflow/sdk/persistence/types.py +307 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +477 -420
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +444 -404
- hpcflow/sdk/submission/schedulers/__init__.py +133 -40
- hpcflow/sdk/submission/schedulers/direct.py +97 -71
- hpcflow/sdk/submission/schedulers/sge.py +132 -126
- hpcflow/sdk/submission/schedulers/slurm.py +263 -268
- hpcflow/sdk/submission/schedulers/utils.py +7 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +102 -29
- hpcflow/sdk/submission/shells/bash.py +72 -55
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +37 -29
- hpcflow/sdk/submission/submission.py +203 -257
- hpcflow/sdk/submission/types.py +143 -0
- hpcflow/sdk/typing.py +163 -12
- hpcflow/tests/conftest.py +8 -6
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_main_scripts.py +60 -30
- hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -4
- hpcflow/tests/unit/test_action.py +86 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +13 -6
- hpcflow/tests/unit/test_cli.py +1 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +20 -15
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +3 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +65 -58
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +16 -7
- hpcflow/tests/unit/test_persistence.py +48 -35
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +8 -3
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +3 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +39 -19
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/workflows/test_jobscript.py +2 -1
- hpcflow/tests/workflows/test_workflows.py +18 -13
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/METADATA +2 -1
- hpcflow_new2-0.2.0a190.dist-info/RECORD +165 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/command_files.py

@@ -4,65 +4,103 @@ Model of files that hold commands.
 
 from __future__ import annotations
 import copy
-from dataclasses import dataclass, field
+from dataclasses import dataclass, field, InitVar
 from pathlib import Path
 from textwrap import dedent
-from typing import
+from typing import Protocol, cast, overload, TYPE_CHECKING
+from typing_extensions import Final, override
 
-from hpcflow.sdk import
+from hpcflow.sdk.typing import hydrate, ParamSource
 from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
-from hpcflow.sdk.core.environment import Environment
 from hpcflow.sdk.core.utils import search_dir_files_by_regex
 from hpcflow.sdk.core.zarr_io import zarr_decode
 from hpcflow.sdk.core.parameters import _process_demo_data_strings
 
+if TYPE_CHECKING:
+    from collections.abc import Mapping
+    from typing import Any, ClassVar
+    from typing_extensions import Self
+    from .actions import Action, ActionRule
+    from .environment import Environment
+    from .object_list import CommandFilesList
+    from .parameters import Parameter
+    from .task import ElementSet
+    from .workflow import Workflow
 
-
+
+class FileNamePart(Protocol):
+    """
+    A filename or piece of filename that can be expanded.
+    """
+
+    def value(self, directory: str = ".") -> str | list[str]:
+        """
+        Get the part of the file, possibly with directory specified.
+        Implementations of this may ignore the directory.
+        If a pattern, the expanded value may be a list of strings.
+        """
+
+
+@dataclass(init=False)
+@hydrate
 class FileSpec(JSONLike):
     """
     A specification of a file handled by a workflow.
     """
 
-
-
-
-
+    _validation_schema: ClassVar[str] = "files_spec_schema.yaml"
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
+        ChildObjectSpec(name="name", class_name="FileNameSpec"),
+    )
 
     #: Label for this file specification.
-    label: str
+    label: Final[str]
     #: The name of the file.
-    name:
+    name: Final[FileNameSpec]
     #: Documentation for the file specification.
-    doc: str
-    _hash_value:
+    doc: Final[str]
+    _hash_value: str | None = field(default=None, repr=False)
 
-    def
-        self
-
-
-
-
+    def __init__(
+        self,
+        label: str,
+        name: str | FileNameSpec,
+        doc: str = "",
+        _hash_value: str | None = None,
+    ) -> None:
+        self.label = label
+        self.name = self._app.FileNameSpec(name) if isinstance(name, str) else name
+        self.doc = doc
+        self._hash_value = _hash_value
+        self.__hash = hash((label, self.name))
+
+    def value(self, directory: str = ".") -> str:
         """
         The path to a file, optionally resolved with respect to a particular directory.
         """
-        return self.name.value(directory)
+        return cast("str", self.name.value(directory))
 
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, self.__class__):
             return False
-
-
-
+        return self.label == other.label and self.name == other.name
+
+    def __hash__(self) -> int:
+        return self.__hash
+
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        d.pop("_FileSpec__hash")
+        return d
 
     @property
-    def stem(self):
+    def stem(self) -> FileNameStem:
         """
         The stem of the file name.
         """
         return self.name.stem
 
     @property
-    def ext(self):
+    def ext(self) -> FileNameExt:
         """
         The extension of the file name.
         """
@@ -80,6 +118,7 @@ class FileSpec(JSONLike):
         return repr(self)
 
 
+@hydrate
 class FileNameSpec(JSONLike):
     """
     The name of a file handled by a workflow, or a pattern that matches multiple files.
@@ -95,15 +134,19 @@ class FileNameSpec(JSONLike):
         If true, the name is used as a regex to search for actual files.
     """
 
-
-
-
+    def __init__(
+        self,
+        name: str,
+        args: list[FileNamePart] | None = None,
+        is_regex: bool = False,
+    ) -> None:
         #: The name or pattern.
-        self.name = name
+        self.name: Final[str] = name
         #: Positional arguments to use when formatting the name.
-        self.args = args
+        self.args: Final[tuple[FileNamePart, ...]] = tuple(args or [])
         #: Whether the name is used as a regex to search for actual files.
-        self.is_regex = is_regex
+        self.is_regex: Final[bool] = is_regex
+        self.__hash = hash((name, self.args, is_regex))
 
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, self.__class__):
@@ -114,21 +157,28 @@ class FileNameSpec(JSONLike):
             and self.is_regex == other.is_regex
         )
 
+    def __hash__(self) -> int:
+        return self.__hash
+
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        d.pop("_FileNameSpec__hash")
+        return d
+
     @property
-    def stem(self):
+    def stem(self) -> FileNameStem:
         """
         The stem of the name or pattern.
        """
-        return self.
+        return self._app.FileNameStem(self)
 
     @property
-    def ext(self):
+    def ext(self) -> FileNameExt:
         """
         The extension of the name or pattern.
         """
-        return self.
+        return self._app.FileNameExt(self)
 
-    def value(self, directory="."):
+    def value(self, directory: str = ".") -> list[str] | str:
         """
         Get the template-resolved name of the file
         (or files matched if the name is a regex pattern).
@@ -138,13 +188,13 @@ class FileNameSpec(JSONLike):
         directory: str
             Where to resolve values with respect to.
         """
-        format_args = [
+        format_args = [arg.value(directory) for arg in self.args]
         value = self.name.format(*format_args)
         if self.is_regex:
-
+            return search_dir_files_by_regex(value, directory=directory)
         return value
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return f"{self.__class__.__name__}({self.name})"
 
 
@@ -155,13 +205,17 @@ class FileNameStem(JSONLike):
     """
 
     #: The file specification this is derived from.
-    file_name:
+    file_name: FileNameSpec
 
-    def value(self, directory=
+    def value(self, directory: str = ".") -> str:
         """
         Get the stem, possibly with directory specified.
         """
-
+        d = self.file_name.value(directory)
+        if self.file_name.is_regex:
+            raise ValueError("cannot get the stem of a regex match")
+        assert not isinstance(d, list)
+        return Path(d).stem
 
 
 @dataclass
@@ -171,16 +225,21 @@ class FileNameExt(JSONLike):
     """
 
     #: The file specification this is derived from.
-    file_name:
+    file_name: FileNameSpec
 
-    def value(self, directory=
+    def value(self, directory: str = ".") -> str:
         """
         Get the extension.
         """
-
+        d = self.file_name.value(directory)
+        if self.file_name.is_regex:
+            raise ValueError("cannot get the extension of a regex match")
+        assert not isinstance(d, list)
+        return Path(d).suffix
 
 
 @dataclass
+@hydrate
 class InputFileGenerator(JSONLike):
     """
     Represents a script that is run to generate input files for an action.
@@ -204,9 +263,7 @@ class InputFileGenerator(JSONLike):
         User-specified rules for whether to run the generator.
     """
 
-
-
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="input_file",
             class_name="FileSpec",
@@ -230,32 +287,31 @@ class InputFileGenerator(JSONLike):
     )
 
     #: The file to generate.
-    input_file:
+    input_file: FileSpec
     #: The input parameters to the generator.
-    inputs:
+    inputs: list[Parameter]
     #: The script that generates the inputs.
-    script: str = None
+    script: str | None = None
     #: The environment in which to run the generator.
-    environment:
+    environment: Environment | None = None
     #: Whether to pass in the environment.
-    script_pass_env_spec:
+    script_pass_env_spec: bool = False
     #: Whether the generator can be stopped early.
     #: Quick-running scripts tend to not need this.
-    abortable:
+    abortable: bool = False
    #: User-specified rules for whether to run the generator.
-    rules:
+    rules: list[ActionRule] = field(default_factory=list)
 
-    def
-
-
-
-        """
-        not for a given element."""
+    def get_action_rules(self) -> list[ActionRule]:
+        """
+        Get the rules that allow testing if this input file generator must be run or
+        not for a given element.
+        """
         return [
-            self.
+            self._app.ActionRule.check_missing(f"input_files.{self.input_file.label}")
         ] + self.rules
 
-    def compose_source(self, snip_path) -> str:
+    def compose_source(self, snip_path: Path) -> str:
         """Generate the file contents of this input file generator source."""
 
         script_main_func = snip_path.stem
@@ -281,10 +337,10 @@ class InputFileGenerator(JSONLike):
             """
         )
         main_block = main_block.format(
-            run_log_file=self.
-            app_module=self.
-            cfg_dir=self.
-            cfg_invoc_key=self.
+            run_log_file=self._app.RunDirAppFiles.get_log_file_name(),
+            app_module=self._app.module,
+            cfg_dir=self._app.config.config_directory,
+            cfg_invoc_key=self._app.config.config_key,
             script_main_func=script_main_func,
             file_path=self.input_file.name.value(),
         )
@@ -296,22 +352,20 @@ class InputFileGenerator(JSONLike):
             """
         )
 
-
-        return out
+        return out.format(script_str=script_str, main_block=main_block)
 
-    def write_source(self, action, env_spec:
+    def write_source(self, action: Action, env_spec: Mapping[str, Any]) -> None:
         """
         Write the script if it is specified as a snippet script, otherwise we assume
         the script already exists in the working directory.
         """
-        snip_path
-        if snip_path:
-            source_str = self.compose_source(snip_path)
+        if snip_path := action.get_snippet_script_path(self.script, env_spec):
             with Path(snip_path.name).open("wt", newline="\n") as fp:
-                fp.write(
+                fp.write(self.compose_source(snip_path))
 
 
 @dataclass
+@hydrate
 class OutputFileParser(JSONLike):
     """
     Represents a script that is run to parse output files from an action and create outputs.
@@ -345,7 +399,7 @@ class OutputFileParser(JSONLike):
         Rules for whether to enable this parser.
     """
 
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="output",
             class_name="Parameter",
@@ -383,46 +437,55 @@ class OutputFileParser(JSONLike):
     )
 
     #: The output files that this parser will parse.
-    output_files:
+    output_files: list[FileSpec]
     #: The singular output parsed by this parser.
     #: Not to be confused with :py:attr:`outputs` (plural).
-    output:
+    output: Parameter | None = None
     #: The name of the file containing the output file parser source.
-    script: str = None
+    script: str | None = None
    #: The environment to use to run the parser.
-    environment: Environment = None
+    environment: Environment | None = None
     #: The other inputs to the parser.
-    inputs:
+    inputs: list[str] | None = None
     #: Optional multiple outputs from the upstream actions of the schema that are
     #: required to parametrise this parser.
     #: Not to be confused with :py:attr:`output` (plural).
-    outputs:
+    outputs: list[str] | None = None
     #: Miscellaneous options.
-    options:
+    options: dict[str, Any] | None = None
     #: Whether to pass the environment specifier to the script.
-    script_pass_env_spec:
+    script_pass_env_spec: bool = False
     #: Whether this script can be aborted.
-    abortable:
+    abortable: bool = False
     #: The files that should be saved to the persistent store for the workflow.
-    save_files:
+    save_files: InitVar[list[FileSpec] | bool] = True
+    _save_files: list[FileSpec] = field(init=False)
     #: The files that should be immediately removed.
-    clean_up:
+    clean_up: list[str] = field(default_factory=list)
     #: Rules for whether to enable this parser.
-    rules:
+    rules: list[ActionRule] = field(default_factory=list)
 
-    def __post_init__(self):
-        if not
+    def __post_init__(self, save_files: list[FileSpec] | bool) -> None:
+        if not save_files:
             # save no files
-            self.
-        elif
+            self._save_files = []
+        elif save_files is True:
             # save all output files
-            self.
-
-            self.
-
+            self._save_files = [out_f for out_f in self.output_files]
+        else:
+            self._save_files = save_files
+
+    @override
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        d = super()._postprocess_to_dict(d)
+        if "_save_files" in d:
+            d["save_files"] = d.pop("_save_files")
+        return d
 
     @classmethod
-    def from_json_like(
+    def from_json_like(  # type: ignore[override]
+        cls, json_like: dict[str, Any], shared_data: Mapping | None = None
+    ) -> Self:
         if "save_files" in json_like:
             if not json_like["save_files"]:
                 json_like["save_files"] = []
@@ -430,20 +493,20 @@ class OutputFileParser(JSONLike):
                 json_like["save_files"] = [i for i in json_like["output_files"]]
         return super().from_json_like(json_like, shared_data)
 
-    def get_action_rules(self):
+    def get_action_rules(self) -> list[ActionRule]:
         """Get the rules that allow testing if this output file parser must be run or not
         for a given element."""
         return [
-            self.
-            for
+            self._app.ActionRule.check_missing(f"output_files.{out_f.label}")
+            for out_f in self.output_files
         ] + self.rules
 
-    def compose_source(self, snip_path) -> str:
+    def compose_source(self, snip_path: Path) -> str:
         """Generate the file contents of this output file parser source."""
 
         if self.output is None:
             # might be used just for saving files:
-            return
+            return ""
 
         script_main_func = snip_path.stem
         with snip_path.open("rt") as fp:
@@ -474,10 +537,10 @@ class OutputFileParser(JSONLike):
             """
         )
         main_block = main_block.format(
-            run_log_file=self.
-            app_module=self.
-            cfg_dir=self.
-            cfg_invoc_key=self.
+            run_log_file=self._app.RunDirAppFiles.get_log_file_name(),
+            app_module=self._app.module,
+            cfg_dir=self._app.config.config_directory,
+            cfg_invoc_key=self._app.config.config_key,
             script_main_func=script_main_func,
             param_name=f"outputs.{self.output.typ}",
         )
@@ -489,10 +552,9 @@ class OutputFileParser(JSONLike):
             """
         )
 
-
-        return out
+        return out.format(script_str=script_str, main_block=main_block)
 
-    def write_source(self, action, env_spec:
+    def write_source(self, action: Action, env_spec: Mapping[str, Any]) -> None:
         """
         Write the actual output parser to a file so it can be enacted.
         """
@@ -502,24 +564,26 @@ class OutputFileParser(JSONLike):
 
         # write the script if it is specified as a snippet script, otherwise we assume
         # the script already exists in the working directory:
-        snip_path
-        if snip_path:
-            source_str = self.compose_source(snip_path)
+        if snip_path := action.get_snippet_script_path(self.script, env_spec):
             with Path(snip_path.name).open("wt", newline="\n") as fp:
-                fp.write(
+                fp.write(self.compose_source(snip_path))
 
 
+@hydrate
 class _FileContentsSpecifier(JSONLike):
     """Class to represent the contents of a file, either via a file-system path or
     directly."""
 
+    #: What file is this? Only if known.
+    file: FileSpec
+
     def __init__(
         self,
-        path:
-        contents:
-        extension:
-        store_contents:
-    ):
+        path: Path | str | None = None,
+        contents: str | None = None,
+        extension: str = "",
+        store_contents: bool = True,
+    ) -> None:
         if path is not None and contents is not None:
             raise ValueError("Specify exactly one of `path` and `contents`.")
 
@@ -528,19 +592,19 @@ class _FileContentsSpecifier(JSONLike):
                 "`store_contents` cannot be set to False if `contents` was specified."
             )
 
-        self._path = _process_demo_data_strings(self.
+        self._path = _process_demo_data_strings(self._app, path)
         self._contents = contents
         self._extension = extension
         self._store_contents = store_contents
 
         # assigned by `make_persistent`
-        self._workflow = None
-        self._value_group_idx = None
+        self._workflow: Workflow | None = None
+        self._value_group_idx: int | None = None
 
         # assigned by parent `ElementSet`
-        self._element_set = None
+        self._element_set: ElementSet | None = None
 
-    def __deepcopy__(self, memo):
+    def __deepcopy__(self, memo: dict | None) -> Self:
         kwargs = self.to_dict()
         value_group_idx = kwargs.pop("value_group_idx")
         obj = self.__class__(**copy.deepcopy(kwargs, memo))
@@ -549,16 +613,27 @@ class _FileContentsSpecifier(JSONLike):
         obj._element_set = self._element_set
         return obj
 
-
-
+    @property
+    def normalised_path(self) -> str:
+        """
+        Full workflow value path to the file.
+
+        Note
+        ----
+        This is not the same as the path in the filesystem, but is closely
+        related.
+        """
+        return str(self._path) if self._path else "."
+
+    @override
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        out = super()._postprocess_to_dict(d)
         if "_workflow" in out:
             del out["_workflow"]
-
-        out = {k.lstrip("_"): v for k, v in out.items()}
-        return out
+        return {k.lstrip("_"): v for k, v in out.items()}
 
     @classmethod
-    def _json_like_constructor(cls, json_like):
+    def _json_like_constructor(cls, json_like: dict[str, Any]) -> Self:
         """Invoked by `JSONLike.from_json_like` instead of `__init__`."""
 
         _value_group_idx = json_like.pop("value_group_idx", None)
@@ -567,7 +642,7 @@ class _FileContentsSpecifier(JSONLike):
 
         return obj
 
-    def _get_members(self, ensure_contents=False):
+    def _get_members(self, ensure_contents: bool = False) -> dict[str, Any]:
         out = self.to_dict()
         del out["value_group_idx"]
 
@@ -578,9 +653,9 @@ class _FileContentsSpecifier(JSONLike):
 
     def make_persistent(
         self,
-        workflow:
-        source:
-    ) ->
+        workflow: Workflow,
+        source: ParamSource,
+    ) -> tuple[str, list[int], bool]:
         """Save to a persistent workflow.
 
         Returns
@@ -594,9 +669,9 @@ class _FileContentsSpecifier(JSONLike):
         if self._value_group_idx is not None:
             data_ref = self._value_group_idx
             is_new = False
-            if not workflow.
+            if not workflow.check_parameters_exist(data_ref):
                 raise RuntimeError(
-                    f"{self.__class__.__name__} has a
+                    f"{self.__class__.__name__} has a data reference "
                     f"({data_ref}), but does not exist in the workflow."
                 )
             # TODO: log if already persistent.
@@ -618,32 +693,51 @@ class _FileContentsSpecifier(JSONLike):
             self._workflow = workflow
             self._path = None
             self._contents = None
-            self._extension =
-            self._store_contents =
+            self._extension = ""
+            self._store_contents = True
 
         return (self.normalised_path, [data_ref], is_new)
 
-
+    @overload
+    def _get_value(self, value_name: None = None) -> dict[str, Any]:
+        ...
+
+    @overload
+    def _get_value(self, value_name: str) -> Any:
+        ...
+
+    def _get_value(self, value_name: str | None = None) -> Any:
         # TODO: fix
+        assert self._value_group_idx is None
         if self._value_group_idx is not None:
-
+            from ..persistence.zarr import ZarrPersistentStore
+
+            assert isinstance(self.workflow._store, ZarrPersistentStore)
+            # FIXME: Next two lines are both thoroughly broken, but at least resolve to something
+            grp = self.workflow._store._get_parameter_group(self._value_group_idx)
             val = zarr_decode(grp)
         else:
             val = self._get_members(ensure_contents=(value_name == "contents"))
         if value_name:
-
+            return val.get(value_name)
 
         return val
 
-    def read_contents(self):
+    def read_contents(self) -> str:
         """
         Get the actual contents of the file.
         """
-        with self.
+        with self.__path.open("r") as fh:
             return fh.read()
 
     @property
-    def
+    def __path(self) -> Path:
+        path = self._get_value("path")
+        assert path is not None
+        return Path(path)
+
+    @property
+    def path(self) -> Path | None:
         """
         The path to the file.
         """
@@ -651,42 +745,44 @@ class _FileContentsSpecifier(JSONLike):
         return Path(path) if path else None
 
     @property
-    def store_contents(self):
+    def store_contents(self) -> Any:
         """
         Whether the file's contents are stored in the workflow's persistent store.
         """
         return self._get_value("store_contents")
 
     @property
-    def contents(self):
+    def contents(self) -> str:
         """
         The contents of the file.
         """
         if self.store_contents:
-
+            return self._get_value("contents")
         else:
-
-
-            return contents
+            return self.read_contents()
 
     @property
-    def extension(self):
+    def extension(self) -> str:
         """
         The extension of the file.
         """
         return self._get_value("extension")
 
     @property
-    def workflow(self) ->
+    def workflow(self) -> Workflow:
         """
         The owning workflow.
         """
         if self._workflow:
             return self._workflow
         elif self._element_set:
-
+            w_tmpl = self._element_set.task_template.workflow_template
+            if w_tmpl and w_tmpl.workflow:
+                return w_tmpl.workflow
+        raise NotImplementedError
 
 
+@hydrate
 class InputFile(_FileContentsSpecifier):
     """
     An input file.
@@ -705,7 +801,7 @@ class InputFile(_FileContentsSpecifier):
         Are the file's contents to be cached in the workflow persistent store?
     """
 
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="file",
             class_name="FileSpec",
@@ -716,30 +812,29 @@ class InputFile(_FileContentsSpecifier):
 
     def __init__(
         self,
-        file:
-        path:
-        contents:
-        extension:
-        store_contents:
-    ):
-
-
-
-
+        file: FileSpec | str,
+        path: Path | str | None = None,
+        contents: str | None = None,
+        extension: str = "",
+        store_contents: bool = True,
+    ) -> None:
+        if not isinstance(file, FileSpec):
+            files: CommandFilesList = self._app.command_files
+            self.file = files.get(file)
+        else:
+            self.file = file
 
         super().__init__(path, contents, extension, store_contents)
 
-    def
-
-
-
-    def _get_members(self, ensure_contents=False, use_file_label=False):
+    def _get_members(
+        self, ensure_contents: bool = False, use_file_label: bool = False
+    ) -> dict[str, Any]:
         out = super()._get_members(ensure_contents)
         if use_file_label:
             out["file"] = self.file.label
         return out
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         val_grp_idx = ""
         if self._value_group_idx is not None:
             val_grp_idx = f", value_group_idx={self._value_group_idx}"
@@ -757,24 +852,18 @@ class InputFile(_FileContentsSpecifier):
         )
 
     @property
-    def normalised_files_path(self):
+    def normalised_files_path(self) -> str:
         """
         Standard name for the file within the workflow.
         """
         return self.file.label
 
     @property
-    def normalised_path(self):
-        """
-        Full workflow value path to the file.
-
-        Note
-        ----
-        This is not the same as the path in the filesystem.
-        """
+    def normalised_path(self) -> str:
         return f"input_files.{self.normalised_files_path}"
 
 
+@hydrate
 class InputFileGeneratorSource(_FileContentsSpecifier):
     """
     The source of code for use in an input file generator.
@@ -784,7 +873,7 @@ class InputFileGeneratorSource(_FileContentsSpecifier):
     generator:
         How to generate the file.
     path:
-        Path to the file.
+        Path to the file to generate.
     contents:
         Contents of the file. Only used when recreating this object.
     extension:
@@ -793,9 +882,9 @@ class InputFileGeneratorSource(_FileContentsSpecifier):
 
     def __init__(
         self,
-        generator:
-        path:
-        contents: str = None,
+        generator: InputFileGenerator,
+        path: Path | str | None = None,
+        contents: str | None = None,
         extension: str = "",
     ):
         #: How to generate the file.
@@ -803,6 +892,7 @@ class InputFileGeneratorSource(_FileContentsSpecifier):
         super().__init__(path, contents, extension)
 
 
+@hydrate
 class OutputFileParserSource(_FileContentsSpecifier):
     """
     The source of code for use in an output file parser.
@@ -812,7 +902,7 @@ class OutputFileParserSource(_FileContentsSpecifier):
     parser:
         How to parse the file.
     path: Path
-        Path to the file.
+        Path to the file to parse.
    contents:
         Contents of the file. Only used when recreating this object.
     extension:
@@ -821,9 +911,9 @@ class OutputFileParserSource(_FileContentsSpecifier):
 
     def __init__(
         self,
-        parser:
-        path:
-        contents: str = None,
+        parser: OutputFileParser,
+        path: Path | str | None = None,
+        contents: str | None = None,
         extension: str = "",
     ):
         #: How to parse the file.