hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a199__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only and reflects the changes between those package versions.
- hpcflow/__pyinstaller/hook-hpcflow.py +9 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/environments.yaml +1 -1
- hpcflow/sdk/__init__.py +26 -15
- hpcflow/sdk/app.py +2192 -768
- hpcflow/sdk/cli.py +506 -296
- hpcflow/sdk/cli_common.py +105 -7
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +115 -43
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +674 -318
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +125 -84
- hpcflow/sdk/config/types.py +148 -0
- hpcflow/sdk/core/__init__.py +25 -1
- hpcflow/sdk/core/actions.py +1771 -1059
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +139 -79
- hpcflow/sdk/core/command_files.py +263 -287
- hpcflow/sdk/core/commands.py +145 -112
- hpcflow/sdk/core/element.py +828 -535
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +455 -52
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +751 -347
- hpcflow/sdk/core/loop_cache.py +164 -47
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +1100 -627
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +21 -37
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +1649 -1339
- hpcflow/sdk/core/task_schema.py +308 -196
- hpcflow/sdk/core/test_utils.py +191 -114
- hpcflow/sdk/core/types.py +440 -0
- hpcflow/sdk/core/utils.py +485 -309
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +2544 -1178
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
- hpcflow/sdk/demo/cli.py +53 -33
- hpcflow/sdk/helper/cli.py +18 -15
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +122 -71
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +1360 -606
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +568 -188
- hpcflow/sdk/persistence/pending.py +382 -179
- hpcflow/sdk/persistence/store_resource.py +39 -23
- hpcflow/sdk/persistence/types.py +318 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +1337 -433
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +1651 -692
- hpcflow/sdk/submission/schedulers/__init__.py +167 -39
- hpcflow/sdk/submission/schedulers/direct.py +121 -81
- hpcflow/sdk/submission/schedulers/sge.py +170 -129
- hpcflow/sdk/submission/schedulers/slurm.py +291 -268
- hpcflow/sdk/submission/schedulers/utils.py +12 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +150 -29
- hpcflow/sdk/submission/shells/bash.py +283 -173
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +228 -170
- hpcflow/sdk/submission/submission.py +1014 -335
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +182 -12
- hpcflow/sdk/utils/arrays.py +71 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +12 -0
- hpcflow/sdk/utils/strings.py +33 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +27 -6
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +866 -85
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +12 -4
- hpcflow/tests/unit/test_action.py +262 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +33 -6
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +134 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +142 -16
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +50 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +1396 -84
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +29 -7
- hpcflow/tests/unit/test_persistence.py +237 -42
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +117 -6
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +23 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +38 -89
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +334 -1
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +160 -15
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6587 -3
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +8 -4
- hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/command_files.py

@@ -4,65 +4,104 @@ Model of files that hold commands.
 
 from __future__ import annotations
 import copy
-from dataclasses import dataclass, field
+from dataclasses import dataclass, field, InitVar
 from pathlib import Path
 from textwrap import dedent
-from typing import
+from typing import Protocol, cast, overload, TYPE_CHECKING
+from typing_extensions import Final, override
 
-from hpcflow.sdk import
+from hpcflow.sdk.typing import PathLike, hydrate, ParamSource
 from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
-from hpcflow.sdk.core.environment import Environment
 from hpcflow.sdk.core.utils import search_dir_files_by_regex
 from hpcflow.sdk.core.zarr_io import zarr_decode
 from hpcflow.sdk.core.parameters import _process_demo_data_strings
 
+if TYPE_CHECKING:
+    import os
+    from collections.abc import Mapping
+    from typing import Any, ClassVar
+    from typing_extensions import Self
+    from .actions import Action, ActionRule
+    from .environment import Environment
+    from .object_list import CommandFilesList
+    from .parameters import Parameter
+    from .task import ElementSet
+    from .workflow import Workflow
 
-
+
+class FileNamePart(Protocol):
+    """
+    A filename or piece of filename that can be expanded.
+    """
+
+    def value(self, directory: str | os.PathLike = ".") -> str | list[str]:
+        """
+        Get the part of the file, possibly with directory specified.
+        Implementations of this may ignore the directory.
+        If a pattern, the expanded value may be a list of strings.
+        """
+
+
+@dataclass(init=False)
+@hydrate
 class FileSpec(JSONLike):
     """
     A specification of a file handled by a workflow.
     """
 
-
-
-
-
+    _validation_schema: ClassVar[str] = "files_spec_schema.yaml"
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
+        ChildObjectSpec(name="name", class_name="FileNameSpec"),
+    )
 
     #: Label for this file specification.
-    label: str
+    label: Final[str]
     #: The name of the file.
-    name:
+    name: Final[FileNameSpec]
     #: Documentation for the file specification.
-    doc: str
-    _hash_value:
+    doc: Final[str]
+    _hash_value: str | None = field(default=None, repr=False)
 
-    def
-        self
-
-
-
-
+    def __init__(
+        self,
+        label: str,
+        name: str | FileNameSpec,
+        doc: str = "",
+        _hash_value: str | None = None,
+    ) -> None:
+        self.label = label
+        self.name = self._app.FileNameSpec(name) if isinstance(name, str) else name
+        self.doc = doc
+        self._hash_value = _hash_value
+        self.__hash = hash((label, self.name))
+
+    def value(self, directory: str | os.PathLike = ".") -> str:
         """
         The path to a file, optionally resolved with respect to a particular directory.
         """
-        return self.name.value(directory)
+        return cast("str", self.name.value(directory))
 
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, self.__class__):
             return False
-
-
-
+        return self.label == other.label and self.name == other.name
+
+    def __hash__(self) -> int:
+        return self.__hash
+
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        d.pop("_FileSpec__hash")
+        return d
 
     @property
-    def stem(self):
+    def stem(self) -> FileNameStem:
         """
         The stem of the file name.
         """
         return self.name.stem
 
     @property
-    def ext(self):
+    def ext(self) -> FileNameExt:
         """
         The extension of the file name.
         """
@@ -80,6 +119,7 @@ class FileSpec(JSONLike):
         return repr(self)
 
 
+@hydrate
 class FileNameSpec(JSONLike):
     """
     The name of a file handled by a workflow, or a pattern that matches multiple files.
@@ -95,15 +135,19 @@ class FileNameSpec(JSONLike):
         If true, the name is used as a regex to search for actual files.
     """
 
-
-
-
+    def __init__(
+        self,
+        name: str,
+        args: list[FileNamePart] | None = None,
+        is_regex: bool = False,
+    ) -> None:
         #: The name or pattern.
-        self.name = name
+        self.name: Final[str] = name
         #: Positional arguments to use when formatting the name.
-        self.args = args
+        self.args: Final[tuple[FileNamePart, ...]] = tuple(args or [])
         #: Whether the name is used as a regex to search for actual files.
-        self.is_regex = is_regex
+        self.is_regex: Final[bool] = is_regex
+        self.__hash = hash((name, self.args, is_regex))
 
     def __eq__(self, other: object) -> bool:
         if not isinstance(other, self.__class__):
@@ -114,37 +158,44 @@ class FileNameSpec(JSONLike):
             and self.is_regex == other.is_regex
         )
 
+    def __hash__(self) -> int:
+        return self.__hash
+
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        d.pop("_FileNameSpec__hash")
+        return d
+
     @property
-    def stem(self):
+    def stem(self) -> FileNameStem:
         """
         The stem of the name or pattern.
         """
-        return self.
+        return self._app.FileNameStem(self)
 
     @property
-    def ext(self):
+    def ext(self) -> FileNameExt:
         """
         The extension of the name or pattern.
         """
-        return self.
+        return self._app.FileNameExt(self)
 
-    def value(self, directory="."):
+    def value(self, directory: str | os.PathLike = ".") -> list[str] | str:
         """
         Get the template-resolved name of the file
         (or files matched if the name is a regex pattern).
 
         Parameters
         ----------
-        directory:
+        directory: PathLike
             Where to resolve values with respect to.
         """
-        format_args = [
+        format_args = [arg.value(Path(directory)) for arg in self.args]
         value = self.name.format(*format_args)
         if self.is_regex:
-
+            return search_dir_files_by_regex(value, directory=directory)
         return value
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         return f"{self.__class__.__name__}({self.name})"
 
 
@@ -155,13 +206,17 @@ class FileNameStem(JSONLike):
     """
 
     #: The file specification this is derived from.
-    file_name:
+    file_name: FileNameSpec
 
-    def value(self, directory=
+    def value(self, directory: str | os.PathLike = ".") -> str:
         """
         Get the stem, possibly with directory specified.
         """
-
+        d = self.file_name.value(directory)
+        if self.file_name.is_regex:
+            raise ValueError("cannot get the stem of a regex match")
+        assert not isinstance(d, list)
+        return Path(d).stem
 
 
 @dataclass
@@ -171,16 +226,21 @@ class FileNameExt(JSONLike):
     """
 
     #: The file specification this is derived from.
-    file_name:
+    file_name: FileNameSpec
 
-    def value(self, directory=
+    def value(self, directory: str | os.PathLike = ".") -> str:
         """
         Get the extension.
         """
-
+        d = self.file_name.value(directory)
+        if self.file_name.is_regex:
+            raise ValueError("cannot get the extension of a regex match")
+        assert not isinstance(d, list)
+        return Path(d).suffix
 
 
 @dataclass
+@hydrate
 class InputFileGenerator(JSONLike):
     """
     Represents a script that is run to generate input files for an action.
@@ -204,9 +264,7 @@ class InputFileGenerator(JSONLike):
         User-specified rules for whether to run the generator.
     """
 
-
-
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="input_file",
             class_name="FileSpec",
@@ -230,88 +288,35 @@ class InputFileGenerator(JSONLike):
     )
 
     #: The file to generate.
-    input_file:
+    input_file: FileSpec
     #: The input parameters to the generator.
-    inputs:
+    inputs: list[Parameter]
     #: The script that generates the inputs.
-    script: str = None
+    script: str | None = None
     #: The environment in which to run the generator.
-    environment:
+    environment: Environment | None = None
     #: Whether to pass in the environment.
-    script_pass_env_spec:
+    script_pass_env_spec: bool = False
     #: Whether the generator can be stopped early.
     #: Quick-running scripts tend to not need this.
-    abortable:
+    abortable: bool = False
     #: User-specified rules for whether to run the generator.
-    rules:
-
-
-        self.rules = self.rules or []
-
-    def get_action_rules(self):
-        """Get the rules that allow testing if this input file generator must be run or
-        not for a given element."""
-        return [
-            self.app.ActionRule.check_missing(f"input_files.{self.input_file.label}")
-        ] + self.rules
-
-    def compose_source(self, snip_path) -> str:
-        """Generate the file contents of this input file generator source."""
-
-        script_main_func = snip_path.stem
-        with snip_path.open("rt") as fp:
-            script_str = fp.read()
-
-        main_block = dedent(
-            """\
-            if __name__ == "__main__":
-                import sys
-                from pathlib import Path
-                import {app_module} as app
-                app.load_config(
-                    log_file_path=Path("{run_log_file}").resolve(),
-                    config_dir=r"{cfg_dir}",
-                    config_key=r"{cfg_invoc_key}",
-                )
-                wk_path, EAR_ID = sys.argv[1:]
-                EAR_ID = int(EAR_ID)
-                wk = app.Workflow(wk_path)
-                EAR = wk.get_EARs_from_IDs([EAR_ID])[0]
-                {script_main_func}(path=Path({file_path!r}), **EAR.get_IFG_input_values())
-            """
-        )
-        main_block = main_block.format(
-            run_log_file=self.app.RunDirAppFiles.get_log_file_name(),
-            app_module=self.app.module,
-            cfg_dir=self.app.config.config_directory,
-            cfg_invoc_key=self.app.config.config_key,
-            script_main_func=script_main_func,
-            file_path=self.input_file.name.value(),
-        )
-
-        out = dedent(
-            """\
-            {script_str}
-            {main_block}
-            """
-        )
-
-        out = out.format(script_str=script_str, main_block=main_block)
-        return out
+    rules: list[ActionRule] = field(default_factory=list)
+    #: Whether the generator requires a working directory.
+    requires_dir: bool = True
 
-    def
+    def get_action_rules(self) -> list[ActionRule]:
         """
-
-
+        Get the rules that allow testing if this input file generator must be run or
+        not for a given element.
         """
-
-
-
-        with Path(snip_path.name).open("wt", newline="\n") as fp:
-            fp.write(source_str)
+        return [
+            self._app.ActionRule.check_missing(f"input_files.{self.input_file.label}")
+        ] + self.rules
 
 
 @dataclass
+@hydrate
 class OutputFileParser(JSONLike):
     """
     Represents a script that is run to parse output files from an action and create outputs.
@@ -345,7 +350,7 @@ class OutputFileParser(JSONLike):
         Rules for whether to enable this parser.
     """
 
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="output",
             class_name="Parameter",
@@ -383,46 +388,57 @@ class OutputFileParser(JSONLike):
     )
 
     #: The output files that this parser will parse.
-    output_files:
+    output_files: list[FileSpec]
    #: The singular output parsed by this parser.
     #: Not to be confused with :py:attr:`outputs` (plural).
-    output:
+    output: Parameter | None = None
     #: The name of the file containing the output file parser source.
-    script: str = None
+    script: str | None = None
     #: The environment to use to run the parser.
-    environment: Environment = None
+    environment: Environment | None = None
     #: The other inputs to the parser.
-    inputs:
+    inputs: list[str] | None = None
     #: Optional multiple outputs from the upstream actions of the schema that are
     #: required to parametrise this parser.
-    #: Not to be confused with :py:attr:`output` (
-    outputs:
+    #: Not to be confused with :py:attr:`output` (singular).
+    outputs: list[str] | None = None
     #: Miscellaneous options.
-    options:
+    options: dict[str, Any] | None = None
     #: Whether to pass the environment specifier to the script.
-    script_pass_env_spec:
+    script_pass_env_spec: bool = False
     #: Whether this script can be aborted.
-    abortable:
+    abortable: bool = False
     #: The files that should be saved to the persistent store for the workflow.
-    save_files:
+    save_files: InitVar[list[FileSpec] | bool] = True
+    _save_files: list[FileSpec] = field(init=False)
     #: The files that should be immediately removed.
-    clean_up:
+    clean_up: list[str] = field(default_factory=list)
     #: Rules for whether to enable this parser.
-    rules:
+    rules: list[ActionRule] = field(default_factory=list)
+    #: Whether the parser requires a working directory.
+    requires_dir: bool = True
 
-    def __post_init__(self):
-        if not
+    def __post_init__(self, save_files: list[FileSpec] | bool) -> None:
+        if not save_files:
             # save no files
-            self.
-        elif
+            self._save_files = []
+        elif save_files is True:
             # save all output files
-            self.
-
-            self.
-
+            self._save_files = [out_f for out_f in self.output_files]
+        else:
+            self._save_files = save_files
+
+    @override
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        d = super()._postprocess_to_dict(d)
+        if "_save_files" in d:
+            d["save_files"] = d.pop("_save_files")
+        return d
 
     @classmethod
-    def from_json_like(
+    def from_json_like(  # type: ignore[override]
+        cls, json_like: dict[str, Any], shared_data: Mapping | None = None
+    ) -> Self:
         if "save_files" in json_like:
             if not json_like["save_files"]:
                 json_like["save_files"] = []
@@ -430,96 +446,30 @@ class OutputFileParser(JSONLike):
                 json_like["save_files"] = [i for i in json_like["output_files"]]
         return super().from_json_like(json_like, shared_data)
 
-    def get_action_rules(self):
+    def get_action_rules(self) -> list[ActionRule]:
         """Get the rules that allow testing if this output file parser must be run or not
         for a given element."""
         return [
-            self.
-            for
+            self._app.ActionRule.check_missing(f"output_files.{out_f.label}")
+            for out_f in self.output_files
         ] + self.rules
 
-    def compose_source(self, snip_path) -> str:
-        """Generate the file contents of this output file parser source."""
-
-        if self.output is None:
-            # might be used just for saving files:
-            return
-
-        script_main_func = snip_path.stem
-        with snip_path.open("rt") as fp:
-            script_str = fp.read()
-
-        main_block = dedent(
-            """\
-            if __name__ == "__main__":
-                import sys
-                from pathlib import Path
-                import {app_module} as app
-                app.load_config(
-                    log_file_path=Path("{run_log_file}").resolve(),
-                    config_dir=r"{cfg_dir}",
-                    config_key=r"{cfg_invoc_key}",
-                )
-                wk_path, EAR_ID = sys.argv[1:]
-                EAR_ID = int(EAR_ID)
-                wk = app.Workflow(wk_path)
-                EAR = wk.get_EARs_from_IDs([EAR_ID])[0]
-                value = {script_main_func}(
-                    **EAR.get_OFP_output_files(),
-                    **EAR.get_OFP_inputs(),
-                    **EAR.get_OFP_outputs(),
-                )
-                wk.save_parameter(name="{param_name}", value=value, EAR_ID=EAR_ID)
-
-            """
-        )
-        main_block = main_block.format(
-            run_log_file=self.app.RunDirAppFiles.get_log_file_name(),
-            app_module=self.app.module,
-            cfg_dir=self.app.config.config_directory,
-            cfg_invoc_key=self.app.config.config_key,
-            script_main_func=script_main_func,
-            param_name=f"outputs.{self.output.typ}",
-        )
-
-        out = dedent(
-            """\
-            {script_str}
-            {main_block}
-            """
-        )
-
-        out = out.format(script_str=script_str, main_block=main_block)
-        return out
-
-    def write_source(self, action, env_spec: Dict[str, Any]):
-        """
-        Write the actual output parser to a file so it can be enacted.
-        """
-        if self.output is None:
-            # might be used just for saving files:
-            return
-
-        # write the script if it is specified as a snippet script, otherwise we assume
-        # the script already exists in the working directory:
-        snip_path = action.get_snippet_script_path(self.script, env_spec)
-        if snip_path:
-            source_str = self.compose_source(snip_path)
-            with Path(snip_path.name).open("wt", newline="\n") as fp:
-                fp.write(source_str)
-
 
+@hydrate
 class _FileContentsSpecifier(JSONLike):
     """Class to represent the contents of a file, either via a file-system path or
     directly."""
 
+    #: What file is this? Only if known.
+    file: FileSpec
+
     def __init__(
         self,
-        path:
-        contents:
-        extension:
-        store_contents:
-    ):
+        path: Path | str | None = None,
+        contents: str | None = None,
+        extension: str = "",
+        store_contents: bool = True,
+    ) -> None:
         if path is not None and contents is not None:
             raise ValueError("Specify exactly one of `path` and `contents`.")
 
@@ -528,19 +478,19 @@ class _FileContentsSpecifier(JSONLike):
                 "`store_contents` cannot be set to False if `contents` was specified."
             )
 
-        self._path = _process_demo_data_strings(self.
+        self._path = _process_demo_data_strings(self._app, path)
         self._contents = contents
         self._extension = extension
         self._store_contents = store_contents
 
         # assigned by `make_persistent`
-        self._workflow = None
-        self._value_group_idx = None
+        self._workflow: Workflow | None = None
+        self._value_group_idx: int | None = None
 
         # assigned by parent `ElementSet`
-        self._element_set = None
+        self._element_set: ElementSet | None = None
 
-    def __deepcopy__(self, memo):
+    def __deepcopy__(self, memo: dict | None) -> Self:
         kwargs = self.to_dict()
         value_group_idx = kwargs.pop("value_group_idx")
         obj = self.__class__(**copy.deepcopy(kwargs, memo))
@@ -549,16 +499,27 @@ class _FileContentsSpecifier(JSONLike):
         obj._element_set = self._element_set
         return obj
 
-
-
+    @property
+    def normalised_path(self) -> str:
+        """
+        Full workflow value path to the file.
+
+        Note
+        ----
+        This is not the same as the path in the filesystem, but is closely
+        related.
+        """
+        return str(self._path) if self._path else "."
+
+    @override
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        out = super()._postprocess_to_dict(d)
         if "_workflow" in out:
             del out["_workflow"]
-
-        out = {k.lstrip("_"): v for k, v in out.items()}
-        return out
+        return {k.lstrip("_"): v for k, v in out.items()}
 
     @classmethod
-    def _json_like_constructor(cls, json_like):
+    def _json_like_constructor(cls, json_like: dict[str, Any]) -> Self:
         """Invoked by `JSONLike.from_json_like` instead of `__init__`."""
 
         _value_group_idx = json_like.pop("value_group_idx", None)
@@ -567,7 +528,7 @@ class _FileContentsSpecifier(JSONLike):
 
         return obj
 
-    def _get_members(self, ensure_contents=False):
+    def _get_members(self, ensure_contents: bool = False) -> dict[str, Any]:
         out = self.to_dict()
         del out["value_group_idx"]
 
@@ -578,9 +539,9 @@ class _FileContentsSpecifier(JSONLike):
 
     def make_persistent(
         self,
-        workflow:
-        source:
-    ) ->
+        workflow: Workflow,
+        source: ParamSource,
+    ) -> tuple[str, list[int], bool]:
         """Save to a persistent workflow.
 
         Returns
@@ -594,9 +555,9 @@ class _FileContentsSpecifier(JSONLike):
         if self._value_group_idx is not None:
             data_ref = self._value_group_idx
             is_new = False
-            if not workflow.
+            if not workflow.check_parameters_exist(data_ref):
                 raise RuntimeError(
-                    f"{self.__class__.__name__} has a
+                    f"{self.__class__.__name__} has a data reference "
                     f"({data_ref}), but does not exist in the workflow."
                 )
             # TODO: log if already persistent.
@@ -618,32 +579,51 @@ class _FileContentsSpecifier(JSONLike):
             self._workflow = workflow
             self._path = None
             self._contents = None
-            self._extension =
-            self._store_contents =
+            self._extension = ""
+            self._store_contents = True
 
         return (self.normalised_path, [data_ref], is_new)
 
-
+    @overload
+    def _get_value(self, value_name: None = None) -> dict[str, Any]:
+        ...
+
+    @overload
+    def _get_value(self, value_name: str) -> Any:
+        ...
+
+    def _get_value(self, value_name: str | None = None) -> Any:
         # TODO: fix
+        assert self._value_group_idx is None
         if self._value_group_idx is not None:
-
+            from ..persistence.zarr import ZarrPersistentStore
+
+            assert isinstance(self.workflow._store, ZarrPersistentStore)
+            # FIXME: Next two lines are both thoroughly broken, but at least resolve to something
+            grp = self.workflow._store._get_parameter_group(self._value_group_idx)
             val = zarr_decode(grp)
         else:
             val = self._get_members(ensure_contents=(value_name == "contents"))
         if value_name:
-
+            return val.get(value_name)
 
         return val
 
-    def read_contents(self):
+    def read_contents(self) -> str:
         """
         Get the actual contents of the file.
         """
-        with self.
+        with self.__path.open("r") as fh:
             return fh.read()
 
     @property
-    def
+    def __path(self) -> Path:
+        path = self._get_value("path")
+        assert path is not None
+        return Path(path)
+
+    @property
+    def path(self) -> Path | None:
         """
         The path to the file.
         """
@@ -651,42 +631,44 @@ class _FileContentsSpecifier(JSONLike):
         return Path(path) if path else None
 
     @property
-    def store_contents(self):
+    def store_contents(self) -> Any:
         """
         Whether the file's contents are stored in the workflow's persistent store.
         """
         return self._get_value("store_contents")
 
     @property
-    def contents(self):
+    def contents(self) -> str:
         """
         The contents of the file.
         """
         if self.store_contents:
-
+            return self._get_value("contents")
         else:
-
-
-            return contents
+            return self.read_contents()
 
     @property
-    def extension(self):
+    def extension(self) -> str:
         """
         The extension of the file.
         """
         return self._get_value("extension")
 
     @property
-    def workflow(self) ->
+    def workflow(self) -> Workflow:
         """
         The owning workflow.
         """
         if self._workflow:
             return self._workflow
         elif self._element_set:
-
+            w_tmpl = self._element_set.task_template.workflow_template
+            if w_tmpl and w_tmpl.workflow:
+                return w_tmpl.workflow
+        raise NotImplementedError
 
 
+@hydrate
 class InputFile(_FileContentsSpecifier):
     """
     An input file.
@@ -705,7 +687,7 @@ class InputFile(_FileContentsSpecifier):
         Are the file's contents to be cached in the workflow persistent store?
     """
 
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="file",
             class_name="FileSpec",
@@ -716,30 +698,29 @@ class InputFile(_FileContentsSpecifier):
 
     def __init__(
        self,
-        file:
-        path:
-        contents:
-        extension:
-        store_contents:
-    ):
-
-
-
-
+        file: FileSpec | str,
+        path: Path | str | None = None,
+        contents: str | None = None,
+        extension: str = "",
+        store_contents: bool = True,
+    ) -> None:
+        if not isinstance(file, FileSpec):
+            files: CommandFilesList = self._app.command_files
+            self.file = files.get(file)
+        else:
+            self.file = file
 
         super().__init__(path, contents, extension, store_contents)
 
-    def
-
-
-
-    def _get_members(self, ensure_contents=False, use_file_label=False):
+    def _get_members(
+        self, ensure_contents: bool = False, use_file_label: bool = False
+    ) -> dict[str, Any]:
         out = super()._get_members(ensure_contents)
         if use_file_label:
             out["file"] = self.file.label
         return out
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         val_grp_idx = ""
         if self._value_group_idx is not None:
             val_grp_idx = f", value_group_idx={self._value_group_idx}"
@@ -757,24 +738,18 @@ class InputFile(_FileContentsSpecifier):
         )
 
     @property
-    def normalised_files_path(self):
+    def normalised_files_path(self) -> str:
         """
         Standard name for the file within the workflow.
         """
         return self.file.label
 
     @property
-    def normalised_path(self):
-        """
-        Full workflow value path to the file.
-
-        Note
-        ----
-        This is not the same as the path in the filesystem.
-        """
+    def normalised_path(self) -> str:
         return f"input_files.{self.normalised_files_path}"
 
 
+@hydrate
 class InputFileGeneratorSource(_FileContentsSpecifier):
     """
     The source of code for use in an input file generator.
@@ -784,7 +759,7 @@ class InputFileGeneratorSource(_FileContentsSpecifier):
     generator:
         How to generate the file.
     path:
-        Path to the file.
+        Path to the file to generate.
     contents:
         Contents of the file. Only used when recreating this object.
     extension:
@@ -793,9 +768,9 @@ class InputFileGeneratorSource(_FileContentsSpecifier):
 
     def __init__(
         self,
-        generator:
-        path:
-        contents: str = None,
+        generator: InputFileGenerator,
+        path: Path | str | None = None,
+        contents: str | None = None,
         extension: str = "",
     ):
         #: How to generate the file.
@@ -803,6 +778,7 @@ class InputFileGeneratorSource(_FileContentsSpecifier):
         super().__init__(path, contents, extension)
 
 
+@hydrate
 class OutputFileParserSource(_FileContentsSpecifier):
     """
     The source of code for use in an output file parser.
@@ -812,7 +788,7 @@ class OutputFileParserSource(_FileContentsSpecifier):
     parser:
         How to parse the file.
     path: Path
-        Path to the file.
+        Path to the file to parse.
    contents:
         Contents of the file. Only used when recreating this object.
     extension:
@@ -821,9 +797,9 @@ class OutputFileParserSource(_FileContentsSpecifier):
 
     def __init__(
         self,
-        parser:
-        path:
-        contents: str = None,
+        parser: OutputFileParser,
+        path: Path | str | None = None,
+        contents: str | None = None,
         extension: str = "",
     ):
         #: How to parse the file.