hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a199__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__pyinstaller/hook-hpcflow.py +9 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/environments.yaml +1 -1
- hpcflow/sdk/__init__.py +26 -15
- hpcflow/sdk/app.py +2192 -768
- hpcflow/sdk/cli.py +506 -296
- hpcflow/sdk/cli_common.py +105 -7
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +115 -43
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +674 -318
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +125 -84
- hpcflow/sdk/config/types.py +148 -0
- hpcflow/sdk/core/__init__.py +25 -1
- hpcflow/sdk/core/actions.py +1771 -1059
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +139 -79
- hpcflow/sdk/core/command_files.py +263 -287
- hpcflow/sdk/core/commands.py +145 -112
- hpcflow/sdk/core/element.py +828 -535
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +455 -52
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +751 -347
- hpcflow/sdk/core/loop_cache.py +164 -47
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +1100 -627
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +21 -37
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +1649 -1339
- hpcflow/sdk/core/task_schema.py +308 -196
- hpcflow/sdk/core/test_utils.py +191 -114
- hpcflow/sdk/core/types.py +440 -0
- hpcflow/sdk/core/utils.py +485 -309
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +2544 -1178
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
- hpcflow/sdk/demo/cli.py +53 -33
- hpcflow/sdk/helper/cli.py +18 -15
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +122 -71
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +1360 -606
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +568 -188
- hpcflow/sdk/persistence/pending.py +382 -179
- hpcflow/sdk/persistence/store_resource.py +39 -23
- hpcflow/sdk/persistence/types.py +318 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +1337 -433
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +1651 -692
- hpcflow/sdk/submission/schedulers/__init__.py +167 -39
- hpcflow/sdk/submission/schedulers/direct.py +121 -81
- hpcflow/sdk/submission/schedulers/sge.py +170 -129
- hpcflow/sdk/submission/schedulers/slurm.py +291 -268
- hpcflow/sdk/submission/schedulers/utils.py +12 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +150 -29
- hpcflow/sdk/submission/shells/bash.py +283 -173
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +228 -170
- hpcflow/sdk/submission/submission.py +1014 -335
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +182 -12
- hpcflow/sdk/utils/arrays.py +71 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +12 -0
- hpcflow/sdk/utils/strings.py +33 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +27 -6
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +866 -85
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +12 -4
- hpcflow/tests/unit/test_action.py +262 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +33 -6
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +134 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +142 -16
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +50 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +1396 -84
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +29 -7
- hpcflow/tests/unit/test_persistence.py +237 -42
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +117 -6
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +23 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +38 -89
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +334 -1
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +160 -15
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6587 -3
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +8 -4
- hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/test_utils.py
CHANGED
```diff
@@ -2,68 +2,92 @@
 Utilities for making data to use in testing.
 """
 
+from __future__ import annotations
 from dataclasses import dataclass
-from importlib import resources
 from pathlib import Path
-from typing import
+from typing import Any, ClassVar, TYPE_CHECKING
 from hpcflow.app import app as hf
 from hpcflow.sdk.core.parameters import ParameterValue
-
-
-
+from hpcflow.sdk.core.utils import get_file_context
+from hpcflow.sdk.typing import hydrate
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Mapping
+    from typing_extensions import TypeAlias
+    from h5py import Group as HDFSGroup  # type: ignore
+    from .actions import Action
+    from .element import ElementGroup
+    from .loop import Loop
+    from .parameters import InputSource, Parameter
+    from .task import Task
+    from .task_schema import TaskSchema
+    from .types import Resources
+    from .workflow import Workflow, WorkflowTemplate
+    from ..app import BaseApp
+    from ..typing import PathLike
+# mypy: disable-error-code="no-untyped-def"
+
+#: A string or a tuple of strings.
+Strs: TypeAlias = "str | tuple[str, ...]"
+
+
+def make_schemas(
+    *ins_outs: tuple[dict[str, Any], tuple[str, ...]]
+    | tuple[dict[str, Any], tuple[str, ...], str]
+    | tuple[dict[str, Any], tuple[str, ...], str, dict[str, Any]]
+) -> list[TaskSchema]:
     """
     Construct a collection of schemas.
     """
-    out = []
+    out: list[TaskSchema] = []
     for idx, info in enumerate(ins_outs):
+        act_kwargs: dict[str, Any] = {}
         if len(info) == 2:
             (ins_i, outs_i) = info
             obj = f"t{idx}"
-
+        elif len(info) == 3:
             (ins_i, outs_i, obj) = info
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        else:
+            (ins_i, outs_i, obj, act_kwargs) = info
+
+        # distribute outputs over multiple commands' stdout:
+        cmds_lst = []
+        for out_idx, out_j in enumerate(outs_i):
+            cmd = hf.Command(
+                command=(
+                    "echo $(("
+                    + " + ".join(f"<<parameter:{i}>> + {100 + out_idx}" for i in ins_i)
+                    + "))"
+                ),
+                stdout=f"<<int(parameter:{out_j})>>",
+            )
+            cmds_lst.append(cmd)
+
+        if not outs_i:
+            # no outputs
+            cmds_lst = [
+                hf.Command(
+                    command=(
+                        "echo $(("
+                        + " + ".join(f"<<parameter:{i}>> + 100" for i in ins_i)
+                        + "))"
+                    ),
                 )
-                for out_i in outs_i[2:]
             ]
-        cmd = hf.Command(
-            " ".join(f"echo $((<<parameter:{i}>> + 100))" for i in ins_i.keys()),
-            stdout=stdout,
-            stderr=stderr,
-        )
 
-        act_i = hf.Action(
-            commands=[cmd],
-            output_file_parsers=out_file_parsers,
-            environments=[hf.ActionEnvironment("env_1")],
-        )
+        act_i = hf.Action(commands=cmds_lst, **act_kwargs)
         out.append(
             hf.TaskSchema(
                 objective=obj,
                 actions=[act_i],
                 inputs=[hf.SchemaInput(k, default_value=v) for k, v in ins_i.items()],
-                outputs=[hf.SchemaOutput(k) for k in outs_i],
+                outputs=[hf.SchemaOutput(hf.Parameter(k)) for k in outs_i],
             )
         )
-    if len(ins_outs) == 1 and not ret_list:
-        out = out[0]
     return out
 
 
-def make_parameters(num):
+def make_parameters(num: int) -> list[Parameter]:
     """
     Construct a sequence of parameters.
     """
```
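The reworked `make_schemas` is now variadic and always returns a list; the old single-spec shortcut (the removed `ret_list` branch) is gone. A minimal usage sketch based only on the signature above, with illustrative parameter names:

```python
from hpcflow.sdk.core.test_utils import make_schemas

# Each positional argument is (inputs, outputs), optionally followed by an
# objective name and then a dict of hf.Action keyword arguments.
schemas = make_schemas(
    ({"p1": None}, ("p2",)),                 # objective defaults to "t0"
    ({"p2": None}, ("p3",), "second_task"),  # explicit objective name
)
assert isinstance(schemas, list)  # always a list now, even for a single spec
```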
```diff
@@ -71,9 +95,9 @@ def make_parameters(num):
 
 
 def make_actions(
-    ins_outs:
-    env="env1",
-) ->
+    ins_outs: list[tuple[Strs, str] | tuple[Strs, str, str]],
+    env: str = "env1",
+) -> list[Action]:
     """
     Construct a collection of actions.
     """
@@ -82,7 +106,7 @@ def make_actions(
     for ins_outs_i in ins_outs:
         if len(ins_outs_i) == 2:
             ins, out = ins_outs_i
-            err = None
+            err: str | None = None
         else:
             ins, out, err = ins_outs_i
         if not isinstance(ins, tuple):
```
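The annotated `make_actions` signature pins down the spec shape: an input name (or tuple of names) plus an output name captured from stdout, with an optional third element that, going by the `err` variable above, appears to name an output captured from stderr. An illustrative call with hypothetical parameter names:

```python
acts = make_actions(
    [
        ("p1", "p2"),                # input p1; output p2 from stdout
        (("p1", "p3"), "p4", "p5"),  # two inputs; p5 presumably from stderr
    ],
    env="env1",
)
```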
```diff
@@ -103,14 +127,19 @@ def make_actions(
 
 
 def make_tasks(
-    schemas_spec
-
-
-
-
-
-
-)
+    schemas_spec: Iterable[
+        tuple[dict[str, Any], tuple[str, ...]]
+        | tuple[dict[str, Any], tuple[str, ...], str]
+    ],
+    local_inputs: dict[int, Iterable[str]] | None = None,
+    local_sequences: (
+        dict[int, Iterable[tuple[str, int, int | float | None]]] | None
+    ) = None,
+    local_resources: dict[int, dict[str, dict]] | None = None,
+    nesting_orders: dict[int, dict[str, float]] | None = None,
+    input_sources: dict[int, dict[str, list[InputSource]]] | None = None,
+    groups: dict[int, Iterable[ElementGroup]] | None = None,
+) -> list[Task]:
     """
     Construct a sequence of tasks.
     """
@@ -120,12 +149,12 @@ def make_tasks(
     nesting_orders = nesting_orders or {}
     input_sources = input_sources or {}
     groups = groups or {}
-    schemas = make_schemas(schemas_spec
-    tasks = []
+    schemas = make_schemas(*schemas_spec)
+    tasks: list[Task] = []
     for s_idx, s in enumerate(schemas):
         inputs = [
             hf.InputValue(hf.Parameter(i), value=int(i[1:]) * 100)
-            for i in local_inputs.get(s_idx,
+            for i in local_inputs.get(s_idx, ())
         ]
         seqs = [
             hf.ValueSequence(
@@ -133,7 +162,7 @@ def make_tasks(
                 values=[(int(i[0].split(".")[1][1:]) * 100) + j for j in range(i[1])],
                 nesting_order=i[2],
             )
-            for i in local_sequences.get(s_idx,
+            for i in local_sequences.get(s_idx, ())
         ]
         res = {k: v for k, v in local_resources.get(s_idx, {}).items()}
         task = hf.Task(
@@ -143,27 +172,32 @@ def make_tasks(
             resources=res,
             nesting_order=nesting_orders.get(s_idx, {}),
             input_sources=input_sources.get(s_idx, None),
-            groups=groups.get(s_idx),
+            groups=list(groups.get(s_idx, ())),
         )
         tasks.append(task)
     return tasks
 
 
 def make_workflow(
-    schemas_spec
-
-
-
-
-
-
-
-
-
-
-
-
+    schemas_spec: Iterable[
+        tuple[dict[str, Any], tuple[str, ...]]
+        | tuple[dict[str, Any], tuple[str, ...], str]
+    ],
+    path: PathLike,
+    local_inputs: dict[int, Iterable[str]] | None = None,
+    local_sequences: (
+        dict[int, Iterable[tuple[str, int, int | float | None]]] | None
+    ) = None,
+    local_resources: dict[int, dict[str, dict]] | None = None,
+    nesting_orders: dict[int, dict[str, float]] | None = None,
+    input_sources: dict[int, dict[str, list[InputSource]]] | None = None,
+    resources: Resources = None,
+    loops: list[Loop] | None = None,
+    groups: dict[int, Iterable[ElementGroup]] | None = None,
+    name: str = "w1",
+    overwrite: bool = False,
+    store: str = "zarr",
+) -> Workflow:
     """
     Construct a workflow.
     """
@@ -176,13 +210,12 @@ def make_workflow(
         input_sources=input_sources,
         groups=groups,
     )
-    template = {
+    template: Mapping[str, Any] = {
         "name": name,
         "tasks": tasks,
         "resources": resources,
+        **({"loops": loops} if loops else {}),
     }
-    if loops:
-        template["loops"] = loops
     wk = hf.Workflow.from_template(
         hf.WorkflowTemplate(**template),
         path=path,
```
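A sketch of the expanded `make_workflow` keyword interface, following the annotations above; the parameter names are hypothetical and `tmp_path` stands in for a pytest fixture:

```python
wk = make_workflow(
    schemas_spec=[
        ({"p1": None}, ("p2",)),
        ({"p2": None}, ("p3",), "t2"),
    ],
    path=tmp_path,
    local_inputs={0: ("p1",)},               # task 0 gets p1 = 100
    nesting_orders={1: {"inputs.p2": 0.0}},  # assuming "inputs.<name>" keys
    store="zarr",                            # the default
)
```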
```diff
@@ -194,36 +227,40 @@
 
 
 def make_test_data_YAML_workflow(
-    workflow_name
-
+    workflow_name: str,
+    path: PathLike,
+    app: BaseApp | None = None,
+    pkg: str = "hpcflow.tests.data",
+    **kwargs,
+) -> Workflow:
     """Generate a workflow whose template file is defined in the test data directory."""
-    app =
-
-
-    with script_ctx as file_path:
+    app = app or hf
+    with get_file_context(pkg, workflow_name) as file_path:
         return app.Workflow.from_YAML_file(YAML_path=file_path, path=path, **kwargs)
 
 
 def make_test_data_YAML_workflow_template(
-    workflow_name
-
+    workflow_name: str,
+    app: BaseApp | None = None,
+    pkg: str = "hpcflow.tests.data",
+    **kwargs,
+) -> WorkflowTemplate:
     """Generate a workflow template whose file is defined in the test data directory."""
-    app =
-
-
-    with script_ctx as file_path:
+    app = app or hf
+    with get_file_context(pkg, workflow_name) as file_path:
         return app.WorkflowTemplate.from_file(path=file_path, **kwargs)
 
 
 @dataclass
+@hydrate
 class P1_sub_parameter_cls(ParameterValue):
     """
     Parameter value handler: ``p1_sub``
     """
 
-    _typ = "p1_sub"
+    _typ: ClassVar[str] = "p1_sub"
 
-    e: int
+    e: int = 0
 
     def CLI_format(self) -> str:
         return str(self.e)
```
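Both test-data helpers now resolve the template file through `get_file_context` against a configurable package (defaulting to `hpcflow.tests.data`) rather than the removed `importlib.resources`/`script_ctx` logic. A usage sketch with a hypothetical template file name:

```python
wk = make_test_data_YAML_workflow("workflow_1.yaml", path=tmp_path)
tpl = make_test_data_YAML_workflow_template(
    "workflow_1.yaml", pkg="hpcflow.tests.data"
)
```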
```diff
@@ -232,25 +269,27 @@ class P1_sub_parameter_cls(ParameterValue):
     def twice_e(self):
         return self.e * 2
 
-    def prepare_JSON_dump(self) ->
+    def prepare_JSON_dump(self) -> dict[str, Any]:
         return {"e": self.e}
 
-    def dump_to_HDF5_group(self, group):
+    def dump_to_HDF5_group(self, group: HDFSGroup):
         group.attrs["e"] = self.e
 
 
 @dataclass
+@hydrate
 class P1_sub_parameter_cls_2(ParameterValue):
     """
     Parameter value handler: ``p1_sub_2``
     """
 
-    _typ = "p1_sub_2"
+    _typ: ClassVar[str] = "p1_sub_2"
 
-    f: int
+    f: int = 0
 
 
 @dataclass
+@hydrate
 class P1_parameter_cls(ParameterValue):
     """
     Parameter value handler: ``p1c``
@@ -260,12 +299,15 @@ class P1_parameter_cls(ParameterValue):
     This is a composite value handler.
     """
 
-    _typ = "p1c"
-    _sub_parameters
+    _typ: ClassVar[str] = "p1c"
+    _sub_parameters: ClassVar[dict[str, str]] = {
+        "sub_param": "p1_sub",
+        "sub_param_2": "p1_sub_2",
+    }
 
-    a: int
-    d:
-    sub_param:
+    a: int = 0
+    d: int | None = None
+    sub_param: P1_sub_parameter_cls | None = None
 
     def __post_init__(self):
         if self.sub_param is not None and not isinstance(
@@ -274,22 +316,22 @@ class P1_parameter_cls(ParameterValue):
             self.sub_param = P1_sub_parameter_cls(**self.sub_param)
 
     @classmethod
-    def from_data(cls, b, c):
+    def from_data(cls, b: int, c: int):
         return cls(a=b + c)
 
     @classmethod
-    def from_file(cls, path):
+    def from_file(cls, path: str):
         with Path(path).open("rt") as fh:
             lns = fh.readlines()
         a = int(lns[0])
         return cls(a=a)
 
     @property
-    def twice_a(self):
+    def twice_a(self) -> int:
         return self.a * 2
 
     @property
-    def sub_param_prop(self):
+    def sub_param_prop(self) -> P1_sub_parameter_cls:
         return P1_sub_parameter_cls(e=4 * self.a)
 
     def CLI_format(self) -> str:
```
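With every dataclass field now carrying a default (`e: int = 0`, `a: int = 0`, and so on), the test parameter classes can be constructed with no arguments, which the new `@hydrate` decorator presumably requires; a quick illustration:

```python
p = P1_parameter_cls()   # a=0, d=None, sub_param=None
q = P1_parameter_cls(a=3)
assert q.twice_a == 6
assert q.sub_param_prop == P1_sub_parameter_cls(e=12)  # e is 4 * a
```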
```diff
@@ -297,20 +339,18 @@ class P1_parameter_cls(ParameterValue):
 
     @staticmethod
     def CLI_format_group(*objs) -> str:
-
+        return ""
 
     @staticmethod
     def sum(*objs, **kwargs) -> str:
         return str(sum(i.a for i in objs))
 
-    def custom_CLI_format(
-
-
-
-        sub = 0 if sub is None else int(sub)
-        return str(self.a + add - sub)
+    def custom_CLI_format(self, add: str | None = None, sub: str | None = None) -> str:
+        add_i = 4 if add is None else int(add)
+        sub_i = 0 if sub is None else int(sub)
+        return str(self.a + add_i - sub_i)
 
-    def custom_CLI_format_prep(self, reps:
+    def custom_CLI_format_prep(self, reps: str | None = None) -> list[int]:
         """Used for testing custom object CLI formatting.
 
         For example, with a command like this:
@@ -318,11 +358,11 @@ class P1_parameter_cls(ParameterValue):
         `<<join[delim=","](parameter:p1c.custom_CLI_format_prep(reps=4))>>`.
 
         """
-
-        return [self.a] *
+        reps_int = 1 if reps is None else int(reps)
+        return [self.a] * reps_int
 
     @classmethod
-    def CLI_parse(cls, a_str: str, double:
+    def CLI_parse(cls, a_str: str, double: str = "", e: str | None = None):
         a = int(a_str)
         if double.lower() == "true":
             a *= 2
@@ -332,25 +372,25 @@ class P1_parameter_cls(ParameterValue):
         sub_param = None
         return cls(a=a, sub_param=sub_param)
 
-    def prepare_JSON_dump(self) ->
+    def prepare_JSON_dump(self) -> dict[str, Any]:
         sub_param_js = self.sub_param.prepare_JSON_dump() if self.sub_param else None
         return {"a": self.a, "d": self.d, "sub_param": sub_param_js}
 
-    def dump_to_HDF5_group(self, group):
+    def dump_to_HDF5_group(self, group: HDFSGroup):
         group.attrs["a"] = self.a
         if self.d is not None:
             group.attrs["d"] = self.d
         if self.sub_param:
-            sub_group = group.
+            sub_group = group.create_group("sub_param")
             self.sub_param.dump_to_HDF5_group(sub_group)
 
     @classmethod
-    def save_from_JSON(cls, data, param_id: int, workflow):
+    def save_from_JSON(cls, data: dict, param_id: int | list[int], workflow: Workflow):
         obj = cls(**data)  # TODO: pass sub-param
         workflow.set_parameter_value(param_id=param_id, value=obj, commit=True)
 
     @classmethod
-    def save_from_HDF5_group(cls, group, param_id: int, workflow):
+    def save_from_HDF5_group(cls, group: HDFSGroup, param_id: int, workflow: Workflow):
         a = group.attrs["a"].item()
         if "d" in group.attrs:
             d = group.attrs["d"].item()
@@ -364,3 +404,40 @@ class P1_parameter_cls(ParameterValue):
         sub_param = None
         obj = cls(a=a, d=d, sub_param=sub_param)
         workflow.set_parameter_value(param_id=param_id, value=obj, commit=True)
+
+
+def make_workflow_to_run_command(
+    command,
+    path,
+    outputs=None,
+    name="w1",
+    overwrite=False,
+    store="zarr",
+    requires_dir=False,
+):
+    """Generate a single-task single-action workflow that runs the specified command,
+    optionally generating some outputs."""
+
+    outputs = outputs or []
+    commands = [hf.Command(command=command)]
+    commands += [
+        hf.Command(command=f'echo "output_{out}"', stdout=f"<<parameter:{out}>>")
+        for out in outputs
+    ]
+    schema = hf.TaskSchema(
+        objective="run_command",
+        outputs=[hf.SchemaOutput(i) for i in outputs],
+        actions=[hf.Action(commands=commands, requires_dir=requires_dir)],
+    )
+    template = {
+        "name": name,
+        "tasks": [hf.Task(schema=schema)],
+    }
+    wk = hf.Workflow.from_template(
+        hf.WorkflowTemplate(**template),
+        path=path,
+        name=name,
+        overwrite=overwrite,
+        store=store,
+    )
+    return wk
```
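A usage sketch for the new `make_workflow_to_run_command` helper; the command, output name, and `tmp_path` are illustrative. Each entry in `outputs` appends an extra `echo` command whose stdout is captured as that parameter:

```python
wk = make_workflow_to_run_command(
    command="echo hello",
    path=tmp_path,
    outputs=["p1"],  # adds: echo "output_p1", captured as <<parameter:p1>>
)
wk.submit(wait=True)  # assuming the usual Workflow.submit interface
```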