hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a199__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__pyinstaller/hook-hpcflow.py +9 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/environments.yaml +1 -1
- hpcflow/sdk/__init__.py +26 -15
- hpcflow/sdk/app.py +2192 -768
- hpcflow/sdk/cli.py +506 -296
- hpcflow/sdk/cli_common.py +105 -7
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +115 -43
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +674 -318
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +125 -84
- hpcflow/sdk/config/types.py +148 -0
- hpcflow/sdk/core/__init__.py +25 -1
- hpcflow/sdk/core/actions.py +1771 -1059
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +139 -79
- hpcflow/sdk/core/command_files.py +263 -287
- hpcflow/sdk/core/commands.py +145 -112
- hpcflow/sdk/core/element.py +828 -535
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +455 -52
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +751 -347
- hpcflow/sdk/core/loop_cache.py +164 -47
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +1100 -627
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +21 -37
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +1649 -1339
- hpcflow/sdk/core/task_schema.py +308 -196
- hpcflow/sdk/core/test_utils.py +191 -114
- hpcflow/sdk/core/types.py +440 -0
- hpcflow/sdk/core/utils.py +485 -309
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +2544 -1178
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
- hpcflow/sdk/demo/cli.py +53 -33
- hpcflow/sdk/helper/cli.py +18 -15
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +122 -71
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +1360 -606
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +568 -188
- hpcflow/sdk/persistence/pending.py +382 -179
- hpcflow/sdk/persistence/store_resource.py +39 -23
- hpcflow/sdk/persistence/types.py +318 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +1337 -433
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +1651 -692
- hpcflow/sdk/submission/schedulers/__init__.py +167 -39
- hpcflow/sdk/submission/schedulers/direct.py +121 -81
- hpcflow/sdk/submission/schedulers/sge.py +170 -129
- hpcflow/sdk/submission/schedulers/slurm.py +291 -268
- hpcflow/sdk/submission/schedulers/utils.py +12 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +150 -29
- hpcflow/sdk/submission/shells/bash.py +283 -173
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +228 -170
- hpcflow/sdk/submission/submission.py +1014 -335
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +182 -12
- hpcflow/sdk/utils/arrays.py +71 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +12 -0
- hpcflow/sdk/utils/strings.py +33 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +27 -6
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +866 -85
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +12 -4
- hpcflow/tests/unit/test_action.py +262 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +33 -6
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +134 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +142 -16
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +50 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +1396 -84
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +29 -7
- hpcflow/tests/unit/test_persistence.py +237 -42
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +117 -6
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +23 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +38 -89
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +334 -1
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +160 -15
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6587 -3
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +8 -4
- hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
hpcflow/tests/unit/test_meta_task.py (new file)
@@ -0,0 +1,325 @@
+from textwrap import dedent
+
+import pytest
+
+from hpcflow.app import app as hf
+from hpcflow.sdk.config.errors import UnknownMetaTaskConstitutiveSchema
+
+
+def test_basic_meta_task_workflow(new_null_config, tmp_path):
+    wk_yaml = dedent(
+        """\
+        name: test_meta_task
+        template_components:
+          task_schemas:
+            - objective: s0
+              inputs:
+                - parameter: p1
+              outputs:
+                - parameter: p2
+              actions:
+                - commands:
+                    - command: echo "$((<<parameter:p1>> + 1))"
+                      stdout: <<int(parameter:p2)>>
+
+            - objective: s1
+              inputs:
+                - parameter: p2
+                - parameter: p2b
+              outputs:
+                - parameter: p3
+              actions:
+                - commands:
+                    - command: echo "$((<<parameter:p2>> + <<parameter:p2b>>))"
+                      stdout: <<int(parameter:p3)>>
+
+            - objective: s2
+              inputs:
+                - parameter: p3
+              outputs:
+                - parameter: p4
+              actions:
+                - commands:
+                    - command: echo "$((<<parameter:p3>> + 1))"
+                      stdout: <<int(parameter:p4)>>
+
+            - objective: s3
+              inputs:
+                - parameter: p4
+              outputs:
+                - parameter: p5
+              actions:
+                - commands:
+                    - command: echo "$((<<parameter:p4>> + 1))"
+                      stdout: <<int(parameter:p5)>>
+
+          meta_task_schemas:
+            - objective: system_analysis
+              inputs:
+                - parameter: p2
+              outputs:
+                - parameter: p4
+
+        meta_tasks:
+          system_analysis:
+            - schema: s1
+              inputs:
+                p2b: 220
+            - schema: s2
+
+        tasks:
+          - schema: s0
+            inputs:
+              p1: 100
+          - schema: system_analysis
+          - schema: s3
+        """
+    )
+    wk = hf.Workflow.from_YAML_string(wk_yaml, path=tmp_path)
+
+    # basic check of param dependencies
+    s0_di = wk.tasks.s0.elements[0].get_data_idx()
+    s1_di = wk.tasks.s1.elements[0].get_data_idx()
+    s2_di = wk.tasks.s2.elements[0].get_data_idx()
+    s3_di = wk.tasks.s3.elements[0].get_data_idx()
+
+    assert s0_di["outputs.p2"] == s1_di["inputs.p2"]
+    assert s1_di["outputs.p3"] == s2_di["inputs.p3"]
+    assert s2_di["outputs.p4"] == s3_di["inputs.p4"]
+
+
+def test_basic_meta_task_workflow_API(new_null_config, tmp_path):
+    """As above, but using the Python API."""
+    # normal task schemas:
+    s0 = hf.TaskSchema(
+        objective="s0",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaOutput("p2")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command='echo "$((<<parameter:p1>> + 1))"',
+                        stdout="<<int(parameter:p2)>>",
+                    )
+                ]
+            )
+        ],
+    )
+    s1 = hf.TaskSchema(
+        objective="s1",
+        inputs=[hf.SchemaInput("p2")],
+        outputs=[hf.SchemaOutput("p3")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command='echo "$((<<parameter:p2>> + 1))"',
+                        stdout="<<int(parameter:p3)>>",
+                    )
+                ]
+            )
+        ],
+    )
+    s2 = hf.TaskSchema(
+        objective="s2",
+        inputs=[hf.SchemaInput("p3")],
+        outputs=[hf.SchemaOutput("p4")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command='echo "$((<<parameter:p3>> + 1))"',
+                        stdout="<<int(parameter:p4)>>",
+                    )
+                ]
+            )
+        ],
+    )
+    s3 = hf.TaskSchema(
+        objective="s3",
+        inputs=[hf.SchemaInput("p4")],
+        outputs=[hf.SchemaOutput("p5")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command='echo "$((<<parameter:p4>> + 1))"',
+                        stdout="<<int(parameter:p5)>>",
+                    )
+                ]
+            )
+        ],
+    )
+
+    # meta-task schema:
+    ms = hf.MetaTaskSchema(
+        objective="system_analysis",
+        inputs=[hf.SchemaInput("p2")],
+        outputs=[hf.SchemaOutput("p4")],
+    )
+
+    # meta-task:
+    m1 = hf.MetaTask(
+        schema=ms,
+        tasks=[
+            hf.Task(schema=s1),
+            hf.Task(schema=s2),
+        ],
+    )
+
+    # workflow template tasks list:
+    tasks = [
+        hf.Task(schema=s0, inputs={"p1": 100}),
+        m1,
+        hf.Task(schema=s3),
+    ]
+
+    wk = hf.Workflow.from_template_data(
+        template_name="meta_task_workflow",
+        tasks=tasks,
+        path=tmp_path,
+    )
+
+    # basic check of param dependencies
+    s0_di = wk.tasks.s0.elements[0].get_data_idx()
+    s1_di = wk.tasks.s1.elements[0].get_data_idx()
+    s2_di = wk.tasks.s2.elements[0].get_data_idx()
+    s3_di = wk.tasks.s3.elements[0].get_data_idx()
+
+    assert s0_di["outputs.p2"] == s1_di["inputs.p2"]
+    assert s1_di["outputs.p3"] == s2_di["inputs.p3"]
+    assert s2_di["outputs.p4"] == s3_di["inputs.p4"]
+
+
+def test_meta_task_custom_parametrisation(new_null_config, tmp_path):
+    """Test customising the parametrisation of inputs, sequences, and resources within
+    the `tasks` list."""
+    wk_yaml = dedent(
+        """\
+        name: test_metatask_multi_element_sets_custom_parametrisation
+        template_components:
+          task_schemas:
+            - objective: s1
+              inputs:
+                - parameter: p1
+                - parameter: p2
+              outputs:
+                - parameter: p3
+              actions:
+                - commands:
+                    - command: echo "$((<<parameter:p1>> + <<parameter:p2>>))"
+                      stdout: <<int(parameter:p3)>>
+
+          meta_task_schemas:
+            - objective: system_analysis
+              inputs:
+                - parameter: p1
+                - parameter: p2
+              outputs:
+                - parameter: p3
+
+        meta_tasks:
+          system_analysis:
+            - schema: s1
+              element_sets:
+                - inputs:
+                    p1: 100
+                    p2: 200
+                - inputs:
+                    p1: 100
+                  sequences:
+                    - path: inputs.p2
+                      values: [200, 201]
+        tasks:
+          - schema: system_analysis
+            inputs:
+              s1: # should apply to first element set by default
+                p1: 101
+            resources:
+              s1: # should apply to first element set by default
+                any:
+                  num_cores: 2
+          - schema: system_analysis
+            inputs:
+              s1.0: # applies to first element set of s1
+                p1: 102
+              s1.1: # applies to second element set of s1
+                p1: 103
+            sequences:
+              s1.1: # sequences list in second element set is replaced with this list:
+                - path: inputs.p2
+                  values: [300, 301]
+        """
+    )
+    wk = hf.Workflow.from_YAML_string(wk_yaml, path=tmp_path)
+
+    assert wk.tasks.s1_1.template.element_sets[0].resources[0].num_cores == 2  # modified
+    assert (
+        wk.tasks.s1_2.template.element_sets[0].resources[0].num_cores is None
+    )  # unaffected
+
+    assert wk.tasks.s1_1.template.element_sets[0].inputs[0].value == 101  # modified
+    assert wk.tasks.s1_1.template.element_sets[0].inputs[1].value == 200  # unaffected
+    assert wk.tasks.s1_1.template.element_sets[1].sequences[0].values == [
+        200,
+        201,
+    ]  # unaffected
+
+    assert wk.tasks.s1_2.template.element_sets[0].inputs[0].value == 102  # modified
+    assert wk.tasks.s1_2.template.element_sets[1].inputs[0].value == 103  # modified
+    assert wk.tasks.s1_2.template.element_sets[1].sequences[0].values == [
+        300,
+        301,
+    ]  # modified
+
+
+def test_meta_task_custom_parametrisation_raises_on_bad_schema_name(
+    new_null_config, tmp_path
+):
+    wk_yaml = dedent(
+        """\
+        name: test_metatask_raise_on_bad_schema_name
+        template_components:
+          task_schemas:
+            - objective: s1
+              inputs:
+                - parameter: p1
+                - parameter: p2
+              outputs:
+                - parameter: p3
+              actions:
+                - commands:
+                    - command: echo "$((<<parameter:p1>> + <<parameter:p2>>))"
+                      stdout: <<int(parameter:p3)>>
+
+          meta_task_schemas:
+            - objective: system_analysis
+              inputs:
+                - parameter: p1
+                - parameter: p2
+              outputs:
+                - parameter: p3
+
+        meta_tasks:
+          system_analysis:
+            - schema: s1
+              element_sets:
+                - inputs:
+                    p1: 100
+                    p2: 200
+                - inputs:
+                    p1: 100
+                  sequences:
+                    - path: inputs.p2
+                      values: [200, 201]
+        tasks:
+          - schema: system_analysis
+            resources:
+              BAD_SCHEMA_NAME: # should raise!
+                any:
+                  num_cores: 2
+        """
+    )
+    with pytest.raises(UnknownMetaTaskConstitutiveSchema):
+        wk = hf.Workflow.from_YAML_string(wk_yaml, path=tmp_path)
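For reference, the element-set-qualified keys used in the second `system_analysis` task entry above follow a simple convention: a bare constitutive-schema name (`s1`) applies to that schema's first element set by default, `s1.0`/`s1.1` address element sets explicitly, and an unrecognised name raises `UnknownMetaTaskConstitutiveSchema`. A minimal Python rendering of that same mapping (a sketch with names taken from the test YAML, not additional API):

# Sketch: the same parametrisation as the second `system_analysis` task entry in the
# YAML above, expressed as plain Python data. Keys of the form "<schema>.<n>" target
# the n-th element set of that constitutive schema.
meta_task_parametrisation = {
    "schema": "system_analysis",
    "inputs": {
        "s1.0": {"p1": 102},  # first element set of s1
        "s1.1": {"p1": 103},  # second element set of s1
    },
    "sequences": {
        # replaces the sequences list of the second element set of s1
        "s1.1": [{"path": "inputs.p2", "values": [300, 301]}],
    },
}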
hpcflow/tests/unit/test_multi_path_sequences.py (new file)
@@ -0,0 +1,229 @@
+from __future__ import annotations
+from textwrap import dedent
+
+import numpy as np
+import pytest
+
+from hpcflow.app import app as hf
+from hpcflow.sdk.core.test_utils import make_schemas
+from hpcflow.sdk.core.utils import get_file_context
+
+
+def test_MPS_sequences():
+    mps = hf.MultiPathSequence(paths=("inputs.p1", "inputs.p2"), values=[[0, 1], [2, 3]])
+    assert mps.sequences == [
+        hf.ValueSequence(path="inputs.p1", values=[0, 1]),
+        hf.ValueSequence(path="inputs.p2", values=[2, 3]),
+    ]
+
+
+def test_MPS_sequences_moved_to_element_set():
+    mps = hf.MultiPathSequence(paths=("inputs.p1", "inputs.p2"), values=[[0, 1], [2, 3]])
+    es = hf.ElementSet(multi_path_sequences=[mps])
+    expected_mps_seqs = [
+        hf.ValueSequence(path="inputs.p1", values=[0, 1]),
+        hf.ValueSequence(path="inputs.p2", values=[2, 3]),
+    ]
+    assert mps._sequences is None
+    assert mps.sequence_indices == [0, 2]
+    assert es.sequences == mps.sequences
+    assert es.sequences == expected_mps_seqs
+
+
+def test_MPS_sequences_moved_to_element_set_with_existing_sequences():
+    mps = hf.MultiPathSequence(paths=("inputs.p1", "inputs.p2"), values=[[0, 1], [2, 3]])
+    seq = hf.ValueSequence(path="inputs.p0", values=[0, 1])
+    expected_mps_seqs = [
+        hf.ValueSequence(path="inputs.p1", values=[0, 1]),
+        hf.ValueSequence(path="inputs.p2", values=[2, 3]),
+    ]
+    es = hf.ElementSet(
+        sequences=[seq],
+        multi_path_sequences=[mps],
+    )
+    assert mps._sequences is None
+    assert mps.sequence_indices == [1, 3]
+    assert mps.sequences == expected_mps_seqs
+    assert es.sequences == [seq, *expected_mps_seqs]
+
+
+def test_MPS_sequences_moved_to_task_element_set():
+    mps = hf.MultiPathSequence(paths=("inputs.p1", "inputs.p2"), values=[[0, 1], [2, 3]])
+    s1 = make_schemas(({"p1": None, "p2": None}, ()))
+    t1 = hf.Task(s1, multi_path_sequences=[mps])
+    expected_mps_seqs = [
+        hf.ValueSequence(path="inputs.p1", values=[0, 1]),
+        hf.ValueSequence(path="inputs.p2", values=[2, 3]),
+    ]
+    es = t1.element_sets[0]
+    assert mps._sequences is None
+    assert mps.sequence_indices == [0, 2]
+    assert es.sequences == mps.sequences
+    assert es.sequences == expected_mps_seqs
+
+
+def test_MPS_sequences_moved_to_task_element_set_with_existing_sequences():
+    mps = hf.MultiPathSequence(paths=("inputs.p1", "inputs.p2"), values=[[0, 1], [2, 3]])
+    seq = hf.ValueSequence(path="inputs.p0", values=[0, 1])
+    s1 = make_schemas(({"p0": None, "p1": None, "p2": None}, ()))
+    t1 = hf.Task(s1, sequences=[seq], multi_path_sequences=[mps])
+    expected_mps_seqs = [
+        hf.ValueSequence(path="inputs.p1", values=[0, 1]),
+        hf.ValueSequence(path="inputs.p2", values=[2, 3]),
+    ]
+    es = t1.element_sets[0]
+    assert mps._sequences is None
+    assert mps.sequence_indices == [1, 3]
+    assert mps.sequences == expected_mps_seqs
+    assert es.sequences == [seq, *expected_mps_seqs]
+
+
+def test_MPS_sequence_element_inputs(null_config, tmp_path):
+    mps = hf.MultiPathSequence(paths=("inputs.p1", "inputs.p2"), values=[[0, 1], [2, 3]])
+    s1 = make_schemas(({"p1": None, "p2": None}, ()))
+    t1 = hf.Task(s1, multi_path_sequences=[mps])
+    wf = hf.Workflow.from_template_data(
+        tasks=[t1],
+        template_name="test_multi_path_sequence",
+        path=tmp_path,
+    )
+    assert len(wf.template.tasks[0].element_sets[0].sequences) == 2
+    assert wf.tasks[0].num_elements == 2
+    assert wf.tasks[0].elements[0].get("inputs") == {"p1": 0, "p2": 2}
+    assert wf.tasks[0].elements[1].get("inputs") == {"p1": 1, "p2": 3}
+
+
+def test_MPS_sequence_element_inputs_with_existing_sequence(null_config, tmp_path):
+    mps = hf.MultiPathSequence(paths=("inputs.p1", "inputs.p2"), values=[[0, 1], [2, 3]])
+    seq = hf.ValueSequence(path="inputs.p0", values=[0, 1])
+    s1 = make_schemas(({"p0": None, "p1": None, "p2": None}, ()))
+    t1 = hf.Task(s1, sequences=[seq], multi_path_sequences=[mps])
+    wf = hf.Workflow.from_template_data(
+        tasks=[t1],
+        template_name="test_multi_path_sequence",
+        path=tmp_path,
+    )
+    assert len(wf.template.tasks[0].element_sets[0].sequences) == 3
+    assert wf.tasks[0].num_elements == 2
+    assert wf.tasks[0].elements[0].get("inputs") == {"p0": 0, "p1": 0, "p2": 2}
+    assert wf.tasks[0].elements[1].get("inputs") == {"p0": 1, "p1": 1, "p2": 3}
+
+    # check the same on reload:
+    wf = wf.reload()
+    assert len(wf.template.tasks[0].element_sets[0].sequences) == 3
+    assert wf.tasks[0].num_elements == 2
+    assert wf.tasks[0].elements[0].get("inputs") == {"p0": 0, "p1": 0, "p2": 2}
+    assert wf.tasks[0].elements[1].get("inputs") == {"p0": 1, "p1": 1, "p2": 3}
+
+
+@pytest.mark.integration
+def test_MPS_element_outputs(null_config, tmp_path):
+    with get_file_context("hpcflow.tests.data", "multi_path_sequences.yaml") as file_path:
+        wf = hf.make_and_submit_workflow(
+            file_path,
+            path=tmp_path,
+            status=False,
+            add_to_known=False,
+            wait=True,
+        )
+        assert wf.tasks[0].num_elements == 2
+
+        p2 = wf.tasks[0].elements[0].outputs.p2
+        assert isinstance(p2, hf.ElementParameter)
+        assert p2.value == 302
+
+        p2 = wf.tasks[0].elements[1].outputs.p2
+        assert isinstance(p2, hf.ElementParameter)
+        assert p2.value == 304
+
+
+def test_MPS_latin_hypercube_sequence_values():
+    wft_yaml = dedent(
+        """\
+        name: test_latin_hypercube_sampling
+        template_components:
+          task_schemas:
+            - objective: define_p1
+              inputs:
+                - parameter: p1
+        tasks:
+          - schema: define_p1
+            inputs:
+              p1: {}
+            multi_path_sequences:
+              - paths: [inputs.p1.a, inputs.p1.b]
+                values::from_latin_hypercube:
+                  num_samples: 5
+        """
+    )
+    wft = hf.WorkflowTemplate.from_YAML_string(wft_yaml)
+    es = wft.tasks[0].element_sets[0]
+    assert len(es.multi_path_sequences) == 1
+    mps_values = np.asarray(es.multi_path_sequences[0].values)
+    assert mps_values.shape == (2, 5)
+    assert len(es.sequences) == 2
+    seq_1 = es.sequences[0]
+    seq_2 = es.sequences[1]
+    assert seq_1.path == "inputs.p1.a"
+    assert seq_2.path == "inputs.p1.b"
+    assert np.array_equal(np.asarray(seq_1.values), mps_values[0])
+    assert np.array_equal(np.asarray(seq_2.values), mps_values[1])
+
+
+def test_MPS_move_from_sequences_list():
+    wft_yaml = dedent(
+        """\
+        name: test_latin_hypercube_sampling
+        template_components:
+          task_schemas:
+            - objective: define_p1_p2_p3_p4
+              inputs:
+                - parameter: p1
+                - parameter: p2
+                - parameter: p3
+                - parameter: p4
+        tasks:
+          - schema: define_p1_p2_p3_p4
+            inputs:
+              p1: {}
+              p2: {}
+              p3: {}
+
+            multi_path_sequences:
+              - paths: [inputs.p1.a, inputs.p1.b]
+                values::from_latin_hypercube:
+                  num_samples: 4
+
+            sequences:
+              - paths: [inputs.p2.a, inputs.p2.b] # actually a multi-path sequence
+                values::from_latin_hypercube:
+                  num_samples: 4
+
+              - path: inputs.p4 # a normal sequence
+                values: [0, 1, 2, 3]
+
+              - paths: [inputs.p3.a, inputs.p3.b] # actually a multi-path sequence
+                values::from_latin_hypercube:
+                  num_samples: 4
+        """
+    )
+    wft = hf.WorkflowTemplate.from_YAML_string(wft_yaml)
+    es = wft.tasks[0].element_sets[0]
+    mps_lst = es.multi_path_sequences
+    seq_lst = es.sequences
+    assert len(mps_lst) == 3
+    assert len(seq_lst) == 7  # one original plus three multi-path with two paths each
+
+    # check ordering of multi-path sequences is preserved:
+    assert mps_lst[0].paths == ["inputs.p1.a", "inputs.p1.b"]
+    assert mps_lst[1].paths == ["inputs.p2.a", "inputs.p2.b"]
+    assert mps_lst[2].paths == ["inputs.p3.a", "inputs.p3.b"]
+
+    # check sensible ordering of sequences:
+    assert seq_lst[0].path == "inputs.p4"
+    assert seq_lst[1].path == "inputs.p1.a"
+    assert seq_lst[2].path == "inputs.p1.b"
+    assert seq_lst[3].path == "inputs.p2.a"
+    assert seq_lst[4].path == "inputs.p2.b"
+    assert seq_lst[5].path == "inputs.p3.a"
+    assert seq_lst[6].path == "inputs.p3.b"
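The behaviour these tests pin down is that a `MultiPathSequence` supplies one row of `values` per entry in `paths`, and is unpacked into ordinary `ValueSequence` objects on the owning element set. A minimal sketch assembled from the tests above (same constructor arguments as in the tests; zipping the rows column-wise gives one element per column):

from hpcflow.app import app as hf

# one values row per path: row 0 -> inputs.p1, row 1 -> inputs.p2
mps = hf.MultiPathSequence(paths=("inputs.p1", "inputs.p2"), values=[[0, 1], [2, 3]])
es = hf.ElementSet(multi_path_sequences=[mps])

# the multi-path sequence has been expanded into two per-path sequences on the element
# set; in a workflow this yields two elements: {"p1": 0, "p2": 2} and {"p1": 1, "p2": 3}
assert [seq.path for seq in es.sequences] == ["inputs.p1", "inputs.p2"]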
hpcflow/tests/unit/test_object_list.py
@@ -1,5 +1,6 @@
+from __future__ import annotations
 from dataclasses import dataclass
-
+from typing_extensions import TypedDict
 import pytest
 
 from hpcflow.app import app as hf
@@ -18,43 +19,47 @@ class MyObj:
     data: int
 
 
+class SimpleObjectList(TypedDict):
+    objects: list[MyObj]
+    object_list: DotAccessObjectList
+
+
 @pytest.fixture
-def simple_object_list(null_config):
+def simple_object_list(null_config) -> SimpleObjectList:
     my_objs = [MyObj(name="A", data=1), MyObj(name="B", data=2)]
     obj_list = DotAccessObjectList(my_objs, access_attribute="name")
-
-    return out
+    return {"objects": my_objs, "object_list": obj_list}
 
 
-def test_get_item(simple_object_list):
+def test_get_item(simple_object_list: SimpleObjectList):
     objects = simple_object_list["objects"]
     obj_list = simple_object_list["object_list"]
 
     assert obj_list[0] == objects[0] and obj_list[1] == objects[1]
 
 
-def test_get_dot_notation(simple_object_list):
+def test_get_dot_notation(simple_object_list: SimpleObjectList):
     objects = simple_object_list["objects"]
     obj_list = simple_object_list["object_list"]
 
     assert obj_list.A == objects[0] and obj_list.B == objects[1]
 
 
-def test_add_obj_to_end(simple_object_list):
+def test_add_obj_to_end(simple_object_list: SimpleObjectList):
     obj_list = simple_object_list["object_list"]
     new_obj = MyObj("C", 3)
     obj_list.add_object(new_obj)
     assert obj_list[-1] == new_obj
 
 
-def test_add_obj_to_start(simple_object_list):
+def test_add_obj_to_start(simple_object_list: SimpleObjectList):
     obj_list = simple_object_list["object_list"]
     new_obj = MyObj("C", 3)
     obj_list.add_object(new_obj, 0)
     assert obj_list[0] == new_obj
 
 
-def test_add_obj_to_middle(simple_object_list):
+def test_add_obj_to_middle(simple_object_list: SimpleObjectList):
     obj_list = simple_object_list["object_list"]
     new_obj = MyObj("C", 3)
     obj_list.add_object(new_obj, 1)
@@ -80,12 +85,12 @@ def test_get_obj_attr_custom_callable(null_config):
     assert o1.get(c1=2) == o1[0]
 
 
-def test_get_with_missing_key(null_config):
+def test_get_with_missing_key(null_config) -> None:
     o1 = ObjectList([{"a": 1}, {"b": 2}])
     assert o1.get(a=1) == {"a": 1}
 
 
-def test_parameters_list_get_equivalence(null_config):
+def test_parameters_list_get_equivalence(null_config) -> None:
     p_name = "p12334567898765432101"
     hf.parameters.add_object(hf.Parameter(p_name))
     assert p_name in hf.parameters.list_attrs()
@@ -97,7 +102,7 @@ def test_parameters_list_get_equivalence(null_config):
     )
 
 
-def test_parameters_list_get_equivalence_non_existent(new_null_config):
+def test_parameters_list_get_equivalence_non_existent(new_null_config) -> None:
     # non-existent parameters should be created, unlike other ObjectList sub-classes,
     # which raise
     hf.reload_template_components()
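The fixture rework above replaces an untyped return value with a small `TypedDict`, so the tests keep dict-style access while type checkers know each value's type. A minimal sketch of the same pattern, using hypothetical names and assuming pytest and typing_extensions are available:

from __future__ import annotations

import pytest
from typing_extensions import TypedDict


class FixtureData(TypedDict):  # hypothetical stand-in for SimpleObjectList above
    items: list[int]


@pytest.fixture
def fixture_data() -> FixtureData:
    # keys and value types are declared on the TypedDict, so fixture_data["items"]
    # is typed in the test below without changing the dict-style access pattern
    return {"items": [1, 2, 3]}


def test_items(fixture_data: FixtureData) -> None:
    assert fixture_data["items"][0] == 1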
|