hpcflow-new2 0.2.0a190__py3-none-any.whl → 0.2.0a200__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- hpcflow/__pyinstaller/hook-hpcflow.py +1 -0
- hpcflow/_version.py +1 -1
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/environments.yaml +1 -1
- hpcflow/sdk/__init__.py +5 -0
- hpcflow/sdk/app.py +166 -92
- hpcflow/sdk/cli.py +263 -84
- hpcflow/sdk/cli_common.py +99 -5
- hpcflow/sdk/config/callbacks.py +38 -1
- hpcflow/sdk/config/config.py +102 -13
- hpcflow/sdk/config/errors.py +19 -5
- hpcflow/sdk/config/types.py +3 -0
- hpcflow/sdk/core/__init__.py +25 -1
- hpcflow/sdk/core/actions.py +914 -262
- hpcflow/sdk/core/cache.py +76 -34
- hpcflow/sdk/core/command_files.py +14 -128
- hpcflow/sdk/core/commands.py +35 -6
- hpcflow/sdk/core/element.py +122 -50
- hpcflow/sdk/core/errors.py +58 -2
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/loop.py +408 -50
- hpcflow/sdk/core/loop_cache.py +4 -4
- hpcflow/sdk/core/parameters.py +382 -37
- hpcflow/sdk/core/run_dir_files.py +13 -40
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +119 -30
- hpcflow/sdk/core/task_schema.py +68 -0
- hpcflow/sdk/core/test_utils.py +66 -27
- hpcflow/sdk/core/types.py +54 -1
- hpcflow/sdk/core/utils.py +136 -19
- hpcflow/sdk/core/workflow.py +1587 -356
- hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
- hpcflow/sdk/demo/cli.py +7 -0
- hpcflow/sdk/helper/cli.py +1 -0
- hpcflow/sdk/log.py +42 -15
- hpcflow/sdk/persistence/base.py +405 -53
- hpcflow/sdk/persistence/json.py +177 -52
- hpcflow/sdk/persistence/pending.py +237 -69
- hpcflow/sdk/persistence/store_resource.py +3 -2
- hpcflow/sdk/persistence/types.py +15 -4
- hpcflow/sdk/persistence/zarr.py +928 -81
- hpcflow/sdk/submission/jobscript.py +1408 -489
- hpcflow/sdk/submission/schedulers/__init__.py +40 -5
- hpcflow/sdk/submission/schedulers/direct.py +33 -19
- hpcflow/sdk/submission/schedulers/sge.py +51 -16
- hpcflow/sdk/submission/schedulers/slurm.py +44 -16
- hpcflow/sdk/submission/schedulers/utils.py +7 -2
- hpcflow/sdk/submission/shells/base.py +68 -20
- hpcflow/sdk/submission/shells/bash.py +222 -129
- hpcflow/sdk/submission/shells/powershell.py +200 -150
- hpcflow/sdk/submission/submission.py +852 -119
- hpcflow/sdk/submission/types.py +18 -21
- hpcflow/sdk/typing.py +24 -5
- hpcflow/sdk/utils/arrays.py +71 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +12 -0
- hpcflow/sdk/utils/strings.py +33 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +19 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +821 -70
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -0
- hpcflow/tests/unit/test_action.py +176 -0
- hpcflow/tests/unit/test_app.py +20 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +133 -0
- hpcflow/tests/unit/test_config.py +122 -1
- hpcflow/tests/unit/test_element_iteration.py +47 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_loop.py +1332 -27
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
- hpcflow/tests/unit/test_parameter.py +13 -0
- hpcflow/tests/unit/test_persistence.py +190 -8
- hpcflow/tests/unit/test_run.py +109 -3
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_shell.py +20 -0
- hpcflow/tests/unit/test_submission.py +5 -76
- hpcflow/tests/unit/test_workflow_template.py +31 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +332 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +142 -2
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6587 -3
- {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/METADATA +7 -4
- hpcflow_new2-0.2.0a200.dist-info/RECORD +222 -0
- hpcflow_new2-0.2.0a190.dist-info/RECORD +0 -165
- {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/entry_points.txt +0 -0
hpcflow/tests/scripts/test_input_file_generators.py
@@ -0,0 +1,282 @@
+import os
+import time
+
+import pytest
+from hpcflow.app import app as hf
+
+
+@pytest.mark.integration
+@pytest.mark.skipif("hf.run_time_info.is_frozen")
+def test_input_file_generator_creates_file(null_config, tmp_path):
+
+    inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+
+    if os.name == "nt":
+        cmd = "Get-Content <<file:my_input_file>>"
+    else:
+        cmd = "cat <<file:my_input_file>>"
+
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        actions=[
+            hf.Action(
+                commands=[hf.Command(cmd)],
+                input_file_generators=[
+                    hf.InputFileGenerator(
+                        input_file=inp_file,
+                        inputs=[hf.Parameter("p1")],
+                        script="<<script:input_file_generator_basic.py>>",
+                    ),
+                ],
+                environments=[hf.ActionEnvironment(environment="python_env")],
+            )
+        ],
+    )
+    p1_val = 101
+    t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        template_name="input_file_generator_test",
+        path=tmp_path,
+    )
+    wk.submit(wait=True, add_to_known=False)
+
+    # check the input file is written
+    run_0 = wk.get_all_EARs()[0]
+    exec_path = run_0.get_directory()
+    inp_file_path = exec_path.joinpath(inp_file.name.name)
+    inp_file_contents = inp_file_path.read_text()
+    assert inp_file_contents.strip() == str(p1_val)
+
+    # check the command successfully printed the file contents to stdout:
+    std_out = wk.submissions[0].jobscripts[0].direct_stdout_path.read_text()
+    assert std_out.strip() == str(p1_val)
+
+
+@pytest.mark.integration
+@pytest.mark.skipif("hf.run_time_info.is_frozen")
+def test_IFG_std_stream_redirect_on_exception(new_null_config, tmp_path):
+    """Test exceptions raised by the app during execution of a IFG script are printed to the
+    std-stream redirect file (and not the jobscript's standard error file)."""
+
+    # define a custom python environment which redefines the `WK_PATH` shell variable to
+    # a nonsense value so the app cannot load the workflow and thus raises an exception
+
+    app_caps = hf.package_name.upper()
+    if os.name == "nt":
+        env_cmd = f'$env:{app_caps}_WK_PATH = "nonsense_path"'
+    else:
+        env_cmd = f'export {app_caps}_WK_PATH="nonsense_path"'
+
+    env_cmd += "; python <<script_path>> <<args>>"
+    bad_env = hf.Environment(
+        name="bad_python_env",
+        executables=[
+            hf.Executable(
+                label="python_script",
+                instances=[
+                    hf.ExecutableInstance(
+                        command=env_cmd,
+                        num_cores=1,
+                        parallel_mode=None,
+                    )
+                ],
+            )
+        ],
+    )
+    hf.envs.add_object(bad_env, skip_duplicates=True)
+
+    inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        actions=[
+            hf.Action(
+                input_file_generators=[
+                    hf.InputFileGenerator(
+                        input_file=inp_file,
+                        inputs=[hf.Parameter("p1")],
+                        script="<<script:input_file_generator_basic.py>>",
+                    ),
+                ],
+                environments=[hf.ActionEnvironment(environment="bad_python_env")],
+            )
+        ],
+    )
+
+    p1_val = 101
+    t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1], template_name="input_file_generator_test", path=tmp_path
+    )
+    wk.submit(wait=True, add_to_known=False, status=False)
+
+    # jobscript stderr should be empty
+    assert not wk.submissions[0].jobscripts[0].direct_stderr_path.read_text()
+
+    # std stream file has workflow not found traceback
+    run = wk.get_all_EARs()[0]
+    std_stream_path = run.get_app_std_path()
+    assert std_stream_path.is_file()
+    assert "WorkflowNotFoundError" in std_stream_path.read_text()
+
+    hf.reload_template_components()  # remove extra envs
+
+
+@pytest.mark.integration
+@pytest.mark.skipif("hf.run_time_info.is_frozen")
+def test_IFG_std_out_std_err_not_redirected(null_config, tmp_path):
+    """Test that standard error and output streams from an IFG script are written to the jobscript
+    standard error and output files."""
+    inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        actions=[
+            hf.Action(
+                input_file_generators=[
+                    hf.InputFileGenerator(
+                        input_file=inp_file,
+                        inputs=[hf.Parameter("p1")],
+                        script="<<script:input_file_generator_test_stdout_stderr.py>>",
+                    ),
+                ],
+                environments=[hf.ActionEnvironment(environment="python_env")],
+            )
+        ],
+    )
+    p1_val = 101
+    stdout_msg = str(p1_val)
+    stderr_msg = str(p1_val)
+    t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        template_name="input_file_generator_test",
+        path=tmp_path,
+    )
+    wk.submit(wait=True, add_to_known=False)
+
+    if wk.submissions[0].jobscripts[0].resources.combine_jobscript_std:
+        std_out_err = wk.submissions[0].jobscripts[0].direct_std_out_err_path.read_text()
+        assert std_out_err.strip() == f"{stdout_msg}\n{stderr_msg}"
+    else:
+        std_out = wk.submissions[0].jobscripts[0].direct_stdout_path.read_text()
+        std_err = wk.submissions[0].jobscripts[0].direct_stderr_path.read_text()
+        assert std_out.strip() == stdout_msg
+        assert std_err.strip() == stderr_msg
+
+
+@pytest.mark.integration
+@pytest.mark.skipif("hf.run_time_info.is_frozen")
+def test_IFG_pass_env_spec(null_config, tmp_path):
+    inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+
+    if os.name == "nt":
+        cmd = "Get-Content <<file:my_input_file>>"
+    else:
+        cmd = "cat <<file:my_input_file>>"
+
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        actions=[
+            hf.Action(
+                commands=[hf.Command(cmd)],
+                input_file_generators=[
+                    hf.InputFileGenerator(
+                        input_file=inp_file,
+                        inputs=[hf.Parameter("p1")],
+                        script="<<script:env_specifier_test/input_file_generator_pass_env_spec.py>>",
+                        script_pass_env_spec=True,
+                    ),
+                ],
+                environments=[hf.ActionEnvironment(environment="python_env")],
+            )
+        ],
+    )
+    p1_val = 101
+    t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        template_name="input_file_generator_pass_env_spec",
+        path=tmp_path,
+    )
+    wk.submit(wait=True, add_to_known=False, status=False)
+
+    # check the command successfully printed the env spec and file contents to stdout:
+    std_out = wk.submissions[0].jobscripts[0].direct_stdout_path.read_text()
+    assert std_out.strip() == f"{{'name': 'python_env'}}\n{str(p1_val)}"
+
+
+@pytest.mark.integration
+@pytest.mark.skipif("hf.run_time_info.is_frozen")
+def test_env_specifier_in_input_file_generator_script_path(new_null_config, tmp_path):
+
+    py_env = hf.Environment(
+        name="python_env",
+        specifiers={"version": "v1"},
+        executables=[
+            hf.Executable(
+                label="python_script",
+                instances=[
+                    hf.ExecutableInstance(
+                        command="python <<script_path>> <<args>>",
+                        num_cores=1,
+                        parallel_mode=None,
+                    )
+                ],
+            )
+        ],
+    )
+    hf.envs.add_object(py_env, skip_duplicates=True)
+
+    inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+
+    if os.name == "nt":
+        cmd = "Get-Content <<file:my_input_file>>"
+    else:
+        cmd = "cat <<file:my_input_file>>"
+
+    s1 = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+        actions=[
+            hf.Action(
+                commands=[hf.Command(cmd)],
+                input_file_generators=[
+                    hf.InputFileGenerator(
+                        input_file=inp_file,
+                        inputs=[hf.Parameter("p1")],
+                        script="<<script:env_specifier_test/<<env:version>>/input_file_generator_basic.py>>",
+                    ),
+                ],
+                environments=[hf.ActionEnvironment(environment="python_env")],
+            )
+        ],
+    )
+    p1_val = 101
+    t1 = hf.Task(
+        schema=s1,
+        inputs={"p1": p1_val},
+        environments={"python_env": {"version": "v1"}},
+    )
+    wk = hf.Workflow.from_template_data(
+        tasks=[t1],
+        template_name="input_file_generator_test_env_specifier",
+        path=tmp_path,
+    )
+    wk.submit(wait=True, add_to_known=False, status=False)
+
+    # check the input file is written
+    run_0 = wk.get_all_EARs()[0]
+    exec_path = run_0.get_directory()
+    inp_file_path = exec_path.joinpath(inp_file.name.name)
+    inp_file_contents = inp_file_path.read_text()
+    assert inp_file_contents.strip() == str(p1_val)
+
+    # check the command successfully printed the file contents to stdout:
+    std_out = wk.submissions[0].jobscripts[0].direct_stdout_path.read_text()
+    assert std_out.strip() == str(p1_val)
+
+    hf.reload_template_components()  # remove extra envs