hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a199__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their public registries.
Files changed (176)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +9 -6
  2. hpcflow/_version.py +1 -1
  3. hpcflow/app.py +1 -0
  4. hpcflow/data/scripts/bad_script.py +2 -0
  5. hpcflow/data/scripts/do_nothing.py +2 -0
  6. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  7. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  8. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  9. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  10. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  11. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  12. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  13. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  14. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  15. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  16. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  17. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  18. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  19. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  20. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  21. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  22. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  23. hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
  24. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  25. hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
  26. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  27. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  28. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  29. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  30. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  31. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  32. hpcflow/data/scripts/script_exit_test.py +5 -0
  33. hpcflow/data/template_components/environments.yaml +1 -1
  34. hpcflow/sdk/__init__.py +26 -15
  35. hpcflow/sdk/app.py +2192 -768
  36. hpcflow/sdk/cli.py +506 -296
  37. hpcflow/sdk/cli_common.py +105 -7
  38. hpcflow/sdk/config/__init__.py +1 -1
  39. hpcflow/sdk/config/callbacks.py +115 -43
  40. hpcflow/sdk/config/cli.py +126 -103
  41. hpcflow/sdk/config/config.py +674 -318
  42. hpcflow/sdk/config/config_file.py +131 -95
  43. hpcflow/sdk/config/errors.py +125 -84
  44. hpcflow/sdk/config/types.py +148 -0
  45. hpcflow/sdk/core/__init__.py +25 -1
  46. hpcflow/sdk/core/actions.py +1771 -1059
  47. hpcflow/sdk/core/app_aware.py +24 -0
  48. hpcflow/sdk/core/cache.py +139 -79
  49. hpcflow/sdk/core/command_files.py +263 -287
  50. hpcflow/sdk/core/commands.py +145 -112
  51. hpcflow/sdk/core/element.py +828 -535
  52. hpcflow/sdk/core/enums.py +192 -0
  53. hpcflow/sdk/core/environment.py +74 -93
  54. hpcflow/sdk/core/errors.py +455 -52
  55. hpcflow/sdk/core/execute.py +207 -0
  56. hpcflow/sdk/core/json_like.py +540 -272
  57. hpcflow/sdk/core/loop.py +751 -347
  58. hpcflow/sdk/core/loop_cache.py +164 -47
  59. hpcflow/sdk/core/object_list.py +370 -207
  60. hpcflow/sdk/core/parameters.py +1100 -627
  61. hpcflow/sdk/core/rule.py +59 -41
  62. hpcflow/sdk/core/run_dir_files.py +21 -37
  63. hpcflow/sdk/core/skip_reason.py +7 -0
  64. hpcflow/sdk/core/task.py +1649 -1339
  65. hpcflow/sdk/core/task_schema.py +308 -196
  66. hpcflow/sdk/core/test_utils.py +191 -114
  67. hpcflow/sdk/core/types.py +440 -0
  68. hpcflow/sdk/core/utils.py +485 -309
  69. hpcflow/sdk/core/validation.py +82 -9
  70. hpcflow/sdk/core/workflow.py +2544 -1178
  71. hpcflow/sdk/core/zarr_io.py +98 -137
  72. hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
  73. hpcflow/sdk/demo/cli.py +53 -33
  74. hpcflow/sdk/helper/cli.py +18 -15
  75. hpcflow/sdk/helper/helper.py +75 -63
  76. hpcflow/sdk/helper/watcher.py +61 -28
  77. hpcflow/sdk/log.py +122 -71
  78. hpcflow/sdk/persistence/__init__.py +8 -31
  79. hpcflow/sdk/persistence/base.py +1360 -606
  80. hpcflow/sdk/persistence/defaults.py +6 -0
  81. hpcflow/sdk/persistence/discovery.py +38 -0
  82. hpcflow/sdk/persistence/json.py +568 -188
  83. hpcflow/sdk/persistence/pending.py +382 -179
  84. hpcflow/sdk/persistence/store_resource.py +39 -23
  85. hpcflow/sdk/persistence/types.py +318 -0
  86. hpcflow/sdk/persistence/utils.py +14 -11
  87. hpcflow/sdk/persistence/zarr.py +1337 -433
  88. hpcflow/sdk/runtime.py +44 -41
  89. hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
  90. hpcflow/sdk/submission/jobscript.py +1651 -692
  91. hpcflow/sdk/submission/schedulers/__init__.py +167 -39
  92. hpcflow/sdk/submission/schedulers/direct.py +121 -81
  93. hpcflow/sdk/submission/schedulers/sge.py +170 -129
  94. hpcflow/sdk/submission/schedulers/slurm.py +291 -268
  95. hpcflow/sdk/submission/schedulers/utils.py +12 -2
  96. hpcflow/sdk/submission/shells/__init__.py +14 -15
  97. hpcflow/sdk/submission/shells/base.py +150 -29
  98. hpcflow/sdk/submission/shells/bash.py +283 -173
  99. hpcflow/sdk/submission/shells/os_version.py +31 -30
  100. hpcflow/sdk/submission/shells/powershell.py +228 -170
  101. hpcflow/sdk/submission/submission.py +1014 -335
  102. hpcflow/sdk/submission/types.py +140 -0
  103. hpcflow/sdk/typing.py +182 -12
  104. hpcflow/sdk/utils/arrays.py +71 -0
  105. hpcflow/sdk/utils/deferred_file.py +55 -0
  106. hpcflow/sdk/utils/hashing.py +16 -0
  107. hpcflow/sdk/utils/patches.py +12 -0
  108. hpcflow/sdk/utils/strings.py +33 -0
  109. hpcflow/tests/api/test_api.py +32 -0
  110. hpcflow/tests/conftest.py +27 -6
  111. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  112. hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
  113. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  114. hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
  115. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  116. hpcflow/tests/scripts/test_main_scripts.py +866 -85
  117. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  118. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  119. hpcflow/tests/shells/wsl/test_wsl_submission.py +12 -4
  120. hpcflow/tests/unit/test_action.py +262 -75
  121. hpcflow/tests/unit/test_action_rule.py +9 -4
  122. hpcflow/tests/unit/test_app.py +33 -6
  123. hpcflow/tests/unit/test_cache.py +46 -0
  124. hpcflow/tests/unit/test_cli.py +134 -1
  125. hpcflow/tests/unit/test_command.py +71 -54
  126. hpcflow/tests/unit/test_config.py +142 -16
  127. hpcflow/tests/unit/test_config_file.py +21 -18
  128. hpcflow/tests/unit/test_element.py +58 -62
  129. hpcflow/tests/unit/test_element_iteration.py +50 -1
  130. hpcflow/tests/unit/test_element_set.py +29 -19
  131. hpcflow/tests/unit/test_group.py +4 -2
  132. hpcflow/tests/unit/test_input_source.py +116 -93
  133. hpcflow/tests/unit/test_input_value.py +29 -24
  134. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  135. hpcflow/tests/unit/test_json_like.py +44 -35
  136. hpcflow/tests/unit/test_loop.py +1396 -84
  137. hpcflow/tests/unit/test_meta_task.py +325 -0
  138. hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
  139. hpcflow/tests/unit/test_object_list.py +17 -12
  140. hpcflow/tests/unit/test_parameter.py +29 -7
  141. hpcflow/tests/unit/test_persistence.py +237 -42
  142. hpcflow/tests/unit/test_resources.py +20 -18
  143. hpcflow/tests/unit/test_run.py +117 -6
  144. hpcflow/tests/unit/test_run_directories.py +29 -0
  145. hpcflow/tests/unit/test_runtime.py +2 -1
  146. hpcflow/tests/unit/test_schema_input.py +23 -15
  147. hpcflow/tests/unit/test_shell.py +23 -2
  148. hpcflow/tests/unit/test_slurm.py +8 -7
  149. hpcflow/tests/unit/test_submission.py +38 -89
  150. hpcflow/tests/unit/test_task.py +352 -247
  151. hpcflow/tests/unit/test_task_schema.py +33 -20
  152. hpcflow/tests/unit/test_utils.py +9 -11
  153. hpcflow/tests/unit/test_value_sequence.py +15 -12
  154. hpcflow/tests/unit/test_workflow.py +114 -83
  155. hpcflow/tests/unit/test_workflow_template.py +0 -1
  156. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  157. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  158. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  159. hpcflow/tests/unit/utils/test_patches.py +5 -0
  160. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  161. hpcflow/tests/workflows/__init__.py +0 -0
  162. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  163. hpcflow/tests/workflows/test_jobscript.py +334 -1
  164. hpcflow/tests/workflows/test_run_status.py +198 -0
  165. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  166. hpcflow/tests/workflows/test_submission.py +140 -0
  167. hpcflow/tests/workflows/test_workflows.py +160 -15
  168. hpcflow/tests/workflows/test_zip.py +18 -0
  169. hpcflow/viz_demo.ipynb +6587 -3
  170. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +8 -4
  171. hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
  172. hpcflow/sdk/core/parallel.py +0 -21
  173. hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
  174. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
  175. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
  176. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
hpcflow/tests/scripts/test_non_snippet_script.py
@@ -0,0 +1,46 @@
+ import os
+ import time
+
+ import pytest
+
+ from hpcflow.app import app as hf
+
+
+ @pytest.mark.integration
+ def test_non_snippet_script_execution(null_config, tmp_path):
+     test_str = "non-snippet script!"
+     script_name = "my_script.py"
+     script_contents = f'print("{test_str}")'
+
+     if os.name == "nt":
+         cmd = f"Set-Content -Path {script_name} -Value '{script_contents}'"
+     else:
+         cmd = f"echo '{script_contents}' > {script_name}"
+
+     act_1 = hf.Action(
+         commands=[hf.Command(cmd)],
+     )
+     act_2 = hf.Action(
+         script="my_script.py",
+         script_exe="python_script",
+         script_data_in="direct",
+         environments=[hf.ActionEnvironment(environment="python_env")],
+     )
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         actions=[act_1, act_2],
+     )
+
+     t1 = hf.Task(schema=s1, inputs={"p1": 101})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="non_snippet_script_test",
+         workflow_name="non_snippet_script_test",
+         overwrite=True,
+         path=tmp_path,
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     std_out = wk.submissions[0].jobscripts[0].direct_stdout_path.read_text().strip()
+     assert std_out.endswith(test_str)
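The test above relies on the generated my_script.py being nothing more than a one-line print: the first action writes the file via a shell command and the second action executes it as a non-snippet script. The same write-then-execute pattern can be reproduced outside hpcflow with the standard library alone; the sketch below is illustrative only and is not part of the diff.

    import subprocess
    import sys
    import tempfile
    from pathlib import Path

    # Reproduce the cross-platform "write a script, then run it" idea without hpcflow:
    # write my_script.py, run it with the current Python, and check its stdout.
    test_str = "non-snippet script!"
    with tempfile.TemporaryDirectory() as tmp:
        script = Path(tmp, "my_script.py")
        script.write_text(f'print("{test_str}")\n')
        result = subprocess.run(
            [sys.executable, str(script)], capture_output=True, text=True, check=True
        )
        assert result.stdout.strip().endswith(test_str)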
hpcflow/tests/scripts/test_ouput_file_parsers.py
@@ -0,0 +1,353 @@
+ import os
+ import time
+
+ import pytest
+ from hpcflow.app import app as hf
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_output_file_parser_parses_file(null_config, tmp_path):
+     out_file_name = "my_output_file.txt"
+     out_file = hf.FileSpec(label="my_output_file", name=out_file_name)
+
+     if os.name == "nt":
+         cmd = f"Set-Content -Path {out_file_name} -Value (<<parameter:p1>> + 100)"
+     else:
+         cmd = f"echo $(( <<parameter:p1>> + 100 )) > {out_file_name}"
+
+     act = hf.Action(
+         commands=[hf.Command(cmd)],
+         output_file_parsers=[
+             hf.OutputFileParser(
+                 output_files=[out_file],
+                 output=hf.Parameter("p2"),
+                 script="<<script:output_file_parser_basic.py>>",
+             ),
+         ],
+         environments=[hf.ActionEnvironment(environment="python_env")],
+     )
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[act],
+     )
+
+     p1_val = 101
+     p2_val_expected = p1_val + 100
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="output_file_parser_test",
+         path=tmp_path,
+     )
+
+     wk.submit(wait=True, add_to_known=False)
+
+     # check the command successfully generated the output file:
+     run_0 = wk.get_all_EARs()[0]
+     exec_path = run_0.get_directory()
+     out_file_path = exec_path.joinpath(out_file.name.name)
+     out_file_contents = out_file_path.read_text()
+     assert out_file_contents.strip() == str(p2_val_expected)
+
+     # check the output is parsed correctly:
+     assert wk.tasks[0].elements[0].outputs.p2.value == p2_val_expected
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_OFP_std_stream_redirect_on_exception(new_null_config, tmp_path):
+     """Test exceptions raised by the app during execution of an OFP script are printed to the
+     std-stream redirect file (and not the jobscript's standard error file)."""
+
+     # define a custom python environment which redefines the `WK_PATH` shell variable to
+     # a nonsense value so the app cannot load the workflow and thus raises an exception
+
+     app_caps = hf.package_name.upper()
+     if os.name == "nt":
+         env_cmd = f'$env:{app_caps}_WK_PATH = "nonsense_path"'
+     else:
+         env_cmd = f'export {app_caps}_WK_PATH="nonsense_path"'
+
+     env_cmd += "; python <<script_path>> <<args>>"
+     bad_env = hf.Environment(
+         name="bad_python_env",
+         executables=[
+             hf.Executable(
+                 label="python_script",
+                 instances=[
+                     hf.ExecutableInstance(
+                         command=env_cmd,
+                         num_cores=1,
+                         parallel_mode=None,
+                     )
+                 ],
+             )
+         ],
+     )
+     hf.envs.add_object(bad_env, skip_duplicates=True)
+
+     out_file_name = "my_output_file.txt"
+     out_file = hf.FileSpec(label="my_output_file", name=out_file_name)
+
+     if os.name == "nt":
+         cmd = f"Set-Content -Path {out_file_name} -Value (<<parameter:p1>> + 100)"
+     else:
+         cmd = f"echo $(( <<parameter:p1>> + 100 )) > {out_file_name}"
+
+     act = hf.Action(
+         commands=[hf.Command(cmd)],
+         output_file_parsers=[
+             hf.OutputFileParser(
+                 output_files=[out_file],
+                 output=hf.Parameter("p2"),
+                 script="<<script:output_file_parser_basic.py>>",
+             ),
+         ],
+         environments=[hf.ActionEnvironment(environment="bad_python_env")],
+     )
+
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[act],
+     )
+
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="output_file_parser_test",
+         path=tmp_path,
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     # jobscript stderr should be empty
+     assert not wk.submissions[0].jobscripts[0].direct_stderr_path.read_text()
+
+     # std stream file has workflow not found traceback
+     run = wk.get_all_EARs()[1]
+     std_stream_path = run.get_app_std_path()
+     assert std_stream_path.is_file()
+     assert "WorkflowNotFoundError" in std_stream_path.read_text()
+
+     hf.reload_template_components() # remove extra envs
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_OFP_std_out_std_err_not_redirected(null_config, tmp_path):
+     """Test that standard error and output streams from an OFP script are written to the jobscript
+     standard error and output files."""
+     out_file_name = "my_output_file.txt"
+     out_file = hf.FileSpec(label="my_output_file", name=out_file_name)
+
+     if os.name == "nt":
+         cmd = f"Set-Content -Path {out_file_name} -Value (<<parameter:p1>> + 100)"
+     else:
+         cmd = f"echo $(( <<parameter:p1>> + 100 )) > {out_file_name}"
+
+     act = hf.Action(
+         commands=[hf.Command(cmd)],
+         output_file_parsers=[
+             hf.OutputFileParser(
+                 output_files=[out_file],
+                 output=hf.Parameter("p2"),
+                 inputs=["p1"],
+                 script="<<script:output_file_parser_test_stdout_stderr.py>>",
+             ),
+         ],
+         environments=[hf.ActionEnvironment(environment="python_env")],
+     )
+
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[act],
+     )
+     p1_val = 101
+     stdout_msg = str(p1_val)
+     stderr_msg = str(p1_val)
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="ouput_file_parser_test",
+         path=tmp_path,
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     if wk.submissions[0].jobscripts[0].resources.combine_jobscript_std:
+         std_out_err = wk.submissions[0].jobscripts[0].direct_std_out_err_path.read_text()
+         assert std_out_err.strip() == f"{stdout_msg}\n{stderr_msg}"
+     else:
+         std_out = wk.submissions[0].jobscripts[0].direct_stdout_path.read_text()
+         std_err = wk.submissions[0].jobscripts[0].direct_stderr_path.read_text()
+         assert std_out.strip() == stdout_msg
+         assert std_err.strip() == stderr_msg
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_output_file_parser_pass_env_spec(null_config, tmp_path):
+     out_file_name = "my_output_file.txt"
+     out_file = hf.FileSpec(label="my_output_file", name=out_file_name)
+
+     if os.name == "nt":
+         cmd = f"Set-Content -Path {out_file_name} -Value (<<parameter:p1>> + 100)"
+     else:
+         cmd = f"echo $(( <<parameter:p1>> + 100 )) > {out_file_name}"
+
+     act = hf.Action(
+         commands=[hf.Command(cmd)],
+         output_file_parsers=[
+             hf.OutputFileParser(
+                 output_files=[out_file],
+                 output=hf.Parameter("p2"),
+                 script="<<script:env_specifier_test/output_file_parser_pass_env_spec.py>>",
+                 script_pass_env_spec=True,
+             ),
+         ],
+         environments=[hf.ActionEnvironment(environment="python_env")],
+     )
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[act],
+     )
+
+     t1 = hf.Task(schema=s1, inputs={"p1": 101})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="output_file_parser_pass_env_spec",
+         path=tmp_path,
+     )
+
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     std_out = wk.submissions[0].jobscripts[0].direct_stdout_path.read_text().strip()
+     assert std_out == "{'name': 'python_env'}"
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_env_specifier_in_output_file_parser_script_path(new_null_config, tmp_path):
+
+     py_env = hf.Environment(
+         name="python_env",
+         specifiers={"version": "v1"},
+         executables=[
+             hf.Executable(
+                 label="python_script",
+                 instances=[
+                     hf.ExecutableInstance(
+                         command="python <<script_path>> <<args>>",
+                         num_cores=1,
+                         parallel_mode=None,
+                     )
+                 ],
+             )
+         ],
+     )
+     hf.envs.add_object(py_env, skip_duplicates=True)
+
+     out_file_name = "my_output_file.txt"
+     out_file = hf.FileSpec(label="my_output_file", name=out_file_name)
+
+     if os.name == "nt":
+         cmd = f"Set-Content -Path {out_file_name} -Value (<<parameter:p1>> + 100)"
+     else:
+         cmd = f"echo $(( <<parameter:p1>> + 100 )) > {out_file_name}"
+
+     act = hf.Action(
+         commands=[hf.Command(cmd)],
+         output_file_parsers=[
+             hf.OutputFileParser(
+                 output_files=[out_file],
+                 output=hf.Parameter("p2"),
+                 script="<<script:env_specifier_test/<<env:version>>/output_file_parser_basic.py>>",
+             ),
+         ],
+         environments=[hf.ActionEnvironment(environment="python_env")],
+     )
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[act],
+     )
+
+     p1_val = 101
+     p2_val_expected = p1_val + 100
+     t1 = hf.Task(
+         schema=s1,
+         inputs={"p1": p1_val},
+         environments={"python_env": {"version": "v1"}},
+     )
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="output_file_parser_test_env_specifier",
+         path=tmp_path,
+     )
+
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     # check the command successfully generated the output file:
+     run_0 = wk.get_all_EARs()[0]
+     exec_path = run_0.get_directory()
+     out_file_path = exec_path.joinpath(out_file.name.name)
+     out_file_contents = out_file_path.read_text()
+     assert out_file_contents.strip() == str(p2_val_expected)
+
+     # check the output is parsed correctly:
+     assert wk.tasks[0].elements[0].outputs.p2.value == p2_val_expected
+
+     hf.reload_template_components() # remove extra envs
+
+
+ @pytest.mark.integration
+ def test_no_script_no_output_saves_files(null_config, tmp_path):
+     """Check we can use an output file parser with no script or output to save files."""
+     out_file_name = "my_output_file.txt"
+     out_file = hf.FileSpec(label="my_output_file", name=out_file_name)
+
+     if os.name == "nt":
+         cmd = f"Set-Content -Path {out_file_name} -Value (<<parameter:p1>> + 100)"
+     else:
+         cmd = f"echo $(( <<parameter:p1>> + 100 )) > {out_file_name}"
+
+     act = hf.Action(
+         commands=[hf.Command(cmd)],
+         output_file_parsers=[hf.OutputFileParser(output_files=[out_file])],
+         environments=[hf.ActionEnvironment(environment="python_env")],
+     )
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         actions=[act],
+     )
+
+     p1_val = 101
+     p2_val_expected = p1_val + 100
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="output_file_parser_test_no_output_no_script",
+         path=tmp_path,
+     )
+
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     # check the output file is saved to artifacts:
+     run_0 = wk.get_all_EARs()[0]
+     exec_path = run_0.get_directory()
+     out_file_path = exec_path.joinpath(out_file.name.name)
+     out_file_contents = out_file_path.read_text()
+     assert out_file_contents.strip() == str(p2_val_expected)
+
+     # check no scripts generated
+     assert not any(wk.submissions[0].scripts_path.iterdir())
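Each test in this new file builds the same platform-dependent command: PowerShell's Set-Content on Windows and an arithmetic echo $(( ... )) redirect on POSIX shells, both using hpcflow's <<parameter:p1>> placeholder syntax so that p2 ends up as p1 + 100. A hypothetical shared helper expressing that repeated pattern could look like the sketch below; the helper name is illustrative and is not part of the diff.

    import os


    def make_add_100_command(out_file_name: str, parameter: str = "p1") -> str:
        """Return a platform-appropriate command that writes (<<parameter:...>> + 100)
        into the given output file, using hpcflow's parameter placeholder syntax."""
        if os.name == "nt":
            # PowerShell evaluates the sum and writes it to the file
            return f"Set-Content -Path {out_file_name} -Value (<<parameter:{parameter}>> + 100)"
        # POSIX shells: arithmetic expansion redirected into the file
        return f"echo $(( <<parameter:{parameter}>> + 100 )) > {out_file_name}"


    # usage, as in the tests above: cmd = make_add_100_command("my_output_file.txt")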
hpcflow/tests/shells/wsl/test_wsl_submission.py
@@ -1,14 +1,22 @@
  import time
-
+ from pathlib import Path
  import pytest
-
+ from hpcflow.app import app as hf
  from hpcflow.sdk.core.test_utils import make_test_data_YAML_workflow


  @pytest.mark.wsl
- def test_workflow_1(tmp_path, null_config):
+ def test_workflow_1(tmp_path: Path, null_config):
      wk = make_test_data_YAML_workflow("workflow_1_wsl.yaml", path=tmp_path)
      wk.submit(wait=True, add_to_known=False)
      time.sleep(20) # TODO: bug! for some reason the new parameter isn't actually written
      # to disk when using WSL until several seconds after the workflow has finished!
-     assert wk.tasks[0].elements[0].outputs.p2.value == "201"
+     # this is probably because the NTFS filesystem is "sync'd" via polling in this case?
+     # so changes made on the NTFS files by WSL are not immediate on the Windows side.
+     # perhaps when we re-wire the wait command, we could add an option to wait on a
+     # parameter being set, which could watch the relevant chunk file for changes?
+
+     # ACTUALLY: I think wait is not working here at all for WSL... it's returning early!
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == "201"
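The new comments float the idea of replacing the fixed time.sleep(20) with a wait on the parameter actually being set. A minimal polling sketch of that idea is shown below; it uses only the standard library, the helper name is hypothetical, and it is not an hpcflow API or part of this diff.

    import time


    def wait_for(predicate, timeout=60.0, interval=0.5):
        """Poll `predicate` until it returns True or `timeout` seconds elapse."""
        deadline = time.monotonic() + timeout
        while time.monotonic() < deadline:
            if predicate():
                return True
            time.sleep(interval)
        return False


    # e.g., in place of the fixed sleep in the test above:
    # assert wait_for(lambda: wk.tasks[0].elements[0].outputs.p2.value == "201")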