hpcflow 0.1.9__py3-none-any.whl → 0.2.0a271__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (275) hide show
  1. hpcflow/__init__.py +2 -11
  2. hpcflow/__pyinstaller/__init__.py +5 -0
  3. hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
  4. hpcflow/_version.py +1 -1
  5. hpcflow/app.py +43 -0
  6. hpcflow/cli.py +2 -462
  7. hpcflow/data/demo_data_manifest/__init__.py +3 -0
  8. hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
  9. hpcflow/data/jinja_templates/test/test_template.txt +8 -0
  10. hpcflow/data/programs/hello_world/README.md +1 -0
  11. hpcflow/data/programs/hello_world/hello_world.c +87 -0
  12. hpcflow/data/programs/hello_world/linux/hello_world +0 -0
  13. hpcflow/data/programs/hello_world/macos/hello_world +0 -0
  14. hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
  15. hpcflow/data/scripts/__init__.py +1 -0
  16. hpcflow/data/scripts/bad_script.py +2 -0
  17. hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
  18. hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
  19. hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
  20. hpcflow/data/scripts/do_nothing.py +2 -0
  21. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  22. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  23. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  24. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  25. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  26. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  27. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  28. hpcflow/data/scripts/generate_t1_file_01.py +7 -0
  29. hpcflow/data/scripts/import_future_script.py +7 -0
  30. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  31. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  32. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  33. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  34. hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
  35. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  36. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  37. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  38. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  39. hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
  40. hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
  41. hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
  42. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  43. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  44. hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
  45. hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
  46. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  47. hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
  48. hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
  49. hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
  50. hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
  51. hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
  52. hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
  53. hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
  54. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  55. hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
  56. hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
  57. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  58. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  59. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  60. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  61. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  62. hpcflow/data/scripts/parse_t1_file_01.py +4 -0
  63. hpcflow/data/scripts/script_exit_test.py +5 -0
  64. hpcflow/data/template_components/__init__.py +1 -0
  65. hpcflow/data/template_components/command_files.yaml +26 -0
  66. hpcflow/data/template_components/environments.yaml +13 -0
  67. hpcflow/data/template_components/parameters.yaml +14 -0
  68. hpcflow/data/template_components/task_schemas.yaml +139 -0
  69. hpcflow/data/workflows/workflow_1.yaml +5 -0
  70. hpcflow/examples.ipynb +1037 -0
  71. hpcflow/sdk/__init__.py +149 -0
  72. hpcflow/sdk/app.py +4266 -0
  73. hpcflow/sdk/cli.py +1479 -0
  74. hpcflow/sdk/cli_common.py +385 -0
  75. hpcflow/sdk/config/__init__.py +5 -0
  76. hpcflow/sdk/config/callbacks.py +246 -0
  77. hpcflow/sdk/config/cli.py +388 -0
  78. hpcflow/sdk/config/config.py +1410 -0
  79. hpcflow/sdk/config/config_file.py +501 -0
  80. hpcflow/sdk/config/errors.py +272 -0
  81. hpcflow/sdk/config/types.py +150 -0
  82. hpcflow/sdk/core/__init__.py +38 -0
  83. hpcflow/sdk/core/actions.py +3857 -0
  84. hpcflow/sdk/core/app_aware.py +25 -0
  85. hpcflow/sdk/core/cache.py +224 -0
  86. hpcflow/sdk/core/command_files.py +814 -0
  87. hpcflow/sdk/core/commands.py +424 -0
  88. hpcflow/sdk/core/element.py +2071 -0
  89. hpcflow/sdk/core/enums.py +221 -0
  90. hpcflow/sdk/core/environment.py +256 -0
  91. hpcflow/sdk/core/errors.py +1043 -0
  92. hpcflow/sdk/core/execute.py +207 -0
  93. hpcflow/sdk/core/json_like.py +809 -0
  94. hpcflow/sdk/core/loop.py +1320 -0
  95. hpcflow/sdk/core/loop_cache.py +282 -0
  96. hpcflow/sdk/core/object_list.py +933 -0
  97. hpcflow/sdk/core/parameters.py +3371 -0
  98. hpcflow/sdk/core/rule.py +196 -0
  99. hpcflow/sdk/core/run_dir_files.py +57 -0
  100. hpcflow/sdk/core/skip_reason.py +7 -0
  101. hpcflow/sdk/core/task.py +3792 -0
  102. hpcflow/sdk/core/task_schema.py +993 -0
  103. hpcflow/sdk/core/test_utils.py +538 -0
  104. hpcflow/sdk/core/types.py +447 -0
  105. hpcflow/sdk/core/utils.py +1207 -0
  106. hpcflow/sdk/core/validation.py +87 -0
  107. hpcflow/sdk/core/values.py +477 -0
  108. hpcflow/sdk/core/workflow.py +4820 -0
  109. hpcflow/sdk/core/zarr_io.py +206 -0
  110. hpcflow/sdk/data/__init__.py +13 -0
  111. hpcflow/sdk/data/config_file_schema.yaml +34 -0
  112. hpcflow/sdk/data/config_schema.yaml +260 -0
  113. hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
  114. hpcflow/sdk/data/files_spec_schema.yaml +5 -0
  115. hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
  116. hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
  117. hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
  118. hpcflow/sdk/demo/__init__.py +3 -0
  119. hpcflow/sdk/demo/cli.py +242 -0
  120. hpcflow/sdk/helper/__init__.py +3 -0
  121. hpcflow/sdk/helper/cli.py +137 -0
  122. hpcflow/sdk/helper/helper.py +300 -0
  123. hpcflow/sdk/helper/watcher.py +192 -0
  124. hpcflow/sdk/log.py +288 -0
  125. hpcflow/sdk/persistence/__init__.py +18 -0
  126. hpcflow/sdk/persistence/base.py +2817 -0
  127. hpcflow/sdk/persistence/defaults.py +6 -0
  128. hpcflow/sdk/persistence/discovery.py +39 -0
  129. hpcflow/sdk/persistence/json.py +954 -0
  130. hpcflow/sdk/persistence/pending.py +948 -0
  131. hpcflow/sdk/persistence/store_resource.py +203 -0
  132. hpcflow/sdk/persistence/types.py +309 -0
  133. hpcflow/sdk/persistence/utils.py +73 -0
  134. hpcflow/sdk/persistence/zarr.py +2388 -0
  135. hpcflow/sdk/runtime.py +320 -0
  136. hpcflow/sdk/submission/__init__.py +3 -0
  137. hpcflow/sdk/submission/enums.py +70 -0
  138. hpcflow/sdk/submission/jobscript.py +2379 -0
  139. hpcflow/sdk/submission/schedulers/__init__.py +281 -0
  140. hpcflow/sdk/submission/schedulers/direct.py +233 -0
  141. hpcflow/sdk/submission/schedulers/sge.py +376 -0
  142. hpcflow/sdk/submission/schedulers/slurm.py +598 -0
  143. hpcflow/sdk/submission/schedulers/utils.py +25 -0
  144. hpcflow/sdk/submission/shells/__init__.py +52 -0
  145. hpcflow/sdk/submission/shells/base.py +229 -0
  146. hpcflow/sdk/submission/shells/bash.py +504 -0
  147. hpcflow/sdk/submission/shells/os_version.py +115 -0
  148. hpcflow/sdk/submission/shells/powershell.py +352 -0
  149. hpcflow/sdk/submission/submission.py +1402 -0
  150. hpcflow/sdk/submission/types.py +140 -0
  151. hpcflow/sdk/typing.py +194 -0
  152. hpcflow/sdk/utils/arrays.py +69 -0
  153. hpcflow/sdk/utils/deferred_file.py +55 -0
  154. hpcflow/sdk/utils/hashing.py +16 -0
  155. hpcflow/sdk/utils/patches.py +31 -0
  156. hpcflow/sdk/utils/strings.py +69 -0
  157. hpcflow/tests/api/test_api.py +32 -0
  158. hpcflow/tests/conftest.py +123 -0
  159. hpcflow/tests/data/__init__.py +0 -0
  160. hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
  161. hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
  162. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  163. hpcflow/tests/data/workflow_1.json +10 -0
  164. hpcflow/tests/data/workflow_1.yaml +5 -0
  165. hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
  166. hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
  167. hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
  168. hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
  169. hpcflow/tests/programs/test_programs.py +180 -0
  170. hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
  171. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  172. hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
  173. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  174. hpcflow/tests/scripts/test_main_scripts.py +1361 -0
  175. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  176. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  177. hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
  178. hpcflow/tests/unit/test_action.py +1066 -0
  179. hpcflow/tests/unit/test_action_rule.py +24 -0
  180. hpcflow/tests/unit/test_app.py +132 -0
  181. hpcflow/tests/unit/test_cache.py +46 -0
  182. hpcflow/tests/unit/test_cli.py +172 -0
  183. hpcflow/tests/unit/test_command.py +377 -0
  184. hpcflow/tests/unit/test_config.py +195 -0
  185. hpcflow/tests/unit/test_config_file.py +162 -0
  186. hpcflow/tests/unit/test_element.py +666 -0
  187. hpcflow/tests/unit/test_element_iteration.py +88 -0
  188. hpcflow/tests/unit/test_element_set.py +158 -0
  189. hpcflow/tests/unit/test_group.py +115 -0
  190. hpcflow/tests/unit/test_input_source.py +1479 -0
  191. hpcflow/tests/unit/test_input_value.py +398 -0
  192. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  193. hpcflow/tests/unit/test_json_like.py +1247 -0
  194. hpcflow/tests/unit/test_loop.py +2674 -0
  195. hpcflow/tests/unit/test_meta_task.py +325 -0
  196. hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
  197. hpcflow/tests/unit/test_object_list.py +116 -0
  198. hpcflow/tests/unit/test_parameter.py +243 -0
  199. hpcflow/tests/unit/test_persistence.py +664 -0
  200. hpcflow/tests/unit/test_resources.py +243 -0
  201. hpcflow/tests/unit/test_run.py +286 -0
  202. hpcflow/tests/unit/test_run_directories.py +29 -0
  203. hpcflow/tests/unit/test_runtime.py +9 -0
  204. hpcflow/tests/unit/test_schema_input.py +372 -0
  205. hpcflow/tests/unit/test_shell.py +129 -0
  206. hpcflow/tests/unit/test_slurm.py +39 -0
  207. hpcflow/tests/unit/test_submission.py +502 -0
  208. hpcflow/tests/unit/test_task.py +2560 -0
  209. hpcflow/tests/unit/test_task_schema.py +182 -0
  210. hpcflow/tests/unit/test_utils.py +616 -0
  211. hpcflow/tests/unit/test_value_sequence.py +549 -0
  212. hpcflow/tests/unit/test_values.py +91 -0
  213. hpcflow/tests/unit/test_workflow.py +827 -0
  214. hpcflow/tests/unit/test_workflow_template.py +186 -0
  215. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  216. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  217. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  218. hpcflow/tests/unit/utils/test_patches.py +5 -0
  219. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  220. hpcflow/tests/unit/utils/test_strings.py +97 -0
  221. hpcflow/tests/workflows/__init__.py +0 -0
  222. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  223. hpcflow/tests/workflows/test_jobscript.py +355 -0
  224. hpcflow/tests/workflows/test_run_status.py +198 -0
  225. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  226. hpcflow/tests/workflows/test_submission.py +140 -0
  227. hpcflow/tests/workflows/test_workflows.py +564 -0
  228. hpcflow/tests/workflows/test_zip.py +18 -0
  229. hpcflow/viz_demo.ipynb +6794 -0
  230. hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
  231. hpcflow-0.2.0a271.dist-info/METADATA +65 -0
  232. hpcflow-0.2.0a271.dist-info/RECORD +237 -0
  233. {hpcflow-0.1.9.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
  234. hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
  235. hpcflow/api.py +0 -458
  236. hpcflow/archive/archive.py +0 -308
  237. hpcflow/archive/cloud/cloud.py +0 -47
  238. hpcflow/archive/cloud/errors.py +0 -9
  239. hpcflow/archive/cloud/providers/dropbox.py +0 -432
  240. hpcflow/archive/errors.py +0 -5
  241. hpcflow/base_db.py +0 -4
  242. hpcflow/config.py +0 -232
  243. hpcflow/copytree.py +0 -66
  244. hpcflow/data/examples/_config.yml +0 -14
  245. hpcflow/data/examples/damask/demo/1.run.yml +0 -4
  246. hpcflow/data/examples/damask/demo/2.process.yml +0 -29
  247. hpcflow/data/examples/damask/demo/geom.geom +0 -2052
  248. hpcflow/data/examples/damask/demo/load.load +0 -1
  249. hpcflow/data/examples/damask/demo/material.config +0 -185
  250. hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
  251. hpcflow/data/examples/damask/inputs/load.load +0 -1
  252. hpcflow/data/examples/damask/inputs/material.config +0 -185
  253. hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
  254. hpcflow/data/examples/damask/profiles/damask.yml +0 -4
  255. hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
  256. hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
  257. hpcflow/data/examples/damask/profiles/default.yml +0 -6
  258. hpcflow/data/examples/thinking.yml +0 -177
  259. hpcflow/errors.py +0 -2
  260. hpcflow/init_db.py +0 -37
  261. hpcflow/models.py +0 -2549
  262. hpcflow/nesting.py +0 -9
  263. hpcflow/profiles.py +0 -455
  264. hpcflow/project.py +0 -81
  265. hpcflow/scheduler.py +0 -323
  266. hpcflow/utils.py +0 -103
  267. hpcflow/validation.py +0 -167
  268. hpcflow/variables.py +0 -544
  269. hpcflow-0.1.9.dist-info/METADATA +0 -168
  270. hpcflow-0.1.9.dist-info/RECORD +0 -45
  271. hpcflow-0.1.9.dist-info/entry_points.txt +0 -8
  272. hpcflow-0.1.9.dist-info/top_level.txt +0 -1
  273. /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
  274. /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
  275. /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
@@ -0,0 +1,140 @@
1
+ import os
2
+ from pathlib import Path
3
+ import pytest
4
+ from hpcflow.app import app as hf
5
+
6
+
7
+ @pytest.mark.integration
8
+ def test_zarr_metadata_file_modification_times_many_jobscripts(null_config, tmp_path):
9
+ """Test that root group attributes are modified first, then individual jobscript
10
+ at-submit-metadata chunk files, then the submission at-submit-metadata group
11
+ attributes."""
12
+
13
+ num_js = 30
14
+ t1 = hf.Task(
15
+ schema=hf.task_schemas.test_t1_conditional_OS,
16
+ inputs={"p1": 100},
17
+ sequences=[
18
+ hf.ValueSequence(
19
+ path="resources.any.resources_id", values=list(range(num_js))
20
+ )
21
+ ],
22
+ )
23
+ wk = hf.Workflow.from_template_data(
24
+ template_name="test_zarr_metadata_attrs_modified_times",
25
+ path=tmp_path,
26
+ tasks=[t1],
27
+ store="zarr",
28
+ )
29
+ wk.submit(add_to_known=False, status=False, cancel=True)
30
+
31
+ mtime_meta_group = Path(wk.path).joinpath(".zattrs").stat().st_mtime
32
+ mtime_mid_jobscript_chunk = (
33
+ wk._store._get_jobscripts_at_submit_metadata_arr_path(0)
34
+ .joinpath(str(int(num_js / 2)))
35
+ .stat()
36
+ .st_mtime
37
+ )
38
+ mtime_submission_group = (
39
+ wk._store._get_submission_metadata_group_path(0)
40
+ .joinpath(".zattrs")
41
+ .stat()
42
+ .st_mtime
43
+ )
44
+ assert mtime_meta_group < mtime_mid_jobscript_chunk < mtime_submission_group
45
+
46
+
47
+ @pytest.mark.integration
48
+ def test_json_metadata_file_modification_times_many_jobscripts(null_config, tmp_path):
49
+ """Test that the metadata.json file is modified first, then the submissions.json
50
+ file."""
51
+
52
+ num_js = 30
53
+ t1 = hf.Task(
54
+ schema=hf.task_schemas.test_t1_conditional_OS,
55
+ inputs={"p1": 100},
56
+ sequences=[
57
+ hf.ValueSequence(
58
+ path="resources.any.resources_id", values=list(range(num_js))
59
+ )
60
+ ],
61
+ )
62
+ wk = hf.Workflow.from_template_data(
63
+ template_name="test_zarr_metadata_attrs_modified_times",
64
+ path=tmp_path,
65
+ tasks=[t1],
66
+ store="json",
67
+ )
68
+ wk.submit(add_to_known=False, status=False, cancel=True)
69
+
70
+ mtime_meta = Path(wk.path).joinpath("metadata.json").stat().st_mtime
71
+ mtime_subs = Path(wk.path).joinpath("submissions.json").stat().st_mtime
72
+ assert mtime_meta < mtime_subs
73
+
74
+
75
+ @pytest.mark.integration
76
+ def test_subission_start_end_times_equal_to_first_and_last_jobscript_start_end_times(
77
+ null_config, tmp_path
78
+ ):
79
+ num_js = 2
80
+ t1 = hf.Task(
81
+ schema=hf.task_schemas.test_t1_conditional_OS,
82
+ inputs={"p1": 100},
83
+ sequences=[
84
+ hf.ValueSequence(
85
+ path="resources.any.resources_id", values=list(range(num_js))
86
+ )
87
+ ],
88
+ )
89
+ wk = hf.Workflow.from_template_data(
90
+ template_name="test_subission_start_end_times",
91
+ path=tmp_path,
92
+ tasks=[t1],
93
+ )
94
+ wk.submit(wait=True, add_to_known=False, status=False)
95
+
96
+ sub = wk.submissions[0]
97
+ jobscripts = sub.jobscripts
98
+
99
+ assert len(jobscripts) == num_js
100
+
101
+ # submission has two jobscripts, so start time should be start time of first jobscript:
102
+ assert sub.start_time == jobscripts[0].start_time
103
+
104
+ # ...and end time should be end time of second jobscript:
105
+ assert sub.end_time == jobscripts[1].end_time
106
+
107
+
108
+ @pytest.mark.integration
109
+ def test_multiple_jobscript_functions_files(null_config, tmp_path):
110
+ if os.name == "nt":
111
+ shell_exes = ["powershell.exe", "pwsh.exe", "pwsh.exe"]
112
+ else:
113
+ shell_exes = ["/bin/bash", "bash", "bash"]
114
+ t1 = hf.Task(
115
+ schema=hf.task_schemas.test_t1_conditional_OS,
116
+ inputs={"p1": 100},
117
+ sequences=[
118
+ hf.ValueSequence(
119
+ path="resources.any.shell_args.executable",
120
+ values=shell_exes,
121
+ )
122
+ ],
123
+ )
124
+ wk = hf.Workflow.from_template_data(
125
+ template_name="test_multi_js_funcs_files",
126
+ path=tmp_path,
127
+ tasks=[t1],
128
+ store="json",
129
+ )
130
+ wk.submit(add_to_known=True, status=False, cancel=True)
131
+
132
+ sub_js = wk.submissions[0].jobscripts
133
+ assert len(sub_js) == 2
134
+
135
+ funcs_0 = sub_js[0].jobscript_functions_path
136
+ funcs_1 = sub_js[1].jobscript_functions_path
137
+
138
+ assert funcs_0.is_file()
139
+ assert funcs_1.is_file()
140
+ assert funcs_0 != funcs_1
@@ -0,0 +1,564 @@
1
+ import os
2
+ import sys
3
+ from pathlib import Path
4
+ import time
5
+ import pytest
6
+ from hpcflow.app import app as hf
7
+ from hpcflow.sdk.core.enums import EARStatus
8
+ from hpcflow.sdk.core.skip_reason import SkipReason
9
+ from hpcflow.sdk.core.test_utils import (
10
+ P1_parameter_cls as P1,
11
+ P1_sub_parameter_cls as P1_sub,
12
+ make_test_data_YAML_workflow,
13
+ )
14
+
15
+
16
+ @pytest.mark.integration
17
+ def test_workflow_1(tmp_path: Path, new_null_config):
18
+ wk = make_test_data_YAML_workflow("workflow_1.yaml", path=tmp_path)
19
+ wk.submit(wait=True, add_to_known=False)
20
+ p2 = wk.tasks[0].elements[0].outputs.p2
21
+ assert isinstance(p2, hf.ElementParameter)
22
+ assert p2.value == "201"
23
+
24
+
25
+ @pytest.mark.integration
26
+ def test_workflow_1_with_working_dir_with_spaces(tmp_path: Path, new_null_config):
27
+ workflow_dir = tmp_path / "sub path with spaces"
28
+ workflow_dir.mkdir()
29
+ wk = make_test_data_YAML_workflow("workflow_1.yaml", path=workflow_dir)
30
+ wk.submit(wait=True, add_to_known=False)
31
+ p2 = wk.tasks[0].elements[0].outputs.p2
32
+ assert isinstance(p2, hf.ElementParameter)
33
+ assert p2.value == "201"
34
+
35
+
36
+ @pytest.mark.integration
37
+ @pytest.mark.skipif(
38
+ sys.platform == "darwin", reason="fails/too slow; need to investigate"
39
+ )
40
+ def test_run_abort(tmp_path: Path, new_null_config):
41
+ wk = make_test_data_YAML_workflow("workflow_test_run_abort.yaml", path=tmp_path)
42
+ wk.submit(add_to_known=False)
43
+
44
+ # wait for the run to start;
45
+ # TODO: instead of this: we should add a `wait_to_start=RUN_ID` method to submit()
46
+ max_wait_iter = 15
47
+ aborted = False
48
+ for _ in range(max_wait_iter):
49
+ time.sleep(4)
50
+ try:
51
+ wk.abort_run() # single task and element so no need to disambiguate
52
+ except ValueError:
53
+ continue
54
+ else:
55
+ aborted = True
56
+ break
57
+ if not aborted:
58
+ raise RuntimeError("Could not abort the run")
59
+
60
+ wk.wait()
61
+ assert wk.tasks[0].outputs.is_finished[0].value == "true"
62
+
63
+
64
+ @pytest.mark.integration
65
+ @pytest.mark.parametrize("store", ["json", "zarr"])
66
+ def test_multi_command_action_stdout_parsing(null_config, tmp_path: Path, store: str):
67
+ if os.name == "nt":
68
+ cmds = [
69
+ "Write-Output (<<parameter:p1>> + 100)",
70
+ "Write-Output (<<parameter:p1>> + 200)",
71
+ ]
72
+ else:
73
+ cmds = [
74
+ 'echo "$((<<parameter:p1>> + 100))"',
75
+ 'echo "$((<<parameter:p1>> + 200))"',
76
+ ]
77
+ act = hf.Action(
78
+ commands=[
79
+ hf.Command(
80
+ command=cmds[0],
81
+ stdout="<<int(parameter:p2)>>",
82
+ ),
83
+ hf.Command(
84
+ command=cmds[1],
85
+ stdout="<<float(parameter:p3)>>",
86
+ ),
87
+ ]
88
+ )
89
+ s1 = hf.TaskSchema(
90
+ objective="t1",
91
+ actions=[act],
92
+ inputs=[hf.SchemaInput("p1")],
93
+ outputs=[hf.SchemaOutput("p2"), hf.SchemaOutput("p3")],
94
+ )
95
+ t1 = hf.Task(schema=[s1], inputs=[hf.InputValue("p1", 1)])
96
+ wk = hf.Workflow.from_template_data(
97
+ tasks=[t1],
98
+ template_name="wk2",
99
+ path=tmp_path,
100
+ store=store,
101
+ )
102
+ wk.submit(wait=True, add_to_known=False)
103
+ assert wk.tasks.t1.elements[0].get("outputs") == {"p2": 101, "p3": 201.0}
104
+
105
+
106
+ @pytest.mark.integration
107
+ @pytest.mark.parametrize("store", ["json", "zarr"])
108
+ def test_element_get_group(null_config, tmp_path: Path, store: str):
109
+ if os.name == "nt":
110
+ cmd = "Write-Output (<<parameter:p1c>> + 100)"
111
+ else:
112
+ cmd = 'echo "$((<<parameter:p1c>> + 100))"'
113
+ s1 = hf.TaskSchema(
114
+ objective="t1",
115
+ inputs=[hf.SchemaInput(parameter="p1c")],
116
+ outputs=[hf.SchemaOutput(parameter="p1c")],
117
+ actions=[
118
+ hf.Action(
119
+ commands=[
120
+ hf.Command(
121
+ command=cmd,
122
+ stdout="<<parameter:p1c.CLI_parse()>>",
123
+ )
124
+ ],
125
+ ),
126
+ ],
127
+ parameter_class_modules=["hpcflow.sdk.core.test_utils"],
128
+ )
129
+ s2 = hf.TaskSchema(
130
+ objective="t2",
131
+ inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"), group="my_group")],
132
+ )
133
+
134
+ t1 = hf.Task(
135
+ schema=s1,
136
+ inputs=[hf.InputValue("p1c", value=P1(a=10, sub_param=P1_sub(e=5)))],
137
+ sequences=[hf.ValueSequence("inputs.p1c.a", values=[20, 30], nesting_order=0)],
138
+ groups=[hf.ElementGroup(name="my_group")],
139
+ )
140
+ t2 = hf.Task(
141
+ schema=s2,
142
+ nesting_order={"inputs.p1c": 0},
143
+ )
144
+ wk = hf.Workflow.from_template_data(
145
+ tasks=[t1, t2],
146
+ template_name="w1",
147
+ path=tmp_path,
148
+ store=store,
149
+ )
150
+ wk.submit(wait=True, add_to_known=False)
151
+ assert wk.tasks.t2.num_elements == 1
152
+ assert wk.tasks.t2.elements[0].get("inputs.p1c") == [P1(a=120), P1(a=130)]
153
+
154
+
155
+ @pytest.mark.integration
156
+ def test_element_get_sub_object_group(null_config, tmp_path: Path):
157
+ if os.name == "nt":
158
+ cmd = "Write-Output (<<parameter:p1c>> + 100)"
159
+ else:
160
+ cmd = 'echo "$((<<parameter:p1c>> + 100))"'
161
+ s1 = hf.TaskSchema(
162
+ objective="t1",
163
+ inputs=[hf.SchemaInput(parameter="p1c")],
164
+ outputs=[hf.SchemaOutput(parameter="p1c")],
165
+ actions=[
166
+ hf.Action(
167
+ commands=[
168
+ hf.Command(
169
+ command=cmd,
170
+ stdout="<<parameter:p1c.CLI_parse(e=10)>>",
171
+ )
172
+ ],
173
+ ),
174
+ ],
175
+ parameter_class_modules=["hpcflow.sdk.core.test_utils"],
176
+ )
177
+ s2 = hf.TaskSchema(
178
+ objective="t2",
179
+ inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"), group="my_group")],
180
+ )
181
+
182
+ t1 = hf.Task(
183
+ schema=s1,
184
+ inputs=[hf.InputValue("p1c", value=P1(a=10, sub_param=P1_sub(e=5)))],
185
+ sequences=[hf.ValueSequence("inputs.p1c.a", values=[20, 30], nesting_order=0)],
186
+ groups=[hf.ElementGroup(name="my_group")],
187
+ )
188
+ t2 = hf.Task(
189
+ schema=s2,
190
+ nesting_order={"inputs.p1c": 0},
191
+ )
192
+ wk = hf.Workflow.from_template_data(
193
+ tasks=[t1, t2],
194
+ template_name="w1",
195
+ path=tmp_path,
196
+ )
197
+ wk.submit(wait=True, add_to_known=False)
198
+ assert wk.tasks.t2.num_elements == 1
199
+ assert wk.tasks.t2.elements[0].get("inputs.p1c.sub_param") == [
200
+ P1_sub(e=10),
201
+ P1_sub(e=10),
202
+ ]
203
+
204
+
205
+ @pytest.mark.integration
206
+ def test_element_get_sub_data_group(null_config, tmp_path: Path):
207
+ if os.name == "nt":
208
+ cmd = "Write-Output (<<parameter:p1c>> + 100)"
209
+ else:
210
+ cmd = 'echo "$((<<parameter:p1c>> + 100))"'
211
+ s1 = hf.TaskSchema(
212
+ objective="t1",
213
+ inputs=[hf.SchemaInput(parameter="p1c")],
214
+ outputs=[hf.SchemaOutput(parameter="p1c")],
215
+ actions=[
216
+ hf.Action(
217
+ commands=[
218
+ hf.Command(
219
+ command=cmd,
220
+ stdout="<<parameter:p1c.CLI_parse(e=10)>>",
221
+ )
222
+ ],
223
+ ),
224
+ ],
225
+ parameter_class_modules=["hpcflow.sdk.core.test_utils"],
226
+ )
227
+ s2 = hf.TaskSchema(
228
+ objective="t2",
229
+ inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"), group="my_group")],
230
+ )
231
+
232
+ t1 = hf.Task(
233
+ schema=s1,
234
+ inputs=[hf.InputValue("p1c", value=P1(a=10, sub_param=P1_sub(e=5)))],
235
+ sequences=[hf.ValueSequence("inputs.p1c.a", values=[20, 30], nesting_order=0)],
236
+ groups=[hf.ElementGroup(name="my_group")],
237
+ )
238
+ t2 = hf.Task(
239
+ schema=s2,
240
+ nesting_order={"inputs.p1c": 0},
241
+ )
242
+ wk = hf.Workflow.from_template_data(
243
+ tasks=[t1, t2],
244
+ template_name="w1",
245
+ path=tmp_path,
246
+ )
247
+ wk.submit(wait=True, add_to_known=False)
248
+ assert wk.tasks.t2.num_elements == 1
249
+ assert wk.tasks.t2.elements[0].get("inputs.p1c.a") == [120, 130]
250
+
251
+
252
+ @pytest.mark.integration
253
+ def test_input_source_labels_and_groups(null_config, tmp_path: Path):
254
+ """This is structurally the same as the `fit_yield_functions` MatFlow workflow."""
255
+ if os.name == "nt":
256
+ cmds = [
257
+ "Write-Output (<<parameter:p1>> + 100)",
258
+ "Write-Output (<<parameter:p2[one]>> + <<sum(parameter:p2[two])>>)",
259
+ ]
260
+ else:
261
+ cmds = [
262
+ 'echo "$((<<parameter:p1>> + 100))"',
263
+ 'echo "$((<<parameter:p2[one]>> + <<sum(parameter:p2[two])>>))"',
264
+ ]
265
+ s1 = hf.TaskSchema(
266
+ objective="t1",
267
+ inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
268
+ )
269
+ s2 = hf.TaskSchema(
270
+ objective="t2",
271
+ inputs=[hf.SchemaInput(parameter="p1")],
272
+ outputs=[hf.SchemaInput(parameter="p2")],
273
+ actions=[
274
+ hf.Action(
275
+ commands=[hf.Command(command=cmds[0], stdout="<<int(parameter:p2)>>")]
276
+ )
277
+ ],
278
+ )
279
+ s3 = hf.TaskSchema(
280
+ objective="t3",
281
+ inputs=[
282
+ hf.SchemaInput(
283
+ parameter="p2",
284
+ multiple=True,
285
+ labels={"one": {}, "two": {"group": "my_group"}},
286
+ ),
287
+ ],
288
+ outputs=[hf.SchemaInput(parameter="p3")],
289
+ actions=[
290
+ hf.Action(
291
+ commands=[hf.Command(command=cmds[1], stdout="<<int(parameter:p3)>>")]
292
+ )
293
+ ],
294
+ )
295
+ tasks = [
296
+ hf.Task(
297
+ schema=s1,
298
+ element_sets=[
299
+ hf.ElementSet(inputs=[hf.InputValue("p1", 1)]),
300
+ hf.ElementSet(
301
+ sequences=[
302
+ hf.ValueSequence(
303
+ path="inputs.p1",
304
+ values=[2, 3, 4],
305
+ nesting_order=0,
306
+ ),
307
+ ],
308
+ groups=[hf.ElementGroup(name="my_group")],
309
+ ),
310
+ ],
311
+ ),
312
+ hf.Task(
313
+ schema=s2,
314
+ nesting_order={"inputs.p1": 0},
315
+ ),
316
+ hf.Task(
317
+ schema=s3,
318
+ input_sources={
319
+ "p2[one]": [
320
+ hf.InputSource.task(
321
+ task_ref=1,
322
+ where=hf.Rule(path="inputs.p1", condition={"value.equal_to": 1}),
323
+ )
324
+ ],
325
+ "p2[two]": [
326
+ hf.InputSource.task(
327
+ task_ref=1,
328
+ where=hf.Rule(
329
+ path="inputs.p1", condition={"value.not_equal_to": 1}
330
+ ),
331
+ )
332
+ ],
333
+ },
334
+ ),
335
+ ]
336
+ wk = hf.Workflow.from_template_data(
337
+ tasks=tasks,
338
+ path=tmp_path,
339
+ template_name="wk0",
340
+ )
341
+ wk.submit(wait=True, add_to_known=False)
342
+ assert wk.tasks.t2.num_elements == 4
343
+ assert wk.tasks.t3.num_elements == 1
344
+ assert wk.tasks.t3.elements[0].outputs.p3.value == 410
345
+
346
+
347
+ @pytest.mark.integration
348
+ def test_loop_simple(null_config, tmp_path: Path):
349
+ if os.name == "nt":
350
+ cmd = "Write-Output (<<parameter:p1>> + 100)"
351
+ else:
352
+ cmd = 'echo "$((<<parameter:p1>> + 100))"'
353
+ s1 = hf.TaskSchema(
354
+ objective="t1",
355
+ inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
356
+ outputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
357
+ actions=[
358
+ hf.Action(commands=[hf.Command(command=cmd, stdout="<<int(parameter:p1)>>")]),
359
+ ],
360
+ )
361
+ wk = hf.Workflow.from_template_data(
362
+ tasks=[hf.Task(schema=s1, inputs=[hf.InputValue("p1", value=1)])],
363
+ loops=[hf.Loop(tasks=[0], num_iterations=3)],
364
+ path=tmp_path,
365
+ template_name="wk0",
366
+ )
367
+ wk.submit(wait=True, add_to_known=False)
368
+ assert wk.tasks.t1.elements[0].get("outputs.p1") == 301
369
+
370
+
371
@pytest.mark.integration
@pytest.mark.skip(reason="need to fix loop termination for multiple elements")
def test_loop_termination_multi_element(null_config, tmp_path: Path):
    """Loop over two elements where the termination rule is satisfied after a
    different number of iterations for each element.

    Element 0 (p1=1) needs all three iterations; element 1 (p1=2) crosses the
    threshold after two, so its third-iteration run should be skipped.
    """
    on_windows = os.name == "nt"
    # platform-specific shell commands: add 100 to p1, then print a message
    cmds = (
        [
            "Write-Output (<<parameter:p1>> + 100)",
            "Write-Output 'Hello from the second action!'",
        ]
        if on_windows
        else [
            'echo "$((<<parameter:p1>> + 100))"',
            'echo "Hello from the second action!"',
        ]
    )
    schema = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
        outputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
        actions=[
            hf.Action(
                commands=[hf.Command(command=cmds[0], stdout="<<int(parameter:p1)>>")]
            ),
            hf.Action(commands=[hf.Command(command=cmds[1])]),
        ],
    )
    # terminate once the output exceeds 201:
    loop = hf.Loop(
        tasks=[0],
        num_iterations=3,
        termination=hf.Rule(path="outputs.p1", condition={"value.greater_than": 201}),
    )
    wk = hf.Workflow.from_template_data(
        tasks=[
            hf.Task(
                schema=schema,
                sequences=[hf.ValueSequence("inputs.p1", values=[1, 2], nesting_order=0)],
            )
        ],
        loops=[loop],
        path=tmp_path,
        template_name="wk0",
    )
    wk.submit(wait=True, add_to_known=False)

    elem_0, elem_1 = wk.tasks.t1.elements[0], wk.tasks.t1.elements[1]

    # all three iterations needed for first element:
    for iter_idx in range(3):
        assert elem_0.iterations[iter_idx].action_runs[0].status is EARStatus.success

    # only first two iterations needed for second element:
    assert elem_1.iterations[0].action_runs[0].status is EARStatus.success
    assert elem_1.iterations[1].action_runs[0].status is EARStatus.success
    assert elem_1.iterations[2].action_runs[0].status is EARStatus.skipped
428
+
429
+
430
@pytest.mark.integration
def test_input_file_generator_no_errors_on_skip(null_config, tmp_path):
    """i.e. we don't try to save a file that hasn't been created because the run was
    skipped"""

    inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")

    # platform-specific commands: increment p0, then print the generated file:
    cmds = (
        (
            "Write-Output ((<<parameter:p0>> + 1))",
            "Get-Content <<file:my_input_file>>",
        )
        if os.name == "nt"
        else ('echo "$((<<parameter:p0>> + 1))"', "cat <<file:my_input_file>>")
    )

    # t1: p0 -> p1 via a single command:
    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p0"))],
        outputs=[hf.SchemaOutput(parameter=hf.Parameter("p1"))],
        actions=[
            hf.Action(
                commands=[hf.Command(command=cmds[0], stdout="<<parameter:p1>>")],
            )
        ],
    )

    # t2: p1 -> p0, where the command reads a file produced by an input file
    # generator script:
    s2 = hf.TaskSchema(
        objective="t2",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
        outputs=[hf.SchemaOutput(parameter=hf.Parameter("p0"))],
        actions=[
            hf.Action(
                commands=[hf.Command(cmds[1], stdout="<<int(parameter:p0)>>")],
                input_file_generators=[
                    hf.InputFileGenerator(
                        input_file=inp_file,
                        inputs=[hf.Parameter("p1")],
                        script="<<script:input_file_generator_basic.py>>",
                    ),
                ],
                environments=[hf.ActionEnvironment(environment="python_env")],
            )
        ],
    )

    p0_val = 100
    # loop terminates after the first iteration (p0 == 101), so iteration-two runs
    # should all be skipped:
    wk = hf.Workflow.from_template_data(
        tasks=[
            hf.Task(schema=s1, inputs={"p0": p0_val}),
            hf.Task(schema=s2),
        ],
        loops=[
            hf.Loop(
                tasks=[0, 1],
                num_iterations=2,
                termination={"path": "outputs.p0", "condition": {"value.equal_to": 101}},
            )
        ],
        template_name="input_file_generator_skip_test",
        path=tmp_path,
    )

    wk.submit(wait=True, add_to_known=False)

    # check correct runs are set to skip due to loop termination:
    runs = wk.get_all_EARs()
    expected_skip_reasons = (
        SkipReason.NOT_SKIPPED,
        SkipReason.NOT_SKIPPED,
        SkipReason.NOT_SKIPPED,
        SkipReason.LOOP_TERMINATION,
        SkipReason.LOOP_TERMINATION,
        SkipReason.LOOP_TERMINATION,
    )
    for run, expected in zip(runs, expected_skip_reasons):
        assert run.skip_reason is expected

    # run 4 is the input file generator of the second iteration, which should be skipped
    # check no error from trying to save the input file:
    std_stream_path = runs[4].get_app_std_path()
    if std_stream_path.is_file():
        assert "FileNotFoundError" not in std_stream_path.read_text()
506
+
507
+
508
@pytest.mark.integration
@pytest.mark.parametrize("store", ["zarr", "json"])
def test_get_text_file(null_config, tmp_path, store):
    """Check `Workflow.get_text_file` retrieves jobscript stdout via both a
    workflow-relative path and an absolute (URL-based) path, for both stores."""

    s1 = hf.TaskSchema("t1", actions=[hf.Action(commands=[hf.Command("echo 'hi!'")])])
    wk = hf.Workflow.from_template_data(
        tasks=[hf.Task(s1)], template_name="print_stdout", path=tmp_path, store=store
    )
    wk.submit(wait=True, add_to_known=False, status=False)

    # stdout/stderr may be combined into a single log file, depending on resources:
    combine = wk.submissions[0].jobscripts[0].resources.combine_jobscript_std
    filename = "js_0_std.log" if combine else "js_0_stdout.log"
    # BUG FIX: interpolate the computed `filename` into the path (it was previously
    # left unused, with a literal placeholder in the path string):
    rel_path = f"artifacts/submissions/0/js_std/0/{filename}"
    abs_path = f"{wk.url}/{rel_path}"

    assert wk.get_text_file(rel_path) == "hi!\n"
    assert wk.get_text_file(abs_path) == "hi!\n"
525
+
526
+
527
@pytest.mark.integration
def test_get_text_file_zarr_zip(null_config, tmp_path):
    """Check `Workflow.get_text_file` retrieves jobscript stdout from a zipped
    zarr-store workflow, via both relative and absolute paths."""

    s1 = hf.TaskSchema("t1", actions=[hf.Action(commands=[hf.Command("echo 'hi!'")])])
    wk = hf.Workflow.from_template_data(
        tasks=[hf.Task(s1)], template_name="print_stdout", path=tmp_path, store="zarr"
    )
    wk.submit(wait=True, add_to_known=False, status=False)

    # re-open the workflow from its zipped copy:
    wkz = hf.Workflow(wk.zip(path=tmp_path))

    # stdout/stderr may be combined into a single log file, depending on resources:
    combine = wkz.submissions[0].jobscripts[0].resources.combine_jobscript_std
    filename = "js_0_std.log" if combine else "js_0_stdout.log"
    # BUG FIX: interpolate the computed `filename` into the path (it was previously
    # left unused, with a literal placeholder in the path string):
    rel_path = f"artifacts/submissions/0/js_std/0/{filename}"
    abs_path = f"{wkz.url}/{rel_path}"

    assert wkz.get_text_file(rel_path) == "hi!\n"
    assert wkz.get_text_file(abs_path) == "hi!\n"
545
+
546
+
547
@pytest.mark.parametrize("store", ["zarr", "json"])
def test_get_text_file_file_not_found(null_config, tmp_path, store):
    """`Workflow.get_text_file` raises `FileNotFoundError` for a missing file, for
    both persistent store formats."""
    schema = hf.TaskSchema(
        "t1", actions=[hf.Action(commands=[hf.Command("echo 'hi!'")])]
    )
    wk = hf.Workflow.from_template_data(
        tasks=[hf.Task(schema)],
        template_name="print_stdout",
        path=tmp_path,
        store=store,
    )
    with pytest.raises(FileNotFoundError):
        wk.get_text_file("non_existent_file.txt")
555
+
556
+
557
def test_get_text_file_file_not_found_zarr_zip(null_config, tmp_path):
    """`Workflow.get_text_file` raises `FileNotFoundError` for a missing file when
    the workflow is re-opened from a zipped zarr store."""
    schema = hf.TaskSchema(
        "t1", actions=[hf.Action(commands=[hf.Command("echo 'hi!'")])]
    )
    wk = hf.Workflow.from_template_data(
        tasks=[hf.Task(schema)],
        template_name="print_stdout",
        path=tmp_path,
        store="zarr",
    )
    zipped = hf.Workflow(wk.zip())
    with pytest.raises(FileNotFoundError):
        zipped.get_text_file("non_existent_file.txt")