hpcflow 0.1.9__py3-none-any.whl → 0.2.0a271__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (275)
  1. hpcflow/__init__.py +2 -11
  2. hpcflow/__pyinstaller/__init__.py +5 -0
  3. hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
  4. hpcflow/_version.py +1 -1
  5. hpcflow/app.py +43 -0
  6. hpcflow/cli.py +2 -462
  7. hpcflow/data/demo_data_manifest/__init__.py +3 -0
  8. hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
  9. hpcflow/data/jinja_templates/test/test_template.txt +8 -0
  10. hpcflow/data/programs/hello_world/README.md +1 -0
  11. hpcflow/data/programs/hello_world/hello_world.c +87 -0
  12. hpcflow/data/programs/hello_world/linux/hello_world +0 -0
  13. hpcflow/data/programs/hello_world/macos/hello_world +0 -0
  14. hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
  15. hpcflow/data/scripts/__init__.py +1 -0
  16. hpcflow/data/scripts/bad_script.py +2 -0
  17. hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
  18. hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
  19. hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
  20. hpcflow/data/scripts/do_nothing.py +2 -0
  21. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  22. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  23. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  24. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  25. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  26. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  27. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  28. hpcflow/data/scripts/generate_t1_file_01.py +7 -0
  29. hpcflow/data/scripts/import_future_script.py +7 -0
  30. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  31. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  32. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  33. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  34. hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
  35. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  36. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  37. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  38. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  39. hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
  40. hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
  41. hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
  42. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  43. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  44. hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
  45. hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
  46. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  47. hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
  48. hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
  49. hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
  50. hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
  51. hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
  52. hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
  53. hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
  54. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  55. hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
  56. hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
  57. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  58. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  59. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  60. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  61. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  62. hpcflow/data/scripts/parse_t1_file_01.py +4 -0
  63. hpcflow/data/scripts/script_exit_test.py +5 -0
  64. hpcflow/data/template_components/__init__.py +1 -0
  65. hpcflow/data/template_components/command_files.yaml +26 -0
  66. hpcflow/data/template_components/environments.yaml +13 -0
  67. hpcflow/data/template_components/parameters.yaml +14 -0
  68. hpcflow/data/template_components/task_schemas.yaml +139 -0
  69. hpcflow/data/workflows/workflow_1.yaml +5 -0
  70. hpcflow/examples.ipynb +1037 -0
  71. hpcflow/sdk/__init__.py +149 -0
  72. hpcflow/sdk/app.py +4266 -0
  73. hpcflow/sdk/cli.py +1479 -0
  74. hpcflow/sdk/cli_common.py +385 -0
  75. hpcflow/sdk/config/__init__.py +5 -0
  76. hpcflow/sdk/config/callbacks.py +246 -0
  77. hpcflow/sdk/config/cli.py +388 -0
  78. hpcflow/sdk/config/config.py +1410 -0
  79. hpcflow/sdk/config/config_file.py +501 -0
  80. hpcflow/sdk/config/errors.py +272 -0
  81. hpcflow/sdk/config/types.py +150 -0
  82. hpcflow/sdk/core/__init__.py +38 -0
  83. hpcflow/sdk/core/actions.py +3857 -0
  84. hpcflow/sdk/core/app_aware.py +25 -0
  85. hpcflow/sdk/core/cache.py +224 -0
  86. hpcflow/sdk/core/command_files.py +814 -0
  87. hpcflow/sdk/core/commands.py +424 -0
  88. hpcflow/sdk/core/element.py +2071 -0
  89. hpcflow/sdk/core/enums.py +221 -0
  90. hpcflow/sdk/core/environment.py +256 -0
  91. hpcflow/sdk/core/errors.py +1043 -0
  92. hpcflow/sdk/core/execute.py +207 -0
  93. hpcflow/sdk/core/json_like.py +809 -0
  94. hpcflow/sdk/core/loop.py +1320 -0
  95. hpcflow/sdk/core/loop_cache.py +282 -0
  96. hpcflow/sdk/core/object_list.py +933 -0
  97. hpcflow/sdk/core/parameters.py +3371 -0
  98. hpcflow/sdk/core/rule.py +196 -0
  99. hpcflow/sdk/core/run_dir_files.py +57 -0
  100. hpcflow/sdk/core/skip_reason.py +7 -0
  101. hpcflow/sdk/core/task.py +3792 -0
  102. hpcflow/sdk/core/task_schema.py +993 -0
  103. hpcflow/sdk/core/test_utils.py +538 -0
  104. hpcflow/sdk/core/types.py +447 -0
  105. hpcflow/sdk/core/utils.py +1207 -0
  106. hpcflow/sdk/core/validation.py +87 -0
  107. hpcflow/sdk/core/values.py +477 -0
  108. hpcflow/sdk/core/workflow.py +4820 -0
  109. hpcflow/sdk/core/zarr_io.py +206 -0
  110. hpcflow/sdk/data/__init__.py +13 -0
  111. hpcflow/sdk/data/config_file_schema.yaml +34 -0
  112. hpcflow/sdk/data/config_schema.yaml +260 -0
  113. hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
  114. hpcflow/sdk/data/files_spec_schema.yaml +5 -0
  115. hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
  116. hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
  117. hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
  118. hpcflow/sdk/demo/__init__.py +3 -0
  119. hpcflow/sdk/demo/cli.py +242 -0
  120. hpcflow/sdk/helper/__init__.py +3 -0
  121. hpcflow/sdk/helper/cli.py +137 -0
  122. hpcflow/sdk/helper/helper.py +300 -0
  123. hpcflow/sdk/helper/watcher.py +192 -0
  124. hpcflow/sdk/log.py +288 -0
  125. hpcflow/sdk/persistence/__init__.py +18 -0
  126. hpcflow/sdk/persistence/base.py +2817 -0
  127. hpcflow/sdk/persistence/defaults.py +6 -0
  128. hpcflow/sdk/persistence/discovery.py +39 -0
  129. hpcflow/sdk/persistence/json.py +954 -0
  130. hpcflow/sdk/persistence/pending.py +948 -0
  131. hpcflow/sdk/persistence/store_resource.py +203 -0
  132. hpcflow/sdk/persistence/types.py +309 -0
  133. hpcflow/sdk/persistence/utils.py +73 -0
  134. hpcflow/sdk/persistence/zarr.py +2388 -0
  135. hpcflow/sdk/runtime.py +320 -0
  136. hpcflow/sdk/submission/__init__.py +3 -0
  137. hpcflow/sdk/submission/enums.py +70 -0
  138. hpcflow/sdk/submission/jobscript.py +2379 -0
  139. hpcflow/sdk/submission/schedulers/__init__.py +281 -0
  140. hpcflow/sdk/submission/schedulers/direct.py +233 -0
  141. hpcflow/sdk/submission/schedulers/sge.py +376 -0
  142. hpcflow/sdk/submission/schedulers/slurm.py +598 -0
  143. hpcflow/sdk/submission/schedulers/utils.py +25 -0
  144. hpcflow/sdk/submission/shells/__init__.py +52 -0
  145. hpcflow/sdk/submission/shells/base.py +229 -0
  146. hpcflow/sdk/submission/shells/bash.py +504 -0
  147. hpcflow/sdk/submission/shells/os_version.py +115 -0
  148. hpcflow/sdk/submission/shells/powershell.py +352 -0
  149. hpcflow/sdk/submission/submission.py +1402 -0
  150. hpcflow/sdk/submission/types.py +140 -0
  151. hpcflow/sdk/typing.py +194 -0
  152. hpcflow/sdk/utils/arrays.py +69 -0
  153. hpcflow/sdk/utils/deferred_file.py +55 -0
  154. hpcflow/sdk/utils/hashing.py +16 -0
  155. hpcflow/sdk/utils/patches.py +31 -0
  156. hpcflow/sdk/utils/strings.py +69 -0
  157. hpcflow/tests/api/test_api.py +32 -0
  158. hpcflow/tests/conftest.py +123 -0
  159. hpcflow/tests/data/__init__.py +0 -0
  160. hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
  161. hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
  162. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  163. hpcflow/tests/data/workflow_1.json +10 -0
  164. hpcflow/tests/data/workflow_1.yaml +5 -0
  165. hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
  166. hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
  167. hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
  168. hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
  169. hpcflow/tests/programs/test_programs.py +180 -0
  170. hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
  171. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  172. hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
  173. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  174. hpcflow/tests/scripts/test_main_scripts.py +1361 -0
  175. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  176. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  177. hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
  178. hpcflow/tests/unit/test_action.py +1066 -0
  179. hpcflow/tests/unit/test_action_rule.py +24 -0
  180. hpcflow/tests/unit/test_app.py +132 -0
  181. hpcflow/tests/unit/test_cache.py +46 -0
  182. hpcflow/tests/unit/test_cli.py +172 -0
  183. hpcflow/tests/unit/test_command.py +377 -0
  184. hpcflow/tests/unit/test_config.py +195 -0
  185. hpcflow/tests/unit/test_config_file.py +162 -0
  186. hpcflow/tests/unit/test_element.py +666 -0
  187. hpcflow/tests/unit/test_element_iteration.py +88 -0
  188. hpcflow/tests/unit/test_element_set.py +158 -0
  189. hpcflow/tests/unit/test_group.py +115 -0
  190. hpcflow/tests/unit/test_input_source.py +1479 -0
  191. hpcflow/tests/unit/test_input_value.py +398 -0
  192. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  193. hpcflow/tests/unit/test_json_like.py +1247 -0
  194. hpcflow/tests/unit/test_loop.py +2674 -0
  195. hpcflow/tests/unit/test_meta_task.py +325 -0
  196. hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
  197. hpcflow/tests/unit/test_object_list.py +116 -0
  198. hpcflow/tests/unit/test_parameter.py +243 -0
  199. hpcflow/tests/unit/test_persistence.py +664 -0
  200. hpcflow/tests/unit/test_resources.py +243 -0
  201. hpcflow/tests/unit/test_run.py +286 -0
  202. hpcflow/tests/unit/test_run_directories.py +29 -0
  203. hpcflow/tests/unit/test_runtime.py +9 -0
  204. hpcflow/tests/unit/test_schema_input.py +372 -0
  205. hpcflow/tests/unit/test_shell.py +129 -0
  206. hpcflow/tests/unit/test_slurm.py +39 -0
  207. hpcflow/tests/unit/test_submission.py +502 -0
  208. hpcflow/tests/unit/test_task.py +2560 -0
  209. hpcflow/tests/unit/test_task_schema.py +182 -0
  210. hpcflow/tests/unit/test_utils.py +616 -0
  211. hpcflow/tests/unit/test_value_sequence.py +549 -0
  212. hpcflow/tests/unit/test_values.py +91 -0
  213. hpcflow/tests/unit/test_workflow.py +827 -0
  214. hpcflow/tests/unit/test_workflow_template.py +186 -0
  215. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  216. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  217. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  218. hpcflow/tests/unit/utils/test_patches.py +5 -0
  219. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  220. hpcflow/tests/unit/utils/test_strings.py +97 -0
  221. hpcflow/tests/workflows/__init__.py +0 -0
  222. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  223. hpcflow/tests/workflows/test_jobscript.py +355 -0
  224. hpcflow/tests/workflows/test_run_status.py +198 -0
  225. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  226. hpcflow/tests/workflows/test_submission.py +140 -0
  227. hpcflow/tests/workflows/test_workflows.py +564 -0
  228. hpcflow/tests/workflows/test_zip.py +18 -0
  229. hpcflow/viz_demo.ipynb +6794 -0
  230. hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
  231. hpcflow-0.2.0a271.dist-info/METADATA +65 -0
  232. hpcflow-0.2.0a271.dist-info/RECORD +237 -0
  233. {hpcflow-0.1.9.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
  234. hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
  235. hpcflow/api.py +0 -458
  236. hpcflow/archive/archive.py +0 -308
  237. hpcflow/archive/cloud/cloud.py +0 -47
  238. hpcflow/archive/cloud/errors.py +0 -9
  239. hpcflow/archive/cloud/providers/dropbox.py +0 -432
  240. hpcflow/archive/errors.py +0 -5
  241. hpcflow/base_db.py +0 -4
  242. hpcflow/config.py +0 -232
  243. hpcflow/copytree.py +0 -66
  244. hpcflow/data/examples/_config.yml +0 -14
  245. hpcflow/data/examples/damask/demo/1.run.yml +0 -4
  246. hpcflow/data/examples/damask/demo/2.process.yml +0 -29
  247. hpcflow/data/examples/damask/demo/geom.geom +0 -2052
  248. hpcflow/data/examples/damask/demo/load.load +0 -1
  249. hpcflow/data/examples/damask/demo/material.config +0 -185
  250. hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
  251. hpcflow/data/examples/damask/inputs/load.load +0 -1
  252. hpcflow/data/examples/damask/inputs/material.config +0 -185
  253. hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
  254. hpcflow/data/examples/damask/profiles/damask.yml +0 -4
  255. hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
  256. hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
  257. hpcflow/data/examples/damask/profiles/default.yml +0 -6
  258. hpcflow/data/examples/thinking.yml +0 -177
  259. hpcflow/errors.py +0 -2
  260. hpcflow/init_db.py +0 -37
  261. hpcflow/models.py +0 -2549
  262. hpcflow/nesting.py +0 -9
  263. hpcflow/profiles.py +0 -455
  264. hpcflow/project.py +0 -81
  265. hpcflow/scheduler.py +0 -323
  266. hpcflow/utils.py +0 -103
  267. hpcflow/validation.py +0 -167
  268. hpcflow/variables.py +0 -544
  269. hpcflow-0.1.9.dist-info/METADATA +0 -168
  270. hpcflow-0.1.9.dist-info/RECORD +0 -45
  271. hpcflow-0.1.9.dist-info/entry_points.txt +0 -8
  272. hpcflow-0.1.9.dist-info/top_level.txt +0 -1
  273. /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
  274. /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
  275. /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
hpcflow/tests/scripts/test_main_scripts.py
@@ -0,0 +1,1361 @@
+ import json
+ import os
+ from pathlib import Path
+ import shutil
+ import time
+ import pytest
+
+ from hpcflow.app import app as hf
+ from hpcflow.sdk.core.enums import EARStatus
+ from hpcflow.sdk.core.test_utils import P1_parameter_cls as P1
+
+ # note: when testing the frozen app, we might not have MatFlow installed in the
+ # built-in python_env environment, so we should skip these tests.
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_direct_in_direct_out(null_config, tmp_path: Path, combine_scripts: bool):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == p1_val + 100
+
+
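Aside: the `direct` data-format tests say little about the snippet script itself, but the assertion `p2.value == p1_val + 100` pins its behaviour down. A minimal sketch of what `main_script_test_direct_in_direct_out.py` (added in this release, +6 lines) plausibly looks like, assuming the hpcflow convention that a "direct" main script is a function named after the file, taking inputs as arguments and returning a dict of outputs:

```python
# a sketch, not the verbatim shipped file
def main_script_test_direct_in_direct_out(p1):
    # receives `p1` directly as an argument; returns `p2` directly
    p2 = p1 + 100
    return {"p2": p2}
```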
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_direct_sub_param_in_direct_out(
+     null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_sub_param_in_direct_out.py>>",
+                 script_data_in={"p1.a": "direct"},
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     p1_val = {"a": 101}
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == p1_val["a"] + 100
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_direct_in_direct_out_single_label(
+     null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     """This uses the same test script as the `test_script_direct_in_direct_out` test;
+     single labels are trivial and need not be referenced in the script."""
+     p1_label = "one"
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"), labels={p1_label: {}})],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={f"p1[{p1_label}]": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == p1_val + 100
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_direct_in_direct_out_labels(
+     null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     p1_label_1 = "one"
+     p1_label_2 = "two"
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[
+             hf.SchemaInput(
+                 parameter=hf.Parameter("p1"),
+                 labels={p1_label_1: {}, p1_label_2: {}},
+                 multiple=True,
+             )
+         ],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out_labels.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     p1_1_val = 101
+     p1_2_val = 201
+     t1 = hf.Task(
+         schema=s1,
+         inputs={
+             f"p1[{p1_label_1}]": p1_1_val,
+             f"p1[{p1_label_2}]": p1_2_val,
+         },
+     )
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == p1_1_val + p1_2_val
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_json_in_json_out(null_config, tmp_path: Path, combine_scripts: bool):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_json_in_json_out.py>>",
+                 script_data_in="json",
+                 script_data_out="json",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == p1_val + 100
+
+
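The `json` variants exchange data through files in the run directory (hence `requires_dir=True` on the action). A sketch of `main_script_test_json_in_json_out.py` consistent with the assertions, assuming per-format input/output file paths are supplied to the script function (the `_input_files`/`_output_files` argument names are an assumption here, not confirmed by this diff):

```python
import json


# a sketch, not the verbatim shipped file
def main_script_test_json_in_json_out(_input_files, _output_files):
    # assumed: mappings from data format ("json") to file paths
    with open(_input_files["json"]) as fh:
        inputs = json.load(fh)
    p2 = inputs["p1"] + 100  # matches the assertion p2 == p1 + 100
    with open(_output_files["json"], "w") as fh:
        json.dump({"p2": p2}, fh)
```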
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_json_in_json_out_labels(
+     null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     p1_label_1 = "one"
+     p1_label_2 = "two"
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[
+             hf.SchemaInput(
+                 parameter=hf.Parameter("p1"),
+                 labels={p1_label_1: {}, p1_label_2: {}},
+                 multiple=True,
+             )
+         ],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_json_in_json_out_labels.py>>",
+                 script_data_in="json",
+                 script_data_out="json",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+     )
+     p1_1_val = 101
+     p1_2_val = 201
+     t1 = hf.Task(
+         schema=s1,
+         inputs={
+             f"p1[{p1_label_1}]": p1_1_val,
+             f"p1[{p1_label_2}]": p1_2_val,
+         },
+     )
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == p1_1_val + p1_2_val
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_json_sub_param_in_json_out_labels(
+     null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     p1_label_1 = "one"
+     p1_label_2 = "two"
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[
+             hf.SchemaInput(
+                 parameter=hf.Parameter("p1"),
+                 labels={p1_label_1: {}, p1_label_2: {}},
+                 multiple=True,
+             )
+         ],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_json_sub_param_in_json_out_labels.py>>",
+                 script_data_in={"p1[one].a": "json", "p1[two]": "json"},
+                 script_data_out="json",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+     )
+     a_val = 101
+     p1_2_val = 201
+     t1 = hf.Task(
+         schema=s1,
+         inputs={
+             f"p1[{p1_label_1}]": {"a": a_val},
+             f"p1[{p1_label_2}]": p1_2_val,
+         },
+     )
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == a_val + p1_2_val
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_json_and_direct_in_json_out(
+     null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[
+             hf.SchemaInput(parameter=hf.Parameter("p1")),
+             hf.SchemaInput(parameter=hf.Parameter("p2")),
+         ],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p3"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_json_and_direct_in_json_out.py>>",
+                 script_data_in={"p1": "json", "p2": "direct"},
+                 script_data_out="json",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+     )
+     p1_val = 101
+     p2_val = 201
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val, "p2": p2_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p3 = wk.tasks[0].elements[0].outputs.p3
+     assert isinstance(p3, hf.ElementParameter)
+     assert p3.value == p1_val + p2_val
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_json_in_json_and_direct_out(
+     null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[
+             hf.SchemaInput(parameter=hf.Parameter("p2")),
+             hf.SchemaOutput(parameter=hf.Parameter("p3")),
+         ],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_json_in_json_and_direct_out.py>>",
+                 script_data_in="json",
+                 script_data_out={"p2": "json", "p3": "direct"},
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     p3 = wk.tasks[0].elements[0].outputs.p3
+     assert isinstance(p3, hf.ElementParameter)
+     assert p2.value == p1_val + 100
+     assert p3.value == p1_val + 200
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_json_in_obj(null_config, tmp_path: Path, combine_scripts: bool):
+     """Use a custom JSON dumper defined in the P1 class."""
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_json_in_obj.py>>",
+                 script_data_in="json",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+         parameter_class_modules=["hpcflow.sdk.core.test_utils"],
+     )
+     a_val = 1
+     t1 = hf.Task(schema=s1, inputs={"p1c": P1(a=a_val)})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == a_val + 100
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_hdf5_in_obj(null_config, tmp_path: Path, combine_scripts: bool):
+     """Use a custom HDF5 dumper defined in the P1 class."""
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_hdf5_in_obj.py>>",
+                 script_data_in="hdf5",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+         parameter_class_modules=["hpcflow.sdk.core.test_utils"],
+     )
+     a_val = 1
+     t1 = hf.Task(schema=s1, inputs={"p1c": P1(a=a_val)})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == a_val + 100
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_hdf5_in_obj_group(null_config, tmp_path: Path, combine_scripts: bool):
+     s0 = hf.TaskSchema(
+         objective="define_p1c",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"))],
+     )
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"), group="my_group")],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_hdf5_in_obj_group.py>>",
+                 script_data_in="hdf5",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+         parameter_class_modules=["hpcflow.sdk.core.test_utils"],
+     )
+     a_vals = (1, 2)
+     t0 = hf.Task(
+         schema=s0,
+         sequences=[hf.ValueSequence(path="inputs.p1c", values=[P1(a=i) for i in a_vals])],
+         groups=[hf.ElementGroup("my_group")],
+     )
+     t1 = hf.Task(schema=s1)
+     wk = hf.Workflow.from_template_data(
+         tasks=[t0, t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     p2 = wk.tasks[1].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == sum(a_vals) + 100
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_json_out_obj(null_config, tmp_path: Path, combine_scripts: bool):
+     """Use a custom JSON saver defined in the P1 class."""
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p1c"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_json_out_obj.py>>",
+                 script_data_in="direct",
+                 script_data_out="json",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+         parameter_class_modules=["hpcflow.sdk.core.test_utils"],
+     )
+     p1_val = 1
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p1c = wk.tasks[0].elements[0].outputs.p1c
+     assert isinstance(p1c, hf.ElementParameter)
+     assert p1c.value == P1(a=p1_val + 100)
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_hdf5_out_obj(null_config, tmp_path: Path, combine_scripts: bool):
+     """Use a custom HDF5 saver defined in the P1 class."""
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p1c"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_hdf5_out_obj.py>>",
+                 script_data_in="direct",
+                 script_data_out="hdf5",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+         parameter_class_modules=["hpcflow.sdk.core.test_utils"],
+     )
+     p1_val = 1
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p1c = wk.tasks[0].elements[0].outputs.p1c
+     assert isinstance(p1c, hf.ElementParameter)
+     assert p1c.value == P1(a=p1_val + 100)
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_direct_in_pass_env_spec(
+     new_null_config, tmp_path: Path, combine_scripts: bool
+ ):
+
+     vers_spec = {"version": "1.2"}
+     env = hf.Environment(
+         name="python_env_with_specifiers",
+         specifiers=vers_spec,
+         executables=[
+             hf.Executable(
+                 label="python_script",
+                 instances=[
+                     hf.ExecutableInstance(
+                         command="python <<script_path>> <<args>>",
+                         num_cores=1,
+                         parallel_mode=None,
+                     )
+                 ],
+             )
+         ],
+     )
+     hf.envs.add_object(env, skip_duplicates=True)
+
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out_env_spec.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 script_pass_env_spec=True,
+                 environments=[
+                     hf.ActionEnvironment(environment="python_env_with_specifiers")
+                 ],
+             )
+         ],
+     )
+     t1 = hf.Task(
+         schema=s1,
+         inputs={"p1": 101},
+         environments={"python_env_with_specifiers": vers_spec},
+     )
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == {
+         "name": "python_env_with_specifiers",
+         **vers_spec,
+     }
+     hf.reload_template_components()  # remove extra envs
+
+
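The final assertion fixes the env-spec script's contract: with `script_pass_env_spec=True`, the resolved environment specifiers (the environment name merged with `vers_spec`) reach the script, which simply returns them as `p2`. A sketch of `main_script_test_direct_in_direct_out_env_spec.py` consistent with that (the `env_spec` argument name is an assumption):

```python
# a sketch, not the verbatim shipped file
def main_script_test_direct_in_direct_out_env_spec(p1, env_spec):
    # assumed shape: {"name": "python_env_with_specifiers", "version": "1.2"}
    return {"p2": env_spec}
```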
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_std_stream_redirect_on_exception(
+     new_null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     """Test that exceptions raised by the app during execution of a script are printed
+     to the std-stream redirect file (and not the jobscript's standard error file)."""
+
+     # define a custom python environment which redefines the `WK_PATH` shell variable to
+     # a nonsense value so the app cannot load the workflow and thus raises an exception
+     app_caps = hf.package_name.upper()
+     if os.name == "nt":
+         env_cmd = f'$env:{app_caps}_WK_PATH = "nonsense_path"'
+     else:
+         env_cmd = f'export {app_caps}_WK_PATH="nonsense_path"'
+
+     env_cmd += "; python <<script_path>> <<args>>"
+     bad_env = hf.Environment(
+         name="bad_python_env",
+         executables=[
+             hf.Executable(
+                 label="python_script",
+                 instances=[
+                     hf.ExecutableInstance(
+                         command=env_cmd,
+                         num_cores=1,
+                         parallel_mode=None,
+                     )
+                 ],
+             )
+         ],
+     )
+     hf.envs.add_object(bad_env, skip_duplicates=True)
+
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="bad_python_env")],
+             )
+         ],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     # jobscript stderr should be empty
+     assert not wk.submissions[0].jobscripts[0].direct_stderr_path.read_text()
+
+     # the std stream file should contain the workflow-not-found traceback
+     if combine_scripts:
+         std_stream_path = wk.submissions[0].jobscripts[0].get_app_std_path()
+     else:
+         run = wk.get_all_EARs()[0]
+         std_stream_path = run.get_app_std_path()
+     assert std_stream_path.is_file()
+     assert "WorkflowNotFoundError" in std_stream_path.read_text()
+
+     hf.reload_template_components()  # remove extra envs
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_std_out_std_err_not_redirected(
+     null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     """Test that standard error and output streams from a script are written to the
+     jobscript standard error and output files."""
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[
+             hf.SchemaInput(parameter=hf.Parameter("stdout_msg")),
+             hf.SchemaInput(parameter=hf.Parameter("stderr_msg")),
+         ],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_std_out_std_err.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     stdout_msg = "hello stdout!"
+     stderr_msg = "hello stderr!"
+     t1 = hf.Task(schema=s1, inputs={"stdout_msg": stdout_msg, "stderr_msg": stderr_msg})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False)
+
+     if wk.submissions[0].jobscripts[0].resources.combine_jobscript_std:
+         std_out_err = wk.submissions[0].jobscripts[0].direct_std_out_err_path.read_text()
+         assert std_out_err.strip() == f"{stdout_msg}\n{stderr_msg}"
+     else:
+         std_out = wk.submissions[0].jobscripts[0].direct_stdout_path.read_text()
+         std_err = wk.submissions[0].jobscripts[0].direct_stderr_path.read_text()
+         assert std_out.strip() == stdout_msg
+         assert std_err.strip() == stderr_msg
+
+
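The stream test above implies the simplest possible script: print one message to each stream so the test can check where they land. A sketch of `main_script_test_std_out_std_err.py` consistent with the assertions:

```python
import sys


# a sketch, not the verbatim shipped file
def main_script_test_std_out_std_err(stdout_msg, stderr_msg):
    print(stdout_msg)                   # should land in the jobscript stdout file
    print(stderr_msg, file=sys.stderr)  # should land in the jobscript stderr file
```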
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_pass_env_spec(null_config, tmp_path: Path, combine_scripts: bool):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:env_specifier_test/main_script_test_pass_env_spec.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 script_pass_env_spec=True,
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test_pass_env_spec",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     std_out = wk.submissions[0].jobscripts[0].direct_stdout_path.read_text().strip()
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == p1_val + 100
+     assert std_out == "{'name': 'python_env'}"
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_env_specifier_in_main_script_path(
+     new_null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     py_env = hf.Environment(
+         name="python_env",
+         specifiers={"version": "v1"},
+         executables=[
+             hf.Executable(
+                 label="python_script",
+                 instances=[
+                     hf.ExecutableInstance(
+                         command="python <<script_path>> <<args>>",
+                         num_cores=1,
+                         parallel_mode=None,
+                     )
+                 ],
+             )
+         ],
+     )
+     hf.envs.add_object(py_env, skip_duplicates=True)
+
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:env_specifier_test/<<env:version>>/main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+
+     p1_val = 101
+     t1 = hf.Task(
+         schema=s1,
+         inputs={"p1": p1_val},
+         environments={"python_env": {"version": "v1"}},
+     )
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test_env_spec_script_path",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == p1_val + 100
+
+     hf.reload_template_components()  # remove extra envs
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_env_specifier_in_main_script_path_multiple_scripts(
+     new_null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     """Test two elements with different environment specifiers use two distinct scripts."""
+     py_env_v1 = hf.Environment(
+         name="python_env",
+         specifiers={"version": "v1"},
+         executables=[
+             hf.Executable(
+                 label="python_script",
+                 instances=[
+                     hf.ExecutableInstance(
+                         command="python <<script_path>> <<args>>",
+                         num_cores=1,
+                         parallel_mode=None,
+                     )
+                 ],
+             )
+         ],
+     )
+     py_env_v2 = hf.Environment(
+         name="python_env",
+         specifiers={"version": "v2"},
+         executables=[
+             hf.Executable(
+                 label="python_script",
+                 instances=[
+                     hf.ExecutableInstance(
+                         command="python <<script_path>> <<args>>",
+                         num_cores=1,
+                         parallel_mode=None,
+                     )
+                 ],
+             )
+         ],
+     )
+     hf.envs.add_objects([py_env_v1, py_env_v2], skip_duplicates=True)
+
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:env_specifier_test/<<env:version>>/main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+
+     p1_val = 101
+     t1 = hf.Task(
+         schema=s1,
+         inputs={"p1": p1_val},
+         environments={"python_env": {"version": "v1"}},
+         sequences=[
+             hf.ValueSequence(
+                 path="environments.python_env.version",
+                 values=["v1", "v2"],
+             )
+         ],
+     )
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test_multiple_env_spec_script",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     # v1 and v2 scripts output different values:
+     e1, e2 = wk.tasks.t1.elements
+     e1_p2 = e1.outputs.p2
+     e2_p2 = e2.outputs.p2
+     assert isinstance(e1_p2, hf.ElementParameter)
+     assert isinstance(e2_p2, hf.ElementParameter)
+     assert e1_p2.value == 201
+     assert e2_p2.value == 301
+
+     hf.reload_template_components()  # remove extra envs
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ @pytest.mark.parametrize("combine_scripts", [False, True])
+ def test_script_direct_in_direct_out_multi_element(
+     null_config, tmp_path: Path, combine_scripts: bool
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     p1_vals = (101, 102, 103)
+     t1 = hf.Task(
+         schema=s1, sequences=[hf.ValueSequence(path="inputs.p1", values=p1_vals)]
+     )
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test_multi_element",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": combine_scripts}},
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     e0_p2 = wk.tasks[0].elements[0].outputs.p2
+     e1_p2 = wk.tasks[0].elements[1].outputs.p2
+     e2_p2 = wk.tasks[0].elements[2].outputs.p2
+
+     assert isinstance(e0_p2, hf.ElementParameter)
+     assert isinstance(e1_p2, hf.ElementParameter)
+     assert isinstance(e2_p2, hf.ElementParameter)
+
+     assert e0_p2.value == p1_vals[0] + 100
+     assert e1_p2.value == p1_vals[1] + 100
+     assert e2_p2.value == p1_vals[2] + 100
+
+     # check only one script generated, and its name:
+     script_name, _ = t1.schema.actions[0].get_script_artifact_name(env_spec={}, act_idx=0)
+     script_files = list(i.name for i in wk.submissions[0].scripts_path.glob("*"))
+     assert len(script_files) == 1
+     assert script_files[0] == (script_name if not combine_scripts else "js_0.py")
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_repeated_action_in_schema(null_config, tmp_path: Path):
+     # TODO: cannot currently use the same Action object multiple times in a schema
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             ),
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             ),
+         ],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="test_repeated_action_in_schema",
+         path=tmp_path,
+         resources={"any": {"write_app_logs": True}},
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     # check the scripts generated for actions 0 and 1 have the same contents
+     act_0_script, _ = wk.tasks.t1.template.schema.actions[0].get_script_artifact_name(
+         env_spec={}, act_idx=0
+     )
+     act_1_script, _ = wk.tasks.t1.template.schema.actions[1].get_script_artifact_name(
+         env_spec={}, act_idx=1
+     )
+     act_0_script_path = wk.submissions[0].scripts_path / act_0_script
+     act_1_script_path = wk.submissions[0].scripts_path / act_1_script
+     assert act_0_script_path.read_text() == act_1_script_path.read_text()
+
+     # the two files will be symlinked if not on Windows (they may be symlinked on
+     # Windows, depending on whether the user is an admin)
+     if os.name != "nt":
+         assert act_1_script_path.is_symlink()
+
+     # the output will be taken from the second action
+     p2 = wk.tasks[0].elements[0].outputs.p2
+     assert isinstance(p2, hf.ElementParameter)
+     assert p2.value == p1_val + 100
+
+
+ # TODO: same action with different env spec path (v1/v2) in the same schema (check the
+ # contents are different!). Cannot yet do this because it is not possible to set the
+ # environment spec for different "main" actions within the same task.
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_main_script_two_schemas_same_action(null_config, tmp_path: Path):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             ),
+         ],
+     )
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             ),
+         ],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     t2 = hf.Task(schema=s2, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1, t2],
+         template_name="main_script_test_two_schemas_same_action",
+         path=tmp_path,
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     # check the scripts generated for t1 and t2 have the same contents
+     t1_script, _ = wk.tasks.t1.template.schema.actions[0].get_script_artifact_name(
+         env_spec={}, act_idx=0
+     )
+     t2_script, _ = wk.tasks.t2.template.schema.actions[0].get_script_artifact_name(
+         env_spec={}, act_idx=0
+     )
+     t1_script_path = wk.submissions[0].scripts_path / t1_script
+     t2_script_path = wk.submissions[0].scripts_path / t2_script
+     assert t1_script_path.read_text() == t2_script_path.read_text()
+
+     # the two files will be symlinked if not on Windows (they may be symlinked on
+     # Windows, depending on whether the user is an admin)
+     if os.name != "nt":
+         assert t2_script_path.is_symlink()
+
+     # check output
+     t0_p2 = wk.tasks[0].elements[0].outputs.p2
+     t1_p2 = wk.tasks[1].elements[0].outputs.p2
+     assert isinstance(t0_p2, hf.ElementParameter)
+     assert isinstance(t1_p2, hf.ElementParameter)
+     assert t0_p2.value == p1_val + 100
+     assert t1_p2.value == p1_val + 100
+
+     # now copy the workflow elsewhere and check the symlink between the scripts still
+     # works:
+     wk_path = Path(wk.path)
+     copy_path = wk_path.parent.joinpath(wk_path.with_suffix(".copy"))
+     shutil.copytree(wk.path, copy_path, symlinks=True)
+     t2_script_path_copy = Path(str(t2_script_path).replace(wk.path, f"{wk.path}.copy"))
+     assert t1_script_path.read_text() == t2_script_path_copy.read_text()
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_main_script_two_actions_same_schema(null_config, tmp_path: Path):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_direct_in_direct_out.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             ),
+             hf.Action(
+                 script="<<script:main_script_test_json_in_json_out.py>>",
+                 script_data_in="json",
+                 script_data_out="json",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             ),
+         ],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="main_script_test_distinct_actions_same_schema",
+         path=tmp_path,
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     # check scripts generated for act 0 and 1 have different contents
+     act_0_script, _ = wk.tasks.t1.template.schema.actions[0].get_script_artifact_name(
+         env_spec={}, act_idx=0
+     )
+     act_1_script, _ = wk.tasks.t1.template.schema.actions[1].get_script_artifact_name(
+         env_spec={}, act_idx=1
+     )
+     act_0_script_path = wk.submissions[0].scripts_path / act_0_script
+     act_1_script_path = wk.submissions[0].scripts_path / act_1_script
+     assert act_0_script_path.read_text() != act_1_script_path.read_text()
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_shell_env_vars(null_config, tmp_path: Path):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p1"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_shell_env_vars.py>>",
+                 script_data_in="direct",
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             )
+         ],
+     )
+     tasks = [
+         hf.Task(
+             schema=s1,
+             inputs={"p1": 1},
+             repeats=3,
+         ),
+         hf.Task(
+             schema=s1,
+             inputs={"p1": 1},
+         ),
+         hf.Task(
+             schema=s1,
+             inputs={"p1": 1},
+             repeats=2,
+         ),
+     ]
+     loops = [
+         hf.Loop(
+             tasks=[2],
+             num_iterations=2,
+         )
+     ]
+     wk = hf.Workflow.from_template_data(
+         tasks=tasks,
+         loops=loops,
+         template_name="main_script_test_shell_env",
+         path=tmp_path,
+     )
+     wk.add_submission(tasks=[0, 1])
+     wk.submit(wait=True, add_to_known=False, status=False)  # first submission
+
+     wk.submit(wait=True, add_to_known=False, status=False)  # outstanding runs
+
+     for run in wk.get_all_EARs():
+         run_dir = run.get_directory()
+         assert run_dir
+         with run_dir.joinpath("env_vars.json").open("rt") as fp:
+             env_dat = json.load(fp)
+
+         assert env_dat["HPCFLOW_WK_PATH"] == str(run.workflow.path)
+         assert env_dat["HPCFLOW_WK_PATH_ARG"] == str(run.workflow.path)
+
+         assert run.submission_idx is not None
+         for js in wk.submissions[run.submission_idx].jobscripts:
+             js_funcs_path = str(js.jobscript_functions_path)
+             for block in js.blocks:
+                 for run_i in block.all_EARs:
+                     if run_i.id_ == run.id_:
+                         assert int(env_dat["HPCFLOW_JS_IDX"]) == js.index
+                         assert env_dat["HPCFLOW_JS_FUNCS_PATH"] == js_funcs_path
+
+         assert int(env_dat["HPCFLOW_RUN_ID"]) == run.id_
+         assert int(env_dat["HPCFLOW_RUN_IDX"]) == run.index
+         assert int(env_dat["HPCFLOW_RUN_PORT"]) == run.port_number
+
+         script_name = run.get_script_artifact_name()
+         sub_scripts_dir = wk.submissions[run.submission_idx].scripts_path
+         script_path = sub_scripts_dir.joinpath(script_name)
+
+         assert env_dat["HPCFLOW_SUB_SCRIPTS_DIR"] == str(sub_scripts_dir)
+         assert int(env_dat["HPCFLOW_SUB_IDX"]) == run.submission_idx
+
+         assert env_dat["HPCFLOW_RUN_SCRIPT_DIR"] == str(script_path.parent)
+         assert env_dat["HPCFLOW_RUN_SCRIPT_PATH"] == str(script_path)
+         assert env_dat["HPCFLOW_RUN_SCRIPT_NAME"] == script_name
+         assert env_dat["HPCFLOW_RUN_SCRIPT_NAME_NO_EXT"] == script_path.stem
+
+         assert env_dat["HPCFLOW_RUN_STD_PATH"] == str(run.get_app_std_path())
+         assert (
+             env_dat["HPCFLOW_RUN_LOG_PATH"]
+             == env_dat["HPCFLOW_LOG_PATH"]
+             == (str(run.get_app_log_path()) if run.resources.write_app_logs else " ")
+         )
+
+         assert env_dat["HPCFLOW_ELEMENT_ID"] == str(run.element.id_)
+         assert env_dat["HPCFLOW_ELEMENT_IDX"] == str(run.element.index)
+
+         assert env_dat["HPCFLOW_ELEMENT_ITER_ID"] == str(run.element_iteration.id_)
+         assert env_dat["HPCFLOW_ELEMENT_ITER_IDX"] == str(run.element_iteration.index)
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_combine_scripts_script_data_multiple_input_file_formats(
+     null_config, tmp_path: Path
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_json_in_json_out.py>>",
+                 script_data_in="json",
+                 script_data_out="json",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             ),
+         ],
+     )
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[
+             hf.SchemaInput(parameter=hf.Parameter("p2")),
+             hf.SchemaInput(parameter=hf.Parameter("p1c")),
+         ],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p3"))],
+         actions=[
+             hf.Action(
+                 script="<<script:main_script_test_hdf5_in_obj_2.py>>",
+                 script_data_in={"p2": "direct", "p1c": "hdf5"},
+                 script_data_out="direct",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+                 requires_dir=True,
+             ),
+         ],
+         parameter_class_modules=["hpcflow.sdk.core.test_utils"],
+     )
+     p1_val = 101
+     t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
+     t2 = hf.Task(schema=s2, inputs={"p1c": P1(a=p1_val)})
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1, t2],
+         template_name="main_script_test",
+         path=tmp_path,
+         resources={"any": {"combine_scripts": True}},
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     t0_p2 = wk.tasks[0].elements[0].outputs.p2
+     t1_p3 = wk.tasks[1].elements[0].outputs.p3
+     assert isinstance(t0_p2, hf.ElementParameter)
+     assert isinstance(t1_p3, hf.ElementParameter)
+     assert t0_p2.value == p1_val + 100
+     assert t1_p3.value == p1_val + 100
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
+ def test_combine_scripts_from_future_import(null_config, tmp_path: Path):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         actions=[
+             hf.Action(
+                 script="<<script:import_future_script.py>>",
+                 script_exe="python_script",
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             ),
+         ],
+     )
+
+     wk = hf.Workflow.from_template_data(
+         template_name="test_future_import",
+         tasks=[hf.Task(schema=s1)],
+         resources={"any": {"combine_scripts": True}},
+         path=tmp_path,
+     )
+     wk.submit(status=False, add_to_known=False, wait=True)
+
+     run = wk.get_EARs_from_IDs([0])[0]
+     assert run.status is EARStatus.success
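This final test exists because `from __future__ import ...` statements are only legal at the very top of a module, so when `combine_scripts=True` concatenates snippet scripts into a single jobscript script (`js_0.py` in the multi-element test above), any such imports must be hoisted rather than left inline. A sketch of what `import_future_script.py` (+7 lines in this diff) plausibly contains:

```python
# a sketch, not the verbatim shipped file
from __future__ import annotations


def import_future_script() -> None:
    # the body is incidental; the point is the module-level `__future__`
    # import above, which must end up at the top of any combined script
    pass
```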