hpcflow 0.1.15__py3-none-any.whl → 0.2.0a271__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (275)
  1. hpcflow/__init__.py +2 -11
  2. hpcflow/__pyinstaller/__init__.py +5 -0
  3. hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
  4. hpcflow/_version.py +1 -1
  5. hpcflow/app.py +43 -0
  6. hpcflow/cli.py +2 -461
  7. hpcflow/data/demo_data_manifest/__init__.py +3 -0
  8. hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
  9. hpcflow/data/jinja_templates/test/test_template.txt +8 -0
  10. hpcflow/data/programs/hello_world/README.md +1 -0
  11. hpcflow/data/programs/hello_world/hello_world.c +87 -0
  12. hpcflow/data/programs/hello_world/linux/hello_world +0 -0
  13. hpcflow/data/programs/hello_world/macos/hello_world +0 -0
  14. hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
  15. hpcflow/data/scripts/__init__.py +1 -0
  16. hpcflow/data/scripts/bad_script.py +2 -0
  17. hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
  18. hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
  19. hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
  20. hpcflow/data/scripts/do_nothing.py +2 -0
  21. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  22. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  23. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  24. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  25. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  26. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  27. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  28. hpcflow/data/scripts/generate_t1_file_01.py +7 -0
  29. hpcflow/data/scripts/import_future_script.py +7 -0
  30. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  31. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  32. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  33. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  34. hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
  35. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  36. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  37. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  38. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  39. hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
  40. hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
  41. hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
  42. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  43. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  44. hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
  45. hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
  46. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  47. hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
  48. hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
  49. hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
  50. hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
  51. hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
  52. hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
  53. hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
  54. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  55. hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
  56. hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
  57. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  58. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  59. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  60. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  61. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  62. hpcflow/data/scripts/parse_t1_file_01.py +4 -0
  63. hpcflow/data/scripts/script_exit_test.py +5 -0
  64. hpcflow/data/template_components/__init__.py +1 -0
  65. hpcflow/data/template_components/command_files.yaml +26 -0
  66. hpcflow/data/template_components/environments.yaml +13 -0
  67. hpcflow/data/template_components/parameters.yaml +14 -0
  68. hpcflow/data/template_components/task_schemas.yaml +139 -0
  69. hpcflow/data/workflows/workflow_1.yaml +5 -0
  70. hpcflow/examples.ipynb +1037 -0
  71. hpcflow/sdk/__init__.py +149 -0
  72. hpcflow/sdk/app.py +4266 -0
  73. hpcflow/sdk/cli.py +1479 -0
  74. hpcflow/sdk/cli_common.py +385 -0
  75. hpcflow/sdk/config/__init__.py +5 -0
  76. hpcflow/sdk/config/callbacks.py +246 -0
  77. hpcflow/sdk/config/cli.py +388 -0
  78. hpcflow/sdk/config/config.py +1410 -0
  79. hpcflow/sdk/config/config_file.py +501 -0
  80. hpcflow/sdk/config/errors.py +272 -0
  81. hpcflow/sdk/config/types.py +150 -0
  82. hpcflow/sdk/core/__init__.py +38 -0
  83. hpcflow/sdk/core/actions.py +3857 -0
  84. hpcflow/sdk/core/app_aware.py +25 -0
  85. hpcflow/sdk/core/cache.py +224 -0
  86. hpcflow/sdk/core/command_files.py +814 -0
  87. hpcflow/sdk/core/commands.py +424 -0
  88. hpcflow/sdk/core/element.py +2071 -0
  89. hpcflow/sdk/core/enums.py +221 -0
  90. hpcflow/sdk/core/environment.py +256 -0
  91. hpcflow/sdk/core/errors.py +1043 -0
  92. hpcflow/sdk/core/execute.py +207 -0
  93. hpcflow/sdk/core/json_like.py +809 -0
  94. hpcflow/sdk/core/loop.py +1320 -0
  95. hpcflow/sdk/core/loop_cache.py +282 -0
  96. hpcflow/sdk/core/object_list.py +933 -0
  97. hpcflow/sdk/core/parameters.py +3371 -0
  98. hpcflow/sdk/core/rule.py +196 -0
  99. hpcflow/sdk/core/run_dir_files.py +57 -0
  100. hpcflow/sdk/core/skip_reason.py +7 -0
  101. hpcflow/sdk/core/task.py +3792 -0
  102. hpcflow/sdk/core/task_schema.py +993 -0
  103. hpcflow/sdk/core/test_utils.py +538 -0
  104. hpcflow/sdk/core/types.py +447 -0
  105. hpcflow/sdk/core/utils.py +1207 -0
  106. hpcflow/sdk/core/validation.py +87 -0
  107. hpcflow/sdk/core/values.py +477 -0
  108. hpcflow/sdk/core/workflow.py +4820 -0
  109. hpcflow/sdk/core/zarr_io.py +206 -0
  110. hpcflow/sdk/data/__init__.py +13 -0
  111. hpcflow/sdk/data/config_file_schema.yaml +34 -0
  112. hpcflow/sdk/data/config_schema.yaml +260 -0
  113. hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
  114. hpcflow/sdk/data/files_spec_schema.yaml +5 -0
  115. hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
  116. hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
  117. hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
  118. hpcflow/sdk/demo/__init__.py +3 -0
  119. hpcflow/sdk/demo/cli.py +242 -0
  120. hpcflow/sdk/helper/__init__.py +3 -0
  121. hpcflow/sdk/helper/cli.py +137 -0
  122. hpcflow/sdk/helper/helper.py +300 -0
  123. hpcflow/sdk/helper/watcher.py +192 -0
  124. hpcflow/sdk/log.py +288 -0
  125. hpcflow/sdk/persistence/__init__.py +18 -0
  126. hpcflow/sdk/persistence/base.py +2817 -0
  127. hpcflow/sdk/persistence/defaults.py +6 -0
  128. hpcflow/sdk/persistence/discovery.py +39 -0
  129. hpcflow/sdk/persistence/json.py +954 -0
  130. hpcflow/sdk/persistence/pending.py +948 -0
  131. hpcflow/sdk/persistence/store_resource.py +203 -0
  132. hpcflow/sdk/persistence/types.py +309 -0
  133. hpcflow/sdk/persistence/utils.py +73 -0
  134. hpcflow/sdk/persistence/zarr.py +2388 -0
  135. hpcflow/sdk/runtime.py +320 -0
  136. hpcflow/sdk/submission/__init__.py +3 -0
  137. hpcflow/sdk/submission/enums.py +70 -0
  138. hpcflow/sdk/submission/jobscript.py +2379 -0
  139. hpcflow/sdk/submission/schedulers/__init__.py +281 -0
  140. hpcflow/sdk/submission/schedulers/direct.py +233 -0
  141. hpcflow/sdk/submission/schedulers/sge.py +376 -0
  142. hpcflow/sdk/submission/schedulers/slurm.py +598 -0
  143. hpcflow/sdk/submission/schedulers/utils.py +25 -0
  144. hpcflow/sdk/submission/shells/__init__.py +52 -0
  145. hpcflow/sdk/submission/shells/base.py +229 -0
  146. hpcflow/sdk/submission/shells/bash.py +504 -0
  147. hpcflow/sdk/submission/shells/os_version.py +115 -0
  148. hpcflow/sdk/submission/shells/powershell.py +352 -0
  149. hpcflow/sdk/submission/submission.py +1402 -0
  150. hpcflow/sdk/submission/types.py +140 -0
  151. hpcflow/sdk/typing.py +194 -0
  152. hpcflow/sdk/utils/arrays.py +69 -0
  153. hpcflow/sdk/utils/deferred_file.py +55 -0
  154. hpcflow/sdk/utils/hashing.py +16 -0
  155. hpcflow/sdk/utils/patches.py +31 -0
  156. hpcflow/sdk/utils/strings.py +69 -0
  157. hpcflow/tests/api/test_api.py +32 -0
  158. hpcflow/tests/conftest.py +123 -0
  159. hpcflow/tests/data/__init__.py +0 -0
  160. hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
  161. hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
  162. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  163. hpcflow/tests/data/workflow_1.json +10 -0
  164. hpcflow/tests/data/workflow_1.yaml +5 -0
  165. hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
  166. hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
  167. hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
  168. hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
  169. hpcflow/tests/programs/test_programs.py +180 -0
  170. hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
  171. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  172. hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
  173. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  174. hpcflow/tests/scripts/test_main_scripts.py +1361 -0
  175. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  176. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  177. hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
  178. hpcflow/tests/unit/test_action.py +1066 -0
  179. hpcflow/tests/unit/test_action_rule.py +24 -0
  180. hpcflow/tests/unit/test_app.py +132 -0
  181. hpcflow/tests/unit/test_cache.py +46 -0
  182. hpcflow/tests/unit/test_cli.py +172 -0
  183. hpcflow/tests/unit/test_command.py +377 -0
  184. hpcflow/tests/unit/test_config.py +195 -0
  185. hpcflow/tests/unit/test_config_file.py +162 -0
  186. hpcflow/tests/unit/test_element.py +666 -0
  187. hpcflow/tests/unit/test_element_iteration.py +88 -0
  188. hpcflow/tests/unit/test_element_set.py +158 -0
  189. hpcflow/tests/unit/test_group.py +115 -0
  190. hpcflow/tests/unit/test_input_source.py +1479 -0
  191. hpcflow/tests/unit/test_input_value.py +398 -0
  192. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  193. hpcflow/tests/unit/test_json_like.py +1247 -0
  194. hpcflow/tests/unit/test_loop.py +2674 -0
  195. hpcflow/tests/unit/test_meta_task.py +325 -0
  196. hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
  197. hpcflow/tests/unit/test_object_list.py +116 -0
  198. hpcflow/tests/unit/test_parameter.py +243 -0
  199. hpcflow/tests/unit/test_persistence.py +664 -0
  200. hpcflow/tests/unit/test_resources.py +243 -0
  201. hpcflow/tests/unit/test_run.py +286 -0
  202. hpcflow/tests/unit/test_run_directories.py +29 -0
  203. hpcflow/tests/unit/test_runtime.py +9 -0
  204. hpcflow/tests/unit/test_schema_input.py +372 -0
  205. hpcflow/tests/unit/test_shell.py +129 -0
  206. hpcflow/tests/unit/test_slurm.py +39 -0
  207. hpcflow/tests/unit/test_submission.py +502 -0
  208. hpcflow/tests/unit/test_task.py +2560 -0
  209. hpcflow/tests/unit/test_task_schema.py +182 -0
  210. hpcflow/tests/unit/test_utils.py +616 -0
  211. hpcflow/tests/unit/test_value_sequence.py +549 -0
  212. hpcflow/tests/unit/test_values.py +91 -0
  213. hpcflow/tests/unit/test_workflow.py +827 -0
  214. hpcflow/tests/unit/test_workflow_template.py +186 -0
  215. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  216. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  217. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  218. hpcflow/tests/unit/utils/test_patches.py +5 -0
  219. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  220. hpcflow/tests/unit/utils/test_strings.py +97 -0
  221. hpcflow/tests/workflows/__init__.py +0 -0
  222. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  223. hpcflow/tests/workflows/test_jobscript.py +355 -0
  224. hpcflow/tests/workflows/test_run_status.py +198 -0
  225. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  226. hpcflow/tests/workflows/test_submission.py +140 -0
  227. hpcflow/tests/workflows/test_workflows.py +564 -0
  228. hpcflow/tests/workflows/test_zip.py +18 -0
  229. hpcflow/viz_demo.ipynb +6794 -0
  230. hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
  231. hpcflow-0.2.0a271.dist-info/METADATA +65 -0
  232. hpcflow-0.2.0a271.dist-info/RECORD +237 -0
  233. {hpcflow-0.1.15.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
  234. hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
  235. hpcflow/api.py +0 -490
  236. hpcflow/archive/archive.py +0 -307
  237. hpcflow/archive/cloud/cloud.py +0 -45
  238. hpcflow/archive/cloud/errors.py +0 -9
  239. hpcflow/archive/cloud/providers/dropbox.py +0 -427
  240. hpcflow/archive/errors.py +0 -5
  241. hpcflow/base_db.py +0 -4
  242. hpcflow/config.py +0 -233
  243. hpcflow/copytree.py +0 -66
  244. hpcflow/data/examples/_config.yml +0 -14
  245. hpcflow/data/examples/damask/demo/1.run.yml +0 -4
  246. hpcflow/data/examples/damask/demo/2.process.yml +0 -29
  247. hpcflow/data/examples/damask/demo/geom.geom +0 -2052
  248. hpcflow/data/examples/damask/demo/load.load +0 -1
  249. hpcflow/data/examples/damask/demo/material.config +0 -185
  250. hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
  251. hpcflow/data/examples/damask/inputs/load.load +0 -1
  252. hpcflow/data/examples/damask/inputs/material.config +0 -185
  253. hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
  254. hpcflow/data/examples/damask/profiles/damask.yml +0 -4
  255. hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
  256. hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
  257. hpcflow/data/examples/damask/profiles/default.yml +0 -6
  258. hpcflow/data/examples/thinking.yml +0 -177
  259. hpcflow/errors.py +0 -2
  260. hpcflow/init_db.py +0 -37
  261. hpcflow/models.py +0 -2595
  262. hpcflow/nesting.py +0 -9
  263. hpcflow/profiles.py +0 -455
  264. hpcflow/project.py +0 -81
  265. hpcflow/scheduler.py +0 -322
  266. hpcflow/utils.py +0 -103
  267. hpcflow/validation.py +0 -166
  268. hpcflow/variables.py +0 -543
  269. hpcflow-0.1.15.dist-info/METADATA +0 -168
  270. hpcflow-0.1.15.dist-info/RECORD +0 -45
  271. hpcflow-0.1.15.dist-info/entry_points.txt +0 -8
  272. hpcflow-0.1.15.dist-info/top_level.txt +0 -1
  273. /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
  274. /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
  275. /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
@@ -0,0 +1,1479 @@
1
+ from __future__ import annotations
2
+ from textwrap import dedent
3
+ from typing import TYPE_CHECKING
4
+ import numpy as np
5
+ import pytest
6
+ from hpcflow.app import app as hf
7
+ from hpcflow.sdk.core.errors import (
8
+ InapplicableInputSourceElementIters,
9
+ MissingInputs,
10
+ NoCoincidentInputSources,
11
+ UnavailableInputSource,
12
+ )
13
+ from hpcflow.sdk.core.test_utils import (
14
+ P1_parameter_cls as P1,
15
+ P1_sub_parameter_cls as P1_sub,
16
+ make_schemas,
17
+ )
18
+
19
+ if TYPE_CHECKING:
20
+ from pathlib import Path
21
+ from hpcflow.sdk.core.parameters import Parameter
22
+ from hpcflow.sdk.core.types import RuleArgs
23
+
24
+
25
def test_input_source_class_method_local() -> None:
    """The ``local`` classmethod matches the explicit constructor form."""
    expected = hf.InputSource(hf.InputSourceType.LOCAL)
    assert hf.InputSource.local() == expected


def test_input_source_class_method_default() -> None:
    """The ``default`` classmethod matches the explicit constructor form."""
    expected = hf.InputSource(hf.InputSourceType.DEFAULT)
    assert hf.InputSource.default() == expected


def test_input_source_class_method_task() -> None:
    """The ``task`` classmethod forwards the task reference to the constructor."""
    ref = 0
    expected = hf.InputSource(source_type=hf.InputSourceType.TASK, task_ref=ref)
    assert hf.InputSource.task(ref) == expected


def test_input_source_class_method_import() -> None:
    """The ``import_`` classmethod forwards the import reference to the constructor."""
    # TODO: interface to imports (and so how to reference) is not yet decided
    ref = 0
    expected = hf.InputSource(hf.InputSourceType.IMPORT, import_ref=ref)
    assert hf.InputSource.import_(ref) == expected


def test_input_source_class_method_task_same_default_task_source_type() -> None:
    """Both construction routes produce the same default task source type."""
    ref = 0
    via_init = hf.InputSource(hf.InputSourceType.TASK, task_ref=ref)
    via_classmethod = hf.InputSource.task(task_ref=ref)
    assert via_init.task_source_type == via_classmethod.task_source_type
55
+
56
+
57
def test_input_source_validate_source_type_string_local() -> None:
    """A ``"local"`` string is normalised to ``InputSourceType.LOCAL``."""
    assert hf.InputSource("local") == hf.InputSource(hf.InputSourceType.LOCAL)


def test_input_source_validate_source_type_string_default() -> None:
    """A ``"default"`` string is normalised to ``InputSourceType.DEFAULT``."""
    assert hf.InputSource("default") == hf.InputSource(hf.InputSourceType.DEFAULT)


def test_input_source_validate_source_type_string_task() -> None:
    """A ``"task"`` string is normalised to ``InputSourceType.TASK``."""
    ref = 0
    from_string = hf.InputSource("task", task_ref=ref)
    from_enum = hf.InputSource(hf.InputSourceType.TASK, task_ref=ref)
    assert from_string == from_enum


def test_input_source_validate_source_type_string_import() -> None:
    """An ``"import"`` string is normalised to ``InputSourceType.IMPORT``."""
    # TODO: interface to imports (and so how to reference) is not yet decided
    ref = 0
    from_string = hf.InputSource("import", import_ref=ref)
    from_enum = hf.InputSource(hf.InputSourceType.IMPORT, import_ref=ref)
    assert from_string == from_enum


def test_input_source_validate_source_type_raise_on_unknown_string() -> None:
    """An unrecognised source-type string is rejected with ``ValueError``."""
    with pytest.raises(ValueError):
        hf.InputSource("bad_source_type")
84
+
85
+
86
def test_input_source_validate_task_source_type_string_any() -> None:
    """An ``"any"`` task-source string is normalised to ``TaskSourceType.ANY``."""
    ref = 0
    from_string = hf.InputSource(
        hf.InputSourceType.TASK, task_ref=ref, task_source_type="any"
    )
    from_enum = hf.InputSource(
        hf.InputSourceType.TASK, task_ref=ref, task_source_type=hf.TaskSourceType.ANY
    )
    assert from_string == from_enum


def test_input_source_validate_task_source_type_string_input() -> None:
    """An ``"input"`` task-source string is normalised to ``TaskSourceType.INPUT``."""
    ref = 0
    from_string = hf.InputSource(
        hf.InputSourceType.TASK, task_ref=ref, task_source_type="input"
    )
    from_enum = hf.InputSource(
        hf.InputSourceType.TASK,
        task_ref=ref,
        task_source_type=hf.TaskSourceType.INPUT,
    )
    assert from_string == from_enum


def test_input_source_validate_task_source_type_string_output() -> None:
    """An ``"output"`` task-source string is normalised to ``TaskSourceType.OUTPUT``."""
    ref = 0
    from_string = hf.InputSource(
        hf.InputSourceType.TASK, task_ref=ref, task_source_type="output"
    )
    from_enum = hf.InputSource(
        hf.InputSourceType.TASK,
        task_ref=ref,
        task_source_type=hf.TaskSourceType.OUTPUT,
    )
    assert from_string == from_enum


def test_input_source_validate_task_source_type_raise_on_unknown_string() -> None:
    """An unrecognised task-source string is rejected with ``ValueError``."""
    ref = 0
    with pytest.raises(ValueError):
        hf.InputSource(
            hf.InputSourceType.TASK,
            task_ref=ref,
            task_source_type="bad_task_source_type",
        )
125
+
126
+
127
def test_input_source_to_string_local() -> None:
    """Serialisation of a local source."""
    src = hf.InputSource.local()
    assert src.to_string() == "local"


def test_input_source_to_string_default() -> None:
    """Serialisation of a default source."""
    src = hf.InputSource.default()
    assert src.to_string() == "default"


def test_input_source_to_string_task_output() -> None:
    """Serialisation of a task-output source includes ref and source type."""
    ref = 0
    src = hf.InputSource.task(ref, task_source_type="output")
    assert src.to_string() == f"task.{ref}.output"


def test_input_source_to_string_task_input() -> None:
    """Serialisation of a task-input source includes ref and source type."""
    ref = 0
    src = hf.InputSource.task(ref, task_source_type="input")
    assert src.to_string() == f"task.{ref}.input"


def test_input_source_to_string_task_any() -> None:
    """Serialisation of a task-any source includes ref and source type."""
    ref = 0
    src = hf.InputSource.task(ref, task_source_type="any")
    assert src.to_string() == f"task.{ref}.any"


def test_input_source_to_string_import() -> None:
    """Serialisation of an import source includes the import reference."""
    ref = 0
    src = hf.InputSource.import_(ref)
    assert src.to_string() == f"import.{ref}"
162
+
163
+
164
def test_input_source_from_string_local() -> None:
    """Parsing ``"local"`` yields a local source."""
    parsed = hf.InputSource.from_string("local")
    assert parsed == hf.InputSource(hf.InputSourceType.LOCAL)


def test_input_source_from_string_default() -> None:
    """Parsing ``"default"`` yields a default source."""
    parsed = hf.InputSource.from_string("default")
    assert parsed == hf.InputSource(hf.InputSourceType.DEFAULT)


def test_input_source_from_string_task() -> None:
    """Parsing a fully-qualified task source string recovers ref and type."""
    parsed = hf.InputSource.from_string("task.0.output")
    assert parsed == hf.InputSource(
        hf.InputSourceType.TASK, task_ref=0, task_source_type=hf.TaskSourceType.OUTPUT
    )


def test_input_source_from_string_task_same_default_task_source() -> None:
    """Parsing a task source without an explicit type uses the default type."""
    ref = 0
    parsed = hf.InputSource.from_string(f"task.{ref}")
    assert parsed == hf.InputSource(hf.InputSourceType.TASK, task_ref=ref)


@pytest.mark.skip(reason="Import not yet implemented.")
def test_input_source_from_string_import() -> None:
    """Parsing an import source string recovers the import reference."""
    ref = 0
    parsed = hf.InputSource.from_string(f"import.{ref}")
    assert parsed == hf.InputSource(hf.InputSourceType.IMPORT, import_ref=ref)
193
+
194
+
195
@pytest.fixture
def param_p1() -> Parameter:
    """A fresh ``p1`` parameter for building test schemas."""
    return hf.Parameter("p1")


@pytest.fixture
def param_p2() -> Parameter:
    """A fresh ``p2`` parameter for building test schemas."""
    return hf.Parameter("p2")


@pytest.fixture
def param_p3() -> Parameter:
    """A fresh ``p3`` parameter for building test schemas."""
    return hf.Parameter("p3")


@pytest.fixture
def null_config(tmp_path: Path):
    """Ensure the app configuration is loaded, using a temporary config dir if not."""
    if not hf.is_config_loaded:
        hf.load_config(config_dir=tmp_path)
214
+
215
+
216
@pytest.mark.skip(reason="Need to add e.g. parameters of the workflow to the app data.")
def test_specified_sourceable_elements_subset(
    null_config,
    param_p1: Parameter,
    param_p2: Parameter,
    param_p3: Parameter,
    tmp_path: Path,
):
    """Restricting ``sourceable_elem_iters`` to a subset limits which upstream
    elements feed the downstream task (here only element-iteration 0)."""
    input_p1 = hf.SchemaInput(param_p1, default_value=1001)
    input_p2 = hf.SchemaInput(param_p2, default_value=np.array([2002, 2003]))
    input_p3 = hf.SchemaInput(param_p3)

    # t1 outputs p3; t2 consumes it:
    s1 = hf.TaskSchema("ts1", actions=[], inputs=[input_p1], outputs=[input_p3])
    s2 = hf.TaskSchema("ts2", actions=[], inputs=[input_p2, input_p3])

    # two elements in t1 (one per sequence value):
    t1 = hf.Task(
        schema=s1,
        sequences=[
            hf.ValueSequence("inputs.p1", values=[101, 102], nesting_order=0),
        ],
    )
    # only the first t1 element iteration is sourceable:
    t2 = hf.Task(
        schema=s2,
        inputs=[hf.InputValue(input_p2, 201)],
        sourceable_elem_iters=[0],
        nesting_order={"inputs.p3": 1},
    )

    wkt = hf.WorkflowTemplate(name="w1", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    # only one downstream element, sourced from the first t1 element's output:
    assert (
        wk.tasks[1].num_elements == 1
        and wk.tasks[1].elements[0].input_sources["inputs.p3"] == "element.0.OUTPUT"
    )
251
+
252
+
253
@pytest.mark.skip(reason="Need to add e.g. parameters of the workflow to the app data.")
def test_specified_sourceable_elements_all_available(
    null_config,
    param_p1: Parameter,
    param_p2: Parameter,
    param_p3: Parameter,
    tmp_path: Path,
):
    """Listing all upstream element iterations in ``sourceable_elem_iters``
    sources one downstream element per upstream element."""
    input_p1 = hf.SchemaInput(param_p1, default_value=1001)
    input_p2 = hf.SchemaInput(param_p2, default_value=np.array([2002, 2003]))
    input_p3 = hf.SchemaInput(param_p3)

    # t1 outputs p3; t2 consumes it:
    s1 = hf.TaskSchema("ts1", actions=[], inputs=[input_p1], outputs=[input_p3])
    s2 = hf.TaskSchema("ts2", actions=[], inputs=[input_p2, input_p3])

    # two elements in t1 (one per sequence value):
    t1 = hf.Task(
        schema=s1,
        sequences=[
            hf.ValueSequence("inputs.p1", values=[101, 102], nesting_order=0),
        ],
    )
    # both t1 element iterations are sourceable:
    t2 = hf.Task(
        schema=s2,
        inputs=[hf.InputValue(input_p2, 201)],
        sourceable_elem_iters=[0, 1],
        nesting_order={"inputs.p3": 1},
    )

    wkt = hf.WorkflowTemplate(name="w1", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    # one downstream element per upstream output element:
    assert (
        wk.tasks[1].num_elements == 2
        and wk.tasks[1].elements[0].input_sources["inputs.p3"] == "element.0.OUTPUT"
        and wk.tasks[1].elements[1].input_sources["inputs.p3"] == "element.1.OUTPUT"
    )
289
+
290
+
291
@pytest.mark.skip(reason="Need to add e.g. parameters of the workflow to the app data.")
def test_no_sourceable_elements_so_raise_missing(
    null_config,
    param_p1: Parameter,
    param_p2: Parameter,
    param_p3: Parameter,
    tmp_path: Path,
):
    """An empty ``sourceable_elem_iters`` with no default for the required
    input leaves that input unsatisfiable, raising ``MissingInputs``."""
    input_p1 = hf.SchemaInput(param_p1, default_value=1001)
    input_p2 = hf.SchemaInput(param_p2, default_value=np.array([2002, 2003]))
    # NOTE: p3 has no default, so it must be sourced from t1's output:
    input_p3 = hf.SchemaInput(param_p3)

    s1 = hf.TaskSchema("ts1", actions=[], inputs=[input_p1], outputs=[input_p3])
    s2 = hf.TaskSchema("ts2", actions=[], inputs=[input_p2, input_p3])

    t1 = hf.Task(schema=s1, inputs=[hf.InputValue(input_p1, 101)])
    # empty list: no upstream element iterations may be used as sources:
    t2 = hf.Task(
        schema=s2,
        inputs=[hf.InputValue(input_p2, 201)],
        sourceable_elem_iters=[],
    )

    wkt = hf.WorkflowTemplate(name="w1", tasks=[t1, t2])

    with pytest.raises(MissingInputs):
        _ = hf.Workflow.from_template(wkt, path=tmp_path)
317
+
318
+
319
@pytest.mark.skip(reason="Need to add e.g. parameters of the workflow to the app data.")
def test_no_sourceable_elements_so_default_used(
    null_config,
    param_p1: Parameter,
    param_p2: Parameter,
    param_p3: Parameter,
    tmp_path: Path,
):
    """With an empty ``sourceable_elem_iters``, a schema-level default for the
    input is used instead of an upstream task-output source."""
    input_p1 = hf.SchemaInput(param_p1, default_value=1001)
    input_p2 = hf.SchemaInput(param_p2, default_value=np.array([2002, 2003]))
    # unlike the raise-case test, p3 has a default to fall back on:
    input_p3 = hf.SchemaInput(param_p3, default_value=3001)

    s1 = hf.TaskSchema("ts1", actions=[], inputs=[input_p1], outputs=[input_p3])
    s2 = hf.TaskSchema("ts2", actions=[], inputs=[input_p2, input_p3])

    t1 = hf.Task(schema=s1, inputs=[hf.InputValue(input_p1, 101)])
    # empty list: no upstream element iterations may be used as sources:
    t2 = hf.Task(
        schema=s2,
        inputs=[hf.InputValue(input_p2, 201)],
        sourceable_elem_iters=[],
    )

    wkt = hf.WorkflowTemplate(name="w1", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert wk.tasks[1].elements[0].input_sources["inputs.p3"] == "default"
345
+
346
+
347
def test_equivalent_where_args() -> None:
    """All accepted forms of the ``where`` argument (dict, list of dicts,
    Rule, list of Rules, ElementFilter) normalise to the same source."""
    args: RuleArgs = {"path": "inputs.p1", "condition": {"value.equal_to": 1}}
    sources = [
        hf.InputSource.task(task_ref=0, where=args),
        hf.InputSource.task(task_ref=0, where=[args]),
        hf.InputSource.task(task_ref=0, where=hf.Rule(**args)),
        hf.InputSource.task(task_ref=0, where=[hf.Rule(**args)]),
        hf.InputSource.task(task_ref=0, where=hf.ElementFilter([hf.Rule(**args)])),
    ]
    first = sources[0]
    assert all(src == first for src in sources[1:])
355
+
356
+
357
@pytest.mark.parametrize("store", ["json", "zarr"])
def test_input_source_where(null_config, tmp_path: Path, store: str):
    """A task-source ``where`` rule on a plain input filters which upstream
    elements are used: only the t1 element with ``p1 == 2`` feeds t2."""
    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
        outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
        actions=[
            hf.Action(
                commands=[
                    hf.Command(
                        command="Write-Output (<<parameter:p1>> + 100)",
                        stdout="<<parameter:p2>>",
                    )
                ]
            )
        ],
    )
    s2 = hf.TaskSchema(
        objective="t2",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
    )
    tasks = [
        # two t1 elements, p1 = 1 and 2:
        hf.Task(
            schema=s1,
            sequences=[
                hf.ValueSequence(path="inputs.p1", values=[1, 2], nesting_order=0)
            ],
        ),
        # t2 sources p2 only from t1 elements where p1 == 2:
        hf.Task(
            schema=s2,
            nesting_order={"inputs.p2": 0},
            input_sources={
                "p2": [
                    hf.InputSource.task(
                        task_ref=0,
                        where=hf.Rule(path="inputs.p1", condition={"value.equal_to": 2}),
                    )
                ]
            },
        ),
    ]
    wk = hf.Workflow.from_template_data(
        tasks=tasks,
        path=tmp_path,
        template_name="wk0",
        overwrite=True,
        store=store,
    )
    # only the second t1 element (index 1) satisfies the rule:
    assert wk.tasks.t2.num_elements == 1
    assert (
        wk.tasks.t2.elements[0].get_data_idx("inputs.p2")["inputs.p2"]
        == wk.tasks.t1.elements[1].get_data_idx("outputs.p2")["outputs.p2"]
    )
410
+
411
+
412
@pytest.mark.parametrize("store", ["json", "zarr"])
def test_input_source_where_parameter_value_class_sub_parameter(
    null_config, tmp_path: Path, store: str
):
    """A ``where`` rule may address a sub-path (``inputs.p1.a``) of a
    parameter-value-class input; only the matching t1 element feeds t2."""
    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
        outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
        actions=[
            hf.Action(
                commands=[
                    hf.Command(
                        command="Write-Output (<<parameter:p1>> + 100)",
                        stdout="<<parameter:p2>>",
                    )
                ]
            )
        ],
    )
    s2 = hf.TaskSchema(
        objective="t2",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
    )
    tasks = [
        # two t1 elements with parameter-class values P1(a=1) and P1(a=2):
        hf.Task(
            schema=s1,
            sequences=[
                hf.ValueSequence(
                    path="inputs.p1", values=[P1(a=1), P1(a=2)], nesting_order=0
                )
            ],
        ),
        # rule targets the sub-parameter attribute "a":
        hf.Task(
            schema=s2,
            nesting_order={"inputs.p2": 0},
            input_sources={
                "p2": [
                    hf.InputSource.task(
                        task_ref=0,
                        where=hf.Rule(
                            path="inputs.p1.a", condition={"value.equal_to": 2}
                        ),
                    )
                ]
            },
        ),
    ]
    wk = hf.Workflow.from_template_data(
        tasks=tasks,
        path=tmp_path,
        template_name="wk0",
        overwrite=True,
        store=store,
    )
    # only the second t1 element (a == 2) satisfies the rule:
    assert wk.tasks.t2.num_elements == 1
    assert (
        wk.tasks.t2.elements[0].get_data_idx("inputs.p2")["inputs.p2"]
        == wk.tasks.t1.elements[1].get_data_idx("outputs.p2")["outputs.p2"]
    )
471
+
472
+
473
@pytest.mark.parametrize("store", ["json", "zarr"])
def test_input_source_where_parameter_value_class_sub_parameter_property(
    null_config, tmp_path: Path, store: str
):
    """A ``where`` rule may address a computed property of a parameter value
    class (``inputs.p1c.twice_a``); only the matching t1 element feeds t2."""
    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"))],
        outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
        actions=[
            hf.Action(
                commands=[
                    hf.Command(
                        command="Write-Output (<<parameter:p1c>> + 100)",
                        stdout="<<parameter:p2>>",
                    )
                ]
            )
        ],
    )
    s2 = hf.TaskSchema(
        objective="t2",
        inputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
    )
    tasks = [
        # two t1 elements with parameter-class values P1(a=1) and P1(a=2):
        hf.Task(
            schema=s1,
            sequences=[
                hf.ValueSequence(
                    path="inputs.p1c", values=[P1(a=1), P1(a=2)], nesting_order=0
                )
            ],
        ),
        # rule targets the "twice_a" property (== 4 only when a == 2):
        hf.Task(
            schema=s2,
            nesting_order={"inputs.p2": 0},
            input_sources={
                "p2": [
                    hf.InputSource.task(
                        task_ref=0,
                        where=hf.Rule(
                            path="inputs.p1c.twice_a", condition={"value.equal_to": 4}
                        ),
                    )
                ]
            },
        ),
    ]
    wk = hf.Workflow.from_template_data(
        tasks=tasks,
        path=tmp_path,
        template_name="wk0",
        overwrite=True,
        store=store,
    )
    # only the second t1 element satisfies the property rule:
    assert wk.tasks.t2.num_elements == 1
    assert (
        wk.tasks.t2.elements[0].get_data_idx("inputs.p2")["inputs.p2"]
        == wk.tasks.t1.elements[1].get_data_idx("outputs.p2")["outputs.p2"]
    )
532
+
533
+
534
def test_sub_parameter_task_input_source_excluded_when_root_parameter_is_task_output_source(
    null_config, tmp_path: Path
):
    """When the root parameter (``p1c``) is available as a task-output source,
    a sub-parameter task-input source (``p1c.a``) must not also be selected."""
    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter="p1c")],
        outputs=[hf.SchemaOutput(parameter="p1c")],
        actions=[
            hf.Action(
                commands=[
                    hf.Command(
                        command="Write-Output (<<parameter:p1c>> + 100)",
                        stdout="<<parameter:p1c.CLI_parse()>>",
                    )
                ],
            ),
        ],
        parameter_class_modules=["hpcflow.sdk.core.test_utils"],
    )
    s2 = hf.TaskSchema(
        objective="t2",
        inputs=[
            hf.SchemaInput(parameter=hf.Parameter("p1c")),
            hf.SchemaInput(parameter=hf.Parameter("p2")),
        ],
    )
    # t1 sets p1c plus a sub-parameter override on its "a" attribute:
    t1 = hf.Task(
        schema=s1,
        inputs=[
            hf.InputValue("p1c", value=P1(a=10, sub_param=P1_sub(e=5))),
            hf.InputValue("p1c", path="a", value=20),
        ],
    )
    t2 = hf.Task(
        schema=s2,
        inputs=[hf.InputValue("p2", value=201)],
    )
    wk = hf.Workflow.from_template_data(
        tasks=[t1, t2],
        template_name="w1",
        path=tmp_path,
    )
    # "p1c.a" source should not be included, because it would be a task-input source, which
    # should be overridden by the "p1c" task-output source:
    assert wk.tasks.t2.template.element_sets[0].input_sources == {
        "p1c": [
            hf.InputSource.task(task_ref=0, task_source_type="output", element_iters=[0])
        ],
        "p2": [hf.InputSource.local()],
    }
584
+
585
+
586
def test_sub_parameter_task_input_source_included_when_root_parameter_is_task_input_source(
    null_config, tmp_path: Path
):
    """When the root parameter (``p1c``) is only available as a task-INPUT
    source (t1 has no ``p1c`` output), the sub-parameter (``p1c.a``) task-input
    source from the same task IS included alongside the root-parameter source."""
    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter="p1c")],
        # no outputs: p1c cannot be a task-output source downstream
        actions=[
            hf.Action(
                commands=[
                    hf.Command(
                        command="Write-Output (<<parameter:p1c>> + 100)",
                    )
                ],
            ),
        ],
        parameter_class_modules=["hpcflow.sdk.core.test_utils"],
    )
    s2 = hf.TaskSchema(
        objective="t2",
        inputs=[
            hf.SchemaInput(parameter=hf.Parameter("p1c")),
            hf.SchemaInput(parameter=hf.Parameter("p2")),
        ],
    )
    t1 = hf.Task(
        schema=s1,
        inputs=[
            hf.InputValue("p1c", value=P1(a=10, sub_param=P1_sub(e=5))),
            # sub-parameter value; yields a distinct "p1c.a" source below:
            hf.InputValue("p1c", path="a", value=20),
        ],
    )
    t2 = hf.Task(
        schema=s2,
        inputs=[hf.InputValue("p2", value=201)],
    )
    wk = hf.Workflow.from_template_data(
        tasks=[t1, t2],
        template_name="w1",
        path=tmp_path,
    )
    # both the root parameter and its sub-parameter are task-input sources:
    assert wk.tasks.t2.template.element_sets[0].input_sources == {
        "p1c": [
            hf.InputSource.task(task_ref=0, task_source_type="input", element_iters=[0])
        ],
        "p1c.a": [
            hf.InputSource.task(task_ref=0, task_source_type="input", element_iters=[0])
        ],
        "p2": [hf.InputSource.local()],
    }
635
+
636
+
637
def test_sub_parameter_task_input_source_allowed_when_root_parameter_is_task_output_source(
    null_config, tmp_path: Path
):
    """Check we can override the default behaviour and specify that the sub-parameter
    task-input source should be used despite the root-parameter being a task-output
    source."""
    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter="p1c")],
        outputs=[hf.SchemaOutput(parameter="p1c")],
        actions=[
            hf.Action(
                commands=[
                    hf.Command(
                        command="Write-Output (<<parameter:p1c>> + 100)",
                        stdout="<<parameter:p1c.CLI_parse()>>",
                    )
                ],
            ),
        ],
        parameter_class_modules=["hpcflow.sdk.core.test_utils"],
    )
    s2 = hf.TaskSchema(
        objective="t2",
        inputs=[
            hf.SchemaInput(parameter=hf.Parameter("p1c")),
            hf.SchemaInput(parameter=hf.Parameter("p2")),
        ],
    )
    t1 = hf.Task(
        schema=s1,
        inputs=[
            hf.InputValue("p1c", value=P1(a=10, sub_param=P1_sub(e=5))),
            hf.InputValue("p1c", path="a", value=20),
        ],
    )
    t2 = hf.Task(
        schema=s2,
        inputs=[hf.InputValue("p2", value=201)],
        # explicitly request the sub-parameter task-INPUT source, overriding the
        # default exclusion when the root parameter is a task-output source:
        input_sources={
            "p1c.a": [hf.InputSource.task(task_ref=0, task_source_type="input")]
        },
    )
    wk = hf.Workflow.from_template_data(
        tasks=[t1, t2],
        template_name="w1",
        path=tmp_path,
    )
    # root parameter still sourced from the task output; sub-parameter from the input:
    assert wk.tasks.t2.template.element_sets[0].input_sources == {
        "p1c": [
            hf.InputSource.task(task_ref=0, task_source_type="output", element_iters=[0])
        ],
        "p1c.a": [
            hf.InputSource.task(task_ref=0, task_source_type="input", element_iters=[0])
        ],
        "p2": [hf.InputSource.local()],
    }
694
+
695
+
696
def test_raise_unavailable_input_source(null_config, tmp_path: Path):
    """Requesting a local input source for a task that defines no local value
    for that input raises ``UnavailableInputSource``."""
    schema = hf.task_schemas.test_t1_ps
    upstream = hf.Task(schema=schema, inputs={"p1": 1})
    # second task demands a *local* p1 source, yet supplies no local p1 value:
    downstream = hf.Task(
        schema=schema,
        input_sources={"p1": [hf.InputSource.local()]},
    )
    template = hf.WorkflowTemplate(name="test", tasks=[upstream, downstream])
    with pytest.raises(UnavailableInputSource):
        hf.Workflow.from_template(template, path=tmp_path)
705
+
706
+
707
def test_input_source_specify_element_iters(null_config, tmp_path: Path):
    """Explicit ``element_iters`` on a task input source selects just that
    subset of the upstream elements."""
    schema = hf.task_schemas.test_t1_ps
    src_task = hf.Task(
        schema=schema,
        sequences=[
            hf.ValueSequence(
                path="inputs.p1",
                values=[{"a": 1}, {"a": 2}, {"a": 3}],
            ),
        ],
    )
    dst_task = hf.Task(
        schema=schema,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 2]
                )
            ]
        },
    )
    wk = hf.Workflow.from_template(
        hf.WorkflowTemplate(name="test", tasks=[src_task, dst_task]),
        path=tmp_path,
    )
    # only iterations 0 and 2 are sourced:
    assert len(wk.tasks[1].elements) == 2
    assert [inp.value["a"] for inp in wk.tasks[1].inputs.p1] == [1, 3]
731
+
732
+
733
def test_input_source_raise_on_inapplicable_specified_element_iters(
    null_config, tmp_path: Path
):
    """Specifying an element iteration that does not exist upstream raises
    ``InapplicableInputSourceElementIters``."""
    schema = hf.task_schemas.test_t1_ps
    src_task = hf.Task(
        schema=schema,
        sequences=[
            hf.ValueSequence(
                path="inputs.p1",
                values=[{"a": 1}, {"a": 2}, {"a": 3}],
            ),
        ],
    )
    # iteration 4 does not exist; the sequence produces iterations 0-2 only:
    dst_task = hf.Task(
        schema=schema,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 4]
                )
            ]
        },
    )
    template = hf.WorkflowTemplate(name="test", tasks=[src_task, dst_task])
    with pytest.raises(InapplicableInputSourceElementIters):
        hf.Workflow.from_template(template, path=tmp_path)
758
+
759
+
760
def test_input_source_specify_element_iters_and_where(null_config, tmp_path: Path):
    """Test the where argument further filters the element_iters argument."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        sequences=[
            hf.ValueSequence(
                path="inputs.p1",
                values=[{"a": 1}, {"a": 2}, {"a": 3}],
            ),
        ],
    )
    t2 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0,
                    task_source_type="input",
                    # pre-select iterations 0 and 2, then filter with `where`:
                    element_iters=[0, 2],
                    where=hf.Rule(path="inputs.p1.a", condition={"value.equal_to": 3}),
                )
            ]
        },
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)
    # of iterations [0, 2], only iteration 2 satisfies a == 3:
    assert len(wk.tasks[1].elements) == 1
    assert [i.value["a"] for i in wk.tasks[1].inputs.p1] == [3]
788
+
789
+
790
def test_element_iters_order_with_allow_non_coincident_task_sources_False(
    null_config, tmp_path: Path
):
    """With a single task source, a custom ``element_iters`` ordering is kept
    even when ``allow_non_coincident_task_sources`` is False."""
    producer = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        sequences=[
            hf.ValueSequence(path="inputs.p1", values=[11, 12, 13]),
        ],
    )
    consumer = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[2, 0, 1]
                )
            ],
        },
        allow_non_coincident_task_sources=False,
    )
    template = hf.WorkflowTemplate(name="test", tasks=[producer, consumer])
    wk = hf.Workflow.from_template(template, path=tmp_path)

    assert len(wk.tasks[1].elements) == 3
    # values appear in the requested iteration order:
    assert [inp.value for inp in wk.tasks[1].inputs.p1] == [13, 11, 12]
818
+
819
+
820
def test_element_iters_order_with_allow_non_coincident_task_sources_True(
    null_config, tmp_path: Path
):
    """Custom ``element_iters`` ordering is preserved with
    ``allow_non_coincident_task_sources=True`` — same expectation as the False
    variant, since there is only a single source here."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        sequences=[
            hf.ValueSequence(
                path="inputs.p1",
                values=[11, 12, 13],
            ),
        ],
    )
    t2 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[2, 0, 1]
                )
            ],
        },
        allow_non_coincident_task_sources=True,
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 3
    # values follow the requested iteration order:
    assert [i.value for i in wk.tasks[1].inputs.p1] == [13, 11, 12]
848
+
849
+
850
def test_element_iters_order_with_allow_non_coincident_task_sources_True_multiple_sources(
    null_config, tmp_path: Path
):
    """Test no-reordering of specified element iterations of sources from the same task."""
    (s1,) = make_schemas(({"p1": None, "p2": None}, ("p3",), "t1"))

    t1 = hf.Task(
        schema=s1,
        sequences=[
            hf.ValueSequence(
                path="inputs.p1",
                values=[11, 12, 13],
            ),
            hf.ValueSequence(
                path="inputs.p2",
                values=[21, 22, 23],
            ),
        ],
    )
    t2 = hf.Task(
        schema=s1,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ],
            # p2's iterations are deliberately reversed relative to p1's:
            "p2": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[1, 0]
                )
            ],
        },
        allow_non_coincident_task_sources=True,
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 2
    # the reversed p2 ordering is kept, so element inputs are non-coincident:
    assert [i.value for i in wk.tasks[1].inputs.p1] == [11, 12]
    assert [i.value for i in wk.tasks[1].inputs.p2] == [22, 21]
891
+
892
+
893
def test_element_iters_order_with_allow_non_coincident_task_sources_False_multiple_sources(
    null_config, tmp_path: Path
):
    """Test reordering of specified element iterations of sources from the same task."""
    (s1,) = make_schemas(({"p1": None, "p2": None}, ("p3",), "t1"))

    t1 = hf.Task(
        schema=s1,
        sequences=[
            hf.ValueSequence(
                path="inputs.p1",
                values=[11, 12, 13],
            ),
            hf.ValueSequence(
                path="inputs.p2",
                values=[21, 22, 23],
            ),
        ],
    )
    t2 = hf.Task(
        schema=s1,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ],
            # reversed relative to p1; expected to be re-ordered to coincide:
            "p2": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[1, 0]
                )
            ],
        },
        allow_non_coincident_task_sources=False,
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 2
    # both sources end up in the same (coincident) iteration order:
    assert [i.value for i in wk.tasks[1].inputs.p1] == [11, 12]
    assert [i.value for i in wk.tasks[1].inputs.p2] == [21, 22]
934
+
935
+
936
def test_not_allow_non_coincident_task_sources(null_config, tmp_path: Path):
    """Test only one coincident element from the two input sources"""
    (s1,) = make_schemas(({"p1": None, "p2": None}, ("p3",), "t1"))
    t1 = hf.Task(
        schema=s1,
        inputs={"p1": 1},
        sequences=[
            hf.ValueSequence(path="inputs.p2", values=[21, 22, 23]),
        ],
    )
    t2 = hf.Task(
        schema=s1,
        input_sources={
            # iteration sets [0, 1] and [1, 2] overlap only at iteration 1:
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ],
            "p2": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[1, 2]
                )
            ],
        },
        allow_non_coincident_task_sources=False,
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    # only the coincident iteration (1) yields an element:
    assert len(wk.tasks[1].elements) == 1
    assert [i.value for i in wk.tasks[1].inputs.p2] == [22]
967
+
968
+
969
def test_allow_non_coincident_task_sources(null_config, tmp_path: Path):
    """Test can combine inputs from non-coincident element iterations of the same task."""
    (s1,) = make_schemas(({"p1": None, "p2": None}, ("p3",), "t1"))
    t1 = hf.Task(
        schema=s1,
        sequences=[
            hf.ValueSequence(
                path="inputs.p1",
                values=[11, 12, 13],
            ),
            hf.ValueSequence(
                path="inputs.p2",
                values=[21, 22, 23],
            ),
        ],
    )
    t2 = hf.Task(
        schema=s1,
        input_sources={
            # offset iteration sets: p1 from [0, 1], p2 from [1, 2]:
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ],
            "p2": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[1, 2]
                )
            ],
        },
        allow_non_coincident_task_sources=True,
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    # sources are zipped positionally rather than intersected:
    assert len(wk.tasks[1].elements) == 2
    assert [i.value for i in wk.tasks[1].inputs.p1] == [11, 12]
    assert [i.value for i in wk.tasks[1].inputs.p2] == [22, 23]
1007
+
1008
+
1009
def test_input_source_task_input_from_multiple_element_sets_with_param_sequence(
    null_config, tmp_path: Path
):
    """Default task-input sourcing spans all upstream element sets, including
    one whose elements come from a value sequence."""
    schema = hf.task_schemas.test_t1_ps
    first_set = hf.ElementSet(inputs={"p1": {"a": 1}})
    second_set = hf.ElementSet(
        sequences=[
            hf.ValueSequence(path="inputs.p1", values=[{"a": 2}, {"a": 3}]),
        ],
    )
    upstream = hf.Task(schema=schema, element_sets=[first_set, second_set])
    downstream = hf.Task(schema=schema)
    wk = hf.Workflow.from_template(
        hf.WorkflowTemplate(name="test", tasks=[upstream, downstream]),
        path=tmp_path,
    )
    # one downstream element per upstream element, across both element sets:
    assert len(wk.tasks[1].elements) == 3
    assert [inp.value["a"] for inp in wk.tasks[1].inputs.p1] == [1, 2, 3]
1031
+
1032
+
1033
def test_raise_no_coincident_input_sources(null_config, tmp_path: Path):
    """With ``allow_non_coincident_task_sources=False``, sources whose element
    iterations are disjoint raise ``NoCoincidentInputSources``."""
    (s1,) = make_schemas(({"p1": None, "p2": None}, ("p3",), "t1"))
    producer = hf.Task(
        schema=s1,
        inputs={"p1": 100},
        sequences=[
            hf.ValueSequence.from_range(path="inputs.p2", start=0, stop=4),
        ],
    )
    # iteration sets [0, 1] and [2, 3] share no common iteration:
    consumer = hf.Task(
        schema=s1,
        allow_non_coincident_task_sources=False,
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ],
            "p2": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[2, 3]
                )
            ],
        },
    )
    template = hf.WorkflowTemplate(name="test", tasks=[producer, consumer])
    with pytest.raises(NoCoincidentInputSources):
        hf.Workflow.from_template(template, path=tmp_path)
1061
+
1062
+
1063
def test_input_source_task_input_from_multiple_element_sets_with_sub_param_sequence(
    null_config, tmp_path: Path
):
    """Task-input sourcing spans multiple upstream element sets when one set
    varies only a sub-parameter (``p1.a``) via a sequence."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        element_sets=[
            hf.ElementSet(inputs={"p1": {"a": 1}}),
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                # sub-parameter sequence overrides `a` per element:
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[2, 3],
                    ),
                ],
            ),
        ],
    )
    t2 = hf.Task(schema=hf.task_schemas.test_t1_ps)
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)
    # three upstream elements in total (1 + 2):
    assert len(wk.tasks[1].elements) == 3
    assert [i.value["a"] for i in wk.tasks[1].inputs.p1] == [1, 2, 3]
1086
+
1087
+
1088
def test_input_source_task_input_from_multiple_element_sets_with_sub_param_sequence_manual_sources_root_param(
    null_config, tmp_path: Path
):
    """A manual source on the root parameter with explicit ``element_iters``
    limits sourcing to those upstream iterations only."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        element_sets=[
            hf.ElementSet(inputs={"p1": {"a": 1}}),
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[2, 3],
                    ),
                ],
            ),
        ],
    )
    t2 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        # restrict to the first two of the three upstream iterations:
        input_sources={
            "p1": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[0, 1]
                )
            ]
        },
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)
    assert len(wk.tasks[1].elements) == 2
    assert [i.value["a"] for i in wk.tasks[1].inputs.p1] == [1, 2]
1120
+
1121
+
1122
def test_input_source_inputs_from_multiple_element_sets_with_sub_parameter_sequences_complex(
    null_config, tmp_path: Path
):
    """Sourcing across three upstream element sets that vary different
    sub-parameters (``p1.a``, ``p1.c``, ``p1.b``) merges per-element values
    correctly in the downstream task."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        element_sets=[
            # set 0: one element, a overridden to 2
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[2],
                    ),
                ],
            ),
            # set 1: two elements varying sub-parameter c
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.c",
                        values=[2, 3],
                    ),
                ],
            ),
            # set 2: two elements varying both b and a in lockstep
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.b",
                        values=[22, 33],
                    ),
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[4, 5],
                    ),
                ],
            ),
        ],
    )
    t2 = hf.Task(schema=hf.task_schemas.test_t1_ps)
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 5
    assert [i.value for i in wk.tasks[1].inputs.p1] == [
        {"a": 2},
        {"a": 1, "c": 2},
        {"a": 1, "c": 3},
        {"a": 4, "b": 22},
        {"a": 5, "b": 33},
    ]
1173
+
1174
+
1175
def test_input_source_inputs_from_multiple_element_sets_with_sub_parameter_sequences_complex_reordered_iters(
    null_config, tmp_path: Path
):
    """As the non-reordered variant, but with the ``p1.c`` source's element
    iterations reversed; the custom order must survive in the merged values."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        element_sets=[
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[2],
                    ),
                ],
            ),
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.c",
                        values=[2, 3],
                    ),
                ],
            ),
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.b",
                        values=[22, 33],
                    ),
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[4, 5],
                    ),
                ],
            ),
        ],
    )
    t2 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        input_sources={
            # reordered p1.c elem iters:
            "p1.c": [
                hf.InputSource.task(
                    task_ref=0, task_source_type="input", element_iters=[2, 1]
                )
            ]
        },
        allow_non_coincident_task_sources=True,  # to maintain custom ordering
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 5
    # "c" values appear reversed (3 then 2) relative to the upstream sequence:
    assert [i.value for i in wk.tasks[1].inputs.p1] == [
        {"a": 2},
        {"a": 1, "c": 3},
        {"a": 1, "c": 2},
        {"a": 4, "b": 22},
        {"a": 5, "b": 33},
    ]
1237
+
1238
+
1239
def test_input_source_inputs_from_multiple_element_sets_with_sub_parameter_sequences_mixed_padding(
    null_config, tmp_path: Path
):
    """A sub-parameter source with a higher nesting order than its root
    parameter is combined multiplicatively ("applied") rather than padded."""
    t1 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        element_sets=[
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
            ),
            hf.ElementSet(
                inputs={"p1": {"a": 1}},
                nesting_order={"inputs.p1.a": 0, "inputs.p1.b": 1},
                sequences=[
                    hf.ValueSequence(
                        path="inputs.p1.a",
                        values=[4, 5],
                    ),
                    hf.ValueSequence(
                        path="inputs.p1.b",
                        values=[22],
                    ),
                ],
            ),
        ],
    )
    t2 = hf.Task(
        schema=hf.task_schemas.test_t1_ps,
        # `p1.b` has a different nesting order to the root param `p1`, so it will not be
        # "padded" to have the same multiplicity as `p1`/`p1.a`. With a higher nesting
        # order, it will be "applied" to all other elements, meaning we'll gain a value
        # for `p1.b` for all elements (including from the first element set, which didn't
        # have a value for `p1.b`):
        nesting_order={
            "inputs.p1": 0,
            "inputs.p1.a": 0,
            "inputs.p1.b": 1,
        },
    )
    wkt = hf.WorkflowTemplate(name="test", tasks=[t1, t2])
    wk = hf.Workflow.from_template(wkt, path=tmp_path)

    assert len(wk.tasks[1].elements) == 4
    assert [i.value for i in wk.tasks[1].inputs.p1] == [
        {"a": 1, "b": 22},
        {"a": 1, "b": 22},
        {"a": 5, "b": 22},
        {"a": 5, "b": 22},
    ]
1288
+
1289
+
1290
def test_input_source_task_ref_equivalence(null_config, tmp_path: Path):
    """All supported YAML spellings of a task input source — single dict vs.
    list of dicts vs. string form, referring to the source task by insert ID
    or by unique name — resolve to the same source (the input of task 0)."""
    # NOTE(review): YAML indentation below reconstructed from a whitespace-
    # mangled original — verify against the upstream file.
    yml = dedent(
        """\
        name: test
        template_components:
          task_schemas:
            - objective: t1
              inputs:
                - parameter: p1
        tasks:
          - schema: t1
            inputs:
              p1: 100 # all subsequent tasks will source from this input

          - schema: t1 # t1_2
            input_sources: # single source dict; by task insert ID
              p1:
                source_type: task
                task_source_type: input
                task_ref: 0

          - schema: t1 # t1_3
            input_sources: # as a list of dicts; by task insert ID
              p1:
                - source_type: task
                  task_source_type: input
                  task_ref: 0

          - schema: t1 # t1_4
            input_sources: # as a single source dict; by task unique name
              p1:
                source_type: task
                task_source_type: input
                task_ref: t1_1

          - schema: t1 # t1_5
            input_sources: # as a list of dicts; by task unique name
              p1:
                - source_type: task
                  task_source_type: input
                  task_ref: t1_1

          - schema: t1 # t1_6
            input_sources: # single source string; by task insert ID
              p1: task.0.input

          - schema: t1 # t1_7
            input_sources: # as a list of strings; by task insert ID
              p1:
                - task.0.input

          - schema: t1 # t1_8
            input_sources: # single source string; by task unique name
              p1: task.t1_1.input

          - schema: t1 # t1_9
            input_sources: # as a list of strings; by task unique name
              p1:
                - task.t1_1.input

        """
    )
    wk = hf.Workflow.from_YAML_string(YAML_str=yml, path=tmp_path)

    # every task after the first must reference the first task (insert ID 0):
    all_sources = (task.elements[0].input_sources["inputs.p1"] for task in wk.tasks[1:])
    all_task_refs = (src.task_ref for src in all_sources)
    assert all(task_ref == 0 for task_ref in all_task_refs)
1357
+
1358
+
1359
def test_inp_src_task_output_precedence(null_config, tmp_path: Path):
    # test a task output source takes precedence over a task input source, even if the
    # task input source is from a closer task.

    s1, s2 = make_schemas(
        ({"p0": None}, ("p1",), "t1"),
        ({"p1": None, "p2": None}, ("p3",), "t2"),
    )
    # t3 consumes p1 directly and p3 as a group:
    s3 = hf.TaskSchema(
        "t3",
        inputs=[hf.SchemaInput("p1"), hf.SchemaInput("p3", group="my_group")],
        outputs=[hf.SchemaOutput("p4")],
        actions=[
            hf.Action(
                commands=[
                    hf.Command(
                        "echo $(( <<sum(parameter:p3)>> + <<parameter:p1>> ))",
                        stdout="<<parameter:p4>>",
                    )
                ]
            ),
        ],
    )

    wk = hf.Workflow.from_template_data(
        template_name="test_inp_src",
        tasks=[
            hf.Task(s1, inputs={"p0": 1}),
            hf.Task(
                s2,
                sequences=[hf.ValueSequence("inputs.p2", [0, 1, 2])],
                groups=[hf.ElementGroup(name="my_group")],
            ),
            hf.Task(s3),
        ],
        path=tmp_path,
    )

    task = wk.tasks.t3
    task_template = task.template
    inp_sources = task_template.get_available_task_input_sources(
        element_set=task_template.element_sets[0], source_tasks=list(task.upstream_tasks)
    )
    # t1's output source is listed before t2's input source:
    assert inp_sources["p1"] == [
        hf.InputSource.task(task_ref=0, task_source_type="output", element_iters=[0]),
        hf.InputSource.task(
            task_ref=1, task_source_type="input", element_iters=[1, 2, 3]
        ),
    ]
    # p1 source from t1 output should take precedence, rather than t2 input (t2 input has
    # multiple elements, so interferes with grouping on the other parameter, p3)
1410
+
1411
+
1412
def test_task_type_sources_output_input_swapped_on_local_inputs_defined(
    null_config, tmp_path: Path
):
    # NOTE(review): the test name mentions local inputs being defined, but t2
    # below defines none (cf. the sibling "not_swapped" test, which does) — the
    # two names may be swapped; confirm against intent.

    s1, s2, s3 = make_schemas(
        ({"p1": None}, ("p2",), "t1"),
        ({"p2": None}, ("p3",), "t2"),
        ({"p2": None}, ("p4",), "t3"),
    )

    # t2's input sources for p2 do not include any local sources, so the task-output source
    # from t1 should be preferred.

    wk = hf.Workflow.from_template_data(
        template_name="test_inp_src",
        tasks=[
            hf.Task(s1, inputs={"p1": 100}),
            hf.Task(s2),
            hf.Task(s3),
        ],
        path=tmp_path,
    )

    task = wk.tasks.t3
    task_template = task.template
    inp_sources = task_template.get_available_task_input_sources(
        element_set=task_template.element_sets[0], source_tasks=list(task.upstream_tasks)
    )
    # t1's output source precedes t2's input source; a default source is last:
    assert inp_sources["p2"] == [
        hf.InputSource.task(task_ref=0, task_source_type="output", element_iters=[0]),
        hf.InputSource.task(task_ref=1, task_source_type="input", element_iters=[1]),
        hf.InputSource.default(),
    ]
1445
+
1446
+
1447
+ def test_task_type_sources_output_input_not_swapped_on_no_local_inputs_defined(
1448
+ null_config, tmp_path
1449
+ ):
1450
+ s1, s2, s3 = make_schemas(
1451
+ ({"p1": None}, ("p2",), "t1"),
1452
+ ({"p2": None}, ("p3",), "t2"),
1453
+ ({"p2": None}, ("p4",), "t3"),
1454
+ )
1455
+
1456
+ # now include a local source in t2, which should switch p2's input source precedence in t3
1457
+ # such that the task-input source from t2 is preferred over the task-output source from
1458
+ # t1
1459
+
1460
+ wk = hf.Workflow.from_template_data(
1461
+ template_name="test_inp_src",
1462
+ tasks=[
1463
+ hf.Task(s1, inputs={"p1": 100}),
1464
+ hf.Task(s2, inputs={"p2": 200}),
1465
+ hf.Task(s3),
1466
+ ],
1467
+ path=tmp_path,
1468
+ )
1469
+
1470
+ task = wk.tasks.t3
1471
+ task_template = task.template
1472
+ inp_sources = task_template.get_available_task_input_sources(
1473
+ element_set=task_template.element_sets[0], source_tasks=list(task.upstream_tasks)
1474
+ )
1475
+ assert inp_sources["p2"] == [
1476
+ hf.InputSource.task(task_ref=1, task_source_type="input", element_iters=[1]),
1477
+ hf.InputSource.task(task_ref=0, task_source_type="output", element_iters=[0]),
1478
+ hf.InputSource.default(),
1479
+ ]