hpcflow 0.1.15__py3-none-any.whl → 0.2.0a271__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (275)
  1. hpcflow/__init__.py +2 -11
  2. hpcflow/__pyinstaller/__init__.py +5 -0
  3. hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
  4. hpcflow/_version.py +1 -1
  5. hpcflow/app.py +43 -0
  6. hpcflow/cli.py +2 -461
  7. hpcflow/data/demo_data_manifest/__init__.py +3 -0
  8. hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
  9. hpcflow/data/jinja_templates/test/test_template.txt +8 -0
  10. hpcflow/data/programs/hello_world/README.md +1 -0
  11. hpcflow/data/programs/hello_world/hello_world.c +87 -0
  12. hpcflow/data/programs/hello_world/linux/hello_world +0 -0
  13. hpcflow/data/programs/hello_world/macos/hello_world +0 -0
  14. hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
  15. hpcflow/data/scripts/__init__.py +1 -0
  16. hpcflow/data/scripts/bad_script.py +2 -0
  17. hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
  18. hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
  19. hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
  20. hpcflow/data/scripts/do_nothing.py +2 -0
  21. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  22. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  23. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  24. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  25. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  26. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  27. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  28. hpcflow/data/scripts/generate_t1_file_01.py +7 -0
  29. hpcflow/data/scripts/import_future_script.py +7 -0
  30. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  31. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  32. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  33. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  34. hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
  35. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  36. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  37. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  38. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  39. hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
  40. hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
  41. hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
  42. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  43. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  44. hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
  45. hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
  46. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  47. hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
  48. hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
  49. hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
  50. hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
  51. hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
  52. hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
  53. hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
  54. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  55. hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
  56. hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
  57. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  58. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  59. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  60. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  61. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  62. hpcflow/data/scripts/parse_t1_file_01.py +4 -0
  63. hpcflow/data/scripts/script_exit_test.py +5 -0
  64. hpcflow/data/template_components/__init__.py +1 -0
  65. hpcflow/data/template_components/command_files.yaml +26 -0
  66. hpcflow/data/template_components/environments.yaml +13 -0
  67. hpcflow/data/template_components/parameters.yaml +14 -0
  68. hpcflow/data/template_components/task_schemas.yaml +139 -0
  69. hpcflow/data/workflows/workflow_1.yaml +5 -0
  70. hpcflow/examples.ipynb +1037 -0
  71. hpcflow/sdk/__init__.py +149 -0
  72. hpcflow/sdk/app.py +4266 -0
  73. hpcflow/sdk/cli.py +1479 -0
  74. hpcflow/sdk/cli_common.py +385 -0
  75. hpcflow/sdk/config/__init__.py +5 -0
  76. hpcflow/sdk/config/callbacks.py +246 -0
  77. hpcflow/sdk/config/cli.py +388 -0
  78. hpcflow/sdk/config/config.py +1410 -0
  79. hpcflow/sdk/config/config_file.py +501 -0
  80. hpcflow/sdk/config/errors.py +272 -0
  81. hpcflow/sdk/config/types.py +150 -0
  82. hpcflow/sdk/core/__init__.py +38 -0
  83. hpcflow/sdk/core/actions.py +3857 -0
  84. hpcflow/sdk/core/app_aware.py +25 -0
  85. hpcflow/sdk/core/cache.py +224 -0
  86. hpcflow/sdk/core/command_files.py +814 -0
  87. hpcflow/sdk/core/commands.py +424 -0
  88. hpcflow/sdk/core/element.py +2071 -0
  89. hpcflow/sdk/core/enums.py +221 -0
  90. hpcflow/sdk/core/environment.py +256 -0
  91. hpcflow/sdk/core/errors.py +1043 -0
  92. hpcflow/sdk/core/execute.py +207 -0
  93. hpcflow/sdk/core/json_like.py +809 -0
  94. hpcflow/sdk/core/loop.py +1320 -0
  95. hpcflow/sdk/core/loop_cache.py +282 -0
  96. hpcflow/sdk/core/object_list.py +933 -0
  97. hpcflow/sdk/core/parameters.py +3371 -0
  98. hpcflow/sdk/core/rule.py +196 -0
  99. hpcflow/sdk/core/run_dir_files.py +57 -0
  100. hpcflow/sdk/core/skip_reason.py +7 -0
  101. hpcflow/sdk/core/task.py +3792 -0
  102. hpcflow/sdk/core/task_schema.py +993 -0
  103. hpcflow/sdk/core/test_utils.py +538 -0
  104. hpcflow/sdk/core/types.py +447 -0
  105. hpcflow/sdk/core/utils.py +1207 -0
  106. hpcflow/sdk/core/validation.py +87 -0
  107. hpcflow/sdk/core/values.py +477 -0
  108. hpcflow/sdk/core/workflow.py +4820 -0
  109. hpcflow/sdk/core/zarr_io.py +206 -0
  110. hpcflow/sdk/data/__init__.py +13 -0
  111. hpcflow/sdk/data/config_file_schema.yaml +34 -0
  112. hpcflow/sdk/data/config_schema.yaml +260 -0
  113. hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
  114. hpcflow/sdk/data/files_spec_schema.yaml +5 -0
  115. hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
  116. hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
  117. hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
  118. hpcflow/sdk/demo/__init__.py +3 -0
  119. hpcflow/sdk/demo/cli.py +242 -0
  120. hpcflow/sdk/helper/__init__.py +3 -0
  121. hpcflow/sdk/helper/cli.py +137 -0
  122. hpcflow/sdk/helper/helper.py +300 -0
  123. hpcflow/sdk/helper/watcher.py +192 -0
  124. hpcflow/sdk/log.py +288 -0
  125. hpcflow/sdk/persistence/__init__.py +18 -0
  126. hpcflow/sdk/persistence/base.py +2817 -0
  127. hpcflow/sdk/persistence/defaults.py +6 -0
  128. hpcflow/sdk/persistence/discovery.py +39 -0
  129. hpcflow/sdk/persistence/json.py +954 -0
  130. hpcflow/sdk/persistence/pending.py +948 -0
  131. hpcflow/sdk/persistence/store_resource.py +203 -0
  132. hpcflow/sdk/persistence/types.py +309 -0
  133. hpcflow/sdk/persistence/utils.py +73 -0
  134. hpcflow/sdk/persistence/zarr.py +2388 -0
  135. hpcflow/sdk/runtime.py +320 -0
  136. hpcflow/sdk/submission/__init__.py +3 -0
  137. hpcflow/sdk/submission/enums.py +70 -0
  138. hpcflow/sdk/submission/jobscript.py +2379 -0
  139. hpcflow/sdk/submission/schedulers/__init__.py +281 -0
  140. hpcflow/sdk/submission/schedulers/direct.py +233 -0
  141. hpcflow/sdk/submission/schedulers/sge.py +376 -0
  142. hpcflow/sdk/submission/schedulers/slurm.py +598 -0
  143. hpcflow/sdk/submission/schedulers/utils.py +25 -0
  144. hpcflow/sdk/submission/shells/__init__.py +52 -0
  145. hpcflow/sdk/submission/shells/base.py +229 -0
  146. hpcflow/sdk/submission/shells/bash.py +504 -0
  147. hpcflow/sdk/submission/shells/os_version.py +115 -0
  148. hpcflow/sdk/submission/shells/powershell.py +352 -0
  149. hpcflow/sdk/submission/submission.py +1402 -0
  150. hpcflow/sdk/submission/types.py +140 -0
  151. hpcflow/sdk/typing.py +194 -0
  152. hpcflow/sdk/utils/arrays.py +69 -0
  153. hpcflow/sdk/utils/deferred_file.py +55 -0
  154. hpcflow/sdk/utils/hashing.py +16 -0
  155. hpcflow/sdk/utils/patches.py +31 -0
  156. hpcflow/sdk/utils/strings.py +69 -0
  157. hpcflow/tests/api/test_api.py +32 -0
  158. hpcflow/tests/conftest.py +123 -0
  159. hpcflow/tests/data/__init__.py +0 -0
  160. hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
  161. hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
  162. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  163. hpcflow/tests/data/workflow_1.json +10 -0
  164. hpcflow/tests/data/workflow_1.yaml +5 -0
  165. hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
  166. hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
  167. hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
  168. hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
  169. hpcflow/tests/programs/test_programs.py +180 -0
  170. hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
  171. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  172. hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
  173. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  174. hpcflow/tests/scripts/test_main_scripts.py +1361 -0
  175. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  176. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  177. hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
  178. hpcflow/tests/unit/test_action.py +1066 -0
  179. hpcflow/tests/unit/test_action_rule.py +24 -0
  180. hpcflow/tests/unit/test_app.py +132 -0
  181. hpcflow/tests/unit/test_cache.py +46 -0
  182. hpcflow/tests/unit/test_cli.py +172 -0
  183. hpcflow/tests/unit/test_command.py +377 -0
  184. hpcflow/tests/unit/test_config.py +195 -0
  185. hpcflow/tests/unit/test_config_file.py +162 -0
  186. hpcflow/tests/unit/test_element.py +666 -0
  187. hpcflow/tests/unit/test_element_iteration.py +88 -0
  188. hpcflow/tests/unit/test_element_set.py +158 -0
  189. hpcflow/tests/unit/test_group.py +115 -0
  190. hpcflow/tests/unit/test_input_source.py +1479 -0
  191. hpcflow/tests/unit/test_input_value.py +398 -0
  192. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  193. hpcflow/tests/unit/test_json_like.py +1247 -0
  194. hpcflow/tests/unit/test_loop.py +2674 -0
  195. hpcflow/tests/unit/test_meta_task.py +325 -0
  196. hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
  197. hpcflow/tests/unit/test_object_list.py +116 -0
  198. hpcflow/tests/unit/test_parameter.py +243 -0
  199. hpcflow/tests/unit/test_persistence.py +664 -0
  200. hpcflow/tests/unit/test_resources.py +243 -0
  201. hpcflow/tests/unit/test_run.py +286 -0
  202. hpcflow/tests/unit/test_run_directories.py +29 -0
  203. hpcflow/tests/unit/test_runtime.py +9 -0
  204. hpcflow/tests/unit/test_schema_input.py +372 -0
  205. hpcflow/tests/unit/test_shell.py +129 -0
  206. hpcflow/tests/unit/test_slurm.py +39 -0
  207. hpcflow/tests/unit/test_submission.py +502 -0
  208. hpcflow/tests/unit/test_task.py +2560 -0
  209. hpcflow/tests/unit/test_task_schema.py +182 -0
  210. hpcflow/tests/unit/test_utils.py +616 -0
  211. hpcflow/tests/unit/test_value_sequence.py +549 -0
  212. hpcflow/tests/unit/test_values.py +91 -0
  213. hpcflow/tests/unit/test_workflow.py +827 -0
  214. hpcflow/tests/unit/test_workflow_template.py +186 -0
  215. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  216. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  217. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  218. hpcflow/tests/unit/utils/test_patches.py +5 -0
  219. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  220. hpcflow/tests/unit/utils/test_strings.py +97 -0
  221. hpcflow/tests/workflows/__init__.py +0 -0
  222. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  223. hpcflow/tests/workflows/test_jobscript.py +355 -0
  224. hpcflow/tests/workflows/test_run_status.py +198 -0
  225. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  226. hpcflow/tests/workflows/test_submission.py +140 -0
  227. hpcflow/tests/workflows/test_workflows.py +564 -0
  228. hpcflow/tests/workflows/test_zip.py +18 -0
  229. hpcflow/viz_demo.ipynb +6794 -0
  230. hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
  231. hpcflow-0.2.0a271.dist-info/METADATA +65 -0
  232. hpcflow-0.2.0a271.dist-info/RECORD +237 -0
  233. {hpcflow-0.1.15.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
  234. hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
  235. hpcflow/api.py +0 -490
  236. hpcflow/archive/archive.py +0 -307
  237. hpcflow/archive/cloud/cloud.py +0 -45
  238. hpcflow/archive/cloud/errors.py +0 -9
  239. hpcflow/archive/cloud/providers/dropbox.py +0 -427
  240. hpcflow/archive/errors.py +0 -5
  241. hpcflow/base_db.py +0 -4
  242. hpcflow/config.py +0 -233
  243. hpcflow/copytree.py +0 -66
  244. hpcflow/data/examples/_config.yml +0 -14
  245. hpcflow/data/examples/damask/demo/1.run.yml +0 -4
  246. hpcflow/data/examples/damask/demo/2.process.yml +0 -29
  247. hpcflow/data/examples/damask/demo/geom.geom +0 -2052
  248. hpcflow/data/examples/damask/demo/load.load +0 -1
  249. hpcflow/data/examples/damask/demo/material.config +0 -185
  250. hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
  251. hpcflow/data/examples/damask/inputs/load.load +0 -1
  252. hpcflow/data/examples/damask/inputs/material.config +0 -185
  253. hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
  254. hpcflow/data/examples/damask/profiles/damask.yml +0 -4
  255. hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
  256. hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
  257. hpcflow/data/examples/damask/profiles/default.yml +0 -6
  258. hpcflow/data/examples/thinking.yml +0 -177
  259. hpcflow/errors.py +0 -2
  260. hpcflow/init_db.py +0 -37
  261. hpcflow/models.py +0 -2595
  262. hpcflow/nesting.py +0 -9
  263. hpcflow/profiles.py +0 -455
  264. hpcflow/project.py +0 -81
  265. hpcflow/scheduler.py +0 -322
  266. hpcflow/utils.py +0 -103
  267. hpcflow/validation.py +0 -166
  268. hpcflow/variables.py +0 -543
  269. hpcflow-0.1.15.dist-info/METADATA +0 -168
  270. hpcflow-0.1.15.dist-info/RECORD +0 -45
  271. hpcflow-0.1.15.dist-info/entry_points.txt +0 -8
  272. hpcflow-0.1.15.dist-info/top_level.txt +0 -1
  273. /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
  274. /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
  275. /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
hpcflow/tests/unit/test_task.py
@@ -0,0 +1,2560 @@
1
+ from __future__ import annotations
2
+ import copy
3
+ import os
4
+ import pytest
5
+ from typing import TYPE_CHECKING
6
+
7
+ from valida.conditions import Value # type: ignore
8
+
9
+ from hpcflow.app import app as hf
10
+ from hpcflow.sdk.core.errors import (
11
+ MissingInputs,
12
+ TaskTemplateInvalidNesting,
13
+ TaskTemplateMultipleInputValues,
14
+ TaskTemplateMultipleSchemaObjectives,
15
+ TaskTemplateUnexpectedInput,
16
+ UnknownEnvironmentPresetError,
17
+ UnsetParameterDataError,
18
+ )
19
+ from hpcflow.sdk.core.parameters import NullDefault
20
+ from hpcflow.sdk.core.test_utils import (
21
+ make_schemas,
22
+ make_tasks,
23
+ make_workflow,
24
+ P1_parameter_cls as P1,
25
+ P1_sub_parameter_cls as P1_sub_param,
26
+ P1_sub_parameter_cls_2 as P1_sub_param_2,
27
+ )
28
+
29
+ if TYPE_CHECKING:
30
+ from pathlib import Path
31
+ from hpcflow.sdk.core.actions import Action, ActionEnvironment
32
+ from hpcflow.sdk.core.command_files import FileSpec
33
+ from hpcflow.sdk.core.parameters import Parameter
34
+ from hpcflow.sdk.core.task_schema import TaskSchema
35
+ from hpcflow.sdk.core.workflow import Workflow
36
+
37
+
38
+ @pytest.fixture
39
+ def null_config(tmp_path: Path):
40
+ if not hf.is_config_loaded:
41
+ hf.load_config(config_dir=tmp_path)
42
+
43
+
44
+ @pytest.fixture
45
+ def param_p1() -> Parameter:
46
+ return hf.Parameter("p1")
47
+
48
+
49
+ @pytest.fixture
50
+ def param_p2() -> Parameter:
51
+ return hf.Parameter("p2")
52
+
53
+
54
+ @pytest.fixture
55
+ def param_p3() -> Parameter:
56
+ return hf.Parameter("p3")
57
+
58
+
59
+ @pytest.fixture
60
+ def workflow_w0(null_config, tmp_path: Path) -> Workflow:
61
+ t1 = hf.Task(schema=[hf.TaskSchema(objective="t1", actions=[])])
62
+ t2 = hf.Task(schema=[hf.TaskSchema(objective="t2", actions=[])])
63
+
64
+ wkt = hf.WorkflowTemplate(name="workflow_w0", tasks=[t1, t2])
65
+ return hf.Workflow.from_template(wkt, path=tmp_path)
66
+
67
+
68
+ @pytest.fixture
69
+ def workflow_w1(
70
+ null_config, tmp_path: Path, param_p1: Parameter, param_p2: Parameter
71
+ ) -> Workflow:
72
+ s1 = hf.TaskSchema("t1", actions=[], inputs=[param_p1], outputs=[param_p2])
73
+ s2 = hf.TaskSchema("t2", actions=[], inputs=[param_p2])
74
+
75
+ t1 = hf.Task(
76
+ schema=s1,
77
+ sequences=[hf.ValueSequence("inputs.p1", values=[101, 102], nesting_order=1)],
78
+ )
79
+ t2 = hf.Task(schema=s2, nesting_order={"inputs.p2": 1})
80
+
81
+ wkt = hf.WorkflowTemplate(name="w1", tasks=[t1, t2])
82
+ return hf.Workflow.from_template(wkt, path=tmp_path)
83
+
84
+
85
+ @pytest.fixture
86
+ def workflow_w2(
87
+ null_config,
88
+ tmp_path: Path,
89
+ param_p1: Parameter,
90
+ param_p2: Parameter,
91
+ param_p3: Parameter,
92
+ ) -> Workflow:
93
+ s1 = hf.TaskSchema("t1", actions=[], inputs=[param_p1], outputs=[param_p2])
94
+ s2 = hf.TaskSchema("t2", actions=[], inputs=[param_p2, param_p3])
95
+
96
+ t1 = hf.Task(
97
+ schema=s1,
98
+ sequences=[hf.ValueSequence("inputs.p1", values=[101, 102], nesting_order=1)],
99
+ )
100
+ t2 = hf.Task(
101
+ schema=s2,
102
+ sequences=[
103
+ hf.ValueSequence("inputs.p3", values=[301, 302, 303], nesting_order=1)
104
+ ],
105
+ nesting_order={"inputs.p2": 0},
106
+ )
107
+
108
+ wkt = hf.WorkflowTemplate(name="w1", tasks=[t1, t2])
109
+ return hf.Workflow.from_template(wkt, path=tmp_path)
110
+
111
+
112
+ @pytest.fixture
113
+ def workflow_w3(
114
+ null_config,
115
+ tmp_path: Path,
116
+ param_p1: Parameter,
117
+ param_p2: Parameter,
118
+ param_p3: Parameter,
119
+ param_p4: Parameter,
120
+ ) -> Workflow:
121
+ s1 = hf.TaskSchema("t1", actions=[], inputs=[param_p1], outputs=[param_p3])
122
+ s2 = hf.TaskSchema("t2", actions=[], inputs=[param_p2, param_p3], outputs=[param_p4])
123
+ s3 = hf.TaskSchema("t3", actions=[], inputs=[param_p3, param_p4])
124
+
125
+ t1 = hf.Task(schema=s1, inputs=[hf.InputValue(param_p1, 101)])
126
+ t2 = hf.Task(
127
+ schema=s2,
128
+ sequences=[hf.ValueSequence("inputs.p2", values=[201, 202], nesting_order=1)],
129
+ )
130
+ t3 = hf.Task(schema=s3, nesting_order={"inputs.p3": 0, "inputs.p4": 1})
131
+
132
+ wkt = hf.WorkflowTemplate(name="w1", tasks=[t1, t2, t3])
133
+ return hf.Workflow.from_template(wkt, name=wkt.name, overwrite=True)
134
+
135
+
136
+ @pytest.fixture
137
+ def file_spec_fs1() -> FileSpec:
138
+ return hf.FileSpec(label="file1", name="file1.txt")
139
+
140
+
141
+ @pytest.fixture
142
+ def act_env_1() -> ActionEnvironment:
143
+ return hf.ActionEnvironment("env_1")
144
+
145
+
146
+ @pytest.fixture
147
+ def act_3(
148
+ act_env_1: ActionEnvironment, param_p2: Parameter, file_spec_fs1: FileSpec
149
+ ) -> Action:
150
+ return hf.Action(
151
+ commands=[hf.Command("<<parameter:p1>>")],
152
+ output_file_parsers=[
153
+ hf.OutputFileParser(output=param_p2, output_files=[file_spec_fs1]),
154
+ ],
155
+ environments=[act_env_1],
156
+ )
157
+
158
+
159
+ @pytest.fixture
160
+ def schema_s3(param_p1: Parameter, param_p2: Parameter, act_3) -> TaskSchema:
161
+ return hf.TaskSchema("ts1", actions=[act_3], inputs=[param_p1], outputs=[param_p2])
162
+
163
+
164
+ @pytest.fixture
165
+ def workflow_w4(
166
+ null_config, tmp_path: Path, schema_s3: TaskSchema, param_p1: Parameter
167
+ ) -> Workflow:
168
+ t1 = hf.Task(schema=schema_s3, inputs=[hf.InputValue(param_p1, 101)])
169
+ wkt = hf.WorkflowTemplate(name="w1", tasks=[t1])
170
+ return hf.Workflow.from_template(wkt, path=tmp_path)
171
+
172
+
173
+ @pytest.fixture
174
+ def act_1(act_env_1: ActionEnvironment) -> Action:
175
+ return hf.Action(
176
+ commands=[hf.Command("<<parameter:p1>>")],
177
+ environments=[act_env_1],
178
+ )
179
+
180
+
181
+ @pytest.fixture
182
+ def act_2(act_env_1: ActionEnvironment) -> Action:
183
+ return hf.Action(
184
+ commands=[hf.Command("<<parameter:p2>>")],
185
+ environments=[act_env_1],
186
+ )
187
+
188
+
189
+ @pytest.fixture
190
+ def schema_s1(param_p1: Parameter, act_1) -> TaskSchema:
191
+ return hf.TaskSchema("ts1", actions=[act_1], inputs=[param_p1])
192
+
193
+
194
+ @pytest.fixture
195
+ def schema_s2(param_p1: Parameter, act_1) -> TaskSchema:
196
+ return hf.TaskSchema(
197
+ "ts1", actions=[act_1], inputs=[hf.SchemaInput(param_p1, default_value=101)]
198
+ )
199
+
200
+
201
+ @pytest.fixture
202
+ def schema_s4(param_p2: Parameter, act_2) -> TaskSchema:
203
+ return hf.TaskSchema("ts2", actions=[act_2], inputs=[param_p2])
204
+
205
+
206
+ @pytest.fixture
207
+ def schema_s5(param_p2: Parameter, act_2) -> TaskSchema:
208
+ return hf.TaskSchema(
209
+ "ts2", actions=[act_2], inputs=[hf.SchemaInput(param_p2, default_value=2002)]
210
+ )
211
+
212
+
213
+ def test_task_get_available_task_input_sources_expected_return_first_task_local_value(
214
+ schema_s1: TaskSchema,
215
+ param_p1: Parameter,
216
+ ):
217
+ t1 = hf.Task(schema=schema_s1, inputs=[hf.InputValue(param_p1, value=101)])
218
+
219
+ available = t1.get_available_task_input_sources(
220
+ element_set=t1.element_sets[0],
221
+ source_tasks=[],
222
+ )
223
+ available_exp = {"p1": [hf.InputSource(source_type=hf.InputSourceType.LOCAL)]}
224
+
225
+ assert available == available_exp
226
+
227
+
228
+ def test_task_get_available_task_input_sources_expected_return_first_task_default_value(
229
+ schema_s2: TaskSchema,
230
+ ):
231
+ t1 = hf.Task(schema=schema_s2)
232
+ available = t1.get_available_task_input_sources(element_set=t1.element_sets[0])
233
+ available_exp = {"p1": [hf.InputSource(source_type=hf.InputSourceType.DEFAULT)]}
234
+
235
+ assert available == available_exp
236
+
237
+
238
+ def test_task_get_available_task_input_sources_expected_return_one_param_one_output(
239
+ tmp_path: Path,
240
+ ):
241
+ t1, t2 = make_tasks(
242
+ schemas_spec=[
243
+ ({"p1": NullDefault.NULL}, ("p2",), "t1"),
244
+ ({"p2": NullDefault.NULL}, (), "t2"),
245
+ ],
246
+ local_inputs={0: ("p1",)},
247
+ )
248
+ wk = hf.Workflow.from_template(
249
+ hf.WorkflowTemplate(name="w1", tasks=[t1]), path=tmp_path
250
+ )
251
+ available = t2.get_available_task_input_sources(
252
+ element_set=t2.element_sets[0],
253
+ source_tasks=[wk.tasks.t1],
254
+ )
255
+ available_exp = {
256
+ "p2": [
257
+ hf.InputSource(
258
+ source_type=hf.InputSourceType.TASK,
259
+ task_ref=0,
260
+ task_source_type=hf.TaskSourceType.OUTPUT,
261
+ element_iters=[0],
262
+ )
263
+ ]
264
+ }
265
+ assert available == available_exp
266
+
267
+
268
+ def test_task_get_available_task_input_sources_expected_return_one_param_one_output_with_default(
269
+ tmp_path: Path,
270
+ ):
271
+ t1, t2 = make_tasks(
272
+ schemas_spec=[
273
+ ({"p1": None}, ("p2",), "t1"),
274
+ ({"p2": 2001}, (), "t2"),
275
+ ],
276
+ local_inputs={0: ("p1",)},
277
+ )
278
+ wk = hf.Workflow.from_template(
279
+ hf.WorkflowTemplate(name="w1", tasks=[t1]), path=tmp_path
280
+ )
281
+ available = t2.get_available_task_input_sources(
282
+ element_set=t2.element_sets[0],
283
+ source_tasks=[wk.tasks.t1],
284
+ )
285
+ available_exp = {
286
+ "p2": [
287
+ hf.InputSource(
288
+ source_type=hf.InputSourceType.TASK,
289
+ task_ref=0,
290
+ task_source_type=hf.TaskSourceType.OUTPUT,
291
+ element_iters=[0],
292
+ ),
293
+ hf.InputSource(source_type=hf.InputSourceType.DEFAULT),
294
+ ]
295
+ }
296
+ assert available == available_exp
297
+
298
+
299
+ def test_task_get_available_task_input_sources_expected_return_one_param_one_output_with_local(
300
+ tmp_path: Path,
301
+ ):
302
+ t1, t2 = make_tasks(
303
+ schemas_spec=[
304
+ ({"p1": NullDefault.NULL}, ("p2",), "t1"),
305
+ ({"p2": NullDefault.NULL}, (), "t2"),
306
+ ],
307
+ local_inputs={0: ("p1",), 1: ("p2",)},
308
+ )
309
+ wk = hf.Workflow.from_template(
310
+ hf.WorkflowTemplate(name="w1", tasks=[t1]), path=tmp_path
311
+ )
312
+ available = t2.get_available_task_input_sources(
313
+ element_set=t2.element_sets[0],
314
+ source_tasks=[wk.tasks.t1],
315
+ )
316
+ available_exp = {
317
+ "p2": [
318
+ hf.InputSource(source_type=hf.InputSourceType.LOCAL),
319
+ hf.InputSource(
320
+ source_type=hf.InputSourceType.TASK,
321
+ task_ref=0,
322
+ task_source_type=hf.TaskSourceType.OUTPUT,
323
+ element_iters=[0],
324
+ ),
325
+ ]
326
+ }
327
+ assert available == available_exp
328
+
329
+
330
+ def test_task_get_available_task_input_sources_expected_return_one_param_one_output_with_default_and_local(
331
+ tmp_path: Path,
332
+ ):
333
+ t1, t2 = make_tasks(
334
+ schemas_spec=[
335
+ ({"p1": None}, ("p2",), "t1"),
336
+ ({"p2": 2001}, (), "t2"),
337
+ ],
338
+ local_inputs={0: ("p1",), 1: ("p2",)},
339
+ )
340
+ wk = hf.Workflow.from_template(
341
+ hf.WorkflowTemplate(name="w1", tasks=[t1]), path=tmp_path
342
+ )
343
+ available = t2.get_available_task_input_sources(
344
+ element_set=t2.element_sets[0],
345
+ source_tasks=[wk.tasks.t1],
346
+ )
347
+ available_exp = {
348
+ "p2": [
349
+ hf.InputSource(source_type=hf.InputSourceType.LOCAL),
350
+ hf.InputSource(
351
+ source_type=hf.InputSourceType.TASK,
352
+ task_ref=0,
353
+ task_source_type=hf.TaskSourceType.OUTPUT,
354
+ element_iters=[0],
355
+ ),
356
+ hf.InputSource(source_type=hf.InputSourceType.DEFAULT),
357
+ ]
358
+ }
359
+ assert available == available_exp
360
+
361
+
362
+ def test_task_get_available_task_input_sources_expected_return_one_param_two_outputs(
363
+ tmp_path: Path,
364
+ ):
365
+ t1, t2, t3 = make_tasks(
366
+ schemas_spec=[
367
+ ({"p1": NullDefault.NULL}, ("p2", "p3"), "t1"),
368
+ ({"p2": NullDefault.NULL}, ("p3", "p4"), "t2"),
369
+ ({"p3": NullDefault.NULL}, (), "t3"),
370
+ ],
371
+ local_inputs={0: ("p1",), 1: ("p2",)},
372
+ )
373
+ wk = hf.Workflow.from_template(
374
+ hf.WorkflowTemplate(name="w1", tasks=[t1, t2]), path=tmp_path
375
+ )
376
+ available = t3.get_available_task_input_sources(
377
+ element_set=t3.element_sets[0],
378
+ source_tasks=[wk.tasks.t1, wk.tasks.t2],
379
+ )
380
+ available_exp = {
381
+ "p3": [
382
+ hf.InputSource(
383
+ source_type=hf.InputSourceType.TASK,
384
+ task_ref=1,
385
+ task_source_type=hf.TaskSourceType.OUTPUT,
386
+ element_iters=[1],
387
+ ),
388
+ hf.InputSource(
389
+ source_type=hf.InputSourceType.TASK,
390
+ task_ref=0,
391
+ task_source_type=hf.TaskSourceType.OUTPUT,
392
+ element_iters=[0],
393
+ ),
394
+ ]
395
+ }
396
+ assert available == available_exp
397
+
398
+
399
+ def test_task_get_available_task_input_sources_expected_return_two_params_one_output(
400
+ tmp_path: Path,
401
+ ):
402
+ t1, t2 = make_tasks(
403
+ schemas_spec=[
404
+ ({"p1": NullDefault.NULL}, ("p2", "p3"), "t1"),
405
+ ({"p2": NullDefault.NULL, "p3": NullDefault.NULL}, (), "t2"),
406
+ ],
407
+ local_inputs={0: ("p1",)},
408
+ )
409
+ wk = hf.Workflow.from_template(
410
+ hf.WorkflowTemplate(name="w1", tasks=[t1]), path=tmp_path
411
+ )
412
+ available = t2.get_available_task_input_sources(
413
+ element_set=t2.element_sets[0],
414
+ source_tasks=[wk.tasks.t1],
415
+ )
416
+ available_exp = {
417
+ "p2": [
418
+ hf.InputSource(
419
+ source_type=hf.InputSourceType.TASK,
420
+ task_ref=0,
421
+ task_source_type=hf.TaskSourceType.OUTPUT,
422
+ element_iters=[0],
423
+ )
424
+ ],
425
+ "p3": [
426
+ hf.InputSource(
427
+ source_type=hf.InputSourceType.TASK,
428
+ task_ref=0,
429
+ task_source_type=hf.TaskSourceType.OUTPUT,
430
+ element_iters=[0],
431
+ )
432
+ ],
433
+ }
434
+ assert available == available_exp
435
+
436
+
437
+ def test_task_get_available_task_input_sources_one_parameter_extravaganza(
438
+ tmp_path: Path,
439
+ ):
440
+ t1, t2, t3 = make_tasks(
441
+ schemas_spec=[
442
+ ({"p1": NullDefault.NULL}, ("p1",), "t1"), # sources for t3: input + output
443
+ ({"p1": NullDefault.NULL}, ("p1",), "t2"), # sources fot t3: input + output
444
+ ({"p1": NullDefault.NULL}, ("p1",), "t3"),
445
+ ],
446
+ local_inputs={0: ("p1",)},
447
+ )
448
+ wk = hf.Workflow.from_template(
449
+ hf.WorkflowTemplate(name="w1", tasks=[t1, t2]), path=tmp_path
450
+ )
451
+ available = t3.get_available_task_input_sources(
452
+ element_set=t3.element_sets[0],
453
+ source_tasks=[wk.tasks.t1, wk.tasks.t2],
454
+ )
455
+ available_exp = {
456
+ "p1": [
457
+ hf.InputSource(
458
+ source_type=hf.InputSourceType.TASK,
459
+ task_ref=1,
460
+ task_source_type=hf.TaskSourceType.OUTPUT,
461
+ element_iters=[1],
462
+ ),
463
+ hf.InputSource(
464
+ source_type=hf.InputSourceType.TASK,
465
+ task_ref=0,
466
+ task_source_type=hf.TaskSourceType.OUTPUT,
467
+ element_iters=[0],
468
+ ),
469
+ hf.InputSource(
470
+ source_type=hf.InputSourceType.TASK,
471
+ task_ref=1,
472
+ task_source_type=hf.TaskSourceType.INPUT,
473
+ element_iters=[1],
474
+ ),
475
+ hf.InputSource(
476
+ source_type=hf.InputSourceType.TASK,
477
+ task_ref=0,
478
+ task_source_type=hf.TaskSourceType.INPUT,
479
+ element_iters=[0],
480
+ ),
481
+ ],
482
+ }
483
+ assert available == available_exp
484
+
485
+
486
+ def test_task_input_sources_output_label(null_config, tmp_path: Path):
487
+ ts1 = hf.TaskSchema(
488
+ objective="t1",
489
+ outputs=[hf.SchemaOutput("p1")],
490
+ actions=[
491
+ hf.Action(
492
+ commands=[
493
+ hf.Command(command="Write-Host 101", stdout="<<int(parameter:p1)>>")
494
+ ]
495
+ )
496
+ ],
497
+ )
498
+ ts2 = hf.TaskSchema(
499
+ objective="t2", inputs=[hf.SchemaInput("p1", labels={"one": {}}, multiple=True)]
500
+ )
501
+
502
+ tasks = [
503
+ hf.Task(schema=ts1, output_labels=[hf.OutputLabel(parameter="p1", label="one")]),
504
+ hf.Task(schema=ts2),
505
+ ]
506
+ wk = hf.Workflow.from_template_data(
507
+ tasks=tasks, template_name="test_sources", path=tmp_path
508
+ )
509
+
510
+ assert wk.tasks.t2.template.element_sets[0].input_sources == {
511
+ "p1[one]": [
512
+ hf.InputSource.task(task_ref=0, task_source_type="output", element_iters=[0])
513
+ ]
514
+ }
515
+
516
+
517
+ def test_task_input_sources_output_label_filtered(null_config, tmp_path: Path):
518
+ ts1 = hf.TaskSchema(
519
+ objective="t1",
520
+ inputs=[hf.SchemaInput("p1")],
521
+ outputs=[hf.SchemaOutput("p1")],
522
+ actions=[
523
+ hf.Action(
524
+ commands=[
525
+ hf.Command(
526
+ command="Write-Host (<<parameter:p1>> + 101)",
527
+ stdout="<<int(parameter:p1)>>",
528
+ ),
529
+ ],
530
+ ),
531
+ ],
532
+ )
533
+ ts2 = hf.TaskSchema(
534
+ objective="t2", inputs=[hf.SchemaInput("p1", labels={"one": {}}, multiple=True)]
535
+ )
536
+
537
+ tasks = [
538
+ hf.Task(
539
+ schema=ts1,
540
+ sequences=[hf.ValueSequence(path="inputs.p1", values=[1, 2])],
541
+ output_labels=[
542
+ hf.OutputLabel(
543
+ parameter="p1",
544
+ label="one",
545
+ where=hf.Rule(path="inputs.p1", condition={"value.equal_to": 2}),
546
+ ),
547
+ ],
548
+ ),
549
+ hf.Task(schema=ts2),
550
+ ]
551
+ wk = hf.Workflow.from_template_data(
552
+ tasks=tasks,
553
+ template_name="test_sources",
554
+ path=tmp_path,
555
+ )
556
+
557
+ assert wk.tasks.t2.template.element_sets[0].input_sources == {
558
+ "p1[one]": [
559
+ hf.InputSource.task(task_ref=0, task_source_type="output", element_iters=[1])
560
+ ]
561
+ }
562
+
563
+
564
+ def test_get_task_unique_names_two_tasks_no_repeats():
565
+ s1 = hf.TaskSchema("t1", actions=[])
566
+ s2 = hf.TaskSchema("t2", actions=[])
567
+
568
+ t1 = hf.Task(schema=s1)
569
+ t2 = hf.Task(schema=s2)
570
+
571
+ assert hf.Task.get_task_unique_names([t1, t2]) == ["t1", "t2"]
572
+
573
+
574
+ def test_get_task_unique_names_two_tasks_with_repeat():
575
+ s1 = hf.TaskSchema("t1", actions=[])
576
+
577
+ t1 = hf.Task(schema=s1)
578
+ t2 = hf.Task(schema=s1)
579
+
580
+ assert hf.Task.get_task_unique_names([t1, t2]) == ["t1_1", "t1_2"]
581
+
582
+
583
+ def test_raise_on_multiple_schema_objectives():
584
+ s1 = hf.TaskSchema("t1", actions=[])
585
+ s2 = hf.TaskSchema("t2", actions=[])
586
+ with pytest.raises(TaskTemplateMultipleSchemaObjectives):
587
+ hf.Task(schema=[s1, s2])
588
+
589
+
590
+ def test_raise_on_unexpected_inputs(param_p1: Parameter, param_p2: Parameter):
591
+ (s1,) = make_schemas(({"p1": None}, ()))
592
+
593
+ with pytest.raises(TaskTemplateUnexpectedInput):
594
+ hf.Task(
595
+ schema=s1,
596
+ inputs=[
597
+ hf.InputValue(param_p1, value=101),
598
+ hf.InputValue(param_p2, value=4),
599
+ ],
600
+ )
601
+
602
+
603
+ def test_raise_on_multiple_input_values(param_p1: Parameter):
604
+ (s1,) = make_schemas(({"p1": None}, ()))
605
+
606
+ with pytest.raises(TaskTemplateMultipleInputValues):
607
+ hf.Task(
608
+ schema=s1,
609
+ inputs=[
610
+ hf.InputValue(param_p1, value=101),
611
+ hf.InputValue(param_p1, value=7),
612
+ ],
613
+ )
614
+
615
+
616
+ def test_raise_on_multiple_input_values_same_label(param_p1: Parameter):
617
+ s1 = hf.TaskSchema(
618
+ objective="t1",
619
+ inputs=[hf.SchemaInput(parameter="p1", labels={"0": {}})],
620
+ )
621
+
622
+ with pytest.raises(TaskTemplateMultipleInputValues):
623
+ hf.Task(
624
+ schema=s1,
625
+ inputs=[
626
+ hf.InputValue(param_p1, value=101, label="0"),
627
+ hf.InputValue(param_p1, value=101, label="0"),
628
+ ],
629
+ )
630
+
631
+
632
+ def test_multiple_input_values_different_labels(param_p1: Parameter):
633
+ s1 = hf.TaskSchema(
634
+ objective="t1",
635
+ inputs=[
636
+ hf.SchemaInput(
637
+ parameter="p1",
638
+ labels={"0": {}, "1": {}},
639
+ multiple=True,
640
+ )
641
+ ],
642
+ )
643
+ hf.Task(
644
+ schema=s1,
645
+ inputs=[
646
+ hf.InputValue(param_p1, value=101, label="0"),
647
+ hf.InputValue(param_p1, value=101, label="1"),
648
+ ],
649
+ )
650
+
651
+
652
+ def test_expected_return_defined_and_undefined_input_types(
653
+ param_p1: Parameter, param_p2: Parameter
654
+ ):
655
+ (s1,) = make_schemas(({"p1": None, "p2": None}, ()))
656
+
657
+ t1 = hf.Task(schema=s1, inputs=[hf.InputValue(param_p1, value=101)])
658
+ element_set = t1.element_sets[0]
659
+ assert element_set.defined_input_types == {
660
+ param_p1.typ
661
+ } and element_set.undefined_input_types == {param_p2.typ}
662
+
663
+
664
+ def test_expected_return_all_schema_input_types_single_schema(
665
+ param_p1: Parameter, param_p2: Parameter
666
+ ):
667
+ (s1,) = make_schemas(({"p1": None, "p2": None}, ()))
668
+ t1 = hf.Task(schema=s1)
669
+
670
+ assert t1.all_schema_input_types == {param_p1.typ, param_p2.typ}
671
+
672
+
673
+ def test_expected_return_all_schema_input_types_multiple_schemas(
674
+ param_p1: Parameter, param_p2: Parameter, param_p3: Parameter
675
+ ):
676
+ s1, s2 = make_schemas(
677
+ ({"p1": None, "p2": None}, (), "t1"), ({"p1": None, "p3": None}, (), "t1")
678
+ )
679
+
680
+ t1 = hf.Task(schema=[s1, s2])
681
+
682
+ assert t1.all_schema_input_types == {param_p1.typ, param_p2.typ, param_p3.typ}
683
+
684
+
685
+ def test_expected_name_single_schema():
686
+ s1 = hf.TaskSchema("t1", actions=[])
687
+ t1 = hf.Task(schema=[s1])
688
+ assert t1.name == "t1"
689
+
690
+
691
+ def test_expected_name_single_schema_with_method():
692
+ s1 = hf.TaskSchema("t1", method="m1", actions=[])
693
+ t1 = hf.Task(schema=s1)
694
+ assert t1.name == "t1_m1"
695
+
696
+
697
+ def test_expected_name_single_schema_with_implementation():
698
+ s1 = hf.TaskSchema("t1", implementation="i1", actions=[])
699
+ t1 = hf.Task(schema=s1)
700
+ assert t1.name == "t1_i1"
701
+
702
+
703
+ def test_expected_name_single_schema_with_method_and_implementation():
704
+ s1 = hf.TaskSchema("t1", method="m1", implementation="i1", actions=[])
705
+ t1 = hf.Task(schema=s1)
706
+ assert t1.name == "t1_m1_i1"
707
+
708
+
709
+ def test_expected_name_multiple_schemas():
710
+ s1 = hf.TaskSchema("t1", actions=[])
711
+ s2 = hf.TaskSchema("t1", actions=[])
712
+ t1 = hf.Task(schema=[s1, s2])
713
+ assert t1.name == "t1"
714
+
715
+
716
+ def test_expected_name_two_schemas_first_with_method():
717
+ s1 = hf.TaskSchema("t1", method="m1", actions=[])
718
+ s2 = hf.TaskSchema("t1", actions=[])
719
+ t1 = hf.Task(schema=[s1, s2])
720
+ assert t1.name == "t1_m1"
721
+
722
+
723
+ def test_expected_name_two_schemas_first_with_method_and_implementation():
724
+ s1 = hf.TaskSchema("t1", method="m1", implementation="i1", actions=[])
725
+ s2 = hf.TaskSchema("t1", actions=[])
726
+ t1 = hf.Task(schema=[s1, s2])
727
+ assert t1.name == "t1_m1_i1"
728
+
729
+
730
+ def test_expected_name_two_schemas_both_with_method():
731
+ s1 = hf.TaskSchema("t1", method="m1", actions=[])
732
+ s2 = hf.TaskSchema("t1", method="m2", actions=[])
733
+ t1 = hf.Task(schema=[s1, s2])
734
+ assert t1.name == "t1_m1_and_m2"
735
+
736
+
737
+ def test_expected_name_two_schemas_first_with_method_second_with_implementation():
738
+ s1 = hf.TaskSchema("t1", method="m1", actions=[])
739
+ s2 = hf.TaskSchema("t1", implementation="i2", actions=[])
740
+ t1 = hf.Task(schema=[s1, s2])
741
+ assert t1.name == "t1_m1_and_i2"
742
+
743
+
744
+ def test_expected_name_two_schemas_first_with_implementation_second_with_method():
745
+ s1 = hf.TaskSchema("t1", implementation="i1", actions=[])
746
+ s2 = hf.TaskSchema("t1", method="m2", actions=[])
747
+ t1 = hf.Task(schema=[s1, s2])
748
+ assert t1.name == "t1_i1_and_m2"
749
+
750
+
751
+ def test_expected_name_two_schemas_both_with_method_and_implementation():
752
+ s1 = hf.TaskSchema("t1", method="m1", implementation="i1", actions=[])
753
+ s2 = hf.TaskSchema("t1", method="m2", implementation="i2", actions=[])
754
+ t1 = hf.Task(schema=[s1, s2])
755
+ assert t1.name == "t1_m1_i1_and_m2_i2"
756
+
757
+
758
+ def test_raise_on_negative_nesting_order():
759
+ (s1,) = make_schemas(({"p1": None}, ()))
760
+ with pytest.raises(TaskTemplateInvalidNesting):
761
+ hf.Task(schema=s1, nesting_order={"inputs.p1": -1})
762
+
763
+
764
+ # TODO: test resolution of elements, and raising of MissingInputs
765
+
766
+
767
+ def test_empty_task_init():
768
+ """Check we can init a hf.Task with no input values."""
769
+ (s1,) = make_schemas(({"p1": None}, ()))
770
+ t1 = hf.Task(schema=s1)
771
+
772
+
773
+ def test_task_task_dependencies(tmp_path: Path):
774
+ wk = make_workflow(
775
+ schemas_spec=[
776
+ ({"p1": None}, ("p2",), "t1"),
777
+ ({"p2": None}, (), "t2"),
778
+ ],
779
+ local_inputs={0: ("p1",)},
780
+ path=tmp_path,
781
+ )
782
+ assert wk.tasks.t2.get_task_dependencies(as_objects=True) == [wk.tasks.t1]
783
+
784
+
785
+ def test_task_dependent_tasks(tmp_path: Path):
786
+ wk = make_workflow(
787
+ schemas_spec=[
788
+ ({"p1": None}, ("p2",), "t1"),
789
+ ({"p2": None}, (), "t2"),
790
+ ],
791
+ local_inputs={0: ("p1",)},
792
+ path=tmp_path,
793
+ )
794
+ assert wk.tasks.t1.get_dependent_tasks(as_objects=True) == [wk.tasks.t2]
795
+
796
+
797
+ def test_task_element_dependencies(tmp_path: Path):
798
+ wk = make_workflow(
799
+ schemas_spec=[
800
+ ({"p1": None}, ("p2",), "t1"),
801
+ ({"p2": None}, (), "t2"),
802
+ ],
803
+ local_sequences={0: [("inputs.p1", 2, 0)]},
804
+ nesting_orders={1: {"inputs.p2": 0}},
805
+ path=tmp_path,
806
+ )
807
+ assert wk.tasks.t2.get_element_dependencies() == {0, 1}
808
+
809
+
810
+ def test_task_dependent_elements(tmp_path: Path):
811
+ wk = make_workflow(
812
+ schemas_spec=[
813
+ ({"p1": None}, ("p2",), "t1"),
814
+ ({"p2": None}, (), "t2"),
815
+ ],
816
+ local_sequences={0: [("inputs.p1", 2, 0)]},
817
+ nesting_orders={1: {"inputs.p2": 0}},
818
+ path=tmp_path,
819
+ )
820
+ assert wk.tasks.t1.get_dependent_elements() == {2, 3}
821
+
822
+
823
+ def test_task_add_elements_without_propagation_expected_workflow_num_elements(
824
+ tmp_path: Path, param_p1: Parameter
825
+ ):
826
+ wk = make_workflow(
827
+ schemas_spec=[
828
+ ({"p1": None}, ("p2",), "t1"),
829
+ ({"p2": None}, (), "t2"),
830
+ ],
831
+ local_sequences={0: [("inputs.p1", 2, 0)]},
832
+ nesting_orders={1: {"inputs.p2": 0}},
833
+ path=tmp_path,
834
+ )
835
+ num_elems = wk.num_elements
836
+ wk.tasks.t1.add_elements(inputs=[hf.InputValue(param_p1, 103)])
837
+ num_elems_new = wk.num_elements
838
+ assert num_elems_new - num_elems == 1
839
+
840
+
841
+ def test_task_add_elements_without_propagation_expected_task_num_elements(
842
+ tmp_path: Path, param_p1: Parameter
843
+ ):
844
+ wk = make_workflow(
845
+ schemas_spec=[
846
+ ({"p1": None}, ("p2",), "t1"),
847
+ ({"p2": None}, (), "t2"),
848
+ ],
849
+ local_sequences={0: [("inputs.p1", 2, 0)]},
850
+ nesting_orders={1: {"inputs.p2": 0}},
851
+ path=tmp_path,
852
+ )
853
+ num_elems = wk.tasks.t1.num_elements
854
+ wk.tasks.t1.add_elements(inputs=[hf.InputValue(param_p1, 103)])
855
+ num_elems_new = wk.tasks.t1.num_elements
856
+ assert num_elems_new - num_elems == 1
857
+
858
+
859
+ def test_task_add_elements_without_propagation_expected_new_data_index(
860
+ tmp_path: Path, param_p1: Parameter
861
+ ):
862
+ wk = make_workflow(
863
+ schemas_spec=[
864
+ ({"p1": None}, ("p2",), "t1"),
865
+ ({"p2": None}, (), "t2"),
866
+ ],
867
+ local_sequences={0: [("inputs.p1", 2, 0)]},
868
+ nesting_orders={1: {"inputs.p2": 0}},
869
+ path=tmp_path,
870
+ )
871
+ data_index = [sorted(i.get_data_idx()) for i in wk.tasks.t1.elements[:]]
872
+ wk.tasks.t1.add_elements(inputs=[hf.InputValue(param_p1, 103)])
873
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.tasks.t1.elements[:]]
874
+ new_elems = data_index_new[len(data_index) :]
875
+ assert new_elems == [["inputs.p1", "outputs.p2", "resources.any"]]
876
+
877
+
878
+ def test_task_add_elements_with_propagation_expected_workflow_num_elements(
879
+ tmp_path: Path, param_p1: Parameter
880
+ ):
881
+ wk = make_workflow(
882
+ schemas_spec=[
883
+ ({"p1": None}, ("p2",), "t1"),
884
+ ({"p2": None}, (), "t2"),
885
+ ],
886
+ local_sequences={0: [("inputs.p1", 2, 0)]},
887
+ nesting_orders={1: {"inputs.p2": 0}},
888
+ path=tmp_path,
889
+ )
890
+ num_elems = wk.num_elements
891
+ wk.tasks.t1.add_elements(
892
+ inputs=[hf.InputValue(param_p1, 103)],
893
+ propagate_to=[hf.ElementPropagation(task=wk.tasks.t2)],
894
+ )
895
+ num_elems_new = wk.num_elements
896
+ assert num_elems_new - num_elems == 2
897
+
898
+
899
+ def test_task_add_elements_with_propagation_expected_task_num_elements(
900
+ tmp_path: Path, param_p1: Parameter
901
+ ):
902
+ wk = make_workflow(
903
+ schemas_spec=[
904
+ ({"p1": None}, ("p2",), "t1"),
905
+ ({"p2": None}, (), "t2"),
906
+ ],
907
+ local_sequences={0: [("inputs.p1", 2, 0)]},
908
+ nesting_orders={1: {"inputs.p2": 0}},
909
+ path=tmp_path,
910
+ )
911
+ num_elems = [task.num_elements for task in wk.tasks]
912
+ wk.tasks.t1.add_elements(
913
+ inputs=[hf.InputValue(param_p1, 103)],
914
+ propagate_to=[hf.ElementPropagation(task=wk.tasks.t2)],
915
+ )
916
+ num_elems_new = [task.num_elements for task in wk.tasks]
917
+ num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
918
+ assert num_elems_diff[0] == 1 and num_elems_diff[1] == 1
919
+
920
+
921
+ def test_task_add_elements_with_propagation_expected_new_data_index(
922
+ tmp_path: Path, param_p1: Parameter
923
+ ):
924
+ wk = make_workflow(
925
+ schemas_spec=[
926
+ ({"p1": None}, ("p2",), "t1"),
927
+ ({"p2": None}, (), "t2"),
928
+ ],
929
+ local_sequences={0: [("inputs.p1", 2, 0)]},
930
+ nesting_orders={1: {"inputs.p2": 0}},
931
+ path=tmp_path,
932
+ )
933
+ t1_num_elems = wk.tasks.t1.num_elements
934
+ t2_num_elems = wk.tasks.t2.num_elements
935
+ wk.tasks.t1.add_elements(
936
+ inputs=[hf.InputValue(param_p1, 103)],
937
+ propagate_to=[hf.ElementPropagation(task=wk.tasks.t2)],
938
+ )
939
+ t1_num_elems_new = wk.tasks.t1.num_elements
940
+ t2_num_elems_new = wk.tasks.t2.num_elements
941
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
942
+ new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
943
+ new_elems_t2 = data_index_new[
944
+ t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
945
+ ]
946
+ assert new_elems_t1 == [
947
+ [
948
+ "inputs.p1",
949
+ "outputs.p2",
950
+ "resources.any",
951
+ ]
952
+ ] and new_elems_t2 == [["inputs.p2", "resources.any"]]
953
+
954
+
955
+ def test_task_add_elements_sequence_without_propagation_expected_workflow_num_elements(
956
+ tmp_path: Path,
957
+ ):
958
+ wk = make_workflow(
959
+ schemas_spec=[
960
+ ({"p1": None}, ("p2",), "t1"),
961
+ ({"p2": None}, (), "t2"),
962
+ ],
963
+ local_sequences={0: [("inputs.p1", 2, 0)]},
964
+ nesting_orders={1: {"inputs.p2": 0}},
965
+ path=tmp_path,
966
+ )
967
+ num_elems = wk.num_elements
968
+ wk.tasks.t1.add_elements(
969
+ sequences=[hf.ValueSequence("inputs.p1", values=[103, 104], nesting_order=1)]
970
+ )
971
+ num_elems_new = wk.num_elements
972
+ assert num_elems_new - num_elems == 2
973
+
974
+
975
+ def test_task_add_elements_sequence_without_propagation_expected_task_num_elements(
976
+ tmp_path: Path,
977
+ ):
978
+ wk = make_workflow(
979
+ schemas_spec=[
980
+ ({"p1": None}, ("p2",), "t1"),
981
+ ({"p2": None}, (), "t2"),
982
+ ],
983
+ local_sequences={0: [("inputs.p1", 2, 0)]},
984
+ nesting_orders={1: {"inputs.p2": 0}},
985
+ path=tmp_path,
986
+ )
987
+ num_elems = wk.tasks.t1.num_elements
988
+ wk.tasks.t1.add_elements(
989
+ sequences=[hf.ValueSequence("inputs.p1", values=[103, 104], nesting_order=1)]
990
+ )
991
+ num_elems_new = wk.tasks.t1.num_elements
992
+ assert num_elems_new - num_elems == 2
993
+
994
+
995
+ def test_task_add_elements_sequence_without_propagation_expected_new_data_index(
996
+ tmp_path: Path,
997
+ ):
998
+ wk = make_workflow(
999
+ schemas_spec=[
1000
+ ({"p1": None}, ("p2",), "t1"),
1001
+ ({"p2": None}, (), "t2"),
1002
+ ],
1003
+ local_sequences={0: [("inputs.p1", 2, 0)]},
1004
+ nesting_orders={1: {"inputs.p2": 0}},
1005
+ path=tmp_path,
1006
+ )
1007
+ t1_num_elems = wk.tasks.t1.num_elements
1008
+ wk.tasks.t1.add_elements(
1009
+ sequences=[hf.ValueSequence("inputs.p1", values=[103, 104], nesting_order=1)]
1010
+ )
1011
+ t1_num_elems_new = wk.tasks.t1.num_elements
1012
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
1013
+ new_elems = data_index_new[t1_num_elems:t1_num_elems_new]
1014
+ assert new_elems == [
1015
+ ["inputs.p1", "outputs.p2", "resources.any"],
1016
+ ["inputs.p1", "outputs.p2", "resources.any"],
1017
+ ]
1018
+
1019
+
1020
+ def test_task_add_elements_sequence_with_propagation_expected_workflow_num_elements(
1021
+ tmp_path: Path,
1022
+ ):
1023
+ wk = make_workflow(
1024
+ schemas_spec=[
1025
+ ({"p1": None}, ("p2",), "t1"),
1026
+ ({"p2": None}, (), "t2"),
1027
+ ],
1028
+ local_sequences={0: [("inputs.p1", 2, 0)]},
1029
+ nesting_orders={1: {"inputs.p2": 0}},
1030
+ path=tmp_path,
1031
+ )
1032
+ num_elems = wk.num_elements
1033
+ wk.tasks.t1.add_elements(
1034
+ sequences=[
1035
+ hf.ValueSequence("inputs.p1", values=[103, 104, 105], nesting_order=1)
1036
+ ],
1037
+ propagate_to=[
1038
+ hf.ElementPropagation(task=wk.tasks.t2, nesting_order={"inputs.p2": 1}),
1039
+ ],
1040
+ )
1041
+ num_elems_new = wk.num_elements
1042
+ assert num_elems_new - num_elems == 6
1043
+
1044
+
1045
+ def test_task_add_elements_sequence_with_propagation_expected_task_num_elements(
1046
+ tmp_path: Path,
1047
+ ):
1048
+ wk = make_workflow(
1049
+ schemas_spec=[
1050
+ ({"p1": None}, ("p2",), "t1"),
1051
+ ({"p2": None}, (), "t2"),
1052
+ ],
1053
+ local_sequences={0: [("inputs.p1", 2, 0)]},
1054
+ nesting_orders={1: {"inputs.p2": 0}},
1055
+ path=tmp_path,
1056
+ )
1057
+ num_elems = [task.num_elements for task in wk.tasks]
1058
+ wk.tasks.t1.add_elements(
1059
+ sequences=[
1060
+ hf.ValueSequence("inputs.p1", values=[103, 104, 105], nesting_order=1)
1061
+ ],
1062
+ propagate_to=[
1063
+ hf.ElementPropagation(task=wk.tasks.t2, nesting_order={"inputs.p2": 1}),
1064
+ ],
1065
+ )
1066
+ num_elems_new = [task.num_elements for task in wk.tasks]
1067
+ num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
1068
+ assert num_elems_diff[0] == 3 and num_elems_diff[1] == 3
1069
+
1070
+
1071
+ def test_task_add_elements_sequence_with_propagation_expected_new_data_index(
1072
+ tmp_path: Path,
1073
+ ):
1074
+ wk = make_workflow(
1075
+ schemas_spec=[
1076
+ ({"p1": None}, ("p2",), "t1"),
1077
+ ({"p2": None}, (), "t2"),
1078
+ ],
1079
+ local_sequences={0: [("inputs.p1", 2, 0)]},
1080
+ nesting_orders={1: {"inputs.p2": 0}},
1081
+ path=tmp_path,
1082
+ )
1083
+ t1_num_elems = wk.tasks.t1.num_elements
1084
+ t2_num_elems = wk.tasks.t2.num_elements
1085
+ wk.tasks.t1.add_elements(
1086
+ sequences=[
1087
+ hf.ValueSequence("inputs.p1", values=[103, 104, 105], nesting_order=1)
1088
+ ],
1089
+ propagate_to=[
1090
+ hf.ElementPropagation(task=wk.tasks.t2, nesting_order={"inputs.p2": 1}),
1091
+ ],
1092
+ )
1093
+ t1_num_elems_new = wk.tasks.t1.num_elements
1094
+ t2_num_elems_new = wk.tasks.t2.num_elements
1095
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
1096
+ new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
1097
+ new_elems_t2 = data_index_new[
1098
+ t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
1099
+ ]
1100
+ assert new_elems_t1 == [
1101
+ ["inputs.p1", "outputs.p2", "resources.any"],
1102
+ ["inputs.p1", "outputs.p2", "resources.any"],
1103
+ ["inputs.p1", "outputs.p2", "resources.any"],
1104
+ ] and new_elems_t2 == [
1105
+ ["inputs.p2", "resources.any"],
1106
+ ["inputs.p2", "resources.any"],
1107
+ ["inputs.p2", "resources.any"],
1108
+ ]
1109
+
1110
+
1111
+ def test_task_add_elements_sequence_with_propagation_into_sequence_expected_workflow_num_elements(
1112
+ tmp_path: Path,
1113
+ ):
1114
+ wk = make_workflow(
1115
+ schemas_spec=[
1116
+ ({"p1": None}, ("p2",), "t1"),
1117
+ ({"p2": None, "p3": None}, (), "t2"),
1118
+ ],
1119
+ local_sequences={0: [("inputs.p1", 2, 1)], 1: [("inputs.p3", 3, 1)]},
1120
+ nesting_orders={1: {"inputs.p2": 0}},
1121
+ path=tmp_path,
1122
+ )
1123
+ num_elems = wk.num_elements
1124
+ wk.tasks.t1.add_elements(
1125
+ sequences=[
1126
+ hf.ValueSequence("inputs.p1", values=[103, 104, 105], nesting_order=1)
1127
+ ],
1128
+ propagate_to=[
1129
+ hf.ElementPropagation(
1130
+ task=wk.tasks.t2, nesting_order={"inputs.p2": 1, "inputs.p3": 2}
1131
+ ),
1132
+ ],
1133
+ )
1134
+ num_elems_new = wk.num_elements
1135
+ assert num_elems_new - num_elems == 12
1136
+
1137
+
1138
+ def test_task_add_elements_sequence_with_propagation_into_sequence_expected_task_num_elements(
1139
+ tmp_path: Path,
1140
+ ):
1141
+ wk = make_workflow(
1142
+ schemas_spec=[
1143
+ ({"p1": None}, ("p2",), "t1"),
1144
+ ({"p2": None, "p3": None}, (), "t2"),
1145
+ ],
1146
+ local_sequences={0: [("inputs.p1", 2, 1)], 1: [("inputs.p3", 3, 1)]},
1147
+ nesting_orders={1: {"inputs.p2": 0}},
1148
+ path=tmp_path,
1149
+ )
1150
+ num_elems = [task.num_elements for task in wk.tasks]
1151
+ wk.tasks.t1.add_elements(
1152
+ sequences=[
1153
+ hf.ValueSequence("inputs.p1", values=[103, 104, 105], nesting_order=1)
1154
+ ],
1155
+ propagate_to=[
1156
+ hf.ElementPropagation(
1157
+ task=wk.tasks.t2, nesting_order={"inputs.p2": 1, "inputs.p3": 2}
1158
+ ),
1159
+ ],
1160
+ )
1161
+ num_elems_new = [task.num_elements for task in wk.tasks]
1162
+ num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
1163
+ assert num_elems_diff[0] == 3 and num_elems_diff[1] == 9
1164
+
1165
+
1166
+ def test_task_add_elements_sequence_with_propagation_into_sequence_expected_new_data_index(
1167
+ tmp_path: Path,
1168
+ ):
1169
+ wk = make_workflow(
1170
+ schemas_spec=[
1171
+ ({"p1": None}, ("p2",), "t1"),
1172
+ ({"p2": None, "p3": None}, (), "t2"),
1173
+ ],
1174
+ local_sequences={0: [("inputs.p1", 2, 1)], 1: [("inputs.p3", 3, 1)]},
1175
+ nesting_orders={1: {"inputs.p2": 0}},
1176
+ path=tmp_path,
1177
+ )
1178
+
1179
+ t1_num_elems = wk.tasks.t1.num_elements
1180
+ t2_num_elems = wk.tasks.t2.num_elements
1181
+ wk.tasks.t1.add_elements(
1182
+ sequences=[
1183
+ hf.ValueSequence("inputs.p1", values=[103, 104, 105], nesting_order=1)
1184
+ ],
1185
+ propagate_to=[
1186
+ hf.ElementPropagation(
1187
+ task=wk.tasks.t2, nesting_order={"inputs.p2": 1, "inputs.p3": 2}
1188
+ ),
1189
+ ],
1190
+ )
1191
+ t1_num_elems_new = wk.tasks.t1.num_elements
1192
+ t2_num_elems_new = wk.tasks.t2.num_elements
1193
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
1194
+ new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
1195
+ new_elems_t2 = data_index_new[
1196
+ t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
1197
+ ]
1198
+ assert new_elems_t1 == [
1199
+ ["inputs.p1", "outputs.p2", "resources.any"],
1200
+ ["inputs.p1", "outputs.p2", "resources.any"],
1201
+ ["inputs.p1", "outputs.p2", "resources.any"],
1202
+ ] and new_elems_t2 == [
1203
+ ["inputs.p2", "inputs.p3", "resources.any"],
1204
+ ["inputs.p2", "inputs.p3", "resources.any"],
1205
+ ["inputs.p2", "inputs.p3", "resources.any"],
1206
+ ["inputs.p2", "inputs.p3", "resources.any"],
1207
+ ["inputs.p2", "inputs.p3", "resources.any"],
1208
+ ["inputs.p2", "inputs.p3", "resources.any"],
1209
+ ["inputs.p2", "inputs.p3", "resources.any"],
1210
+ ["inputs.p2", "inputs.p3", "resources.any"],
1211
+ ["inputs.p2", "inputs.p3", "resources.any"],
1212
+ ]
1213
+
1214
+
1215
+ def test_task_add_elements_multi_task_dependence_expected_workflow_num_elements(
1216
+ tmp_path: Path, param_p1: Parameter
1217
+ ):
1218
+ wk = make_workflow(
1219
+ schemas_spec=[
1220
+ ({"p1": None}, ("p3",), "t1"),
1221
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1222
+ ({"p3": None, "p4": None}, (), "t3"),
1223
+ ],
1224
+ local_inputs={0: ("p1",)},
1225
+ local_sequences={1: [("inputs.p2", 2, 1)]},
1226
+ nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
1227
+ input_sources={2: {"p3": [hf.InputSource.task(1, "input")]}},
1228
+ path=tmp_path,
1229
+ )
1230
+ num_elems = wk.num_elements
1231
+ num_task_elems = [task.num_elements for task in wk.tasks]
1232
+ wk.tasks.t1.add_elements(
1233
+ inputs=[hf.InputValue(param_p1, 102)],
1234
+ propagate_to={
1235
+ "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
1236
+ "t3": {
1237
+ "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
1238
+ "input_sources": {"p3": [hf.InputSource.task(1, "input")]},
1239
+ },
1240
+ },
1241
+ )
1242
+ num_elems_new = wk.num_elements
1243
+ assert num_elems_new - num_elems == 7
1244
+
1245
+ num_task_elems_new = [task.num_elements for task in wk.tasks]
1246
+ num_elems_diff = [i - j for i, j in zip(num_task_elems_new, num_task_elems)]
1247
+ assert num_elems_diff == [1, 2, 4]
1248
+
1249
+
1250
+ def test_task_add_elements_multi_task_dependence_expected_task_num_elements_custom_input_source(
1251
+ tmp_path: Path, param_p1: Parameter
1252
+ ):
1253
+ wk = make_workflow(
1254
+ schemas_spec=[
1255
+ ({"p1": None}, ("p3",), "t1"),
1256
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1257
+ ({"p3": None, "p4": None}, (), "t3"),
1258
+ ],
1259
+ local_inputs={0: ("p1",)},
1260
+ local_sequences={1: [("inputs.p2", 2, 1)]},
1261
+ nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
1262
+ input_sources={
1263
+ 2: {"p3": [hf.InputSource.task(0)]}
1264
+ }, # override default (t2 input)
1265
+ path=tmp_path,
1266
+ )
1267
+ num_elems = [task.num_elements for task in wk.tasks]
1268
+ wk.tasks.t1.add_elements(
1269
+ inputs=[hf.InputValue(param_p1, 102)],
1270
+ propagate_to={
1271
+ "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
1272
+ "t3": {
1273
+ "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
1274
+ "input_sources": {"p3": [hf.InputSource.task(0)]}, # override default
1275
+ },
1276
+ },
1277
+ )
1278
+ num_elems_new = [task.num_elements for task in wk.tasks]
1279
+ num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
1280
+ assert num_elems_diff == [1, 2, 2]
1281
+
1282
+
1283
+ def test_task_add_elements_multi_task_dependence_expected_new_data_index(
+     tmp_path: Path, param_p1: Parameter
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             ({"p1": None}, ("p3",), "t1"),
+             ({"p2": None, "p3": None}, ("p4",), "t2"),
+             ({"p3": None, "p4": None}, (), "t3"),
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={2: {"p3": [hf.InputSource.task(1, "input")]}},
+         path=tmp_path,
+     )
+     t1_num_elems = wk.tasks.t1.num_elements
+     t2_num_elems = wk.tasks.t2.num_elements
+     t3_num_elems = wk.tasks.t3.num_elements
+     wk.tasks.t1.add_elements(
+         inputs=[hf.InputValue(param_p1, 102)],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(1, "input")]},
+             },
+         },
+     )
+     t1_num_elems_new = wk.tasks.t1.num_elements
+     t2_num_elems_new = wk.tasks.t2.num_elements
+     t3_num_elems_new = wk.tasks.t3.num_elements
+     data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
+     new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
+     new_elems_t2 = data_index_new[
+         t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
+     ]
+     new_elems_t3 = data_index_new[
+         t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems : t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems_new
+     ]
+
+     assert (
+         new_elems_t1 == [["inputs.p1", "outputs.p3", "resources.any"]]
+         and new_elems_t2
+         == [["inputs.p2", "inputs.p3", "outputs.p4", "resources.any"]] * 2
+         and new_elems_t3 == [["inputs.p3", "inputs.p4", "resources.any"]] * 4
+     )
+
+
+ def test_task_add_elements_multi_task_dependence_expected_new_data_index_custom_input_source(
+     tmp_path: Path, param_p1: Parameter
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             ({"p1": None}, ("p3",), "t1"),
+             ({"p2": None, "p3": None}, ("p4",), "t2"),
+             ({"p3": None, "p4": None}, (), "t3"),
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={
+             2: {"p3": [hf.InputSource.task(0)]}
+         },  # override default (t2 input)
+         path=tmp_path,
+     )
+     t1_num_elems = wk.tasks.t1.num_elements
+     t2_num_elems = wk.tasks.t2.num_elements
+     t3_num_elems = wk.tasks.t3.num_elements
+     wk.tasks.t1.add_elements(
+         inputs=[hf.InputValue(param_p1, 102)],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(0)]},  # override default
+             },
+         },
+     )
+     t1_num_elems_new = wk.tasks.t1.num_elements
+     t2_num_elems_new = wk.tasks.t2.num_elements
+     t3_num_elems_new = wk.tasks.t3.num_elements
+     data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
+     new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
+     new_elems_t2 = data_index_new[
+         t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
+     ]
+     new_elems_t3 = data_index_new[
+         t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems : t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems_new
+     ]
+
+     assert (
+         new_elems_t1 == [["inputs.p1", "outputs.p3", "resources.any"]]
+         and new_elems_t2
+         == [["inputs.p2", "inputs.p3", "outputs.p4", "resources.any"]] * 2
+         and new_elems_t3 == [["inputs.p3", "inputs.p4", "resources.any"]] * 2
+     )
+
+
+ def test_task_add_elements_sequence_multi_task_dependence_workflow_num_elements(
+     tmp_path: Path,
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             ({"p1": None}, ("p3",), "t1"),
+             ({"p2": None, "p3": None}, ("p4",), "t2"),
+             ({"p3": None, "p4": None}, (), "t3"),
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={2: {"p3": [hf.InputSource.task(1, "input")]}},
+         path=tmp_path,
+     )
+     num_elems = wk.num_elements
+     wk.tasks.t1.add_elements(
+         sequences=[
+             hf.ValueSequence("inputs.p1", values=[102, 103, 104], nesting_order=1)
+         ],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(1, "input")]},
+             },
+         },
+     )
+     num_elems_new = wk.num_elements
+     assert num_elems_new - num_elems == 45
+
+
+ def test_task_add_elements_sequence_multi_task_dependence_workflow_num_elements_custom_input_source(
+     tmp_path: Path,
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             ({"p1": None}, ("p3",), "t1"),
+             ({"p2": None, "p3": None}, ("p4",), "t2"),
+             ({"p3": None, "p4": None}, (), "t3"),
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={
+             2: {"p3": [hf.InputSource.task(0)]}
+         },  # override default (t2 input)
+         path=tmp_path,
+     )
+     num_elems = wk.num_elements
+     wk.tasks.t1.add_elements(
+         sequences=[
+             hf.ValueSequence("inputs.p1", values=[102, 103, 104], nesting_order=1)
+         ],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(0)]},  # override default
+             },
+         },
+     )
+     num_elems_new = wk.num_elements
+     assert num_elems_new - num_elems == 27
+
+
+ def test_task_add_elements_sequence_multi_task_dependence_expected_task_num_elements(
+     tmp_path: Path,
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             ({"p1": None}, ("p3",), "t1"),
+             ({"p2": None, "p3": None}, ("p4",), "t2"),
+             ({"p3": None, "p4": None}, (), "t3"),
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={2: {"p3": [hf.InputSource.task(1, "input")]}},
+         path=tmp_path,
+     )
+     num_elems = [task.num_elements for task in wk.tasks]
+     wk.tasks.t1.add_elements(
+         sequences=[
+             hf.ValueSequence("inputs.p1", values=[102, 103, 104], nesting_order=1)
+         ],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(1, "input")]},
+             },
+         },
+     )
+     num_elems_new = [task.num_elements for task in wk.tasks]
+     num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
+     assert num_elems_diff == [3, 6, 36]
+
+
+ def test_task_add_elements_sequence_multi_task_dependence_expected_task_num_elements_custom_input_source(
+     tmp_path: Path,
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             ({"p1": None}, ("p3",), "t1"),
+             ({"p2": None, "p3": None}, ("p4",), "t2"),
+             ({"p3": None, "p4": None}, (), "t3"),
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={
+             2: {"p3": [hf.InputSource.task(0)]}
+         },  # override default (t2 input)
+         path=tmp_path,
+     )
+     num_elems = [task.num_elements for task in wk.tasks]
+     wk.tasks.t1.add_elements(
+         sequences=[
+             hf.ValueSequence("inputs.p1", values=[102, 103, 104], nesting_order=1)
+         ],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(0)]},  # override default
+             },
+         },
+     )
+     num_elems_new = [task.num_elements for task in wk.tasks]
+     num_elems_diff = [i - j for i, j in zip(num_elems_new, num_elems)]
+     assert num_elems_diff == [3, 6, 18]
+
+
+ def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_index(
+     tmp_path: Path,
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             ({"p1": None}, ("p3",), "t1"),
+             ({"p2": None, "p3": None}, ("p4",), "t2"),
+             ({"p3": None, "p4": None}, (), "t3"),
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={2: {"p3": [hf.InputSource.task(1, "input")]}},
+         path=tmp_path,
+     )
+     t1_num_elems = wk.tasks.t1.num_elements
+     t2_num_elems = wk.tasks.t2.num_elements
+     t3_num_elems = wk.tasks.t3.num_elements
+     wk.tasks.t1.add_elements(
+         sequences=[
+             hf.ValueSequence("inputs.p1", values=[102, 103, 104], nesting_order=1)
+         ],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(1, "input")]},
+             },
+         },
+     )
+     t1_num_elems_new = wk.tasks.t1.num_elements
+     t2_num_elems_new = wk.tasks.t2.num_elements
+     t3_num_elems_new = wk.tasks.t3.num_elements
+
+     data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
+     new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
+     new_elems_t2 = data_index_new[
+         t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
+     ]
+     new_elems_t3 = data_index_new[
+         t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems : t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems_new
+     ]
+     assert (
+         new_elems_t1 == [["inputs.p1", "outputs.p3", "resources.any"]] * 3
+         and new_elems_t2
+         == [["inputs.p2", "inputs.p3", "outputs.p4", "resources.any"]] * 6
+         and new_elems_t3 == [["inputs.p3", "inputs.p4", "resources.any"]] * 36
+     )
+
+
+ def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_index_custom_input_source(
+     tmp_path: Path,
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             ({"p1": None}, ("p3",), "t1"),
+             ({"p2": None, "p3": None}, ("p4",), "t2"),
+             ({"p3": None, "p4": None}, (), "t3"),
+         ],
+         local_inputs={0: ("p1",)},
+         local_sequences={1: [("inputs.p2", 2, 1)]},
+         nesting_orders={2: {"inputs.p3": 0, "inputs.p4": 1}},
+         input_sources={
+             2: {"p3": [hf.InputSource.task(0)]}
+         },  # override default (t2 input)
+         path=tmp_path,
+     )
+     t1_num_elems = wk.tasks.t1.num_elements
+     t2_num_elems = wk.tasks.t2.num_elements
+     t3_num_elems = wk.tasks.t3.num_elements
+     wk.tasks.t1.add_elements(
+         sequences=[
+             hf.ValueSequence("inputs.p1", values=[102, 103, 104], nesting_order=1)
+         ],
+         propagate_to={
+             "t2": {"nesting_order": {"inputs.p2": 0, "inputs.p3": 1}},
+             "t3": {
+                 "nesting_order": {"inputs.p3": 0, "inputs.p4": 1},
+                 "input_sources": {"p3": [hf.InputSource.task(0)]},  # override default
+             },
+         },
+     )
+     t1_num_elems_new = wk.tasks.t1.num_elements
+     t2_num_elems_new = wk.tasks.t2.num_elements
+     t3_num_elems_new = wk.tasks.t3.num_elements
+
+     data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
+     new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
+     new_elems_t2 = data_index_new[
+         t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
+     ]
+     new_elems_t3 = data_index_new[
+         t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems : t1_num_elems_new
+         + t2_num_elems_new
+         + t3_num_elems_new
+     ]
+     assert (
+         new_elems_t1 == [["inputs.p1", "outputs.p3", "resources.any"]] * 3
+         and new_elems_t2
+         == [["inputs.p2", "inputs.p3", "outputs.p4", "resources.any"]] * 6
+         and new_elems_t3 == [["inputs.p3", "inputs.p4", "resources.any"]] * 18
+     )
+
+
+ def test_task_add_elements_simple_dependence_three_tasks(
+     tmp_path: Path, param_p1: Parameter
+ ):
+     wk = make_workflow(
+         schemas_spec=[
+             ({"p1": None}, ("p2",), "t1"),
+             ({"p2": None}, ("p3",), "t2"),
+             ({"p3": None}, (), "t3"),
+         ],
+         local_inputs={0: ("p1",)},
+         path=tmp_path,
+     )
+     num_elems = [i.num_elements for i in wk.tasks]
+     wk.tasks.t1.add_elements(
+         inputs=[hf.InputValue(param_p1, 102)],
+         propagate_to={"t2": {}, "t3": {}},
+     )
+     num_elems_new = [i.num_elements for i in wk.tasks]
+     assert num_elems_new == [i + 1 for i in num_elems]
+
+
+ def test_no_change_to_tasks_metadata_on_add_task_failure(tmp_path: Path):
+     wk = make_workflow(
+         schemas_spec=[({"p1": NullDefault.NULL}, (), "t1")],
+         local_inputs={0: ("p1",)},
+         path=tmp_path,
+     )
+     tasks_meta = copy.deepcopy(wk._store.get_tasks())
+
+     (s2,) = make_schemas(({"p1": NullDefault.NULL, "p3": NullDefault.NULL}, ()))
+     t2 = hf.Task(schema=s2)
+     with pytest.raises(MissingInputs) as exc_info:
+         wk.add_task(t2)
+
+     assert wk._store.get_tasks() == tasks_meta
+
+
+ def test_no_change_to_parameter_data_on_add_task_failure(
+     tmp_path: Path, param_p2: Parameter, param_p3: Parameter
+ ):
+     wk = make_workflow(
+         schemas_spec=[({"p1": NullDefault.NULL}, (), "t1")],
+         local_inputs={0: ("p1",)},
+         path=tmp_path,
+     )
+     param_data: list = copy.deepcopy(wk.get_all_parameters())
+     (s2,) = make_schemas(
+         ({"p1": NullDefault.NULL, "p2": NullDefault.NULL, "p3": NullDefault.NULL}, ())
+     )
+     t2 = hf.Task(schema=s2, inputs=[hf.InputValue(param_p2, 201)])
+     with pytest.raises(MissingInputs) as exc_info:
+         wk.add_task(t2)
+
+     assert wk.get_all_parameters() == param_data
+
+
+ def test_expected_additional_parameter_data_on_add_task(
+     tmp_path: Path, param_p3: Parameter
+ ):
+     wk = make_workflow(
+         schemas_spec=[({"p1": NullDefault.NULL}, (), "t1")],
+         local_inputs={0: ("p1",)},
+         path=tmp_path,
+     )
+     param_data = copy.deepcopy(wk.get_all_parameter_data())
+
+     (s2,) = make_schemas(({"p1": NullDefault.NULL, "p3": NullDefault.NULL}, ()))
+     t2 = hf.Task(schema=s2, inputs=[hf.InputValue(param_p3, 301)])
+     wk.add_task(t2)
+
+     param_data_new = wk.get_all_parameter_data()
+
+     new_keys = sorted(set(param_data_new).difference(param_data))
+     new_data = [param_data_new[k] for k in new_keys]
+
+     # one new key for resources, one for param_p3 value
+     assert len(new_data) == 2
+     assert new_data[1] == 301
+
+
+ def test_parameters_accepted_on_add_task(tmp_path: Path, param_p3: Parameter):
+     wk = make_workflow(
+         schemas_spec=[({"p1": None}, (), "t1")],
+         local_inputs={0: ("p1",)},
+         path=tmp_path,
+     )
+     (s2,) = make_schemas(({"p1": None, "p3": None}, ()))
+     t2 = hf.Task(schema=s2, inputs=[hf.InputValue(param_p3, 301)])
+     wk.add_task(t2)
+     assert not wk._store._pending.add_parameters
+
+
+ def test_parameters_pending_during_add_task(tmp_path: Path, param_p3: Parameter):
+     wk = make_workflow(
+         schemas_spec=[({"p1": None}, (), "t1")],
+         local_inputs={0: ("p1",)},
+         path=tmp_path,
+     )
+     (s2,) = make_schemas(({"p1": None, "p3": None}, ()))
+     t2 = hf.Task(schema=s2, inputs=[hf.InputValue(param_p3, 301)])
+     with wk.batch_update():
+         wk.add_task(t2)
+         assert wk._store._pending.add_parameters
+
+
+ def test_add_task_after(workflow_w0: Workflow):
+     new_task = hf.Task(schema=hf.TaskSchema(objective="after_t1", actions=[]))
+     workflow_w0.add_task_after(new_task, workflow_w0.tasks.t1)
+     assert [i.name for i in workflow_w0.tasks] == ["t1", "after_t1", "t2"]
+
+
+ def test_add_task_after_no_ref(workflow_w0: Workflow):
+     new_task = hf.Task(schema=hf.TaskSchema(objective="at_end", actions=[]))
+     workflow_w0.add_task_after(new_task)
+     assert [i.name for i in workflow_w0.tasks] == ["t1", "t2", "at_end"]
+
+
+ def test_add_task_before(workflow_w0: Workflow):
+     new_task = hf.Task(schema=hf.TaskSchema(objective="before_t2", actions=[]))
+     workflow_w0.add_task_before(new_task, workflow_w0.tasks.t2)
+     assert [i.name for i in workflow_w0.tasks] == ["t1", "before_t2", "t2"]
+
+
+ def test_add_task_before_no_ref(workflow_w0: Workflow):
+     new_task = hf.Task(schema=hf.TaskSchema(objective="at_start", actions=[]))
+     workflow_w0.add_task_before(new_task)
+     assert [i.name for i in workflow_w0.tasks] == ["at_start", "t1", "t2"]
+
+
+ def test_parameter_two_modifying_actions_expected_data_indices(
+     tmp_path: Path, act_env_1: ActionEnvironment, param_p1: Parameter
+ ):
+     act1 = hf.Action(
+         commands=[hf.Command("doSomething <<parameter:p1>>", stdout="<<parameter:p1>>")],
+         environments=[act_env_1],
+     )
+     act2 = hf.Action(
+         commands=[hf.Command("doSomething <<parameter:p1>>", stdout="<<parameter:p1>>")],
+         environments=[act_env_1],
+     )
+
+     s1 = hf.TaskSchema("t1", actions=[act1, act2], inputs=[param_p1], outputs=[param_p1])
+     t1 = hf.Task(schema=[s1], inputs=[hf.InputValue(param_p1, 101)])
+
+     wkt = hf.WorkflowTemplate(name="w3", tasks=[t1])
+     wk = hf.Workflow.from_template(template=wkt, path=tmp_path)
+     iter_0 = wk.tasks.t1.elements[0].iterations[0]
+     act_runs = iter_0.action_runs
+
+     p1_idx_schema_in = iter_0.data_idx["inputs.p1"]
+     p1_idx_schema_out = iter_0.data_idx["outputs.p1"]
+
+     p1_idx_0 = act_runs[0].data_idx["inputs.p1"]
+     p1_idx_1 = act_runs[0].data_idx["outputs.p1"]
+     p1_idx_2 = act_runs[1].data_idx["inputs.p1"]
+     p1_idx_3 = act_runs[1].data_idx["outputs.p1"]
+
+     assert (
+         p1_idx_schema_in == p1_idx_0
+         and p1_idx_1 == p1_idx_2
+         and p1_idx_3 == p1_idx_schema_out
+     )
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_conditional_shell_schema_single_initialised_action(
+     null_config, tmp_path: Path, store: str
+ ):
+     rules = {
+         "posix": hf.ActionRule(
+             rule=hf.Rule(path="resources.os_name", condition=Value.equal_to("posix"))
+         ),
+         "nt": hf.ActionRule(
+             rule=hf.Rule(path="resources.os_name", condition=Value.equal_to("nt"))
+         ),
+     }
+     s1 = hf.TaskSchema(
+         objective="test_conditional_on_shell",
+         inputs=[hf.SchemaInput("p1")],
+         outputs=[hf.SchemaInput("p2")],
+         actions=[
+             hf.Action(
+                 environments=[hf.ActionEnvironment("null_env")],
+                 commands=[
+                     hf.Command(
+                         command="echo $((<<parameter:p1>> + 100))",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ],
+                 rules=[rules["posix"]],
+             ),
+             hf.Action(
+                 environments=[hf.ActionEnvironment("null_env")],
+                 commands=[
+                     hf.Command(
+                         command="Write-Output ((<<parameter:p1>> + 100))",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ],
+                 rules=[rules["nt"]],
+             ),
+         ],
+     )
+     t1 = hf.Task(schema=[s1], inputs=[hf.InputValue("p1", 101)])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     action_runs = wk.tasks[0].elements[0].iterations[0].action_runs
+     assert len(action_runs) == 1
+     assert wk.tasks[0].elements[0].iterations[0].EARs_initialised
+     assert action_runs[0].action.rules[0] == rules[os.name]
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_element_iteration_EARs_initialised_on_make_workflow(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput("p1")],
+         outputs=[hf.SchemaInput("p2")],
+         actions=[
+             hf.Action(
+                 environments=[hf.ActionEnvironment("null_env")],
+                 commands=[
+                     hf.Command(
+                         command="echo $((<<parameter:p1>> + 100))",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ],
+             ),
+         ],
+     )
+     t1 = hf.Task(schema=[s1], inputs=[hf.InputValue("p1", 101)])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     assert wk.tasks[0].elements[0].iterations[0].EARs_initialised
+     assert len(wk.tasks[0].elements[0].iterations[0].action_runs) == 1
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_element_iteration_EARs_initialised_on_make_workflow_with_no_actions(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput("p1")],
+         actions=[],
+     )
+     t1 = hf.Task(schema=[s1], inputs=[hf.InputValue("p1", 101)])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     assert wk.tasks[0].elements[0].iterations[0].EARs_initialised
+     assert not wk.tasks[0].elements[0].iterations[0].action_runs
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_element_iteration_EARs_not_initialised_on_make_workflow_due_to_unset(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput("p1")],
+         outputs=[hf.SchemaInput("p2")],
+         actions=[
+             hf.Action(
+                 environments=[hf.ActionEnvironment("null_env")],
+                 commands=[
+                     hf.Command(
+                         command="echo $((<<parameter:p1>> + 100))",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ],
+             ),
+         ],
+     )
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[hf.SchemaInput("p2")],
+         outputs=[hf.SchemaInput("p3")],
+         actions=[
+             hf.Action(
+                 environments=[hf.ActionEnvironment("null_env")],
+                 commands=[
+                     hf.Command(
+                         command="echo $((<<parameter:p2>> + 100))",
+                         stdout="<<parameter:p3>>",
+                     )
+                 ],
+                 rules=[hf.ActionRule(path="inputs.p2", condition=Value.less_than(500))],
+             ),
+         ],
+     )
+     t1 = hf.Task(schema=[s1], inputs=[hf.InputValue("p1", 101)])
+     t2 = hf.Task(schema=[s2])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1, t2],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     # second task cannot initialise runs because it depends on the value of an output of
+     # the first task:
+     assert not wk.tasks[1].elements[0].iterations[0].EARs_initialised
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_element_iteration_EARs_initialised_on_make_workflow_with_no_valid_actions(
+     null_config, tmp_path: Path, store: str
+ ):
+     rules = {
+         "posix": hf.ActionRule(
+             rule=hf.Rule(path="resources.os_name", condition=Value.equal_to("posix"))
+         ),
+         "nt": hf.ActionRule(
+             rule=hf.Rule(path="resources.os_name", condition=Value.equal_to("nt"))
+         ),
+     }
+     s1 = hf.TaskSchema(
+         objective="test_conditional_on_shell",
+         inputs=[hf.SchemaInput("p1")],
+         outputs=[hf.SchemaInput("p2")],
+         actions=[
+             hf.Action(
+                 environments=[hf.ActionEnvironment("null_env")],
+                 commands=[
+                     hf.Command(
+                         command="some command that uses <<parameter:p1>>",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ],
+                 rules=[rules["posix"] if os.name == "nt" else rules["nt"]],
+             ),
+         ],
+     )
+     t1 = hf.Task(schema=[s1], inputs=[hf.InputValue("p1", 101)])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     action_runs = wk.tasks[0].elements[0].iterations[0].action_runs
+     assert len(action_runs) == 0
+     assert wk.tasks[0].elements[0].iterations[0].EARs_initialised
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_get_merged_parameter_data_unset_data_raise(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="Write-Output (<<parameter:p1>> + 100)",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     t1 = hf.Task(schema=s1, inputs=[hf.InputValue("p1", value=1)])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     data_idx = wk.tasks.t1.elements[0].get_data_idx()
+     with pytest.raises(UnsetParameterDataError):
+         wk.tasks.t1._get_merged_parameter_data(
+             data_index=data_idx,
+             path="outputs.p2",
+             raise_on_unset=True,
+         )
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_get_merged_parameter_data_unset_data_no_raise(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="Write-Output (<<parameter:p1>> + 100)",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     t1 = hf.Task(schema=s1, inputs=[hf.InputValue("p1", value=1)])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     data_idx = wk.tasks.t1.elements[0].get_data_idx()
+     assert None == wk.tasks.t1._get_merged_parameter_data(
+         data_index=data_idx,
+         path="outputs.p2",
+         raise_on_unset=False,
+     )
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_get_merged_parameter_data_missing_data_raise(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="Write-Output (<<parameter:p1>> + 100)",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     t1 = hf.Task(schema=s1, inputs=[hf.InputValue("p1", value=1)])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     data_idx = wk.tasks.t1.elements[0].get_data_idx()
+     with pytest.raises(ValueError):
+         wk.tasks.t1._get_merged_parameter_data(
+             data_index=data_idx,
+             path="inputs.p4",
+             raise_on_missing=True,
+         )
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_get_merged_parameter_data_missing_data_no_raise(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="Write-Output (<<parameter:p1>> + 100)",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     t1 = hf.Task(schema=s1, inputs=[hf.InputValue("p1", value=1)])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     data_idx = wk.tasks.t1.elements[0].get_data_idx()
+     assert None == wk.tasks.t1._get_merged_parameter_data(
+         data_index=data_idx,
+         path="inputs.p4",
+         raise_on_missing=False,
+     )
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_get_merged_parameter_data_group_unset_data_raise(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="Write-Output (<<parameter:p1>> + 100)",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p2"), group="my_group")],
+     )
+     t1 = hf.Task(
+         schema=s1,
+         sequences=[hf.ValueSequence("inputs.p1", values=[1, 2], nesting_order=0)],
+         groups=[hf.ElementGroup(name="my_group")],
+     )
+     t2 = hf.Task(schema=s2)
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1, t2],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     data_idx_t1 = wk.tasks.t1.elements[0].get_data_idx()
+     data_idx_t2 = wk.tasks.t2.elements[0].get_data_idx()
+     with pytest.raises(UnsetParameterDataError):
+         wk.tasks.t1._get_merged_parameter_data(
+             data_index=data_idx_t1,
+             path="outputs.p2",
+             raise_on_unset=True,
+         )
+     with pytest.raises(UnsetParameterDataError):
+         wk.tasks.t2._get_merged_parameter_data(
+             data_index=data_idx_t2,
+             path="inputs.p2",
+             raise_on_unset=True,
+         )
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_get_merged_parameter_data_group_unset_data_no_raise(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="Write-Output (<<parameter:p1>> + 100)",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p2"), group="my_group")],
+     )
+     t1 = hf.Task(
+         schema=s1,
+         sequences=[hf.ValueSequence("inputs.p1", values=[1, 2], nesting_order=0)],
+         groups=[hf.ElementGroup(name="my_group")],
+     )
+     t2 = hf.Task(schema=s2)
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1, t2],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     data_idx_t1 = wk.tasks.t1.elements[0].get_data_idx()
+     data_idx_t2 = wk.tasks.t2.elements[0].get_data_idx()
+     assert None == wk.tasks.t1._get_merged_parameter_data(
+         data_index=data_idx_t1,
+         path="outputs.p2",
+         raise_on_unset=False,
+     )
+     assert [None, None] == wk.tasks.t2._get_merged_parameter_data(
+         data_index=data_idx_t2,
+         path="inputs.p2",
+         raise_on_unset=False,
+     )
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_get_merged_parameter_data_group_missing_data_raise(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="Write-Output (<<parameter:p1>> + 100)",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p2"), group="my_group")],
+     )
+     t1 = hf.Task(
+         schema=s1,
+         sequences=[hf.ValueSequence("inputs.p1", values=[1, 2], nesting_order=0)],
+         groups=[hf.ElementGroup(name="my_group")],
+     )
+     t2 = hf.Task(schema=s2)
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1, t2],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     data_idx_t1 = wk.tasks.t1.elements[0].get_data_idx()
+     data_idx_t2 = wk.tasks.t2.elements[0].get_data_idx()
+     with pytest.raises(ValueError):
+         wk.tasks.t1._get_merged_parameter_data(
+             data_index=data_idx_t1,
+             path="outputs.p4",
+             raise_on_missing=True,
+         )
+     with pytest.raises(ValueError):
+         wk.tasks.t2._get_merged_parameter_data(
+             data_index=data_idx_t2,
+             path="inputs.p4",
+             raise_on_missing=True,
+         )
+
+
+ @pytest.mark.parametrize("store", ["json", "zarr"])
+ def test_get_merged_parameter_data_group_missing_data_no_raise(
+     null_config, tmp_path: Path, store: str
+ ):
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="Write-Output (<<parameter:p1>> + 100)",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p2"), group="my_group")],
+     )
+     t1 = hf.Task(
+         schema=s1,
+         sequences=[hf.ValueSequence("inputs.p1", values=[1, 2], nesting_order=0)],
+         groups=[hf.ElementGroup(name="my_group")],
+     )
+     t2 = hf.Task(schema=s2)
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1, t2],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+         store=store,
+     )
+     data_idx_t1 = wk.tasks.t1.elements[0].get_data_idx()
+     data_idx_t2 = wk.tasks.t2.elements[0].get_data_idx()
+     assert None == wk.tasks.t1._get_merged_parameter_data(
+         data_index=data_idx_t1,
+         path="outputs.p4",
+         raise_on_missing=False,
+     )
+     assert None == wk.tasks.t2._get_merged_parameter_data(
+         data_index=data_idx_t2,
+         path="inputs.p4",
+         raise_on_missing=False,
+     )
+
+
+ @pytest.fixture
+ def path_to_PV_classes_workflow(null_config, tmp_path: Path) -> Workflow:
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"))],
+         actions=[
+             hf.Action(commands=[hf.Command("Write-Output (<<parameter:p1c>> + 100)")])
+         ],
+     )
+     p1_value = P1(a=10, sub_param=P1_sub_param(e=5))
+     t1 = hf.Task(schema=s1, inputs=[hf.InputValue("p1c", value=p1_value)])
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1],
+         template_name="w1",
+         overwrite=True,
+         path=tmp_path,
+     )
+     return wk
+
+
+ def test_path_to_PV_classes(path_to_PV_classes_workflow: Workflow):
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes("inputs.p1c") == {
+         "inputs.p1c": P1,
+     }
+
+
+ def test_path_to_PV_classes_sub_data(path_to_PV_classes_workflow: Workflow):
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes("inputs.p1c.a") == {
+         "inputs.p1c": P1,
+     }
+
+
+ def test_path_to_PV_classes_sub_parameter(path_to_PV_classes_workflow: Workflow):
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(
+         "inputs.p1c.sub_param"
+     ) == {
+         "inputs.p1c": P1,
+         "inputs.p1c.sub_param": P1_sub_param,
+     }
+
+
+ def test_path_to_PV_classes_multiple_sub_parameters(
+     path_to_PV_classes_workflow: Workflow,
+ ):
+     paths = ["inputs.p1c.sub_param", "inputs.p1c.sub_param_2"]
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(*paths) == {
+         "inputs.p1c": P1,
+         "inputs.p1c.sub_param": P1_sub_param,
+         "inputs.p1c.sub_param_2": P1_sub_param_2,
+     }
+
+
+ def test_path_to_PV_classes_multiple_sub_parameter_attr(
+     path_to_PV_classes_workflow: Workflow,
+ ):
+     paths = ["inputs.p1c.sub_param.e"]
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(*paths) == {
+         "inputs.p1c": P1,
+         "inputs.p1c.sub_param": P1_sub_param,
+     }
+
+
+ def test_path_to_PV_classes_inputs_only_path_ignored(
+     path_to_PV_classes_workflow: Workflow,
+ ):
+     paths_1 = ["inputs", "inputs.p1c"]
+     paths_2 = ["inputs.p1c"]
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(
+         *paths_1
+     ) == path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(*paths_2)
+
+
+ def test_path_to_PV_classes_resources_path_ignored(path_to_PV_classes_workflow: Workflow):
+     paths_1 = ["resources", "inputs.p1c"]
+     paths_2 = ["inputs.p1c"]
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(
+         *paths_1
+     ) == path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(*paths_2)
+
+
+ def test_input_values_specified_by_dict(null_config):
+     ts = hf.TaskSchema(objective="t1", inputs=[hf.SchemaInput("p1")])
+     t1 = hf.Task(schema=ts, inputs=[hf.InputValue(parameter="p1", value=101)])
+     t2 = hf.Task(schema=ts, inputs={"p1": 101})
+     assert t1 == t2
+
+
+ def test_labelled_input_values_specified_by_dict(null_config):
+     ts = hf.TaskSchema(objective="t1", inputs=[hf.SchemaInput("p1", labels={"one": {}})])
+     t1 = hf.Task(
+         schema=ts, inputs=[hf.InputValue(parameter="p1", label="one", value=101)]
+     )
+     t2 = hf.Task(schema=ts, inputs={"p1[one]": 101})
+     assert t1 == t2
+
+
+ def test_raise_UnknownEnvironmentPresetError(null_config):
+     ts = hf.TaskSchema(objective="t1")
+     with pytest.raises(UnknownEnvironmentPresetError):
+         hf.Task(schema=ts, env_preset="my_env_preset")
+
+
+ def test_raise_UnknownEnvironmentPresetError_sequence(null_config):
+     ts = hf.TaskSchema(objective="t1")
+     seq = hf.ValueSequence(path="env_preset", values=["my_env_preset"])
+     with pytest.raises(UnknownEnvironmentPresetError):
+         hf.Task(schema=ts, sequences=[seq])
+
+
+ def test_group_values_input_and_output_source_from_upstream(null_config, tmp_path: Path):
+     """
+     | task | inputs | outputs | group    | num_elements               |
+     | ---- | ------ | ------- | -------- | -------------------------- |
+     | t1   | p0     | p1      | -        | 3                          |
+     | t2   | p1     | p2      | my_group | 3                          |
+     | t3   | p1, p2 | -       | -        | 1 (grouped p1, grouped p2) |
+     """
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput("p0")],
+         outputs=[hf.SchemaOutput("p1")],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="echo <<parameter:p0>> + 1",
+                         stdout="<<parameter:p1>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[hf.SchemaInput("p1")],
+         outputs=[hf.SchemaOutput("p2")],
+         actions=[
+             hf.Action(
+                 commands=[
+                     hf.Command(
+                         command="echo <<parameter:p1>> + 1",
+                         stdout="<<parameter:p2>>",
+                     )
+                 ]
+             )
+         ],
+     )
+     s3 = hf.TaskSchema(
+         objective="t3",
+         inputs=[
+             hf.SchemaInput("p1", group="my_group"),
+             hf.SchemaInput("p2", group="my_group"),
+         ],
+     )
+     t1 = hf.Task(
+         schema=s1,
+         inputs={"p0": 1},
+         repeats=3,
+     )
+     t2 = hf.Task(schema=s2, groups=[hf.ElementGroup("my_group")])
+     t3 = hf.Task(schema=s3, input_sources={"p1": [hf.InputSource.task(1, "input")]})
+     wk = hf.Workflow.from_template_data(
+         template_name="test_group",
+         tasks=[t1, t2, t3],
+         path=tmp_path,
+     )
+     assert wk.tasks[0].num_elements == 3
+     assert wk.tasks[1].num_elements == 3
+     assert wk.tasks[2].num_elements == 1
+     assert [i.value for i in wk.tasks[2].inputs.p1] == [[None, None, None]]
+     assert [i.value for i in wk.tasks[2].inputs.p2] == [[None, None, None]]
+
+
+ def test_is_input_type_required_True(null_config):
+     inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         actions=[
+             hf.Action(
+                 commands=[hf.Command("cat <<file:my_input_file>>")],
+                 input_file_generators=[
+                     hf.InputFileGenerator(
+                         input_file=inp_file,
+                         inputs=[hf.Parameter("p1")],
+                         script="NOT-SET-FOR-THIS-TEST",
+                     ),
+                 ],
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     t1 = hf.Task(schema=s1, inputs={"p1": 100})
+     assert t1.is_input_type_required(typ="p1", element_set=t1.element_sets[0])
+
+
+ def test_is_input_type_required_False(null_config):
+     inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         actions=[
+             hf.Action(
+                 commands=[hf.Command("cat <<file:my_input_file>>")],
+                 input_file_generators=[
+                     hf.InputFileGenerator(
+                         input_file=inp_file,
+                         inputs=[hf.Parameter("p1")],
+                         script="NOT-SET-FOR-THIS-TEST",
+                     ),
+                 ],
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     t1 = hf.Task(
+         schema=s1, input_files=[hf.InputFile(file=inp_file, path="NOT-SET-FOR-THIS-TEST")]
+     )
+     assert not t1.is_input_type_required(typ="p1", element_set=t1.element_sets[0])