hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a190__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (115)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +8 -6
  2. hpcflow/_version.py +1 -1
  3. hpcflow/app.py +1 -0
  4. hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
  5. hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
  6. hpcflow/sdk/__init__.py +21 -15
  7. hpcflow/sdk/app.py +2133 -770
  8. hpcflow/sdk/cli.py +281 -250
  9. hpcflow/sdk/cli_common.py +6 -2
  10. hpcflow/sdk/config/__init__.py +1 -1
  11. hpcflow/sdk/config/callbacks.py +77 -42
  12. hpcflow/sdk/config/cli.py +126 -103
  13. hpcflow/sdk/config/config.py +578 -311
  14. hpcflow/sdk/config/config_file.py +131 -95
  15. hpcflow/sdk/config/errors.py +112 -85
  16. hpcflow/sdk/config/types.py +145 -0
  17. hpcflow/sdk/core/actions.py +1054 -994
  18. hpcflow/sdk/core/app_aware.py +24 -0
  19. hpcflow/sdk/core/cache.py +81 -63
  20. hpcflow/sdk/core/command_files.py +275 -185
  21. hpcflow/sdk/core/commands.py +111 -107
  22. hpcflow/sdk/core/element.py +724 -503
  23. hpcflow/sdk/core/enums.py +192 -0
  24. hpcflow/sdk/core/environment.py +74 -93
  25. hpcflow/sdk/core/errors.py +398 -51
  26. hpcflow/sdk/core/json_like.py +540 -272
  27. hpcflow/sdk/core/loop.py +380 -334
  28. hpcflow/sdk/core/loop_cache.py +160 -43
  29. hpcflow/sdk/core/object_list.py +370 -207
  30. hpcflow/sdk/core/parameters.py +728 -600
  31. hpcflow/sdk/core/rule.py +59 -41
  32. hpcflow/sdk/core/run_dir_files.py +33 -22
  33. hpcflow/sdk/core/task.py +1546 -1325
  34. hpcflow/sdk/core/task_schema.py +240 -196
  35. hpcflow/sdk/core/test_utils.py +126 -88
  36. hpcflow/sdk/core/types.py +387 -0
  37. hpcflow/sdk/core/utils.py +410 -305
  38. hpcflow/sdk/core/validation.py +82 -9
  39. hpcflow/sdk/core/workflow.py +1192 -1028
  40. hpcflow/sdk/core/zarr_io.py +98 -137
  41. hpcflow/sdk/demo/cli.py +46 -33
  42. hpcflow/sdk/helper/cli.py +18 -16
  43. hpcflow/sdk/helper/helper.py +75 -63
  44. hpcflow/sdk/helper/watcher.py +61 -28
  45. hpcflow/sdk/log.py +83 -59
  46. hpcflow/sdk/persistence/__init__.py +8 -31
  47. hpcflow/sdk/persistence/base.py +988 -586
  48. hpcflow/sdk/persistence/defaults.py +6 -0
  49. hpcflow/sdk/persistence/discovery.py +38 -0
  50. hpcflow/sdk/persistence/json.py +408 -153
  51. hpcflow/sdk/persistence/pending.py +158 -123
  52. hpcflow/sdk/persistence/store_resource.py +37 -22
  53. hpcflow/sdk/persistence/types.py +307 -0
  54. hpcflow/sdk/persistence/utils.py +14 -11
  55. hpcflow/sdk/persistence/zarr.py +477 -420
  56. hpcflow/sdk/runtime.py +44 -41
  57. hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
  58. hpcflow/sdk/submission/jobscript.py +444 -404
  59. hpcflow/sdk/submission/schedulers/__init__.py +133 -40
  60. hpcflow/sdk/submission/schedulers/direct.py +97 -71
  61. hpcflow/sdk/submission/schedulers/sge.py +132 -126
  62. hpcflow/sdk/submission/schedulers/slurm.py +263 -268
  63. hpcflow/sdk/submission/schedulers/utils.py +7 -2
  64. hpcflow/sdk/submission/shells/__init__.py +14 -15
  65. hpcflow/sdk/submission/shells/base.py +102 -29
  66. hpcflow/sdk/submission/shells/bash.py +72 -55
  67. hpcflow/sdk/submission/shells/os_version.py +31 -30
  68. hpcflow/sdk/submission/shells/powershell.py +37 -29
  69. hpcflow/sdk/submission/submission.py +203 -257
  70. hpcflow/sdk/submission/types.py +143 -0
  71. hpcflow/sdk/typing.py +163 -12
  72. hpcflow/tests/conftest.py +8 -6
  73. hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
  74. hpcflow/tests/scripts/test_main_scripts.py +60 -30
  75. hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -4
  76. hpcflow/tests/unit/test_action.py +86 -75
  77. hpcflow/tests/unit/test_action_rule.py +9 -4
  78. hpcflow/tests/unit/test_app.py +13 -6
  79. hpcflow/tests/unit/test_cli.py +1 -1
  80. hpcflow/tests/unit/test_command.py +71 -54
  81. hpcflow/tests/unit/test_config.py +20 -15
  82. hpcflow/tests/unit/test_config_file.py +21 -18
  83. hpcflow/tests/unit/test_element.py +58 -62
  84. hpcflow/tests/unit/test_element_iteration.py +3 -1
  85. hpcflow/tests/unit/test_element_set.py +29 -19
  86. hpcflow/tests/unit/test_group.py +4 -2
  87. hpcflow/tests/unit/test_input_source.py +116 -93
  88. hpcflow/tests/unit/test_input_value.py +29 -24
  89. hpcflow/tests/unit/test_json_like.py +44 -35
  90. hpcflow/tests/unit/test_loop.py +65 -58
  91. hpcflow/tests/unit/test_object_list.py +17 -12
  92. hpcflow/tests/unit/test_parameter.py +16 -7
  93. hpcflow/tests/unit/test_persistence.py +48 -35
  94. hpcflow/tests/unit/test_resources.py +20 -18
  95. hpcflow/tests/unit/test_run.py +8 -3
  96. hpcflow/tests/unit/test_runtime.py +2 -1
  97. hpcflow/tests/unit/test_schema_input.py +23 -15
  98. hpcflow/tests/unit/test_shell.py +3 -2
  99. hpcflow/tests/unit/test_slurm.py +8 -7
  100. hpcflow/tests/unit/test_submission.py +39 -19
  101. hpcflow/tests/unit/test_task.py +352 -247
  102. hpcflow/tests/unit/test_task_schema.py +33 -20
  103. hpcflow/tests/unit/test_utils.py +9 -11
  104. hpcflow/tests/unit/test_value_sequence.py +15 -12
  105. hpcflow/tests/unit/test_workflow.py +114 -83
  106. hpcflow/tests/unit/test_workflow_template.py +0 -1
  107. hpcflow/tests/workflows/test_jobscript.py +2 -1
  108. hpcflow/tests/workflows/test_workflows.py +18 -13
  109. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/METADATA +2 -1
  110. hpcflow_new2-0.2.0a190.dist-info/RECORD +165 -0
  111. hpcflow/sdk/core/parallel.py +0 -21
  112. hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
  113. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/LICENSE +0 -0
  114. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/WHEEL +0 -0
  115. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/entry_points.txt +0 -0
hpcflow/tests/unit/test_task.py
@@ -1,8 +1,10 @@
+from __future__ import annotations
 import copy
 import os
 import pytest
+from typing import TYPE_CHECKING
 
-from valida.conditions import Value
+from valida.conditions import Value  # type: ignore
 
 from hpcflow.app import app as hf
 from hpcflow.sdk.core.errors import (
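Note: the hunk above introduces the deferred-annotations pattern applied throughout this release: with from __future__ import annotations, every annotation is stored as a string, so typing-only imports can sit behind an if TYPE_CHECKING: guard and add no runtime cost. A minimal, self-contained sketch of the pattern (read_config is a hypothetical function, used only for illustration):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Imported for the type checker only; never executed at runtime.
        from pathlib import Path


    def read_config(config_dir: Path) -> str:
        # With the __future__ import, the Path annotation is stored as a
        # string and never evaluated, so the guarded import suffices.
        return str(config_dir)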
@@ -24,30 +26,38 @@ from hpcflow.sdk.core.test_utils import (
     P1_sub_parameter_cls_2 as P1_sub_param_2,
 )
 
+if TYPE_CHECKING:
+    from pathlib import Path
+    from hpcflow.sdk.core.actions import Action, ActionEnvironment
+    from hpcflow.sdk.core.command_files import FileSpec
+    from hpcflow.sdk.core.parameters import Parameter
+    from hpcflow.sdk.core.task_schema import TaskSchema
+    from hpcflow.sdk.core.workflow import Workflow
+
 
 @pytest.fixture
-def null_config(tmp_path):
+def null_config(tmp_path: Path):
     if not hf.is_config_loaded:
         hf.load_config(config_dir=tmp_path)
 
 
 @pytest.fixture
-def param_p1():
+def param_p1() -> Parameter:
     return hf.Parameter("p1")
 
 
 @pytest.fixture
-def param_p2():
+def param_p2() -> Parameter:
     return hf.Parameter("p2")
 
 
 @pytest.fixture
-def param_p3():
+def param_p3() -> Parameter:
     return hf.Parameter("p3")
 
 
 @pytest.fixture
-def workflow_w0(null_config, tmp_path):
+def workflow_w0(null_config, tmp_path: Path) -> Workflow:
     t1 = hf.Task(schema=[hf.TaskSchema(objective="t1", actions=[])])
     t2 = hf.Task(schema=[hf.TaskSchema(objective="t2", actions=[])])
 
@@ -56,7 +66,9 @@ def workflow_w0(null_config, tmp_path):
 
 
 @pytest.fixture
-def workflow_w1(null_config, tmp_path, param_p1, param_p2):
+def workflow_w1(
+    null_config, tmp_path: Path, param_p1: Parameter, param_p2: Parameter
+) -> Workflow:
     s1 = hf.TaskSchema("t1", actions=[], inputs=[param_p1], outputs=[param_p2])
     s2 = hf.TaskSchema("t2", actions=[], inputs=[param_p2])
 
@@ -71,7 +83,13 @@ def workflow_w1(null_config, tmp_path, param_p1, param_p2):
 
 
 @pytest.fixture
-def workflow_w2(null_config, tmp_path, param_p1, param_p2):
+def workflow_w2(
+    null_config,
+    tmp_path: Path,
+    param_p1: Parameter,
+    param_p2: Parameter,
+    param_p3: Parameter,
+) -> Workflow:
     s1 = hf.TaskSchema("t1", actions=[], inputs=[param_p1], outputs=[param_p2])
     s2 = hf.TaskSchema("t2", actions=[], inputs=[param_p2, param_p3])
 
@@ -92,7 +110,14 @@ def workflow_w2(null_config, tmp_path, param_p1, param_p2):
 
 
 @pytest.fixture
-def workflow_w3(null_config, tmp_path, param_p1, param_p2, param_p3, param_p4):
+def workflow_w3(
+    null_config,
+    tmp_path: Path,
+    param_p1: Parameter,
+    param_p2: Parameter,
+    param_p3: Parameter,
+    param_p4: Parameter,
+) -> Workflow:
     s1 = hf.TaskSchema("t1", actions=[], inputs=[param_p1], outputs=[param_p3])
     s2 = hf.TaskSchema("t2", actions=[], inputs=[param_p2, param_p3], outputs=[param_p4])
     s3 = hf.TaskSchema("t3", actions=[], inputs=[param_p3, param_p4])
@@ -109,17 +134,19 @@ def workflow_w3(null_config, tmp_path, param_p1, param_p2, param_p3, param_p4):
 
 
 @pytest.fixture
-def file_spec_fs1():
+def file_spec_fs1() -> FileSpec:
     return hf.FileSpec(label="file1", name="file1.txt")
 
 
 @pytest.fixture
-def act_env_1():
+def act_env_1() -> ActionEnvironment:
     return hf.ActionEnvironment("env_1")
 
 
 @pytest.fixture
-def act_3(act_env_1, param_p2, file_spec_fs1):
+def act_3(
+    act_env_1: ActionEnvironment, param_p2: Parameter, file_spec_fs1: FileSpec
+) -> Action:
     return hf.Action(
         commands=[hf.Command("<<parameter:p1>>")],
         output_file_parsers=[
@@ -130,24 +157,21 @@ def act_3(act_env_1, param_p2, file_spec_fs1):
 
 
 @pytest.fixture
-def schema_s3(param_p1, param_p2, act_3):
+def schema_s3(param_p1: Parameter, param_p2: Parameter, act_3) -> TaskSchema:
     return hf.TaskSchema("ts1", actions=[act_3], inputs=[param_p1], outputs=[param_p2])
 
 
 @pytest.fixture
-def workflow_w4(null_config, tmp_path, schema_s3, param_p1):
+def workflow_w4(
+    null_config, tmp_path: Path, schema_s3: TaskSchema, param_p1: Parameter
+) -> Workflow:
     t1 = hf.Task(schema=schema_s3, inputs=[hf.InputValue(param_p1, 101)])
     wkt = hf.WorkflowTemplate(name="w1", tasks=[t1])
     return hf.Workflow.from_template(wkt, path=tmp_path)
 
 
 @pytest.fixture
-def act_env_1():
-    return hf.ActionEnvironment("env_1")
-
-
-@pytest.fixture
-def act_1(act_env_1):
+def act_1(act_env_1: ActionEnvironment) -> Action:
     return hf.Action(
         commands=[hf.Command("<<parameter:p1>>")],
         environments=[act_env_1],
@@ -155,7 +179,7 @@ def act_1(act_env_1):
 
 
 @pytest.fixture
-def act_2(act_env_1):
+def act_2(act_env_1: ActionEnvironment) -> Action:
     return hf.Action(
         commands=[hf.Command("<<parameter:p2>>")],
         environments=[act_env_1],
@@ -163,32 +187,32 @@ def act_2(act_env_1):
 
 
 @pytest.fixture
-def schema_s1(param_p1, act_1):
+def schema_s1(param_p1: Parameter, act_1) -> TaskSchema:
     return hf.TaskSchema("ts1", actions=[act_1], inputs=[param_p1])
 
 
 @pytest.fixture
-def schema_s2(param_p1, act_1):
+def schema_s2(param_p1: Parameter, act_1) -> TaskSchema:
     return hf.TaskSchema(
         "ts1", actions=[act_1], inputs=[hf.SchemaInput(param_p1, default_value=101)]
     )
 
 
 @pytest.fixture
-def schema_s4(param_p2, act_2):
+def schema_s4(param_p2: Parameter, act_2) -> TaskSchema:
     return hf.TaskSchema("ts2", actions=[act_2], inputs=[param_p2])
 
 
 @pytest.fixture
-def schema_s5(param_p2, act_2):
+def schema_s5(param_p2: Parameter, act_2) -> TaskSchema:
     return hf.TaskSchema(
         "ts2", actions=[act_2], inputs=[hf.SchemaInput(param_p2, default_value=2002)]
     )
 
 
 def test_task_get_available_task_input_sources_expected_return_first_task_local_value(
-    schema_s1,
-    param_p1,
+    schema_s1: TaskSchema,
+    param_p1: Parameter,
 ):
     t1 = hf.Task(schema=schema_s1, inputs=[hf.InputValue(param_p1, value=101)])
 
@@ -202,7 +226,7 @@ def test_task_get_available_task_input_sources_expected_return_first_task_local_
 
 
 def test_task_get_available_task_input_sources_expected_return_first_task_default_value(
-    schema_s2,
+    schema_s2: TaskSchema,
 ):
     t1 = hf.Task(schema=schema_s2)
     available = t1.get_available_task_input_sources(element_set=t1.element_sets[0])
@@ -212,12 +236,12 @@ def test_task_get_available_task_input_sources_expected_return_first_task_defaul
 
 
 def test_task_get_available_task_input_sources_expected_return_one_param_one_output(
-    tmp_path,
+    tmp_path: Path,
 ):
     t1, t2 = make_tasks(
         schemas_spec=[
-            [{"p1": NullDefault.NULL}, ("p2",), "t1"],
-            [{"p2": NullDefault.NULL}, (), "t2"],
+            ({"p1": NullDefault.NULL}, ("p2",), "t1"),
+            ({"p2": NullDefault.NULL}, (), "t2"),
         ],
        local_inputs={0: ("p1",)},
     )
@@ -242,10 +266,13 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
 
 
 def test_task_get_available_task_input_sources_expected_return_one_param_one_output_with_default(
-    tmp_path,
+    tmp_path: Path,
 ):
     t1, t2 = make_tasks(
-        schemas_spec=[[{"p1": None}, ("p2",), "t1"], [{"p2": 2001}, (), "t2"]],
+        schemas_spec=[
+            ({"p1": None}, ("p2",), "t1"),
+            ({"p2": 2001}, (), "t2"),
+        ],
         local_inputs={0: ("p1",)},
     )
     wk = hf.Workflow.from_template(
@@ -270,12 +297,12 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
 
 
 def test_task_get_available_task_input_sources_expected_return_one_param_one_output_with_local(
-    tmp_path,
+    tmp_path: Path,
 ):
     t1, t2 = make_tasks(
         schemas_spec=[
-            [{"p1": NullDefault.NULL}, ("p2",), "t1"],
-            [{"p2": NullDefault.NULL}, (), "t2"],
+            ({"p1": NullDefault.NULL}, ("p2",), "t1"),
+            ({"p2": NullDefault.NULL}, (), "t2"),
         ],
         local_inputs={0: ("p1",), 1: ("p2",)},
     )
@@ -301,10 +328,13 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
 
 
 def test_task_get_available_task_input_sources_expected_return_one_param_one_output_with_default_and_local(
-    tmp_path,
+    tmp_path: Path,
 ):
     t1, t2 = make_tasks(
-        schemas_spec=[[{"p1": None}, ("p2",), "t1"], [{"p2": 2001}, (), "t2"]],
+        schemas_spec=[
+            ({"p1": None}, ("p2",), "t1"),
+            ({"p2": 2001}, (), "t2"),
+        ],
         local_inputs={0: ("p1",), 1: ("p2",)},
     )
     wk = hf.Workflow.from_template(
@@ -330,13 +360,13 @@ def test_task_get_available_task_input_sources_expected_return_one_param_one_out
 
 
 def test_task_get_available_task_input_sources_expected_return_one_param_two_outputs(
-    tmp_path,
+    tmp_path: Path,
 ):
     t1, t2, t3 = make_tasks(
         schemas_spec=[
-            [{"p1": NullDefault.NULL}, ("p2", "p3"), "t1"],
-            [{"p2": NullDefault.NULL}, ("p3", "p4"), "t2"],
-            [{"p3": NullDefault.NULL}, (), "t3"],
+            ({"p1": NullDefault.NULL}, ("p2", "p3"), "t1"),
+            ({"p2": NullDefault.NULL}, ("p3", "p4"), "t2"),
+            ({"p3": NullDefault.NULL}, (), "t3"),
         ],
         local_inputs={0: ("p1",), 1: ("p2",)},
     )
@@ -367,12 +397,12 @@ def test_task_get_available_task_input_sources_expected_return_one_param_two_out
 
 
 def test_task_get_available_task_input_sources_expected_return_two_params_one_output(
-    tmp_path,
+    tmp_path: Path,
 ):
     t1, t2 = make_tasks(
         schemas_spec=[
-            [{"p1": NullDefault.NULL}, ("p2", "p3"), "t1"],
-            [{"p2": NullDefault.NULL, "p3": NullDefault.NULL}, (), "t2"],
+            ({"p1": NullDefault.NULL}, ("p2", "p3"), "t1"),
+            ({"p2": NullDefault.NULL, "p3": NullDefault.NULL}, (), "t2"),
         ],
         local_inputs={0: ("p1",)},
     )
@@ -405,16 +435,16 @@ def test_task_get_available_task_input_sources_expected_return_two_params_one_ou
 
 
 def test_task_get_available_task_input_sources_one_parameter_extravaganza(
-    tmp_path,
+    tmp_path: Path,
 ):
     """Test an input source is excluded if it is not locally defined (meaning it comes
     from another task)."""
 
     t1, t2, t3 = make_tasks(
         schemas_spec=[
-            [{"p1": NullDefault.NULL}, ("p1",), "t1"],  # sources for t3: input + output
-            [{"p1": NullDefault.NULL}, ("p1",), "t2"],  # sources fot t3: output only
-            [{"p1": NullDefault.NULL}, ("p1",), "t3"],
+            ({"p1": NullDefault.NULL}, ("p1",), "t1"),  # sources for t3: input + output
+            ({"p1": NullDefault.NULL}, ("p1",), "t2"),  # sources fot t3: output only
+            ({"p1": NullDefault.NULL}, ("p1",), "t3"),
         ],
         local_inputs={0: ("p1",)},
     )
@@ -456,7 +486,7 @@ def test_task_get_available_task_input_sources_one_parameter_extravaganza(
     assert available == available_exp
 
 
-def test_task_input_sources_output_label(null_config, tmp_path):
+def test_task_input_sources_output_label(null_config, tmp_path: Path):
     ts1 = hf.TaskSchema(
         objective="t1",
         outputs=[hf.SchemaOutput("p1")],
@@ -487,7 +517,7 @@ def test_task_input_sources_output_label(null_config, tmp_path):
     }
 
 
-def test_task_input_sources_output_label_filtered(null_config, tmp_path):
+def test_task_input_sources_output_label_filtered(null_config, tmp_path: Path):
     ts1 = hf.TaskSchema(
         objective="t1",
         inputs=[hf.SchemaInput("p1")],
@@ -560,8 +590,8 @@ def test_raise_on_multiple_schema_objectives():
         hf.Task(schema=[s1, s2])
 
 
-def test_raise_on_unexpected_inputs(param_p1, param_p2):
-    s1 = make_schemas([[{"p1": None}, ()]])
+def test_raise_on_unexpected_inputs(param_p1: Parameter, param_p2: Parameter):
+    (s1,) = make_schemas(({"p1": None}, ()))
 
     with pytest.raises(TaskTemplateUnexpectedInput):
         hf.Task(
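Note: many hunks from here on share one mechanical change. Judging from these call sites (the helper's new signature in hpcflow.sdk.core.test_utils is not itself shown in this diff), make_schemas now takes one spec tuple per schema as positional arguments, apparently of the form (inputs, outputs[, objective]), and always returns a sequence, so single-schema calls unpack a one-element result; schemas_spec entries likewise become tuples. A sketch of the call-site migration, reusing values from the hunks:

    from hpcflow.sdk.core.test_utils import make_schemas

    # a189 style: a single list-of-lists argument; a bare schema came back
    s1 = make_schemas([[{"p1": None}, ()]])

    # a190 style: one (inputs, outputs[, objective]) tuple per schema;
    # a sequence comes back even for a single schema, hence the unpacking
    (s1,) = make_schemas(({"p1": None}, ()))
    s1, s2 = make_schemas(
        ({"p1": None, "p2": None}, (), "t1"),
        ({"p1": None, "p3": None}, (), "t1"),
    )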
@@ -573,8 +603,8 @@ def test_raise_on_unexpected_inputs(param_p1, param_p2):
         )
 
 
-def test_raise_on_multiple_input_values(param_p1):
-    s1 = make_schemas([[{"p1": None}, ()]])
+def test_raise_on_multiple_input_values(param_p1: Parameter):
+    (s1,) = make_schemas(({"p1": None}, ()))
 
     with pytest.raises(TaskTemplateMultipleInputValues):
         hf.Task(
@@ -586,7 +616,7 @@ def test_raise_on_multiple_input_values(param_p1):
         )
 
 
-def test_raise_on_multiple_input_values_same_label(param_p1):
+def test_raise_on_multiple_input_values_same_label(param_p1: Parameter):
     s1 = hf.TaskSchema(
         objective="t1",
         inputs=[hf.SchemaInput(parameter="p1", labels={"0": {}})],
@@ -596,13 +626,13 @@ def test_raise_on_multiple_input_values_same_label(param_p1):
         hf.Task(
             schema=s1,
             inputs=[
-                hf.InputValue(param_p1, value=101, label=0),
-                hf.InputValue(param_p1, value=101, label=0),
+                hf.InputValue(param_p1, value=101, label="0"),
+                hf.InputValue(param_p1, value=101, label="0"),
             ],
         )
 
 
-def test_multiple_input_values_different_labels(param_p1):
+def test_multiple_input_values_different_labels(param_p1: Parameter):
     s1 = hf.TaskSchema(
         objective="t1",
         inputs=[
@@ -616,14 +646,16 @@ def test_multiple_input_values_different_labels(param_p1):
     hf.Task(
         schema=s1,
         inputs=[
-            hf.InputValue(param_p1, value=101, label=0),
-            hf.InputValue(param_p1, value=101, label=1),
+            hf.InputValue(param_p1, value=101, label="0"),
+            hf.InputValue(param_p1, value=101, label="1"),
         ],
     )
 
 
-def test_expected_return_defined_and_undefined_input_types(param_p1, param_p2):
-    s1 = make_schemas([[{"p1": None, "p2": None}, ()]])
+def test_expected_return_defined_and_undefined_input_types(
+    param_p1: Parameter, param_p2: Parameter
+):
+    (s1,) = make_schemas(({"p1": None, "p2": None}, ()))
 
     t1 = hf.Task(schema=s1, inputs=[hf.InputValue(param_p1, value=101)])
     element_set = t1.element_sets[0]
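Note: the label fixes in the two hunks above align InputValue with the schema, where labels are declared as string keys (labels={"0": {}}); a bare int 0 no longer identifies the labelled input. A short sketch of the corrected usage, assuming only what the hunks themselves show:

    from hpcflow.app import app as hf

    s1 = hf.TaskSchema(
        objective="t1",
        inputs=[hf.SchemaInput(parameter="p1", labels={"0": {}})],
    )
    # The schema declares its labels as strings, so the value must be
    # labelled "0" (not 0) to be associated with that schema input.
    t1 = hf.Task(
        schema=s1,
        inputs=[hf.InputValue(hf.Parameter("p1"), value=101, label="0")],
    )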
@@ -632,18 +664,20 @@ def test_expected_return_defined_and_undefined_input_types(param_p1, param_p2):
     } and element_set.undefined_input_types == {param_p2.typ}
 
 
-def test_expected_return_all_schema_input_types_single_schema(param_p1, param_p2):
-    s1 = make_schemas([[{"p1": None, "p2": None}, ()]])
+def test_expected_return_all_schema_input_types_single_schema(
+    param_p1: Parameter, param_p2: Parameter
+):
+    (s1,) = make_schemas(({"p1": None, "p2": None}, ()))
     t1 = hf.Task(schema=s1)
 
     assert t1.all_schema_input_types == {param_p1.typ, param_p2.typ}
 
 
 def test_expected_return_all_schema_input_types_multiple_schemas(
-    param_p1, param_p2, param_p3
+    param_p1: Parameter, param_p2: Parameter, param_p3: Parameter
 ):
     s1, s2 = make_schemas(
-        [[{"p1": None, "p2": None}, (), "t1"], [{"p1": None, "p3": None}, (), "t1"]]
+        ({"p1": None, "p2": None}, (), "t1"), ({"p1": None, "p3": None}, (), "t1")
     )
 
     t1 = hf.Task(schema=[s1, s2])
@@ -725,7 +759,7 @@ def test_expected_name_two_schemas_both_with_method_and_implementation():
 
 
 def test_raise_on_negative_nesting_order():
-    s1 = make_schemas([[{"p1": None}, ()]])
+    (s1,) = make_schemas(({"p1": None}, ()))
     with pytest.raises(TaskTemplateInvalidNesting):
         hf.Task(schema=s1, nesting_order={"inputs.p1": -1})
 
@@ -735,15 +769,15 @@
 
 def test_empty_task_init():
     """Check we can init a hf.Task with no input values."""
-    s1 = make_schemas([[{"p1": None}, ()]])
+    (s1,) = make_schemas(({"p1": None}, ()))
     t1 = hf.Task(schema=s1)
 
 
-def test_task_task_dependencies(tmp_path):
+def test_task_task_dependencies(tmp_path: Path):
     wk = make_workflow(
         schemas_spec=[
-            [{"p1": None}, ("p2",), "t1"],
-            [{"p2": None}, (), "t2"],
+            ({"p1": None}, ("p2",), "t1"),
+            ({"p2": None}, (), "t2"),
         ],
         local_inputs={0: ("p1",)},
         path=tmp_path,
@@ -751,11 +785,11 @@ def test_task_task_dependencies(tmp_path):
     assert wk.tasks.t2.get_task_dependencies(as_objects=True) == [wk.tasks.t1]
 
 
-def test_task_dependent_tasks(tmp_path):
+def test_task_dependent_tasks(tmp_path: Path):
     wk = make_workflow(
         schemas_spec=[
-            [{"p1": None}, ("p2",), "t1"],
-            [{"p2": None}, (), "t2"],
+            ({"p1": None}, ("p2",), "t1"),
+            ({"p2": None}, (), "t2"),
         ],
         local_inputs={0: ("p1",)},
         path=tmp_path,
@@ -763,39 +797,39 @@ def test_task_dependent_tasks(tmp_path):
     assert wk.tasks.t1.get_dependent_tasks(as_objects=True) == [wk.tasks.t2]
 
 
-def test_task_element_dependencies(tmp_path):
+def test_task_element_dependencies(tmp_path: Path):
     wk = make_workflow(
         schemas_spec=[
-            [{"p1": None}, ("p2",), "t1"],
-            [{"p2": None}, (), "t2"],
+            ({"p1": None}, ("p2",), "t1"),
+            ({"p2": None}, (), "t2"),
         ],
         local_sequences={0: [("inputs.p1", 2, 0)]},
         nesting_orders={1: {"inputs.p2": 0}},
         path=tmp_path,
     )
-    assert wk.tasks.t2.get_element_dependencies() == [0, 1]
+    assert wk.tasks.t2.get_element_dependencies() == {0, 1}
 
 
-def test_task_dependent_elements(tmp_path):
+def test_task_dependent_elements(tmp_path: Path):
     wk = make_workflow(
         schemas_spec=[
-            [{"p1": None}, ("p2",), "t1"],
-            [{"p2": None}, (), "t2"],
+            ({"p1": None}, ("p2",), "t1"),
+            ({"p2": None}, (), "t2"),
         ],
         local_sequences={0: [("inputs.p1", 2, 0)]},
         nesting_orders={1: {"inputs.p2": 0}},
         path=tmp_path,
     )
-    assert wk.tasks.t1.get_dependent_elements() == [2, 3]
+    assert wk.tasks.t1.get_dependent_elements() == {2, 3}
 
 
 def test_task_add_elements_without_propagation_expected_workflow_num_elements(
-    tmp_path, param_p1
+    tmp_path: Path, param_p1: Parameter
 ):
     wk = make_workflow(
         schemas_spec=[
-            [{"p1": None}, ("p2",), "t1"],
-            [{"p2": None}, (), "t2"],
+            ({"p1": None}, ("p2",), "t1"),
+            ({"p2": None}, (), "t2"),
         ],
         local_sequences={0: [("inputs.p1", 2, 0)]},
         nesting_orders={1: {"inputs.p2": 0}},
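Note: two behavioural updates recur from here to the end of the file. The rewritten assertions above indicate that get_element_dependencies and get_dependent_elements now return sets rather than lists, making the comparisons order-free; and sorted(d.keys()) is simplified to sorted(d), which is equivalent for any dict because iterating a dict yields its keys. A self-contained illustration in plain Python:

    d = {"inputs.p1": 5, "outputs.p2": 7, "resources.any": 3}
    # Iterating a dict yields its keys, so dropping .keys() throughout
    # the data-index assertions below changes nothing:
    assert sorted(d) == sorted(d.keys())

    # Set literals make the dependency assertions order-insensitive:
    assert {0, 1} == {1, 0}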
@@ -808,12 +842,12 @@ def test_task_add_elements_without_propagation_expected_workflow_num_elements(
808
842
 
809
843
 
810
844
  def test_task_add_elements_without_propagation_expected_task_num_elements(
811
- tmp_path, param_p1
845
+ tmp_path: Path, param_p1: Parameter
812
846
  ):
813
847
  wk = make_workflow(
814
848
  schemas_spec=[
815
- [{"p1": None}, ("p2",), "t1"],
816
- [{"p2": None}, (), "t2"],
849
+ ({"p1": None}, ("p2",), "t1"),
850
+ ({"p2": None}, (), "t2"),
817
851
  ],
818
852
  local_sequences={0: [("inputs.p1", 2, 0)]},
819
853
  nesting_orders={1: {"inputs.p2": 0}},
@@ -826,31 +860,31 @@ def test_task_add_elements_without_propagation_expected_task_num_elements(
826
860
 
827
861
 
828
862
  def test_task_add_elements_without_propagation_expected_new_data_index(
829
- tmp_path, param_p1
863
+ tmp_path: Path, param_p1: Parameter
830
864
  ):
831
865
  wk = make_workflow(
832
866
  schemas_spec=[
833
- [{"p1": None}, ("p2",), "t1"],
834
- [{"p2": None}, (), "t2"],
867
+ ({"p1": None}, ("p2",), "t1"),
868
+ ({"p2": None}, (), "t2"),
835
869
  ],
836
870
  local_sequences={0: [("inputs.p1", 2, 0)]},
837
871
  nesting_orders={1: {"inputs.p2": 0}},
838
872
  path=tmp_path,
839
873
  )
840
- data_index = [sorted(i.get_data_idx().keys()) for i in wk.tasks.t1.elements[:]]
874
+ data_index = [sorted(i.get_data_idx()) for i in wk.tasks.t1.elements[:]]
841
875
  wk.tasks.t1.add_elements(inputs=[hf.InputValue(param_p1, 103)])
842
- data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.tasks.t1.elements[:]]
876
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.tasks.t1.elements[:]]
843
877
  new_elems = data_index_new[len(data_index) :]
844
878
  assert new_elems == [["inputs.p1", "outputs.p2", "resources.any"]]
845
879
 
846
880
 
847
881
  def test_task_add_elements_with_propagation_expected_workflow_num_elements(
848
- tmp_path, param_p1
882
+ tmp_path: Path, param_p1: Parameter
849
883
  ):
850
884
  wk = make_workflow(
851
885
  schemas_spec=[
852
- [{"p1": None}, ("p2",), "t1"],
853
- [{"p2": None}, (), "t2"],
886
+ ({"p1": None}, ("p2",), "t1"),
887
+ ({"p2": None}, (), "t2"),
854
888
  ],
855
889
  local_sequences={0: [("inputs.p1", 2, 0)]},
856
890
  nesting_orders={1: {"inputs.p2": 0}},
@@ -866,12 +900,12 @@ def test_task_add_elements_with_propagation_expected_workflow_num_elements(
866
900
 
867
901
 
868
902
  def test_task_add_elements_with_propagation_expected_task_num_elements(
869
- tmp_path, param_p1
903
+ tmp_path: Path, param_p1: Parameter
870
904
  ):
871
905
  wk = make_workflow(
872
906
  schemas_spec=[
873
- [{"p1": None}, ("p2",), "t1"],
874
- [{"p2": None}, (), "t2"],
907
+ ({"p1": None}, ("p2",), "t1"),
908
+ ({"p2": None}, (), "t2"),
875
909
  ],
876
910
  local_sequences={0: [("inputs.p1", 2, 0)]},
877
911
  nesting_orders={1: {"inputs.p2": 0}},
@@ -887,11 +921,13 @@ def test_task_add_elements_with_propagation_expected_task_num_elements(
887
921
  assert num_elems_diff[0] == 1 and num_elems_diff[1] == 1
888
922
 
889
923
 
890
- def test_task_add_elements_with_propagation_expected_new_data_index(tmp_path, param_p1):
924
+ def test_task_add_elements_with_propagation_expected_new_data_index(
925
+ tmp_path: Path, param_p1: Parameter
926
+ ):
891
927
  wk = make_workflow(
892
928
  schemas_spec=[
893
- [{"p1": None}, ("p2",), "t1"],
894
- [{"p2": None}, (), "t2"],
929
+ ({"p1": None}, ("p2",), "t1"),
930
+ ({"p2": None}, (), "t2"),
895
931
  ],
896
932
  local_sequences={0: [("inputs.p1", 2, 0)]},
897
933
  nesting_orders={1: {"inputs.p2": 0}},
@@ -905,7 +941,7 @@ def test_task_add_elements_with_propagation_expected_new_data_index(tmp_path, pa
905
941
  )
906
942
  t1_num_elems_new = wk.tasks.t1.num_elements
907
943
  t2_num_elems_new = wk.tasks.t2.num_elements
908
- data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
944
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
909
945
  new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
910
946
  new_elems_t2 = data_index_new[
911
947
  t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
@@ -920,12 +956,12 @@ def test_task_add_elements_with_propagation_expected_new_data_index(tmp_path, pa
920
956
 
921
957
 
922
958
  def test_task_add_elements_sequence_without_propagation_expected_workflow_num_elements(
923
- tmp_path,
959
+ tmp_path: Path,
924
960
  ):
925
961
  wk = make_workflow(
926
962
  schemas_spec=[
927
- [{"p1": None}, ("p2",), "t1"],
928
- [{"p2": None}, (), "t2"],
963
+ ({"p1": None}, ("p2",), "t1"),
964
+ ({"p2": None}, (), "t2"),
929
965
  ],
930
966
  local_sequences={0: [("inputs.p1", 2, 0)]},
931
967
  nesting_orders={1: {"inputs.p2": 0}},
@@ -940,12 +976,12 @@ def test_task_add_elements_sequence_without_propagation_expected_workflow_num_el
940
976
 
941
977
 
942
978
  def test_task_add_elements_sequence_without_propagation_expected_task_num_elements(
943
- tmp_path,
979
+ tmp_path: Path,
944
980
  ):
945
981
  wk = make_workflow(
946
982
  schemas_spec=[
947
- [{"p1": None}, ("p2",), "t1"],
948
- [{"p2": None}, (), "t2"],
983
+ ({"p1": None}, ("p2",), "t1"),
984
+ ({"p2": None}, (), "t2"),
949
985
  ],
950
986
  local_sequences={0: [("inputs.p1", 2, 0)]},
951
987
  nesting_orders={1: {"inputs.p2": 0}},
@@ -960,12 +996,12 @@ def test_task_add_elements_sequence_without_propagation_expected_task_num_elemen
960
996
 
961
997
 
962
998
  def test_task_add_elements_sequence_without_propagation_expected_new_data_index(
963
- tmp_path,
999
+ tmp_path: Path,
964
1000
  ):
965
1001
  wk = make_workflow(
966
1002
  schemas_spec=[
967
- [{"p1": None}, ("p2",), "t1"],
968
- [{"p2": None}, (), "t2"],
1003
+ ({"p1": None}, ("p2",), "t1"),
1004
+ ({"p2": None}, (), "t2"),
969
1005
  ],
970
1006
  local_sequences={0: [("inputs.p1", 2, 0)]},
971
1007
  nesting_orders={1: {"inputs.p2": 0}},
@@ -976,7 +1012,7 @@ def test_task_add_elements_sequence_without_propagation_expected_new_data_index(
976
1012
  sequences=[hf.ValueSequence("inputs.p1", values=[103, 104], nesting_order=1)]
977
1013
  )
978
1014
  t1_num_elems_new = wk.tasks.t1.num_elements
979
- data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
1015
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
980
1016
  new_elems = data_index_new[t1_num_elems:t1_num_elems_new]
981
1017
  assert new_elems == [
982
1018
  ["inputs.p1", "outputs.p2", "resources.any"],
@@ -985,12 +1021,12 @@ def test_task_add_elements_sequence_without_propagation_expected_new_data_index(
985
1021
 
986
1022
 
987
1023
  def test_task_add_elements_sequence_with_propagation_expected_workflow_num_elements(
988
- tmp_path,
1024
+ tmp_path: Path,
989
1025
  ):
990
1026
  wk = make_workflow(
991
1027
  schemas_spec=[
992
- [{"p1": None}, ("p2",), "t1"],
993
- [{"p2": None}, (), "t2"],
1028
+ ({"p1": None}, ("p2",), "t1"),
1029
+ ({"p2": None}, (), "t2"),
994
1030
  ],
995
1031
  local_sequences={0: [("inputs.p1", 2, 0)]},
996
1032
  nesting_orders={1: {"inputs.p2": 0}},
@@ -1010,12 +1046,12 @@ def test_task_add_elements_sequence_with_propagation_expected_workflow_num_eleme
1010
1046
 
1011
1047
 
1012
1048
  def test_task_add_elements_sequence_with_propagation_expected_task_num_elements(
1013
- tmp_path,
1049
+ tmp_path: Path,
1014
1050
  ):
1015
1051
  wk = make_workflow(
1016
1052
  schemas_spec=[
1017
- [{"p1": None}, ("p2",), "t1"],
1018
- [{"p2": None}, (), "t2"],
1053
+ ({"p1": None}, ("p2",), "t1"),
1054
+ ({"p2": None}, (), "t2"),
1019
1055
  ],
1020
1056
  local_sequences={0: [("inputs.p1", 2, 0)]},
1021
1057
  nesting_orders={1: {"inputs.p2": 0}},
@@ -1035,11 +1071,13 @@ def test_task_add_elements_sequence_with_propagation_expected_task_num_elements(
1035
1071
  assert num_elems_diff[0] == 3 and num_elems_diff[1] == 3
1036
1072
 
1037
1073
 
1038
- def test_task_add_elements_sequence_with_propagation_expected_new_data_index(tmp_path):
1074
+ def test_task_add_elements_sequence_with_propagation_expected_new_data_index(
1075
+ tmp_path: Path,
1076
+ ):
1039
1077
  wk = make_workflow(
1040
1078
  schemas_spec=[
1041
- [{"p1": None}, ("p2",), "t1"],
1042
- [{"p2": None}, (), "t2"],
1079
+ ({"p1": None}, ("p2",), "t1"),
1080
+ ({"p2": None}, (), "t2"),
1043
1081
  ],
1044
1082
  local_sequences={0: [("inputs.p1", 2, 0)]},
1045
1083
  nesting_orders={1: {"inputs.p2": 0}},
@@ -1057,7 +1095,7 @@ def test_task_add_elements_sequence_with_propagation_expected_new_data_index(tmp
1057
1095
  )
1058
1096
  t1_num_elems_new = wk.tasks.t1.num_elements
1059
1097
  t2_num_elems_new = wk.tasks.t2.num_elements
1060
- data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
1098
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
1061
1099
  new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
1062
1100
  new_elems_t2 = data_index_new[
1063
1101
  t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
@@ -1074,12 +1112,12 @@ def test_task_add_elements_sequence_with_propagation_expected_new_data_index(tmp
1074
1112
 
1075
1113
 
1076
1114
  def test_task_add_elements_sequence_with_propagation_into_sequence_expected_workflow_num_elements(
1077
- tmp_path,
1115
+ tmp_path: Path,
1078
1116
  ):
1079
1117
  wk = make_workflow(
1080
1118
  schemas_spec=[
1081
- [{"p1": None}, ("p2",), "t1"],
1082
- [{"p2": None, "p3": None}, (), "t2"],
1119
+ ({"p1": None}, ("p2",), "t1"),
1120
+ ({"p2": None, "p3": None}, (), "t2"),
1083
1121
  ],
1084
1122
  local_sequences={0: [("inputs.p1", 2, 1)], 1: [("inputs.p3", 3, 1)]},
1085
1123
  nesting_orders={1: {"inputs.p2": 0}},
@@ -1101,12 +1139,12 @@ def test_task_add_elements_sequence_with_propagation_into_sequence_expected_work
1101
1139
 
1102
1140
 
1103
1141
  def test_task_add_elements_sequence_with_propagation_into_sequence_expected_task_num_elements(
1104
- tmp_path,
1142
+ tmp_path: Path,
1105
1143
  ):
1106
1144
  wk = make_workflow(
1107
1145
  schemas_spec=[
1108
- [{"p1": None}, ("p2",), "t1"],
1109
- [{"p2": None, "p3": None}, (), "t2"],
1146
+ ({"p1": None}, ("p2",), "t1"),
1147
+ ({"p2": None, "p3": None}, (), "t2"),
1110
1148
  ],
1111
1149
  local_sequences={0: [("inputs.p1", 2, 1)], 1: [("inputs.p3", 3, 1)]},
1112
1150
  nesting_orders={1: {"inputs.p2": 0}},
@@ -1129,12 +1167,12 @@ def test_task_add_elements_sequence_with_propagation_into_sequence_expected_task
1129
1167
 
1130
1168
 
1131
1169
  def test_task_add_elements_sequence_with_propagation_into_sequence_expected_new_data_index(
1132
- tmp_path,
1170
+ tmp_path: Path,
1133
1171
  ):
1134
1172
  wk = make_workflow(
1135
1173
  schemas_spec=[
1136
- [{"p1": None}, ("p2",), "t1"],
1137
- [{"p2": None, "p3": None}, (), "t2"],
1174
+ ({"p1": None}, ("p2",), "t1"),
1175
+ ({"p2": None, "p3": None}, (), "t2"),
1138
1176
  ],
1139
1177
  local_sequences={0: [("inputs.p1", 2, 1)], 1: [("inputs.p3", 3, 1)]},
1140
1178
  nesting_orders={1: {"inputs.p2": 0}},
@@ -1155,7 +1193,7 @@ def test_task_add_elements_sequence_with_propagation_into_sequence_expected_new_
1155
1193
  )
1156
1194
  t1_num_elems_new = wk.tasks.t1.num_elements
1157
1195
  t2_num_elems_new = wk.tasks.t2.num_elements
1158
- data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
1196
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
1159
1197
  new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
1160
1198
  new_elems_t2 = data_index_new[
1161
1199
  t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
@@ -1178,13 +1216,13 @@ def test_task_add_elements_sequence_with_propagation_into_sequence_expected_new_
1178
1216
 
1179
1217
 
1180
1218
  def test_task_add_elements_multi_task_dependence_expected_workflow_num_elements(
1181
- tmp_path, param_p1
1219
+ tmp_path: Path, param_p1: Parameter
1182
1220
  ):
1183
1221
  wk = make_workflow(
1184
1222
  schemas_spec=[
1185
- [{"p1": None}, ("p3",), "t1"],
1186
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1187
- [{"p3": None, "p4": None}, (), "t3"],
1223
+ ({"p1": None}, ("p3",), "t1"),
1224
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1225
+ ({"p3": None, "p4": None}, (), "t3"),
1188
1226
  ],
1189
1227
  local_inputs={0: ("p1",)},
1190
1228
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1204,13 +1242,13 @@ def test_task_add_elements_multi_task_dependence_expected_workflow_num_elements(
1204
1242
 
1205
1243
 
1206
1244
  def test_task_add_elements_multi_task_dependence_expected_task_num_elements(
1207
- tmp_path, param_p1
1245
+ tmp_path: Path, param_p1: Parameter
1208
1246
  ):
1209
1247
  wk = make_workflow(
1210
1248
  schemas_spec=[
1211
- [{"p1": None}, ("p3",), "t1"],
1212
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1213
- [{"p3": None, "p4": None}, (), "t3"],
1249
+ ({"p1": None}, ("p3",), "t1"),
1250
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1251
+ ({"p3": None, "p4": None}, (), "t3"),
1214
1252
  ],
1215
1253
  local_inputs={0: ("p1",)},
1216
1254
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1231,13 +1269,13 @@ def test_task_add_elements_multi_task_dependence_expected_task_num_elements(
1231
1269
 
1232
1270
 
1233
1271
  def test_task_add_elements_multi_task_dependence_expected_task_num_elements_custom_input_source(
1234
- tmp_path, param_p1
1272
+ tmp_path: Path, param_p1: Parameter
1235
1273
  ):
1236
1274
  wk = make_workflow(
1237
1275
  schemas_spec=[
1238
- [{"p1": None}, ("p3",), "t1"],
1239
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1240
- [{"p3": None, "p4": None}, (), "t3"],
1276
+ ({"p1": None}, ("p3",), "t1"),
1277
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1278
+ ({"p3": None, "p4": None}, (), "t3"),
1241
1279
  ],
1242
1280
  local_inputs={0: ("p1",)},
1243
1281
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1264,13 +1302,13 @@ def test_task_add_elements_multi_task_dependence_expected_task_num_elements_cust
1264
1302
 
1265
1303
 
1266
1304
  def test_task_add_elements_multi_task_dependence_expected_new_data_index(
1267
- tmp_path, param_p1
1305
+ tmp_path: Path, param_p1: Parameter
1268
1306
  ):
1269
1307
  wk = make_workflow(
1270
1308
  schemas_spec=[
1271
- [{"p1": None}, ("p3",), "t1"],
1272
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1273
- [{"p3": None, "p4": None}, (), "t3"],
1309
+ ({"p1": None}, ("p3",), "t1"),
1310
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1311
+ ({"p3": None, "p4": None}, (), "t3"),
1274
1312
  ],
1275
1313
  local_inputs={0: ("p1",)},
1276
1314
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1290,7 +1328,7 @@ def test_task_add_elements_multi_task_dependence_expected_new_data_index(
1290
1328
  t1_num_elems_new = wk.tasks.t1.num_elements
1291
1329
  t2_num_elems_new = wk.tasks.t2.num_elements
1292
1330
  t3_num_elems_new = wk.tasks.t3.num_elements
1293
- data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
1331
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
1294
1332
  new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
1295
1333
  new_elems_t2 = data_index_new[
1296
1334
  t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
@@ -1312,13 +1350,13 @@ def test_task_add_elements_multi_task_dependence_expected_new_data_index(
1312
1350
 
1313
1351
 
1314
1352
  def test_task_add_elements_multi_task_dependence_expected_new_data_index_custom_input_source(
1315
- tmp_path, param_p1
1353
+ tmp_path: Path, param_p1: Parameter
1316
1354
  ):
1317
1355
  wk = make_workflow(
1318
1356
  schemas_spec=[
1319
- [{"p1": None}, ("p3",), "t1"],
1320
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1321
- [{"p3": None, "p4": None}, (), "t3"],
1357
+ ({"p1": None}, ("p3",), "t1"),
1358
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1359
+ ({"p3": None, "p4": None}, (), "t3"),
1322
1360
  ],
1323
1361
  local_inputs={0: ("p1",)},
1324
1362
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1344,7 +1382,7 @@ def test_task_add_elements_multi_task_dependence_expected_new_data_index_custom_
1344
1382
  t1_num_elems_new = wk.tasks.t1.num_elements
1345
1383
  t2_num_elems_new = wk.tasks.t2.num_elements
1346
1384
  t3_num_elems_new = wk.tasks.t3.num_elements
1347
- data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
1385
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
1348
1386
  new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
1349
1387
  new_elems_t2 = data_index_new[
1350
1388
  t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
@@ -1366,13 +1404,13 @@ def test_task_add_elements_multi_task_dependence_expected_new_data_index_custom_
1366
1404
 
1367
1405
 
1368
1406
  def test_task_add_elements_sequence_multi_task_dependence_workflow_num_elements(
1369
- tmp_path,
1407
+ tmp_path: Path,
1370
1408
  ):
1371
1409
  wk = make_workflow(
1372
1410
  schemas_spec=[
1373
- [{"p1": None}, ("p3",), "t1"],
1374
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1375
- [{"p3": None, "p4": None}, (), "t3"],
1411
+ ({"p1": None}, ("p3",), "t1"),
1412
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1413
+ ({"p3": None, "p4": None}, (), "t3"),
1376
1414
  ],
1377
1415
  local_inputs={0: ("p1",)},
1378
1416
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1394,13 +1432,13 @@ def test_task_add_elements_sequence_multi_task_dependence_workflow_num_elements(
1394
1432
 
1395
1433
 
1396
1434
  def test_task_add_elements_sequence_multi_task_dependence_workflow_num_elements_custom_input_source(
1397
- tmp_path,
1435
+ tmp_path: Path,
1398
1436
  ):
1399
1437
  wk = make_workflow(
1400
1438
  schemas_spec=[
1401
- [{"p1": None}, ("p3",), "t1"],
1402
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1403
- [{"p3": None, "p4": None}, (), "t3"],
1439
+ ({"p1": None}, ("p3",), "t1"),
1440
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1441
+ ({"p3": None, "p4": None}, (), "t3"),
1404
1442
  ],
1405
1443
  local_inputs={0: ("p1",)},
1406
1444
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1428,13 +1466,13 @@ def test_task_add_elements_sequence_multi_task_dependence_workflow_num_elements_
1428
1466
 
1429
1467
 
1430
1468
  def test_task_add_elements_sequence_multi_task_dependence_expected_task_num_elements(
1431
- tmp_path,
1469
+ tmp_path: Path,
1432
1470
  ):
1433
1471
  wk = make_workflow(
1434
1472
  schemas_spec=[
1435
- [{"p1": None}, ("p3",), "t1"],
1436
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1437
- [{"p3": None, "p4": None}, (), "t3"],
1473
+ ({"p1": None}, ("p3",), "t1"),
1474
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1475
+ ({"p3": None, "p4": None}, (), "t3"),
1438
1476
  ],
1439
1477
  local_inputs={0: ("p1",)},
1440
1478
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1457,13 +1495,13 @@ def test_task_add_elements_sequence_multi_task_dependence_expected_task_num_elem
1457
1495
 
1458
1496
 
1459
1497
  def test_task_add_elements_sequence_multi_task_dependence_expected_task_num_elements_custom_input_source(
1460
- tmp_path,
1498
+ tmp_path: Path,
1461
1499
  ):
1462
1500
  wk = make_workflow(
1463
1501
  schemas_spec=[
1464
- [{"p1": None}, ("p3",), "t1"],
1465
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1466
- [{"p3": None, "p4": None}, (), "t3"],
1502
+ ({"p1": None}, ("p3",), "t1"),
1503
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1504
+ ({"p3": None, "p4": None}, (), "t3"),
1467
1505
  ],
1468
1506
  local_inputs={0: ("p1",)},
1469
1507
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1492,13 +1530,13 @@ def test_task_add_elements_sequence_multi_task_dependence_expected_task_num_elem
1492
1530
 
1493
1531
 
1494
1532
  def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_index(
1495
- tmp_path,
1533
+ tmp_path: Path,
1496
1534
  ):
1497
1535
  wk = make_workflow(
1498
1536
  schemas_spec=[
1499
- [{"p1": None}, ("p3",), "t1"],
1500
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1501
- [{"p3": None, "p4": None}, (), "t3"],
1537
+ ({"p1": None}, ("p3",), "t1"),
1538
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1539
+ ({"p3": None, "p4": None}, (), "t3"),
1502
1540
  ],
1503
1541
  local_inputs={0: ("p1",)},
1504
1542
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1521,7 +1559,7 @@ def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_inde
1521
1559
  t2_num_elems_new = wk.tasks.t2.num_elements
1522
1560
  t3_num_elems_new = wk.tasks.t3.num_elements
1523
1561
 
1524
- data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
1562
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
1525
1563
  new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
1526
1564
  new_elems_t2 = data_index_new[
1527
1565
  t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
@@ -1542,13 +1580,13 @@ def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_inde
1542
1580
 
1543
1581
 
1544
1582
  def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_index_custom_input_source(
1545
- tmp_path,
1583
+ tmp_path: Path,
1546
1584
  ):
1547
1585
  wk = make_workflow(
1548
1586
  schemas_spec=[
1549
- [{"p1": None}, ("p3",), "t1"],
1550
- [{"p2": None, "p3": None}, ("p4",), "t2"],
1551
- [{"p3": None, "p4": None}, (), "t3"],
1587
+ ({"p1": None}, ("p3",), "t1"),
1588
+ ({"p2": None, "p3": None}, ("p4",), "t2"),
1589
+ ({"p3": None, "p4": None}, (), "t3"),
1552
1590
  ],
1553
1591
  local_inputs={0: ("p1",)},
1554
1592
  local_sequences={1: [("inputs.p2", 2, 1)]},
@@ -1577,7 +1615,7 @@ def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_inde
1577
1615
  t2_num_elems_new = wk.tasks.t2.num_elements
1578
1616
  t3_num_elems_new = wk.tasks.t3.num_elements
1579
1617
 
1580
- data_index_new = [sorted(i.get_data_idx().keys()) for i in wk.elements()]
1618
+ data_index_new = [sorted(i.get_data_idx()) for i in wk.elements()]
1581
1619
  new_elems_t1 = data_index_new[t1_num_elems:t1_num_elems_new]
1582
1620
  new_elems_t2 = data_index_new[
1583
1621
  t1_num_elems_new + t2_num_elems : t1_num_elems_new + t2_num_elems_new
@@ -1597,12 +1635,14 @@ def test_task_add_elements_sequence_multi_task_dependence_expected_new_data_inde
1597
1635
  )
1598
1636
 
1599
1637
 
1600
- def test_task_add_elements_simple_dependence_three_tasks(tmp_path, param_p1):
1638
+ def test_task_add_elements_simple_dependence_three_tasks(
1639
+ tmp_path: Path, param_p1: Parameter
1640
+ ):
1601
1641
  wk = make_workflow(
1602
1642
  schemas_spec=[
1603
- [{"p1": None}, ("p2",), "t1"],
1604
- [{"p2": None}, ("p3",), "t2"],
1605
- [{"p3": None}, (), "t3"],
1643
+ ({"p1": None}, ("p2",), "t1"),
1644
+ ({"p2": None}, ("p3",), "t2"),
1645
+ ({"p3": None}, (), "t3"),
1606
1646
  ],
1607
1647
  local_inputs={0: ("p1",)},
1608
1648
  path=tmp_path,
@@ -1616,15 +1656,15 @@ def test_task_add_elements_simple_dependence_three_tasks(tmp_path, param_p1):
1616
1656
  assert num_elems_new == [i + 1 for i in num_elems]
1617
1657
 
1618
1658
 
1619
- def test_no_change_to_tasks_metadata_on_add_task_failure(tmp_path):
1659
+ def test_no_change_to_tasks_metadata_on_add_task_failure(tmp_path: Path):
1620
1660
  wk = make_workflow(
1621
- schemas_spec=[[{"p1": NullDefault.NULL}, (), "t1"]],
1661
+ schemas_spec=[({"p1": NullDefault.NULL}, (), "t1")],
1622
1662
  local_inputs={0: ("p1",)},
1623
1663
  path=tmp_path,
1624
1664
  )
1625
1665
  tasks_meta = copy.deepcopy(wk._store.get_tasks())
1626
1666
 
1627
- s2 = make_schemas([[{"p1": NullDefault.NULL, "p3": NullDefault.NULL}, ()]])
1667
+ (s2,) = make_schemas(({"p1": NullDefault.NULL, "p3": NullDefault.NULL}, ()))
1628
1668
  t2 = hf.Task(schema=s2)
1629
1669
  with pytest.raises(MissingInputs) as exc_info:
1630
1670
  wk.add_task(t2)
@@ -1632,15 +1672,17 @@ def test_no_change_to_tasks_metadata_on_add_task_failure(tmp_path):
1632
1672
  assert wk._store.get_tasks() == tasks_meta
1633
1673
 
1634
1674
 
1635
- def test_no_change_to_parameter_data_on_add_task_failure(tmp_path, param_p2, param_p3):
1675
+ def test_no_change_to_parameter_data_on_add_task_failure(
1676
+ tmp_path: Path, param_p2: Parameter, param_p3: Parameter
1677
+ ):
1636
1678
  wk = make_workflow(
1637
- schemas_spec=[[{"p1": NullDefault.NULL}, (), "t1"]],
1679
+ schemas_spec=[({"p1": NullDefault.NULL}, (), "t1")],
1638
1680
  local_inputs={0: ("p1",)},
1639
1681
  path=tmp_path,
1640
1682
  )
1641
- param_data = copy.deepcopy(wk.get_all_parameters())
1642
- s2 = make_schemas(
1643
- [[{"p1": NullDefault.NULL, "p2": NullDefault.NULL, "p3": NullDefault.NULL}, ()]]
1683
+ param_data: list = copy.deepcopy(wk.get_all_parameters())
1684
+ (s2,) = make_schemas(
1685
+ ({"p1": NullDefault.NULL, "p2": NullDefault.NULL, "p3": NullDefault.NULL}, ())
1644
1686
  )
1645
1687
  t2 = hf.Task(schema=s2, inputs=[hf.InputValue(param_p2, 201)])
1646
1688
  with pytest.raises(MissingInputs) as exc_info:
@@ -1649,21 +1691,23 @@ def test_no_change_to_parameter_data_on_add_task_failure(tmp_path, param_p2, par
1649
1691
  assert wk.get_all_parameters() == param_data
1650
1692
 
1651
1693
 
1652
- def test_expected_additional_parameter_data_on_add_task(tmp_path, param_p3):
1694
+ def test_expected_additional_parameter_data_on_add_task(
1695
+ tmp_path: Path, param_p3: Parameter
1696
+ ):
1653
1697
  wk = make_workflow(
1654
- schemas_spec=[[{"p1": NullDefault.NULL}, (), "t1"]],
1698
+ schemas_spec=[({"p1": NullDefault.NULL}, (), "t1")],
1655
1699
  local_inputs={0: ("p1",)},
1656
1700
  path=tmp_path,
1657
1701
  )
1658
1702
  param_data = copy.deepcopy(wk.get_all_parameter_data())
1659
1703
 
1660
- s2 = make_schemas([[{"p1": NullDefault.NULL, "p3": NullDefault.NULL}, ()]])
1704
+ (s2,) = make_schemas(({"p1": NullDefault.NULL, "p3": NullDefault.NULL}, ()))
1661
1705
  t2 = hf.Task(schema=s2, inputs=[hf.InputValue(param_p3, 301)])
1662
1706
  wk.add_task(t2)
1663
1707
 
1664
1708
  param_data_new = wk.get_all_parameter_data()
1665
1709
 
1666
- new_keys = set(param_data_new.keys()) - set(param_data.keys())
1710
+ new_keys = sorted(set(param_data_new).difference(param_data))
1667
1711
  new_data = [param_data_new[k] for k in new_keys]
1668
1712
 
1669
1713
  # one new key for resources, one for param_p3 value
@@ -1671,62 +1715,57 @@ def test_expected_additional_parameter_data_on_add_task(tmp_path, param_p3):
1671
1715
  assert new_data[1] == 301
1672
1716
 
1673
1717
 
1674
- def test_parameters_accepted_on_add_task(tmp_path, param_p3):
1718
+ def test_parameters_accepted_on_add_task(tmp_path: Path, param_p3: Parameter):
1675
1719
  wk = make_workflow(
1676
- schemas_spec=[[{"p1": None}, (), "t1"]],
1720
+ schemas_spec=[({"p1": None}, (), "t1")],
1677
1721
  local_inputs={0: ("p1",)},
1678
1722
  path=tmp_path,
1679
1723
  )
1680
- s2 = make_schemas([[{"p1": None, "p3": None}, ()]])
1724
+ (s2,) = make_schemas(({"p1": None, "p3": None}, ()))
1681
1725
  t2 = hf.Task(schema=s2, inputs=[hf.InputValue(param_p3, 301)])
1682
1726
  wk.add_task(t2)
1683
1727
  assert not wk._store._pending.add_parameters
1684
1728
 
1685
1729
 
1686
- def test_parameters_pending_during_add_task(tmp_path, param_p3):
1730
+ def test_parameters_pending_during_add_task(tmp_path: Path, param_p3: Parameter):
1687
1731
  wk = make_workflow(
1688
- schemas_spec=[[{"p1": None}, (), "t1"]],
1732
+ schemas_spec=[({"p1": None}, (), "t1")],
1689
1733
  local_inputs={0: ("p1",)},
1690
1734
  path=tmp_path,
1691
1735
  )
1692
- s2 = make_schemas([[{"p1": None, "p3": None}, ()]])
1736
+ (s2,) = make_schemas(({"p1": None, "p3": None}, ()))
1693
1737
  t2 = hf.Task(schema=s2, inputs=[hf.InputValue(param_p3, 301)])
1694
1738
  with wk.batch_update():
1695
1739
  wk.add_task(t2)
1696
1740
  assert wk._store._pending.add_parameters
1697
1741
 
1698
1742
 
1699
- def test_add_task_after(workflow_w0):
1743
+ def test_add_task_after(workflow_w0: Workflow):
1700
1744
  new_task = hf.Task(schema=hf.TaskSchema(objective="after_t1", actions=[]))
1701
1745
  workflow_w0.add_task_after(new_task, workflow_w0.tasks.t1)
1702
1746
  assert [i.name for i in workflow_w0.tasks] == ["t1", "after_t1", "t2"]
1703
1747
 
1704
1748
 
1705
- def test_add_task_after_no_ref(workflow_w0):
1749
+ def test_add_task_after_no_ref(workflow_w0: Workflow):
1706
1750
  new_task = hf.Task(schema=hf.TaskSchema(objective="at_end", actions=[]))
1707
1751
  workflow_w0.add_task_after(new_task)
1708
1752
  assert [i.name for i in workflow_w0.tasks] == ["t1", "t2", "at_end"]
1709
1753
 
1710
1754
 
1711
- def test_add_task_before(workflow_w0):
1755
+ def test_add_task_before(workflow_w0: Workflow):
1712
1756
  new_task = hf.Task(schema=hf.TaskSchema(objective="before_t2", actions=[]))
1713
1757
  workflow_w0.add_task_before(new_task, workflow_w0.tasks.t2)
1714
1758
  assert [i.name for i in workflow_w0.tasks] == ["t1", "before_t2", "t2"]
1715
1759
 
1716
1760
 
1717
- def test_add_task_before_no_ref(workflow_w0):
1761
+ def test_add_task_before_no_ref(workflow_w0: Workflow):
1718
1762
  new_task = hf.Task(schema=hf.TaskSchema(objective="at_start", actions=[]))
1719
1763
  workflow_w0.add_task_before(new_task)
1720
1764
  assert [i.name for i in workflow_w0.tasks] == ["at_start", "t1", "t2"]
1721
1765
 
1722
1766
 
1723
- @pytest.fixture
1724
- def act_env_1():
1725
- return hf.ActionEnvironment("env_1")
1726
-
1727
-
1728
1767
  def test_parameter_two_modifying_actions_expected_data_indices(
1729
- tmp_path, act_env_1, param_p1
1768
+ tmp_path: Path, act_env_1: ActionEnvironment, param_p1: Parameter
1730
1769
  ):
1731
1770
  act1 = hf.Action(
1732
1771
  commands=[hf.Command("doSomething <<parameter:p1>>", stdout="<<parameter:p1>>")],
@@ -1761,7 +1800,9 @@ def test_parameter_two_modifying_actions_expected_data_indices(
1761
1800
 
1762
1801
 
1763
1802
  @pytest.mark.parametrize("store", ["json", "zarr"])
1764
- def test_conditional_shell_schema_single_initialised_action(null_config, tmp_path, store):
1803
+ def test_conditional_shell_schema_single_initialised_action(
1804
+ null_config, tmp_path: Path, store: str
1805
+ ):
1765
1806
  rules = {
1766
1807
  "posix": hf.ActionRule(
1767
1808
  rule=hf.Rule(path="resources.os_name", condition=Value.equal_to("posix"))
@@ -1813,7 +1854,7 @@ def test_conditional_shell_schema_single_initialised_action(null_config, tmp_pat
1813
1854
 
1814
1855
  @pytest.mark.parametrize("store", ["json", "zarr"])
1815
1856
  def test_element_iteration_EARs_initialised_on_make_workflow(
1816
- null_config, tmp_path, store
1857
+ null_config, tmp_path: Path, store: str
1817
1858
  ):
1818
1859
  s1 = hf.TaskSchema(
1819
1860
  objective="t1",
@@ -1845,7 +1886,7 @@ def test_element_iteration_EARs_initialised_on_make_workflow(

  @pytest.mark.parametrize("store", ["json", "zarr"])
  def test_element_iteration_EARs_initialised_on_make_workflow_with_no_actions(
-     null_config, tmp_path, store
+     null_config, tmp_path: Path, store: str
  ):
      s1 = hf.TaskSchema(
          objective="t1",
@@ -1866,7 +1907,7 @@ def test_element_iteration_EARs_initialised_on_make_workflow_with_no_actions(

  @pytest.mark.parametrize("store", ["json", "zarr"])
  def test_element_iteration_EARs_not_initialised_on_make_workflow_due_to_unset(
-     null_config, tmp_path, store
+     null_config, tmp_path: Path, store: str
  ):
      s1 = hf.TaskSchema(
          objective="t1",
@@ -1917,7 +1958,7 @@ def test_element_iteration_EARs_not_initialised_on_make_workflow_due_to_unset(

  @pytest.mark.parametrize("store", ["json", "zarr"])
  def test_element_iteration_EARs_initialised_on_make_workflow_with_no_valid_actions(
-     null_config, tmp_path, store
+     null_config, tmp_path: Path, store: str
  ):
      rules = {
          "posix": hf.ActionRule(
@@ -1958,7 +1999,9 @@ def test_element_iteration_EARs_initialised_on_make_workflow_with_no_valid_actio


  @pytest.mark.parametrize("store", ["json", "zarr"])
- def test_get_merged_parameter_data_unset_data_raise(null_config, tmp_path, store):
+ def test_get_merged_parameter_data_unset_data_raise(
+     null_config, tmp_path: Path, store: str
+ ):
      s1 = hf.TaskSchema(
          objective="t1",
          inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
@@ -1992,7 +2035,9 @@ def test_get_merged_parameter_data_unset_data_raise(null_config, tmp_path, store


  @pytest.mark.parametrize("store", ["json", "zarr"])
- def test_get_merged_parameter_data_unset_data_no_raise(null_config, tmp_path, store):
+ def test_get_merged_parameter_data_unset_data_no_raise(
+     null_config, tmp_path: Path, store: str
+ ):
      s1 = hf.TaskSchema(
          objective="t1",
          inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
@@ -2025,7 +2070,9 @@ def test_get_merged_parameter_data_unset_data_no_raise(null_config, tmp_path, st


  @pytest.mark.parametrize("store", ["json", "zarr"])
- def test_get_merged_parameter_data_missing_data_raise(null_config, tmp_path, store):
+ def test_get_merged_parameter_data_missing_data_raise(
+     null_config, tmp_path: Path, store: str
+ ):
      s1 = hf.TaskSchema(
          objective="t1",
          inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
@@ -2059,7 +2106,9 @@ def test_get_merged_parameter_data_missing_data_raise(null_config, tmp_path, sto


  @pytest.mark.parametrize("store", ["json", "zarr"])
- def test_get_merged_parameter_data_missing_data_no_raise(null_config, tmp_path, store):
+ def test_get_merged_parameter_data_missing_data_no_raise(
+     null_config, tmp_path: Path, store: str
+ ):
      s1 = hf.TaskSchema(
          objective="t1",
          inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
@@ -2092,7 +2141,9 @@ def test_get_merged_parameter_data_missing_data_no_raise(null_config, tmp_path,


  @pytest.mark.parametrize("store", ["json", "zarr"])
- def test_get_merged_parameter_data_group_unset_data_raise(null_config, tmp_path, store):
+ def test_get_merged_parameter_data_group_unset_data_raise(
+     null_config, tmp_path: Path, store: str
+ ):
      s1 = hf.TaskSchema(
          objective="t1",
          inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
@@ -2143,7 +2194,7 @@ def test_get_merged_parameter_data_group_unset_data_raise(null_config, tmp_path,

  @pytest.mark.parametrize("store", ["json", "zarr"])
  def test_get_merged_parameter_data_group_unset_data_no_raise(
-     null_config, tmp_path, store
+     null_config, tmp_path: Path, store: str
  ):
      s1 = hf.TaskSchema(
          objective="t1",
@@ -2192,7 +2243,9 @@ def test_get_merged_parameter_data_group_unset_data_no_raise(


  @pytest.mark.parametrize("store", ["json", "zarr"])
- def test_get_merged_parameter_data_group_missing_data_raise(null_config, tmp_path, store):
+ def test_get_merged_parameter_data_group_missing_data_raise(
+     null_config, tmp_path: Path, store: str
+ ):
      s1 = hf.TaskSchema(
          objective="t1",
          inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
@@ -2243,7 +2296,7 @@ def test_get_merged_parameter_data_group_missing_data_raise(null_config, tmp_pat

  @pytest.mark.parametrize("store", ["json", "zarr"])
  def test_get_merged_parameter_data_group_missing_data_no_raise(
-     null_config, tmp_path, store
+     null_config, tmp_path: Path, store: str
  ):
      s1 = hf.TaskSchema(
          objective="t1",
@@ -2292,7 +2345,7 @@ def test_get_merged_parameter_data_group_missing_data_no_raise(


  @pytest.fixture
- def path_to_PV_classes_workflow(null_config, tmp_path):
+ def path_to_PV_classes_workflow(null_config, tmp_path: Path) -> Workflow:
      s1 = hf.TaskSchema(
          objective="t1",
          inputs=[hf.SchemaInput(parameter=hf.Parameter("p1c"))],
@@ -2311,60 +2364,64 @@ def path_to_PV_classes_workflow(null_config, tmp_path):
      return wk


- def test_path_to_PV_classes(path_to_PV_classes_workflow):
-     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(["inputs.p1c"]) == {
+ def test_path_to_PV_classes(path_to_PV_classes_workflow: Workflow):
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes("inputs.p1c") == {
          "inputs.p1c": P1,
      }


- def test_path_to_PV_classes_sub_data(path_to_PV_classes_workflow):
-     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(
-         ["inputs.p1c.a"]
-     ) == {
+ def test_path_to_PV_classes_sub_data(path_to_PV_classes_workflow: Workflow):
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes("inputs.p1c.a") == {
          "inputs.p1c": P1,
      }


- def test_path_to_PV_classes_sub_parameter(path_to_PV_classes_workflow):
+ def test_path_to_PV_classes_sub_parameter(path_to_PV_classes_workflow: Workflow):
      assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(
-         ["inputs.p1c.sub_param"]
+         "inputs.p1c.sub_param"
      ) == {
          "inputs.p1c": P1,
          "inputs.p1c.sub_param": P1_sub_param,
      }


- def test_path_to_PV_classes_multiple_sub_parameters(path_to_PV_classes_workflow):
+ def test_path_to_PV_classes_multiple_sub_parameters(
+     path_to_PV_classes_workflow: Workflow,
+ ):
      paths = ["inputs.p1c.sub_param", "inputs.p1c.sub_param_2"]
-     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(paths) == {
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(*paths) == {
          "inputs.p1c": P1,
          "inputs.p1c.sub_param": P1_sub_param,
          "inputs.p1c.sub_param_2": P1_sub_param_2,
      }


- def test_path_to_PV_classes_multiple_sub_parameter_attr(path_to_PV_classes_workflow):
+ def test_path_to_PV_classes_multiple_sub_parameter_attr(
+     path_to_PV_classes_workflow: Workflow,
+ ):
      paths = ["inputs.p1c.sub_param.e"]
-     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(paths) == {
+     assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(*paths) == {
          "inputs.p1c": P1,
          "inputs.p1c.sub_param": P1_sub_param,
      }


- def test_path_to_PV_classes_inputs_only_path_ignored(path_to_PV_classes_workflow):
+ def test_path_to_PV_classes_inputs_only_path_ignored(
+     path_to_PV_classes_workflow: Workflow,
+ ):
      paths_1 = ["inputs", "inputs.p1c"]
      paths_2 = ["inputs.p1c"]
      assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(
-         paths_1
-     ) == path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(paths_2)
+         *paths_1
+     ) == path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(*paths_2)


- def test_path_to_PV_classes_resources_path_ignored(path_to_PV_classes_workflow):
+ def test_path_to_PV_classes_resources_path_ignored(path_to_PV_classes_workflow: Workflow):
      paths_1 = ["resources", "inputs.p1c"]
      paths_2 = ["inputs.p1c"]
      assert path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(
-         paths_1
-     ) == path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(paths_2)
+         *paths_1
+     ) == path_to_PV_classes_workflow.tasks.t1._paths_to_PV_classes(*paths_2)
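Note: the hunks above consistently move `_paths_to_PV_classes` from taking a single list of paths to taking the paths as variadic positional arguments, so callers now unpack existing lists with `*`. A minimal sketch of the two calling styles (`task` is a stand-in for `wk.tasks.t1`; per the assertions above, the return value maps each path to its ParameterValue class):

    paths = ["inputs.p1c", "inputs.p1c.sub_param"]
    task._paths_to_PV_classes(paths)    # old signature: one list argument
    task._paths_to_PV_classes(*paths)   # new signature: variadic path strings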


  def test_input_values_specified_by_dict(null_config):
@@ -2396,7 +2453,7 @@ def test_raise_UnknownEnvironmentPresetError_sequence(null_config):
          hf.Task(schema=ts, sequences=[seq])


- def test_group_values_input_and_output_source_from_upstream(null_config, tmp_path):
+ def test_group_values_input_and_output_source_from_upstream(null_config, tmp_path: Path):
      """
      | task | inputs | outputs | group    | num_elements                |
      | ---- | ------ | ------- | -------- | --------------------------- |
@@ -2458,3 +2515,51 @@ def test_group_values_input_and_output_source_from_upstream(null_config, tmp_pat
      assert wk.tasks[2].num_elements == 1
      assert [i.value for i in wk.tasks[2].inputs.p1] == [[None, None, None]]
      assert [i.value for i in wk.tasks[2].inputs.p2] == [[None, None, None]]
+
+
+ def test_is_input_type_required_True(null_config):
+     inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         actions=[
+             hf.Action(
+                 commands=[hf.Command("cat <<file:my_input_file>>")],
+                 input_file_generators=[
+                     hf.InputFileGenerator(
+                         input_file=inp_file,
+                         inputs=[hf.Parameter("p1")],
+                         script="NOT-SET-FOR-THIS-TEST",
+                     ),
+                 ],
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     t1 = hf.Task(schema=s1, inputs={"p1": 100})
+     assert t1.is_input_type_required(typ="p1", element_set=t1.element_sets[0])
+
+
+ def test_is_input_type_required_False(null_config):
+     inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         actions=[
+             hf.Action(
+                 commands=[hf.Command("cat <<file:my_input_file>>")],
+                 input_file_generators=[
+                     hf.InputFileGenerator(
+                         input_file=inp_file,
+                         inputs=[hf.Parameter("p1")],
+                         script="NOT-SET-FOR-THIS-TEST",
+                     ),
+                 ],
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     t1 = hf.Task(
+         schema=s1, input_files=[hf.InputFile(file=inp_file, path="NOT-SET-FOR-THIS-TEST")]
+     )
+     assert not t1.is_input_type_required(typ="p1", element_set=t1.element_sets[0])
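Note: taken together, the two added tests appear to pin down the semantics of `Task.is_input_type_required`: an input consumed only by an `InputFileGenerator` is required while the generated file must still be produced, and stops being required once that file is supplied directly via `input_files`.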