hpcflow_new2-0.2.0a189-py3-none-any.whl → hpcflow_new2-0.2.0a190-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only.
- hpcflow/__pyinstaller/hook-hpcflow.py +8 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/sdk/__init__.py +21 -15
- hpcflow/sdk/app.py +2133 -770
- hpcflow/sdk/cli.py +281 -250
- hpcflow/sdk/cli_common.py +6 -2
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +77 -42
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +578 -311
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +112 -85
- hpcflow/sdk/config/types.py +145 -0
- hpcflow/sdk/core/actions.py +1054 -994
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +81 -63
- hpcflow/sdk/core/command_files.py +275 -185
- hpcflow/sdk/core/commands.py +111 -107
- hpcflow/sdk/core/element.py +724 -503
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +398 -51
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +380 -334
- hpcflow/sdk/core/loop_cache.py +160 -43
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +728 -600
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +33 -22
- hpcflow/sdk/core/task.py +1546 -1325
- hpcflow/sdk/core/task_schema.py +240 -196
- hpcflow/sdk/core/test_utils.py +126 -88
- hpcflow/sdk/core/types.py +387 -0
- hpcflow/sdk/core/utils.py +410 -305
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +1192 -1028
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/demo/cli.py +46 -33
- hpcflow/sdk/helper/cli.py +18 -16
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +83 -59
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +988 -586
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +408 -153
- hpcflow/sdk/persistence/pending.py +158 -123
- hpcflow/sdk/persistence/store_resource.py +37 -22
- hpcflow/sdk/persistence/types.py +307 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +477 -420
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +444 -404
- hpcflow/sdk/submission/schedulers/__init__.py +133 -40
- hpcflow/sdk/submission/schedulers/direct.py +97 -71
- hpcflow/sdk/submission/schedulers/sge.py +132 -126
- hpcflow/sdk/submission/schedulers/slurm.py +263 -268
- hpcflow/sdk/submission/schedulers/utils.py +7 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +102 -29
- hpcflow/sdk/submission/shells/bash.py +72 -55
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +37 -29
- hpcflow/sdk/submission/submission.py +203 -257
- hpcflow/sdk/submission/types.py +143 -0
- hpcflow/sdk/typing.py +163 -12
- hpcflow/tests/conftest.py +8 -6
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_main_scripts.py +60 -30
- hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -4
- hpcflow/tests/unit/test_action.py +86 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +13 -6
- hpcflow/tests/unit/test_cli.py +1 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +20 -15
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +3 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +65 -58
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +16 -7
- hpcflow/tests/unit/test_persistence.py +48 -35
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +8 -3
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +3 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +39 -19
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/workflows/test_jobscript.py +2 -1
- hpcflow/tests/workflows/test_workflows.py +18 -13
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/METADATA +2 -1
- hpcflow_new2-0.2.0a190.dist-info/RECORD +165 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/entry_points.txt +0 -0
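A refactor that recurs across the per-file changes below is the new hpcflow/sdk/core/app_aware.py module (+24 lines): classes such as _ElementPrefixedParameter, ElementIteration and Element drop their per-class _app_attr declarations and instead inherit from an AppAware base, reading the application object as self._app. This diff does not include app_aware.py itself, so the following is only a rough sketch of what such a mixin could look like; _FakeApp, _FakeConfig and _set_app are illustrative stand-ins, not hpcflow API.

from __future__ import annotations

from dataclasses import dataclass
from typing import Any, ClassVar


@dataclass
class _FakeConfig:
    # Stand-in for an app configuration object (assumption for the sketch).
    default_shell: str = "bash"


@dataclass
class _FakeApp:
    # Stand-in for the application object that the SDK would normally provide.
    config: _FakeConfig


class AppAware:
    """Hypothetical mixin: a single shared hook for the application object."""

    # Assumed injection point; in hpcflow the SDK binds core classes to an app
    # instance, which this plain class attribute merely stands in for.
    _app: ClassVar[Any] = None

    @classmethod
    def _set_app(cls, app: Any) -> None:
        # Assumed helper used once when wiring classes to an app instance.
        cls._app = app


class ElementLike(AppAware):
    def describe(self) -> str:
        # Mirrors how the updated element.py reads configuration via `self._app`.
        return f"default shell: {self._app.config.default_shell}"


if __name__ == "__main__":
    AppAware._set_app(_FakeApp(config=_FakeConfig()))
    print(ElementLike().describe())  # -> "default shell: bash"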
hpcflow/sdk/core/element.py
CHANGED
@@ -5,16 +5,16 @@ Elements are components of tasks.
|
|
5
5
|
from __future__ import annotations
|
6
6
|
import copy
|
7
7
|
from dataclasses import dataclass, field
|
8
|
+
from itertools import chain
|
8
9
|
import os
|
9
|
-
from typing import
|
10
|
+
from typing import cast, overload, TYPE_CHECKING
|
10
11
|
|
11
|
-
from
|
12
|
-
from valida.rules import Rule
|
13
|
-
|
14
|
-
from hpcflow.sdk import app
|
12
|
+
from hpcflow.sdk.core.enums import ParallelMode
|
15
13
|
from hpcflow.sdk.core.errors import UnsupportedOSError, UnsupportedSchedulerError
|
16
14
|
from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
|
17
|
-
from hpcflow.sdk.core.
|
15
|
+
from hpcflow.sdk.core.loop_cache import LoopIndex
|
16
|
+
from hpcflow.sdk.typing import hydrate
|
17
|
+
from hpcflow.sdk.core.app_aware import AppAware
|
18
18
|
from hpcflow.sdk.core.utils import (
|
19
19
|
check_valid_py_identifier,
|
20
20
|
dict_values_process_flat,
|
@@ -24,75 +24,77 @@ from hpcflow.sdk.core.utils import (
|
|
24
24
|
from hpcflow.sdk.log import TimeIt
|
25
25
|
from hpcflow.sdk.submission.shells import get_shell
|
26
26
|
|
27
|
+
if TYPE_CHECKING:
|
28
|
+
from collections.abc import Iterable, Iterator, Mapping, Sequence
|
29
|
+
from typing import Any, ClassVar, Literal
|
30
|
+
from ..app import BaseApp
|
31
|
+
from ..typing import DataIndex, ParamSource
|
32
|
+
from .actions import Action, ElementAction, ElementActionRun
|
33
|
+
from .parameters import InputSource, ParameterPath, InputValue, ResourceSpec
|
34
|
+
from .rule import Rule
|
35
|
+
from .task import WorkflowTask, ElementSet
|
36
|
+
from .workflow import Workflow
|
27
37
|
|
28
|
-
class _ElementPrefixedParameter:
|
29
|
-
_app_attr = "_app"
|
30
38
|
|
39
|
+
class _ElementPrefixedParameter(AppAware):
|
31
40
|
def __init__(
|
32
41
|
self,
|
33
42
|
prefix: str,
|
34
|
-
element_iteration:
|
35
|
-
element_action:
|
36
|
-
element_action_run:
|
43
|
+
element_iteration: ElementIteration | None = None,
|
44
|
+
element_action: ElementAction | None = None,
|
45
|
+
element_action_run: ElementActionRun | None = None,
|
37
46
|
) -> None:
|
38
47
|
self._prefix = prefix
|
39
48
|
self._element_iteration = element_iteration
|
40
49
|
self._element_action = element_action
|
41
50
|
self._element_action_run = element_action_run
|
42
51
|
|
43
|
-
|
52
|
+
# assigned on first access
|
53
|
+
self._prefixed_names_unlabelled: Mapping[str, Sequence[str]] | None = None
|
44
54
|
|
45
|
-
def __getattr__(self, name):
|
55
|
+
def __getattr__(self, name: str) -> ElementParameter | Mapping[str, ElementParameter]:
|
46
56
|
if name not in self.prefixed_names_unlabelled:
|
47
57
|
raise ValueError(
|
48
58
|
f"No {self._prefix} named {name!r}. Available {self._prefix} are: "
|
49
59
|
f"{self.prefixed_names_unlabelled_str}."
|
50
60
|
)
|
51
61
|
|
52
|
-
labels
|
53
|
-
if labels:
|
62
|
+
if labels := self.prefixed_names_unlabelled.get(name):
|
54
63
|
# is multiple; return a dict of `ElementParameter`s
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
path=path_i,
|
60
|
-
task=self._task,
|
61
|
-
parent=self._parent,
|
62
|
-
element=self._element_iteration_obj,
|
63
|
-
)
|
64
|
-
|
64
|
+
return {
|
65
|
+
label_i: self.__parameter(f"{self._prefix}.{name}[{label_i}]")
|
66
|
+
for label_i in labels
|
67
|
+
}
|
65
68
|
else:
|
66
69
|
# could be labelled still, but with `multiple=False`
|
67
|
-
|
68
|
-
out = self._app.ElementParameter(
|
69
|
-
path=path_i,
|
70
|
-
task=self._task,
|
71
|
-
parent=self._parent,
|
72
|
-
element=self._element_iteration_obj,
|
73
|
-
)
|
74
|
-
return out
|
70
|
+
return self.__parameter(f"{self._prefix}.{name}")
|
75
71
|
|
76
|
-
def __dir__(self):
|
77
|
-
|
72
|
+
def __dir__(self) -> Iterator[str]:
|
73
|
+
yield from super().__dir__()
|
74
|
+
yield from self.prefixed_names_unlabelled
|
78
75
|
|
79
76
|
@property
|
80
|
-
def
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
|
77
|
+
def __parent(self) -> ElementIteration | ElementActionRun | ElementAction:
|
78
|
+
p = self._element_iteration or self._element_action or self._element_action_run
|
79
|
+
assert p is not None
|
80
|
+
return p
|
81
|
+
|
82
|
+
def __parameter(self, name: str) -> ElementParameter:
|
83
|
+
"""Manufacture an ElementParameter with the given name."""
|
84
|
+
p = self.__parent
|
85
|
+
return self._app.ElementParameter(
|
86
|
+
path=name,
|
87
|
+
task=self._task,
|
88
|
+
parent=p,
|
89
|
+
element=p if isinstance(p, ElementIteration) else p.element_iteration,
|
90
|
+
)
|
89
91
|
|
90
92
|
@property
|
91
|
-
def _task(self):
|
92
|
-
return self.
|
93
|
+
def _task(self) -> WorkflowTask:
|
94
|
+
return self.__parent.task
|
93
95
|
|
94
96
|
@property
|
95
|
-
def prefixed_names_unlabelled(self) ->
|
97
|
+
def prefixed_names_unlabelled(self) -> Mapping[str, Sequence[str]]:
|
96
98
|
"""
|
97
99
|
A mapping between input types and associated labels.
|
98
100
|
|
@@ -101,45 +103,40 @@ class _ElementPrefixedParameter:
|
|
101
103
|
|
102
104
|
"""
|
103
105
|
if self._prefixed_names_unlabelled is None:
|
104
|
-
self._prefixed_names_unlabelled = self.
|
106
|
+
self._prefixed_names_unlabelled = self.__get_prefixed_names_unlabelled()
|
105
107
|
return self._prefixed_names_unlabelled
|
106
108
|
|
107
109
|
@property
|
108
|
-
def prefixed_names_unlabelled_str(self):
|
110
|
+
def prefixed_names_unlabelled_str(self) -> str:
|
109
111
|
"""
|
110
112
|
A description of the prefixed names.
|
111
113
|
"""
|
112
|
-
return ", ".join(
|
114
|
+
return ", ".join(self.prefixed_names_unlabelled)
|
113
115
|
|
114
|
-
def __repr__(self):
|
116
|
+
def __repr__(self) -> str:
|
115
117
|
# If there are one or more labels present, then replace with a single name
|
116
118
|
# indicating there could be multiple (using a `*` prefix):
|
117
|
-
names =
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
return
|
125
|
-
|
126
|
-
def
|
127
|
-
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
if "[" in i:
|
134
|
-
unlab_i, label_i = split_param_label(i)
|
135
|
-
if unlab_i not in all_names:
|
136
|
-
all_names[unlab_i] = []
|
137
|
-
all_names[unlab_i].append(label_i)
|
119
|
+
names = ", ".join(
|
120
|
+
"*" + unlabelled if labels else unlabelled
|
121
|
+
for unlabelled, labels in self.prefixed_names_unlabelled.items()
|
122
|
+
)
|
123
|
+
return f"{self.__class__.__name__}({names})"
|
124
|
+
|
125
|
+
def _get_prefixed_names(self) -> list[str]:
|
126
|
+
return sorted(self.__parent.get_parameter_names(self._prefix))
|
127
|
+
|
128
|
+
def __get_prefixed_names_unlabelled(self) -> Mapping[str, Sequence[str]]:
|
129
|
+
all_names: dict[str, list[str]] = {}
|
130
|
+
for name in self._get_prefixed_names():
|
131
|
+
if "[" in name:
|
132
|
+
unlab_i, label_i = split_param_label(name)
|
133
|
+
if unlab_i is not None and label_i is not None:
|
134
|
+
all_names.setdefault(unlab_i, []).append(label_i)
|
138
135
|
else:
|
139
|
-
all_names[
|
136
|
+
all_names[name] = []
|
140
137
|
return all_names
|
141
138
|
|
142
|
-
def __iter__(self):
|
139
|
+
def __iter__(self) -> Iterator[ElementParameter | Mapping[str, ElementParameter]]:
|
143
140
|
for name in self.prefixed_names_unlabelled:
|
144
141
|
yield getattr(self, name)
|
145
142
|
|
@@ -160,9 +157,9 @@ class ElementInputs(_ElementPrefixedParameter):
|
|
160
157
|
|
161
158
|
def __init__(
|
162
159
|
self,
|
163
|
-
element_iteration:
|
164
|
-
element_action:
|
165
|
-
element_action_run:
|
160
|
+
element_iteration: ElementIteration | None = None,
|
161
|
+
element_action: ElementAction | None = None,
|
162
|
+
element_action_run: ElementActionRun | None = None,
|
166
163
|
) -> None:
|
167
164
|
super().__init__("inputs", element_iteration, element_action, element_action_run)
|
168
165
|
|
@@ -183,9 +180,9 @@ class ElementOutputs(_ElementPrefixedParameter):
|
|
183
180
|
|
184
181
|
def __init__(
|
185
182
|
self,
|
186
|
-
element_iteration:
|
187
|
-
element_action:
|
188
|
-
element_action_run:
|
183
|
+
element_iteration: ElementIteration | None = None,
|
184
|
+
element_action: ElementAction | None = None,
|
185
|
+
element_action_run: ElementActionRun | None = None,
|
189
186
|
) -> None:
|
190
187
|
super().__init__("outputs", element_iteration, element_action, element_action_run)
|
191
188
|
|
@@ -206,9 +203,9 @@ class ElementInputFiles(_ElementPrefixedParameter):
|
|
206
203
|
|
207
204
|
def __init__(
|
208
205
|
self,
|
209
|
-
element_iteration:
|
210
|
-
element_action:
|
211
|
-
element_action_run:
|
206
|
+
element_iteration: ElementIteration | None = None,
|
207
|
+
element_action: ElementAction | None = None,
|
208
|
+
element_action_run: ElementActionRun | None = None,
|
212
209
|
) -> None:
|
213
210
|
super().__init__(
|
214
211
|
"input_files", element_iteration, element_action, element_action_run
|
@@ -231,9 +228,9 @@ class ElementOutputFiles(_ElementPrefixedParameter):
|
|
231
228
|
|
232
229
|
def __init__(
|
233
230
|
self,
|
234
|
-
element_iteration:
|
235
|
-
element_action:
|
236
|
-
element_action_run:
|
231
|
+
element_iteration: ElementIteration | None = None,
|
232
|
+
element_action: ElementAction | None = None,
|
233
|
+
element_action_run: ElementActionRun | None = None,
|
237
234
|
) -> None:
|
238
235
|
super().__init__(
|
239
236
|
"output_files", element_iteration, element_action, element_action_run
|
@@ -241,6 +238,7 @@ class ElementOutputFiles(_ElementPrefixedParameter):
|
|
241
238
|
|
242
239
|
|
243
240
|
@dataclass
|
241
|
+
@hydrate
|
244
242
|
class ElementResources(JSONLike):
|
245
243
|
"""
|
246
244
|
The resources an element requires.
|
@@ -299,52 +297,53 @@ class ElementResources(JSONLike):
|
|
299
297
|
# TODO: how to specify e.g. high-memory requirement?
|
300
298
|
|
301
299
|
#: Which scratch space to use.
|
302
|
-
scratch:
|
300
|
+
scratch: str | None = None
|
303
301
|
#: Which parallel mode to use.
|
304
|
-
parallel_mode:
|
302
|
+
parallel_mode: ParallelMode | None = None
|
305
303
|
#: How many cores to request.
|
306
|
-
num_cores:
|
304
|
+
num_cores: int | None = None
|
307
305
|
#: How many cores per compute node to request.
|
308
|
-
num_cores_per_node:
|
306
|
+
num_cores_per_node: int | None = None
|
309
307
|
#: How many threads to request.
|
310
|
-
num_threads:
|
308
|
+
num_threads: int | None = None
|
311
309
|
#: How many compute nodes to request.
|
312
|
-
num_nodes:
|
310
|
+
num_nodes: int | None = None
|
313
311
|
|
314
312
|
#: Which scheduler to use.
|
315
|
-
scheduler:
|
313
|
+
scheduler: str | None = None
|
316
314
|
#: Which system shell to use.
|
317
|
-
shell:
|
315
|
+
shell: str | None = None
|
318
316
|
#: Whether to use array jobs.
|
319
|
-
use_job_array:
|
317
|
+
use_job_array: bool | None = None
|
320
318
|
#: If using array jobs, up to how many items should be in the job array.
|
321
|
-
max_array_items:
|
319
|
+
max_array_items: int | None = None
|
322
320
|
#: How long to run for.
|
323
|
-
time_limit:
|
321
|
+
time_limit: str | None = None
|
322
|
+
|
324
323
|
#: Additional arguments to pass to the scheduler.
|
325
|
-
scheduler_args:
|
324
|
+
scheduler_args: dict[str, Any] = field(default_factory=dict)
|
326
325
|
#: Additional arguments to pass to the shell.
|
327
|
-
shell_args:
|
326
|
+
shell_args: dict[str, Any] = field(default_factory=dict)
|
328
327
|
#: Which OS to use.
|
329
|
-
os_name:
|
328
|
+
os_name: str | None = None
|
330
329
|
#: Which execution environments to use.
|
331
|
-
environments:
|
330
|
+
environments: dict[str, dict[str, Any]] | None = None
|
332
331
|
|
333
332
|
# SGE scheduler specific:
|
334
333
|
#: Which SGE parallel environment to request.
|
335
|
-
SGE_parallel_env: str = None
|
334
|
+
SGE_parallel_env: str | None = None
|
336
335
|
|
337
336
|
# SLURM scheduler specific:
|
338
337
|
#: Which SLURM partition to request.
|
339
|
-
SLURM_partition: str = None
|
338
|
+
SLURM_partition: str | None = None
|
340
339
|
#: How many SLURM tasks to request.
|
341
|
-
SLURM_num_tasks:
|
340
|
+
SLURM_num_tasks: int | None = None
|
342
341
|
#: How many SLURM tasks per compute node to request.
|
343
|
-
SLURM_num_tasks_per_node:
|
342
|
+
SLURM_num_tasks_per_node: int | None = None
|
344
343
|
#: How many compute nodes to request.
|
345
|
-
SLURM_num_nodes:
|
344
|
+
SLURM_num_nodes: int | None = None
|
346
345
|
#: How many CPU cores to ask for per SLURM task.
|
347
|
-
SLURM_num_cpus_per_task:
|
346
|
+
SLURM_num_cpus_per_task: int | None = None
|
348
347
|
|
349
348
|
def __post_init__(self):
|
350
349
|
if (
|
@@ -358,38 +357,30 @@ class ElementResources(JSONLike):
|
|
358
357
|
if self.parallel_mode:
|
359
358
|
self.parallel_mode = get_enum_by_name_or_val(ParallelMode, self.parallel_mode)
|
360
359
|
|
361
|
-
|
362
|
-
self.
|
363
|
-
|
364
|
-
def __eq__(self, other) -> bool:
|
365
|
-
if type(self) != type(other):
|
366
|
-
return False
|
367
|
-
else:
|
368
|
-
return self.__dict__ == other.__dict__
|
360
|
+
def __eq__(self, other: Any) -> bool:
|
361
|
+
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
|
369
362
|
|
370
|
-
def get_jobscript_hash(self):
|
363
|
+
def get_jobscript_hash(self) -> int:
|
371
364
|
"""Get hash from all arguments that distinguish jobscripts."""
|
372
365
|
|
373
|
-
def _hash_dict(d):
|
366
|
+
def _hash_dict(d: dict) -> int:
|
374
367
|
if not d:
|
375
368
|
return -1
|
376
369
|
keys, vals = zip(*d.items())
|
377
370
|
return hash(tuple((keys, vals)))
|
378
371
|
|
379
|
-
exclude =
|
372
|
+
exclude = {"time_limit"}
|
380
373
|
dct = {k: copy.deepcopy(v) for k, v in self.__dict__.items() if k not in exclude}
|
381
374
|
|
382
375
|
scheduler_args = dct["scheduler_args"]
|
383
376
|
shell_args = dct["shell_args"]
|
384
377
|
envs = dct["environments"]
|
385
378
|
|
386
|
-
if
|
387
|
-
|
388
|
-
|
389
|
-
dct["scheduler_args"] = _hash_dict(dct["scheduler_args"])
|
379
|
+
if "options" in scheduler_args:
|
380
|
+
dct["scheduler_args"]["options"] = _hash_dict(scheduler_args["options"])
|
381
|
+
dct["scheduler_args"] = _hash_dict(dct["scheduler_args"])
|
390
382
|
|
391
|
-
|
392
|
-
dct["shell_args"] = _hash_dict(shell_args)
|
383
|
+
dct["shell_args"] = _hash_dict(shell_args)
|
393
384
|
|
394
385
|
if isinstance(envs, dict):
|
395
386
|
for k, v in envs.items():
|
@@ -401,7 +392,7 @@ class ElementResources(JSONLike):
|
|
401
392
|
@property
|
402
393
|
def is_parallel(self) -> bool:
|
403
394
|
"""Returns True if any scheduler-agnostic arguments indicate a parallel job."""
|
404
|
-
return (
|
395
|
+
return bool(
|
405
396
|
(self.num_cores and self.num_cores != 1)
|
406
397
|
or (self.num_cores_per_node and self.num_cores_per_node != 1)
|
407
398
|
or (self.num_nodes and self.num_nodes != 1)
|
@@ -411,7 +402,7 @@ class ElementResources(JSONLike):
|
|
411
402
|
@property
|
412
403
|
def SLURM_is_parallel(self) -> bool:
|
413
404
|
"""Returns True if any SLURM-specific arguments indicate a parallel job."""
|
414
|
-
return (
|
405
|
+
return bool(
|
415
406
|
(self.SLURM_num_tasks and self.SLURM_num_tasks != 1)
|
416
407
|
or (self.SLURM_num_tasks_per_node and self.SLURM_num_tasks_per_node != 1)
|
417
408
|
or (self.SLURM_num_nodes and self.SLURM_num_nodes != 1)
|
@@ -419,34 +410,34 @@ class ElementResources(JSONLike):
|
|
419
410
|
)
|
420
411
|
|
421
412
|
@staticmethod
|
422
|
-
def get_env_instance_filterable_attributes() ->
|
413
|
+
def get_env_instance_filterable_attributes() -> tuple[str, ...]:
|
423
414
|
"""Get a tuple of resource attributes that are used to filter environment
|
424
415
|
executable instances at submit- and run-time."""
|
425
416
|
return ("num_cores",) # TODO: filter on `parallel_mode` later
|
426
417
|
|
427
418
|
@staticmethod
|
428
|
-
def get_default_os_name():
|
419
|
+
def get_default_os_name() -> str:
|
429
420
|
"""
|
430
421
|
Get the default value for OS name.
|
431
422
|
"""
|
432
423
|
return os.name
|
433
424
|
|
434
425
|
@classmethod
|
435
|
-
def get_default_shell(cls):
|
426
|
+
def get_default_shell(cls) -> str:
|
436
427
|
"""
|
437
428
|
Get the default value for name.
|
438
429
|
"""
|
439
|
-
return cls.
|
430
|
+
return cls._app.config.default_shell
|
440
431
|
|
441
432
|
@classmethod
|
442
|
-
def get_default_scheduler(cls, os_name, shell_name):
|
433
|
+
def get_default_scheduler(cls, os_name: str, shell_name: str) -> str:
|
443
434
|
"""
|
444
435
|
Get the default value for scheduler.
|
445
436
|
"""
|
446
437
|
if os_name == "nt" and "wsl" in shell_name:
|
447
438
|
# provide a "*_posix" default scheduler on windows if shell is WSL:
|
448
439
|
return "direct_posix"
|
449
|
-
return cls.
|
440
|
+
return cls._app.config.default_scheduler
|
450
441
|
|
451
442
|
def set_defaults(self):
|
452
443
|
"""
|
@@ -461,13 +452,13 @@ class ElementResources(JSONLike):
|
|
461
452
|
|
462
453
|
# merge defaults shell args from config:
|
463
454
|
self.shell_args = {
|
464
|
-
**self.
|
455
|
+
**self._app.config.shells.get(self.shell, {}).get("defaults", {}),
|
465
456
|
**self.shell_args,
|
466
457
|
}
|
467
458
|
|
468
459
|
# "direct_posix" scheduler is valid on Windows if using WSL:
|
469
460
|
cfg_lookup = f"{self.scheduler}_posix" if "wsl" in self.shell else self.scheduler
|
470
|
-
cfg_sched = copy.deepcopy(self.
|
461
|
+
cfg_sched = copy.deepcopy(self._app.config.schedulers.get(cfg_lookup, {}))
|
471
462
|
|
472
463
|
# merge defaults scheduler args from config:
|
473
464
|
cfg_defs = cfg_sched.get("defaults", {})
|
@@ -481,10 +472,10 @@ class ElementResources(JSONLike):
|
|
481
472
|
supported on this machine (as specified by the app configuration)."""
|
482
473
|
if self.os_name != os.name:
|
483
474
|
raise UnsupportedOSError(os_name=self.os_name)
|
484
|
-
if self.scheduler not in self.
|
475
|
+
if self.scheduler not in self._app.config.schedulers:
|
485
476
|
raise UnsupportedSchedulerError(
|
486
477
|
scheduler=self.scheduler,
|
487
|
-
supported=self.
|
478
|
+
supported=self._app.config.schedulers,
|
488
479
|
)
|
489
480
|
# might raise `UnsupportedShellError`:
|
490
481
|
get_shell(shell_name=self.shell, os_name=self.os_name)
|
@@ -495,11 +486,11 @@ class ElementResources(JSONLike):
|
|
495
486
|
key = tuple(self.scheduler.split("_"))
|
496
487
|
else:
|
497
488
|
key = (self.scheduler.lower(), self.os_name.lower())
|
498
|
-
scheduler_cls = self.
|
499
|
-
scheduler_cls.process_resources(self, self.
|
489
|
+
scheduler_cls = self._app.scheduler_lookup[key]
|
490
|
+
scheduler_cls.process_resources(self, self._app.config.schedulers[self.scheduler])
|
500
491
|
|
501
492
|
|
502
|
-
class ElementIteration:
|
493
|
+
class ElementIteration(AppAware):
|
503
494
|
"""
|
504
495
|
A particular iteration of an element.
|
505
496
|
|
@@ -527,38 +518,36 @@ class ElementIteration:
|
|
527
518
|
Indexing information from the loop.
|
528
519
|
"""
|
529
520
|
|
530
|
-
_app_attr = "app"
|
531
|
-
|
532
521
|
def __init__(
|
533
522
|
self,
|
534
523
|
id_: int,
|
535
524
|
is_pending: bool,
|
536
525
|
index: int,
|
537
|
-
element:
|
538
|
-
data_idx:
|
526
|
+
element: Element,
|
527
|
+
data_idx: DataIndex,
|
539
528
|
EARs_initialised: bool,
|
540
|
-
EAR_IDs:
|
541
|
-
EARs:
|
542
|
-
schema_parameters:
|
543
|
-
loop_idx:
|
529
|
+
EAR_IDs: dict[int, list[int]],
|
530
|
+
EARs: dict[int, dict[Mapping[str, Any], Any]] | None,
|
531
|
+
schema_parameters: list[str],
|
532
|
+
loop_idx: Mapping[str, int],
|
544
533
|
):
|
545
534
|
self._id = id_
|
546
535
|
self._is_pending = is_pending
|
547
536
|
self._index = index
|
548
537
|
self._element = element
|
549
538
|
self._data_idx = data_idx
|
550
|
-
self._loop_idx = loop_idx
|
539
|
+
self._loop_idx = LoopIndex(loop_idx)
|
551
540
|
self._schema_parameters = schema_parameters
|
552
541
|
self._EARs_initialised = EARs_initialised
|
553
542
|
self._EARs = EARs
|
554
543
|
self._EAR_IDs = EAR_IDs
|
555
544
|
|
556
545
|
# assigned on first access of corresponding properties:
|
557
|
-
self._inputs = None
|
558
|
-
self._outputs = None
|
559
|
-
self._input_files = None
|
560
|
-
self._output_files = None
|
561
|
-
self._action_objs = None
|
546
|
+
self._inputs: ElementInputs | None = None
|
547
|
+
self._outputs: ElementOutputs | None = None
|
548
|
+
self._input_files: ElementInputFiles | None = None
|
549
|
+
self._output_files: ElementOutputFiles | None = None
|
550
|
+
self._action_objs: dict[int, ElementAction] | None = None
|
562
551
|
|
563
552
|
def __repr__(self):
|
564
553
|
return (
|
@@ -569,24 +558,24 @@ class ElementIteration:
|
|
569
558
|
)
|
570
559
|
|
571
560
|
@property
|
572
|
-
def data_idx(self):
|
561
|
+
def data_idx(self) -> DataIndex:
|
573
562
|
"""The overall element iteration data index, before resolution of EARs."""
|
574
563
|
return self._data_idx
|
575
564
|
|
576
565
|
@property
|
577
|
-
def EARs_initialised(self):
|
566
|
+
def EARs_initialised(self) -> bool:
|
578
567
|
"""Whether or not the EARs have been initialised."""
|
579
568
|
return self._EARs_initialised
|
580
569
|
|
581
570
|
@property
|
582
|
-
def element(self):
|
571
|
+
def element(self) -> Element:
|
583
572
|
"""
|
584
573
|
The element this is an iteration of.
|
585
574
|
"""
|
586
575
|
return self._element
|
587
576
|
|
588
577
|
@property
|
589
|
-
def index(self):
|
578
|
+
def index(self) -> int:
|
590
579
|
"""
|
591
580
|
The index of this iteration in its parent element.
|
592
581
|
"""
|
@@ -607,108 +596,104 @@ class ElementIteration:
|
|
607
596
|
return self._is_pending
|
608
597
|
|
609
598
|
@property
|
610
|
-
def task(self):
|
599
|
+
def task(self) -> WorkflowTask:
|
611
600
|
"""
|
612
601
|
The task this is an iteration of an element for.
|
613
602
|
"""
|
614
603
|
return self.element.task
|
615
604
|
|
616
605
|
@property
|
617
|
-
def workflow(self):
|
606
|
+
def workflow(self) -> Workflow:
|
618
607
|
"""
|
619
608
|
The workflow this is a part of.
|
620
609
|
"""
|
621
610
|
return self.element.workflow
|
622
611
|
|
623
612
|
@property
|
624
|
-
def loop_idx(self) ->
|
613
|
+
def loop_idx(self) -> LoopIndex[str, int]:
|
625
614
|
"""
|
626
615
|
Indexing information from the loop.
|
627
616
|
"""
|
628
617
|
return self._loop_idx
|
629
618
|
|
630
619
|
@property
|
631
|
-
def schema_parameters(self) ->
|
620
|
+
def schema_parameters(self) -> Sequence[str]:
|
632
621
|
"""
|
633
622
|
Parameters from the schema.
|
634
623
|
"""
|
635
624
|
return self._schema_parameters
|
636
625
|
|
637
626
|
@property
|
638
|
-
def EAR_IDs(self) ->
|
627
|
+
def EAR_IDs(self) -> Mapping[int, Sequence[int]]:
|
639
628
|
"""
|
640
629
|
Mapping from iteration number to EAR ID, where known.
|
641
630
|
"""
|
642
631
|
return self._EAR_IDs
|
643
632
|
|
644
633
|
@property
|
645
|
-
def EAR_IDs_flat(self):
|
634
|
+
def EAR_IDs_flat(self) -> Iterable[int]:
|
646
635
|
"""
|
647
636
|
The EAR IDs.
|
648
637
|
"""
|
649
|
-
return
|
638
|
+
return chain.from_iterable(self.EAR_IDs.values())
|
650
639
|
|
651
640
|
@property
|
652
|
-
def actions(self) ->
|
641
|
+
def actions(self) -> Mapping[int, ElementAction]:
|
653
642
|
"""
|
654
643
|
The actions of this iteration.
|
655
644
|
"""
|
656
645
|
if self._action_objs is None:
|
657
646
|
self._action_objs = {
|
658
|
-
act_idx: self.
|
659
|
-
element_iteration=self,
|
660
|
-
action_idx=act_idx,
|
661
|
-
runs=runs,
|
662
|
-
)
|
647
|
+
act_idx: self._app.ElementAction(self, act_idx, runs)
|
663
648
|
for act_idx, runs in (self._EARs or {}).items()
|
664
649
|
}
|
665
650
|
return self._action_objs
|
666
651
|
|
667
652
|
@property
|
668
|
-
def action_runs(self) ->
|
653
|
+
def action_runs(self) -> Sequence[ElementActionRun]:
|
669
654
|
"""
|
670
655
|
A list of element action runs, where only the final run is taken for each
|
671
656
|
element action.
|
672
657
|
"""
|
673
|
-
return [
|
658
|
+
return [act.runs[-1] for act in self.actions.values()]
|
674
659
|
|
675
660
|
@property
|
676
|
-
def inputs(self) ->
|
661
|
+
def inputs(self) -> ElementInputs:
|
677
662
|
"""
|
678
663
|
The inputs to this element.
|
679
664
|
"""
|
680
665
|
if not self._inputs:
|
681
|
-
self._inputs = self.
|
666
|
+
self._inputs = self._app.ElementInputs(element_iteration=self)
|
682
667
|
return self._inputs
|
683
668
|
|
684
669
|
@property
|
685
|
-
def outputs(self) ->
|
670
|
+
def outputs(self) -> ElementOutputs:
|
686
671
|
"""
|
687
672
|
The outputs from this element.
|
688
673
|
"""
|
689
674
|
if not self._outputs:
|
690
|
-
self._outputs = self.
|
675
|
+
self._outputs = self._app.ElementOutputs(element_iteration=self)
|
691
676
|
return self._outputs
|
692
677
|
|
693
678
|
@property
|
694
|
-
def input_files(self) ->
|
679
|
+
def input_files(self) -> ElementInputFiles:
|
695
680
|
"""
|
696
681
|
The input files to this element.
|
697
682
|
"""
|
698
683
|
if not self._input_files:
|
699
|
-
self._input_files = self.
|
684
|
+
self._input_files = self._app.ElementInputFiles(element_iteration=self)
|
700
685
|
return self._input_files
|
701
686
|
|
702
687
|
@property
|
703
|
-
def output_files(self) ->
|
688
|
+
def output_files(self) -> ElementOutputFiles:
|
704
689
|
"""
|
705
690
|
The output files from this element.
|
706
691
|
"""
|
707
692
|
if not self._output_files:
|
708
|
-
self._output_files = self.
|
693
|
+
self._output_files = self._app.ElementOutputFiles(element_iteration=self)
|
709
694
|
return self._output_files
|
710
695
|
|
711
|
-
def get_parameter_names(self, prefix: str) ->
|
696
|
+
def get_parameter_names(self, prefix: str) -> list[str]:
|
712
697
|
"""Get parameter types associated with a given prefix.
|
713
698
|
|
714
699
|
For example, with the prefix "inputs", this would return `['p1', 'p2']` for a task
|
@@ -728,19 +713,19 @@ class ElementIteration:
|
|
728
713
|
|
729
714
|
"""
|
730
715
|
single_label_lookup = self.task.template._get_single_label_lookup("inputs")
|
731
|
-
return
|
732
|
-
".".join(single_label_lookup.get(
|
733
|
-
for
|
734
|
-
if
|
735
|
-
|
716
|
+
return [
|
717
|
+
".".join(single_label_lookup.get(param_name, param_name).split(".")[1:])
|
718
|
+
for param_name in self.schema_parameters
|
719
|
+
if param_name.startswith(prefix)
|
720
|
+
]
|
736
721
|
|
737
722
|
@TimeIt.decorator
|
738
723
|
def get_data_idx(
|
739
724
|
self,
|
740
|
-
path: str = None,
|
741
|
-
action_idx: int = None,
|
725
|
+
path: str | None = None,
|
726
|
+
action_idx: int | None = None,
|
742
727
|
run_idx: int = -1,
|
743
|
-
) ->
|
728
|
+
) -> DataIndex:
|
744
729
|
"""
|
745
730
|
Get the data index.
|
746
731
|
|
@@ -766,8 +751,7 @@ class ElementIteration:
|
|
766
751
|
data_idx = {}
|
767
752
|
for action in self.actions.values():
|
768
753
|
for k, v in action.runs[run_idx].data_idx.items():
|
769
|
-
|
770
|
-
if (is_input and k not in data_idx) or not is_input:
|
754
|
+
if not k.startswith("inputs") or k not in data_idx:
|
771
755
|
data_idx[k] = v
|
772
756
|
|
773
757
|
else:
|
@@ -779,16 +763,85 @@ class ElementIteration:
|
|
779
763
|
|
780
764
|
return copy.deepcopy(data_idx)
|
781
765
|
|
766
|
+
def __get_parameter_sources(
|
767
|
+
self, data_idx: DataIndex, filter_type: str | None, use_task_index: bool
|
768
|
+
) -> Mapping[str, ParamSource | list[ParamSource]]:
|
769
|
+
# the value associated with `repeats.*` is the repeats index, not a parameter ID:
|
770
|
+
for k in tuple(data_idx):
|
771
|
+
if k.startswith("repeats."):
|
772
|
+
data_idx.pop(k)
|
773
|
+
|
774
|
+
out: Mapping[str, ParamSource | list[ParamSource]] = dict_values_process_flat(
|
775
|
+
data_idx,
|
776
|
+
callable=self.workflow.get_parameter_sources,
|
777
|
+
)
|
778
|
+
|
779
|
+
if use_task_index:
|
780
|
+
for k, v in out.items():
|
781
|
+
assert isinstance(v, dict)
|
782
|
+
if (insert_ID := v.pop("task_insert_ID", None)) is not None:
|
783
|
+
# Modify the contents of out
|
784
|
+
v["task_idx"] = self.workflow.tasks.get(insert_ID=insert_ID).index
|
785
|
+
|
786
|
+
if not filter_type:
|
787
|
+
return out
|
788
|
+
|
789
|
+
# Filter to just the elements that have the right type property
|
790
|
+
filtered = (
|
791
|
+
(k, self.__filter_param_source_by_type(v, filter_type))
|
792
|
+
for k, v in out.items()
|
793
|
+
)
|
794
|
+
return {k: v for k, v in filtered if v is not None}
|
795
|
+
|
796
|
+
@staticmethod
|
797
|
+
def __filter_param_source_by_type(
|
798
|
+
value: ParamSource | list[ParamSource], filter_type: str
|
799
|
+
) -> ParamSource | list[ParamSource] | None:
|
800
|
+
if isinstance(value, list):
|
801
|
+
if sources := [src for src in value if src["type"] == filter_type]:
|
802
|
+
return sources
|
803
|
+
else:
|
804
|
+
if value["type"] == filter_type:
|
805
|
+
return value
|
806
|
+
return None
|
807
|
+
|
808
|
+
@overload
|
809
|
+
def get_parameter_sources(
|
810
|
+
self,
|
811
|
+
path: str | None,
|
812
|
+
*,
|
813
|
+
action_idx: int | None,
|
814
|
+
run_idx: int = -1,
|
815
|
+
typ: str | None = None,
|
816
|
+
as_strings: Literal[True],
|
817
|
+
use_task_index: bool = False,
|
818
|
+
) -> Mapping[str, str]:
|
819
|
+
...
|
820
|
+
|
821
|
+
@overload
|
822
|
+
def get_parameter_sources(
|
823
|
+
self,
|
824
|
+
path: str | None = None,
|
825
|
+
*,
|
826
|
+
action_idx: int | None = None,
|
827
|
+
run_idx: int = -1,
|
828
|
+
typ: str | None = None,
|
829
|
+
as_strings: Literal[False] = False,
|
830
|
+
use_task_index: bool = False,
|
831
|
+
) -> Mapping[str, ParamSource | list[ParamSource]]:
|
832
|
+
...
|
833
|
+
|
782
834
|
@TimeIt.decorator
|
783
835
|
def get_parameter_sources(
|
784
836
|
self,
|
785
|
-
path: str = None,
|
786
|
-
|
837
|
+
path: str | None = None,
|
838
|
+
*,
|
839
|
+
action_idx: int | None = None,
|
787
840
|
run_idx: int = -1,
|
788
|
-
typ: str = None,
|
841
|
+
typ: str | None = None,
|
789
842
|
as_strings: bool = False,
|
790
843
|
use_task_index: bool = False,
|
791
|
-
) ->
|
844
|
+
) -> Mapping[str, str] | Mapping[str, ParamSource | list[ParamSource]]:
|
792
845
|
"""
|
793
846
|
Get the origin of parameters.
|
794
847
|
|
@@ -799,79 +852,40 @@ class ElementIteration:
|
|
799
852
|
ID.
|
800
853
|
"""
|
801
854
|
data_idx = self.get_data_idx(path, action_idx, run_idx)
|
802
|
-
|
803
|
-
|
804
|
-
|
805
|
-
|
806
|
-
|
807
|
-
|
808
|
-
|
809
|
-
|
810
|
-
|
811
|
-
|
812
|
-
|
813
|
-
|
814
|
-
if use_task_index:
|
815
|
-
task_key = "task_idx"
|
816
|
-
out_task_idx = {}
|
817
|
-
for k, v in out.items():
|
818
|
-
insert_ID = v.pop("task_insert_ID", None)
|
819
|
-
if insert_ID is not None:
|
820
|
-
v[task_key] = self.workflow.tasks.get(insert_ID=insert_ID).index
|
821
|
-
out_task_idx[k] = v
|
822
|
-
out = out_task_idx
|
823
|
-
|
824
|
-
if typ:
|
825
|
-
out_ = {}
|
826
|
-
for k, v in out.items():
|
827
|
-
is_multi = False
|
828
|
-
if isinstance(v, list):
|
829
|
-
is_multi = True
|
830
|
-
else:
|
831
|
-
v = [v]
|
832
|
-
|
833
|
-
sources_k = []
|
834
|
-
for src_i in v:
|
835
|
-
if src_i["type"] == typ:
|
836
|
-
if not is_multi:
|
837
|
-
sources_k = src_i
|
838
|
-
break
|
839
|
-
else:
|
840
|
-
sources_k.append(src_i)
|
841
|
-
|
842
|
-
if sources_k:
|
843
|
-
out_[k] = sources_k
|
844
|
-
|
845
|
-
out = out_
|
846
|
-
|
847
|
-
if as_strings:
|
848
|
-
# format as a dict with compact string values
|
849
|
-
self_task_val = (
|
850
|
-
self.task.index if task_key == "task_idx" else self.task.insert_ID
|
851
|
-
)
|
852
|
-
out_strs = {}
|
853
|
-
for k, v in out.items():
|
854
|
-
if v["type"] == "local_input":
|
855
|
-
if v[task_key] == self_task_val:
|
855
|
+
out = self.__get_parameter_sources(data_idx, typ or "", use_task_index)
|
856
|
+
if not as_strings:
|
857
|
+
return out
|
858
|
+
|
859
|
+
# format as a dict with compact string values
|
860
|
+
out_strs: dict[str, str] = {}
|
861
|
+
for k, v in out.items():
|
862
|
+
assert isinstance(v, dict)
|
863
|
+
if v["type"] == "local_input":
|
864
|
+
if use_task_index:
|
865
|
+
if v["task_idx"] == self.task.index:
|
856
866
|
out_strs[k] = "local"
|
857
867
|
else:
|
858
|
-
out_strs[k] = f"task.{v[
|
859
|
-
elif v["type"] == "default_input":
|
860
|
-
out_strs == "default"
|
868
|
+
out_strs[k] = f"task.{v['task_idx']}.input"
|
861
869
|
else:
|
862
|
-
|
863
|
-
|
864
|
-
|
865
|
-
|
866
|
-
|
867
|
-
|
868
|
-
|
870
|
+
if v["task_insert_ID"] == self.task.insert_ID:
|
871
|
+
out_strs[k] = "local"
|
872
|
+
else:
|
873
|
+
out_strs[k] = f"task.{v['task_insert_ID']}.input"
|
874
|
+
elif v["type"] == "default_input":
|
875
|
+
out_strs == "default"
|
876
|
+
else:
|
877
|
+
idx = v["task_idx"] if use_task_index else v["task_insert_ID"]
|
878
|
+
out_strs[k] = (
|
879
|
+
f"task.{idx}.element.{v['element_idx']}."
|
880
|
+
f"action.{v['action_idx']}.run.{v['run_idx']}"
|
881
|
+
)
|
882
|
+
return out_strs
|
869
883
|
|
870
884
|
@TimeIt.decorator
|
871
885
|
def get(
|
872
886
|
self,
|
873
|
-
path: str = None,
|
874
|
-
action_idx: int = None,
|
887
|
+
path: str | None = None,
|
888
|
+
action_idx: int | None = None,
|
875
889
|
run_idx: int = -1,
|
876
890
|
default: Any = None,
|
877
891
|
raise_on_missing: bool = False,
|
@@ -888,12 +902,11 @@ class ElementIteration:
|
|
888
902
|
if single_label_lookup:
|
889
903
|
# For any non-multiple `SchemaParameter`s of this task with non-empty labels,
|
890
904
|
# remove the trivial label:
|
891
|
-
for key in
|
905
|
+
for key in tuple(data_idx):
|
892
906
|
if (path or "").startswith(key):
|
893
907
|
# `path` uses labelled type, so no need to convert to non-labelled
|
894
908
|
continue
|
895
|
-
lookup_val
|
896
|
-
if lookup_val:
|
909
|
+
if lookup_val := single_label_lookup.get(key):
|
897
910
|
data_idx[lookup_val] = data_idx.pop(key)
|
898
911
|
|
899
912
|
return self.task._get_merged_parameter_data(
|
@@ -904,203 +917,286 @@ class ElementIteration:
|
|
904
917
|
default=default,
|
905
918
|
)
|
906
919
|
|
920
|
+
@overload
|
921
|
+
def get_EAR_dependencies(
|
922
|
+
self,
|
923
|
+
as_objects: Literal[False] = False,
|
924
|
+
) -> set[int]:
|
925
|
+
...
|
926
|
+
|
927
|
+
@overload
|
928
|
+
def get_EAR_dependencies(
|
929
|
+
self,
|
930
|
+
as_objects: Literal[True],
|
931
|
+
) -> list[ElementActionRun]:
|
932
|
+
...
|
933
|
+
|
907
934
|
@TimeIt.decorator
|
908
935
|
def get_EAR_dependencies(
|
909
936
|
self,
|
910
|
-
as_objects:
|
911
|
-
) ->
|
937
|
+
as_objects: bool = False,
|
938
|
+
) -> set[int] | list[ElementActionRun]:
|
912
939
|
"""Get EARs that this element iteration depends on (excluding EARs of this element
|
913
940
|
iteration)."""
|
914
941
|
# TODO: test this includes EARs of upstream iterations of this iteration's element
|
915
942
|
if self.action_runs:
|
916
|
-
|
917
|
-
|
918
|
-
|
919
|
-
|
920
|
-
|
921
|
-
|
922
|
-
|
923
|
-
)
|
943
|
+
EAR_IDs_set = frozenset(self.EAR_IDs_flat)
|
944
|
+
out = {
|
945
|
+
id_
|
946
|
+
for ear in self.action_runs
|
947
|
+
for id_ in ear.get_EAR_dependencies()
|
948
|
+
if id_ not in EAR_IDs_set
|
949
|
+
}
|
924
950
|
else:
|
925
951
|
# if an "input-only" task schema, then there will be no action runs, but the
|
926
952
|
# ElementIteration can still depend on other EARs if inputs are sourced from
|
927
953
|
# upstream tasks:
|
928
|
-
out =
|
929
|
-
|
930
|
-
|
931
|
-
|
932
|
-
|
933
|
-
EAR_ID_i = src_i["EAR_ID"]
|
934
|
-
out.append(EAR_ID_i)
|
935
|
-
out = sorted(set(out))
|
954
|
+
out = {
|
955
|
+
src_i["EAR_ID"]
|
956
|
+
for src in self.get_parameter_sources(typ="EAR_output").values()
|
957
|
+
for src_i in (src if isinstance(src, list) else [src])
|
958
|
+
}
|
936
959
|
|
937
960
|
if as_objects:
|
938
|
-
|
961
|
+
return self.workflow.get_EARs_from_IDs(sorted(out))
|
939
962
|
return out
|
940
963
|
|
964
|
+
@overload
|
965
|
+
def get_element_iteration_dependencies(
|
966
|
+
self, as_objects: Literal[True]
|
967
|
+
) -> list[ElementIteration]:
|
968
|
+
...
|
969
|
+
|
970
|
+
@overload
|
971
|
+
def get_element_iteration_dependencies(
|
972
|
+
self, as_objects: Literal[False] = False
|
973
|
+
) -> set[int]:
|
974
|
+
...
|
975
|
+
|
941
976
|
@TimeIt.decorator
|
942
977
|
def get_element_iteration_dependencies(
|
943
978
|
self, as_objects: bool = False
|
944
|
-
) ->
|
979
|
+
) -> set[int] | list[ElementIteration]:
|
945
980
|
"""Get element iterations that this element iteration depends on."""
|
946
981
|
# TODO: test this includes previous iterations of this iteration's element
|
947
|
-
EAR_IDs = self.get_EAR_dependencies(
|
948
|
-
out =
|
982
|
+
EAR_IDs = self.get_EAR_dependencies()
|
983
|
+
out = set(self.workflow.get_element_iteration_IDs_from_EAR_IDs(EAR_IDs))
|
949
984
|
if as_objects:
|
950
|
-
|
985
|
+
return self.workflow.get_element_iterations_from_IDs(sorted(out))
|
951
986
|
return out
|
952
987
|
|
988
|
+
@overload
|
989
|
+
def get_element_dependencies(
|
990
|
+
self,
|
991
|
+
as_objects: Literal[False] = False,
|
992
|
+
) -> set[int]:
|
993
|
+
...
|
994
|
+
|
995
|
+
@overload
|
996
|
+
def get_element_dependencies(
|
997
|
+
self,
|
998
|
+
as_objects: Literal[True],
|
999
|
+
) -> list[Element]:
|
1000
|
+
...
|
1001
|
+
|
953
1002
|
@TimeIt.decorator
|
954
1003
|
def get_element_dependencies(
|
955
1004
|
self,
|
956
|
-
as_objects:
|
957
|
-
) ->
|
1005
|
+
as_objects: bool = False,
|
1006
|
+
) -> set[int] | list[Element]:
|
958
1007
|
"""Get elements that this element iteration depends on."""
|
959
1008
|
# TODO: this will be used in viz.
|
960
|
-
EAR_IDs = self.get_EAR_dependencies(
|
961
|
-
out =
|
1009
|
+
EAR_IDs = self.get_EAR_dependencies()
|
1010
|
+
out = set(self.workflow.get_element_IDs_from_EAR_IDs(EAR_IDs))
|
962
1011
|
if as_objects:
|
963
|
-
|
1012
|
+
return self.workflow.get_elements_from_IDs(sorted(out))
|
964
1013
|
return out
|
965
1014
|
|
966
|
-
def get_input_dependencies(self) ->
|
1015
|
+
def get_input_dependencies(self) -> Mapping[str, ParamSource]:
|
967
1016
|
"""Get locally defined inputs/sequences/defaults from other tasks that this
|
968
1017
|
element iteration depends on."""
|
969
|
-
out = {}
|
1018
|
+
out: dict[str, ParamSource] = {}
|
970
1019
|
for k, v in self.get_parameter_sources().items():
|
971
|
-
if
|
972
|
-
v = [v]
|
973
|
-
for v_i in v:
|
1020
|
+
for v_i in v if isinstance(v, list) else [v]:
|
974
1021
|
if (
|
975
1022
|
v_i["type"] in ["local_input", "default_input"]
|
976
1023
|
and v_i["task_insert_ID"] != self.task.insert_ID
|
977
1024
|
):
|
978
1025
|
out[k] = v_i
|
979
|
-
|
980
1026
|
return out
|
981
1027
|
|
1028
|
+
@overload
|
1029
|
+
def get_task_dependencies(self, as_objects: Literal[False] = False) -> set[int]:
|
1030
|
+
...
|
1031
|
+
|
1032
|
+
@overload
|
1033
|
+
def get_task_dependencies(self, as_objects: Literal[True]) -> list[WorkflowTask]:
|
1034
|
+
...
|
1035
|
+
|
982
1036
|
def get_task_dependencies(
|
983
1037
|
self, as_objects: bool = False
|
984
|
-
) ->
|
1038
|
+
) -> set[int] | list[WorkflowTask]:
|
985
1039
|
"""Get tasks (insert ID or WorkflowTask objects) that this element iteration
|
986
1040
|
depends on.
|
987
1041
|
|
988
1042
|
Dependencies may come from either elements from upstream tasks, or from locally
|
989
1043
|
defined inputs/sequences/defaults from upstream tasks."""
|
990
1044
|
|
991
|
-
out =
|
992
|
-
self.get_element_dependencies(
|
1045
|
+
out = set(
|
1046
|
+
self.workflow.get_task_IDs_from_element_IDs(self.get_element_dependencies())
|
993
1047
|
)
|
994
|
-
for
|
995
|
-
out.
|
996
|
-
|
997
|
-
out = sorted(set(out))
|
1048
|
+
for p_src in self.get_input_dependencies().values():
|
1049
|
+
out.add(p_src["task_insert_ID"])
|
998
1050
|
|
999
1051
|
if as_objects:
|
1000
|
-
|
1001
|
-
|
1052
|
+
return [self.workflow.tasks.get(insert_ID=id_) for id_ in sorted(out)]
|
1002
1053
|
return out
|
1003
1054
|
|
1055
|
+
@property
|
1056
|
+
def __elements(self) -> Iterator[Element]:
|
1057
|
+
"""
|
1058
|
+
This iteration's element and its downstream elements.
|
1059
|
+
"""
|
1060
|
+
for task in self.workflow.tasks[self.task.index :]:
|
1061
|
+
yield from task.elements[:]
|
1062
|
+
|
1063
|
+
@property
|
1064
|
+
def __iterations(self) -> Iterator[ElementIteration]:
|
1065
|
+
"""
|
1066
|
+
This iteration and its downstream iterations.
|
1067
|
+
"""
|
1068
|
+
for elem in self.__elements:
|
1069
|
+
yield from elem.iterations
|
1070
|
+
|
1071
|
+
@overload
|
1072
|
+
def get_dependent_EARs(self, as_objects: Literal[False] = False) -> set[int]:
|
1073
|
+
...
|
1074
|
+
|
1075
|
+
@overload
|
1076
|
+
def get_dependent_EARs(self, as_objects: Literal[True]) -> list[ElementActionRun]:
|
1077
|
+
...
|
1078
|
+
|
1004
1079
|
@TimeIt.decorator
|
1005
1080
|
def get_dependent_EARs(
|
1006
1081
|
self, as_objects: bool = False
|
1007
|
-
) ->
|
1082
|
+
) -> set[int] | list[ElementActionRun]:
|
1008
1083
|
"""Get EARs of downstream iterations and tasks that depend on this element
|
1009
1084
|
iteration."""
|
1010
1085
|
# TODO: test this includes EARs of downstream iterations of this iteration's element
|
1011
|
-
deps =
|
1012
|
-
for
|
1013
|
-
|
1014
|
-
|
1015
|
-
|
1016
|
-
|
1017
|
-
|
1018
|
-
|
1019
|
-
for dep_EAR_i in run.get_EAR_dependencies(as_objects=True):
|
1020
|
-
# does dep_EAR_i belong to self?
|
1021
|
-
if dep_EAR_i.id_ in self.EAR_IDs_flat and run.id_ not in deps:
|
1022
|
-
deps.append(run.id_)
|
1023
|
-
deps = sorted(deps)
|
1086
|
+
deps: set[int] = set()
|
1087
|
+
for iter_ in self.__iterations:
|
1088
|
+
if iter_.id_ == self.id_:
|
1089
|
+
# don't include EARs of this iteration
|
1090
|
+
continue
|
1091
|
+
for run in iter_.action_runs:
|
1092
|
+
if run.get_EAR_dependencies().intersection(self.EAR_IDs_flat):
|
1093
|
+
deps.add(run.id_)
|
1024
1094
|
if as_objects:
|
1025
|
-
|
1026
|
-
|
1095
|
+
return self.workflow.get_EARs_from_IDs(sorted(deps))
|
1027
1096
|
return deps
|
1028
1097
|
|
1098
|
+
@overload
|
1099
|
+
def get_dependent_element_iterations(
|
1100
|
+
self, as_objects: Literal[True]
|
1101
|
+
) -> list[ElementIteration]:
|
1102
|
+
...
|
1103
|
+
|
1104
|
+
@overload
|
1105
|
+
def get_dependent_element_iterations(
|
1106
|
+
self, as_objects: Literal[False] = False
|
1107
|
+
) -> set[int]:
|
1108
|
+
...
|
1109
|
+
|
1029
1110
|
@TimeIt.decorator
|
1030
1111
|
def get_dependent_element_iterations(
|
1031
1112
|
self, as_objects: bool = False
|
1032
|
-
) ->
|
1113
|
+
) -> set[int] | list[ElementIteration]:
|
1033
1114
|
"""Get elements iterations of downstream iterations and tasks that depend on this
|
1034
1115
|
element iteration."""
|
1035
1116
|
# TODO: test this includes downstream iterations of this iteration's element?
|
1036
|
-
deps =
|
1037
|
-
for
|
1038
|
-
|
1039
|
-
|
1040
|
-
|
1041
|
-
|
1042
|
-
for dep_iter_i in iter_i.get_element_iteration_dependencies(
|
1043
|
-
as_objects=True
|
1044
|
-
):
|
1045
|
-
if dep_iter_i.id_ == self.id_ and iter_i.id_ not in deps:
|
1046
|
-
deps.append(iter_i.id_)
|
1047
|
-
deps = sorted(deps)
|
1117
|
+
deps: set[int] = set()
|
1118
|
+
for iter_i in self.__iterations:
|
1119
|
+
if iter_i.id_ == self.id_:
|
1120
|
+
continue
|
1121
|
+
if self.id_ in iter_i.get_element_iteration_dependencies():
|
1122
|
+
deps.add(iter_i.id_)
|
1048
1123
|
if as_objects:
|
1049
|
-
|
1050
|
-
|
1124
|
+
return self.workflow.get_element_iterations_from_IDs(sorted(deps))
|
1051
1125
|
return deps
|
1052
1126
|
|
1127
|
+
@overload
|
1128
|
+
def get_dependent_elements(
|
1129
|
+
self,
|
1130
|
+
as_objects: Literal[True],
|
1131
|
+
) -> list[Element]:
|
1132
|
+
...
|
1133
|
+
|
1134
|
+
@overload
|
1135
|
+
def get_dependent_elements(
|
1136
|
+
self,
|
1137
|
+
as_objects: Literal[False] = False,
|
1138
|
+
) -> set[int]:
|
1139
|
+
...
|
1140
|
+
|
1053
1141
|
@TimeIt.decorator
|
1054
1142
|
def get_dependent_elements(
|
1055
1143
|
self,
|
1056
1144
|
as_objects: bool = False,
|
1057
|
-
) ->
|
1145
|
+
) -> set[int] | list[Element]:
|
1058
1146
|
"""Get elements of downstream tasks that depend on this element iteration."""
|
1059
|
-
deps =
|
1147
|
+
deps: set[int] = set()
|
1060
1148
|
for task in self.task.downstream_tasks:
|
1061
1149
|
for element in task.elements[:]:
|
1062
|
-
|
1063
|
-
|
1064
|
-
|
1065
|
-
|
1066
|
-
|
1067
|
-
deps.append(element.id_)
|
1068
|
-
|
1069
|
-
deps = sorted(deps)
|
1070
|
-
if as_objects:
|
1071
|
-
deps = self.workflow.get_elements_from_IDs(deps)
|
1150
|
+
if any(
|
1151
|
+
self.id_ in iter_i.get_element_iteration_dependencies()
|
1152
|
+
for iter_i in element.iterations
|
1153
|
+
):
|
1154
|
+
deps.add(element.id_)
|
1072
1155
|
|
1156
|
+
if as_objects:
|
1157
|
+
return self.workflow.get_elements_from_IDs(sorted(deps))
|
1073
1158
|
return deps
|
1074
1159
|
|
1160
|
+
@overload
|
1161
|
+
def get_dependent_tasks(
|
1162
|
+
self,
|
1163
|
+
as_objects: Literal[True],
|
1164
|
+
) -> list[WorkflowTask]:
|
1165
|
+
...
|
1166
|
+
|
1167
|
+
@overload
|
1168
|
+
def get_dependent_tasks(
|
1169
|
+
self,
|
1170
|
+
as_objects: Literal[False] = False,
|
1171
|
+
) -> set[int]:
|
1172
|
+
...
|
1173
|
+
|
1075
1174
|
def get_dependent_tasks(
|
1076
1175
|
self,
|
1077
1176
|
as_objects: bool = False,
|
1078
|
-
) ->
|
1177
|
+
) -> set[int] | list[WorkflowTask]:
|
1079
1178
|
"""Get downstream tasks that depend on this element iteration."""
|
1080
|
-
deps =
|
1179
|
+
deps: set[int] = set()
|
1081
1180
|
for task in self.task.downstream_tasks:
|
1082
|
-
|
1083
|
-
|
1084
|
-
|
1085
|
-
|
1086
|
-
|
1087
|
-
|
1088
|
-
deps.append(task.insert_ID)
|
1089
|
-
deps = sorted(deps)
|
1181
|
+
if any(
|
1182
|
+
self.id_ in iter_i.get_element_iteration_dependencies()
|
1183
|
+
for element in task.elements[:]
|
1184
|
+
for iter_i in element.iterations
|
1185
|
+
):
|
1186
|
+
deps.add(task.insert_ID)
|
1090
1187
|
if as_objects:
|
1091
|
-
|
1092
|
-
|
1188
|
+
return [self.workflow.tasks.get(insert_ID=id_) for id_ in sorted(deps)]
|
1093
1189
|
return deps
|
1094
1190
|
|
1095
|
-
def get_template_resources(self) ->
|
1191
|
+
def get_template_resources(self) -> Mapping[str, Any]:
|
1096
1192
|
"""Get template-level resources."""
|
1097
|
-
|
1098
|
-
for res_i in
|
1099
|
-
out[res_i.scope.to_string()] = res_i._get_value()
|
1100
|
-
return out
|
1193
|
+
res = self.workflow.template._resources
|
1194
|
+
return {res_i.normalised_resources_path: res_i._get_value() for res_i in res}
|
1101
1195
|
|
1102
1196
|
@TimeIt.decorator
|
1103
|
-
def get_resources(
|
1197
|
+
def get_resources(
|
1198
|
+
self, action: Action, set_defaults: bool = False
|
1199
|
+
) -> Mapping[str, Any]:
|
1104
1200
|
"""Resolve specific resources for the specified action of this iteration,
|
1105
1201
|
considering all applicable scopes.
|
1106
1202
|
|
@@ -1121,60 +1217,58 @@ class ElementIteration:
|
|
1121
1217
|
# question is perhaps "what would the resources be if this action were to become
|
1122
1218
|
# an EAR?" which would then allow us to test a resources-based action rule.
|
1123
1219
|
|
1124
|
-
|
1220
|
+
# FIXME: Use a TypedDict?
|
1221
|
+
resource_specs: dict[str, dict[str, dict[str, Any]]] = copy.deepcopy(
|
1222
|
+
self.get("resources")
|
1223
|
+
)
|
1125
1224
|
|
1126
1225
|
env_spec = action.get_environment_spec()
|
1127
|
-
env_name = env_spec["name"]
|
1226
|
+
env_name: str = env_spec["name"]
|
1128
1227
|
|
1129
1228
|
# set default env specifiers, if none set:
|
1130
|
-
if "
|
1131
|
-
|
1132
|
-
if "environments" not in resource_specs["any"]:
|
1133
|
-
resource_specs["any"]["environments"] = {env_name: copy.deepcopy(env_spec)}
|
1229
|
+
if "environments" not in (any_specs := resource_specs.setdefault("any", {})):
|
1230
|
+
any_specs["environments"] = {env_name: copy.deepcopy(env_spec)}
|
1134
1231
|
|
1135
|
-
for
|
1232
|
+
for dat in resource_specs.values():
|
1136
1233
|
if "environments" in dat:
|
1137
1234
|
# keep only relevant user-provided environment specifiers:
|
1138
|
-
|
1235
|
+
dat["environments"] = {
|
1139
1236
|
k: v for k, v in dat["environments"].items() if k == env_name
|
1140
1237
|
}
|
1141
1238
|
# merge user-provided specifiers into action specifiers:
|
1142
|
-
|
1143
|
-
|
1144
|
-
|
1145
|
-
}
|
1239
|
+
dat["environments"].setdefault(env_name, {}).update(
|
1240
|
+
copy.deepcopy(env_spec)
|
1241
|
+
)
|
1146
1242
|
|
1147
|
-
resources = {}
|
1148
|
-
for scope in action.
|
1149
|
-
# loop
|
1150
|
-
|
1151
|
-
|
1152
|
-
resources.update({k: v for k, v in scope_res.items() if v is not None})
|
1243
|
+
resources: dict[str, Any] = {}
|
1244
|
+
for scope in action._get_possible_scopes_reversed():
|
1245
|
+
# loop from least-specific to most so higher-specificity scopes take precedence:
|
1246
|
+
if scope_res := resource_specs.get(scope.to_string()):
|
1247
|
+
resources.update((k, v) for k, v in scope_res.items() if v is not None)
|
1153
1248
|
|
1154
1249
|
if set_defaults:
|
1155
1250
|
# used in e.g. `Rule.test` if testing resource rules on element iterations:
|
1156
|
-
|
1157
|
-
|
1158
|
-
|
1159
|
-
resources["shell"] = self.app.ElementResources.get_default_shell()
|
1251
|
+
ER = self._app.ElementResources
|
1252
|
+
resources.setdefault("os_name", ER.get_default_os_name())
|
1253
|
+
resources.setdefault("shell", ER.get_default_shell())
|
1160
1254
|
if "scheduler" not in resources:
|
1161
|
-
resources["scheduler"] =
|
1255
|
+
resources["scheduler"] = ER.get_default_scheduler(
|
1162
1256
|
resources["os_name"], resources["shell"]
|
1163
1257
|
)
|
1164
1258
|
|
1165
1259
|
return resources
|
1166
1260
|
|
1167
1261
|
def get_resources_obj(
|
1168
|
-
self, action:
|
1169
|
-
) ->
|
1262
|
+
self, action: Action, set_defaults: bool = False
|
1263
|
+
) -> ElementResources:
|
1170
1264
|
"""
|
1171
1265
|
Get the resources for an action (see :py:meth:`get_resources`)
|
1172
1266
|
as a searchable model.
|
1173
1267
|
"""
|
1174
|
-
return self.
|
1268
|
+
return self._app.ElementResources(**self.get_resources(action, set_defaults))
|
1175
1269
|
|
1176
1270
|
|
1177
|
-
class Element:
|
1271
|
+
class Element(AppAware):
|
1178
1272
|
"""
|
1179
1273
|
A basic component of a workflow. Elements are enactments of tasks.
|
1180
1274
|
|
@@ -1200,8 +1294,6 @@ class Element:
         Data for creating iteration objects.
     """

-    _app_attr = "app"
-
     # TODO: use slots
     # TODO:
     # - add `iterations` property which returns `ElementIteration`
@@ -1211,13 +1303,13 @@
         self,
         id_: int,
         is_pending: bool,
-        task:
+        task: WorkflowTask,
         index: int,
         es_idx: int,
-        seq_idx:
-        src_idx:
-        iteration_IDs:
-        iterations:
+        seq_idx: Mapping[str, int],
+        src_idx: Mapping[str, int],
+        iteration_IDs: list[int],
+        iterations: list[dict[str, Any]],
     ) -> None:
         self._id = id_
         self._is_pending = is_pending
@@ -1231,7 +1323,7 @@ class Element:
         self._iterations = iterations

         # assigned on first access:
-        self._iteration_objs = None
+        self._iteration_objs: list[ElementIteration] | None = None

     def __repr__(self):
         return (
@@ -1255,7 +1347,7 @@ class Element:
         return self._is_pending

     @property
-    def task(self) ->
+    def task(self) -> WorkflowTask:
         """
         The task this is part of the enactment of.
         """
@@ -1278,45 +1370,45 @@ class Element:
         return self._es_idx

     @property
-    def element_set(self):
+    def element_set(self) -> ElementSet:
         """
         The element set containing this element.
         """
         return self.task.template.element_sets[self.element_set_idx]

     @property
-    def sequence_idx(self) ->
+    def sequence_idx(self) -> Mapping[str, int]:
         """
         The sequence index IDs.
         """
         return self._seq_idx

     @property
-    def input_source_idx(self) ->
+    def input_source_idx(self) -> Mapping[str, int]:
         """
         The input source indices.
         """
         return self._src_idx

     @property
-    def input_sources(self) ->
+    def input_sources(self) -> Mapping[str, InputSource]:
         """
         The sources of the inputs to this element.
         """
         return {
-            k: self.element_set.input_sources[k.
+            k: self.element_set.input_sources[k.removeprefix("inputs.")][v]
             for k, v in self.input_source_idx.items()
         }

     @property
-    def workflow(self) ->
+    def workflow(self) -> Workflow:
         """
         The workflow containing this element.
         """
         return self.task.workflow

     @property
-    def iteration_IDs(self) ->
+    def iteration_IDs(self) -> Sequence[int]:
         """
         The IDs of the iterations of this element.
         """
@@ -1324,14 +1416,14 @@ class Element:

     @property
     @TimeIt.decorator
-    def iterations(self) ->
+    def iterations(self) -> Sequence[ElementIteration]:
         """
         The iterations of this element.
         """
         # TODO: fix this
         if self._iteration_objs is None:
             self._iteration_objs = [
-                self.
+                self._app.ElementIteration(
                     element=self,
                     index=idx,
                     **{k: v for k, v in iter_i.items() if k != "element_ID"},
@@ -1341,95 +1433,95 @@ class Element:
         return self._iteration_objs

     @property
-    def dir_name(self):
+    def dir_name(self) -> str:
         """
         The name of the directory for containing temporary files for this element.
         """
         return f"e_{self.index}"

     @property
-    def latest_iteration(self):
+    def latest_iteration(self) -> ElementIteration:
         """
         The most recent iteration of this element.
         """
         return self.iterations[-1]

     @property
-    def inputs(self) ->
+    def inputs(self) -> ElementInputs:
         """
         The inputs to this element (or its most recent iteration).
         """
         return self.latest_iteration.inputs

     @property
-    def outputs(self) ->
+    def outputs(self) -> ElementOutputs:
         """
         The outputs from this element (or its most recent iteration).
         """
         return self.latest_iteration.outputs

     @property
-    def input_files(self) ->
+    def input_files(self) -> ElementInputFiles:
         """
         The input files to this element (or its most recent iteration).
         """
         return self.latest_iteration.input_files

     @property
-    def output_files(self) ->
+    def output_files(self) -> ElementOutputFiles:
         """
         The output files from this element (or its most recent iteration).
         """
         return self.latest_iteration.output_files

     @property
-    def schema_parameters(self) ->
+    def schema_parameters(self) -> Sequence[str]:
         """
         The schema-defined parameters to this element (or its most recent iteration).
         """
         return self.latest_iteration.schema_parameters

     @property
-    def actions(self) ->
+    def actions(self) -> Mapping[int, ElementAction]:
         """
         The actions of this element (or its most recent iteration).
         """
         return self.latest_iteration.actions

     @property
-    def action_runs(self) ->
+    def action_runs(self) -> Sequence[ElementActionRun]:
         """
         A list of element action runs from the latest iteration, where only the
         final run is taken for each element action.
         """
         return self.latest_iteration.action_runs

-    def init_loop_index(self, loop_name: str):
+    def init_loop_index(self, loop_name: str) -> None:
         """
         Initialise the loop index if necessary.
         """
         pass

-    def to_element_set_data(self):
+    def to_element_set_data(self) -> tuple[list[InputValue], list[ResourceSpec]]:
         """Generate lists of workflow-bound InputValues and ResourceList."""
-        inputs = []
-        resources = []
+        inputs: list[InputValue] = []
+        resources: list[ResourceSpec] = []
         for k, v in self.get_data_idx().items():
-
+            kind, parameter_or_scope, *path = k.split(".")

-            if
-                inp_val = self.
-                    parameter=
-                    path=
+            if kind == "inputs":
+                inp_val = self._app.InputValue(
+                    parameter=parameter_or_scope,
+                    path=cast("str", path) or None, # FIXME: suspicious cast!
                     value=None,
                 )
                 inp_val._value_group_idx = v
                 inp_val._workflow = self.workflow
                 inputs.append(inp_val)

-            elif
-                scope = self.
-                res = self.
+            elif kind == "resources":
+                scope = self._app.ActionScope.from_json_like(parameter_or_scope)
+                res = self._app.ResourceSpec(scope=scope)
                 res._value_group_idx = v
                 res._workflow = self.workflow
                 resources.append(res)
@@ -1440,20 +1532,24 @@ class Element:
         """
         Get the value of a sequence that applies.
         """
-
-        if not seq:
+
+        if not (seq := self.element_set.get_sequence_from_path(sequence_path)):
             raise ValueError(
                 f"No sequence with path {sequence_path!r} in this element's originating "
                 f"element set."
             )
-
+        if (values := seq.values) is None:
+            raise ValueError(
+                f"Sequence with path {sequence_path!r} has no defined values."
+            )
+        return values[self.sequence_idx[sequence_path]]

     def get_data_idx(
         self,
-        path: str = None,
-        action_idx: int = None,
+        path: str | None = None,
+        action_idx: int | None = None,
         run_idx: int = -1,
-    ) ->
+    ) -> DataIndex:
         """Get the data index of the most recent element iteration.

         Parameters
@@ -1467,15 +1563,42 @@ class Element:
             run_idx=run_idx,
         )

+    @overload
     def get_parameter_sources(
         self,
-        path: str = None,
-
+        path: str | None = None,
+        *,
+        action_idx: int | None = None,
         run_idx: int = -1,
-        typ: str = None,
+        typ: str | None = None,
+        as_strings: Literal[False] = False,
+        use_task_index: bool = False,
+    ) -> Mapping[str, ParamSource | list[ParamSource]]:
+        ...
+
+    @overload
+    def get_parameter_sources(
+        self,
+        path: str | None = None,
+        *,
+        action_idx: int | None = None,
+        run_idx: int = -1,
+        typ: str | None = None,
+        as_strings: Literal[True],
+        use_task_index: bool = False,
+    ) -> Mapping[str, str]:
+        ...
+
+    def get_parameter_sources(
+        self,
+        path: str | None = None,
+        *,
+        action_idx: int | None = None,
+        run_idx: int = -1,
+        typ: str | None = None,
         as_strings: bool = False,
         use_task_index: bool = False,
-    ) ->
+    ) -> Mapping[str, str] | Mapping[str, ParamSource | list[ParamSource]]:
         """ "Get the parameter sources of the most recent element iteration.

         Parameters
@@ -1484,19 +1607,27 @@ class Element:
             If True, use the task index within the workflow, rather than the task insert
             ID.
         """
+        if as_strings:
+            return self.latest_iteration.get_parameter_sources(
+                path=path,
+                action_idx=action_idx,
+                run_idx=run_idx,
+                typ=typ,
+                as_strings=True,
+                use_task_index=use_task_index,
+            )
         return self.latest_iteration.get_parameter_sources(
             path=path,
             action_idx=action_idx,
             run_idx=run_idx,
             typ=typ,
-            as_strings=as_strings,
             use_task_index=use_task_index,
         )

     def get(
         self,
-        path: str = None,
-        action_idx: int = None,
+        path: str | None = None,
+        action_idx: int | None = None,
         run_idx: int = -1,
         default: Any = None,
         raise_on_missing: bool = False,
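The `@overload` declarations added for `get_parameter_sources` (and for the dependency getters in the next hunk) follow a standard typing pattern: when a boolean flag changes the return type, two overloads typed with `Literal[True]`/`Literal[False]` let a static type checker infer the precise return type at each call site, while the single runtime implementation simply branches on the flag. A minimal, self-contained sketch of the pattern; the `Query.fetch` names are illustrative and not part of hpcflow:

    from __future__ import annotations

    from typing import Literal, overload


    class Query:
        @overload
        def fetch(self, as_strings: Literal[True]) -> list[str]: ...

        @overload
        def fetch(self, as_strings: Literal[False] = False) -> list[int]: ...

        def fetch(self, as_strings: bool = False) -> list[str] | list[int]:
            # only the overload signatures are visible to the type checker;
            # the implementation dispatches on the flag at runtime:
            ids = [3, 1, 2]
            if as_strings:
                return [str(i) for i in ids]
            return ids


    q = Query()
    numbers = q.fetch()                # inferred as list[int]
    labels = q.fetch(as_strings=True)  # inferred as list[str]

Branching with literal True/False arguments (rather than forwarding the plain bool) also lets the checker resolve the overloads of any nested call, which is presumably why the diff repeats the delegation under `if as_objects:`.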
@@ -1512,71 +1643,159 @@ class Element:
             raise_on_unset=raise_on_unset,
         )

+    @overload
+    def get_EAR_dependencies(self, as_objects: Literal[True]) -> list[ElementActionRun]:
+        ...
+
+    @overload
+    def get_EAR_dependencies(self, as_objects: Literal[False] = False) -> set[int]:
+        ...
+
     def get_EAR_dependencies(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementActionRun]:
         """Get EARs that the most recent iteration of this element depends on."""
-
+        if as_objects:
+            return self.latest_iteration.get_EAR_dependencies(as_objects=True)
+        return self.latest_iteration.get_EAR_dependencies()
+
+    @overload
+    def get_element_iteration_dependencies(
+        self, as_objects: Literal[True]
+    ) -> list[ElementIteration]:
+        ...
+
+    @overload
+    def get_element_iteration_dependencies(
+        self, as_objects: Literal[False] = False
+    ) -> set[int]:
+        ...

     def get_element_iteration_dependencies(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementIteration]:
         """Get element iterations that the most recent iteration of this element depends
         on."""
-
-
-
+        if as_objects:
+            return self.latest_iteration.get_element_iteration_dependencies(
+                as_objects=True
+            )
+        return self.latest_iteration.get_element_iteration_dependencies()
+
+    @overload
+    def get_element_dependencies(self, as_objects: Literal[True]) -> list[Element]:
+        ...
+
+    @overload
+    def get_element_dependencies(self, as_objects: Literal[False] = False) -> set[int]:
+        ...

     def get_element_dependencies(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[Element]:
         """Get elements that the most recent iteration of this element depends on."""
-
+        if as_objects:
+            return self.latest_iteration.get_element_dependencies(as_objects=True)
+        return self.latest_iteration.get_element_dependencies()

-    def get_input_dependencies(self) ->
+    def get_input_dependencies(self) -> Mapping[str, ParamSource]:
         """Get locally defined inputs/sequences/defaults from other tasks that this
         the most recent iteration of this element depends on."""
         return self.latest_iteration.get_input_dependencies()

+    @overload
+    def get_task_dependencies(self, as_objects: Literal[True]) -> list[WorkflowTask]:
+        ...
+
+    @overload
+    def get_task_dependencies(self, as_objects: Literal[False] = False) -> set[int]:
+        ...
+
     def get_task_dependencies(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[WorkflowTask]:
         """Get tasks (insert ID or WorkflowTask objects) that the most recent iteration of
         this element depends on.

         Dependencies may come from either elements from upstream tasks, or from locally
         defined inputs/sequences/defaults from upstream tasks."""
-
+        if as_objects:
+            return self.latest_iteration.get_task_dependencies(as_objects=True)
+        return self.latest_iteration.get_task_dependencies()
+
+    @overload
+    def get_dependent_EARs(self, as_objects: Literal[True]) -> list[ElementActionRun]:
+        ...
+
+    @overload
+    def get_dependent_EARs(self, as_objects: Literal[False] = False) -> set[int]:
+        ...

     def get_dependent_EARs(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementActionRun]:
         """Get EARs that depend on the most recent iteration of this element."""
-
+        if as_objects:
+            return self.latest_iteration.get_dependent_EARs(as_objects=True)
+        return self.latest_iteration.get_dependent_EARs()
+
+    @overload
+    def get_dependent_element_iterations(
+        self, as_objects: Literal[True]
+    ) -> list[ElementIteration]:
+        ...
+
+    @overload
+    def get_dependent_element_iterations(
+        self, as_objects: Literal[False] = False
+    ) -> set[int]:
+        ...

     def get_dependent_element_iterations(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementIteration]:
         """Get element iterations that depend on the most recent iteration of this
         element."""
-
-            as_objects=
-        )
+        if as_objects:
+            return self.latest_iteration.get_dependent_element_iterations(as_objects=True)
+        return self.latest_iteration.get_dependent_element_iterations()
+
+    @overload
+    def get_dependent_elements(self, as_objects: Literal[True]) -> list[Element]:
+        ...
+
+    @overload
+    def get_dependent_elements(self, as_objects: Literal[False] = False) -> set[int]:
+        ...

     def get_dependent_elements(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[Element]:
         """Get elements that depend on the most recent iteration of this element."""
-
+        if as_objects:
+            return self.latest_iteration.get_dependent_elements(as_objects=True)
+        return self.latest_iteration.get_dependent_elements()
+
+    @overload
+    def get_dependent_tasks(self, as_objects: Literal[True]) -> list[WorkflowTask]:
+        ...
+
+    @overload
+    def get_dependent_tasks(self, as_objects: Literal[False] = False) -> set[int]:
+        ...

     def get_dependent_tasks(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[WorkflowTask]:
         """Get tasks that depend on the most recent iteration of this element."""
-
+        if as_objects:
+            return self.latest_iteration.get_dependent_tasks(as_objects=True)
+        return self.latest_iteration.get_dependent_tasks()

     @TimeIt.decorator
-    def get_dependent_elements_recursively(
+    def get_dependent_elements_recursively(
+        self, task_insert_ID: int | None = None
+    ) -> list[Element]:
         """Get downstream elements that depend on this element, including recursive
         dependencies.

@@ -1586,28 +1805,26 @@ class Element:

         Parameters
         ----------
-        task_insert_ID
+        task_insert_ID: int
            If specified, only return elements from this task.

         """

-        def get_deps(element):
-            deps = element.iterations[0].get_dependent_elements(
+        def get_deps(element: Element) -> set[int]:
+            deps = element.iterations[0].get_dependent_elements()
             deps_objs = self.workflow.get_elements_from_IDs(deps)
-            return
-                [dep_j for deps_i in deps_objs for dep_j in get_deps(deps_i)]
-            )
+            return deps.union(dep_j for deps_i in deps_objs for dep_j in get_deps(deps_i))

         all_deps = get_deps(self)
-
         if task_insert_ID is not None:
-
-
-
+            all_deps.intersection_update(
+                self.workflow.tasks.get(insert_ID=task_insert_ID).element_IDs
+            )
         return self.workflow.get_elements_from_IDs(sorted(all_deps))


-@dataclass
+@dataclass(repr=False, eq=False)
+@hydrate
 class ElementParameter:
     """
     A parameter to an :py:class:`.Element`.
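On the `get_dependent_elements_recursively` change in the hunk above: `get_deps` now returns a set of element IDs, taking the union of the direct dependents with the dependents of each of those, recursively; the optional `task_insert_ID` filter is then applied with `intersection_update`. A small standalone sketch of the same set-based recursion over a toy dependency mapping (the real method walks workflow objects; this sketch assumes an acyclic dependency graph):

    # element ID -> IDs of elements that directly depend on it (illustrative data)
    dependents: dict[int, set[int]] = {0: {1, 2}, 1: {3}, 2: set(), 3: set()}


    def get_deps(elem_id: int) -> set[int]:
        direct = dependents[elem_id]
        # union of direct dependents with their dependents, recursively:
        return direct.union(d for dep in direct for d in get_deps(dep))


    all_deps = get_deps(0)
    print(sorted(all_deps))  # [1, 2, 3]

    # restrict to one task's elements, cf. `intersection_update` in the diff:
    all_deps.intersection_update({2, 3})
    print(sorted(all_deps))  # [2, 3]
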
@@ -1624,19 +1841,22 @@ class ElementParameter:
         The element that this is a parameter of.
     """

-
+    # Intended to be subclassed, so public
+    #: Application context.
+    app: ClassVar[BaseApp]
+    _app_attr: ClassVar[str] = "app"

     #: The task that this is part of.
-    task:
+    task: WorkflowTask
     #: The path to this parameter.
     path: str
     #: The entity that owns this parameter.
-    parent:
+    parent: Element | ElementAction | ElementActionRun | ElementIteration
     #: The element that this is a parameter of.
-    element: Element
+    element: Element | ElementIteration

     @property
-    def data_idx(self):
+    def data_idx(self) -> DataIndex:
         """
         The data indices associated with this parameter.
         """
@@ -1655,20 +1875,20 @@ class ElementParameter:
     def __eq__(self, __o: object) -> bool:
         if not isinstance(__o, self.__class__):
             return False
-
-        return True
+        return self.task == __o.task and self.path == __o.path

     @property
-    def data_idx_is_set(self):
+    def data_idx_is_set(self) -> Mapping[str, bool]:
         """
         The associated data indices for which this is set.
         """
         return {
-            k: self.task.workflow.is_parameter_set(
+            k: self.task.workflow.is_parameter_set(cast("int", v))
+            for k, v in self.data_idx.items()
         }

     @property
-    def is_set(self):
+    def is_set(self) -> bool:
         """
         Whether this parameter is set.
         """
@@ -1682,6 +1902,7 @@ class ElementParameter:


 @dataclass
+@hydrate
 class ElementFilter(JSONLike):
     """
     A filter for iterations.
@@ -1692,22 +1913,22 @@ class ElementFilter(JSONLike):
         The filtering rules to use.
     """

-    _child_objects
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
+        ChildObjectSpec(name="rules", is_multiple=True, class_name="Rule"),
+    )

     #: The filtering rules to use.
-    rules:
+    rules: list[Rule] = field(default_factory=list)

-    def filter(
-        self, element_iters: List[app.ElementIteration]
-    ) -> List[app.ElementIteration]:
+    def filter(self, element_iters: list[ElementIteration]) -> list[ElementIteration]:
         """
         Apply the filter rules to select a subsequence of iterations.
         """
-
-
-
-
-
+        return [
+            el_iter
+            for el_iter in element_iters
+            if all(rule_j.test(el_iter) for rule_j in self.rules)
+        ]


 @dataclass
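`ElementFilter.filter` above now keeps exactly those iterations for which every rule passes, via `all(...)` inside a list comprehension. A minimal sketch of that keep-if-all-rules-pass selection, with plain predicate functions standing in for hpcflow `Rule` objects and dicts standing in for element iterations:

    from typing import Any, Callable

    rules: list[Callable[[dict[str, Any]], bool]] = [
        lambda it: it["num_cores"] >= 2,
        lambda it: it["status"] == "success",
    ]

    element_iters = [
        {"id": 0, "num_cores": 1, "status": "success"},
        {"id": 1, "num_cores": 4, "status": "success"},
        {"id": 2, "num_cores": 8, "status": "error"},
    ]

    # keep an iteration only if every rule passes (an empty rule list keeps everything):
    filtered = [it for it in element_iters if all(rule(it) for rule in rules)]
    print([it["id"] for it in filtered])  # [1]
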
@@ -1728,9 +1949,9 @@ class ElementGroup(JSONLike):
     #: The name of the grouping rule.
     name: str
     #: A filtering rule to select which iterations to use in the group.
-    where:
+    where: ElementFilter | None = None
     #: If specified, the name of the property to group iterations by.
-    group_by_distinct:
+    group_by_distinct: ParameterPath | None = None

     def __post_init__(self):
         self.name = check_valid_py_identifier(self.name)
@@ -1752,4 +1973,4 @@ class ElementRepeats:
     #: The number of times to repeat.
     number: int
     #: A filtering rule for what to repeat.
-    where:
+    where: ElementFilter | None = None