hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a199__py3-none-any.whl
This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in that registry.
- hpcflow/__pyinstaller/hook-hpcflow.py +9 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/environments.yaml +1 -1
- hpcflow/sdk/__init__.py +26 -15
- hpcflow/sdk/app.py +2192 -768
- hpcflow/sdk/cli.py +506 -296
- hpcflow/sdk/cli_common.py +105 -7
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +115 -43
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +674 -318
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +125 -84
- hpcflow/sdk/config/types.py +148 -0
- hpcflow/sdk/core/__init__.py +25 -1
- hpcflow/sdk/core/actions.py +1771 -1059
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +139 -79
- hpcflow/sdk/core/command_files.py +263 -287
- hpcflow/sdk/core/commands.py +145 -112
- hpcflow/sdk/core/element.py +828 -535
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +455 -52
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +751 -347
- hpcflow/sdk/core/loop_cache.py +164 -47
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +1100 -627
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +21 -37
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +1649 -1339
- hpcflow/sdk/core/task_schema.py +308 -196
- hpcflow/sdk/core/test_utils.py +191 -114
- hpcflow/sdk/core/types.py +440 -0
- hpcflow/sdk/core/utils.py +485 -309
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +2544 -1178
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
- hpcflow/sdk/demo/cli.py +53 -33
- hpcflow/sdk/helper/cli.py +18 -15
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +122 -71
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +1360 -606
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +568 -188
- hpcflow/sdk/persistence/pending.py +382 -179
- hpcflow/sdk/persistence/store_resource.py +39 -23
- hpcflow/sdk/persistence/types.py +318 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +1337 -433
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +1651 -692
- hpcflow/sdk/submission/schedulers/__init__.py +167 -39
- hpcflow/sdk/submission/schedulers/direct.py +121 -81
- hpcflow/sdk/submission/schedulers/sge.py +170 -129
- hpcflow/sdk/submission/schedulers/slurm.py +291 -268
- hpcflow/sdk/submission/schedulers/utils.py +12 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +150 -29
- hpcflow/sdk/submission/shells/bash.py +283 -173
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +228 -170
- hpcflow/sdk/submission/submission.py +1014 -335
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +182 -12
- hpcflow/sdk/utils/arrays.py +71 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +12 -0
- hpcflow/sdk/utils/strings.py +33 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +27 -6
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +866 -85
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +12 -4
- hpcflow/tests/unit/test_action.py +262 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +33 -6
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +134 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +142 -16
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +50 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +1396 -84
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +29 -7
- hpcflow/tests/unit/test_persistence.py +237 -42
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +117 -6
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +23 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +38 -89
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +334 -1
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +160 -15
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6587 -3
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +8 -4
- hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/element.py
CHANGED
@@ -4,17 +4,28 @@ Elements are components of tasks.
|
|
4
4
|
|
5
5
|
from __future__ import annotations
|
6
6
|
import copy
|
7
|
-
from dataclasses import dataclass, field
|
7
|
+
from dataclasses import dataclass, field, fields
|
8
|
+
from operator import attrgetter
|
9
|
+
from itertools import chain
|
8
10
|
import os
|
9
|
-
from typing import
|
10
|
-
|
11
|
-
|
12
|
-
|
11
|
+
from typing import (
|
12
|
+
Any,
|
13
|
+
Callable,
|
14
|
+
Dict,
|
15
|
+
List,
|
16
|
+
Optional,
|
17
|
+
cast,
|
18
|
+
overload,
|
19
|
+
TYPE_CHECKING,
|
20
|
+
)
|
13
21
|
|
14
|
-
from hpcflow.sdk import
|
22
|
+
from hpcflow.sdk.core.enums import ParallelMode
|
23
|
+
from hpcflow.sdk.core.skip_reason import SkipReason
|
15
24
|
from hpcflow.sdk.core.errors import UnsupportedOSError, UnsupportedSchedulerError
|
16
25
|
from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
|
17
|
-
from hpcflow.sdk.core.
|
26
|
+
from hpcflow.sdk.core.loop_cache import LoopIndex
|
27
|
+
from hpcflow.sdk.typing import hydrate
|
28
|
+
from hpcflow.sdk.core.app_aware import AppAware
|
18
29
|
from hpcflow.sdk.core.utils import (
|
19
30
|
check_valid_py_identifier,
|
20
31
|
dict_values_process_flat,
|
@@ -23,76 +34,79 @@ from hpcflow.sdk.core.utils import (
|
|
23
34
|
)
|
24
35
|
from hpcflow.sdk.log import TimeIt
|
25
36
|
from hpcflow.sdk.submission.shells import get_shell
|
37
|
+
from hpcflow.sdk.utils.hashing import get_hash
|
26
38
|
|
39
|
+
if TYPE_CHECKING:
|
40
|
+
from collections.abc import Iterable, Iterator, Mapping, Sequence
|
41
|
+
from typing import Any, ClassVar, Literal
|
42
|
+
from ..app import BaseApp
|
43
|
+
from ..typing import DataIndex, ParamSource
|
44
|
+
from .actions import Action, ElementAction, ElementActionRun
|
45
|
+
from .parameters import InputSource, ParameterPath, InputValue, ResourceSpec
|
46
|
+
from .rule import Rule
|
47
|
+
from .task import WorkflowTask, ElementSet
|
48
|
+
from .workflow import Workflow
|
27
49
|
|
28
|
-
class _ElementPrefixedParameter:
|
29
|
-
_app_attr = "_app"
|
30
50
|
|
51
|
+
class _ElementPrefixedParameter(AppAware):
|
31
52
|
def __init__(
|
32
53
|
self,
|
33
54
|
prefix: str,
|
34
|
-
element_iteration:
|
35
|
-
element_action:
|
36
|
-
element_action_run:
|
55
|
+
element_iteration: ElementIteration | None = None,
|
56
|
+
element_action: ElementAction | None = None,
|
57
|
+
element_action_run: ElementActionRun | None = None,
|
37
58
|
) -> None:
|
38
59
|
self._prefix = prefix
|
39
60
|
self._element_iteration = element_iteration
|
40
61
|
self._element_action = element_action
|
41
62
|
self._element_action_run = element_action_run
|
42
63
|
|
43
|
-
|
64
|
+
# assigned on first access
|
65
|
+
self._prefixed_names_unlabelled: Mapping[str, Sequence[str]] | None = None
|
44
66
|
|
45
|
-
def __getattr__(self, name):
|
67
|
+
def __getattr__(self, name: str) -> ElementParameter | Mapping[str, ElementParameter]:
|
46
68
|
if name not in self.prefixed_names_unlabelled:
|
47
69
|
raise ValueError(
|
48
70
|
f"No {self._prefix} named {name!r}. Available {self._prefix} are: "
|
49
71
|
f"{self.prefixed_names_unlabelled_str}."
|
50
72
|
)
|
51
73
|
|
52
|
-
labels
|
53
|
-
if labels:
|
74
|
+
if labels := self.prefixed_names_unlabelled.get(name):
|
54
75
|
# is multiple; return a dict of `ElementParameter`s
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
path=path_i,
|
60
|
-
task=self._task,
|
61
|
-
parent=self._parent,
|
62
|
-
element=self._element_iteration_obj,
|
63
|
-
)
|
64
|
-
|
76
|
+
return {
|
77
|
+
label_i: self.__parameter(f"{self._prefix}.{name}[{label_i}]")
|
78
|
+
for label_i in labels
|
79
|
+
}
|
65
80
|
else:
|
66
81
|
# could be labelled still, but with `multiple=False`
|
67
|
-
|
68
|
-
out = self._app.ElementParameter(
|
69
|
-
path=path_i,
|
70
|
-
task=self._task,
|
71
|
-
parent=self._parent,
|
72
|
-
element=self._element_iteration_obj,
|
73
|
-
)
|
74
|
-
return out
|
82
|
+
return self.__parameter(f"{self._prefix}.{name}")
|
75
83
|
|
76
|
-
def __dir__(self):
|
77
|
-
|
84
|
+
def __dir__(self) -> Iterator[str]:
|
85
|
+
yield from super().__dir__()
|
86
|
+
yield from self.prefixed_names_unlabelled
|
78
87
|
|
79
88
|
@property
|
80
|
-
def
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
|
89
|
+
def __parent(self) -> ElementIteration | ElementActionRun | ElementAction:
|
90
|
+
p = self._element_iteration or self._element_action or self._element_action_run
|
91
|
+
assert p is not None
|
92
|
+
return p
|
93
|
+
|
94
|
+
def __parameter(self, name: str) -> ElementParameter:
|
95
|
+
"""Manufacture an ElementParameter with the given name."""
|
96
|
+
p = self.__parent
|
97
|
+
return self._app.ElementParameter(
|
98
|
+
path=name,
|
99
|
+
task=self._task,
|
100
|
+
parent=p,
|
101
|
+
element=p if isinstance(p, ElementIteration) else p.element_iteration,
|
102
|
+
)
|
89
103
|
|
90
104
|
@property
|
91
|
-
def _task(self):
|
92
|
-
return self.
|
105
|
+
def _task(self) -> WorkflowTask:
|
106
|
+
return self.__parent.task
|
93
107
|
|
94
108
|
@property
|
95
|
-
def prefixed_names_unlabelled(self) ->
|
109
|
+
def prefixed_names_unlabelled(self) -> Mapping[str, Sequence[str]]:
|
96
110
|
"""
|
97
111
|
A mapping between input types and associated labels.
|
98
112
|
|
@@ -101,45 +115,40 @@ class _ElementPrefixedParameter:
|
|
101
115
|
|
102
116
|
"""
|
103
117
|
if self._prefixed_names_unlabelled is None:
|
104
|
-
self._prefixed_names_unlabelled = self.
|
118
|
+
self._prefixed_names_unlabelled = self.__get_prefixed_names_unlabelled()
|
105
119
|
return self._prefixed_names_unlabelled
|
106
120
|
|
107
121
|
@property
|
108
|
-
def prefixed_names_unlabelled_str(self):
|
122
|
+
def prefixed_names_unlabelled_str(self) -> str:
|
109
123
|
"""
|
110
124
|
A description of the prefixed names.
|
111
125
|
"""
|
112
|
-
return ", ".join(
|
126
|
+
return ", ".join(self.prefixed_names_unlabelled)
|
113
127
|
|
114
|
-
def __repr__(self):
|
128
|
+
def __repr__(self) -> str:
|
115
129
|
# If there are one or more labels present, then replace with a single name
|
116
130
|
# indicating there could be multiple (using a `*` prefix):
|
117
|
-
names =
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
return
|
125
|
-
|
126
|
-
def
|
127
|
-
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
if "[" in i:
|
134
|
-
unlab_i, label_i = split_param_label(i)
|
135
|
-
if unlab_i not in all_names:
|
136
|
-
all_names[unlab_i] = []
|
137
|
-
all_names[unlab_i].append(label_i)
|
131
|
+
names = ", ".join(
|
132
|
+
"*" + unlabelled if labels else unlabelled
|
133
|
+
for unlabelled, labels in self.prefixed_names_unlabelled.items()
|
134
|
+
)
|
135
|
+
return f"{self.__class__.__name__}({names})"
|
136
|
+
|
137
|
+
def _get_prefixed_names(self) -> list[str]:
|
138
|
+
return sorted(self.__parent.get_parameter_names(self._prefix))
|
139
|
+
|
140
|
+
def __get_prefixed_names_unlabelled(self) -> Mapping[str, Sequence[str]]:
|
141
|
+
all_names: dict[str, list[str]] = {}
|
142
|
+
for name in self._get_prefixed_names():
|
143
|
+
if "[" in name:
|
144
|
+
unlab_i, label_i = split_param_label(name)
|
145
|
+
if unlab_i is not None and label_i is not None:
|
146
|
+
all_names.setdefault(unlab_i, []).append(label_i)
|
138
147
|
else:
|
139
|
-
all_names[
|
148
|
+
all_names[name] = []
|
140
149
|
return all_names
|
141
150
|
|
142
|
-
def __iter__(self):
|
151
|
+
def __iter__(self) -> Iterator[ElementParameter | Mapping[str, ElementParameter]]:
|
143
152
|
for name in self.prefixed_names_unlabelled:
|
144
153
|
yield getattr(self, name)
|
145
154
|
|
@@ -160,9 +169,9 @@ class ElementInputs(_ElementPrefixedParameter):
|
|
160
169
|
|
161
170
|
def __init__(
|
162
171
|
self,
|
163
|
-
element_iteration:
|
164
|
-
element_action:
|
165
|
-
element_action_run:
|
172
|
+
element_iteration: ElementIteration | None = None,
|
173
|
+
element_action: ElementAction | None = None,
|
174
|
+
element_action_run: ElementActionRun | None = None,
|
166
175
|
) -> None:
|
167
176
|
super().__init__("inputs", element_iteration, element_action, element_action_run)
|
168
177
|
|
@@ -183,9 +192,9 @@ class ElementOutputs(_ElementPrefixedParameter):
|
|
183
192
|
|
184
193
|
def __init__(
|
185
194
|
self,
|
186
|
-
element_iteration:
|
187
|
-
element_action:
|
188
|
-
element_action_run:
|
195
|
+
element_iteration: ElementIteration | None = None,
|
196
|
+
element_action: ElementAction | None = None,
|
197
|
+
element_action_run: ElementActionRun | None = None,
|
189
198
|
) -> None:
|
190
199
|
super().__init__("outputs", element_iteration, element_action, element_action_run)
|
191
200
|
|
@@ -206,9 +215,9 @@ class ElementInputFiles(_ElementPrefixedParameter):
|
|
206
215
|
|
207
216
|
def __init__(
|
208
217
|
self,
|
209
|
-
element_iteration:
|
210
|
-
element_action:
|
211
|
-
element_action_run:
|
218
|
+
element_iteration: ElementIteration | None = None,
|
219
|
+
element_action: ElementAction | None = None,
|
220
|
+
element_action_run: ElementActionRun | None = None,
|
212
221
|
) -> None:
|
213
222
|
super().__init__(
|
214
223
|
"input_files", element_iteration, element_action, element_action_run
|
@@ -231,9 +240,9 @@ class ElementOutputFiles(_ElementPrefixedParameter):
|
|
231
240
|
|
232
241
|
def __init__(
|
233
242
|
self,
|
234
|
-
element_iteration:
|
235
|
-
element_action:
|
236
|
-
element_action_run:
|
243
|
+
element_iteration: ElementIteration | None = None,
|
244
|
+
element_action: ElementAction | None = None,
|
245
|
+
element_action_run: ElementActionRun | None = None,
|
237
246
|
) -> None:
|
238
247
|
super().__init__(
|
239
248
|
"output_files", element_iteration, element_action, element_action_run
|
@@ -241,6 +250,7 @@ class ElementOutputFiles(_ElementPrefixedParameter):
|
|
241
250
|
|
242
251
|
|
243
252
|
@dataclass
|
253
|
+
@hydrate
|
244
254
|
class ElementResources(JSONLike):
|
245
255
|
"""
|
246
256
|
The resources an element requires.
|
@@ -272,6 +282,12 @@ class ElementResources(JSONLike):
|
|
272
282
|
Whether to use array jobs.
|
273
283
|
max_array_items: int
|
274
284
|
If using array jobs, up to how many items should be in the job array.
|
285
|
+
write_app_logs: bool
|
286
|
+
Whether an app log file should be written.
|
287
|
+
combine_jobscript_std: bool
|
288
|
+
Whether jobscript standard output and error streams should be combined.
|
289
|
+
combine_scripts: bool
|
290
|
+
Whether Python scripts should be combined.
|
275
291
|
time_limit: str
|
276
292
|
How long to run for.
|
277
293
|
scheduler_args: dict[str, Any]
|
@@ -282,6 +298,13 @@ class ElementResources(JSONLike):
|
|
282
298
|
Which OS to use.
|
283
299
|
environments: dict
|
284
300
|
Which execution environments to use.
|
301
|
+
resources_id: int
|
302
|
+
An arbitrary integer that can be used to force multiple jobscripts.
|
303
|
+
skip_downstream_on_failure: bool
|
304
|
+
Whether to skip downstream dependents on failure.
|
305
|
+
allow_failed_dependencies: int | float | bool | None
|
306
|
+
The failure tolerance with respect to dependencies, specified as a number or
|
307
|
+
proportion.
|
285
308
|
SGE_parallel_env: str
|
286
309
|
Which SGE parallel environment to request.
|
287
310
|
SLURM_partition: str
|
@@ -299,52 +322,66 @@ class ElementResources(JSONLike):
|
|
299
322
|
# TODO: how to specify e.g. high-memory requirement?
|
300
323
|
|
301
324
|
#: Which scratch space to use.
|
302
|
-
scratch:
|
325
|
+
scratch: str | None = None
|
303
326
|
#: Which parallel mode to use.
|
304
|
-
parallel_mode:
|
327
|
+
parallel_mode: ParallelMode | None = None
|
305
328
|
#: How many cores to request.
|
306
|
-
num_cores:
|
329
|
+
num_cores: int | None = None
|
307
330
|
#: How many cores per compute node to request.
|
308
|
-
num_cores_per_node:
|
331
|
+
num_cores_per_node: int | None = None
|
309
332
|
#: How many threads to request.
|
310
|
-
num_threads:
|
333
|
+
num_threads: int | None = None
|
311
334
|
#: How many compute nodes to request.
|
312
|
-
num_nodes:
|
335
|
+
num_nodes: int | None = None
|
313
336
|
|
314
337
|
#: Which scheduler to use.
|
315
|
-
scheduler:
|
338
|
+
scheduler: str | None = None
|
316
339
|
#: Which system shell to use.
|
317
|
-
shell:
|
340
|
+
shell: str | None = None
|
318
341
|
#: Whether to use array jobs.
|
319
|
-
use_job_array:
|
342
|
+
use_job_array: bool | None = None
|
320
343
|
#: If using array jobs, up to how many items should be in the job array.
|
321
|
-
max_array_items:
|
344
|
+
max_array_items: int | None = None
|
345
|
+
#: Whether an app log file should be written.
|
346
|
+
write_app_logs: bool = False
|
347
|
+
#: Whether jobscript standard output and error streams should be combined.
|
348
|
+
combine_jobscript_std: bool = field(default_factory=lambda: os.name != "nt")
|
349
|
+
#: Whether Python scripts should be combined.
|
350
|
+
combine_scripts: bool | None = None
|
322
351
|
#: How long to run for.
|
323
|
-
time_limit:
|
352
|
+
time_limit: str | None = None
|
353
|
+
|
324
354
|
#: Additional arguments to pass to the scheduler.
|
325
|
-
scheduler_args:
|
355
|
+
scheduler_args: dict[str, Any] = field(default_factory=dict)
|
326
356
|
#: Additional arguments to pass to the shell.
|
327
|
-
shell_args:
|
357
|
+
shell_args: dict[str, Any] = field(default_factory=dict)
|
328
358
|
#: Which OS to use.
|
329
|
-
os_name:
|
359
|
+
os_name: str | None = None
|
330
360
|
#: Which execution environments to use.
|
331
|
-
environments:
|
361
|
+
environments: dict[str, dict[str, Any]] | None = None
|
362
|
+
#: An arbitrary integer that can be used to force multiple jobscripts.
|
363
|
+
resources_id: int | None = None
|
364
|
+
#: Whether to skip downstream dependents on failure.
|
365
|
+
skip_downstream_on_failure: bool = True
|
366
|
+
#: The failure tolerance with respect to dependencies, specified as a number or
|
367
|
+
#: proportion.
|
368
|
+
allow_failed_dependencies: int | float | bool | None = False
|
332
369
|
|
333
370
|
# SGE scheduler specific:
|
334
371
|
#: Which SGE parallel environment to request.
|
335
|
-
SGE_parallel_env: str = None
|
372
|
+
SGE_parallel_env: str | None = None
|
336
373
|
|
337
374
|
# SLURM scheduler specific:
|
338
375
|
#: Which SLURM partition to request.
|
339
|
-
SLURM_partition: str = None
|
376
|
+
SLURM_partition: str | None = None
|
340
377
|
#: How many SLURM tasks to request.
|
341
|
-
SLURM_num_tasks:
|
378
|
+
SLURM_num_tasks: int | None = None
|
342
379
|
#: How many SLURM tasks per compute node to request.
|
343
|
-
SLURM_num_tasks_per_node:
|
380
|
+
SLURM_num_tasks_per_node: int | None = None
|
344
381
|
#: How many compute nodes to request.
|
345
|
-
SLURM_num_nodes:
|
382
|
+
SLURM_num_nodes: int | None = None
|
346
383
|
#: How many CPU cores to ask for per SLURM task.
|
347
|
-
SLURM_num_cpus_per_task:
|
384
|
+
SLURM_num_cpus_per_task: int | None = None
|
348
385
|
|
349
386
|
def __post_init__(self):
|
350
387
|
if (
|
@@ -367,41 +404,30 @@ class ElementResources(JSONLike):
|
|
367
404
|
else:
|
368
405
|
return self.__dict__ == other.__dict__
|
369
406
|
|
370
|
-
|
407
|
+
@TimeIt.decorator
|
408
|
+
def get_jobscript_hash(self) -> int:
|
371
409
|
"""Get hash from all arguments that distinguish jobscripts."""
|
372
410
|
|
373
|
-
|
374
|
-
|
375
|
-
|
376
|
-
|
377
|
-
|
411
|
+
exclude = ["time_limit", "skip_downstream_on_failure"]
|
412
|
+
if not self.combine_scripts:
|
413
|
+
# usually environment selection need not distinguish jobscripts because
|
414
|
+
# environments become effective/active within the command files, but if we
|
415
|
+
# are combining scripts, then the environments must be the same:
|
416
|
+
exclude.append("environments")
|
378
417
|
|
379
|
-
exclude = ("time_limit",)
|
380
418
|
dct = {k: copy.deepcopy(v) for k, v in self.__dict__.items() if k not in exclude}
|
381
419
|
|
382
|
-
|
383
|
-
|
384
|
-
|
385
|
-
|
386
|
-
if isinstance(scheduler_args, dict):
|
387
|
-
if "options" in scheduler_args:
|
388
|
-
dct["scheduler_args"]["options"] = _hash_dict(scheduler_args["options"])
|
389
|
-
dct["scheduler_args"] = _hash_dict(dct["scheduler_args"])
|
390
|
-
|
391
|
-
if isinstance(shell_args, dict):
|
392
|
-
dct["shell_args"] = _hash_dict(shell_args)
|
420
|
+
# `combine_scripts==False` and `combine_scripts==None` should have an equivalent
|
421
|
+
# contribution to the hash, so always set it to `False` if unset at this point:
|
422
|
+
if self.combine_scripts is None:
|
423
|
+
dct["combine_scripts"] = False
|
393
424
|
|
394
|
-
|
395
|
-
for k, v in envs.items():
|
396
|
-
dct["environments"][k] = _hash_dict(v)
|
397
|
-
dct["environments"] = _hash_dict(dct["environments"])
|
398
|
-
|
399
|
-
return _hash_dict(dct)
|
425
|
+
return get_hash(dct)
|
400
426
|
|
401
427
|
@property
|
402
428
|
def is_parallel(self) -> bool:
|
403
429
|
"""Returns True if any scheduler-agnostic arguments indicate a parallel job."""
|
404
|
-
return (
|
430
|
+
return bool(
|
405
431
|
(self.num_cores and self.num_cores != 1)
|
406
432
|
or (self.num_cores_per_node and self.num_cores_per_node != 1)
|
407
433
|
or (self.num_nodes and self.num_nodes != 1)
|
@@ -411,7 +437,7 @@ class ElementResources(JSONLike):
|
|
411
437
|
@property
|
412
438
|
def SLURM_is_parallel(self) -> bool:
|
413
439
|
"""Returns True if any SLURM-specific arguments indicate a parallel job."""
|
414
|
-
return (
|
440
|
+
return bool(
|
415
441
|
(self.SLURM_num_tasks and self.SLURM_num_tasks != 1)
|
416
442
|
or (self.SLURM_num_tasks_per_node and self.SLURM_num_tasks_per_node != 1)
|
417
443
|
or (self.SLURM_num_nodes and self.SLURM_num_nodes != 1)
|
@@ -419,35 +445,39 @@ class ElementResources(JSONLike):
|
|
419
445
|
)
|
420
446
|
|
421
447
|
@staticmethod
|
422
|
-
def get_env_instance_filterable_attributes() ->
|
448
|
+
def get_env_instance_filterable_attributes() -> tuple[str, ...]:
|
423
449
|
"""Get a tuple of resource attributes that are used to filter environment
|
424
450
|
executable instances at submit- and run-time."""
|
425
451
|
return ("num_cores",) # TODO: filter on `parallel_mode` later
|
426
452
|
|
427
453
|
@staticmethod
|
428
|
-
|
454
|
+
@TimeIt.decorator
|
455
|
+
def get_default_os_name() -> str:
|
429
456
|
"""
|
430
457
|
Get the default value for OS name.
|
431
458
|
"""
|
432
459
|
return os.name
|
433
460
|
|
434
461
|
@classmethod
|
435
|
-
|
462
|
+
@TimeIt.decorator
|
463
|
+
def get_default_shell(cls) -> str:
|
436
464
|
"""
|
437
465
|
Get the default value for name.
|
438
466
|
"""
|
439
|
-
return cls.
|
467
|
+
return cls._app.config.default_shell
|
440
468
|
|
441
469
|
@classmethod
|
442
|
-
|
470
|
+
@TimeIt.decorator
|
471
|
+
def get_default_scheduler(cls, os_name: str, shell_name: str) -> str:
|
443
472
|
"""
|
444
473
|
Get the default value for scheduler.
|
445
474
|
"""
|
446
475
|
if os_name == "nt" and "wsl" in shell_name:
|
447
476
|
# provide a "*_posix" default scheduler on windows if shell is WSL:
|
448
477
|
return "direct_posix"
|
449
|
-
return cls.
|
478
|
+
return cls._app.config.default_scheduler
|
450
479
|
|
480
|
+
@TimeIt.decorator
|
451
481
|
def set_defaults(self):
|
452
482
|
"""
|
453
483
|
Set defaults for unspecified values that need defaults.
|
@@ -461,31 +491,39 @@ class ElementResources(JSONLike):
|
|
461
491
|
|
462
492
|
# merge defaults shell args from config:
|
463
493
|
self.shell_args = {
|
464
|
-
**self.
|
494
|
+
**self._app.config.shells.get(self.shell, {}).get("defaults", {}),
|
465
495
|
**self.shell_args,
|
466
496
|
}
|
467
497
|
|
468
498
|
# "direct_posix" scheduler is valid on Windows if using WSL:
|
469
499
|
cfg_lookup = f"{self.scheduler}_posix" if "wsl" in self.shell else self.scheduler
|
470
|
-
cfg_sched = copy.deepcopy(self.
|
500
|
+
cfg_sched = copy.deepcopy(self._app.config.schedulers.get(cfg_lookup, {}))
|
471
501
|
|
472
502
|
# merge defaults scheduler args from config:
|
473
503
|
cfg_defs = cfg_sched.get("defaults", {})
|
474
504
|
cfg_opts = cfg_defs.pop("options", {})
|
475
505
|
opts = {**cfg_opts, **self.scheduler_args.get("options", {})}
|
476
|
-
|
506
|
+
if opts:
|
507
|
+
self.scheduler_args["options"] = opts
|
477
508
|
self.scheduler_args = {**cfg_defs, **self.scheduler_args}
|
478
509
|
|
510
|
+
@TimeIt.decorator
|
479
511
|
def validate_against_machine(self):
|
480
512
|
"""Validate the values for `os_name`, `shell` and `scheduler` against those
|
481
513
|
supported on this machine (as specified by the app configuration)."""
|
482
514
|
if self.os_name != os.name:
|
483
515
|
raise UnsupportedOSError(os_name=self.os_name)
|
484
|
-
if self.scheduler not in self.
|
516
|
+
if self.scheduler not in self._app.config.schedulers:
|
485
517
|
raise UnsupportedSchedulerError(
|
486
518
|
scheduler=self.scheduler,
|
487
|
-
supported=self.
|
519
|
+
supported=self._app.config.schedulers,
|
520
|
+
)
|
521
|
+
|
522
|
+
if self.os_name == "nt" and self.combine_jobscript_std:
|
523
|
+
raise NotImplementedError(
|
524
|
+
"`combine_jobscript_std` is not yet supported on Windows."
|
488
525
|
)
|
526
|
+
|
489
527
|
# might raise `UnsupportedShellError`:
|
490
528
|
get_shell(shell_name=self.shell, os_name=self.os_name)
|
491
529
|
|
@@ -495,11 +533,11 @@ class ElementResources(JSONLike):
|
|
495
533
|
key = tuple(self.scheduler.split("_"))
|
496
534
|
else:
|
497
535
|
key = (self.scheduler.lower(), self.os_name.lower())
|
498
|
-
scheduler_cls = self.
|
499
|
-
scheduler_cls.process_resources(self, self.
|
536
|
+
scheduler_cls = self._app.scheduler_lookup[key]
|
537
|
+
scheduler_cls.process_resources(self, self._app.config.schedulers[self.scheduler])
|
500
538
|
|
501
539
|
|
502
|
-
class ElementIteration:
|
540
|
+
class ElementIteration(AppAware):
|
503
541
|
"""
|
504
542
|
A particular iteration of an element.
|
505
543
|
|
@@ -527,38 +565,36 @@ class ElementIteration:
|
|
527
565
|
Indexing information from the loop.
|
528
566
|
"""
|
529
567
|
|
530
|
-
_app_attr = "app"
|
531
|
-
|
532
568
|
def __init__(
|
533
569
|
self,
|
534
570
|
id_: int,
|
535
571
|
is_pending: bool,
|
536
572
|
index: int,
|
537
|
-
element:
|
538
|
-
data_idx:
|
573
|
+
element: Element,
|
574
|
+
data_idx: DataIndex,
|
539
575
|
EARs_initialised: bool,
|
540
|
-
EAR_IDs:
|
541
|
-
EARs:
|
542
|
-
schema_parameters:
|
543
|
-
loop_idx:
|
576
|
+
EAR_IDs: dict[int, list[int]],
|
577
|
+
EARs: dict[int, dict[Mapping[str, Any], Any]] | None,
|
578
|
+
schema_parameters: list[str],
|
579
|
+
loop_idx: Mapping[str, int],
|
544
580
|
):
|
545
581
|
self._id = id_
|
546
582
|
self._is_pending = is_pending
|
547
583
|
self._index = index
|
548
584
|
self._element = element
|
549
585
|
self._data_idx = data_idx
|
550
|
-
self._loop_idx = loop_idx
|
586
|
+
self._loop_idx = LoopIndex(loop_idx)
|
551
587
|
self._schema_parameters = schema_parameters
|
552
588
|
self._EARs_initialised = EARs_initialised
|
553
589
|
self._EARs = EARs
|
554
590
|
self._EAR_IDs = EAR_IDs
|
555
591
|
|
556
592
|
# assigned on first access of corresponding properties:
|
557
|
-
self._inputs = None
|
558
|
-
self._outputs = None
|
559
|
-
self._input_files = None
|
560
|
-
self._output_files = None
|
561
|
-
self._action_objs = None
|
593
|
+
self._inputs: ElementInputs | None = None
|
594
|
+
self._outputs: ElementOutputs | None = None
|
595
|
+
self._input_files: ElementInputFiles | None = None
|
596
|
+
self._output_files: ElementOutputFiles | None = None
|
597
|
+
self._action_objs: dict[int, ElementAction] | None = None
|
562
598
|
|
563
599
|
def __repr__(self):
|
564
600
|
return (
|
@@ -569,24 +605,24 @@ class ElementIteration:
|
|
569
605
|
)
|
570
606
|
|
571
607
|
@property
|
572
|
-
def data_idx(self):
|
608
|
+
def data_idx(self) -> DataIndex:
|
573
609
|
"""The overall element iteration data index, before resolution of EARs."""
|
574
610
|
return self._data_idx
|
575
611
|
|
576
612
|
@property
|
577
|
-
def EARs_initialised(self):
|
613
|
+
def EARs_initialised(self) -> bool:
|
578
614
|
"""Whether or not the EARs have been initialised."""
|
579
615
|
return self._EARs_initialised
|
580
616
|
|
581
617
|
@property
|
582
|
-
def element(self):
|
618
|
+
def element(self) -> Element:
|
583
619
|
"""
|
584
620
|
The element this is an iteration of.
|
585
621
|
"""
|
586
622
|
return self._element
|
587
623
|
|
588
624
|
@property
|
589
|
-
def index(self):
|
625
|
+
def index(self) -> int:
|
590
626
|
"""
|
591
627
|
The index of this iteration in its parent element.
|
592
628
|
"""
|
@@ -607,108 +643,115 @@ class ElementIteration:
|
|
607
643
|
return self._is_pending
|
608
644
|
|
609
645
|
@property
|
610
|
-
def task(self):
|
646
|
+
def task(self) -> WorkflowTask:
|
611
647
|
"""
|
612
648
|
The task this is an iteration of an element for.
|
613
649
|
"""
|
614
650
|
return self.element.task
|
615
651
|
|
616
652
|
@property
|
617
|
-
def workflow(self):
|
653
|
+
def workflow(self) -> Workflow:
|
618
654
|
"""
|
619
655
|
The workflow this is a part of.
|
620
656
|
"""
|
621
657
|
return self.element.workflow
|
622
658
|
|
623
659
|
@property
|
624
|
-
def loop_idx(self) ->
|
660
|
+
def loop_idx(self) -> LoopIndex[str, int]:
|
625
661
|
"""
|
626
662
|
Indexing information from the loop.
|
627
663
|
"""
|
628
664
|
return self._loop_idx
|
629
665
|
|
630
666
|
@property
|
631
|
-
def schema_parameters(self) ->
|
667
|
+
def schema_parameters(self) -> Sequence[str]:
|
632
668
|
"""
|
633
669
|
Parameters from the schema.
|
634
670
|
"""
|
635
671
|
return self._schema_parameters
|
636
672
|
|
637
673
|
@property
|
638
|
-
def EAR_IDs(self) ->
|
674
|
+
def EAR_IDs(self) -> Mapping[int, Sequence[int]]:
|
639
675
|
"""
|
640
|
-
Mapping from
|
676
|
+
Mapping from action index to EAR ID, where known.
|
641
677
|
"""
|
642
678
|
return self._EAR_IDs
|
643
679
|
|
644
680
|
@property
|
645
|
-
def
|
681
|
+
def loop_skipped(self) -> bool:
|
682
|
+
"""True if the the iteration was skipped entirely due to a loop termination."""
|
683
|
+
if not self.action_runs:
|
684
|
+
# this includes when runs are not initialised
|
685
|
+
return False
|
686
|
+
else:
|
687
|
+
return all(
|
688
|
+
i.skip_reason is SkipReason.LOOP_TERMINATION for i in self.action_runs
|
689
|
+
)
|
690
|
+
|
691
|
+
@property
|
692
|
+
def EAR_IDs_flat(self) -> Iterable[int]:
|
646
693
|
"""
|
647
694
|
The EAR IDs.
|
648
695
|
"""
|
649
|
-
return
|
696
|
+
return chain.from_iterable(self.EAR_IDs.values())
|
650
697
|
|
651
698
|
@property
|
652
|
-
def actions(self) ->
|
699
|
+
def actions(self) -> Mapping[int, ElementAction]:
|
653
700
|
"""
|
654
701
|
The actions of this iteration.
|
655
702
|
"""
|
656
703
|
if self._action_objs is None:
|
657
704
|
self._action_objs = {
|
658
|
-
act_idx: self.
|
659
|
-
element_iteration=self,
|
660
|
-
action_idx=act_idx,
|
661
|
-
runs=runs,
|
662
|
-
)
|
705
|
+
act_idx: self._app.ElementAction(self, act_idx, runs)
|
663
706
|
for act_idx, runs in (self._EARs or {}).items()
|
664
707
|
}
|
665
708
|
return self._action_objs
|
666
709
|
|
667
710
|
@property
|
668
|
-
def action_runs(self) ->
|
711
|
+
def action_runs(self) -> Sequence[ElementActionRun]:
|
669
712
|
"""
|
670
713
|
A list of element action runs, where only the final run is taken for each
|
671
714
|
element action.
|
672
715
|
"""
|
673
|
-
return [
|
716
|
+
return [act.runs[-1] for act in self.actions.values()]
|
674
717
|
|
675
718
|
@property
|
676
|
-
def inputs(self) ->
|
719
|
+
def inputs(self) -> ElementInputs:
|
677
720
|
"""
|
678
721
|
The inputs to this element.
|
679
722
|
"""
|
680
723
|
if not self._inputs:
|
681
|
-
self._inputs = self.
|
724
|
+
self._inputs = self._app.ElementInputs(element_iteration=self)
|
682
725
|
return self._inputs
|
683
726
|
|
684
727
|
@property
|
685
|
-
def outputs(self) ->
|
728
|
+
def outputs(self) -> ElementOutputs:
|
686
729
|
"""
|
687
730
|
The outputs from this element.
|
688
731
|
"""
|
689
732
|
if not self._outputs:
|
690
|
-
self._outputs = self.
|
733
|
+
self._outputs = self._app.ElementOutputs(element_iteration=self)
|
691
734
|
return self._outputs
|
692
735
|
|
693
736
|
@property
|
694
|
-
def input_files(self) ->
|
737
|
+
def input_files(self) -> ElementInputFiles:
|
695
738
|
"""
|
696
739
|
The input files to this element.
|
697
740
|
"""
|
698
741
|
if not self._input_files:
|
699
|
-
self._input_files = self.
|
742
|
+
self._input_files = self._app.ElementInputFiles(element_iteration=self)
|
700
743
|
return self._input_files
|
701
744
|
|
702
745
|
@property
|
703
|
-
def output_files(self) ->
|
746
|
+
def output_files(self) -> ElementOutputFiles:
|
704
747
|
"""
|
705
748
|
The output files from this element.
|
706
749
|
"""
|
707
750
|
if not self._output_files:
|
708
|
-
self._output_files = self.
|
751
|
+
self._output_files = self._app.ElementOutputFiles(element_iteration=self)
|
709
752
|
return self._output_files
|
710
753
|
|
711
|
-
def get_parameter_names(self, prefix: str) ->
|
754
|
+
def get_parameter_names(self, prefix: str) -> list[str]:
|
712
755
|
"""Get parameter types associated with a given prefix.
|
713
756
|
|
714
757
|
For example, with the prefix "inputs", this would return `['p1', 'p2']` for a task
|
@@ -728,19 +771,19 @@ class ElementIteration:
|
|
728
771
|
|
729
772
|
"""
|
730
773
|
single_label_lookup = self.task.template._get_single_label_lookup("inputs")
|
731
|
-
return
|
732
|
-
".".join(single_label_lookup.get(
|
733
|
-
for
|
734
|
-
if
|
735
|
-
|
774
|
+
return [
|
775
|
+
".".join(single_label_lookup.get(param_name, param_name).split(".")[1:])
|
776
|
+
for param_name in self.schema_parameters
|
777
|
+
if param_name.startswith(prefix)
|
778
|
+
]
|
736
779
|
|
737
780
|
@TimeIt.decorator
|
738
781
|
def get_data_idx(
|
739
782
|
self,
|
740
|
-
path: str = None,
|
741
|
-
action_idx: int = None,
|
783
|
+
path: str | None = None,
|
784
|
+
action_idx: int | None = None,
|
742
785
|
run_idx: int = -1,
|
743
|
-
) ->
|
786
|
+
) -> DataIndex:
|
744
787
|
"""
|
745
788
|
Get the data index.
|
746
789
|
|
@@ -766,8 +809,7 @@ class ElementIteration:
|
|
766
809
|
data_idx = {}
|
767
810
|
for action in self.actions.values():
|
768
811
|
for k, v in action.runs[run_idx].data_idx.items():
|
769
|
-
|
770
|
-
if (is_input and k not in data_idx) or not is_input:
|
812
|
+
if not k.startswith("inputs") or k not in data_idx:
|
771
813
|
data_idx[k] = v
|
772
814
|
|
773
815
|
else:
|
@@ -779,16 +821,85 @@ class ElementIteration:
|
|
779
821
|
|
780
822
|
return copy.deepcopy(data_idx)
|
781
823
|
|
824
|
+
def __get_parameter_sources(
|
825
|
+
self, data_idx: DataIndex, filter_type: str | None, use_task_index: bool
|
826
|
+
) -> Mapping[str, ParamSource | list[ParamSource]]:
|
827
|
+
# the value associated with `repeats.*` is the repeats index, not a parameter ID:
|
828
|
+
for k in tuple(data_idx):
|
829
|
+
if k.startswith("repeats."):
|
830
|
+
data_idx.pop(k)
|
831
|
+
|
832
|
+
out: Mapping[str, ParamSource | list[ParamSource]] = dict_values_process_flat(
|
833
|
+
data_idx,
|
834
|
+
callable=self.workflow.get_parameter_sources,
|
835
|
+
)
|
836
|
+
|
837
|
+
if use_task_index:
|
838
|
+
for k, v in out.items():
|
839
|
+
assert isinstance(v, dict)
|
840
|
+
if (insert_ID := v.pop("task_insert_ID", None)) is not None:
|
841
|
+
# Modify the contents of out
|
842
|
+
v["task_idx"] = self.workflow.tasks.get(insert_ID=insert_ID).index
|
843
|
+
|
844
|
+
if not filter_type:
|
845
|
+
return out
|
846
|
+
|
847
|
+
# Filter to just the elements that have the right type property
|
848
|
+
filtered = (
|
849
|
+
(k, self.__filter_param_source_by_type(v, filter_type))
|
850
|
+
for k, v in out.items()
|
851
|
+
)
|
852
|
+
return {k: v for k, v in filtered if v is not None}
|
853
|
+
|
854
|
+
@staticmethod
|
855
|
+
def __filter_param_source_by_type(
|
856
|
+
value: ParamSource | list[ParamSource], filter_type: str
|
857
|
+
) -> ParamSource | list[ParamSource] | None:
|
858
|
+
if isinstance(value, list):
|
859
|
+
if sources := [src for src in value if src["type"] == filter_type]:
|
860
|
+
return sources
|
861
|
+
else:
|
862
|
+
if value["type"] == filter_type:
|
863
|
+
return value
|
864
|
+
return None
|
865
|
+
|
866
|
+
@overload
|
867
|
+
def get_parameter_sources(
|
868
|
+
self,
|
869
|
+
path: str | None,
|
870
|
+
*,
|
871
|
+
action_idx: int | None,
|
872
|
+
run_idx: int = -1,
|
873
|
+
typ: str | None = None,
|
874
|
+
as_strings: Literal[True],
|
875
|
+
use_task_index: bool = False,
|
876
|
+
) -> Mapping[str, str]:
|
877
|
+
...
|
878
|
+
|
879
|
+
@overload
|
880
|
+
def get_parameter_sources(
|
881
|
+
self,
|
882
|
+
path: str | None = None,
|
883
|
+
*,
|
884
|
+
action_idx: int | None = None,
|
885
|
+
run_idx: int = -1,
|
886
|
+
typ: str | None = None,
|
887
|
+
as_strings: Literal[False] = False,
|
888
|
+
use_task_index: bool = False,
|
889
|
+
) -> Mapping[str, ParamSource | list[ParamSource]]:
|
890
|
+
...
|
891
|
+
|
782
892
|
@TimeIt.decorator
|
783
893
|
def get_parameter_sources(
|
784
894
|
self,
|
785
|
-
path: str = None,
|
786
|
-
|
895
|
+
path: str | None = None,
|
896
|
+
*,
|
897
|
+
action_idx: int | None = None,
|
787
898
|
run_idx: int = -1,
|
788
|
-
typ: str = None,
|
899
|
+
typ: str | None = None,
|
789
900
|
as_strings: bool = False,
|
790
901
|
use_task_index: bool = False,
|
791
|
-
) ->
|
902
|
+
) -> Mapping[str, str] | Mapping[str, ParamSource | list[ParamSource]]:
|
792
903
|
"""
|
793
904
|
Get the origin of parameters.
|
794
905
|
|
@@ -799,79 +910,40 @@ class ElementIteration:
|
|
799
910
|
ID.
|
800
911
|
"""
|
801
912
|
data_idx = self.get_data_idx(path, action_idx, run_idx)
|
802
|
-
|
803
|
-
|
804
|
-
|
805
|
-
|
806
|
-
|
807
|
-
|
808
|
-
|
809
|
-
|
810
|
-
|
811
|
-
|
812
|
-
|
813
|
-
|
814
|
-
if use_task_index:
|
815
|
-
task_key = "task_idx"
|
816
|
-
out_task_idx = {}
|
817
|
-
for k, v in out.items():
|
818
|
-
insert_ID = v.pop("task_insert_ID", None)
|
819
|
-
if insert_ID is not None:
|
820
|
-
v[task_key] = self.workflow.tasks.get(insert_ID=insert_ID).index
|
821
|
-
out_task_idx[k] = v
|
822
|
-
out = out_task_idx
|
823
|
-
|
824
|
-
if typ:
|
825
|
-
out_ = {}
|
826
|
-
for k, v in out.items():
|
827
|
-
is_multi = False
|
828
|
-
if isinstance(v, list):
|
829
|
-
is_multi = True
|
830
|
-
else:
|
831
|
-
v = [v]
|
832
|
-
|
833
|
-
sources_k = []
|
834
|
-
for src_i in v:
|
835
|
-
if src_i["type"] == typ:
|
836
|
-
if not is_multi:
|
837
|
-
sources_k = src_i
|
838
|
-
break
|
839
|
-
else:
|
840
|
-
sources_k.append(src_i)
|
841
|
-
|
842
|
-
if sources_k:
|
843
|
-
out_[k] = sources_k
|
844
|
-
|
845
|
-
out = out_
|
846
|
-
|
847
|
-
if as_strings:
|
848
|
-
# format as a dict with compact string values
|
849
|
-
self_task_val = (
|
850
|
-
self.task.index if task_key == "task_idx" else self.task.insert_ID
|
851
|
-
)
|
852
|
-
out_strs = {}
|
853
|
-
for k, v in out.items():
|
854
|
-
if v["type"] == "local_input":
|
855
|
-
if v[task_key] == self_task_val:
|
913
|
+
out = self.__get_parameter_sources(data_idx, typ or "", use_task_index)
|
914
|
+
if not as_strings:
|
915
|
+
return out
|
916
|
+
|
917
|
+
# format as a dict with compact string values
|
918
|
+
out_strs: dict[str, str] = {}
|
919
|
+
for k, v in out.items():
|
920
|
+
assert isinstance(v, dict)
|
921
|
+
if v["type"] == "local_input":
|
922
|
+
if use_task_index:
|
923
|
+
if v["task_idx"] == self.task.index:
|
856
924
|
out_strs[k] = "local"
|
857
925
|
else:
|
858
|
-
out_strs[k] = f"task.{v[
|
859
|
-
elif v["type"] == "default_input":
|
860
|
-
out_strs == "default"
|
926
|
+
out_strs[k] = f"task.{v['task_idx']}.input"
|
861
927
|
else:
|
862
|
-
|
863
|
-
|
864
|
-
|
865
|
-
|
866
|
-
|
867
|
-
|
868
|
-
|
928
|
+
if v["task_insert_ID"] == self.task.insert_ID:
|
929
|
+
out_strs[k] = "local"
|
930
|
+
else:
|
931
|
+
out_strs[k] = f"task.{v['task_insert_ID']}.input"
|
932
|
+
elif v["type"] == "default_input":
|
933
|
+
out_strs == "default"
|
934
|
+
else:
|
935
|
+
idx = v["task_idx"] if use_task_index else v["task_insert_ID"]
|
936
|
+
out_strs[k] = (
|
937
|
+
f"task.{idx}.element.{v['element_idx']}."
|
938
|
+
f"action.{v['action_idx']}.run.{v['run_idx']}"
|
939
|
+
)
|
940
|
+
return out_strs
|
869
941
|
|
870
942
|
@TimeIt.decorator
|
871
943
|
def get(
|
872
944
|
self,
|
873
|
-
path: str = None,
|
874
|
-
action_idx: int = None,
|
945
|
+
path: str | None = None,
|
946
|
+
action_idx: int | None = None,
|
875
947
|
run_idx: int = -1,
|
876
948
|
default: Any = None,
|
877
949
|
raise_on_missing: bool = False,
|
@@ -888,12 +960,11 @@ class ElementIteration:
|
|
888
960
|
if single_label_lookup:
|
889
961
|
# For any non-multiple `SchemaParameter`s of this task with non-empty labels,
|
890
962
|
# remove the trivial label:
|
891
|
-
for key in
|
963
|
+
for key in tuple(data_idx):
|
892
964
|
if (path or "").startswith(key):
|
893
965
|
# `path` uses labelled type, so no need to convert to non-labelled
|
894
966
|
continue
|
895
|
-
lookup_val
|
896
|
-
if lookup_val:
|
967
|
+
if lookup_val := single_label_lookup.get(key):
|
897
968
|
data_idx[lookup_val] = data_idx.pop(key)
|
898
969
|
|
899
970
|
return self.task._get_merged_parameter_data(
|
@@ -904,203 +975,286 @@ class ElementIteration:
|
|
904
975
|
default=default,
|
905
976
|
)
|
906
977
|
|
978
|
+
@overload
|
979
|
+
def get_EAR_dependencies(
|
980
|
+
self,
|
981
|
+
as_objects: Literal[False] = False,
|
982
|
+
) -> set[int]:
|
983
|
+
...
|
984
|
+
|
985
|
+
@overload
|
986
|
+
def get_EAR_dependencies(
|
987
|
+
self,
|
988
|
+
as_objects: Literal[True],
|
989
|
+
) -> list[ElementActionRun]:
|
990
|
+
...
|
991
|
+
|
907
992
|
@TimeIt.decorator
|
908
993
|
def get_EAR_dependencies(
|
909
994
|
self,
|
910
|
-
as_objects:
|
911
|
-
) ->
|
995
|
+
as_objects: bool = False,
|
996
|
+
) -> set[int] | list[ElementActionRun]:
|
912
997
|
"""Get EARs that this element iteration depends on (excluding EARs of this element
|
913
998
|
iteration)."""
|
914
999
|
# TODO: test this includes EARs of upstream iterations of this iteration's element
|
915
1000
|
if self.action_runs:
|
916
|
-
|
917
|
-
|
918
|
-
|
919
|
-
|
920
|
-
|
921
|
-
|
922
|
-
|
923
|
-
)
|
1001
|
+
EAR_IDs_set = frozenset(self.EAR_IDs_flat)
|
1002
|
+
out = {
|
1003
|
+
id_
|
1004
|
+
for ear in self.action_runs
|
1005
|
+
for id_ in ear.get_EAR_dependencies()
|
1006
|
+
if id_ not in EAR_IDs_set
|
1007
|
+
}
|
924
1008
|
else:
|
925
1009
|
# if an "input-only" task schema, then there will be no action runs, but the
|
926
1010
|
# ElementIteration can still depend on other EARs if inputs are sourced from
|
927
1011
|
# upstream tasks:
|
928
|
-
out =
|
929
|
-
|
930
|
-
|
931
|
-
|
932
|
-
|
933
|
-
EAR_ID_i = src_i["EAR_ID"]
|
934
|
-
out.append(EAR_ID_i)
|
935
|
-
out = sorted(set(out))
|
1012
|
+
out = {
|
1013
|
+
src_i["EAR_ID"]
|
1014
|
+
for src in self.get_parameter_sources(typ="EAR_output").values()
|
1015
|
+
for src_i in (src if isinstance(src, list) else [src])
|
1016
|
+
}
|
936
1017
|
|
937
1018
|
if as_objects:
|
938
|
-
|
1019
|
+
return self.workflow.get_EARs_from_IDs(sorted(out))
|
939
1020
|
return out
|
940
1021
|
|
1022
|
+
@overload
|
1023
|
+
def get_element_iteration_dependencies(
|
1024
|
+
self, as_objects: Literal[True]
|
1025
|
+
) -> list[ElementIteration]:
|
1026
|
+
...
|
1027
|
+
|
1028
|
+
@overload
|
1029
|
+
def get_element_iteration_dependencies(
|
1030
|
+
self, as_objects: Literal[False] = False
|
1031
|
+
) -> set[int]:
|
1032
|
+
...
|
1033
|
+
|
941
1034
|
@TimeIt.decorator
|
942
1035
|
def get_element_iteration_dependencies(
|
943
1036
|
self, as_objects: bool = False
|
944
|
-
) ->
|
1037
|
+
) -> set[int] | list[ElementIteration]:
|
945
1038
|
"""Get element iterations that this element iteration depends on."""
|
946
1039
|
# TODO: test this includes previous iterations of this iteration's element
|
947
|
-
EAR_IDs = self.get_EAR_dependencies(
|
948
|
-
out =
|
1040
|
+
EAR_IDs = self.get_EAR_dependencies()
|
1041
|
+
out = set(self.workflow.get_element_iteration_IDs_from_EAR_IDs(EAR_IDs))
|
949
1042
|
if as_objects:
|
950
|
-
|
1043
|
+
return self.workflow.get_element_iterations_from_IDs(sorted(out))
|
951
1044
|
return out
|
952
1045
|
|
1046
|
+
@overload
|
1047
|
+
def get_element_dependencies(
|
1048
|
+
self,
|
1049
|
+
as_objects: Literal[False] = False,
|
1050
|
+
) -> set[int]:
|
1051
|
+
...
|
1052
|
+
|
1053
|
+
@overload
|
1054
|
+
def get_element_dependencies(
|
1055
|
+
self,
|
1056
|
+
as_objects: Literal[True],
|
1057
|
+
) -> list[Element]:
|
1058
|
+
...
|
1059
|
+
|
953
1060
|
@TimeIt.decorator
|
954
1061
|
def get_element_dependencies(
|
955
1062
|
self,
|
956
|
-
as_objects:
|
957
|
-
) ->
|
1063
|
+
as_objects: bool = False,
|
1064
|
+
) -> set[int] | list[Element]:
|
958
1065
|
"""Get elements that this element iteration depends on."""
|
959
1066
|
# TODO: this will be used in viz.
|
960
|
-
EAR_IDs = self.get_EAR_dependencies(
|
961
|
-
out =
|
1067
|
+
EAR_IDs = self.get_EAR_dependencies()
|
1068
|
+
out = set(self.workflow.get_element_IDs_from_EAR_IDs(EAR_IDs))
|
962
1069
|
if as_objects:
|
963
|
-
|
1070
|
+
return self.workflow.get_elements_from_IDs(sorted(out))
|
964
1071
|
return out
|
965
1072
|
|
966
|
-
def get_input_dependencies(self) ->
|
1073
|
+
def get_input_dependencies(self) -> Mapping[str, ParamSource]:
|
967
1074
|
"""Get locally defined inputs/sequences/defaults from other tasks that this
|
968
1075
|
element iteration depends on."""
|
969
|
-
out = {}
|
1076
|
+
out: dict[str, ParamSource] = {}
|
970
1077
|
for k, v in self.get_parameter_sources().items():
|
971
|
-
if
|
972
|
-
v = [v]
|
973
|
-
for v_i in v:
|
1078
|
+
for v_i in v if isinstance(v, list) else [v]:
|
974
1079
|
if (
|
975
1080
|
v_i["type"] in ["local_input", "default_input"]
|
976
1081
|
and v_i["task_insert_ID"] != self.task.insert_ID
|
977
1082
|
):
|
978
1083
|
out[k] = v_i
|
979
|
-
|
980
1084
|
return out
|
981
1085
|
|
1086
|
+
@overload
|
1087
|
+
def get_task_dependencies(self, as_objects: Literal[False] = False) -> set[int]:
|
1088
|
+
...
|
1089
|
+
|
1090
|
+
@overload
|
1091
|
+
def get_task_dependencies(self, as_objects: Literal[True]) -> list[WorkflowTask]:
|
1092
|
+
...
|
1093
|
+
|
982
1094
|
def get_task_dependencies(
|
983
1095
|
self, as_objects: bool = False
|
984
|
-
) ->
|
1096
|
+
) -> set[int] | list[WorkflowTask]:
|
985
1097
|
"""Get tasks (insert ID or WorkflowTask objects) that this element iteration
|
986
1098
|
depends on.
|
987
1099
|
|
988
1100
|
Dependencies may come from either elements from upstream tasks, or from locally
|
989
1101
|
defined inputs/sequences/defaults from upstream tasks."""
|
990
1102
|
|
991
|
-
out =
|
992
|
-
self.get_element_dependencies(
|
1103
|
+
out = set(
|
1104
|
+
self.workflow.get_task_IDs_from_element_IDs(self.get_element_dependencies())
|
993
1105
|
)
|
994
|
-
for
|
995
|
-
out.
|
996
|
-
|
997
|
-
out = sorted(set(out))
|
1106
|
+
for p_src in self.get_input_dependencies().values():
|
1107
|
+
out.add(p_src["task_insert_ID"])
|
998
1108
|
|
999
1109
|
if as_objects:
|
1000
|
-
|
1001
|
-
|
1110
|
+
return [self.workflow.tasks.get(insert_ID=id_) for id_ in sorted(out)]
|
1002
1111
|
return out
|
1003
1112
|
|
1113
|
+
@property
|
1114
|
+
def __elements(self) -> Iterator[Element]:
|
1115
|
+
"""
|
1116
|
+
This iteration's element and its downstream elements.
|
1117
|
+
"""
|
1118
|
+
for task in self.workflow.tasks[self.task.index :]:
|
1119
|
+
yield from task.elements[:]
|
1120
|
+
|
1121
|
+
@property
|
1122
|
+
def __iterations(self) -> Iterator[ElementIteration]:
|
1123
|
+
"""
|
1124
|
+
This iteration and its downstream iterations.
|
1125
|
+
"""
|
1126
|
+
for elem in self.__elements:
|
1127
|
+
yield from elem.iterations
|
1128
|
+
|
1129
|
+
@overload
|
1130
|
+
def get_dependent_EARs(self, as_objects: Literal[False] = False) -> set[int]:
|
1131
|
+
...
|
1132
|
+
|
1133
|
+
@overload
|
1134
|
+
def get_dependent_EARs(self, as_objects: Literal[True]) -> list[ElementActionRun]:
|
1135
|
+
...
|
1136
|
+
|
1004
1137
|
@TimeIt.decorator
|
1005
1138
|
     def get_dependent_EARs(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementActionRun]:
         """Get EARs of downstream iterations and tasks that depend on this element
         iteration."""
         # TODO: test this includes EARs of downstream iterations of this iteration's element
-        deps =
-        for
-
-
-
-
-
-
-            for dep_EAR_i in run.get_EAR_dependencies(as_objects=True):
-                # does dep_EAR_i belong to self?
-                if dep_EAR_i.id_ in self.EAR_IDs_flat and run.id_ not in deps:
-                    deps.append(run.id_)
-        deps = sorted(deps)
+        deps: set[int] = set()
+        for iter_ in self.__iterations:
+            if iter_.id_ == self.id_:
+                # don't include EARs of this iteration
+                continue
+            for run in iter_.action_runs:
+                if run.get_EAR_dependencies().intersection(self.EAR_IDs_flat):
+                    deps.add(run.id_)
         if as_objects:
-
-
+            return self.workflow.get_EARs_from_IDs(sorted(deps))
         return deps

+    @overload
+    def get_dependent_element_iterations(
+        self, as_objects: Literal[True]
+    ) -> list[ElementIteration]:
+        ...
+
+    @overload
+    def get_dependent_element_iterations(
+        self, as_objects: Literal[False] = False
+    ) -> set[int]:
+        ...
+
     @TimeIt.decorator
     def get_dependent_element_iterations(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementIteration]:
         """Get elements iterations of downstream iterations and tasks that depend on this
         element iteration."""
         # TODO: test this includes downstream iterations of this iteration's element?
-        deps =
-        for
-
-
-
-
-            for dep_iter_i in iter_i.get_element_iteration_dependencies(
-                as_objects=True
-            ):
-                if dep_iter_i.id_ == self.id_ and iter_i.id_ not in deps:
-                    deps.append(iter_i.id_)
-        deps = sorted(deps)
+        deps: set[int] = set()
+        for iter_i in self.__iterations:
+            if iter_i.id_ == self.id_:
+                continue
+            if self.id_ in iter_i.get_element_iteration_dependencies():
+                deps.add(iter_i.id_)
         if as_objects:
-
-
+            return self.workflow.get_element_iterations_from_IDs(sorted(deps))
         return deps

+    @overload
+    def get_dependent_elements(
+        self,
+        as_objects: Literal[True],
+    ) -> list[Element]:
+        ...
+
+    @overload
+    def get_dependent_elements(
+        self,
+        as_objects: Literal[False] = False,
+    ) -> set[int]:
+        ...
+
     @TimeIt.decorator
     def get_dependent_elements(
         self,
         as_objects: bool = False,
-    ) ->
+    ) -> set[int] | list[Element]:
         """Get elements of downstream tasks that depend on this element iteration."""
-        deps =
+        deps: set[int] = set()
         for task in self.task.downstream_tasks:
             for element in task.elements[:]:
-
-
-
-
-
-                        deps.append(element.id_)
-
-        deps = sorted(deps)
-        if as_objects:
-            deps = self.workflow.get_elements_from_IDs(deps)
+                if any(
+                    self.id_ in iter_i.get_element_iteration_dependencies()
+                    for iter_i in element.iterations
+                ):
+                    deps.add(element.id_)

+        if as_objects:
+            return self.workflow.get_elements_from_IDs(sorted(deps))
         return deps

+    @overload
+    def get_dependent_tasks(
+        self,
+        as_objects: Literal[True],
+    ) -> list[WorkflowTask]:
+        ...
+
+    @overload
+    def get_dependent_tasks(
+        self,
+        as_objects: Literal[False] = False,
+    ) -> set[int]:
+        ...
+
     def get_dependent_tasks(
         self,
         as_objects: bool = False,
-    ) ->
+    ) -> set[int] | list[WorkflowTask]:
         """Get downstream tasks that depend on this element iteration."""
-        deps =
+        deps: set[int] = set()
         for task in self.task.downstream_tasks:
-
-
-
-
-
-
-                        deps.append(task.insert_ID)
-        deps = sorted(deps)
+            if any(
+                self.id_ in iter_i.get_element_iteration_dependencies()
+                for element in task.elements[:]
+                for iter_i in element.iterations
+            ):
+                deps.add(task.insert_ID)
         if as_objects:
-
-
+            return [self.workflow.tasks.get(insert_ID=id_) for id_ in sorted(deps)]
         return deps

-    def get_template_resources(self) ->
+    def get_template_resources(self) -> Mapping[str, Any]:
         """Get template-level resources."""
-
-        for res_i in
-            out[res_i.scope.to_string()] = res_i._get_value()
-        return out
+        res = self.workflow.template._resources
+        return {res_i.normalised_resources_path: res_i._get_value() for res_i in res}

     @TimeIt.decorator
-    def get_resources(
+    def get_resources(
+        self, action: Action, set_defaults: bool = False
+    ) -> Mapping[str, Any]:
         """Resolve specific resources for the specified action of this iteration,
         considering all applicable scopes.

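Note: the dependency getters in the hunk above switch from list accumulation with manual de-duplication to `set`-based collection (sorted only when results are materialised), and they gain paired `@overload` declarations keyed on `Literal` values of `as_objects` so callers get a precise return type. The following is a minimal, self-contained sketch of that pattern with illustrative names; it is not hpcflow code.

```python
# Sketch of the overload + set-accumulation pattern: a type checker narrows the
# return type from the literal value of ``as_objects``; IDs are de-duplicated
# in a set and sorted only when converting to objects.
from __future__ import annotations

from typing import Literal, overload


class Node:
    def __init__(self, id_: int, dep_ids: set[int], registry: dict[int, "Node"]):
        self.id_ = id_
        self.dep_ids = dep_ids
        self.registry = registry
        registry[id_] = self

    @overload
    def get_dependents(self, as_objects: Literal[True]) -> list["Node"]: ...

    @overload
    def get_dependents(self, as_objects: Literal[False] = False) -> set[int]: ...

    def get_dependents(self, as_objects: bool = False) -> set[int] | list["Node"]:
        deps: set[int] = set()  # the set removes duplicates as we go
        for other in self.registry.values():
            if other.id_ != self.id_ and self.id_ in other.dep_ids:
                deps.add(other.id_)
        if as_objects:
            # sort only when materialising objects, matching the diff above
            return [self.registry[i] for i in sorted(deps)]
        return deps


registry: dict[int, Node] = {}
a = Node(1, set(), registry)
b = Node(2, {1}, registry)
print(a.get_dependents())                 # {2}
print(a.get_dependents(as_objects=True))  # list containing the node with id 2
```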
@@ -1121,60 +1275,62 @@ class ElementIteration:
         # question is perhaps "what would the resources be if this action were to become
         # an EAR?" which would then allow us to test a resources-based action rule.

-
+        # FIXME: Use a TypedDict?
+        resource_specs: dict[str, dict[str, dict[str, Any]]] = copy.deepcopy(
+            self.get("resources")
+        )

         env_spec = action.get_environment_spec()
-        env_name = env_spec["name"]
+        env_name: str = env_spec["name"]

         # set default env specifiers, if none set:
-        if "
-
-        if "environments" not in resource_specs["any"]:
-            resource_specs["any"]["environments"] = {env_name: copy.deepcopy(env_spec)}
+        if "environments" not in (any_specs := resource_specs.setdefault("any", {})):
+            any_specs["environments"] = {env_name: copy.deepcopy(env_spec)}

-        for
+        for dat in resource_specs.values():
             if "environments" in dat:
                 # keep only relevant user-provided environment specifiers:
-
+                dat["environments"] = {
                     k: v for k, v in dat["environments"].items() if k == env_name
                 }
                 # merge user-provided specifiers into action specifiers:
-
-
-
-                }
+                dat["environments"].setdefault(env_name, {}).update(
+                    copy.deepcopy(env_spec)
+                )

-        resources = {}
-        for scope in action.
-            # loop
-
-
-                resources.update({k: v for k, v in scope_res.items() if v is not None})
+        resources: dict[str, Any] = {}
+        for scope in action._get_possible_scopes_reversed():
+            # loop from least-specific to most so higher-specificity scopes take precedence:
+            if scope_res := resource_specs.get(scope.to_string()):
+                resources.update((k, v) for k, v in scope_res.items() if v is not None)

         if set_defaults:
             # used in e.g. `Rule.test` if testing resource rules on element iterations:
-
-
-
-            resources["shell"] = self.app.ElementResources.get_default_shell()
+            ER = self._app.ElementResources
+            resources.setdefault("os_name", ER.get_default_os_name())
+            resources.setdefault("shell", ER.get_default_shell())
             if "scheduler" not in resources:
-                resources["scheduler"] =
+                resources["scheduler"] = ER.get_default_scheduler(
                     resources["os_name"], resources["shell"]
                 )

+        # unset inapplicable items:
+        if "combine_scripts" in resources and not action.script_is_python_snippet:
+            del resources["combine_scripts"]
+
         return resources

     def get_resources_obj(
-        self, action:
-    ) ->
+        self, action: Action, set_defaults: bool = False
+    ) -> ElementResources:
         """
         Get the resources for an action (see :py:meth:`get_resources`)
         as a searchable model.
         """
-        return self.
+        return self._app.ElementResources(**self.get_resources(action, set_defaults))


-class Element:
+class Element(AppAware):
     """
     A basic component of a workflow. Elements are enactments of tasks.

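Note: the `get_resources` hunk above relies on `dict.setdefault` plus assignment expressions to seed and then merge scoped resource specifications, iterating scopes from least- to most-specific so later updates win. A simplified, self-contained sketch of that merge idiom follows; the helper and argument names are illustrative only and are not hpcflow's API.

```python
# Simplified sketch of the scope-merging idiom shown above, using plain dicts.
import copy
from typing import Any


def merge_resources(
    resource_specs: dict[str, dict[str, Any]],
    scopes_least_to_most_specific: list[str],
    env_spec: dict[str, Any],
) -> dict[str, Any]:
    env_name = env_spec["name"]

    # ensure an "any" scope exists and seed it with the action's environment spec:
    if "environments" not in (any_specs := resource_specs.setdefault("any", {})):
        any_specs["environments"] = {env_name: copy.deepcopy(env_spec)}

    resources: dict[str, Any] = {}
    # iterate from least- to most-specific so more specific scopes take precedence:
    for scope in scopes_least_to_most_specific:
        if scope_res := resource_specs.get(scope):
            resources.update((k, v) for k, v in scope_res.items() if v is not None)
    return resources


specs = {"any": {"num_cores": 1}, "main": {"num_cores": 4, "memory": None}}
print(merge_resources(specs, ["any", "main"], {"name": "python_env"}))
# {'num_cores': 4, 'environments': {'python_env': {'name': 'python_env'}}}
```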
@@ -1200,8 +1356,6 @@ class Element:
         Data for creating iteration objects.
     """

-    _app_attr = "app"
-
     # TODO: use slots
     # TODO:
     # - add `iterations` property which returns `ElementIteration`
@@ -1211,13 +1365,13 @@ class Element:
         self,
         id_: int,
         is_pending: bool,
-        task:
+        task: WorkflowTask,
         index: int,
         es_idx: int,
-        seq_idx:
-        src_idx:
-        iteration_IDs:
-        iterations:
+        seq_idx: Mapping[str, int],
+        src_idx: Mapping[str, int],
+        iteration_IDs: list[int],
+        iterations: list[dict[str, Any]],
     ) -> None:
         self._id = id_
         self._is_pending = is_pending
@@ -1231,7 +1385,7 @@ class Element:
         self._iterations = iterations

         # assigned on first access:
-        self._iteration_objs = None
+        self._iteration_objs: list[ElementIteration] | None = None

     def __repr__(self):
         return (
@@ -1255,7 +1409,7 @@ class Element:
         return self._is_pending

     @property
-    def task(self) ->
+    def task(self) -> WorkflowTask:
         """
         The task this is part of the enactment of.
         """
@@ -1278,45 +1432,45 @@ class Element:
         return self._es_idx

     @property
-    def element_set(self):
+    def element_set(self) -> ElementSet:
         """
         The element set containing this element.
         """
         return self.task.template.element_sets[self.element_set_idx]

     @property
-    def sequence_idx(self) ->
+    def sequence_idx(self) -> Mapping[str, int]:
         """
         The sequence index IDs.
         """
         return self._seq_idx

     @property
-    def input_source_idx(self) ->
+    def input_source_idx(self) -> Mapping[str, int]:
         """
         The input source indices.
         """
         return self._src_idx

     @property
-    def input_sources(self) ->
+    def input_sources(self) -> Mapping[str, InputSource]:
         """
         The sources of the inputs to this element.
         """
         return {
-            k: self.element_set.input_sources[k.
+            k: self.element_set.input_sources[k.removeprefix("inputs.")][v]
             for k, v in self.input_source_idx.items()
         }

     @property
-    def workflow(self) ->
+    def workflow(self) -> Workflow:
         """
         The workflow containing this element.
         """
         return self.task.workflow

     @property
-    def iteration_IDs(self) ->
+    def iteration_IDs(self) -> Sequence[int]:
         """
         The IDs of the iterations of this element.
         """
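Note: the `input_sources` property above now strips the parameter prefix with `str.removeprefix` (available from Python 3.9), which only removes the prefix when it is actually present. A tiny illustrative example:

```python
# str.removeprefix strips the prefix only when present, so it is safer than
# slicing by a hard-coded length.
key = "inputs.temperature"
print(key.removeprefix("inputs."))            # 'temperature'
print("temperature".removeprefix("inputs."))  # unchanged: 'temperature'
```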
@@ -1324,14 +1478,14 @@ class Element:

     @property
     @TimeIt.decorator
-    def iterations(self) ->
+    def iterations(self) -> Sequence[ElementIteration]:
         """
         The iterations of this element.
         """
         # TODO: fix this
         if self._iteration_objs is None:
             self._iteration_objs = [
-                self.
+                self._app.ElementIteration(
                     element=self,
                     index=idx,
                     **{k: v for k, v in iter_i.items() if k != "element_ID"},
@@ -1341,95 +1495,102 @@ class Element:
         return self._iteration_objs

     @property
-    def dir_name(self):
+    def dir_name(self) -> str:
         """
         The name of the directory for containing temporary files for this element.
         """
         return f"e_{self.index}"

     @property
-    def latest_iteration(self):
+    def latest_iteration(self) -> ElementIteration:
         """
         The most recent iteration of this element.
         """
         return self.iterations[-1]

     @property
-    def
+    def latest_iteration_non_skipped(self):
+        """Get the latest iteration that is not loop-skipped."""
+        for iter_i in self.iterations[::-1]:
+            if not iter_i.loop_skipped:
+                return iter_i
+
+    @property
+    def inputs(self) -> ElementInputs:
         """
-        The inputs to this element (
+        The inputs to this element's most recent iteration (that was not skipped due to
+        loop termination).
         """
-        return self.
+        return self.latest_iteration_non_skipped.inputs

     @property
-    def outputs(self) ->
+    def outputs(self) -> ElementOutputs:
         """
-        The outputs from this element (
+        The outputs from this element's most recent iteration (that was not skipped due to
+        loop termination).
         """
-        return self.
+        return self.latest_iteration_non_skipped.outputs

     @property
-    def input_files(self) ->
+    def input_files(self) -> ElementInputFiles:
         """
-        The input files to this element (
+        The input files to this element's most recent iteration (that was not skipped due
+        to loop termination).
         """
-        return self.
+        return self.latest_iteration_non_skipped.input_files

     @property
-    def output_files(self) ->
+    def output_files(self) -> ElementOutputFiles:
         """
-        The output files from this element
+        The output files from this element's most recent iteration (that was not skipped
+        due to loop termination).
         """
-        return self.
+        return self.latest_iteration_non_skipped.output_files

     @property
-    def schema_parameters(self) ->
+    def schema_parameters(self) -> Sequence[str]:
         """
-        The schema-defined parameters to this element
+        The schema-defined parameters to this element's most recent iteration (that was
+        not skipped due to loop termination).
         """
-        return self.
+        return self.latest_iteration_non_skipped.schema_parameters

     @property
-    def actions(self) ->
+    def actions(self) -> Mapping[int, ElementAction]:
         """
-        The actions of this element (
+        The actions of this element's most recent iteration (that was not skipped due to
+        loop termination).
         """
-        return self.
+        return self.latest_iteration_non_skipped.actions

     @property
-    def action_runs(self) ->
+    def action_runs(self) -> Sequence[ElementActionRun]:
         """
         A list of element action runs from the latest iteration, where only the
         final run is taken for each element action.
         """
-        return self.
-
-    def init_loop_index(self, loop_name: str):
-        """
-        Initialise the loop index if necessary.
-        """
-        pass
+        return self.latest_iteration_non_skipped.action_runs

-    def to_element_set_data(self):
+    def to_element_set_data(self) -> tuple[list[InputValue], list[ResourceSpec]]:
         """Generate lists of workflow-bound InputValues and ResourceList."""
-        inputs = []
-        resources = []
+        inputs: list[InputValue] = []
+        resources: list[ResourceSpec] = []
         for k, v in self.get_data_idx().items():
-
+            kind, parameter_or_scope, *path = k.split(".")

-            if
-                inp_val = self.
-                    parameter=
-                    path=
+            if kind == "inputs":
+                inp_val = self._app.InputValue(
+                    parameter=parameter_or_scope,
+                    path=cast("str", path) or None,  # FIXME: suspicious cast!
                     value=None,
                 )
                 inp_val._value_group_idx = v
                 inp_val._workflow = self.workflow
                 inputs.append(inp_val)

-            elif
-                scope = self.
-                res = self.
+            elif kind == "resources":
+                scope = self._app.ActionScope.from_json_like(parameter_or_scope)
+                res = self._app.ResourceSpec(scope=scope)
                 res._value_group_idx = v
                 res._workflow = self.workflow
                 resources.append(res)
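Note: the new `latest_iteration_non_skipped` property above scans the iterations in reverse and returns the first one that is not loop-skipped; several `Element` properties now delegate to it instead of to the plain latest iteration. A generic sketch of that lookup (illustrative names, not the hpcflow implementation):

```python
# Generic sketch of a "latest non-skipped" lookup: walk the sequence in
# reverse and return the first entry that is not flagged as skipped.
from dataclasses import dataclass


@dataclass
class Iteration:
    index: int
    loop_skipped: bool = False


def latest_non_skipped(iterations: list[Iteration]) -> Iteration | None:
    for iter_i in reversed(iterations):
        if not iter_i.loop_skipped:
            return iter_i
    return None  # every iteration was loop-skipped


iters = [Iteration(0), Iteration(1), Iteration(2, loop_skipped=True)]
print(latest_non_skipped(iters).index)  # 1
```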
@@ -1440,42 +1601,74 @@ class Element:
         """
         Get the value of a sequence that applies.
         """
-
-        if not seq:
+
+        if not (seq := self.element_set.get_sequence_from_path(sequence_path)):
             raise ValueError(
                 f"No sequence with path {sequence_path!r} in this element's originating "
                 f"element set."
             )
-
+        if (values := seq.values) is None:
+            raise ValueError(
+                f"Sequence with path {sequence_path!r} has no defined values."
+            )
+        return values[self.sequence_idx[sequence_path]]

     def get_data_idx(
         self,
-        path: str = None,
-        action_idx: int = None,
+        path: str | None = None,
+        action_idx: int | None = None,
         run_idx: int = -1,
-    ) ->
-        """Get the data index of the most recent element iteration
+    ) -> DataIndex:
+        """Get the data index of the most recent element iteration that
+        is not loop-skipped.

         Parameters
         ----------
         action_idx
             The index of the action within the schema.
         """
-        return self.
+        return self.latest_iteration_non_skipped.get_data_idx(
             path=path,
             action_idx=action_idx,
             run_idx=run_idx,
         )

+    @overload
+    def get_parameter_sources(
+        self,
+        path: str | None = None,
+        *,
+        action_idx: int | None = None,
+        run_idx: int = -1,
+        typ: str | None = None,
+        as_strings: Literal[False] = False,
+        use_task_index: bool = False,
+    ) -> Mapping[str, ParamSource | list[ParamSource]]:
+        ...
+
+    @overload
     def get_parameter_sources(
         self,
-        path: str = None,
-
+        path: str | None = None,
+        *,
+        action_idx: int | None = None,
         run_idx: int = -1,
-        typ: str = None,
+        typ: str | None = None,
+        as_strings: Literal[True],
+        use_task_index: bool = False,
+    ) -> Mapping[str, str]:
+        ...
+
+    def get_parameter_sources(
+        self,
+        path: str | None = None,
+        *,
+        action_idx: int | None = None,
+        run_idx: int = -1,
+        typ: str | None = None,
         as_strings: bool = False,
         use_task_index: bool = False,
-    ) ->
+    ) -> Mapping[str, str] | Mapping[str, ParamSource | list[ParamSource]]:
         """ "Get the parameter sources of the most recent element iteration.

         Parameters
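Note: the sequence-value lookup above uses assignment expressions (the walrus operator) to bind and test each lookup in one step, raising a descriptive error as soon as something is missing. A self-contained sketch of that guard pattern, with illustrative stand-in classes rather than hpcflow's:

```python
# Sketch of the walrus-guard pattern: bind the looked-up object while testing
# it, and fail with a specific message at the first missing piece.
from dataclasses import dataclass


@dataclass
class Sequence:
    path: str
    values: list[int] | None = None


def sequence_value(sequences: dict[str, Sequence], path: str, idx: int) -> int:
    if not (seq := sequences.get(path)):
        raise ValueError(f"No sequence with path {path!r} in this element set.")
    if (values := seq.values) is None:
        raise ValueError(f"Sequence with path {path!r} has no defined values.")
    return values[idx]


seqs = {"inputs.p1": Sequence("inputs.p1", [10, 20, 30])}
print(sequence_value(seqs, "inputs.p1", 1))  # 20
```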
@@ -1484,26 +1677,35 @@ class Element:
             If True, use the task index within the workflow, rather than the task insert
             ID.
         """
+        if as_strings:
+            return self.latest_iteration.get_parameter_sources(
+                path=path,
+                action_idx=action_idx,
+                run_idx=run_idx,
+                typ=typ,
+                as_strings=True,
+                use_task_index=use_task_index,
+            )
         return self.latest_iteration.get_parameter_sources(
             path=path,
             action_idx=action_idx,
             run_idx=run_idx,
             typ=typ,
-            as_strings=as_strings,
             use_task_index=use_task_index,
         )

     def get(
         self,
-        path: str = None,
-        action_idx: int = None,
+        path: str | None = None,
+        action_idx: int | None = None,
         run_idx: int = -1,
         default: Any = None,
         raise_on_missing: bool = False,
         raise_on_unset: bool = False,
     ) -> Any:
-        """Get element data of the most recent iteration
-
+        """Get element data of the most recent iteration that is not
+        loop-skipped."""
+        return self.latest_iteration_non_skipped.get(
             path=path,
             action_idx=action_idx,
             run_idx=run_idx,
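Note: the signature changes above replace implicit-Optional defaults such as `path: str = None` with an explicit `str | None`. The implicit form is deprecated under PEP 484 and rejected by strict type checkers, while the behaviour at runtime is unchanged. A minimal illustration:

```python
# Explicit Optional: the annotation now matches the default value.
def get(path: str | None = None, run_idx: int = -1) -> str:
    return f"{path or '<root>'}@{run_idx}"


print(get())              # '<root>@-1'
print(get("outputs.p2"))  # 'outputs.p2@-1'
```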
@@ -1512,71 +1714,160 @@ class Element:
             raise_on_unset=raise_on_unset,
         )

+    @overload
+    def get_EAR_dependencies(self, as_objects: Literal[True]) -> list[ElementActionRun]:
+        ...
+
+    @overload
+    def get_EAR_dependencies(self, as_objects: Literal[False] = False) -> set[int]:
+        ...
+
+    @TimeIt.decorator
     def get_EAR_dependencies(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementActionRun]:
         """Get EARs that the most recent iteration of this element depends on."""
-
+        if as_objects:
+            return self.latest_iteration.get_EAR_dependencies(as_objects=True)
+        return self.latest_iteration.get_EAR_dependencies()
+
+    @overload
+    def get_element_iteration_dependencies(
+        self, as_objects: Literal[True]
+    ) -> list[ElementIteration]:
+        ...
+
+    @overload
+    def get_element_iteration_dependencies(
+        self, as_objects: Literal[False] = False
+    ) -> set[int]:
+        ...

     def get_element_iteration_dependencies(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementIteration]:
         """Get element iterations that the most recent iteration of this element depends
         on."""
-
-
-
+        if as_objects:
+            return self.latest_iteration.get_element_iteration_dependencies(
+                as_objects=True
+            )
+        return self.latest_iteration.get_element_iteration_dependencies()
+
+    @overload
+    def get_element_dependencies(self, as_objects: Literal[True]) -> list[Element]:
+        ...
+
+    @overload
+    def get_element_dependencies(self, as_objects: Literal[False] = False) -> set[int]:
+        ...

     def get_element_dependencies(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[Element]:
         """Get elements that the most recent iteration of this element depends on."""
-
+        if as_objects:
+            return self.latest_iteration.get_element_dependencies(as_objects=True)
+        return self.latest_iteration.get_element_dependencies()

-    def get_input_dependencies(self) ->
+    def get_input_dependencies(self) -> Mapping[str, ParamSource]:
         """Get locally defined inputs/sequences/defaults from other tasks that this
         the most recent iteration of this element depends on."""
         return self.latest_iteration.get_input_dependencies()

+    @overload
+    def get_task_dependencies(self, as_objects: Literal[True]) -> list[WorkflowTask]:
+        ...
+
+    @overload
+    def get_task_dependencies(self, as_objects: Literal[False] = False) -> set[int]:
+        ...
+
     def get_task_dependencies(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[WorkflowTask]:
         """Get tasks (insert ID or WorkflowTask objects) that the most recent iteration of
         this element depends on.

         Dependencies may come from either elements from upstream tasks, or from locally
         defined inputs/sequences/defaults from upstream tasks."""
-
+        if as_objects:
+            return self.latest_iteration.get_task_dependencies(as_objects=True)
+        return self.latest_iteration.get_task_dependencies()
+
+    @overload
+    def get_dependent_EARs(self, as_objects: Literal[True]) -> list[ElementActionRun]:
+        ...
+
+    @overload
+    def get_dependent_EARs(self, as_objects: Literal[False] = False) -> set[int]:
+        ...

     def get_dependent_EARs(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementActionRun]:
         """Get EARs that depend on the most recent iteration of this element."""
-
+        if as_objects:
+            return self.latest_iteration.get_dependent_EARs(as_objects=True)
+        return self.latest_iteration.get_dependent_EARs()
+
+    @overload
+    def get_dependent_element_iterations(
+        self, as_objects: Literal[True]
+    ) -> list[ElementIteration]:
+        ...
+
+    @overload
+    def get_dependent_element_iterations(
+        self, as_objects: Literal[False] = False
+    ) -> set[int]:
+        ...

     def get_dependent_element_iterations(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[ElementIteration]:
         """Get element iterations that depend on the most recent iteration of this
         element."""
-
-            as_objects=
-        )
+        if as_objects:
+            return self.latest_iteration.get_dependent_element_iterations(as_objects=True)
+        return self.latest_iteration.get_dependent_element_iterations()
+
+    @overload
+    def get_dependent_elements(self, as_objects: Literal[True]) -> list[Element]:
+        ...
+
+    @overload
+    def get_dependent_elements(self, as_objects: Literal[False] = False) -> set[int]:
+        ...

     def get_dependent_elements(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[Element]:
         """Get elements that depend on the most recent iteration of this element."""
-
+        if as_objects:
+            return self.latest_iteration.get_dependent_elements(as_objects=True)
+        return self.latest_iteration.get_dependent_elements()
+
+    @overload
+    def get_dependent_tasks(self, as_objects: Literal[True]) -> list[WorkflowTask]:
+        ...
+
+    @overload
+    def get_dependent_tasks(self, as_objects: Literal[False] = False) -> set[int]:
+        ...

     def get_dependent_tasks(
         self, as_objects: bool = False
-    ) ->
+    ) -> set[int] | list[WorkflowTask]:
         """Get tasks that depend on the most recent iteration of this element."""
-
+        if as_objects:
+            return self.latest_iteration.get_dependent_tasks(as_objects=True)
+        return self.latest_iteration.get_dependent_tasks()

     @TimeIt.decorator
-    def get_dependent_elements_recursively(
+    def get_dependent_elements_recursively(
+        self, task_insert_ID: int | None = None
+    ) -> list[Element]:
         """Get downstream elements that depend on this element, including recursive
         dependencies.

@@ -1586,28 +1877,26 @@ class Element:

         Parameters
         ----------
-        task_insert_ID
+        task_insert_ID: int
             If specified, only return elements from this task.

         """

-        def get_deps(element):
-            deps = element.iterations[0].get_dependent_elements(
+        def get_deps(element: Element) -> set[int]:
+            deps = element.iterations[0].get_dependent_elements()
             deps_objs = self.workflow.get_elements_from_IDs(deps)
-            return
-                [dep_j for deps_i in deps_objs for dep_j in get_deps(deps_i)]
-            )
+            return deps.union(dep_j for deps_i in deps_objs for dep_j in get_deps(deps_i))

         all_deps = get_deps(self)
-
         if task_insert_ID is not None:
-
-
-
+            all_deps.intersection_update(
+                self.workflow.tasks.get(insert_ID=task_insert_ID).element_IDs
+            )
         return self.workflow.get_elements_from_IDs(sorted(all_deps))


-@dataclass
+@dataclass(repr=False, eq=False)
+@hydrate
 class ElementParameter:
     """
     A parameter to an :py:class:`.Element`.
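Note: the recursive gathering above unions each element's direct dependents with the dependents of those dependents, then applies the optional task filter once at the end via set intersection. A self-contained sketch of that structure over a plain dependency graph (illustrative names, acyclic graph assumed; not the hpcflow implementation):

```python
# Sketch of recursive dependent collection with a final set-intersection filter.
def dependents_recursive(
    graph: dict[int, set[int]],         # element ID -> IDs of direct dependents
    start: int,
    keep_only: set[int] | None = None,  # e.g. the element IDs of one task
) -> list[int]:
    def get_deps(el: int) -> set[int]:
        deps = set(graph.get(el, set()))
        return deps.union(d for dep in deps for d in get_deps(dep))

    all_deps = get_deps(start)
    if keep_only is not None:
        all_deps.intersection_update(keep_only)
    return sorted(all_deps)


graph = {1: {2, 3}, 2: {4}, 3: set(), 4: set()}
print(dependents_recursive(graph, 1))                 # [2, 3, 4]
print(dependents_recursive(graph, 1, keep_only={4}))  # [4]
```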
@@ -1624,19 +1913,22 @@ class ElementParameter:
        The element that this is a parameter of.
     """

-
+    # Intended to be subclassed, so public
+    #: Application context.
+    app: ClassVar[BaseApp]
+    _app_attr: ClassVar[str] = "app"

     #: The task that this is part of.
-    task:
+    task: WorkflowTask
     #: The path to this parameter.
     path: str
     #: The entity that owns this parameter.
-    parent:
+    parent: Element | ElementAction | ElementActionRun | ElementIteration
     #: The element that this is a parameter of.
-    element: Element
+    element: Element | ElementIteration

     @property
-    def data_idx(self):
+    def data_idx(self) -> DataIndex:
         """
         The data indices associated with this parameter.
         """
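Note: `ElementParameter` above now declares its application context with `ClassVar` annotations, which keeps those attributes at class level and out of the generated dataclass machinery. A brief sketch of that effect (toy names, not hpcflow's classes):

```python
# ClassVar fields are excluded from __init__ and from dataclass comparisons.
from dataclasses import dataclass, fields
from typing import ClassVar


@dataclass
class Parameter:
    registry_name: ClassVar[str] = "params"  # shared class-level attribute
    path: str                                # per-instance field


print([f.name for f in fields(Parameter("inputs.p1"))])  # ['path']
```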
@@ -1655,20 +1947,20 @@ class ElementParameter:
     def __eq__(self, __o: object) -> bool:
         if not isinstance(__o, self.__class__):
             return False
-
-        return True
+        return self.task == __o.task and self.path == __o.path

     @property
-    def data_idx_is_set(self):
+    def data_idx_is_set(self) -> Mapping[str, bool]:
         """
         The associated data indices for which this is set.
         """
         return {
-            k: self.task.workflow.is_parameter_set(
+            k: self.task.workflow.is_parameter_set(cast("int", v))
+            for k, v in self.data_idx.items()
         }

     @property
-    def is_set(self):
+    def is_set(self) -> bool:
         """
         Whether this parameter is set.
         """
@@ -1682,6 +1974,7 @@ class ElementParameter:


 @dataclass
+@hydrate
 class ElementFilter(JSONLike):
     """
     A filter for iterations.
@@ -1692,22 +1985,22 @@ class ElementFilter(JSONLike):
         The filtering rules to use.
     """

-    _child_objects
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
+        ChildObjectSpec(name="rules", is_multiple=True, class_name="Rule"),
+    )

     #: The filtering rules to use.
-    rules:
+    rules: list[Rule] = field(default_factory=list)

-    def filter(
-        self, element_iters: List[app.ElementIteration]
-    ) -> List[app.ElementIteration]:
+    def filter(self, element_iters: list[ElementIteration]) -> list[ElementIteration]:
         """
         Apply the filter rules to select a subsequence of iterations.
         """
-
-
-
-
-
+        return [
+            el_iter
+            for el_iter in element_iters
+            if all(rule_j.test(el_iter) for rule_j in self.rules)
+        ]


 @dataclass
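Note: `ElementFilter.filter` above keeps an iteration only when every rule accepts it, expressed as a single comprehension with `all(...)`. A minimal sketch of that shape, where `Rule` is a toy predicate wrapper rather than hpcflow's `Rule` class:

```python
# Keep an item only if all rules accept it.
from dataclasses import dataclass
from typing import Callable


@dataclass
class Rule:
    test: Callable[[dict], bool]


def filter_iterations(iterations: list[dict], rules: list[Rule]) -> list[dict]:
    return [it for it in iterations if all(rule.test(it) for rule in rules)]


iters = [{"inputs.p1": 1}, {"inputs.p1": 5}, {"inputs.p1": 9}]
rules = [Rule(lambda it: it["inputs.p1"] > 2), Rule(lambda it: it["inputs.p1"] < 8)]
print(filter_iterations(iters, rules))  # [{'inputs.p1': 5}]
```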
@@ -1728,9 +2021,9 @@ class ElementGroup(JSONLike):
     #: The name of the grouping rule.
     name: str
     #: A filtering rule to select which iterations to use in the group.
-    where:
+    where: ElementFilter | None = None
     #: If specified, the name of the property to group iterations by.
-    group_by_distinct:
+    group_by_distinct: ParameterPath | None = None

     def __post_init__(self):
         self.name = check_valid_py_identifier(self.name)
@@ -1752,4 +2045,4 @@ class ElementRepeats:
     #: The number of times to repeat.
     number: int
     #: A filtering rule for what to repeat.
-    where:
+    where: ElementFilter | None = None