hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a190__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__pyinstaller/hook-hpcflow.py +8 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/sdk/__init__.py +21 -15
- hpcflow/sdk/app.py +2133 -770
- hpcflow/sdk/cli.py +281 -250
- hpcflow/sdk/cli_common.py +6 -2
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +77 -42
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +578 -311
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +112 -85
- hpcflow/sdk/config/types.py +145 -0
- hpcflow/sdk/core/actions.py +1054 -994
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +81 -63
- hpcflow/sdk/core/command_files.py +275 -185
- hpcflow/sdk/core/commands.py +111 -107
- hpcflow/sdk/core/element.py +724 -503
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +398 -51
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +380 -334
- hpcflow/sdk/core/loop_cache.py +160 -43
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +728 -600
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +33 -22
- hpcflow/sdk/core/task.py +1546 -1325
- hpcflow/sdk/core/task_schema.py +240 -196
- hpcflow/sdk/core/test_utils.py +126 -88
- hpcflow/sdk/core/types.py +387 -0
- hpcflow/sdk/core/utils.py +410 -305
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +1192 -1028
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/demo/cli.py +46 -33
- hpcflow/sdk/helper/cli.py +18 -16
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +83 -59
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +988 -586
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +408 -153
- hpcflow/sdk/persistence/pending.py +158 -123
- hpcflow/sdk/persistence/store_resource.py +37 -22
- hpcflow/sdk/persistence/types.py +307 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +477 -420
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +444 -404
- hpcflow/sdk/submission/schedulers/__init__.py +133 -40
- hpcflow/sdk/submission/schedulers/direct.py +97 -71
- hpcflow/sdk/submission/schedulers/sge.py +132 -126
- hpcflow/sdk/submission/schedulers/slurm.py +263 -268
- hpcflow/sdk/submission/schedulers/utils.py +7 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +102 -29
- hpcflow/sdk/submission/shells/bash.py +72 -55
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +37 -29
- hpcflow/sdk/submission/submission.py +203 -257
- hpcflow/sdk/submission/types.py +143 -0
- hpcflow/sdk/typing.py +163 -12
- hpcflow/tests/conftest.py +8 -6
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_main_scripts.py +60 -30
- hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -4
- hpcflow/tests/unit/test_action.py +86 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +13 -6
- hpcflow/tests/unit/test_cli.py +1 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +20 -15
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +3 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +65 -58
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +16 -7
- hpcflow/tests/unit/test_persistence.py +48 -35
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +8 -3
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +3 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +39 -19
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/workflows/test_jobscript.py +2 -1
- hpcflow/tests/workflows/test_workflows.py +18 -13
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/METADATA +2 -1
- hpcflow_new2-0.2.0a190.dist-info/RECORD +165 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/parameters.py
CHANGED
@@ -3,48 +3,75 @@ Parameters represent information passed around within a workflow.
 """
 
 from __future__ import annotations
+from collections.abc import Sequence
 import copy
 from dataclasses import dataclass, field
 from datetime import timedelta
 import enum
 from pathlib import Path
 import re
-from typing import
+from typing import TypeVar, cast, TYPE_CHECKING
+from typing_extensions import override, TypeIs
 
 import numpy as np
-import
-
-from hpcflow.sdk import
-from hpcflow.sdk.core.
+from valida import Schema as ValidaSchema  # type: ignore
+
+from hpcflow.sdk.typing import hydrate
+from hpcflow.sdk.core.enums import (
+    InputSourceType,
+    ParallelMode,
+    ParameterPropagationMode,
+    TaskSourceType,
+)
 from hpcflow.sdk.core.errors import (
     MalformedParameterPathError,
     UnknownResourceSpecItemError,
     WorkflowParameterMissingError,
 )
 from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
-from hpcflow.sdk.core.parallel import ParallelMode
-from hpcflow.sdk.core.rule import Rule
 from hpcflow.sdk.core.utils import (
     check_valid_py_identifier,
     get_enum_by_name_or_val,
     linspace_rect,
     process_string_nodes,
     split_param_label,
+    timedelta_format,
 )
-
-
+
+if TYPE_CHECKING:
+    from collections.abc import Iterable, Iterator, Mapping
+    from typing import Any, ClassVar, Literal
+    from typing_extensions import Self, TypeAlias
+    from h5py import Group  # type: ignore
+    from ..app import BaseApp
+    from ..typing import ParamSource
+    from .actions import ActionScope
+    from .element import ElementFilter
+    from .object_list import ResourceList
+    from .rule import Rule
+    from .task import ElementSet, TaskSchema, TaskTemplate, WorkflowTask
+    from .types import (
+        Address,
+        Numeric,
+        LabelInfo,
+        LabellingDescriptor,
+        ResourcePersistingWorkflow,
+        RuleArgs,
+        SchemaInputKwargs,
+    )
+    from .workflow import Workflow, WorkflowTemplate
+    from .validation import Schema
 
 
-
-Numeric = Union[int, float, np.number]
+T = TypeVar("T")
 
 
-def _process_demo_data_strings(app, value):
-
-
-
-
-        repl=lambda x: str(app.get_demo_data_file_path(x
+def _process_demo_data_strings(app: BaseApp, value: T) -> T:
+    demo_pattern = re.compile(r"\<\<demo_data_file:(.*)\>\>")
+
+    def string_processor(str_in: str) -> str:
+        str_out = demo_pattern.sub(
+            repl=lambda x: str(app.get_demo_data_file_path(x[1])),
             string=str_in,
         )
         return str_out
@@ -52,6 +79,8 @@ def _process_demo_data_strings(app, value):
     return process_string_nodes(value, string_processor)
 
 
+@dataclass
+@hydrate
 class ParameterValue:
     """
     The value handler for a parameter.
@@ -59,58 +88,53 @@ class ParameterValue:
     Intended to be subclassed.
     """
 
-    _typ = None
-    _sub_parameters = {}
+    _typ: ClassVar[str | None] = None
+    _sub_parameters: ClassVar[dict[str, str]] = {}
 
-    def to_dict(self):
+    def to_dict(self) -> dict[str, Any]:
         """
         Serialise this parameter value as a dictionary.
         """
         if hasattr(self, "__dict__"):
-            return dict(self.__dict__)
+            return self._postprocess_to_dict(dict(self.__dict__))
         elif hasattr(self, "__slots__"):
-            return
+            return self._postprocess_to_dict(
+                {k: getattr(self, k) for k in self.__slots__}
+            )
+        else:
+            raise NotImplementedError
+
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        """Postprocess the results of :meth:`to_dict`."""
+        return d
 
-    def prepare_JSON_dump(self) ->
+    def prepare_JSON_dump(self) -> dict[str, Any]:
         """
         Prepare this parameter value for serialisation as JSON.
         """
         raise NotImplementedError
 
-    def dump_to_HDF5_group(self, group):
+    def dump_to_HDF5_group(self, group: Group):
         """
         Write this parameter value to an HDF5 group.
         """
         raise NotImplementedError
 
     @classmethod
-    def save_from_HDF5_group(cls, group, param_id: int, workflow):
+    def save_from_HDF5_group(cls, group: Group, param_id: int, workflow: Workflow):
         """
         Extract a parameter value from an HDF5 group.
         """
         raise NotImplementedError
 
     @classmethod
-    def save_from_JSON(cls, data, param_id: int, workflow):
+    def save_from_JSON(cls, data, param_id: int | list[int], workflow: Workflow):
         """
         Extract a parameter value from JSON data.
         """
         raise NotImplementedError
 
 
-class ParameterPropagationMode(enum.Enum):
-    """
-    How a parameter is propagated.
-    """
-
-    #: Parameter is propagated implicitly.
-    IMPLICIT = 0
-    #: Parameter is propagated explicitly.
-    EXPLICIT = 1
-    #: Parameter is never propagated.
-    NEVER = 2
-
-
 @dataclass
 class ParameterPath(JSONLike):
     """
@@ -119,14 +143,13 @@ class ParameterPath(JSONLike):
 
     # TODO: unused?
     #: The path to the parameter.
-    path: Sequence[
+    path: Sequence[str | int | float]
     #: The task in which to look up the parameter.
-    task:
-        Union[app.TaskTemplate, app.TaskSchema]
-    ] = None  # default is "current" task
+    task: TaskTemplate | TaskSchema | None = None  # default is "current" task
 
 
 @dataclass
+@hydrate
 class Parameter(JSONLike):
     """
     A general parameter to a workflow task.
@@ -150,15 +173,15 @@ class Parameter(JSONLike):
         Validation schema.
     """
 
-    _validation_schema = "parameters_spec_schema.yaml"
-    _child_objects = (
+    _validation_schema: ClassVar[str] = "parameters_spec_schema.yaml"
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="typ",
             json_like_name="type",
         ),
         ChildObjectSpec(
             name="_validation",
-            class_obj=
+            class_obj=ValidaSchema,
         ),
     )
 
@@ -168,10 +191,10 @@ class Parameter(JSONLike):
     #: Whether this parameter represents a file.
     is_file: bool = False
     #: Any parameters packed within this one.
-    sub_parameters:
-    _value_class:
-    _hash_value:
-    _validation:
+    sub_parameters: list[SubParameter] = field(default_factory=list)
+    _value_class: type[ParameterValue] | None = None
+    _hash_value: str | None = field(default=None, repr=False)
+    _validation: Schema | None = None
 
     def __repr__(self) -> str:
         is_file_str = ""
@@ -192,31 +215,42 @@ class Parameter(JSONLike):
             f")"
         )
 
-    def __post_init__(self):
+    def __post_init__(self) -> None:
         self.typ = check_valid_py_identifier(self.typ)
         self._set_value_class()
 
-    def _set_value_class(self):
+    def _set_value_class(self) -> None:
         # custom parameter classes must inherit from `ParameterValue` not the app
         # subclass:
         if self._value_class is None:
-
-
-
+            self._value_class = next(
+                (
+                    pv_class
+                    for pv_class in ParameterValue.__subclasses__()
+                    if pv_class._typ == self.typ
+                ),
+                None,
+            )
 
-    def
+    def __eq__(self, other: Any) -> bool:
+        return isinstance(other, self.__class__) and self.typ == other.typ
+
+    def __lt__(self, other: Parameter):
         return self.typ < other.typ
 
-    def __deepcopy__(self, memo):
+    def __deepcopy__(self, memo: dict[int, Any]):
         kwargs = self.to_dict()
         _validation = kwargs.pop("_validation")
         obj = self.__class__(**copy.deepcopy(kwargs, memo))
         obj._validation = _validation
         return obj
 
-
-
+    @override
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        dct = super()._postprocess_to_dict(d)
         del dct["_value_class"]
+        if dct.get("name", None) is None:
+            dct.pop("name", None)
         dct.pop("_task_schema", None)  # TODO: how do we have a _task_schema ref?
         return dct
 
@@ -227,6 +261,25 @@ class Parameter(JSONLike):
         """
         return self.typ.lower().replace("_", "-")
 
+    def _instantiate_value(self, source: ParamSource, val: dict) -> Any:
+        """
+        Convert the serialized form of this parameter to its "real" form,
+        if that is valid to do at all.
+        """
+        if self._value_class is None:
+            return val
+        if (method_name := source.get("value_class_method")) is not None:
+            method = getattr(self._value_class, method_name)
+        else:
+            method = self._value_class
+        return method(**val)
+
+    def _force_value_class(self) -> type[ParameterValue] | None:
+        if (param_cls := self._value_class) is None:
+            self._set_value_class()
+            param_cls = self._value_class
+        return param_cls
+
 
 @dataclass
 class SubParameter:
@@ -237,10 +290,11 @@ class SubParameter:
     #: How to find this within the containing paraneter.
     address: Address
     #: The containing main parameter.
-    parameter:
+    parameter: Parameter
 
 
 @dataclass
+@hydrate
 class SchemaParameter(JSONLike):
     """
     A parameter bound in a schema.
@@ -251,9 +305,7 @@ class SchemaParameter(JSONLike):
         The parameter.
     """
 
-
-
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="parameter",
             class_name="Parameter",
@@ -262,22 +314,15 @@ class SchemaParameter(JSONLike):
         ),
     )
 
-    def __post_init__(self):
+    def __post_init__(self) -> None:
         self._validate()
 
-    def _validate(self):
+    def _validate(self) -> None:
         if isinstance(self.parameter, str):
-            self.parameter = self.
+            self.parameter: Parameter = self._app.Parameter(typ=self.parameter)
 
     @property
-    def
-        """
-        The name of the parameter.
-        """
-        return self.parameter.name
-
-    @property
-    def typ(self):
+    def typ(self) -> str:
         """
         The type code of the parameter.
         """
@@ -294,6 +339,7 @@ class NullDefault(enum.Enum):
     NULL = 0
 
 
+@hydrate
 class SchemaInput(SchemaParameter):
     """A Parameter as used within a particular schema, for which a default value may be
     applied.
@@ -332,9 +378,9 @@ class SchemaInput(SchemaParameter):
         does not exist.
     """
 
-    _task_schema = None  # assigned by parent TaskSchema
+    _task_schema: TaskSchema | None = None  # assigned by parent TaskSchema
 
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="parameter",
             class_name="Parameter",
@@ -345,12 +391,12 @@ class SchemaInput(SchemaParameter):
 
     def __init__(
         self,
-        parameter:
+        parameter: Parameter | str,
         multiple: bool = False,
-        labels:
-        default_value:
+        labels: dict[str, LabelInfo] | None = None,
+        default_value: InputValue | Any | NullDefault = NullDefault.NULL,
         propagation_mode: ParameterPropagationMode = ParameterPropagationMode.IMPLICIT,
-        group:
+        group: str | None = None,
     ):
         # TODO: can we define elements groups on local inputs as well, or should these be
         # just for elements from other tasks?
@@ -360,24 +406,26 @@ class SchemaInput(SchemaParameter):
 
         if isinstance(parameter, str):
             try:
-                parameter
+                #: The parameter (i.e. type) of this schema input.
+                self.parameter = self._app.parameters.get(parameter)
             except ValueError:
-                parameter = self.
+                self.parameter = self._app.Parameter(parameter)
+        else:
+            self.parameter = parameter
 
-        #: The parameter (i.e. type) of this schema input.
-        self.parameter = parameter
         #: Whether to expect more than of these parameters defined in the workflow.
         self.multiple = multiple
+
         #: Dict whose keys represent the string labels that distinguish multiple
         #: parameters if `multiple` is `True`.
-        self.labels
-
-        if self.labels is None:
+        self.labels: dict[str, LabelInfo]
+        if labels is None:
             if self.multiple:
                 self.labels = {"*": {}}
             else:
                 self.labels = {"": {}}
         else:
+            self.labels = labels
             if not self.multiple:
                 # check single-item:
                 if len(self.labels) > 1:
@@ -387,7 +435,7 @@ class SchemaInput(SchemaParameter):
                         f"`labels` is: {self.labels!r}."
                     )
 
-        labels_defaults = {}
+        labels_defaults: LabelInfo = {}
         if propagation_mode is not None:
             labels_defaults["propagation_mode"] = propagation_mode
         if group is not None:
@@ -397,14 +445,15 @@ class SchemaInput(SchemaParameter):
         for k, v in self.labels.items():
            labels_defaults_i = copy.deepcopy(labels_defaults)
            if default_value is not NullDefault.NULL:
-                if
-                    default_value =
+                if isinstance(default_value, InputValue):
+                    labels_defaults_i["default_value"] = default_value
+                else:
+                    labels_defaults_i["default_value"] = self._app.InputValue(
                        parameter=self.parameter,
                        value=default_value,
                        label=k,
                    )
-
-            label_i = {**labels_defaults_i, **v}
+            label_i: LabelInfo = {**labels_defaults_i, **v}
            if "propagation_mode" in label_i:
                label_i["propagation_mode"] = get_enum_by_name_or_val(
                    ParameterPropagationMode, label_i["propagation_mode"]
@@ -420,8 +469,8 @@ class SchemaInput(SchemaParameter):
         default_str = ""
         group_str = ""
         labels_str = ""
-        if not self.multiple:
-            label = next(iter(self.labels
+        if not self.multiple and self.labels:
+            label = next(iter(self.labels))  # the single key
 
             default_str = ""
             if "default_value" in self.labels[label]:
@@ -429,8 +478,7 @@ class SchemaInput(SchemaParameter):
                     f", default_value={self.labels[label]['default_value'].value!r}"
                 )
 
-            group
-            if group is not None:
+            if (group := self.labels[label].get("group")) is not None:
                 group_str = f", group={group!r}"
 
         else:
@@ -444,20 +492,20 @@ class SchemaInput(SchemaParameter):
             f")"
         )
 
-
-
+    @override
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        dct = super()._postprocess_to_dict(d)
+        v: dict[str, ParameterPropagationMode]
         for k, v in dct["labels"].items():
-            prop_mode
-            if prop_mode:
+            if (prop_mode := v.get("parameter_propagation_mode")) is not None:
                 dct["labels"][k]["parameter_propagation_mode"] = prop_mode.name
         return dct
 
-    def
-
-        for k, v in out["labels"].items():
+    def _postprocess_to_json(self, json_like):
+        for v in json_like["labels"].values():
             if "default_value" in v:
-
-        return
+                v["default_value_is_input_value"] = True
+        return json_like
 
     @classmethod
     def from_json_like(cls, json_like, shared_data=None):
@@ -473,81 +521,85 @@ class SchemaInput(SchemaParameter):
                 }
                 json_like["labels"][k][
                     "default_value"
-                ] = cls.
+                ] = cls._app.InputValue.from_json_like(
                     json_like=inp_val_kwargs,
                     shared_data=shared_data,
                 )
 
-
-        return obj
+        return super().from_json_like(json_like, shared_data)
 
-    def __deepcopy__(self, memo):
-        kwargs = {
-            "parameter": self.parameter,
+    def __deepcopy__(self, memo: dict[int, Any]):
+        kwargs: SchemaInputKwargs = {
+            "parameter": copy.deepcopy(self.parameter, memo),
             "multiple": self.multiple,
-            "labels": self.labels,
+            "labels": copy.deepcopy(self.labels, memo),
         }
-        obj = self.__class__(**
+        obj = self.__class__(**kwargs)
         obj._task_schema = self._task_schema
         return obj
 
     @property
-    def default_value(self):
+    def default_value(self) -> InputValue | Literal[NullDefault.NULL] | None:
         """
         The default value of the input.
         """
-        if
-            if "default_value" in
-                return
+        if single_data := self.single_labelled_data:
+            if "default_value" in single_data:
+                return single_data["default_value"]
             else:
                 return NullDefault.NULL
+        return None
 
     @property
-    def task_schema(self):
+    def task_schema(self) -> TaskSchema:
         """
         The schema containing this input.
         """
+        assert self._task_schema is not None
         return self._task_schema
 
     @property
-    def all_labelled_types(self):
+    def all_labelled_types(self) -> list[str]:
         """
         The types of the input labels.
         """
-        return
+        return [(f"{self.typ}[{i}]" if i else self.typ) for i in self.labels]
 
     @property
-    def single_label(self):
+    def single_label(self) -> str | None:
         """
         The label of this input, assuming it is not mulitple.
         """
         if not self.multiple:
             return next(iter(self.labels))
+        return None
 
     @property
-    def single_labelled_type(self):
+    def single_labelled_type(self) -> str | None:
         """
         The type code of this input, assuming it is not mulitple.
         """
         if not self.multiple:
             return next(iter(self.labelled_info()))["labelled_type"]
+        return None
 
     @property
-    def single_labelled_data(self):
+    def single_labelled_data(self) -> LabelInfo | None:
         """
         The value of this input, assuming it is not mulitple.
         """
-        if
-            return self.labels[
+        if (label := self.single_label) is not None:
+            return self.labels[label]
+        return None
 
-    def labelled_info(self):
+    def labelled_info(self) -> Iterator[LabellingDescriptor]:
         """
         Get descriptors for all the labels associated with this input.
         """
         for k, v in self.labels.items():
-            label = f"[{k}]" if k else
-            dct = {
-                "labelled_type":
+            label = f"{self.parameter.typ}[{k}]" if k else self.parameter.typ
+            dct: LabellingDescriptor = {
+                "labelled_type": label,
                 "propagation_mode": v["propagation_mode"],
                 "group": v.get("group"),
             }
@@ -555,18 +607,28 @@ class SchemaInput(SchemaParameter):
                 dct["default_value"] = v["default_value"]
             yield dct
 
-
+    @property
+    def _simple_labelled_info(self) -> Iterator[tuple[str, ParameterPropagationMode]]:
+        """
+        Cut-down version of :py:meth:`labelled_info` that has lower overheads.
+        """
+        for k, v in self.labels.items():
+            label = f"{self.parameter.typ}[{k}]" if k else self.parameter.typ
+            yield label, v["propagation_mode"]
+
+    def _validate(self) -> None:
         super()._validate()
         for k, v in self.labels.items():
             if "default_value" in v:
                 if not isinstance(v["default_value"], InputValue):
-                    def_val = self.
+                    def_val = self._app.InputValue(
                         parameter=self.parameter,
                         value=v["default_value"],
                         label=k,
                     )
-
-
+                    v["default_value"] = def_val
+                else:
+                    def_val = v["default_value"]
                 if def_val.parameter != self.parameter or def_val.label != k:
                     raise ValueError(
                         f"{self.__class__.__name__} `default_value` for label {k!r} must "
@@ -576,24 +638,36 @@ class SchemaInput(SchemaParameter):
                     )
 
     @property
-    def input_or_output(self):
+    def input_or_output(self) -> str:
         """
         Whether this is an input or output. Always ``input``.
         """
         return "input"
 
 
-@dataclass
+@dataclass(init=False)
+@hydrate
 class SchemaOutput(SchemaParameter):
     """A Parameter as outputted from particular task."""
 
     #: The basic parameter this supplies.
     parameter: Parameter
     #: How this output propagates.
-    propagation_mode: ParameterPropagationMode
+    propagation_mode: ParameterPropagationMode
+
+    def __init__(
+        self,
+        parameter: Parameter | str,
+        propagation_mode: ParameterPropagationMode = ParameterPropagationMode.IMPLICIT,
+    ):
+        if isinstance(parameter, str):
+            self.parameter: Parameter = self._app.Parameter(typ=parameter)
+        else:
+            self.parameter = parameter
+        self.propagation_mode = propagation_mode
 
     @property
-    def input_or_output(self):
+    def input_or_output(self) -> str:
         """
         Whether this is an input or output. Always ``output``.
         """
@@ -645,39 +719,43 @@ class ValueSequence(JSONLike):
     def __init__(
         self,
         path: str,
-        values:
-        nesting_order:
-        label:
-        value_class_method:
+        values: list[Any] | None,
+        nesting_order: int | float | None = None,
+        label: str | int | None = None,
+        value_class_method: str | None = None,
     ):
-
-        path, label = self._validate_parameter_path(path, label)
-
+        path_, label_ = self._validate_parameter_path(path, label)
         #: The path to this sequence.
-        self.path =
+        self.path = path_
         #: The label of this sequence.
-        self.label =
+        self.label = label_
         #: The nesting order for this sequence.
-        self.nesting_order = nesting_order
+        self.nesting_order = None if nesting_order is None else float(nesting_order)
         #: Name of a method used to generate sequence values.
         self.value_class_method = value_class_method
 
         if values is not None:
-            self._values
+            self._values: list[Any] | None = [
+                _process_demo_data_strings(self._app, i) for i in values
+            ]
+        else:
+            self._values = None
 
-        self._values_group_idx = None
-        self._values_are_objs
+        self._values_group_idx: list[int] | None = None
+        self._values_are_objs: list[
+            bool
+        ] | None = None  # assigned initially on `make_persistent`
 
-        self._workflow = None
-        self._element_set = None  # assigned by parent ElementSet
+        self._workflow: Workflow | None = None
+        self._element_set: ElementSet | None = None  # assigned by parent ElementSet
 
         # assigned if this is an "inputs" sequence in `WorkflowTask._add_element_set`:
-        self._parameter = None
+        self._parameter: Parameter | None = None
 
-        self._path_split = None  # assigned by property `path_split`
+        self._path_split: list[str] | None = None  # assigned by property `path_split`
 
-        self._values_method = None
-        self._values_method_args = None
+        self._values_method: str | None = None
+        self._values_method_args: dict | None = None
 
     def __repr__(self):
         label_str = ""
@@ -698,14 +776,12 @@ class ValueSequence(JSONLike):
             f")"
         )
 
-    def __eq__(self, other) -> bool:
+    def __eq__(self, other: Any) -> bool:
         if not isinstance(other, self.__class__):
             return False
-
-            return True
-        return False
+        return self.to_dict() == other.to_dict()
 
-    def __deepcopy__(self, memo):
+    def __deepcopy__(self, memo: dict[int, Any]):
         kwargs = self.to_dict()
         kwargs["values"] = kwargs.pop("_values")
 
@@ -735,16 +811,15 @@ class ValueSequence(JSONLike):
         json_like["path"] = path
         json_like["value_class_method"] = cls_method
 
-        val_key =
-        for i in json_like:
-            if "values" in i:
-                val_key = i
+        val_key = next((item for item in json_like if "values" in item), "")
         if "::" in val_key:
             # class method (e.g. `from_range`, `from_file` etc):
             _, method = val_key.split("::")
             _values_method_args = json_like.pop(val_key)
             _values_method = f"_values_{method}"
-            _values_method_args = _process_demo_data_strings(
+            _values_method_args = _process_demo_data_strings(
+                cls._app, _values_method_args
+            )
             json_like["values"] = getattr(cls, _values_method)(**_values_method_args)
 
         obj = super().from_json_like(json_like, shared_data)
@@ -755,14 +830,14 @@ class ValueSequence(JSONLike):
         return obj
 
     @property
-    def parameter(self):
+    def parameter(self) -> Parameter | None:
         """
         The parameter this sequence supplies.
         """
         return self._parameter
 
     @property
-    def path_split(self):
+    def path_split(self) -> Sequence[str]:
         """
         The components of ths path.
         """
@@ -771,52 +846,56 @@ class ValueSequence(JSONLike):
         return self._path_split
 
     @property
-    def path_type(self):
+    def path_type(self) -> str:
         """
         The type of path this is.
         """
         return self.path_split[0]
 
     @property
-    def input_type(self):
+    def input_type(self) -> str | None:
         """
         The type of input sequence this is, if it is one.
         """
         if self.path_type == "inputs":
             return self.path_split[1].replace(self._label_fmt, "")
+        return None
 
     @property
-    def input_path(self):
+    def input_path(self) -> str | None:
         """
         The path of the input sequence this is, if it is one.
         """
         if self.path_type == "inputs":
             return ".".join(self.path_split[2:])
+        return None
 
     @property
-    def resource_scope(self):
+    def resource_scope(self) -> str | None:
         """
         The scope of the resources this is, if it is one.
         """
         if self.path_type == "resources":
             return self.path_split[1]
+        return None
 
     @property
-    def is_sub_value(self):
+    def is_sub_value(self) -> bool:
         """True if the values are for a sub part of the parameter."""
-        return
+        return bool(self.input_path)
 
     @property
-    def _label_fmt(self):
+    def _label_fmt(self) -> str:
         return f"[{self.label}]" if self.label else ""
 
     @property
-    def labelled_type(self):
+    def labelled_type(self) -> str | None:
         """
         The labelled type of input sequence this is, if it is one.
         """
         if self.input_type:
             return f"{self.input_type}{self._label_fmt}"
+        return None
 
     @classmethod
     def _json_like_constructor(cls, json_like):
@@ -836,7 +915,9 @@ class ValueSequence(JSONLike):
         obj._values_method_args = _values_method_args
         return obj
 
-    def _validate_parameter_path(
+    def _validate_parameter_path(
+        self, path: str, label: str | int | None
+    ) -> tuple[str, str | int | None]:
         """Parse the supplied path and perform basic checks on it.
 
         This method also adds the specified `SchemaInput` label to the path and checks for
@@ -852,25 +933,24 @@ class ValueSequence(JSONLike):
         )
         path_l = path.lower()
         path_split = path_l.split(".")
-
-        if not path_split[0] in
+        ALLOWED_PATH_START = ("inputs", "resources", "environments", "env_preset")
+        if not path_split[0] in ALLOWED_PATH_START:
             raise MalformedParameterPathError(
                 f"`path` must start with one of: "
-                f'{", ".join(f"{
+                f'{", ".join(f"{pfx!r}" for pfx in ALLOWED_PATH_START)}, but given path '
                 f"is: {path!r}."
             )
 
         _, label_from_path = split_param_label(path_l)
 
         if path_split[0] == "inputs":
-            if label_arg:
-                if
+            if label_arg is not None and label_arg != "":
+                if label_from_path is None:
                     # add label to path without lower casing any parts:
                     path_split_orig = path.split(".")
                     path_split_orig[1] += f"[{label_arg}]"
                     path = ".".join(path_split_orig)
-
-                elif label_arg != label_from_path:
+                elif str(label_arg) != label_from_path:
                     raise ValueError(
                         f"{self.__class__.__name__} `label` argument is specified as "
                         f"{label_arg!r}, but a distinct label is implied by the sequence "
@@ -887,7 +967,7 @@ class ValueSequence(JSONLike):
                 f"`resource` sequences."
             )
             try:
-                self.
+                self._app.ActionScope.from_json_like(path_split[1])
             except Exception as err:
                 raise MalformedParameterPathError(
                     f"Cannot parse a resource action scope from the second component of the "
@@ -895,42 +975,42 @@ class ValueSequence(JSONLike):
                 ) from None
 
             if len(path_split) > 2:
-
-                allowed = ResourceSpec.ALLOWED_PARAMETERS
-                if path_split_2 not in allowed:
-                    allowed_keys_str = ", ".join(f'"{i}"' for i in allowed)
+                if path_split[2] not in ResourceSpec.ALLOWED_PARAMETERS:
                    raise UnknownResourceSpecItemError(
-                        f"Resource item name {
-                        f"resource item names are: {
+                        f"Resource item name {path_split[2]!r} is unknown. Allowed "
+                        f"resource item names are: {ResourceSpec._allowed_params_quoted()}."
                    )
+            label = ""
 
         elif path_split[0] == "environments":
             # rewrite as a resources path:
             path = f"resources.any.{path}"
-
-
-
-
+            label = str(label) if label is not None else ""
+        else:
+            pass
+            # note: `env_preset` paths also need to be transformed into `resources`
+            # paths, but we cannot do that until the sequence is part of a task, since
+            # the available environment presets are defined in the task schema.
 
         return path, label
 
-
-
+    @override
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        out = super()._postprocess_to_dict(d)
         del out["_parameter"]
         del out["_path_split"]
-
-        del out["_workflow"]
+        out.pop("_workflow", None)
         return out
 
     @property
-    def normalised_path(self):
+    def normalised_path(self) -> str:
         """
         The path to this sequence.
         """
         return self.path
 
     @property
-    def normalised_inputs_path(self):
+    def normalised_inputs_path(self) -> str | None:
         """
         The normalised path without the "inputs" prefix, if the sequence is an
         inputs sequence, else return None.
@@ -941,63 +1021,66 @@ class ValueSequence(JSONLike):
                 return f"{self.labelled_type}.{self.input_path}"
             else:
                 return self.labelled_type
+        return None
 
     def make_persistent(
-        self, workflow:
-    ) ->
+        self, workflow: Workflow, source: ParamSource
+    ) -> tuple[str, list[int], bool]:
         """Save value to a persistent workflow."""
 
         if self._values_group_idx is not None:
-
-            data_ref = self._values_group_idx
-            if not all(workflow.check_parameters_exist(data_ref)):
+            if not workflow.check_parameters_exist(self._values_group_idx):
                 raise RuntimeError(
                     f"{self.__class__.__name__} has a parameter group index "
-                    f"({
+                    f"({self._values_group_idx}), but does not exist in the workflow."
                 )
             # TODO: log if already persistent.
+            return self.normalised_path, self._values_group_idx, False
 
-
-
-
+        data_ref: list[int] = []
+        source = copy.deepcopy(source)
+        if self.value_class_method:
             source["value_class_method"] = self.value_class_method
-
-
-
-
-
-
-
-
-
-
-            is_new = True
-            self._values_group_idx = data_ref
-            self._workflow = workflow
-            self._values = None
-            self._values_are_objs = are_objs
+        are_objs: list[bool] = []
+        assert self._values is not None
+        for idx, item in enumerate(self._values):
+            # record if ParameterValue sub-classes are passed for values, which allows
+            # us to re-init the objects on access to `.value`:
+            are_objs.append(isinstance(item, ParameterValue))
+            source = copy.deepcopy(source)
+            source["sequence_idx"] = idx
+            pg_idx_i = workflow._add_parameter_data(item, source=source)
+            data_ref.append(pg_idx_i)
 
-
+        self._values_group_idx = data_ref
+        self._workflow = workflow
+        self._values = None
+        self._values_are_objs = are_objs
+        return self.normalised_path, data_ref, True
 
     @property
-    def workflow(self):
+    def workflow(self) -> Workflow | None:
         """
         The workflow containing this sequence.
         """
         if self._workflow:
             return self._workflow
         elif self._element_set:
-
+            if tmpl := self._element_set.task_template.workflow_template:
+                return tmpl.workflow
+        return None
 
     @property
-    def values(self):
+    def values(self) -> list[Any] | None:
         """
         The values in this sequence.
         """
         if self._values_group_idx is not None:
-            vals = []
+            vals: list[Any] = []
            for idx, pg_idx_i in enumerate(self._values_group_idx):
-
+                if not (w := self.workflow):
+                    continue
+                param_i = w.get_parameter(pg_idx_i)
                if param_i.data is not None:
                    val_i = param_i.data
                else:
@@ -1007,16 +1090,11 @@ class ValueSequence(JSONLike):
                     # yet been committed to disk:
                     if (
                         self.parameter
-                        and self.
+                        and self._values_are_objs
                         and self._values_are_objs[idx]
-                        and
+                        and isinstance(val_i, dict)
                     ):
-
-                        if method_name:
-                            method = getattr(self.parameter._value_class, method_name)
-                        else:
-                            method = self.parameter._value_class
-                        val_i = method(**val_i)
+                        val_i = self.parameter._instantiate_value(param_i.source, val_i)
 
                 vals.append(val_i)
             return vals
@@ -1024,52 +1102,72 @@ class ValueSequence(JSONLike):
             return self._values
 
     @classmethod
-    def _values_from_linear_space(
+    def _values_from_linear_space(
+        cls, start: float, stop: float, num: int, **kwargs
+    ) -> list[float]:
         return np.linspace(start, stop, num=num, **kwargs).tolist()
 
     @classmethod
-    def _values_from_geometric_space(
+    def _values_from_geometric_space(
+        cls, start: float, stop: float, num: int, **kwargs
+    ) -> list[float]:
         return np.geomspace(start, stop, num=num, **kwargs).tolist()
 
     @classmethod
-    def _values_from_log_space(
+    def _values_from_log_space(
+        cls, start: float, stop: float, num: int, base: float = 10.0, **kwargs
+    ) -> list[float]:
         return np.logspace(start, stop, num=num, base=base, **kwargs).tolist()
 
     @classmethod
-    def _values_from_range(
+    def _values_from_range(
+        cls, start: int | float, stop: int | float, step: int | float, **kwargs
+    ) -> list[float]:
         return np.arange(start, stop, step, **kwargs).tolist()
 
     @classmethod
-    def _values_from_file(cls, file_path):
+    def _values_from_file(cls, file_path: str | Path) -> list[str]:
         with Path(file_path).open("rt") as fh:
-
-        return vals
+            return [line.strip() for line in fh.readlines()]
 
     @classmethod
-    def _values_from_rectangle(
+    def _values_from_rectangle(
+        cls,
+        start: Sequence[float],
+        stop: Sequence[float],
+        num: Sequence[int],
+        coord: int | tuple[int, int] | None = None,
+        include: Sequence[str] | None = None,
+        **kwargs,
+    ) -> list[float]:
         vals = linspace_rect(start=start, stop=stop, num=num, include=include, **kwargs)
         if coord is not None:
-
+            return vals[coord].tolist()
         else:
-
-        return vals
+            return (vals.T).tolist()
 
     @classmethod
-    def _values_from_random_uniform(
+    def _values_from_random_uniform(
+        cls,
+        num: int,
+        low: float = 0.0,
+        high: float = 1.0,
+        seed: int | list[int] | None = None,
+    ) -> list[float]:
         rng = np.random.default_rng(seed)
         return rng.uniform(low=low, high=high, size=num).tolist()
 
     @classmethod
     def from_linear_space(
         cls,
-        path,
-        start,
-        stop,
-        num,
-        nesting_order=0,
-        label=None,
+        path: str,
+        start: float,
+        stop: float,
+        num: int,
+        nesting_order: float = 0,
+        label: str | int | None = None,
         **kwargs,
-    ):
+    ) -> Self:
         """
         Build a sequence from a NumPy linear space.
         """
@@ -1084,15 +1182,15 @@ class ValueSequence(JSONLike):
     @classmethod
     def from_geometric_space(
         cls,
-        path,
-        start,
-        stop,
-        num,
-        nesting_order=0,
+        path: str,
+        start: float,
+        stop: float,
+        num: int,
+        nesting_order: float = 0,
         endpoint=True,
-        label=None,
+        label: str | int | None = None,
         **kwargs,
-    ):
+    ) -> Self:
         """
         Build a sequence from a NumPy geometric space.
         """
@@ -1106,16 +1204,16 @@ class ValueSequence(JSONLike):
     @classmethod
     def from_log_space(
         cls,
-        path,
-        start,
-        stop,
-        num,
-        nesting_order=0,
+        path: str,
+        start: float,
+        stop: float,
+        num: int,
+        nesting_order: float = 0,
         base=10.0,
         endpoint=True,
-        label=None,
+        label: str | int | None = None,
         **kwargs,
-    ):
+    ) -> Self:
         """
         Build a sequence from a NumPy logarithmic space.
         """
@@ -1136,14 +1234,14 @@ class ValueSequence(JSONLike):
     @classmethod
     def from_range(
         cls,
-        path,
-        start,
-        stop,
-        nesting_order=0,
-        step=1,
-        label=None,
+        path: str,
+        start: float,
+        stop: float,
+        nesting_order: float = 0,
+        step: int | float = 1,
+        label: str | int | None = None,
         **kwargs,
-    ):
+    ) -> Self:
         """
         Build a sequence from a range.
         """
@@ -1173,12 +1271,12 @@ class ValueSequence(JSONLike):
     @classmethod
     def from_file(
         cls,
-        path,
-        file_path,
-        nesting_order=0,
-        label=None,
+        path: str,
+        file_path: str | Path,
+        nesting_order: float = 0,
+        label: str | int | None = None,
         **kwargs,
-    ):
+    ) -> Self:
         """
         Build a sequence from a simple file.
         """
@@ -1198,16 +1296,16 @@ class ValueSequence(JSONLike):
     @classmethod
     def from_rectangle(
         cls,
-        path,
-        start,
-        stop,
-        num,
-        coord:
-        include:
-        nesting_order=0,
-        label=None,
+        path: str,
+        start: Sequence[float],
+        stop: Sequence[float],
+        num: Sequence[int],
+        coord: int | None = None,
+        include: list[str] | None = None,
+        nesting_order: float = 0,
+        label: str | int | None = None,
         **kwargs,
-    ):
+    ) -> Self:
         """
         Build a sequence to cover a rectangle.
 
@@ -1238,14 +1336,14 @@ class ValueSequence(JSONLike):
     def from_random_uniform(
         cls,
         path,
-        num,
-        low=0.0,
-        high=1.0,
-        seed=None,
-        nesting_order=0,
-        label=None,
+        num: int,
+        low: float = 0.0,
+        high: float = 1.0,
+        seed: int | list[int] | None = None,
+        nesting_order: float = 0,
+        label: str | int | None = None,
         **kwargs,
-    ):
+    ) -> Self:
         """
         Build a sequence from a uniform random number generator.
         """
@@ -1261,9 +1359,13 @@
 class AbstractInputValue(JSONLike):
     """Class to represent all sequence-able inputs to a task."""
 
-    _workflow = None
+    _workflow: Workflow | None = None
+    _element_set: ElementSet | None = None
+    _schema_input: SchemaInput | None = None
+    _value: Any | None = None
+    _value_group_idx: int | list[int] | None = None
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         try:
             value_str = f", value={self.value}"
         except WorkflowParameterMissingError:
@@ -1276,25 +1378,26 @@ class AbstractInputValue(JSONLike):
             f")"
         )
 
-
-
-
-
-
-        del out["_schema_input"]
+    @override
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        out = super()._postprocess_to_dict(d)
+        out.pop("_workflow", None)
+        out.pop("_schema_input", None)
         return out
 
     def make_persistent(
-        self, workflow:
-    ) ->
+        self, workflow: Workflow, source: ParamSource
+    ) -> tuple[str, list[int | list[int]], bool]:
         """Save value to a persistent workflow.
 
         Returns
         -------
-
-
-
-
+        str
+            Normalised path for this task input.
+        list[int | list[int]]
+            The index of the parameter data Zarr group where the data is stored.
+        bool
+            Whether this is newly persistent.
         """
 
         if self._value_group_idx is not None:
@@ -1315,30 +1418,34 @@ class AbstractInputValue(JSONLike):
         return (self.normalised_path, [data_ref], is_new)
 
     @property
-    def
+    def normalised_path(self) -> str:
+        """
+        The normalised path, if known.
+        """
+        raise NotImplementedError
+
+    @property
+    def workflow(self) -> Workflow | None:
         """
         The workflow containing this input value.
         """
         if self._workflow:
             return self._workflow
-
-
-
-
+        if self._element_set:
+            if w_tmpl := self._element_set.task_template.workflow_template:
+                return w_tmpl.workflow
+        if self._schema_input:
+            if t_tmpl := self._schema_input.task_schema.task_template:
+                if w_tmpl := t_tmpl.workflow_template:
+                    return w_tmpl.workflow
+        return None
 
     @property
-    def value(self):
+    def value(self) -> Any:
         """
         The value itself.
         """
-
-            val = self.workflow.get_parameter_data(self._value_group_idx)
-            if self._value_is_obj and self.parameter._value_class:
-                val = self.parameter._value_class(**val)
-        else:
-            val = self._value
-
-        return val
+        return self._value
 
 
 @dataclass
@@ -1348,13 +1455,16 @@ class ValuePerturbation(AbstractInputValue):
     """
 
     #: The name of this perturbation.
-    name: str
+    name: str = ""
     #: The path to the value(s) to perturb.
-    path:
+    path: Sequence[str | int | float] | None = None
     #: The multiplicative factor to apply.
-    multiplicative_factor:
+    multiplicative_factor: Numeric | None = 1
     #: The additive factor to apply.
-    additive_factor:
+    additive_factor: Numeric | None = 0
+
+    def __post_init__(self):
+        assert self.name
 
     @classmethod
     def from_spec(cls, spec):
@@ -1364,6 +1474,7 @@ class ValuePerturbation(AbstractInputValue):
         return cls(**spec)
 
 
+@hydrate
 class InputValue(AbstractInputValue):
     """
     An input value to a task.
@@ -1386,7 +1497,7 @@ class InputValue(AbstractInputValue):
 
     """
 
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="parameter",
             class_name="Parameter",
@@ -1397,45 +1508,40 @@ class InputValue(AbstractInputValue):
|
|
1397
1508
|
|
1398
1509
|
def __init__(
|
1399
1510
|
self,
|
1400
|
-
parameter:
|
1401
|
-
value:
|
1402
|
-
label:
|
1403
|
-
value_class_method:
|
1404
|
-
path:
|
1405
|
-
|
1511
|
+
parameter: Parameter | SchemaInput | str,
|
1512
|
+
value: Any | None = None,
|
1513
|
+
label: str | int | None = None,
|
1514
|
+
value_class_method: str | None = None,
|
1515
|
+
path: str | None = None,
|
1516
|
+
_check_obj: bool = True,
|
1406
1517
|
):
|
1518
|
+
super().__init__()
|
1407
1519
|
if isinstance(parameter, str):
|
1408
1520
|
try:
|
1409
|
-
|
1521
|
+
#: Parameter whose value is to be specified.
|
1522
|
+
self.parameter = self._app.parameters.get(parameter)
|
1410
1523
|
except ValueError:
|
1411
|
-
parameter = self.
|
1524
|
+
self.parameter = self._app.Parameter(parameter)
|
1412
1525
|
elif isinstance(parameter, SchemaInput):
|
1413
|
-
parameter = parameter.parameter
|
1526
|
+
self.parameter = parameter.parameter
|
1527
|
+
else:
|
1528
|
+
self.parameter = parameter
|
1414
1529
|
|
1415
|
-
#: Parameter whose value is to be specified.
|
1416
|
-
self.parameter = parameter
|
1417
1530
|
#: Identifier to be used where the associated `SchemaInput` accepts multiple
|
1418
1531
|
#: parameters of the specified type.
|
1419
1532
|
self.label = str(label) if label is not None else ""
|
1420
1533
|
#: Dot-delimited path within the parameter's nested data structure for which
|
1421
1534
|
#: `value` should be set.
|
1422
|
-
self.path = (path.strip(".") if path else None
|
1535
|
+
self.path = (path.strip(".") or None) if path else None
|
1423
1536
|
#: A class method that can be invoked with the `value` attribute as keyword
|
1424
1537
|
#: arguments.
|
1425
1538
|
self.value_class_method = value_class_method
|
1426
|
-
self._value = _process_demo_data_strings(self.
|
1427
|
-
|
1428
|
-
self._value_group_idx = None # assigned by method make_persistent
|
1429
|
-
self._element_set = None # assigned by parent ElementSet (if belonging)
|
1430
|
-
|
1431
|
-
# assigned by parent SchemaInput (if this object is a default value of a
|
1432
|
-
# SchemaInput):
|
1433
|
-
self._schema_input = None
|
1539
|
+
self._value = _process_demo_data_strings(self._app, value)
|
1434
1540
|
|
1435
1541
|
# record if a ParameterValue sub-class is passed for value, which allows us
|
1436
1542
|
# to re-init the object on `.value`:
|
1437
1543
|
self._value_is_obj = isinstance(value, ParameterValue)
|
1438
|
-
if
|
1544
|
+
if _check_obj:
|
1439
1545
|
self._check_dict_value_if_object()
|
1440
1546
|
|
1441
1547
|
def _check_dict_value_if_object(self):
|
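The new `path` handling in `__init__` strips surrounding dots and collapses an all-dot or empty string to `None`. A stand-alone illustration of exactly that expression (hypothetical helper name, not hpcflow API):

```python
def normalise_path(path: str | None) -> str | None:
    """Mirror of the expression used in InputValue.__init__ above."""
    return (path.strip(".") or None) if path else None


assert normalise_path("a.b.c.") == "a.b.c"      # trailing dot removed
assert normalise_path(".sub.key") == "sub.key"  # leading dot removed
assert normalise_path(".") is None              # nothing left after stripping
assert normalise_path("") is None               # empty string means no path
assert normalise_path(None) is None             # absent path stays absent
```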
@@ -1459,13 +1565,13 @@ class InputValue(AbstractInputValue):
                 f"dict."
             )
 
-    def __deepcopy__(self, memo):
+    def __deepcopy__(self, memo: dict[int, Any]) -> Self:
         kwargs = self.to_dict()
         _value = kwargs.pop("_value")
         kwargs.pop("_schema_input", None)
         _value_group_idx = kwargs.pop("_value_group_idx")
         _value_is_obj = kwargs.pop("_value_is_obj")
-        obj = self.__class__(**copy.deepcopy(kwargs, memo),
+        obj = self.__class__(**copy.deepcopy(kwargs, memo), _check_obj=False)
         obj._value = _value
         obj._value_group_idx = _value_group_idx
         obj._value_is_obj = _value_is_obj
@@ -1473,7 +1579,7 @@ class InputValue(AbstractInputValue):
         obj._schema_input = self._schema_input
         return obj
 
-    def __repr__(self):
+    def __repr__(self) -> str:
         val_grp_idx = ""
         if self._value_group_idx is not None:
             val_grp_idx = f", value_group_idx={self._value_group_idx}"
@@ -1500,12 +1606,10 @@ class InputValue(AbstractInputValue):
             f")"
         )
 
-    def __eq__(self, other) -> bool:
+    def __eq__(self, other: Any) -> bool:
         if not isinstance(other, self.__class__):
             return False
-
-            return True
-        return False
+        return self.to_dict() == other.to_dict()
 
     @classmethod
     def _json_like_constructor(cls, json_like):
@@ -1516,14 +1620,14 @@ class InputValue(AbstractInputValue):
         if "_value" in json_like:
             json_like["value"] = json_like.pop("_value")
 
-        obj = cls(**json_like,
+        obj = cls(**json_like, _check_obj=False)
         obj._value_group_idx = _value_group_idx
         obj._value_is_obj = _value_is_obj
         obj._check_dict_value_if_object()
         return obj
 
     @property
-    def labelled_type(self):
+    def labelled_type(self) -> str:
         """
         The labelled type of this input value.
         """
@@ -1531,22 +1635,25 @@ class InputValue(AbstractInputValue):
         return f"{self.parameter.typ}{label}"
 
     @property
-    def normalised_inputs_path(self):
+    def normalised_inputs_path(self) -> str:
         """
         The normalised input path without the ``inputs.`` prefix.
         """
         return f"{self.labelled_type}{f'.{self.path}' if self.path else ''}"
 
     @property
-    def normalised_path(self):
+    def normalised_path(self) -> str:
         """
         The full normalised input path.
         """
         return f"inputs.{self.normalised_inputs_path}"
 
-    def make_persistent(
+    def make_persistent(
+        self, workflow: Workflow, source: ParamSource
+    ) -> tuple[str, list[int | list[int]], bool]:
         source = copy.deepcopy(source)
-
+        if self.value_class_method is not None:
+            source["value_class_method"] = self.value_class_method
         return super().make_persistent(workflow, source)
 
     @classmethod
@@ -1563,20 +1670,28 @@ class InputValue(AbstractInputValue):
             json_like["value_class_method"] = cls_method
 
         if "path" not in json_like:
-
-            json_like["parameter"] =
-            json_like["path"] = ".".join(
-
-        obj = super().from_json_like(json_like, shared_data)
+            param, *path = json_like["parameter"].split(".")
+            json_like["parameter"] = param
+            json_like["path"] = ".".join(path)
 
-        return
+        return super().from_json_like(json_like, shared_data)
 
     @property
-    def is_sub_value(self):
+    def is_sub_value(self) -> bool:
         """True if the value is for a sub part of the parameter (i.e. if `path` is set).
         Sub-values are not added to the base parameter data, but are interpreted as
         single-value sequences."""
-        return
+        return bool(self.path)
+
+    @property
+    def value(self) -> Any:
+        if self._value_group_idx is not None and self.workflow:
+            val = self.workflow.get_parameter_data(cast("int", self._value_group_idx))
+            if self._value_is_obj and self.parameter._value_class:
+                return self.parameter._value_class(**val)
+            return val
+        else:
+            return self._value
 
 
 class ResourceSpec(JSONLike):
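The `value` override added above resolves a persistent parameter reference first and only falls back to the in-memory value when no group index has been assigned. A schematic (non-hpcflow) version of that lookup order:

```python
class ValueResolverSketch:
    """Illustrative only: mimics the resolution order in InputValue.value above."""

    def __init__(self, in_memory=None, group_idx=None, workflow=None, value_class=None):
        self._value = in_memory
        self._value_group_idx = group_idx
        self.workflow = workflow          # anything exposing get_parameter_data()
        self._value_class = value_class   # optional ParameterValue-like class

    @property
    def value(self):
        if self._value_group_idx is not None and self.workflow:
            # persistent data wins once a group index has been assigned
            val = self.workflow.get_parameter_data(self._value_group_idx)
            if self._value_class:
                return self._value_class(**val)  # re-hydrate the object form
            return val
        return self._value  # otherwise use the value given at construction
```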
@@ -1636,7 +1751,7 @@ class ResourceSpec(JSONLike):
     """
 
     #: The names of parameters that may be used when making an instance of this class.
-    ALLOWED_PARAMETERS = {
+    ALLOWED_PARAMETERS: ClassVar[set[str]] = {
         "scratch",
         "parallel_mode",
         "num_cores",
@@ -1660,51 +1775,71 @@ class ResourceSpec(JSONLike):
         "SLURM_num_cpus_per_task",
     }
 
-    _resource_list = None
+    _resource_list: ResourceList | None = None
 
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="scope",
             class_name="ActionScope",
         ),
     )
 
+    @staticmethod
+    def __quoted(values: Iterable):
+        return ", ".join(f'"{item}"' for item in values)
+
+    @classmethod
+    def _allowed_params_quoted(cls) -> str:
+        """
+        The string version of the list of allowed parameters.
+        """
+        return cls.__quoted(cls.ALLOWED_PARAMETERS)
+
+    @staticmethod
+    def __parse_thing(
+        typ: type[ActionScope], val: ActionScope | str | None
+    ) -> ActionScope | None:
+        if isinstance(val, typ):
+            return val
+        elif val is None:
+            return typ.any()
+        else:
+            return typ.from_json_like(cast("str", val))
+
     def __init__(
         self,
-        scope:
-        scratch:
-        parallel_mode:
-        num_cores:
-        num_cores_per_node:
-        num_threads:
-        num_nodes:
-        scheduler:
-        shell:
-        use_job_array:
-        max_array_items:
-        time_limit:
-        scheduler_args:
-        shell_args:
-        os_name:
-        environments:
-        SGE_parallel_env:
-        SLURM_partition:
-        SLURM_num_tasks:
-        SLURM_num_tasks_per_node:
-        SLURM_num_nodes:
-        SLURM_num_cpus_per_task:
+        scope: ActionScope | str | None = None,
+        scratch: str | None = None,
+        parallel_mode: str | ParallelMode | None = None,
+        num_cores: int | None = None,
+        num_cores_per_node: int | None = None,
+        num_threads: int | None = None,
+        num_nodes: int | None = None,
+        scheduler: str | None = None,
+        shell: str | None = None,
+        use_job_array: bool | None = None,
+        max_array_items: int | None = None,
+        time_limit: str | timedelta | None = None,
+        scheduler_args: dict[str, Any] | None = None,
+        shell_args: dict[str, Any] | None = None,
+        os_name: str | None = None,
+        environments: Mapping[str, Mapping[str, Any]] | None = None,
+        SGE_parallel_env: str | None = None,
+        SLURM_partition: str | None = None,
+        SLURM_num_tasks: str | None = None,
+        SLURM_num_tasks_per_node: str | None = None,
+        SLURM_num_nodes: str | None = None,
+        SLURM_num_cpus_per_task: str | None = None,
     ):
         #: Which scope does this apply to.
-        self.scope =
-        if not isinstance(self.scope, self.app.ActionScope):
-            self.scope = self.app.ActionScope.from_json_like(self.scope)
+        self.scope = self.__parse_thing(self._app.ActionScope, scope)
 
         if isinstance(time_limit, timedelta):
             time_limit = timedelta_format(time_limit)
 
         # assigned by `make_persistent`
-        self._workflow = None
-        self._value_group_idx = None
+        self._workflow: Workflow | None = None
+        self._value_group_idx: int | list[int] | None = None
 
         # user-specified resource parameters:
         self._scratch = scratch
@@ -1733,7 +1868,7 @@ class ResourceSpec(JSONLike):
         self._SLURM_num_nodes = SLURM_num_nodes
         self._SLURM_num_cpus_per_task = SLURM_num_cpus_per_task
 
-    def __deepcopy__(self, memo):
+    def __deepcopy__(self, memo: dict[int, Any]) -> Self:
         kwargs = copy.deepcopy(self.to_dict(), memo)
         _value_group_idx = kwargs.pop("value_group_idx", None)
         obj = self.__class__(**kwargs)
@@ -1743,65 +1878,60 @@ class ResourceSpec(JSONLike):
 
     def __repr__(self):
         param_strs = ""
-        for
-            i_str = ""
+        for param in self.ALLOWED_PARAMETERS:
             try:
-                i_val = getattr(self,
+                i_val = getattr(self, param)
             except WorkflowParameterMissingError:
-
-
-
-                i_str = f", {i}={i_val!r}"
-
-            param_strs += i_str
+                continue
+            if i_val is not None:
+                param_strs += f", {param}={i_val!r}"
 
         return f"{self.__class__.__name__}(scope={self.scope}{param_strs})"
 
-    def __eq__(self, other) -> bool:
+    def __eq__(self, other: Any) -> bool:
         if not isinstance(other, self.__class__):
             return False
-
-            return True
-        return False
+        return self.to_dict() == other.to_dict()
 
     @classmethod
-    def _json_like_constructor(cls, json_like):
+    def _json_like_constructor(cls, json_like) -> Self:
         """Invoked by `JSONLike.from_json_like` instead of `__init__`."""
 
         _value_group_idx = json_like.pop("value_group_idx", None)
         try:
             obj = cls(**json_like)
         except TypeError:
-            given_keys = set(k for k in json_like
-            bad_keys = given_keys - cls.ALLOWED_PARAMETERS
-
-            allowed_keys_str = ", ".join(f'"{i}"' for i in cls.ALLOWED_PARAMETERS)
+            given_keys = set(k for k in json_like if k != "scope")
+            bad_keys = cls.__quoted(given_keys - cls.ALLOWED_PARAMETERS)
+            good_keys = cls._allowed_params_quoted()
             raise UnknownResourceSpecItemError(
-                f"The following resource item names are unknown: {
-                f"resource item names are: {
+                f"The following resource item names are unknown: {bad_keys}. "
+                f"Allowed resource item names are: {good_keys}."
             )
         obj._value_group_idx = _value_group_idx
 
         return obj
 
     @property
-    def normalised_resources_path(self):
+    def normalised_resources_path(self) -> str:
         """
         Standard name of this resource spec.
         """
-
+        scope = self.scope
+        assert scope is not None
+        return scope.to_string()
 
     @property
-    def normalised_path(self):
+    def normalised_path(self) -> str:
         """
         Full name of this resource spec.
         """
         return f"resources.{self.normalised_resources_path}"
 
-
-
-
-
+    @override
+    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
+        out = super()._postprocess_to_dict(d)
+        out.pop("_workflow", None)
 
         if self._value_group_idx is not None:
             # only store pointer to persistent data:
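The `UnknownResourceSpecItemError` path above now builds its message from quoted key lists. Roughly, the helpers behave like this (sketch with stand-in names; `ALLOWED_PARAMETERS` abbreviated from the set listed earlier in the diff):

```python
def quoted(values):
    # same idea as ResourceSpec.__quoted in the diff above
    return ", ".join(f'"{item}"' for item in values)


allowed = {"scratch", "num_cores", "scheduler"}   # abbreviated for the example
given = {"num_cores": 4, "num_gpus": 1}           # "num_gpus" is not a known item

message = (
    f"The following resource item names are unknown: {quoted(set(given) - allowed)}. "
    f"Allowed resource item names are: {quoted(allowed)}."
)
print(message)
```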
@@ -1819,9 +1949,13 @@ class ResourceSpec(JSONLike):
         out = {k: v for k, v in out.items() if v is not None}
         return out
 
+    @classmethod
+    def __is_Workflow(cls, value) -> TypeIs[Workflow]:
+        return isinstance(value, cls._app.Workflow)
+
     def make_persistent(
-        self, workflow:
-    ) ->
+        self, workflow: ResourcePersistingWorkflow, source: ParamSource
+    ) -> tuple[str, list[int | list[int]], bool]:
         """Save to a persistent workflow.
 
         Returns
@@ -1830,6 +1964,9 @@ class ResourceSpec(JSONLike):
             contains the indices of the parameter data Zarr groups where the data is
             stored.
 
+        Note
+        ----
+        May modify the internal state of this object.
         """
 
         if self._value_group_idx is not None:
@@ -1845,7 +1982,8 @@ class ResourceSpec(JSONLike):
             data_ref = workflow._add_parameter_data(self._get_members(), source=source)
             is_new = True
             self._value_group_idx = data_ref
-            self.
+            if self.__is_Workflow(workflow):
+                self._workflow = workflow
 
             self._num_cores = None
             self._scratch = None
@@ -1868,18 +2006,18 @@ class ResourceSpec(JSONLike):
             kwargs[name] = getattr(self, name)
         return self.__class__(**kwargs)
 
-    def _get_value(self, value_name=None):
-        if self._value_group_idx is not None:
-            val = self.workflow.get_parameter_data(self._value_group_idx)
+    def _get_value(self, value_name: str | None = None):
+        if self._value_group_idx is not None and self.workflow:
+            val = self.workflow.get_parameter_data(cast("int", self._value_group_idx))
         else:
             val = self._get_members()
-        if value_name:
-
+        if value_name is not None and val is not None:
+            return val.get(value_name)
 
         return val
 
     @staticmethod
-    def _process_string(value:
+    def _process_string(value: str | None):
         return value.lower().strip() if value else value
 
     def _setter_persistent_check(self):
@@ -1889,7 +2027,7 @@ class ResourceSpec(JSONLike):
         )
 
     @property
-    def scratch(self):
+    def scratch(self) -> str | None:
         """
         Which scratch space to use.
 
@@ -1900,164 +2038,162 @@ class ResourceSpec(JSONLike):
         return self._get_value("scratch")
 
     @property
-    def parallel_mode(self):
+    def parallel_mode(self) -> ParallelMode | None:
         """
         Which parallel mode to use.
         """
         return self._get_value("parallel_mode")
 
     @property
-    def num_cores(self):
+    def num_cores(self) -> int | None:
         """
         How many cores to request.
         """
         return self._get_value("num_cores")
 
     @property
-    def num_cores_per_node(self):
+    def num_cores_per_node(self) -> int | None:
         """
         How many cores per compute node to request.
         """
         return self._get_value("num_cores_per_node")
 
     @property
-    def num_nodes(self):
+    def num_nodes(self) -> int | None:
         """
         How many compute nodes to request.
         """
         return self._get_value("num_nodes")
 
     @property
-    def num_threads(self):
+    def num_threads(self) -> int | None:
         """
         How many threads to request.
         """
         return self._get_value("num_threads")
 
     @property
-    def scheduler(self):
+    def scheduler(self) -> str | None:
         """
         Which scheduler to use.
         """
         return self._get_value("scheduler")
 
     @scheduler.setter
-    def scheduler(self, value):
+    def scheduler(self, value: str | None):
         self._setter_persistent_check()
-
-        self._scheduler = value
+        self._scheduler = self._process_string(value)
 
     @property
-    def shell(self):
+    def shell(self) -> str | None:
         """
         Which system shell to use.
         """
         return self._get_value("shell")
 
     @shell.setter
-    def shell(self, value):
+    def shell(self, value: str | None):
         self._setter_persistent_check()
-
-        self._shell = value
+        self._shell = self._process_string(value)
 
     @property
-    def use_job_array(self):
+    def use_job_array(self) -> bool:
         """
         Whether to use array jobs.
         """
         return self._get_value("use_job_array")
 
     @property
-    def max_array_items(self):
+    def max_array_items(self) -> int | None:
         """
         If using array jobs, up to how many items should be in the job array.
         """
         return self._get_value("max_array_items")
 
     @property
-    def time_limit(self):
+    def time_limit(self) -> str | None:
         """
         How long to run for.
         """
         return self._get_value("time_limit")
 
     @property
-    def scheduler_args(self):
+    def scheduler_args(self) -> Mapping:  # TODO: TypedDict
         """
         Additional arguments to pass to the scheduler.
         """
         return self._get_value("scheduler_args")
 
     @property
-    def shell_args(self):
+    def shell_args(self) -> Mapping | None:  # TODO: TypedDict
         """
         Additional arguments to pass to the shell.
         """
         return self._get_value("shell_args")
 
     @property
-    def os_name(self):
+    def os_name(self) -> str:
         """
         Which OS to use.
         """
         return self._get_value("os_name")
 
+    @os_name.setter
+    def os_name(self, value: str):
+        self._setter_persistent_check()
+        self._os_name = self._process_string(value)
+
     @property
-    def environments(self):
+    def environments(self) -> Mapping | None:  # TODO: TypedDict
         """
         Which execution environments to use.
         """
         return self._get_value("environments")
 
     @property
-    def SGE_parallel_env(self):
+    def SGE_parallel_env(self) -> str | None:
         """
         Which SGE parallel environment to request.
         """
         return self._get_value("SGE_parallel_env")
 
     @property
-    def SLURM_partition(self):
+    def SLURM_partition(self) -> str | None:
         """
         Which SLURM partition to request.
         """
         return self._get_value("SLURM_partition")
 
     @property
-    def SLURM_num_tasks(self):
+    def SLURM_num_tasks(self) -> int | None:
         """
         How many SLURM tasks to request.
         """
         return self._get_value("SLURM_num_tasks")
 
     @property
-    def SLURM_num_tasks_per_node(self):
+    def SLURM_num_tasks_per_node(self) -> int | None:
         """
         How many SLURM tasks per compute node to request.
         """
         return self._get_value("SLURM_num_tasks_per_node")
 
     @property
-    def SLURM_num_nodes(self):
+    def SLURM_num_nodes(self) -> int | None:
         """
         How many compute nodes to request.
         """
         return self._get_value("SLURM_num_nodes")
 
     @property
-    def SLURM_num_cpus_per_task(self):
+    def SLURM_num_cpus_per_task(self) -> int | None:
         """
         How many CPU cores to ask for per SLURM task.
         """
         return self._get_value("SLURM_num_cpus_per_task")
 
-    @os_name.setter
-    def os_name(self, value):
-        self._setter_persistent_check()
-        self._os_name = self._process_string(value)
-
     @property
-    def workflow(self):
+    def workflow(self) -> Workflow | None:
         """
         The workflow owning this resource spec.
         """
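Every property in the block above is a thin wrapper over `_get_value`, which reads either the persisted parameter data or the in-memory members. Schematically (illustrative class, not the real `ResourceSpec`):

```python
class ResourceLookupSketch:
    """Illustrative delegation pattern used by the ResourceSpec properties above."""

    def __init__(self, **members):
        self._members = members
        self._persistent = None  # would hold workflow-backed data once persisted

    def _get_value(self, value_name=None):
        val = self._persistent if self._persistent is not None else self._members
        if value_name is not None and val is not None:
            return val.get(value_name)
        return val

    @property
    def num_cores(self):
        return self._get_value("num_cores")

    @property
    def scheduler(self):
        return self._get_value("scheduler")


spec = ResourceLookupSketch(num_cores=8, scheduler="slurm")
assert spec.num_cores == 8 and spec.scheduler == "slurm"
```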
@@ -2066,7 +2202,8 @@ class ResourceSpec(JSONLike):
 
         elif self.element_set:
             # element-set-level resources
-
+            wt = self.element_set.task_template.workflow_template
+            return wt.workflow if wt else None
 
         elif self.workflow_template:
             # template-level resources
@@ -2079,47 +2216,29 @@ class ResourceSpec(JSONLike):
                 f"creating the workflow object."
             )
 
+        return None
+
     @property
-    def element_set(self):
+    def element_set(self) -> ElementSet | None:
         """
         The element set that will use this resource spec.
         """
+        if not self._resource_list:
+            return None
         return self._resource_list.element_set
 
     @property
-    def workflow_template(self):
+    def workflow_template(self) -> WorkflowTemplate | None:
         """
         The workflow template that will use this resource spec.
         """
+        if not self._resource_list:
+            return None
         return self._resource_list.workflow_template
 
 
-
-
-    The types if input sources.
-    """
-
-    #: Input source is an import.
-    IMPORT = 0
-    #: Input source is local.
-    LOCAL = 1
-    #: Input source is a default.
-    DEFAULT = 2
-    #: Input source is a task.
-    TASK = 3
-
-
-class TaskSourceType(enum.Enum):
-    """
-    The types of task-based input sources.
-    """
-
-    #: Input source is a task input.
-    INPUT = 0
-    #: Input source is a task output.
-    OUTPUT = 1
-    #: Input source is unspecified.
-    ANY = 2
+#: How to specify a selection rule.
+Where: TypeAlias = "RuleArgs | Rule | Sequence[RuleArgs | Rule] | ElementFilter"
 
 
 class InputSource(JSONLike):
@@ -2144,7 +2263,7 @@ class InputSource(JSONLike):
         Filtering rules.
     """
 
-    _child_objects = (
+    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
         ChildObjectSpec(
             name="source_type",
             json_like_name="type",
@@ -2153,55 +2272,61 @@ class InputSource(JSONLike):
         ),
     )
 
+    @classmethod
+    def __is_ElementFilter(cls, value) -> TypeIs[ElementFilter]:
+        return isinstance(value, cls._app.ElementFilter)
+
+    @classmethod
+    def __is_Rule(cls, value) -> TypeIs[Rule]:
+        return isinstance(value, cls._app.Rule)
+
     def __init__(
         self,
-        source_type,
-        import_ref=None,
-        task_ref=None,
-        task_source_type=None,
-        element_iters=None,
-        path=None,
-        where:
-            Union[dict, app.Rule, List[dict], List[app.Rule], app.ElementFilter]
-        ] = None,
+        source_type: InputSourceType | str,
+        import_ref: int | None = None,
+        task_ref: int | None = None,
+        task_source_type: TaskSourceType | str | None = None,
+        element_iters: list[int] | None = None,
+        path: str | None = None,
+        where: Where | None = None,
     ):
-        if where is
-
-
-
-
-
-
+        if where is None or self.__is_ElementFilter(where):
+            #: Filtering rules.
+            self.where: ElementFilter | None = where
+        else:
+            self.where = self._app.ElementFilter(
+                rules=[
+                    rule if self.__is_Rule(rule) else self._app.Rule(**rule)
+                    for rule in (where if isinstance(where, Sequence) else [where])
+                ]
+            )
 
         #: Type of the input source.
-        self.source_type =
+        self.source_type = get_enum_by_name_or_val(InputSourceType, source_type)
         #: Where the input comes from when the type is `IMPORT`.
         self.import_ref = import_ref
         #: Which task is this an input for? Used when the type is `TASK`.
         self.task_ref = task_ref
         #: Type of task source.
-        self.task_source_type =
+        self.task_source_type = get_enum_by_name_or_val(TaskSourceType, task_source_type)
        #: Which element iterations does this apply to?
         self.element_iters = element_iters
-        #: Filtering rules.
-        self.where = where
         #: Path to where this input goes.
         self.path = path
 
         if self.source_type is InputSourceType.TASK:
             if self.task_ref is None:
-                raise ValueError(
+                raise ValueError("Must specify `task_ref` if `source_type` is TASK.")
             if self.task_source_type is None:
                 self.task_source_type = TaskSourceType.OUTPUT
 
         if self.source_type is InputSourceType.IMPORT and self.import_ref is None:
-            raise ValueError(
+            raise ValueError("Must specify `import_ref` if `source_type` is IMPORT.")
 
-    def __eq__(self, other):
+    def __eq__(self, other: Any):
         if not isinstance(other, self.__class__):
             return False
-
+        return (
             self.source_type == other.source_type
             and self.import_ref == other.import_ref
             and self.task_ref == other.task_ref
@@ -2209,23 +2334,22 @@ class InputSource(JSONLike):
             and self.element_iters == other.element_iters
             and self.where == other.where
             and self.path == other.path
-            )
-            return True
-        else:
-            return False
+        )
 
     def __repr__(self) -> str:
+        assert self.source_type
         cls_method_name = self.source_type.name.lower()
 
-        args_lst = []
+        args_lst: list[str] = []
 
         if self.source_type is InputSourceType.IMPORT:
             cls_method_name += "_"
             args_lst.append(f"import_ref={self.import_ref}")
 
         elif self.source_type is InputSourceType.TASK:
-
-
+            assert self.task_source_type
+            args_lst.append(f"task_ref={self.task_ref}")
+            args_lst.append(
                 f"task_source_type={self.task_source_type.name.lower()!r}",
             )
 
@@ -2240,15 +2364,16 @@ class InputSource(JSONLike):
 
         return out
 
-    def get_task(self, workflow):
+    def get_task(self, workflow: Workflow) -> WorkflowTask | None:
         """If source_type is task, then return the referenced task from the given
         workflow."""
         if self.source_type is InputSourceType.TASK:
-
-                if task.insert_ID == self.task_ref
-
+            return next(
+                (task for task in workflow.tasks if task.insert_ID == self.task_ref), None
+            )
+        return None
 
-    def is_in(self, other_input_sources:
+    def is_in(self, other_input_sources: Sequence[InputSource]) -> int | None:
         """Check if this input source is in a list of other input sources, without
         considering the `element_iters` and `where` attributes."""
 
@@ -2263,51 +2388,38 @@ class InputSource(JSONLike):
                 return idx
         return None
 
-    def to_string(self):
+    def to_string(self) -> str:
         """
         Render this input source as a string.
         """
         out = [self.source_type.name.lower()]
         if self.source_type is InputSourceType.TASK:
-
+            assert self.task_source_type
+            out.append(str(self.task_ref))
+            out.append(self.task_source_type.name.lower())
             if self.element_iters is not None:
-                out
+                out.append(f'[{",".join(map(str, self.element_iters))}]')
         elif self.source_type is InputSourceType.IMPORT:
-            out
+            out.append(str(self.import_ref))
         return ".".join(out)
 
-    @staticmethod
-    def _validate_source_type(src_type):
-        if src_type is None:
-            return None
-        if isinstance(src_type, InputSourceType):
-            return src_type
-        try:
-            src_type = getattr(InputSourceType, src_type.upper())
-        except AttributeError:
-            raise ValueError(
-                f"InputSource `source_type` specified as {src_type!r}, but "
-                f"must be one of: {[i.name for i in InputSourceType]!r}."
-            )
-        return src_type
-
     @classmethod
-    def _validate_task_source_type(cls, task_src_type):
+    def _validate_task_source_type(cls, task_src_type) -> None | TaskSourceType:
         if task_src_type is None:
             return None
         if isinstance(task_src_type, TaskSourceType):
             return task_src_type
         try:
-            task_source_type = getattr(cls.
+            task_source_type = getattr(cls._app.TaskSourceType, task_src_type.upper())
         except AttributeError:
             raise ValueError(
                 f"InputSource `task_source_type` specified as {task_src_type!r}, but "
-                f"must be one of: {
+                f"must be one of: {TaskSourceType.names!r}."
             )
         return task_source_type
 
     @classmethod
-    def from_string(cls, str_defn):
+    def from_string(cls, str_defn: str) -> Self:
         """Parse a dot-delimited string definition of an InputSource.
 
         Parameter
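The `to_string`/`_parse_from_string` pair above round-trips dot-delimited source descriptions such as `local`, `import.0` or `task.1.output`. A reduced parser with the same shape (plain strings in place of the hpcflow enums, and without the lenient non-integer handling):

```python
def parse_source_sketch(str_defn: str) -> dict:
    """Reduced sketch of InputSource._parse_from_string above."""
    parts = str_defn.split(".")
    source_type = parts[0]          # "local", "default", "task" or "import"
    task_ref = task_source_type = import_ref = None
    if source_type == "task":
        task_ref = int(parts[1])
        task_source_type = parts[2] if len(parts) > 2 else "output"
    elif source_type == "import":
        import_ref = int(parts[1])
    return {
        "source_type": source_type,
        "task_ref": task_ref,
        "task_source_type": task_source_type,
        "import_ref": import_ref,
    }


assert parse_source_sketch("task.1.output")["task_ref"] == 1
assert parse_source_sketch("import.0")["import_ref"] == 0
assert parse_source_sketch("local")["source_type"] == "local"
```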
@@ -2322,44 +2434,49 @@ class InputSource(JSONLike):
             local
             default
             import.[import_ref]
-
         """
         return cls(**cls._parse_from_string(str_defn))
 
-    @
-    def _parse_from_string(
+    @staticmethod
+    def _parse_from_string(str_defn: str) -> dict[str, Any]:
+        """Parse a dot-delimited string definition of an InputSource.
+
+        Examples
+        --------
+            task.[task_ref].input
+            task.[task_ref].output
+            local
+            default
+            import.[import_ref]
+        """
         parts = str_defn.split(".")
-        source_type =
-        task_ref = None
-        task_source_type = None
-        import_ref = None
+        source_type = get_enum_by_name_or_val(InputSourceType, parts[0])
+        task_ref: int | None = None
+        task_source_type: TaskSourceType | None = None
+        import_ref: int | None = None
         if (
             (
-                source_type
-                in (cls.app.InputSourceType.LOCAL, cls.app.InputSourceType.DEFAULT)
+                source_type in (InputSourceType.LOCAL, InputSourceType.DEFAULT)
                 and len(parts) > 1
             )
-            or (source_type is
-            or (source_type is
+            or (source_type is InputSourceType.TASK and len(parts) > 3)
+            or (source_type is InputSourceType.IMPORT and len(parts) > 2)
         ):
             raise ValueError(f"InputSource string not understood: {str_defn!r}.")
 
-        if source_type is
+        if source_type is InputSourceType.TASK:
             # TODO: does this include element_iters?
-            task_ref = parts[1]
             try:
-                task_ref = int(
+                task_ref = int(parts[1])
             except ValueError:
                 pass
             try:
-
+                task_source_type = get_enum_by_name_or_val(TaskSourceType, parts[2])
             except IndexError:
-
-
-        elif source_type is cls.app.InputSourceType.IMPORT:
-            import_ref = parts[1]
+                task_source_type = TaskSourceType.OUTPUT
+        elif source_type is InputSourceType.IMPORT:
             try:
-                import_ref = int(
+                import_ref = int(parts[1])
             except ValueError:
                 pass
 
@@ -2377,9 +2494,14 @@ class InputSource(JSONLike):
         return super().from_json_like(json_like, shared_data)
 
     @classmethod
-    def import_(
+    def import_(
+        cls,
+        import_ref: int,
+        element_iters: list[int] | None = None,
+        where: Where | None = None,
+    ) -> Self:
         """
-        Make an
+        Make an instance of an input source that is an import.
 
         Parameters
         ----------
@@ -2391,30 +2513,36 @@ class InputSource(JSONLike):
             Filtering rule.
         """
         return cls(
-            source_type=
+            source_type=InputSourceType.IMPORT,
             import_ref=import_ref,
             element_iters=element_iters,
             where=where,
         )
 
     @classmethod
-    def local(cls):
+    def local(cls) -> Self:
         """
-        Make an
+        Make an instance of an input source that is local.
         """
-        return cls(source_type=
+        return cls(source_type=InputSourceType.LOCAL)
 
     @classmethod
-    def default(cls):
+    def default(cls) -> Self:
         """
-        Make an
+        Make an instance of an input source that is default.
         """
-        return cls(source_type=
+        return cls(source_type=InputSourceType.DEFAULT)
 
     @classmethod
-    def task(
+    def task(
+        cls,
+        task_ref: int,
+        task_source_type: TaskSourceType | str | None = None,
+        element_iters: list[int] | None = None,
+        where: Where | None = None,
+    ) -> Self:
         """
-        Make an
+        Make an instance of an input source that is a task.
 
         Parameters
         ----------
@@ -2427,12 +2555,12 @@ class InputSource(JSONLike):
         where:
             Filtering rule.
         """
-        if not task_source_type:
-            task_source_type = cls.app.TaskSourceType.OUTPUT
         return cls(
-            source_type=
+            source_type=InputSourceType.TASK,
             task_ref=task_ref,
-            task_source_type=
+            task_source_type=get_enum_by_name_or_val(
+                TaskSourceType, task_source_type or TaskSourceType.OUTPUT
+            ),
             where=where,
             element_iters=element_iters,
         )
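The `task` constructor above funnels the user-supplied kind through `get_enum_by_name_or_val`, whose implementation is not part of this diff. A plausible sketch of such a helper (illustrative only; the real hpcflow helper may differ), using the `TaskSourceType` members removed above:

```python
import enum


class TaskSourceTypeSketch(enum.Enum):
    # stand-in for hpcflow's TaskSourceType
    INPUT = 0
    OUTPUT = 1
    ANY = 2


def get_enum_by_name_or_val_sketch(enum_cls, value):
    """Accept an enum member, its (case-insensitive) name, or its value."""
    if value is None or isinstance(value, enum_cls):
        return value
    if isinstance(value, str):
        return enum_cls[value.upper()]
    return enum_cls(value)


assert get_enum_by_name_or_val_sketch(TaskSourceTypeSketch, "output") is TaskSourceTypeSketch.OUTPUT
assert get_enum_by_name_or_val_sketch(TaskSourceTypeSketch, TaskSourceTypeSketch.INPUT) is TaskSourceTypeSketch.INPUT
assert get_enum_by_name_or_val_sketch(TaskSourceTypeSketch, 2) is TaskSourceTypeSketch.ANY
```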