hpcflow 0.1.9__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -462
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.9.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -458
- hpcflow/archive/archive.py +0 -308
- hpcflow/archive/cloud/cloud.py +0 -47
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -432
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -232
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2549
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -323
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -167
- hpcflow/variables.py +0 -544
- hpcflow-0.1.9.dist-info/METADATA +0 -168
- hpcflow-0.1.9.dist-info/RECORD +0 -45
- hpcflow-0.1.9.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.9.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
|
@@ -0,0 +1,3371 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Parameters represent information passed around within a workflow.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
from collections.abc import Sequence
|
|
7
|
+
import copy
|
|
8
|
+
from dataclasses import dataclass, field
|
|
9
|
+
from datetime import timedelta
|
|
10
|
+
import enum
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
from typing import TypeVar, cast, TYPE_CHECKING
|
|
13
|
+
from typing_extensions import override, TypeIs
|
|
14
|
+
import warnings
|
|
15
|
+
|
|
16
|
+
import numpy as np
|
|
17
|
+
from valida import Schema as ValidaSchema # type: ignore
|
|
18
|
+
|
|
19
|
+
from hpcflow.sdk.typing import hydrate
|
|
20
|
+
from hpcflow.sdk.core.enums import (
|
|
21
|
+
InputSourceType,
|
|
22
|
+
ParallelMode,
|
|
23
|
+
ParameterPropagationMode,
|
|
24
|
+
TaskSourceType,
|
|
25
|
+
)
|
|
26
|
+
from hpcflow.sdk.core.errors import (
|
|
27
|
+
MalformedParameterPathError,
|
|
28
|
+
UnknownResourceSpecItemError,
|
|
29
|
+
WorkflowParameterMissingError,
|
|
30
|
+
)
|
|
31
|
+
from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
|
|
32
|
+
from hpcflow.sdk.core.utils import (
|
|
33
|
+
check_valid_py_identifier,
|
|
34
|
+
get_enum_by_name_or_val,
|
|
35
|
+
split_param_label,
|
|
36
|
+
timedelta_format,
|
|
37
|
+
)
|
|
38
|
+
from hpcflow.sdk.core.values import ValuesMixin, process_demo_data_strings
|
|
39
|
+
|
|
40
|
+
if TYPE_CHECKING:
|
|
41
|
+
from collections.abc import Iterable, Iterator, Mapping
|
|
42
|
+
from typing import Any, ClassVar, Literal
|
|
43
|
+
from typing_extensions import Self, TypeAlias
|
|
44
|
+
from h5py import Group as HDF5Group # type: ignore
|
|
45
|
+
from numpy.typing import NDArray
|
|
46
|
+
from ..typing import ParamSource
|
|
47
|
+
from .actions import ActionScope
|
|
48
|
+
from .element import ElementFilter
|
|
49
|
+
from .object_list import ResourceList
|
|
50
|
+
from .rule import Rule
|
|
51
|
+
from .task import ElementSet, TaskSchema, TaskTemplate, WorkflowTask
|
|
52
|
+
from .types import (
|
|
53
|
+
Address,
|
|
54
|
+
Numeric,
|
|
55
|
+
LabelInfo,
|
|
56
|
+
LabellingDescriptor,
|
|
57
|
+
ResourcePersistingWorkflow,
|
|
58
|
+
RuleArgs,
|
|
59
|
+
SchemaInputKwargs,
|
|
60
|
+
)
|
|
61
|
+
from .workflow import Workflow, WorkflowTemplate
|
|
62
|
+
from .validation import Schema
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
T = TypeVar("T")
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
@dataclass
@hydrate
class ParameterValue:
    """
    The value handler for a parameter.

    Intended to be subclassed: concrete subclasses set ``_typ`` so that a
    :py:class:`Parameter` with a matching type code can discover them, and
    override the serialisation hooks below as appropriate.
    """

    # Type code linking a subclass to `Parameter.typ`; None on this base class.
    _typ: ClassVar[str | None] = None
    # Sub-parameter lookup table; populated by subclasses as needed.
    _sub_parameters: ClassVar[dict[str, str]] = {}

    def to_dict(self) -> dict[str, Any]:
        """
        Serialise this parameter value as a dictionary.

        The instance state is taken from ``__dict__`` when present, falling
        back to ``__slots__``; the result is passed through
        :meth:`_postprocess_to_dict` before being returned.
        """
        if hasattr(self, "__dict__"):
            state = dict(self.__dict__)
        elif hasattr(self, "__slots__"):
            state = {name: getattr(self, name) for name in self.__slots__}
        else:
            raise NotImplementedError
        return self._postprocess_to_dict(state)

    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
        """Postprocess the results of :meth:`to_dict`. Identity by default."""
        return d

    def prepare_JSON_dump(self) -> dict[str, Any]:
        """
        Prepare this parameter value for serialisation as JSON.

        Subclasses must override; the base implementation is abstract.
        """
        raise NotImplementedError

    def dump_to_HDF5_group(self, group: HDF5Group):
        """
        Write this parameter value to an HDF5 group.

        Subclasses must override; the base implementation is abstract.
        """
        raise NotImplementedError

    @classmethod
    def dump_element_group_to_HDF5_group(cls, objs: list[Self], group: HDF5Group):
        """
        Write a list (from an element group) of parameter values to an HDF5 group.

        Subclasses must override; the base implementation is abstract.
        """
        raise NotImplementedError

    @classmethod
    def save_from_HDF5_group(cls, group: HDF5Group, param_id: int, workflow: Workflow):
        """
        Extract a parameter value from an HDF5 group.

        Subclasses must override; the base implementation is abstract.
        """
        raise NotImplementedError

    @classmethod
    def save_from_JSON(cls, data, param_id: int | list[int], workflow: Workflow):
        """
        Extract a parameter value from JSON data.

        Subclasses must override; the base implementation is abstract.
        """
        raise NotImplementedError
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
@dataclass
class ParameterPath(JSONLike):
    """
    Path to a parameter, optionally scoped to a particular task.
    """

    # TODO: unused?
    #: The path to the parameter (a sequence of path components).
    path: Sequence[str | int | float]
    #: The task in which to look up the parameter.
    task: TaskTemplate | TaskSchema | None = None  # default is "current" task
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
@dataclass
@hydrate
class Parameter(JSONLike):
    """
    A general parameter to a workflow task.

    Parameters
    ----------
    typ:
        Type code. Used to look up the :py:class:`ParameterValue` for this
        parameter, if any.
    is_file:
        Whether this parameter represents a file.
    sub_parameters: list[SubParameter]
        Any parameters packed within this one.
    _value_class: type[ParameterValue]
        Class that provides the implementation of this parameter's values.
        Not normally directly user-managed.
    _hash_value:
        Hash of this class. Not normally user-managed.
    _validation:
        Validation schema.
    """

    _validation_schema: ClassVar[str] = "parameters_spec_schema.yaml"
    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
        ChildObjectSpec(
            name="typ",
            json_like_name="type",
        ),
        ChildObjectSpec(
            name="_validation",
            class_obj=ValidaSchema,
        ),
    )

    #: Type code. Used to look up the :py:class:`ParameterValue` for this
    #: parameter, if any.
    typ: str
    #: Whether this parameter represents a file.
    is_file: bool = False
    #: Any parameters packed within this one.
    sub_parameters: list[SubParameter] = field(default_factory=list)
    _value_class: type[ParameterValue] | None = None
    _hash_value: str | None = field(default=None, repr=False)
    _validation: Schema | None = None

    def __repr__(self) -> str:
        # Only show attributes that deviate from their defaults.
        optional_parts: list[str] = []
        if self.is_file:
            optional_parts.append(f", is_file={self.is_file!r}")
        if self.sub_parameters:
            optional_parts.append(f", sub_parameters={self.sub_parameters!r}")
        if self._value_class is not None:
            optional_parts.append(f", _value_class={self._value_class!r}")
        return (
            f"{self.__class__.__name__}("
            f"typ={self.typ!r}{''.join(optional_parts)}"
            f")"
        )

    def __post_init__(self) -> None:
        # Normalise/validate the type code, then resolve the value class.
        self.typ = check_valid_py_identifier(self.typ)
        self._set_value_class()

    def _set_value_class(self) -> None:
        """Resolve the :py:class:`ParameterValue` subclass matching our type code."""
        # custom parameter classes must inherit from `ParameterValue` not the app
        # subclass:
        if self._value_class is not None:
            return
        for candidate in ParameterValue.__subclasses__():
            if candidate._typ == self.typ:
                self._value_class = candidate
                return

    def __eq__(self, other: Any) -> bool:
        # Equality is by type code only.
        if not isinstance(other, self.__class__):
            return False
        return self.typ == other.typ

    def __lt__(self, other: Parameter):
        # Ordering is by type code, e.g. for stable sorted output.
        return self.typ < other.typ

    def __deepcopy__(self, memo: dict[int, Any]):
        state = self.to_dict()
        # The validation schema is shared, not deep-copied.
        validation = state.pop("_validation")
        clone = self.__class__(**copy.deepcopy(state, memo))
        clone._validation = validation
        return clone

    @override
    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
        dct = super()._postprocess_to_dict(d)
        # The value class is re-resolved on construction; never serialised.
        del dct["_value_class"]
        if dct.get("name", None) is None:
            dct.pop("name", None)
        dct.pop("_task_schema", None)  # TODO: how do we have a _task_schema ref?
        return dct

    @property
    def url_slug(self) -> str:
        """
        Representation of this parameter as part of a URL.
        """
        slug = self.typ.lower()
        return slug.replace("_", "-")

    def _instantiate_value(self, source: ParamSource, val: dict) -> Any:
        """
        Convert the serialized form of this parameter to its "real" form,
        if that is valid to do at all.
        """
        if self._value_class is None:
            return val
        # Either a named alternative constructor, or the class itself.
        method_name = source.get("value_class_method")
        if method_name is None:
            factory = self._value_class
        else:
            factory = getattr(self._value_class, method_name)
        return factory(**val)

    def _force_value_class(self) -> type[ParameterValue] | None:
        """Return the value class, resolving it first if not yet set."""
        if self._value_class is None:
            self._set_value_class()
        return self._value_class
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
@dataclass
class SubParameter:
    """
    A parameter that forms one component of another, containing parameter.
    """

    #: Location of this sub-parameter within the containing parameter.
    address: Address
    #: The main parameter that contains this one.
    parameter: Parameter
|
|
287
|
+
|
|
288
|
+
|
|
289
|
+
@dataclass
@hydrate
class SchemaParameter(JSONLike):
    """
    A parameter bound in a schema.

    Parameters
    ----------
    parameter: Parameter
        The parameter being bound.
    """

    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
        ChildObjectSpec(
            name="parameter",
            class_name="Parameter",
            shared_data_name="parameters",
            shared_data_primary_key="typ",
        ),
    )

    def __post_init__(self) -> None:
        self._validate()

    def _validate(self) -> None:
        # A bare string is promoted to a full Parameter of that type.
        if isinstance(self.parameter, str):
            self.parameter: Parameter = self._app.Parameter(typ=self.parameter)

    @property
    def typ(self) -> str:
        """
        The type code of the underlying parameter.
        """
        return self.parameter.typ
|
|
323
|
+
|
|
324
|
+
|
|
325
|
+
class NullDefault(enum.Enum):
    """
    Sentinel enumeration used to distinguish an explicit null.
    """

    #: The sole member: a special sentinel, used in situations where otherwise
    #: a JSON object or array would be.
    NULL = 0
|
|
333
|
+
|
|
334
|
+
|
|
335
|
+
@hydrate
|
|
336
|
+
class SchemaInput(SchemaParameter):
|
|
337
|
+
"""A Parameter as used within a particular schema, for which a default value may be
|
|
338
|
+
applied.
|
|
339
|
+
|
|
340
|
+
Parameters
|
|
341
|
+
----------
|
|
342
|
+
parameter:
|
|
343
|
+
The parameter (i.e. type) of this schema input.
|
|
344
|
+
multiple:
|
|
345
|
+
If True, expect one or more of these parameters defined in the workflow,
|
|
346
|
+
distinguished by a string label in square brackets. For example `p1[0]` for a
|
|
347
|
+
parameter `p1`.
|
|
348
|
+
labels:
|
|
349
|
+
Dict whose keys represent the string labels that distinguish multiple parameters
|
|
350
|
+
if `multiple` is `True`. Use the key "*" to mean all labels not matching
|
|
351
|
+
other label keys. If `multiple` is `False`, this will default to a
|
|
352
|
+
single-item dict with an empty string key: `{{"": {{}}}}`. If `multiple` is
|
|
353
|
+
`True`, this will default to a single-item dict with the catch-all key:
|
|
354
|
+
`{{"*": {{}}}}`. On initialisation, remaining keyword-arguments are treated as default
|
|
355
|
+
values for the dict values of `labels`.
|
|
356
|
+
default_value:
|
|
357
|
+
The default value for this input parameter. This is itself a default value that
|
|
358
|
+
will be applied to all `labels` values if a "default_value" key does not exist.
|
|
359
|
+
propagation_mode:
|
|
360
|
+
Determines how this input should propagate through the workflow. This is a default
|
|
361
|
+
value that will be applied to all `labels` values if a "propagation_mode" key does
|
|
362
|
+
not exist. By default, the input is allowed to be used in downstream tasks simply
|
|
363
|
+
because it has a compatible type (this is the "implicit" propagation mode). Other
|
|
364
|
+
options are "explicit", meaning that the parameter must be explicitly specified in
|
|
365
|
+
the downstream task `input_sources` for it to be used, and "never", meaning that
|
|
366
|
+
the parameter must not be used in downstream tasks and will be inaccessible to
|
|
367
|
+
those tasks.
|
|
368
|
+
group:
|
|
369
|
+
Determines the name of the element group from which this input should be sourced.
|
|
370
|
+
This is a default value that will be applied to all `labels` if a "group" key
|
|
371
|
+
does not exist.
|
|
372
|
+
allow_failed_dependencies
|
|
373
|
+
This controls whether failure to retrieve inputs (i.e. an
|
|
374
|
+
`UnsetParameterDataError` is raised for one of the input sources) should be
|
|
375
|
+
allowed. By default, the unset value, which is equivalent to `False`, means no
|
|
376
|
+
failures are allowed. If set to `True`, any number of failures are allowed. If an
|
|
377
|
+
integer is specified, that number of failures are permitted. Finally, if a float
|
|
378
|
+
is specified, that proportion of failures are allowed.
|
|
379
|
+
"""
|
|
380
|
+
|
|
381
|
+
_task_schema: TaskSchema | None = None # assigned by parent TaskSchema
|
|
382
|
+
|
|
383
|
+
_child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
|
|
384
|
+
ChildObjectSpec(
|
|
385
|
+
name="parameter",
|
|
386
|
+
class_name="Parameter",
|
|
387
|
+
shared_data_name="parameters",
|
|
388
|
+
shared_data_primary_key="typ",
|
|
389
|
+
),
|
|
390
|
+
)
|
|
391
|
+
|
|
392
|
+
def __init__(
|
|
393
|
+
self,
|
|
394
|
+
parameter: Parameter | str,
|
|
395
|
+
multiple: bool = False,
|
|
396
|
+
labels: dict[str, LabelInfo] | None = None,
|
|
397
|
+
default_value: InputValue | Any | NullDefault = NullDefault.NULL,
|
|
398
|
+
propagation_mode: ParameterPropagationMode = ParameterPropagationMode.IMPLICIT,
|
|
399
|
+
group: str | None = None,
|
|
400
|
+
allow_failed_dependencies: int | float | bool | None = False,
|
|
401
|
+
):
|
|
402
|
+
# TODO: can we define elements groups on local inputs as well, or should these be
|
|
403
|
+
# just for elements from other tasks?
|
|
404
|
+
|
|
405
|
+
# TODO: test we allow unlabelled with accepts-multiple True.
|
|
406
|
+
# TODO: test we allow a single labelled with accepts-multiple False.
|
|
407
|
+
|
|
408
|
+
if isinstance(parameter, str):
|
|
409
|
+
try:
|
|
410
|
+
#: The parameter (i.e. type) of this schema input.
|
|
411
|
+
self.parameter = self._app.parameters.get(parameter)
|
|
412
|
+
except ValueError:
|
|
413
|
+
self.parameter = self._app.Parameter(parameter)
|
|
414
|
+
else:
|
|
415
|
+
self.parameter = parameter
|
|
416
|
+
|
|
417
|
+
if allow_failed_dependencies is None:
|
|
418
|
+
allow_failed_dependencies = 0.0
|
|
419
|
+
elif isinstance(allow_failed_dependencies, bool):
|
|
420
|
+
allow_failed_dependencies = float(allow_failed_dependencies)
|
|
421
|
+
|
|
422
|
+
#: Whether to expect multiple labels for this parameter.
|
|
423
|
+
self.multiple = multiple
|
|
424
|
+
self.allow_failed_dependencies = allow_failed_dependencies
|
|
425
|
+
|
|
426
|
+
#: Dict whose keys represent the string labels that distinguish multiple
|
|
427
|
+
#: parameters if `multiple` is `True`.
|
|
428
|
+
self.labels: dict[str, LabelInfo]
|
|
429
|
+
if labels is None:
|
|
430
|
+
if self.multiple:
|
|
431
|
+
self.labels = {"*": {}}
|
|
432
|
+
else:
|
|
433
|
+
self.labels = {"": {}}
|
|
434
|
+
else:
|
|
435
|
+
self.labels = labels
|
|
436
|
+
if not self.multiple:
|
|
437
|
+
# check single-item:
|
|
438
|
+
if len(self.labels) > 1:
|
|
439
|
+
raise ValueError(
|
|
440
|
+
f"If `{self.__class__.__name__}.multiple` is `False`, "
|
|
441
|
+
f"then `labels` must be a single-item `dict` if specified, but "
|
|
442
|
+
f"`labels` is: {self.labels!r}."
|
|
443
|
+
)
|
|
444
|
+
|
|
445
|
+
labels_defaults: LabelInfo = {}
|
|
446
|
+
if propagation_mode is not None:
|
|
447
|
+
labels_defaults["propagation_mode"] = propagation_mode
|
|
448
|
+
if group is not None:
|
|
449
|
+
labels_defaults["group"] = group
|
|
450
|
+
|
|
451
|
+
# apply defaults:
|
|
452
|
+
for k, v in self.labels.items():
|
|
453
|
+
labels_defaults_i = copy.deepcopy(labels_defaults)
|
|
454
|
+
if default_value is not NullDefault.NULL:
|
|
455
|
+
if isinstance(default_value, InputValue):
|
|
456
|
+
labels_defaults_i["default_value"] = default_value
|
|
457
|
+
else:
|
|
458
|
+
labels_defaults_i["default_value"] = self._app.InputValue(
|
|
459
|
+
parameter=self.parameter,
|
|
460
|
+
value=default_value,
|
|
461
|
+
label=k,
|
|
462
|
+
)
|
|
463
|
+
label_i: LabelInfo = {**labels_defaults_i, **v}
|
|
464
|
+
if "propagation_mode" in label_i:
|
|
465
|
+
label_i["propagation_mode"] = get_enum_by_name_or_val(
|
|
466
|
+
ParameterPropagationMode, label_i["propagation_mode"]
|
|
467
|
+
)
|
|
468
|
+
if "default_value" in label_i:
|
|
469
|
+
label_i["default_value"]._schema_input = self
|
|
470
|
+
self.labels[k] = label_i
|
|
471
|
+
|
|
472
|
+
self._set_parent_refs()
|
|
473
|
+
self._validate()
|
|
474
|
+
|
|
475
|
+
def __repr__(self) -> str:
    """Return a concise developer representation of this schema input."""
    extras: list[str] = []
    if not self.multiple and self.labels:
        # Single-label case: surface the default value and group, if present.
        info = self.labels[next(iter(self.labels))]
        if "default_value" in info:
            extras.append(f", default_value={info['default_value'].value!r}")
        if (group := info.get("group")) is not None:
            extras.append(f", group={group!r}")
    else:
        # Multiple (or label-less) case: show the whole labels mapping.
        extras.append(f", labels={str(self.labels)!r}")
    param_desc = f"{self.parameter.__class__.__name__}({self.parameter.typ!r})"
    return (
        f"{self.__class__.__name__}("
        f"parameter={param_desc}, "
        f"multiple={self.multiple!r}"
        f"{''.join(extras)}"
        f")"
    )
|
|
501
|
+
|
|
502
|
+
@override
def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
    """Serialise to a dict, converting propagation-mode enums to their names."""
    dct = super()._postprocess_to_dict(d)
    v: dict[str, ParameterPropagationMode]
    for k, v in dct["labels"].items():
        # NOTE(review): the key read here is "parameter_propagation_mode",
        # whereas the in-memory label dicts elsewhere use "propagation_mode" —
        # presumably the dict form uses a different key; confirm intentional.
        if (prop_mode := v.get("parameter_propagation_mode")) is not None:
            dct["labels"][k]["parameter_propagation_mode"] = prop_mode.name
    return dct
|
|
510
|
+
|
|
511
|
+
def _postprocess_to_json(self, json_like):
    """Flag labels whose serialised default is already `InputValue` data."""
    for info in json_like["labels"].values():
        if "default_value" in info:
            # marker consumed by `from_json_like` during deserialisation
            info["default_value_is_input_value"] = True
    return json_like
|
|
516
|
+
|
|
517
|
+
@classmethod
def from_json_like(cls, json_like, shared_data=None):
    """
    Construct an instance from JSON-like data, first normalising any
    per-label `default_value` entries into `InputValue` objects.
    """
    for k, v in json_like.get("labels", {}).items():
        if "default_value" in v:
            if "default_value_is_input_value" in v:
                # default is already serialised `InputValue` data
                # (flag set by `_postprocess_to_json`)
                inp_val_kwargs = v["default_value"]
            else:
                # raw value: wrap as an InputValue for this parameter/label
                inp_val_kwargs = {
                    "parameter": json_like["parameter"],
                    "value": v["default_value"],
                    "label": k,
                }
            json_like["labels"][k]["default_value"] = (
                cls._app.InputValue.from_json_like(
                    json_like=inp_val_kwargs,
                    shared_data=shared_data,
                )
            )

    return super().from_json_like(json_like, shared_data)
|
|
537
|
+
|
|
538
|
+
def __deepcopy__(self, memo: dict[int, Any]):
    """Deep copy, re-attaching (not copying) the task-schema back-reference."""
    kwargs: SchemaInputKwargs = {
        "parameter": copy.deepcopy(self.parameter, memo),
        "multiple": self.multiple,
        "labels": copy.deepcopy(self.labels, memo),
        "allow_failed_dependencies": self.allow_failed_dependencies,
    }
    obj = self.__class__(**kwargs)
    # the schema back-reference is shared, not duplicated
    obj._task_schema = self._task_schema
    return obj
|
|
548
|
+
|
|
549
|
+
@property
def default_value(self) -> InputValue | Literal[NullDefault.NULL] | None:
    """
    The default value of the input: the single label's default if one is
    set, `NullDefault.NULL` if the single label has no default, or `None`
    when there is no single labelled datum.
    """
    single_data = self.single_labelled_data
    if not single_data:
        return None
    try:
        return single_data["default_value"]
    except KeyError:
        return NullDefault.NULL
|
|
560
|
+
|
|
561
|
+
@property
def task_schema(self) -> TaskSchema:
    """
    The schema containing this input.

    Asserts that the back-reference has been assigned.
    """
    schema = self._task_schema
    assert schema is not None
    return schema
|
|
568
|
+
|
|
569
|
+
@property
def all_labelled_types(self) -> list[str]:
    """
    The labelled type string for each label: ``typ[label]``, or the bare
    ``typ`` for the empty label.
    """
    result: list[str] = []
    for lab in self.labels:
        result.append(f"{self.typ}[{lab}]" if lab else self.typ)
    return result
|
|
575
|
+
|
|
576
|
+
@property
def single_label(self) -> str | None:
    """
    The label of this input, assuming it is not multiple; `None` otherwise.
    """
    return None if self.multiple else next(iter(self.labels))
|
|
584
|
+
|
|
585
|
+
@property
def single_labelled_type(self) -> str | None:
    """
    The type code of this input, assuming it is not multiple; `None` otherwise.
    """
    if self.multiple:
        return None
    first_info = next(iter(self.labelled_info()))
    return first_info["labelled_type"]
|
|
593
|
+
|
|
594
|
+
@property
def single_labelled_data(self) -> LabelInfo | None:
    """
    The label info of this input, assuming it is not multiple; `None` otherwise.
    """
    label = self.single_label
    if label is None:
        return None
    return self.labels[label]
|
|
602
|
+
|
|
603
|
+
def labelled_info(self) -> Iterator[LabellingDescriptor]:
    """
    Yield a descriptor for each label associated with this input.

    Each descriptor carries the labelled type, propagation mode, group
    (possibly `None`), and the default value when one is set.
    """
    base_typ = self.parameter.typ
    for name, info in self.labels.items():
        descriptor: LabellingDescriptor = {
            "labelled_type": f"{base_typ}[{name}]" if name else base_typ,
            "propagation_mode": info["propagation_mode"],
            "group": info.get("group"),
        }
        if "default_value" in info:
            descriptor["default_value"] = info["default_value"]
        yield descriptor
|
|
617
|
+
|
|
618
|
+
@property
def _simple_labelled_info(self) -> Iterator[tuple[str, ParameterPropagationMode]]:
    """
    Cut-down version of :py:meth:`labelled_info` that has lower overheads:
    yields only (labelled type, propagation mode) pairs.
    """
    typ = self.parameter.typ
    for name, info in self.labels.items():
        yield (f"{typ}[{name}]" if name else typ), info["propagation_mode"]
|
|
626
|
+
|
|
627
|
+
def _validate(self) -> None:
    """
    Validate labels: coerce raw default values into `InputValue` objects and
    check that explicit `InputValue` defaults match this parameter and label.

    Raises
    ------
    ValueError
        If an explicit `InputValue` default is for a different parameter or
        carries a different label.
    """
    super()._validate()
    for k, v in self.labels.items():
        if "default_value" in v:
            if not isinstance(v["default_value"], InputValue):
                # wrap a raw default as an InputValue for this label
                def_val = self._app.InputValue(
                    parameter=self.parameter,
                    value=v["default_value"],
                    label=k,
                )
                v["default_value"] = def_val
            else:
                def_val = v["default_value"]
            # an explicit InputValue must agree with this input's
            # parameter and this label:
            if def_val.parameter != self.parameter or def_val.label != k:
                raise ValueError(
                    f"{self.__class__.__name__} `default_value` for label {k!r} must "
                    f"be an `InputValue` for parameter: {self.parameter!r} with the "
                    f"same label, but specified `InputValue` is: "
                    f"{v['default_value']!r}."
                )
|
|
647
|
+
|
|
648
|
+
@property
def input_or_output(self) -> str:
    """
    Whether this is an input or output. Always ``input`` for this class.
    """
    return "input"
|
|
654
|
+
|
|
655
|
+
|
|
656
|
+
@dataclass(init=False)
@hydrate
class SchemaOutput(SchemaParameter):
    """A Parameter as outputted from particular task."""

    #: The basic parameter this supplies.
    parameter: Parameter
    #: How this output propagates.
    propagation_mode: ParameterPropagationMode

    def __init__(
        self,
        parameter: Parameter | str,
        propagation_mode: ParameterPropagationMode = ParameterPropagationMode.IMPLICIT,
    ):
        # accept either a `Parameter` object or a bare type string
        if isinstance(parameter, str):
            self.parameter: Parameter = self._app.Parameter(typ=parameter)
        else:
            self.parameter = parameter
        self.propagation_mode = propagation_mode

    @property
    def input_or_output(self) -> str:
        """
        Whether this is an input or output. Always ``output`` for this class.
        """
        return "output"

    def __repr__(self) -> str:
        """Return a concise developer representation of this schema output."""
        param_desc = f"{self.parameter.__class__.__name__}({self.parameter.typ!r})"
        return (
            f"{self.__class__.__name__}("
            f"parameter={param_desc}, "
            f"propagation_mode={self.propagation_mode.name!r}"
            f")"
        )
|
|
691
|
+
|
|
692
|
+
|
|
693
|
+
@dataclass
class BuiltinSchemaParameter:
    """
    A parameter of a built-in schema.

    Placeholder for parameters every schema implicitly has:

    * builtin inputs: resources, parameter_perturbations, method, implementation;
    * builtin outputs: time, memory use, node/hostname, etc.

    Builtin parameters do not propagate to other tasks (since all tasks define
    the same builtin parameters); however, they can be accessed if a downstream
    task schema specifically asks for them (e.g. for calculating/plotting a
    convergence test).
    """

    # TODO: Is this used anywhere?
    pass
|
|
707
|
+
|
|
708
|
+
|
|
709
|
+
class _BaseSequence(JSONLike):
    """
    A base class for shared methods of `ValueSequence` and `MultiPathSequence`.
    """

    def __eq__(self, other: Any) -> bool:
        # equality is defined by identical serialised (dict) state
        if not isinstance(other, self.__class__):
            return False
        return self.to_dict() == other.to_dict()

    @classmethod
    def from_json_like(cls, json_like, shared_data=None):
        """
        Construct from JSON-like data, handling the ``<key>::<method>``
        shorthand (e.g. ``values::from_range``) that dispatches to a factory
        class method.
        """
        if "path" in json_like:  # note: singular
            # only applicable to ValueSequence, although not well-defined/useful anyway,
            # I think.
            if "::" in json_like["path"]:
                path, cls_method = json_like["path"].split("::")
                json_like["path"] = path
                json_like["value_class_method"] = cls_method

        # find a key containing "values" (e.g. "values::from_range"), if any:
        val_key = next((item for item in json_like if "values" in item), "")
        if "::" in val_key:
            # class method (e.g. `from_range`, `from_file` etc):
            _, method = val_key.split("::")
            # merge the method's keyword arguments into the top level:
            json_like.update(json_like.pop(val_key))
            json_like = process_demo_data_strings(cls._app, json_like)
            obj = getattr(cls, method)(**json_like)
        else:
            obj = super().from_json_like(json_like, shared_data)

        return obj
|
|
740
|
+
|
|
741
|
+
|
|
742
|
+
class ValueSequence(_BaseSequence, ValuesMixin):
|
|
743
|
+
"""
|
|
744
|
+
A sequence of values.
|
|
745
|
+
|
|
746
|
+
Parameters
|
|
747
|
+
----------
|
|
748
|
+
path:
|
|
749
|
+
The path to this sequence.
|
|
750
|
+
values:
|
|
751
|
+
The values in this sequence.
|
|
752
|
+
nesting_order: int
|
|
753
|
+
A nesting order for this sequence. Can be used to compose sequences together.
|
|
754
|
+
label: str
|
|
755
|
+
A label for this sequence.
|
|
756
|
+
value_class_method: str
|
|
757
|
+
Name of a method used to generate sequence values. Not normally used directly.
|
|
758
|
+
"""
|
|
759
|
+
|
|
760
|
+
def __init__(
    self,
    path: str,
    values: Sequence[Any] | None,
    nesting_order: int | float | None = None,
    label: str | int | None = None,
    value_class_method: str | None = None,
):
    """Initialise the sequence; see the class docstring for parameter details."""
    # validate the path and reconcile it with any explicit label:
    path_, label_ = self._validate_parameter_path(path, label)
    #: The path to this sequence.
    self.path = path_
    #: The label of this sequence.
    self.label = label_
    #: The nesting order for this sequence.
    self.nesting_order = None if nesting_order is None else float(nesting_order)
    #: Name of a method used to generate sequence values.
    self.value_class_method = value_class_method

    if values is not None:
        self._values: list[Any] | None = [
            process_demo_data_strings(self._app, i) for i in values
        ]
    else:
        self._values = None

    self._values_group_idx: list[int] | None = None
    self._values_are_objs: list[bool] | None = (
        None  # assigned initially on `make_persistent`
    )

    self._workflow: Workflow | None = None  # assigned in `make_persistent`
    self._element_set: ElementSet | None = None  # assigned by parent `ElementSet`

    # assigned if this is an "inputs" sequence in `WorkflowTask._add_element_set`:
    self._parameter: Parameter | None = None

    self._path_split: list[str] | None = None  # assigned by property `path_split`

    #: Which class method of this class was used to instantiate this instance, if any:
    self._values_method: str | None = None
    #: Keyword-arguments that were passed to the factory class method of this class
    #: to instantiate this instance, if such a method was used:
    self._values_method_args: dict[str, Any] | None = None
|
|
803
|
+
|
|
804
|
+
def __repr__(self):
    """Return a debug representation of path, label, nesting order, and values."""
    pieces = [f"path={self.path!r}, "]
    if self.label:
        pieces.append(f"label={self.label!r}, ")
    pieces.append(f"nesting_order={self.nesting_order}, ")
    if self._values_group_idx:
        pieces.append(f"values_group_idx={self._values_group_idx}, ")
    pieces.append(f"values={self.values}")
    return f"{self.__class__.__name__}({''.join(pieces)})"
|
|
822
|
+
|
|
823
|
+
def __deepcopy__(self, memo: dict[int, Any]):
    """Deep copy via `to_dict`, re-attaching non-copied back-references."""
    kwargs = self.to_dict()
    kwargs["values"] = kwargs.pop("_values")

    # private state is restored after construction rather than passed to init:
    _values_group_idx = kwargs.pop("_values_group_idx")
    _values_are_objs = kwargs.pop("_values_are_objs")
    _values_method = kwargs.pop("_values_method", None)
    _values_method_args = kwargs.pop("_values_method_args", None)

    obj = self.__class__(**copy.deepcopy(kwargs, memo))

    obj._values_group_idx = _values_group_idx
    obj._values_are_objs = _values_are_objs
    obj._values_method = _values_method
    obj._values_method_args = _values_method_args

    # back-references are shared with the source, not copied:
    obj._workflow = self._workflow
    obj._element_set = self._element_set
    obj._path_split = self._path_split
    obj._parameter = self._parameter

    return obj
|
|
845
|
+
|
|
846
|
+
@property
def parameter(self) -> Parameter | None:
    """
    The parameter this sequence supplies, if one has been assigned.
    """
    return self._parameter
|
|
852
|
+
|
|
853
|
+
@property
def path_split(self) -> Sequence[str]:
    """
    The dot-separated components of this path (computed lazily and cached).
    """
    cached = self._path_split
    if cached is None:
        cached = self._path_split = self.path.split(".")
    return cached
|
|
861
|
+
|
|
862
|
+
@property
def path_type(self) -> str:
    """
    The first path component (e.g. ``inputs`` or ``resources``).
    """
    head, *_rest = self.path_split
    return head
|
|
868
|
+
|
|
869
|
+
@property
def input_type(self) -> str | None:
    """
    The input parameter type (label stripped), if this is an inputs sequence.
    """
    if self.path_type != "inputs":
        return None
    return self.path_split[1].replace(self._label_fmt, "")
|
|
877
|
+
|
|
878
|
+
@property
def input_path(self) -> str | None:
    """
    The sub-path within the input value, if this is an inputs sequence.
    """
    if self.path_type != "inputs":
        return None
    return ".".join(self.path_split[2:])
|
|
886
|
+
|
|
887
|
+
@property
def resource_scope(self) -> str | None:
    """
    The action scope component, if this is a resources sequence.
    """
    return self.path_split[1] if self.path_type == "resources" else None
|
|
895
|
+
|
|
896
|
+
@property
def is_sub_value(self) -> bool:
    """True if the values are for a sub part of the parameter."""
    if self.input_path:
        return True
    return False
|
|
900
|
+
|
|
901
|
+
@property
def _label_fmt(self) -> str:
    # bracketed label suffix, or the empty string when there is no label
    if not self.label:
        return ""
    return f"[{self.label}]"
|
|
904
|
+
|
|
905
|
+
@property
def labelled_type(self) -> str | None:
    """
    The labelled input type (e.g. ``p1[one]``), if this is an inputs sequence.
    """
    base = self.input_type
    if not base:
        return None
    return f"{base}{self._label_fmt}"
|
|
913
|
+
|
|
914
|
+
@classmethod
def _json_like_constructor(cls, json_like):
    """Invoked by `JSONLike.from_json_like` instead of `__init__`."""
    # Extract private persistence-related fields; these are set on the
    # instance after construction rather than passed to `__init__`.
    group_idx = json_like.pop("_values_group_idx", None)
    are_objs = json_like.pop("_values_are_objs", None)
    method = json_like.pop("_values_method", None)
    method_args = json_like.pop("_values_method_args", None)
    if "_values" in json_like:
        json_like["values"] = json_like.pop("_values")

    obj = cls(**json_like)
    obj._values_group_idx = group_idx
    obj._values_are_objs = are_objs
    obj._values_method = method
    obj._values_method_args = method_args
    return obj
|
|
931
|
+
|
|
932
|
+
def _validate_parameter_path(
    self, path: str, label: str | int | None
) -> tuple[str, str | int | None]:
    """Parse the supplied path and perform basic checks on it.

    This method also adds the specified `SchemaInput` label to the path and checks for
    consistency if a label is already present.

    Returns
    -------
    tuple
        The (possibly rewritten) path, and the effective label.
    """
    label_arg = label

    if not isinstance(path, str):
        raise MalformedParameterPathError(
            f"`path` must be a string, but given path has type {type(path)} with value "
            f"{path!r}."
        )
    path_l = path.lower()
    path_split = path_l.split(".")
    ALLOWED_PATH_START = ("inputs", "resources", "environments", "env_preset")
    if not path_split[0] in ALLOWED_PATH_START:
        raise MalformedParameterPathError(
            f"`path` must start with one of: "
            f'{", ".join(f"{pfx!r}" for pfx in ALLOWED_PATH_START)}, but given path '
            f"is: {path!r}."
        )

    # a label may also be embedded in the path itself, e.g. "inputs.p1[one]":
    _, label_from_path = split_param_label(path_l)

    if path_split[0] == "inputs":
        if label_arg is not None and label_arg != "":
            if label_from_path is None:
                # add label to path without lower casing any parts:
                path_split_orig = path.split(".")
                path_split_orig[1] += f"[{label_arg}]"
                path = ".".join(path_split_orig)
            elif str(label_arg) != label_from_path:
                # both specified but disagree:
                raise ValueError(
                    f"{self.__class__.__name__} `label` argument is specified as "
                    f"{label_arg!r}, but a distinct label is implied by the sequence "
                    f"path: {path!r}."
                )
        elif label_from_path:
            label = label_from_path

    elif path_split[0] == "resources":
        # labels are not meaningful for resource sequences:
        if label_from_path or label_arg:
            raise ValueError(
                f"{self.__class__.__name__} `label` argument ({label_arg!r}) and/or "
                f"label specification via `path` ({path!r}) is not supported for "
                f"`resource` sequences."
            )
        try:
            self._app.ActionScope.from_json_like(path_split[1])
        except Exception as err:
            raise MalformedParameterPathError(
                f"Cannot parse a resource action scope from the second component of the "
                f"path: {path!r}. Exception was: {err}."
            ) from None

        if len(path_split) > 2:
            if path_split[2] not in ResourceSpec.ALLOWED_PARAMETERS:
                raise UnknownResourceSpecItemError(
                    f"Resource item name {path_split[2]!r} is unknown. Allowed "
                    f"resource item names are: {ResourceSpec._allowed_params_quoted()}."
                )
        label = ""

    elif path_split[0] == "environments":
        # rewrite as a resources path:
        path = f"resources.any.{path}"
        label = str(label) if label is not None else ""
    else:
        pass
        # note: `env_preset` paths also need to be transformed into `resources`
        # paths, but we cannot do that until the sequence is part of a task, since
        # the available environment presets are defined in the task schema.

    return path, label
|
|
1010
|
+
|
|
1011
|
+
@override
def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
    """Serialise to a dict, stripping derived caches and back-references."""
    serialised = super()._postprocess_to_dict(d)
    del serialised["_parameter"]
    del serialised["_path_split"]
    serialised.pop("_workflow", None)
    return serialised
|
|
1018
|
+
|
|
1019
|
+
@property
def normalised_path(self) -> str:
    """
    The path to this sequence (already normalised at construction).
    """
    return self.path
|
|
1025
|
+
|
|
1026
|
+
@property
def normalised_inputs_path(self) -> str | None:
    """
    The normalised path without the "inputs" prefix, if the sequence is an
    inputs sequence; otherwise `None`.
    """
    if not self.input_type:
        return None
    sub_path = self.input_path
    return f"{self.labelled_type}.{sub_path}" if sub_path else self.labelled_type
|
|
1039
|
+
|
|
1040
|
+
def make_persistent(
    self, workflow: Workflow, source: ParamSource
) -> tuple[str, list[int], bool]:
    """Save value to a persistent workflow.

    Returns
    -------
    tuple
        The normalised path, the parameter-group indices of the stored
        values, and whether data was newly saved by this call.
    """

    if self._values_group_idx is not None:
        # already persistent; just verify the stored references are valid
        if not workflow.check_parameters_exist(self._values_group_idx):
            raise RuntimeError(
                f"{self.__class__.__name__} has a parameter group index "
                f"({self._values_group_idx}), but does not exist in the workflow."
            )
        # TODO: log if already persistent.
        return self.normalised_path, self._values_group_idx, False

    data_ref: list[int] = []
    source = copy.deepcopy(source)
    if self.value_class_method:
        source["value_class_method"] = self.value_class_method
    are_objs: list[bool] = []
    assert self._values is not None
    for idx, item in enumerate(self._values):
        # record if ParameterValue sub-classes are passed for values, which allows
        # us to re-init the objects on access to `.value`:
        are_objs.append(isinstance(item, ParameterValue))
        # each value gets its own copy of the source, tagged with its index:
        source = copy.deepcopy(source)
        source["sequence_idx"] = idx
        pg_idx_i = workflow._add_parameter_data(item, source=source)
        data_ref.append(pg_idx_i)

    self._values_group_idx = data_ref
    self._workflow = workflow
    # local values are dropped; they now live in the workflow store:
    self._values = None
    self._values_are_objs = are_objs
    return self.normalised_path, data_ref, True
|
|
1074
|
+
|
|
1075
|
+
@property
def workflow(self) -> Workflow | None:
    """
    The workflow containing this sequence, if known.
    """
    if self._workflow:
        # (assigned in `make_persistent`)
        return self._workflow
    element_set = self._element_set
    if element_set:
        # (assigned by parent `ElementSet`)
        template = element_set.task_template.workflow_template
        return template.workflow if template else None
    return None
|
|
1088
|
+
|
|
1089
|
+
@property
def values(self) -> Sequence[Any] | None:
    """
    The values in this sequence.

    If the values have been made persistent, they are fetched from the
    workflow's parameter store (re-instantiating `ParameterValue`-like
    objects where applicable); otherwise the locally-held values are
    returned. If persistent indices exist but no workflow is resolvable,
    an empty list is returned (matching the previous behaviour of
    skipping every item).
    """
    if self._values_group_idx is None:
        # not yet persistent: return the locally-held values
        return self._values
    # hoisted out of the loop: `self.workflow` is loop-invariant
    workflow = self.workflow
    if not workflow:
        return []
    vals: list[Any] = []
    for idx, pg_idx_i in enumerate(self._values_group_idx):
        param_i = workflow.get_parameter(pg_idx_i)
        # prefer the data; fall back to the associated file reference
        val_i = param_i.data if param_i.data is not None else param_i.file
        # `val_i` might already be a `_value_class` object if the store has not
        # yet been committed to disk; only re-instantiate raw dict data:
        if (
            self.parameter
            and self._values_are_objs
            and self._values_are_objs[idx]
            and isinstance(val_i, dict)
        ):
            val_i = self.parameter._instantiate_value(param_i.source, val_i)
        vals.append(val_i)
    return vals
|
|
1119
|
+
|
|
1120
|
+
@classmethod
def _process_mixin_args(
    cls,
    values: list[Any],
    parameter: Parameter | SchemaInput | str | None = None,
    path: str | None = None,
    nesting_order: float | None = None,
    label: str | int | None = None,
    value_class_method: str | None = None,
):
    """Process arguments as generated by the mixin class for instantiation of
    this specific class.

    Note that `parameter` is accepted but not forwarded in the returned
    keyword arguments.
    """
    return dict(
        values=values,
        path=path,
        nesting_order=nesting_order,
        label=label,
        value_class_method=value_class_method,
    )
|
|
1139
|
+
|
|
1140
|
+
def _remember_values_method_args(
    self, name: str | None, args: dict[str, Any]
) -> Self:
    """Record which factory method (and its arguments) created this sequence."""
    # note: plural "values" naming here
    self._values_method = name
    self._values_method_args = args
    return self
|
|
1146
|
+
|
|
1147
|
+
@classmethod
def from_linear_space(
    cls,
    path: str,
    start: float,
    stop: float,
    num: int,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence from a NumPy linear space: `num` evenly spaced values
    from `start` to `stop`.
    """
    # delegate to the values-mixin implementation
    return super()._from_linear_space(
        path=path,
        start=start,
        stop=stop,
        num=num,
        label=label,
        nesting_order=nesting_order,
        value_class_method=value_class_method,
        **kwargs,
    )
|
|
1172
|
+
|
|
1173
|
+
@classmethod
def from_geometric_space(
    cls,
    path: str,
    start: float,
    stop: float,
    num: int,
    endpoint=True,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence from a NumPy geometric space: `num` values spaced
    evenly on a log scale between `start` and `stop` (both specified
    directly; `endpoint` controls inclusion of `stop`).
    """
    # delegate to the values-mixin implementation
    return super()._from_geometric_space(
        path=path,
        start=start,
        stop=stop,
        num=num,
        endpoint=endpoint,
        label=label,
        nesting_order=nesting_order,
        value_class_method=value_class_method,
        **kwargs,
    )
|
|
1200
|
+
|
|
1201
|
+
@classmethod
def from_log_space(
    cls,
    path: str,
    start: float,
    stop: float,
    num: int,
    base=10.0,
    endpoint=True,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence from a NumPy logarithmic space: `num` values spaced
    evenly on a log scale from ``base**start`` to ``base**stop``.
    """
    # delegate to the values-mixin implementation
    return super()._from_log_space(
        path=path,
        start=start,
        stop=stop,
        num=num,
        base=base,
        endpoint=endpoint,
        label=label,
        nesting_order=nesting_order,
        value_class_method=value_class_method,
        **kwargs,
    )
|
|
1230
|
+
|
|
1231
|
+
@classmethod
def from_range(
    cls,
    path: str,
    start: float,
    stop: float,
    step: int | float = 1,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence from a NumPy range: values from `start` up to (but
    excluding) `stop` in increments of `step`.
    """
    # delegate to the values-mixin implementation
    return super()._from_range(
        path=path,
        start=start,
        stop=stop,
        step=step,
        label=label,
        nesting_order=nesting_order,
        value_class_method=value_class_method,
        **kwargs,
    )
|
|
1256
|
+
|
|
1257
|
+
@classmethod
def from_file(
    cls,
    path: str,
    file_path: str | Path,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence from data within a simple file at `file_path`.
    """
    # delegate to the values-mixin implementation
    return super()._from_file(
        path=path,
        file_path=file_path,
        label=label,
        nesting_order=nesting_order,
        value_class_method=value_class_method,
        **kwargs,
    )
|
|
1278
|
+
|
|
1279
|
+
@classmethod
def from_load_txt(
    cls,
    path: str,
    file_path: str | Path,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence from data within a text file using Numpy's `loadtxt`.
    """
    # delegate to the values-mixin implementation
    return super()._from_load_txt(
        path=path,
        file_path=file_path,
        label=label,
        nesting_order=nesting_order,
        value_class_method=value_class_method,
        **kwargs,
    )
|
|
1300
|
+
|
|
1301
|
+
@classmethod
def from_rectangle(
    cls,
    path: str,
    start: Sequence[float],
    stop: Sequence[float],
    num: Sequence[int],
    coord: int | None = None,
    include: list[str] | None = None,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence from coordinates that cover the perimeter of a rectangle.

    Parameters
    ----------
    start:
        Rectangle corner coordinates (two components).
    stop:
        Opposite rectangle corner coordinates (two components).
    num:
        Number of points along each side (two components).
    coord:
        Which coordinate to use. Either 0, 1, or `None`, meaning each value will be
        both coordinates.
    include
        If specified, include only the specified edges. Choose from "top", "right",
        "bottom", "left".
    """
    # delegate to the values-mixin implementation
    return super()._from_rectangle(
        path=path,
        start=start,
        stop=stop,
        num=num,
        coord=coord,
        include=include,
        label=label,
        nesting_order=nesting_order,
        value_class_method=value_class_method,
        **kwargs,
    )
|
|
1339
|
+
|
|
1340
|
+
@classmethod
def from_random_uniform(
    cls,
    path: str,
    num: int,
    low: float = 0.0,
    high: float = 1.0,
    seed: int | list[int] | None = None,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence from a uniform random number generator.

    Deprecated: use `from_uniform` instead; `num` maps onto its `shape` argument.
    """
    warnings.warn(
        f"{cls.__name__!r}: Please use `from_uniform` instead of "
        f"`from_random_uniform`, which will be removed in a future release.",
        DeprecationWarning,
        stacklevel=2,
    )
    forwarded = {
        "path": path,
        "shape": num,
        "low": low,
        "high": high,
        "seed": seed,
        "label": label,
        "nesting_order": nesting_order,
        "value_class_method": value_class_method,
    }
    return cls.from_uniform(**forwarded, **kwargs)
|
|
1373
|
+
|
|
1374
|
+
@classmethod
def from_uniform(
    cls,
    path: str,
    shape: int | Sequence[int],
    low: float = 0.0,
    high: float = 1.0,
    seed: int | list[int] | None = None,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence of values drawn from a uniform random number generator.
    """
    uniform_args = {
        "path": path,
        "shape": shape,
        "low": low,
        "high": high,
        "seed": seed,
        "label": label,
        "nesting_order": nesting_order,
        "value_class_method": value_class_method,
    }
    return super()._from_uniform(**uniform_args, **kwargs)
|
|
1401
|
+
|
|
1402
|
+
@classmethod
def from_normal(
    cls,
    path: str,
    shape: int | Sequence[int],
    loc: float = 0.0,
    scale: float = 1.0,
    seed: int | list[int] | None = None,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence of values drawn from a normal (Gaussian) random number
    generator.
    """
    normal_args = {
        "path": path,
        "shape": shape,
        "loc": loc,
        "scale": scale,
        "seed": seed,
        "label": label,
        "nesting_order": nesting_order,
        "value_class_method": value_class_method,
    }
    return super()._from_normal(**normal_args, **kwargs)
|
|
1429
|
+
|
|
1430
|
+
@classmethod
def from_log_normal(
    cls,
    path: str,
    shape: int | Sequence[int],
    mean: float = 0.0,
    sigma: float = 1.0,
    seed: int | list[int] | None = None,
    label: str | int | None = None,
    nesting_order: float = 0,
    value_class_method: str | None = None,
    **kwargs,
) -> Self:
    """
    Build a sequence of values drawn from a log-normal random number generator.
    """
    log_normal_args = {
        "path": path,
        "shape": shape,
        "mean": mean,
        "sigma": sigma,
        "seed": seed,
        "label": label,
        "nesting_order": nesting_order,
        "value_class_method": value_class_method,
    }
    return super()._from_log_normal(**log_normal_args, **kwargs)
|
|
1457
|
+
|
|
1458
|
+
|
|
1459
|
+
class MultiPathSequence(_BaseSequence):
    """
    A sequence of values to be distributed across one or more paths.

    Notes
    -----
    This is useful when we would like to generate values for multiple input paths that
    have some interdependency, or when they must be generated together in one go.

    Parameters
    ----------
    paths:
        The paths to this multi-path sequence.
    values:
        The values in this multi-path sequence; the first axis (or outer list) must
        have one entry per path in `paths`.
    nesting_order: int
        A nesting order for this multi-path sequence. Can be used to compose sequences
        together.
    label: str
        A label for this multi-path sequence.
    value_class_method: str
        Name of a method used to generate multi-path sequence values. Not normally used
        directly.
    """

    # TODO: add a `path_axis` argument with doc string like:
    # path_axis:
    #     The axis (as in a Numpy axis) along `values` to which the different paths
    #     correspond.

    def __init__(
        self,
        paths: Sequence[str],
        values: NDArray | Sequence[Sequence] | None,
        nesting_order: int | float | None = None,
        label: str | int | None = None,
        value_class_method: str | None = None,
    ):
        self.paths = list(paths)
        self.nesting_order = nesting_order
        self.label = label
        self.value_class_method = value_class_method

        # child sequences and raw values are held here only until this object is
        # bound to a parent `ElementSet` (see `_move_to_sequence_list`):
        self._sequences: list[ValueSequence] | None = None
        self._values: NDArray | Sequence[Sequence] | None = None

        if values is not None:
            if (len_paths := len(paths)) != (len_vals := len(values)):
                raise ValueError(
                    f"The number of values ({len_vals}) must be equal to the number of "
                    f"paths provided ({len_paths})."
                )
            self._values = values
            # one child `ValueSequence` per path, sharing this sequence's label,
            # nesting order and value-class method:
            self._sequences = [
                self._app.ValueSequence(
                    path=path,
                    values=values[idx],
                    label=label,
                    nesting_order=nesting_order,
                    value_class_method=value_class_method,
                )
                for idx, path in enumerate(paths)
            ]

        # assigned by `_move_to_sequence_list` (invoked by first init of parent
        # `ElementSet`), corresponds to the sequence indices with the element set's
        # sequence list:
        self._sequence_indices: Sequence[int] | None = None

        self._element_set: ElementSet | None = None  # assigned by parent `ElementSet`

        # name/arguments of the factory class method used to build this instance,
        # if any (e.g. `from_latin_hypercube`):
        self._values_method: str | None = None
        self._values_method_args: dict | None = None

    def __repr__(self) -> str:
        """Return a debug representation; label and value-class method are only
        shown when set."""
        label_str = f"label={self.label!r}, " if self.label else ""
        val_cls_str = (
            f"value_class_method={self.value_class_method!r}, "
            if self.value_class_method
            else ""
        )
        return (
            f"{self.__class__.__name__}("
            f"paths={self.paths!r}, "
            f"{label_str}"
            f"nesting_order={self.nesting_order}, "
            f"{val_cls_str}"
            f"values={self.values}"
            f")"
        )

    def __deepcopy__(self, memo: dict[int, Any]):
        """Deep-copy via the serialised form, restoring afterwards the private
        state that `__init__` does not accept."""
        kwargs = self.to_dict()
        kwargs["values"] = kwargs.pop("_values")

        _sequences = kwargs.pop("_sequences", None)
        _sequence_indices = kwargs.pop("_sequence_indices", None)
        _values_method = kwargs.pop("_values_method", None)
        _values_method_args = kwargs.pop("_values_method_args", None)

        obj = self.__class__(**copy.deepcopy(kwargs, memo))

        obj._sequences = _sequences
        obj._sequence_indices = _sequence_indices
        obj._values_method = _values_method
        obj._values_method_args = _values_method_args

        # NOTE: the element-set binding is shared (not deep-copied):
        obj._element_set = self._element_set

        return obj

    @override
    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
        # child sequences are reconstructible from values/paths, so they are not
        # serialised:
        dct = super()._postprocess_to_dict(d)
        del dct["_sequences"]
        return dct

    @classmethod
    def _json_like_constructor(cls, json_like):
        """Invoked by `JSONLike.from_json_like` instead of `__init__`."""

        # pop the keys we don't accept in `__init__`, and then assign after `__init__`:
        _sequence_indices = json_like.pop("_sequence_indices", None)

        _values_method = json_like.pop("_values_method", None)
        _values_method_args = json_like.pop("_values_method_args", None)
        if "_values" in json_like:
            json_like["values"] = json_like.pop("_values")

        obj = cls(**json_like)
        obj._sequence_indices = _sequence_indices
        obj._values_method = _values_method
        obj._values_method_args = _values_method_args
        return obj

    @property
    def sequence_indices(self) -> Sequence[int] | None:
        """
        The range indices (start and stop) to the parent element set's sequences list that
        correspond to the `ValueSequence`s generated by this multi-path sequence, if this
        object is bound to a parent element set.
        """
        return self._sequence_indices

    @property
    def sequences(self) -> Sequence[ValueSequence]:
        """
        The child value sequences, one for each path.
        """
        if self._sequence_indices:
            # they are stored in the parent `ElementSet`
            assert self._element_set
            return self._element_set.sequences[slice(*self._sequence_indices)]
        else:
            # not yet bound to a parent `ElementSet`
            assert self._sequences
            return self._sequences

    @property
    def values(self) -> list[Sequence[Any]]:
        """The values of each child sequence, one entry per path."""
        values = []
        for seq_i in self.sequences:
            # NOTE(review): falsy-but-present values (e.g. an empty list) would
            # trip this assert — confirm that is intended.
            assert seq_i.values
            values.append(seq_i.values)
        return values

    def _move_to_sequence_list(self, sequences: list[ValueSequence]) -> None:
        """
        Move the individual value sequences to an external list of value sequences (i.e.,
        the parent `ElementSet`'s), and update the `sequence_indices` attribute so we can
        retrieve the sequences from that list at will.
        """
        len_ours = len(self.sequences)
        len_ext = len(sequences)
        sequences.extend(self.sequences)

        # child sequences are now stored externally, and values retrieved via those:
        self._sequences = None
        self._values = None
        self._sequence_indices = [len_ext, len_ext + len_ours]

    @classmethod
    def _values_from_latin_hypercube(
        cls,
        paths: Sequence[str],
        num_samples: int,
        *,
        bounds: dict[str, dict[str, str | Sequence[float]]] | None = None,
        scramble: bool = True,
        strength: int = 1,
        optimization: Literal["random-cd", "lloyd"] | None = None,
        rng=None,
    ) -> NDArray:
        """Sample a latin hypercube with one dimension per path, scaled to each
        path's (optionally logarithmic) extent; returns an array of shape
        (num_paths, num_samples)."""

        from scipy.stats.qmc import LatinHypercube, scale

        num_paths = len(paths)
        kwargs = dict(
            d=num_paths,
            scramble=scramble,
            strength=strength,
            optimization=optimization,
            rng=rng,
        )

        bounds = bounds or {}

        # per-path scaling type; defaults to "linear" for unspecified paths:
        scaling = np.asarray(
            [bounds.get(path, {}).get("scaling", "linear") for path in paths]
        )

        # extents including defaults for unspecified:
        all_extents = [bounds.get(path, {}).get("extent", [0, 1]) for path in paths]

        # extents accounting for scaling type:
        extent = np.asarray(
            [
                np.log10(all_extents[i]) if scaling[i] == "log" else all_extents[i]
                for i in range(len(scaling))
            ]
        ).T

        try:
            sampler = LatinHypercube(**kwargs)
        except TypeError:
            # `rng` was previously (<1.15.0) `seed`:
            kwargs["seed"] = kwargs.pop("rng")
            sampler = LatinHypercube(**kwargs)

        # scale unit-cube samples into each dimension's extent:
        samples = scale(
            sampler.random(n=num_samples), l_bounds=extent[0], u_bounds=extent[1]
        )

        # undo the log10 transform for log-scaled dimensions:
        for i in range(len(scaling)):
            if scaling[i] == "log":
                samples[:, i] = 10 ** samples[:, i]

        return samples.T

    @classmethod
    def from_latin_hypercube(
        cls,
        paths: Sequence[str],
        num_samples: int,
        *,
        bounds: dict[str, dict[str, str | Sequence[float]]] | None = None,
        scramble: bool = True,
        strength: int = 1,
        optimization: Literal["random-cd", "lloyd"] | None = None,
        rng=None,
        nesting_order: int | float | None = None,
        label: str | int | None = None,
    ) -> Self:
        """
        Generate values from SciPy's latin hypercube sampler: :class:`scipy.stats.qmc.LatinHypercube`.

        Parameters
        ----------
        paths : Sequence[str]
            List of dot-delimited paths within the parameter's nested data structure for which
            'value' should be set.
        num_samples : int
            Number of random hypercube samples to take.
        bounds : dict[str, dict[str, str | Sequence[float]]] | None, optional
            Bounds dictionary structure which takes a path as a key and returns another
            dictionary which takes `scaling` and `extent` as keys. `extent` defines the
            width of the parameter space, and `scaling` defines whether to take
            logarithmically spaced samples ("log") or not ("linear"). By default,
            linear scaling and an extent between 0 and 1 is used.
        scramble : bool, optional
            See `scipy.stats.qmc.LatinHypercube`, by default True.
        strength : int, optional
            See `scipy.stats.qmc.LatinHypercube`, by default 1.
        optimization : Literal["random-cd", "lloyd"] | None, optional
            See `scipy.stats.qmc.LatinHypercube`, by default None.
        rng : optional
            See `scipy.stats.qmc.LatinHypercube`, by default None.

        Returns
        -------
        MultiPathSequence
            A multi-path sequence whose values are the hypercube samples, one child
            sequence per path.
        """
        kwargs = {
            "paths": paths,
            "num_samples": num_samples,
            "scramble": scramble,
            "strength": strength,
            "optimization": optimization,
            "rng": rng,
            "bounds": bounds,
        }
        values = cls._values_from_latin_hypercube(**kwargs)
        assert values is not None
        obj = cls(
            paths=paths,
            values=values,
            nesting_order=nesting_order,
            label=label,
        )
        # record provenance so the instance can be serialised/rebuilt:
        obj._values_method = "from_latin_hypercube"
        obj._values_method_args = kwargs
        return obj
|
|
1762
|
+
|
|
1763
|
+
|
|
1764
|
+
@dataclass
class AbstractInputValue(JSONLike):
    """Class to represent all sequence-able inputs to a task."""

    # Workflow this value is bound to directly, if any.
    _workflow: Workflow | None = None
    # Parent element set, if bound via an element set.
    _element_set: ElementSet | None = None
    # Parent schema input, if bound via a schema input.
    _schema_input: SchemaInput | None = None
    # The in-memory value; reset to None once made persistent.
    _value: Any | None = None
    # Reference into the workflow's persistent parameter data, once saved.
    _value_group_idx: int | list[int] | None = None

    def __repr__(self) -> str:
        # the value may live only in the workflow store and be missing:
        try:
            value_str = f", value={self.value}"
        except WorkflowParameterMissingError:
            value_str = ""

        return (
            f"{self.__class__.__name__}("
            f"_value_group_idx={self._value_group_idx}"
            f"{value_str}"
            f")"
        )

    @override
    def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
        # parent bindings are runtime-only; exclude them from serialisation:
        out = super()._postprocess_to_dict(d)
        out.pop("_workflow", None)
        out.pop("_schema_input", None)
        return out

    def make_persistent(
        self, workflow: Workflow, source: ParamSource
    ) -> tuple[str, list[int | list[int]], bool]:
        """Save value to a persistent workflow.

        Returns
        -------
        str
            Normalised path for this task input.
        list[int | list[int]]
            The index of the parameter data Zarr group where the data is stored.
        bool
            Whether this is newly persistent.
        """

        if self._value_group_idx is not None:
            # already persistent; validate that the reference still resolves:
            data_ref = self._value_group_idx
            is_new = False
            if not workflow.check_parameters_exist(data_ref):
                raise RuntimeError(
                    f"{self.__class__.__name__} has a data reference "
                    f"({data_ref}), but does not exist in the workflow."
                )
            # TODO: log if already persistent.
        else:
            data_ref = workflow._add_parameter_data(self._value, source=source)
            self._value_group_idx = data_ref
            is_new = True
            # drop the in-memory copy; the store is now authoritative:
            self._value = None

        return (self.normalised_path, [data_ref], is_new)

    @property
    def normalised_path(self) -> str:
        """
        The normalised path, if known.

        Subclasses must override this; the base class has no path.
        """
        raise NotImplementedError

    @property
    def workflow(self) -> Workflow | None:
        """
        The workflow containing this input value.

        Resolved from the first available binding: directly-set workflow, then
        the parent element set's task template, then the schema input's task
        schema; `None` when unbound.
        """
        if self._workflow:
            return self._workflow
        if self._element_set:
            if w_tmpl := self._element_set.task_template.workflow_template:
                return w_tmpl.workflow
        if self._schema_input:
            if t_tmpl := self._schema_input.task_schema.task_template:
                if w_tmpl := t_tmpl.workflow_template:
                    return w_tmpl.workflow
        return None

    @property
    def value(self) -> Any:
        """
        The value itself.
        """
        return self._value
|
|
1855
|
+
|
|
1856
|
+
|
|
1857
|
+
@dataclass
class ValuePerturbation(AbstractInputValue):
    """
    A perturbation applied to a value.
    """

    #: The name of this perturbation.
    name: str = ""
    #: The path to the value(s) to perturb.
    path: Sequence[str | int | float] | None = None
    #: The multiplicative factor to apply.
    multiplicative_factor: Numeric | None = 1
    #: The additive factor to apply.
    additive_factor: Numeric | None = 0

    def __post_init__(self):
        # `assert` statements are stripped under `python -O`, so enforce the
        # "name is required" contract with an explicit exception instead:
        if not self.name:
            raise ValueError("A ValuePerturbation must have a non-empty `name`.")

    @classmethod
    def from_spec(cls, spec):
        """
        Construct an instance from a specification dictionary.

        Parameters
        ----------
        spec
            Mapping of field names to values, passed directly to the constructor.
        """
        return cls(**spec)
|
|
1881
|
+
|
|
1882
|
+
|
|
1883
|
+
@hydrate
|
|
1884
|
+
class InputValue(AbstractInputValue, ValuesMixin):
|
|
1885
|
+
"""
|
|
1886
|
+
An input value to a task.
|
|
1887
|
+
|
|
1888
|
+
Parameters
|
|
1889
|
+
----------
|
|
1890
|
+
parameter: Parameter | SchemaInput | str
|
|
1891
|
+
Parameter whose value is to be specified.
|
|
1892
|
+
label: str
|
|
1893
|
+
Optional identifier to be used where the associated `SchemaInput` accepts multiple
|
|
1894
|
+
parameters of the specified type. This will be cast to a string.
|
|
1895
|
+
value: Any
|
|
1896
|
+
The input parameter value.
|
|
1897
|
+
value_class_method: How to obtain the real value.
|
|
1898
|
+
A class method that can be invoked with the `value` attribute as keyword
|
|
1899
|
+
arguments.
|
|
1900
|
+
path: str
|
|
1901
|
+
Dot-delimited path within the parameter's nested data structure for which `value`
|
|
1902
|
+
should be set.
|
|
1903
|
+
|
|
1904
|
+
"""
|
|
1905
|
+
|
|
1906
|
+
# Declares the `parameter` attribute as a JSON-like child object; presumably
# deserialisation resolves it against the app's shared "parameters" data, keyed
# by the parameter's `typ` string — confirm against `JSONLike.from_json_like`.
_child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
    ChildObjectSpec(
        name="parameter",
        class_name="Parameter",
        shared_data_primary_key="typ",
        shared_data_name="parameters",
    ),
)
|
|
1914
|
+
|
|
1915
|
+
def __init__(
    self,
    parameter: Parameter | SchemaInput | str,
    value: Any | None = None,
    label: str | int | None = None,
    value_class_method: str | None = None,
    path: str | None = None,
    _check_obj: bool = True,
):
    """Initialise an input value; see the class docstring for parameter details.
    `_check_obj` is internal: it skips dict-value validation when restoring
    private state (e.g. in `__deepcopy__` / `_json_like_constructor`)."""
    super().__init__()
    if isinstance(parameter, str):
        try:
            #: Parameter whose value is to be specified.
            self.parameter = self._app.parameters.get(parameter)
        except ValueError:
            # not a registered parameter name; create an ad-hoc Parameter:
            self.parameter = self._app.Parameter(parameter)
    elif isinstance(parameter, SchemaInput):
        self.parameter = parameter.parameter
    else:
        self.parameter = parameter

    #: Identifier to be used where the associated `SchemaInput` accepts multiple
    #: parameters of the specified type.
    self.label = str(label) if label is not None else ""
    #: Dot-delimited path within the parameter's nested data structure for which
    #: `value` should be set (leading/trailing dots stripped; empty becomes None).
    self.path = (path.strip(".") or None) if path else None
    #: A class method that can be invoked with the `value` attribute as keyword
    #: arguments.
    self.value_class_method = value_class_method
    self._value = process_demo_data_strings(self._app, value)

    #: Which class method of this class was used to instantiate this instance, if any:
    self._value_method: str | None = None
    #: Keyword-arguments that were passed to the factory class method of this class
    #: to instantiate this instance, if such a method was used:
    self._value_method_args: dict[str, Any] | None = None

    # record if a ParameterValue sub-class is passed for value, which allows us
    # to re-init the object on `.value`:
    self._value_is_obj = isinstance(value, ParameterValue)
    if _check_obj:
        self._check_dict_value_if_object()
|
|
1958
|
+
|
|
1959
|
+
def _check_dict_value_if_object(self):
    """For non-persistent input values, check that, if a matching `ParameterValue`
    class exists and the specified value is not of that type, then the specified
    value is a dict, which can later be passed to the ParameterValue sub-class
    to initialise the object.
    """
    # guard clauses: nothing to check for persistent values or sub-values...
    if self._value_group_idx is not None or self.path:
        return
    # ...nor when the value is already a ParameterValue object, or absent...
    if self._value_is_obj or self._value is None:
        return
    # ...nor when no value class applies, or the value is already a dict:
    if not self.parameter._value_class or isinstance(self._value, dict):
        return
    raise ValueError(
        f"{self.__class__.__name__} with specified value {self._value!r} is "
        f"associated with a ParameterValue subclass "
        f"({self.parameter._value_class!r}), but the value data type is not a "
        f"dict."
    )
|
|
1979
|
+
|
|
1980
|
+
def __deepcopy__(self, memo: dict[int, Any]) -> Self:
    """Deep-copy via the serialised form, restoring afterwards the private
    value-state attributes that `__init__` does not accept, and sharing (not
    copying) the element-set and schema-input bindings."""
    kwargs = self.to_dict()
    _value = kwargs.pop("_value")
    kwargs.pop("_schema_input", None)
    _value_group_idx = kwargs.pop("_value_group_idx")
    _value_is_obj = kwargs.pop("_value_is_obj")
    _value_method = kwargs.pop("_value_method", None)
    _value_method_args = kwargs.pop("_value_method_args", None)

    # `_check_obj=False`: the value was already validated on first init:
    obj = self.__class__(**copy.deepcopy(kwargs, memo), _check_obj=False)
    obj._value = _value
    obj._value_group_idx = _value_group_idx
    obj._value_is_obj = _value_is_obj
    obj._value_method = _value_method
    obj._value_method_args = _value_method_args
    obj._element_set = self._element_set
    obj._schema_input = self._schema_input
    return obj
|
|
1998
|
+
|
|
1999
|
+
def __repr__(self) -> str:
    """Debug representation showing parameter type, label, value, path and the
    persistent value-group index where applicable."""
    vgi = self._value_group_idx
    val_grp_idx = f", value_group_idx={vgi}" if vgi is not None else ""
    path_str = f", path={self.path!r}" if self.path is not None else ""
    label_str = f", label={self.label!r}" if self.label is not None else ""

    # the value may live only in the workflow store and be missing:
    try:
        value_str = f", value={self.value!r}"
    except WorkflowParameterMissingError:
        value_str = ""

    return (
        f"{self.__class__.__name__}("
        f"parameter={self.parameter.typ!r}{label_str}"
        f"{value_str}{path_str}{val_grp_idx})"
    )
|
|
2025
|
+
|
|
2026
|
+
def __eq__(self, other: Any) -> bool:
    """Two input values are equal when their serialised (dict) forms match."""
    return isinstance(other, self.__class__) and self.to_dict() == other.to_dict()
|
|
2030
|
+
|
|
2031
|
+
@classmethod
def _json_like_constructor(cls, json_like):
    """Invoked by `JSONLike.from_json_like` instead of `__init__`."""

    # pop the keys that `__init__` does not accept, to be assigned afterwards:
    _value_group_idx = json_like.pop("_value_group_idx", None)
    _value_is_obj = json_like.pop("_value_is_obj", False)
    _value_method = json_like.pop("_value_method", None)
    _value_method_args = json_like.pop("_value_method_args", None)
    if "_value" in json_like:
        json_like["value"] = json_like.pop("_value")

    # defer the dict-value check until the private state has been restored:
    obj = cls(**json_like, _check_obj=False)
    obj._value_group_idx = _value_group_idx
    obj._value_is_obj = _value_is_obj
    obj._value_method = _value_method
    obj._value_method_args = _value_method_args
    obj._check_dict_value_if_object()
    return obj
|
|
2049
|
+
|
|
2050
|
+
@property
def labelled_type(self) -> str:
    """
    The labelled type of this input value: the parameter type, with the label
    appended in square brackets when a label is set.
    """
    if self.label:
        return f"{self.parameter.typ}[{self.label}]"
    return f"{self.parameter.typ}"
|
|
2057
|
+
|
|
2058
|
+
@property
def normalised_inputs_path(self) -> str:
    """
    The normalised input path without the ``inputs.`` prefix.
    """
    suffix = f".{self.path}" if self.path else ""
    return f"{self.labelled_type}{suffix}"
|
|
2064
|
+
|
|
2065
|
+
@property
def normalised_path(self) -> str:
    """
    The full normalised input path, i.e. with the ``inputs.`` prefix.
    """
    return "inputs." + self.normalised_inputs_path
|
|
2071
|
+
|
|
2072
|
+
def make_persistent(
    self, workflow: Workflow, source: ParamSource
) -> tuple[str, list[int | list[int]], bool]:
    """Save the value to a persistent workflow, recording the value-class method
    in the parameter source (on a private copy) when one is set."""
    src = copy.deepcopy(source)
    if self.value_class_method is not None:
        src["value_class_method"] = self.value_class_method
    return super().make_persistent(workflow, src)
|
|
2079
|
+
|
|
2080
|
+
@classmethod
def from_json_like(cls, json_like, shared_data=None):
    """Construct from JSON-like data, first decomposing the compact `parameter`
    string syntax: ``typ[label].sub.path::cls_method``."""

    param = json_like["parameter"]
    cls_method = None
    if "::" in json_like["parameter"]:
        # a double-colon suffix names a class method used to construct the value:
        param, cls_method = json_like["parameter"].split("::")

    if "[" in param:
        # extract out the parameter label:
        param, label = split_param_label(param)
        json_like["label"] = label

    if "." in param:
        # a dotted parameter name sets a sub-value `path`:
        param_split = param.split(".")
        param = param_split[0]
        json_like["path"] = ".".join(param_split[1:])

    json_like["parameter"] = param

    if cls_method:
        # double-colon syntax indicates either a `ParameterValue`-subclass class
        # method, or an InputValue class method should be used to construct the values

        # first check for a parameter value class:
        param_obj = cls._app.Parameter(param)
        param_obj._set_value_class()
        if val_cls := param_obj._value_class:
            if hasattr(val_cls, cls_method):
                json_like["value_class_method"] = cls_method

        elif hasattr(cls, cls_method):
            # no value class; use the named InputValue factory class method,
            # spreading the "value" dict as its keyword arguments:
            json_like.update(json_like.pop("value"))
            return getattr(cls, cls_method)(**json_like)

    return super().from_json_like(json_like, shared_data)
|
|
2116
|
+
|
|
2117
|
+
@property
def is_sub_value(self) -> bool:
    """
    True if the value is for a sub part of the parameter (i.e. if `path` is set).

    Sub-values are not added to the base parameter data, but are interpreted as
    single-value sequences.
    """
    return bool(self.path)
|
|
2123
|
+
|
|
2124
|
+
@property
def value(self) -> Any:
    """
    The input parameter value.

    If persistent and bound to a workflow, the value is fetched from the
    workflow's parameter store, re-initialising a `ParameterValue` subclass
    from the stored dict when applicable; otherwise the in-memory value is
    returned.
    """
    if self._value_group_idx is not None and self.workflow:
        val = self.workflow.get_parameter_data(cast("int", self._value_group_idx))
        if self._value_is_obj and self.parameter._value_class:
            # the stored dict re-initialises the ParameterValue subclass:
            return self.parameter._value_class(**val)
        return val
    else:
        return self._value
|
|
2133
|
+
|
|
2134
|
+
@classmethod
def _process_mixin_args(
    cls,
    values: list[Any],
    parameter: Parameter | SchemaInput | str | None = None,
    path: str | None = None,
    nesting_order: float | None = None,
    label: str | int | None = None,
    value_class_method: str | None = None,
):
    """Process arguments as generated by the mixin class for instantiation of this
    specific class.

    Notes
    -----
    `nesting_order` and `value_class_method` are accepted for signature
    compatibility with the mixin but are not forwarded here — presumably they
    do not apply to a single `InputValue`; TODO confirm this is intentional.
    """
    return {
        "value": values,
        "parameter": parameter,
        "path": path,
        "label": label,
    }
|
|
2152
|
+
|
|
2153
|
+
def _remember_values_method_args(
    self, name: str | None, args: dict[str, Any]
) -> Self:
    """Record the factory method name and its arguments on this instance and
    return the instance (fluent style)."""
    # note: singular value here
    self._value_method = name
    self._value_method_args = args
    return self
|
|
2159
|
+
|
|
2160
|
+
@classmethod
def from_linear_space(
    cls,
    parameter: Parameter | SchemaInput | str,
    start: float,
    stop: float,
    num: int,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from a NumPy linear space.
    """
    lin_args = {
        "parameter": parameter,
        "start": start,
        "stop": stop,
        "num": num,
        "path": path,
        "label": label,
    }
    return super()._from_linear_space(**lin_args, **kwargs)
|
|
2183
|
+
|
|
2184
|
+
@classmethod
def from_geometric_space(
    cls,
    parameter: Parameter | SchemaInput | str,
    start: float,
    stop: float,
    num: int,
    endpoint=True,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from a NumPy geometric space.

    If `endpoint` is True, `stop` is included as the final point. Additional
    keyword arguments are forwarded to the underlying implementation.
    """
    return super()._from_geometric_space(
        parameter=parameter,
        start=start,
        stop=stop,
        num=num,
        endpoint=endpoint,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2209
|
+
|
|
2210
|
+
@classmethod
def from_log_space(
    cls,
    parameter: Parameter | SchemaInput | str,
    start: float,
    stop: float,
    num: int,
    base=10.0,
    endpoint=True,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from a NumPy logarithmic space (with the given `base`).

    Additional keyword arguments are forwarded to the underlying
    implementation.
    """
    return super()._from_log_space(
        parameter=parameter,
        start=start,
        stop=stop,
        num=num,
        base=base,
        endpoint=endpoint,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2237
|
+
|
|
2238
|
+
@classmethod
def from_range(
    cls,
    parameter: Parameter | SchemaInput | str,
    start: float,
    stop: float,
    step: int | float = 1,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from a NumPy range (`start` to `stop` in steps of
    `step`).

    Additional keyword arguments are forwarded to the underlying
    implementation.
    """
    return super()._from_range(
        parameter=parameter,
        start=start,
        stop=stop,
        step=step,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2261
|
+
|
|
2262
|
+
@classmethod
def from_file(
    cls,
    parameter: Parameter | SchemaInput | str,
    file_path: str | Path,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from lines within a simple text file at `file_path`.

    Additional keyword arguments are forwarded to the underlying
    implementation.
    """
    return super()._from_file(
        parameter=parameter,
        file_path=file_path,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2281
|
+
|
|
2282
|
+
@classmethod
def from_load_txt(
    cls,
    parameter: Parameter | SchemaInput | str,
    file_path: str | Path,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from data within a text file using Numpy's `loadtxt`.

    Additional keyword arguments are forwarded to the underlying
    implementation.
    """
    return super()._from_load_txt(
        parameter=parameter,
        file_path=file_path,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2301
|
+
|
|
2302
|
+
@classmethod
def from_rectangle(
    cls,
    parameter: Parameter | SchemaInput | str,
    start: Sequence[float],
    stop: Sequence[float],
    num: Sequence[int],
    coord: int | None = None,
    include: list[str] | None = None,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from a sequence of coordinates to cover the perimeter of a
    rectangle.

    Parameters
    ----------
    coord:
        Which coordinate to use. Either 0, 1, or `None`, meaning each value will be
        both coordinates.
    include:
        If specified, include only the specified edges. Choose from "top", "right",
        "bottom", "left".
    """
    return super()._from_rectangle(
        parameter=parameter,
        start=start,
        stop=stop,
        num=num,
        coord=coord,
        include=include,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2339
|
+
|
|
2340
|
+
@classmethod
def from_random_uniform(
    cls,
    parameter: Parameter | SchemaInput | str,
    num: int,
    low: float = 0.0,
    high: float = 1.0,
    seed: int | list[int] | None = None,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from a uniform random number generator.

    .. deprecated::
        Use :meth:`from_uniform` instead; this method emits a
        `DeprecationWarning` and forwards `num` as the `shape` argument.
    """
    warnings.warn(
        f"{cls.__name__!r}: Please use `from_uniform` instead of "
        f"`from_random_uniform`, which will be removed in a future release.",
        DeprecationWarning,
        stacklevel=2,
    )
    return cls.from_uniform(
        parameter=parameter,
        shape=num,
        low=low,
        high=high,
        seed=seed,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2371
|
+
|
|
2372
|
+
@classmethod
def from_uniform(
    cls,
    parameter: Parameter | SchemaInput | str,
    low: float = 0.0,
    high: float = 1.0,
    shape: int | Sequence[int] | None = None,
    seed: int | list[int] | None = None,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from a uniform random number generator over
    [`low`, `high`), optionally seeded and shaped.
    """
    return super()._from_uniform(
        parameter=parameter,
        low=low,
        high=high,
        shape=shape,
        seed=seed,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2397
|
+
|
|
2398
|
+
@classmethod
def from_normal(
    cls,
    parameter: Parameter | SchemaInput | str,
    loc: float = 0.0,
    scale: float = 1.0,
    shape: int | Sequence[int] | None = None,
    seed: int | list[int] | None = None,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from a normal (Gaussian) random number generator with
    mean `loc` and standard deviation `scale`.
    """
    return super()._from_normal(
        parameter=parameter,
        loc=loc,
        scale=scale,
        shape=shape,
        seed=seed,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2423
|
+
|
|
2424
|
+
@classmethod
def from_log_normal(
    cls,
    parameter: Parameter | SchemaInput | str,
    mean: float = 0.0,
    sigma: float = 1.0,
    shape: int | Sequence[int] | None = None,
    seed: int | list[int] | None = None,
    path: str | None = None,
    label: str | int | None = None,
    **kwargs,
) -> Self:
    """
    Generate a value from a log-normal random number generator with
    underlying-normal mean `mean` and standard deviation `sigma`.
    """
    return super()._from_log_normal(
        parameter=parameter,
        mean=mean,
        sigma=sigma,
        shape=shape,
        seed=seed,
        path=path,
        label=label,
        **kwargs,
    )
|
|
2449
|
+
|
|
2450
|
+
|
|
2451
|
+
class ResourceSpec(JSONLike):
    """Class to represent specification of resource requirements for a (set of) actions.

    Notes
    -----
    `os_name` is used for retrieving a default shell name and for retrieving the correct
    `Shell` class; when using WSL, it should still be `nt` (i.e. Windows).

    Parameters
    ----------
    scope:
        Which scope does this apply to.
    scratch: str
        Which scratch space to use.
    parallel_mode: ParallelMode
        Which parallel mode to use.
    num_cores: int
        How many cores to request.
    num_cores_per_node: int
        How many cores per compute node to request.
    num_threads: int
        How many threads to request.
    num_nodes: int
        How many compute nodes to request.
    scheduler: str
        Which scheduler to use.
    shell: str
        Which system shell to use.
    use_job_array: bool
        Whether to use array jobs.
    max_array_items: int
        If using array jobs, up to how many items should be in the job array.
    write_app_logs: bool
        Whether an app log file should be written.
    combine_jobscript_std: bool
        Whether jobscript standard output and error streams should be combined.
    combine_scripts: bool
        Whether Python scripts should be combined.
    time_limit: str
        How long to run for.
    scheduler_args: dict[str, Any]
        Additional arguments to pass to the scheduler.
    shell_args: dict[str, Any]
        Additional arguments to pass to the shell.
    os_name: str
        Which OS to use.
    environments: dict
        Which execution environments to use.
    resources_id: int
        An arbitrary integer that can be used to force multiple jobscripts.
    skip_downstream_on_failure: bool
        Whether to skip downstream dependents on failure.
    allow_failed_dependencies: int | float | bool | None
        The failure tolerance with respect to dependencies, specified as a number or
        proportion.
    SGE_parallel_env: str
        Which SGE parallel environment to request.
    SLURM_partition: str
        Which SLURM partition to request.
    SLURM_num_tasks: str
        How many SLURM tasks to request.
    SLURM_num_tasks_per_node: str
        How many SLURM tasks per compute node to request.
    SLURM_num_nodes: str
        How many compute nodes to request.
    SLURM_num_cpus_per_task: str
        How many CPU cores to ask for per SLURM task.
    """

    #: The names of parameters that may be used when making an instance of this class.
    # NOTE(review): the docstring above documents `allow_failed_dependencies`, but it
    # is neither listed here nor accepted by `__init__` — confirm whether it is
    # intentionally unsupported by this class.
    ALLOWED_PARAMETERS: ClassVar[set[str]] = {
        "scratch",
        "parallel_mode",
        "num_cores",
        "num_cores_per_node",
        "num_threads",
        "num_nodes",
        "scheduler",
        "shell",
        "use_job_array",
        "max_array_items",
        "write_app_logs",
        "combine_jobscript_std",
        "combine_scripts",
        "time_limit",
        "scheduler_args",
        "shell_args",
        "os_name",
        "environments",
        "resources_id",
        "skip_downstream_on_failure",
        "SGE_parallel_env",
        "SLURM_partition",
        "SLURM_num_tasks",
        "SLURM_num_tasks_per_node",
        "SLURM_num_nodes",
        "SLURM_num_cpus_per_task",
    }

    # Back-reference to an owning `ResourceList`, used to resolve `element_set` and
    # `workflow_template` — presumably assigned by the list itself; confirm.
    _resource_list: ResourceList | None = None

    # Tells `JSONLike` how to (de)serialise the `scope` child object.
    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
        ChildObjectSpec(
            name="scope",
            class_name="ActionScope",
        ),
    )
|
|
2558
|
+
|
|
2559
|
+
@staticmethod
def __quoted(values: Iterable):
    """Render *values* as a comma-separated list of double-quoted items."""
    quoted = [f'"{entry}"' for entry in values]
    return ", ".join(quoted)
|
|
2562
|
+
|
|
2563
|
+
@classmethod
def _allowed_params_quoted(cls) -> str:
    """
    The string version of the list of allowed parameters.
    """
    # delegates to the (name-mangled) `__quoted` formatting helper:
    return cls.__quoted(cls.ALLOWED_PARAMETERS)
|
|
2569
|
+
|
|
2570
|
+
@staticmethod
def __parse_thing(
    typ: type[ActionScope], val: ActionScope | str | None
) -> ActionScope | None:
    """Coerce `val` to an instance of `typ`.

    Instances pass through unchanged; `None` becomes the "any" scope; anything
    else is deserialised via `from_json_like`.
    """
    if isinstance(val, typ):
        return val
    elif val is None:
        return typ.any()
    else:
        return typ.from_json_like(cast("str", val))
|
|
2580
|
+
|
|
2581
|
+
def __init__(
    self,
    scope: ActionScope | str | None = None,
    scratch: str | None = None,
    parallel_mode: str | ParallelMode | None = None,
    num_cores: int | None = None,
    num_cores_per_node: int | None = None,
    num_threads: int | None = None,
    num_nodes: int | None = None,
    scheduler: str | None = None,
    shell: str | None = None,
    use_job_array: bool | None = None,
    max_array_items: int | None = None,
    write_app_logs: bool | None = None,
    combine_jobscript_std: bool | None = None,
    combine_scripts: bool | None = None,
    time_limit: str | timedelta | None = None,
    scheduler_args: dict[str, Any] | None = None,
    shell_args: dict[str, Any] | None = None,
    os_name: str | None = None,
    environments: Mapping[str, Mapping[str, Any]] | None = None,
    resources_id: int | None = None,
    skip_downstream_on_failure: bool | None = None,
    SGE_parallel_env: str | None = None,
    SLURM_partition: str | None = None,
    SLURM_num_tasks: str | None = None,
    SLURM_num_tasks_per_node: str | None = None,
    SLURM_num_nodes: str | None = None,
    SLURM_num_cpus_per_task: str | None = None,
):
    """Capture the requested resource values; `None` means "not specified".

    See the class docstring for the meaning of each parameter.
    """
    #: Which scope does this apply to.
    self.scope = self.__parse_thing(self._app.ActionScope, scope)

    # normalise a timedelta time limit to its string representation:
    if isinstance(time_limit, timedelta):
        time_limit = timedelta_format(time_limit)

    # assigned by `make_persistent`
    self._workflow: Workflow | None = None
    self._value_group_idx: int | list[int] | None = None

    # user-specified resource parameters:
    self._scratch = scratch
    self._parallel_mode = get_enum_by_name_or_val(ParallelMode, parallel_mode)
    self._num_cores = num_cores
    self._num_threads = num_threads
    self._num_nodes = num_nodes
    self._num_cores_per_node = num_cores_per_node
    # scheduler/shell/os names are normalised (lower-cased, stripped):
    self._scheduler = self._process_string(scheduler)
    self._shell = self._process_string(shell)
    self._os_name = self._process_string(os_name)
    self._environments = environments
    self._resources_id = resources_id
    self._skip_downstream_on_failure = skip_downstream_on_failure
    self._use_job_array = use_job_array
    self._max_array_items = max_array_items
    self._write_app_logs = write_app_logs
    self._combine_jobscript_std = combine_jobscript_std
    self._combine_scripts = combine_scripts
    self._time_limit = time_limit
    self._scheduler_args = scheduler_args
    self._shell_args = shell_args

    # user-specified SGE-specific parameters:
    self._SGE_parallel_env = SGE_parallel_env

    # user-specified SLURM-specific parameters:
    self._SLURM_partition = SLURM_partition
    self._SLURM_num_tasks = SLURM_num_tasks
    self._SLURM_num_tasks_per_node = SLURM_num_tasks_per_node
    self._SLURM_num_nodes = SLURM_num_nodes
    self._SLURM_num_cpus_per_task = SLURM_num_cpus_per_task
|
|
2652
|
+
|
|
2653
|
+
def __deepcopy__(self, memo: dict[int, Any]) -> Self:
    """Deep-copy via the dict form; the persistence pointer is re-attached
    manually and the resource-list back-reference is shared, not copied."""
    kwargs = copy.deepcopy(self.to_dict(), memo)
    # the persistence pointer is not a constructor argument:
    _value_group_idx = kwargs.pop("value_group_idx", None)
    obj = self.__class__(**kwargs)
    obj._value_group_idx = _value_group_idx
    obj._resource_list = self._resource_list
    return obj
|
|
2660
|
+
|
|
2661
|
+
def __repr__(self):
    """Show the scope plus every non-`None` allowed parameter."""
    parts: list[str] = []
    for name in self.ALLOWED_PARAMETERS:
        try:
            current = getattr(self, name)
        except WorkflowParameterMissingError:
            # value not retrievable (e.g. missing from persistent store):
            continue
        if current is not None:
            parts.append(f", {name}={current!r}")

    return f"{self.__class__.__name__}(scope={self.scope}{''.join(parts)})"
|
|
2672
|
+
|
|
2673
|
+
def __eq__(self, other: Any) -> bool:
    """Equality is defined by the serialised (dict) form."""
    if isinstance(other, self.__class__):
        return self.to_dict() == other.to_dict()
    return False
|
|
2677
|
+
|
|
2678
|
+
@classmethod
def _json_like_constructor(cls, json_like) -> Self:
    """Invoked by `JSONLike.from_json_like` instead of `__init__`.

    Re-attaches the persistence pointer (`value_group_idx`), which is not a
    constructor argument, and converts a constructor `TypeError` into a
    friendlier `UnknownResourceSpecItemError`.
    """
    _value_group_idx = json_like.pop("value_group_idx", None)
    try:
        obj = cls(**json_like)
    except TypeError as err:
        given_keys = {k for k in json_like if k != "scope"}
        bad_keys = cls.__quoted(given_keys - cls.ALLOWED_PARAMETERS)
        good_keys = cls._allowed_params_quoted()
        # chain the original TypeError so the true cause is preserved — the
        # TypeError may also arise from a bad value for a *valid* key, in
        # which case `bad_keys` alone would be misleading:
        raise UnknownResourceSpecItemError(
            f"The following resource item names are unknown: {bad_keys}. "
            f"Allowed resource item names are: {good_keys}."
        ) from err
    obj._value_group_idx = _value_group_idx

    return obj
|
|
2696
|
+
|
|
2697
|
+
@property
def normalised_resources_path(self) -> str:
    """
    Standard name of this resource spec (its scope rendered as a string).
    """
    assert self.scope is not None
    return self.scope.to_string()
|
|
2705
|
+
|
|
2706
|
+
@property
def normalised_path(self) -> str:
    """
    Full dotted name of this resource spec within workflow data.
    """
    return "resources." + self.normalised_resources_path
|
|
2712
|
+
|
|
2713
|
+
@override
def _postprocess_to_dict(self, d: dict[str, Any]) -> dict[str, Any]:
    """Prepare the serialised form: drop the workflow back-reference, keep
    only the persistence pointer (plus scope) when persistent, and strip
    leading underscores from the private attribute names."""
    out = super()._postprocess_to_dict(d)
    out.pop("_workflow", None)

    if self._value_group_idx is not None:
        # only store pointer to persistent data:
        out = {k: v for k, v in out.items() if k in ["_value_group_idx", "scope"]}
    else:
        # unspecified (None) resource values are omitted:
        out = {k: v for k, v in out.items() if v is not None}

    # "_scratch" -> "scratch", etc.:
    out = {k.lstrip("_"): v for k, v in out.items()}
    return out
|
|
2726
|
+
|
|
2727
|
+
def _get_members(self):
    """Return the non-scope, non-`None` resource values as a dict."""
    members = self.to_dict()
    members.pop("scope")
    members.pop("value_group_idx", None)
    return {k: v for k, v in members.items() if v is not None}
|
|
2733
|
+
|
|
2734
|
+
@classmethod
def __is_Workflow(cls, value) -> TypeIs[Workflow]:
    # Type-narrowing helper: True if `value` is an instance of the app's
    # `Workflow` class.
    return isinstance(value, cls._app.Workflow)
|
|
2737
|
+
|
|
2738
|
+
def make_persistent(
    self, workflow: ResourcePersistingWorkflow, source: ParamSource
) -> tuple[str, list[int | list[int]], bool]:
    """Save to a persistent workflow.

    Returns
    -------
    String is the data path for this task input and integer list
    contains the indices of the parameter data Zarr groups where the data is
    stored.

    Note
    ----
    May modify the internal state of this object.
    """

    if self._value_group_idx is not None:
        # already persistent; just validate the stored reference:
        data_ref = self._value_group_idx
        is_new = False
        if not workflow.check_parameters_exist(data_ref):
            raise RuntimeError(
                f"{self.__class__.__name__} has a parameter group index "
                f"({data_ref}), but does not exist in the workflow."
            )
        # TODO: log if already persistent.
    else:
        data_ref = workflow._add_parameter_data(self._get_members(), source=source)
        is_new = True
        self._value_group_idx = data_ref
        if self.__is_Workflow(workflow):
            self._workflow = workflow

        # now persistent, so clear local copies of the values:
        # NOTE(review): `_num_threads`, `_num_nodes`, `_num_cores_per_node`,
        # `_parallel_mode` and the SGE/SLURM-specific fields are not reset
        # here, unlike the fields below — confirm whether that is intentional.
        self._num_cores = None
        self._scratch = None
        self._scheduler = None
        self._shell = None
        self._use_job_array = None
        self._max_array_items = None
        self._write_app_logs = None
        self._combine_jobscript_std = None
        self._combine_scripts = None
        self._time_limit = None
        self._scheduler_args = None
        self._shell_args = None
        self._os_name = None
        self._environments = None
        self._resources_id = None
        self._skip_downstream_on_failure = None

    return (self.normalised_path, [data_ref], is_new)
|
|
2788
|
+
|
|
2789
|
+
def copy_non_persistent(self):
    """Make a non-persistent copy."""
    kwargs = {name: getattr(self, name) for name in self.ALLOWED_PARAMETERS}
    kwargs["scope"] = self.scope
    return self.__class__(**kwargs)
|
|
2795
|
+
|
|
2796
|
+
def _get_value(self, value_name: str | None = None):
    """Return the resource values — or one named value if `value_name` is
    given — from the workflow when persistent, else from this object."""
    if self._value_group_idx is not None and self.workflow:
        # persistent: dereference the stored parameter data:
        val = self.workflow.get_parameter_data(cast("int", self._value_group_idx))
    else:
        val = self._get_members()
    if value_name is not None and val is not None:
        # `val` is a dict; missing names yield None:
        return val.get(value_name)

    return val
|
|
2805
|
+
|
|
2806
|
+
@staticmethod
def _process_string(value: str | None):
    """Normalise a string to lower-cased, stripped form; falsy values
    (`None`, empty string) pass through unchanged."""
    if not value:
        return value
    return value.lower().strip()
|
|
2809
|
+
|
|
2810
|
+
def _setter_persistent_check(self):
    """Guard used by setters: once persistent, attributes are frozen.

    Raises
    ------
    ValueError
        If a value-group index has been assigned (i.e. `make_persistent` ran).
    """
    if self._value_group_idx:
        raise ValueError(
            f"Cannot set attribute of a persistent {self.__class__.__name__!r}."
        )
|
|
2815
|
+
|
|
2816
|
+
# -- simple accessors; all dereference persistent storage via `_get_value` --

@property
def scratch(self) -> str | None:
    """
    Which scratch space to use.

    Todo
    ----
    Currently unused, except in tests.
    """
    return self._get_value("scratch")

@property
def parallel_mode(self) -> ParallelMode | None:
    """
    Which parallel mode to use.
    """
    return self._get_value("parallel_mode")

@property
def num_cores(self) -> int | None:
    """
    How many cores to request.
    """
    return self._get_value("num_cores")

@property
def num_cores_per_node(self) -> int | None:
    """
    How many cores per compute node to request.
    """
    return self._get_value("num_cores_per_node")

@property
def num_nodes(self) -> int | None:
    """
    How many compute nodes to request.
    """
    return self._get_value("num_nodes")

@property
def num_threads(self) -> int | None:
    """
    How many threads to request.
    """
    return self._get_value("num_threads")
|
|
2861
|
+
|
|
2862
|
+
@property
def scheduler(self) -> str | None:
    """
    Which scheduler to use.
    """
    return self._get_value("scheduler")

@scheduler.setter
def scheduler(self, value: str | None):
    # disallowed once persistent; value is normalised (lower-cased, stripped):
    self._setter_persistent_check()
    self._scheduler = self._process_string(value)

@property
def shell(self) -> str | None:
    """
    Which system shell to use.
    """
    return self._get_value("shell")

@shell.setter
def shell(self, value: str | None):
    # disallowed once persistent; value is normalised (lower-cased, stripped):
    self._setter_persistent_check()
    self._shell = self._process_string(value)
|
|
2885
|
+
|
|
2886
|
+
@property
def use_job_array(self) -> bool | None:
    """
    Whether to use array jobs (`None` if unspecified).
    """
    return self._get_value("use_job_array")

@property
def max_array_items(self) -> int | None:
    """
    If using array jobs, up to how many items should be in the job array.
    """
    return self._get_value("max_array_items")

@property
def write_app_logs(self) -> bool | None:
    """Whether an app log file should be written (`None` if unspecified)."""
    return self._get_value("write_app_logs")

@property
def combine_jobscript_std(self) -> bool | None:
    """Whether jobscript stdout and stderr should be combined (`None` if
    unspecified)."""
    return self._get_value("combine_jobscript_std")

@property
def combine_scripts(self) -> bool | None:
    """Whether Python scripts should be combined (`None` if unspecified)."""
    return self._get_value("combine_scripts")

@property
def time_limit(self) -> str | None:
    """
    How long to run for.
    """
    return self._get_value("time_limit")
|
|
2918
|
+
|
|
2919
|
+
@property
def scheduler_args(self) -> Mapping:  # TODO: TypedDict
    """
    Additional arguments to pass to the scheduler.
    """
    return self._get_value("scheduler_args")

@property
def shell_args(self) -> Mapping | None:  # TODO: TypedDict
    """
    Additional arguments to pass to the shell.
    """
    return self._get_value("shell_args")

@property
def os_name(self) -> str | None:
    """
    Which OS to use (`None` if unspecified).
    """
    return self._get_value("os_name")

@os_name.setter
def os_name(self, value: str):
    # disallowed once persistent; value is normalised (lower-cased, stripped):
    self._setter_persistent_check()
    self._os_name = self._process_string(value)

@property
def environments(self) -> Mapping | None:  # TODO: TypedDict
    """
    Which execution environments to use.
    """
    return self._get_value("environments")
|
|
2951
|
+
|
|
2952
|
+
@property
def resources_id(self) -> int | None:
    """An arbitrary integer that can be used to force multiple jobscripts
    (`None` if unspecified)."""
    return self._get_value("resources_id")

@property
def skip_downstream_on_failure(self) -> bool | None:
    """Whether to skip downstream dependents on failure (`None` if
    unspecified)."""
    return self._get_value("skip_downstream_on_failure")

@property
def SGE_parallel_env(self) -> str | None:
    """
    Which SGE parallel environment to request.
    """
    return self._get_value("SGE_parallel_env")

@property
def SLURM_partition(self) -> str | None:
    """
    Which SLURM partition to request.
    """
    return self._get_value("SLURM_partition")

@property
def SLURM_num_tasks(self) -> int | None:
    """
    How many SLURM tasks to request.
    """
    return self._get_value("SLURM_num_tasks")

@property
def SLURM_num_tasks_per_node(self) -> int | None:
    """
    How many SLURM tasks per compute node to request.
    """
    return self._get_value("SLURM_num_tasks_per_node")

@property
def SLURM_num_nodes(self) -> int | None:
    """
    How many compute nodes to request.
    """
    return self._get_value("SLURM_num_nodes")

@property
def SLURM_num_cpus_per_task(self) -> int | None:
    """
    How many CPU cores to ask for per SLURM task.
    """
    return self._get_value("SLURM_num_cpus_per_task")
|
|
3001
|
+
|
|
3002
|
+
@property
def workflow(self) -> Workflow | None:
    """
    The workflow owning this resource spec.

    Resolution order: explicit back-reference (set by `make_persistent`),
    then element-set-level resources, then template-level resources.
    """
    if self._workflow:
        # direct back-reference (persistent spec):
        return self._workflow

    elif self.element_set:
        # element-set-level resources
        wt = self.element_set.task_template.workflow_template
        return wt.workflow if wt else None

    elif self.workflow_template:
        # template-level resources
        return self.workflow_template.workflow

    elif self._value_group_idx is not None:
        # inconsistent state; can occur transiently during workflow creation:
        raise RuntimeError(
            f"`{self.__class__.__name__}._value_group_idx` is set but the `workflow` "
            f"attribute is not. This might be because we are in the process of "
            f"creating the workflow object."
        )

    return None
|
|
3027
|
+
|
|
3028
|
+
@property
def element_set(self) -> ElementSet | None:
    """
    The element set that will use this resource spec.
    """
    rlist = self._resource_list
    return rlist.element_set if rlist else None
|
|
3036
|
+
|
|
3037
|
+
@property
def workflow_template(self) -> WorkflowTemplate | None:
    """
    The workflow template that will use this resource spec.
    """
    rlist = self._resource_list
    return rlist.workflow_template if rlist else None
|
|
3045
|
+
|
|
3046
|
+
|
|
3047
|
+
#: How to specify a selection rule: a single rule (or its argument form), a
#: sequence of them, or an element filter.
Where: TypeAlias = "RuleArgs | Rule | Sequence[RuleArgs | Rule] | ElementFilter"
|
|
3049
|
+
|
|
3050
|
+
|
|
3051
|
+
class InputSource(JSONLike):
|
|
3052
|
+
"""
|
|
3053
|
+
An input source to a workflow task.
|
|
3054
|
+
|
|
3055
|
+
Parameters
|
|
3056
|
+
----------
|
|
3057
|
+
source_type: InputSourceType
|
|
3058
|
+
Type of the input source.
|
|
3059
|
+
import_ref:
|
|
3060
|
+
Where the input comes from when the type is `IMPORT`.
|
|
3061
|
+
task_ref:
|
|
3062
|
+
Which task is this an input for? Used when the type is `TASK`.
|
|
3063
|
+
task_source_type: TaskSourceType
|
|
3064
|
+
Type of task source.
|
|
3065
|
+
element_iters:
|
|
3066
|
+
Which element iterations does this apply to?
|
|
3067
|
+
path:
|
|
3068
|
+
Path to where this input goes.
|
|
3069
|
+
where: ~hpcflow.app.Rule | list[~hpcflow.app.Rule] | ~hpcflow.app.ElementFilter
|
|
3070
|
+
Filtering rules.
|
|
3071
|
+
"""
|
|
3072
|
+
|
|
3073
|
+
# Serialisation spec: the JSON key ``type`` maps to the ``source_type``
# attribute and is deserialised as an ``InputSourceType`` enum member.
_child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
    ChildObjectSpec(
        name="source_type",
        json_like_name="type",
        class_name="InputSourceType",
        is_enum=True,
    ),
)
|
|
3081
|
+
|
|
3082
|
+
@classmethod
def __is_ElementFilter(cls, value) -> TypeIs[ElementFilter]:
    # Type-narrowing helper: True when `value` is an app-bound ElementFilter.
    return isinstance(value, cls._app.ElementFilter)
|
3086
|
+
@classmethod
def __is_Rule(cls, value) -> TypeIs[Rule]:
    # Type-narrowing helper: True when `value` is an app-bound Rule.
    return isinstance(value, cls._app.Rule)
|
3090
|
+
def __init__(
    self,
    source_type: InputSourceType | str,
    import_ref: int | None = None,
    task_ref: int | None = None,
    task_source_type: TaskSourceType | str | None = None,
    element_iters: list[int] | None = None,
    path: str | None = None,
    where: Where | None = None,
):
    """Construct an input source; see the class docstring for parameters."""
    # Normalise `where` to an ElementFilter (or None): a bare Rule, a
    # RuleArgs dict, or a sequence of either is wrapped in a single filter.
    if where is None or self.__is_ElementFilter(where):
        #: Filtering rules.
        self.where: ElementFilter | None = where
    else:
        self.where = self._app.ElementFilter(
            rules=[
                rule if self.__is_Rule(rule) else self._app.Rule(**rule)
                for rule in (where if isinstance(where, Sequence) else [where])
            ]
        )

    #: Type of the input source.
    self.source_type = get_enum_by_name_or_val(InputSourceType, source_type)
    #: Where the input comes from when the type is `IMPORT`.
    self.import_ref = import_ref
    #: Which task is this an input for? Used when the type is `TASK`.
    self.task_ref = task_ref
    #: Type of task source.
    self.task_source_type = get_enum_by_name_or_val(TaskSourceType, task_source_type)
    #: Which element iterations does this apply to?
    self.element_iters = element_iters
    #: Path to where this input goes.
    self.path = path

    # A TASK source must reference a task; if no task-source type was given,
    # default to consuming the referenced task's output.
    if self.source_type is InputSourceType.TASK:
        if self.task_ref is None:
            raise ValueError("Must specify `task_ref` if `source_type` is TASK.")
        if self.task_source_type is None:
            self.task_source_type = TaskSourceType.OUTPUT

    if self.source_type is InputSourceType.IMPORT and self.import_ref is None:
        raise ValueError("Must specify `import_ref` if `source_type` is IMPORT.")
|
3133
|
+
def __eq__(self, other: Any):
    """
    Equality: all identifying attributes must match.

    Returns ``NotImplemented`` (rather than ``False``) for non-``InputSource``
    operands, so Python can try the reflected comparison on the other operand;
    an unhandled comparison still evaluates to ``False`` for callers.
    """
    if not isinstance(other, self.__class__):
        return NotImplemented
    return (
        self.source_type == other.source_type
        and self.import_ref == other.import_ref
        and self.task_ref == other.task_ref
        and self.task_source_type == other.task_source_type
        and self.element_iters == other.element_iters
        and self.where == other.where
        and self.path == other.path
    )
|
3146
|
+
def __repr__(self) -> str:
    """Render as the classmethod-constructor call that would recreate this."""
    assert self.source_type
    method = self.source_type.name.lower()
    arg_parts: list[str] = []

    if self.source_type is InputSourceType.IMPORT:
        # The constructor for imports is `import_` (`import` is a keyword).
        method += "_"
        arg_parts.append(f"import_ref={self.import_ref}")
    elif self.source_type is InputSourceType.TASK:
        assert self.task_source_type
        arg_parts.append(f"task_ref={self.task_ref}")
        arg_parts.append(
            f"task_source_type={self.task_source_type.name.lower()!r}",
        )

    if self.element_iters is not None:
        arg_parts.append(f"element_iters={self.element_iters}")
    if self.where is not None:
        arg_parts.append(f"where={self.where!r}")

    return f"{self.__class__.__name__}.{method}({', '.join(arg_parts)})"
|
3174
|
+
def get_task(self, workflow: Workflow) -> WorkflowTask | None:
    """If source_type is task, then return the referenced task from the given
    workflow."""
    if self.source_type is not InputSourceType.TASK:
        return None
    # Find the first task whose insert ID matches our reference, if any.
    for candidate in workflow.tasks:
        if candidate.insert_ID == self.task_ref:
            return candidate
    return None
|
3183
|
+
def is_in(self, other_input_sources: Sequence[InputSource]) -> int | None:
    """Check if this input source is in a list of other input sources, without
    considering the `element_iters` and `where` attributes."""
    # Identity is the tuple of attributes that matter for matching.
    key = (
        self.source_type,
        self.import_ref,
        self.task_ref,
        self.task_source_type,
        self.path,
    )
    return next(
        (
            idx
            for idx, other in enumerate(other_input_sources)
            if (
                other.source_type,
                other.import_ref,
                other.task_ref,
                other.task_source_type,
                other.path,
            )
            == key
        ),
        None,
    )
|
3198
|
+
def to_string(self) -> str:
    """
    Render this input source as a string.
    """
    parts = [self.source_type.name.lower()]
    if self.source_type is InputSourceType.TASK:
        assert self.task_source_type
        parts += [str(self.task_ref), self.task_source_type.name.lower()]
        if self.element_iters is not None:
            iters = ",".join(str(i) for i in self.element_iters)
            parts.append(f"[{iters}]")
    elif self.source_type is InputSourceType.IMPORT:
        parts.append(str(self.import_ref))
    return ".".join(parts)
|
3213
|
+
@classmethod
def _validate_task_source_type(cls, task_src_type) -> None | TaskSourceType:
    """
    Coerce a task-source specifier to a ``TaskSourceType`` member.

    ``None`` and existing enum members pass through unchanged; strings are
    looked up (upper-cased) as member names on the app-bound enum.

    Raises
    ------
    ValueError
        If a string does not name a known task source type.
    """
    if task_src_type is None:
        return None
    if isinstance(task_src_type, TaskSourceType):
        return task_src_type
    try:
        task_source_type = getattr(cls._app.TaskSourceType, task_src_type.upper())
    except AttributeError:
        # `from None`: the AttributeError context adds nothing; the
        # ValueError message is the complete explanation.
        raise ValueError(
            f"InputSource `task_source_type` specified as {task_src_type!r}, but "
            f"must be one of: {TaskSourceType.names!r}."
        ) from None
    return task_source_type
|
3228
|
+
@classmethod
def from_string(cls, str_defn: str) -> Self:
    """Parse a dot-delimited string definition of an InputSource.

    Parameters
    ----------
    str_defn:
        The string to parse.

    Examples
    --------
    For a local task input source, use:

    >>> InputSource.from_string("local")

    For a schema input default source, use:

    >>> InputSource.from_string("default")

    For task input sources, specify either the task insert ID (typically this is just
    the task index within the workflow), or the task's unique name, which is usually
    just the associated task schema's objective, but if multiple tasks use the same
    schema, it will be suffixed by an index, starting from one.

    >>> InputSource.from_string("task.0.input")
    >>> InputSource.from_string("task.my_task.input")
    """
    return cls(**cls._parse_from_string(str_defn))
|
3257
|
+
@staticmethod
def _parse_from_string(str_defn: str) -> dict[str, Any]:
    """Parse a dot-delimited string definition of an InputSource.

    Returns the keyword arguments required to construct the source.

    Raises
    ------
    ValueError
        If the definition has the wrong number of dot-delimited components
        for its source type.
    """
    parts = str_defn.split(".")
    source_type = get_enum_by_name_or_val(InputSourceType, parts[0])
    task_ref: int | str | None = None
    task_source_type: TaskSourceType | None = None
    # Validate component counts per source type. TASK additionally requires a
    # task reference (`len(parts) >= 2`); previously a bare "task" fell
    # through to an uncaught IndexError below instead of this ValueError.
    if (
        (
            source_type in (InputSourceType.LOCAL, InputSourceType.DEFAULT)
            and len(parts) > 1
        )
        or (
            source_type is InputSourceType.TASK
            and (len(parts) < 2 or len(parts) > 3)
        )
        or (source_type is InputSourceType.IMPORT and len(parts) > 2)
    ):
        raise ValueError(f"InputSource string not understood: {str_defn!r}.")

    if source_type is InputSourceType.TASK:
        # TODO: does this include element_iters?
        try:
            # assume specified by task insert ID
            task_ref = int(parts[1])
        except ValueError:
            # assume specified by task unique name
            task_ref = parts[1]
        try:
            task_source_type = get_enum_by_name_or_val(TaskSourceType, parts[2])
        except IndexError:
            # No explicit task-source component: default to the task's output.
            task_source_type = TaskSourceType.OUTPUT
    elif source_type is InputSourceType.IMPORT:
        raise NotImplementedError("Import input sources are not yet supported.")

    return {
        "source_type": source_type,
        "task_ref": task_ref,
        "task_source_type": task_source_type,
    }
|
3295
|
+
@classmethod
def from_json_like(cls, json_like, shared_data=None):
    """
    Construct an instance from JSON-like data, additionally accepting the
    dot-delimited string form handled by ``_parse_from_string``.
    """
    if isinstance(json_like, str):
        json_like = cls._parse_from_string(json_like)
    return super().from_json_like(json_like, shared_data)
|
3301
|
+
@classmethod
def import_(
    cls,
    import_ref: int,
    element_iters: list[int] | None = None,
    where: Where | None = None,
) -> Self:
    """
    Make an instance of an input source that is an import.

    Parameters
    ----------
    import_ref:
        Import reference.
    element_iters:
        Originating element iterations.
    where:
        Filtering rule.
    """
    kwargs = {
        "source_type": InputSourceType.IMPORT,
        "import_ref": import_ref,
        "element_iters": element_iters,
        "where": where,
    }
    return cls(**kwargs)
|
3327
|
+
@classmethod
def local(cls) -> Self:
    """
    Make an instance of an input source that is local (i.e. the value is
    supplied directly by the element set).
    """
    return cls(source_type=InputSourceType.LOCAL)
|
3334
|
+
@classmethod
def default(cls) -> Self:
    """
    Make an instance of an input source that is default (i.e. the value comes
    from the schema input's default).
    """
    return cls(source_type=InputSourceType.DEFAULT)
|
3341
|
+
@classmethod
def task(
    cls,
    task_ref: int,
    task_source_type: TaskSourceType | str | None = None,
    element_iters: list[int] | None = None,
    where: Where | None = None,
) -> Self:
    """
    Make an instance of an input source that is a task.

    Parameters
    ----------
    task_ref:
        Source task reference.
    task_source_type:
        Type of task source.
    element_iters:
        Originating element iterations.
    where:
        Filtering rule.
    """
    # When no task-source type is supplied, consume the task's output.
    resolved_src_type = get_enum_by_name_or_val(
        TaskSourceType, task_source_type or TaskSourceType.OUTPUT
    )
    return cls(
        source_type=InputSourceType.TASK,
        task_ref=task_ref,
        task_source_type=resolved_src_type,
        element_iters=element_iters,
        where=where,
    )