hpcflow 0.1.15__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -461
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.15.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -490
- hpcflow/archive/archive.py +0 -307
- hpcflow/archive/cloud/cloud.py +0 -45
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -427
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -233
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2595
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -322
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -166
- hpcflow/variables.py +0 -543
- hpcflow-0.1.15.dist-info/METADATA +0 -168
- hpcflow-0.1.15.dist-info/RECORD +0 -45
- hpcflow-0.1.15.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.15.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
|
@@ -0,0 +1,2071 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Elements are components of tasks.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
import copy
|
|
7
|
+
from dataclasses import dataclass, field, fields
|
|
8
|
+
from operator import attrgetter
|
|
9
|
+
from itertools import chain
|
|
10
|
+
import os
|
|
11
|
+
import sys
|
|
12
|
+
import platform
|
|
13
|
+
from typing import (
|
|
14
|
+
Any,
|
|
15
|
+
Callable,
|
|
16
|
+
Dict,
|
|
17
|
+
List,
|
|
18
|
+
Optional,
|
|
19
|
+
cast,
|
|
20
|
+
overload,
|
|
21
|
+
TYPE_CHECKING,
|
|
22
|
+
)
|
|
23
|
+
|
|
24
|
+
from hpcflow.sdk.core.enums import ParallelMode
|
|
25
|
+
from hpcflow.sdk.core.skip_reason import SkipReason
|
|
26
|
+
from hpcflow.sdk.core.errors import UnsupportedOSError, UnsupportedSchedulerError
|
|
27
|
+
from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
|
|
28
|
+
from hpcflow.sdk.core.loop_cache import LoopIndex
|
|
29
|
+
from hpcflow.sdk.typing import hydrate
|
|
30
|
+
from hpcflow.sdk.core.app_aware import AppAware
|
|
31
|
+
from hpcflow.sdk.core.utils import (
|
|
32
|
+
check_valid_py_identifier,
|
|
33
|
+
dict_values_process_flat,
|
|
34
|
+
get_enum_by_name_or_val,
|
|
35
|
+
split_param_label,
|
|
36
|
+
)
|
|
37
|
+
from hpcflow.sdk.log import TimeIt
|
|
38
|
+
from hpcflow.sdk.submission.shells import get_shell
|
|
39
|
+
from hpcflow.sdk.utils.hashing import get_hash
|
|
40
|
+
|
|
41
|
+
if TYPE_CHECKING:
|
|
42
|
+
from collections.abc import Iterable, Iterator, Mapping, Sequence
|
|
43
|
+
from typing import Any, ClassVar, Literal
|
|
44
|
+
from ..app import BaseApp
|
|
45
|
+
from ..typing import DataIndex, ParamSource
|
|
46
|
+
from .actions import Action, ElementAction, ElementActionRun
|
|
47
|
+
from .parameters import InputSource, ParameterPath, InputValue, ResourceSpec
|
|
48
|
+
from .rule import Rule
|
|
49
|
+
from .task import WorkflowTask, ElementSet
|
|
50
|
+
from .workflow import Workflow
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
class _ElementPrefixedParameter(AppAware):
    """
    Dynamic attribute-style access to a prefixed family of element parameters
    (e.g. ``inputs``, ``outputs``, ``input_files``, ``output_files``).

    Exactly one of the three possible parents (an iteration, an element action,
    or an element action run) is expected to be supplied; parameter names are
    looked up on that parent via ``get_parameter_names(prefix)``.

    Parameters
    ----------
    prefix:
        The parameter family this object exposes (e.g. ``"inputs"``).
    element_iteration:
        Parent iteration, if this view belongs to one.
    element_action:
        Parent element action, if this view belongs to one.
    element_action_run:
        Parent EAR, if this view belongs to one.
    """

    def __init__(
        self,
        prefix: str,
        element_iteration: ElementIteration | None = None,
        element_action: ElementAction | None = None,
        element_action_run: ElementActionRun | None = None,
    ) -> None:
        self._prefix = prefix
        self._element_iteration = element_iteration
        self._element_action = element_action
        self._element_action_run = element_action_run

        # assigned on first access (lazily computed cache; see
        # `prefixed_names_unlabelled`)
        self._prefixed_names_unlabelled: Mapping[str, Sequence[str]] | None = None

    def __getattr__(self, name: str) -> ElementParameter | Mapping[str, ElementParameter]:
        """
        Resolve an unknown attribute as a parameter lookup.

        Returns a single `ElementParameter` for an unlabelled (or
        ``multiple=False``) name, or a dict of label -> `ElementParameter`
        when the schema input is labelled with ``multiple=True``.

        Raises
        ------
        ValueError
            If `name` is not a known parameter of this prefix family.
        """
        # NOTE: only called for attributes not found normally, so plain
        # attributes set in `__init__` never reach this path.
        if name not in self.prefixed_names_unlabelled:
            if names_str := self.prefixed_names_unlabelled_str:
                msg_info = f"Available {self._prefix} are: {names_str}."
            else:
                msg_info = f"There are no {self._prefix} available."
            raise ValueError(f"No {self._prefix} named {name!r}. {msg_info}")

        if labels := self.prefixed_names_unlabelled.get(name):
            # is multiple; return a dict of `ElementParameter`s keyed by label
            return {
                label_i: self.__parameter(f"{self._prefix}.{name}[{label_i}]")
                for label_i in labels
            }
        else:
            # could be labelled still, but with `multiple=False`
            return self.__parameter(f"{self._prefix}.{name}")

    def __dir__(self) -> Iterator[str]:
        # Expose the dynamic parameter names for tab-completion/introspection.
        yield from super().__dir__()
        yield from self.prefixed_names_unlabelled

    @property
    def __parent(self) -> ElementIteration | ElementActionRun | ElementAction:
        # Whichever parent object was supplied at construction; at least one
        # is assumed to be present.
        p = self._element_iteration or self._element_action or self._element_action_run
        assert p is not None
        return p

    def __parameter(self, name: str) -> ElementParameter:
        """Manufacture an ElementParameter with the given name."""
        p = self.__parent
        return self._app.ElementParameter(
            path=name,
            task=self._task,
            parent=p,
            # An iteration is its own element context; other parents carry a
            # reference to their iteration.
            element=p if isinstance(p, ElementIteration) else p.element_iteration,
        )

    @property
    def _task(self) -> WorkflowTask:
        # The task owning whichever parent object we have.
        return self.__parent.task

    @property
    def prefixed_names_unlabelled(self) -> Mapping[str, Sequence[str]]:
        """
        A mapping between input types and associated labels.

        If the schema input for a given input type has `multiple=False` (even if a label
        is defined), the values for that input type will be an empty list.

        """
        # Computed once and cached on the instance.
        if self._prefixed_names_unlabelled is None:
            self._prefixed_names_unlabelled = self.__get_prefixed_names_unlabelled()
        return self._prefixed_names_unlabelled

    @property
    def prefixed_names_unlabelled_str(self) -> str:
        """
        A description of the prefixed names (comma-separated).
        """
        return ", ".join(self.prefixed_names_unlabelled)

    def __repr__(self) -> str:
        # If there are one or more labels present, then replace with a single name
        # indicating there could be multiple (using a `*` prefix):
        names = ", ".join(
            "*" + unlabelled if labels else unlabelled
            for unlabelled, labels in self.prefixed_names_unlabelled.items()
        )
        return f"{self.__class__.__name__}({names})"

    def _get_prefixed_names(self) -> list[str]:
        # Raw sorted names (possibly including "[label]" suffixes) from the parent.
        return sorted(self.__parent.get_parameter_names(self._prefix))

    def __get_prefixed_names_unlabelled(self) -> Mapping[str, Sequence[str]]:
        # Group labelled names ("base[label]") under their base name; plain
        # names map to an empty label list.
        all_names: dict[str, list[str]] = {}
        for name in self._get_prefixed_names():
            if "[" in name:
                unlab_i, label_i = split_param_label(name)
                if unlab_i is not None and label_i is not None:
                    all_names.setdefault(unlab_i, []).append(label_i)
            else:
                all_names[name] = []
        return all_names

    def __iter__(self) -> Iterator[ElementParameter | Mapping[str, ElementParameter]]:
        # Iterate the parameter objects (or label-dicts) in name order, going
        # through `__getattr__` so labelled/unlabelled handling is shared.
        for name in self.prefixed_names_unlabelled:
            yield getattr(self, name)
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
class ElementInputs(_ElementPrefixedParameter):
    """
    Attribute-style access to the inputs of an element.

    Thin wrapper over `_ElementPrefixedParameter` fixing the prefix to
    ``"inputs"``.

    Parameters
    ----------
    element_iteration: ElementIteration
        Which iteration does this refer to?
    element_action: ~hpcflow.app.ElementAction
        Which action does this refer to?
    element_action_run: ~hpcflow.app.ElementActionRun
        Which EAR does this refer to?
    """

    def __init__(
        self,
        element_iteration: ElementIteration | None = None,
        element_action: ElementAction | None = None,
        element_action_run: ElementActionRun | None = None,
    ) -> None:
        # Delegate everything to the shared prefixed-parameter machinery.
        super().__init__(
            prefix="inputs",
            element_iteration=element_iteration,
            element_action=element_action,
            element_action_run=element_action_run,
        )
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
class ElementOutputs(_ElementPrefixedParameter):
    """
    Attribute-style access to the outputs of an element.

    Thin wrapper over `_ElementPrefixedParameter` fixing the prefix to
    ``"outputs"``.

    Parameters
    ----------
    element_iteration: ElementIteration
        Which iteration does this refer to?
    element_action: ~hpcflow.app.ElementAction
        Which action does this refer to?
    element_action_run: ~hpcflow.app.ElementActionRun
        Which EAR does this refer to?
    """

    def __init__(
        self,
        element_iteration: ElementIteration | None = None,
        element_action: ElementAction | None = None,
        element_action_run: ElementActionRun | None = None,
    ) -> None:
        # Delegate everything to the shared prefixed-parameter machinery.
        super().__init__(
            prefix="outputs",
            element_iteration=element_iteration,
            element_action=element_action,
            element_action_run=element_action_run,
        )
|
|
203
|
+
|
|
204
|
+
|
|
205
|
+
class ElementInputFiles(_ElementPrefixedParameter):
    """
    Attribute-style access to the input files of an element.

    Thin wrapper over `_ElementPrefixedParameter` fixing the prefix to
    ``"input_files"``.

    Parameters
    ----------
    element_iteration: ElementIteration
        Which iteration does this refer to?
    element_action: ~hpcflow.app.ElementAction
        Which action does this refer to?
    element_action_run: ~hpcflow.app.ElementActionRun
        Which EAR does this refer to?
    """

    def __init__(
        self,
        element_iteration: ElementIteration | None = None,
        element_action: ElementAction | None = None,
        element_action_run: ElementActionRun | None = None,
    ) -> None:
        # Delegate everything to the shared prefixed-parameter machinery.
        super().__init__(
            prefix="input_files",
            element_iteration=element_iteration,
            element_action=element_action,
            element_action_run=element_action_run,
        )
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
class ElementOutputFiles(_ElementPrefixedParameter):
    """
    Attribute-style access to the output files of an element.

    Thin wrapper over `_ElementPrefixedParameter` fixing the prefix to
    ``"output_files"``.

    Parameters
    ----------
    element_iteration: ElementIteration
        Which iteration does this refer to?
    element_action: ~hpcflow.app.ElementAction
        Which action does this refer to?
    element_action_run: ~hpcflow.app.ElementActionRun
        Which EAR does this refer to?
    """

    def __init__(
        self,
        element_iteration: ElementIteration | None = None,
        element_action: ElementAction | None = None,
        element_action_run: ElementActionRun | None = None,
    ) -> None:
        # Delegate everything to the shared prefixed-parameter machinery.
        super().__init__(
            prefix="output_files",
            element_iteration=element_iteration,
            element_action=element_action,
            element_action_run=element_action_run,
        )
|
|
253
|
+
|
|
254
|
+
|
|
255
|
+
@dataclass
|
|
256
|
+
@hydrate
|
|
257
|
+
class ElementResources(JSONLike):
|
|
258
|
+
"""
|
|
259
|
+
The resources an element requires.
|
|
260
|
+
|
|
261
|
+
Note
|
|
262
|
+
----
|
|
263
|
+
This class is not typically instantiated by the user. It is instantiated when the
|
|
264
|
+
`ElementActionRun.resources` and `Jobscript.resources` attributes are accessed, and
|
|
265
|
+
when the `ElementIteration.get_resources_obj` method is called. It is common for most
|
|
266
|
+
of these attributes to be unspecified. Many of them have complex interactions with
|
|
267
|
+
each other.
|
|
268
|
+
|
|
269
|
+
|
|
270
|
+
Parameters
|
|
271
|
+
----------
|
|
272
|
+
scratch: str
|
|
273
|
+
Which scratch space to use.
|
|
274
|
+
parallel_mode: ParallelMode
|
|
275
|
+
Which parallel mode to use.
|
|
276
|
+
num_cores: int
|
|
277
|
+
How many cores to request.
|
|
278
|
+
num_cores_per_node: int
|
|
279
|
+
How many cores per compute node to request.
|
|
280
|
+
num_threads: int
|
|
281
|
+
How many threads to request.
|
|
282
|
+
num_nodes: int
|
|
283
|
+
How many compute nodes to request.
|
|
284
|
+
scheduler: str
|
|
285
|
+
Which scheduler to use.
|
|
286
|
+
shell: str
|
|
287
|
+
Which system shell to use.
|
|
288
|
+
use_job_array: bool
|
|
289
|
+
Whether to use array jobs.
|
|
290
|
+
max_array_items: int
|
|
291
|
+
If using array jobs, up to how many items should be in the job array.
|
|
292
|
+
write_app_logs: bool
|
|
293
|
+
Whether an app log file should be written.
|
|
294
|
+
combine_jobscript_std: bool
|
|
295
|
+
Whether jobscript standard output and error streams should be combined.
|
|
296
|
+
combine_scripts: bool
|
|
297
|
+
Whether Python scripts should be combined.
|
|
298
|
+
time_limit: str
|
|
299
|
+
How long to run for.
|
|
300
|
+
scheduler_args: dict[str, Any]
|
|
301
|
+
Additional arguments to pass to the scheduler.
|
|
302
|
+
shell_args: dict[str, Any]
|
|
303
|
+
Additional arguments to pass to the shell.
|
|
304
|
+
os_name: str
|
|
305
|
+
Which OS to use.
|
|
306
|
+
platform: str
|
|
307
|
+
System platform name, like "win", "linux", or "macos".
|
|
308
|
+
CPU_arch: str
|
|
309
|
+
CPU architecture, like "x86_64", "AMD64", or "arm64".
|
|
310
|
+
executable_extension: str
|
|
311
|
+
".exe" on Windows, empty otherwise.
|
|
312
|
+
environments: dict
|
|
313
|
+
Environment specifiers keyed by names.
|
|
314
|
+
resources_id: int
|
|
315
|
+
An arbitrary integer that can be used to force multiple jobscripts.
|
|
316
|
+
skip_downstream_on_failure: bool
|
|
317
|
+
Whether to skip downstream dependents on failure.
|
|
318
|
+
allow_failed_dependencies: int | float | bool | None
|
|
319
|
+
The failure tolerance with respect to dependencies, specified as a number or
|
|
320
|
+
proportion.
|
|
321
|
+
SGE_parallel_env: str
|
|
322
|
+
Which SGE parallel environment to request.
|
|
323
|
+
SLURM_partition: str
|
|
324
|
+
Which SLURM partition to request.
|
|
325
|
+
SLURM_num_tasks: str
|
|
326
|
+
How many SLURM tasks to request.
|
|
327
|
+
SLURM_num_tasks_per_node: str
|
|
328
|
+
How many SLURM tasks per compute node to request.
|
|
329
|
+
SLURM_num_nodes: str
|
|
330
|
+
How many compute nodes to request.
|
|
331
|
+
SLURM_num_cpus_per_task: str
|
|
332
|
+
How many CPU cores to ask for per SLURM task.
|
|
333
|
+
"""
|
|
334
|
+
|
|
335
|
+
# TODO: how to specify e.g. high-memory requirement?
|
|
336
|
+
|
|
337
|
+
#: Which scratch space to use.
|
|
338
|
+
scratch: str | None = None
|
|
339
|
+
#: Which parallel mode to use.
|
|
340
|
+
parallel_mode: ParallelMode | None = None
|
|
341
|
+
#: How many cores to request.
|
|
342
|
+
num_cores: int | None = None
|
|
343
|
+
#: How many cores per compute node to request.
|
|
344
|
+
num_cores_per_node: int | None = None
|
|
345
|
+
#: How many threads to request.
|
|
346
|
+
num_threads: int | None = None
|
|
347
|
+
#: How many compute nodes to request.
|
|
348
|
+
num_nodes: int | None = None
|
|
349
|
+
|
|
350
|
+
#: Which scheduler to use.
|
|
351
|
+
scheduler: str | None = None
|
|
352
|
+
#: Which system shell to use.
|
|
353
|
+
shell: str | None = None
|
|
354
|
+
#: Whether to use array jobs.
|
|
355
|
+
use_job_array: bool | None = None
|
|
356
|
+
#: If using array jobs, up to how many items should be in the job array.
|
|
357
|
+
max_array_items: int | None = None
|
|
358
|
+
#: Whether an app log file should be written.
|
|
359
|
+
write_app_logs: bool = False
|
|
360
|
+
#: Whether jobscript standard output and error streams should be combined.
|
|
361
|
+
combine_jobscript_std: bool = field(default_factory=lambda: os.name != "nt")
|
|
362
|
+
#: Whether Python scripts should be combined.
|
|
363
|
+
combine_scripts: bool | None = None
|
|
364
|
+
#: How long to run for.
|
|
365
|
+
time_limit: str | None = None
|
|
366
|
+
|
|
367
|
+
#: Additional arguments to pass to the scheduler.
|
|
368
|
+
scheduler_args: dict[str, Any] = field(default_factory=dict)
|
|
369
|
+
#: Additional arguments to pass to the shell.
|
|
370
|
+
shell_args: dict[str, Any] = field(default_factory=dict)
|
|
371
|
+
#: Which OS to use.
|
|
372
|
+
os_name: str | None = None
|
|
373
|
+
#: System platform name, like "win", "linux", or "macos"
|
|
374
|
+
platform: str | None = None
|
|
375
|
+
#: CPU architecture, like "x86_64", "AMD64", or "arm64"
|
|
376
|
+
CPU_arch: str | None = None
|
|
377
|
+
#: Typical extension used to indicate an executable file; ".exe" on Windows, empty on
|
|
378
|
+
#: all other platforms.
|
|
379
|
+
executable_extension: str | None = None
|
|
380
|
+
#: Environment specifiers keyed by names.
|
|
381
|
+
environments: dict[str, dict[str, Any]] | None = None
|
|
382
|
+
#: An arbitrary integer that can be used to force multiple jobscripts.
|
|
383
|
+
resources_id: int | None = None
|
|
384
|
+
#: Whether to skip downstream dependents on failure.
|
|
385
|
+
skip_downstream_on_failure: bool = True
|
|
386
|
+
#: The failure tolerance with respect to dependencies, specified as a number or
|
|
387
|
+
#: proportion.
|
|
388
|
+
allow_failed_dependencies: int | float | bool | None = False
|
|
389
|
+
|
|
390
|
+
# SGE scheduler specific:
|
|
391
|
+
#: Which SGE parallel environment to request.
|
|
392
|
+
SGE_parallel_env: str | None = None
|
|
393
|
+
|
|
394
|
+
# SLURM scheduler specific:
|
|
395
|
+
#: Which SLURM partition to request.
|
|
396
|
+
SLURM_partition: str | None = None
|
|
397
|
+
#: How many SLURM tasks to request.
|
|
398
|
+
SLURM_num_tasks: int | None = None
|
|
399
|
+
#: How many SLURM tasks per compute node to request.
|
|
400
|
+
SLURM_num_tasks_per_node: int | None = None
|
|
401
|
+
#: How many compute nodes to request.
|
|
402
|
+
SLURM_num_nodes: int | None = None
|
|
403
|
+
#: How many CPU cores to ask for per SLURM task.
|
|
404
|
+
SLURM_num_cpus_per_task: int | None = None
|
|
405
|
+
|
|
406
|
+
def __post_init__(self):
|
|
407
|
+
if (
|
|
408
|
+
self.num_cores is None
|
|
409
|
+
and self.num_cores_per_node is None
|
|
410
|
+
and self.num_threads is None
|
|
411
|
+
and self.num_nodes is None
|
|
412
|
+
):
|
|
413
|
+
self.num_cores = 1
|
|
414
|
+
|
|
415
|
+
if self.parallel_mode:
|
|
416
|
+
self.parallel_mode = get_enum_by_name_or_val(ParallelMode, self.parallel_mode)
|
|
417
|
+
|
|
418
|
+
self.scheduler_args = self.scheduler_args or {}
|
|
419
|
+
self.shell_args = self.shell_args or {}
|
|
420
|
+
|
|
421
|
+
def __eq__(self, other) -> bool:
|
|
422
|
+
if type(self) != type(other):
|
|
423
|
+
return False
|
|
424
|
+
else:
|
|
425
|
+
return self.__dict__ == other.__dict__
|
|
426
|
+
|
|
427
|
+
@TimeIt.decorator
def get_jobscript_hash(self) -> int:
    """Get hash from all arguments that distinguish jobscripts.

    Attributes that never affect jobscript grouping (time limit, downstream
    skipping) are excluded from the hash.
    """
    excluded = {"time_limit", "skip_downstream_on_failure"}
    if not self.combine_scripts:
        # usually environment selection need not distinguish jobscripts because
        # environments become effective/active within the command files, but if we
        # are combining scripts, then the environments must be the same:
        excluded.add("environments")

    hash_data = {
        key: copy.deepcopy(value)
        for key, value in self.__dict__.items()
        if key not in excluded
    }

    # `combine_scripts==False` and `combine_scripts==None` should have an equivalent
    # contribution to the hash, so always set it to `False` if unset at this point:
    if self.combine_scripts is None:
        hash_data["combine_scripts"] = False

    return get_hash(hash_data)
@property
def is_parallel(self) -> bool:
    """Returns True if any scheduler-agnostic arguments indicate a parallel job."""
    # any count that is set and not exactly one implies parallelism:
    counts = (
        self.num_cores,
        self.num_cores_per_node,
        self.num_nodes,
        self.num_threads,
    )
    return any(count and count != 1 for count in counts)
@property
def SLURM_is_parallel(self) -> bool:
    """Returns True if any SLURM-specific arguments indicate a parallel job."""
    counts = (
        self.SLURM_num_tasks,
        self.SLURM_num_tasks_per_node,
        self.SLURM_num_nodes,
        self.SLURM_num_cpus_per_task,
    )
    return any(count and count != 1 for count in counts)
@staticmethod
def get_env_instance_filterable_attributes() -> tuple[str, ...]:
    """Get a tuple of resource attributes that are used to filter environment
    executable instances at submit- and run-time."""
    # TODO: filter on `parallel_mode` later
    return ("num_cores",)
@staticmethod
@TimeIt.decorator
def get_default_os_name() -> str:
    """
    Get the default value for OS name (``os.name``, e.g. "posix" or "nt").
    """
    return os.name

@classmethod
@TimeIt.decorator
def get_default_shell(cls) -> str:
    """
    Get the default shell name, as configured in the app configuration.
    """
    return cls._app.config.default_shell

@classmethod
@TimeIt.decorator
def get_default_platform(cls) -> str:
    """
    Get the default value for platform, from the app's run-time info.
    """
    return cls._app.run_time_info.platform

@classmethod
@TimeIt.decorator
def get_default_CPU_arch(cls) -> str:
    """
    Get the default value for the CPU architecture, from the app's run-time info.
    """
    return cls._app.run_time_info.CPU_arch

@classmethod
@TimeIt.decorator
def get_default_executable_extension(cls) -> str:
    """
    Get the default value for the executable extension (".exe" on Windows,
    empty elsewhere).
    """
    return ".exe" if os.name == "nt" else ""

@classmethod
@TimeIt.decorator
def get_default_scheduler(cls, os_name: str, shell_name: str) -> str:
    """
    Get the default value for scheduler, given the OS and shell names.
    """
    if os_name == "nt" and "wsl" in shell_name:
        # provide a "*_posix" default scheduler on windows if shell is WSL:
        return "direct_posix"
    return cls._app.config.default_scheduler
@TimeIt.decorator
def set_defaults(self):
    """
    Set defaults for unspecified values that need defaults.

    Fills `os_name`, `shell` and `scheduler` when unset, then overwrites the
    machine-derived fields and merges configured defaults into `shell_args`
    and `scheduler_args` (user-provided values win).
    """
    if self.os_name is None:
        self.os_name = self.get_default_os_name()
    if self.shell is None:
        self.shell = self.get_default_shell()
    if self.scheduler is None:
        self.scheduler = self.get_default_scheduler(self.os_name, self.shell)

    # these are not set by the user:
    self.platform = self.get_default_platform()
    self.CPU_arch = self.get_default_CPU_arch()
    self.executable_extension = self.get_default_executable_extension()

    # merge defaults shell args from config (explicit args take precedence):
    self.shell_args = {
        **self._app.config.shells.get(self.shell, {}).get("defaults", {}),
        **self.shell_args,
    }

    # "direct_posix" scheduler is valid on Windows if using WSL:
    cfg_lookup = f"{self.scheduler}_posix" if "wsl" in self.shell else self.scheduler
    # deep copy because we `pop` from the nested config dict below:
    cfg_sched = copy.deepcopy(self._app.config.schedulers.get(cfg_lookup, {}))

    # merge defaults scheduler args from config:
    cfg_defs = cfg_sched.get("defaults", {})
    cfg_opts = cfg_defs.pop("options", {})
    # "options" is merged one level deep; other defaults are merged flat:
    opts = {**cfg_opts, **self.scheduler_args.get("options", {})}
    if opts:
        self.scheduler_args["options"] = opts
    self.scheduler_args = {**cfg_defs, **self.scheduler_args}
@TimeIt.decorator
def validate_against_machine(self):
    """Validate the values for `os_name`, `shell` and `scheduler` against those
    supported on this machine (as specified by the app configuration).

    Raises
    ------
    UnsupportedOSError
        If `os_name` differs from the current machine's `os.name`.
    UnsupportedSchedulerError
        If `scheduler` is not in the configured schedulers.
    NotImplementedError
        If `combine_jobscript_std` is requested on Windows.
    """
    if self.os_name != os.name:
        raise UnsupportedOSError(os_name=self.os_name)
    if self.scheduler not in self._app.config.schedulers:
        raise UnsupportedSchedulerError(
            scheduler=self.scheduler,
            supported=self._app.config.schedulers,
        )

    if self.os_name == "nt" and self.combine_jobscript_std:
        raise NotImplementedError(
            "`combine_jobscript_std` is not yet supported on Windows."
        )

    # might raise `UnsupportedShellError`:
    get_shell(shell_name=self.shell, os_name=self.os_name)

    # Validate num_cores/num_nodes against options in config and set scheduler-
    # specific resources (e.g. SGE parallel environment, and SLURM partition)
    if "_" in self.scheduler:  # e.g. WSL on windows uses *_posix
        key = tuple(self.scheduler.split("_"))
    else:
        key = (self.scheduler.lower(), self.os_name.lower())
    scheduler_cls = self._app.scheduler_lookup[key]
    scheduler_cls.process_resources(self, self._app.config.schedulers[self.scheduler])
class ElementIteration(AppAware):
    """
    A particular iteration of an element.

    Parameters
    ----------
    id_ : int
        The ID of this iteration.
    is_pending: bool
        Whether this iteration is pending execution.
    index: int
        The index of this iteration in its parent element.
    element: Element
        The element this is an iteration of.
    data_idx: dict
        The overall element iteration data index, before resolution of EARs.
    EARs_initialised: bool
        Whether EARs have been set up for the iteration.
    EAR_IDs: dict[int, list[int]]
        Mapping from action index to EAR IDs, where known.
    EARs: list[dict]
        Data about EARs.
    schema_parameters: list[str]
        Parameters from the schema.
    loop_idx: dict[str, int]
        Indexing information from the loop.
    """

    def __init__(
        self,
        id_: int,
        is_pending: bool,
        index: int,
        element: Element,
        data_idx: DataIndex,
        EARs_initialised: bool,
        EAR_IDs: dict[int, list[int]],
        EARs: dict[int, dict[Mapping[str, Any], Any]] | None,
        schema_parameters: list[str],
        loop_idx: Mapping[str, int],
    ):
        self._id = id_
        self._is_pending = is_pending
        self._index = index
        self._element = element
        self._data_idx = data_idx
        self._loop_idx = LoopIndex(loop_idx)
        self._schema_parameters = schema_parameters
        self._EARs_initialised = EARs_initialised
        self._EARs = EARs
        self._EAR_IDs = EAR_IDs

        # assigned on first access of corresponding properties:
        self._inputs: ElementInputs | None = None
        self._outputs: ElementOutputs | None = None
        self._input_files: ElementInputFiles | None = None
        self._output_files: ElementOutputFiles | None = None
        self._action_objs: dict[int, ElementAction] | None = None
def __repr__(self):
    """Concise developer-facing representation of this iteration."""
    cls_name = self.__class__.__name__
    return (
        f"{cls_name}(id={self.id_!r}, "
        f"index={self.index!r}, element={self.element!r}, "
        f"EARs_initialised={self.EARs_initialised!r}"
        f")"
    )
@property
def data_idx(self) -> DataIndex:
    """The overall element iteration data index, before resolution of EARs."""
    return self._data_idx

@property
def EARs_initialised(self) -> bool:
    """Whether or not the EARs have been initialised."""
    return self._EARs_initialised

@property
def element(self) -> Element:
    """
    The element this is an iteration of.
    """
    return self._element

@property
def index(self) -> int:
    """
    The index of this iteration in its parent element.
    """
    return self._index

@property
def id_(self) -> int:
    """
    The ID of this iteration.
    """
    return self._id

@property
def is_pending(self) -> bool:
    """
    Whether this iteration is pending execution.
    """
    return self._is_pending

@property
def task(self) -> WorkflowTask:
    """
    The task this is an iteration of an element for.
    """
    return self.element.task

@property
def workflow(self) -> Workflow:
    """
    The workflow this is a part of.
    """
    return self.element.workflow

@property
def loop_idx(self) -> LoopIndex[str, int]:
    """
    Indexing information from the loop.
    """
    return self._loop_idx

@property
def schema_parameters(self) -> Sequence[str]:
    """
    Parameters from the schema.
    """
    return self._schema_parameters

@property
def EAR_IDs(self) -> Mapping[int, Sequence[int]]:
    """
    Mapping from action index to EAR ID, where known.
    """
    return self._EAR_IDs

@property
def loop_skipped(self) -> bool:
    """True if the iteration was skipped entirely due to a loop termination."""
    if not self.action_runs:
        # this includes when runs are not initialised
        return False
    else:
        return all(
            i.skip_reason is SkipReason.LOOP_TERMINATION for i in self.action_runs
        )

@property
def EAR_IDs_flat(self) -> Iterable[int]:
    """
    The EAR IDs, flattened across all actions.
    """
    return chain.from_iterable(self.EAR_IDs.values())
@property
def actions(self) -> Mapping[int, ElementAction]:
    """
    The actions of this iteration, keyed by action index.

    Lazily constructed from the raw EAR data on first access; empty when EARs
    are not initialised.
    """
    if self._action_objs is None:
        self._action_objs = {
            act_idx: self._app.ElementAction(self, act_idx, runs)
            for act_idx, runs in (self._EARs or {}).items()
        }
    return self._action_objs

@property
def action_runs(self) -> Sequence[ElementActionRun]:
    """
    A list of element action runs, where only the final run is taken for each
    element action.
    """
    return [act.runs[-1] for act in self.actions.values()]
@property
def inputs(self) -> ElementInputs:
    """
    The inputs to this element (lazily constructed).
    """
    if not self._inputs:
        self._inputs = self._app.ElementInputs(element_iteration=self)
    return self._inputs

@property
def outputs(self) -> ElementOutputs:
    """
    The outputs from this element (lazily constructed).
    """
    if not self._outputs:
        self._outputs = self._app.ElementOutputs(element_iteration=self)
    return self._outputs

@property
def input_files(self) -> ElementInputFiles:
    """
    The input files to this element (lazily constructed).
    """
    if not self._input_files:
        self._input_files = self._app.ElementInputFiles(element_iteration=self)
    return self._input_files

@property
def output_files(self) -> ElementOutputFiles:
    """
    The output files from this element (lazily constructed).
    """
    if not self._output_files:
        self._output_files = self._app.ElementOutputFiles(element_iteration=self)
    return self._output_files
def get_parameter_names(self, prefix: str) -> list[str]:
    """Get parameter types associated with a given prefix.

    For example, with the prefix "inputs", this would return `['p1', 'p2']` for a task
    schema that has input types `p1` and `p2`. For inputs, labels are ignored. For
    example, for a task schema that accepts two inputs of the same type `p1`, with
    labels `one` and `two`, this method would return (for the "inputs" prefix):
    `['p1[one]', 'p1[two]']`.

    This method is distinct from `Action.get_parameter_names` in that it returns
    schema-level inputs/outputs, whereas `Action.get_parameter_names` returns
    action-level input/output/file types/labels.

    Parameters
    ----------
    prefix
        One of "inputs", "outputs".
    """
    lookup = self.task.template._get_single_label_lookup("inputs")
    names: list[str] = []
    for param_name in self.schema_parameters:
        if not param_name.startswith(prefix):
            continue
        # resolve trivial labels, then drop the prefix component:
        resolved = lookup.get(param_name, param_name)
        names.append(".".join(resolved.split(".")[1:]))
    return names
@TimeIt.decorator
def get_data_idx(
    self,
    path: str | None = None,
    action_idx: int | None = None,
    run_idx: int = -1,
) -> DataIndex:
    """
    Get the data index.

    Parameters
    ----------
    path:
        If specified, filters the data indices to the ones relevant to this path.
    action_idx:
        The index of the action within the schema. If None, a merged view over
        all actions is produced.
    run_idx:
        The index of the run within the action.

    Returns
    -------
    DataIndex
        A deep copy, so callers can safely mutate the result.
    """

    if not self.actions:
        # no actions (e.g. EARs not initialised): use the iteration-level index
        data_idx = self.data_idx

    elif action_idx is None:
        # inputs should be from first action where that input is defined, and outputs
        # should include modifications from all actions; we can't just take
        # `self.data_idx`, because 1) this is used for initial runs, and subsequent
        # runs might have different parametrisations, and 2) we want to include
        # intermediate input/output_files:
        data_idx = {}
        for action in self.actions.values():
            for k, v in action.runs[run_idx].data_idx.items():
                # keep the first-seen value for "inputs.*" keys; overwrite others:
                if not k.startswith("inputs") or k not in data_idx:
                    data_idx[k] = v

    else:
        elem_act = self.actions[action_idx]
        data_idx = elem_act.runs[run_idx].data_idx

    if path:
        data_idx = {k: v for k, v in data_idx.items() if k.startswith(path)}

    return copy.deepcopy(data_idx)
def __get_parameter_sources(
    self, data_idx: DataIndex, filter_type: str | None, use_task_index: bool
) -> Mapping[str, ParamSource | list[ParamSource]]:
    """Resolve a data index to parameter sources, optionally filtering by source
    type and rewriting task insert IDs to workflow task indices.

    NOTE: mutates `data_idx` in place (callers pass a deep copy from
    `get_data_idx`).
    """
    # the value associated with `repeats.*` is the repeats index, not a parameter ID:
    for k in tuple(data_idx):
        if k.startswith("repeats."):
            data_idx.pop(k)

    out: Mapping[str, ParamSource | list[ParamSource]] = dict_values_process_flat(
        data_idx,
        callable=self.workflow.get_parameter_sources,
    )

    if use_task_index:
        for k, v in out.items():
            assert isinstance(v, dict)
            if (insert_ID := v.pop("task_insert_ID", None)) is not None:
                # Modify the contents of out
                v["task_idx"] = self.workflow.tasks.get(insert_ID=insert_ID).index

    if not filter_type:
        return out

    # Filter to just the elements that have the right type property
    filtered = (
        (k, self.__filter_param_source_by_type(v, filter_type))
        for k, v in out.items()
    )
    return {k: v for k, v in filtered if v is not None}

@staticmethod
def __filter_param_source_by_type(
    value: ParamSource | list[ParamSource], filter_type: str
) -> ParamSource | list[ParamSource] | None:
    """Return only the source(s) whose "type" matches `filter_type`, or None if
    nothing matches."""
    if isinstance(value, list):
        if sources := [src for src in value if src["type"] == filter_type]:
            return sources
    else:
        if value["type"] == filter_type:
            return value
    return None
@overload
def get_parameter_sources(
    self,
    path: str | None,
    *,
    action_idx: int | None,
    run_idx: int = -1,
    typ: str | None = None,
    as_strings: Literal[True],
    use_task_index: bool = False,
) -> Mapping[str, str]: ...

@overload
def get_parameter_sources(
    self,
    path: str | None = None,
    *,
    action_idx: int | None = None,
    run_idx: int = -1,
    typ: str | None = None,
    as_strings: Literal[False] = False,
    use_task_index: bool = False,
) -> Mapping[str, ParamSource | list[ParamSource]]: ...

@TimeIt.decorator
def get_parameter_sources(
    self,
    path: str | None = None,
    *,
    action_idx: int | None = None,
    run_idx: int = -1,
    typ: str | None = None,
    as_strings: bool = False,
    use_task_index: bool = False,
) -> Mapping[str, str] | Mapping[str, ParamSource | list[ParamSource]]:
    """
    Get the origin of parameters.

    Parameters
    ----------
    path:
        If specified, filter to data-index keys relevant to this path.
    action_idx:
        The index of the action within the schema.
    run_idx:
        The index of the run within the action.
    typ:
        If specified, only include sources of this type (e.g. "EAR_output").
    as_strings:
        If True, return compact string descriptions instead of source dicts.
    use_task_index
        If True, use the task index within the workflow, rather than the task insert
        ID.
    """
    data_idx = self.get_data_idx(path, action_idx, run_idx)
    out = self.__get_parameter_sources(data_idx, typ or "", use_task_index)
    if not as_strings:
        return out

    # format as a dict with compact string values
    out_strs: dict[str, str] = {}
    for k, v in out.items():
        assert isinstance(v, dict)
        if v["type"] == "local_input":
            if use_task_index:
                if v["task_idx"] == self.task.index:
                    out_strs[k] = "local"
                else:
                    out_strs[k] = f"task.{v['task_idx']}.input"
            else:
                if v["task_insert_ID"] == self.task.insert_ID:
                    out_strs[k] = "local"
                else:
                    out_strs[k] = f"task.{v['task_insert_ID']}.input"
        elif v["type"] == "default_input":
            # BUG FIX: previously `out_strs == "default"` — a no-op comparison —
            # so default-input sources were silently dropped from the result:
            out_strs[k] = "default"
        else:
            idx = v["task_idx"] if use_task_index else v["task_insert_ID"]
            out_strs[k] = (
                f"task.{idx}.element.{v['element_idx']}."
                f"action.{v['action_idx']}.run.{v['run_idx']}"
            )
    return out_strs
@TimeIt.decorator
def get(
    self,
    path: str | None = None,
    action_idx: int | None = None,
    run_idx: int = -1,
    default: Any = None,
    raise_on_missing: bool = False,
    raise_on_unset: bool = False,
) -> Any:
    """Get element data from the persistent store.

    Parameters
    ----------
    path:
        If specified, the dotted parameter path to retrieve.
    action_idx:
        The index of the action within the schema.
    run_idx:
        The index of the run within the action.
    default:
        Value to return when the data is missing/unset (and raising is off).
    raise_on_missing:
        If True, raise when the path is not found rather than returning `default`.
    raise_on_unset:
        If True, raise when the parameter is unset rather than returning `default`.
    """
    # TODO include a "stats" parameter which when set we know the run has been
    # executed (or if start time is set but not end time, we know it's running or
    # failed.)

    data_idx = self.get_data_idx(action_idx=action_idx, run_idx=run_idx)
    single_label_lookup = self.task.template._get_single_label_lookup(prefix="inputs")

    if single_label_lookup:
        # For any non-multiple `SchemaParameter`s of this task with non-empty labels,
        # remove the trivial label:
        for key in tuple(data_idx):
            if (path or "").startswith(key):
                # `path` uses labelled type, so no need to convert to non-labelled
                continue
            if lookup_val := single_label_lookup.get(key):
                data_idx[lookup_val] = data_idx.pop(key)

    return self.task._get_merged_parameter_data(
        data_index=data_idx,
        path=path,
        raise_on_missing=raise_on_missing,
        raise_on_unset=raise_on_unset,
        default=default,
    )
@overload
def get_EAR_dependencies(
    self,
    as_objects: Literal[False] = False,
) -> set[int]: ...

@overload
def get_EAR_dependencies(
    self,
    as_objects: Literal[True],
) -> list[ElementActionRun]: ...

@TimeIt.decorator
def get_EAR_dependencies(
    self,
    as_objects: bool = False,
) -> set[int] | list[ElementActionRun]:
    """Get EARs that this element iteration depends on (excluding EARs of this element
    iteration).

    Parameters
    ----------
    as_objects:
        If True, return sorted `ElementActionRun` objects instead of a set of IDs.
    """
    # TODO: test this includes EARs of upstream iterations of this iteration's element
    if self.action_runs:
        EAR_IDs_set = frozenset(self.EAR_IDs_flat)
        out = {
            id_
            for ear in self.action_runs
            for id_ in ear.get_EAR_dependencies()
            if id_ not in EAR_IDs_set
        }
    else:
        # if an "input-only" task schema, then there will be no action runs, but the
        # ElementIteration can still depend on other EARs if inputs are sourced from
        # upstream tasks:
        out = {
            src_i["EAR_ID"]
            for src in self.get_parameter_sources(typ="EAR_output").values()
            for src_i in (src if isinstance(src, list) else [src])
        }

    if as_objects:
        return self.workflow.get_EARs_from_IDs(sorted(out))
    return out

@overload
def get_element_iteration_dependencies(
    self, as_objects: Literal[True]
) -> list[ElementIteration]: ...

@overload
def get_element_iteration_dependencies(
    self, as_objects: Literal[False] = False
) -> set[int]: ...

@TimeIt.decorator
def get_element_iteration_dependencies(
    self, as_objects: bool = False
) -> set[int] | list[ElementIteration]:
    """Get element iterations that this element iteration depends on."""
    # TODO: test this includes previous iterations of this iteration's element
    EAR_IDs = self.get_EAR_dependencies()
    out = set(self.workflow.get_element_iteration_IDs_from_EAR_IDs(EAR_IDs))
    if as_objects:
        return self.workflow.get_element_iterations_from_IDs(sorted(out))
    return out

@overload
def get_element_dependencies(
    self,
    as_objects: Literal[False] = False,
) -> set[int]: ...

@overload
def get_element_dependencies(
    self,
    as_objects: Literal[True],
) -> list[Element]: ...

@TimeIt.decorator
def get_element_dependencies(
    self,
    as_objects: bool = False,
) -> set[int] | list[Element]:
    """Get elements that this element iteration depends on."""
    # TODO: this will be used in viz.
    EAR_IDs = self.get_EAR_dependencies()
    out = set(self.workflow.get_element_IDs_from_EAR_IDs(EAR_IDs))
    if as_objects:
        return self.workflow.get_elements_from_IDs(sorted(out))
    return out
def get_input_dependencies(self) -> Mapping[str, ParamSource]:
    """Get locally defined inputs/sequences/defaults from other tasks that this
    element iteration depends on."""
    own_insert_ID = self.task.insert_ID
    deps: dict[str, ParamSource] = {}
    for name, source in self.get_parameter_sources().items():
        candidates = source if isinstance(source, list) else [source]
        for src in candidates:
            # a local/default input defined on a *different* task is a dependency:
            if (
                src["type"] in ("local_input", "default_input")
                and src["task_insert_ID"] != own_insert_ID
            ):
                deps[name] = src
    return deps
@overload
def get_task_dependencies(self, as_objects: Literal[False] = False) -> set[int]: ...

@overload
def get_task_dependencies(self, as_objects: Literal[True]) -> list[WorkflowTask]: ...

def get_task_dependencies(
    self, as_objects: bool = False
) -> set[int] | list[WorkflowTask]:
    """Get tasks (insert ID or WorkflowTask objects) that this element iteration
    depends on.

    Dependencies may come from either elements from upstream tasks, or from locally
    defined inputs/sequences/defaults from upstream tasks."""

    out = set(
        self.workflow.get_task_IDs_from_element_IDs(self.get_element_dependencies())
    )
    for p_src in self.get_input_dependencies().values():
        out.add(p_src["task_insert_ID"])

    if as_objects:
        return [self.workflow.tasks.get(insert_ID=id_) for id_ in sorted(out)]
    return out
@property
def __elements(self) -> Iterator[Element]:
    """
    This iteration's element and its downstream elements (i.e. all elements of
    this task and of every subsequent task in the workflow).
    """
    for task in self.workflow.tasks[self.task.index :]:
        yield from task.elements[:]

@property
def __iterations(self) -> Iterator[ElementIteration]:
    """
    This iteration and its downstream iterations.
    """
    for elem in self.__elements:
        yield from elem.iterations
@overload
def get_dependent_EARs(self, as_objects: Literal[False] = False) -> set[int]: ...

@overload
def get_dependent_EARs(self, as_objects: Literal[True]) -> list[ElementActionRun]: ...

@TimeIt.decorator
def get_dependent_EARs(
    self, as_objects: bool = False
) -> set[int] | list[ElementActionRun]:
    """Get EARs of downstream iterations and tasks that depend on this element
    iteration."""
    # TODO: test this includes EARs of downstream iterations of this iteration's element
    deps: set[int] = set()
    for iter_ in self.__iterations:
        if iter_.id_ == self.id_:
            # don't include EARs of this iteration
            continue
        for run in iter_.action_runs:
            if run.get_EAR_dependencies().intersection(self.EAR_IDs_flat):
                deps.add(run.id_)
    if as_objects:
        return self.workflow.get_EARs_from_IDs(sorted(deps))
    return deps

@overload
def get_dependent_element_iterations(
    self, as_objects: Literal[True]
) -> list[ElementIteration]: ...

@overload
def get_dependent_element_iterations(
    self, as_objects: Literal[False] = False
) -> set[int]: ...

@TimeIt.decorator
def get_dependent_element_iterations(
    self, as_objects: bool = False
) -> set[int] | list[ElementIteration]:
    """Get elements iterations of downstream iterations and tasks that depend on this
    element iteration."""
    # TODO: test this includes downstream iterations of this iteration's element?
    deps: set[int] = set()
    for iter_i in self.__iterations:
        if iter_i.id_ == self.id_:
            # don't include this iteration itself
            continue
        if self.id_ in iter_i.get_element_iteration_dependencies():
            deps.add(iter_i.id_)
    if as_objects:
        return self.workflow.get_element_iterations_from_IDs(sorted(deps))
    return deps

@overload
def get_dependent_elements(
    self,
    as_objects: Literal[True],
) -> list[Element]: ...

@overload
def get_dependent_elements(
    self,
    as_objects: Literal[False] = False,
) -> set[int]: ...

@TimeIt.decorator
def get_dependent_elements(
    self,
    as_objects: bool = False,
) -> set[int] | list[Element]:
    """Get elements of downstream tasks that depend on this element iteration."""
    deps: set[int] = set()
    for task in self.task.downstream_tasks:
        for element in task.elements[:]:
            if any(
                self.id_ in iter_i.get_element_iteration_dependencies()
                for iter_i in element.iterations
            ):
                deps.add(element.id_)

    if as_objects:
        return self.workflow.get_elements_from_IDs(sorted(deps))
    return deps

@overload
def get_dependent_tasks(
    self,
    as_objects: Literal[True],
) -> list[WorkflowTask]: ...

@overload
def get_dependent_tasks(
    self,
    as_objects: Literal[False] = False,
) -> set[int]: ...

def get_dependent_tasks(
    self,
    as_objects: bool = False,
) -> set[int] | list[WorkflowTask]:
    """Get downstream tasks that depend on this element iteration."""
    deps: set[int] = set()
    for task in self.task.downstream_tasks:
        if any(
            self.id_ in iter_i.get_element_iteration_dependencies()
            for element in task.elements[:]
            for iter_i in element.iterations
        ):
            deps.add(task.insert_ID)
    if as_objects:
        return [self.workflow.tasks.get(insert_ID=id_) for id_ in sorted(deps)]
    return deps
def get_template_resources(self) -> Mapping[str, Any]:
|
|
1281
|
+
"""Get template-level resources."""
|
|
1282
|
+
res = self.workflow.template._resources
|
|
1283
|
+
return {res_i.normalised_resources_path: res_i._get_value() for res_i in res}
|
|
1284
|
+
|
|
1285
|
+
    @TimeIt.decorator
    def get_resources(
        self, action: Action, set_defaults: bool = False
    ) -> Mapping[str, Any]:
        """Resolve specific resources for the specified action of this iteration,
        considering all applicable scopes.

        Parameters
        ----------
        action
            The action whose environment specifiers and possible scopes are used to
            resolve the resources.
        set_defaults
            If `True`, include machine defaults for `os_name`, `shell` and `scheduler`.

        Returns
        -------
        Mapping of resource-specifier names to their resolved values, with
        higher-specificity action scopes taking precedence.
        """

        # This method is currently accurate for both `ElementIteration` and `EAR` objects
        # because when generating the EAR data index we copy (from the schema data index)
        # anything that starts with "resources". BUT: when we support adding a run, the
        # user should be able to modify the resources! Which would invalidate this
        # assumption!!!!!

        # --- so need to rethink...
        # question is perhaps "what would the resources be if this action were to become
        # an EAR?" which would then allow us to test a resources-based action rule.

        # FIXME: Use a TypedDict?
        # deep-copy so scope merging below cannot mutate the stored parameter data:
        resource_specs: dict[str, dict[str, dict[str, Any]]] = copy.deepcopy(
            self.get("resources")
        )

        env_spec = action.get_environment_spec()
        env_name: str = env_spec["name"]

        # set default env specifiers, if none set:
        if "environments" not in (any_specs := resource_specs.setdefault("any", {})):
            any_specs["environments"] = {env_name: copy.deepcopy(env_spec)}

        for dat in resource_specs.values():
            if "environments" in dat:
                # keep only relevant user-provided environment specifiers:
                dat["environments"] = {
                    k: v for k, v in dat["environments"].items() if k == env_name
                }
                # merge user-provided specifiers into action specifiers:
                dat["environments"].setdefault(env_name, {}).update(
                    copy.deepcopy(env_spec)
                )

        resources: dict[str, Any] = {}
        for scope in action._get_possible_scopes_reversed():
            # loop from least-specific to most so higher-specificity scopes take precedence:
            if scope_res := resource_specs.get(scope.to_string()):
                # `None` values would clobber more-specific settings, so skip them:
                resources.update((k, v) for k, v in scope_res.items() if v is not None)

        if set_defaults:
            # used in e.g. `Rule.test` if testing resource rules on element iterations,
            # also might have resource keys in script, program paths:
            ER = self._app.ElementResources
            resources.setdefault("os_name", ER.get_default_os_name())
            resources.setdefault("shell", ER.get_default_shell())
            if "scheduler" not in resources:
                # default scheduler depends on the (possibly just-defaulted)
                # os_name and shell:
                resources["scheduler"] = ER.get_default_scheduler(
                    resources["os_name"], resources["shell"]
                )
            resources.setdefault("platform", ER.get_default_platform())
            resources.setdefault("CPU_arch", ER.get_default_CPU_arch())
            resources.setdefault(
                "executable_extension", ER.get_default_executable_extension()
            )

        # unset inapplicable items:
        if "combine_scripts" in resources and not action.script_is_python_snippet:
            del resources["combine_scripts"]

        return resources
|
|
1359
|
+
|
|
1360
|
+
def get_resources_obj(
|
|
1361
|
+
self, action: Action, set_defaults: bool = False
|
|
1362
|
+
) -> ElementResources:
|
|
1363
|
+
"""
|
|
1364
|
+
Get the resources for an action (see :py:meth:`get_resources`)
|
|
1365
|
+
as a searchable model.
|
|
1366
|
+
"""
|
|
1367
|
+
return self._app.ElementResources(**self.get_resources(action, set_defaults))
|
|
1368
|
+
|
|
1369
|
+
|
|
1370
|
+
class Element(AppAware):
    """
    A basic component of a workflow. Elements are enactments of tasks.

    Parameters
    ----------
    id_ : int
        The ID of this element.
    is_pending: bool
        Whether this element is pending execution.
    task: ~hpcflow.app.WorkflowTask
        The task this is part of the enactment of.
    index: int
        The index of this element.
    es_idx: int
        The index within the task of the element set containing this element.
    seq_idx: dict[str, int]
        The sequence index IDs.
    src_idx: dict[str, int]
        The input source indices.
    iteration_IDs: list[int]
        The known IDs of iterations,
    iterations: list[dict]
        Data for creating iteration objects.
    """

    # TODO: use slots
    # TODO:
    # - add `iterations` property which returns `ElementIteration`
    # - also map iteration properties of the most recent iteration to this object

    def __init__(
        self,
        id_: int,
        is_pending: bool,
        task: WorkflowTask,
        index: int,
        es_idx: int,
        seq_idx: Mapping[str, int],
        src_idx: Mapping[str, int],
        iteration_IDs: list[int],
        iterations: list[dict[str, Any]],
    ) -> None:
        self._id = id_
        self._is_pending = is_pending
        self._task = task
        self._index = index
        self._es_idx = es_idx
        self._seq_idx = seq_idx
        self._src_idx = src_idx

        self._iteration_IDs = iteration_IDs
        self._iterations = iterations

        # assigned on first access:
        self._iteration_objs: list[ElementIteration] | None = None

    def __repr__(self) -> str:
        return (
            f"{self.__class__.__name__}(id={self.id_!r}, "
            f"index={self.index!r}, task={self.task.unique_name!r}"
            f")"
        )

    @property
    def id_(self) -> int:
        """
        The ID of this element.
        """
        return self._id

    @property
    def is_pending(self) -> bool:
        """
        Whether this element is pending execution.
        """
        return self._is_pending

    @property
    def task(self) -> WorkflowTask:
        """
        The task this is part of the enactment of.
        """
        return self._task

    @property
    def index(self) -> int:
        """Get the index of the element within the task.

        Note: the `global_idx` attribute returns the index of the element within the
        workflow, across all tasks."""

        return self._index

    @property
    def element_set_idx(self) -> int:
        """
        The index within the task of the element set containing this element.
        """
        return self._es_idx

    @property
    def element_set(self) -> ElementSet:
        """
        The element set containing this element.
        """
        return self.task.template.element_sets[self.element_set_idx]

    @property
    def sequence_idx(self) -> Mapping[str, int]:
        """
        The sequence index IDs.
        """
        return self._seq_idx

    @property
    def input_source_idx(self) -> Mapping[str, int]:
        """
        The input source indices.
        """
        return self._src_idx

    @property
    def input_sources(self) -> Mapping[str, InputSource]:
        """
        The sources of the inputs to this element.
        """
        # keys are data-index paths like "inputs.<param>"; strip the prefix to
        # look up the element set's input sources by parameter name:
        return {
            k: self.element_set.input_sources[k.removeprefix("inputs.")][v]
            for k, v in self.input_source_idx.items()
        }

    @property
    def workflow(self) -> Workflow:
        """
        The workflow containing this element.
        """
        return self.task.workflow

    @property
    def iteration_IDs(self) -> Sequence[int]:
        """
        The IDs of the iterations of this element.
        """
        return self._iteration_IDs

    @property
    @TimeIt.decorator
    def iterations(self) -> Sequence[ElementIteration]:
        """
        The iterations of this element.
        """
        # TODO: fix this
        # lazily construct iteration objects on first access and cache them:
        if self._iteration_objs is None:
            self._iteration_objs = [
                self._app.ElementIteration(
                    element=self,
                    index=idx,
                    **{k: v for k, v in iter_i.items() if k != "element_ID"},
                )
                for idx, iter_i in enumerate(self._iterations)
            ]
        return self._iteration_objs

    @property
    def dir_name(self) -> str:
        """
        The name of the directory for containing temporary files for this element.
        """
        return f"e_{self.index}"

    @property
    def latest_iteration(self) -> ElementIteration:
        """
        The most recent iteration of this element.
        """
        return self.iterations[-1]

    @property
    def latest_iteration_non_skipped(self) -> ElementIteration | None:
        """Get the latest iteration that is not loop-skipped."""
        # NOTE(review): implicitly returns None if *every* iteration is
        # loop-skipped — confirm callers never hit that case.
        for iter_i in self.iterations[::-1]:
            if not iter_i.loop_skipped:
                return iter_i

    @property
    def inputs(self) -> ElementInputs:
        """
        The inputs to this element's most recent iteration (that was not skipped due to
        loop termination).
        """
        return self.latest_iteration_non_skipped.inputs

    @property
    def outputs(self) -> ElementOutputs:
        """
        The outputs from this element's most recent iteration (that was not skipped due to
        loop termination).
        """
        return self.latest_iteration_non_skipped.outputs

    @property
    def input_files(self) -> ElementInputFiles:
        """
        The input files to this element's most recent iteration (that was not skipped due
        to loop termination).
        """
        return self.latest_iteration_non_skipped.input_files

    @property
    def output_files(self) -> ElementOutputFiles:
        """
        The output files from this element's most recent iteration (that was not skipped
        due to loop termination).
        """
        return self.latest_iteration_non_skipped.output_files

    @property
    def schema_parameters(self) -> Sequence[str]:
        """
        The schema-defined parameters to this element's most recent iteration (that was
        not skipped due to loop termination).
        """
        return self.latest_iteration_non_skipped.schema_parameters

    @property
    def actions(self) -> Mapping[int, ElementAction]:
        """
        The actions of this element's most recent iteration (that was not skipped due to
        loop termination).
        """
        return self.latest_iteration_non_skipped.actions

    @property
    def action_runs(self) -> Sequence[ElementActionRun]:
        """
        A list of element action runs from the latest iteration, where only the
        final run is taken for each element action.
        """
        return self.latest_iteration_non_skipped.action_runs

    def to_element_set_data(self) -> tuple[list[InputValue], list[ResourceSpec]]:
        """Generate lists of workflow-bound InputValues and ResourceList."""
        inputs: list[InputValue] = []
        resources: list[ResourceSpec] = []
        for k, v in self.get_data_idx().items():
            # data-index keys look like "<kind>.<parameter-or-scope>[.<path>...]":
            kind, parameter_or_scope, *path = k.split(".")

            if kind == "inputs":
                inp_val = self._app.InputValue(
                    parameter=parameter_or_scope,
                    path=cast("str", path) or None,  # FIXME: suspicious cast!
                    value=None,
                )
                # bind directly to the workflow's stored parameter data:
                inp_val._value_group_idx = v
                inp_val._workflow = self.workflow
                inputs.append(inp_val)

            elif kind == "resources":
                scope = self._app.ActionScope.from_json_like(parameter_or_scope)
                res = self._app.ResourceSpec(scope=scope)
                res._value_group_idx = v
                res._workflow = self.workflow
                resources.append(res)

        return inputs, resources

    def get_sequence_value(self, sequence_path: str) -> Any:
        """
        Get the value of a sequence that applies.

        Raises
        ------
        ValueError
            If no sequence with the given path exists in the originating element set,
            or the sequence has no defined values.
        """

        if not (seq := self.element_set.get_sequence_from_path(sequence_path)):
            raise ValueError(
                f"No sequence with path {sequence_path!r} in this element's originating "
                f"element set."
            )
        if (values := seq.values) is None:
            raise ValueError(
                f"Sequence with path {sequence_path!r} has no defined values."
            )
        return values[self.sequence_idx[sequence_path]]

    def get_data_idx(
        self,
        path: str | None = None,
        action_idx: int | None = None,
        run_idx: int = -1,
    ) -> DataIndex:
        """Get the data index of the most recent element iteration that
        is not loop-skipped.

        Parameters
        ----------
        action_idx
            The index of the action within the schema.
        """
        return self.latest_iteration_non_skipped.get_data_idx(
            path=path,
            action_idx=action_idx,
            run_idx=run_idx,
        )

    @overload
    def get_parameter_sources(
        self,
        path: str | None = None,
        *,
        action_idx: int | None = None,
        run_idx: int = -1,
        typ: str | None = None,
        as_strings: Literal[False] = False,
        use_task_index: bool = False,
    ) -> Mapping[str, ParamSource | list[ParamSource]]: ...

    @overload
    def get_parameter_sources(
        self,
        path: str | None = None,
        *,
        action_idx: int | None = None,
        run_idx: int = -1,
        typ: str | None = None,
        as_strings: Literal[True],
        use_task_index: bool = False,
    ) -> Mapping[str, str]: ...

    def get_parameter_sources(
        self,
        path: str | None = None,
        *,
        action_idx: int | None = None,
        run_idx: int = -1,
        typ: str | None = None,
        as_strings: bool = False,
        use_task_index: bool = False,
    ) -> Mapping[str, str] | Mapping[str, ParamSource | list[ParamSource]]:
        """Get the parameter sources of the most recent element iteration.

        Parameters
        ----------
        use_task_index
            If True, use the task index within the workflow, rather than the task insert
            ID.
        """
        # branch so the overloaded delegate is called with a literal `as_strings`:
        if as_strings:
            return self.latest_iteration.get_parameter_sources(
                path=path,
                action_idx=action_idx,
                run_idx=run_idx,
                typ=typ,
                as_strings=True,
                use_task_index=use_task_index,
            )
        return self.latest_iteration.get_parameter_sources(
            path=path,
            action_idx=action_idx,
            run_idx=run_idx,
            typ=typ,
            use_task_index=use_task_index,
        )

    def get(
        self,
        path: str | None = None,
        action_idx: int | None = None,
        run_idx: int = -1,
        default: Any = None,
        raise_on_missing: bool = False,
        raise_on_unset: bool = False,
    ) -> Any:
        """Get element data of the most recent iteration that is not
        loop-skipped."""
        return self.latest_iteration_non_skipped.get(
            path=path,
            action_idx=action_idx,
            run_idx=run_idx,
            default=default,
            raise_on_missing=raise_on_missing,
            raise_on_unset=raise_on_unset,
        )

    @overload
    def get_EAR_dependencies(
        self, as_objects: Literal[True]
    ) -> list[ElementActionRun]: ...

    @overload
    def get_EAR_dependencies(self, as_objects: Literal[False] = False) -> set[int]: ...

    @TimeIt.decorator
    def get_EAR_dependencies(
        self, as_objects: bool = False
    ) -> set[int] | list[ElementActionRun]:
        """Get EARs that the most recent iteration of this element depends on."""
        if as_objects:
            return self.latest_iteration.get_EAR_dependencies(as_objects=True)
        return self.latest_iteration.get_EAR_dependencies()

    @overload
    def get_element_iteration_dependencies(
        self, as_objects: Literal[True]
    ) -> list[ElementIteration]: ...

    @overload
    def get_element_iteration_dependencies(
        self, as_objects: Literal[False] = False
    ) -> set[int]: ...

    def get_element_iteration_dependencies(
        self, as_objects: bool = False
    ) -> set[int] | list[ElementIteration]:
        """Get element iterations that the most recent iteration of this element depends
        on."""
        if as_objects:
            return self.latest_iteration.get_element_iteration_dependencies(
                as_objects=True
            )
        return self.latest_iteration.get_element_iteration_dependencies()

    @overload
    def get_element_dependencies(self, as_objects: Literal[True]) -> list[Element]: ...

    @overload
    def get_element_dependencies(
        self, as_objects: Literal[False] = False
    ) -> set[int]: ...

    def get_element_dependencies(
        self, as_objects: bool = False
    ) -> set[int] | list[Element]:
        """Get elements that the most recent iteration of this element depends on."""
        if as_objects:
            return self.latest_iteration.get_element_dependencies(as_objects=True)
        return self.latest_iteration.get_element_dependencies()

    def get_input_dependencies(self) -> Mapping[str, ParamSource]:
        """Get locally defined inputs/sequences/defaults from other tasks that this
        the most recent iteration of this element depends on."""
        return self.latest_iteration.get_input_dependencies()

    @overload
    def get_task_dependencies(self, as_objects: Literal[True]) -> list[WorkflowTask]: ...

    @overload
    def get_task_dependencies(self, as_objects: Literal[False] = False) -> set[int]: ...

    def get_task_dependencies(
        self, as_objects: bool = False
    ) -> set[int] | list[WorkflowTask]:
        """Get tasks (insert ID or WorkflowTask objects) that the most recent iteration of
        this element depends on.

        Dependencies may come from either elements from upstream tasks, or from locally
        defined inputs/sequences/defaults from upstream tasks."""
        if as_objects:
            return self.latest_iteration.get_task_dependencies(as_objects=True)
        return self.latest_iteration.get_task_dependencies()

    @overload
    def get_dependent_EARs(self, as_objects: Literal[True]) -> list[ElementActionRun]: ...

    @overload
    def get_dependent_EARs(self, as_objects: Literal[False] = False) -> set[int]: ...

    def get_dependent_EARs(
        self, as_objects: bool = False
    ) -> set[int] | list[ElementActionRun]:
        """Get EARs that depend on the most recent iteration of this element."""
        if as_objects:
            return self.latest_iteration.get_dependent_EARs(as_objects=True)
        return self.latest_iteration.get_dependent_EARs()

    @overload
    def get_dependent_element_iterations(
        self, as_objects: Literal[True]
    ) -> list[ElementIteration]: ...

    @overload
    def get_dependent_element_iterations(
        self, as_objects: Literal[False] = False
    ) -> set[int]: ...

    def get_dependent_element_iterations(
        self, as_objects: bool = False
    ) -> set[int] | list[ElementIteration]:
        """Get element iterations that depend on the most recent iteration of this
        element."""
        if as_objects:
            return self.latest_iteration.get_dependent_element_iterations(as_objects=True)
        return self.latest_iteration.get_dependent_element_iterations()

    @overload
    def get_dependent_elements(self, as_objects: Literal[True]) -> list[Element]: ...

    @overload
    def get_dependent_elements(self, as_objects: Literal[False] = False) -> set[int]: ...

    def get_dependent_elements(
        self, as_objects: bool = False
    ) -> set[int] | list[Element]:
        """Get elements that depend on the most recent iteration of this element."""
        if as_objects:
            return self.latest_iteration.get_dependent_elements(as_objects=True)
        return self.latest_iteration.get_dependent_elements()

    @overload
    def get_dependent_tasks(self, as_objects: Literal[True]) -> list[WorkflowTask]: ...

    @overload
    def get_dependent_tasks(self, as_objects: Literal[False] = False) -> set[int]: ...

    def get_dependent_tasks(
        self, as_objects: bool = False
    ) -> set[int] | list[WorkflowTask]:
        """Get tasks that depend on the most recent iteration of this element."""
        if as_objects:
            return self.latest_iteration.get_dependent_tasks(as_objects=True)
        return self.latest_iteration.get_dependent_tasks()

    @TimeIt.decorator
    def get_dependent_elements_recursively(
        self, task_insert_ID: int | None = None
    ) -> list[Element]:
        """Get downstream elements that depend on this element, including recursive
        dependencies.

        Dependencies are resolved using the initial iteration only. This method is used to
        identify from which element in the previous iteration a new iteration should be
        parametrised.

        Parameters
        ----------
        task_insert_ID: int
            If specified, only return elements from this task.

        """

        def get_deps(element: Element) -> set[int]:
            # direct dependents of the element's *initial* iteration, plus,
            # recursively, the dependents of those dependents:
            deps = element.iterations[0].get_dependent_elements()
            deps_objs = self.workflow.get_elements_from_IDs(deps)
            return deps.union(dep_j for deps_i in deps_objs for dep_j in get_deps(deps_i))

        all_deps = get_deps(self)
        if task_insert_ID is not None:
            # restrict to elements belonging to the requested task:
            all_deps.intersection_update(
                self.workflow.tasks.get(insert_ID=task_insert_ID).element_IDs
            )
        return self.workflow.get_elements_from_IDs(sorted(all_deps))
|
|
1919
|
+
|
|
1920
|
+
|
|
1921
|
+
@dataclass(repr=False, eq=False)
@hydrate
class ElementParameter:
    """
    A parameter to an :py:class:`.Element`.

    Parameters
    ----------
    task: ~hpcflow.app.WorkflowTask
        The task that this is part of.
    path: str
        The path to this parameter.
    parent: Element | ~hpcflow.app.ElementAction | ~hpcflow.app.ElementActionRun | ~hpcflow.app.Parameters
        The entity that owns this parameter.
    element: Element
        The element that this is a parameter of.
    """

    # Intended to be subclassed, so public
    #: Application context.
    app: ClassVar[BaseApp]
    _app_attr: ClassVar[str] = "app"

    #: The task that this is part of.
    task: WorkflowTask
    #: The path to this parameter.
    path: str
    #: The entity that owns this parameter.
    parent: Element | ElementAction | ElementActionRun | ElementIteration
    #: The element that this is a parameter of.
    element: Element | ElementIteration

    @property
    def data_idx(self) -> DataIndex:
        """
        The data indices associated with this parameter.
        """
        return self.parent.get_data_idx(path=self.path)

    @property
    def value(self) -> Any:
        """
        The value of this parameter.
        """
        return self.parent.get(path=self.path)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}(element={self.element!r}, path={self.path!r})"

    # NOTE(review): defining __eq__ without __hash__ makes instances unhashable
    # (Python sets __hash__ to None) — confirm this is intentional.
    def __eq__(self, __o: object) -> bool:
        if not isinstance(__o, self.__class__):
            return False
        return self.task == __o.task and self.path == __o.path

    @property
    def data_idx_is_set(self) -> Mapping[str, bool]:
        """
        The associated data indices for which this is set.
        """
        return {
            k: self.task.workflow.is_parameter_set(cast("int", v))
            for k, v in self.data_idx.items()
        }

    @property
    def is_set(self) -> bool:
        """
        Whether this parameter is set.
        """
        return all(self.data_idx_is_set.values())

    def get_size(self, **store_kwargs):
        """
        Get the size of the parameter.
        """
        # not yet implemented for any store backend:
        raise NotImplementedError
|
|
1997
|
+
|
|
1998
|
+
|
|
1999
|
+
@dataclass
@hydrate
class ElementFilter(JSONLike):
    """
    A filter for iterations.

    Parameters
    ----------
    rules: list[~hpcflow.app.Rule]
        The filtering rules to use.
    """

    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
        ChildObjectSpec(name="rules", is_multiple=True, class_name="Rule"),
    )

    #: The filtering rules to use.
    rules: list[Rule] = field(default_factory=list)

    def filter(self, element_iters: list[ElementIteration]) -> list[ElementIteration]:
        """
        Apply the filter rules to select a subsequence of iterations.
        """
        # an iteration is kept only if every rule accepts it:
        selected: list[ElementIteration] = []
        for candidate in element_iters:
            if all(rule.test(candidate) for rule in self.rules):
                selected.append(candidate)
        return selected
|
|
2027
|
+
|
|
2028
|
+
|
|
2029
|
+
@dataclass
class ElementGroup(JSONLike):
    """
    A grouping rule for element iterations.

    Parameters
    ----------
    name:
        The name of the grouping rule.
    where:
        A filtering rule to select which iterations to use in the group.
    group_by_distinct:
        If specified, the name of the property to group iterations by.
    """

    #: The name of the grouping rule.
    name: str
    #: A filtering rule to select which iterations to use in the group.
    where: ElementFilter | None = None
    #: If specified, the name of the property to group iterations by.
    group_by_distinct: ParameterPath | None = None

    def __post_init__(self):
        # group names are used as identifiers downstream, so validate eagerly:
        self.name = check_valid_py_identifier(self.name)
|
|
2053
|
+
|
|
2054
|
+
|
|
2055
|
+
@dataclass
class ElementRepeats:
    """
    A repetition rule.

    Parameters
    ----------
    number:
        The number of times to repeat.
    where:
        A filtering rule for what to repeat.
    """

    #: The number of times to repeat.
    number: int
    #: A filtering rule for what to repeat; if None, applies unconditionally.
    where: ElementFilter | None = None
|