hpcflow 0.1.15__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -461
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.15.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -490
- hpcflow/archive/archive.py +0 -307
- hpcflow/archive/cloud/cloud.py +0 -45
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -427
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -233
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2595
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -322
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -166
- hpcflow/variables.py +0 -543
- hpcflow-0.1.15.dist-info/METADATA +0 -168
- hpcflow-0.1.15.dist-info/RECORD +0 -45
- hpcflow-0.1.15.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.15.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
hpcflow/sdk/core/task_schema.py (new file)

@@ -0,0 +1,993 @@

"""
Abstract task, prior to instantiation.
"""

from __future__ import annotations
from contextlib import contextmanager
import copy
from dataclasses import dataclass
from importlib import import_module
from itertools import chain
from typing import TYPE_CHECKING
from html import escape

from rich import print as rich_print
from rich.table import Table
from rich.panel import Panel
from rich.markup import escape as rich_esc
from rich.text import Text

from hpcflow.sdk.typing import hydrate
from hpcflow.sdk.core.enums import ParameterPropagationMode
from hpcflow.sdk.core.errors import EnvironmentPresetUnknownEnvironmentError
from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
from hpcflow.sdk.core.parameters import Parameter
from hpcflow.sdk.core.utils import check_valid_py_identifier

if TYPE_CHECKING:
    from collections.abc import Iterable, Iterator, Mapping, Sequence
    from typing import Any, ClassVar
    from typing_extensions import Self, TypeIs
    from .actions import Action
    from .object_list import ParametersList, TaskSchemasList
    from .parameters import InputValue, SchemaInput, SchemaOutput, SchemaParameter
    from .task import TaskTemplate
    from .types import ActParameterDependence
    from .workflow import Workflow
    from ..typing import ParamSource


@dataclass
@hydrate
class TaskObjective(JSONLike):
    """
    A thing that a task is attempting to achieve.

    Parameter
    ---------
    name: str
        The name of the objective. A valid Python identifier.
    """

    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
        ChildObjectSpec(
            name="name",
            is_single_attribute=True,
        ),
    )

    #: The name of the objective. A valid Python identifier.
    name: str

    def __post_init__(self):
        self.name = check_valid_py_identifier(self.name)

    @classmethod
    def _parse_from_string(cls, string):
        return string


class TaskSchema(JSONLike):
    """Class to represent the inputs, outputs and implementation mechanism of a given
    task.

    Parameters
    ----------
    objective:
        This is a string representing the objective of the task schema.
    actions:
        A list of Action objects whose commands are to be executed by the task.
    method:
        An optional string to label the task schema by its method.
    implementation:
        An optional string to label the task schema by its implementation.
    inputs:
        A list of SchemaInput objects that define the inputs to the task.
    outputs:
        A list of SchemaOutput objects that define the outputs of the task.
    version:
        The version of this task schema.
    parameter_class_modules:
        Where to find implementations of parameter value handlers.
    web_doc:
        True if this object should be included in the Sphinx documentation
        (normally only relevant for built-in task schemas). True by default.
    environment_presets:
        Information about default execution environments. Can be overridden in specific
        cases in the concrete tasks.
    """

    _validation_schema: ClassVar[str] = "task_schema_spec_schema.yaml"
    _hash_value = None
    _validate_actions = True

    _child_objects: ClassVar[tuple[ChildObjectSpec, ...]] = (
        ChildObjectSpec(name="objective", class_name="TaskObjective"),
        ChildObjectSpec(
            name="inputs",
            class_name="SchemaInput",
            is_multiple=True,
            parent_ref="_task_schema",
        ),
        ChildObjectSpec(name="outputs", class_name="SchemaOutput", is_multiple=True),
        ChildObjectSpec(
            name="actions",
            class_name="Action",
            is_multiple=True,
            parent_ref="_task_schema",
        ),
    )

    @classmethod
    def __is_InputValue(cls, value) -> TypeIs[InputValue]:
        return isinstance(value, cls._app.InputValue)

    @classmethod
    def __is_Parameter(cls, value) -> TypeIs[Parameter]:
        return isinstance(value, cls._app.Parameter)

    @classmethod
    def __is_SchemaOutput(cls, value) -> TypeIs[SchemaOutput]:
        return isinstance(value, cls._app.SchemaOutput)

    def __init__(
        self,
        objective: TaskObjective | str,
        actions: list[Action] | None = None,
        method: str | None = None,
        implementation: str | None = None,
        inputs: list[Parameter | SchemaInput] | None = None,
        outputs: list[Parameter | SchemaParameter] | None = None,
        version: str | None = None,
        parameter_class_modules: list[str] | None = None,
        web_doc: bool | None = True,
        environment_presets: Mapping[str, Mapping[str, Mapping[str, Any]]] | None = None,
        doc: str = "",
        _hash_value: str | None = None,
    ):
        #: This is a string representing the objective of the task schema.
        self.objective = self.__coerce_objective(objective)
        #: A list of Action objects whose commands are to be executed by the task.
        self.actions = actions or []
        #: An optional string to label the task schema by its method.
        self.method = method
        #: An optional string to label the task schema by its implementation.
        self.implementation = implementation
        #: A list of SchemaInput objects that define the inputs to the task.
        self.inputs = self.__coerce_inputs(inputs or ())
        #: A list of SchemaOutput objects that define the outputs of the task.
        self.outputs = self.__coerce_outputs(outputs or ())
        #: Where to find implementations of parameter value handlers.
        self.parameter_class_modules = parameter_class_modules or []
        #: Whether this object should be included in the Sphinx documentation
        #: (normally only relevant for built-in task schemas).
        self.web_doc = web_doc
        #: Information about default execution environments.
        self.environment_presets = environment_presets
        #: Documentation information about the task schema.
        self.doc = doc
        self._hash_value = _hash_value

        self._set_parent_refs()

        # process `Action` script/program_data_in/out formats:
        for act in self.actions:
            act.process_action_data_formats()

        self._validate()
        self.actions = self.__expand_actions()
        #: The version of this task schema.
        self.version = version
        self._task_template: TaskTemplate | None = None  # assigned by parent Task

        self.__update_parameter_value_classes()

        if self.environment_presets:
            # validate against env names in actions:
            env_names = {act.get_environment_name() for act in self.actions}
            preset_envs = {
                preset_name
                for preset in self.environment_presets.values()
                for preset_name in preset
            }
            if bad_envs := preset_envs - env_names:
                raise EnvironmentPresetUnknownEnvironmentError(self.name, bad_envs)

        # if version is not None:  # TODO: this seems fragile
        #     self.assign_versions(
        #         version=version,
        #         app_data_obj_list=self._app.task_schemas
        #         if app.is_data_files_loaded
        #         else [],
        #     )

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.objective.name!r})"

    @classmethod
    def __parameters(cls) -> ParametersList:
        # Workaround for a dumb mypy bug
        return cls._app.parameters

    @classmethod
    def __task_schemas(cls) -> TaskSchemasList:
        # Workaround for a dumb mypy bug
        return cls._app.task_schemas

    def __get_param_type_str(self, param: Parameter) -> str:
        type_fmt = "-"
        if param._validation:
            try:
                type_fmt = param._validation.to_tree()[0]["type_fmt"]
            except Exception:
                pass
        elif param._value_class:
            param_cls = param._value_class
            cls_url = (
                f"{self._app.docs_url}/reference/_autosummary/{param_cls.__module__}."
                f"{param_cls.__name__}"
            )
            type_fmt = f"[link={cls_url}]{param_cls.__name__}[/link]"
        return type_fmt

    def __format_parameter_type(self, param: Parameter) -> str:
        param_typ_fmt = param.typ
        if param.typ in self.__parameters().list_attrs():
            param_url = (
                f"{self._app.docs_url}/reference/template_components/"
                f"parameters.html#{param.url_slug}"
            )
            param_typ_fmt = f"[link={param_url}]{param_typ_fmt}[/link]"
        return param_typ_fmt

    def __get_info(self, include: Sequence[str] = ()):
        if not include:
            include = ("inputs", "outputs", "actions")

        tab = Table(show_header=False, box=None, padding=(0, 0), collapse_padding=True)
        tab.add_column(justify="right")
        tab.add_column()

        tab_ins_outs: Table | None = None
        if "inputs" in include or "outputs" in include:
            tab_ins_outs = Table(
                show_header=False,
                box=None,
                padding=(0, 1),
            )

            tab_ins_outs.add_column(justify="left")  # row heading ("Inputs" or "Outputs")
            tab_ins_outs.add_column()  # parameter name
            tab_ins_outs.add_column()  # type if available
            tab_ins_outs.add_column()  # default value (inputs only)
            tab_ins_outs.add_row()

        if "inputs" in include:
            assert tab_ins_outs
            if self.inputs:
                tab_ins_outs.add_row(
                    "",
                    Text("parameter", style="italic grey50"),
                    Text("type", style="italic grey50"),
                    Text("default", style="italic grey50"),
                )
            for inp_idx, inp in enumerate(self.inputs):
                def_str = "-"
                if not inp.multiple:
                    if self.__is_InputValue(inp.default_value):
                        if inp.default_value.value is None:
                            def_str = "None"
                        else:
                            def_str = f"{rich_esc(str(inp.default_value.value))!r}"
                tab_ins_outs.add_row(
                    "" if inp_idx > 0 else "[bold]Inputs[/bold]",
                    self.__format_parameter_type(inp.parameter),
                    self.__get_param_type_str(inp.parameter),
                    def_str,
                )

        if "outputs" in include:
            assert tab_ins_outs
            if "inputs" in include:
                tab_ins_outs.add_row()  # for spacing
            else:
                tab_ins_outs.add_row(
                    "",
                    Text("parameter", style="italic grey50"),
                    Text("type", style="italic grey50"),
                    "",
                )
            for out_idx, out in enumerate(self.outputs):
                tab_ins_outs.add_row(
                    "" if out_idx > 0 else "[bold]Outputs[/bold]",
                    self.__format_parameter_type(out.parameter),
                    self.__get_param_type_str(out.parameter),
                    "",
                )

        if tab_ins_outs:
            tab.add_row(tab_ins_outs)

        if "actions" in include:
            tab_acts = Table(
                show_header=False, box=None, padding=(1, 1), collapse_padding=True
            )
            tab_acts.add_column()
            tab_acts.add_row("[bold]Actions[/bold]")
            for act in self.actions:
                tab_cmds_i = Table(show_header=False, box=None)
                tab_cmds_i.add_column(justify="right")
                tab_cmds_i.add_column()
                if act.rules:
                    seen_rules = []  # bug: some rules seem to be repeated
                    for act_rule_j in act.rules:
                        if act_rule_j.rule in seen_rules:
                            continue
                        else:
                            seen_rules.append(act_rule_j.rule)
                        r_path = ""
                        if act_rule_j.rule.check_missing:
                            r_cond = f"check missing: {act_rule_j.rule.check_missing}"
                        elif act_rule_j.rule.check_exists:
                            r_cond = f"check exists: {act_rule_j.rule.check_exists}"
                        elif act_rule_j.rule.condition:
                            r_path = f"{act_rule_j.rule.path}: "
                            r_cond = str(act_rule_j.rule.condition.to_json_like())
                        else:
                            continue
                        tab_cmds_i.add_row(
                            "[italic]rule:[/italic]",
                            rich_esc(f"{r_path}{r_cond}"),
                        )
                tab_cmds_i.add_row(
                    "[italic]scope:[/italic]",
                    rich_esc(act.get_precise_scope().to_string()),
                )
                for cmd in act.commands:
                    cmd_str = "cmd" if cmd.command else "exe"
                    tab_cmds_i.add_row(
                        f"[italic]{cmd_str}:[/italic]",
                        rich_esc(cmd.command or cmd.executable or ""),
                    )
                    if cmd.stdout:
                        tab_cmds_i.add_row(
                            "[italic]out:[/italic]",
                            rich_esc(cmd.stdout),
                        )
                    if cmd.stderr:
                        tab_cmds_i.add_row(
                            "[italic]err:[/italic]",
                            rich_esc(cmd.stderr),
                        )

                tab_acts.add_row(tab_cmds_i)
            tab.add_row(tab_acts)
        else:
            tab.add_row()

        panel = Panel(tab, title=f"Task schema: {rich_esc(self.objective.name)!r}")
        return panel

    def basic_info(self) -> None:
        """Show inputs and outputs, formatted in a table."""
        rich_print(self.__get_info(include=("inputs", "outputs")))

    def info(self) -> None:
        """Show inputs, outputs, and actions, formatted in a table."""
        rich_print(self.__get_info(include=()))

    def get_info_html(self) -> str:
        """
        Describe the task schema as an HTML document.
        """

        def _format_parameter_type(param: Parameter) -> str:
            param_typ_fmt = param.typ
            if param.typ in param_types:
                param_url = (
                    f"{self._app.docs_url}/reference/template_components/"
                    f"parameters.html#{param.url_slug}"
                )
                param_typ_fmt = f'<a href="{param_url}">{param_typ_fmt}</a>'
            return param_typ_fmt

        def _get_param_type_str(param: Parameter) -> str:
            type_fmt = "-"
            if param._validation:
                try:
                    type_fmt = param._validation.to_tree()[0]["type_fmt"]
                except Exception:
                    pass
            elif param._value_class:
                param_cls = param._value_class
                cls_url = (
                    f"{self._app.docs_url}/reference/_autosummary/{param_cls.__module__}."
                    f"{param_cls.__name__}"
                )
                type_fmt = f'<a href="{cls_url}">{param_cls.__name__}</a>'
            return type_fmt

        def _prepare_script_data_format_table(
            script_data_grouped: Mapping[str, Mapping[str, Mapping[str, str]]],
        ) -> str:
            out = ""
            rows = ""
            for fmt, params in script_data_grouped.items():
                params_rows = "</tr><tr>".join(
                    f"<td><code>{k}</code></td><td><code>{v if v else ''}</code></td>"
                    for k, v in params.items()
                )
                rows += f'<tr><td rowspan="{len(params)}"><code>{fmt!r}</code></td>{params_rows}</tr>'
            if rows:
                out = f'<table class="script-data-format-table">{rows}</table>'

            return out

        param_types = self.__parameters().list_attrs()

        inputs_header_row = "<tr><th>parameter</th><th>type</th><th>default</th></tr>"
        input_rows = ""
        for inp in self.inputs:
            def_str = "-"
            if not inp.multiple:
                if self.__is_InputValue(inp.default_value):
                    if inp.default_value.value is None:
                        def_str = "None"
                    else:
                        def_str = f"{rich_esc(str(inp.default_value.value))!r}"

            param_str = _format_parameter_type(inp.parameter)
            type_str = _get_param_type_str(inp.parameter)
            input_rows += (
                f"<tr>"
                f"<td>{param_str}</td>"
                f"<td>{type_str}</td>"
                f"<td>{def_str}</td>"
                f"</tr>"
            )

        if input_rows:
            inputs_table = (
                f'<table class="schema-inputs-table">'
                f"{inputs_header_row}{input_rows}</table>"
            )
        else:
            inputs_table = (
                '<span class="schema-note-no-inputs">This task schema has no input '
                "parameters.</span>"
            )

        outputs_header_row = "<tr><th>parameter</th><th>type</th></tr>"
        output_rows = ""
        for out in self.outputs:
            param_str = _format_parameter_type(out.parameter)
            type_str = _get_param_type_str(out.parameter)
            output_rows += f"<tr>" f"<td>{param_str}</td>" f"<td>{type_str}</td>" f"</tr>"

        if output_rows:
            outputs_table = (
                f'<table class="schema-inputs-table">{outputs_header_row}{output_rows}'
                f"</table>"
            )

        else:
            outputs_table = (
                '<span class="schema-note-no-outputs">This task schema has no output '
                "parameters.</span>"
            )

        action_rows = ""
        for act_idx, act in enumerate(self.actions):
            act_i_rules = ""
            if act.rules:
                seen_rules = []  # bug: some rules seem to be repeated
                for act_rule_j in act.rules:
                    if act_rule_j.rule in seen_rules:
                        continue
                    else:
                        seen_rules.append(act_rule_j.rule)
                    r_path = ""
                    if act_rule_j.rule.check_missing:
                        r_cond = f"check missing: {act_rule_j.rule.check_missing!r}"
                    elif act_rule_j.rule.check_exists:
                        r_cond = f"check exists: {act_rule_j.rule.check_exists!r}"
                    elif act_rule_j.rule.condition:
                        r_path = f"{act_rule_j.rule.path}: "
                        r_cond = str(act_rule_j.rule.condition.to_json_like())
                    else:
                        continue
                    act_i_rules += f"<div><code>{r_path}{r_cond}</code></div>"

            act_i_script_rows = ""
            num_script_rows = 0
            if act.script:
                act_i_script_rows += (
                    f'<tr><td class="action-header-cell">script:</td>'
                    f"<td><code>{escape(act.script)}</code></td></tr>"
                )
                num_script_rows += 1
            if act.script_exe:
                act_i_script_rows += (
                    f'<tr><td class="action-header-cell">script exe:</td>'
                    f"<td><code>{escape(act.script_exe)}</code></td></tr>"
                )
                num_script_rows += 1
            if act.script_data_in_grouped:
                act_i_script_rows += (
                    f'<tr><td class="action-header-cell">script data-in:</td>'
                    f"<td>{_prepare_script_data_format_table(act.script_data_in_grouped)}"
                    f"</td></tr>"
                )
                num_script_rows += 1
            if act.script_data_out_grouped:
                act_i_script_rows += (
                    f'<tr><td class="action-header-cell">script data-out:</td>'
                    f"<td>{_prepare_script_data_format_table(act.script_data_out_grouped)}"
                    f"</td></tr>"
                )
                num_script_rows += 1

            inp_fg_rows = ""
            num_inp_fg_rows = 0
            if act.input_file_generators:
                inp_fg = act.input_file_generators[0]  # should be only one
                inps = ", ".join(f"<code>{in_.typ}</code>" for in_ in inp_fg.inputs)
                inp_fg_rows += (
                    f"<tr>"
                    f'<td class="action-header-cell">input file:</td>'
                    f"<td><code>{inp_fg.input_file.label}</code></td>"
                    f"</tr>"
                    f"<tr>"
                    f'<td class="action-header-cell">inputs:</td>'
                    f"<td>{inps}</td>"
                    f"</tr>"
                )
                num_inp_fg_rows += 2

            out_fp_rows = ""
            num_out_fp_rows = 0
            if act.output_file_parsers:
                out_fp = act.output_file_parsers[0]  # should be only one
                files = ", ".join(
                    f"<code>{of_.label}</code>" for of_ in out_fp.output_files
                )
                out_fp_rows += (
                    f"<tr>"
                    f'<td class="action-header-cell">output:</td>'
                    f"<td><code>{out_fp.output.typ if out_fp.output else ''}</code></td>"
                    f"</tr>"
                    f"<tr>"
                    f'<td class="action-header-cell">output files:</td>'
                    f"<td>{files}</td>"
                    f"</tr>"
                )
                num_out_fp_rows += 2

            act_i_cmds_tab_rows = ""
            for cmd_idx, cmd in enumerate(act.commands):
                cmd_j_tab_rows = (
                    f'<tr><td colspan="3" class="commands-table-top-spacer-cell"></td>'
                    f"</tr><tr>"
                    f'<td rowspan="{bool(cmd.stdout) + bool(cmd.stderr) + 1}">'
                    f'<span class="cmd-idx-numeral">{cmd_idx}</span></td>'
                    f'<td class="command-header-cell">{"cmd" if cmd.command else "exe"}:'
                    f"</td><td><code><pre>{escape(cmd.command or cmd.executable or '')}</pre>"
                    f"</code></td></tr>"
                )
                if cmd.stdout:
                    cmd_j_tab_rows += (
                        f'<tr><td class="command-header-cell">out:</td>'
                        f"<td><code>{escape(cmd.stdout)}</code></td></tr>"
                    )
                if cmd.stderr:
                    cmd_j_tab_rows += (
                        f'<tr><td class="command-header-cell">err:</td>'
                        f"<td><code>{escape(cmd.stderr)}</code></td></tr>"
                    )
                if cmd_idx < len(act.commands) - 1:
                    cmd_j_tab_rows += (
                        '<tr><td colspan="3" class="commands-table-bottom-spacer-cell">'
                        "</td></tr>"
                    )
                act_i_cmds_tab_rows += cmd_j_tab_rows

            act_i_cmds_tab = (
                f'<table class="actions-commands-table">{act_i_cmds_tab_rows}</table>'
            )

            idx_rowspan = 4 + num_script_rows + num_inp_fg_rows + num_out_fp_rows
            action_rows += (
                f'<tr><td colspan="3" class="action-table-top-spacer-cell"></td></tr>'
                f'<tr><td rowspan="{idx_rowspan}" class="act-idx-cell">'
                f'<span class="act-idx-numeral">{act_idx}</span></td>'
                f'<td class="action-header-cell">rules:</td><td>{act_i_rules or "-"}</td>'
                f'</tr><tr><td class="action-header-cell">scope:</td>'
                f"<td><code>{act.get_precise_scope().to_string()}</code></td></tr>"
                f'<tr><td class="action-header-cell">environment:</td>'
                f"<td><code>{act.get_environment_name()}</code></td></tr>"
                f"{inp_fg_rows}"
                f"{out_fp_rows}"
                f"{act_i_script_rows}"
                f'<tr class="action-commands-row">'
                f'<td class="action-header-cell" colspan="2">'
                f"commands:{act_i_cmds_tab}</td></tr>"
                f'<tr><td colspan="3" class="action-table-bottom-spacer-cell"></td></tr>'
            )

        if action_rows:
            action_table = f'<table class="action-table hidden">{action_rows}</table>'
            action_show_hide = (
                '<span class="actions-show-hide-toggle">[<span class="action-show-text">'
                'show ↓</span><span class="action-hide-text hidden">hide ↑</span>]'
                "</span>"
            )
            act_heading_class = ' class="actions-heading"'
        else:
            action_table = (
                '<span class="schema-note-no-actions">'
                "This task schema has no actions.</span>"
            )
            action_show_hide = ""
            act_heading_class = ""
        description = (
            f"<h3 class='task-desc'>Description</h3>{self.doc}" if self.doc else ""
        )
        return (
            f"{description}"
            f"<h3>Inputs</h3>{inputs_table}"
            f"<h3>Outputs</h3>{outputs_table}"
            # f"<h3>Examples</h3>examples here..."  # TODO:
            f"<h3{act_heading_class}>Actions{action_show_hide}</h3>"
            f"{action_table}"
        )

    def __eq__(self, other: Any):
        if id(self) == id(other):
            return True
        if not isinstance(other, self.__class__):
            return False
        return (
            self.objective == other.objective
            and self.actions == other.actions
            and self.method == other.method
            and self.implementation == other.implementation
            and self.inputs == other.inputs
            and self.outputs == other.outputs
            and self.version == other.version
            and self._hash_value == other._hash_value
        )

    def __deepcopy__(self, memo: dict[int, Any]) -> Self:
        kwargs = self.to_dict()
        obj = self.__class__(**copy.deepcopy(kwargs, memo))
        obj._task_template = self._task_template
        return obj

    @classmethod
    @contextmanager
    def ignore_invalid_actions(cls) -> Iterator[None]:
        """
        A context manager within which invalid actions will be ignored.
        """
        try:
            cls._validate_actions = False
            yield
        finally:
            cls._validate_actions = True

    @classmethod
    def __coerce_objective(cls, objective: TaskObjective | str) -> TaskObjective:
        if isinstance(objective, str):
            return cls._app.TaskObjective(objective)
        else:
            return objective

    @classmethod
    def __coerce_one_input(cls, inp: Parameter | SchemaInput) -> SchemaInput:
        return cls._app.SchemaInput(inp) if cls.__is_Parameter(inp) else inp

    @classmethod
    def __coerce_inputs(
        cls, inputs: Iterable[Parameter | SchemaInput]
    ) -> list[SchemaInput]:
        """coerce Parameters to SchemaInputs"""
        return [cls.__coerce_one_input(inp) for inp in inputs]

    @classmethod
    def __coerce_one_output(cls, out: Parameter | SchemaParameter) -> SchemaOutput:
        return (
            out
            if cls.__is_SchemaOutput(out)
            else cls._app.SchemaOutput(out if cls.__is_Parameter(out) else out.parameter)
        )

    @classmethod
    def __coerce_outputs(
        cls, outputs: Iterable[Parameter | SchemaParameter]
    ) -> list[SchemaOutput]:
        """coerce Parameters to SchemaOutputs"""
        return [cls.__coerce_one_output(out) for out in outputs]

    def _validate(self) -> None:
        if self.method:
            self.method = check_valid_py_identifier(self.method)
        if self.implementation:
            self.implementation = check_valid_py_identifier(self.implementation)

        # check action input/outputs
        if self._validate_actions:
            has_script = any(
                act.script
                and not act.input_file_generators
                and not act.output_file_parsers
                for act in self.actions
            )
            has_program = any(act.has_program for act in self.actions)

            all_outs: set[str] = set()
            extra_ins = set(self.input_types)

            act_ins_lst = [act.get_input_types() for act in self.actions]
            act_outs_lst = [act.get_output_types() for act in self.actions]

            schema_outs = set(self.output_types)

            all_act_ins = set(chain.from_iterable(act_ins_lst))
            all_act_outs = set(chain.from_iterable(act_outs_lst))

            non_schema_act_ins = all_act_ins.difference(self.input_types)
            non_schema_act_outs = all_act_outs.difference(schema_outs)

            extra_act_outs = non_schema_act_outs
            seen_act_outs: set[str] = set()
            for act_idx in range(len(self.actions)):
                for act_in in act_ins_lst[act_idx]:
                    if act_in in non_schema_act_ins and act_in not in seen_act_outs:
                        raise ValueError(
                            f"Action {act_idx} input {act_in!r} of schema {self.name!r} "
                            f"is not a schema input, but nor is it an action output from "
                            f"a preceding action."
                        )
                seen_act_outs.update(act_outs_lst[act_idx])
                extra_act_outs.difference_update(act_ins_lst[act_idx])
                extra_ins.difference_update(act_ins_lst[act_idx])
                all_outs.update(act_outs_lst[act_idx])

            if extra_act_outs:
                raise ValueError(
                    f"The following action outputs of schema {self.name!r} are not schema"
                    f" outputs, but nor are they consumed by subsequent actions as "
                    f"action inputs: {tuple(extra_act_outs)!r}."
                )

            if extra_ins and not (has_script or has_program):
                # TODO: bit of a hack, need to consider script/program ins/outs later
                # i.e. are all schema inputs "consumed" by an action?

                # consider OFP inputs:
                for act in self.actions:
                    for ofp in act.output_file_parsers:
                        extra_ins.difference_update(ofp.inputs or ())

                if self.actions and extra_ins:
                    # allow for no actions (e.g. defining inputs for downstream tasks)
                    raise ValueError(
                        f"Schema {self.name!r} inputs {tuple(extra_ins)!r} are not used "
                        f"by any actions."
                    )

            missing_outs = schema_outs - all_outs
            if missing_outs and not (has_script or has_program):
                # TODO: bit of a hack, need to consider script/program ins/outs later
                raise ValueError(
                    f"Schema {self.name!r} outputs {tuple(missing_outs)!r} are not "
                    f"generated by any actions."
                )

    def __expand_actions(self) -> list[Action]:
        """Create new actions for input file generators and output parsers in existing
        actions."""
        return [new_act for act in self.actions for new_act in act.expand()]

    def __update_parameter_value_classes(self):
        # ensure any referenced parameter_class_modules are imported:
        for module in self.parameter_class_modules:
            import_module(module)

        # TODO: support specifying file paths in addition to (instead of?) importable
        # module paths

        for inp in self.inputs:
            inp.parameter._set_value_class()

        for out in self.outputs:
            out.parameter._set_value_class()

    def make_persistent(
        self, workflow: Workflow, source: ParamSource
    ) -> list[int | list[int]]:
        """
        Convert this task schema to persistent form within the context of the given
        workflow.
        """
        new_refs: list[int | list[int]] = []
        for input_i in self.inputs:
            for lab_info in input_i.labelled_info():
                if "default_value" in lab_info:
                    _, dat_ref, is_new = lab_info["default_value"].make_persistent(
                        workflow, source
                    )
                    new_refs.extend(dat_ref) if is_new else None
        return new_refs

    @property
    def name(self) -> str:
        """
        The name of this schema.
        """
        return (
            f"{self.objective.name}"
            f"{f'_{self.method}' if self.method else ''}"
            f"{f'_{self.implementation}' if self.implementation else ''}"
        )

    @property
    def input_types(self) -> list[str]:
        """
        The input types to the schema.
        """
        return [typ for inp in self.inputs for typ in inp.all_labelled_types]

    @property
    def input_type_labels_map(self) -> dict[str, tuple[str, ...]]:
        """
        A map between input types and their associated labelled types.
        """
        return {inp.typ: tuple(inp.all_labelled_types) for inp in self.inputs}

    @property
    def output_types(self) -> list[str]:
        """
        The output types from the schema.
        """
        return [out.typ for out in self.outputs]

    @property
    def provides_parameters(self) -> Iterator[tuple[str, str]]:
        """
        The parameters that this schema provides.
        """
        for schema_inp in self.inputs:
            for label, prop_mode in schema_inp._simple_labelled_info:
                if prop_mode is not ParameterPropagationMode.NEVER:
                    yield (schema_inp.input_or_output, label)
        for schema_out in self.outputs:
            if schema_out.propagation_mode is not ParameterPropagationMode.NEVER:
                yield (schema_out.input_or_output, schema_out.typ)

    @property
    def task_template(self) -> TaskTemplate | None:
        """
        The template that this schema is contained in.
        """
        return self._task_template

    @classmethod
    def get_by_key(cls, key: str) -> TaskSchema:
        """Get a config-loaded task schema from a key."""
        return cls.__task_schemas().get(key)

    def get_parameter_dependence(
        self, parameter: SchemaParameter
    ) -> ActParameterDependence:
        """Find if/where a given parameter is used by the schema's actions."""
        out: ActParameterDependence = {"input_file_writers": [], "commands": []}
        for act_idx, action in enumerate(self.actions):
            deps = action.get_parameter_dependence(parameter)
            out["input_file_writers"].extend(
                (act_idx, ifw) for ifw in deps["input_file_writers"]
            )
            out["commands"].extend((act_idx, cmd) for cmd in deps["commands"])
        return out

    def get_key(self) -> tuple:
        """
        Get the hashable value that represents this schema.
        """
        return (str(self.objective), self.method, self.implementation)

    def _get_single_label_lookup(self, prefix: str = "") -> Mapping[str, str]:
        """
        Get a mapping between schema input types that have a single label (i.e.
        labelled but with `multiple=False`) and the non-labelled type string.

        For example, if a task schema has a schema input like:
        `SchemaInput(parameter="p1", labels={"one": {}}, multiple=False)`, this method
        would return a dict that includes: `{"p1[one]": "p1"}`. If the `prefix` argument
        is provided, this will be added to map key and value (and a terminating period
        will be added to the end of the prefix if it does not already end in one). For
        example, with `prefix="inputs"`, this method might return:
        `{"inputs.p1[one]": "inputs.p1"}`.

        """
        lookup: dict[str, str] = {}
        if prefix and not prefix.endswith("."):
            prefix += "."
        for sch_inp in self.inputs:
            if not sch_inp.multiple and sch_inp.single_label:
                labelled_type = sch_inp.single_labelled_type
                lookup[f"{prefix}{labelled_type}"] = f"{prefix}{sch_inp.typ}"
        return lookup

    @property
    def multi_input_types(self) -> list[str]:
        """Get a list of input types that have multiple labels."""
        return [inp.parameter.typ for inp in self.inputs if inp.multiple]


class MetaTaskSchema(TaskSchema):
    """Class to represent a task schema with no actions, that can be used to represent the
    effect of multiple task schemas.

    Parameters
    ----------
    objective:
        This is a string representing the objective of the task schema.
    method:
        An optional string to label the task schema by its method.
    implementation:
        An optional string to label the task schema by its implementation.
    inputs:
        A list of SchemaInput objects that define the inputs to the task.
    outputs:
        A list of SchemaOutput objects that define the outputs of the task.
    version:
        The version of this task schema.
    web_doc:
        True if this object should be included in the Sphinx documentation
        (normally only relevant for built-in task schemas). True by default.
    environment_presets:
        Information about default execution environments. Can be overridden in specific
        cases in the concrete tasks.
    """

    _validation_schema: ClassVar[str] = "task_schema_spec_schema.yaml"
    _hash_value = None
    _validate_actions = False

    _child_objects = (
        ChildObjectSpec(name="objective", class_name="TaskObjective"),
        ChildObjectSpec(
            name="inputs",
            class_name="SchemaInput",
            is_multiple=True,
            parent_ref="_task_schema",
        ),
        ChildObjectSpec(name="outputs", class_name="SchemaOutput", is_multiple=True),
    )

    def __init__(
        self,
        objective: TaskObjective | str,
        method: str | None = None,
        implementation: str | None = None,
        inputs: list[Parameter | SchemaInput] | None = None,
        outputs: list[Parameter | SchemaParameter] | None = None,
        version: str | None = None,
        web_doc: bool | None = True,
        environment_presets: Mapping[str, Mapping[str, Mapping[str, Any]]] | None = None,
        doc: str = "",
        _hash_value: str | None = None,
    ):
        super().__init__(
            objective=objective,
            method=method,
            implementation=implementation,
            inputs=inputs,
            outputs=outputs,
            version=version,
            web_doc=web_doc,
            environment_presets=environment_presets,
            doc=doc,
            _hash_value=_hash_value,
        )