hpcflow 0.1.15__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -461
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.15.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -490
- hpcflow/archive/archive.py +0 -307
- hpcflow/archive/cloud/cloud.py +0 -45
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -427
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -233
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2595
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -322
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -166
- hpcflow/variables.py +0 -543
- hpcflow-0.1.15.dist-info/METADATA +0 -168
- hpcflow-0.1.15.dist-info/RECORD +0 -45
- hpcflow-0.1.15.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.15.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
hpcflow/sdk/app.py
ADDED
@@ -0,0 +1,4266 @@

"""An hpcflow application."""

from __future__ import annotations

from collections import Counter
from contextlib import AbstractContextManager, nullcontext
from datetime import datetime, timezone
import enum
import json
import shutil
from functools import wraps
from importlib import resources, import_module
import os
from contextlib import contextmanager
from pathlib import Path
import sys
from tempfile import TemporaryDirectory
from typing import Any, TypeVar, Generic, cast, TYPE_CHECKING, Literal
import warnings
import zipfile
from platformdirs import user_cache_path, user_data_dir
import requests
from reretry import retry  # type: ignore
from rich.console import Console, Group
from rich.syntax import Syntax
from rich.table import Table, box
from rich.text import Text
from rich.padding import Padding
from rich.panel import Panel
from rich import print as rich_print
from fsspec.core import url_to_fs  # type: ignore
from fsspec.implementations.local import LocalFileSystem  # type: ignore

from hpcflow import __version__
from hpcflow.sdk.core.enums import EARStatus
from hpcflow.sdk.core.utils import (
    read_YAML_str,
    read_YAML_file,
    read_JSON_file,
    write_YAML_file,
    write_JSON_file,
    redirect_std_to_file as redirect_std_to_file_hpcflow,
    parse_timestamp,
    get_file_context,
    open_text_resource,
)
from hpcflow.sdk import sdk_classes, sdk_funcs, get_SDK_logger
from hpcflow.sdk.config import Config, ConfigFile
from hpcflow.sdk.core import ALL_TEMPLATE_FORMATS
from .core.workflow import Workflow as _Workflow
from hpcflow.sdk.log import AppLog, TimeIt
from hpcflow.sdk.persistence.defaults import DEFAULT_STORE_FORMAT
from hpcflow.sdk.persistence.base import TEMPLATE_COMP_TYPES
from hpcflow.sdk.runtime import RunTimeInfo
from hpcflow.sdk.cli import make_cli
from hpcflow.sdk.submission.enums import JobscriptElementState
from hpcflow.sdk.submission.shells import get_shell
from hpcflow.sdk.submission.shells.os_version import (
    get_OS_info_POSIX,
    get_OS_info_windows,
)

if TYPE_CHECKING:
    from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
    from logging import Logger
    from types import ModuleType
    from typing import ClassVar, Literal, Protocol
    from typing_extensions import Final
    from rich.status import Status
    from .typing import (
        BasicTemplateComponents,
        KnownSubmission,
        KnownSubmissionItem,
        PathLike,
        TemplateComponents,
        MakeWorkflowCommonArgs,
    )
    from .config.config import ConfigOptions
    from .core.actions import (
        ElementActionRun,
        ElementAction,
        ActionEnvironment,
        Action,
        ActionScope,
        ActionRule,
    )
    from .core.command_files import (
        FileSpec,
        FileNameSpec,
        InputFileGenerator,
        FileNameStem,
        FileNameExt,
        OutputFileParser,
    )
    from .core.commands import Command
    from .core.element import (
        ElementInputs,
        ElementOutputs,
        ElementInputFiles,
        ElementOutputFiles,
        ElementIteration,
        Element,
        ElementParameter,
        ElementResources,
        ElementFilter,
        ElementGroup,
    )
    from .core.enums import ActionScopeType, InputSourceType, TaskSourceType
    from .core.environment import (
        NumCores,
        Environment,
        Executable as _Executable,
        ExecutableInstance,
    )
    from .core.loop import Loop, WorkflowLoop
    from .core.object_list import (
        CommandFilesList as _CommandFilesList,
        EnvironmentsList as _EnvironmentsList,
        ExecutablesList,
        GroupList,
        ParametersList as _ParametersList,
        ResourceList,
        TaskList,
        TaskSchemasList as _TaskSchemasList,
        TaskTemplateList,
        WorkflowLoopList,
        WorkflowTaskList,
    )
    from .core.parameters import (
        SchemaParameter,
        InputValue,
        Parameter,
        ParameterValue,
        InputSource,
        ResourceSpec,
        SchemaOutput,
        ValueSequence,
        MultiPathSequence,
        SchemaInput,
    )
    from .core.rule import Rule
    from .core.run_dir_files import RunDirAppFiles
    from .core.task import (
        Task,
        WorkflowTask,
        Parameters,
        TaskInputParameters,
        TaskOutputParameters,
        ElementPropagation,
        ElementSet,
    )
    from .core.task_schema import TaskSchema, TaskObjective
    from .core.workflow import WorkflowTemplate as _WorkflowTemplate
    from .submission.jobscript import Jobscript
    from .submission.submission import Submission as _Submission  # TODO: why?
    from .submission.schedulers import Scheduler, QueuedScheduler
    from .submission.schedulers.direct import DirectPosix, DirectWindows
    from .submission.schedulers.sge import SGEPosix
    from .submission.schedulers.slurm import SlurmPosix
    from .submission.shells.base import VersionInfo

    # Complex types for SDK functions
    class _MakeWorkflow(Protocol):
        """Type of :py:meth:`BaseApp.make_workflow`"""

        def __call__(
            self,
            template_file_or_str: PathLike | str,
            is_string: bool = False,
            template_format: Literal["json", "yaml"] | None = None,
            path: PathLike = None,
            name: str | None = None,
            name_add_timestamp: bool | None = None,
            name_use_dir: bool | None = None,
            overwrite: bool = False,
            store: str = DEFAULT_STORE_FORMAT,
            ts_fmt: str | None = None,
            ts_name_fmt: str | None = None,
            store_kwargs: dict[str, Any] | None = None,
            variables: dict[str, str] | None = None,
            status: bool = True,
            add_submission: bool = False,
        ) -> _Workflow | _Submission | None: ...

    class _MakeDemoWorkflow(Protocol):
        """Type of :py:meth:`BaseApp.make_demo_workflow`"""

        def __call__(
            self,
            workflow_name: str,
            template_format: Literal["json", "yaml"] | None = None,
            path: PathLike | None = None,
            name: str | None = None,
            name_add_timestamp: bool | None = None,
            name_use_dir: bool | None = None,
            overwrite: bool = False,
            store: str = DEFAULT_STORE_FORMAT,
            ts_fmt: str | None = None,
            ts_name_fmt: str | None = None,
            store_kwargs: dict[str, Any] | None = None,
            variables: dict[str, str] | None = None,
            status: bool = True,
            add_submission: bool = False,
        ) -> _Workflow | _Submission | None: ...

    class _MakeAndSubmitWorkflow(Protocol):
        """Type of :py:meth:`BaseApp.make_and_submit_workflow`"""

        # Should be overloaded on return_idx, but not bothering
        def __call__(
            self,
            template_file_or_str: PathLike | str,
            is_string: bool = False,
            template_format: Literal["json", "yaml"] | None = None,
            path: PathLike | None = None,
            name: str | None = None,
            name_add_timestamp: bool | None = None,
            name_use_dir: bool | None = None,
            overwrite: bool = False,
            store: str = DEFAULT_STORE_FORMAT,
            ts_fmt: str | None = None,
            ts_name_fmt: str | None = None,
            store_kwargs: dict[str, Any] | None = None,
            variables: dict[str, str] | None = None,
            JS_parallelism: bool | None = None,
            wait: bool = False,
            add_to_known: bool = True,
            return_idx: bool = False,
            tasks: list[int] | None = None,
            cancel: bool = False,
            status: bool = True,
        ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow: ...

    class _MakeAndSubmitDemoWorkflow(Protocol):
        """Type of :py:meth:`BaseApp.make_and_submit_demo_workflow`"""

        # Should be overloaded on return_idx, but not bothering
        def __call__(
            self,
            workflow_name: str,
            template_format: Literal["json", "yaml"] | None = None,
            path: PathLike | None = None,
            name: str | None = None,
            name_add_timestamp: bool | None = None,
            name_use_dir: bool | None = None,
            overwrite: bool = False,
            store: str = DEFAULT_STORE_FORMAT,
            ts_fmt: str | None = None,
            ts_name_fmt: str | None = None,
            store_kwargs: dict[str, Any] | None = None,
            variables: dict[str, str] | None = None,
            JS_parallelism: bool | None = None,
            wait: bool = False,
            add_to_known: bool = True,
            return_idx: bool = False,
            tasks: list[int] | None = None,
            cancel: bool = False,
            status: bool = True,
        ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow: ...

    class _SubmitWorkflow(Protocol):
        """Type of :py:meth:`BaseApp.submit_workflow`"""

        # Should be overloaded on return_idx, but not bothering
        def __call__(
            self,
            workflow_path: PathLike,
            JS_parallelism: bool | None = None,
            wait: bool = False,
            return_idx: bool = False,
            tasks: list[int] | None = None,
        ) -> Mapping[int, Sequence[int]] | None: ...

    class _GetKnownSubmissions(Protocol):
        """Type of :py:meth:`BaseApp.get_known_submissions`"""

        # Should be overloaded on as_json, but not bothering
        def __call__(
            self,
            max_recent: int = 3,
            no_update: bool = False,
            as_json: bool = False,
            status: Status | None = None,
        ) -> Sequence[KnownSubmissionItem]: ...

    class _Show(Protocol):
        """Type of :py:meth:`BaseApp.show`"""

        def __call__(
            self,
            max_recent: int = 3,
            full: bool = False,
            no_update: bool = False,
        ) -> None: ...

    class _Cancel(Protocol):
        """Type of :py:meth:`BaseApp.cancel`"""

        def __call__(
            self,
            workflow_ref: int | str | Path,
            ref_is_path: str | None = None,
            status: bool = False,
        ) -> None: ...

    class _RunTests(Protocol):
        """Type of :py:meth:`BaseApp.run_tests and run_hpcflow_tests`"""

        def __call__(self, *args: str) -> int: ...


SDK_logger = get_SDK_logger(__name__)
DEMO_WK_FORMATS = {".yaml": "yaml", ".yml": "yaml", ".json": "json", ".jsonc": "json"}

T = TypeVar("T")


def rate_limit_safe_url_to_fs(
    app: BaseApp, *args, logger: Logger | None = None, **kwargs
):
    R"""
    Call fsspec's ``url_to_fs`` but retry on ``requests.exceptions.HTTPError``\ s.

    References
    ----------
    [1]: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?
        apiVersion=2022-11-28#about-secondary-rate-limits
    """
    auth = {}
    if app.run_time_info.in_pytest:
        gh_token = os.environ.get("GH_TOKEN")
        if gh_token:
            # using the GitHub actions built in token increases the number of API
            # requests allowed per hour to 1000 [1]. fsspec requires "username" to be
            # set if using "token":
            auth = {"username": "", "token": gh_token}
            if logger:
                logger.info(
                    "calling fsspec's `url_to_fs` with a token from the env variable "
                    "`GH_TOKEN`."
                )

    # GitHub actions testing is potentially highly concurrent, with multiple
    # Python versions and OSes being tested at the same time; so we might hit
    # GitHub's secondary rate limit:
    @retry(
        requests.exceptions.HTTPError,
        tries=3,
        delay=5,
        backoff=1.5,
        jitter=(0, 20),
        logger=logger,
    )
    def _inner(*args, **kwargs):
        kwargs.update(auth)
        return url_to_fs(*args, **kwargs)

    return _inner(*args, **kwargs)
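For illustration, here is a minimal sketch of the same reretry back-off pattern applied to a plain HTTP call (hypothetical code, not from the hpcflow source; `fetch_manifest` and its argument are invented). With `tries=3`, `delay=5`, `backoff=1.5` and `jitter=(0, 20)`, a failing call is attempted up to three times, sleeping roughly 5 s and then 7.5 s, plus up to 20 s of random jitter, between attempts:

```python
import requests
from reretry import retry  # same decorator as used in `rate_limit_safe_url_to_fs`


@retry(requests.exceptions.HTTPError, tries=3, delay=5, backoff=1.5, jitter=(0, 20))
def fetch_manifest(url: str) -> bytes:
    resp = requests.get(url, timeout=30)
    resp.raise_for_status()  # 4xx/5xx -> HTTPError, which triggers a retry
    return resp.content
```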
def __getattr__(name: str):
    """Allow access to core classes and API functions."""
    try:
        return get_app_attribute(name)
    except AttributeError:
        raise AttributeError(f"module {__name__!r} has no attribute {name!r}.")


def get_app_attribute(name: str):
    """
    A function to assign to an app module `__getattr__` to access app attributes.
    """
    app_obj: BaseApp
    try:
        app_obj = cast("App", App.get_instance())
    except RuntimeError:
        app_obj = cast("BaseApp", BaseApp.get_instance())
    try:
        return getattr(app_obj, name)
    except AttributeError:
        raise AttributeError(f"module {app_obj.module!r} has no attribute {name!r}.")


def get_app_module_all() -> list[str]:
    """
    The list of all symbols exported by this module.
    """
    return ["app", *sdk_classes, *sdk_funcs]


def get_app_module_dir() -> Callable[[], list[str]]:
    """
    The sorted list of all symbols exported by this module.
    """
    return lambda: sorted(get_app_module_all())
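These helpers exist so that an app-defining module can forward attribute access to the singleton app object via PEP 562 module-level `__getattr__`/`__dir__`. A sketch of that wiring in a hypothetical downstream module `myapp/app.py` follows; the real `hpcflow/app.py` added in this release (43 lines, per the listing above) presumably does something similar, but its contents are not shown in this diff:

```python
# hypothetical myapp/app.py
from hpcflow.sdk.app import get_app_attribute, get_app_module_all, get_app_module_dir

__all__ = get_app_module_all()
__getattr__ = get_app_attribute  # `myapp.app.Workflow` etc. resolve via the app object
__dir__ = get_app_module_dir()
```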
class Singleton(type, Generic[T]):
    """
    Metaclass that enforces that only one instance of a class can be made.

    Type Parameters
    ---------------
    T
        The type of the class that is a singleton.
    """

    _instances: ClassVar[dict[Singleton, Any]] = {}

    def __call__(cls: Singleton[T], *args, **kwargs) -> T:
        """
        Get the current instance or make it if it doesn't already exist.

        Return
        ------
        T:
            The unique instance of the class.
        """
        SDK_logger.info(
            f"App metaclass __call__: "
            f"name={kwargs['name']!r}, version={kwargs['version']!r}."
        )
        if cls not in cls._instances:
            SDK_logger.info(f"App metaclass initialising new object {kwargs['name']!r}.")
            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
        return cls._instances[cls]

    def get_instance(cls: Singleton[T]) -> T:
        """
        Retrieve the instance of the singleton class if initialised.

        Raises
        ------
        RuntimeError
            If there is no instance already.
        """
        try:
            return cls._instances[cls]
        except KeyError:
            raise RuntimeError(f"{cls.__name__!r} object has not be instantiated!")
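A small sketch of the contract this metaclass enforces (hypothetical demo class, not from the hpcflow source): the first call constructs the instance, subsequent calls return the same object, and `get_instance()` raises `RuntimeError` before any construction. `name` and `version` must be passed as keywords because `Singleton.__call__` logs them from `kwargs`:

```python
from hpcflow.sdk.app import Singleton


class _Demo(metaclass=Singleton):
    def __init__(self, *, name: str, version: str):
        self.name = name
        self.version = version


first = _Demo(name="demo", version="0.1")
second = _Demo(name="ignored", version="0.2")  # returns the existing instance
assert first is second
assert _Demo.get_instance() is first
```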
class BaseApp(metaclass=Singleton):
    """
    Class to generate the hpcflow application.

    Parameters
    ----------
    name:
        The name of the application.
    version:
        The version of the application.
    module:
        The module name in which the app object is defined.
    description:
        Description of the application.
    gh_org:
        Name of Github organisation responsible for the application.
    gh_repo:
        Github repository containing the application source.
    config_options:
        Configuration options.
    scripts_dir:
        Directory for scripts.
    jinja_templates_dir:
        Directory for Jinja templates.
    programs_dir
        Directory for programs.
    workflows_dir:
        Directory for workflows.
    demo_data_dir:
        Directory for demonstration data.
    data_data_manifest_dir:
        Directory for demonstration data manifests.
    template_components:
        Template components.
    pytest_args:
        Arguments for pytest.
    package_name:
        Name of package if not the application name.
    docs_import_conv:
        The convention for the app alias used in import statements in the documentation.
        E.g. for the `hpcflow` base app, this is `hf`. This is combined with `module` to
        form the complete import statement. E.g. for the `hpcflow` base app, the complete
        import statement is: `import hpcflow.app as hf`, where `hpcflow.app` is the
        `module` argument and `hf` is the `docs_import_conv` argument.
    docs_url:
        URL to documentation.
    encoders:
        callable that takes no arguments and returns a mapping between string store types
        (e.g. "zarr", "json") and a dictionary of additional parameter encoders.
    decoders:
        callable that takes no arguments and returns a mapping between string store types
        (e.g. "zarr", "json") and a dictionary of additional parameter decoders.
    """

    _known_subs_file_name: ClassVar = "known_submissions.txt"
    _known_subs_file_sep: ClassVar = "::"
    _submission_ts_fmt: ClassVar = r"%Y-%m-%d %H:%M:%S.%f"
    __load_pending: ClassVar = False

    def __init__(
        self,
        name: str,
        version: str,
        module: str,
        description: str,
        gh_org: str,
        gh_repo: str,
        config_options: ConfigOptions,
        scripts_dir: str,
        jinja_templates_dir: str | None = None,
        programs_dir: str | None = None,
        workflows_dir: str | None = None,
        demo_data_dir: str | None = None,
        demo_data_manifest_dir: str | None = None,
        template_components: dict[str, list[dict]] | None = None,
        pytest_args: list[str] | None = None,
        package_name: str | None = None,
        docs_import_conv: str | None = None,
        docs_url: str | None = None,
        encoders: Callable | None = None,
        decoders: Callable | None = None,
    ):
        SDK_logger.info(f"Generating {self.__class__.__name__} {name!r}.")

        #: The name of the application.
        self.name = name
        #: Name of package.
        self.package_name = package_name or name.lower()
        #: The version of the application.
        self.version = version
        #: The module name in which the app object is defined.
        self.module = module
        #: Description of the application.
        self.description = description
        #: Name of Github organisation responsible for the application.
        self.gh_org = gh_org
        #: Github repository containing the application source.
        self.gh_repo = gh_repo
        #: Configuration options.
        self.config_options = config_options
        #: Arguments for pytest.
        self.pytest_args = pytest_args
        #: Directory for scripts.
        self.scripts_dir = scripts_dir
        #: Directory for Jinja templates.
        self.jinja_templates_dir = jinja_templates_dir
        #: Directory for programs.
        self.programs_dir = programs_dir
        #: Directory for workflows.
        self.workflows_dir = workflows_dir
        #: Directory for demonstration data.
        self.demo_data_dir = demo_data_dir
        #: Directory for demonstration data manifests.
        self.demo_data_manifest_dir = demo_data_manifest_dir
        #: The convention for the app alias used in import statements in the documentation.
        self.docs_import_conv = docs_import_conv
        #: URL to documentation.
        self.docs_url = docs_url
        #: Callable that returns additional parameter encoders.
        self.encoders = encoders or (lambda: {})
        #: Callable that returns additional parameter decoders.
        self.decoders = decoders or (lambda: {})

        #: Command line interface subsystem.
        self.cli = make_cli(self)

        self._log = AppLog(self)
        self._run_time_info = RunTimeInfo(
            self.name,
            self.package_name,
            self.version,
            self.runtime_info_logger,
        )

        self._builtin_template_components = template_components or {}

        self._config: Config | None = (
            None  # assigned on first access to `config` property
        )
        self._config_files: dict[str, ConfigFile] = (
            {}
        )  # assigned on config load, keys are string absolute paths

        # Set by `_load_template_components`:
        self._template_components: TemplateComponents = {}
        self._parameters: _ParametersList | None = None
        self._command_files: _CommandFilesList | None = None
        self._environments: _EnvironmentsList | None = None
        self._task_schemas: _TaskSchemasList | None = None
        self._scripts: dict[str, Path] | None = None
        self._jinja_templates: dict[str, Path] | None = None
        self._programs: dict[str, Path] | None = None

        self.__app_type_cache: dict[str, type] = {}
        self.__app_func_cache: dict[str, Callable[..., Any]] = {}

        # assigned on first access to respective properties
        self._user_data_dir: Path | None = None
        self._user_cache_dir: Path | None = None
        self._user_runtime_dir: Path | None = None
        self._user_data_hostname_dir: Path | None = None
        self._user_cache_hostname_dir: Path | None = None
        self._demo_data_cache_dir: Path | None = None
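A hedged sketch of constructing an app object against the `__init__` signature above (hypothetical code, not from the hpcflow source; all values are illustrative and the `ConfigOptions` construction is omitted because its parameters are defined elsewhere, in `hpcflow/sdk/config/config.py`):

```python
from hpcflow.sdk.app import BaseApp
from hpcflow.sdk.config.config import ConfigOptions

config_options: ConfigOptions = ...  # placeholder: construction omitted here

app = BaseApp(
    name="MyApp",  # illustrative values throughout
    version="0.1.0",
    module=__name__,
    description="An example app built on the hpcflow SDK.",
    gh_org="example-org",
    gh_repo="myapp",
    config_options=config_options,
    scripts_dir="data/scripts",
)
```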
    @property
    def ElementActionRun(self) -> type[ElementActionRun]:
        """
        The :class:`ElementActionRun` class.

        :meta private:
        """
        return self._get_app_core_class("ElementActionRun")

    @property
    def ElementAction(self) -> type[ElementAction]:
        """
        The :class:`ElementAction` class.

        :meta private:
        """
        return self._get_app_core_class("ElementAction")

    @property
    def ElementFilter(self) -> type[ElementFilter]:
        """
        The :class:`ElementFilter` class.

        :meta private:
        """
        return self._get_app_core_class("ElementFilter")

    @property
    def ElementGroup(self) -> type[ElementGroup]:
        """
        The :class:`ElementGroup` class.

        :meta private:
        """
        return self._get_app_core_class("ElementGroup")

    @property
    def Environment(self) -> type[Environment]:
        """
        The :class:`Environment` class.

        :meta private:
        """
        return self._get_app_core_class("Environment")

    @property
    def Executable(self) -> type[_Executable]:
        """
        The :class:`Executable` class.

        :meta private:
        """
        return self._get_app_core_class("Executable")

    @property
    def ExecutableInstance(self) -> type[ExecutableInstance]:
        """
        The :class:`ExecutableInstance` class.

        :meta private:
        """
        return self._get_app_core_class("ExecutableInstance")

    @property
    def NumCores(self) -> type[NumCores]:
        """
        The :class:`NumCores` class.

        :meta private:
        """
        return self._get_app_core_class("NumCores")

    @property
    def ActionEnvironment(self) -> type[ActionEnvironment]:
        """
        The :class:`ActionEnvironment` class.

        :meta private:
        """
        return self._get_app_core_class("ActionEnvironment")

    @property
    def Action(self) -> type[Action]:
        """
        The :class:`Action` class.

        :meta private:
        """
        return self._get_app_core_class("Action")

    @property
    def ActionRule(self) -> type[ActionRule]:
        """
        The :class:`ActionRule` class.

        :meta private:
        """
        return self._get_app_core_class("ActionRule")

    @property
    def ActionScope(self) -> type[ActionScope]:
        """
        The :class:`ActionScope` class.

        :meta private:
        """
        return self._get_app_core_class("ActionScope")

    @property
    def ActionScopeType(self) -> type[ActionScopeType]:
        """
        The :class:`ActionScopeType` class.

        :meta private:
        """
        return self._get_app_core_class("ActionScopeType")

    @property
    def FileSpec(self) -> type[FileSpec]:
        """
        The :class:`FileSpec` class.

        :meta private:
        """
        return self._get_app_core_class("FileSpec")

    @property
    def FileNameSpec(self) -> type[FileNameSpec]:
        """
        The :class:`FileNameSpec` class.

        :meta private:
        """
        return self._get_app_core_class("FileNameSpec")

    @property
    def FileNameStem(self) -> type[FileNameStem]:
        """
        The :class:`FileNameStem` class.

        :meta private:
        """
        return self._get_app_core_class("FileNameStem")

    @property
    def FileNameExt(self) -> type[FileNameExt]:
        """
        The :class:`FileNameExt` class.

        :meta private:
        """
        return self._get_app_core_class("FileNameExt")

    @property
    def OutputFileParser(self) -> type[OutputFileParser]:
        """
        The :class:`OutputFileParser` class.

        :meta private:
        """
        return self._get_app_core_class("OutputFileParser")

    @property
    def InputSource(self) -> type[InputSource]:
        """
        The :class:`InputSource` class.

        :meta private:
        """
        return self._get_app_core_class("InputSource")

    @property
    def InputSourceType(self) -> type[InputSourceType]:
        """
        The :class:`InputSourceType` class.

        :meta private:
        """
        return self._get_app_core_class("InputSourceType")

    @property
    def ValueSequence(self) -> type[ValueSequence]:
        """
        The :class:`ValueSequence` class.

        :meta private:
        """
        return self._get_app_core_class("ValueSequence")

    @property
    def MultiPathSequence(self) -> type[MultiPathSequence]:
        """
        The :class:`MultiPathSequence` class.

        :meta private:
        """
        return self._get_app_core_class("MultiPathSequence")

    @property
    def SchemaInput(self) -> type[SchemaInput]:
        """
        The :class:`SchemaInput` class.

        :meta private:
        """
        return self._get_app_core_class("SchemaInput")

    @property
    def InputFileGenerator(self) -> type[InputFileGenerator]:
        """
        The :class:`InputFileGenerator` class.

        :meta private:
        """
        return self._get_app_core_class("InputFileGenerator")

    @property
    def Command(self) -> type[Command]:
        """
        The :class:`Command` class.

        :meta private:
        """
        return self._get_app_core_class("Command")

    @property
    def ElementInputs(self) -> type[ElementInputs]:
        """
        The :class:`ElementInputs` class.

        :meta private:
        """
        return self._get_app_core_class("ElementInputs")

    @property
    def ElementOutputs(self) -> type[ElementOutputs]:
        """
        The :class:`ElementOutputs` class.

        :meta private:
        """
        return self._get_app_core_class("ElementOutputs")

    @property
    def ElementInputFiles(self) -> type[ElementInputFiles]:
        """
        The :class:`ElementInputFiles` class.

        :meta private:
        """
        return self._get_app_core_class("ElementInputFiles")

    @property
    def ElementOutputFiles(self) -> type[ElementOutputFiles]:
        """
        The :class:`ElementOutputFiles` class.

        :meta private:
        """
        return self._get_app_core_class("ElementOutputFiles")

    @property
    def ElementResources(self) -> type[ElementResources]:
        """
        The :class:`ElementResources` class.

        :meta private:
        """
        return self._get_app_core_class("ElementResources")

    @property
    def ElementIteration(self) -> type[ElementIteration]:
        """
        The :class:`ElementIteration` class.

        :meta private:
        """
        return self._get_app_core_class("ElementIteration")

    @property
    def ElementSet(self) -> type[ElementSet]:
        """
        The :class:`ElementSet` class.

        :meta private:
        """
        return self._get_app_core_class("ElementSet")

    @property
    def Element(self) -> type[Element]:
        """
        The :class:`Element` class.

        :meta private:
        """
        return self._get_app_core_class("Element")

    @property
    def ElementParameter(self) -> type[ElementParameter]:
        """
        The :class:`ElementParameter` class.

        :meta private:
        """
        return self._get_app_core_class("ElementParameter")

    @property
    def Loop(self) -> type[Loop]:
        """
        The :class:`Loop` class.

        :meta private:
        """
        return self._get_app_core_class("Loop")

    @property
    def WorkflowLoop(self) -> type[WorkflowLoop]:
        """
        The :class:`WorkflowLoop` class.

        :meta private:
        """
        return self._get_app_core_class("WorkflowLoop")

    @property
    def CommandFilesList(self) -> type[_CommandFilesList]:
        """
        The :class:`CommandFilesList` class.

        :meta private:
        """
        return self._get_app_core_class("CommandFilesList")

    @property
    def EnvironmentsList(self) -> type[_EnvironmentsList]:
        """
        The :class:`EnvironmentsList` class.

        :meta private:
        """
        return self._get_app_core_class("EnvironmentsList")

    @property
    def ExecutablesList(self) -> type[ExecutablesList]:
        """
        The :class:`ExecutablesList` class.

        :meta private:
        """
        return self._get_app_core_class("ExecutablesList")

    @property
    def GroupList(self) -> type[GroupList]:
        """
        The :class:`GroupList` class.

        :meta private:
        """
        return self._get_app_core_class("GroupList")

    @property
    def ParametersList(self) -> type[_ParametersList]:
        """
        The :class:`ParametersList` class.

        :meta private:
        """
        return self._get_app_core_class("ParametersList")

    @property
    def ResourceList(self) -> type[ResourceList]:
        """
        The :class:`ResourceList` class.

        :meta private:
        """
        return self._get_app_core_class("ResourceList")

    @property
    def ResourceSpec(self) -> type[ResourceSpec]:
        """
        The :class:`ResourceSpec` class.

        :meta private:
        """
        return self._get_app_core_class("ResourceSpec")

    @property
    def TaskList(self) -> type[TaskList]:
        """
        The :class:`TaskList` class.

        :meta private:
        """
        return self._get_app_core_class("TaskList")

    @property
    def TaskSchemasList(self) -> type[_TaskSchemasList]:
        """
        The :class:`TaskSchemasList` class.

        :meta private:
        """
        return self._get_app_core_class("TaskSchemasList")

    @property
    def TaskTemplateList(self) -> type[TaskTemplateList]:
        """
        The :class:`TaskTemplateList` class.

        :meta private:
        """
        return self._get_app_core_class("TaskTemplateList")

    @property
    def WorkflowLoopList(self) -> type[WorkflowLoopList]:
        """
        The :class:`WorkflowLoopList` class.

        :meta private:
        """
        return self._get_app_core_class("WorkflowLoopList")

    @property
    def WorkflowTaskList(self) -> type[WorkflowTaskList]:
        """
        The :class:`WorkflowTaskList` class.

        :meta private:
        """
        return self._get_app_core_class("WorkflowTaskList")

    @property
    def SchemaParameter(self) -> type[SchemaParameter]:
        """
        The :class:`SchemaParameter` class.

        :meta private:
        """
        return self._get_app_core_class("SchemaParameter")

    @property
    def SchemaOutput(self) -> type[SchemaOutput]:
        """
        The :class:`SchemaOutput` class.

        :meta private:
        """
        return self._get_app_core_class("SchemaOutput")

    @property
    def Rule(self) -> type[Rule]:
        """
        The :class:`Rule` class.

        :meta private:
        """
        return self._get_app_core_class("Rule")

    @property
    def RunDirAppFiles(self) -> type[RunDirAppFiles]:
        """
        The :class:`RunDirAppFiles` class.

        :meta private:
        """
        return self._get_app_core_class("RunDirAppFiles")

    @property
    def WorkflowTask(self) -> type[WorkflowTask]:
        """
        The :class:`WorkflowTask` class.

        :meta private:
        """
        return self._get_app_core_class("WorkflowTask")

    @property
    def Parameters(self) -> type[Parameters]:
        """
        The :class:`Parameters` class.

        :meta private:
        """
        return self._get_app_core_class("Parameters")

    @property
    def Parameter(self) -> type[Parameter]:
        """
        The :class:`Parameter` class.

        :meta private:
        """
        return self._get_app_core_class("Parameter")

    @property
    def ParameterValue(self) -> type[ParameterValue]:
        """
        The :class:`ParameterValue` class.

        :meta private:
        """
        return self._get_app_core_class("ParameterValue")

    @property
    def InputValue(self) -> type[InputValue]:
        """
        The :class:`InputValue` class.

        :meta private:
        """
        return self._get_app_core_class("InputValue")

    @property
    def Task(self) -> type[Task]:
        """
        The :class:`Task` class.

        :meta private:
        """
        return self._get_app_core_class("Task")

    @property
    def TaskSchema(self) -> type[TaskSchema]:
        """
        The :class:`TaskSchema` class.

        :meta private:
        """
        return self._get_app_core_class("TaskSchema")

    @property
    def TaskSourceType(self) -> type[TaskSourceType]:
        """
        The :class:`TaskSourceType` class.

        :meta private:
        """
        return self._get_app_core_class("TaskSourceType")

    @property
    def TaskObjective(self) -> type[TaskObjective]:
        """
        The :class:`TaskObjective` class.

        :meta private:
        """
        return self._get_app_core_class("TaskObjective")

    @property
    def TaskInputParameters(self) -> type[TaskInputParameters]:
        """
        The :class:`TaskInputParameters` class.

        :meta private:
        """
        return self._get_app_core_class("TaskInputParameters")

    @property
    def TaskOutputParameters(self) -> type[TaskOutputParameters]:
        """
        The :class:`TaskOutputParameters` class.

        :meta private:
        """
        return self._get_app_core_class("TaskOutputParameters")

    @property
    def ElementPropagation(self) -> type[ElementPropagation]:
        """
        The :class:`ElementPropagation` class.

        :meta private:
        """
        return self._get_app_core_class("ElementPropagation")

    @property
    def WorkflowTemplate(self) -> type[_WorkflowTemplate]:
        """
        The :class:`WorkflowTemplate` class.

        :meta private:
        """
        return self._get_app_core_class("WorkflowTemplate")

    @property
    def Workflow(self) -> type[_Workflow]:
        """
        The :class:`Workflow` class.

        :meta private:
        """
        return self._get_app_core_class("Workflow")

    @property
    def Jobscript(self) -> type[Jobscript]:
        """
        The :class:`Jobscript` class.

        :meta private:
        """
        return self._get_app_core_class("Jobscript")

    @property
    def Submission(self) -> type[_Submission]:
        """
        The :class:`Submission` class.

        :meta private:
        """
        return self._get_app_core_class("Submission")

    @property
    def DirectPosix(self) -> type[DirectPosix]:
        """
        The :class:`DirectPosix` class.

        :meta private:
        """
        return self._get_app_core_class("DirectPosix")

    @property
    def DirectWindows(self) -> type[DirectWindows]:
        """
        The :class:`DirectWindows` class.

        :meta private:
        """
        return self._get_app_core_class("DirectWindows")

    @property
    def SGEPosix(self) -> type[SGEPosix]:
        """
        The :class:`SGEPosix` class.

        :meta private:
        """
        return self._get_app_core_class("SGEPosix")

    @property
    def SlurmPosix(self) -> type[SlurmPosix]:
        """
        The :class:`SlurmPosix` class.

        :meta private:
        """
        return self._get_app_core_class("SlurmPosix")

    @property
    def QueuedScheduler(self) -> type[QueuedScheduler]:
        """
        The :class:`QueuedScheduler` class.

        :meta private:
        """
        return self._get_app_core_class("QueuedScheduler")
|
1264
|
+
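    # Usage sketch (illustrative only): the properties above return app-bound
    # subclasses built by `_get_app_core_class` (defined later in this class),
    # so objects are constructed via an app instance rather than by importing
    # the core classes directly. Assuming `app` is a configured instance of a
    # derived app, and a template file that exists:
    #
    #     wkt = app.WorkflowTemplate.from_file("workflow.yaml")
    #     print(app.Task, app.Workflow)  # app-bound subclasses, cached per name
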
    @property
    def make_workflow(self) -> _MakeWorkflow:
        """
        Generate a new workflow from a file or string containing a workflow
        template parametrisation.

        Parameters
        ----------
        template_path_or_str: str
            Either a path to a template file in YAML or JSON format, or a YAML/JSON
            string.
        is_string: bool
            If True, interpret `template_path_or_str` as a YAML/JSON string rather
            than a file path.
        template_format: str
            If specified, one of "json" or "yaml". This forces parsing from a
            particular format.
        path: str | Path
            The directory in which the workflow will be generated. If not specified,
            the config item `default_workflow_path` will be used; if that is not set,
            the current directory is used.
        name: str
            The name to use for the workflow. If not provided, the name will be set
            to that of the template (optionally suffixed by a date-timestamp if
            `name_add_timestamp` is True).
        name_add_timestamp: bool
            If True, suffix the name with a date-timestamp. A default value can be
            set with the config item `workflow_name_add_timestamp`; otherwise set to
            `True`.
        name_use_dir: bool
            If True, and `name_add_timestamp` is also True, the workflow directory
            name will be just the date-timestamp, and will be contained within a
            parent directory corresponding to the workflow name. A default value can
            be set with the config item `workflow_name_use_dir`; otherwise set to
            `False`.
        overwrite: bool
            If True and the workflow directory (`path` + `name`) already exists, the
            existing directory will be overwritten.
        store: str
            The persistent store type to use.
        ts_fmt: str
            The datetime format to use for storing datetimes. Datetimes are always
            stored in UTC (because Numpy does not store time zone info), so this
            should not include a time zone name.
        ts_name_fmt: str
            The datetime format to use when generating the workflow name, where it
            includes a timestamp.
        store_kwargs: dict[str, object]
            Keyword arguments to pass to the store's `write_empty_workflow` method.
        variables: dict[str, str]
            String variables to substitute in `template_path_or_str`.
        status: bool
            If True, display a live status to track workflow creation progress.
        add_submission: bool
            If True, add a submission to the workflow (but do not submit).

        Returns
        -------
        Workflow
            The created workflow, if `add_submission` is `False`.
        Submission
            The created submission object, if `add_submission` is `True`.
        """
        return self.__get_app_func("make_workflow")

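    # Usage sketch for `make_workflow`; all argument values are illustrative,
    # and `app` is assumed to be a configured app instance:
    #
    #     wk = app.make_workflow(
    #         "workflow.yaml",          # template file path (is_string=False)
    #         path="~/workflows",       # parent directory for the new workflow
    #         name="my_analysis",
    #         name_add_timestamp=True,  # suffix the name with a date-timestamp
    #     )
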
    @property
    def make_demo_workflow(self) -> _MakeDemoWorkflow:
        """
        Generate a new workflow from a builtin demo workflow template.

        Parameters
        ----------
        workflow_name: str
            Name of the demo workflow to make.
        template_format: str
            If specified, one of "json" or "yaml". This forces parsing from a
            particular format.
        path: str | Path
            The directory in which the workflow will be generated. If not specified,
            the config item `default_workflow_path` will be used; if that is not set,
            the current directory is used.
        name: str
            The name to use for the workflow. If not provided, the name will be set
            to that of the template (optionally suffixed by a date-timestamp if
            `name_add_timestamp` is True).
        name_add_timestamp: bool
            If True, suffix the name with a date-timestamp. A default value can be
            set with the config item `workflow_name_add_timestamp`; otherwise set to
            `True`.
        name_use_dir: bool
            If True, and `name_add_timestamp` is also True, the workflow directory
            name will be just the date-timestamp, and will be contained within a
            parent directory corresponding to the workflow name. A default value can
            be set with the config item `workflow_name_use_dir`; otherwise set to
            `False`.
        overwrite: bool
            If True and the workflow directory (`path` + `name`) already exists, the
            existing directory will be overwritten.
        store: str
            The persistent store type to use.
        ts_fmt: str
            The datetime format to use for storing datetimes. Datetimes are always
            stored in UTC (because Numpy does not store time zone info), so this
            should not include a time zone name.
        ts_name_fmt: str
            The datetime format to use when generating the workflow name, where it
            includes a timestamp.
        store_kwargs: dict[str, object]
            Keyword arguments to pass to the store's `write_empty_workflow` method.
        variables: dict[str, str]
            String variables to substitute in the demo workflow template file.
        status: bool
            If True, display a live status to track workflow creation progress.
        add_submission: bool
            If True, add a submission to the workflow (but do not submit).

        Returns
        -------
        Workflow
            The created workflow, if `add_submission` is `False`.
        Submission
            The created submission object, if `add_submission` is `True`.
        """
        return self.__get_app_func("make_demo_workflow")

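    # Usage sketch for `make_demo_workflow`; the demo name is hypothetical and
    # would come from `list_demo_workflows()` (defined later in this class):
    #
    #     print(app.list_demo_workflows())  # e.g. ("demo_1", ...)
    #     wk = app.make_demo_workflow("demo_1", path=".", overwrite=True)
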
    @property
    def make_and_submit_workflow(self) -> _MakeAndSubmitWorkflow:
        """
        Generate and submit a new workflow from a file or string containing a
        workflow template parametrisation.

        Parameters
        ----------
        template_path_or_str: str
            Either a path to a template file in YAML or JSON format, or a YAML/JSON
            string.
        is_string: bool
            Determines whether `template_path_or_str` is a string or a file.
        template_format: str
            If specified, one of "json" or "yaml". This forces parsing from a
            particular format.
        path: str | Path
            The directory in which the workflow will be generated. If not specified,
            the config item `default_workflow_path` will be used; if that is not set,
            the current directory is used.
        name: str
            The name to use for the workflow. If not provided, the name will be set
            to that of the template (optionally suffixed by a date-timestamp if
            `name_add_timestamp` is True).
        name_add_timestamp: bool
            If True, suffix the name with a date-timestamp. A default value can be
            set with the config item `workflow_name_add_timestamp`; otherwise set to
            `True`.
        name_use_dir: bool
            If True, and `name_add_timestamp` is also True, the workflow directory
            name will be just the date-timestamp, and will be contained within a
            parent directory corresponding to the workflow name. A default value can
            be set with the config item `workflow_name_use_dir`; otherwise set to
            `False`.
        overwrite: bool
            If True and the workflow directory (`path` + `name`) already exists, the
            existing directory will be overwritten.
        store: str
            The persistent store to use for this workflow.
        ts_fmt: str
            The datetime format to use for storing datetimes. Datetimes are always
            stored in UTC (because Numpy does not store time zone info), so this
            should not include a time zone name.
        ts_name_fmt: str
            The datetime format to use when generating the workflow name, where it
            includes a timestamp.
        store_kwargs: dict[str, object]
            Keyword arguments to pass to the store's `write_empty_workflow` method.
        variables: dict[str, str]
            String variables to substitute in `template_path_or_str`.
        JS_parallelism: bool
            If True, allow multiple jobscripts to execute simultaneously. Raises if
            set to True but the store type does not support the
            `jobscript_parallelism` feature. If not set, jobscript parallelism will
            be used if the store type supports it.
        wait: bool
            If True, this command will block until the workflow execution is
            complete.
        add_to_known: bool
            If True, add the new submission to the known-submissions file, which is
            used by the `show` command to monitor current and recent submissions.
        return_idx: bool
            If True, return a dict representing the jobscript indices submitted for
            each submission.
        tasks: list[int]
            List of task indices to include in this submission. By default all tasks
            are included.
        cancel: bool
            Immediately cancel the submission. Useful for testing and benchmarking.
        status: bool
            If True, display a live status to track workflow creation and submission
            progress.

        Returns
        -------
        Workflow
            The created workflow.
        dict[int, list[int]]
            Mapping of submission handles, if requested via the ``return_idx``
            parameter.
        """
        return self.__get_app_func("make_and_submit_workflow")

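    # Usage sketch for `make_and_submit_workflow`; per the docstring above,
    # with `return_idx=True` a mapping of submitted jobscript indices is
    # returned in addition to the workflow (the exact return packaging is
    # assumed here):
    #
    #     wk, idx = app.make_and_submit_workflow(
    #         "workflow.yaml",
    #         add_to_known=True,  # let the `show` command track this submission
    #         return_idx=True,
    #     )
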
    @property
    def make_and_submit_demo_workflow(self) -> _MakeAndSubmitDemoWorkflow:
        """
        Generate and submit a new workflow from a builtin demo workflow template.

        Parameters
        ----------
        workflow_name: str
            Name of the demo workflow to make. **Required.**
        template_format: str
            If specified, one of "json" or "yaml". This forces parsing from a
            particular format.
        path: str | Path
            The directory in which the workflow will be generated. If not specified,
            the config item `default_workflow_path` will be used; if that is not set,
            the current directory is used.
        name: str
            The name to use for the workflow. If not provided, the name will be set
            to that of the template (optionally suffixed by a date-timestamp if
            `name_add_timestamp` is True).
        name_add_timestamp: bool
            If True, suffix the name with a date-timestamp. A default value can be
            set with the config item `workflow_name_add_timestamp`; otherwise set to
            `True`.
        name_use_dir: bool
            If True, and `name_add_timestamp` is also True, the workflow directory
            name will be just the date-timestamp, and will be contained within a
            parent directory corresponding to the workflow name. A default value can
            be set with the config item `workflow_name_use_dir`; otherwise set to
            `False`.
        overwrite: bool
            If True and the workflow directory (`path` + `name`) already exists, the
            existing directory will be overwritten.
        store: str
            The persistent store to use for this workflow.
        ts_fmt: str
            The datetime format to use for storing datetimes. Datetimes are always
            stored in UTC (because Numpy does not store time zone info), so this
            should not include a time zone name.
        ts_name_fmt: str
            The datetime format to use when generating the workflow name, where it
            includes a timestamp.
        store_kwargs: dict[str, object]
            Keyword arguments to pass to the store's `write_empty_workflow` method.
        variables: dict[str, str]
            String variables to substitute in the demo workflow template file.
        JS_parallelism: bool
            If True, allow multiple jobscripts to execute simultaneously. Raises if
            set to True but the store type does not support the
            `jobscript_parallelism` feature. If not set, jobscript parallelism will
            be used if the store type supports it.
        wait: bool
            If True, this command will block until the workflow execution is
            complete.
        add_to_known: bool
            If True, add the new submission to the known-submissions file, which is
            used by the `show` command to monitor current and recent submissions.
        return_idx: bool
            If True, return a dict representing the jobscript indices submitted for
            each submission.
        tasks: list[int]
            List of task indices to include in this submission. By default all tasks
            are included.
        cancel: bool
            Immediately cancel the submission. Useful for testing and benchmarking.
        status: bool
            If True, display a live status to track submission progress.

        Returns
        -------
        Workflow
            The created workflow.
        dict[int, list[int]]
            Mapping of submission handles, if requested via the ``return_idx``
            parameter.
        """
        return self.__get_app_func("make_and_submit_demo_workflow")

    @property
    def submit_workflow(self) -> _SubmitWorkflow:
        """
        Submit an existing workflow.

        Parameters
        ----------
        workflow_path: str
            Path to an existing workflow.
        JS_parallelism: bool
            If True, allow multiple jobscripts to execute simultaneously. Raises if
            set to True but the store type does not support the
            `jobscript_parallelism` feature. If not set, jobscript parallelism will
            be used if the store type supports it.
        tasks: list[int]
            List of task indices to include in this submission. By default all tasks
            are included.

        Returns
        -------
        dict[int, list[int]]
            Mapping of submission handles, if requested via a ``return_idx``
            parameter.
        """
        return self.__get_app_func("submit_workflow")

    @property
    def run_hpcflow_tests(self) -> _RunTests:
        """Run the hpcflow test suite. This function is only available from derived apps."""
        return self.__get_app_func("run_hpcflow_tests")

    @property
    def run_tests(self) -> _RunTests:
        """Run the test suite."""
        return self.__get_app_func("run_tests")

    @property
    def get_OS_info(self) -> Callable[[], Mapping[str, str]]:
        """
        Get information about the operating system.

        Returns
        -------
        dict[str, str]
            Key-value mapping containing system version information.
        """
        return self.__get_app_func("get_OS_info")

    @property
    def get_shell_info(self) -> Callable[[str, bool], VersionInfo]:
        """
        Get information about a given shell and the operating system.

        Parameters
        ----------
        shell_name: str
            One of the supported shell names.
        exclude_os: bool
            If True, exclude operating system information.

        Returns
        -------
        VersionInfo
            The shell version information descriptor.
        """
        return self.__get_app_func("get_shell_info")

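    # Usage sketch; "bash" is assumed to be one of the supported shell names:
    #
    #     os_info = app.get_OS_info()  # key-value mapping of system version info
    #     shell_vi = app.get_shell_info("bash", exclude_os=True)
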
    @property
    def get_known_submissions(self) -> _GetKnownSubmissions:
        """
        Retrieve information about active and recently finished workflow
        submissions.

        This method removes workflows from the known-submissions file that are found
        to be inactive on this machine (according to the scheduler/process ID).

        Parameters
        ----------
        max_recent: int
            Maximum number of inactive workflows to retrieve.
        no_update: bool
            If True, do not update the known-submissions file to set submissions that
            are now inactive.
        as_json: bool
            If True, only include JSON-compatible information. This will exclude the
            `submission` key, for instance.

        Returns
        -------
        list[KnownSubmissionItem]
            List of descriptions of known items.
        """
        return self.__get_app_func("get_known_submissions")

    @property
    def show(self) -> _Show:
        """
        Show information about running workflows.

        Parameters
        ----------
        max_recent: int
            Maximum number of inactive workflows to show.
        full: bool
            If True, provide more information; output may span multiple lines for
            each workflow submission.
        no_update: bool
            If True, do not update the known-submissions file to remove workflows
            that are no longer running.
        """
        return self.__get_app_func("show")

    @property
    def show_legend(self) -> Callable[[], None]:
        """
        Output a legend for the jobscript-element and EAR states that are displayed
        by the `show` command.
        """
        return self.__get_app_func("show_legend")

    @property
    def cancel(self) -> _Cancel:
        """
        Cancel the execution of a workflow submission.

        Parameters
        ----------
        workflow_ref: int | str | Path
            Which workflow to cancel, by ID or path.
        ref_is_path: str
            One of "``id``", "``path``" or "``assume-id``" (the default).
        status: bool
            Whether to show a live status during cancel.
        """
        return self.__get_app_func("cancel")

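    # Usage sketch for the monitoring helpers (argument values illustrative):
    #
    #     app.show(max_recent=5, full=True)  # status of current/recent submissions
    #     app.show_legend()                  # key for the displayed state symbols
    #     app.cancel(0)                      # local ID, via the default "assume-id"
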
    def __getattr__(self, name: str):
        if name in sdk_classes:
            return self._get_app_core_class(name)
        elif name in sdk_funcs:
            return self.__get_app_func(name)
        else:
            raise AttributeError(f"module {__name__!r} has no attribute {name!r}.")

    def __repr__(self):
        return f"{self.__class__.__name__}(name={self.name!r}, version={self.version!r})"

    def _get_app_core_class(self, name: str) -> type:
        if name in self.__app_type_cache:
            return self.__app_type_cache[name]
        obj_mod = import_module(sdk_classes[name])
        cls = getattr(obj_mod, name)
        if issubclass(cls, enum.Enum):
            sub_cls = cls
        else:
            # create an app-bound subclass whose `_app_attr` class attribute
            # (if defined) refers back to this app instance:
            dct: dict[str, Any] = {}
            if hasattr(cls, "_app_attr"):
                dct = {getattr(cls, "_app_attr"): self}
            sub_cls = type(cls.__name__, (cls,), dct)
            if cls.__doc__:
                sub_cls.__doc__ = cls.__doc__.format(app_name=self.name)
        sub_cls.__module__ = self.module
        self.__app_type_cache[name] = sub_cls
        return sub_cls

    def __get_app_func(self, name: str) -> Callable[..., Any]:
        if name in self.__app_func_cache:
            return self.__app_func_cache[name]

        def wrap_func(func) -> Callable[..., Any]:
            # this function avoids scope issues
            return lambda *args, **kwargs: func(*args, **kwargs)

        # retrieve the "private" function:
        sdk_func = getattr(self, f"_{name}")

        func = wrap_func(sdk_func)
        func = wraps(sdk_func)(func)
        if func.__doc__:
            func.__doc__ = func.__doc__.format(app_name=self.name)
        func.__module__ = self.module
        self.__app_func_cache[name] = func
        return func

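    # Sketch of the observable behaviour of the two factories above: results
    # are cached per name, and generated objects are re-homed to the app's
    # module (`app` here is an assumed configured instance):
    #
    #     assert app.Task is app.Task               # cached app-bound subclass
    #     assert app.Task.__module__ == app.module  # set in _get_app_core_class
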
    @property
    def run_time_info(self) -> RunTimeInfo:
        """
        Information about the runtime.
        """
        return self._run_time_info

    @property
    def log(self) -> AppLog:
        """
        The application log.
        """
        return self._log

    @property
    def timeit(self) -> bool:
        """
        Whether the timing analysis system is active.
        """
        return TimeIt.active

    @timeit.setter
    def timeit(self, value: bool):
        TimeIt.active = bool(value)

    @property
    def template_components(self) -> TemplateComponents:
        """
        The template component data.
        """
        if not self.is_template_components_loaded:
            if BaseApp.__load_pending:
                # guard against re-entrant loading (loading template
                # components can itself touch this property):
                return {}
            BaseApp.__load_pending = True
            self._load_template_components()
            BaseApp.__load_pending = False
        return self._template_components

    @property
    def _shared_data(self) -> Mapping[str, Any]:
        return cast("Mapping[str, Any]", self.template_components)

    def _ensure_template_component(self, name: str) -> None:
        """Invoked by access to individual template components (e.g. parameters)."""
        if not getattr(self, f"_{name}"):
            self._load_template_components(name)
        else:
            self.logger.debug(f"Template component {name!r} already loaded")

    def load_template_components(self, warn: bool = True) -> None:
        """Load all template component data, warning by default if already loaded."""
        if warn and self.is_template_components_loaded:
            warnings.warn("Template components already loaded; reloading now.")
        self._load_template_components()

    def reload_template_components(self, warn: bool = True) -> None:
        """
        Reload all template component data, warning by default if not already
        loaded.
        """
        if warn and not self.is_template_components_loaded:
            warnings.warn("Template components not loaded; loading now.")
        self._load_template_components()

    @TimeIt.decorator
    def _load_template_components(self, *include: str) -> None:
        """
        Combine any builtin template components with user-defined template components
        and initialise list objects.
        """
        if not include or "task_schemas" in include:
            # task schemas require all other template components to be loaded first
            include = (
                "parameters",
                "command_files",
                "environments",
                "task_schemas",
                "scripts",
                "jinja_templates",
                "programs",
            )

        self.logger.debug(f"Loading template components: {include!r}.")

        self_tc: Any = self._template_components

        if "parameters" in include:
            params: list[Any] = self._builtin_template_components.get("parameters", [])
            for path in self.config.parameter_sources:
                params.extend(read_YAML_file(path))
            param_list = self.ParametersList.from_json_like(params, shared_data=self_tc)
            self._template_components["parameters"] = param_list
            self._parameters = param_list

        if "command_files" in include:
            cmd_files: list[Any] = self._builtin_template_components.get(
                "command_files", []
            )
            for path in self.config.command_file_sources:
                cmd_files.extend(read_YAML_file(path))
            cf_list = self.CommandFilesList.from_json_like(cmd_files, shared_data=self_tc)
            self._template_components["command_files"] = cf_list
            self._command_files = cf_list

        if "environments" in include:
            envs = []
            builtin_envs: list[Any] = self._builtin_template_components.get(
                "environments", []
            )
            for e_path in self.config.environment_sources:
                for env_j in read_YAML_file(e_path):
                    for b_idx, builtin_env in enumerate(list(builtin_envs)):
                        # overwrite builtin envs with user-supplied:
                        if builtin_env["name"] == env_j["name"]:
                            builtin_envs.pop(b_idx)
                    envs.append(env_j)
            envs = builtin_envs + envs
            env_list = self.EnvironmentsList.from_json_like(envs, shared_data=self_tc)
            self._template_components["environments"] = env_list
            self._environments = env_list

        if "task_schemas" in include:
            schemas: list[Any] = self._builtin_template_components.get("task_schemas", [])
            for path in self.config.task_schema_sources:
                schemas.extend(read_YAML_file(path))
            ts_list = self.TaskSchemasList.from_json_like(schemas, shared_data=self_tc)
            self._template_components["task_schemas"] = ts_list
            self._task_schemas = ts_list

        if "scripts" in include:
            scripts = self._load_scripts()
            self._template_components["scripts"] = scripts
            self._scripts = scripts

        if "jinja_templates" in include:
            jinja_templates = self._load_jinja_templates()
            self._template_components["jinja_templates"] = jinja_templates
            self._jinja_templates = jinja_templates

        if "programs" in include:
            programs = self._load_programs()
            self._template_components["programs"] = programs
            self._programs = programs

        self.logger.info(f"Template components loaded ({include!r}).")

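    # The environment-merging rule implemented above, restated as a sketch
    # (`builtin_envs` and `user_envs` here are illustrative lists of dicts):
    # a user-supplied environment replaces any builtin environment that
    # shares the same `name`.
    #
    #     user_names = {e["name"] for e in user_envs}
    #     merged = [e for e in builtin_envs if e["name"] not in user_names] + user_envs
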
    @classmethod
    def load_builtin_template_component_data(
        cls, package: ModuleType | str
    ) -> BasicTemplateComponents:
        """
        Load the template component data built into the package.
        This is as opposed to the template components defined by users.
        """
        SDK_logger.info(
            f"Loading built-in template component data for package: {package!r}."
        )
        components: BasicTemplateComponents = {}
        for comp_type in TEMPLATE_COMP_TYPES:
            with open_text_resource(package, f"{comp_type}.yaml") as fh:
                SDK_logger.info(f"Parsing file as YAML: {fh.name!r}")
                source = f"from {Path(fh.name)!r}"
                components[comp_type] = read_YAML_str(fh.read(), source=source)

        return components

    @property
    def parameters(self) -> _ParametersList:
        """
        The known template parameters.
        """
        self._ensure_template_component("parameters")
        assert self._parameters is not None
        return self._parameters

    @property
    def command_files(self) -> _CommandFilesList:
        """
        The known template command files.
        """
        self._ensure_template_component("command_files")
        assert self._command_files is not None
        return self._command_files

    @property
    def envs(self) -> _EnvironmentsList:
        """
        The known template execution environments.
        """
        self._ensure_template_component("environments")
        assert self._environments is not None
        return self._environments

    @property
    def scripts(self) -> dict[str, Path]:
        """
        The known template scripts.
        """
        self._ensure_template_component("scripts")
        assert self._scripts is not None
        return self._scripts

    @property
    def jinja_templates(self) -> dict[str, Path]:
        """
        The known Jinja template files.
        """
        self._ensure_template_component("jinja_templates")
        assert self._jinja_templates is not None
        return self._jinja_templates

    @property
    def programs(self) -> dict[str, Path]:
        """
        The known programs.
        """
        self._ensure_template_component("programs")
        assert self._programs is not None
        return self._programs

    @property
    def task_schemas(self) -> _TaskSchemasList:
        """
        The known template task schemas.
        """
        self._ensure_template_component("task_schemas")
        assert self._task_schemas is not None
        return self._task_schemas

    @property
    def logger(self) -> Logger:
        """
        The main underlying logger.
        """
        return self.log.logger

    @property
    def API_logger(self) -> Logger:
        """
        The logger for API messages.
        """
        return self.logger.getChild("api")

    @property
    def CLI_logger(self) -> Logger:
        """
        The logger for CLI messages.
        """
        return self.logger.getChild("cli")

    @property
    def config_logger(self) -> Logger:
        """
        The logger for configuration messages.
        """
        return self.logger.getChild("config")

    @property
    def persistence_logger(self) -> Logger:
        """
        The logger for persistence engine messages.
        """
        return self.logger.getChild("persistence")

    @property
    def submission_logger(self) -> Logger:
        """
        The logger for job submission messages.
        """
        return self.logger.getChild("submission")

    @property
    def runtime_info_logger(self) -> Logger:
        """
        The logger for runtime messages.
        """
        return self.logger.getChild("runtime")

    @property
    def is_config_loaded(self) -> bool:
        """
        Whether the configuration is loaded.
        """
        return bool(self._config)

    @property
    def is_template_components_loaded(self) -> bool:
        """Whether any template component (e.g. parameters) has been loaded."""
        return bool(self._template_components)

    @property
    def config(self) -> Config:
        """
        The configuration.
        """
        if not self.is_config_loaded:
            self.load_config()
        assert self._config
        return self._config

    @property
    def scheduler_lookup(self) -> dict[tuple[str, str], type[Scheduler]]:
        """
        The scheduler mapping.
        """
        return {
            ("direct", "posix"): self.DirectPosix,
            ("direct", "nt"): self.DirectWindows,
            ("sge", "posix"): self.SGEPosix,
            ("slurm", "posix"): self.SlurmPosix,
        }

    def get_scheduler(
        self,
        scheduler_name: str,
        os_name: str,
        scheduler_args: dict[str, Any] | None = None,
    ) -> Scheduler:
        """Get an arbitrary scheduler object."""
        scheduler_kwargs = scheduler_args or {}

        os_name = os_name.lower()
        if os_name == "nt" and "_" in scheduler_name:
            # e.g. WSL on Windows uses *_posix
            key = tuple(scheduler_name.split("_"))
            assert len(key) == 2
        else:
            key = (scheduler_name.lower(), os_name)

        try:
            scheduler_cls = self.scheduler_lookup[key]
        except KeyError:
            raise ValueError(
                f"Unsupported combination of scheduler and operating system: {key!r}"
            )
        return scheduler_cls(**scheduler_kwargs)

    def get_OS_supported_schedulers(self) -> Iterator[str]:
        """
        Retrieve a list of schedulers that are supported in principle by this
        operating system.

        This does not necessarily mean all the returned schedulers are available on
        this system.
        """
        for k in self.scheduler_lookup:
            if os.name == "nt" and k == ("direct", "posix"):
                # this is valid for WSL on Windows
                yield "_".join(k)
            elif k[1] == os.name:
                yield k[0]

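    # Usage sketch for `get_scheduler`; the ("slurm", "posix") key comes from
    # `scheduler_lookup` above:
    #
    #     sched = app.get_scheduler("slurm", "posix")
    #
    # On Windows (os.name == "nt"), a name such as "slurm_posix" is split on
    # "_" to form the ("slurm", "posix") key, which supports WSL-hosted
    # schedulers.
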
    def perm_error_retry(self):
        """
        Return a decorator for retrying functions on permission and OS errors that
        might be associated with cloud-storage desktop sync-engine operations.
        """
        return retry(
            (PermissionError, OSError),
            tries=10,
            delay=1,
            backoff=2,
            logger=self.persistence_logger,
        )

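    # Usage sketch: the returned decorator retries on PermissionError/OSError
    # with exponential backoff (up to 10 tries, starting at a 1 s delay and
    # doubling each time); `move_file` is a hypothetical function:
    #
    #     @app.perm_error_retry()
    #     def move_file(src, dst):
    #         os.replace(src, dst)
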
    @property
    def user_data_dir(self) -> Path:
        """
        The user's data directory.
        """
        if self._user_data_dir is None:
            self._user_data_dir = Path(user_data_dir(appname=self.package_name))
        return self._user_data_dir

    @property
    def user_cache_dir(self) -> Path:
        """The user's cache directory."""
        if self._user_cache_dir is None:
            self._user_cache_dir = Path(user_cache_path(appname=self.package_name))
        return self._user_cache_dir

    @property
    def user_runtime_dir(self) -> Path:
        """The user's temporary runtime directory."""
        if self._user_runtime_dir is None:
            self._user_runtime_dir = self.user_data_dir.joinpath("temp")
        return self._user_runtime_dir

    @property
    def demo_data_cache_dir(self) -> Path:
        """A directory for example data caching."""
        if self._demo_data_cache_dir is None:
            self._demo_data_cache_dir = self.user_cache_dir.joinpath("demo_data")
        return self._demo_data_cache_dir

    @property
    def user_data_hostname_dir(self) -> Path:
        """
        The directory for holding user data.

        We segregate by hostname to account for the case where multiple machines
        might use the same shared file system.
        """
        # This might need to cover e.g. multiple login nodes, as described in the
        # config file:
        if self._user_data_hostname_dir is None:
            machine_name = self.config.get("machine")
            self._user_data_hostname_dir = self.user_data_dir.joinpath(machine_name)
        return self._user_data_hostname_dir

    @property
    def user_cache_hostname_dir(self) -> Path:
        """The hostname-scoped app cache directory."""
        if self._user_cache_hostname_dir is None:
            machine_name = self.config.get("machine")
            self._user_cache_hostname_dir = self.user_cache_dir.joinpath(machine_name)
        return self._user_cache_hostname_dir

    def _ensure_user_data_dir(self) -> Path:
        """Ensure a user data directory exists."""
        if not self.user_data_dir.exists():
            self.user_data_dir.mkdir(parents=True)
            self.logger.info(f"Created user data directory: {self.user_data_dir!r}.")
        return self.user_data_dir

    def _ensure_user_runtime_dir(self) -> Path:
        """
        Generate a user runtime directory for this machine in which we can create
        semi-persistent temporary files.

        Note
        ----
        Unlike `_ensure_user_data_dir` and `_ensure_user_data_hostname_dir`, this
        method is not invoked on config load, because it might need to be created
        after each reboot, and it is not routinely used.
        """
        if not self.user_runtime_dir.exists():
            self.user_runtime_dir.mkdir(parents=True)
            self.logger.info(
                f"Created user runtime directory: {self.user_runtime_dir!r}."
            )
        return self.user_runtime_dir

    def _ensure_user_cache_dir(self) -> Path:
        """Ensure a cache directory exists."""
        if not self.user_cache_dir.exists():
            self.user_cache_dir.mkdir(parents=True)
            self.logger.info(f"Created user cache directory: {self.user_cache_dir!r}.")
        return self.user_cache_dir

    def _ensure_demo_data_cache_dir(self) -> Path:
        """Ensure a cache directory for example data files exists."""
        if not self.demo_data_cache_dir.exists():
            self.demo_data_cache_dir.mkdir(parents=True)
            self.logger.info(
                f"Created example data cache directory: {self.demo_data_cache_dir!r}."
            )
        return self.demo_data_cache_dir

    def _ensure_user_data_hostname_dir(self) -> Path:
        """
        Ensure a user data directory for this machine exists (used by the helper
        process and the known-submissions file).
        """
        if not self.user_data_hostname_dir.exists():
            self.user_data_hostname_dir.mkdir(parents=True)
            self.logger.info(
                f"Created user data hostname directory: {self.user_data_hostname_dir!r}."
            )
        return self.user_data_hostname_dir

    def _ensure_user_cache_hostname_dir(self) -> Path:
        """Ensure a hostname-scoped cache directory exists."""
        if not self.user_cache_hostname_dir.exists():
            self.user_cache_hostname_dir.mkdir(parents=True)
            self.logger.info(
                f"Created hostname-scoped user cache directory: "
                f"{self.user_cache_hostname_dir!r}."
            )
        return self.user_cache_hostname_dir

    def clear_user_runtime_dir(self) -> None:
        """Delete the contents of the user runtime directory."""
        if self.user_runtime_dir.exists():
            shutil.rmtree(self.user_runtime_dir)
            self._ensure_user_runtime_dir()

    def clear_user_cache_dir(self) -> None:
        """Delete the contents of the cache directory."""
        if self.user_cache_dir.exists():
            shutil.rmtree(self.user_cache_dir)
            self._ensure_user_cache_dir()

    def clear_demo_data_cache_dir(self) -> None:
        """Delete the contents of the example data files cache directory."""
        if self.demo_data_cache_dir.exists():
            shutil.rmtree(self.demo_data_cache_dir)
            self._ensure_demo_data_cache_dir()

    def clear_user_cache_hostname_dir(self) -> None:
        """Delete the contents of the hostname-scoped cache directory."""
        if self.user_cache_hostname_dir.exists():
            shutil.rmtree(self.user_cache_hostname_dir)
            self._ensure_user_cache_hostname_dir()

    @TimeIt.decorator
    def _load_config(
        self, config_dir: PathLike, config_key: str | None, **overrides
    ) -> None:
        self.logger.info("Loading configuration.")
        self._ensure_user_data_dir()
        resolved_config_dir = ConfigFile._resolve_config_dir(
            config_opt=self.config_options,
            logger=self.config_logger,
            directory=config_dir,
        )
        if str(resolved_config_dir) not in self._config_files:
            self._config_files[str(resolved_config_dir)] = ConfigFile(
                directory=resolved_config_dir,
                logger=self.config_logger,
                config_options=self.config_options,
            )
        file = self._config_files[str(resolved_config_dir)]
        self._config = Config(
            app=self,
            config_file=file,
            options=self.config_options,
            config_key=config_key,
            logger=self.config_logger,
            variables={"app_name": self.name, "app_version": self.version},
            **overrides,
        )
        self.log.update_console_level(self.config.get("log_console_level"))
        log_file_path = self.config.get("log_file_path")
        if log_file_path:
            self.log.add_file_logger(
                path=log_file_path,
                level=self.config.get("log_file_level"),
            )
        self.logger.info(f"Configuration loaded from: {self.config.config_file_path}")
        self._ensure_user_data_hostname_dir()

    def load_config(
        self,
        config_dir: PathLike = None,
        config_key: str | None = None,
        warn: bool = True,
        **overrides,
    ) -> None:
        """
        Load the user's configuration.

        Parameters
        ----------
        config_dir:
            Directory containing the configuration, if not default.
        config_key:
            Key to the configuration within the config file.
        warn:
            Whether to warn if a configuration is already loaded.
        """
        if warn and self.is_config_loaded:
            warnings.warn("Configuration is already loaded; reloading.")
        self._load_config(config_dir, config_key, **overrides)

    def unload_config(self) -> None:
        """
        Discard any loaded configuration.
        """
        self._config_files = {}
        self._config = None

    def get_config_path(self, config_dir: PathLike = None) -> Path:
        """Return the full path to the config file, without loading the config."""
        config_dir = ConfigFile._resolve_config_dir(
            config_opt=self.config_options,
            logger=self.logger,
            directory=config_dir,
        )
        return ConfigFile.get_config_file_path(config_dir)

    def _delete_config_file(self, config_dir: PathLike = None) -> None:
        """Delete the config file."""
        config_path = self.get_config_path(config_dir=config_dir)
        self.logger.info(f"deleting config file: {str(config_path)!r}.")
        config_path.unlink()

    def reset_config(
        self,
        config_dir: PathLike = None,
        config_key: str | None = None,
        warn: bool = True,
        **overrides,
    ) -> None:
        """Reset the config file to defaults, and reload the config."""
        self.logger.info("resetting config")
        self._delete_config_file(config_dir=config_dir)
        self._config = None
        self._config_files = {}
        self.load_config(config_dir, config_key, warn=warn, **overrides)

    def reload_config(
        self,
        config_dir: PathLike = None,
        config_key: str | None = None,
        warn: bool = True,
        **overrides,
    ) -> None:
        """
        Reload the configuration. Use if a user has updated the configuration file
        outside the scope of this application.
        """
        if warn and not self.is_config_loaded:
            warnings.warn("Configuration is not loaded; loading.")
        self.log.remove_file_handler()
        self._config_files = {}
        self._load_config(config_dir, config_key, **overrides)

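    # Usage sketch for config loading; the directory and the override value
    # are illustrative (extra keyword arguments pass through as overrides):
    #
    #     app.load_config(config_dir="~/.myapp", log_console_level="debug")
    #     app.reload_config()  # pick up edits made outside the application
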
    @TimeIt.decorator
    def __load_builtin_files_from_nested_package(
        self, directory: str | None
    ) -> dict[str, Path]:
        """Discover where the built-in files are (scripts or Jinja templates)."""
        # TODO: load custom directories / custom functions (via decorator)

        # must include an `__init__.py` file:
        package = f"{self.package_name}.{directory}"

        out: dict[str, Path] = {}
        if not directory:
            return out
        try:
            with get_file_context(package) as path:
                for dirpath, _, filenames in os.walk(path):
                    dirpath_ = Path(dirpath)
                    if dirpath_.name == "__pycache__":
                        continue
                    for filename in filenames:
                        if filename == "__init__.py":
                            continue
                        val = dirpath_.joinpath(filename)
                        out[val.relative_to(path).as_posix()] = Path(val)
        except ModuleNotFoundError:
            self.logger.exception(f"failed to find built-in files at {package}.")
        SDK_logger.info(f"loaded {len(out)} files from {package}.")
        return out

    @TimeIt.decorator
    def _load_scripts(self) -> dict[str, Path]:
        """
        Discover where the built-in scripts are.
        """
        return self.__load_builtin_files_from_nested_package(self.scripts_dir)

    @TimeIt.decorator
    def _load_jinja_templates(self) -> dict[str, Path]:
        """
        Discover where the built-in Jinja templates are.
        """
        return self.__load_builtin_files_from_nested_package(self.jinja_templates_dir)

    @TimeIt.decorator
    def _load_programs(self) -> dict[str, Path]:
        """
        Discover where the built-in programs are.
        """
        return self.__load_builtin_files_from_nested_package(self.programs_dir)

    def _get_demo_workflows(self) -> dict[str, Path]:
        """Get all builtin demo workflow template file paths."""
        templates: dict[str, Path] = {}
        pkg = f"{self.package_name}.{self.workflows_dir}"
        for file in resources.files(pkg).iterdir():
            p = Path(str(file))
            if p.exists() and p.suffix in (".yaml", ".yml", ".json", ".jsonc"):
                templates[p.stem] = p
        return templates

    def list_demo_workflows(self) -> tuple[str, ...]:
        """Return a list of demo workflow templates included in the app."""
        return tuple(sorted(self._get_demo_workflows()))

    @contextmanager
    def get_demo_workflow_template_file(
        self, name: str, doc: bool = True, delete: bool = True
    ) -> Iterator[Path]:
        """
        Context manager to get a (temporary) file path to an included demo workflow
        template.

        Parameters
        ----------
        name:
            Name of the builtin demo workflow template whose file path is to be
            retrieved.
        doc:
            If False, the yielded path will be to a file without the `doc` attribute
            (if originally present).
        delete:
            If True, remove the temporary file on exit.
        """
        tmp_dir = self._ensure_user_runtime_dir()
        builtin_path = self._get_demo_workflows()[name]
        path = tmp_dir / builtin_path.name

        if doc:
            # copy the file to the temp location:
            path.write_text(builtin_path.read_text())
        else:
            # load the file, modify, then dump to temp location:
            if builtin_path.suffix in (".yaml", ".yml"):
                # use round-trip loader to preserve comments:
                data = read_YAML_file(builtin_path, typ="rt", variables={})
                data.pop("doc", None)
                write_YAML_file(data, path, typ="rt")

            elif builtin_path.suffix in (".json", ".jsonc"):
                data = read_JSON_file(builtin_path, variables={})
                data.pop("doc", None)
                write_JSON_file(data, path)

        yield path

        if delete:
            path.unlink()

    def copy_demo_workflow(
        self, name: str, dst: PathLike | None = None, doc: bool = True
    ) -> str:
        """
        Copy a builtin demo workflow to the specified location.

        Parameters
        ----------
        name
            The name of the demo workflow to copy.
        dst
            Directory or full file path to copy the demo workflow to. If not
            specified, the current working directory will be used.
        doc
            If False, the copied workflow template file will not include the `doc`
            attribute (if originally present).
        """
        dst = dst or Path(".")
        with self.get_demo_workflow_template_file(name, doc=doc) as src:
            shutil.copy2(src, dst)  # copies metadata, and `dst` can be a dir

        return src.name

    def show_demo_workflow(self, name: str, syntax: bool = True, doc: bool = False):
        """
        Print the contents of a builtin demo workflow template file.

        Parameters
        ----------
        name:
            The name of the demo workflow file to print.
        syntax:
            If True, use rich to syntax-highlight the output.
        doc:
            If False, the printed workflow template file contents will not include
            the `doc` attribute (if originally present).
        """
        with self.get_demo_workflow_template_file(name, doc=doc) as path:
            with path.open("rt") as fp:
                contents = fp.read()

        if syntax:
            fmt = DEMO_WK_FORMATS[path.suffix]
            Console().print(Syntax(contents, fmt))
        else:
            print(contents)

    def load_demo_workflow(
        self, name: str, variables: dict[str, str] | Literal[False] | None = None
    ) -> _WorkflowTemplate:
        """Load a WorkflowTemplate object from a builtin demo template file.

        Parameters
        ----------
        name:
            Name of the demo workflow to load.
        variables:
            String variables to substitute in the demo workflow. Substitutions will
            be attempted if the file looks to contain variable references (like
            "<<var:name>>"). If set to `False`, no substitutions will occur, which
            may result in an invalid workflow template!
        """
        with self.get_demo_workflow_template_file(name) as path:
            return self.WorkflowTemplate.from_file(path, variables=variables)

    def template_components_from_json_like(
        self, json_like: dict[str, dict]
    ) -> TemplateComponents:
        """
        Get template components from a (simply parsed) JSON document.
        """
        tc: TemplateComponents = {}
        sd: Mapping[str, Any] = tc
        tc["parameters"] = self.ParametersList.from_json_like(
            json_like.get("parameters", {}), shared_data=sd, is_hashed=True
        )
        tc["command_files"] = self.CommandFilesList.from_json_like(
            json_like.get("command_files", {}), shared_data=sd, is_hashed=True
        )
        tc["environments"] = self.EnvironmentsList.from_json_like(
            json_like.get("environments", {}), shared_data=sd, is_hashed=True
        )
        tc["task_schemas"] = self.TaskSchemasList.from_json_like(
            json_like.get("task_schemas", {}), shared_data=sd, is_hashed=True
        )
        return tc

    def get_parameter_task_schema_map(self) -> dict[str, list[list[str]]]:
        """
        Get a dict mapping parameter types to task schemas that input/output each
        parameter.
        """
        param_map: dict[str, list[list[str]]] = {}
        for ts in self.task_schemas:
            for inp in ts.inputs:
                if inp.parameter.typ not in param_map:
                    param_map[inp.parameter.typ] = [[], []]
                param_map[inp.parameter.typ][0].append(ts.objective.name)
            for out in ts.outputs:
                if out.parameter.typ not in param_map:
                    param_map[out.parameter.typ] = [[], []]
                param_map[out.parameter.typ][1].append(ts.objective.name)

        return param_map

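    # Shape of the mapping returned by `get_parameter_task_schema_map` (sketch;
    # parameter and schema names illustrative): index 0 lists schemas that take
    # the parameter as an input, index 1 those that produce it as an output:
    #
    #     {"p1": [["fit_model"], []], "p2": [["fit_model"], ["make_data"]]}
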
    def get_info(self) -> dict[str, Any]:
        """
        Get miscellaneous runtime system information.
        """
        return {
            "name": self.name,
            "version": self.version,
            "python_version": self.run_time_info.python_version,
            "is_frozen": self.run_time_info.is_frozen,
        }

    @property
    def known_subs_file_path(self) -> Path:
        """
        The path to the file describing known submissions.
        """
        return self.user_data_hostname_dir / self._known_subs_file_name

    def _format_known_submissions_line(
        self,
        local_id,
        workflow_id,
        submit_time,
        sub_idx,
        is_active,
        wk_path,
        start_time,
        end_time,
    ) -> str:
        line = [
            str(local_id),
            workflow_id,
            str(int(is_active)),
            str(sub_idx),
            submit_time,
            str(wk_path),
            start_time,
            end_time,
        ]
        return self._known_subs_file_sep.join(line) + "\n"

    def _parse_known_submissions_line(self, line: str) -> KnownSubmission:
        (
            local_id,
            workflow_id,
            is_active,
            sub_idx,
            submit_time,
            path_i,
            start_time,
            end_time,
        ) = line.split(self._known_subs_file_sep, maxsplit=7)
        return {
            "local_id": int(local_id),
            "workflow_id": workflow_id,
            "is_active": bool(int(is_active)),
            "sub_idx": int(sub_idx),
            "submit_time": submit_time,
            "path": path_i,
            "start_time": start_time,
            "end_time": end_time.strip(),
        }

    @TimeIt.decorator
    def read_known_submissions_file(self) -> list[KnownSubmission]:
        """Retrieve existing workflows that *might* be running."""
        with self.known_subs_file_path.open("rt", newline="\n") as fh:
            return [self._parse_known_submissions_line(ln) for ln in fh.readlines()]

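    # A worked example of the known-submissions line format, assuming (purely
    # for illustration) that `_known_subs_file_sep` were ";". One serialised
    # record with empty start/end times would then round-trip as:
    #
    #     >>> line = "4;a1b2c3;1;0;2024-01-01T09:00:00;/home/me/wk;;\n"
    #     >>> app._parse_known_submissions_line(line)  # `app` hypothetical
    #     {'local_id': 4, 'workflow_id': 'a1b2c3', 'is_active': True,
    #      'sub_idx': 0, 'submit_time': '2024-01-01T09:00:00',
    #      'path': '/home/me/wk', 'start_time': '', 'end_time': ''}
    #
    # `maxsplit=7` keeps any separator characters within the final (end-time)
    # field intact, and the trailing newline is removed by `.strip()`.
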
    def _add_to_known_submissions(
        self,
        wk_path: PathLike,
        wk_id: str,
        sub_idx: int,
        sub_time: str,
    ) -> int:
        """
        Ensure the specified workflow submission is in the known-submissions file and
        return the associated local ID.
        """
        try:
            known = self.read_known_submissions_file()
        except FileNotFoundError:
            known = []

        wk_path = str(wk_path)
        all_ids = []
        for known_sub in known:
            all_ids.append(known_sub["local_id"])
            if (
                wk_path == known_sub["path"]
                and sub_idx == known_sub["sub_idx"]
                and sub_time == known_sub["submit_time"]
            ):
                # workflow submission part already present
                return known_sub["local_id"]

        # get the next available local ID:
        if all_ids:
            avail = set(range(0, max(all_ids) + 1)).difference(all_ids)
            next_id = min(avail) if avail else max(all_ids) + 1
        else:
            next_id = 0

        run_line = self._format_known_submissions_line(
            local_id=next_id,
            workflow_id=wk_id,
            is_active=True,
            submit_time=sub_time,
            sub_idx=sub_idx,
            wk_path=wk_path,
            start_time="",
            end_time="",
        )
        with self.known_subs_file_path.open("at", newline="\n") as fh:
            # TODO: check wk_path is an absolute path? what about if a remote fsspec path?
            self.submission_logger.info(
                f"adding to known-submissions file workflow path: {wk_path}"
            )
            fh.write(run_line)

        return next_id

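    # The next-available-ID logic above reuses the smallest free local ID
    # before growing the range. A minimal sketch of the same computation:
    #
    #     >>> all_ids = [0, 1, 3]
    #     >>> avail = set(range(0, max(all_ids) + 1)).difference(all_ids)
    #     >>> min(avail) if avail else max(all_ids) + 1
    #     2
    #     >>> all_ids = [0, 1, 2]
    #     >>> avail = set(range(0, max(all_ids) + 1)).difference(all_ids)
    #     >>> min(avail) if avail else max(all_ids) + 1
    #     3
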
    @TimeIt.decorator
    def update_known_subs_file(
        self,
        inactive_IDs: list[int],
        start_times: dict[int, str],
        end_times: dict[int, str],
    ) -> list[int]:
        """
        Update submission records in the known-submissions file.

        Note
        ----
        We aim for atomicity to help with the scenario where a new workflow
        submission is adding itself to the file at the same time as we have decided an
        existing workflow should no longer be part of this file. Ideally, such a scenario
        should not arise because both operations should only ever be interactively
        initiated by the single user (`Workflow.submit` and `App.get_known_submissions`).
        If this operation is atomic, then at least the known-submissions file should be
        left in a usable (but inaccurate) state.

        Returns
        -------
        list[int]
            List of local IDs removed from the known-submissions file due to the maximum
            number of recent workflows to store being exceeded.
        """
        self.submission_logger.info(
            f"setting these local IDs to inactive in known-submissions file: "
            f"{inactive_IDs}"
        )

        max_inactive = 10

        # keys are line indices of non-running submissions, values are submission
        # date-times:
        line_date: dict[int, str] = {}

        removed_IDs: list[int] = (
            []
        )  # which submissions we completely remove from the file

        new_lines: list[str] = []
        line_IDs: list[int] = []
        for ln_idx, line in enumerate(self.known_subs_file_path.read_text().split("\n")):
            if not line.strip():
                continue
            item = self._parse_known_submissions_line(line)
            line_IDs.append(item["local_id"])
            shows_as_active = item["is_active"]
            is_inactive = item["local_id"] in inactive_IDs
            start_time = item["start_time"] or start_times.get(item["local_id"], "")
            end_time = item["end_time"] or end_times.get(item["local_id"], "")

            update_inactive = is_inactive and shows_as_active
            update_start = item["local_id"] in start_times
            update_end = item["local_id"] in end_times

            if update_inactive or update_start or update_end:
                updated = self._format_known_submissions_line(
                    local_id=item["local_id"],
                    workflow_id=item["workflow_id"],
                    is_active=not is_inactive,
                    submit_time=item["submit_time"],
                    sub_idx=item["sub_idx"],
                    wk_path=item["path"],
                    start_time=start_time,
                    end_time=end_time,
                )
                new_lines.append(updated)

                self.submission_logger.debug(
                    f"Updating (workflow, submission) from the known-submissions file: "
                    f"{'set to inactive; ' if update_inactive else ''}"
                    f"{f'set start_time: {start_time!r}; ' if update_start else ''}"
                    f"{f'set end_time: {end_time!r}; ' if update_end else ''}"
                    f"({item['path']}, {item['sub_idx']})"
                )
            else:
                # leave this one alone:
                new_lines.append(line + "\n")

            if is_inactive:
                line_date[ln_idx] = item["submit_time"]

        ld_srt_idx = sorted(line_date, key=lambda x: line_date[x])

        if len(line_date) > max_inactive:
            # remove oldest inactive submissions:
            num_remove = len(line_date) - max_inactive
            self.submission_logger.debug(
                f"will remove {num_remove} inactive workflow submissions from the "
                f"known-submissions file because the maximum number of stored inactive "
                f"workflows ({max_inactive}) has been exceeded."
            )

            # sort in reverse so we can remove indices from new_lines:
            for i in sorted(ld_srt_idx[:num_remove], reverse=True):
                new_lines.pop(i)
                removed_IDs.append(line_IDs.pop(i))

        # write the temp file:
        tmp_file = self.known_subs_file_path.with_suffix(
            self.known_subs_file_path.suffix + ".tmp"
        )
        with tmp_file.open("wt", newline="\n") as fh:
            fh.writelines(new_lines)

        # hopefully atomic rename:
        os.replace(src=tmp_file, dst=self.known_subs_file_path)
        self.submission_logger.debug("known-submissions file updated")

        return removed_IDs

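    # The write-then-replace pattern above is the standard way to get an
    # (effectively) atomic file update: readers always see either the old or
    # the new file, never a half-written one. `os.replace` is atomic on POSIX
    # when source and destination are on the same filesystem, and unlike the
    # historical `os.rename` behaviour it also overwrites an existing
    # destination on Windows. A minimal sketch, with hypothetical paths:
    #
    #     >>> import os
    #     >>> from pathlib import Path
    #     >>> target = Path("known_subs.txt")
    #     >>> tmp = target.with_suffix(target.suffix + ".tmp")
    #     >>> tmp.write_text("new contents\n")
    #     13
    #     >>> os.replace(tmp, target)  # swap in the new version in one step
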
    def clear_known_submissions_file(self) -> None:
        """
        Clear the known-submissions file of all submissions. This shouldn't be needed
        normally.
        """
        self.submission_logger.warning(
            f"clearing the known-submissions file at {self.known_subs_file_path}"
        )
        with self.known_subs_file_path.open("wt", newline="\n"):
            pass

    def _make_workflow(
        self,
        template_file_or_str: PathLike | str,
        is_string: bool = False,
        template_format: Literal["json", "yaml"] | None = None,
        path: PathLike | None = None,
        name: str | None = None,
        name_add_timestamp: bool | None = None,
        name_use_dir: bool | None = None,
        overwrite: bool = False,
        store: str = DEFAULT_STORE_FORMAT,
        ts_fmt: str | None = None,
        ts_name_fmt: str | None = None,
        store_kwargs: dict[str, Any] | None = None,
        variables: dict[str, str] | None = None,
        status: bool = True,
        add_submission: bool = False,
    ) -> _Workflow | _Submission | None:
        """
        Generate a new {app_name} workflow from a file or string containing a workflow
        template parametrisation.

        Parameters
        ----------
        template_file_or_str
            Either a path to a template file in YAML or JSON format, or a YAML/JSON string.
        is_string
            Determines whether `template_file_or_str` is a file path or a template string.
        template_format
            If specified, one of "json" or "yaml". This forces parsing from a particular
            format.
        path
            The directory in which the workflow will be generated. If not specified, the
            config item `default_workflow_path` will be used; if that is not set, the
            current directory is used.
        name
            The name to use for the workflow. If not provided, the name will be set to
            that of the template (optionally suffixed by a date-timestamp if
            `name_add_timestamp` is True).
        name_add_timestamp: bool
            If True, suffix the name with a date-timestamp. A default value can be set
            with the config item `workflow_name_add_timestamp`; otherwise set to `True`.
        name_use_dir: bool
            If True, and `name_add_timestamp` is also True, the workflow directory name
            will be just the date-timestamp, and will be contained within a parent
            directory corresponding to the workflow name. A default value can be set
            with the config item `workflow_name_use_dir`; otherwise set to `False`.
        overwrite
            If True and the workflow directory (`path` + `name`) already exists, the
            existing directory will be overwritten.
        store
            The persistent store type to use.
        ts_fmt
            The datetime format to use for storing datetimes. Datetimes are always stored
            in UTC (because Numpy does not store time zone info), so this should not
            include a time zone name.
        ts_name_fmt
            The datetime format to use when generating the workflow name, where it
            includes a timestamp.
        store_kwargs
            Keyword arguments to pass to the store's `write_empty_workflow` method.
        variables
            String variables to substitute in `template_file_or_str`.
        status
            If True, display a live status to track workflow creation progress.
        add_submission
            If True, add a submission to the workflow (but do not submit).

        Returns
        -------
        Workflow
            The created workflow, if `add_submission` is `False`.
        Submission
            The created submission object, if `add_submission` is `True`.
        """
        self.API_logger.info("make_workflow called")

        status_context: AbstractContextManager[Status] | AbstractContextManager[None] = (
            Console().status("Making persistent workflow...") if status else nullcontext()
        )

        with status_context as status_:

            common: MakeWorkflowCommonArgs = {
                "path": str(path) if path else None,
                "name": name,
                "name_add_timestamp": name_add_timestamp,
                "name_use_dir": name_use_dir,
                "overwrite": overwrite,
                "store": store,
                "ts_fmt": ts_fmt,
                "ts_name_fmt": ts_name_fmt,
                "store_kwargs": store_kwargs,
                "variables": variables,
                "status": status_,
            }
            if not is_string:
                wk = self.Workflow.from_file(
                    template_path=template_file_or_str,
                    template_format=template_format,
                    **common,
                )
            elif template_format == "json":
                wk = self.Workflow.from_JSON_string(
                    JSON_str=str(template_file_or_str), **common
                )
            elif template_format == "yaml":
                wk = self.Workflow.from_YAML_string(
                    YAML_str=str(template_file_or_str), **common
                )
            elif not template_format:
                raise ValueError(
                    f"Must specify `template_format` if parsing a workflow template from a "
                    f"string; available options are: {ALL_TEMPLATE_FORMATS!r}."
                )
            else:
                raise ValueError(
                    f"Template format {template_format!r} not understood. Available template "
                    f"formats are {ALL_TEMPLATE_FORMATS!r}."
                )
            if add_submission:
                with wk._store.cached_load(), wk.batch_update():
                    return wk._add_submission(status=status_)

            return wk

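    # A minimal usage sketch, assuming this private method is reached via a
    # public `make_workflow` wrapper on an app instance `app` (both names
    # assumed) and using a trivial inline YAML template (contents
    # illustrative):
    #
    #     >>> yaml_str = """
    #     ... name: my_workflow
    #     ... tasks: []
    #     ... """
    #     >>> wk = app.make_workflow(
    #     ...     yaml_str, is_string=True, template_format="yaml"
    #     ... )
    #
    # Omitting `template_format` with `is_string=True` raises `ValueError`,
    # since a bare string cannot be reliably sniffed as YAML vs JSON.
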
    def _make_and_submit_workflow(
        self,
        template_file_or_str: PathLike | str,
        is_string: bool = False,
        template_format: Literal["json", "yaml"] | None = None,
        path: PathLike | None = None,
        name: str | None = None,
        name_add_timestamp: bool | None = None,
        name_use_dir: bool | None = None,
        overwrite: bool = False,
        store: str = DEFAULT_STORE_FORMAT,
        ts_fmt: str | None = None,
        ts_name_fmt: str | None = None,
        store_kwargs: dict[str, Any] | None = None,
        variables: dict[str, str] | None = None,
        JS_parallelism: bool | Literal["direct", "scheduled"] | None = None,
        wait: bool = False,
        add_to_known: bool = True,
        return_idx: bool = False,
        tasks: list[int] | None = None,
        cancel: bool = False,
        status: bool = True,
    ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
        """
        Generate and submit a new {app_name} workflow from a file or string containing a
        workflow template parametrisation.

        Parameters
        ----------
        template_file_or_str
            Either a path to a template file in YAML or JSON format, or a YAML/JSON string.
        is_string
            Determines whether `template_file_or_str` is a string or a file.
        template_format
            If specified, one of "json" or "yaml". This forces parsing from a particular
            format.
        path
            The directory in which the workflow will be generated. If not specified, the
            config item `default_workflow_path` will be used; if that is not set, the
            current directory is used.
        name
            The name to use for the workflow. If not provided, the name will be set to
            that of the template (optionally suffixed by a date-timestamp if
            `name_add_timestamp` is True).
        name_add_timestamp: bool
            If True, suffix the name with a date-timestamp. A default value can be set
            with the config item `workflow_name_add_timestamp`; otherwise set to `True`.
        name_use_dir: bool
            If True, and `name_add_timestamp` is also True, the workflow directory name
            will be just the date-timestamp, and will be contained within a parent
            directory corresponding to the workflow name. A default value can be set
            with the config item `workflow_name_use_dir`; otherwise set to `False`.
        overwrite
            If True and the workflow directory (`path` + `name`) already exists, the
            existing directory will be overwritten.
        store
            The persistent store to use for this workflow.
        ts_fmt
            The datetime format to use for storing datetimes. Datetimes are always stored
            in UTC (because Numpy does not store time zone info), so this should not
            include a time zone name.
        ts_name_fmt
            The datetime format to use when generating the workflow name, where it
            includes a timestamp.
        store_kwargs
            Keyword arguments to pass to the store's `write_empty_workflow` method.
        variables
            String variables to substitute in `template_file_or_str`.
        JS_parallelism
            If True, allow multiple jobscripts to execute simultaneously. If
            'scheduled'/'direct', only allow simultaneous execution of scheduled/direct
            jobscripts. Raises if set to True, 'scheduled', or 'direct', but the store
            type does not support the `jobscript_parallelism` feature. If not set,
            jobscript parallelism will be used if the store type supports it, for
            scheduled jobscripts only.
        wait
            If True, this command will block until the workflow execution is complete.
        add_to_known
            If True, add the new submission to the known-submissions file, which is
            used by the `show` command to monitor current and recent submissions.
        return_idx
            If True, return a dict representing the jobscript indices submitted for each
            submission.
        tasks
            List of task indices to include in this submission. By default all tasks are
            included.
        cancel
            Immediately cancel the submission. Useful for testing and benchmarking.
        status
            If True, display a live status to track workflow creation and submission
            progress.

        Returns
        -------
        Workflow
            The created workflow.
        dict[int, list[int]]
            Mapping of submission handles, if requested by the ``return_idx`` parameter.
        """
        self.API_logger.info("make_and_submit_workflow called")

        wk = self._make_workflow(
            template_file_or_str=template_file_or_str,
            is_string=is_string,
            template_format=template_format,
            path=path,
            name=name,
            name_add_timestamp=name_add_timestamp,
            name_use_dir=name_use_dir,
            overwrite=overwrite,
            store=store,
            ts_fmt=ts_fmt,
            ts_name_fmt=ts_name_fmt,
            store_kwargs=store_kwargs,
            variables=variables,
            status=status,
        )
        assert isinstance(wk, _Workflow)
        submitted_js = wk.submit(
            JS_parallelism=JS_parallelism,
            wait=wait,
            add_to_known=add_to_known,
            return_idx=True,
            tasks=tasks,
            cancel=cancel,
            status=status,
        )
        if return_idx:
            return (wk, submitted_js)
        else:
            return wk

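    # With `return_idx=True`, the second element of the returned tuple maps
    # each submission index to the jobscript indices it submitted. A sketch of
    # a plausible result for a single submission of two jobscripts (wrapper
    # name and values assumed):
    #
    #     >>> wk, idx = app.make_and_submit_workflow(
    #     ...     "template.yaml", return_idx=True
    #     ... )
    #     >>> idx
    #     {0: [0, 1]}
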
    def _make_demo_workflow(
        self,
        workflow_name: str,
        template_format: Literal["json", "yaml"] | None = None,
        path: PathLike | None = None,
        name: str | None = None,
        name_add_timestamp: bool | None = None,
        name_use_dir: bool | None = None,
        overwrite: bool = False,
        store: str = DEFAULT_STORE_FORMAT,
        ts_fmt: str | None = None,
        ts_name_fmt: str | None = None,
        store_kwargs: dict[str, Any] | None = None,
        variables: dict[str, str] | None = None,
        status: bool = True,
        add_submission: bool = False,
    ) -> _Workflow | _Submission | None:
        """
        Generate a new {app_name} workflow from a builtin demo workflow template.

        Parameters
        ----------
        workflow_name
            Name of the demo workflow to make.
        template_format
            If specified, one of "json" or "yaml". This forces parsing from a particular
            format.
        path
            The directory in which the workflow will be generated. If not specified, the
            config item `default_workflow_path` will be used; if that is not set, the
            current directory is used.
        name
            The name to use for the workflow. If not provided, the name will be set to
            that of the template (optionally suffixed by a date-timestamp if
            `name_add_timestamp` is True).
        name_add_timestamp: bool
            If True, suffix the name with a date-timestamp. A default value can be set
            with the config item `workflow_name_add_timestamp`; otherwise set to `True`.
        name_use_dir: bool
            If True, and `name_add_timestamp` is also True, the workflow directory name
            will be just the date-timestamp, and will be contained within a parent
            directory corresponding to the workflow name. A default value can be set
            with the config item `workflow_name_use_dir`; otherwise set to `False`.
        overwrite
            If True and the workflow directory (`path` + `name`) already exists, the
            existing directory will be overwritten.
        store
            The persistent store type to use.
        ts_fmt
            The datetime format to use for storing datetimes. Datetimes are always stored
            in UTC (because Numpy does not store time zone info), so this should not
            include a time zone name.
        ts_name_fmt
            The datetime format to use when generating the workflow name, where it
            includes a timestamp.
        store_kwargs
            Keyword arguments to pass to the store's `write_empty_workflow` method.
        variables
            String variables to substitute in the demo workflow template file.
        status
            If True, display a live status to track workflow creation progress.
        add_submission
            If True, add a submission to the workflow (but do not submit).

        Returns
        -------
        Workflow
            The created workflow, if `add_submission` is `False`.
        Submission
            The created submission object, if `add_submission` is `True`.
        """
        self.API_logger.info("make_demo_workflow called")

        status_context: AbstractContextManager[Status] | AbstractContextManager[None] = (
            Console().status("Making persistent workflow...") if status else nullcontext()
        )

        with status_context as status_, self.get_demo_workflow_template_file(
            workflow_name
        ) as template_path:
            wk = self.Workflow.from_file(
                template_path=template_path,
                template_format=template_format,
                path=str(path) if path else None,
                name=name,
                name_add_timestamp=name_add_timestamp,
                name_use_dir=name_use_dir,
                overwrite=overwrite,
                store=store,
                ts_fmt=ts_fmt,
                ts_name_fmt=ts_name_fmt,
                store_kwargs=store_kwargs,
                variables=variables,
                status=status_,
            )
            if add_submission:
                with wk._store.cached_load():
                    with wk.batch_update():
                        return wk._add_submission(status=status_)
            return wk

    def _make_and_submit_demo_workflow(
        self,
        workflow_name: str,
        template_format: Literal["json", "yaml"] | None = None,
        path: PathLike | None = None,
        name: str | None = None,
        name_add_timestamp: bool | None = None,
        name_use_dir: bool | None = None,
        overwrite: bool = False,
        store: str = DEFAULT_STORE_FORMAT,
        ts_fmt: str | None = None,
        ts_name_fmt: str | None = None,
        store_kwargs: dict[str, Any] | None = None,
        variables: dict[str, str] | None = None,
        JS_parallelism: bool | Literal["direct", "scheduled"] | None = None,
        wait: bool = False,
        add_to_known: bool = True,
        return_idx: bool = False,
        tasks: list[int] | None = None,
        cancel: bool = False,
        status: bool = True,
    ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
        """
        Generate and submit a new {app_name} workflow from a builtin demo workflow
        template.

        Parameters
        ----------
        workflow_name
            Name of the demo workflow to make.
        template_format
            If specified, one of "json" or "yaml". This forces parsing from a particular
            format.
        path
            The directory in which the workflow will be generated. If not specified, the
            config item `default_workflow_path` will be used; if that is not set, the
            current directory is used.
        name
            The name to use for the workflow. If not provided, the name will be set to
            that of the template (optionally suffixed by a date-timestamp if
            `name_add_timestamp` is True).
        name_add_timestamp: bool
            If True, suffix the name with a date-timestamp. A default value can be set
            with the config item `workflow_name_add_timestamp`; otherwise set to `True`.
        name_use_dir: bool
            If True, and `name_add_timestamp` is also True, the workflow directory name
            will be just the date-timestamp, and will be contained within a parent
            directory corresponding to the workflow name. A default value can be set
            with the config item `workflow_name_use_dir`; otherwise set to `False`.
        overwrite
            If True and the workflow directory (`path` + `name`) already exists, the
            existing directory will be overwritten.
        store
            The persistent store to use for this workflow.
        ts_fmt
            The datetime format to use for storing datetimes. Datetimes are always stored
            in UTC (because Numpy does not store time zone info), so this should not
            include a time zone name.
        ts_name_fmt
            The datetime format to use when generating the workflow name, where it
            includes a timestamp.
        store_kwargs
            Keyword arguments to pass to the store's `write_empty_workflow` method.
        variables
            String variables to substitute in the demo workflow template file.
        JS_parallelism
            If True, allow multiple jobscripts to execute simultaneously. If
            'scheduled'/'direct', only allow simultaneous execution of scheduled/direct
            jobscripts. Raises if set to True, 'scheduled', or 'direct', but the store
            type does not support the `jobscript_parallelism` feature. If not set,
            jobscript parallelism will be used if the store type supports it, for
            scheduled jobscripts only.
        wait
            If True, this command will block until the workflow execution is complete.
        add_to_known
            If True, add the new submission to the known-submissions file, which is
            used by the `show` command to monitor current and recent submissions.
        return_idx
            If True, return a dict representing the jobscript indices submitted for each
            submission.
        tasks
            List of task indices to include in this submission. By default all tasks are
            included.
        cancel
            Immediately cancel the submission. Useful for testing and benchmarking.
        status
            If True, display a live status to track submission progress.

        Returns
        -------
        Workflow
            The created workflow.
        dict[int, list[int]]
            Mapping of submission handles, if requested by the ``return_idx`` parameter.
        """
        self.API_logger.info("make_and_submit_demo_workflow called")

        wk = self._make_demo_workflow(
            workflow_name=workflow_name,
            template_format=template_format,
            path=path,
            name=name,
            name_add_timestamp=name_add_timestamp,
            name_use_dir=name_use_dir,
            overwrite=overwrite,
            store=store,
            ts_fmt=ts_fmt,
            ts_name_fmt=ts_name_fmt,
            store_kwargs=store_kwargs,
            variables=variables,
        )
        assert isinstance(wk, _Workflow)
        submitted_js = wk.submit(
            JS_parallelism=JS_parallelism,
            wait=wait,
            add_to_known=add_to_known,
            return_idx=True,
            tasks=tasks,
            cancel=cancel,
            status=status,
        )
        if return_idx:
            return (wk, submitted_js)
        else:
            return wk

    def _submit_workflow(
        self,
        workflow_path: PathLike,
        JS_parallelism: bool | Literal["direct", "scheduled"] | None = None,
        wait: bool = False,
        return_idx: bool = False,
        tasks: list[int] | None = None,
    ) -> Mapping[int, Sequence[int]] | None:
        """
        Submit an existing {app_name} workflow.

        Parameters
        ----------
        workflow_path:
            Path to an existing workflow.
        JS_parallelism:
            If True, allow multiple jobscripts to execute simultaneously. If
            'scheduled'/'direct', only allow simultaneous execution of scheduled/direct
            jobscripts. Raises if set to True, 'scheduled', or 'direct', but the store
            type does not support the `jobscript_parallelism` feature. If not set,
            jobscript parallelism will be used if the store type supports it, for
            scheduled jobscripts only.
        wait:
            Whether to wait for the submission to complete.
        return_idx:
            Whether to return the index information.
        tasks:
            List of task indices to include in this submission. By default all tasks are
            included.

        Returns
        -------
        dict[int, list[int]]
            Mapping of submission handles, if requested by the ``return_idx`` parameter.
        """
        self.API_logger.info("submit_workflow called")
        assert workflow_path is not None
        wk = self.Workflow(workflow_path)
        if return_idx:
            return wk.submit(
                JS_parallelism=JS_parallelism,
                wait=wait,
                return_idx=True,
                tasks=tasks,
            )
        wk.submit(JS_parallelism=JS_parallelism, wait=wait, tasks=tasks)
        return None

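    # A usage sketch for submitting an existing workflow by path, assuming a
    # public `submit_workflow` wrapper (wrapper name assumed) and a previously
    # created workflow directory:
    #
    #     >>> idx = app.submit_workflow(
    #     ...     "/path/to/my_workflow", return_idx=True
    #     ... )
    #     >>> idx  # submission index -> jobscript indices (illustrative)
    #     {0: [0]}
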
    def _run_hpcflow_tests(self, *args: str) -> int:
        """Run hpcflow test suite. This function is only available from derived apps."""
        from hpcflow import app as hf

        return hf.app.run_tests(*args)

    def _run_tests(self, *args: str) -> int:
        """Run {app_name} test suite."""
        try:
            import pytest
        except ModuleNotFoundError:
            raise RuntimeError(
                f"{self.name} has not been built with testing dependencies."
            )
        with get_file_context(self.package_name, "tests") as test_dir:
            return pytest.main([str(test_dir), *(self.pytest_args or ()), *args])

    def _get_OS_info(self) -> Mapping[str, str]:
        """Get information about the operating system."""
        os_name = os.name
        if os_name == "posix":
            return get_OS_info_POSIX(
                linux_release_file=self.config.get("linux_release_file")
            )
        elif os_name == "nt":
            return get_OS_info_windows()
        else:
            raise Exception(f"unsupported OS '{os_name}'")

    def _get_shell_info(
        self,
        shell_name: str,
        exclude_os: bool = False,
    ) -> VersionInfo:
        """
        Get information about a given shell and the operating system.

        Parameters
        ----------
        shell_name:
            One of the supported shell names.
        exclude_os:
            If True, exclude operating system information.
        """
        shell = get_shell(
            shell_name=shell_name,
            os_args={"linux_release_file": self.config.linux_release_file},
        )
        return shell.get_version_info(exclude_os)

    @TimeIt.decorator
    def _get_known_submissions(
        self,
        max_recent: int = 3,
        no_update: bool = False,
        as_json: bool = False,
        status: Status | None = None,
    ) -> Sequence[KnownSubmissionItem]:
        """
        Retrieve information about active and recently inactive {app_name} workflows.

        This method removes workflows from the known-submissions file that are found to be
        inactive on this machine (according to the scheduler/process ID).

        Parameters
        ----------
        max_recent:
            Maximum number of inactive workflows to retrieve.
        no_update:
            If True, do not update the known-submissions file to flag submissions that
            are now inactive.
        as_json:
            If True, only include JSON-compatible information. This will exclude the
            `submission` key, for instance.
        """
        out: list[KnownSubmissionItem] = []
        inactive_IDs: list[int] = []
        start_times: dict[int, str] = {}
        end_times: dict[int, str] = {}

        ts_fmt = self._submission_ts_fmt

        try:
            if status:
                status.update("Reading known submissions file...")
            known_subs = self.read_known_submissions_file()
        except FileNotFoundError:
            known_subs = []

        # keys are (workflow path, submission index)
        active_jobscripts: dict[
            tuple[str, int],
            Mapping[int, Mapping[int, Mapping[int, JobscriptElementState]]],
        ] = {}
        loaded_workflows: dict[str, _Workflow] = {}  # keys are workflow path

        # loop in reverse so we process more-recent submissions first:
        for file_dat_i in known_subs[::-1]:
            submit_time_str = file_dat_i["submit_time"]
            submit_time_obj = parse_timestamp(submit_time_str, ts_fmt)

            start_time_str = file_dat_i["start_time"]
            start_time_obj = None
            if start_time_str:
                start_time_obj = parse_timestamp(start_time_str, ts_fmt)

            end_time_str = file_dat_i["end_time"]
            end_time_obj = None
            if end_time_str:
                end_time_obj = parse_timestamp(end_time_str, ts_fmt)

            out_item: KnownSubmissionItem = {
                "local_id": file_dat_i["local_id"],
                "workflow_id": file_dat_i["workflow_id"],
                "workflow_path": file_dat_i["path"],
                "submit_time": submit_time_str,
                "submit_time_obj": submit_time_obj,
                "start_time": start_time_str,
                "start_time_obj": start_time_obj,
                "end_time": end_time_str,
                "end_time_obj": end_time_obj,
                "sub_idx": file_dat_i["sub_idx"],
                "jobscripts": [],
                "active_jobscripts": {},
                "deleted": False,
                "unloadable": False,
            }
            if file_dat_i["path"] in loaded_workflows:
                wk_i = loaded_workflows[file_dat_i["path"]]
            else:
                # might have been moved/archived/deleted:
                path_exists = Path(file_dat_i["path"]).exists()
                out_item["deleted"] = not path_exists
                if path_exists:
                    try:
                        if status:
                            status.update(f"Inspecting workflow {file_dat_i['path']!r}.")
                        wk_i = self.Workflow(file_dat_i["path"])
                    except KeyboardInterrupt:
                        raise
                    except Exception:
                        wk_i = None
                        self.submission_logger.info(
                            f"cannot load workflow from known-submissions file: "
                            f"{file_dat_i['path']!r}!"
                        )
                        out_item["unloadable"] = True
                        if file_dat_i["is_active"]:
                            inactive_IDs.append(file_dat_i["local_id"])
                            file_dat_i["is_active"] = False

                    else:
                        # cache:
                        loaded_workflows[file_dat_i["path"]] = wk_i
                else:
                    wk_i = None

            if wk_i:
                if wk_i.id_ != file_dat_i["workflow_id"]:
                    # overwritten with a new workflow
                    if file_dat_i["is_active"]:
                        inactive_IDs.append(file_dat_i["local_id"])
                    out_item["deleted"] = True

                else:
                    with wk_i._store.cache_ctx():
                        sub = wk_i.submissions[file_dat_i["sub_idx"]]
                        if status:
                            status.update(
                                f"Loading workflow {file_dat_i['path']!r} run metadata..."
                            )
                        sub.use_EARs_cache = True  # pre-cache EARs of this submission

                        all_jobscripts = sub._submission_parts[submit_time_str]
                        out_item["jobscripts"] = all_jobscripts
                        out_item["submission"] = sub
                        if not out_item["start_time"]:
                            start_time_obj = sub.start_time
                            if start_time_obj:
                                start_time = datetime.strftime(start_time_obj, ts_fmt)
                                out_item["start_time"] = start_time
                                start_times[file_dat_i["local_id"]] = start_time
                            out_item["start_time_obj"] = start_time_obj

                        if not out_item["end_time"]:
                            end_time_obj = sub.end_time
                            if end_time_obj:
                                end_time = datetime.strftime(end_time_obj, ts_fmt)
                                out_item["end_time"] = end_time
                                end_times[file_dat_i["local_id"]] = end_time
                            out_item["end_time_obj"] = end_time_obj

                        if file_dat_i["is_active"]:
                            # check it really is active:
                            run_key = (file_dat_i["path"], file_dat_i["sub_idx"])
                            act_i_js: Mapping[
                                int, Mapping[int, Mapping[int, JobscriptElementState]]
                            ]
                            if run_key in active_jobscripts:
                                act_i_js = active_jobscripts[run_key]
                            else:
                                try:
                                    if as_json:
                                        act_i_js = cast(  # not actually used?
                                            Any, sub.get_active_jobscripts(as_json=True)
                                        )
                                    else:
                                        act_i_js = sub.get_active_jobscripts()
                                except KeyboardInterrupt:
                                    raise
                                except Exception:
                                    self.submission_logger.info(
                                        f"failed to retrieve active jobscripts from workflow "
                                        f"at: {file_dat_i['path']!r}!"
                                    )
                                    out_item["unloadable"] = True
                                    act_i_js = {}
                                else:
                                    active_jobscripts[run_key] = act_i_js

                            out_item["active_jobscripts"] = {
                                k: v for k, v in act_i_js.items() if k in all_jobscripts
                            }
                            if (
                                not out_item["unloadable"]
                                and not act_i_js
                                and file_dat_i["is_active"]
                            ):
                                inactive_IDs.append(file_dat_i["local_id"])

            out.append(out_item)

        if (inactive_IDs or start_times or end_times) and not no_update:
            removed_IDs = self.update_known_subs_file(
                inactive_IDs, start_times, end_times
            )
            # remove these from the output, to avoid confusion (if kept, they would not
            # appear in the next invocation of this method):
            out = [item for item in out if item["local_id"] not in removed_IDs]

        out_active, out_inactive = self.__partition(
            out, lambda item: item["active_jobscripts"]
        )
        # sort inactive by most-recently finished, then deleted:
        out_no_access, out_access = self.__partition(
            out_inactive, lambda item: item["deleted"] or item["unloadable"]
        )

        # sort loadable inactive by end time or start time or submit time:
        out_access = sorted(
            out_access,
            key=lambda i: (
                i["end_time_obj"]
                or i["start_time_obj"]
                or i.get("submit_time_obj")
                or self.__DEF_TIMESTAMP
            ),
            reverse=True,
        )
        out_inactive = (out_access + out_no_access)[:max_recent]

        # show active submissions first:
        out = out_active + out_inactive

        if as_json:
            for item in out:
                item.pop("submission", None)
                item.pop("submit_time_obj")
        return out

    __DEF_TIMESTAMP: Final[datetime] = datetime.fromtimestamp(0, tz=timezone.utc)

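    # The sort key above falls back through end time, start time, and submit
    # time, and finally to the Unix epoch (`__DEF_TIMESTAMP`), so records with
    # no parseable times sort as oldest rather than raising. A sketch:
    #
    #     >>> from datetime import datetime, timezone
    #     >>> item = {"end_time_obj": None, "start_time_obj": None}
    #     >>> (item["end_time_obj"] or item["start_time_obj"]
    #     ...  or item.get("submit_time_obj")
    #     ...  or datetime.fromtimestamp(0, tz=timezone.utc))
    #     datetime.datetime(1970, 1, 1, 0, 0, tzinfo=datetime.timezone.utc)
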
    @staticmethod
    def __partition(
        lst: Iterable[T], cond: Callable[[T], Any]
    ) -> tuple[list[T], list[T]]:
        """
        Split a list into two by whether the condition holds for each item.

        Returns
        -------
        true_items
            List of items for which the condition is true (or at least truthy).
        false_items
            List of items for which the condition is false.
        """
        lists: tuple[list[T], list[T]] = [], []
        for item in lst:
            lists[not cond(item)].append(item)
        return lists

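    # `lists[not cond(item)]` relies on `bool` being an `int` subclass:
    # `not truthy` is `False` (index 0, the true-items list) and `not falsy`
    # is `True` (index 1, the false-items list). A minimal sketch:
    #
    #     >>> lists = ([], [])
    #     >>> for item in [1, 0, 2, ""]:
    #     ...     lists[not bool(item)].append(item)
    #     >>> lists
    #     ([1, 2], [0, ''])
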
    def _show_legend(self) -> None:
        """
        Output a legend for the jobscript-element and EAR states that are displayed
        by the `show` command.
        """
        js_notes = Panel(
            "The [i]Status[/i] column of the `show` command output displays the set of "
            "unique jobscript-element states for that submission. Jobscript element "
            "state meanings are shown below.",
            width=80,
            box=box.SIMPLE,
        )

        js_tab = Table(box=box.SQUARE, title="Jobscript element states")
        js_tab.add_column("Symbol")
        js_tab.add_column("State")
        js_tab.add_column("Description")
        for jse_state in JobscriptElementState.__members__.values():
            js_tab.add_row(jse_state.rich_repr, jse_state.name, jse_state.__doc__)

        act_notes = Panel(
            "\nThe [i]Actions[/i] column of the `show` command output displays either the "
            "set of unique action states for that submission, or (with the `--full` "
            "option) an action state for each action of the submission. Action state "
            "meanings are shown below.",
            width=80,
            box=box.SIMPLE,
        )

        act_tab = Table(box=box.SQUARE, title="Action states")
        act_tab.add_column("Symbol")
        act_tab.add_column("State")
        act_tab.add_column("Description")
        for ear_state in EARStatus.__members__.values():
            act_tab.add_row(ear_state.rich_repr, ear_state.name, ear_state.__doc__)

        group = Group(
            js_notes,
            js_tab,
            act_notes,
            act_tab,
        )
        rich_print(group)

    @TimeIt.decorator
    def _show(
        self,
        max_recent: int = 3,
        full: bool = False,
        no_update: bool = False,
    ) -> None:
        """
        Show information about running {app_name} workflows.

        Parameters
        ----------
        max_recent:
            Maximum number of inactive workflows to show.
        full:
            If True, provide more information; output may span multiple lines for each
            workflow submission.
        no_update:
            If True, do not update the known-submissions file to remove workflows that are
            no longer running.
        """
        # TODO: add --json to show, just returning this but without submissions?

        allowed_cols = {
            "id": "ID",
            "name": "Name",
            "status": "Status",
            "submit_time": "Submit",
            "start_time": "Start",
            "end_time": "End",
            "times": "Times",
            "actions": "Actions",
            "actions_compact": "Actions",
        }

        columns: tuple[str, ...]
        if full:
            columns = ("id", "name", "status", "actions")
        else:
            columns = (
                "id",
                "name",
                "status",
                # "submit_time",
                # "start_time",
                # "end_time",
                "actions_compact",
            )

        unknown_cols = set(columns).difference(allowed_cols)
        if unknown_cols:
            raise ValueError(
                f"Unknown column names: {unknown_cols!r}. Allowed columns are "
                f"{list(allowed_cols)!r}."
            )

        # TODO: add --filter option to filter by ID or name
        # TODO: add --sort option to sort by ID/name/start/end

        ts_fmt = r"%Y-%m-%d %H:%M:%S"
        ts_fmt_part = r"%H:%M:%S"

        console = Console()
        with console.status("Retrieving data...") as status:
            run_dat = self._get_known_submissions(
                max_recent=max_recent,
                no_update=no_update,
                status=status,
            )
            if not run_dat:
                return

            status.update("Formatting...")
            table = Table(box=box.SQUARE, expand=False)
            for col_name in columns:
                table.add_column(allowed_cols[col_name])

            row_pad = 1 if full else 0

            for dat_i in run_dat:
                deleted = dat_i["deleted"]
                unloadable = dat_i["unloadable"]
                no_access = deleted or unloadable
                act_js = dat_i["active_jobscripts"]
                style = "grey42" if (no_access or not act_js) else ""
                style_wk_name = "grey42 strike" if deleted else style
                style_it = "italic grey42" if (no_access or not act_js) else "italic"

                all_cells: dict[str, str | Text | Padding] = {}
                if "status" in columns:
                    if act_js:
                        act_js_states = set(
                            state_i
                            for js_dat in act_js.values()
                            for block_dat in js_dat.values()
                            for state_i in block_dat.values()
                        )
                        all_cells["status"] = "/".join(
                            js_state.rich_repr
                            for js_state in sorted(act_js_states, key=lambda x: x.id)
                        )
                    else:
                        if deleted:
                            txt = "deleted"
                        elif unloadable:
                            txt = "unloadable"
                        else:
                            txt = "inactive"
                        all_cells["status"] = Text(txt, style=style_it)

                if "id" in columns:
                    all_cells["id"] = Text(str(dat_i["local_id"]), style=style)

                if "name" in columns:
                    all_cells["name"] = Text(
                        Path(dat_i["workflow_path"]).name, style=style_wk_name
                    )

                start_time, end_time = None, None
                if not no_access:
                    start_time = cast("datetime", dat_i["start_time_obj"])
                    end_time = cast("datetime", dat_i["end_time_obj"])

                if "actions" in columns:
                    task_tab: str | Table
                    if not no_access:
                        task_tab = Table(box=None, show_header=False)
                        task_tab.add_column()
                        task_tab.add_column()

                        sub = dat_i["submission"]
                        for task_idx, elements in sub.EARs_by_elements.items():
                            task = sub.workflow.tasks[task_idx]

                            # inner table for elements/actions:
                            elem_tab_i = Table(box=None, show_header=False)
                            elem_tab_i.add_column()
                            for elem_idx, EARs in elements.items():
                                elem_status = Text(f"{elem_idx} | ", style=style)
                                for ear in EARs:
                                    elem_status.append(
                                        ear.status.symbol, style=ear.status.colour
                                    )
                                elem_tab_i.add_row(elem_status)
                            task_tab.add_row(task.unique_name, elem_tab_i, style=style)
                    else:
                        task_tab = ""

                    all_cells["actions"] = Padding(task_tab, (0, 0, row_pad, 0))

                if "actions_compact" in columns:
                    if not no_access:
                        EAR_stat_count = Counter(
                            ear.status
                            for elements in dat_i["submission"].EARs_by_elements.values()
                            for EARs in elements.values()
                            for ear in EARs
                        )
                        all_cells["actions_compact"] = " | ".join(
                            f"[{k.colour}]{k.symbol}[/{k.colour}]:{v}"  # type: ignore
                            for k, v in dict(
                                sorted(EAR_stat_count.items(), key=lambda x: x[0].id)
                            ).items()
                        )
                    else:
                        all_cells["actions_compact"] = ""

                if "submit_time" in columns or "times" in columns:
                    submit_time = parse_timestamp(
                        dat_i["submit_time"], self._submission_ts_fmt
                    )
                    submit_time_full = submit_time.strftime(ts_fmt)

                if "start_time" in columns or "times" in columns:
                    start_time_full = start_time.strftime(ts_fmt) if start_time else "-"
                    start_time_part = start_time_full
                    if start_time and start_time.date() == submit_time.date():
                        start_time_part = start_time.strftime(ts_fmt_part)

                if "end_time" in columns or "times" in columns:
                    end_time_full = end_time.strftime(ts_fmt) if end_time else "-"
                    end_time_part = end_time_full
                    if end_time and start_time and end_time.date() == start_time.date():
                        end_time_part = end_time.strftime(ts_fmt_part)

                if "submit_time" in columns:
                    all_cells["submit_time"] = Padding(
                        Text(submit_time_full, style=style), (0, 0, row_pad, 0)
                    )

                if "start_time" in columns:
                    all_cells["start_time"] = Padding(
                        Text(start_time_part, style=style), (0, 0, row_pad, 0)
                    )

                if "end_time" in columns:
                    all_cells["end_time"] = Padding(
                        Text(end_time_part, style=style), (0, 0, row_pad, 0)
                    )

                if "times" in columns:
                    # submit/start/end on separate lines:
                    times_tab = Table(box=None, show_header=False)
                    times_tab.add_column()
                    times_tab.add_column(justify="right")

                    times_tab.add_row(
                        Text("sb.", style=style_it), Text(submit_time_full, style=style)
                    )

                    if start_time:
                        times_tab.add_row(
                            Text("st.", style=style_it),
                            Text(start_time_part, style=style),
                        )
                    if end_time:
                        times_tab.add_row(
                            Text("en.", style=style_it), Text(end_time_part, style=style)
                        )

                    all_cells["times"] = Padding(times_tab, (0, 0, row_pad, 0))

                table.add_row(*(all_cells[col_name] for col_name in columns))

        if table.row_count:
            console.print(table)

def _get_workflow_path_from_local_ID(self, local_ID: int) -> Path:
|
|
3891
|
+
try:
|
|
3892
|
+
known_subs = self.read_known_submissions_file()
|
|
3893
|
+
except FileNotFoundError:
|
|
3894
|
+
known_subs = []
|
|
3895
|
+
|
|
3896
|
+
if any((witness := sub)["local_id"] == local_ID for sub in known_subs):
|
|
3897
|
+
return Path(witness["path"])
|
|
3898
|
+
raise ValueError(f"Specified local ID is not valid: {local_ID}.")
|
|
3899
|
+
|
|
3900
|
+
    def _resolve_workflow_reference(
        self, workflow_ref: str, ref_type: str | None
    ) -> Path:
        path = None
        if ref_type == "path":
            path = Path(workflow_ref)

        elif ref_type == "id":
            local_ID = int(workflow_ref)
            path = self._get_workflow_path_from_local_ID(local_ID)

        elif ref_type in ("assume-id", None):
            # see if reference is a valid local ID:
            is_local_ID = True
            try:
                local_ID = int(workflow_ref)
            except ValueError:
                is_local_ID = False
            else:
                try:
                    path = self._get_workflow_path_from_local_ID(local_ID)
                except ValueError:
                    is_local_ID = False

            if path is None:
                # see if reference is a valid path:
                is_path = True
                path = Path(workflow_ref)
                if not path.exists():
                    is_path = False

                if is_path and is_local_ID:
                    raise ValueError(
                        "Workflow reference appears to be both a valid path and a valid "
                        "local ID; set `ref_is_path` to True or False to disambiguate: "
                        f"{workflow_ref}."
                    )
                elif not is_path and not is_local_ID:
                    raise ValueError(
                        "Workflow reference appears to be neither a valid path nor a "
                        f"valid local ID: {workflow_ref}."
                    )
        return path.resolve()

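A usage sketch of the resolution rules, with hypothetical values and assuming `app` is an instance of this class: "path" and "id" are explicit interpretations, while "assume-id" (or `None`) tries the ID interpretation first and falls back to treating the reference as a path.

app._resolve_workflow_reference("12", ref_type="id")          # look up known submissions
app._resolve_workflow_reference("./my_wf", ref_type="path")   # resolve as a filesystem path

# ambiguous reference: resolved as a known local ID if possible, else as an
# existing path; a ValueError is raised if it is neither:
app._resolve_workflow_reference("12", ref_type=None)
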
    def _cancel(
        self,
        workflow_ref: int | str | Path,
        ref_is_path: str | None = None,
        status: bool = True,
    ) -> None:
        """
        Cancel the execution of a workflow submission.

        Parameters
        ----------
        workflow_ref: int | str | Path
            Which workflow to cancel, by ID or path.
        ref_is_path: str | None
            One of "``id``", "``path``" or "``assume-id``" (the default).
        status: bool
            Whether to show a live status during cancel.
        """
        path = self._resolve_workflow_reference(str(workflow_ref), ref_is_path)
        self.Workflow(path).cancel(status=status)

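A minimal usage sketch, with hypothetical references and assuming `app` is an instance of a BaseApp subclass:

# cancel by known-submission local ID:
app._cancel(3, ref_is_path="id")

# cancel by path, without the live status display:
app._cancel("/projects/my_wf", ref_is_path="path", status=False)
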
    @staticmethod
    def redirect_std_to_file(*args, **kwargs):
        return redirect_std_to_file_hpcflow(*args, **kwargs)

    def configure_env(
        self,
        name: str,
        setup: list[str] | None = None,
        executables: list[_Executable] | None = None,
        use_current_env: bool = False,
        env_source_file: Path | None = None,
    ):
        """
        Configure an execution environment.
        """
        if not setup:
            setup = []
        if not executables:
            executables = []
        env_source = env_source_file or self.config.get("config_directory").joinpath(
            "configured_envs.yaml"
        )
        assert isinstance(env_source, Path)
        if use_current_env:
            if self.run_time_info.is_conda_venv:
                # use the currently activated conda environment for the new app environment:
                conda_exe = os.environ.get("MAMBA_EXE", os.environ.get("CONDA_EXE"))
                setup.append(f"{conda_exe} activate {os.environ['CONDA_PREFIX']}")
            elif self.run_time_info.is_venv:
                if os.name == "posix":
                    cmd = f"source {self.run_time_info.venv_path}/bin/activate"
                elif os.name == "nt":
                    cmd = f"{self.run_time_info.venv_path}\\Scripts\\activate.ps1"
                setup.append(cmd)

            executables = [
                self.Executable(
                    label="python_script",
                    instances=[
                        self.ExecutableInstance(
                            command=f"{sys.executable} <<script_name>> <<args>>",
                            num_cores=1,
                            parallel_mode=None,
                        ),
                    ],
                ),
            ]

        new_env = self.Environment(name=name, setup=setup, executables=executables)
        new_env_dat = new_env.to_json_like(exclude={"_hash_value"})[0]
        if env_source.exists():
            existing_env_dat: list[dict] = read_YAML_file(env_source, typ="rt")
            if any(name == i["name"] for i in existing_env_dat):
                # TODO: this doesn't check all app envs, just those added with this method
                raise ValueError(f"Environment {name!r} already exists.")

            all_env_dat = [*existing_env_dat, new_env_dat]

            # write a new temporary config file:
            tmp_file = env_source.with_suffix(env_source.suffix + ".tmp")
            self.logger.debug(f"Creating temporary env source file: {tmp_file!r}.")
            write_YAML_file(all_env_dat, tmp_file, typ="rt")

            # atomic rename, overwriting original:
            self.logger.debug("Replacing original env source file with temporary file.")
            os.replace(src=tmp_file, dst=env_source)

        else:
            all_env_dat = [new_env_dat]
            write_YAML_file(all_env_dat, env_source, typ="rt")

        cur_env_source_files = self.config.get("environment_sources")
        if env_source not in cur_env_source_files:
            self.config.append("environment_sources", str(env_source))
            self.config.save()

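The write-then-rename sequence above is the standard pattern for updating a file without risking a partially written result. A standalone sketch of the same idea, with a hypothetical file name:

import os
from pathlib import Path

target = Path("configured_envs.yaml")
tmp = target.with_suffix(target.suffix + ".tmp")
tmp.write_text("envs: []\n")  # write the complete new content to the temporary file
os.replace(tmp, target)       # atomic replace of the original on POSIX and Windows
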
    def get_demo_data_files_manifest(self) -> dict[str, Any]:
        """
        Get a dict whose keys are example data file names and whose values are the
        source files if the source file required unzipping, or `None` otherwise.

        If the config item `demo_data_manifest_file` is set, this is used as the
        manifest file path. Otherwise, the app attribute `demo_data_manifest_dir` is
        used; this should be the package/directory in the source code that contains a
        file `demo_data_manifest.json`.
        """
        if self.config.demo_data_manifest_file:
            self.logger.debug(
                f"loading example data files manifest from the config item "
                f"`demo_data_manifest_file`: "
                f"{self.config.demo_data_manifest_file!r}."
            )
            fs, url_path = rate_limit_safe_url_to_fs(
                self,
                str(self.config.demo_data_manifest_file),
                logger=self.logger,
            )
            with fs.open(url_path) as fh:
                return json.load(fh)
        else:
            self.logger.debug(
                f"loading example data files manifest from the app attribute "
                f"`demo_data_manifest_dir`: "
                f"{self.demo_data_manifest_dir!r}."
            )
            if (package := self.demo_data_manifest_dir) is None:
                self.logger.warning("no demo data dir defined")
                return {}
            with open_text_resource(package, "demo_data_manifest.json") as fh:
                return json.load(fh)

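A sketch of the manifest structure implied by the docstring and by `_get_demo_data_file_source_path` below, with hypothetical file names: a falsy value means the file is used directly, while an `in_zip` entry names the archive the file must be unpacked from.

manifest = {
    "small_input.txt": None,                              # used as-is
    "large_dataset.h5": {"in_zip": "large_dataset.zip"},  # unpacked from a zip
}
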
    def list_demo_data_files(self) -> tuple[str, ...]:
        """List available example data files."""
        return tuple(self.get_demo_data_files_manifest())

    def _get_demo_data_file_source_path(self, file_name: str) -> tuple[Path, bool, bool]:
        """
        Get the full path to an example data file on the local file system, whether
        the file must be unpacked, and whether the file should be deleted.

        If `config.demo_data_dir` is set, this directory will be used as the example
        data file source directory. This could be set to a local path or an fsspec
        URL. This directory is expected to contain `file_name` if `file_name` exists
        in the manifest. If this points to a remote file system (e.g. GitHub), the
        file will be copied from the remote file system to a temporary local file,
        which should then be deleted at a later point.

        If `config.demo_data_dir` is not set, we use the app attribute
        `app.demo_data_dir`, which should point to a package resource within the
        source code tree. This package resource may not be present when using the
        frozen app, or when the app is installed via PyPI. In that case, we set a
        default value of `config.demo_data_dir` (without saving to the persistent
        config file), and then retrieve the example data file path as above. The
        default value is set to the GitHub repo of the app at the current
        tag/version.
        """

        def _retrieve_source_path_from_config(src_fn: str):
            fs, url_path = rate_limit_safe_url_to_fs(
                self,
                self.config.demo_data_dir,
                logger=self.logger,
            )
            if isinstance(fs, LocalFileSystem):
                out = url_path
                delete = False
            else:
                # download to a temporary directory:
                self._ensure_user_runtime_dir()
                temp_path = self.user_runtime_dir.joinpath(src_fn)
                self.logger.debug(
                    f"downloading example data file source {src_fn!r} from remote file "
                    f"system {fs!r} at remote path {url_path!r} to a temporary "
                    f"directory file {temp_path!r}."
                )
                if temp_path.is_file():
                    # overwrite if it already exists:
                    temp_path.unlink()
                fs.get(rpath=f"{url_path}/{src_fn}", lpath=str(temp_path))
                delete = True
                out = temp_path
            return out, delete

        manifest = self.get_demo_data_files_manifest()
        if file_name not in manifest:
            raise ValueError(f"No such example data file {file_name!r}.")

        spec: dict[str, str] = manifest[file_name]
        requires_unpack = bool(spec)
        src_fn = spec["in_zip"] if requires_unpack else file_name

        if self.config.demo_data_dir:
            self.logger.info(
                f"using config item `demo_data_dir` as example data file source "
                f"directory: {self.config.demo_data_dir!r}."
            )
            # use this directory (could be local or remote):
            out, delete = _retrieve_source_path_from_config(src_fn)

        else:
            self.logger.info(
                f"trying to use app attribute `demo_data_dir` as example data file "
                f"source directory: {self.demo_data_dir!r}."
            )
            # `config.demo_data_dir` not set, so try to use `app.demo_data_dir`:

            if package := self.demo_data_dir:
                try:
                    with get_file_context(package, src_fn) as path:
                        out = path
                        delete = False
                except (ModuleNotFoundError, FileNotFoundError):
                    # example data not included (e.g. frozen, or installed via
                    # PyPI/conda), so set a default value for `config.demo_data_dir`
                    # (point to the package GitHub repo for the current tag):
                    path_ = package.replace(".", "/")
                    url = self._get_github_url(sha=f"v{self.version}", path=path_)
                    self.logger.info(
                        f"path {path_!r} does not exist as a package resource (example "
                        f"data was probably not included in the app), so "
                        f"non-persistently setting the config item `demo_data_dir` to "
                        f"the app's GitHub repo path: {url!r}."
                    )
                    self.config.demo_data_dir = url
                    out, delete = _retrieve_source_path_from_config(src_fn)

        return out, requires_unpack, delete

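The inner helper relies on fsspec treating remote locations as file systems. A generic sketch of the same download pattern using plain fsspec (hypothetical URL; `rate_limit_safe_url_to_fs` is an app-specific wrapper around this behaviour):

import fsspec

# resolve a URL (local path or remote, e.g. a GitHub tree) to (filesystem, path):
fs, url_path = fsspec.core.url_to_fs("github://some-org:some-repo@v1.0/data")

# copy one remote file to a local path:
fs.get(rpath=f"{url_path}/example.txt", lpath="/tmp/example.txt")
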
    def get_demo_data_file_path(self, file_name: str) -> Path:
        """
        Get the full path to an example data file in the app cache directory.

        If the file does not already exist in the app cache directory, it will be
        added (and unzipped if required). The file may first be downloaded from a
        remote file system such as GitHub (see `_get_demo_data_file_source_path` for
        details).
        """
        # check if file exists in cache dir already:
        cache_file_path = self.demo_data_cache_dir.joinpath(file_name)
        if cache_file_path.exists():
            self.logger.info(
                f"example data file {file_name!r} is already in the cache: "
                f"{cache_file_path!r}."
            )
        else:
            self.logger.info(
                f"example data file {file_name!r} is not in the cache, so copying to "
                f"the cache: {cache_file_path!r}."
            )
            self._ensure_demo_data_cache_dir()
            src, unpack, delete = self._get_demo_data_file_source_path(file_name)
            if unpack:
                # extract file to cache dir:
                self.logger.debug(
                    f"extracting example data file {file_name!r} source file {src!r}."
                )
                with TemporaryDirectory() as temp_dir:
                    with zipfile.ZipFile(src, "r") as zip_ref:
                        zip_ref.extractall(temp_dir)
                    extracted = Path(temp_dir).joinpath(file_name)
                    if extracted.is_dir():
                        shutil.copytree(extracted, cache_file_path)
                    else:
                        shutil.copy(extracted, cache_file_path)
            else:
                # copy to cache dir:
                shutil.copy(src, cache_file_path)
            if delete:
                # e.g. `src` is in a temporary directory because it was downloaded
                # from GitHub:
                self.logger.debug(
                    f"deleting example data file {file_name!r} source file {src!r}."
                )
                src.unlink()
        return cache_file_path

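A short usage sketch of the caching flow, with a hypothetical file name and assuming `app` is an instance of a BaseApp subclass:

print(app.list_demo_data_files())            # names from the manifest
p = app.get_demo_data_file_path("input.h5")  # downloads/unzips on first call
p = app.get_demo_data_file_path("input.h5")  # second call is a cache hit
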
    def cache_demo_data_file(self, file_name: str) -> Path:
        """
        Ensure a demo data file is in the cache, and return its path.
        """
        return self.get_demo_data_file_path(file_name)

    def cache_all_demo_data_files(self) -> list[Path]:
        """
        Ensure all demo data files are in the cache, and return their paths.
        """
        return [
            self.get_demo_data_file_path(filename)
            for filename in self.list_demo_data_files()
        ]

    def copy_demo_data(
        self, file_name: str, dst: PathLike | None = None, doc: bool = True
    ) -> str:
        """
        Copy a built-in demo data file to the specified location.

        Parameters
        ----------
        file_name
            The name of the demo data file to copy.
        dst
            Directory or full file path to copy the demo data file to. If not
            specified, the current working directory will be used.
        """
        dst = dst or Path(".")
        src = self.get_demo_data_file_path(file_name)
        shutil.copy2(src, dst)  # copies metadata, and `dst` can be a dir

        return src.name

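A usage sketch, with a hypothetical file name and assuming `app` is an instance of a BaseApp subclass:

# copy into the current working directory; returns the file name used:
name = app.copy_demo_data("input.h5")

# or copy into an explicit directory:
name = app.copy_demo_data("input.h5", dst="inputs/")
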
    def _get_github_url(self, sha: str, path: str):
        """
        Return an fsspec URL for retrieving a file or directory on the app's GitHub
        repository.
        """
        return f"github://{self.gh_org}:{self.gh_repo}@{sha}/{path}"


class App(BaseApp):
    """Class from which to instantiate downstream app objects (e.g. MatFlow)."""

    pass
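The URL format produced by `_get_github_url`, shown with hypothetical org/repo/tag values:

org, repo, sha, path = "some-org", "some-repo", "v0.2.0", "data/demo_data"
print(f"github://{org}:{repo}@{sha}/{path}")
# -> github://some-org:some-repo@v0.2.0/data/demo_data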