hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a199__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (176)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +9 -6
  2. hpcflow/_version.py +1 -1
  3. hpcflow/app.py +1 -0
  4. hpcflow/data/scripts/bad_script.py +2 -0
  5. hpcflow/data/scripts/do_nothing.py +2 -0
  6. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  7. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  8. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  9. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  10. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  11. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  12. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  13. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  14. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  15. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  16. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  17. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  18. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  19. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  20. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  21. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  22. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  23. hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
  24. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  25. hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
  26. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  27. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  28. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  29. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  30. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  31. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  32. hpcflow/data/scripts/script_exit_test.py +5 -0
  33. hpcflow/data/template_components/environments.yaml +1 -1
  34. hpcflow/sdk/__init__.py +26 -15
  35. hpcflow/sdk/app.py +2192 -768
  36. hpcflow/sdk/cli.py +506 -296
  37. hpcflow/sdk/cli_common.py +105 -7
  38. hpcflow/sdk/config/__init__.py +1 -1
  39. hpcflow/sdk/config/callbacks.py +115 -43
  40. hpcflow/sdk/config/cli.py +126 -103
  41. hpcflow/sdk/config/config.py +674 -318
  42. hpcflow/sdk/config/config_file.py +131 -95
  43. hpcflow/sdk/config/errors.py +125 -84
  44. hpcflow/sdk/config/types.py +148 -0
  45. hpcflow/sdk/core/__init__.py +25 -1
  46. hpcflow/sdk/core/actions.py +1771 -1059
  47. hpcflow/sdk/core/app_aware.py +24 -0
  48. hpcflow/sdk/core/cache.py +139 -79
  49. hpcflow/sdk/core/command_files.py +263 -287
  50. hpcflow/sdk/core/commands.py +145 -112
  51. hpcflow/sdk/core/element.py +828 -535
  52. hpcflow/sdk/core/enums.py +192 -0
  53. hpcflow/sdk/core/environment.py +74 -93
  54. hpcflow/sdk/core/errors.py +455 -52
  55. hpcflow/sdk/core/execute.py +207 -0
  56. hpcflow/sdk/core/json_like.py +540 -272
  57. hpcflow/sdk/core/loop.py +751 -347
  58. hpcflow/sdk/core/loop_cache.py +164 -47
  59. hpcflow/sdk/core/object_list.py +370 -207
  60. hpcflow/sdk/core/parameters.py +1100 -627
  61. hpcflow/sdk/core/rule.py +59 -41
  62. hpcflow/sdk/core/run_dir_files.py +21 -37
  63. hpcflow/sdk/core/skip_reason.py +7 -0
  64. hpcflow/sdk/core/task.py +1649 -1339
  65. hpcflow/sdk/core/task_schema.py +308 -196
  66. hpcflow/sdk/core/test_utils.py +191 -114
  67. hpcflow/sdk/core/types.py +440 -0
  68. hpcflow/sdk/core/utils.py +485 -309
  69. hpcflow/sdk/core/validation.py +82 -9
  70. hpcflow/sdk/core/workflow.py +2544 -1178
  71. hpcflow/sdk/core/zarr_io.py +98 -137
  72. hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
  73. hpcflow/sdk/demo/cli.py +53 -33
  74. hpcflow/sdk/helper/cli.py +18 -15
  75. hpcflow/sdk/helper/helper.py +75 -63
  76. hpcflow/sdk/helper/watcher.py +61 -28
  77. hpcflow/sdk/log.py +122 -71
  78. hpcflow/sdk/persistence/__init__.py +8 -31
  79. hpcflow/sdk/persistence/base.py +1360 -606
  80. hpcflow/sdk/persistence/defaults.py +6 -0
  81. hpcflow/sdk/persistence/discovery.py +38 -0
  82. hpcflow/sdk/persistence/json.py +568 -188
  83. hpcflow/sdk/persistence/pending.py +382 -179
  84. hpcflow/sdk/persistence/store_resource.py +39 -23
  85. hpcflow/sdk/persistence/types.py +318 -0
  86. hpcflow/sdk/persistence/utils.py +14 -11
  87. hpcflow/sdk/persistence/zarr.py +1337 -433
  88. hpcflow/sdk/runtime.py +44 -41
  89. hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
  90. hpcflow/sdk/submission/jobscript.py +1651 -692
  91. hpcflow/sdk/submission/schedulers/__init__.py +167 -39
  92. hpcflow/sdk/submission/schedulers/direct.py +121 -81
  93. hpcflow/sdk/submission/schedulers/sge.py +170 -129
  94. hpcflow/sdk/submission/schedulers/slurm.py +291 -268
  95. hpcflow/sdk/submission/schedulers/utils.py +12 -2
  96. hpcflow/sdk/submission/shells/__init__.py +14 -15
  97. hpcflow/sdk/submission/shells/base.py +150 -29
  98. hpcflow/sdk/submission/shells/bash.py +283 -173
  99. hpcflow/sdk/submission/shells/os_version.py +31 -30
  100. hpcflow/sdk/submission/shells/powershell.py +228 -170
  101. hpcflow/sdk/submission/submission.py +1014 -335
  102. hpcflow/sdk/submission/types.py +140 -0
  103. hpcflow/sdk/typing.py +182 -12
  104. hpcflow/sdk/utils/arrays.py +71 -0
  105. hpcflow/sdk/utils/deferred_file.py +55 -0
  106. hpcflow/sdk/utils/hashing.py +16 -0
  107. hpcflow/sdk/utils/patches.py +12 -0
  108. hpcflow/sdk/utils/strings.py +33 -0
  109. hpcflow/tests/api/test_api.py +32 -0
  110. hpcflow/tests/conftest.py +27 -6
  111. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  112. hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
  113. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  114. hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
  115. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  116. hpcflow/tests/scripts/test_main_scripts.py +866 -85
  117. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  118. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  119. hpcflow/tests/shells/wsl/test_wsl_submission.py +12 -4
  120. hpcflow/tests/unit/test_action.py +262 -75
  121. hpcflow/tests/unit/test_action_rule.py +9 -4
  122. hpcflow/tests/unit/test_app.py +33 -6
  123. hpcflow/tests/unit/test_cache.py +46 -0
  124. hpcflow/tests/unit/test_cli.py +134 -1
  125. hpcflow/tests/unit/test_command.py +71 -54
  126. hpcflow/tests/unit/test_config.py +142 -16
  127. hpcflow/tests/unit/test_config_file.py +21 -18
  128. hpcflow/tests/unit/test_element.py +58 -62
  129. hpcflow/tests/unit/test_element_iteration.py +50 -1
  130. hpcflow/tests/unit/test_element_set.py +29 -19
  131. hpcflow/tests/unit/test_group.py +4 -2
  132. hpcflow/tests/unit/test_input_source.py +116 -93
  133. hpcflow/tests/unit/test_input_value.py +29 -24
  134. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  135. hpcflow/tests/unit/test_json_like.py +44 -35
  136. hpcflow/tests/unit/test_loop.py +1396 -84
  137. hpcflow/tests/unit/test_meta_task.py +325 -0
  138. hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
  139. hpcflow/tests/unit/test_object_list.py +17 -12
  140. hpcflow/tests/unit/test_parameter.py +29 -7
  141. hpcflow/tests/unit/test_persistence.py +237 -42
  142. hpcflow/tests/unit/test_resources.py +20 -18
  143. hpcflow/tests/unit/test_run.py +117 -6
  144. hpcflow/tests/unit/test_run_directories.py +29 -0
  145. hpcflow/tests/unit/test_runtime.py +2 -1
  146. hpcflow/tests/unit/test_schema_input.py +23 -15
  147. hpcflow/tests/unit/test_shell.py +23 -2
  148. hpcflow/tests/unit/test_slurm.py +8 -7
  149. hpcflow/tests/unit/test_submission.py +38 -89
  150. hpcflow/tests/unit/test_task.py +352 -247
  151. hpcflow/tests/unit/test_task_schema.py +33 -20
  152. hpcflow/tests/unit/test_utils.py +9 -11
  153. hpcflow/tests/unit/test_value_sequence.py +15 -12
  154. hpcflow/tests/unit/test_workflow.py +114 -83
  155. hpcflow/tests/unit/test_workflow_template.py +0 -1
  156. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  157. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  158. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  159. hpcflow/tests/unit/utils/test_patches.py +5 -0
  160. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  161. hpcflow/tests/workflows/__init__.py +0 -0
  162. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  163. hpcflow/tests/workflows/test_jobscript.py +334 -1
  164. hpcflow/tests/workflows/test_run_status.py +198 -0
  165. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  166. hpcflow/tests/workflows/test_submission.py +140 -0
  167. hpcflow/tests/workflows/test_workflows.py +160 -15
  168. hpcflow/tests/workflows/test_zip.py +18 -0
  169. hpcflow/viz_demo.ipynb +6587 -3
  170. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +8 -4
  171. hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
  172. hpcflow/sdk/core/parallel.py +0 -21
  173. hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
  174. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
  175. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
  176. {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/app.py CHANGED
@@ -2,26 +2,25 @@
 
 from __future__ import annotations
 
-from collections import defaultdict
+from collections import Counter
+from contextlib import AbstractContextManager, nullcontext
 from datetime import datetime, timezone
 import enum
 import json
 import shutil
 from functools import wraps
 from importlib import resources, import_module
-from logging import Logger
 import os
 from contextlib import contextmanager
 from pathlib import Path
 import sys
 from tempfile import TemporaryDirectory
-from typing import Any, Callable, Dict, List, Optional, Type, Union, Tuple
+from typing import Any, TypeVar, Generic, cast, TYPE_CHECKING, Literal
 import warnings
 import zipfile
 from platformdirs import user_cache_path, user_data_dir
 import requests
-from reretry import retry
-import rich
+from reretry import retry  # type: ignore
 from rich.console import Console, Group
 from rich.syntax import Syntax
 from rich.table import Table, box
@@ -29,50 +28,305 @@ from rich.text import Text
 from rich.padding import Padding
 from rich.panel import Panel
 from rich import print as rich_print
-from fsspec.core import url_to_fs
-from fsspec.implementations.local import LocalFileSystem
-
+from fsspec.core import url_to_fs  # type: ignore
+from fsspec.implementations.local import LocalFileSystem  # type: ignore
 
 from hpcflow import __version__
-from hpcflow.sdk.core.actions import EARStatus
-from hpcflow.sdk.core.errors import WorkflowNotFoundError
-from hpcflow.sdk.core.object_list import ObjectList
+from hpcflow.sdk.core.enums import EARStatus
 from hpcflow.sdk.core.utils import (
     read_YAML_str,
     read_YAML_file,
     read_JSON_file,
     write_YAML_file,
     write_JSON_file,
+    redirect_std_to_file as redirect_std_to_file_hpcflow,
+    parse_timestamp,
+    get_file_context,
+    open_text_resource,
 )
 from hpcflow.sdk import sdk_classes, sdk_funcs, get_SDK_logger
 from hpcflow.sdk.config import Config, ConfigFile
 from hpcflow.sdk.core import ALL_TEMPLATE_FORMATS
+from .core.workflow import Workflow as _Workflow
 from hpcflow.sdk.log import AppLog, TimeIt
-from hpcflow.sdk.persistence import DEFAULT_STORE_FORMAT
+from hpcflow.sdk.persistence.defaults import DEFAULT_STORE_FORMAT
 from hpcflow.sdk.persistence.base import TEMPLATE_COMP_TYPES
 from hpcflow.sdk.runtime import RunTimeInfo
 from hpcflow.sdk.cli import make_cli
-from hpcflow.sdk.submission.jobscript_info import JobscriptElementState
+from hpcflow.sdk.submission.enums import JobscriptElementState
 from hpcflow.sdk.submission.shells import get_shell
 from hpcflow.sdk.submission.shells.os_version import (
     get_OS_info_POSIX,
     get_OS_info_windows,
 )
-from hpcflow.sdk.typing import PathLike
+
+if TYPE_CHECKING:
+    from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
+    from logging import Logger
+    from types import ModuleType
+    from typing import ClassVar, Literal, Protocol
+    from typing_extensions import Final
+    from rich.status import Status
+    from .typing import (
+        BasicTemplateComponents,
+        KnownSubmission,
+        KnownSubmissionItem,
+        PathLike,
+        TemplateComponents,
+        MakeWorkflowCommonArgs,
+    )
+    from .config.config import ConfigOptions
+    from .core.actions import (
+        ElementActionRun,
+        ElementAction,
+        ActionEnvironment,
+        Action,
+        ActionScope,
+        ActionRule,
+    )
+    from .core.command_files import (
+        FileSpec,
+        FileNameSpec,
+        InputFileGenerator,
+        FileNameStem,
+        FileNameExt,
+        OutputFileParser,
+    )
+    from .core.commands import Command
+    from .core.element import (
+        ElementInputs,
+        ElementOutputs,
+        ElementInputFiles,
+        ElementOutputFiles,
+        ElementIteration,
+        Element,
+        ElementParameter,
+        ElementResources,
+        ElementFilter,
+        ElementGroup,
+    )
+    from .core.enums import ActionScopeType, InputSourceType, TaskSourceType
+    from .core.environment import (
+        NumCores,
+        Environment,
+        Executable as _Executable,
+        ExecutableInstance,
+    )
+    from .core.loop import Loop, WorkflowLoop
+    from .core.object_list import (
+        CommandFilesList as _CommandFilesList,
+        EnvironmentsList as _EnvironmentsList,
+        ExecutablesList,
+        GroupList,
+        ParametersList as _ParametersList,
+        ResourceList,
+        TaskList,
+        TaskSchemasList as _TaskSchemasList,
+        TaskTemplateList,
+        WorkflowLoopList,
+        WorkflowTaskList,
+    )
+    from .core.parameters import (
+        SchemaParameter,
+        InputValue,
+        Parameter,
+        ParameterValue,
+        InputSource,
+        ResourceSpec,
+        SchemaOutput,
+        ValueSequence,
+        MultiPathSequence,
+        SchemaInput,
+    )
+    from .core.rule import Rule
+    from .core.run_dir_files import RunDirAppFiles
+    from .core.task import (
+        Task,
+        WorkflowTask,
+        Parameters,
+        TaskInputParameters,
+        TaskOutputParameters,
+        ElementPropagation,
+        ElementSet,
+    )
+    from .core.task_schema import TaskSchema, TaskObjective
+    from .core.workflow import WorkflowTemplate as _WorkflowTemplate
+    from .submission.jobscript import Jobscript
+    from .submission.submission import Submission as _Submission  # TODO: why?
+    from .submission.schedulers import Scheduler, QueuedScheduler
+    from .submission.schedulers.direct import DirectPosix, DirectWindows
+    from .submission.schedulers.sge import SGEPosix
+    from .submission.schedulers.slurm import SlurmPosix
+    from .submission.shells.base import VersionInfo
+
+    # Complex types for SDK functions
+    class _MakeWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.make_workflow`"""
+
+        def __call__(
+            self,
+            template_file_or_str: PathLike | str,
+            is_string: bool = False,
+            template_format: Literal["json", "yaml"] | None = None,
+            path: PathLike = None,
+            name: str | None = None,
+            overwrite: bool = False,
+            store: str = DEFAULT_STORE_FORMAT,
+            ts_fmt: str | None = None,
+            ts_name_fmt: str | None = None,
+            store_kwargs: dict[str, Any] | None = None,
+            variables: dict[str, str] | None = None,
+            status: bool = True,
+            add_submission: bool = False,
+        ) -> _Workflow | _Submission | None:
+            ...
+
+    class _MakeDemoWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.make_demo_workflow`"""
+
+        def __call__(
+            self,
+            workflow_name: str,
+            template_format: Literal["json", "yaml"] | None = None,
+            path: PathLike | None = None,
+            name: str | None = None,
+            overwrite: bool = False,
+            store: str = DEFAULT_STORE_FORMAT,
+            ts_fmt: str | None = None,
+            ts_name_fmt: str | None = None,
+            store_kwargs: dict[str, Any] | None = None,
+            variables: dict[str, str] | None = None,
+            status: bool = True,
+            add_submission: bool = False,
+        ) -> _Workflow | _Submission | None:
+            ...
+
+    class _MakeAndSubmitWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.make_and_submit_workflow`"""
+
+        # Should be overloaded on return_idx, but not bothering
+        def __call__(
+            self,
+            template_file_or_str: PathLike | str,
+            is_string: bool = False,
+            template_format: Literal["json", "yaml"] | None = None,
+            path: PathLike | None = None,
+            name: str | None = None,
+            overwrite: bool = False,
+            store: str = DEFAULT_STORE_FORMAT,
+            ts_fmt: str | None = None,
+            ts_name_fmt: str | None = None,
+            store_kwargs: dict[str, Any] | None = None,
+            variables: dict[str, str] | None = None,
+            JS_parallelism: bool | None = None,
+            wait: bool = False,
+            add_to_known: bool = True,
+            return_idx: bool = False,
+            tasks: list[int] | None = None,
+            cancel: bool = False,
+            status: bool = True,
+        ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
+            ...
+
+    class _MakeAndSubmitDemoWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.make_and_submit_demo_workflow`"""
+
+        # Should be overloaded on return_idx, but not bothering
+        def __call__(
+            self,
+            workflow_name: str,
+            template_format: Literal["json", "yaml"] | None = None,
+            path: PathLike | None = None,
+            name: str | None = None,
+            overwrite: bool = False,
+            store: str = DEFAULT_STORE_FORMAT,
+            ts_fmt: str | None = None,
+            ts_name_fmt: str | None = None,
+            store_kwargs: dict[str, Any] | None = None,
+            variables: dict[str, str] | None = None,
+            JS_parallelism: bool | None = None,
+            wait: bool = False,
+            add_to_known: bool = True,
+            return_idx: bool = False,
+            tasks: list[int] | None = None,
+            cancel: bool = False,
+            status: bool = True,
+        ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
+            ...
+
+    class _SubmitWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.submit_workflow`"""
+
+        # Should be overloaded on return_idx, but not bothering
+        def __call__(
+            self,
+            workflow_path: PathLike,
+            JS_parallelism: bool | None = None,
+            wait: bool = False,
+            return_idx: bool = False,
+            tasks: list[int] | None = None,
+        ) -> Mapping[int, Sequence[int]] | None:
+            ...
+
+    class _GetKnownSubmissions(Protocol):
+        """Type of :py:meth:`BaseApp.get_known_submissions`"""
+
+        # Should be overloaded on as_json, but not bothering
+        def __call__(
+            self,
+            max_recent: int = 3,
+            no_update: bool = False,
+            as_json: bool = False,
+            status: Status | None = None,
+        ) -> Sequence[KnownSubmissionItem]:
+            ...
+
+    class _Show(Protocol):
+        """Type of :py:meth:`BaseApp.show`"""
+
+        def __call__(
+            self,
+            max_recent: int = 3,
+            full: bool = False,
+            no_update: bool = False,
+        ) -> None:
+            ...
+
+    class _Cancel(Protocol):
+        """Type of :py:meth:`BaseApp.cancel`"""
+
+        def __call__(
+            self,
+            workflow_ref: int | str | Path,
+            ref_is_path: str | None = None,
+            status: bool = False,
+        ) -> None:
+            ...
+
+    class _RunTests(Protocol):
+        """Type of :py:meth:`BaseApp.run_tests and run_hpcflow_tests`"""
+
+        def __call__(self, *args: str) -> int:
+            ...
+
 
 SDK_logger = get_SDK_logger(__name__)
 DEMO_WK_FORMATS = {".yaml": "yaml", ".yml": "yaml", ".json": "json", ".jsonc": "json"}
 
+T = TypeVar("T")
+
 
-def rate_limit_safe_url_to_fs(app, *args, logger=None, **kwargs):
-    R"""Call fsspec's ``url_to_fs`` but retry on ``requests.exceptions.HTTPError``\ s.
+def rate_limit_safe_url_to_fs(
+    app: BaseApp, *args, logger: Logger | None = None, **kwargs
+):
+    R"""
+    Call fsspec's ``url_to_fs`` but retry on ``requests.exceptions.HTTPError``\ s.
 
     References
     ----------
     [1]: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?
     apiVersion=2022-11-28#about-secondary-rate-limits
     """
-
     auth = {}
     if app.run_time_info.in_pytest:
         gh_token = os.environ.get("GH_TOKEN")
@@ -81,10 +335,11 @@ def rate_limit_safe_url_to_fs(app, *args, logger=None, **kwargs):
             # requests allowed per hour to 1000 [1]. fsspec requires "username" to be
             # set if using "token":
             auth = {"username": "", "token": gh_token}
-            logger.info(
-                "calling fsspec's `url_to_fs` with a token from the env variable "
-                "`GH_TOKEN`."
-            )
+            if logger:
+                logger.info(
+                    "calling fsspec's `url_to_fs` with a token from the env variable "
+                    "`GH_TOKEN`."
+                )
 
     # GitHub actions testing is potentially highly concurrent, with multiple
     # Python versions and OSes being tested at the same time; so we might hit
@@ -104,48 +359,64 @@ def rate_limit_safe_url_to_fs(app, *args, logger=None, **kwargs):
     return _inner(*args, **kwargs)
 
 
-def __getattr__(name):
-    """Allow access to core classes and API functions (useful for type annotations)."""
+def __getattr__(name: str):
+    """Allow access to core classes and API functions."""
     try:
         return get_app_attribute(name)
     except AttributeError:
         raise AttributeError(f"module {__name__!r} has no attribute {name!r}.")
 
 
-def get_app_attribute(name):
-    """A function to assign to an app module `__getattr__` to access app attributes."""
+def get_app_attribute(name: str):
+    """
+    A function to assign to an app module `__getattr__` to access app attributes.
+    """
+    app_obj: BaseApp
     try:
-        app_obj = App.get_instance()
+        app_obj = cast("App", App.get_instance())
     except RuntimeError:
-        app_obj = BaseApp.get_instance()
+        app_obj = cast("BaseApp", BaseApp.get_instance())
     try:
         return getattr(app_obj, name)
    except AttributeError:
         raise AttributeError(f"module {app_obj.module!r} has no attribute {name!r}.")
 
 
-def get_app_module_all():
+def get_app_module_all() -> list[str]:
     """
     The list of all symbols exported by this module.
     """
-    return ["app"] + list(sdk_classes.keys()) + list(sdk_funcs)
+    return ["app", *sdk_classes, *sdk_funcs]
 
 
-def get_app_module_dir():
+def get_app_module_dir() -> Callable[[], list[str]]:
     """
     The sorted list of all symbols exported by this module.
     """
     return lambda: sorted(get_app_module_all())
 
 
-class Singleton(type):
+class Singleton(type, Generic[T]):
     """
     Metaclass that enforces that only one instance of a class can be made.
+
+    Type Parameters
+    ---------------
+    T
+        The type of the class that is a singleton.
     """
 
-    _instances = {}
+    _instances: ClassVar[dict[Singleton, Any]] = {}
+
+    def __call__(cls: Singleton[T], *args, **kwargs) -> T:
+        """
+        Get the current instance or make it if it doesn't already exist.
 
-    def __call__(cls, *args, **kwargs):
+        Return
+        ------
+        T:
+            The unique instance of the class.
+        """
         SDK_logger.info(
             f"App metaclass __call__: "
             f"name={kwargs['name']!r}, version={kwargs['version']!r}."
@@ -155,8 +426,15 @@ class Singleton(type):
         cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
         return cls._instances[cls]
 
-    def get_instance(cls):
-        """Retrieve the instance of the singleton class if initialised."""
+    def get_instance(cls: Singleton[T]) -> T:
+        """
+        Retrieve the instance of the singleton class if initialised.
+
+        Raises
+        ------
+        RuntimeError
+            If there is no instance already.
+        """
         try:
             return cls._instances[cls]
         except KeyError:
@@ -164,7 +442,8 @@
 
 
 class BaseApp(metaclass=Singleton):
-    """Class to generate the hpcflow application.
+    """
+    Class to generate the hpcflow application.
 
     Parameters
     ----------
@@ -206,28 +485,29 @@ class BaseApp(metaclass=Singleton):
         URL to documentation.
     """
 
-    _known_subs_file_name = "known_submissions.txt"
-    _known_subs_file_sep = "::"
-    _submission_ts_fmt = r"%Y-%m-%d %H:%M:%S.%f"
+    _known_subs_file_name: ClassVar = "known_submissions.txt"
+    _known_subs_file_sep: ClassVar = "::"
+    _submission_ts_fmt: ClassVar = r"%Y-%m-%d %H:%M:%S.%f"
+    __load_pending: ClassVar = False
 
     def __init__(
         self,
-        name,
-        version,
-        module,
-        description,
-        gh_org,
-        gh_repo,
-        config_options,
-        scripts_dir,
-        workflows_dir: str = None,
-        demo_data_dir: str = None,
-        demo_data_manifest_dir: str = None,
-        template_components: Dict = None,
-        pytest_args=None,
-        package_name=None,
-        docs_import_conv=None,
-        docs_url=None,
+        name: str,
+        version: str,
+        module: str,
+        description: str,
+        gh_org: str,
+        gh_repo: str,
+        config_options: ConfigOptions,
+        scripts_dir: str,
+        workflows_dir: str | None = None,
+        demo_data_dir: str | None = None,
+        demo_data_manifest_dir: str | None = None,
+        template_components: dict[str, list[dict]] | None = None,
+        pytest_args: list[str] | None = None,
+        package_name: str | None = None,
+        docs_import_conv: str | None = None,
+        docs_url: str | None = None,
     ):
         SDK_logger.info(f"Generating {self.__class__.__name__} {name!r}.")
 
@@ -265,84 +545,1114 @@ class BaseApp(metaclass=Singleton):
265
545
  #: Command line interface subsystem.
266
546
  self.cli = make_cli(self)
267
547
 
268
- self._log = AppLog(self)
269
- self._run_time_info: RunTimeInfo = RunTimeInfo(
270
- self.name,
271
- self.package_name,
272
- self.version,
273
- self.runtime_info_logger,
274
- )
548
+ self._log = AppLog(self)
549
+ self._run_time_info = RunTimeInfo(
550
+ self.name,
551
+ self.package_name,
552
+ self.version,
553
+ self.runtime_info_logger,
554
+ )
555
+
556
+ self._builtin_template_components = template_components or {}
557
+
558
+ self._config: Config | None = (
559
+ None # assigned on first access to `config` property
560
+ )
561
+ self._config_files: dict[
562
+ str, ConfigFile
563
+ ] = {} # assigned on config load, keys are string absolute paths
564
+
565
+ # Set by `_load_template_components`:
566
+ self._template_components: TemplateComponents = {}
567
+ self._parameters: _ParametersList | None = None
568
+ self._command_files: _CommandFilesList | None = None
569
+ self._environments: _EnvironmentsList | None = None
570
+ self._task_schemas: _TaskSchemasList | None = None
571
+ self._scripts: dict[str, Path] | None = None
572
+
573
+ self.__app_type_cache: dict[str, type] = {}
574
+ self.__app_func_cache: dict[str, Callable[..., Any]] = {}
575
+
576
+ # assigned on first access to respective properties
577
+ self._user_data_dir: Path | None = None
578
+ self._user_cache_dir: Path | None = None
579
+ self._user_runtime_dir: Path | None = None
580
+ self._user_data_hostname_dir: Path | None = None
581
+ self._user_cache_hostname_dir: Path | None = None
582
+ self._demo_data_cache_dir: Path | None = None
583
+
584
+ @property
585
+ def ElementActionRun(self) -> type[ElementActionRun]:
586
+ """
587
+ The :class:`ElementActionRun` class.
588
+
589
+ :meta private:
590
+ """
591
+ return self._get_app_core_class("ElementActionRun")
592
+
593
+ @property
594
+ def ElementAction(self) -> type[ElementAction]:
595
+ """
596
+ The :class:`ElementAction` class.
597
+
598
+ :meta private:
599
+ """
600
+ return self._get_app_core_class("ElementAction")
601
+
602
+ @property
603
+ def ElementFilter(self) -> type[ElementFilter]:
604
+ """
605
+ The :class:`ElementFilter` class.
606
+
607
+ :meta private:
608
+ """
609
+ return self._get_app_core_class("ElementFilter")
610
+
611
+ @property
612
+ def ElementGroup(self) -> type[ElementGroup]:
613
+ """
614
+ The :class:`ElementGroup` class.
615
+
616
+ :meta private:
617
+ """
618
+ return self._get_app_core_class("ElementGroup")
619
+
620
+ @property
621
+ def Environment(self) -> type[Environment]:
622
+ """
623
+ The :class:`Environment` class.
624
+
625
+ :meta private:
626
+ """
627
+ return self._get_app_core_class("Environment")
628
+
629
+ @property
630
+ def Executable(self) -> type[_Executable]:
631
+ """
632
+ The :class:`Executable` class.
633
+
634
+ :meta private:
635
+ """
636
+ return self._get_app_core_class("Executable")
637
+
638
+ @property
639
+ def ExecutableInstance(self) -> type[ExecutableInstance]:
640
+ """
641
+ The :class:`ExecutableInstance` class.
642
+
643
+ :meta private:
644
+ """
645
+ return self._get_app_core_class("ExecutableInstance")
646
+
647
+ @property
648
+ def NumCores(self) -> type[NumCores]:
649
+ """
650
+ The :class:`NumCores` class.
651
+
652
+ :meta private:
653
+ """
654
+ return self._get_app_core_class("NumCores")
655
+
656
+ @property
657
+ def ActionEnvironment(self) -> type[ActionEnvironment]:
658
+ """
659
+ The :class:`ActionEnvironment` class.
660
+
661
+ :meta private:
662
+ """
663
+ return self._get_app_core_class("ActionEnvironment")
664
+
665
+ @property
666
+ def Action(self) -> type[Action]:
667
+ """
668
+ The :class:`Action` class.
669
+
670
+ :meta private:
671
+ """
672
+ return self._get_app_core_class("Action")
673
+
674
+ @property
675
+ def ActionRule(self) -> type[ActionRule]:
676
+ """
677
+ The :class:`ActionRule` class.
678
+
679
+ :meta private:
680
+ """
681
+ return self._get_app_core_class("ActionRule")
682
+
683
+ @property
684
+ def ActionScope(self) -> type[ActionScope]:
685
+ """
686
+ The :class:`ActionScope` class.
687
+
688
+ :meta private:
689
+ """
690
+ return self._get_app_core_class("ActionScope")
691
+
692
+ @property
693
+ def ActionScopeType(self) -> type[ActionScopeType]:
694
+ """
695
+ The :class:`ActionScopeType` class.
696
+
697
+ :meta private:
698
+ """
699
+ return self._get_app_core_class("ActionScopeType")
700
+
701
+ @property
702
+ def FileSpec(self) -> type[FileSpec]:
703
+ """
704
+ The :class:`FileSpec` class.
705
+
706
+ :meta private:
707
+ """
708
+ return self._get_app_core_class("FileSpec")
709
+
710
+ @property
711
+ def FileNameSpec(self) -> type[FileNameSpec]:
712
+ """
713
+ The :class:`FileNameSpec` class.
714
+
715
+ :meta private:
716
+ """
717
+ return self._get_app_core_class("FileNameSpec")
718
+
719
+ @property
720
+ def FileNameStem(self) -> type[FileNameStem]:
721
+ """
722
+ The :class:`FileNameStem` class.
723
+
724
+ :meta private:
725
+ """
726
+ return self._get_app_core_class("FileNameStem")
727
+
728
+ @property
729
+ def FileNameExt(self) -> type[FileNameExt]:
730
+ """
731
+ The :class:`FileNameExt` class.
732
+
733
+ :meta private:
734
+ """
735
+ return self._get_app_core_class("FileNameExt")
736
+
737
+ @property
738
+ def OutputFileParser(self) -> type[OutputFileParser]:
739
+ """
740
+ The :class:`OutputFileParser` class.
741
+
742
+ :meta private:
743
+ """
744
+ return self._get_app_core_class("OutputFileParser")
745
+
746
+ @property
747
+ def InputSource(self) -> type[InputSource]:
748
+ """
749
+ The :class:`InputSource` class.
750
+
751
+ :meta private:
752
+ """
753
+ return self._get_app_core_class("InputSource")
754
+
755
+ @property
756
+ def InputSourceType(self) -> type[InputSourceType]:
757
+ """
758
+ The :class:`InputSourceType` class.
759
+
760
+ :meta private:
761
+ """
762
+ return self._get_app_core_class("InputSourceType")
763
+
764
+ @property
765
+ def ValueSequence(self) -> type[ValueSequence]:
766
+ """
767
+ The :class:`ValueSequence` class.
768
+
769
+ :meta private:
770
+ """
771
+ return self._get_app_core_class("ValueSequence")
772
+
773
+ @property
774
+ def MultiPathSequence(self) -> type[MultiPathSequence]:
775
+ """
776
+ The :class:`MultiPathSequence` class.
777
+
778
+ :meta private:
779
+ """
780
+ return self._get_app_core_class("MultiPathSequence")
781
+
782
+ @property
783
+ def SchemaInput(self) -> type[SchemaInput]:
784
+ """
785
+ The :class:`SchemaInput` class.
786
+
787
+ :meta private:
788
+ """
789
+ return self._get_app_core_class("SchemaInput")
790
+
791
+ @property
792
+ def InputFileGenerator(self) -> type[InputFileGenerator]:
793
+ """
794
+ The :class:`InputFileGenerator` class.
795
+
796
+ :meta private:
797
+ """
798
+ return self._get_app_core_class("InputFileGenerator")
799
+
800
+ @property
801
+ def Command(self) -> type[Command]:
802
+ """
803
+ The :class:`Command` class.
804
+
805
+ :meta private:
806
+ """
807
+ return self._get_app_core_class("Command")
808
+
809
+ @property
810
+ def ElementInputs(self) -> type[ElementInputs]:
811
+ """
812
+ The :class:`ElementInputs` class.
813
+
814
+ :meta private:
815
+ """
816
+ return self._get_app_core_class("ElementInputs")
817
+
818
+ @property
819
+ def ElementOutputs(self) -> type[ElementOutputs]:
820
+ """
821
+ The :class:`ElementOutputs` class.
822
+
823
+ :meta private:
824
+ """
825
+ return self._get_app_core_class("ElementOutputs")
826
+
827
+ @property
828
+ def ElementInputFiles(self) -> type[ElementInputFiles]:
829
+ """
830
+ The :class:`ElementInputFiles` class.
831
+
832
+ :meta private:
833
+ """
834
+ return self._get_app_core_class("ElementInputFiles")
835
+
836
+ @property
837
+ def ElementOutputFiles(self) -> type[ElementOutputFiles]:
838
+ """
839
+ The :class:`ElementOutputFiles` class.
840
+
841
+ :meta private:
842
+ """
843
+ return self._get_app_core_class("ElementOutputFiles")
844
+
845
+ @property
846
+ def ElementResources(self) -> type[ElementResources]:
847
+ """
848
+ The :class:`ElementResources` class.
849
+
850
+ :meta private:
851
+ """
852
+ return self._get_app_core_class("ElementResources")
853
+
854
+ @property
855
+ def ElementIteration(self) -> type[ElementIteration]:
856
+ """
857
+ The :class:`ElementIteration` class.
858
+
859
+ :meta private:
860
+ """
861
+ return self._get_app_core_class("ElementIteration")
862
+
863
+ @property
864
+ def ElementSet(self) -> type[ElementSet]:
865
+ """
866
+ The :class:`ElementSet` class.
867
+
868
+ :meta private:
869
+ """
870
+ return self._get_app_core_class("ElementSet")
871
+
872
+ @property
873
+ def Element(self) -> type[Element]:
874
+ """
875
+ The :class:`Element` class.
876
+
877
+ :meta private:
878
+ """
879
+ return self._get_app_core_class("Element")
880
+
881
+ @property
882
+ def ElementParameter(self) -> type[ElementParameter]:
883
+ """
884
+ The :class:`ElementParameter` class.
885
+
886
+ :meta private:
887
+ """
888
+ return self._get_app_core_class("ElementParameter")
889
+
890
+ @property
891
+ def Loop(self) -> type[Loop]:
892
+ """
893
+ The :class:`Loop` class.
894
+
895
+ :meta private:
896
+ """
897
+ return self._get_app_core_class("Loop")
898
+
899
+ @property
900
+ def WorkflowLoop(self) -> type[WorkflowLoop]:
901
+ """
902
+ The :class:`WorkflowLoop` class.
903
+
904
+ :meta private:
905
+ """
906
+ return self._get_app_core_class("WorkflowLoop")
907
+
908
+ @property
909
+ def CommandFilesList(self) -> type[_CommandFilesList]:
910
+ """
911
+ The :class:`CommandFilesList` class.
912
+
913
+ :meta private:
914
+ """
915
+ return self._get_app_core_class("CommandFilesList")
916
+
917
+ @property
918
+ def EnvironmentsList(self) -> type[_EnvironmentsList]:
919
+ """
920
+ The :class:`EnvironmentsList` class.
921
+
922
+ :meta private:
923
+ """
924
+ return self._get_app_core_class("EnvironmentsList")
925
+
926
+ @property
927
+ def ExecutablesList(self) -> type[ExecutablesList]:
928
+ """
929
+ The :class:`ExecutablesList` class.
930
+
931
+ :meta private:
932
+ """
933
+ return self._get_app_core_class("ExecutablesList")
934
+
935
+ @property
936
+ def GroupList(self) -> type[GroupList]:
937
+ """
938
+ The :class:`GroupList` class.
939
+
940
+ :meta private:
941
+ """
942
+ return self._get_app_core_class("GroupList")
943
+
944
+ @property
945
+ def ParametersList(self) -> type[_ParametersList]:
946
+ """
947
+ The :class:`ParametersList` class.
948
+
949
+ :meta private:
950
+ """
951
+ return self._get_app_core_class("ParametersList")
952
+
953
+ @property
954
+ def ResourceList(self) -> type[ResourceList]:
955
+ """
956
+ The :class:`ResourceList` class.
957
+
958
+ :meta private:
959
+ """
960
+ return self._get_app_core_class("ResourceList")
961
+
962
+ @property
963
+ def ResourceSpec(self) -> type[ResourceSpec]:
964
+ """
965
+ The :class:`ResourceSpec` class.
966
+
967
+ :meta private:
968
+ """
969
+ return self._get_app_core_class("ResourceSpec")
970
+
971
+ @property
972
+ def TaskList(self) -> type[TaskList]:
973
+ """
974
+ The :class:`TaskList` class.
975
+
976
+ :meta private:
977
+ """
978
+ return self._get_app_core_class("TaskList")
979
+
980
+ @property
981
+ def TaskSchemasList(self) -> type[_TaskSchemasList]:
982
+ """
983
+ The :class:`TaskSchemasList` class.
984
+
985
+ :meta private:
986
+ """
987
+ return self._get_app_core_class("TaskSchemasList")
988
+
989
+ @property
990
+ def TaskTemplateList(self) -> type[TaskTemplateList]:
991
+ """
992
+ The :class:`TaskTemplateList` class.
993
+
994
+ :meta private:
995
+ """
996
+ return self._get_app_core_class("TaskTemplateList")
997
+
998
+ @property
999
+ def WorkflowLoopList(self) -> type[WorkflowLoopList]:
1000
+ """
1001
+ The :class:`WorkflowLoopList` class.
1002
+
1003
+ :meta private:
1004
+ """
1005
+ return self._get_app_core_class("WorkflowLoopList")
1006
+
1007
+ @property
1008
+ def WorkflowTaskList(self) -> type[WorkflowTaskList]:
1009
+ """
1010
+ The :class:`WorkflowTaskList` class.
1011
+
1012
+ :meta private:
1013
+ """
1014
+ return self._get_app_core_class("WorkflowTaskList")
1015
+
1016
+ @property
1017
+ def SchemaParameter(self) -> type[SchemaParameter]:
1018
+ """
1019
+ The :class:`SchemaParameter` class.
1020
+
1021
+ :meta private:
1022
+ """
1023
+ return self._get_app_core_class("SchemaParameter")
1024
+
1025
+ @property
1026
+ def SchemaOutput(self) -> type[SchemaOutput]:
1027
+ """
1028
+ The :class:`SchemaOutput` class.
1029
+
1030
+ :meta private:
1031
+ """
1032
+ return self._get_app_core_class("SchemaOutput")
1033
+
1034
+ @property
1035
+ def Rule(self) -> type[Rule]:
1036
+ """
1037
+ The :class:`Rule` class.
1038
+
1039
+ :meta private:
1040
+ """
1041
+ return self._get_app_core_class("Rule")
1042
+
1043
+ @property
1044
+ def RunDirAppFiles(self) -> type[RunDirAppFiles]:
1045
+ """
1046
+ The :class:`RunDirAppFiles` class.
1047
+
1048
+ :meta private:
1049
+ """
1050
+ return self._get_app_core_class("RunDirAppFiles")
1051
+
1052
+ @property
1053
+ def WorkflowTask(self) -> type[WorkflowTask]:
1054
+ """
1055
+ The :class:`WorkflowTask` class.
1056
+
1057
+ :meta private:
1058
+ """
1059
+ return self._get_app_core_class("WorkflowTask")
1060
+
1061
+ @property
1062
+ def Parameters(self) -> type[Parameters]:
1063
+ """
1064
+ The :class:`Parameters` class.
1065
+
1066
+ :meta private:
1067
+ """
1068
+ return self._get_app_core_class("Parameters")
1069
+
1070
+ @property
1071
+ def Parameter(self) -> type[Parameter]:
1072
+ """
1073
+ The :class:`Parameter` class.
1074
+
1075
+ :meta private:
1076
+ """
1077
+ return self._get_app_core_class("Parameter")
1078
+
1079
+ @property
1080
+ def ParameterValue(self) -> type[ParameterValue]:
1081
+ """
1082
+ The :class:`ParameterValue` class.
1083
+
1084
+ :meta private:
1085
+ """
1086
+ return self._get_app_core_class("ParameterValue")
1087
+
1088
+ @property
1089
+ def InputValue(self) -> type[InputValue]:
1090
+ """
1091
+ The :class:`InputValue` class.
1092
+
1093
+ :meta private:
1094
+ """
1095
+ return self._get_app_core_class("InputValue")
1096
+
1097
+ @property
1098
+ def Task(self) -> type[Task]:
1099
+ """
1100
+ The :class:`Task` class.
1101
+
1102
+ :meta private:
1103
+ """
1104
+ return self._get_app_core_class("Task")
1105
+
1106
+ @property
1107
+ def TaskSchema(self) -> type[TaskSchema]:
1108
+ """
1109
+ The :class:`TaskSchema` class.
1110
+
1111
+ :meta private:
1112
+ """
1113
+ return self._get_app_core_class("TaskSchema")
1114
+
1115
+ @property
1116
+ def TaskSourceType(self) -> type[TaskSourceType]:
1117
+ """
1118
+ The :class:`TaskSourceType` class.
1119
+
1120
+ :meta private:
1121
+ """
1122
+ return self._get_app_core_class("TaskSourceType")
1123
+
1124
+ @property
1125
+ def TaskObjective(self) -> type[TaskObjective]:
1126
+ """
1127
+ The :class:`TaskObjective` class.
1128
+
1129
+ :meta private:
1130
+ """
1131
+ return self._get_app_core_class("TaskObjective")
1132
+
1133
+ @property
1134
+ def TaskInputParameters(self) -> type[TaskInputParameters]:
1135
+ """
1136
+ The :class:`TaskInputParameters` class.
1137
+
1138
+ :meta private:
1139
+ """
1140
+ return self._get_app_core_class("TaskInputParameters")
1141
+
1142
+ @property
1143
+ def TaskOutputParameters(self) -> type[TaskOutputParameters]:
1144
+ """
1145
+ The :class:`TaskOutputParameters` class.
1146
+
1147
+ :meta private:
1148
+ """
1149
+ return self._get_app_core_class("TaskOutputParameters")
1150
+
1151
+ @property
1152
+ def ElementPropagation(self) -> type[ElementPropagation]:
1153
+ """
1154
+ The :class:`ElementPropagation` class.
1155
+
1156
+ :meta private:
1157
+ """
1158
+ return self._get_app_core_class("ElementPropagation")
1159
+
1160
+ @property
1161
+ def WorkflowTemplate(self) -> type[_WorkflowTemplate]:
1162
+ """
1163
+ The :class:`WorkflowTemplate` class.
1164
+
1165
+ :meta private:
1166
+ """
1167
+ return self._get_app_core_class("WorkflowTemplate")
1168
+
1169
+ @property
1170
+ def Workflow(self) -> type[_Workflow]:
1171
+ """
1172
+ The :class:`Workflow` class.
1173
+
1174
+ :meta private:
1175
+ """
1176
+ return self._get_app_core_class("Workflow")
1177
+
1178
+ @property
1179
+ def Jobscript(self) -> type[Jobscript]:
1180
+ """
1181
+ The :class:`Jobscript` class.
1182
+
1183
+ :meta private:
1184
+ """
1185
+ return self._get_app_core_class("Jobscript")
1186
+
1187
+ @property
1188
+ def Submission(self) -> type[_Submission]:
1189
+ """
1190
+ The :class:`Submission` class.
1191
+
1192
+ :meta private:
1193
+ """
1194
+ return self._get_app_core_class("Submission")
1195
+
1196
+ @property
1197
+ def DirectPosix(self) -> type[DirectPosix]:
1198
+ """
1199
+ The :class:`DirectPosix` class.
1200
+
1201
+ :meta private:
1202
+ """
1203
+ return self._get_app_core_class("DirectPosix")
1204
+
1205
+ @property
1206
+ def DirectWindows(self) -> type[DirectWindows]:
1207
+ """
1208
+ The :class:`DirectWindows` class.
1209
+
1210
+ :meta private:
1211
+ """
1212
+ return self._get_app_core_class("DirectWindows")
1213
+
1214
+ @property
1215
+ def SGEPosix(self) -> type[SGEPosix]:
1216
+ """
1217
+ The :class:`SGEPosix` class.
1218
+
1219
+ :meta private:
1220
+ """
1221
+ return self._get_app_core_class("SGEPosix")
1222
+
1223
+ @property
1224
+ def SlurmPosix(self) -> type[SlurmPosix]:
1225
+ """
1226
+ The :class:`SlurmPosix` class.
1227
+
1228
+ :meta private:
1229
+ """
1230
+ return self._get_app_core_class("SlurmPosix")
1231
+
1232
+ @property
1233
+ def QueuedScheduler(self) -> type[QueuedScheduler]:
1234
+ """
1235
+ The :class:`QueuedScheduler` class.
1236
+
1237
+ :meta private:
1238
+ """
1239
+ return self._get_app_core_class("QueuedScheduler")
1240
+
1241
+ @property
1242
+ def make_workflow(self) -> _MakeWorkflow:
1243
+ """
1244
+ Generate a new workflow from a file or string containing a workflow
1245
+ template parametrisation.
1246
+
1247
+ Parameters
1248
+ ----------
1249
+ template_path_or_str: str
1250
+ Either a path to a template file in YAML or JSON format, or a YAML/JSON string.
1251
+ is_string: bool
1252
+ Determines if passing a file path or a string.
1253
+ template_format: str
1254
+ If specified, one of "json" or "yaml". This forces parsing from a particular
1255
+ format.
1256
+ path: str | Path
1257
+ The directory in which the workflow will be generated. The current directory
1258
+ if not specified.
1259
+ name: str
1260
+ The name of the workflow. If specified, the workflow directory will be `path`
1261
+ joined with `name`. If not specified the workflow template name will be used,
1262
+ in combination with a date-timestamp.
1263
+ overwrite: bool
1264
+ If True and the workflow directory (`path` + `name`) already exists, the
1265
+ existing directory will be overwritten.
1266
+ store: str
1267
+ The persistent store type to use.
1268
+ ts_fmt: str
1269
+ The datetime format to use for storing datetimes. Datetimes are always stored
1270
+ in UTC (because Numpy does not store time zone info), so this should not
1271
+ include a time zone name.
1272
+ ts_name_fmt: str
1273
+ The datetime format to use when generating the workflow name, where it
1274
+ includes a timestamp.
1275
+ store_kwargs: dict[str, object]
1276
+ Keyword arguments to pass to the store's `write_empty_workflow` method.
1277
+ variables: dict[str, str]
1278
+ String variables to substitute in `template_file_or_str`.
1279
+ status: bool
1280
+ If True, display a live status to track workflow creation progress.
1281
+ add_submission
1282
+ If True, add a submission to the workflow (but do not submit).
1283
+
1284
+ Returns
1285
+ -------
1286
+ Workflow
1287
+ The created workflow, if `add_submission` is `False`.
1288
+ Submission
1289
+ The created submission object, if `add_submission` is `True`.
1290
+ """
1291
+ return self.__get_app_func("make_workflow")
1292
+
1293
+ @property
1294
+ def make_demo_workflow(self) -> _MakeDemoWorkflow:
1295
+ """
1296
+ Generate a new workflow from a builtin demo workflow template.
1297
+
1298
+ Parameters
1299
+ ----------
1300
+ workflow_name: str
1301
+ Name of the demo workflow to make.
1302
+ template_format: str
1303
+ If specified, one of "json" or "yaml". This forces parsing from a particular
1304
+ format.
1305
+ path: str | Path
1306
+ The directory in which the workflow will be generated. The current directory
1307
+ if not specified.
1308
+ name: str
1309
+ The name of the workflow. If specified, the workflow directory will be `path`
1310
+ joined with `name`. If not specified the workflow template name will be used,
1311
+ in combination with a date-timestamp.
1312
+ overwrite: bool
1313
+ If True and the workflow directory (`path` + `name`) already exists, the
1314
+ existing directory will be overwritten.
1315
+ store: str
1316
+ The persistent store type to use.
1317
+ ts_fmt: str
1318
+ The datetime format to use for storing datetimes. Datetimes are always stored
1319
+ in UTC (because Numpy does not store time zone info), so this should not
1320
+ include a time zone name.
1321
+ ts_name_fmt: str
1322
+ The datetime format to use when generating the workflow name, where it
1323
+ includes a timestamp.
1324
+ store_kwargs: dict[str, object]
1325
+ Keyword arguments to pass to the store's `write_empty_workflow` method.
1326
+ variables: dict[str, str]
1327
+ String variables to substitute in the demo workflow template file.
1328
+ status: bool
1329
+ If True, display a live status to track workflow creation progress.
1330
+ add_submission
1331
+ If True, add a submission to the workflow (but do not submit).
1332
+
1333
+ Returns
1334
+ -------
1335
+ Workflow
1336
+ The created workflow, if `add_submission` is `False`.
1337
+ Submission
1338
+ The created submission object, if `add_submission` is `True`.
1339
+ """
1340
+ return self.__get_app_func("make_demo_workflow")
1341
+
1342
+ @property
1343
+ def make_and_submit_workflow(self) -> _MakeAndSubmitWorkflow:
1344
+ """
1345
+ Generate and submit a new workflow from a file or string containing a
1346
+ workflow template parametrisation.
1347
+
1348
+ Parameters
1349
+ ----------
1350
+
1351
+ template_path_or_str: str
1352
+ Either a path to a template file in YAML or JSON format, or a YAML/JSON string.
1353
+ is_string: str
1354
+ Determines whether `template_path_or_str` is a string or a file.
1355
+ template_format: str
1356
+ If specified, one of "json" or "yaml". This forces parsing from a particular
1357
+ format.
1358
+ path: str | Path
1359
+ The directory in which the workflow will be generated. The current directory
1360
+ if not specified.
1361
+ name: str
1362
+ The name of the workflow. If specified, the workflow directory will be `path`
1363
+ joined with `name`. If not specified the `WorkflowTemplate` name will be used,
1364
+ in combination with a date-timestamp.
1365
+ overwrite: bool
1366
+ If True and the workflow directory (`path` + `name`) already exists, the
1367
+ existing directory will be overwritten.
1368
+ store: str
1369
+ The persistent store to use for this workflow.
1370
+ ts_fmt: str
1371
+ The datetime format to use for storing datetimes. Datetimes are always stored
1372
+ in UTC (because Numpy does not store time zone info), so this should not
1373
+ include a time zone name.
1374
+ ts_name_fmt: str
1375
+ The datetime format to use when generating the workflow name, where it
1376
+ includes a timestamp.
1377
+ store_kwargs: dict[str, object]
1378
+ Keyword arguments to pass to the store's `write_empty_workflow` method.
1379
+ variables: dict[str, str]
1380
+ String variables to substitute in `template_file_or_str`.
1381
+ JS_parallelism: bool
1382
+ If True, allow multiple jobscripts to execute simultaneously. Raises if set to
1383
+ True but the store type does not support the `jobscript_parallelism` feature. If
1384
+ not set, jobscript parallelism will be used if the store type supports it.
1385
+ wait: bool
1386
+ If True, this command will block until the workflow execution is complete.
1387
+ add_to_known: bool
1388
+ If True, add the new submission to the known-submissions file, which is
1389
+ used by the `show` command to monitor current and recent submissions.
1390
+ return_idx: bool
1391
+ If True, return a dict representing the jobscript indices submitted for each
1392
+ submission.
1393
+ tasks: list[int]
1394
+ List of task indices to include in this submission. By default all tasks are
1395
+ included.
1396
+ cancel: bool
1397
+ Immediately cancel the submission. Useful for testing and benchmarking.
1398
+ status: bool
1399
+ If True, display a live status to track workflow creation and submission
1400
+ progress.
1401
+
1402
+ Returns
1403
+ -------
1404
+ Workflow
1405
+ The created workflow.
1406
+ dict[int, list[int]]
1407
+ Mapping of submission handles. If requested by ``return_idx`` parameter.
1408
+ """
1409
+ return self.__get_app_func("make_and_submit_workflow")
1410
+
1411
+ @property
1412
+ def make_and_submit_demo_workflow(self) -> _MakeAndSubmitDemoWorkflow:
1413
+ """
1414
+ Generate and submit a new demo workflow from a file or string containing a
1415
+ workflow template parametrisation.
1416
+
1417
+ Parameters
1418
+ ----------
1419
+ workflow_name: str
1420
+ Name of the demo workflow to make. **Required.**
1421
+ template_format: str
1422
+ If specified, one of "json" or "yaml". This forces parsing from a particular
1423
+ format.
1424
+ path: str | Path
1425
+ The directory in which the workflow will be generated. The current directory
1426
+ if not specified.
1427
+ name: str
1428
+ The name of the workflow. If specified, the workflow directory will be `path`
1429
+ joined with `name`. If not specified the `WorkflowTemplate` name will be used,
1430
+ in combination with a date-timestamp.
1431
+ overwrite: bool
1432
+ If True and the workflow directory (`path` + `name`) already exists, the
1433
+ existing directory will be overwritten.
1434
+ store: str
1435
+ The persistent store to use for this workflow.
1436
+ ts_fmt: str
1437
+ The datetime format to use for storing datetimes. Datetimes are always stored
1438
+ in UTC (because Numpy does not store time zone info), so this should not
1439
+ include a time zone name.
1440
+ ts_name_fmt: str
1441
+ The datetime format to use when generating the workflow name, where it
1442
+ includes a timestamp.
1443
+ store_kwargs: dict[str, object]
1444
+ Keyword arguments to pass to the store's `write_empty_workflow` method.
1445
+ variables: dict[str, str]
1446
+ String variables to substitute in the demo workflow template file.
1447
+ JS_parallelism: bool
1448
+ If True, allow multiple jobscripts to execute simultaneously. Raises if set to
1449
+ True but the store type does not support the `jobscript_parallelism` feature. If
1450
+ not set, jobscript parallelism will be used if the store type supports it.
1451
+ wait: bool
1452
+ If True, this command will block until the workflow execution is complete.
1453
+ add_to_known: bool
1454
+ If True, add the new submission to the known-submissions file, which is
1455
+ used by the `show` command to monitor current and recent submissions.
1456
+ return_idx: bool
1457
+ If True, return a dict representing the jobscript indices submitted for each
1458
+ submission.
1459
+ tasks: list[int]
1460
+ List of task indices to include in this submission. By default all tasks are
1461
+ included.
1462
+ cancel: bool
1463
+ Immediately cancel the submission. Useful for testing and benchmarking.
1464
+ status: bool
1465
+ If True, display a live status to track submission progress.
1466
+
1467
+ Returns
1468
+ -------
1469
+ Workflow
1470
+ The created workflow.
1471
+ dict[int, list[int]]
1472
+ Mapping of submission handles. If requested by ``return_idx`` parameter.
1473
+ """
1474
+ return self.__get_app_func("make_and_submit_demo_workflow")
1475
+
1476
+ @property
1477
+ def submit_workflow(self) -> _SubmitWorkflow:
1478
+ """
1479
+ Submit an existing workflow.
1480
+
1481
+ Parameters
1482
+ ----------
1483
+ workflow_path: str
1484
+ Path to an existing workflow
1485
+ JS_parallelism: bool
1486
+ If True, allow multiple jobscripts to execute simultaneously. Raises if set to
1487
+ True but the store type does not support the `jobscript_parallelism` feature. If
1488
+ not set, jobscript parallelism will be used if the store type supports it.
1489
+ tasks: list[int]
1490
+ List of task indices to include in this submission. By default all tasks are
1491
+ included.
1492
+
1493
+ Returns
1494
+ -------
1495
+ dict[int, list[int]]
1496
+ Mapping of submission handles. If requested by ``return_idx`` parameter.
1497
+ """
1498
+ return self.__get_app_func("submit_workflow")
+
+ @property
+ def run_hpcflow_tests(self) -> _RunTests:
+ """Run the hpcflow test suite. This function is only available from derived apps."""
+ return self.__get_app_func("run_hpcflow_tests")
+
+ @property
+ def run_tests(self) -> _RunTests:
+ """Run the test suite."""
+ return self.__get_app_func("run_tests")
+
+ @property
+ def get_OS_info(self) -> Callable[[], Mapping[str, str]]:
+ """
+ Get information about the operating system.
+
+ Returns
+ -------
+ dict[str, str]
+ Key-value mapping containing system version information.
+ """
+ return self.__get_app_func("get_OS_info")
+
+ @property
+ def get_shell_info(self) -> Callable[[str, bool], VersionInfo]:
+ """
+ Get information about a given shell and the operating system.
+
+ Parameters
+ ----------
+ shell_name: str
+ One of the supported shell names.
+ exclude_os: bool
+ If True, exclude operating system information.
+
+ Returns
+ -------
+ VersionInfo
+ The shell version information descriptor.
+ """
+ return self.__get_app_func("get_shell_info")
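
The two helpers above combine naturally into a quick environment report; a sketch (assumes `app` as above, and the exact keys of the returned mappings are not specified here):

```python
from hpcflow.app import app  # assumed entry point

os_info = app.get_OS_info()                    # mapping of OS version details
bash_info = app.get_shell_info("bash", False)  # shell plus OS version information
print(dict(os_info), bash_info)
```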
+
+ @property
+ def get_known_submissions(self) -> _GetKnownSubmissions:
+ """
+ Retrieve information about active and recently inactive finished workflows.
+
+ This method removes workflows from the known-submissions file that are found to be
+ inactive on this machine (according to the scheduler/process ID).
+
+ Parameters
+ ----------
+ max_recent: int
+ Maximum number of inactive workflows to retrieve.
+ no_update: bool
+ If True, do not update the known-submissions file to set submissions that are
+ now inactive.
+ as_json: bool
+ If True, only include JSON-compatible information. This will exclude the
+ `submission` key, for instance.
+
+ Returns
+ -------
+ list[KnownSubmissionItem]
+ List of descriptions of known items.
+ """
+ return self.__get_app_func("get_known_submissions")
+
+ @property
+ def show(self) -> _Show:
+ """
+ Show information about running workflows.
 
- self._builtin_template_components = template_components or {}
+ Parameters
+ ----------
+ max_recent: int
+ Maximum number of inactive workflows to show.
+ full: bool
+ If True, provide more information; output may span multiple lines for each
+ workflow submission.
+ no_update: bool
+ If True, do not update the known-submissions file to remove workflows that are
+ no longer running.
+ """
+ return self.__get_app_func("show")
 
- self._config = None # assigned on first access to `config` property
- self._config_files = {} # assigned on config load, keys are string absolute paths
+ @property
+ def show_legend(self) -> Callable[[], None]:
+ """
+ Output a legend for the jobscript-element and EAR states that are displayed
+ by the `show` command.
+ """
+ return self.__get_app_func("show_legend")
 
- # Set by `_load_template_components`:
- self._template_components = {}
- self._parameters = None
- self._command_files = None
- self._environments = None
- self._task_schemas = None
- self._scripts = None
+ @property
+ def cancel(self) -> _Cancel:
+ """
+ Cancel the execution of a workflow submission.
 
- self._app_attr_cache = {}
+ Parameters
+ ----------
+ workflow_ref: int | str | Path
+ Which workflow to cancel, by ID or path.
+ ref_is_path: str
+ One of "``id``", "``path``" or "``assume-id``" (the default).
+ status: bool
+ Whether to show a live status during cancel.
+ """
+ return self.__get_app_func("cancel")
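
A usage sketch of cancelling by path (assumes `app` as above; the workflow path is illustrative):

```python
from hpcflow.app import app  # assumed entry point

# Cancel whatever is currently running for the workflow stored at this path.
app.cancel(workflow_ref="./my_workflow", ref_is_path="path")
```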
 
- # assigned on first access to respective properties
- self._user_data_dir = None
- self._user_cache_dir = None
- self._user_runtime_dir = None
- self._user_data_hostname_dir = None
- self._user_cache_hostname_dir = None
- self._demo_data_cache_dir = None
-
- def __getattr__(self, name):
+ def __getattr__(self, name: str):
  if name in sdk_classes:
  return self._get_app_core_class(name)
  elif name in sdk_funcs:
- return self._get_app_func(name)
+ return self.__get_app_func(name)
  else:
  raise AttributeError(f"module {__name__!r} has no attribute {name!r}.")
 
  def __repr__(self):
  return f"{self.__class__.__name__}(name={self.name!r}, version={self.version!r})"
 
- def _get_app_core_class(self, name: str) -> Type:
- if name not in self._app_attr_cache:
- obj_mod = import_module(sdk_classes[name])
- cls = getattr(obj_mod, name)
- if issubclass(cls, enum.Enum):
- sub_cls = cls
- else:
- dct = {}
- if hasattr(cls, "_app_attr"):
- dct = {getattr(cls, "_app_attr"): self}
- sub_cls = type(cls.__name__, (cls,), dct)
- if cls.__doc__:
- sub_cls.__doc__ = cls.__doc__.format(app_name=self.name)
- sub_cls.__module__ = self.module
- self._app_attr_cache[name] = sub_cls
-
- return self._app_attr_cache[name]
-
- def _get_app_func(self, name) -> Callable:
- if name not in self._app_attr_cache:
-
- def wrap_func(func):
- # this function avoids scope issues
- return lambda *args, **kwargs: func(*args, **kwargs)
-
- # retrieve the "private" function:
- sdk_func = getattr(self, f"_{name}")
-
- func = wrap_func(sdk_func)
- func = wraps(sdk_func)(func)
- if func.__doc__:
- func.__doc__ = func.__doc__.format(app_name=self.name)
- func.__module__ = self.module
- self._app_attr_cache[name] = func
-
- return self._app_attr_cache[name]
+ def _get_app_core_class(self, name: str) -> type:
+ if name in self.__app_type_cache:
+ return self.__app_type_cache[name]
+ obj_mod = import_module(sdk_classes[name])
+ cls = getattr(obj_mod, name)
+ if issubclass(cls, enum.Enum):
+ sub_cls = cls
+ else:
+ dct: dict[str, Any] = {}
+ if hasattr(cls, "_app_attr"):
+ dct = {getattr(cls, "_app_attr"): self}
+ sub_cls = type(cls.__name__, (cls,), dct)
+ if cls.__doc__:
+ sub_cls.__doc__ = cls.__doc__.format(app_name=self.name)
+ sub_cls.__module__ = self.module
+ self.__app_type_cache[name] = sub_cls
+ return sub_cls
+
+ def __get_app_func(self, name: str) -> Callable[..., Any]:
+ if name in self.__app_func_cache:
+ return self.__app_func_cache[name]
+
+ def wrap_func(func) -> Callable[..., Any]:
+ # this function avoids scope issues
+ return lambda *args, **kwargs: func(*args, **kwargs)
+
+ # retrieve the "private" function:
+ sdk_func = getattr(self, f"_{name}")
+
+ func = wrap_func(sdk_func)
+ func = wraps(sdk_func)(func)
+ if func.__doc__:
+ func.__doc__ = func.__doc__.format(app_name=self.name)
+ func.__module__ = self.module
+ self.__app_func_cache[name] = func
+ return func
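
The dynamic-subclass trick in `_get_app_core_class` is worth spelling out; a standalone, runnable sketch of the pattern (class and attribute names invented for illustration):

```python
# Each SDK class can declare, via `_app_attr`, the attribute name under which
# a dynamically-created subclass should expose the owning app object.
class Widget:
    _app_attr = "app"

class FakeApp:
    name = "demo"

fake_app = FakeApp()
dct = {getattr(Widget, "_app_attr"): fake_app}
BoundWidget = type(Widget.__name__, (Widget,), dct)  # subclass bound to the app
assert BoundWidget.app is fake_app  # class body can now reach the app object
```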
 
  @property
  def run_time_info(self) -> RunTimeInfo:
@@ -370,107 +1680,118 @@ class BaseApp(metaclass=Singleton):
  TimeIt.active = bool(value)
 
  @property
- def template_components(self) -> Dict[str, ObjectList]:
+ def template_components(self) -> TemplateComponents:
  """
  The template component data.
  """
  if not self.is_template_components_loaded:
+ if BaseApp.__load_pending:
+ return {}
+ BaseApp.__load_pending = True
  self._load_template_components()
+ BaseApp.__load_pending = False
  return self._template_components
 
- def _ensure_template_component(self, name) -> None:
+ @property
+ def _shared_data(self) -> Mapping[str, Any]:
+ return cast("Mapping[str, Any]", self.template_components)
+
+ def _ensure_template_component(self, name: str) -> None:
  """Invoked by access to individual template components (e.g. parameters)"""
  if not getattr(self, f"_{name}"):
  self._load_template_components(name)
  else:
  self.logger.debug(f"Template component {name!r} already loaded")
 
- def load_template_components(self, warn=True) -> None:
+ def load_template_components(self, warn: bool = True) -> None:
  """Load all template component data, warning by default if already loaded."""
  if warn and self.is_template_components_loaded:
  warnings.warn("Template components already loaded; reloading now.")
  self._load_template_components()
 
- def reload_template_components(self, warn=True) -> None:
- """Reload all template component data, warning by default if not already
- loaded."""
+ def reload_template_components(self, warn: bool = True) -> None:
+ """
+ Reload all template component data, warning by default if not already
+ loaded.
+ """
  if warn and not self.is_template_components_loaded:
  warnings.warn("Template components not loaded; loading now.")
  self._load_template_components()
 
  @TimeIt.decorator
- def _load_template_components(self, *include) -> None:
- """Combine any builtin template components with user-defined template components
- and initialise list objects."""
-
+ def _load_template_components(self, *include: str) -> None:
+ """
+ Combine any builtin template components with user-defined template components
+ and initialise list objects.
+ """
  if not include or "task_schemas" in include:
  # task schemas require all other template components to be loaded first
- include = [
+ include = (
  "parameters",
  "command_files",
  "environments",
  "task_schemas",
  "scripts",
- ]
+ )
 
  self.logger.debug(f"Loading template components: {include!r}.")
 
- self_tc = self._template_components
+ self_tc: Any = self._template_components
 
  if "parameters" in include:
- params = self._builtin_template_components.get("parameters", [])
+ params: list[Any] = self._builtin_template_components.get("parameters", [])
  for path in self.config.parameter_sources:
  params.extend(read_YAML_file(path))
- self_tc["parameters"] = self.ParametersList.from_json_like(
- params, shared_data=self_tc
- )
- self._parameters = self_tc["parameters"]
+ param_list = self.ParametersList.from_json_like(params, shared_data=self_tc)
+ self._template_components["parameters"] = param_list
+ self._parameters = param_list
 
  if "command_files" in include:
- cmd_files = self._builtin_template_components.get("command_files", [])
+ cmd_files: list[Any] = self._builtin_template_components.get(
+ "command_files", []
+ )
  for path in self.config.command_file_sources:
  cmd_files.extend(read_YAML_file(path))
- self_tc["command_files"] = self.CommandFilesList.from_json_like(
- cmd_files, shared_data=self_tc
- )
- self._command_files = self_tc["command_files"]
+ cf_list = self.CommandFilesList.from_json_like(cmd_files, shared_data=self_tc)
+ self._template_components["command_files"] = cf_list
+ self._command_files = cf_list
 
  if "environments" in include:
  envs = []
- builtin_envs = self._builtin_template_components.get("environments", [])
- for path in self.config.environment_sources:
- envs_i_lst = read_YAML_file(path)
- for env_j in envs_i_lst:
+ builtin_envs: list[Any] = self._builtin_template_components.get(
+ "environments", []
+ )
+ for e_path in self.config.environment_sources:
+ for env_j in read_YAML_file(e_path):
  for b_idx, builtin_env in enumerate(list(builtin_envs)):
  # overwrite builtin envs with user-supplied:
  if builtin_env["name"] == env_j["name"]:
  builtin_envs.pop(b_idx)
  envs.append(env_j)
  envs = builtin_envs + envs
- self_tc["environments"] = self.EnvironmentsList.from_json_like(
- envs, shared_data=self_tc
- )
- self._environments = self_tc["environments"]
+ env_list = self.EnvironmentsList.from_json_like(envs, shared_data=self_tc)
+ self._template_components["environments"] = env_list
+ self._environments = env_list
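
The merge logic above (user-supplied environment definitions replace builtins of the same name, with remaining builtins kept first) can be checked in isolation; a minimal sketch with invented data:

```python
builtin_envs = [{"name": "python_env", "v": 1}, {"name": "matlab_env", "v": 1}]
user_envs = [{"name": "python_env", "v": 2}]  # overrides the builtin of same name

envs = []
for env_j in user_envs:
    for b_idx, builtin_env in enumerate(list(builtin_envs)):
        # overwrite builtin envs with user-supplied:
        if builtin_env["name"] == env_j["name"]:
            builtin_envs.pop(b_idx)
    envs.append(env_j)
envs = builtin_envs + envs

assert [e["v"] for e in envs if e["name"] == "python_env"] == [2]
```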
 
  if "task_schemas" in include:
- schemas = self._builtin_template_components.get("task_schemas", [])
+ schemas: list[Any] = self._builtin_template_components.get("task_schemas", [])
  for path in self.config.task_schema_sources:
  schemas.extend(read_YAML_file(path))
- self_tc["task_schemas"] = self.TaskSchemasList.from_json_like(
- schemas, shared_data=self_tc
- )
- self._task_schemas = self_tc["task_schemas"]
+ ts_list = self.TaskSchemasList.from_json_like(schemas, shared_data=self_tc)
+ self._template_components["task_schemas"] = ts_list
+ self._task_schemas = ts_list
 
  if "scripts" in include:
- self_tc["scripts"] = self._load_scripts()
- self._scripts = self_tc["scripts"]
+ scripts = self._load_scripts()
+ self._template_components["scripts"] = scripts
+ self._scripts = scripts
 
  self.logger.info(f"Template components loaded ({include!r}).")
 
  @classmethod
  def load_builtin_template_component_data(
- cls, package
- ) -> Dict[str, Union[List, Dict]]:
+ cls, package: ModuleType | str
+ ) -> BasicTemplateComponents:
  """
  Load the template component data built into the package.
  This is as opposed to the template components defined by users.
@@ -478,55 +1799,57 @@ class BaseApp(metaclass=Singleton):
  SDK_logger.info(
  f"Loading built-in template component data for package: {package!r}."
  )
- components = {}
+ components: BasicTemplateComponents = {}
  for comp_type in TEMPLATE_COMP_TYPES:
- resource = f"{comp_type}.yaml"
- fh = resources.files(package).joinpath(resource).open("rt")
- SDK_logger.info(f"Parsing file as YAML: {fh.name!r}")
- comp_dat = fh.read()
- components[comp_type] = read_YAML_str(comp_dat)
- fh.close()
+ with open_text_resource(package, f"{comp_type}.yaml") as fh:
+ SDK_logger.info(f"Parsing file as YAML: {fh.name!r}")
+ components[comp_type] = read_YAML_str(fh.read())
 
  return components
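
The `open_text_resource` helper replaces the manual open/close pair in the old code; the equivalent stdlib pattern, for reference (the package and file names below mirror the builtin data shipped with hpcflow, but treat them as illustrative):

```python
from importlib import resources

# Context-managed read of a packaged YAML resource; the file handle is
# closed automatically on exit.
with resources.files("hpcflow.data.template_components").joinpath(
    "environments.yaml"
).open("rt") as fh:
    text = fh.read()
```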
 
  @property
- def parameters(self) -> get_app_attribute("ParametersList"):
+ def parameters(self) -> _ParametersList:
  """
  The known template parameters.
  """
  self._ensure_template_component("parameters")
+ assert self._parameters is not None
  return self._parameters
 
  @property
- def command_files(self) -> get_app_attribute("CommandFilesList"):
+ def command_files(self) -> _CommandFilesList:
  """
  The known template command files.
  """
  self._ensure_template_component("command_files")
+ assert self._command_files is not None
  return self._command_files
 
  @property
- def envs(self) -> get_app_attribute("EnvironmentsList"):
+ def envs(self) -> _EnvironmentsList:
  """
  The known template execution environments.
  """
  self._ensure_template_component("environments")
+ assert self._environments is not None
  return self._environments
 
  @property
- def scripts(self):
+ def scripts(self) -> dict[str, Path]:
  """
  The known template scripts.
  """
  self._ensure_template_component("scripts")
+ assert self._scripts is not None
  return self._scripts
 
  @property
- def task_schemas(self) -> get_app_attribute("TaskSchemasList"):
+ def task_schemas(self) -> _TaskSchemasList:
  """
  The known template task schemas.
  """
  self._ensure_template_component("task_schemas")
+ assert self._task_schemas is not None
  return self._task_schemas
 
  @property
@@ -597,10 +1920,11 @@ class BaseApp(metaclass=Singleton):
  """
  if not self.is_config_loaded:
  self.load_config()
+ assert self._config
  return self._config
 
  @property
- def scheduler_lookup(self):
+ def scheduler_lookup(self) -> dict[tuple[str, str], type[Scheduler]]:
  """
  The scheduler mapping.
  """
@@ -611,14 +1935,20 @@ class BaseApp(metaclass=Singleton):
  ("slurm", "posix"): self.SlurmPosix,
  }
 
- def get_scheduler(self, scheduler_name, os_name, scheduler_args=None):
+ def get_scheduler(
+ self,
+ scheduler_name: str,
+ os_name: str,
+ scheduler_args: dict[str, Any] | None = None,
+ ) -> Scheduler:
  """Get an arbitrary scheduler object."""
- scheduler_args = scheduler_args or {}
+ scheduler_kwargs = scheduler_args or {}
 
  os_name = os_name.lower()
  if os_name == "nt" and "_" in scheduler_name:
  # e.g. WSL on windows uses *_posix
  key = tuple(scheduler_name.split("_"))
+ assert len(key) == 2
  else:
  key = (scheduler_name.lower(), os_name)
 
@@ -628,28 +1958,28 @@ class BaseApp(metaclass=Singleton):
  raise ValueError(
  f"Unsupported combination of scheduler and operating system: {key!r}"
  )
- return scheduler_cls(**scheduler_args)
+ return scheduler_cls(**scheduler_kwargs)
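
A standalone sketch of the scheduler-key resolution shown above (values invented; the `*_posix` split covers WSL running under Windows):

```python
scheduler_name, os_name = "slurm_posix", "nt"
if os_name == "nt" and "_" in scheduler_name:
    # e.g. WSL on Windows uses *_posix
    key = tuple(scheduler_name.split("_"))
else:
    key = (scheduler_name.lower(), os_name)
assert key == ("slurm", "posix")  # matches a ("slurm", "posix") lookup entry
```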
 
- def get_OS_supported_schedulers(self):
- """Retrieve a list of schedulers that are supported in principle by this operating
+ def get_OS_supported_schedulers(self) -> Iterator[str]:
+ """
+ Retrieve a list of schedulers that are supported in principle by this operating
  system.
 
  This does not necessarily mean all the returned schedulers are available on this
  system.
-
  """
- out = []
  for k in self.scheduler_lookup:
  if os.name == "nt" and k == ("direct", "posix"):
  # this is valid for WSL on Windows
- out.append("_".join(k))
+ yield "_".join(k)
  elif k[1] == os.name:
- out.append(k[0])
- return out
+ yield k[0]
 
  def perm_error_retry(self):
- """Return a decorator for retrying functions on permission and OS errors that
- might be associated with cloud-storage desktop sync. engine operations."""
+ """
+ Return a decorator for retrying functions on permission and OS errors that
+ might be associated with cloud-storage desktop sync. engine operations.
+ """
  return retry(
  (PermissionError, OSError),
  tries=10,
@@ -696,7 +2026,6 @@ class BaseApp(metaclass=Singleton):
  We segregate by hostname to account for the case where multiple machines might
  use the same shared file system.
  """
-
  # This might need to cover e.g. multiple login nodes, as described in the
  # config file:
  if self._user_data_hostname_dir is None:
@@ -720,13 +2049,15 @@ class BaseApp(metaclass=Singleton):
  return self.user_data_dir
 
  def _ensure_user_runtime_dir(self) -> Path:
- """Generate a user runtime directory for this machine in which we can create
+ """
+ Generate a user runtime directory for this machine in which we can create
  semi-persistent temporary files.
 
- Note: unlike `_ensure_user_data_dir`, and `_ensure_user_data_hostname_dir`, this
+ Note
+ ----
+ Unlike `_ensure_user_data_dir`, and `_ensure_user_data_hostname_dir`, this
  method is not invoked on config load, because it might need to be created after
  each reboot, and it is not routinely used.
-
  """
  if not self.user_runtime_dir.exists():
  self.user_runtime_dir.mkdir(parents=True)
@@ -752,8 +2083,10 @@ class BaseApp(metaclass=Singleton):
  return self.demo_data_cache_dir
 
  def _ensure_user_data_hostname_dir(self) -> Path:
- """Ensure a user data directory for this machine exists (used by the helper
- process and the known-submissions file)."""
+ """
+ Ensure a user data directory for this machine exists (used by the helper
+ process and the known-submissions file).
+ """
  if not self.user_data_hostname_dir.exists():
  self.user_data_hostname_dir.mkdir(parents=True)
  self.logger.info(
@@ -771,46 +2104,48 @@ class BaseApp(metaclass=Singleton):
  )
  return self.user_cache_hostname_dir
 
- def clear_user_runtime_dir(self):
+ def clear_user_runtime_dir(self) -> None:
  """Delete the contents of the user runtime directory."""
  if self.user_runtime_dir.exists():
  shutil.rmtree(self.user_runtime_dir)
  self._ensure_user_runtime_dir()
 
- def clear_user_cache_dir(self):
+ def clear_user_cache_dir(self) -> None:
  """Delete the contents of the cache directory."""
  if self.user_cache_dir.exists():
  shutil.rmtree(self.user_cache_dir)
  self._ensure_user_cache_dir()
 
- def clear_demo_data_cache_dir(self):
+ def clear_demo_data_cache_dir(self) -> None:
  """Delete the contents of the example data files cache directory."""
  if self.demo_data_cache_dir.exists():
  shutil.rmtree(self.demo_data_cache_dir)
  self._ensure_demo_data_cache_dir()
 
- def clear_user_cache_hostname_dir(self):
+ def clear_user_cache_hostname_dir(self) -> None:
  """Delete the contents of the hostname-scoped cache directory."""
  if self.user_cache_hostname_dir.exists():
  shutil.rmtree(self.user_cache_hostname_dir)
  self._ensure_user_cache_hostname_dir()
 
  @TimeIt.decorator
- def _load_config(self, config_dir, config_key, **overrides) -> None:
+ def _load_config(
+ self, config_dir: PathLike, config_key: str | None, **overrides
+ ) -> None:
  self.logger.info("Loading configuration.")
  self._ensure_user_data_dir()
- config_dir = ConfigFile._resolve_config_dir(
+ resolved_config_dir = ConfigFile._resolve_config_dir(
  config_opt=self.config_options,
  logger=self.config_logger,
  directory=config_dir,
  )
- if str(config_dir) not in self._config_files:
- self._config_files[str(config_dir)] = ConfigFile(
- directory=config_dir,
+ if str(resolved_config_dir) not in self._config_files:
+ self._config_files[str(resolved_config_dir)] = ConfigFile(
+ directory=resolved_config_dir,
  logger=self.config_logger,
  config_options=self.config_options,
  )
- file = self._config_files[str(config_dir)]
+ file = self._config_files[str(resolved_config_dir)]
  self._config = Config(
  app=self,
  config_file=file,
@@ -821,35 +2156,46 @@ class BaseApp(metaclass=Singleton):
  **overrides,
  )
  self.log.update_console_level(self.config.get("log_console_level"))
- self.log.add_file_logger(
- path=self.config.get("log_file_path"),
- level=self.config.get("log_file_level"),
- )
+ log_file_path = self.config.get("log_file_path")
+ if log_file_path:
+ self.log.add_file_logger(
+ path=log_file_path,
+ level=self.config.get("log_file_level"),
+ )
  self.logger.info(f"Configuration loaded from: {self.config.config_file_path}")
  self._ensure_user_data_hostname_dir()
 
  def load_config(
  self,
- config_dir=None,
- config_key=None,
- warn=True,
+ config_dir: PathLike = None,
+ config_key: str | None = None,
+ warn: bool = True,
  **overrides,
  ) -> None:
  """
  Load the user's configuration.
+
+ Parameters
+ ----------
+ config_dir:
+ Directory containing the configuration, if not default.
+ config_key:
+ Key to the configuration within the config file.
+ warn:
+ Whether to warn if a configuration is already loaded.
  """
  if warn and self.is_config_loaded:
  warnings.warn("Configuration is already loaded; reloading.")
  self._load_config(config_dir, config_key, **overrides)
 
- def unload_config(self):
+ def unload_config(self) -> None:
  """
  Discard any loaded configuration.
  """
  self._config_files = {}
  self._config = None
 
- def get_config_path(self, config_dir=None):
+ def get_config_path(self, config_dir: PathLike = None) -> Path:
  """Return the full path to the config file, without loading the config."""
  config_dir = ConfigFile._resolve_config_dir(
  config_opt=self.config_options,
@@ -858,7 +2204,7 @@ class BaseApp(metaclass=Singleton):
  )
  return ConfigFile.get_config_file_path(config_dir)
 
- def _delete_config_file(self, config_dir=None):
+ def _delete_config_file(self, config_dir: PathLike = None) -> None:
  """Delete the config file."""
  config_path = self.get_config_path(config_dir=config_dir)
  self.logger.info(f"deleting config file: {str(config_path)!r}.")
@@ -866,13 +2212,13 @@ class BaseApp(metaclass=Singleton):
 
  def reset_config(
  self,
- config_dir=None,
- config_key=None,
- warn=True,
+ config_dir: PathLike = None,
+ config_key: str | None = None,
+ warn: bool = True,
  **overrides,
  ) -> None:
  """Reset the config file to defaults, and reload the config."""
- self.logger.info(f"resetting config")
+ self.logger.info("resetting config")
  self._delete_config_file(config_dir=config_dir)
  self._config = None
  self._config_files = {}
@@ -880,9 +2226,9 @@ class BaseApp(metaclass=Singleton):
 
  def reload_config(
  self,
- config_dir=None,
- config_key=None,
- warn=True,
+ config_dir: PathLike = None,
+ config_key: str | None = None,
+ warn: bool = True,
  **overrides,
  ) -> None:
  """
@@ -891,64 +2237,66 @@ class BaseApp(metaclass=Singleton):
  """
  if warn and not self.is_config_loaded:
  warnings.warn("Configuration is not loaded; loading.")
- self.log.remove_file_handlers()
+ self.log.remove_file_handler()
  self._config_files = {}
  self._load_config(config_dir, config_key, **overrides)
 
  @TimeIt.decorator
- def _load_scripts(self):
-
+ def _load_scripts(self) -> dict[str, Path]:
+ """
+ Discover where the built-in scripts all are.
+ """
  # TODO: load custom directories / custom functions (via decorator)
  scripts_package = f"{self.package_name}.{self.scripts_dir}"
 
- ctx = resources.as_file(resources.files(scripts_package))
-
- scripts = {}
- with ctx as path:
- for dirpath, _, filenames in os.walk(path):
- dirpath = Path(dirpath)
- if dirpath.name == "__pycache__":
- continue
- for filename in filenames:
- if filename == "__init__.py":
+ scripts: dict[str, Path] = {}
+ try:
+ with get_file_context(scripts_package) as path:
+ for dirpath, _, filenames in os.walk(path):
+ dirpath_ = Path(dirpath)
+ if dirpath_.name == "__pycache__":
  continue
- val = dirpath.joinpath(filename)
- key = str(val.relative_to(path).as_posix())
- scripts[key] = Path(val)
-
+ for filename in filenames:
+ if filename == "__init__.py":
+ continue
+ val = dirpath_.joinpath(filename)
+ scripts[val.relative_to(path).as_posix()] = Path(val)
+ except ModuleNotFoundError:
+ self.logger.exception("failed to find scripts package")
+ SDK_logger.info(f"loaded {len(scripts)} scripts from {scripts_package}")
  return scripts
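
The walk in `_load_scripts` maps a POSIX-style relative key to each script file; a standalone sketch of that mapping logic (the function name and directory argument are invented):

```python
import os
from pathlib import Path

def collect_scripts(root: Path) -> dict[str, Path]:
    # Map e.g. "sub/dir/script.py" -> absolute Path, skipping caches and inits.
    scripts: dict[str, Path] = {}
    for dirpath, _, filenames in os.walk(root):
        d = Path(dirpath)
        if d.name == "__pycache__":
            continue
        for filename in filenames:
            if filename == "__init__.py":
                continue
            val = d / filename
            scripts[val.relative_to(root).as_posix()] = val
    return scripts
```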
 
- def _get_demo_workflows(self) -> Dict[str, Path]:
+ def _get_demo_workflows(self) -> dict[str, Path]:
  """Get all builtin demo workflow template file paths."""
- templates = {}
+ templates: dict[str, Path] = {}
  pkg = f"{self.package_name}.{self.workflows_dir}"
- files = resources.files(pkg).iterdir()
- for i in files:
- if i.suffix in (".yaml", ".yml", ".json", ".jsonc"):
- templates[i.stem] = i
+ for file in resources.files(pkg).iterdir():
+ p = Path(str(file))
+ if p.exists() and p.suffix in (".yaml", ".yml", ".json", ".jsonc"):
+ templates[p.stem] = p
  return templates
 
- def list_demo_workflows(self) -> Tuple[str]:
+ def list_demo_workflows(self) -> tuple[str, ...]:
  """Return a list of demo workflow templates included in the app."""
- return tuple(sorted(self._get_demo_workflows().keys()))
+ return tuple(sorted(self._get_demo_workflows()))
 
  @contextmanager
  def get_demo_workflow_template_file(
  self, name: str, doc: bool = True, delete: bool = True
- ) -> Path:
- """Context manager to get a (temporary) file path to an included demo workflow
+ ) -> Iterator[Path]:
+ """
+ Context manager to get a (temporary) file path to an included demo workflow
  template.
 
  Parameters
  ----------
- name
+ name:
  Name of the builtin demo workflow template whose file path is to be retrieved.
- doc
+ doc:
  If False, the yielded path will be to a file without the `doc` attribute (if
  originally present).
- delete
+ delete:
  If True, remove the temporary file on exit.
-
  """
  tmp_dir = self._ensure_user_runtime_dir()
  builtin_path = self._get_demo_workflows()[name]
@@ -961,12 +2309,12 @@ class BaseApp(metaclass=Singleton):
  # load the file, modify, then dump to temp location:
  if builtin_path.suffix in (".yaml", ".yml"):
  # use round-trip loader to preserve comments:
- data = read_YAML_file(builtin_path, typ="rt", variables=False)
+ data = read_YAML_file(builtin_path, typ="rt", variables={})
  data.pop("doc", None)
  write_YAML_file(data, path, typ="rt")
 
  elif builtin_path.suffix in (".json", ".jsonc"):
- data = read_JSON_file(builtin_path, variables=False)
+ data = read_JSON_file(builtin_path, variables={})
  data.pop("doc", None)
  write_JSON_file(data, path)
 
@@ -976,9 +2324,10 @@ class BaseApp(metaclass=Singleton):
  path.unlink()
 
  def copy_demo_workflow(
- self, name: str, dst: Optional[PathLike] = None, doc: bool = True
+ self, name: str, dst: PathLike | None = None, doc: bool = True
  ) -> str:
- """Copy a builtin demo workflow to the specified location.
+ """
+ Copy a builtin demo workflow to the specified location.
 
  Parameters
  ----------
@@ -991,7 +2340,6 @@ class BaseApp(metaclass=Singleton):
  If False, the copied workflow template file will not include the `doc`
  attribute (if originally present).
  """
-
  dst = dst or Path(".")
  with self.get_demo_workflow_template_file(name, doc=doc) as src:
  shutil.copy2(src, dst)  # copies metadata, and `dst` can be a dir
@@ -999,15 +2347,16 @@ class BaseApp(metaclass=Singleton):
  return src.name
 
  def show_demo_workflow(self, name: str, syntax: bool = True, doc: bool = False):
- """Print the contents of a builtin demo workflow template file.
+ """
+ Print the contents of a builtin demo workflow template file.
 
  Parameters
  ----------
- name
+ name:
  The name of the demo workflow file to print.
- syntax
+ syntax:
  If True, use rich to syntax-highlight the output.
- doc
+ doc:
  If False, the printed workflow template file contents will not include the
  `doc` attribute (if originally present).
  """
@@ -1017,42 +2366,43 @@ class BaseApp(metaclass=Singleton):
 
  if syntax:
  fmt = DEMO_WK_FORMATS[path.suffix]
- contents = Syntax(contents, fmt)
- console = Console()
- console.print(contents)
+ Console().print(Syntax(contents, fmt))
  else:
  print(contents)
 
- def load_demo_workflow(self, name: str) -> get_app_attribute("WorkflowTemplate"):
+ def load_demo_workflow(self, name: str) -> _WorkflowTemplate:
  """Load a WorkflowTemplate object from a builtin demo template file."""
  with self.get_demo_workflow_template_file(name) as path:
  return self.WorkflowTemplate.from_file(path)
 
- def template_components_from_json_like(self, json_like):
+ def template_components_from_json_like(
+ self, json_like: dict[str, dict]
+ ) -> TemplateComponents:
  """
- Get template components from a (simply parsed) JSOM document.
+ Get template components from a (simply parsed) JSON document.
  """
- cls_lookup = {
- "parameters": self.ParametersList,
- "command_files": self.CommandFilesList,
- "environments": self.EnvironmentsList,
- "task_schemas": self.TaskSchemasList,
- }
- tc = {}
- for k, v in cls_lookup.items():
- tc_k = v.from_json_like(
- json_like.get(k, {}),
- shared_data=tc,
- is_hashed=True,
- )
- tc[k] = tc_k
+ tc: TemplateComponents = {}
+ sd: Mapping[str, Any] = tc
+ tc["parameters"] = self.ParametersList.from_json_like(
+ json_like.get("parameters", {}), shared_data=sd, is_hashed=True
+ )
+ tc["command_files"] = self.CommandFilesList.from_json_like(
+ json_like.get("command_files", {}), shared_data=sd, is_hashed=True
+ )
+ tc["environments"] = self.EnvironmentsList.from_json_like(
+ json_like.get("environments", {}), shared_data=sd, is_hashed=True
+ )
+ tc["task_schemas"] = self.TaskSchemasList.from_json_like(
+ json_like.get("task_schemas", {}), shared_data=sd, is_hashed=True
+ )
  return tc
 
- def get_parameter_task_schema_map(self) -> Dict[str, List[List]]:
- """Get a dict mapping parameter types to task schemas that input/output each
- parameter."""
-
- param_map = {}
+ def get_parameter_task_schema_map(self) -> dict[str, list[list[str]]]:
+ """
+ Get a dict mapping parameter types to task schemas that input/output each
+ parameter.
+ """
+ param_map: dict[str, list[list[str]]] = {}
  for ts in self.task_schemas:
  for inp in ts.inputs:
  if inp.parameter.typ not in param_map:
@@ -1065,7 +2415,7 @@ class BaseApp(metaclass=Singleton):
 
  return param_map
 
- def get_info(self) -> Dict[str, Any]:
+ def get_info(self) -> dict[str, Any]:
  """
  Get miscellaneous runtime system information.
  """
@@ -1077,7 +2427,7 @@ class BaseApp(metaclass=Singleton):
  }
 
  @property
- def known_subs_file_path(self):
+ def known_subs_file_path(self) -> Path:
  """
  The path to the file describing known submissions.
  """
@@ -1093,7 +2443,7 @@ class BaseApp(metaclass=Singleton):
  wk_path,
  start_time,
  end_time,
- ):
+ ) -> str:
  line = [
  str(local_id),
  workflow_id,
@@ -1106,7 +2456,7 @@ class BaseApp(metaclass=Singleton):
  ]
  return self._known_subs_file_sep.join(line) + "\n"
 
- def _parse_known_submissions_line(self, line: str) -> Dict:
+ def _parse_known_submissions_line(self, line: str) -> KnownSubmission:
  (
  local_id,
  workflow_id,
@@ -1117,7 +2467,7 @@ class BaseApp(metaclass=Singleton):
  start_time,
  end_time,
  ) = line.split(self._known_subs_file_sep, maxsplit=7)
- item = {
+ return {
  "local_id": int(local_id),
  "workflow_id": workflow_id,
  "is_active": bool(int(is_active)),
@@ -1127,16 +2477,12 @@ class BaseApp(metaclass=Singleton):
  "start_time": start_time,
  "end_time": end_time.strip(),
  }
- return item
 
  @TimeIt.decorator
- def read_known_submissions_file(self) -> List[Dict]:
+ def read_known_submissions_file(self) -> list[KnownSubmission]:
  """Retrieve existing workflows that *might* be running."""
- known = []
  with self.known_subs_file_path.open("rt", newline="\n") as fh:
- for ln in fh.readlines():
- known.append(self._parse_known_submissions_line(ln))
- return known
+ return [self._parse_known_submissions_line(ln) for ln in fh.readlines()]
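
The known-submissions file is a plain delimiter-separated text format; a sketch of a parse consistent with the fields unpacked above (the separator and field values here are invented; the real separator is `_known_subs_file_sep`):

```python
SEP = ";"  # hypothetical separator
fields = ["12", "wk-abc", "1", "0", "/path/to/wk", "0", "t_start", "t_end"]
line = SEP.join(fields) + "\n"

# maxsplit=7 yields exactly eight fields, mirroring the unpack above.
local_id, workflow_id, is_active, *rest = line.split(SEP, maxsplit=7)
assert int(local_id) == 12 and bool(int(is_active))
```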
 
  def _add_to_known_submissions(
  self,
@@ -1145,9 +2491,10 @@ class BaseApp(metaclass=Singleton):
  sub_idx: int,
  sub_time: str,
  ) -> int:
- """Ensure the specified workflow submission is in the known-submissions file and
- return the associated local ID."""
-
+ """
+ Ensure the specified workflow submission is in the known-submissions file and
+ return the associated local ID.
+ """
  try:
  known = self.read_known_submissions_file()
  except FileNotFoundError:
@@ -1155,15 +2502,15 @@ class BaseApp(metaclass=Singleton):
 
  wk_path = str(wk_path)
  all_ids = []
- for i in known:
- all_ids.append(i["local_id"])
+ for known_sub in known:
+ all_ids.append(known_sub["local_id"])
  if (
- wk_path == i["path"]
- and sub_idx == i["sub_idx"]
- and sub_time == i["submit_time"]
+ wk_path == known_sub["path"]
+ and sub_idx == known_sub["sub_idx"]
+ and sub_time == known_sub["submit_time"]
  ):
  # workflow submission part already present
- return i["local_id"]
+ return known_sub["local_id"]
 
  # get the next available local ID:
  if all_ids:
@@ -1194,13 +2541,16 @@ class BaseApp(metaclass=Singleton):
  @TimeIt.decorator
  def update_known_subs_file(
  self,
- inactive_IDs: List[int],
- start_times: Dict[int, str],
- end_times: Dict[int, str],
- ):
- """Update submission records in the known-submission file.
+ inactive_IDs: list[int],
+ start_times: dict[int, str],
+ end_times: dict[int, str],
+ ) -> list[int]:
+ """
+ Update submission records in the known-submission file.
 
- Note we aim for atomicity to help with the scenario where a new workflow
+ Note
+ ----
+ We aim for atomicity to help with the scenario where a new workflow
  submission is adding itself to the file at the same time as we have decided an
  existing workflow should no longer be part of this file. Ideally, such a scenario
  should not arise because both operations should only ever be interactively
@@ -1210,12 +2560,10 @@ class BaseApp(metaclass=Singleton):
 
  Returns
  -------
- removed_IDs
+ list[int]
  List of local IDs removed from the known-submissions file due to the maximum
  number of recent workflows to store being exceeded.
-
  """
-
  self.submission_logger.info(
  f"setting these local IDs to inactive in known-submissions file: "
  f"{inactive_IDs}"
@@ -1225,12 +2573,14 @@ class BaseApp(metaclass=Singleton):
 
  # keys are line indices of non-running submissions, values are submission
  # date-times:
- line_date = {}
+ line_date: dict[int, str] = {}
 
- removed_IDs = []  # which submissions we completely remove from the file
+ removed_IDs: list[
+ int
+ ] = []  # which submissions we completely remove from the file
 
- new_lines = []
- line_IDs = []
+ new_lines: list[str] = []
+ line_IDs: list[int] = []
  for ln_idx, line in enumerate(self.known_subs_file_path.read_text().split("\n")):
  if not line.strip():
  continue
@@ -1246,7 +2596,6 @@ class BaseApp(metaclass=Singleton):
  update_end = item["local_id"] in end_times
 
  if update_inactive or update_start or update_end:
-
  updated = self._format_known_submissions_line(
  local_id=item["local_id"],
  workflow_id=item["workflow_id"],
@@ -1273,7 +2622,7 @@ class BaseApp(metaclass=Singleton):
  if is_inactive:
  line_date[ln_idx] = item["submit_time"]
 
- ld_srt_idx = list(dict(sorted(line_date.items(), key=lambda i: i[1])).keys())
+ ld_srt_idx = sorted(line_date, key=lambda x: line_date[x])
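
Replacing the old `dict(sorted(...))` dance with a direct key sort is behavior-preserving; a quick check with invented data:

```python
line_date = {3: "2024-01-02", 0: "2024-01-01", 7: "2024-01-03"}

old = list(dict(sorted(line_date.items(), key=lambda i: i[1])).keys())
new = sorted(line_date, key=lambda x: line_date[x])
assert old == new == [0, 3, 7]  # line indices ordered by submit time
```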
 
  if len(line_date) > max_inactive:
  # remove oldest inactive submissions:
@@ -1285,8 +2634,7 @@ class BaseApp(metaclass=Singleton):
  )
 
  # sort in reverse so we can remove indices from new_lines:
- oldest_idx = sorted(ld_srt_idx[:num_remove], reverse=True)
- for i in oldest_idx:
+ for i in sorted(ld_srt_idx[:num_remove], reverse=True):
  new_lines.pop(i)
  removed_IDs.append(line_IDs.pop(i))
 
@@ -1303,9 +2651,11 @@ class BaseApp(metaclass=Singleton):
 
  return removed_IDs
 
- def clear_known_submissions_file(self):
- """Clear the known-submissions file of all submissions. This shouldn't be needed
- normally."""
+ def clear_known_submissions_file(self) -> None:
+ """
+ Clear the known-submissions file of all submissions. This shouldn't be needed
+ normally.
+ """
  self.submission_logger.warning(
  f"clearing the known-submissions file at {self.known_subs_file_path}"
  )
@@ -1314,20 +2664,22 @@ class BaseApp(metaclass=Singleton):
 
  def _make_workflow(
  self,
- template_file_or_str: Union[PathLike, str],
- is_string: Optional[bool] = False,
- template_format: Optional[str] = None,
- path: Optional[PathLike] = None,
- name: Optional[str] = None,
- overwrite: Optional[bool] = False,
- store: Optional[str] = DEFAULT_STORE_FORMAT,
- ts_fmt: Optional[str] = None,
- ts_name_fmt: Optional[str] = None,
- store_kwargs: Optional[Dict] = None,
- variables: Optional[Dict[str, str]] = None,
- status: Optional[bool] = True,
- ) -> get_app_attribute("Workflow"):
- """Generate a new {app_name} workflow from a file or string containing a workflow
+ template_file_or_str: PathLike | str,
+ is_string: bool = False,
+ template_format: Literal["json", "yaml"] | None = None,
+ path: PathLike = None,
+ name: str | None = None,
+ overwrite: bool = False,
+ store: str = DEFAULT_STORE_FORMAT,
+ ts_fmt: str | None = None,
+ ts_name_fmt: str | None = None,
+ store_kwargs: dict[str, Any] | None = None,
+ variables: dict[str, str] | None = None,
+ status: bool = True,
+ add_submission: bool = False,
+ ) -> _Workflow | _Submission | None:
+ """
+ Generate a new {app_name} workflow from a file or string containing a workflow
  template parametrisation.
 
  Parameters
@@ -1364,93 +2716,88 @@ class BaseApp(metaclass=Singleton):
  String variables to substitute in `template_file_or_str`.
  status
  If True, display a live status to track workflow creation progress.
- """
+ add_submission
+ If True, add a submission to the workflow (but do not submit).
 
+ Returns
+ -------
+ Workflow
+ The created workflow, if `add_submission` is `False`.
+ Submission
+ The created submission object, if `add_submission` is `True`.
+ """
  self.API_logger.info("make_workflow called")
 
- if status:
- console = rich.console.Console()
- status = console.status("Making persistent workflow...")
- status.start()
-
- common = {
- "path": path,
- "name": name,
- "overwrite": overwrite,
- "store": store,
- "ts_fmt": ts_fmt,
- "ts_name_fmt": ts_name_fmt,
- "store_kwargs": store_kwargs,
- "variables": variables,
- "status": status,
- }
-
- if not is_string:
- wk = self.Workflow.from_file(
- template_path=template_file_or_str,
- template_format=template_format,
- **common,
- )
+ status_context: AbstractContextManager[Status] | AbstractContextManager[None] = (
+ Console().status("Making persistent workflow...") if status else nullcontext()
+ )
 
- elif template_format == "json":
- try:
+ with status_context as status_:
+
+ common: MakeWorkflowCommonArgs = {
+ "path": str(path) if path else None,
+ "name": name,
+ "overwrite": overwrite,
+ "store": store,
+ "ts_fmt": ts_fmt,
+ "ts_name_fmt": ts_name_fmt,
+ "store_kwargs": store_kwargs,
+ "variables": variables,
+ "status": status_,
+ }
+ if not is_string:
+ wk = self.Workflow.from_file(
+ template_path=template_file_or_str,
+ template_format=template_format,
+ **common,
+ )
+ elif template_format == "json":
  wk = self.Workflow.from_JSON_string(
- JSON_str=template_file_or_str, **common
+ JSON_str=str(template_file_or_str), **common
  )
- except Exception:
- if status:
- status.stop()
- raise
-
- elif template_format == "yaml":
- try:
+ elif template_format == "yaml":
  wk = self.Workflow.from_YAML_string(
- YAML_str=template_file_or_str, **common
+ YAML_str=str(template_file_or_str), **common
  )
- except Exception:
- if status:
- status.stop()
- raise
-
- elif not template_format:
- raise ValueError(
- f"Must specify `template_format` if parsing a workflow template from a "
- f"string; available options are: {ALL_TEMPLATE_FORMATS!r}."
- )
-
- else:
- raise ValueError(
- f"Template format {template_format!r} not understood. Available template "
- f"formats are {ALL_TEMPLATE_FORMATS!r}."
- )
-
- if status:
- status.stop()
+ elif not template_format:
+ raise ValueError(
+ f"Must specify `template_format` if parsing a workflow template from a "
+ f"string; available options are: {ALL_TEMPLATE_FORMATS!r}."
+ )
+ else:
+ raise ValueError(
+ f"Template format {template_format!r} not understood. Available template "
+ f"formats are {ALL_TEMPLATE_FORMATS!r}."
+ )
+ if add_submission:
+ with wk._store.cached_load(), wk.batch_update():
+ return wk._add_submission(status=status_)
 
  return wk
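
The manual `status.start()`/`status.stop()` bookkeeping of the old code is replaced here by a context manager, so the spinner is cleaned up even when an exception propagates; a standalone sketch of the pattern (message text invented):

```python
from contextlib import AbstractContextManager, nullcontext
from rich.console import Console
from rich.status import Status

status = True
status_context: AbstractContextManager[Status] | AbstractContextManager[None] = (
    Console().status("working...") if status else nullcontext()
)
with status_context as status_:
    # status_ is a rich Status when enabled, otherwise None; the spinner is
    # stopped automatically on exit, including on error.
    pass
```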
 
  def _make_and_submit_workflow(
  self,
- template_file_or_str: Union[PathLike, str],
- is_string: Optional[bool] = False,
- template_format: Optional[str] = None,
- path: Optional[PathLike] = None,
- name: Optional[str] = None,
- overwrite: Optional[bool] = False,
- store: Optional[str] = DEFAULT_STORE_FORMAT,
- ts_fmt: Optional[str] = None,
- ts_name_fmt: Optional[str] = None,
- store_kwargs: Optional[Dict] = None,
- variables: Optional[Dict[str, str]] = None,
- JS_parallelism: Optional[bool] = None,
- wait: Optional[bool] = False,
- add_to_known: Optional[bool] = True,
- return_idx: Optional[bool] = False,
- tasks: Optional[List[int]] = None,
- cancel: Optional[bool] = False,
- status: Optional[bool] = True,
- ) -> Dict[int, int]:
- """Generate and submit a new {app_name} workflow from a file or string containing a
+ template_file_or_str: PathLike | str,
+ is_string: bool = False,
+ template_format: Literal["json", "yaml"] | None = None,
+ path: PathLike | None = None,
+ name: str | None = None,
+ overwrite: bool = False,
+ store: str = DEFAULT_STORE_FORMAT,
+ ts_fmt: str | None = None,
+ ts_name_fmt: str | None = None,
+ store_kwargs: dict[str, Any] | None = None,
+ variables: dict[str, str] | None = None,
+ JS_parallelism: bool | Literal["direct", "scheduled"] | None = None,
+ wait: bool = False,
+ add_to_known: bool = True,
+ return_idx: bool = False,
+ tasks: list[int] | None = None,
+ cancel: bool = False,
+ status: bool = True,
+ ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
+ """
+ Generate and submit a new {app_name} workflow from a file or string containing a
  workflow template parametrisation.
 
  Parameters
@@ -1487,9 +2834,12 @@ class BaseApp(metaclass=Singleton):
  variables
  String variables to substitute in `template_file_or_str`.
  JS_parallelism
- If True, allow multiple jobscripts to execute simultaneously. Raises if set to
- True but the store type does not support the `jobscript_parallelism` feature. If
- not set, jobscript parallelism will be used if the store type supports it.
+ If True, allow multiple jobscripts to execute simultaneously. If
+ 'scheduled'/'direct', only allow simultaneous execution of scheduled/direct
+ jobscripts. Raises if set to True, 'scheduled', or 'direct', but the store
+ type does not support the `jobscript_parallelism` feature. If not set,
+ jobscript parallelism will be used if the store type supports it, for
+ scheduled jobscripts only.
  wait
  If True, this command will block until the workflow execution is complete.
  add_to_known
@@ -1506,11 +2856,17 @@ class BaseApp(metaclass=Singleton):
  status
  If True, display a live status to track workflow creation and submission
  progress.
- """
 
+ Returns
+ -------
+ Workflow
+ The created workflow.
+ dict[int, list[int]]
+ Mapping of submission handles, if requested by the ``return_idx`` parameter.
+ """
1511
2867
  self.API_logger.info("make_and_submit_workflow called")
1512
2868
 
1513
- wk = self.make_workflow(
2869
+ wk = self._make_workflow(
1514
2870
  template_file_or_str=template_file_or_str,
1515
2871
  is_string=is_string,
1516
2872
  template_format=template_format,
@@ -1524,6 +2880,7 @@ class BaseApp(metaclass=Singleton):
1524
2880
  variables=variables,
1525
2881
  status=status,
1526
2882
  )
2883
+ assert isinstance(wk, _Workflow)
1527
2884
  submitted_js = wk.submit(
1528
2885
  JS_parallelism=JS_parallelism,
1529
2886
  wait=wait,
@@ -1541,18 +2898,20 @@ class BaseApp(metaclass=Singleton):
1541
2898
  def _make_demo_workflow(
1542
2899
  self,
1543
2900
  workflow_name: str,
1544
- template_format: Optional[str] = None,
1545
- path: Optional[PathLike] = None,
1546
- name: Optional[str] = None,
1547
- overwrite: Optional[bool] = False,
1548
- store: Optional[str] = DEFAULT_STORE_FORMAT,
1549
- ts_fmt: Optional[str] = None,
1550
- ts_name_fmt: Optional[str] = None,
1551
- store_kwargs: Optional[Dict] = None,
1552
- variables: Optional[Dict[str, str]] = None,
1553
- status: Optional[bool] = True,
1554
- ) -> get_app_attribute("Workflow"):
1555
- """Generate a new {app_name} workflow from a builtin demo workflow template.
2901
+ template_format: Literal["json", "yaml"] | None = None,
2902
+ path: PathLike | None = None,
2903
+ name: str | None = None,
2904
+ overwrite: bool = False,
2905
+ store: str = DEFAULT_STORE_FORMAT,
2906
+ ts_fmt: str | None = None,
2907
+ ts_name_fmt: str | None = None,
2908
+ store_kwargs: dict[str, Any] | None = None,
2909
+ variables: dict[str, str] | None = None,
2910
+ status: bool = True,
2911
+ add_submission: bool = False,
2912
+ ) -> _Workflow | _Submission | None:
2913
+ """
2914
+ Generate a new {app_name} workflow from a builtin demo workflow template.
1556
2915
 
1557
2916
  Parameters
1558
2917
  ----------
@@ -1586,20 +2945,29 @@ class BaseApp(metaclass=Singleton):
1586
2945
  String variables to substitute in the demo workflow template file.
1587
2946
  status
1588
2947
  If True, display a live status to track workflow creation progress.
1589
- """
2948
+ add_submission
2949
+ If True, add a submission to the workflow (but do not submit).
1590
2950
 
2951
+ Returns
2952
+ -------
2953
+ Workflow
2954
+ The created workflow, if `add_submission` is `False`.
2955
+ Submission
2956
+ The created submission object, if `add_submission` is `True`.
2957
+ """
1591
2958
  self.API_logger.info("make_demo_workflow called")
1592
2959
 
1593
- if status:
1594
- console = rich.console.Console()
1595
- status = console.status("Making persistent workflow...")
1596
- status.start()
2960
+ status_context: AbstractContextManager[Status] | AbstractContextManager[None] = (
2961
+ Console().status("Making persistent workflow...") if status else nullcontext()
2962
+ )
1597
2963
 
1598
- with self.get_demo_workflow_template_file(workflow_name) as template_path:
2964
+ with status_context as status_, self.get_demo_workflow_template_file(
2965
+ workflow_name
2966
+ ) as template_path:
1599
2967
  wk = self.Workflow.from_file(
1600
2968
  template_path=template_path,
1601
2969
  template_format=template_format,
1602
- path=path,
2970
+ path=str(path) if path else None,
1603
2971
  name=name,
1604
2972
  overwrite=overwrite,
1605
2973
  store=store,
@@ -1607,33 +2975,36 @@ class BaseApp(metaclass=Singleton):
1607
2975
  ts_name_fmt=ts_name_fmt,
1608
2976
  store_kwargs=store_kwargs,
1609
2977
  variables=variables,
1610
- status=status,
2978
+ status=status_,
1611
2979
  )
1612
- if status:
1613
- status.stop()
1614
- return wk
2980
+ if add_submission:
2981
+ with wk._store.cached_load():
2982
+ with wk.batch_update():
2983
+ return wk._add_submission(status=status_)
2984
+ return wk
1615
2985
 
1616
2986
  def _make_and_submit_demo_workflow(
1617
2987
  self,
1618
2988
  workflow_name: str,
1619
- template_format: Optional[str] = None,
1620
- path: Optional[PathLike] = None,
1621
- name: Optional[str] = None,
1622
- overwrite: Optional[bool] = False,
1623
- store: Optional[str] = DEFAULT_STORE_FORMAT,
1624
- ts_fmt: Optional[str] = None,
1625
- ts_name_fmt: Optional[str] = None,
1626
- store_kwargs: Optional[Dict] = None,
1627
- variables: Optional[Dict[str, str]] = None,
1628
- JS_parallelism: Optional[bool] = None,
1629
- wait: Optional[bool] = False,
1630
- add_to_known: Optional[bool] = True,
1631
- return_idx: Optional[bool] = False,
1632
- tasks: Optional[List[int]] = None,
1633
- cancel: Optional[bool] = False,
1634
- status: Optional[bool] = True,
1635
- ) -> Dict[int, int]:
1636
- """Generate and submit a new {app_name} workflow from a file or string containing a
2989
+ template_format: Literal["json", "yaml"] | None = None,
2990
+ path: PathLike | None = None,
2991
+ name: str | None = None,
2992
+ overwrite: bool = False,
2993
+ store: str = DEFAULT_STORE_FORMAT,
2994
+ ts_fmt: str | None = None,
2995
+ ts_name_fmt: str | None = None,
2996
+ store_kwargs: dict[str, Any] | None = None,
2997
+ variables: dict[str, str] | None = None,
2998
+ JS_parallelism: bool | Literal["direct", "scheduled"] | None = None,
2999
+ wait: bool = False,
3000
+ add_to_known: bool = True,
3001
+ return_idx: bool = False,
3002
+ tasks: list[int] | None = None,
3003
+ cancel: bool = False,
3004
+ status: bool = True,
3005
+ ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
3006
+ """
3007
+ Generate and submit a new {app_name} workflow from a file or string containing a
1637
3008
  workflow template parametrisation.
1638
3009
 
1639
3010
  Parameters
@@ -1667,9 +3038,12 @@ class BaseApp(metaclass=Singleton):
         variables
             String variables to substitute in the demo workflow template file.
         JS_parallelism
-            If True, allow multiple jobscripts to execute simultaneously. Raises if set to
-            True but the store type does not support the `jobscript_parallelism` feature. If
-            not set, jobscript parallelism will be used if the store type supports it.
+            If True, allow multiple jobscripts to execute simultaneously. If
+            'scheduled'/'direct', only allow simultaneous execution of scheduled/direct
+            jobscripts. Raises if set to True, 'scheduled', or 'direct', but the store
+            type does not support the `jobscript_parallelism` feature. If not set,
+            jobscript parallelism will be used if the store type supports it, for
+            scheduled jobscripts only.
         wait
             If True, this command will block until the workflow execution is complete.
         add_to_known
@@ -1685,11 +3059,17 @@ class BaseApp(metaclass=Singleton):
             Immediately cancel the submission. Useful for testing and benchmarking.
         status
             If True, display a live status to track submission progress.
-        """
 
+        Returns
+        -------
+        Workflow
+            The created workflow.
+        dict[int, list[int]]
+            Mapping of submission handles. If requested by ``return_idx`` parameter.
+        """
         self.API_logger.info("make_and_submit_demo_workflow called")
 
-        wk = self.make_demo_workflow(
+        wk = self._make_demo_workflow(
             workflow_name=workflow_name,
             template_format=template_format,
             path=path,
@@ -1701,6 +3081,7 @@ class BaseApp(metaclass=Singleton):
             store_kwargs=store_kwargs,
             variables=variables,
         )
+        assert isinstance(wk, _Workflow)
         submitted_js = wk.submit(
             JS_parallelism=JS_parallelism,
             wait=wait,
@@ -1718,59 +3099,69 @@ class BaseApp(metaclass=Singleton):
     def _submit_workflow(
         self,
         workflow_path: PathLike,
-        JS_parallelism: Optional[bool] = None,
-        wait: Optional[bool] = False,
-        return_idx: Optional[bool] = False,
-        tasks: Optional[List[int]] = None,
-    ) -> Dict[int, int]:
-        """Submit an existing {app_name} workflow.
+        JS_parallelism: bool | Literal["direct", "scheduled"] | None = None,
+        wait: bool = False,
+        return_idx: bool = False,
+        tasks: list[int] | None = None,
+    ) -> Mapping[int, Sequence[int]] | None:
+        """
+        Submit an existing {app_name} workflow.
 
         Parameters
         ----------
-        workflow_path
-            Path to an existing workflow
-        JS_parallelism
-            If True, allow multiple jobscripts to execute simultaneously. Raises if set to
-            True but the store type does not support the `jobscript_parallelism` feature. If
-            not set, jobscript parallelism will be used if the store type supports it.
-        tasks
+        workflow_path:
+            Path to an existing workflow.
+        JS_parallelism:
+            If True, allow multiple jobscripts to execute simultaneously. If
+            'scheduled'/'direct', only allow simultaneous execution of scheduled/direct
+            jobscripts. Raises if set to True, 'scheduled', or 'direct', but the store
+            type does not support the `jobscript_parallelism` feature. If not set,
+            jobscript parallelism will be used if the store type supports it, for
+            scheduled jobscripts only.
+        wait:
+            Whether to wait for the submission to complete.
+        return_idx:
+            Whether to return the index information.
+        tasks:
             List of task indices to include in this submission. By default all tasks are
             included.
-        """
 
+        Returns
+        -------
+        dict[int, list[int]]
+            Mapping of submission handles, if requested by ``return_idx`` parameter.
+        """
         self.API_logger.info("submit_workflow called")
+        assert workflow_path is not None
         wk = self.Workflow(workflow_path)
-        return wk.submit(
-            JS_parallelism=JS_parallelism,
-            wait=wait,
-            return_idx=return_idx,
-            tasks=tasks,
-        )
+        if return_idx:
+            return wk.submit(
+                JS_parallelism=JS_parallelism,
+                wait=wait,
+                return_idx=True,
+                tasks=tasks,
+            )
+        wk.submit(JS_parallelism=JS_parallelism, wait=wait, tasks=tasks)
+        return None
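# Editor's sketch: the two call shapes above exist so the return type stays
# precise. With return_idx=True, submit returns a mapping of submission index
# to the jobscript indices it comprises; otherwise the caller gets None. The
# stand-in function and values below are hypothetical, for illustration only.
from typing import Literal, Mapping, Sequence

def submit_sketch(
    JS_parallelism: bool | Literal["direct", "scheduled"] | None = None,
    return_idx: bool = False,
) -> Mapping[int, Sequence[int]] | None:
    # e.g. submission 0 made up of jobscripts 0..2 (illustrative values):
    return {0: [0, 1, 2]} if return_idx else None

assert submit_sketch(JS_parallelism="scheduled", return_idx=True) == {0: [0, 1, 2]}
assert submit_sketch(JS_parallelism=True) is None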
 
-    def _run_hpcflow_tests(self, *args):
+    def _run_hpcflow_tests(self, *args: str) -> int:
         """Run hpcflow test suite. This function is only available from derived apps."""
-
         from hpcflow import app as hf
 
         return hf.app.run_tests(*args)
 
-    def _run_tests(self, *args):
+    def _run_tests(self, *args: str) -> int:
         """Run {app_name} test suite."""
-
         try:
             import pytest
         except ModuleNotFoundError:
             raise RuntimeError(
                 f"{self.name} has not been built with testing dependencies."
             )
-        test_args = (self.pytest_args or []) + list(args)
-        pkg = self.package_name
-        tests_dir = "tests"
-        ctx_man = resources.as_file(resources.files(pkg).joinpath(tests_dir))
-        with ctx_man as test_dir:
-            return pytest.main([str(test_dir)] + test_args)
-
-    def _get_OS_info(self) -> Dict:
+        with get_file_context(self.package_name, "tests") as test_dir:
+            return pytest.main([str(test_dir), *(self.pytest_args or ()), *args])
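# Editor's sketch: the removed lines show what the new get_file_context helper
# appears to wrap, i.e. the importlib.resources dance for obtaining a real
# filesystem path to a package resource ("tests" here, as above). The helper
# name mirrors the diff; this standalone equivalent is illustrative.
from importlib import resources

def get_file_context_sketch(package: str, resource: str):
    # yields a pathlib.Path valid for the duration of the context
    return resources.as_file(resources.files(package).joinpath(resource))

# usage, mirroring the new _run_tests body:
# with get_file_context_sketch("hpcflow", "tests") as test_dir:
#     pytest.main([str(test_dir)])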
+
+    def _get_OS_info(self) -> Mapping[str, str]:
         """Get information about the operating system."""
         os_name = os.name
         if os_name == "posix":
@@ -1779,19 +3170,22 @@ class BaseApp(metaclass=Singleton):
             )
         elif os_name == "nt":
             return get_OS_info_windows()
+        else:
+            raise Exception(f"unsupported OS '{os_name}'")
 
     def _get_shell_info(
         self,
         shell_name: str,
-        exclude_os: Optional[bool] = False,
-    ) -> Dict:
-        """Get information about a given shell and the operating system.
+        exclude_os: bool = False,
+    ) -> VersionInfo:
+        """
+        Get information about a given shell and the operating system.
 
         Parameters
         ----------
-        shell_name
+        shell_name:
             One of the supported shell names.
-        exclude_os
+        exclude_os:
             If True, exclude operating system information.
         """
         shell = get_shell(
@@ -1806,9 +3200,10 @@ class BaseApp(metaclass=Singleton):
         max_recent: int = 3,
         no_update: bool = False,
         as_json: bool = False,
-        status: Optional[Any] = None,
-    ):
-        """Retrieve information about active and recently inactive finished {app_name}
+        status: Status | None = None,
+    ) -> Sequence[KnownSubmissionItem]:
+        """
+        Retrieve information about active and recently inactive finished {app_name}
         workflows.
 
         This method removes workflows from the known-submissions file that are found to be
@@ -1816,21 +3211,19 @@ class BaseApp(metaclass=Singleton):
 
         Parameters
         ----------
-        max_recent
+        max_recent:
             Maximum number of inactive workflows to retrieve.
-        no_update
+        no_update:
             If True, do not update the known-submissions file to set submissions that are
             now inactive.
-        as_json
+        as_json:
             If True, only include JSON-compatible information. This will exclude the
             `submission` key, for instance.
-
         """
-
-        out = []
-        inactive_IDs = []
-        start_times = {}
-        end_times = {}
+        out: list[KnownSubmissionItem] = []
+        inactive_IDs: list[int] = []
+        start_times: dict[int, str] = {}
+        end_times: dict[int, str] = {}
 
         ts_fmt = self._submission_ts_fmt
 
@@ -1841,28 +3234,29 @@ class BaseApp(metaclass=Singleton):
         except FileNotFoundError:
             known_subs = []
 
-        active_jobscripts = {}  # keys are (workflow path, submission index)
-        loaded_workflows = {}  # keys are workflow path
+        # keys are (workflow path, submission index)
+        active_jobscripts: dict[
+            tuple[str, int],
+            Mapping[int, Mapping[int, Mapping[int, JobscriptElementState]]],
+        ] = {}
+        loaded_workflows: dict[str, _Workflow] = {}  # keys are workflow path
 
         # loop in reverse so we process more-recent submissions first:
         for file_dat_i in known_subs[::-1]:
             submit_time_str = file_dat_i["submit_time"]
-            submit_time_obj = datetime.strptime(submit_time_str, ts_fmt)
-            submit_time_obj = submit_time_obj.replace(tzinfo=timezone.utc).astimezone()
+            submit_time_obj = parse_timestamp(submit_time_str, ts_fmt)
 
             start_time_str = file_dat_i["start_time"]
             start_time_obj = None
             if start_time_str:
-                start_time_obj = datetime.strptime(start_time_str, ts_fmt)
-                start_time_obj = start_time_obj.replace(tzinfo=timezone.utc).astimezone()
+                start_time_obj = parse_timestamp(start_time_str, ts_fmt)
 
             end_time_str = file_dat_i["end_time"]
             end_time_obj = None
             if end_time_str:
-                end_time_obj = datetime.strptime(end_time_str, ts_fmt)
-                end_time_obj = end_time_obj.replace(tzinfo=timezone.utc).astimezone()
+                end_time_obj = parse_timestamp(end_time_str, ts_fmt)
 
-            out_item = {
+            out_item: KnownSubmissionItem = {
                 "local_id": file_dat_i["local_id"],
                 "workflow_id": file_dat_i["workflow_id"],
                 "workflow_path": file_dat_i["path"],
@@ -1924,12 +3318,8 @@ class BaseApp(metaclass=Singleton):
                 sub = wk_i.submissions[file_dat_i["sub_idx"]]
 
                 all_jobscripts = sub._submission_parts[submit_time_str]
-                out_item.update(
-                    {
-                        "jobscripts": all_jobscripts,
-                        "submission": sub,
-                    }
-                )
+                out_item["jobscripts"] = all_jobscripts
+                out_item["submission"] = sub
                 if not out_item["start_time"]:
                     start_time_obj = sub.start_time
                     if start_time_obj:
@@ -1949,11 +3339,19 @@ class BaseApp(metaclass=Singleton):
             if file_dat_i["is_active"]:
                 # check it really is active:
                 run_key = (file_dat_i["path"], file_dat_i["sub_idx"])
+                act_i_js: Mapping[
+                    int, Mapping[int, Mapping[int, JobscriptElementState]]
+                ]
                 if run_key in active_jobscripts:
                     act_i_js = active_jobscripts[run_key]
                 else:
                     try:
-                        act_i_js = sub.get_active_jobscripts(as_json=as_json)
+                        if as_json:
+                            act_i_js = cast(  # not actually used?
+                                Any, sub.get_active_jobscripts(as_json=True)
+                            )
+                        else:
+                            act_i_js = sub.get_active_jobscripts()
                     except KeyboardInterrupt:
                         raise
                     except Exception:
@@ -1984,38 +3382,64 @@ class BaseApp(metaclass=Singleton):
             )
             # remove these from the output, to avoid confusion (if kept, they would not
             # appear in the next invocation of this method):
-        out = [i for i in out if i["local_id"] not in removed_IDs]
+        out = [item for item in out if item["local_id"] not in removed_IDs]
 
+        out_active, out_inactive = self.__partition(
+            out, lambda item: item["active_jobscripts"]
+        )
         # sort inactive by most-recently finished, then deleted:
-        out_inactive = [i for i in out if not i["active_jobscripts"]]
-        out_no_access = [i for i in out_inactive if (i["deleted"] or i["unloadable"])]
-        out_access = [i for i in out_inactive if not (i["deleted"] or i["unloadable"])]
+        out_no_access, out_access = self.__partition(
+            out_inactive, lambda item: item["deleted"] or item["unloadable"]
+        )
 
         # sort loadable inactive by end time or start time or submit time:
         out_access = sorted(
             out_access,
             key=lambda i: (
-                i["end_time_obj"] or i["start_time_obj"] or i["submit_time_obj"]
+                i["end_time_obj"]
+                or i["start_time_obj"]
+                or i.get("submit_time_obj")
+                or self.__DEF_TIMESTAMP
             ),
             reverse=True,
         )
         out_inactive = (out_access + out_no_access)[:max_recent]
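# Editor's sketch: timezone-aware datetimes cannot be compared with None, so
# the new sort key chains `or` through end/start/submit times and finally
# falls back to the fixed __DEF_TIMESTAMP epoch, keeping entries with no
# recorded times sortable. Data below is hypothetical:
from datetime import datetime, timezone

DEF_TIMESTAMP = datetime.fromtimestamp(0, tz=timezone.utc)
items = [
    {"end_time_obj": None, "start_time_obj": None},
    {"end_time_obj": datetime(2024, 5, 1, tzinfo=timezone.utc), "start_time_obj": None},
]
items.sort(
    key=lambda i: i["end_time_obj"] or i["start_time_obj"] or DEF_TIMESTAMP,
    reverse=True,
)
# the dated entry now sorts before the entirely undated one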
 
-        out_active = [i for i in out if i["active_jobscripts"]]
-
         # show active submissions first:
         out = out_active + out_inactive
 
         if as_json:
-            for idx, _ in enumerate(out):
-                out[idx].pop("submission", None)
-                out[idx].pop("submit_time_obj")
+            for item in out:
+                item.pop("submission", None)
+                item.pop("submit_time_obj")
         return out
 
-    def _show_legend(self):
-        """ "Output a legend for the jobscript-element and EAR states that are displayed
-        by the `show` command."""
+    __DEF_TIMESTAMP: Final[datetime] = datetime.fromtimestamp(0, tz=timezone.utc)
+
+    @staticmethod
+    def __partition(
+        lst: Iterable[T], cond: Callable[[T], Any]
+    ) -> tuple[list[T], list[T]]:
+        """
+        Split a list into two by whether the condition holds for each item.
+
+        Returns
+        -------
+        true_items
+            List of items for which the condition is true (or at least truthy).
+        false_items
+            List of items for which the condition is false.
+        """
+        lists: tuple[list[T], list[T]] = [], []
+        for item in lst:
+            lists[not cond(item)].append(item)
+        return lists
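# Editor's note: `lists[not cond(item)]` above indexes the 2-tuple with a
# bool, where False == 0 and True == 1, so truthy items land in lists[0] and
# falsy ones in lists[1]. A standalone equivalent of the helper:
from typing import Any, Callable, Iterable, TypeVar

T = TypeVar("T")

def partition(items: Iterable[T], cond: Callable[[T], Any]) -> tuple[list[T], list[T]]:
    true_items: list[T] = []
    false_items: list[T] = []
    for item in items:
        (true_items if cond(item) else false_items).append(item)
    return true_items, false_items

assert partition([1, 0, 2, None], bool) == ([1, 2], [0, None])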
 
+    def _show_legend(self) -> None:
+        """
+        Output a legend for the jobscript-element and EAR states that are displayed
+        by the `show` command.
+        """
         js_notes = Panel(
             "The [i]Status[/i] column of the `show` command output displays the set of "
             "unique jobscript-element states for that submission. Jobscript element "
@@ -2028,8 +3452,8 @@ class BaseApp(metaclass=Singleton):
         js_tab.add_column("Symbol")
         js_tab.add_column("State")
         js_tab.add_column("Description")
-        for state in JobscriptElementState.__members__.values():
-            js_tab.add_row(state.rich_repr, state.name, state.__doc__)
+        for jse_state in JobscriptElementState.__members__.values():
+            js_tab.add_row(jse_state.rich_repr, jse_state.name, jse_state.__doc__)
 
         act_notes = Panel(
             "\nThe [i]Actions[/i] column of the `show` command output displays either the "
@@ -2044,8 +3468,8 @@ class BaseApp(metaclass=Singleton):
         act_tab.add_column("Symbol")
         act_tab.add_column("State")
         act_tab.add_column("Description")
-        for state in EARStatus.__members__.values():
-            act_tab.add_row(state.rich_repr, state.name, state.__doc__)
+        for ear_state in EARStatus.__members__.values():
+            act_tab.add_row(ear_state.rich_repr, ear_state.name, ear_state.__doc__)
 
         group = Group(
             js_notes,
@@ -2061,22 +3485,21 @@ class BaseApp(metaclass=Singleton):
         max_recent: int = 3,
         full: bool = False,
         no_update: bool = False,
-        columns=None,
-    ):
-        """Show information about running {app_name} workflows.
+    ) -> None:
+        """
+        Show information about running {app_name} workflows.
 
         Parameters
         ----------
-        max_recent
+        max_recent:
             Maximum number of inactive workflows to show.
-        full
+        full:
             If True, provide more information; output may spans multiple lines for each
             workflow submission.
-        no_update
+        no_update:
             If True, do not update the known-submissions file to remove workflows that are
             no longer running.
         """
-
         # TODO: add --json to show, just returning this but without submissions?
 
         allowed_cols = {
@@ -2091,9 +3514,9 @@ class BaseApp(metaclass=Singleton):
             "actions_compact": "Actions",
         }
 
+        columns: tuple[str, ...]
         if full:
             columns = ("id", "name", "status", "times", "actions")
-
         else:
             columns = (
                 "id",
@@ -2105,11 +3528,11 @@ class BaseApp(metaclass=Singleton):
             )
 
-        unknown_cols = set(columns) - set(allowed_cols.keys())
+        unknown_cols = set(columns).difference(allowed_cols)
         if unknown_cols:
             raise ValueError(
                 f"Unknown column names: {unknown_cols!r}. Allowed columns are "
-                f"{list(allowed_cols.keys())!r}."
+                f"{list(allowed_cols)!r}."
             )
 
         # TODO: add --filter option to filter by ID or name
@@ -2119,45 +3542,43 @@ class BaseApp(metaclass=Singleton):
         ts_fmt_part = r"%H:%M:%S"
 
         console = Console()
-        status = console.status("Retrieving data...")
-        status.start()
-
-        try:
+        with console.status("Retrieving data...") as status:
             run_dat = self._get_known_submissions(
                 max_recent=max_recent,
                 no_update=no_update,
                 status=status,
             )
-        except (Exception, KeyboardInterrupt):
-            status.stop()
-            raise
-        else:
             if not run_dat:
-                status.stop()
                 return
 
-        status.update("Formatting...")
-        table = Table(box=box.SQUARE, expand=False)
-        for col_name in columns:
-            table.add_column(allowed_cols[col_name])
+            status.update("Formatting...")
+            table = Table(box=box.SQUARE, expand=False)
+            for col_name in columns:
+                table.add_column(allowed_cols[col_name])
 
-        row_pad = 1 if full else 0
+            row_pad = 1 if full else 0
 
-        for dat_i in run_dat:
-            deleted = dat_i["deleted"]
-            unloadable = dat_i["unloadable"]
-            no_access = deleted or unloadable
-            act_js = dat_i["active_jobscripts"]
-            style = "grey42" if (no_access or not act_js) else ""
-            style_wk_name = "grey42 strike" if deleted else style
-            style_it = "italic grey42" if (no_access or not act_js) else "italic"
+            for dat_i in run_dat:
+                deleted = dat_i["deleted"]
+                unloadable = dat_i["unloadable"]
+                no_access = deleted or unloadable
+                act_js = dat_i["active_jobscripts"]
+                style = "grey42" if (no_access or not act_js) else ""
+                style_wk_name = "grey42 strike" if deleted else style
+                style_it = "italic grey42" if (no_access or not act_js) else "italic"
 
-            all_cells = {}
+                all_cells: dict[str, str | Text | Padding] = {}
                 if "status" in columns:
                     if act_js:
-                        act_js_states = set([j for i in act_js.values() for j in i.values()])
-                        status_text = "/".join(
-                            f"[{i.colour}]{i.symbol}[/{i.colour}]" for i in act_js_states
+                        act_js_states = set(
+                            state_i
+                            for js_dat in act_js.values()
+                            for block_dat in js_dat.values()
+                            for state_i in block_dat.values()
+                        )
+                        all_cells["status"] = "/".join(
+                            js_state.rich_repr
+                            for js_state in sorted(act_js_states, key=lambda x: x.value)
                         )
                     else:
                         if deleted:
@@ -2166,120 +3587,122 @@ class BaseApp(metaclass=Singleton):
                             txt = "unloadable"
                         else:
                             txt = "inactive"
-                    status_text = Text(txt, style=style_it)
-                all_cells["status"] = status_text
-
-            if "id" in columns:
-                all_cells["id"] = Text(str(dat_i["local_id"]), style=style)
+                        all_cells["status"] = Text(txt, style=style_it)
 
-            if "name" in columns:
-                all_cells["name"] = Text(
-                    Path(dat_i["workflow_path"]).name, style=style_wk_name
-                )
+                if "id" in columns:
+                    all_cells["id"] = Text(str(dat_i["local_id"]), style=style)
 
-            start_time, end_time = None, None
-            if not no_access:
-                start_time = dat_i["start_time_obj"]
-                end_time = dat_i["end_time_obj"]
+                if "name" in columns:
+                    all_cells["name"] = Text(
+                        Path(dat_i["workflow_path"]).name, style=style_wk_name
+                    )
 
-            if "actions" in columns:
+                start_time, end_time = None, None
                 if not no_access:
-                    task_tab = Table(box=None, show_header=False)
-                    task_tab.add_column()
-                    task_tab.add_column()
-
-                    for task_idx, elements in dat_i[
-                        "submission"
-                    ].EARs_by_elements.items():
-                        task = dat_i["submission"].workflow.tasks[task_idx]
-
-                        # inner table for elements/actions:
-                        elem_tab_i = Table(box=None, show_header=False)
-                        elem_tab_i.add_column()
-                        for elem_idx, EARs in elements.items():
-                            elem_status = Text(f"{elem_idx} | ", style=style)
-                            for i in EARs:
-                                elem_status.append(i.status.symbol, style=i.status.colour)
-                            elem_tab_i.add_row(elem_status)
-                        task_tab.add_row(task.unique_name, elem_tab_i, style=style)
-                else:
-                    task_tab = ""
-
-                all_cells["actions"] = Padding(task_tab, (0, 0, row_pad, 0))
+                    start_time = cast("datetime", dat_i["start_time_obj"])
+                    end_time = cast("datetime", dat_i["end_time_obj"])
+
+                if "actions" in columns:
+                    task_tab: str | Table
+                    if not no_access:
+                        task_tab = Table(box=None, show_header=False)
+                        task_tab.add_column()
+                        task_tab.add_column()
+
+                        sub = dat_i["submission"]
+                        for task_idx, elements in sub.EARs_by_elements.items():
+                            task = sub.workflow.tasks[task_idx]
+
+                            # inner table for elements/actions:
+                            elem_tab_i = Table(box=None, show_header=False)
+                            elem_tab_i.add_column()
+                            for elem_idx, EARs in elements.items():
+                                elem_status = Text(f"{elem_idx} | ", style=style)
+                                for ear in EARs:
+                                    elem_status.append(
+                                        ear.status.symbol, style=ear.status.colour
+                                    )
+                                elem_tab_i.add_row(elem_status)
+                            task_tab.add_row(task.unique_name, elem_tab_i, style=style)
+                    else:
+                        task_tab = ""
 
-            if "actions_compact" in columns:
-                if not no_access:
-                    EAR_stat_count = defaultdict(int)
-                    for _, elements in dat_i["submission"].EARs_by_elements.items():
-                        for elem_idx, EARs in elements.items():
-                            for i in EARs:
-                                EAR_stat_count[i.status] += 1
-                    all_cells["actions_compact"] = " | ".join(
-                        f"[{k.colour}]{k.symbol}[/{k.colour}]:{v}"
-                        for k, v in EAR_stat_count.items()
-                    )
-                else:
-                    all_cells["actions_compact"] = ""
+                    all_cells["actions"] = Padding(task_tab, (0, 0, row_pad, 0))
 
-            if "submit_time" in columns or "times" in columns:
-                submit_time = (
-                    datetime.strptime(dat_i["submit_time"], self._submission_ts_fmt)
-                    .replace(tzinfo=timezone.utc)
-                    .astimezone()
-                )
-                submit_time_full = submit_time.strftime(ts_fmt)
-
-            if "start_time" in columns or "times" in columns:
-                start_time_full = start_time.strftime(ts_fmt) if start_time else "-"
-                start_time_part = start_time_full
-                if start_time and start_time.date() == submit_time.date():
-                    start_time_part = start_time.strftime(ts_fmt_part)
-
-            if "end_time" in columns or "times" in columns:
-                end_time_full = end_time.strftime(ts_fmt) if end_time else "-"
-                end_time_part = end_time_full
-                if end_time and end_time.date() == start_time.date():
-                    end_time_part = end_time.strftime(ts_fmt_part)
-
-            if "submit_time" in columns:
-                all_cells["submit_time"] = Padding(
-                    Text(submit_time_full, style=style), (0, 0, row_pad, 0)
-                )
+                if "actions_compact" in columns:
+                    if not no_access:
+                        EAR_stat_count = Counter(
+                            ear.status
+                            for elements in dat_i["submission"].EARs_by_elements.values()
+                            for EARs in elements.values()
+                            for ear in EARs
+                        )
+                        all_cells["actions_compact"] = " | ".join(
+                            f"[{k.colour}]{k.symbol}[/{k.colour}]:{v}"  # type: ignore
+                            for k, v in dict(
+                                sorted(EAR_stat_count.items(), key=lambda x: x[0].value)
+                            ).items()
+                        )
+                    else:
+                        all_cells["actions_compact"] = ""
 
-            if "start_time" in columns:
-                all_cells["start_time"] = Padding(
-                    Text(start_time_part, style=style), (0, 0, row_pad, 0)
-                )
+                if "submit_time" in columns or "times" in columns:
+                    submit_time = parse_timestamp(
+                        dat_i["submit_time"], self._submission_ts_fmt
+                    )
+                    submit_time_full = submit_time.strftime(ts_fmt)
+
+                if "start_time" in columns or "times" in columns:
+                    start_time_full = start_time.strftime(ts_fmt) if start_time else "-"
+                    start_time_part = start_time_full
+                    if start_time and start_time.date() == submit_time.date():
+                        start_time_part = start_time.strftime(ts_fmt_part)
+
+                if "end_time" in columns or "times" in columns:
+                    end_time_full = end_time.strftime(ts_fmt) if end_time else "-"
+                    end_time_part = end_time_full
+                    if end_time and start_time and end_time.date() == start_time.date():
+                        end_time_part = end_time.strftime(ts_fmt_part)
+
+                if "submit_time" in columns:
+                    all_cells["submit_time"] = Padding(
+                        Text(submit_time_full, style=style), (0, 0, row_pad, 0)
+                    )
 
-            if "end_time" in columns:
-                all_cells["end_time"] = Padding(
-                    Text(end_time_part, style=style), (0, 0, row_pad, 0)
-                )
+                if "start_time" in columns:
+                    all_cells["start_time"] = Padding(
+                        Text(start_time_part, style=style), (0, 0, row_pad, 0)
+                    )
 
-            if "times" in columns:
-                # submit/start/end on separate lines:
-                times_tab = Table(box=None, show_header=False)
-                times_tab.add_column()
-                times_tab.add_column(justify="right")
+                if "end_time" in columns:
+                    all_cells["end_time"] = Padding(
+                        Text(end_time_part, style=style), (0, 0, row_pad, 0)
+                    )
 
-                times_tab.add_row(
-                    Text("sb.", style=style_it), Text(submit_time_full, style=style)
-                )
+                if "times" in columns:
+                    # submit/start/end on separate lines:
+                    times_tab = Table(box=None, show_header=False)
+                    times_tab.add_column()
+                    times_tab.add_column(justify="right")
 
-                if start_time:
                     times_tab.add_row(
-                        Text("st.", style=style_it), Text(start_time_part, style=style)
-                    )
-                if end_time:
-                    times_tab.add_row(
-                        Text("en.", style=style_it), Text(end_time_part, style=style)
+                        Text("sb.", style=style_it), Text(submit_time_full, style=style)
                     )
 
-                all_cells["times"] = Padding(times_tab, (0, 0, row_pad, 0))
+                    if start_time:
+                        times_tab.add_row(
+                            Text("st.", style=style_it),
+                            Text(start_time_part, style=style),
+                        )
+                    if end_time:
+                        times_tab.add_row(
+                            Text("en.", style=style_it), Text(end_time_part, style=style)
+                        )
+
+                    all_cells["times"] = Padding(times_tab, (0, 0, row_pad, 0))
 
-            table.add_row(*[all_cells[i] for i in columns])
+                table.add_row(*(all_cells[col_name] for col_name in columns))
 
-        status.stop()
         if table.row_count:
             console.print(table)
@@ -2289,18 +3712,12 @@ class BaseApp(metaclass=Singleton):
         except FileNotFoundError:
             known_subs = []
 
-        path = None
-        for i in known_subs:
-            if i["local_id"] == local_ID:
-                path = Path(i["path"])
-                break
-        if not path:
-            raise ValueError(f"Specified local ID is not valid: {local_ID}.")
-
-        return path
+        if any((witness := sub)["local_id"] == local_ID for sub in known_subs):
+            return Path(witness["path"])
+        raise ValueError(f"Specified local ID is not valid: {local_ID}.")
 
     def _resolve_workflow_reference(
-        self, workflow_ref, ref_type: Union[str, None]
+        self, workflow_ref: str, ref_type: str | None
     ) -> Path:
         path = None
         if ref_type == "path":
@@ -2343,24 +3760,38 @@ class BaseApp(metaclass=Singleton):
             )
         return path.resolve()
 
-    def _cancel(self, workflow_ref: Union[int, str, PathLike], ref_is_path=None):
-        """Cancel the execution of a workflow submission.
+    def _cancel(
+        self,
+        workflow_ref: int | str | Path,
+        ref_is_path: str | None = None,
+        status: bool = True,
+    ) -> None:
+        """
+        Cancel the execution of a workflow submission.
 
         Parameters
         ----------
-        ref_is_path
-            One of "id", "path" or "assume-id" (the default)
+        workflow_ref: int | str | Path
+            Which workflow to cancel, by ID or path.
+        ref_is_path: str
+            One of "``id``", "``path``" or "``assume-id``" (the default)
+        status: bool
+            Whether to show a live status during cancel.
         """
-        path = self._resolve_workflow_reference(workflow_ref, ref_is_path)
-        self.Workflow(path).cancel()
+        path = self._resolve_workflow_reference(str(workflow_ref), ref_is_path)
+        self.Workflow(path).cancel(status=status)
+
+    @staticmethod
+    def redirect_std_to_file(*args, **kwargs):
+        return redirect_std_to_file_hpcflow(*args, **kwargs)
 
     def configure_env(
         self,
-        name,
-        setup=None,
-        executables=None,
-        use_current_env=False,
-        env_source_file=None,
+        name: str,
+        setup: list[str] | None = None,
+        executables: list[_Executable] | None = None,
+        use_current_env: bool = False,
+        env_source_file: Path | None = None,
     ):
         """
         Configure an execution environment.
@@ -2369,10 +3800,10 @@ class BaseApp(metaclass=Singleton):
             setup = []
         if not executables:
             executables = []
-        if not env_source_file:
-            env_source_file = self.config.get("config_directory").joinpath(
-                "configured_envs.yaml"
-            )
+        env_source = env_source_file or self.config.get("config_directory").joinpath(
+            "configured_envs.yaml"
+        )
+        assert isinstance(env_source, Path)
         if use_current_env:
             if self.run_time_info.is_conda_venv:
                 # use the currently activated conda environment for the new app environment:
@@ -2399,35 +3830,36 @@ class BaseApp(metaclass=Singleton):
         ]
 
         new_env = self.Environment(name=name, setup=setup, executables=executables)
-        new_env_dat = new_env.to_json_like(exclude="_hash_value")[0]
-        if env_source_file.exists():
-            existing_env_dat = read_YAML_file(env_source_file, typ="rt")
-            if name in [i["name"] for i in existing_env_dat]:
+        new_env_dat = new_env.to_json_like(exclude={"_hash_value"})[0]
+        if env_source.exists():
+            existing_env_dat: list[dict] = read_YAML_file(env_source, typ="rt")
+            if any(name == i["name"] for i in existing_env_dat):
                 # TODO: this doesn't check all app envs, just those added with this method
                 raise ValueError(f"Environment {name!r} already exists.")
 
-            all_env_dat = existing_env_dat + [new_env_dat]
+            all_env_dat = [*existing_env_dat, new_env_dat]
 
             # write a new temporary config file
-            tmp_file = env_source_file.with_suffix(env_source_file.suffix + ".tmp")
+            tmp_file = env_source.with_suffix(env_source.suffix + ".tmp")
             self.logger.debug(f"Creating temporary env source file: {tmp_file!r}.")
             write_YAML_file(all_env_dat, tmp_file, typ="rt")
 
             # atomic rename, overwriting original:
             self.logger.debug("Replacing original env source file with temporary file.")
-            os.replace(src=tmp_file, dst=env_source_file)
+            os.replace(src=tmp_file, dst=env_source)
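# Editor's sketch of the write-then-rename step above: write the whole file to
# a sibling *.tmp path, then os.replace() swaps it in atomically (on the same
# filesystem), so a crash mid-write never corrupts the original. Paths and
# contents below are illustrative:
import os
from pathlib import Path

target = Path("configured_envs.yaml")
tmp_file = target.with_suffix(target.suffix + ".tmp")
tmp_file.write_text("- name: my_env\n")
os.replace(src=tmp_file, dst=target)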
 
         else:
             all_env_dat = [new_env_dat]
-            write_YAML_file(all_env_dat, env_source_file, typ="rt")
+            write_YAML_file(all_env_dat, env_source, typ="rt")
 
         cur_env_source_files = self.config.get("environment_sources")
-        if env_source_file not in cur_env_source_files:
-            self.config.append("environment_sources", str(env_source_file))
+        if env_source not in cur_env_source_files:
+            self.config.append("environment_sources", str(env_source))
             self.config.save()
 
-    def get_demo_data_files_manifest(self) -> Dict[str, Union[None, str]]:
-        """Get a dict whose keys are example data file names and whose values are the
+    def get_demo_data_files_manifest(self) -> dict[str, Any]:
+        """
+        Get a dict whose keys are example data file names and whose values are the
         source files if the source file required unzipping or `None` otherwise.
 
         If the config item `demo_data_manifest_file` is set, this is used as the manifest
@@ -2448,26 +3880,26 @@ class BaseApp(metaclass=Singleton):
                 logger=self.logger,
             )
             with fs.open(url_path) as fh:
-                manifest = json.load(fh)
+                return json.load(fh)
         else:
             self.logger.debug(
                 f"loading example data files manifest from the app attribute "
                 f"`demo_data_manifest_dir`: "
                 f"{self.demo_data_manifest_dir!r}."
             )
-            package = self.demo_data_manifest_dir
-            resource = "demo_data_manifest.json"
-            fh = resources.files(package).joinpath(resource).open("rt")
-            manifest = json.load(fh)
-            fh.close()
-        return manifest
-
-    def list_demo_data_files(self) -> Tuple[str]:
+            if (package := self.demo_data_manifest_dir) is None:
+                self.logger.warning("no demo data dir defined")
+                return {}
+            with open_text_resource(package, "demo_data_manifest.json") as fh:
+                return json.load(fh)
+
+    def list_demo_data_files(self) -> tuple[str, ...]:
         """List available example data files."""
-        return tuple(self.get_demo_data_files_manifest().keys())
+        return tuple(self.get_demo_data_files_manifest())
 
-    def _get_demo_data_file_source_path(self, file_name) -> Tuple[Path, bool, bool]:
-        """Get the full path to an example data file on the local file system, whether
+    def _get_demo_data_file_source_path(self, file_name: str) -> tuple[Path, bool, bool]:
+        """
+        Get the full path to an example data file on the local file system, whether
         the file must be unpacked, and whether the file should be deleted.
 
         If `config.demo_data_dir` is set, this directory will be used as the example data
@@ -2484,10 +3916,9 @@ class BaseApp(metaclass=Singleton):
         value of `config.demo_data_dir` (without saving to the persistent config file),
         and then retrieve the example data file path as above. The default value is set to
         the GitHub repo of the app using the current tag/version.
-
         """
 
-        def _retrieve_source_path_from_config(src_fn):
+        def _retrieve_source_path_from_config(src_fn: str):
             fs, url_path = rate_limit_safe_url_to_fs(
                 self,
                 self.config.demo_data_dir,
@@ -2517,7 +3948,7 @@ class BaseApp(metaclass=Singleton):
         if file_name not in manifest:
             raise ValueError(f"No such example data file {file_name!r}.")
 
-        spec = manifest[file_name]
+        spec: dict[str, str] = manifest[file_name]
         requires_unpack = bool(spec)
         src_fn = spec["in_zip"] if requires_unpack else file_name
 
@@ -2535,49 +3966,37 @@ class BaseApp(metaclass=Singleton):
             f"source directory: {self.demo_data_dir!r}."
         )
         # `config.demo_data_dir` not set, so try to use `app.demo_data_dir`:
-        package = self.demo_data_dir
-        resource_exists = True
-        delete = False
-        try:
-            ctx_man = resources.as_file(resources.files(package).joinpath(src_fn))
-            # raises ModuleNotFoundError
-        except ModuleNotFoundError:
-            resource_exists = False
 
-        if resource_exists:
+        if package := self.demo_data_dir:
             try:
-                with ctx_man as path:
+                with get_file_context(package, src_fn) as path:
                     out = path
+                    delete = False
             except (ModuleNotFoundError, FileNotFoundError):
-                # frozen app
-                resource_exists = False
-
-        if not resource_exists:
-            # example data not included (e.g. frozen, or installed via PyPI/conda), so
-            # set a default value for `config.demo_data_dir` (point to the package
-            # GitHub repo for the current tag):
-            path = "/".join(package.split("."))
-            url = self._get_github_url(sha=f"v{self.version}", path=path)
-            self.logger.info(
-                f"path {path!r} does not exist as a package resource (example data "
-                f"was probably not included in the app), so non-persistently setting "
-                f"the config item `demo_data_dir` to the app's GitHub repo path: "
-                f"{url!r}."
-            )
-            self.config.demo_data_dir = url
-            out, delete = _retrieve_source_path_from_config(src_fn)
+                # example data not included (e.g. frozen, or installed via
+                # PyPI/conda), so set a default value for `config.demo_data_dir`
+                # (point to the package GitHub repo for the current tag):
+                path_ = package.replace(".", "/")
+                url = self._get_github_url(sha=f"v{self.version}", path=path_)
+                self.logger.info(
+                    f"path {path_!r} does not exist as a package resource (example data "
+                    f"was probably not included in the app), so non-persistently setting "
+                    f"the config item `demo_data_dir` to the app's GitHub repo path: "
+                    f"{url!r}."
+                )
+                self.config.demo_data_dir = url
+                out, delete = _retrieve_source_path_from_config(src_fn)
 
         return out, requires_unpack, delete
 
-    def get_demo_data_file_path(self, file_name) -> Path:
-        """Get the full path to an example data file in the app cache directory.
+    def get_demo_data_file_path(self, file_name: str) -> Path:
+        """
+        Get the full path to an example data file in the app cache directory.
 
         If the file does not already exist in the app cache directory, it will be added
         (and unzipped if required). The file may first be downloaded from a remote file
         system such as GitHub (see `_get_demo_data_file_source_path` for details).
-
         """
-
         # check if file exists in cache dir already
         cache_file_path = self.demo_data_cache_dir.joinpath(file_name)
         if cache_file_path.exists():
@@ -2617,22 +4036,26 @@ class BaseApp(metaclass=Singleton):
                 src.unlink()
         return cache_file_path
 
-    def cache_demo_data_file(self, file_name) -> Path:
+    def cache_demo_data_file(self, file_name: str) -> Path:
         """
         Get the name of a cached demo data file.
         """
         return self.get_demo_data_file_path(file_name)
 
-    def cache_all_demo_data_files(self) -> List[Path]:
+    def cache_all_demo_data_files(self) -> list[Path]:
         """
         Get the name of all cached demo data file.
         """
-        return [self.get_demo_data_file_path(i) for i in self.list_demo_data_files()]
+        return [
+            self.get_demo_data_file_path(filename)
+            for filename in self.list_demo_data_files()
+        ]
 
     def copy_demo_data(
-        self, file_name: str, dst: Optional[PathLike] = None, doc: bool = True
+        self, file_name: str, dst: PathLike | None = None, doc: bool = True
     ) -> str:
-        """Copy a builtin demo data file to the specified location.
+        """
+        Copy a builtin demo data file to the specified location.
 
         Parameters
         ----------
@@ -2642,7 +4065,6 @@ class BaseApp(metaclass=Singleton):
             Directory or full file path to copy the demo data file to. If not specified,
             the current working directory will be used.
         """
-
         dst = dst or Path(".")
         src = self.get_demo_data_file_path(file_name)
         shutil.copy2(src, dst)  # copies metadata, and `dst` can be a dir
@@ -2650,8 +4072,10 @@ class BaseApp(metaclass=Singleton):
         return src.name
 
     def _get_github_url(self, sha: str, path: str):
-        """Return a fsspec URL for retrieving a file or directory on the app's GitHub
-        repository."""
+        """
+        Return a fsspec URL for retrieving a file or directory on the app's GitHub
+        repository.
+        """
         return f"github://{self.gh_org}:{self.gh_repo}@{sha}/{path}"
 
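# Editor's note: the fsspec URL built by _get_github_url has the form
# github://<org>:<repo>@<sha-or-tag>/<path>; the org/repo/tag values below
# are illustrative, not taken from the diff:
gh_org, gh_repo, sha, path = "hpcflow", "hpcflow-new", "v0.2.0a199", "hpcflow/data"
print(f"github://{gh_org}:{gh_repo}@{sha}/{path}")
# -> github://hpcflow:hpcflow-new@v0.2.0a199/hpcflow/data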