hpcflow-new2 0.2.0a188__py3-none-any.whl → 0.2.0a190__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (115)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +8 -6
  2. hpcflow/_version.py +1 -1
  3. hpcflow/app.py +1 -0
  4. hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
  5. hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
  6. hpcflow/sdk/__init__.py +21 -15
  7. hpcflow/sdk/app.py +2133 -770
  8. hpcflow/sdk/cli.py +281 -250
  9. hpcflow/sdk/cli_common.py +6 -2
  10. hpcflow/sdk/config/__init__.py +1 -1
  11. hpcflow/sdk/config/callbacks.py +77 -42
  12. hpcflow/sdk/config/cli.py +126 -103
  13. hpcflow/sdk/config/config.py +578 -311
  14. hpcflow/sdk/config/config_file.py +131 -95
  15. hpcflow/sdk/config/errors.py +112 -85
  16. hpcflow/sdk/config/types.py +145 -0
  17. hpcflow/sdk/core/actions.py +1054 -994
  18. hpcflow/sdk/core/app_aware.py +24 -0
  19. hpcflow/sdk/core/cache.py +81 -63
  20. hpcflow/sdk/core/command_files.py +275 -185
  21. hpcflow/sdk/core/commands.py +111 -107
  22. hpcflow/sdk/core/element.py +724 -503
  23. hpcflow/sdk/core/enums.py +192 -0
  24. hpcflow/sdk/core/environment.py +74 -93
  25. hpcflow/sdk/core/errors.py +398 -51
  26. hpcflow/sdk/core/json_like.py +540 -272
  27. hpcflow/sdk/core/loop.py +380 -334
  28. hpcflow/sdk/core/loop_cache.py +160 -43
  29. hpcflow/sdk/core/object_list.py +370 -207
  30. hpcflow/sdk/core/parameters.py +728 -600
  31. hpcflow/sdk/core/rule.py +59 -41
  32. hpcflow/sdk/core/run_dir_files.py +33 -22
  33. hpcflow/sdk/core/task.py +1546 -1325
  34. hpcflow/sdk/core/task_schema.py +240 -196
  35. hpcflow/sdk/core/test_utils.py +126 -88
  36. hpcflow/sdk/core/types.py +387 -0
  37. hpcflow/sdk/core/utils.py +410 -305
  38. hpcflow/sdk/core/validation.py +82 -9
  39. hpcflow/sdk/core/workflow.py +1192 -1028
  40. hpcflow/sdk/core/zarr_io.py +98 -137
  41. hpcflow/sdk/demo/cli.py +46 -33
  42. hpcflow/sdk/helper/cli.py +18 -16
  43. hpcflow/sdk/helper/helper.py +75 -63
  44. hpcflow/sdk/helper/watcher.py +61 -28
  45. hpcflow/sdk/log.py +83 -59
  46. hpcflow/sdk/persistence/__init__.py +8 -31
  47. hpcflow/sdk/persistence/base.py +988 -586
  48. hpcflow/sdk/persistence/defaults.py +6 -0
  49. hpcflow/sdk/persistence/discovery.py +38 -0
  50. hpcflow/sdk/persistence/json.py +408 -153
  51. hpcflow/sdk/persistence/pending.py +158 -123
  52. hpcflow/sdk/persistence/store_resource.py +37 -22
  53. hpcflow/sdk/persistence/types.py +307 -0
  54. hpcflow/sdk/persistence/utils.py +14 -11
  55. hpcflow/sdk/persistence/zarr.py +477 -420
  56. hpcflow/sdk/runtime.py +44 -41
  57. hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
  58. hpcflow/sdk/submission/jobscript.py +444 -404
  59. hpcflow/sdk/submission/schedulers/__init__.py +133 -40
  60. hpcflow/sdk/submission/schedulers/direct.py +97 -71
  61. hpcflow/sdk/submission/schedulers/sge.py +132 -126
  62. hpcflow/sdk/submission/schedulers/slurm.py +263 -268
  63. hpcflow/sdk/submission/schedulers/utils.py +7 -2
  64. hpcflow/sdk/submission/shells/__init__.py +14 -15
  65. hpcflow/sdk/submission/shells/base.py +102 -29
  66. hpcflow/sdk/submission/shells/bash.py +72 -55
  67. hpcflow/sdk/submission/shells/os_version.py +31 -30
  68. hpcflow/sdk/submission/shells/powershell.py +37 -29
  69. hpcflow/sdk/submission/submission.py +203 -257
  70. hpcflow/sdk/submission/types.py +143 -0
  71. hpcflow/sdk/typing.py +163 -12
  72. hpcflow/tests/conftest.py +8 -6
  73. hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
  74. hpcflow/tests/scripts/test_main_scripts.py +60 -30
  75. hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -4
  76. hpcflow/tests/unit/test_action.py +86 -75
  77. hpcflow/tests/unit/test_action_rule.py +9 -4
  78. hpcflow/tests/unit/test_app.py +13 -6
  79. hpcflow/tests/unit/test_cli.py +1 -1
  80. hpcflow/tests/unit/test_command.py +71 -54
  81. hpcflow/tests/unit/test_config.py +20 -15
  82. hpcflow/tests/unit/test_config_file.py +21 -18
  83. hpcflow/tests/unit/test_element.py +58 -62
  84. hpcflow/tests/unit/test_element_iteration.py +3 -1
  85. hpcflow/tests/unit/test_element_set.py +29 -19
  86. hpcflow/tests/unit/test_group.py +4 -2
  87. hpcflow/tests/unit/test_input_source.py +116 -93
  88. hpcflow/tests/unit/test_input_value.py +29 -24
  89. hpcflow/tests/unit/test_json_like.py +44 -35
  90. hpcflow/tests/unit/test_loop.py +65 -58
  91. hpcflow/tests/unit/test_object_list.py +17 -12
  92. hpcflow/tests/unit/test_parameter.py +16 -7
  93. hpcflow/tests/unit/test_persistence.py +48 -35
  94. hpcflow/tests/unit/test_resources.py +20 -18
  95. hpcflow/tests/unit/test_run.py +8 -3
  96. hpcflow/tests/unit/test_runtime.py +2 -1
  97. hpcflow/tests/unit/test_schema_input.py +23 -15
  98. hpcflow/tests/unit/test_shell.py +3 -2
  99. hpcflow/tests/unit/test_slurm.py +8 -7
  100. hpcflow/tests/unit/test_submission.py +39 -19
  101. hpcflow/tests/unit/test_task.py +352 -247
  102. hpcflow/tests/unit/test_task_schema.py +33 -20
  103. hpcflow/tests/unit/test_utils.py +9 -11
  104. hpcflow/tests/unit/test_value_sequence.py +15 -12
  105. hpcflow/tests/unit/test_workflow.py +114 -83
  106. hpcflow/tests/unit/test_workflow_template.py +0 -1
  107. hpcflow/tests/workflows/test_jobscript.py +2 -1
  108. hpcflow/tests/workflows/test_workflows.py +18 -13
  109. {hpcflow_new2-0.2.0a188.dist-info → hpcflow_new2-0.2.0a190.dist-info}/METADATA +2 -1
  110. hpcflow_new2-0.2.0a190.dist-info/RECORD +165 -0
  111. hpcflow/sdk/core/parallel.py +0 -21
  112. hpcflow_new2-0.2.0a188.dist-info/RECORD +0 -158
  113. {hpcflow_new2-0.2.0a188.dist-info → hpcflow_new2-0.2.0a190.dist-info}/LICENSE +0 -0
  114. {hpcflow_new2-0.2.0a188.dist-info → hpcflow_new2-0.2.0a190.dist-info}/WHEEL +0 -0
  115. {hpcflow_new2-0.2.0a188.dist-info → hpcflow_new2-0.2.0a190.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/app.py CHANGED
@@ -2,26 +2,25 @@
 
 from __future__ import annotations
 
-from collections import defaultdict
+from collections import Counter
+from contextlib import AbstractContextManager, nullcontext
 from datetime import datetime, timezone
 import enum
 import json
 import shutil
 from functools import wraps
 from importlib import resources, import_module
-from logging import Logger
 import os
 from contextlib import contextmanager
 from pathlib import Path
 import sys
 from tempfile import TemporaryDirectory
-from typing import Any, Callable, Dict, List, Optional, Type, Union, Tuple
+from typing import Any, TypeVar, Generic, cast, TYPE_CHECKING
 import warnings
 import zipfile
 from platformdirs import user_cache_path, user_data_dir
 import requests
-from reretry import retry
-import rich
+from reretry import retry  # type: ignore
 from rich.console import Console, Group
 from rich.syntax import Syntax
 from rich.table import Table, box
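
The typing changes in this hunk move the module from `typing.Dict`/`Optional`-style annotations to builtin generics (PEP 585) and `X | None` unions (PEP 604), which `from __future__ import annotations` makes safe on older interpreters. A minimal sketch of the two styles, using an illustrative function rather than anything from the package:

    from __future__ import annotations  # annotations become lazily-evaluated strings

    # Old style, as removed by this diff:
    #     from typing import Dict, Optional
    #     def load(path: Optional[str] = None) -> Dict[str, int]: ...

    # New style, as adopted by this diff:
    def load(path: str | None = None) -> dict[str, int]:
        # Builtin generics and unions; no typing imports needed at runtime.
        return {} if path is None else {path: 1}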
@@ -29,50 +28,301 @@ from rich.text import Text
 from rich.padding import Padding
 from rich.panel import Panel
 from rich import print as rich_print
-from fsspec.core import url_to_fs
-from fsspec.implementations.local import LocalFileSystem
-
+from fsspec.core import url_to_fs  # type: ignore
+from fsspec.implementations.local import LocalFileSystem  # type: ignore
 
 from hpcflow import __version__
-from hpcflow.sdk.core.actions import EARStatus
-from hpcflow.sdk.core.errors import WorkflowNotFoundError
-from hpcflow.sdk.core.object_list import ObjectList
+from hpcflow.sdk.core.enums import EARStatus
 from hpcflow.sdk.core.utils import (
     read_YAML_str,
     read_YAML_file,
     read_JSON_file,
     write_YAML_file,
     write_JSON_file,
+    parse_timestamp,
+    get_file_context,
+    open_text_resource,
 )
 from hpcflow.sdk import sdk_classes, sdk_funcs, get_SDK_logger
 from hpcflow.sdk.config import Config, ConfigFile
 from hpcflow.sdk.core import ALL_TEMPLATE_FORMATS
 from hpcflow.sdk.log import AppLog, TimeIt
-from hpcflow.sdk.persistence import DEFAULT_STORE_FORMAT
+from hpcflow.sdk.persistence.defaults import DEFAULT_STORE_FORMAT
 from hpcflow.sdk.persistence.base import TEMPLATE_COMP_TYPES
 from hpcflow.sdk.runtime import RunTimeInfo
 from hpcflow.sdk.cli import make_cli
-from hpcflow.sdk.submission.jobscript_info import JobscriptElementState
+from hpcflow.sdk.submission.enums import JobscriptElementState
 from hpcflow.sdk.submission.shells import get_shell
 from hpcflow.sdk.submission.shells.os_version import (
     get_OS_info_POSIX,
     get_OS_info_windows,
 )
-from hpcflow.sdk.typing import PathLike
+
+if TYPE_CHECKING:
+    from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
+    from logging import Logger
+    from types import ModuleType
+    from typing import ClassVar, Literal, Protocol
+    from typing_extensions import Final
+    from rich.status import Status
+    from .typing import (
+        BasicTemplateComponents,
+        KnownSubmission,
+        KnownSubmissionItem,
+        PathLike,
+        TemplateComponents,
+    )
+    from .config.config import ConfigOptions
+    from .core.actions import (
+        ElementActionRun,
+        ElementAction,
+        ActionEnvironment,
+        Action,
+        ActionScope,
+        ActionRule,
+    )
+    from .core.command_files import (
+        FileSpec,
+        FileNameSpec,
+        InputFileGenerator,
+        FileNameStem,
+        FileNameExt,
+        OutputFileParser,
+    )
+    from .core.commands import Command
+    from .core.element import (
+        ElementInputs,
+        ElementOutputs,
+        ElementInputFiles,
+        ElementOutputFiles,
+        ElementIteration,
+        Element,
+        ElementParameter,
+        ElementResources,
+        ElementFilter,
+        ElementGroup,
+    )
+    from .core.enums import ActionScopeType, InputSourceType, TaskSourceType
+    from .core.environment import (
+        NumCores,
+        Environment,
+        Executable as _Executable,
+        ExecutableInstance,
+    )
+    from .core.loop import Loop, WorkflowLoop
+    from .core.object_list import (
+        CommandFilesList as _CommandFilesList,
+        EnvironmentsList as _EnvironmentsList,
+        ExecutablesList,
+        GroupList,
+        ParametersList as _ParametersList,
+        ResourceList,
+        TaskList,
+        TaskSchemasList as _TaskSchemasList,
+        TaskTemplateList,
+        WorkflowLoopList,
+        WorkflowTaskList,
+    )
+    from .core.parameters import (
+        SchemaParameter,
+        InputValue,
+        Parameter,
+        ParameterValue,
+        InputSource,
+        ResourceSpec,
+        SchemaOutput,
+        ValueSequence,
+        SchemaInput,
+    )
+    from .core.rule import Rule
+    from .core.run_dir_files import RunDirAppFiles
+    from .core.task import (
+        Task,
+        WorkflowTask,
+        Parameters,
+        TaskInputParameters,
+        TaskOutputParameters,
+        ElementPropagation,
+        ElementSet,
+    )
+    from .core.task_schema import TaskSchema, TaskObjective
+    from .core.workflow import (
+        Workflow as _Workflow,
+        WorkflowTemplate as _WorkflowTemplate,
+    )
+    from .submission.jobscript import Jobscript
+    from .submission.submission import Submission
+    from .submission.schedulers import Scheduler, QueuedScheduler
+    from .submission.schedulers.direct import DirectPosix, DirectWindows
+    from .submission.schedulers.sge import SGEPosix
+    from .submission.schedulers.slurm import SlurmPosix
+    from .submission.shells.base import VersionInfo
+
+    # Complex types for SDK functions
+    class _MakeWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.make_workflow`"""
+
+        def __call__(
+            self,
+            template_file_or_str: PathLike | str,
+            is_string: bool = False,
+            template_format: Literal["json", "yaml"] | None = None,
+            path: PathLike = None,
+            name: str | None = None,
+            overwrite: bool = False,
+            store: str = DEFAULT_STORE_FORMAT,
+            ts_fmt: str | None = None,
+            ts_name_fmt: str | None = None,
+            store_kwargs: dict[str, Any] | None = None,
+            variables: dict[str, str] | None = None,
+            status: bool = True,
+        ) -> _Workflow:
+            ...
+
+    class _MakeDemoWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.make_demo_workflow`"""
+
+        def __call__(
+            self,
+            workflow_name: str,
+            template_format: Literal["json", "yaml"] | None = None,
+            path: PathLike | None = None,
+            name: str | None = None,
+            overwrite: bool = False,
+            store: str = DEFAULT_STORE_FORMAT,
+            ts_fmt: str | None = None,
+            ts_name_fmt: str | None = None,
+            store_kwargs: dict[str, Any] | None = None,
+            variables: dict[str, str] | None = None,
+            status: bool = True,
+        ) -> _Workflow:
+            ...
+
+    class _MakeAndSubmitWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.make_and_submit_workflow`"""
+
+        # Should be overloaded on return_idx, but not bothering
+        def __call__(
+            self,
+            template_file_or_str: PathLike | str,
+            is_string: bool = False,
+            template_format: Literal["json", "yaml"] | None = None,
+            path: PathLike | None = None,
+            name: str | None = None,
+            overwrite: bool = False,
+            store: str = DEFAULT_STORE_FORMAT,
+            ts_fmt: str | None = None,
+            ts_name_fmt: str | None = None,
+            store_kwargs: dict[str, Any] | None = None,
+            variables: dict[str, str] | None = None,
+            JS_parallelism: bool | None = None,
+            wait: bool = False,
+            add_to_known: bool = True,
+            return_idx: bool = False,
+            tasks: list[int] | None = None,
+            cancel: bool = False,
+            status: bool = True,
+        ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
+            ...
+
+    class _MakeAndSubmitDemoWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.make_and_submit_demo_workflow`"""
+
+        # Should be overloaded on return_idx, but not bothering
+        def __call__(
+            self,
+            workflow_name: str,
+            template_format: Literal["json", "yaml"] | None = None,
+            path: PathLike | None = None,
+            name: str | None = None,
+            overwrite: bool = False,
+            store: str = DEFAULT_STORE_FORMAT,
+            ts_fmt: str | None = None,
+            ts_name_fmt: str | None = None,
+            store_kwargs: dict[str, Any] | None = None,
+            variables: dict[str, str] | None = None,
+            JS_parallelism: bool | None = None,
+            wait: bool = False,
+            add_to_known: bool = True,
+            return_idx: bool = False,
+            tasks: list[int] | None = None,
+            cancel: bool = False,
+            status: bool = True,
+        ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
+            ...
+
+    class _SubmitWorkflow(Protocol):
+        """Type of :py:meth:`BaseApp.submit_workflow`"""
+
+        # Should be overloaded on return_idx, but not bothering
+        def __call__(
+            self,
+            workflow_path: PathLike,
+            JS_parallelism: bool | None = None,
+            wait: bool = False,
+            return_idx: bool = False,
+            tasks: list[int] | None = None,
+        ) -> Mapping[int, Sequence[int]] | None:
+            ...
+
+    class _GetKnownSubmissions(Protocol):
+        """Type of :py:meth:`BaseApp.get_known_submissions`"""
+
+        # Should be overloaded on as_json, but not bothering
+        def __call__(
+            self,
+            max_recent: int = 3,
+            no_update: bool = False,
+            as_json: bool = False,
+            status: Status | None = None,
+        ) -> Sequence[KnownSubmissionItem]:
+            ...
+
+    class _Show(Protocol):
+        """Type of :py:meth:`BaseApp.show`"""
+
+        def __call__(
+            self,
+            max_recent: int = 3,
+            full: bool = False,
+            no_update: bool = False,
+        ) -> None:
+            ...
+
+    class _Cancel(Protocol):
+        """Type of :py:meth:`BaseApp.cancel`"""
+
+        def __call__(
+            self,
+            workflow_ref: int | str | Path,
+            ref_is_path: str | None = None,
+        ) -> None:
+            ...
+
+    class _RunTests(Protocol):
+        """Type of :py:meth:`BaseApp.run_tests and run_hpcflow_tests`"""
+
+        def __call__(self, *args: str) -> int:
+            ...
+
 
 SDK_logger = get_SDK_logger(__name__)
 DEMO_WK_FORMATS = {".yaml": "yaml", ".yml": "yaml", ".json": "json", ".jsonc": "json"}
 
+T = TypeVar("T")
 
-def rate_limit_safe_url_to_fs(app, *args, logger=None, **kwargs):
-    R"""Call fsspec's ``url_to_fs`` but retry on ``requests.exceptions.HTTPError``\ s.
+
+def rate_limit_safe_url_to_fs(
+    app: BaseApp, *args, logger: Logger | None = None, **kwargs
+):
+    R"""
+    Call fsspec's ``url_to_fs`` but retry on ``requests.exceptions.HTTPError``\ s.
 
     References
     ----------
     [1]: https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?
     apiVersion=2022-11-28#about-secondary-rate-limits
     """
-
     auth = {}
     if app.run_time_info.in_pytest:
         gh_token = os.environ.get("GH_TOKEN")
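
The `Protocol` classes added in this hunk describe the call signatures of methods that `BaseApp` builds dynamically and exposes via `__getattr__`, so type checkers can verify call sites even though the attributes do not exist statically. A minimal, runnable sketch of the pattern, with illustrative names rather than hpcflow's own:

    from typing import Protocol


    class _Greet(Protocol):
        # The call signature that the dynamically-built function must satisfy.
        def __call__(self, name: str, *, shout: bool = False) -> str: ...


    class App:
        @property
        def greet(self) -> _Greet:
            # Built at runtime, but call sites are still checked against _Greet.
            def _greet(name: str, *, shout: bool = False) -> str:
                msg = f"hello, {name}"
                return msg.upper() if shout else msg

            return _greet


    print(App().greet("world", shout=True))  # -> HELLO, WORLD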
@@ -81,10 +331,11 @@ def rate_limit_safe_url_to_fs(app, *args, logger=None, **kwargs):
             # requests allowed per hour to 1000 [1]. fsspec requires "username" to be
             # set if using "token":
             auth = {"username": "", "token": gh_token}
-            logger.info(
-                "calling fsspec's `url_to_fs` with a token from the env variable "
-                "`GH_TOKEN`."
-            )
+            if logger:
+                logger.info(
+                    "calling fsspec's `url_to_fs` with a token from the env variable "
+                    "`GH_TOKEN`."
+                )
 
     # GitHub actions testing is potentially highly concurrent, with multiple
     # Python versions and OSes being tested at the same time; so we might hit
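
As the surrounding code notes, `rate_limit_safe_url_to_fs` wraps fsspec's `url_to_fs` in a retry so that GitHub's secondary rate limits do not fail the call outright. hpcflow uses the `reretry` decorator for this; a hand-rolled sketch of the same idea (function and parameter names below are illustrative, not from the package):

    import time

    import requests


    def with_retries(func, *args, tries=3, delay=2.0, backoff=2.0, **kwargs):
        """Call `func`, retrying on HTTP errors with exponential backoff."""
        for attempt in range(tries):
            try:
                return func(*args, **kwargs)
            except requests.exceptions.HTTPError:
                if attempt == tries - 1:
                    raise  # out of retries: propagate the final failure
                time.sleep(delay * backoff**attempt)


    # e.g. with_retries(url_to_fs, "github://owner:repo@main/", tries=4)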
@@ -104,48 +355,64 @@ def rate_limit_safe_url_to_fs(app, *args, logger=None, **kwargs):
     return _inner(*args, **kwargs)
 
 
-def __getattr__(name):
-    """Allow access to core classes and API functions (useful for type annotations)."""
+def __getattr__(name: str):
+    """Allow access to core classes and API functions."""
     try:
         return get_app_attribute(name)
     except AttributeError:
         raise AttributeError(f"module {__name__!r} has no attribute {name!r}.")
 
 
-def get_app_attribute(name):
-    """A function to assign to an app module `__getattr__` to access app attributes."""
+def get_app_attribute(name: str):
+    """
+    A function to assign to an app module `__getattr__` to access app attributes.
+    """
+    app_obj: BaseApp
     try:
-        app_obj = App.get_instance()
+        app_obj = cast("App", App.get_instance())
     except RuntimeError:
-        app_obj = BaseApp.get_instance()
+        app_obj = cast("BaseApp", BaseApp.get_instance())
     try:
         return getattr(app_obj, name)
     except AttributeError:
         raise AttributeError(f"module {app_obj.module!r} has no attribute {name!r}.")
 
 
-def get_app_module_all():
+def get_app_module_all() -> list[str]:
     """
     The list of all symbols exported by this module.
     """
-    return ["app"] + list(sdk_classes.keys()) + list(sdk_funcs)
+    return ["app", *sdk_classes, *sdk_funcs]
 
 
-def get_app_module_dir():
+def get_app_module_dir() -> Callable[[], list[str]]:
    """
    The sorted list of all symbols exported by this module.
    """
    return lambda: sorted(get_app_module_all())
 
 
-class Singleton(type):
+class Singleton(type, Generic[T]):
     """
     Metaclass that enforces that only one instance of a class can be made.
+
+    Type Parameters
+    ---------------
+    T
+        The type of the class that is a singleton.
     """
 
-    _instances = {}
+    _instances: ClassVar[dict[Singleton, Any]] = {}
+
+    def __call__(cls: Singleton[T], *args, **kwargs) -> T:
+        """
+        Get the current instance or make it if it doesn't already exist.
 
-    def __call__(cls, *args, **kwargs):
+        Return
+        ------
+        T:
+            The unique instance of the class.
+        """
         SDK_logger.info(
             f"App metaclass __call__: "
             f"name={kwargs['name']!r}, version={kwargs['version']!r}."
@@ -155,8 +422,15 @@ class Singleton(type):
             cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
         return cls._instances[cls]
 
-    def get_instance(cls):
-        """Retrieve the instance of the singleton class if initialised."""
+    def get_instance(cls: Singleton[T]) -> T:
+        """
+        Retrieve the instance of the singleton class if initialised.
+
+        Raises
+        ------
+        RuntimeError
+            If there is no instance already.
+        """
         try:
             return cls._instances[cls]
         except KeyError:
@@ -164,7 +438,8 @@ class Singleton(type):
 
 
 class BaseApp(metaclass=Singleton):
-    """Class to generate the hpcflow application.
+    """
+    Class to generate the hpcflow application.
 
     Parameters
     ----------
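
The next hunk annotates class-level constants with `ClassVar`, which tells type checkers that these are attributes shared by the class, not per-instance fields. A tiny illustrative sketch (not hpcflow code):

    from typing import ClassVar


    class Config:
        # One shared value for every Config; checkers reject `self.sep = ...`.
        sep: ClassVar[str] = "::"
        # An ordinary instance attribute, set per object in __init__.
        path: str

        def __init__(self, path: str):
            self.path = path


    print(Config.sep, Config("a.toml").path)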
@@ -206,28 +481,29 @@ class BaseApp(metaclass=Singleton):
         URL to documentation.
     """
 
-    _known_subs_file_name = "known_submissions.txt"
-    _known_subs_file_sep = "::"
-    _submission_ts_fmt = r"%Y-%m-%d %H:%M:%S.%f"
+    _known_subs_file_name: ClassVar = "known_submissions.txt"
+    _known_subs_file_sep: ClassVar = "::"
+    _submission_ts_fmt: ClassVar = r"%Y-%m-%d %H:%M:%S.%f"
+    __load_pending: ClassVar = False
 
     def __init__(
         self,
-        name,
-        version,
-        module,
-        description,
-        gh_org,
-        gh_repo,
-        config_options,
-        scripts_dir,
-        workflows_dir: str = None,
-        demo_data_dir: str = None,
-        demo_data_manifest_dir: str = None,
-        template_components: Dict = None,
-        pytest_args=None,
-        package_name=None,
-        docs_import_conv=None,
-        docs_url=None,
+        name: str,
+        version: str,
+        module: str,
+        description: str,
+        gh_org: str,
+        gh_repo: str,
+        config_options: ConfigOptions,
+        scripts_dir: str,
+        workflows_dir: str | None = None,
+        demo_data_dir: str | None = None,
+        demo_data_manifest_dir: str | None = None,
+        template_components: dict[str, list[dict]] | None = None,
+        pytest_args: list[str] | None = None,
+        package_name: str | None = None,
+        docs_import_conv: str | None = None,
+        docs_url: str | None = None,
     ):
         SDK_logger.info(f"Generating {self.__class__.__name__} {name!r}.")
 
@@ -262,87 +538,1098 @@ class BaseApp(metaclass=Singleton):
262
538
  #: URL to documentation.
263
539
  self.docs_url = docs_url
264
540
 
265
- #: Command line interface subsystem.
266
- self.cli = make_cli(self)
541
+ #: Command line interface subsystem.
542
+ self.cli = make_cli(self)
543
+
544
+ self._log = AppLog(self)
545
+ self._run_time_info = RunTimeInfo(
546
+ self.name,
547
+ self.package_name,
548
+ self.version,
549
+ self.runtime_info_logger,
550
+ )
551
+
552
+ self._builtin_template_components = template_components or {}
553
+
554
+ self._config: Config | None = (
555
+ None # assigned on first access to `config` property
556
+ )
557
+ self._config_files: dict[
558
+ str, ConfigFile
559
+ ] = {} # assigned on config load, keys are string absolute paths
560
+
561
+ # Set by `_load_template_components`:
562
+ self._template_components: TemplateComponents = {}
563
+ self._parameters: _ParametersList | None = None
564
+ self._command_files: _CommandFilesList | None = None
565
+ self._environments: _EnvironmentsList | None = None
566
+ self._task_schemas: _TaskSchemasList | None = None
567
+ self._scripts: dict[str, Path] | None = None
568
+
569
+ self.__app_type_cache: dict[str, type] = {}
570
+ self.__app_func_cache: dict[str, Callable[..., Any]] = {}
571
+
572
+ # assigned on first access to respective properties
573
+ self._user_data_dir: Path | None = None
574
+ self._user_cache_dir: Path | None = None
575
+ self._user_runtime_dir: Path | None = None
576
+ self._user_data_hostname_dir: Path | None = None
577
+ self._user_cache_hostname_dir: Path | None = None
578
+ self._demo_data_cache_dir: Path | None = None
579
+
580
+ @property
581
+ def ElementActionRun(self) -> type[ElementActionRun]:
582
+ """
583
+ The :class:`ElementActionRun` class.
584
+
585
+ :meta private:
586
+ """
587
+ return self._get_app_core_class("ElementActionRun")
588
+
589
+ @property
590
+ def ElementAction(self) -> type[ElementAction]:
591
+ """
592
+ The :class:`ElementAction` class.
593
+
594
+ :meta private:
595
+ """
596
+ return self._get_app_core_class("ElementAction")
597
+
598
+ @property
599
+ def ElementFilter(self) -> type[ElementFilter]:
600
+ """
601
+ The :class:`ElementFilter` class.
602
+
603
+ :meta private:
604
+ """
605
+ return self._get_app_core_class("ElementFilter")
606
+
607
+ @property
608
+ def ElementGroup(self) -> type[ElementGroup]:
609
+ """
610
+ The :class:`ElementGroup` class.
611
+
612
+ :meta private:
613
+ """
614
+ return self._get_app_core_class("ElementGroup")
615
+
616
+ @property
617
+ def Environment(self) -> type[Environment]:
618
+ """
619
+ The :class:`Environment` class.
620
+
621
+ :meta private:
622
+ """
623
+ return self._get_app_core_class("Environment")
624
+
625
+ @property
626
+ def Executable(self) -> type[_Executable]:
627
+ """
628
+ The :class:`Executable` class.
629
+
630
+ :meta private:
631
+ """
632
+ return self._get_app_core_class("Executable")
633
+
634
+ @property
635
+ def ExecutableInstance(self) -> type[ExecutableInstance]:
636
+ """
637
+ The :class:`ExecutableInstance` class.
638
+
639
+ :meta private:
640
+ """
641
+ return self._get_app_core_class("ExecutableInstance")
642
+
643
+ @property
644
+ def NumCores(self) -> type[NumCores]:
645
+ """
646
+ The :class:`NumCores` class.
647
+
648
+ :meta private:
649
+ """
650
+ return self._get_app_core_class("NumCores")
651
+
652
+ @property
653
+ def ActionEnvironment(self) -> type[ActionEnvironment]:
654
+ """
655
+ The :class:`ActionEnvironment` class.
656
+
657
+ :meta private:
658
+ """
659
+ return self._get_app_core_class("ActionEnvironment")
660
+
661
+ @property
662
+ def Action(self) -> type[Action]:
663
+ """
664
+ The :class:`Action` class.
665
+
666
+ :meta private:
667
+ """
668
+ return self._get_app_core_class("Action")
669
+
670
+ @property
671
+ def ActionRule(self) -> type[ActionRule]:
672
+ """
673
+ The :class:`ActionRule` class.
674
+
675
+ :meta private:
676
+ """
677
+ return self._get_app_core_class("ActionRule")
678
+
679
+ @property
680
+ def ActionScope(self) -> type[ActionScope]:
681
+ """
682
+ The :class:`ActionScope` class.
683
+
684
+ :meta private:
685
+ """
686
+ return self._get_app_core_class("ActionScope")
687
+
688
+ @property
689
+ def ActionScopeType(self) -> type[ActionScopeType]:
690
+ """
691
+ The :class:`ActionScopeType` class.
692
+
693
+ :meta private:
694
+ """
695
+ return self._get_app_core_class("ActionScopeType")
696
+
697
+ @property
698
+ def FileSpec(self) -> type[FileSpec]:
699
+ """
700
+ The :class:`FileSpec` class.
701
+
702
+ :meta private:
703
+ """
704
+ return self._get_app_core_class("FileSpec")
705
+
706
+ @property
707
+ def FileNameSpec(self) -> type[FileNameSpec]:
708
+ """
709
+ The :class:`FileNameSpec` class.
710
+
711
+ :meta private:
712
+ """
713
+ return self._get_app_core_class("FileNameSpec")
714
+
715
+ @property
716
+ def FileNameStem(self) -> type[FileNameStem]:
717
+ """
718
+ The :class:`FileNameStem` class.
719
+
720
+ :meta private:
721
+ """
722
+ return self._get_app_core_class("FileNameStem")
723
+
724
+ @property
725
+ def FileNameExt(self) -> type[FileNameExt]:
726
+ """
727
+ The :class:`FileNameExt` class.
728
+
729
+ :meta private:
730
+ """
731
+ return self._get_app_core_class("FileNameExt")
732
+
733
+ @property
734
+ def OutputFileParser(self) -> type[OutputFileParser]:
735
+ """
736
+ The :class:`OutputFileParser` class.
737
+
738
+ :meta private:
739
+ """
740
+ return self._get_app_core_class("OutputFileParser")
741
+
742
+ @property
743
+ def InputSource(self) -> type[InputSource]:
744
+ """
745
+ The :class:`InputSource` class.
746
+
747
+ :meta private:
748
+ """
749
+ return self._get_app_core_class("InputSource")
750
+
751
+ @property
752
+ def InputSourceType(self) -> type[InputSourceType]:
753
+ """
754
+ The :class:`InputSourceType` class.
755
+
756
+ :meta private:
757
+ """
758
+ return self._get_app_core_class("InputSourceType")
759
+
760
+ @property
761
+ def ValueSequence(self) -> type[ValueSequence]:
762
+ """
763
+ The :class:`ValueSequence` class.
764
+
765
+ :meta private:
766
+ """
767
+ return self._get_app_core_class("ValueSequence")
768
+
769
+ @property
770
+ def SchemaInput(self) -> type[SchemaInput]:
771
+ """
772
+ The :class:`SchemaInput` class.
773
+
774
+ :meta private:
775
+ """
776
+ return self._get_app_core_class("SchemaInput")
777
+
778
+ @property
779
+ def InputFileGenerator(self) -> type[InputFileGenerator]:
780
+ """
781
+ The :class:`InputFileGenerator` class.
782
+
783
+ :meta private:
784
+ """
785
+ return self._get_app_core_class("InputFileGenerator")
786
+
787
+ @property
788
+ def Command(self) -> type[Command]:
789
+ """
790
+ The :class:`Command` class.
791
+
792
+ :meta private:
793
+ """
794
+ return self._get_app_core_class("Command")
795
+
796
+ @property
797
+ def ElementInputs(self) -> type[ElementInputs]:
798
+ """
799
+ The :class:`ElementInputs` class.
800
+
801
+ :meta private:
802
+ """
803
+ return self._get_app_core_class("ElementInputs")
804
+
805
+ @property
806
+ def ElementOutputs(self) -> type[ElementOutputs]:
807
+ """
808
+ The :class:`ElementOutputs` class.
809
+
810
+ :meta private:
811
+ """
812
+ return self._get_app_core_class("ElementOutputs")
813
+
814
+ @property
815
+ def ElementInputFiles(self) -> type[ElementInputFiles]:
816
+ """
817
+ The :class:`ElementInputFiles` class.
818
+
819
+ :meta private:
820
+ """
821
+ return self._get_app_core_class("ElementInputFiles")
822
+
823
+ @property
824
+ def ElementOutputFiles(self) -> type[ElementOutputFiles]:
825
+ """
826
+ The :class:`ElementOutputFiles` class.
827
+
828
+ :meta private:
829
+ """
830
+ return self._get_app_core_class("ElementOutputFiles")
831
+
832
+ @property
833
+ def ElementResources(self) -> type[ElementResources]:
834
+ """
835
+ The :class:`ElementResources` class.
836
+
837
+ :meta private:
838
+ """
839
+ return self._get_app_core_class("ElementResources")
840
+
841
+ @property
842
+ def ElementIteration(self) -> type[ElementIteration]:
843
+ """
844
+ The :class:`ElementIteration` class.
845
+
846
+ :meta private:
847
+ """
848
+ return self._get_app_core_class("ElementIteration")
849
+
850
+ @property
851
+ def ElementSet(self) -> type[ElementSet]:
852
+ """
853
+ The :class:`ElementSet` class.
854
+
855
+ :meta private:
856
+ """
857
+ return self._get_app_core_class("ElementSet")
858
+
859
+ @property
860
+ def Element(self) -> type[Element]:
861
+ """
862
+ The :class:`Element` class.
863
+
864
+ :meta private:
865
+ """
866
+ return self._get_app_core_class("Element")
867
+
868
+ @property
869
+ def ElementParameter(self) -> type[ElementParameter]:
870
+ """
871
+ The :class:`ElementParameter` class.
872
+
873
+ :meta private:
874
+ """
875
+ return self._get_app_core_class("ElementParameter")
876
+
877
+ @property
878
+ def Loop(self) -> type[Loop]:
879
+ """
880
+ The :class:`Loop` class.
881
+
882
+ :meta private:
883
+ """
884
+ return self._get_app_core_class("Loop")
885
+
886
+ @property
887
+ def WorkflowLoop(self) -> type[WorkflowLoop]:
888
+ """
889
+ The :class:`WorkflowLoop` class.
890
+
891
+ :meta private:
892
+ """
893
+ return self._get_app_core_class("WorkflowLoop")
894
+
895
+ @property
896
+ def CommandFilesList(self) -> type[_CommandFilesList]:
897
+ """
898
+ The :class:`CommandFilesList` class.
899
+
900
+ :meta private:
901
+ """
902
+ return self._get_app_core_class("CommandFilesList")
903
+
904
+ @property
905
+ def EnvironmentsList(self) -> type[_EnvironmentsList]:
906
+ """
907
+ The :class:`EnvironmentsList` class.
908
+
909
+ :meta private:
910
+ """
911
+ return self._get_app_core_class("EnvironmentsList")
912
+
913
+ @property
914
+ def ExecutablesList(self) -> type[ExecutablesList]:
915
+ """
916
+ The :class:`ExecutablesList` class.
917
+
918
+ :meta private:
919
+ """
920
+ return self._get_app_core_class("ExecutablesList")
921
+
922
+ @property
923
+ def GroupList(self) -> type[GroupList]:
924
+ """
925
+ The :class:`GroupList` class.
926
+
927
+ :meta private:
928
+ """
929
+ return self._get_app_core_class("GroupList")
930
+
931
+ @property
932
+ def ParametersList(self) -> type[_ParametersList]:
933
+ """
934
+ The :class:`ParametersList` class.
935
+
936
+ :meta private:
937
+ """
938
+ return self._get_app_core_class("ParametersList")
939
+
940
+ @property
941
+ def ResourceList(self) -> type[ResourceList]:
942
+ """
943
+ The :class:`ResourceList` class.
944
+
945
+ :meta private:
946
+ """
947
+ return self._get_app_core_class("ResourceList")
948
+
949
+ @property
950
+ def ResourceSpec(self) -> type[ResourceSpec]:
951
+ """
952
+ The :class:`ResourceSpec` class.
953
+
954
+ :meta private:
955
+ """
956
+ return self._get_app_core_class("ResourceSpec")
957
+
958
+ @property
959
+ def TaskList(self) -> type[TaskList]:
960
+ """
961
+ The :class:`TaskList` class.
962
+
963
+ :meta private:
964
+ """
965
+ return self._get_app_core_class("TaskList")
966
+
967
+ @property
968
+ def TaskSchemasList(self) -> type[_TaskSchemasList]:
969
+ """
970
+ The :class:`TaskSchemasList` class.
971
+
972
+ :meta private:
973
+ """
974
+ return self._get_app_core_class("TaskSchemasList")
975
+
976
+ @property
977
+ def TaskTemplateList(self) -> type[TaskTemplateList]:
978
+ """
979
+ The :class:`TaskTemplateList` class.
980
+
981
+ :meta private:
982
+ """
983
+ return self._get_app_core_class("TaskTemplateList")
984
+
985
+ @property
986
+ def WorkflowLoopList(self) -> type[WorkflowLoopList]:
987
+ """
988
+ The :class:`WorkflowLoopList` class.
989
+
990
+ :meta private:
991
+ """
992
+ return self._get_app_core_class("WorkflowLoopList")
993
+
994
+ @property
995
+ def WorkflowTaskList(self) -> type[WorkflowTaskList]:
996
+ """
997
+ The :class:`WorkflowTaskList` class.
998
+
999
+ :meta private:
1000
+ """
1001
+ return self._get_app_core_class("WorkflowTaskList")
1002
+
1003
+ @property
1004
+ def SchemaParameter(self) -> type[SchemaParameter]:
1005
+ """
1006
+ The :class:`SchemaParameter` class.
1007
+
1008
+ :meta private:
1009
+ """
1010
+ return self._get_app_core_class("SchemaParameter")
1011
+
1012
+ @property
1013
+ def SchemaOutput(self) -> type[SchemaOutput]:
1014
+ """
1015
+ The :class:`SchemaOutput` class.
1016
+
1017
+ :meta private:
1018
+ """
1019
+ return self._get_app_core_class("SchemaOutput")
1020
+
1021
+ @property
1022
+ def Rule(self) -> type[Rule]:
1023
+ """
1024
+ The :class:`Rule` class.
1025
+
1026
+ :meta private:
1027
+ """
1028
+ return self._get_app_core_class("Rule")
1029
+
1030
+ @property
1031
+ def RunDirAppFiles(self) -> type[RunDirAppFiles]:
1032
+ """
1033
+ The :class:`RunDirAppFiles` class.
1034
+
1035
+ :meta private:
1036
+ """
1037
+ return self._get_app_core_class("RunDirAppFiles")
1038
+
1039
+ @property
1040
+ def WorkflowTask(self) -> type[WorkflowTask]:
1041
+ """
1042
+ The :class:`WorkflowTask` class.
1043
+
1044
+ :meta private:
1045
+ """
1046
+ return self._get_app_core_class("WorkflowTask")
1047
+
1048
+ @property
1049
+ def Parameters(self) -> type[Parameters]:
1050
+ """
1051
+ The :class:`Parameters` class.
1052
+
1053
+ :meta private:
1054
+ """
1055
+ return self._get_app_core_class("Parameters")
1056
+
1057
+ @property
1058
+ def Parameter(self) -> type[Parameter]:
1059
+ """
1060
+ The :class:`Parameter` class.
1061
+
1062
+ :meta private:
1063
+ """
1064
+ return self._get_app_core_class("Parameter")
1065
+
1066
+ @property
1067
+ def ParameterValue(self) -> type[ParameterValue]:
1068
+ """
1069
+ The :class:`ParameterValue` class.
1070
+
1071
+ :meta private:
1072
+ """
1073
+ return self._get_app_core_class("ParameterValue")
1074
+
1075
+ @property
1076
+ def InputValue(self) -> type[InputValue]:
1077
+ """
1078
+ The :class:`InputValue` class.
1079
+
1080
+ :meta private:
1081
+ """
1082
+ return self._get_app_core_class("InputValue")
1083
+
1084
+ @property
1085
+ def Task(self) -> type[Task]:
1086
+ """
1087
+ The :class:`Task` class.
1088
+
1089
+ :meta private:
1090
+ """
1091
+ return self._get_app_core_class("Task")
1092
+
1093
+ @property
1094
+ def TaskSchema(self) -> type[TaskSchema]:
1095
+ """
1096
+ The :class:`TaskSchema` class.
1097
+
1098
+ :meta private:
1099
+ """
1100
+ return self._get_app_core_class("TaskSchema")
1101
+
1102
+ @property
1103
+ def TaskSourceType(self) -> type[TaskSourceType]:
1104
+ """
1105
+ The :class:`TaskSourceType` class.
1106
+
1107
+ :meta private:
1108
+ """
1109
+ return self._get_app_core_class("TaskSourceType")
1110
+
1111
+ @property
1112
+ def TaskObjective(self) -> type[TaskObjective]:
1113
+ """
1114
+ The :class:`TaskObjective` class.
1115
+
1116
+ :meta private:
1117
+ """
1118
+ return self._get_app_core_class("TaskObjective")
1119
+
1120
+ @property
1121
+ def TaskInputParameters(self) -> type[TaskInputParameters]:
1122
+ """
1123
+ The :class:`TaskInputParameters` class.
1124
+
1125
+ :meta private:
1126
+ """
1127
+ return self._get_app_core_class("TaskInputParameters")
1128
+
1129
+ @property
1130
+ def TaskOutputParameters(self) -> type[TaskOutputParameters]:
1131
+ """
1132
+ The :class:`TaskOutputParameters` class.
1133
+
1134
+ :meta private:
1135
+ """
1136
+ return self._get_app_core_class("TaskOutputParameters")
1137
+
1138
+ @property
1139
+ def ElementPropagation(self) -> type[ElementPropagation]:
1140
+ """
1141
+ The :class:`ElementPropagation` class.
1142
+
1143
+ :meta private:
1144
+ """
1145
+ return self._get_app_core_class("ElementPropagation")
1146
+
1147
+ @property
1148
+ def WorkflowTemplate(self) -> type[_WorkflowTemplate]:
1149
+ """
1150
+ The :class:`WorkflowTemplate` class.
1151
+
1152
+ :meta private:
1153
+ """
1154
+ return self._get_app_core_class("WorkflowTemplate")
1155
+
1156
+ @property
1157
+ def Workflow(self) -> type[_Workflow]:
1158
+ """
1159
+ The :class:`Workflow` class.
1160
+
1161
+ :meta private:
1162
+ """
1163
+ return self._get_app_core_class("Workflow")
1164
+
1165
+ @property
1166
+ def Jobscript(self) -> type[Jobscript]:
1167
+ """
1168
+ The :class:`Jobscript` class.
1169
+
1170
+ :meta private:
1171
+ """
1172
+ return self._get_app_core_class("Jobscript")
1173
+
1174
+ @property
1175
+ def Submission(self) -> type[Submission]:
1176
+ """
1177
+ The :class:`Submission` class.
1178
+
1179
+ :meta private:
1180
+ """
1181
+ return self._get_app_core_class("Submission")
1182
+
1183
+ @property
1184
+ def DirectPosix(self) -> type[DirectPosix]:
1185
+ """
1186
+ The :class:`DirectPosix` class.
1187
+
1188
+ :meta private:
1189
+ """
1190
+ return self._get_app_core_class("DirectPosix")
1191
+
1192
+ @property
1193
+ def DirectWindows(self) -> type[DirectWindows]:
1194
+ """
1195
+ The :class:`DirectWindows` class.
1196
+
1197
+ :meta private:
1198
+ """
1199
+ return self._get_app_core_class("DirectWindows")
1200
+
1201
+ @property
1202
+ def SGEPosix(self) -> type[SGEPosix]:
1203
+ """
1204
+ The :class:`SGEPosix` class.
1205
+
1206
+ :meta private:
1207
+ """
1208
+ return self._get_app_core_class("SGEPosix")
1209
+
1210
+ @property
1211
+ def SlurmPosix(self) -> type[SlurmPosix]:
1212
+ """
1213
+ The :class:`SlurmPosix` class.
1214
+
1215
+ :meta private:
1216
+ """
1217
+ return self._get_app_core_class("SlurmPosix")
1218
+
1219
+ @property
1220
+ def QueuedScheduler(self) -> type[QueuedScheduler]:
1221
+ """
1222
+ The :class:`QueuedScheduler` class.
1223
+
1224
+ :meta private:
1225
+ """
1226
+ return self._get_app_core_class("QueuedScheduler")
1227
+
1228
+ @property
1229
+ def make_workflow(self) -> _MakeWorkflow:
1230
+ """
1231
+ Generate a new workflow from a file or string containing a workflow
1232
+ template parametrisation.
1233
+
1234
+ Parameters
1235
+ ----------
1236
+ template_path_or_str: str
1237
+ Either a path to a template file in YAML or JSON format, or a YAML/JSON string.
1238
+ is_string: bool
1239
+ Determines if passing a file path or a string.
1240
+ template_format: str
1241
+ If specified, one of "json" or "yaml". This forces parsing from a particular
1242
+ format.
1243
+ path: str | Path
1244
+ The directory in which the workflow will be generated. The current directory
1245
+ if not specified.
1246
+ name: str
1247
+ The name of the workflow. If specified, the workflow directory will be `path`
1248
+ joined with `name`. If not specified the workflow template name will be used,
1249
+ in combination with a date-timestamp.
1250
+ overwrite: bool
1251
+ If True and the workflow directory (`path` + `name`) already exists, the
1252
+ existing directory will be overwritten.
1253
+ store: str
1254
+ The persistent store type to use.
1255
+ ts_fmt: str
1256
+ The datetime format to use for storing datetimes. Datetimes are always stored
1257
+ in UTC (because Numpy does not store time zone info), so this should not
1258
+ include a time zone name.
1259
+ ts_name_fmt: str
1260
+ The datetime format to use when generating the workflow name, where it
1261
+ includes a timestamp.
1262
+ store_kwargs: dict[str, object]
1263
+ Keyword arguments to pass to the store's `write_empty_workflow` method.
1264
+ variables: dict[str, str]
1265
+ String variables to substitute in `template_file_or_str`.
1266
+ status: bool
1267
+ If True, display a live status to track workflow creation progress.
1268
+
1269
+ Returns
1270
+ -------
1271
+ Workflow
1272
+ The created workflow.
1273
+ """
1274
+ return self.__get_app_func("make_workflow")
1275
+
1276
+ @property
1277
+ def make_demo_workflow(self) -> _MakeDemoWorkflow:
1278
+ """
1279
+ Generate a new workflow from a builtin demo workflow template.
1280
+
1281
+ Parameters
1282
+ ----------
1283
+ workflow_name: str
1284
+ Name of the demo workflow to make.
1285
+ template_format: str
1286
+ If specified, one of "json" or "yaml". This forces parsing from a particular
1287
+ format.
1288
+ path: str | Path
1289
+ The directory in which the workflow will be generated. The current directory
1290
+ if not specified.
1291
+ name: str
1292
+ The name of the workflow. If specified, the workflow directory will be `path`
1293
+ joined with `name`. If not specified the workflow template name will be used,
1294
+ in combination with a date-timestamp.
1295
+ overwrite: bool
1296
+ If True and the workflow directory (`path` + `name`) already exists, the
1297
+ existing directory will be overwritten.
1298
+ store: str
1299
+ The persistent store type to use.
1300
+ ts_fmt: str
1301
+ The datetime format to use for storing datetimes. Datetimes are always stored
1302
+ in UTC (because Numpy does not store time zone info), so this should not
1303
+ include a time zone name.
1304
+ ts_name_fmt: str
1305
+ The datetime format to use when generating the workflow name, where it
1306
+ includes a timestamp.
1307
+ store_kwargs: dict[str, object]
1308
+ Keyword arguments to pass to the store's `write_empty_workflow` method.
1309
+ variables: dict[str, str]
1310
+ String variables to substitute in the demo workflow template file.
1311
+ status: bool
1312
+ If True, display a live status to track workflow creation progress.
1313
+
1314
+ Returns
1315
+ -------
1316
+ Workflow
1317
+ The created workflow.
1318
+ """
1319
+ return self.__get_app_func("make_demo_workflow")
1320
+
1321
+ @property
1322
+ def make_and_submit_workflow(self) -> _MakeAndSubmitWorkflow:
1323
+ """
1324
+ Generate and submit a new workflow from a file or string containing a
1325
+ workflow template parametrisation.
1326
+
1327
+ Parameters
1328
+ ----------
1329
+
1330
+ template_path_or_str: str
1331
+ Either a path to a template file in YAML or JSON format, or a YAML/JSON string.
1332
+ is_string: str
1333
+ Determines whether `template_path_or_str` is a string or a file.
1334
+ template_format: str
1335
+ If specified, one of "json" or "yaml". This forces parsing from a particular
1336
+ format.
1337
+ path: str | Path
1338
+ The directory in which the workflow will be generated. The current directory
1339
+ if not specified.
1340
+ name: str
1341
+ The name of the workflow. If specified, the workflow directory will be `path`
1342
+ joined with `name`. If not specified the `WorkflowTemplate` name will be used,
1343
+ in combination with a date-timestamp.
1344
+ overwrite: bool
1345
+ If True and the workflow directory (`path` + `name`) already exists, the
1346
+ existing directory will be overwritten.
1347
+ store: str
1348
+ The persistent store to use for this workflow.
1349
+ ts_fmt: str
1350
+ The datetime format to use for storing datetimes. Datetimes are always stored
1351
+ in UTC (because Numpy does not store time zone info), so this should not
1352
+ include a time zone name.
1353
+ ts_name_fmt: str
1354
+ The datetime format to use when generating the workflow name, where it
1355
+ includes a timestamp.
1356
+ store_kwargs: dict[str, object]
1357
+ Keyword arguments to pass to the store's `write_empty_workflow` method.
1358
+ variables: dict[str, str]
1359
+ String variables to substitute in `template_file_or_str`.
1360
+ JS_parallelism: bool
1361
+ If True, allow multiple jobscripts to execute simultaneously. Raises if set to
1362
+ True but the store type does not support the `jobscript_parallelism` feature. If
1363
+ not set, jobscript parallelism will be used if the store type supports it.
1364
+ wait: bool
1365
+ If True, this command will block until the workflow execution is complete.
1366
+ add_to_known: bool
1367
+ If True, add the new submission to the known-submissions file, which is
1368
+ used by the `show` command to monitor current and recent submissions.
1369
+ return_idx: bool
1370
+ If True, return a dict representing the jobscript indices submitted for each
1371
+ submission.
1372
+ tasks: list[int]
1373
+ List of task indices to include in this submission. By default all tasks are
1374
+ included.
1375
+ cancel: bool
1376
+ Immediately cancel the submission. Useful for testing and benchmarking.
1377
+ status: bool
1378
+ If True, display a live status to track workflow creation and submission
1379
+ progress.
1380
+
1381
+ Returns
1382
+ -------
1383
+ Workflow
1384
+ The created workflow.
1385
+ dict[int, list[int]]
1386
+ Mapping of submission handles. If requested by ``return_idx`` parameter.
1387
+ """
1388
+ return self.__get_app_func("make_and_submit_workflow")
1389
+
1390
+ @property
1391
+ def make_and_submit_demo_workflow(self) -> _MakeAndSubmitDemoWorkflow:
1392
+ """
1393
+ Generate and submit a new demo workflow from a file or string containing a
1394
+ workflow template parametrisation.
1395
+
1396
+ Parameters
1397
+ ----------
1398
+ workflow_name: str
1399
+ Name of the demo workflow to make. **Required.**
1400
+ template_format: str
1401
+ If specified, one of "json" or "yaml". This forces parsing from a particular
1402
+ format.
1403
+ path: str | Path
1404
+ The directory in which the workflow will be generated. The current directory
1405
+ if not specified.
1406
+ name: str
1407
+ The name of the workflow. If specified, the workflow directory will be `path`
1408
+ joined with `name`. If not specified the `WorkflowTemplate` name will be used,
1409
+ in combination with a date-timestamp.
1410
+ overwrite: bool
1411
+ If True and the workflow directory (`path` + `name`) already exists, the
1412
+ existing directory will be overwritten.
1413
+ store: str
1414
+ The persistent store to use for this workflow.
1415
+ ts_fmt: str
1416
+ The datetime format to use for storing datetimes. Datetimes are always stored
1417
+ in UTC (because Numpy does not store time zone info), so this should not
1418
+ include a time zone name.
1419
+ ts_name_fmt: str
1420
+ The datetime format to use when generating the workflow name, where it
1421
+ includes a timestamp.
1422
+ store_kwargs: dict[str, object]
1423
+ Keyword arguments to pass to the store's `write_empty_workflow` method.
1424
+ variables: dict[str, str]
1425
+ String variables to substitute in the demo workflow template file.
1426
+ JS_parallelism: bool
1427
+ If True, allow multiple jobscripts to execute simultaneously. Raises if set to
1428
+ True but the store type does not support the `jobscript_parallelism` feature. If
1429
+ not set, jobscript parallelism will be used if the store type supports it.
1430
+ wait: bool
1431
+ If True, this command will block until the workflow execution is complete.
1432
+ add_to_known: bool
1433
+ If True, add the new submission to the known-submissions file, which is
1434
+ used by the `show` command to monitor current and recent submissions.
1435
+ return_idx: bool
1436
+ If True, return a dict representing the jobscript indices submitted for each
1437
+ submission.
1438
+ tasks: list[int]
1439
+ List of task indices to include in this submission. By default all tasks are
1440
+ included.
1441
+ cancel: bool
1442
+ Immediately cancel the submission. Useful for testing and benchmarking.
1443
+ status: bool
1444
+ If True, display a live status to track submission progress.
1445
+
1446
+ Returns
1447
+ -------
1448
+ Workflow
1449
+ The created workflow.
1450
+ dict[int, list[int]]
1451
+ Mapping of submission handles. If requested by ``return_idx`` parameter.
1452
+ """
1453
+ return self.__get_app_func("make_and_submit_demo_workflow")
1454
+
1455
+ @property
1456
+ def submit_workflow(self) -> _SubmitWorkflow:
1457
+ """
1458
+ Submit an existing workflow.
1459
+
1460
+ Parameters
1461
+ ----------
1462
+ workflow_path: str
1463
+ Path to an existing workflow
1464
+ JS_parallelism: bool
1465
+ If True, allow multiple jobscripts to execute simultaneously. Raises if set to
1466
+ True but the store type does not support the `jobscript_parallelism` feature. If
1467
+ not set, jobscript parallelism will be used if the store type supports it.
1468
+ tasks: list[int]
1469
+ List of task indices to include in this submission. By default all tasks are
1470
+ included.
1471
+
1472
+ Returns
1473
+ -------
1474
+ dict[int, list[int]]
1475
+ Mapping of submission handles. If requested by ``return_idx`` parameter.
1476
+ """
1477
+ return self.__get_app_func("submit_workflow")
1478
+
1479
+ @property
1480
+ def run_hpcflow_tests(self) -> _RunTests:
1481
+ """Run hpcflow test suite. This function is only available from derived apps."""
1482
+ return self.__get_app_func("run_hpcflow_tests")
1483
+
1484
+ @property
1485
+ def run_tests(self) -> _RunTests:
1486
+ """Run the test suite."""
1487
+ return self.__get_app_func("run_tests")
1488
+
1489
+ @property
1490
+ def get_OS_info(self) -> Callable[[], Mapping[str, str]]:
1491
+ """
1492
+ Get information about the operating system.
1493
+
1494
+ Returns
1495
+ -------
1496
+ dict[str, str]
1497
+ Key-value mapping containing system version information.
1498
+ """
1499
+ return self.__get_app_func("get_OS_info")
1500
+
1501
+ @property
1502
+ def get_shell_info(self) -> Callable[[str, bool], VersionInfo]:
1503
+ """
1504
+ Get information about a given shell and the operating system.
1505
+
1506
+ Parameters
1507
+ ----------
1508
+ shell_name: str
1509
+ One of the supported shell names.
1510
+ exclude_os: bool
1511
+ If True, exclude operating system information.
1512
+
1513
+ Returns
1514
+ -------
1515
+ VersionInfo
1516
+ The shell version information descriptor.
1517
+ """
1518
+ return self.__get_app_func("get_shell_info")
1519
+
1520
+ @property
1521
+ def get_known_submissions(self) -> _GetKnownSubmissions:
1522
+ """
1523
+ Retrieve information about active and recently inactive finished workflows.
1524
+
1525
+ This method removes workflows from the known-submissions file that are found to be
1526
+ inactive on this machine (according to the scheduler/process ID).
1527
+
1528
+ Parameters
1529
+ ----------
1530
+ max_recent: int
1531
+ Maximum number of inactive workflows to retrieve.
1532
+ no_update: bool
1533
+ If True, do not update the known-submissions file to set submissions that are
1534
+ now inactive.
1535
+ as_json: bool
1536
+ If True, only include JSON-compatible information. This will exclude the
1537
+ `submission` key, for instance.
267
1538
 
268
- self._log = AppLog(self)
269
- self._run_time_info: RunTimeInfo = RunTimeInfo(
270
- self.name,
271
- self.package_name,
272
- self.version,
273
- self.runtime_info_logger,
274
- )
1539
+ Returns
1540
+ -------
1541
+ list[KnownSubmissionItem]
1542
+ List of descriptions of known items.
1543
+ """
1544
+ return self.__get_app_func("get_known_submissions")
275
1545
 
276
- self._builtin_template_components = template_components or {}
1546
+ @property
1547
+ def show(self) -> _Show:
1548
+ """
1549
+ Show information about running workflows.
277
1550
 
278
- self._config = None # assigned on first access to `config` property
279
- self._config_files = {} # assigned on config load, keys are string absolute paths
1551
+ Parameters
1552
+ ----------
1553
+ max_recent: int
1554
+ Maximum number of inactive workflows to show.
1555
+ full: bool
1556
+ If True, provide more information; output may spans multiple lines for each
1557
+ workflow submission.
1558
+ no_update: bool
1559
+ If True, do not update the known-submissions file to remove workflows that are
1560
+ no longer running.
1561
+ """
1562
+ return self.__get_app_func("show")
280
1563
 
281
- # Set by `_load_template_components`:
282
- self._template_components = {}
283
- self._parameters = None
284
- self._command_files = None
285
- self._environments = None
286
- self._task_schemas = None
287
- self._scripts = None
1564
+ @property
1565
+ def show_legend(self) -> Callable[[], None]:
1566
+ """
1567
+ Output a legend for the jobscript-element and EAR states that are displayed
1568
+ by the `show` command.
1569
+ """
1570
+ return self.__get_app_func("show_legend")
288
1571
 
289
- self._app_attr_cache = {}
1572
+ @property
1573
+ def cancel(self) -> _Cancel:
1574
+ """
1575
+ Cancel the execution of a workflow submission.
290
1576
 
291
- # assigned on first access to respective properties
292
- self._user_data_dir = None
293
- self._user_cache_dir = None
294
- self._user_runtime_dir = None
295
- self._user_data_hostname_dir = None
296
- self._user_cache_hostname_dir = None
297
- self._demo_data_cache_dir = None
298
-
299
- def __getattr__(self, name):
1577
+ Parameters
1578
+ ----------
1579
+ workflow_ref: int | str | Path
1580
+ Which workflow to cancel, by ID or path.
1581
+ ref_is_path: str
1582
+ One of "``id``", "``path``" or "``assume-id``" (the default)
1583
+ """
1584
+ return self.__get_app_func("cancel")
1585
+
1586
+ def __getattr__(self, name: str):
300
1587
  if name in sdk_classes:
301
1588
  return self._get_app_core_class(name)
302
1589
  elif name in sdk_funcs:
303
- return self._get_app_func(name)
1590
+ return self.__get_app_func(name)
304
1591
  else:
305
1592
  raise AttributeError(f"module {__name__!r} has no attribute {name!r}.")
306
1593
 
307
1594
  def __repr__(self):
308
1595
  return f"{self.__class__.__name__}(name={self.name!r}, version={self.version!r})"
309
1596
 
310
- def _get_app_core_class(self, name: str) -> Type:
311
- if name not in self._app_attr_cache:
312
- obj_mod = import_module(sdk_classes[name])
313
- cls = getattr(obj_mod, name)
314
- if issubclass(cls, enum.Enum):
315
- sub_cls = cls
316
- else:
317
- dct = {}
318
- if hasattr(cls, "_app_attr"):
319
- dct = {getattr(cls, "_app_attr"): self}
320
- sub_cls = type(cls.__name__, (cls,), dct)
321
- if cls.__doc__:
322
- sub_cls.__doc__ = cls.__doc__.format(app_name=self.name)
323
- sub_cls.__module__ = self.module
324
- self._app_attr_cache[name] = sub_cls
325
-
326
- return self._app_attr_cache[name]
327
-
328
- def _get_app_func(self, name) -> Callable:
329
- if name not in self._app_attr_cache:
330
-
331
- def wrap_func(func):
332
- # this function avoids scope issues
333
- return lambda *args, **kwargs: func(*args, **kwargs)
334
-
335
- # retrieve the "private" function:
336
- sdk_func = getattr(self, f"_{name}")
337
-
338
- func = wrap_func(sdk_func)
339
- func = wraps(sdk_func)(func)
340
- if func.__doc__:
341
- func.__doc__ = func.__doc__.format(app_name=self.name)
342
- func.__module__ = self.module
343
- self._app_attr_cache[name] = func
344
-
345
- return self._app_attr_cache[name]
1597
+ def _get_app_core_class(self, name: str) -> type:
1598
+ if name in self.__app_type_cache:
1599
+ return self.__app_type_cache[name]
1600
+ obj_mod = import_module(sdk_classes[name])
1601
+ cls = getattr(obj_mod, name)
1602
+ if issubclass(cls, enum.Enum):
1603
+ sub_cls = cls
1604
+ else:
1605
+ dct: dict[str, Any] = {}
1606
+ if hasattr(cls, "_app_attr"):
1607
+ dct = {getattr(cls, "_app_attr"): self}
1608
+ sub_cls = type(cls.__name__, (cls,), dct)
1609
+ if cls.__doc__:
1610
+ sub_cls.__doc__ = cls.__doc__.format(app_name=self.name)
1611
+ sub_cls.__module__ = self.module
1612
+ self.__app_type_cache[name] = sub_cls
1613
+ return sub_cls
1614
+
1615
+ def __get_app_func(self, name: str) -> Callable[..., Any]:
1616
+ if name in self.__app_func_cache:
1617
+ return self.__app_func_cache[name]
1618
+
1619
+ def wrap_func(func) -> Callable[..., Any]:
1620
+ # this function avoids scope issues
1621
+ return lambda *args, **kwargs: func(*args, **kwargs)
1622
+
1623
+ # retrieve the "private" function:
1624
+ sdk_func = getattr(self, f"_{name}")
1625
+
1626
+ func = wrap_func(sdk_func)
1627
+ func = wraps(sdk_func)(func)
1628
+ if func.__doc__:
1629
+ func.__doc__ = func.__doc__.format(app_name=self.name)
1630
+ func.__module__ = self.module
1631
+ self.__app_func_cache[name] = func
1632
+ return func
346
1633
 
347
1634
  @property
348
1635
  def run_time_info(self) -> RunTimeInfo:
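Annotation: the rewritten attribute machinery above replaces the shared `_app_attr_cache` with two private caches but keeps the same lazy idiom: `__getattr__` fires only for unknown names, builds a wrapper once, and caches it, templating `{app_name}` into docstrings as it goes. A minimal runnable sketch of the function half of the pattern, using illustrative names (`MiniApp`, `_greet`) that are not part of the package:

    from functools import wraps
    from typing import Any, Callable

    class MiniApp:
        def __init__(self, name: str):
            self.name = name
            self._func_cache: dict[str, Callable[..., Any]] = {}

        def _greet(self, who: str) -> str:
            """Say hello from {app_name}."""
            return f"hello {who}"

        def __getattr__(self, attr: str) -> Any:
            # only called when normal attribute lookup fails
            if attr == "greet":
                return self._get_app_func(attr)
            raise AttributeError(f"no attribute {attr!r}.")

        def _get_app_func(self, attr: str) -> Callable[..., Any]:
            if attr in self._func_cache:
                return self._func_cache[attr]
            sdk_func = getattr(self, f"_{attr}")  # the "private" function
            func = wraps(sdk_func)(lambda *a, **kw: sdk_func(*a, **kw))
            if func.__doc__:
                # docstrings are templated with the concrete app name
                func.__doc__ = func.__doc__.format(app_name=self.name)
            self._func_cache[attr] = func
            return func

    app = MiniApp("hpcflow")
    assert app.greet("world") == "hello world"
    assert "hpcflow" in (app.greet.__doc__ or "")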
@@ -370,107 +1657,118 @@ class BaseApp(metaclass=Singleton):
370
1657
  TimeIt.active = bool(value)
371
1658
 
372
1659
  @property
373
- def template_components(self) -> Dict[str, ObjectList]:
1660
+ def template_components(self) -> TemplateComponents:
374
1661
  """
375
1662
  The template component data.
376
1663
  """
377
1664
  if not self.is_template_components_loaded:
1665
+ if BaseApp.__load_pending:
1666
+ return {}
1667
+ BaseApp.__load_pending = True
378
1668
  self._load_template_components()
1669
+ BaseApp.__load_pending = False
379
1670
  return self._template_components
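Annotation: the new `BaseApp.__load_pending` class flag guards against re-entrant loading: building template components can itself read `template_components` (task schemas refer back to parameters, for example), and the flag turns that inner access into an empty mapping instead of infinite recursion. A stripped-down sketch of the same guard, with hypothetical names; note the sketch adds a `try`/`finally` that the diff itself does not use:

    class LazyStore:
        _load_pending = False  # class-level re-entrancy flag

        def __init__(self):
            self._data = None

        @property
        def data(self) -> dict:
            if self._data is None:
                if LazyStore._load_pending:
                    # re-entered while loading: hand back a safe default
                    return {}
                LazyStore._load_pending = True
                try:
                    self._load()
                finally:
                    LazyStore._load_pending = False
            assert self._data is not None
            return self._data

        def _load(self) -> None:
            # a real loader may consult self.data for partially built state
            partial = self.data  # returns {} rather than recursing forever
            self._data = {**partial, "loaded": "yes"}

    store = LazyStore()
    assert store.data == {"loaded": "yes"}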
380
1671
 
381
- def _ensure_template_component(self, name) -> None:
1672
+ @property
1673
+ def _shared_data(self) -> Mapping[str, Any]:
1674
+ return cast("Mapping[str, Any]", self.template_components)
1675
+
1676
+ def _ensure_template_component(self, name: str) -> None:
382
1677
  """Invoked by access to individual template components (e.g. parameters)"""
383
1678
  if not getattr(self, f"_{name}"):
384
1679
  self._load_template_components(name)
385
1680
  else:
386
1681
  self.logger.debug(f"Template component {name!r} already loaded")
387
1682
 
388
- def load_template_components(self, warn=True) -> None:
1683
+ def load_template_components(self, warn: bool = True) -> None:
389
1684
  """Load all template component data, warning by default if already loaded."""
390
1685
  if warn and self.is_template_components_loaded:
391
1686
  warnings.warn("Template components already loaded; reloading now.")
392
1687
  self._load_template_components()
393
1688
 
394
- def reload_template_components(self, warn=True) -> None:
395
- """Reload all template component data, warning by default if not already
396
- loaded."""
1689
+ def reload_template_components(self, warn: bool = True) -> None:
1690
+ """
1691
+ Reload all template component data, warning by default if not already
1692
+ loaded.
1693
+ """
397
1694
  if warn and not self.is_template_components_loaded:
398
1695
  warnings.warn("Template components not loaded; loading now.")
399
1696
  self._load_template_components()
400
1697
 
401
1698
  @TimeIt.decorator
402
- def _load_template_components(self, *include) -> None:
403
- """Combine any builtin template components with user-defined template components
404
- and initialise list objects."""
405
-
1699
+ def _load_template_components(self, *include: str) -> None:
1700
+ """
1701
+ Combine any builtin template components with user-defined template components
1702
+ and initialise list objects.
1703
+ """
406
1704
  if not include or "task_schemas" in include:
407
1705
  # task schemas require all other template components to be loaded first
408
- include = [
1706
+ include = (
409
1707
  "parameters",
410
1708
  "command_files",
411
1709
  "environments",
412
1710
  "task_schemas",
413
1711
  "scripts",
414
- ]
1712
+ )
415
1713
 
416
1714
  self.logger.debug(f"Loading template components: {include!r}.")
417
1715
 
418
- self_tc = self._template_components
1716
+ self_tc: Any = self._template_components
419
1717
 
420
1718
  if "parameters" in include:
421
- params = self._builtin_template_components.get("parameters", [])
1719
+ params: list[Any] = self._builtin_template_components.get("parameters", [])
422
1720
  for path in self.config.parameter_sources:
423
1721
  params.extend(read_YAML_file(path))
424
- self_tc["parameters"] = self.ParametersList.from_json_like(
425
- params, shared_data=self_tc
426
- )
427
- self._parameters = self_tc["parameters"]
1722
+ param_list = self.ParametersList.from_json_like(params, shared_data=self_tc)
1723
+ self._template_components["parameters"] = param_list
1724
+ self._parameters = param_list
428
1725
 
429
1726
  if "command_files" in include:
430
- cmd_files = self._builtin_template_components.get("command_files", [])
1727
+ cmd_files: list[Any] = self._builtin_template_components.get(
1728
+ "command_files", []
1729
+ )
431
1730
  for path in self.config.command_file_sources:
432
1731
  cmd_files.extend(read_YAML_file(path))
433
- self_tc["command_files"] = self.CommandFilesList.from_json_like(
434
- cmd_files, shared_data=self_tc
435
- )
436
- self._command_files = self_tc["command_files"]
1732
+ cf_list = self.CommandFilesList.from_json_like(cmd_files, shared_data=self_tc)
1733
+ self._template_components["command_files"] = cf_list
1734
+ self._command_files = cf_list
437
1735
 
438
1736
  if "environments" in include:
439
1737
  envs = []
440
- builtin_envs = self._builtin_template_components.get("environments", [])
441
- for path in self.config.environment_sources:
442
- envs_i_lst = read_YAML_file(path)
443
- for env_j in envs_i_lst:
1738
+ builtin_envs: list[Any] = self._builtin_template_components.get(
1739
+ "environments", []
1740
+ )
1741
+ for e_path in self.config.environment_sources:
1742
+ for env_j in read_YAML_file(e_path):
444
1743
  for b_idx, builtin_env in enumerate(list(builtin_envs)):
445
1744
  # overwrite builtin envs with user-supplied:
446
1745
  if builtin_env["name"] == env_j["name"]:
447
1746
  builtin_envs.pop(b_idx)
448
1747
  envs.append(env_j)
449
1748
  envs = builtin_envs + envs
450
- self_tc["environments"] = self.EnvironmentsList.from_json_like(
451
- envs, shared_data=self_tc
452
- )
453
- self._environments = self_tc["environments"]
1749
+ env_list = self.EnvironmentsList.from_json_like(envs, shared_data=self_tc)
1750
+ self._template_components["environments"] = env_list
1751
+ self._environments = env_list
454
1752
 
455
1753
  if "task_schemas" in include:
456
- schemas = self._builtin_template_components.get("task_schemas", [])
1754
+ schemas: list[Any] = self._builtin_template_components.get("task_schemas", [])
457
1755
  for path in self.config.task_schema_sources:
458
1756
  schemas.extend(read_YAML_file(path))
459
- self_tc["task_schemas"] = self.TaskSchemasList.from_json_like(
460
- schemas, shared_data=self_tc
461
- )
462
- self._task_schemas = self_tc["task_schemas"]
1757
+ ts_list = self.TaskSchemasList.from_json_like(schemas, shared_data=self_tc)
1758
+ self._template_components["task_schemas"] = ts_list
1759
+ self._task_schemas = ts_list
463
1760
 
464
1761
  if "scripts" in include:
465
- self_tc["scripts"] = self._load_scripts()
466
- self._scripts = self_tc["scripts"]
1762
+ scripts = self._load_scripts()
1763
+ self._template_components["scripts"] = scripts
1764
+ self._scripts = scripts
467
1765
 
468
1766
  self.logger.info(f"Template components loaded ({include!r}).")
469
1767
 
470
1768
  @classmethod
471
1769
  def load_builtin_template_component_data(
472
- cls, package
473
- ) -> Dict[str, Union[List, Dict]]:
1770
+ cls, package: ModuleType | str
1771
+ ) -> BasicTemplateComponents:
474
1772
  """
475
1773
  Load the template component data built into the package.
476
1774
  This is as opposed to the template components defined by users.
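Annotation: in `_load_template_components` above, user-supplied environments replace builtin environments of the same name before the two lists are concatenated. A self-contained sketch of that override-by-name merge, mirroring the diff's `enumerate(list(builtin_envs))` snapshot idiom (the function and field names here are illustrative):

    def merge_environments(builtin: list, user_sources: list) -> list:
        """Overwrite builtin envs with user-supplied envs of the same name."""
        builtin = list(builtin)  # do not mutate the caller's list
        merged = []
        for source in user_sources:  # one list per user YAML file
            for env in source:
                for idx, b_env in enumerate(list(builtin)):
                    if b_env["name"] == env["name"]:
                        builtin.pop(idx)
                merged.append(env)
        return builtin + merged

    builtin = [{"name": "python", "v": 1}, {"name": "matlab", "v": 1}]
    user = [[{"name": "python", "v": 2}]]
    result = merge_environments(builtin, user)
    assert result == [{"name": "matlab", "v": 1}, {"name": "python", "v": 2}]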
@@ -478,55 +1776,57 @@ class BaseApp(metaclass=Singleton):
478
1776
  SDK_logger.info(
479
1777
  f"Loading built-in template component data for package: {package!r}."
480
1778
  )
481
- components = {}
1779
+ components: BasicTemplateComponents = {}
482
1780
  for comp_type in TEMPLATE_COMP_TYPES:
483
- resource = f"{comp_type}.yaml"
484
- fh = resources.files(package).joinpath(resource).open("rt")
485
- SDK_logger.info(f"Parsing file as YAML: {fh.name!r}")
486
- comp_dat = fh.read()
487
- components[comp_type] = read_YAML_str(comp_dat)
488
- fh.close()
1781
+ with open_text_resource(package, f"{comp_type}.yaml") as fh:
1782
+ SDK_logger.info(f"Parsing file as YAML: {fh.name!r}")
1783
+ components[comp_type] = read_YAML_str(fh.read())
489
1784
 
490
1785
  return components
491
1786
 
492
1787
  @property
493
- def parameters(self) -> get_app_attribute("ParametersList"):
1788
+ def parameters(self) -> _ParametersList:
494
1789
  """
495
1790
  The known template parameters.
496
1791
  """
497
1792
  self._ensure_template_component("parameters")
1793
+ assert self._parameters is not None
498
1794
  return self._parameters
499
1795
 
500
1796
  @property
501
- def command_files(self) -> get_app_attribute("CommandFilesList"):
1797
+ def command_files(self) -> _CommandFilesList:
502
1798
  """
503
1799
  The known template command files.
504
1800
  """
505
1801
  self._ensure_template_component("command_files")
1802
+ assert self._command_files is not None
506
1803
  return self._command_files
507
1804
 
508
1805
  @property
509
- def envs(self) -> get_app_attribute("EnvironmentsList"):
1806
+ def envs(self) -> _EnvironmentsList:
510
1807
  """
511
1808
  The known template execution environments.
512
1809
  """
513
1810
  self._ensure_template_component("environments")
1811
+ assert self._environments is not None
514
1812
  return self._environments
515
1813
 
516
1814
  @property
517
- def scripts(self):
1815
+ def scripts(self) -> dict[str, Path]:
518
1816
  """
519
1817
  The known template scripts.
520
1818
  """
521
1819
  self._ensure_template_component("scripts")
1820
+ assert self._scripts is not None
522
1821
  return self._scripts
523
1822
 
524
1823
  @property
525
- def task_schemas(self) -> get_app_attribute("TaskSchemasList"):
1824
+ def task_schemas(self) -> _TaskSchemasList:
526
1825
  """
527
1826
  The known template task schemas.
528
1827
  """
529
1828
  self._ensure_template_component("task_schemas")
1829
+ assert self._task_schemas is not None
530
1830
  return self._task_schemas
531
1831
 
532
1832
  @property
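Annotation: each accessor in this hunk pairs `_ensure_template_component` with a new `assert ... is not None`; the assert does no meaningful runtime work, it narrows the Optional attribute for static type checkers, since the ensure call guarantees the value is populated. The lazy-property pattern in miniature (names hypothetical):

    from typing import Optional

    class Components:
        def __init__(self):
            self._parameters: Optional[list] = None

        def _ensure(self, name: str) -> None:
            if not getattr(self, f"_{name}"):
                setattr(self, f"_{name}", self._load(name))

        def _load(self, name: str) -> list:
            return [f"{name}-a", f"{name}-b"]  # stand-in for YAML loading

        @property
        def parameters(self) -> list:
            self._ensure("parameters")
            # narrows Optional[list] to list for type checkers
            assert self._parameters is not None
            return self._parameters

    c = Components()
    assert c.parameters == ["parameters-a", "parameters-b"]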
@@ -597,10 +1897,11 @@ class BaseApp(metaclass=Singleton):
597
1897
  """
598
1898
  if not self.is_config_loaded:
599
1899
  self.load_config()
1900
+ assert self._config
600
1901
  return self._config
601
1902
 
602
1903
  @property
603
- def scheduler_lookup(self):
1904
+ def scheduler_lookup(self) -> dict[tuple[str, str], type[Scheduler]]:
604
1905
  """
605
1906
  The scheduler mapping.
606
1907
  """
@@ -611,14 +1912,20 @@ class BaseApp(metaclass=Singleton):
611
1912
  ("slurm", "posix"): self.SlurmPosix,
612
1913
  }
613
1914
 
614
- def get_scheduler(self, scheduler_name, os_name, scheduler_args=None):
1915
+ def get_scheduler(
1916
+ self,
1917
+ scheduler_name: str,
1918
+ os_name: str,
1919
+ scheduler_args: dict[str, Any] | None = None,
1920
+ ) -> Scheduler:
615
1921
  """Get an arbitrary scheduler object."""
616
- scheduler_args = scheduler_args or {}
1922
+ scheduler_kwargs = scheduler_args or {}
617
1923
 
618
1924
  os_name = os_name.lower()
619
1925
  if os_name == "nt" and "_" in scheduler_name:
620
1926
  # e.g. WSL on windows uses *_posix
621
1927
  key = tuple(scheduler_name.split("_"))
1928
+ assert len(key) == 2
622
1929
  else:
623
1930
  key = (scheduler_name.lower(), os_name)
624
1931
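Annotation: `get_scheduler` dispatches on a `(scheduler_name, os_name)` key, and the new `assert len(key) == 2` documents that WSL-style names such as `slurm_posix` split into exactly two parts when requested from Windows. A runnable sketch of the dispatch with stand-in scheduler classes:

    class DirectPosix:  # stand-ins for the real scheduler classes
        def __init__(self, **kwargs):
            self.kwargs = kwargs

    class SlurmPosix(DirectPosix):
        pass

    LOOKUP = {("direct", "posix"): DirectPosix, ("slurm", "posix"): SlurmPosix}

    def get_scheduler(scheduler_name: str, os_name: str, **kwargs):
        os_name = os_name.lower()
        if os_name == "nt" and "_" in scheduler_name:
            # e.g. "slurm_posix" requested from Windows (WSL)
            key = tuple(scheduler_name.split("_"))
        else:
            key = (scheduler_name.lower(), os_name)
        try:
            scheduler_cls = LOOKUP[key]
        except KeyError:
            raise ValueError(f"Unsupported combination: {key!r}") from None
        return scheduler_cls(**kwargs)

    assert isinstance(get_scheduler("slurm", "posix"), SlurmPosix)
    assert isinstance(get_scheduler("slurm_posix", "nt"), SlurmPosix)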
 
@@ -628,28 +1935,28 @@ class BaseApp(metaclass=Singleton):
628
1935
  raise ValueError(
629
1936
  f"Unsupported combination of scheduler and operating system: {key!r}"
630
1937
  )
631
- return scheduler_cls(**scheduler_args)
1938
+ return scheduler_cls(**scheduler_kwargs)
632
1939
 
633
- def get_OS_supported_schedulers(self):
634
- """Retrieve a list of schedulers that are supported in principle by this operating
1940
+ def get_OS_supported_schedulers(self) -> Iterator[str]:
1941
+ """
1942
+ Retrieve a list of schedulers that are supported in principle by this operating
635
1943
  system.
636
1944
 
637
1945
  This does not necessarily mean all the returned schedulers are available on this
638
1946
  system.
639
-
640
1947
  """
641
- out = []
642
1948
  for k in self.scheduler_lookup:
643
1949
  if os.name == "nt" and k == ("direct", "posix"):
644
1950
  # this is valid for WSL on Windows
645
- out.append("_".join(k))
1951
+ yield "_".join(k)
646
1952
  elif k[1] == os.name:
647
- out.append(k[0])
648
- return out
1953
+ yield k[0]
649
1954
 
650
1955
  def perm_error_retry(self):
651
- """Return a decorator for retrying functions on permission and OS errors that
652
- might be associated with cloud-storage desktop sync. engine operations."""
1956
+ """
1957
+ Return a decorator for retrying functions on permission and OS errors that
1958
+ might be associated with cloud-storage desktop sync-engine operations.
1959
+ """
653
1960
  return retry(
654
1961
  (PermissionError, OSError),
655
1962
  tries=10,
@@ -696,7 +2003,6 @@ class BaseApp(metaclass=Singleton):
696
2003
  We segregate by hostname to account for the case where multiple machines might
697
2004
  use the same shared file system.
698
2005
  """
699
-
700
2006
  # This might need to cover e.g. multiple login nodes, as described in the
701
2007
  # config file:
702
2008
  if self._user_data_hostname_dir is None:
@@ -720,13 +2026,15 @@ class BaseApp(metaclass=Singleton):
720
2026
  return self.user_data_dir
721
2027
 
722
2028
  def _ensure_user_runtime_dir(self) -> Path:
723
- """Generate a user runtime directory for this machine in which we can create
2029
+ """
2030
+ Generate a user runtime directory for this machine in which we can create
724
2031
  semi-persistent temporary files.
725
2032
 
726
- Note: unlike `_ensure_user_data_dir`, and `_ensure_user_data_hostname_dir`, this
2033
+ Note
2034
+ ----
2035
+ Unlike `_ensure_user_data_dir`, and `_ensure_user_data_hostname_dir`, this
727
2036
  method is not invoked on config load, because it might need to be created after
728
2037
  each reboot, and it is not routinely used.
729
-
730
2038
  """
731
2039
  if not self.user_runtime_dir.exists():
732
2040
  self.user_runtime_dir.mkdir(parents=True)
@@ -752,8 +2060,10 @@ class BaseApp(metaclass=Singleton):
752
2060
  return self.demo_data_cache_dir
753
2061
 
754
2062
  def _ensure_user_data_hostname_dir(self) -> Path:
755
- """Ensure a user data directory for this machine exists (used by the helper
756
- process and the known-submissions file)."""
2063
+ """
2064
+ Ensure a user data directory for this machine exists (used by the helper
2065
+ process and the known-submissions file).
2066
+ """
757
2067
  if not self.user_data_hostname_dir.exists():
758
2068
  self.user_data_hostname_dir.mkdir(parents=True)
759
2069
  self.logger.info(
@@ -771,46 +2081,48 @@ class BaseApp(metaclass=Singleton):
771
2081
  )
772
2082
  return self.user_cache_hostname_dir
773
2083
 
774
- def clear_user_runtime_dir(self):
2084
+ def clear_user_runtime_dir(self) -> None:
775
2085
  """Delete the contents of the user runtime directory."""
776
2086
  if self.user_runtime_dir.exists():
777
2087
  shutil.rmtree(self.user_runtime_dir)
778
2088
  self._ensure_user_runtime_dir()
779
2089
 
780
- def clear_user_cache_dir(self):
2090
+ def clear_user_cache_dir(self) -> None:
781
2091
  """Delete the contents of the cache directory."""
782
2092
  if self.user_cache_dir.exists():
783
2093
  shutil.rmtree(self.user_cache_dir)
784
2094
  self._ensure_user_cache_dir()
785
2095
 
786
- def clear_demo_data_cache_dir(self):
2096
+ def clear_demo_data_cache_dir(self) -> None:
787
2097
  """Delete the contents of the example data files cache directory."""
788
2098
  if self.demo_data_cache_dir.exists():
789
2099
  shutil.rmtree(self.demo_data_cache_dir)
790
2100
  self._ensure_demo_data_cache_dir()
791
2101
 
792
- def clear_user_cache_hostname_dir(self):
2102
+ def clear_user_cache_hostname_dir(self) -> None:
793
2103
  """Delete the contents of the hostname-scoped cache directory."""
794
2104
  if self.user_cache_hostname_dir.exists():
795
2105
  shutil.rmtree(self.user_cache_hostname_dir)
796
2106
  self._ensure_user_cache_hostname_dir()
797
2107
 
798
2108
  @TimeIt.decorator
799
- def _load_config(self, config_dir, config_key, **overrides) -> None:
2109
+ def _load_config(
2110
+ self, config_dir: PathLike, config_key: str | None, **overrides
2111
+ ) -> None:
800
2112
  self.logger.info("Loading configuration.")
801
2113
  self._ensure_user_data_dir()
802
- config_dir = ConfigFile._resolve_config_dir(
2114
+ resolved_config_dir = ConfigFile._resolve_config_dir(
803
2115
  config_opt=self.config_options,
804
2116
  logger=self.config_logger,
805
2117
  directory=config_dir,
806
2118
  )
807
- if str(config_dir) not in self._config_files:
808
- self._config_files[str(config_dir)] = ConfigFile(
809
- directory=config_dir,
2119
+ if str(resolved_config_dir) not in self._config_files:
2120
+ self._config_files[str(resolved_config_dir)] = ConfigFile(
2121
+ directory=resolved_config_dir,
810
2122
  logger=self.config_logger,
811
2123
  config_options=self.config_options,
812
2124
  )
813
- file = self._config_files[str(config_dir)]
2125
+ file = self._config_files[str(resolved_config_dir)]
814
2126
  self._config = Config(
815
2127
  app=self,
816
2128
  config_file=file,
@@ -830,26 +2142,35 @@ class BaseApp(metaclass=Singleton):
830
2142
 
831
2143
  def load_config(
832
2144
  self,
833
- config_dir=None,
834
- config_key=None,
835
- warn=True,
2145
+ config_dir: PathLike = None,
2146
+ config_key: str | None = None,
2147
+ warn: bool = True,
836
2148
  **overrides,
837
2149
  ) -> None:
838
2150
  """
839
2151
  Load the user's configuration.
2152
+
2153
+ Parameters
2154
+ ----------
2155
+ config_dir:
2156
+ Directory containing the configuration, if not default.
2157
+ config_key:
2158
+ Key to the configuration within the config file.
2159
+ warn:
2160
+ Whether to warn if a configuration is already loaded.
840
2161
  """
841
2162
  if warn and self.is_config_loaded:
842
2163
  warnings.warn("Configuration is already loaded; reloading.")
843
2164
  self._load_config(config_dir, config_key, **overrides)
844
2165
 
845
- def unload_config(self):
2166
+ def unload_config(self) -> None:
846
2167
  """
847
2168
  Discard any loaded configuration.
848
2169
  """
849
2170
  self._config_files = {}
850
2171
  self._config = None
851
2172
 
852
- def get_config_path(self, config_dir=None):
2173
+ def get_config_path(self, config_dir: PathLike = None) -> Path:
853
2174
  """Return the full path to the config file, without loading the config."""
854
2175
  config_dir = ConfigFile._resolve_config_dir(
855
2176
  config_opt=self.config_options,
@@ -858,7 +2179,7 @@ class BaseApp(metaclass=Singleton):
858
2179
  )
859
2180
  return ConfigFile.get_config_file_path(config_dir)
860
2181
 
861
- def _delete_config_file(self, config_dir=None):
2182
+ def _delete_config_file(self, config_dir: PathLike = None) -> None:
862
2183
  """Delete the config file."""
863
2184
  config_path = self.get_config_path(config_dir=config_dir)
864
2185
  self.logger.info(f"deleting config file: {str(config_path)!r}.")
@@ -866,13 +2187,13 @@ class BaseApp(metaclass=Singleton):
866
2187
 
867
2188
  def reset_config(
868
2189
  self,
869
- config_dir=None,
870
- config_key=None,
871
- warn=True,
2190
+ config_dir: PathLike = None,
2191
+ config_key: str | None = None,
2192
+ warn: bool = True,
872
2193
  **overrides,
873
2194
  ) -> None:
874
2195
  """Reset the config file to defaults, and reload the config."""
875
- self.logger.info(f"resetting config")
2196
+ self.logger.info("resetting config")
876
2197
  self._delete_config_file(config_dir=config_dir)
877
2198
  self._config = None
878
2199
  self._config_files = {}
@@ -880,9 +2201,9 @@ class BaseApp(metaclass=Singleton):
880
2201
 
881
2202
  def reload_config(
882
2203
  self,
883
- config_dir=None,
884
- config_key=None,
885
- warn=True,
2204
+ config_dir: PathLike = None,
2205
+ config_key: str | None = None,
2206
+ warn: bool = True,
886
2207
  **overrides,
887
2208
  ) -> None:
888
2209
  """
@@ -896,59 +2217,61 @@ class BaseApp(metaclass=Singleton):
896
2217
  self._load_config(config_dir, config_key, **overrides)
897
2218
 
898
2219
  @TimeIt.decorator
899
- def _load_scripts(self):
900
-
2220
+ def _load_scripts(self) -> dict[str, Path]:
2221
+ """
2222
+ Discover where the built-in scripts all are.
2223
+ """
901
2224
  # TODO: load custom directories / custom functions (via decorator)
902
2225
  scripts_package = f"{self.package_name}.{self.scripts_dir}"
903
2226
 
904
- ctx = resources.as_file(resources.files(scripts_package))
905
-
906
- scripts = {}
907
- with ctx as path:
908
- for dirpath, _, filenames in os.walk(path):
909
- dirpath = Path(dirpath)
910
- if dirpath.name == "__pycache__":
911
- continue
912
- for filename in filenames:
913
- if filename == "__init__.py":
2227
+ scripts: dict[str, Path] = {}
2228
+ try:
2229
+ with get_file_context(scripts_package) as path:
2230
+ for dirpath, _, filenames in os.walk(path):
2231
+ dirpath_ = Path(dirpath)
2232
+ if dirpath_.name == "__pycache__":
914
2233
  continue
915
- val = dirpath.joinpath(filename)
916
- key = str(val.relative_to(path).as_posix())
917
- scripts[key] = Path(val)
918
-
2234
+ for filename in filenames:
2235
+ if filename == "__init__.py":
2236
+ continue
2237
+ val = dirpath_.joinpath(filename)
2238
+ scripts[val.relative_to(path).as_posix()] = Path(val)
2239
+ except ModuleNotFoundError:
2240
+ self.logger.exception("failed to find scripts package")
2241
+ SDK_logger.info(f"loaded {len(scripts)} scripts from {scripts_package}")
919
2242
  return scripts
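Annotation: the rewritten `_load_scripts` walks the packaged scripts directory, skips `__pycache__` and `__init__.py`, and keys each script by its package-relative POSIX path. `get_file_context` is a helper from this package; the sketch below substitutes the equivalent standard-library calls, as a best guess at the helper's behaviour:

    import os
    from importlib import resources
    from pathlib import Path

    def load_scripts(scripts_package: str) -> dict:
        """Map package-relative POSIX paths to concrete script file paths."""
        scripts: dict = {}
        with resources.as_file(resources.files(scripts_package)) as root:
            for dirpath, _, filenames in os.walk(root):
                dirpath_ = Path(dirpath)
                if dirpath_.name == "__pycache__":
                    continue
                for filename in filenames:
                    if filename == "__init__.py":
                        continue
                    val = dirpath_ / filename
                    scripts[val.relative_to(root).as_posix()] = val
        return scripts

    # e.g. load_scripts("hpcflow.data.scripts") -> {"some_script.py": Path(...)}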
920
2243
 
921
- def _get_demo_workflows(self) -> Dict[str, Path]:
2244
+ def _get_demo_workflows(self) -> dict[str, Path]:
922
2245
  """Get all builtin demo workflow template file paths."""
923
- templates = {}
2246
+ templates: dict[str, Path] = {}
924
2247
  pkg = f"{self.package_name}.{self.workflows_dir}"
925
- files = resources.files(pkg).iterdir()
926
- for i in files:
927
- if i.suffix in (".yaml", ".yml", ".json", ".jsonc"):
928
- templates[i.stem] = i
2248
+ for file in resources.files(pkg).iterdir():
2249
+ p = Path(str(file))
2250
+ if p.exists() and p.suffix in (".yaml", ".yml", ".json", ".jsonc"):
2251
+ templates[p.stem] = p
929
2252
  return templates
930
2253
 
931
- def list_demo_workflows(self) -> Tuple[str]:
2254
+ def list_demo_workflows(self) -> tuple[str, ...]:
932
2255
  """Return a list of demo workflow templates included in the app."""
933
- return tuple(sorted(self._get_demo_workflows().keys()))
2256
+ return tuple(sorted(self._get_demo_workflows()))
934
2257
 
935
2258
  @contextmanager
936
2259
  def get_demo_workflow_template_file(
937
2260
  self, name: str, doc: bool = True, delete: bool = True
938
- ) -> Path:
939
- """Context manager to get a (temporary) file path to an included demo workflow
2261
+ ) -> Iterator[Path]:
2262
+ """
2263
+ Context manager to get a (temporary) file path to an included demo workflow
940
2264
  template.
941
2265
 
942
2266
  Parameters
943
2267
  ----------
944
- name
2268
+ name:
945
2269
  Name of the builtin demo workflow template whose file path is to be retrieved.
946
- doc
2270
+ doc:
947
2271
  If False, the yielded path will be to a file without the `doc` attribute (if
948
2272
  originally present).
949
- delete
2273
+ delete:
950
2274
  If True, remove the temporary file on exit.
951
-
952
2275
  """
953
2276
  tmp_dir = self._ensure_user_runtime_dir()
954
2277
  builtin_path = self._get_demo_workflows()[name]
@@ -961,12 +2284,12 @@ class BaseApp(metaclass=Singleton):
961
2284
  # load the file, modify, then dump to temp location:
962
2285
  if builtin_path.suffix in (".yaml", ".yml"):
963
2286
  # use round-trip loader to preserve comments:
964
- data = read_YAML_file(builtin_path, typ="rt", variables=False)
2287
+ data = read_YAML_file(builtin_path, typ="rt", variables={})
965
2288
  data.pop("doc", None)
966
2289
  write_YAML_file(data, path, typ="rt")
967
2290
 
968
2291
  elif builtin_path.suffix in (".json", ".jsonc"):
969
- data = read_JSON_file(builtin_path, variables=False)
2292
+ data = read_JSON_file(builtin_path, variables={})
970
2293
  data.pop("doc", None)
971
2294
  write_JSON_file(data, path)
972
2295
 
@@ -976,9 +2299,10 @@ class BaseApp(metaclass=Singleton):
976
2299
  path.unlink()
977
2300
 
978
2301
  def copy_demo_workflow(
979
- self, name: str, dst: Optional[PathLike] = None, doc: bool = True
2302
+ self, name: str, dst: PathLike | None = None, doc: bool = True
980
2303
  ) -> str:
981
- """Copy a builtin demo workflow to the specified location.
2304
+ """
2305
+ Copy a builtin demo workflow to the specified location.
982
2306
 
983
2307
  Parameters
984
2308
  ----------
@@ -991,7 +2315,6 @@ class BaseApp(metaclass=Singleton):
991
2315
  If False, the copied workflow template file will not include the `doc`
992
2316
  attribute (if originally present).
993
2317
  """
994
-
995
2318
  dst = dst or Path(".")
996
2319
  with self.get_demo_workflow_template_file(name, doc=doc) as src:
997
2320
  shutil.copy2(src, dst) # copies metadata, and `dst` can be a dir
@@ -999,15 +2322,16 @@ class BaseApp(metaclass=Singleton):
999
2322
  return src.name
1000
2323
 
1001
2324
  def show_demo_workflow(self, name: str, syntax: bool = True, doc: bool = False):
1002
- """Print the contents of a builtin demo workflow template file.
2325
+ """
2326
+ Print the contents of a builtin demo workflow template file.
1003
2327
 
1004
2328
  Parameters
1005
2329
  ----------
1006
- name
2330
+ name:
1007
2331
  The name of the demo workflow file to print.
1008
- syntax
2332
+ syntax:
1009
2333
  If True, use rich to syntax-highlight the output.
1010
- doc
2334
+ doc:
1011
2335
  If False, the printed workflow template file contents will not include the
1012
2336
  `doc` attribute (if originally present).
1013
2337
  """
@@ -1017,42 +2341,43 @@ class BaseApp(metaclass=Singleton):
1017
2341
 
1018
2342
  if syntax:
1019
2343
  fmt = DEMO_WK_FORMATS[path.suffix]
1020
- contents = Syntax(contents, fmt)
1021
- console = Console()
1022
- console.print(contents)
2344
+ Console().print(Syntax(contents, fmt))
1023
2345
  else:
1024
2346
  print(contents)
1025
2347
 
1026
- def load_demo_workflow(self, name: str) -> get_app_attribute("WorkflowTemplate"):
2348
+ def load_demo_workflow(self, name: str) -> _WorkflowTemplate:
1027
2349
  """Load a WorkflowTemplate object from a builtin demo template file."""
1028
2350
  with self.get_demo_workflow_template_file(name) as path:
1029
2351
  return self.WorkflowTemplate.from_file(path)
1030
2352
 
1031
- def template_components_from_json_like(self, json_like):
2353
+ def template_components_from_json_like(
2354
+ self, json_like: dict[str, dict]
2355
+ ) -> TemplateComponents:
1032
2356
  """
1033
- Get template components from a (simply parsed) JSOM document.
2357
+ Get template components from a (simply parsed) JSON document.
1034
2358
  """
1035
- cls_lookup = {
1036
- "parameters": self.ParametersList,
1037
- "command_files": self.CommandFilesList,
1038
- "environments": self.EnvironmentsList,
1039
- "task_schemas": self.TaskSchemasList,
1040
- }
1041
- tc = {}
1042
- for k, v in cls_lookup.items():
1043
- tc_k = v.from_json_like(
1044
- json_like.get(k, {}),
1045
- shared_data=tc,
1046
- is_hashed=True,
1047
- )
1048
- tc[k] = tc_k
2359
+ tc: TemplateComponents = {}
2360
+ sd: Mapping[str, Any] = tc
2361
+ tc["parameters"] = self.ParametersList.from_json_like(
2362
+ json_like.get("parameters", {}), shared_data=sd, is_hashed=True
2363
+ )
2364
+ tc["command_files"] = self.CommandFilesList.from_json_like(
2365
+ json_like.get("command_files", {}), shared_data=sd, is_hashed=True
2366
+ )
2367
+ tc["environments"] = self.EnvironmentsList.from_json_like(
2368
+ json_like.get("environments", {}), shared_data=sd, is_hashed=True
2369
+ )
2370
+ tc["task_schemas"] = self.TaskSchemasList.from_json_like(
2371
+ json_like.get("task_schemas", {}), shared_data=sd, is_hashed=True
2372
+ )
1049
2373
  return tc
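Annotation: the unrolled construction above makes an ordering constraint visible: `sd` aliases `tc`, and each list is inserted before the next is parsed, so later components (ultimately task schemas) can resolve references to earlier ones through `shared_data`. A toy version of the alias-and-fill pattern, with simplified data shapes:

    from typing import Any, Mapping

    def build_components(doc: dict) -> dict:
        tc: dict = {}
        sd: Mapping[str, Any] = tc  # alias: later parses see earlier results

        tc["parameters"] = list(doc.get("parameters", []))
        # "task_schemas" may reference parameters via the shared mapping:
        tc["task_schemas"] = [
            {"objective": t, "known_param": t in sd["parameters"]}
            for t in doc.get("task_schemas", [])
        ]
        return tc

    out = build_components({"parameters": ["p1"], "task_schemas": ["p1", "p2"]})
    assert out["task_schemas"][0]["known_param"] is True
    assert out["task_schemas"][1]["known_param"] is False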
1050
2374
 
1051
- def get_parameter_task_schema_map(self) -> Dict[str, List[List]]:
1052
- """Get a dict mapping parameter types to task schemas that input/output each
1053
- parameter."""
1054
-
1055
- param_map = {}
2375
+ def get_parameter_task_schema_map(self) -> dict[str, list[list[str]]]:
2376
+ """
2377
+ Get a dict mapping parameter types to task schemas that input/output each
2378
+ parameter.
2379
+ """
2380
+ param_map: dict[str, list[list[str]]] = {}
1056
2381
  for ts in self.task_schemas:
1057
2382
  for inp in ts.inputs:
1058
2383
  if inp.parameter.typ not in param_map:
@@ -1065,7 +2390,7 @@ class BaseApp(metaclass=Singleton):
1065
2390
 
1066
2391
  return param_map
1067
2392
 
1068
- def get_info(self) -> Dict[str, Any]:
2393
+ def get_info(self) -> dict[str, Any]:
1069
2394
  """
1070
2395
  Get miscellaneous runtime system information.
1071
2396
  """
@@ -1077,7 +2402,7 @@ class BaseApp(metaclass=Singleton):
1077
2402
  }
1078
2403
 
1079
2404
  @property
1080
- def known_subs_file_path(self):
2405
+ def known_subs_file_path(self) -> Path:
1081
2406
  """
1082
2407
  The path to the file describing known submissions.
1083
2408
  """
@@ -1093,7 +2418,7 @@ class BaseApp(metaclass=Singleton):
1093
2418
  wk_path,
1094
2419
  start_time,
1095
2420
  end_time,
1096
- ):
2421
+ ) -> str:
1097
2422
  line = [
1098
2423
  str(local_id),
1099
2424
  workflow_id,
@@ -1106,7 +2431,7 @@ class BaseApp(metaclass=Singleton):
1106
2431
  ]
1107
2432
  return self._known_subs_file_sep.join(line) + "\n"
1108
2433
 
1109
- def _parse_known_submissions_line(self, line: str) -> Dict:
2434
+ def _parse_known_submissions_line(self, line: str) -> KnownSubmission:
1110
2435
  (
1111
2436
  local_id,
1112
2437
  workflow_id,
@@ -1117,7 +2442,7 @@ class BaseApp(metaclass=Singleton):
1117
2442
  start_time,
1118
2443
  end_time,
1119
2444
  ) = line.split(self._known_subs_file_sep, maxsplit=7)
1120
- item = {
2445
+ return {
1121
2446
  "local_id": int(local_id),
1122
2447
  "workflow_id": workflow_id,
1123
2448
  "is_active": bool(int(is_active)),
@@ -1127,16 +2452,12 @@ class BaseApp(metaclass=Singleton):
1127
2452
  "start_time": start_time,
1128
2453
  "end_time": end_time.strip(),
1129
2454
  }
1130
- return item
1131
2455
 
1132
2456
  @TimeIt.decorator
1133
- def read_known_submissions_file(self) -> List[Dict]:
2457
+ def read_known_submissions_file(self) -> list[KnownSubmission]:
1134
2458
  """Retrieve existing workflows that *might* be running."""
1135
- known = []
1136
2459
  with self.known_subs_file_path.open("rt", newline="\n") as fh:
1137
- for ln in fh.readlines():
1138
- known.append(self._parse_known_submissions_line(ln))
1139
- return known
2460
+ return [self._parse_known_submissions_line(ln) for ln in fh.readlines()]
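Annotation: each known-submissions record is a single separator-joined line, written by `_format_known_submissions_line` and read back by `_parse_known_submissions_line`. A round-trip sketch; the real separator (`_known_subs_file_sep`) and the exact middle field order are defined outside this hunk, so both are assumptions here:

    SEP = "::"  # stand-in for the app's _known_subs_file_sep

    def format_line(local_id, workflow_id, is_active, path,
                    sub_idx, submit_time, start_time, end_time) -> str:
        fields = [str(local_id), workflow_id, str(int(is_active)), path,
                  str(sub_idx), submit_time, start_time, end_time]
        return SEP.join(fields) + "\n"

    def parse_line(line: str) -> dict:
        (local_id, workflow_id, is_active, path,
         sub_idx, submit_time, start_time, end_time) = line.split(SEP, maxsplit=7)
        return {
            "local_id": int(local_id),
            "workflow_id": workflow_id,
            "is_active": bool(int(is_active)),
            "path": path,
            "sub_idx": int(sub_idx),
            "submit_time": submit_time,
            "start_time": start_time,
            "end_time": end_time.strip(),  # drop the trailing newline
        }

    line = format_line(3, "wf-abc", True, "/tmp/wf", 0, "2024-01-01T09:00", "", "")
    assert parse_line(line)["local_id"] == 3
    assert parse_line(line)["is_active"] is True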
1140
2461
 
1141
2462
  def _add_to_known_submissions(
1142
2463
  self,
@@ -1145,9 +2466,10 @@ class BaseApp(metaclass=Singleton):
1145
2466
  sub_idx: int,
1146
2467
  sub_time: str,
1147
2468
  ) -> int:
1148
- """Ensure a the specified workflow submission is in the known-submissions file and
1149
- return the associated local ID."""
1150
-
2469
+ """
2470
+ Ensure that the specified workflow submission is in the known-submissions file and
2471
+ return the associated local ID.
2472
+ """
1151
2473
  try:
1152
2474
  known = self.read_known_submissions_file()
1153
2475
  except FileNotFoundError:
@@ -1155,15 +2477,15 @@ class BaseApp(metaclass=Singleton):
1155
2477
 
1156
2478
  wk_path = str(wk_path)
1157
2479
  all_ids = []
1158
- for i in known:
1159
- all_ids.append(i["local_id"])
2480
+ for known_sub in known:
2481
+ all_ids.append(known_sub["local_id"])
1160
2482
  if (
1161
- wk_path == i["path"]
1162
- and sub_idx == i["sub_idx"]
1163
- and sub_time == i["submit_time"]
2483
+ wk_path == known_sub["path"]
2484
+ and sub_idx == known_sub["sub_idx"]
2485
+ and sub_time == known_sub["submit_time"]
1164
2486
  ):
1165
2487
  # workflow submission part already present
1166
- return i["local_id"]
2488
+ return known_sub["local_id"]
1167
2489
 
1168
2490
  # get the next available local ID:
1169
2491
  if all_ids:
@@ -1194,13 +2516,16 @@ class BaseApp(metaclass=Singleton):
1194
2516
  @TimeIt.decorator
1195
2517
  def update_known_subs_file(
1196
2518
  self,
1197
- inactive_IDs: List[int],
1198
- start_times: Dict[int, str],
1199
- end_times: Dict[int, str],
1200
- ):
1201
- """Update submission records in the known-submission file.
2519
+ inactive_IDs: list[int],
2520
+ start_times: dict[int, str],
2521
+ end_times: dict[int, str],
2522
+ ) -> list[int]:
2523
+ """
2524
+ Update submission records in the known-submission file.
1202
2525
 
1203
- Note we aim for atomicity to help with the scenario where a new workflow
2526
+ Note
2527
+ ----
2528
+ We aim for atomicity to help with the scenario where a new workflow
1204
2529
  submission is adding itself to the file at the same time as we have decided an
1205
2530
  existing workflow should no longer be part of this file. Ideally, such a scenario
1206
2531
  should not arise because both operations should only ever be interactively
@@ -1210,12 +2535,10 @@ class BaseApp(metaclass=Singleton):
1210
2535
 
1211
2536
  Returns
1212
2537
  -------
1213
- removed_IDs
2538
+ list[int]
1214
2539
  List of local IDs removed from the known-submissions file due to the maximum
1215
2540
  number of recent workflows to store being exceeded.
1216
-
1217
2541
  """
1218
-
1219
2542
  self.submission_logger.info(
1220
2543
  f"setting these local IDs to inactive in known-submissions file: "
1221
2544
  f"{inactive_IDs}"
@@ -1225,12 +2548,14 @@ class BaseApp(metaclass=Singleton):
1225
2548
 
1226
2549
  # keys are line indices of non-running submissions, values are submission
1227
2550
  # date-times:
1228
- line_date = {}
2551
+ line_date: dict[int, str] = {}
1229
2552
 
1230
- removed_IDs = [] # which submissions we completely remove from the file
2553
+ removed_IDs: list[int] = []  # which submissions we completely remove from the file
1231
2556
 
1232
- new_lines = []
1233
- line_IDs = []
2557
+ new_lines: list[str] = []
2558
+ line_IDs: list[int] = []
1234
2559
  for ln_idx, line in enumerate(self.known_subs_file_path.read_text().split("\n")):
1235
2560
  if not line.strip():
1236
2561
  continue
@@ -1246,7 +2571,6 @@ class BaseApp(metaclass=Singleton):
1246
2571
  update_end = item["local_id"] in end_times
1247
2572
 
1248
2573
  if update_inactive or update_start or update_end:
1249
-
1250
2574
  updated = self._format_known_submissions_line(
1251
2575
  local_id=item["local_id"],
1252
2576
  workflow_id=item["workflow_id"],
@@ -1273,7 +2597,7 @@ class BaseApp(metaclass=Singleton):
1273
2597
  if is_inactive:
1274
2598
  line_date[ln_idx] = item["submit_time"]
1275
2599
 
1276
- ld_srt_idx = list(dict(sorted(line_date.items(), key=lambda i: i[1])).keys())
2600
+ ld_srt_idx = sorted(line_date, key=lambda x: line_date[x])
1277
2601
 
1278
2602
  if len(line_date) > max_inactive:
1279
2603
  # remove oldest inactive submissions:
@@ -1285,8 +2609,7 @@ class BaseApp(metaclass=Singleton):
1285
2609
  )
1286
2610
 
1287
2611
  # sort in reverse so we can remove indices from new_lines:
1288
- oldest_idx = sorted(ld_srt_idx[:num_remove], reverse=True)
1289
- for i in oldest_idx:
2612
+ for i in sorted(ld_srt_idx[:num_remove], reverse=True):
1290
2613
  new_lines.pop(i)
1291
2614
  removed_IDs.append(line_IDs.pop(i))
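Annotation: the pruning above keeps at most `max_inactive` inactive records: line indices are ordered oldest-first by submit time, the surplus oldest entries are selected, and they are popped in reverse index order so earlier pops do not shift the later indices. A worked example with toy data:

    line_date = {4: "2024-01-03", 0: "2024-01-01", 2: "2024-01-02"}
    new_lines = ["a", "b", "c", "d", "e"]
    line_IDs = [10, 11, 12, 13, 14]
    max_inactive = 1

    # line indices sorted oldest-first by submit date:
    ld_srt_idx = sorted(line_date, key=lambda x: line_date[x])  # [0, 2, 4]
    num_remove = len(line_date) - max_inactive  # 2
    removed_IDs = []
    # pop in reverse index order so remaining indices stay valid:
    for i in sorted(ld_srt_idx[:num_remove], reverse=True):  # [2, 0]
        new_lines.pop(i)
        removed_IDs.append(line_IDs.pop(i))

    assert new_lines == ["b", "d", "e"]
    assert removed_IDs == [12, 10]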
1292
2615
 
@@ -1303,9 +2626,11 @@ class BaseApp(metaclass=Singleton):
1303
2626
 
1304
2627
  return removed_IDs
1305
2628
 
1306
- def clear_known_submissions_file(self):
1307
- """Clear the known-submissions file of all submissions. This shouldn't be needed
1308
- normally."""
2629
+ def clear_known_submissions_file(self) -> None:
2630
+ """
2631
+ Clear the known-submissions file of all submissions. This shouldn't be needed
2632
+ normally.
2633
+ """
1309
2634
  self.submission_logger.warning(
1310
2635
  f"clearing the known-submissions file at {self.known_subs_file_path}"
1311
2636
  )
@@ -1314,20 +2639,21 @@ class BaseApp(metaclass=Singleton):
1314
2639
 
1315
2640
  def _make_workflow(
1316
2641
  self,
1317
- template_file_or_str: Union[PathLike, str],
1318
- is_string: Optional[bool] = False,
1319
- template_format: Optional[str] = None,
1320
- path: Optional[PathLike] = None,
1321
- name: Optional[str] = None,
1322
- overwrite: Optional[bool] = False,
1323
- store: Optional[str] = DEFAULT_STORE_FORMAT,
1324
- ts_fmt: Optional[str] = None,
1325
- ts_name_fmt: Optional[str] = None,
1326
- store_kwargs: Optional[Dict] = None,
1327
- variables: Optional[Dict[str, str]] = None,
1328
- status: Optional[bool] = True,
1329
- ) -> get_app_attribute("Workflow"):
1330
- """Generate a new {app_name} workflow from a file or string containing a workflow
2642
+ template_file_or_str: PathLike | str,
2643
+ is_string: bool = False,
2644
+ template_format: Literal["json", "yaml"] | None = None,
2645
+ path: PathLike = None,
2646
+ name: str | None = None,
2647
+ overwrite: bool = False,
2648
+ store: str = DEFAULT_STORE_FORMAT,
2649
+ ts_fmt: str | None = None,
2650
+ ts_name_fmt: str | None = None,
2651
+ store_kwargs: dict[str, Any] | None = None,
2652
+ variables: dict[str, str] | None = None,
2653
+ status: bool = True,
2654
+ ) -> _Workflow:
2655
+ """
2656
+ Generate a new {app_name} workflow from a file or string containing a workflow
1331
2657
  template parametrisation.
1332
2658
 
1333
2659
  Parameters
@@ -1364,93 +2690,92 @@ class BaseApp(metaclass=Singleton):
1364
2690
  String variables to substitute in `template_file_or_str`.
1365
2691
  status
1366
2692
  If True, display a live status to track workflow creation progress.
1367
- """
1368
2693
 
2694
+ Returns
2695
+ -------
2696
+ Workflow
2697
+ The created workflow.
2698
+ """
1369
2699
  self.API_logger.info("make_workflow called")
1370
2700
 
1371
- if status:
1372
- console = rich.console.Console()
1373
- status = console.status("Making persistent workflow...")
1374
- status.start()
1375
-
1376
- common = {
1377
- "path": path,
1378
- "name": name,
1379
- "overwrite": overwrite,
1380
- "store": store,
1381
- "ts_fmt": ts_fmt,
1382
- "ts_name_fmt": ts_name_fmt,
1383
- "store_kwargs": store_kwargs,
1384
- "variables": variables,
1385
- "status": status,
1386
- }
1387
-
1388
- if not is_string:
1389
- wk = self.Workflow.from_file(
1390
- template_path=template_file_or_str,
1391
- template_format=template_format,
1392
- **common,
1393
- )
2701
+ status_context: AbstractContextManager[Status] | AbstractContextManager[None] = (
2702
+ Console().status("Making persistent workflow...") if status else nullcontext()
2703
+ )
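Annotation: the old implementation started and stopped the rich status spinner by hand on every exception path; the rewrite builds either a live status or `nullcontext()` and lets the `with` statement handle cleanup uniformly. The pattern in miniature, where `FakeStatus` stands in for the rich Status object:

    from contextlib import nullcontext

    class FakeStatus:
        """Stand-in for the rich Status spinner."""
        def __enter__(self):
            return self
        def __exit__(self, exc_type, exc, tb):
            # cleanup runs on success *and* on error; no manual stop() calls
            return None

    def make_thing(status: bool) -> str:
        status_context = FakeStatus() if status else nullcontext()
        with status_context as status_:
            # status_ is the live spinner, or None when display is disabled
            return "made"

    assert make_thing(True) == "made"
    assert make_thing(False) == "made"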
1394
2704
 
1395
- elif template_format == "json":
1396
- try:
1397
- wk = self.Workflow.from_JSON_string(
1398
- JSON_str=template_file_or_str, **common
2705
+ with status_context as status_:
2706
+ if not is_string:
2707
+ return self.Workflow.from_file(
2708
+ template_path=template_file_or_str,
2709
+ template_format=template_format,
2710
+ path=str(path) if path else None,
2711
+ name=name,
2712
+ overwrite=overwrite,
2713
+ store=store,
2714
+ ts_fmt=ts_fmt,
2715
+ ts_name_fmt=ts_name_fmt,
2716
+ store_kwargs=store_kwargs,
2717
+ variables=variables,
2718
+ status=status_,
1399
2719
  )
1400
- except Exception:
1401
- if status:
1402
- status.stop()
1403
- raise
1404
-
1405
- elif template_format == "yaml":
1406
- try:
1407
- wk = self.Workflow.from_YAML_string(
1408
- YAML_str=template_file_or_str, **common
2720
+ elif template_format == "json":
2721
+ return self.Workflow.from_JSON_string(
2722
+ JSON_str=str(template_file_or_str),
2723
+ path=str(path) if path else None,
2724
+ name=name,
2725
+ overwrite=overwrite,
2726
+ store=store,
2727
+ ts_fmt=ts_fmt,
2728
+ ts_name_fmt=ts_name_fmt,
2729
+ store_kwargs=store_kwargs,
2730
+ variables=variables,
2731
+ status=status_,
2732
+ )
2733
+ elif template_format == "yaml":
2734
+ return self.Workflow.from_YAML_string(
2735
+ YAML_str=str(template_file_or_str),
2736
+ path=str(path) if path else None,
2737
+ name=name,
2738
+ overwrite=overwrite,
2739
+ store=store,
2740
+ ts_fmt=ts_fmt,
2741
+ ts_name_fmt=ts_name_fmt,
2742
+ store_kwargs=store_kwargs,
2743
+ variables=variables,
2744
+ )
2745
+ elif not template_format:
2746
+ raise ValueError(
2747
+ f"Must specify `template_format` if parsing a workflow template from a "
2748
+ f"string; available options are: {ALL_TEMPLATE_FORMATS!r}."
2749
+ )
2750
+ else:
2751
+ raise ValueError(
2752
+ f"Template format {template_format!r} not understood. Available template "
2753
+ f"formats are {ALL_TEMPLATE_FORMATS!r}."
1409
2754
  )
1410
- except Exception:
1411
- if status:
1412
- status.stop()
1413
- raise
1414
-
1415
- elif not template_format:
1416
- raise ValueError(
1417
- f"Must specify `template_format` if parsing a workflow template from a "
1418
- f"string; available options are: {ALL_TEMPLATE_FORMATS!r}."
1419
- )
1420
-
1421
- else:
1422
- raise ValueError(
1423
- f"Template format {template_format!r} not understood. Available template "
1424
- f"formats are {ALL_TEMPLATE_FORMATS!r}."
1425
- )
1426
-
1427
- if status:
1428
- status.stop()
1429
-
1430
- return wk
1431
2755
 
1432
2756
  def _make_and_submit_workflow(
1433
2757
  self,
1434
- template_file_or_str: Union[PathLike, str],
1435
- is_string: Optional[bool] = False,
1436
- template_format: Optional[str] = None,
1437
- path: Optional[PathLike] = None,
1438
- name: Optional[str] = None,
1439
- overwrite: Optional[bool] = False,
1440
- store: Optional[str] = DEFAULT_STORE_FORMAT,
1441
- ts_fmt: Optional[str] = None,
1442
- ts_name_fmt: Optional[str] = None,
1443
- store_kwargs: Optional[Dict] = None,
1444
- variables: Optional[Dict[str, str]] = None,
1445
- JS_parallelism: Optional[bool] = None,
1446
- wait: Optional[bool] = False,
1447
- add_to_known: Optional[bool] = True,
1448
- return_idx: Optional[bool] = False,
1449
- tasks: Optional[List[int]] = None,
1450
- cancel: Optional[bool] = False,
1451
- status: Optional[bool] = True,
1452
- ) -> Dict[int, int]:
1453
- """Generate and submit a new {app_name} workflow from a file or string containing a
2758
+ template_file_or_str: PathLike | str,
2759
+ is_string: bool = False,
2760
+ template_format: Literal["json", "yaml"] | None = None,
2761
+ path: PathLike | None = None,
2762
+ name: str | None = None,
2763
+ overwrite: bool = False,
2764
+ store: str = DEFAULT_STORE_FORMAT,
2765
+ ts_fmt: str | None = None,
2766
+ ts_name_fmt: str | None = None,
2767
+ store_kwargs: dict[str, Any] | None = None,
2768
+ variables: dict[str, str] | None = None,
2769
+ JS_parallelism: bool | None = None,
2770
+ wait: bool = False,
2771
+ add_to_known: bool = True,
2772
+ return_idx: bool = False,
2773
+ tasks: list[int] | None = None,
2774
+ cancel: bool = False,
2775
+ status: bool = True,
2776
+ ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
2777
+ """
2778
+ Generate and submit a new {app_name} workflow from a file or string containing a
1454
2779
  workflow template parametrisation.
1455
2780
 
1456
2781
  Parameters
@@ -1506,11 +2831,17 @@ class BaseApp(metaclass=Singleton):
1506
2831
  status
1507
2832
  If True, display a live status to track workflow creation and submission
1508
2833
  progress.
1509
- """
1510
2834
 
2835
+ Returns
2836
+ -------
2837
+ Workflow
2838
+ The created workflow.
2839
+ dict[int, list[int]]
2840
+ Mapping of submission handles, if requested by the ``return_idx`` parameter.
2841
+ """
1511
2842
  self.API_logger.info("make_and_submit_workflow called")
1512
2843
 
1513
- wk = self.make_workflow(
2844
+ wk = self._make_workflow(
1514
2845
  template_file_or_str=template_file_or_str,
1515
2846
  is_string=is_string,
1516
2847
  template_format=template_format,
@@ -1541,18 +2872,19 @@ class BaseApp(metaclass=Singleton):
1541
2872
  def _make_demo_workflow(
1542
2873
  self,
1543
2874
  workflow_name: str,
1544
- template_format: Optional[str] = None,
1545
- path: Optional[PathLike] = None,
1546
- name: Optional[str] = None,
1547
- overwrite: Optional[bool] = False,
1548
- store: Optional[str] = DEFAULT_STORE_FORMAT,
1549
- ts_fmt: Optional[str] = None,
1550
- ts_name_fmt: Optional[str] = None,
1551
- store_kwargs: Optional[Dict] = None,
1552
- variables: Optional[Dict[str, str]] = None,
1553
- status: Optional[bool] = True,
1554
- ) -> get_app_attribute("Workflow"):
1555
- """Generate a new {app_name} workflow from a builtin demo workflow template.
2875
+ template_format: Literal["json", "yaml"] | None = None,
2876
+ path: PathLike | None = None,
2877
+ name: str | None = None,
2878
+ overwrite: bool = False,
2879
+ store: str = DEFAULT_STORE_FORMAT,
2880
+ ts_fmt: str | None = None,
2881
+ ts_name_fmt: str | None = None,
2882
+ store_kwargs: dict[str, Any] | None = None,
2883
+ variables: dict[str, str] | None = None,
2884
+ status: bool = True,
2885
+ ) -> _Workflow:
2886
+ """
2887
+ Generate a new {app_name} workflow from a builtin demo workflow template.
1556
2888
 
1557
2889
  Parameters
1558
2890
  ----------
@@ -1586,20 +2918,25 @@ class BaseApp(metaclass=Singleton):
1586
2918
  String variables to substitute in the demo workflow template file.
1587
2919
  status
1588
2920
  If True, display a live status to track workflow creation progress.
1589
- """
1590
2921
 
2922
+ Returns
2923
+ -------
2924
+ Workflow
2925
+ The created workflow.
2926
+ """
1591
2927
  self.API_logger.info("make_demo_workflow called")
1592
2928
 
1593
- if status:
1594
- console = rich.console.Console()
1595
- status = console.status("Making persistent workflow...")
1596
- status.start()
2929
+ status_context: AbstractContextManager[Status] | AbstractContextManager[None] = (
2930
+ Console().status("Making persistent workflow...") if status else nullcontext()
2931
+ )
1597
2932
 
1598
- with self.get_demo_workflow_template_file(workflow_name) as template_path:
1599
- wk = self.Workflow.from_file(
2933
+ with status_context as status_, self.get_demo_workflow_template_file(
2934
+ workflow_name
2935
+ ) as template_path:
2936
+ return self.Workflow.from_file(
1600
2937
  template_path=template_path,
1601
2938
  template_format=template_format,
1602
- path=path,
2939
+ path=str(path) if path else None,
1603
2940
  name=name,
1604
2941
  overwrite=overwrite,
1605
2942
  store=store,
@@ -1607,33 +2944,31 @@ class BaseApp(metaclass=Singleton):
1607
2944
  ts_name_fmt=ts_name_fmt,
1608
2945
  store_kwargs=store_kwargs,
1609
2946
  variables=variables,
1610
- status=status,
2947
+ status=status_,
1611
2948
  )
1612
- if status:
1613
- status.stop()
1614
- return wk
1615
2949
 
1616
2950
  def _make_and_submit_demo_workflow(
1617
2951
  self,
1618
2952
  workflow_name: str,
1619
- template_format: Optional[str] = None,
1620
- path: Optional[PathLike] = None,
1621
- name: Optional[str] = None,
1622
- overwrite: Optional[bool] = False,
1623
- store: Optional[str] = DEFAULT_STORE_FORMAT,
1624
- ts_fmt: Optional[str] = None,
1625
- ts_name_fmt: Optional[str] = None,
1626
- store_kwargs: Optional[Dict] = None,
1627
- variables: Optional[Dict[str, str]] = None,
1628
- JS_parallelism: Optional[bool] = None,
1629
- wait: Optional[bool] = False,
1630
- add_to_known: Optional[bool] = True,
1631
- return_idx: Optional[bool] = False,
1632
- tasks: Optional[List[int]] = None,
1633
- cancel: Optional[bool] = False,
1634
- status: Optional[bool] = True,
1635
- ) -> Dict[int, int]:
1636
- """Generate and submit a new {app_name} workflow from a file or string containing a
2953
+ template_format: Literal["json", "yaml"] | None = None,
2954
+ path: PathLike | None = None,
2955
+ name: str | None = None,
2956
+ overwrite: bool = False,
2957
+ store: str = DEFAULT_STORE_FORMAT,
2958
+ ts_fmt: str | None = None,
2959
+ ts_name_fmt: str | None = None,
2960
+ store_kwargs: dict[str, Any] | None = None,
2961
+ variables: dict[str, str] | None = None,
2962
+ JS_parallelism: bool | None = None,
2963
+ wait: bool = False,
2964
+ add_to_known: bool = True,
2965
+ return_idx: bool = False,
2966
+ tasks: list[int] | None = None,
2967
+ cancel: bool = False,
2968
+ status: bool = True,
2969
+ ) -> tuple[_Workflow, Mapping[int, Sequence[int]]] | _Workflow:
2970
+ """
2971
+ Generate and submit a new {app_name} workflow from a builtin demo
1637
2972
  workflow template parametrisation.
1638
2973
 
1639
2974
  Parameters
@@ -1685,11 +3020,17 @@ class BaseApp(metaclass=Singleton):
1685
3020
  Immediately cancel the submission. Useful for testing and benchmarking.
1686
3021
  status
1687
3022
  If True, display a live status to track submission progress.
1688
- """
1689
3023
 
3024
+ Returns
3025
+ -------
3026
+ Workflow
3027
+ The created workflow.
3028
+ dict[int, list[int]]
3029
+ Mapping of submission handles, if requested by the ``return_idx`` parameter.
3030
+ """
1690
3031
  self.API_logger.info("make_and_submit_demo_workflow called")
1691
3032
 
1692
- wk = self.make_demo_workflow(
3033
+ wk = self._make_demo_workflow(
1693
3034
  workflow_name=workflow_name,
1694
3035
  template_format=template_format,
1695
3036
  path=path,
@@ -1718,59 +3059,66 @@ class BaseApp(metaclass=Singleton):
1718
3059
  def _submit_workflow(
1719
3060
  self,
1720
3061
  workflow_path: PathLike,
1721
- JS_parallelism: Optional[bool] = None,
1722
- wait: Optional[bool] = False,
1723
- return_idx: Optional[bool] = False,
1724
- tasks: Optional[List[int]] = None,
1725
- ) -> Dict[int, int]:
1726
- """Submit an existing {app_name} workflow.
3062
+ JS_parallelism: bool | None = None,
3063
+ wait: bool = False,
3064
+ return_idx: bool = False,
3065
+ tasks: list[int] | None = None,
3066
+ ) -> Mapping[int, Sequence[int]] | None:
3067
+ """
3068
+ Submit an existing {app_name} workflow.
1727
3069
 
1728
3070
  Parameters
1729
3071
  ----------
1730
- workflow_path
1731
- Path to an existing workflow
1732
- JS_parallelism
3072
+ workflow_path:
3073
+ Path to an existing workflow.
3074
+ JS_parallelism:
1733
3075
  If True, allow multiple jobscripts to execute simultaneously. Raises if set to
1734
3076
  True but the store type does not support the `jobscript_parallelism` feature. If
1735
3077
  not set, jobscript parallelism will be used if the store type supports it.
1736
- tasks
3078
+ wait:
3079
+ Whether to wait for the submission to complete.
3080
+ return_idx:
3081
+ Whether to return the index information.
3082
+ tasks:
1737
3083
  List of task indices to include in this submission. By default all tasks are
1738
3084
  included.
1739
- """
1740
3085
 
3086
+ Returns
3087
+ -------
3088
+ dict[int, list[int]]
3089
+ Mapping of submission handles, if requested by the ``return_idx`` parameter.
3090
+ """
1741
3091
  self.API_logger.info("submit_workflow called")
3092
+ assert workflow_path is not None
1742
3093
  wk = self.Workflow(workflow_path)
1743
- return wk.submit(
1744
- JS_parallelism=JS_parallelism,
1745
- wait=wait,
1746
- return_idx=return_idx,
1747
- tasks=tasks,
1748
- )
3094
+ if return_idx:
3095
+ return wk.submit(
3096
+ JS_parallelism=JS_parallelism,
3097
+ wait=wait,
3098
+ return_idx=True,
3099
+ tasks=tasks,
3100
+ )
3101
+ wk.submit(JS_parallelism=JS_parallelism, wait=wait, tasks=tasks)
3102
+ return None
1749
3103
 
1750
- def _run_hpcflow_tests(self, *args):
3104
+ def _run_hpcflow_tests(self, *args: str) -> int:
1751
3105
  """Run hpcflow test suite. This function is only available from derived apps."""
1752
-
1753
3106
  from hpcflow import app as hf
1754
3107
 
1755
3108
  return hf.app.run_tests(*args)
1756
3109
 
1757
- def _run_tests(self, *args):
3110
+ def _run_tests(self, *args: str) -> int:
1758
3111
  """Run {app_name} test suite."""
1759
-
1760
3112
  try:
1761
3113
  import pytest
1762
3114
  except ModuleNotFoundError:
1763
3115
  raise RuntimeError(
1764
3116
  f"{self.name} has not been built with testing dependencies."
1765
3117
  )
1766
- test_args = (self.pytest_args or []) + list(args)
1767
- pkg = self.package_name
1768
- tests_dir = "tests"
1769
- ctx_man = resources.as_file(resources.files(pkg).joinpath(tests_dir))
1770
- with ctx_man as test_dir:
1771
- return pytest.main([str(test_dir)] + test_args)
1772
-
1773
- def _get_OS_info(self) -> Dict:
3118
+ with get_file_context(self.package_name, "tests") as test_dir:
3119
+ return pytest.main([str(test_dir), *(self.pytest_args or ()), *args])
3120
+
3121
+ def _get_OS_info(self) -> Mapping[str, str]:
1774
3122
  """Get information about the operating system."""
1775
3123
  os_name = os.name
1776
3124
  if os_name == "posix":
@@ -1779,19 +3127,22 @@ class BaseApp(metaclass=Singleton):
1779
3127
  )
1780
3128
  elif os_name == "nt":
1781
3129
  return get_OS_info_windows()
3130
+ else:
3131
+ raise Exception(f"unsupported OS '{os_name}'")
1782
3132
 
1783
3133
  def _get_shell_info(
1784
3134
  self,
1785
3135
  shell_name: str,
1786
- exclude_os: Optional[bool] = False,
1787
- ) -> Dict:
1788
- """Get information about a given shell and the operating system.
3136
+ exclude_os: bool = False,
3137
+ ) -> VersionInfo:
3138
+ """
3139
+ Get information about a given shell and the operating system.
1789
3140
 
1790
3141
  Parameters
1791
3142
  ----------
1792
- shell_name
3143
+ shell_name:
1793
3144
  One of the supported shell names.
1794
- exclude_os
3145
+ exclude_os:
1795
3146
  If True, exclude operating system information.
1796
3147
  """
1797
3148
  shell = get_shell(
@@ -1806,9 +3157,10 @@ class BaseApp(metaclass=Singleton):
1806
3157
  max_recent: int = 3,
1807
3158
  no_update: bool = False,
1808
3159
  as_json: bool = False,
1809
- status: Optional[Any] = None,
1810
- ):
1811
- """Retrieve information about active and recently inactive finished {app_name}
3160
+ status: Status | None = None,
3161
+ ) -> Sequence[KnownSubmissionItem]:
3162
+ """
3163
+ Retrieve information about active and recently inactive finished {app_name}
1812
3164
  workflows.
1813
3165
 
1814
3166
  This method removes workflows from the known-submissions file that are found to be
@@ -1816,21 +3168,19 @@ class BaseApp(metaclass=Singleton):
1816
3168
 
1817
3169
  Parameters
1818
3170
  ----------
1819
- max_recent
3171
+ max_recent:
1820
3172
  Maximum number of inactive workflows to retrieve.
1821
- no_update
3173
+ no_update:
1822
3174
  If True, do not update the known-submissions file to set submissions that are
1823
3175
  now inactive.
1824
- as_json
3176
+ as_json:
1825
3177
  If True, only include JSON-compatible information. This will exclude the
1826
3178
  `submission` key, for instance.
1827
-
1828
3179
  """
1829
-
1830
- out = []
1831
- inactive_IDs = []
1832
- start_times = {}
1833
- end_times = {}
3180
+ out: list[KnownSubmissionItem] = []
3181
+ inactive_IDs: list[int] = []
3182
+ start_times: dict[int, str] = {}
3183
+ end_times: dict[int, str] = {}
1834
3184
 
1835
3185
  ts_fmt = self._submission_ts_fmt
1836
3186
 
@@ -1841,28 +3191,28 @@ class BaseApp(metaclass=Singleton):
         except FileNotFoundError:
             known_subs = []
 
-        active_jobscripts = {}  # keys are (workflow path, submission index)
-        loaded_workflows = {}  # keys are workflow path
+        # keys are (workflow path, submission index)
+        active_jobscripts: dict[
+            tuple[str, int], Mapping[int, Mapping[int, JobscriptElementState]]
+        ] = {}
+        loaded_workflows: dict[str, _Workflow] = {}  # keys are workflow path
 
         # loop in reverse so we process more-recent submissions first:
         for file_dat_i in known_subs[::-1]:
             submit_time_str = file_dat_i["submit_time"]
-            submit_time_obj = datetime.strptime(submit_time_str, ts_fmt)
-            submit_time_obj = submit_time_obj.replace(tzinfo=timezone.utc).astimezone()
+            submit_time_obj = parse_timestamp(submit_time_str, ts_fmt)
 
             start_time_str = file_dat_i["start_time"]
             start_time_obj = None
             if start_time_str:
-                start_time_obj = datetime.strptime(start_time_str, ts_fmt)
-                start_time_obj = start_time_obj.replace(tzinfo=timezone.utc).astimezone()
+                start_time_obj = parse_timestamp(start_time_str, ts_fmt)
 
             end_time_str = file_dat_i["end_time"]
             end_time_obj = None
             if end_time_str:
-                end_time_obj = datetime.strptime(end_time_str, ts_fmt)
-                end_time_obj = end_time_obj.replace(tzinfo=timezone.utc).astimezone()
+                end_time_obj = parse_timestamp(end_time_str, ts_fmt)
 
-            out_item = {
+            out_item: KnownSubmissionItem = {
                 "local_id": file_dat_i["local_id"],
                 "workflow_id": file_dat_i["workflow_id"],
                 "workflow_path": file_dat_i["path"],
@@ -1924,12 +3274,8 @@ class BaseApp(metaclass=Singleton):
                 sub = wk_i.submissions[file_dat_i["sub_idx"]]
 
                 all_jobscripts = sub._submission_parts[submit_time_str]
-                out_item.update(
-                    {
-                        "jobscripts": all_jobscripts,
-                        "submission": sub,
-                    }
-                )
+                out_item["jobscripts"] = all_jobscripts
+                out_item["submission"] = sub
                 if not out_item["start_time"]:
                     start_time_obj = sub.start_time
                     if start_time_obj:
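Editor's note: replacing `out_item.update({...})` with per-key assignments is what makes the new `KnownSubmissionItem` annotation pay off. If that type is a `TypedDict` (as the annotations in this diff suggest), literal-key assignments are individually verified by a type checker, whereas a plain `dict` passed to `update` is not. A trimmed illustration with an assumed field subset:

```python
from typing import TypedDict


class KnownSubmissionItem(TypedDict, total=False):
    # Hypothetical subset of the real fields, for illustration only.
    local_id: int
    jobscripts: list[int]


item: KnownSubmissionItem = {"local_id": 3}
item["jobscripts"] = [0, 1]    # key and value type are checked statically
# item["jobscript"] = [0, 1]   # a checker would flag this misspelt key
```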
@@ -1949,11 +3295,17 @@ class BaseApp(metaclass=Singleton):
             if file_dat_i["is_active"]:
                 # check it really is active:
                 run_key = (file_dat_i["path"], file_dat_i["sub_idx"])
+                act_i_js: Mapping[int, Mapping[int, JobscriptElementState]]
                 if run_key in active_jobscripts:
                     act_i_js = active_jobscripts[run_key]
                 else:
                     try:
-                        act_i_js = sub.get_active_jobscripts(as_json=as_json)
+                        if as_json:
+                            act_i_js = cast(  # not actually used?
+                                Any, sub.get_active_jobscripts(as_json=True)
+                            )
+                        else:
+                            act_i_js = sub.get_active_jobscripts()
                     except KeyboardInterrupt:
                         raise
                     except Exception:
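Editor's note: splitting the call on a literal `as_json` value lets a type checker resolve `get_active_jobscripts` to a branch-specific return type, with the `cast` papering over the JSON branch (which the inline comment suspects is unused). This pattern normally pairs with `@overload` declarations, roughly of this shape (signatures assumed, not taken from the diff):

```python
from typing import Any, Literal, Mapping, overload


class Submission:
    @overload
    def get_active_jobscripts(
        self, as_json: Literal[False] = False
    ) -> Mapping[int, Mapping[int, "JobscriptElementState"]]: ...

    @overload
    def get_active_jobscripts(self, as_json: Literal[True]) -> dict[str, Any]: ...

    def get_active_jobscripts(self, as_json: bool = False):
        # Single runtime implementation serving both typed signatures.
        ...
```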
@@ -1984,38 +3336,64 @@ class BaseApp(metaclass=Singleton):
             )
             # remove these from the output, to avoid confusion (if kept, they would not
             # appear in the next invocation of this method):
-            out = [i for i in out if i["local_id"] not in removed_IDs]
+            out = [item for item in out if item["local_id"] not in removed_IDs]
 
+        out_active, out_inactive = self.__partition(
+            out, lambda item: item["active_jobscripts"]
+        )
         # sort inactive by most-recently finished, then deleted:
-        out_inactive = [i for i in out if not i["active_jobscripts"]]
-        out_no_access = [i for i in out_inactive if (i["deleted"] or i["unloadable"])]
-        out_access = [i for i in out_inactive if not (i["deleted"] or i["unloadable"])]
+        out_no_access, out_access = self.__partition(
+            out_inactive, lambda item: item["deleted"] or item["unloadable"]
+        )
 
         # sort loadable inactive by end time or start time or submit time:
         out_access = sorted(
             out_access,
             key=lambda i: (
-                i["end_time_obj"] or i["start_time_obj"] or i["submit_time_obj"]
+                i["end_time_obj"]
+                or i["start_time_obj"]
+                or i.get("submit_time_obj")
+                or self.__DEF_TIMESTAMP
             ),
             reverse=True,
         )
         out_inactive = (out_access + out_no_access)[:max_recent]
 
-        out_active = [i for i in out if i["active_jobscripts"]]
-
         # show active submissions first:
         out = out_active + out_inactive
 
         if as_json:
-            for idx, _ in enumerate(out):
-                out[idx].pop("submission", None)
-                out[idx].pop("submit_time_obj")
+            for item in out:
+                item.pop("submission", None)
+                item.pop("submit_time_obj")
         return out
 
-    def _show_legend(self):
-        """ "Output a legend for the jobscript-element and EAR states that are displayed
-        by the `show` command."""
+    __DEF_TIMESTAMP: Final[datetime] = datetime.fromtimestamp(0, tz=timezone.utc)
+
+    @staticmethod
+    def __partition(
+        lst: Iterable[T], cond: Callable[[T], Any]
+    ) -> tuple[list[T], list[T]]:
+        """
+        Split a list into two by whether the condition holds for each item.
+
+        Returns
+        -------
+        true_items
+            List of items for which the condition is true (or at least truthy).
+        false_items
+            List of items for which the condition is false.
+        """
+        lists: tuple[list[T], list[T]] = [], []
+        for item in lst:
+            lists[not cond(item)].append(item)
+        return lists
 
+    def _show_legend(self) -> None:
+        """
+        Output a legend for the jobscript-element and EAR states that are displayed
+        by the `show` command.
+        """
         js_notes = Panel(
             "The [i]Status[/i] column of the `show` command output displays the set of "
             "unique jobscript-element states for that submission. Jobscript element "
@@ -2028,8 +3406,8 @@ class BaseApp(metaclass=Singleton):
         js_tab.add_column("Symbol")
         js_tab.add_column("State")
         js_tab.add_column("Description")
-        for state in JobscriptElementState.__members__.values():
-            js_tab.add_row(state.rich_repr, state.name, state.__doc__)
+        for jse_state in JobscriptElementState.__members__.values():
+            js_tab.add_row(jse_state.rich_repr, jse_state.name, jse_state.__doc__)
 
         act_notes = Panel(
             "\nThe [i]Actions[/i] column of the `show` command output displays either the "
@@ -2044,8 +3422,8 @@ class BaseApp(metaclass=Singleton):
         act_tab.add_column("Symbol")
         act_tab.add_column("State")
         act_tab.add_column("Description")
-        for state in EARStatus.__members__.values():
-            act_tab.add_row(state.rich_repr, state.name, state.__doc__)
+        for ear_state in EARStatus.__members__.values():
+            act_tab.add_row(ear_state.rich_repr, ear_state.name, ear_state.__doc__)
 
         group = Group(
             js_notes,
@@ -2061,22 +3439,21 @@ class BaseApp(metaclass=Singleton):
         max_recent: int = 3,
         full: bool = False,
         no_update: bool = False,
-        columns=None,
-    ):
-        """Show information about running {app_name} workflows.
+    ) -> None:
+        """
+        Show information about running {app_name} workflows.
 
         Parameters
         ----------
-        max_recent
+        max_recent:
             Maximum number of inactive workflows to show.
-        full
+        full:
             If True, provide more information; output may span multiple lines for each
             workflow submission.
-        no_update
+        no_update:
             If True, do not update the known-submissions file to remove workflows that are
             no longer running.
         """
-
         # TODO: add --json to show, just returning this but without submissions?
 
         allowed_cols = {
@@ -2091,9 +3468,9 @@ class BaseApp(metaclass=Singleton):
             "actions_compact": "Actions",
         }
 
+        columns: tuple[str, ...]
         if full:
             columns = ("id", "name", "status", "times", "actions")
-
         else:
             columns = (
                 "id",
@@ -2105,11 +3482,11 @@ class BaseApp(metaclass=Singleton):
                 "actions_compact",
             )
 
-        unknown_cols = set(columns) - set(allowed_cols.keys())
+        unknown_cols = set(columns).difference(allowed_cols)
         if unknown_cols:
             raise ValueError(
                 f"Unknown column names: {unknown_cols!r}. Allowed columns are "
-                f"{list(allowed_cols.keys())!r}."
+                f"{list(allowed_cols)!r}."
             )
 
         # TODO: add --filter option to filter by ID or name
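Editor's note: a small but tidy change above — iterating a `dict` yields its keys, so `set(columns).difference(allowed_cols)` and `list(allowed_cols)` drop the redundant `.keys()` calls without changing behaviour. For instance:

```python
allowed_cols = {"id": "ID", "name": "Name", "status": "Status"}
columns = ("id", "bogus")

print(set(columns).difference(allowed_cols))  # {'bogus'}
print(list(allowed_cols))                     # ['id', 'name', 'status']
```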
@@ -2119,167 +3496,163 @@ class BaseApp(metaclass=Singleton):
         ts_fmt_part = r"%H:%M:%S"
 
         console = Console()
-        status = console.status("Retrieving data...")
-        status.start()
-
-        try:
+        with console.status("Retrieving data...") as status:
             run_dat = self._get_known_submissions(
                 max_recent=max_recent,
                 no_update=no_update,
                 status=status,
             )
-        except (Exception, KeyboardInterrupt):
-            status.stop()
-            raise
-        else:
             if not run_dat:
-                status.stop()
                 return
 
-        status.update("Formatting...")
-        table = Table(box=box.SQUARE, expand=False)
-        for col_name in columns:
-            table.add_column(allowed_cols[col_name])
-
-        row_pad = 1 if full else 0
-
-        for dat_i in run_dat:
-            deleted = dat_i["deleted"]
-            unloadable = dat_i["unloadable"]
-            no_access = deleted or unloadable
-            act_js = dat_i["active_jobscripts"]
-            style = "grey42" if (no_access or not act_js) else ""
-            style_wk_name = "grey42 strike" if deleted else style
-            style_it = "italic grey42" if (no_access or not act_js) else "italic"
-
-            all_cells = {}
-            if "status" in columns:
-                if act_js:
-                    act_js_states = set([j for i in act_js.values() for j in i.values()])
-                    status_text = "/".join(
-                        f"[{i.colour}]{i.symbol}[/{i.colour}]" for i in act_js_states
-                    )
-                else:
-                    if deleted:
-                        txt = "deleted"
-                    elif unloadable:
-                        txt = "unloadable"
+            status.update("Formatting...")
+            table = Table(box=box.SQUARE, expand=False)
+            for col_name in columns:
+                table.add_column(allowed_cols[col_name])
+
+            row_pad = 1 if full else 0
+
+            for dat_i in run_dat:
+                deleted = dat_i["deleted"]
+                unloadable = dat_i["unloadable"]
+                no_access = deleted or unloadable
+                act_js = dat_i["active_jobscripts"]
+                style = "grey42" if (no_access or not act_js) else ""
+                style_wk_name = "grey42 strike" if deleted else style
+                style_it = "italic grey42" if (no_access or not act_js) else "italic"
+
+                all_cells: dict[str, str | Text | Padding] = {}
+                if "status" in columns:
+                    if act_js:
+                        act_js_states = set(
+                            js_state
+                            for jsinf in act_js.values()
+                            for js_state in jsinf.values()
+                        )
+                        all_cells["status"] = "/".join(
+                            js_state.rich_repr for js_state in act_js_states
+                        )
                     else:
-                        txt = "inactive"
-                    status_text = Text(txt, style=style_it)
-                all_cells["status"] = status_text
-
-            if "id" in columns:
-                all_cells["id"] = Text(str(dat_i["local_id"]), style=style)
+                        if deleted:
+                            txt = "deleted"
+                        elif unloadable:
+                            txt = "unloadable"
+                        else:
+                            txt = "inactive"
+                        all_cells["status"] = Text(txt, style=style_it)
 
-            if "name" in columns:
-                all_cells["name"] = Text(
-                    Path(dat_i["workflow_path"]).name, style=style_wk_name
-                )
+                if "id" in columns:
+                    all_cells["id"] = Text(str(dat_i["local_id"]), style=style)
 
-            start_time, end_time = None, None
-            if not no_access:
-                start_time = dat_i["start_time_obj"]
-                end_time = dat_i["end_time_obj"]
+                if "name" in columns:
+                    all_cells["name"] = Text(
+                        Path(dat_i["workflow_path"]).name, style=style_wk_name
+                    )
 
-            if "actions" in columns:
+                start_time, end_time = None, None
                 if not no_access:
-                    task_tab = Table(box=None, show_header=False)
-                    task_tab.add_column()
-                    task_tab.add_column()
-
-                    for task_idx, elements in dat_i[
-                        "submission"
-                    ].EARs_by_elements.items():
-                        task = dat_i["submission"].workflow.tasks[task_idx]
-
-                        # inner table for elements/actions:
-                        elem_tab_i = Table(box=None, show_header=False)
-                        elem_tab_i.add_column()
-                        for elem_idx, EARs in elements.items():
-                            elem_status = Text(f"{elem_idx} | ", style=style)
-                            for i in EARs:
-                                elem_status.append(i.status.symbol, style=i.status.colour)
-                            elem_tab_i.add_row(elem_status)
-                        task_tab.add_row(task.unique_name, elem_tab_i, style=style)
-                else:
-                    task_tab = ""
-
-                all_cells["actions"] = Padding(task_tab, (0, 0, row_pad, 0))
+                    start_time = cast("datetime", dat_i["start_time_obj"])
+                    end_time = cast("datetime", dat_i["end_time_obj"])
+
+                if "actions" in columns:
+                    task_tab: str | Table
+                    if not no_access:
+                        task_tab = Table(box=None, show_header=False)
+                        task_tab.add_column()
+                        task_tab.add_column()
+
+                        sub = dat_i["submission"]
+                        for task_idx, elements in sub.EARs_by_elements.items():
+                            task = sub.workflow.tasks[task_idx]
+
+                            # inner table for elements/actions:
+                            elem_tab_i = Table(box=None, show_header=False)
+                            elem_tab_i.add_column()
+                            for elem_idx, EARs in elements.items():
+                                elem_status = Text(f"{elem_idx} | ", style=style)
+                                for ear in EARs:
+                                    elem_status.append(
+                                        ear.status.symbol, style=ear.status.colour
+                                    )
+                                elem_tab_i.add_row(elem_status)
+                            task_tab.add_row(task.unique_name, elem_tab_i, style=style)
+                    else:
+                        task_tab = ""
 
-            if "actions_compact" in columns:
-                if not no_access:
-                    EAR_stat_count = defaultdict(int)
-                    for _, elements in dat_i["submission"].EARs_by_elements.items():
-                        for elem_idx, EARs in elements.items():
-                            for i in EARs:
-                                EAR_stat_count[i.status] += 1
-                    all_cells["actions_compact"] = " | ".join(
-                        f"[{k.colour}]{k.symbol}[/{k.colour}]:{v}"
-                        for k, v in EAR_stat_count.items()
-                    )
-                else:
-                    all_cells["actions_compact"] = ""
+                    all_cells["actions"] = Padding(task_tab, (0, 0, row_pad, 0))
 
-            if "submit_time" in columns or "times" in columns:
-                submit_time = (
-                    datetime.strptime(dat_i["submit_time"], self._submission_ts_fmt)
-                    .replace(tzinfo=timezone.utc)
-                    .astimezone()
-                )
-                submit_time_full = submit_time.strftime(ts_fmt)
-
-            if "start_time" in columns or "times" in columns:
-                start_time_full = start_time.strftime(ts_fmt) if start_time else "-"
-                start_time_part = start_time_full
-                if start_time and start_time.date() == submit_time.date():
-                    start_time_part = start_time.strftime(ts_fmt_part)
-
-            if "end_time" in columns or "times" in columns:
-                end_time_full = end_time.strftime(ts_fmt) if end_time else "-"
-                end_time_part = end_time_full
-                if end_time and end_time.date() == start_time.date():
-                    end_time_part = end_time.strftime(ts_fmt_part)
-
-            if "submit_time" in columns:
-                all_cells["submit_time"] = Padding(
-                    Text(submit_time_full, style=style), (0, 0, row_pad, 0)
-                )
+                if "actions_compact" in columns:
+                    if not no_access:
+                        EAR_stat_count = Counter(
+                            ear.status
+                            for elements in dat_i["submission"].EARs_by_elements.values()
+                            for EARs in elements.values()
+                            for ear in EARs
+                        )
+                        all_cells["actions_compact"] = " | ".join(
+                            f"[{k.colour}]{k.symbol}[/{k.colour}]:{v}"  # type: ignore
+                            for k, v in EAR_stat_count.items()
+                        )
+                    else:
+                        all_cells["actions_compact"] = ""
 
-            if "start_time" in columns:
-                all_cells["start_time"] = Padding(
-                    Text(start_time_part, style=style), (0, 0, row_pad, 0)
-                )
+                if "submit_time" in columns or "times" in columns:
+                    submit_time = parse_timestamp(
+                        dat_i["submit_time"], self._submission_ts_fmt
+                    )
+                    submit_time_full = submit_time.strftime(ts_fmt)
+
+                if "start_time" in columns or "times" in columns:
+                    start_time_full = start_time.strftime(ts_fmt) if start_time else "-"
+                    start_time_part = start_time_full
+                    if start_time and start_time.date() == submit_time.date():
+                        start_time_part = start_time.strftime(ts_fmt_part)
+
+                if "end_time" in columns or "times" in columns:
+                    end_time_full = end_time.strftime(ts_fmt) if end_time else "-"
+                    end_time_part = end_time_full
+                    if end_time and start_time and end_time.date() == start_time.date():
+                        end_time_part = end_time.strftime(ts_fmt_part)
+
+                if "submit_time" in columns:
+                    all_cells["submit_time"] = Padding(
+                        Text(submit_time_full, style=style), (0, 0, row_pad, 0)
+                    )
 
-            if "end_time" in columns:
-                all_cells["end_time"] = Padding(
-                    Text(end_time_part, style=style), (0, 0, row_pad, 0)
-                )
+                if "start_time" in columns:
+                    all_cells["start_time"] = Padding(
+                        Text(start_time_part, style=style), (0, 0, row_pad, 0)
+                    )
 
-            if "times" in columns:
-                # submit/start/end on separate lines:
-                times_tab = Table(box=None, show_header=False)
-                times_tab.add_column()
-                times_tab.add_column(justify="right")
+                if "end_time" in columns:
+                    all_cells["end_time"] = Padding(
+                        Text(end_time_part, style=style), (0, 0, row_pad, 0)
+                    )
 
-                times_tab.add_row(
-                    Text("sb.", style=style_it), Text(submit_time_full, style=style)
-                )
+                if "times" in columns:
+                    # submit/start/end on separate lines:
+                    times_tab = Table(box=None, show_header=False)
+                    times_tab.add_column()
+                    times_tab.add_column(justify="right")
 
-                if start_time:
-                    times_tab.add_row(
-                        Text("st.", style=style_it), Text(start_time_part, style=style)
-                    )
-                if end_time:
                     times_tab.add_row(
-                        Text("en.", style=style_it), Text(end_time_part, style=style)
+                        Text("sb.", style=style_it), Text(submit_time_full, style=style)
                     )
 
-                all_cells["times"] = Padding(times_tab, (0, 0, row_pad, 0))
+                    if start_time:
+                        times_tab.add_row(
+                            Text("st.", style=style_it),
+                            Text(start_time_part, style=style),
+                        )
+                    if end_time:
+                        times_tab.add_row(
+                            Text("en.", style=style_it), Text(end_time_part, style=style)
+                        )
 
-            table.add_row(*[all_cells[i] for i in columns])
+                    all_cells["times"] = Padding(times_tab, (0, 0, row_pad, 0))
+
+                table.add_row(*(all_cells[col_name] for col_name in columns))
 
-        status.stop()
         if table.row_count:
             console.print(table)
 
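Editor's note: the rewrite above swaps a manual `status.start()`/`status.stop()` pair, guarded by `try`/`except`, for `with console.status(...)`: rich's `Status` is a context manager, so the spinner is stopped on normal return, on an exception, and on Ctrl-C alike. The same hunk also replaces the hand-rolled `defaultdict(int)` tally with `collections.Counter` fed by one generator expression. Minimal standalone use of the context-manager form:

```python
import time

from rich.console import Console

console = Console()
with console.status("Retrieving data...") as status:
    time.sleep(1)                   # stand-in for the slow data retrieval
    status.update("Formatting...")  # reuse the same spinner for the next phase
    time.sleep(1)
# the spinner is stopped here even if the body raised
```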
@@ -2289,18 +3662,12 @@ class BaseApp(metaclass=Singleton):
         except FileNotFoundError:
             known_subs = []
 
-        path = None
-        for i in known_subs:
-            if i["local_id"] == local_ID:
-                path = Path(i["path"])
-                break
-        if not path:
-            raise ValueError(f"Specified local ID is not valid: {local_ID}.")
-
-        return path
+        if any((witness := sub)["local_id"] == local_ID for sub in known_subs):
+            return Path(witness["path"])
+        raise ValueError(f"Specified local ID is not valid: {local_ID}.")
 
     def _resolve_workflow_reference(
-        self, workflow_ref, ref_type: Union[str, None]
+        self, workflow_ref: str, ref_type: str | None
     ) -> Path:
         path = None
         if ref_type == "path":
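Editor's note: the lookup loop above becomes an `any()` over a generator with a walrus assignment. Because `any()` short-circuits on the first truthy result, `witness` still holds the matching element when `any()` returns `True` (assignment expressions inside a generator bind in the enclosing scope), and the variable is only read under that guard. Self-contained illustration:

```python
from pathlib import Path

known_subs = [{"local_id": 1, "path": "/a"}, {"local_id": 2, "path": "/b"}]
local_ID = 2

if any((witness := sub)["local_id"] == local_ID for sub in known_subs):
    print(Path(witness["path"]))  # /b -- the first matching item
else:
    raise ValueError(f"Specified local ID is not valid: {local_ID}.")
```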
@@ -2343,24 +3710,27 @@ class BaseApp(metaclass=Singleton):
             )
         return path.resolve()
 
-    def _cancel(self, workflow_ref: Union[int, str, PathLike], ref_is_path=None):
-        """Cancel the execution of a workflow submission.
+    def _cancel(
+        self, workflow_ref: int | str | Path, ref_is_path: str | None = None
+    ) -> None:
+        """
+        Cancel the execution of a workflow submission.
 
         Parameters
         ----------
         ref_is_path
             One of "id", "path" or "assume-id" (the default)
         """
-        path = self._resolve_workflow_reference(workflow_ref, ref_is_path)
+        path = self._resolve_workflow_reference(str(workflow_ref), ref_is_path)
         self.Workflow(path).cancel()
 
     def configure_env(
         self,
-        name,
-        setup=None,
-        executables=None,
-        use_current_env=False,
-        env_source_file=None,
+        name: str,
+        setup: list[str] | None = None,
+        executables: list[_Executable] | None = None,
+        use_current_env: bool = False,
+        env_source_file: Path | None = None,
     ):
         """
         Configure an execution environment.
@@ -2369,10 +3739,10 @@ class BaseApp(metaclass=Singleton):
             setup = []
         if not executables:
             executables = []
-        if not env_source_file:
-            env_source_file = self.config.get("config_directory").joinpath(
-                "configured_envs.yaml"
-            )
+        env_source = env_source_file or self.config.get("config_directory").joinpath(
+            "configured_envs.yaml"
+        )
+        assert isinstance(env_source, Path)
         if use_current_env:
             if self.run_time_info.is_conda_venv:
                 # use the currently activated conda environment for the new app environment:
@@ -2399,35 +3769,36 @@ class BaseApp(metaclass=Singleton):
             ]
 
         new_env = self.Environment(name=name, setup=setup, executables=executables)
-        new_env_dat = new_env.to_json_like(exclude="_hash_value")[0]
-        if env_source_file.exists():
-            existing_env_dat = read_YAML_file(env_source_file, typ="rt")
-            if name in [i["name"] for i in existing_env_dat]:
+        new_env_dat = new_env.to_json_like(exclude={"_hash_value"})[0]
+        if env_source.exists():
+            existing_env_dat: list[dict] = read_YAML_file(env_source, typ="rt")
+            if any(name == i["name"] for i in existing_env_dat):
                 # TODO: this doesn't check all app envs, just those added with this method
                 raise ValueError(f"Environment {name!r} already exists.")
 
-            all_env_dat = existing_env_dat + [new_env_dat]
+            all_env_dat = [*existing_env_dat, new_env_dat]
 
             # write a new temporary config file
-            tmp_file = env_source_file.with_suffix(env_source_file.suffix + ".tmp")
+            tmp_file = env_source.with_suffix(env_source.suffix + ".tmp")
             self.logger.debug(f"Creating temporary env source file: {tmp_file!r}.")
             write_YAML_file(all_env_dat, tmp_file, typ="rt")
 
             # atomic rename, overwriting original:
             self.logger.debug("Replacing original env source file with temporary file.")
-            os.replace(src=tmp_file, dst=env_source_file)
+            os.replace(src=tmp_file, dst=env_source)
 
         else:
             all_env_dat = [new_env_dat]
-            write_YAML_file(all_env_dat, env_source_file, typ="rt")
+            write_YAML_file(all_env_dat, env_source, typ="rt")
 
         cur_env_source_files = self.config.get("environment_sources")
-        if env_source_file not in cur_env_source_files:
-            self.config.append("environment_sources", str(env_source_file))
+        if env_source not in cur_env_source_files:
+            self.config.append("environment_sources", str(env_source))
             self.config.save()
 
-    def get_demo_data_files_manifest(self) -> Dict[str, Union[None, str]]:
-        """Get a dict whose keys are example data file names and whose values are the
+    def get_demo_data_files_manifest(self) -> dict[str, Any]:
+        """
+        Get a dict whose keys are example data file names and whose values are the
         source files if the source file required unzipping or `None` otherwise.
 
         If the config item `demo_data_manifest_file` is set, this is used as the manifest
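Editor's note: `configure_env` keeps its write-then-rename discipline, now against the renamed `env_source` variable: data is written in full to a sibling `.tmp` file and `os.replace` swaps it in. That rename is atomic on both POSIX and Windows when source and destination share a filesystem, so readers never observe a half-written file. The pattern in isolation (plain text standing in for the app's YAML helpers):

```python
import os
from pathlib import Path


def atomic_write_text(target: Path, data: str) -> None:
    tmp = target.with_suffix(target.suffix + ".tmp")
    tmp.write_text(data)       # write the full payload to a sibling temp file
    os.replace(tmp, target)    # atomically rename over the original


atomic_write_text(Path("configured_envs.yaml"), "envs: []\n")
```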
@@ -2448,26 +3819,26 @@ class BaseApp(metaclass=Singleton):
                 logger=self.logger,
             )
             with fs.open(url_path) as fh:
-                manifest = json.load(fh)
+                return json.load(fh)
         else:
             self.logger.debug(
                 f"loading example data files manifest from the app attribute "
                 f"`demo_data_manifest_dir`: "
                 f"{self.demo_data_manifest_dir!r}."
             )
-            package = self.demo_data_manifest_dir
-            resource = "demo_data_manifest.json"
-            fh = resources.files(package).joinpath(resource).open("rt")
-            manifest = json.load(fh)
-            fh.close()
-        return manifest
-
-    def list_demo_data_files(self) -> Tuple[str]:
+            if (package := self.demo_data_manifest_dir) is None:
+                self.logger.warning("no demo data dir defined")
+                return {}
+            with open_text_resource(package, "demo_data_manifest.json") as fh:
+                return json.load(fh)
+
+    def list_demo_data_files(self) -> tuple[str, ...]:
         """List available example data files."""
-        return tuple(self.get_demo_data_files_manifest().keys())
+        return tuple(self.get_demo_data_files_manifest())
 
-    def _get_demo_data_file_source_path(self, file_name) -> Tuple[Path, bool, bool]:
-        """Get the full path to an example data file on the local file system, whether
+    def _get_demo_data_file_source_path(self, file_name: str) -> tuple[Path, bool, bool]:
+        """
+        Get the full path to an example data file on the local file system, whether
         the file must be unpacked, and whether the file should be deleted.
 
         If `config.demo_data_dir` is set, this directory will be used as the example data
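Editor's note: the manual `open("rt")`/`close()` pair on a package resource becomes a single `with open_text_resource(...)` block. As with `get_file_context`, the helper's body is not part of this diff; from the code it replaces, it is presumably a context manager of this shape (a sketch, assuming it wraps `importlib.resources`):

```python
from contextlib import contextmanager
from importlib import resources
from typing import IO, Iterator


@contextmanager
def open_text_resource(package: str, name: str) -> Iterator[IO[str]]:
    # Open a packaged text resource and guarantee it is closed on exit,
    # replacing the explicit open("rt")/close() pair it supersedes.
    with resources.files(package).joinpath(name).open("r") as fh:
        yield fh
```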
@@ -2484,10 +3855,9 @@ class BaseApp(metaclass=Singleton):
         value of `config.demo_data_dir` (without saving to the persistent config file),
         and then retrieve the example data file path as above. The default value is set to
         the GitHub repo of the app using the current tag/version.
-
         """
 
-        def _retrieve_source_path_from_config(src_fn):
+        def _retrieve_source_path_from_config(src_fn: str):
             fs, url_path = rate_limit_safe_url_to_fs(
                 self,
                 self.config.demo_data_dir,
@@ -2517,7 +3887,7 @@ class BaseApp(metaclass=Singleton):
         if file_name not in manifest:
             raise ValueError(f"No such example data file {file_name!r}.")
 
-        spec = manifest[file_name]
+        spec: dict[str, str] = manifest[file_name]
         requires_unpack = bool(spec)
         src_fn = spec["in_zip"] if requires_unpack else file_name
 
@@ -2535,49 +3905,37 @@ class BaseApp(metaclass=Singleton):
                 f"source directory: {self.demo_data_dir!r}."
             )
             # `config.demo_data_dir` not set, so try to use `app.demo_data_dir`:
-            package = self.demo_data_dir
-            resource_exists = True
-            delete = False
-            try:
-                ctx_man = resources.as_file(resources.files(package).joinpath(src_fn))
-                # raises ModuleNotFoundError
-            except ModuleNotFoundError:
-                resource_exists = False
 
-            if resource_exists:
+            if package := self.demo_data_dir:
                 try:
-                    with ctx_man as path:
+                    with get_file_context(package, src_fn) as path:
                         out = path
+                        delete = False
                 except (ModuleNotFoundError, FileNotFoundError):
-                    # frozen app
-                    resource_exists = False
-
-            if not resource_exists:
-                # example data not included (e.g. frozen, or installed via PyPI/conda), so
-                # set a default value for `config.demo_data_dir` (point to the package
-                # GitHub repo for the current tag):
-                path = "/".join(package.split("."))
-                url = self._get_github_url(sha=f"v{self.version}", path=path)
-                self.logger.info(
-                    f"path {path!r} does not exist as a package resource (example data "
-                    f"was probably not included in the app), so non-persistently setting "
-                    f"the config item `demo_data_dir` to the app's GitHub repo path: "
-                    f"{url!r}."
-                )
-                self.config.demo_data_dir = url
-                out, delete = _retrieve_source_path_from_config(src_fn)
+                    # example data not included (e.g. frozen, or installed via
+                    # PyPI/conda), so set a default value for `config.demo_data_dir`
+                    # (point to the package GitHub repo for the current tag):
+                    path_ = package.replace(".", "/")
+                    url = self._get_github_url(sha=f"v{self.version}", path=path_)
+                    self.logger.info(
+                        f"path {path_!r} does not exist as a package resource (example data "
+                        f"was probably not included in the app), so non-persistently setting "
+                        f"the config item `demo_data_dir` to the app's GitHub repo path: "
+                        f"{url!r}."
+                    )
+                    self.config.demo_data_dir = url
+                    out, delete = _retrieve_source_path_from_config(src_fn)
 
         return out, requires_unpack, delete
 
-    def get_demo_data_file_path(self, file_name) -> Path:
-        """Get the full path to an example data file in the app cache directory.
+    def get_demo_data_file_path(self, file_name: str) -> Path:
+        """
+        Get the full path to an example data file in the app cache directory.
 
         If the file does not already exist in the app cache directory, it will be added
         (and unzipped if required). The file may first be downloaded from a remote file
         system such as GitHub (see `_get_demo_data_file_source_path` for details).
-
         """
-
         # check if file exists in cache dir already
         cache_file_path = self.demo_data_cache_dir.joinpath(file_name)
         if cache_file_path.exists():
@@ -2617,22 +3975,26 @@ class BaseApp(metaclass=Singleton):
             src.unlink()
         return cache_file_path
 
-    def cache_demo_data_file(self, file_name) -> Path:
+    def cache_demo_data_file(self, file_name: str) -> Path:
         """
         Get the name of a cached demo data file.
         """
         return self.get_demo_data_file_path(file_name)
 
-    def cache_all_demo_data_files(self) -> List[Path]:
+    def cache_all_demo_data_files(self) -> list[Path]:
         """
         Get the names of all cached demo data files.
         """
-        return [self.get_demo_data_file_path(i) for i in self.list_demo_data_files()]
+        return [
+            self.get_demo_data_file_path(filename)
+            for filename in self.list_demo_data_files()
+        ]
 
     def copy_demo_data(
-        self, file_name: str, dst: Optional[PathLike] = None, doc: bool = True
+        self, file_name: str, dst: PathLike | None = None, doc: bool = True
     ) -> str:
-        """Copy a builtin demo data file to the specified location.
+        """
+        Copy a builtin demo data file to the specified location.
 
         Parameters
         ----------
@@ -2642,7 +4004,6 @@ class BaseApp(metaclass=Singleton):
             Directory or full file path to copy the demo data file to. If not specified,
             the current working directory will be used.
         """
-
         dst = dst or Path(".")
         src = self.get_demo_data_file_path(file_name)
         shutil.copy2(src, dst)  # copies metadata, and `dst` can be a dir
@@ -2650,8 +4011,10 @@ class BaseApp(metaclass=Singleton):
         return src.name
 
     def _get_github_url(self, sha: str, path: str):
-        """Return a fsspec URL for retrieving a file or directory on the app's GitHub
-        repository."""
+        """
+        Return a fsspec URL for retrieving a file or directory on the app's GitHub
+        repository.
+        """
         return f"github://{self.gh_org}:{self.gh_repo}@{sha}/{path}"
 