hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a190__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package contents as they appear in the public registry.
- hpcflow/__pyinstaller/hook-hpcflow.py +8 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/sdk/__init__.py +21 -15
- hpcflow/sdk/app.py +2133 -770
- hpcflow/sdk/cli.py +281 -250
- hpcflow/sdk/cli_common.py +6 -2
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +77 -42
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +578 -311
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +112 -85
- hpcflow/sdk/config/types.py +145 -0
- hpcflow/sdk/core/actions.py +1054 -994
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +81 -63
- hpcflow/sdk/core/command_files.py +275 -185
- hpcflow/sdk/core/commands.py +111 -107
- hpcflow/sdk/core/element.py +724 -503
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +398 -51
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +380 -334
- hpcflow/sdk/core/loop_cache.py +160 -43
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +728 -600
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +33 -22
- hpcflow/sdk/core/task.py +1546 -1325
- hpcflow/sdk/core/task_schema.py +240 -196
- hpcflow/sdk/core/test_utils.py +126 -88
- hpcflow/sdk/core/types.py +387 -0
- hpcflow/sdk/core/utils.py +410 -305
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +1192 -1028
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/demo/cli.py +46 -33
- hpcflow/sdk/helper/cli.py +18 -16
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +83 -59
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +988 -586
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +408 -153
- hpcflow/sdk/persistence/pending.py +158 -123
- hpcflow/sdk/persistence/store_resource.py +37 -22
- hpcflow/sdk/persistence/types.py +307 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +477 -420
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +444 -404
- hpcflow/sdk/submission/schedulers/__init__.py +133 -40
- hpcflow/sdk/submission/schedulers/direct.py +97 -71
- hpcflow/sdk/submission/schedulers/sge.py +132 -126
- hpcflow/sdk/submission/schedulers/slurm.py +263 -268
- hpcflow/sdk/submission/schedulers/utils.py +7 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +102 -29
- hpcflow/sdk/submission/shells/bash.py +72 -55
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +37 -29
- hpcflow/sdk/submission/submission.py +203 -257
- hpcflow/sdk/submission/types.py +143 -0
- hpcflow/sdk/typing.py +163 -12
- hpcflow/tests/conftest.py +8 -6
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_main_scripts.py +60 -30
- hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -4
- hpcflow/tests/unit/test_action.py +86 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +13 -6
- hpcflow/tests/unit/test_cli.py +1 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +20 -15
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +3 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +65 -58
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +16 -7
- hpcflow/tests/unit/test_persistence.py +48 -35
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +8 -3
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +3 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +39 -19
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/workflows/test_jobscript.py +2 -1
- hpcflow/tests/workflows/test_workflows.py +18 -13
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/METADATA +2 -1
- hpcflow_new2-0.2.0a190.dist-info/RECORD +165 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a190.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/zarr_io.py
CHANGED
@@ -2,82 +2,71 @@
 Utilities for working with Zarr.
 """
 
-from …
+from __future__ import annotations
+from typing import Any
+from typing_extensions import Self
 
-import zarr
+import zarr  # type: ignore
 import numpy as np
 
 from hpcflow.sdk.core.utils import get_in_container, get_relative_path, set_in_container
 
 
-…
+#: The basic types that Zarr can handle directly with no special action.
+PRIMITIVES: tuple[type, ...] = (
     int,
     float,
     str,
     type(None),
 )
 
-
-…
-    path = path or []
-    encoded = encoded or []
-
-    if len(path) > 50:
-        raise RuntimeError("I'm in too deep!")
-
-    if isinstance(obj, ZarrEncodable):
-        obj = obj.to_dict()
-        out, encoded = _zarr_encode(
-            obj, zarr_group=zarr_group, path=path, encoded=encoded
-        )
-
-    elif isinstance(obj, (list, tuple, set)):
-        out = []
-        for idx, item in enumerate(obj):
-            item, encoded = _zarr_encode(item, zarr_group, path + [idx], encoded)
-            out.append(item)
-        if isinstance(obj, tuple):
-            out = tuple(out)
-        elif isinstance(obj, set):
-            out = set(out)
-
-    elif isinstance(obj, dict):
-        out = {}
-        for dct_key, dct_val in obj.items():
-            dct_val, encoded = _zarr_encode(
-                dct_val, zarr_group, path + [dct_key], encoded
-            )
-            out.update({dct_key: dct_val})
-
-    elif isinstance(obj, PRIMITIVES):
-        out = obj
-
-    elif isinstance(obj, np.ndarray):
-        names = [int(i) for i in zarr_group.keys()]
-        if not names:
-            new_name = "0"
-        else:
-            new_name = str(max(names) + 1)
-
-        zarr_group.create_dataset(name=new_name, data=obj)
-        encoded.append(
-            {
-                "path": path,
-                "dataset": new_name,
-            }
-        )
-        out = None
-
-    return out, encoded
+#: Maximum nesting depth for encoding.
+MAX_DEPTH = 50
 
 
-def zarr_encode(data, zarr_group, is_pending_add, is_set):
+def zarr_encode(data, zarr_group: zarr.Group, is_pending_add: bool, is_set: bool):
     """
     Encode data into a zarr group.
     """
-
+
+    encoded: list[dict] = []
+
+    def encode(obj: Any, path: list) -> Any:
+        if len(path) > MAX_DEPTH:
+            raise RuntimeError("I'm in too deep!")
+
+        if isinstance(obj, ZarrEncodable):
+            return encode(obj.to_dict(), path)
+        elif isinstance(obj, (list, tuple, set)):
+            out = (encode(item, [*path, idx]) for idx, item in enumerate(obj))
+            if isinstance(obj, tuple):
+                return tuple(out)
+            elif isinstance(obj, set):
+                return set(out)
+            else:
+                return list(out)
+        elif isinstance(obj, dict):
+            return {
+                dct_key: encode(dct_val, [*path, dct_key])
+                for dct_key, dct_val in obj.items()
+            }
+        elif isinstance(obj, PRIMITIVES):
+            return obj
+        elif isinstance(obj, np.ndarray):
+            new_name = str(max((int(i) + 1 for i in zarr_group.keys()), default=0))
+            zarr_group.create_dataset(name=new_name, data=obj)
+            encoded.append(
+                {
+                    "path": path,
+                    "dataset": new_name,
+                }
+            )
+            return None
+        else:
+            raise ValueError(f"unserializable type: {type(obj)}")
+
+    zarr_group.attrs["data"] = encode(data, [])
     zarr_group.attrs["encoded"] = encoded
-    zarr_group.attrs["data"] = data
     zarr_group.attrs["is_set"] = is_set
     if is_pending_add:
         zarr_group.attrs["is_pending_add"] = is_pending_add
@@ -88,100 +77,68 @@ def zarr_encode(data, zarr_group, is_pending_add, is_set):
 
 def _zarr_encode_NEW(
     obj: Any,
-    base_arr: zarr.Array,
     root_group: zarr.Group,
     arr_path: str,
-    …
-    arr_lookup=None,
-):
+) -> tuple[Any, list[list]]:
     """
     Save arbitrarily-nested Python-primitive, `ZarrEncodable` and numpy array objects into
     Zarr.
 
     Parameters
     ----------
-    obj
-        …
-        encoder.
-    root_group
+    obj:
+        Object to encode.
+    root_group:
         Parent Zarr group into which new Zarr arrays will be added (at `arr_path`).
-    arr_path
+    arr_path:
         Path relative to `root_group` into which new Zarr arrays will be added.
 
     Returns
     -------
-    …
+    data
+        The encoded data.
+    arr_lookup
+        How to look up where to rebuild Numpy arrays.
     """
 
-    …
-            )
-
-            data = tuple(data)
-        elif isinstance(obj, set):
-            data = set(data)
-
-    elif isinstance(obj, dict):
-        data = {}
-        for dct_key, dct_val in obj.items():
-            dct_val, arr_lookup = _zarr_encode_NEW(
-                obj=dct_val,
-                base_arr=base_arr,
-                root_group=root_group,
-                arr_path=arr_path,
-                path=path + [dct_key],
-            )
-            data[dct_key] = dct_val
-
-    elif isinstance(obj, PRIMITIVES):
-        data = obj
-
-    elif isinstance(obj, np.ndarray):
-        # Might need to generate new group:
-        param_arr_group = root_group.require_group(arr_path)
-        names = [int(i) for i in param_arr_group.keys()]
-        if not names:
-            new_idx = 0
+    arr_lookup: list[list] = []
+
+    def encode(obj: Any, path: list) -> Any:
+        if len(path) > MAX_DEPTH:
+            raise RuntimeError("I'm in too deep!")
+
+        if isinstance(obj, ZarrEncodable):
+            return encode(obj.to_dict(), path)
+        elif isinstance(obj, (list, tuple, set)):
+            items = (encode(item, [*path, idx]) for idx, item in enumerate(obj))
+            if isinstance(obj, tuple):
+                return tuple(items)
+            elif isinstance(obj, set):
+                return set(items)
+            else:
+                return list(items)
+        elif isinstance(obj, dict):
+            return {key: encode(val, [*path, key]) for key, val in obj.items()}
+        elif isinstance(obj, PRIMITIVES):
+            return obj
+        elif isinstance(obj, np.ndarray):
+            # Might need to generate new group:
+            param_arr_group = root_group.require_group(arr_path)
+            new_idx = max((int(i) + 1 for i in param_arr_group.keys()), default=0)
+            param_arr_group.create_dataset(name=f"arr_{new_idx}", data=obj)
+            arr_lookup.append([path, new_idx])
+            return None
         else:
-            …
-        param_arr_group.create_dataset(name=f"arr_{new_idx}", data=obj)
-        arr_lookup.append([path, new_idx])
-        data = None
+            raise ValueError(f"unserializable type: {type(obj)}")
 
-    return …
+    return encode(obj, []), arr_lookup
 
 
 def zarr_decode(
-    param_data: …
+    param_data: None | dict,
     arr_group: zarr.Group,
-    path=None,
-    dataset_copy=False,
+    path: list | None = None,
+    dataset_copy: bool = False,
 ):
     """
     Decode data from a zarr group.
@@ -220,7 +177,7 @@ class ZarrEncodable:
 
     _typ = None
 
-    def to_dict(self):
+    def to_dict(self) -> dict[str, Any]:
         """
         Convert this object to a dict.
         """
@@ -228,18 +185,22 @@ class ZarrEncodable:
             return dict(self.__dict__)
         elif hasattr(self, "__slots__"):
             return {k: getattr(self, k) for k in self.__slots__}
+        else:
+            # Should be unreachable
+            return {}
 
-    def to_zarr(self, zarr_group):
+    def to_zarr(self, zarr_group: zarr.Group):
         """
         Save this object into the given zarr group.
         """
-        …
-        zarr_encode(data, zarr_group)
+        zarr_encode(self.to_dict(), zarr_group, is_pending_add=False, is_set=False)
 
     @classmethod
-    def from_zarr(cls, zarr_group, dataset_copy=False):
+    def from_zarr(cls, zarr_group: zarr.Group, dataset_copy: bool = False) -> Self:
        """
        Read an instance of this class from the given zarr group.
        """
-        …
+        # FIXME: Do the read of the data!
+        param_data = None
+        data = zarr_decode(param_data, zarr_group, dataset_copy=dataset_copy)
        return cls(**data)
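
The zarr_io.py refactor above folds the old recursive module-level encoder into a nested `encode` closure guarded by `MAX_DEPTH`, and makes `zarr_encode` responsible for writing the attributes itself. The sketch below is ours, not taken from the package: it assumes this wheel plus a zarr 2.x-style API (with `create_dataset`) are installed, and the example data is invented.

    # Illustrative only: exercises the refactored zarr_encode shown in the diff above.
    import numpy as np
    import zarr  # assumes a zarr 2.x API (create_dataset), as used by the module

    from hpcflow.sdk.core.zarr_io import zarr_encode

    group = zarr.group()  # in-memory group
    data = {"names": ["a", "b"], "coords": np.arange(4)}
    zarr_encode(data, group, is_pending_add=False, is_set=True)

    # Primitives end up in the group's attributes; each ndarray becomes a
    # numbered dataset, with its location recorded under the "encoded" key.
    print(dict(group.attrs))   # expected: data/encoded/is_set attributes
    print(list(group.keys()))  # expected: one dataset named "0"
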
hpcflow/sdk/demo/cli.py
CHANGED
@@ -1,5 +1,10 @@
+"""
+CLI components for demonstration code.
+"""
+from __future__ import annotations
 from pathlib import Path
 from random import randint
+from typing import TYPE_CHECKING
 import click
 
 from hpcflow.sdk.core.utils import get_process_stamp
@@ -22,8 +27,13 @@ from hpcflow.sdk.cli_common import (
     make_status_opt,
 )
 
+if TYPE_CHECKING:
+    from collections.abc import Iterable
+    from typing import Literal
+    from ..app import BaseApp
 
-def get_demo_software_CLI(app):
+
+def get_demo_software_CLI(app: BaseApp):
     """Generate the CLI to provide an example software."""
 
     @click.group()
@@ -35,7 +45,9 @@ def get_demo_software_CLI(app):
     @click.option("--infile2", "-i2", type=click.Path(exists=True), required=True)
     @click.option("--value", "-v")
     @click.option("--out", "-o")
-    def demo_do_something(…
+    def demo_do_something(
+        infile1: Path, infile2: Path, value: str | None = None, out: str | None = None
+    ):
         click.echo("trying to do something")
 
         with Path(infile1).open("r") as handle:
@@ -47,7 +59,7 @@ def get_demo_software_CLI(app):
             out = "outfile.txt"
         out_path = Path(out)
         with out_path.open("a") as handle:
-            handle.write("{}\n".format(randint(0, 1e6)))
+            handle.write("{}\n".format(randint(0, int(1e6))))
             handle.write(
                 "{} Generated by `doSomething --infile1 {} --infile2 {}`.\n".format(
                     get_process_stamp(), infile1, infile2
@@ -69,10 +81,10 @@ def get_demo_software_CLI(app):
     return demo_software
 
 
-def get_demo_workflow_CLI(app):
+def get_demo_workflow_CLI(app: BaseApp):
     """Generate the CLI to provide access to builtin demo workflows."""
 
-    def list_callback(ctx, param, value):
+    def list_callback(ctx: click.Context, param, value: bool):
         if not value or ctx.resilient_parsing:
             return
         # TODO: format with Rich with a one-line description
@@ -105,16 +117,16 @@ def get_demo_workflow_CLI(app):
     @variables_option
     @make_status_opt
     def make_demo_workflow(
-        workflow_name,
-        format,
-        path,
-        name,
-        overwrite,
-        store,
-        ts_fmt=None,
-        ts_name_fmt=None,
-        variables=…
-        status=True,
+        workflow_name: str,
+        format: Literal["json", "yaml"] | None,
+        path: Path | None,
+        name: str | None,
+        overwrite: bool,
+        store: str,
+        ts_fmt: str | None = None,
+        ts_name_fmt: str | None = None,
+        variables: Iterable[tuple[str, str]] = (),
+        status: bool = True,
     ):
         wk = app.make_demo_workflow(
             workflow_name=workflow_name,
@@ -148,22 +160,22 @@ def get_demo_workflow_CLI(app):
     @cancel_opt
     @submit_status_opt
     def make_and_submit_demo_workflow(
-        workflow_name,
-        format,
-        path,
-        name,
-        overwrite,
-        store,
-        ts_fmt=None,
-        ts_name_fmt=None,
-        variables=…
-        js_parallelism=None,
-        wait=False,
-        add_to_known=True,
-        print_idx=False,
-        tasks=None,
-        cancel=False,
-        status=True,
+        workflow_name: str,
+        format: Literal["json", "yaml"] | None,
+        path: Path | None,
+        name: str | None,
+        overwrite: bool,
+        store: str,
+        ts_fmt: str | None = None,
+        ts_name_fmt: str | None = None,
+        variables: Iterable[tuple[str, str]] = (),
+        js_parallelism: bool | None = None,
+        wait: bool = False,
+        add_to_known: bool = True,
+        print_idx: bool = False,
+        tasks: list[int] | None = None,
+        cancel: bool = False,
+        status: bool = True,
     ):
         out = app.make_and_submit_demo_workflow(
             workflow_name=workflow_name,
@@ -184,20 +196,21 @@ def get_demo_workflow_CLI(app):
             status=status,
         )
         if print_idx:
+            assert isinstance(out, tuple)
             click.echo(out[1])
 
     @demo_workflow.command("copy")
     @click.argument("workflow_name")
     @click.argument("destination")
     @click.option("--doc/--no-doc", default=True)
-    def copy_demo_workflow(workflow_name, destination, doc):
+    def copy_demo_workflow(workflow_name: str, destination: str, doc: bool):
         app.copy_demo_workflow(name=workflow_name, dst=destination, doc=doc)
 
     @demo_workflow.command("show")
     @click.argument("workflow_name")
     @click.option("--syntax/--no-syntax", default=True)
     @click.option("--doc/--no-doc", default=True)
-    def show_demo_workflow(workflow_name, syntax, doc):
+    def show_demo_workflow(workflow_name: str, syntax: bool, doc: bool):
         app.show_demo_workflow(workflow_name, syntax=syntax, doc=doc)
 
     return demo_workflow
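
A pattern repeated throughout this release, visible in the CLI diff above, is adding `from __future__ import annotations` and moving imports that are only needed for annotations under a `TYPE_CHECKING` guard. The following small sketch of that pattern is self-contained and illustrative; the function and names are ours, not part of hpcflow.

    from __future__ import annotations  # annotations become strings; new syntax is safe

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Only imported by static type checkers, never at runtime.
        from collections.abc import Iterable


    def collect(names: Iterable[str] | None = None) -> list[str]:
        # `Iterable[str] | None` needs no runtime import of Iterable because the
        # annotation is never evaluated at runtime.
        return sorted(names or ())


    print(collect(("beta", "alpha")))  # ['alpha', 'beta']
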
hpcflow/sdk/helper/cli.py
CHANGED
@@ -1,13 +1,12 @@
 """
 Common Click command line options related to the helper.
 """
-
-from …
+from __future__ import annotations
+from typing import TYPE_CHECKING
 
 import click
-import psutil
 
-from .helper import (
+from hpcflow.sdk.helper.helper import (
     DEFAULT_TIMEOUT,
     DEFAULT_TIMEOUT_CHECK,
     DEFAULT_WATCH_INTERVAL,
@@ -22,7 +21,10 @@ from .helper import (
     get_helper_PID,
     get_helper_uptime,
 )
-from …
+from hpcflow.sdk.cli_common import _add_doc_from_help
+
+if TYPE_CHECKING:
+    from ..app import BaseApp
 
 #: Helper option: ``--timeout``
 timeout_option = click.option(
@@ -54,7 +56,7 @@ watch_interval_option = click.option(
 _add_doc_from_help(timeout_option, timeout_check_interval_option, watch_interval_option)
 
 
-def get_helper_CLI(app):
+def get_helper_CLI(app: BaseApp):
     """Generate the CLI to provide some server-like functionality."""
 
     @click.group()
@@ -65,7 +67,7 @@ def get_helper_CLI(app):
     @timeout_option
     @timeout_check_interval_option
     @watch_interval_option
-    def start(timeout, timeout_check_interval, watch_interval):
+    def start(timeout: float, timeout_check_interval: float, watch_interval: float):
         """Start the helper process."""
         start_helper(app, timeout, timeout_check_interval, watch_interval)
 
@@ -78,7 +80,7 @@ def get_helper_CLI(app):
     @timeout_option
     @timeout_check_interval_option
     @watch_interval_option
-    def run(timeout, timeout_check_interval, watch_interval):
+    def run(timeout: float, timeout_check_interval: float, watch_interval: float):
         """Run the helper functionality."""
         run_helper(app, timeout, timeout_check_interval, watch_interval)
 
@@ -86,13 +88,13 @@ def get_helper_CLI(app):
     @timeout_option
     @timeout_check_interval_option
     @watch_interval_option
-    def restart(timeout, timeout_check_interval, watch_interval):
+    def restart(timeout: float, timeout_check_interval: float, watch_interval: float):
         """Restart (or start) the helper process."""
         restart_helper(app, timeout, timeout_check_interval, watch_interval)
 
     @helper.command()
     @click.option("-f", "--file", is_flag=True)
-    def pid(file):
+    def pid(file: bool):
         """Get the process ID of the running helper, if running."""
         pid_info = get_helper_PID(app)
         if pid_info:
@@ -103,32 +105,32 @@ def get_helper_CLI(app):
             click.echo(pid)
 
     @helper.command()
-    def clear():
+    def clear() -> None:
         """Remove the PID file (and kill the helper process if it exists). This should not
         normally be needed."""
         clear_helper(app)
 
     @helper.command()
-    def uptime():
+    def uptime() -> None:
         """Get the uptime of the helper process, if it is running."""
         out = get_helper_uptime(app)
         if out:
             click.echo(out)
 
     @helper.command()
-    def log_path():
+    def log_path() -> None:
         """Get the path to the helper log file (may not exist)."""
         click.echo(get_helper_log_path(app))
 
     @helper.command()
-    def watch_list_path():
+    def watch_list_path() -> None:
         """Get the path to the workflow watch list file (may not exist)."""
         click.echo(get_watcher_file_path(app))
 
     @helper.command()
-    def watch_list():
+    def watch_list() -> None:
         """Get the list of workflows currently being watched."""
-        for wk in get_helper_watch_list(app) or …
+        for wk in get_helper_watch_list(app) or ():
             click.echo(str(wk["path"]))
 
     return helper