hpcflow-new2 0.2.0a179__py3-none-any.whl → 0.2.0a180__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/_version.py +1 -1
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/sdk/__init__.py +4 -1
- hpcflow/sdk/app.py +160 -15
- hpcflow/sdk/cli.py +14 -0
- hpcflow/sdk/cli_common.py +83 -0
- hpcflow/sdk/config/__init__.py +4 -0
- hpcflow/sdk/config/callbacks.py +25 -2
- hpcflow/sdk/config/cli.py +4 -1
- hpcflow/sdk/config/config.py +188 -14
- hpcflow/sdk/config/config_file.py +91 -3
- hpcflow/sdk/config/errors.py +33 -0
- hpcflow/sdk/core/__init__.py +2 -0
- hpcflow/sdk/core/actions.py +492 -35
- hpcflow/sdk/core/cache.py +22 -0
- hpcflow/sdk/core/command_files.py +221 -5
- hpcflow/sdk/core/commands.py +57 -0
- hpcflow/sdk/core/element.py +407 -8
- hpcflow/sdk/core/environment.py +92 -0
- hpcflow/sdk/core/errors.py +245 -61
- hpcflow/sdk/core/json_like.py +72 -14
- hpcflow/sdk/core/loop.py +122 -21
- hpcflow/sdk/core/loop_cache.py +34 -9
- hpcflow/sdk/core/object_list.py +172 -26
- hpcflow/sdk/core/parallel.py +14 -0
- hpcflow/sdk/core/parameters.py +478 -25
- hpcflow/sdk/core/rule.py +31 -1
- hpcflow/sdk/core/run_dir_files.py +12 -2
- hpcflow/sdk/core/task.py +407 -80
- hpcflow/sdk/core/task_schema.py +70 -9
- hpcflow/sdk/core/test_utils.py +35 -0
- hpcflow/sdk/core/utils.py +101 -4
- hpcflow/sdk/core/validation.py +13 -1
- hpcflow/sdk/core/workflow.py +316 -96
- hpcflow/sdk/core/zarr_io.py +23 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +9 -0
- hpcflow/sdk/helper/helper.py +28 -0
- hpcflow/sdk/helper/watcher.py +33 -0
- hpcflow/sdk/log.py +40 -0
- hpcflow/sdk/persistence/__init__.py +14 -4
- hpcflow/sdk/persistence/base.py +289 -23
- hpcflow/sdk/persistence/json.py +29 -0
- hpcflow/sdk/persistence/pending.py +217 -107
- hpcflow/sdk/persistence/store_resource.py +58 -2
- hpcflow/sdk/persistence/utils.py +8 -0
- hpcflow/sdk/persistence/zarr.py +68 -1
- hpcflow/sdk/runtime.py +52 -10
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/jobscript.py +198 -9
- hpcflow/sdk/submission/jobscript_info.py +13 -0
- hpcflow/sdk/submission/schedulers/__init__.py +60 -0
- hpcflow/sdk/submission/schedulers/direct.py +53 -0
- hpcflow/sdk/submission/schedulers/sge.py +45 -7
- hpcflow/sdk/submission/schedulers/slurm.py +45 -8
- hpcflow/sdk/submission/schedulers/utils.py +4 -0
- hpcflow/sdk/submission/shells/__init__.py +11 -1
- hpcflow/sdk/submission/shells/base.py +32 -1
- hpcflow/sdk/submission/shells/bash.py +36 -1
- hpcflow/sdk/submission/shells/os_version.py +18 -6
- hpcflow/sdk/submission/shells/powershell.py +22 -0
- hpcflow/sdk/submission/submission.py +88 -3
- hpcflow/sdk/typing.py +10 -1
- {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/METADATA +1 -1
- {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/RECORD +70 -70
- {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/task_schema.py
CHANGED
@@ -1,3 +1,7 @@
+"""
+Abstract task, prior to instantiation.
+"""
+
 from contextlib import contextmanager
 import copy
 from dataclasses import dataclass
@@ -22,6 +26,15 @@ from .utils import check_valid_py_identifier
 
 @dataclass
 class TaskObjective(JSONLike):
+    """
+    A thing that a task is attempting to achieve.
+
+    Parameter
+    ---------
+    name: str
+        The name of the objective. A valid Python identifier.
+    """
+
     _child_objects = (
         ChildObjectSpec(
             name="name",
@@ -29,6 +42,7 @@ class TaskObjective(JSONLike):
         ),
     )
 
+    #: The name of the objective. A valid Python identifier.
    name: str
 
    def __post_init__(self):
@@ -41,21 +55,28 @@ class TaskSchema(JSONLike):
 
    Parameters
    ----------
-    objective
+    objective:
        This is a string representing the objective of the task schema.
-    actions
+    actions:
        A list of Action objects whose commands are to be executed by the task.
-    method
+    method:
        An optional string to label the task schema by its method.
-    implementation
+    implementation:
        An optional string to label the task schema by its implementation.
-    inputs
+    inputs:
        A list of SchemaInput objects that define the inputs to the task.
-    outputs
+    outputs:
        A list of SchemaOutput objects that define the outputs of the task.
-
-
-
+    version:
+        The version of this task schema.
+    parameter_class_modules:
+        Where to find implementations of parameter value handlers.
+    web_doc:
+        True if this object should be included in the Sphinx documentation
+        (normally only relevant for built-in task schemas). True by default.
+    environment_presets:
+        Information about default execution environments. Can be overridden in specific
+        cases in the concrete tasks.
    """
 
    _validation_schema = "task_schema_spec_schema.yaml"
@@ -93,14 +114,24 @@ class TaskSchema(JSONLike):
        environment_presets: Optional[Dict[str, Dict[str, Dict[str, Any]]]] = None,
        _hash_value: Optional[str] = None,
    ):
+        #: This is a string representing the objective of the task schema.
        self.objective = objective
+        #: A list of Action objects whose commands are to be executed by the task.
        self.actions = actions or []
+        #: An optional string to label the task schema by its method.
        self.method = method
+        #: An optional string to label the task schema by its implementation.
        self.implementation = implementation
+        #: A list of SchemaInput objects that define the inputs to the task.
        self.inputs = inputs or []
+        #: A list of SchemaOutput objects that define the outputs of the task.
        self.outputs = outputs or []
+        #: Where to find implementations of parameter value handlers.
        self.parameter_class_modules = parameter_class_modules or []
+        #: Whether this object should be included in the Sphinx documentation
+        #: (normally only relevant for built-in task schemas).
        self.web_doc = web_doc
+        #: Information about default execution environments.
        self.environment_presets = environment_presets
        self._hash_value = _hash_value
 
@@ -112,6 +143,7 @@ class TaskSchema(JSONLike):
 
        self._validate()
        self.actions = self._expand_actions()
+        #: The version of this task schema.
        self.version = version
        self._task_template = None  # assigned by parent Task
 
@@ -309,6 +341,10 @@ class TaskSchema(JSONLike):
        return self._show_info()
 
    def get_info_html(self) -> str:
+        """
+        Describe the task schema as an HTML document.
+        """
+
        def _format_parameter_type(param):
            param_typ_fmt = param.typ
            if param.typ in param_types:
@@ -586,6 +622,9 @@ class TaskSchema(JSONLike):
    @classmethod
    @contextmanager
    def ignore_invalid_actions(cls):
+        """
+        A context manager within which invalid actions will be ignored.
+        """
        try:
            cls._validate_actions = False
            yield
@@ -709,6 +748,10 @@ class TaskSchema(JSONLike):
                out.parameter._set_value_class()
 
    def make_persistent(self, workflow: app.Workflow, source: Dict) -> List[int]:
+        """
+        Convert this task schema to persistent form within the context of the given
+        workflow.
+        """
        new_refs = []
        for input_i in self.inputs:
            for lab_info in input_i.labelled_info():
@@ -721,6 +764,9 @@ class TaskSchema(JSONLike):
 
    @property
    def name(self):
+        """
+        The name of this schema.
+        """
        out = (
            f"{self.objective.name}"
            f"{f'_{self.method}' if self.method else ''}"
@@ -730,14 +776,23 @@ class TaskSchema(JSONLike):
 
    @property
    def input_types(self):
+        """
+        The input types to the schema.
+        """
        return tuple(j for i in self.inputs for j in i.all_labelled_types)
 
    @property
    def output_types(self):
+        """
+        The output types from the schema.
+        """
        return tuple(i.typ for i in self.outputs)
 
    @property
    def provides_parameters(self) -> Tuple[Tuple[str, str]]:
+        """
+        The parameters that this schema provides.
+        """
        out = []
        for schema_inp in self.inputs:
            for labelled_info in schema_inp.labelled_info():
@@ -753,6 +808,9 @@ class TaskSchema(JSONLike):
 
    @property
    def task_template(self):
+        """
+        The template that this schema is contained in.
+        """
        return self._task_template
 
    @classmethod
@@ -770,6 +828,9 @@ class TaskSchema(JSONLike):
        return out
 
    def get_key(self):
+        """
+        Get the hashable value that represents this schema.
+        """
        return (str(self.objective), self.method, self.implementation)
 
    def _get_single_label_lookup(self, prefix="") -> Dict[str, str]:
hpcflow/sdk/core/test_utils.py
CHANGED
@@ -1,3 +1,7 @@
+"""
+Utilities for making data to use in testing.
+"""
+
 from dataclasses import dataclass
 from importlib import resources
 from pathlib import Path
@@ -7,6 +11,9 @@ from hpcflow.sdk.core.parameters import ParameterValue
 
 
 def make_schemas(ins_outs, ret_list=False):
+    """
+    Construct a collection of schemas.
+    """
    out = []
    for idx, info in enumerate(ins_outs):
        if len(info) == 2:
@@ -57,6 +64,9 @@ def make_schemas(ins_outs, ret_list=False):
 
 
 def make_parameters(num):
+    """
+    Construct a sequence of parameters.
+    """
    return [hf.Parameter(f"p{i + 1}") for i in range(num)]
 
 
@@ -64,6 +74,9 @@ def make_actions(
    ins_outs: List[Tuple[Union[Tuple, str], str]],
    env="env1",
 ) -> List[hf.Action]:
+    """
+    Construct a collection of actions.
+    """
    act_env = hf.ActionEnvironment(environment=env)
    actions = []
    for ins_outs_i in ins_outs:
@@ -98,6 +111,9 @@ def make_tasks(
    input_sources=None,
    groups=None,
 ):
+    """
+    Construct a sequence of tasks.
+    """
    local_inputs = local_inputs or {}
    local_sequences = local_sequences or {}
    local_resources = local_resources or {}
@@ -148,6 +164,9 @@ def make_workflow(
    overwrite=False,
    store="zarr",
 ):
+    """
+    Construct a workflow.
+    """
    tasks = make_tasks(
        schemas_spec,
        local_inputs=local_inputs,
@@ -202,6 +221,10 @@ def make_test_data_YAML_workflow_template(workflow_name, **kwargs):
 
 @dataclass
 class P1_sub_parameter_cls(ParameterValue):
+    """
+    Parameter value handler: ``p1_sub``
+    """
+
    _typ = "p1_sub"
 
    e: int
@@ -222,6 +245,10 @@ class P1_sub_parameter_cls(ParameterValue):
 
 @dataclass
 class P1_sub_parameter_cls_2(ParameterValue):
+    """
+    Parameter value handler: ``p1_sub_2``
+    """
+
    _typ = "p1_sub_2"
 
    f: int
@@ -229,6 +256,14 @@ class P1_sub_parameter_cls_2(ParameterValue):
 
 @dataclass
 class P1_parameter_cls(ParameterValue):
+    """
+    Parameter value handler: ``p1c``
+
+    Note
+    ----
+    This is a composite value handler.
+    """
+
    _typ = "p1c"
    _sub_parameters = {"sub_param": "p1_sub", "sub_param_2": "p1_sub_2"}
 
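
The `P1_*` classes above show the general shape of a parameter value handler used in testing: a dataclass deriving from `ParameterValue`, with a `_typ` class attribute naming the parameter type it handles and, for a composite handler, a `_sub_parameters` mapping from a sub-parameter name to its type. A standalone sketch of that pattern follows; the class and field names here are illustrative and not taken from the package.

    from dataclasses import dataclass
    from typing import Optional

    from hpcflow.sdk.core.parameters import ParameterValue  # same import as test_utils.py

    @dataclass
    class MyParamValue(ParameterValue):
        """Illustrative handler for a hypothetical parameter type "my_param"."""

        _typ = "my_param"   # the parameter type this class handles
        e: int              # data carried by the parameter, as ordinary dataclass fields

    @dataclass
    class MyCompositeParamValue(ParameterValue):
        """Illustrative composite handler, mirroring P1_parameter_cls above."""

        _typ = "my_composite"
        _sub_parameters = {"sub_param": "my_param"}  # sub-parameter name -> its type
        sub_param: Optional[MyParamValue] = None
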
hpcflow/sdk/core/utils.py
CHANGED
@@ -1,3 +1,7 @@
+"""
+Miscellaneous utilities.
+"""
+
 import copy
 import enum
 from functools import wraps
@@ -45,12 +49,18 @@ def load_config(func):
 
 
 def make_workflow_id():
+    """
+    Generate a random ID for a workflow.
+    """
    length = 12
    chars = string.ascii_letters + "0123456789"
    return "".join(random.choices(chars, k=length))
 
 
 def get_time_stamp():
+    """
+    Get the current time in standard string form.
+    """
    return datetime.now(timezone.utc).astimezone().strftime("%Y.%m.%d_%H:%M:%S_%z")
 
 
@@ -181,6 +191,11 @@ def swap_nested_dict_keys(dct, inner_key):
 
 
 def get_in_container(cont, path, cast_indices=False, allow_getattr=False):
+    """
+    Follow a path (sequence of indices of appropriate type) into a container to obtain
+    a "leaf" value. Containers can be lists, tuples, dicts,
+    or any class (with `getattr()`) if ``allow_getattr`` is True.
+    """
    cur_data = cont
    err_msg = (
        "Data at path {path_comps!r} is not a sequence, but is of type "
@@ -221,6 +236,11 @@ def get_in_container(cont, path, cast_indices=False, allow_getattr=False):
 
 
 def set_in_container(cont, path, value, ensure_path=False, cast_indices=False):
+    """
+    Follow a path (sequence of indices of appropriate type) into a container to update
+    a "leaf" value. Containers can be lists, tuples or dicts.
+    The "branch" holding the leaf to update must be modifiable.
+    """
    if ensure_path:
        num_path = len(path)
        for idx in range(1, num_path):
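
The two container helpers documented above (`get_in_container` and `set_in_container`) walk a path of keys/indices into nested data. The following standalone sketch illustrates only the documented behaviour; it is not the library implementation and omits the `cast_indices`, `allow_getattr` and `ensure_path` options.

    def get_in(cont, path):
        # Follow each path component into nested dicts/lists/tuples to read a leaf value.
        cur = cont
        for comp in path:
            cur = cur[comp]
        return cur

    def set_in(cont, path, value):
        # Resolve all but the last component, then assign to the leaf (branch must be mutable).
        *branch, leaf = path
        get_in(cont, branch)[leaf] = value

    data = {"resources": {"any": {"num_cores": 1}}, "inputs": [10, 20]}
    assert get_in(data, ("resources", "any", "num_cores")) == 1
    set_in(data, ("inputs", 1), 99)   # list elements are addressed by integer index
    assert data["inputs"] == [10, 99]
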
@@ -315,6 +335,10 @@ def search_dir_files_by_regex(pattern, group=0, directory=".") -> List[str]:
 
 
 class classproperty(object):
+    """
+    Simple class property decorator.
+    """
+
    def __init__(self, f):
        self.f = f
 
@@ -323,6 +347,11 @@ class classproperty(object):
 
 
 class PrettyPrinter(object):
+    """
+    A class that produces a nice readable version of itself with ``str()``.
+    Intended to be subclassed.
+    """
+
    def __str__(self):
        lines = [self.__class__.__name__ + ":"]
        for key, val in vars(self).items():
@@ -331,6 +360,11 @@ class PrettyPrinter(object):
 
 
 class Singleton(type):
+    """
+    Metaclass that enforces that only one instance can exist of the classes to which it
+    is applied.
+    """
+
    _instances = {}
 
    def __call__(cls, *args, **kwargs):
@@ -347,6 +381,10 @@ class Singleton(type):
 
 
 def capitalise_first_letter(chars):
+    """
+    Convert the first character of a string to upper case (if that makes sense).
+    The rest of the string is unchanged.
+    """
    return chars[0].upper() + chars[1:]
 
 
@@ -374,6 +412,18 @@ def check_in_object_list(spec_name, spec_pos=1, obj_list_pos=2):
 
 @TimeIt.decorator
 def substitute_string_vars(string, variables: Dict[str, str] = None):
+    """
+    Scan ``string`` and substitute sequences like ``<<var:ABC>>`` with the value
+    looked up in the supplied dictionary (with ``ABC`` as the key).
+
+    Default values for the substitution can be supplied like:
+    ``<<var:ABC[default=XYZ]>>``
+
+    Examples
+    --------
+    >>> substitute_string_vars("abc <<var:def>> ghi", {"def": "123"})
+    "abc 123 def"
+    """
    variables = variables or {}
 
    def var_repl(match_obj):
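
The new docstring defines ``<<var:NAME>>`` substitution with optional ``[default=...]`` values; note that, as described, the result of its doctest would be expected to read "abc 123 ghi" rather than "abc 123 def". A minimal standalone sketch of the described behaviour (not the library's implementation) is:

    import re

    VAR_RE = re.compile(r"<<var:(\w+)(?:\[default=(.*?)\])?>>")

    def substitute_vars_sketch(string, variables=None):
        # Replace <<var:NAME>> (or <<var:NAME[default=VALUE]>>) using the variables dict.
        variables = variables or {}

        def repl(match):
            name, default = match.group(1), match.group(2)
            if name in variables:
                return str(variables[name])
            if default is not None:
                return default
            raise KeyError(f"no value or default supplied for variable {name!r}")

        return VAR_RE.sub(repl, string)

    assert substitute_vars_sketch("abc <<var:def>> ghi", {"def": "123"}) == "abc 123 ghi"
    assert substitute_vars_sketch("x <<var:N[default=4]>> y") == "x 4 y"
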
@@ -410,7 +460,7 @@ def substitute_string_vars(string, variables: Dict[str, str] = None):
 
 @TimeIt.decorator
 def read_YAML_str(yaml_str, typ="safe", variables: Dict[str, str] = None):
-    """Load a YAML string."""
+    """Load a YAML string. This will produce basic objects."""
    if variables is not False and "<<var:" in yaml_str:
        yaml_str = substitute_string_vars(yaml_str, variables=variables)
    yaml = YAML(typ=typ)
@@ -419,30 +469,35 @@ def read_YAML_str(yaml_str, typ="safe", variables: Dict[str, str] = None):
 
 @TimeIt.decorator
 def read_YAML_file(path: PathLike, typ="safe", variables: Dict[str, str] = None):
+    """Load a YAML file. This will produce basic objects."""
    with fsspec.open(path, "rt") as f:
        yaml_str = f.read()
    return read_YAML_str(yaml_str, typ=typ, variables=variables)
 
 
 def write_YAML_file(obj, path: PathLike, typ="safe"):
+    """Write a basic object to a YAML file."""
    yaml = YAML(typ=typ)
    with Path(path).open("wt") as fp:
        yaml.dump(obj, fp)
 
 
 def read_JSON_string(json_str: str, variables: Dict[str, str] = None):
+    """Load a JSON string. This will produce basic objects."""
    if variables is not False and "<<var:" in json_str:
        json_str = substitute_string_vars(json_str, variables=variables)
    return json.loads(json_str)
 
 
 def read_JSON_file(path, variables: Dict[str, str] = None):
+    """Load a JSON file. This will produce basic objects."""
    with fsspec.open(path, "rt") as f:
        json_str = f.read()
    return read_JSON_string(json_str, variables=variables)
 
 
 def write_JSON_file(obj, path: PathLike):
+    """Write a basic object to a JSON file."""
    with Path(path).open("wt") as fp:
        json.dump(obj, fp)
 
@@ -486,6 +541,13 @@ def get_item_repeat_index(lst, distinguish_singular=False, item_callable=None):
 
 
 def get_process_stamp():
+    """
+    Return a globally unique string identifying this process.
+
+    Note
+    ----
+    This should only be called once per process.
+    """
    return "{} {} {}".format(
        datetime.now(),
        socket.gethostname(),
@@ -494,11 +556,18 @@ def get_process_stamp():
 
 
 def remove_ansi_escape_sequences(string):
+    """
+    Strip ANSI terminal escape codes from a string.
+    """
    ansi_escape = re.compile(r"(\x9B|\x1B\[)[0-?]*[ -\/]*[@-~]")
    return ansi_escape.sub("", string)
 
 
 def get_md5_hash(obj):
+    """
+    Compute the MD5 hash of an object.
+    This is the hash of the JSON of the object (with sorted keys) as a hex string.
+    """
    json_str = json.dumps(obj, sort_keys=True)
    return hashlib.md5(json_str.encode("utf-8")).hexdigest()
 
@@ -562,6 +631,9 @@ def ensure_in(item, lst) -> int:
 
 
 def list_to_dict(lst, exclude=None):
+    """
+    Convert a list of dicts to a dict of lists.
+    """
    # TODD: test
    exclude = exclude or []
    dct = {k: [] for k in lst[0].keys() if k not in exclude}
@@ -617,7 +689,7 @@ def flatten(lst):
    """Flatten an arbitrarily (but of uniform depth) nested list and return shape
    information to enable un-flattening.
 
-    Un-flattening can be performed with the
+    Un-flattening can be performed with the :py:func:`reshape` function.
 
    lst
        List to be flattened. Each element must contain all lists or otherwise all items
@@ -657,6 +729,10 @@ def flatten(lst):
 
 
 def reshape(lst, lens):
+    """
+    Reverse the destructuring of the :py:func:`flatten` function.
+    """
+
    def _reshape(lst, lens):
        lens_acc = [0] + list(accumulate(lens))
        lst_rs = [lst[lens_acc[idx] : lens_acc[idx + 1]] for idx in range(len(lens))]
@@ -669,15 +745,30 @@ def reshape(lst, lens):
 
 
 def is_fsspec_url(url: str) -> bool:
+    """
+    Test if a URL appears to be one that can be understood by fsspec.
+    """
    return bool(re.match(r"(?:[a-z0-9]+:{1,2})+\/\/", url))
 
 
 class JSONLikeDirSnapShot(DirectorySnapshot):
-    """
+    """
+    Overridden DirectorySnapshot from watchdog to allow saving and loading from JSON.
+    """
 
    def __init__(self, root_path=None, data=None):
-        """Create an empty snapshot or load from JSON-like data.
+        """Create an empty snapshot or load from JSON-like data.
+
+        Parameters
+        ----------
+        root_path: str
+            Where to take the snapshot based at.
+        data: dict
+            Serialised snapshot to reload from.
+            See :py:meth:`to_json_like`.
+        """
 
+        #: Where to take the snapshot based at.
        self.root_path = root_path
        self._stat_info = {}
        self._inode_to_path = {}
@@ -874,10 +965,16 @@ def dict_values_process_flat(d, callable):
 
 
 def nth_key(dct, n):
+    """
+    Given a dict in some order, get the n'th key of that dict.
+    """
    it = iter(dct)
    next(islice(it, n, n), None)
    return next(it)
 
 
 def nth_value(dct, n):
+    """
+    Given a dict in some order, get the n'th value of that dict.
+    """
    return dct[nth_key(dct, n)]
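
Two of the smaller helpers documented above have their full bodies visible in the diff, so their documented behaviour can be checked with a standalone reproduction (no hpcflow import needed):

    import hashlib
    import json
    from itertools import islice

    def md5_of_json(obj):
        # MD5 of the JSON serialisation with sorted keys, as a hex string (as in get_md5_hash).
        json_str = json.dumps(obj, sort_keys=True)
        return hashlib.md5(json_str.encode("utf-8")).hexdigest()

    def nth_key(dct, n):
        # Advance an iterator over the dict's keys by n, then return the next key.
        it = iter(dct)
        next(islice(it, n, n), None)
        return next(it)

    # Sorting the keys before hashing makes the hash independent of insertion order.
    assert md5_of_json({"a": 1, "b": 2}) == md5_of_json({"b": 2, "a": 1})

    d = {"first": 10, "second": 20, "third": 30}
    assert nth_key(d, 2) == "third"    # dicts preserve insertion order
    assert d[nth_key(d, 1)] == 20      # this is what nth_value(d, 1) returns
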
hpcflow/sdk/core/validation.py
CHANGED
@@ -1,10 +1,22 @@
+"""
+Schema management.
+"""
+
 from importlib import resources
 
 from valida import Schema
 
 
 def get_schema(filename):
-    """
+    """
+    Get a valida `Schema` object from the embedded data directory.
+
+    Parameter
+    ---------
+    schema: str
+        The name of the schema file within the resources package
+        (:py:mod:`hpcflow.sdk.data`).
+    """
    package = "hpcflow.sdk.data"
    try:
        fh = resources.files(package).joinpath(filename).open("rt")
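
The body of `get_schema` shows the standard `importlib.resources` pattern for opening a data file that ships inside a package. A self-contained sketch of the same pattern follows (Python 3.9+); the package and file names in the commented call are examples taken from this diff, not an API guarantee.

    from importlib import resources

    def read_packaged_text(package, filename):
        # resources.files() returns a Traversable; joinpath/open mirrors get_schema above.
        with resources.files(package).joinpath(filename).open("rt") as fh:
            return fh.read()

    # e.g. read_packaged_text("hpcflow.sdk.data", "task_schema_spec_schema.yaml")
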