hpcflow-new2 0.2.0a158__py3-none-any.whl → 0.2.0a160__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/_version.py +1 -1
- hpcflow/app.py +0 -3
- hpcflow/sdk/__init__.py +2 -0
- hpcflow/sdk/app.py +91 -18
- hpcflow/sdk/cli.py +18 -0
- hpcflow/sdk/cli_common.py +16 -0
- hpcflow/sdk/config/config.py +0 -4
- hpcflow/sdk/core/actions.py +20 -7
- hpcflow/sdk/core/command_files.py +4 -4
- hpcflow/sdk/core/element.py +15 -16
- hpcflow/sdk/core/rule.py +2 -0
- hpcflow/sdk/core/run_dir_files.py +63 -0
- hpcflow/sdk/core/task.py +34 -35
- hpcflow/sdk/core/utils.py +37 -15
- hpcflow/sdk/core/workflow.py +147 -49
- hpcflow/sdk/data/config_schema.yaml +0 -6
- hpcflow/sdk/demo/cli.py +12 -0
- hpcflow/sdk/log.py +2 -2
- hpcflow/sdk/persistence/base.py +142 -12
- hpcflow/sdk/persistence/json.py +84 -63
- hpcflow/sdk/persistence/pending.py +21 -7
- hpcflow/sdk/persistence/utils.py +2 -1
- hpcflow/sdk/persistence/zarr.py +143 -108
- hpcflow/sdk/runtime.py +0 -12
- hpcflow/sdk/submission/jobscript.py +25 -4
- hpcflow/sdk/submission/schedulers/sge.py +3 -0
- hpcflow/sdk/submission/schedulers/slurm.py +3 -0
- hpcflow/sdk/submission/shells/bash.py +2 -2
- hpcflow/sdk/submission/shells/powershell.py +2 -2
- hpcflow/sdk/submission/submission.py +24 -7
- hpcflow/tests/scripts/test_main_scripts.py +40 -0
- hpcflow/tests/unit/test_utils.py +28 -0
- {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/METADATA +1 -2
- {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/RECORD +36 -35
- {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/persistence/zarr.py
CHANGED
@@ -42,6 +42,7 @@ from hpcflow.sdk.log import TimeIt
 blosc.use_threads = False  # hpcflow is a multiprocess program in general
 
 
+@TimeIt.decorator
 def _zarr_get_coord_selection(arr, selection, logger):
     @retry(
         RuntimeError,
@@ -51,6 +52,7 @@ def _zarr_get_coord_selection(arr, selection, logger):
         jitter=(0, 5),
         logger=logger,
     )
+    @TimeIt.decorator
     def _inner(arr, selection):
         return arr.get_coordinate_selection(selection)
 
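Note: both the module-level helper and its retried `_inner` closure are now timed via `TimeIt.decorator`. `TimeIt`'s implementation is not part of this diff; a minimal sketch of the kind of accumulating timing decorator it appears to be (names and behaviour assumed, not copied from hpcflow):

    import functools
    import time

    class TimeIt:
        # accumulated seconds per qualified function name (assumed structure)
        timings: dict = {}

        @classmethod
        def decorator(cls, func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                start = time.perf_counter()
                try:
                    return func(*args, **kwargs)
                finally:
                    elapsed = time.perf_counter() - start
                    cls.timings[func.__qualname__] = (
                        cls.timings.get(func.__qualname__, 0.0) + elapsed
                    )
            return wrapper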
@@ -594,14 +596,21 @@ class ZarrPersistentStore(PersistentStore):
         if attrs != attrs_orig:
             arr.attrs.put(attrs)
 
-…
+    @TimeIt.decorator
+    def _update_EAR_submission_indices(self, sub_indices: Dict[int:int]):
+        EAR_IDs = list(sub_indices.keys())
+        EARs = self._get_persistent_EARs(EAR_IDs)
+
         arr = self._get_EARs_arr(mode="r+")
         attrs_orig = arr.attrs.asdict()
         attrs = copy.deepcopy(attrs_orig)
 
-…
+        encoded_EARs = []
+        for EAR_ID_i, sub_idx_i in sub_indices.items():
+            new_EAR_i = EARs[EAR_ID_i].update(submission_idx=sub_idx_i)
+            # seems to be a Zarr bug that prevents `set_coordinate_selection` with an
+            # object array, so set one-by-one:
+            arr[EAR_ID_i] = new_EAR_i.encode(attrs, self.ts_fmt)
 
         if attrs != attrs_orig:
             arr.attrs.put(attrs)
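The inline comment flags a suspected Zarr bug with `set_coordinate_selection` on object arrays, which is why the new `_update_EAR_submission_indices` writes elements one at a time. A standalone sketch of that workaround (toy array and payloads, not hpcflow's actual EAR encoding):

    import numcodecs
    import zarr

    arr = zarr.empty(4, dtype=object, object_codec=numcodecs.MsgPack())
    updates = {1: {"submission_idx": 0}, 3: {"submission_idx": 2}}

    # instead of arr.set_coordinate_selection(list(updates), list(updates.values())):
    for ear_id, encoded in updates.items():
        arr[ear_id] = encoded  # per-element writes sidestep the reported issue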
@@ -686,24 +695,6 @@ class ZarrPersistentStore(PersistentStore):
             f"PersistentStore._append_parameters: finished adding {len(params)} parameters."
         )
 
-    def _set_parameter_value(self, param_id: int, value: Any, is_file: bool):
-        """Set an unset persistent parameter."""
-
-        # the `decode` call in `_get_persistent_parameters` should be quick:
-        param_i = self._get_persistent_parameters([param_id])[param_id]
-        if is_file:
-            param_i = param_i.set_file(value)
-        else:
-            param_i = param_i.set_data(value)
-        dat_i = param_i.encode(
-            root_group=self._get_parameter_user_array_group(mode="r+"),
-            arr_path=self._param_data_arr_grp_name(param_i.id_),
-        )
-
-        # no need to update sources array:
-        base_arr = self._get_parameter_base_array(mode="r+")
-        base_arr[param_id] = dat_i
-
     def _set_parameter_values(self, set_parameters: Dict[int, Tuple[Any, bool]]):
         """Set multiple unset persistent parameters."""
 
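The scalar `_set_parameter_value` is removed in favour of the batched `_set_parameter_values`, which takes a mapping of parameter ID to a `(value, is_file)` pair, mirroring the `is_file` branch of the removed method. A hedged usage sketch, with illustrative IDs and values only:

    store._set_parameter_values(
        {
            12: (42.0, False),             # plain data parameter
            13: ("out/result.dat", True),  # file parameter
        }
    )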
@@ -747,9 +738,16 @@ class ZarrPersistentStore(PersistentStore):
         with self.using_resource("attrs", "update") as md:
             md["template_components"] = tc
 
+    @TimeIt.decorator
     def _get_num_persistent_tasks(self) -> int:
-        """Get the number of persistent…
-…
+        """Get the number of persistent tasks."""
+        if self.use_cache and self.num_tasks_cache is not None:
+            num = self.num_tasks_cache
+        else:
+            num = len(self._get_tasks_arr())
+        if self.use_cache and self.num_tasks_cache is None:
+            self.num_tasks_cache = num
+        return num
 
     def _get_num_persistent_loops(self) -> int:
         """Get the number of persistent loops."""
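`_get_num_persistent_tasks` now consults `num_tasks_cache` before touching the tasks array. The same guard pattern in isolation (stand-in count, same control flow as above):

    class CachedCount:
        def __init__(self, use_cache: bool = True):
            self.use_cache = use_cache
            self.num_tasks_cache = None  # populated on first read

        def _count(self) -> int:
            return 7  # stand-in for len(self._get_tasks_arr())

        def get_num_tasks(self) -> int:
            if self.use_cache and self.num_tasks_cache is not None:
                return self.num_tasks_cache
            num = self._count()
            if self.use_cache:
                self.num_tasks_cache = num
            return num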
@@ -932,32 +930,39 @@ class ZarrPersistentStore(PersistentStore):
         with self.using_resource("attrs", "read") as attrs:
             return attrs["template"]
 
-…
+    @TimeIt.decorator
+    def _get_persistent_tasks(self, id_lst: Iterable[int]) -> Dict[int, ZarrStoreTask]:
+        tasks, id_lst = self._get_cached_persistent_tasks(id_lst)
+        if id_lst:
+            with self.using_resource("attrs", action="read") as attrs:
+                task_dat = {}
+                elem_IDs = []
+                for idx, i in enumerate(attrs["tasks"]):
+                    i = copy.deepcopy(i)
+                    elem_IDs.append(i.pop("element_IDs_idx"))
+                    if id_lst is None or i["id_"] in id_lst:
+                        task_dat[i["id_"]] = {**i, "index": idx}
+            if task_dat:
+                try:
+                    elem_IDs_arr_dat = self._get_tasks_arr().get_coordinate_selection(
+                        elem_IDs
+                    )
+                except zarr.errors.BoundsCheckError:
+                    raise MissingStoreTaskError(
+                        elem_IDs
+                    ) from None  # TODO: not an ID list
+
+                new_tasks = {
+                    id_: ZarrStoreTask.decode({**i, "element_IDs": elem_IDs_arr_dat[id_]})
+                    for idx, (id_, i) in enumerate(task_dat.items())
+                }
+            else:
+                new_tasks = {}
+            self.task_cache.update(new_tasks)
+            tasks.update(new_tasks)
+        return tasks
 
+    @TimeIt.decorator
     def _get_persistent_loops(self, id_lst: Optional[Iterable[int]] = None):
         with self.using_resource("attrs", "read") as attrs:
             loop_dat = {
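Each rewritten bulk getter starts by calling a `_get_cached_persistent_*` helper that splits the request into cached hits and still-missing IDs; those helpers are not shown in this diff (they plausibly live in the updated `persistence/base.py`). A sketch of that contract, assumed rather than copied:

    from typing import Dict, Iterable, List, Optional, Tuple

    def get_cached(
        cache: Dict[int, object], id_lst: Optional[Iterable[int]]
    ) -> Tuple[Dict[int, object], Optional[List[int]]]:
        """Return (cached hits, IDs still to fetch) for a bulk lookup."""
        if id_lst is None:
            return {}, None  # caller must fetch everything
        ids = list(id_lst)
        hits = {i: cache[i] for i in ids if i in cache}
        missing = [i for i in ids if i not in cache]
        return hits, missing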
@@ -967,6 +972,7 @@ class ZarrPersistentStore(PersistentStore):
             }
         return loop_dat
 
+    @TimeIt.decorator
     def _get_persistent_submissions(self, id_lst: Optional[Iterable[int]] = None):
         self.logger.debug("loading persistent submissions from the zarr store")
         with self.using_resource("attrs", "read") as attrs:
@@ -989,47 +995,66 @@ class ZarrPersistentStore(PersistentStore):
 
         return subs_dat
 
+    @TimeIt.decorator
     def _get_persistent_elements(
         self, id_lst: Iterable[int]
     ) -> Dict[int, ZarrStoreElement]:
-…
+        elems, id_lst = self._get_cached_persistent_elements(id_lst)
+        if id_lst:
+            arr = self._get_elements_arr()
+            attrs = arr.attrs.asdict()
+            try:
+                elem_arr_dat = arr.get_coordinate_selection(id_lst)
+            except zarr.errors.BoundsCheckError:
+                raise MissingStoreElementError(id_lst) from None
+            elem_dat = dict(zip(id_lst, elem_arr_dat))
+            new_elems = {
+                k: ZarrStoreElement.decode(v, attrs) for k, v in elem_dat.items()
+            }
+            self.element_cache.update(new_elems)
+            elems.update(new_elems)
+        return elems
 
+    @TimeIt.decorator
     def _get_persistent_element_iters(
         self, id_lst: Iterable[int]
-    ) -> Dict[int,…
-…
+    ) -> Dict[int, ZarrStoreElementIter]:
+        iters, id_lst = self._get_cached_persistent_element_iters(id_lst)
+        if id_lst:
+            arr = self._get_iters_arr()
+            attrs = arr.attrs.asdict()
+            try:
+                iter_arr_dat = arr.get_coordinate_selection(id_lst)
+            except zarr.errors.BoundsCheckError:
+                raise MissingStoreElementIterationError(id_lst) from None
+            iter_dat = dict(zip(id_lst, iter_arr_dat))
+            new_iters = {
+                k: ZarrStoreElementIter.decode(v, attrs) for k, v in iter_dat.items()
+            }
+            self.element_iter_cache.update(new_iters)
+            iters.update(new_iters)
+        return iters
 
+    @TimeIt.decorator
     def _get_persistent_EARs(self, id_lst: Iterable[int]) -> Dict[int, ZarrStoreEAR]:
-…
+        runs, id_lst = self._get_cached_persistent_EARs(id_lst)
+        if id_lst:
+            arr = self._get_EARs_arr()
+            attrs = arr.attrs.asdict()
+            try:
+                self.logger.debug(f"_get_persistent_EARs: {id_lst=}")
+                EAR_arr_dat = _zarr_get_coord_selection(arr, id_lst, self.logger)
+            except zarr.errors.BoundsCheckError:
+                raise MissingStoreEARError(id_lst) from None
+            EAR_dat = dict(zip(id_lst, EAR_arr_dat))
+            new_runs = {
+                k: ZarrStoreEAR.decode(EAR_dat=v, attrs=attrs, ts_fmt=self.ts_fmt)
+                for k, v in EAR_dat.items()
+            }
+            self.EAR_cache.update(new_runs)
+            runs.update(new_runs)
 
-…
-            k: ZarrStoreEAR.decode(EAR_dat=v, attrs=attrs, ts_fmt=self.ts_fmt)
-            for k, v in EAR_dat.items()
-        }
-        return iters
+        return runs
 
     @TimeIt.decorator
     def _get_persistent_parameters(
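Besides the caching, note the genuine bug fix at the end of `_get_persistent_EARs`: the old body ended with `return iters`, apparently copied from `_get_persistent_element_iters`, and now correctly returns `runs`. The `BoundsCheckError`-to-domain-error translation used throughout these getters is easy to reproduce on a toy array:

    import zarr

    arr = zarr.zeros(5, dtype="i8")
    try:
        arr.get_coordinate_selection([3, 99])  # 99 is out of bounds
    except zarr.errors.BoundsCheckError:
        print("translate to a MissingStore*Error for the requested IDs")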
@@ -1037,39 +1062,49 @@ class ZarrPersistentStore(PersistentStore):
         id_lst: Iterable[int],
         dataset_copy: Optional[bool] = False,
     ) -> Dict[int, ZarrStoreParameter]:
-        base_arr = self._get_parameter_base_array(mode="r")
-        src_arr = self._get_parameter_sources_array(mode="r")
 
-…
-            raise MissingParameterData(id_lst) from None
-…
-        param_dat = dict(zip(id_lst, param_arr_dat))
-        src_dat = dict(zip(id_lst, src_arr_dat))
+        params, id_lst = self._get_cached_persistent_parameters(id_lst)
+        if id_lst:
+            base_arr = self._get_parameter_base_array(mode="r")
+            src_arr = self._get_parameter_sources_array(mode="r")
 
-…
-        )
-…
+            try:
+                param_arr_dat = base_arr.get_coordinate_selection(list(id_lst))
+                src_arr_dat = src_arr.get_coordinate_selection(list(id_lst))
+            except zarr.errors.BoundsCheckError:
+                raise MissingParameterData(id_lst) from None
+
+            param_dat = dict(zip(id_lst, param_arr_dat))
+            src_dat = dict(zip(id_lst, src_arr_dat))
+
+            new_params = {
+                k: ZarrStoreParameter.decode(
+                    id_=k,
+                    data=v,
+                    source=src_dat[k],
+                    arr_group=self._get_parameter_data_array_group(k),
+                    dataset_copy=dataset_copy,
+                )
+                for k, v in param_dat.items()
+            }
+            self.parameter_cache.update(new_params)
+            params.update(new_params)
 
         return params
 
+    @TimeIt.decorator
     def _get_persistent_param_sources(self, id_lst: Iterable[int]) -> Dict[int, Dict]:
-…
+        sources, id_lst = self._get_cached_persistent_param_sources(id_lst)
+        if id_lst:
+            src_arr = self._get_parameter_sources_array(mode="r")
+            try:
+                src_arr_dat = src_arr.get_coordinate_selection(list(id_lst))
+            except zarr.errors.BoundsCheckError:
+                raise MissingParameterData(id_lst) from None
+            new_sources = dict(zip(id_lst, src_arr_dat))
+            self.param_sources_cache.update(new_sources)
+            sources.update(new_sources)
+        return sources
 
     def _get_persistent_parameter_set_status(
         self, id_lst: Iterable[int]
hpcflow/sdk/runtime.py
CHANGED
@@ -7,7 +7,6 @@ import sys
 from pathlib import Path
 import warnings
 
-import sentry_sdk
 from rich.table import Table
 from rich.console import Console
 
@@ -97,17 +96,6 @@ class RunTimeInfo:
         # )
         # warnings.warn(msg)
 
-        for k, v in self.to_dict().items():
-            if k in (
-                "is_frozen",
-                "is_venv",
-                "is_conda_venv",
-                "executable_name",
-                "python_version",
-                "in_ipython",
-            ):
-                sentry_sdk.set_tag(f"rti.{k}", v)
-
     def to_dict(self):
         out = {
             "name": self.name,
hpcflow/sdk/submission/jobscript.py
CHANGED
@@ -17,11 +17,13 @@ from hpcflow.sdk.core.actions import EARStatus
 from hpcflow.sdk.core.errors import JobscriptSubmissionFailure, NotSubmitMachineError
 
 from hpcflow.sdk.core.json_like import ChildObjectSpec, JSONLike
+from hpcflow.sdk.log import TimeIt
 from hpcflow.sdk.submission.jobscript_info import JobscriptElementState
 from hpcflow.sdk.submission.schedulers import Scheduler
 from hpcflow.sdk.submission.shells import get_shell
 
 
+@TimeIt.decorator
 def generate_EAR_resource_map(
     task: app.WorkflowTask,
     loop_idx: Dict,
@@ -76,6 +78,7 @@ def generate_EAR_resource_map(
     )
 
 
+@TimeIt.decorator
 def group_resource_map_into_jobscripts(
     resource_map: Union[List, NDArray],
     none_val: Any = -1,
@@ -147,6 +150,7 @@ def group_resource_map_into_jobscripts(
     return jobscripts, js_map
 
 
+@TimeIt.decorator
 def resolve_jobscript_dependencies(jobscripts, element_deps):
     # first pass is to find the mappings between jobscript elements:
     jobscript_deps = {}
@@ -217,6 +221,7 @@ def resolve_jobscript_dependencies(jobscripts, element_deps):
     return jobscript_deps
 
 
+@TimeIt.decorator
 def merge_jobscripts_across_tasks(jobscripts: Dict) -> Dict:
     """Try to merge jobscripts between tasks.
 
@@ -274,6 +279,7 @@ def merge_jobscripts_across_tasks(jobscripts: Dict) -> Dict:
     return jobscripts
 
 
+@TimeIt.decorator
 def jobscripts_to_list(jobscripts: Dict[int, Dict]) -> List[Dict]:
     """Convert the jobscripts dict to a list, normalising jobscript indices so they refer
     to list indices; also remove `resource_hash`."""
@@ -396,8 +402,11 @@ class Jobscript(JSONLike):
         return self._workflow_app_alias
 
     def get_commands_file_name(self, js_action_idx, shell=None):
-…
+        return self.app.RunDirAppFiles.get_commands_file_name(
+            js_idx=self.index,
+            js_action_idx=js_action_idx,
+            shell=shell or self.shell,
+        )
 
     @property
     def task_insert_IDs(self):
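`get_commands_file_name` now delegates to the `RunDirAppFiles` helper (the new `hpcflow/sdk/core/run_dir_files.py` in the summary above). The naming logic itself is not shown here; the call shape, including the shell-variable placeholder used later in this file, looks roughly like (values illustrative):

    # `${JS_act_idx}` is substituted by the shell at run time, per the
    # compose_jobscript usage further down this diff
    name = jobscript.app.RunDirAppFiles.get_commands_file_name(
        js_idx=jobscript.index,
        js_action_idx=r"${JS_act_idx}",
        shell=jobscript.shell,
    )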
@@ -420,6 +429,7 @@ class Jobscript(JSONLike):
         return self.EAR_ID.flatten()
 
     @property
+    @TimeIt.decorator
     def all_EARs(self) -> List:
         if not self._all_EARs:
             self._all_EARs = self.workflow.get_EARs_from_IDs(self.all_EAR_IDs)
@@ -438,6 +448,7 @@ class Jobscript(JSONLike):
         return self._dependencies
 
     @property
+    @TimeIt.decorator
     def start_time(self):
         """Get the first start time from all EARs."""
         if not self.is_submitted:
@@ -449,6 +460,7 @@ class Jobscript(JSONLike):
         return None
 
     @property
+    @TimeIt.decorator
     def end_time(self):
         """Get the last end time from all EARs."""
         if not self.is_submitted:
@@ -731,6 +743,7 @@ class Jobscript(JSONLike):
         )
         return loop_idx
 
+    @TimeIt.decorator
     def write_EAR_ID_file(self):
         """Write a text file with `num_elements` lines and `num_actions` delimited tokens
         per line, representing whether a given EAR must be executed."""
@@ -744,6 +757,7 @@ class Jobscript(JSONLike):
             delimiter=self._EAR_files_delimiter,
         )
 
+    @TimeIt.decorator
     def write_element_run_dir_file(self, run_dirs: List[List[Path]]):
         """Write a text file with `num_elements` lines and `num_actions` delimited tokens
         per line, representing the working directory for each EAR.
@@ -763,6 +777,7 @@ class Jobscript(JSONLike):
             delimiter=self._EAR_files_delimiter,
         )
 
+    @TimeIt.decorator
     def compose_jobscript(
         self,
         deps: Optional[Dict] = None,
@@ -821,7 +836,7 @@ class Jobscript(JSONLike):
             "workflow_app_alias": self.workflow_app_alias,
             "env_setup": env_setup,
             "app_invoc": app_invoc,
-            "…
+            "run_log_file": self.app.RunDirAppFiles.get_log_file_name(),
             "config_dir": str(self.app.config.config_directory),
             "config_invoc_key": self.app.config.config_key,
             "workflow_path": self.workflow.path,
@@ -868,7 +883,7 @@ class Jobscript(JSONLike):
             EAR_files_delimiter=self._EAR_files_delimiter,
             workflow_app_alias=self.workflow_app_alias,
             commands_file_name=self.get_commands_file_name(r"${JS_act_idx}", shell=shell),
-…
+            run_stream_file=self.app.RunDirAppFiles.get_std_file_name(),
         )
 
         out = header
@@ -890,6 +905,7 @@ class Jobscript(JSONLike):
 
         return out
 
+    @TimeIt.decorator
     def write_jobscript(
         self,
         os_name: str = None,
@@ -917,6 +933,7 @@ class Jobscript(JSONLike):
         EARs_arr = np.array(self.all_EARs).reshape(self.EAR_ID.shape)
         return EARs_arr
 
+    @TimeIt.decorator
     def make_artifact_dirs(self):
         EARs_arr = self._get_EARs_arr()
         task_loop_idx_arr = self.get_task_loop_idx_array()
@@ -951,6 +968,7 @@ class Jobscript(JSONLike):
 
         return run_dirs
 
+    @TimeIt.decorator
     def _launch_direct_js_win(self):
         # this is a "trick" to ensure we always get a fully detached new process (with no
        # parent); the `powershell.exe -Command` process exits after running the inner
@@ -994,6 +1012,7 @@ class Jobscript(JSONLike):
         process_ID = int(self.direct_win_pid_file_path.read_text())
         return process_ID
 
+    @TimeIt.decorator
     def _launch_direct_js_posix(self) -> int:
         # direct submission; submit jobscript asynchronously:
         # detached process, avoid interrupt signals propagating to the subprocess:
@@ -1011,6 +1030,7 @@ class Jobscript(JSONLike):
 
         return process_ID
 
+    @TimeIt.decorator
     def submit(
         self,
         scheduler_refs: Dict[int, (str, bool)],
@@ -1140,6 +1160,7 @@ class Jobscript(JSONLike):
         out["num_js_elements"] = self.num_elements
         return out
 
+    @TimeIt.decorator
     def get_active_states(
         self, as_json: bool = False
     ) -> Dict[int, JobscriptElementState]:
hpcflow/sdk/submission/schedulers/sge.py
CHANGED
@@ -6,6 +6,7 @@ from hpcflow.sdk.core.errors import (
     NoCompatibleSGEPEError,
     UnknownSGEPEError,
 )
+from hpcflow.sdk.log import TimeIt
 from hpcflow.sdk.submission.jobscript_info import JobscriptElementState
 from hpcflow.sdk.submission.schedulers import Scheduler
 from hpcflow.sdk.submission.schedulers.utils import run_cmd
@@ -72,6 +73,7 @@ class SGEPosix(Scheduler):
         self.cwd_switch = cwd_switch or self.DEFAULT_CWD_SWITCH
 
     @classmethod
+    @TimeIt.decorator
     def process_resources(cls, resources, scheduler_config: Dict) -> None:
         """Perform scheduler-specific processing to the element resources.
 
@@ -175,6 +177,7 @@ class SGEPosix(Scheduler):
 
         return "\n".join(opts) + "\n"
 
+    @TimeIt.decorator
     def get_version_info(self):
         vers_cmd = self.show_cmd + ["-help"]
         stdout, stderr = run_cmd(vers_cmd)
hpcflow/sdk/submission/schedulers/slurm.py
CHANGED
@@ -9,6 +9,7 @@ from hpcflow.sdk.core.errors import (
     UnknownSLURMPartitionError,
 )
 from hpcflow.sdk.core.parameters import ParallelMode
+from hpcflow.sdk.log import TimeIt
 from hpcflow.sdk.submission.jobscript_info import JobscriptElementState
 from hpcflow.sdk.submission.schedulers import Scheduler
 from hpcflow.sdk.submission.schedulers.utils import run_cmd
@@ -58,6 +59,7 @@ class SlurmPosix(Scheduler):
         super().__init__(*args, **kwargs)
 
     @classmethod
+    @TimeIt.decorator
     def process_resources(cls, resources, scheduler_config: Dict) -> None:
         """Perform scheduler-specific processing to the element resources.
 
@@ -360,6 +362,7 @@ class SlurmPosix(Scheduler):
 
         return "\n".join(opts) + "\n"
 
+    @TimeIt.decorator
     def get_version_info(self):
         vers_cmd = [self.submit_cmd, "--version"]
         proc = subprocess.run(
hpcflow/sdk/submission/shells/bash.py
CHANGED
@@ -24,7 +24,7 @@ class Bash(Shell):
     {workflow_app_alias} () {{
     (
     {env_setup}{app_invoc}\\
-        --with-config log_file_path "`pwd`/{…
+        --with-config log_file_path "`pwd`/{run_log_file}"\\
         --config-dir "{config_dir}"\\
         --config-key "{config_invoc_key}"\\
         "$@"
@@ -70,7 +70,7 @@ class Bash(Shell):
 
     run_dir="$(cut -d'{EAR_files_delimiter}' -f $(($JS_act_idx + 1)) <<< $elem_run_dirs)"
     cd "$WK_PATH/$run_dir"
-    app_stream_file="`pwd`/{…
+    app_stream_file="`pwd`/{run_stream_file}"
 
     skip=`{workflow_app_alias} internal workflow "$WK_PATH_ARG" get-ear-skipped $EAR_ID 2>> "$app_stream_file"`
     exc_sk=$?
hpcflow/sdk/submission/shells/powershell.py
CHANGED
@@ -22,7 +22,7 @@ class WindowsPowerShell(Shell):
     function {workflow_app_alias} {{
         & {{
             {env_setup}{app_invoc} `
-                --with-config log_file_path "$pwd/{…
+                --with-config log_file_path "$pwd/{run_log_file}" `
                 --config-dir "{config_dir}" `
                 --config-key "{config_invoc_key}" `
                 $args
@@ -83,7 +83,7 @@ class WindowsPowerShell(Shell):
     $run_dir = ($elem_run_dirs -split "{EAR_files_delimiter}")[$JS_act_idx]
     $run_dir_abs = "$WK_PATH\\$run_dir"
     Set-Location $run_dir_abs
-    $app_stream_file = "$pwd/{…
+    $app_stream_file = "$pwd/{run_stream_file}"
 
     $skip = {workflow_app_alias} internal workflow $WK_PATH get-ear-skipped $EAR_ID 2>> $app_stream_file
     $exc_sk = $LASTEXITCODE
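Both shell header templates now receive the run log and stream file names as `{run_log_file}` and `{run_stream_file}` format fields, supplied from `RunDirAppFiles.get_log_file_name()` and `get_std_file_name()` when the jobscript is composed. A toy rendering of the substitution (file name illustrative; the real one comes from `RunDirAppFiles`):

    template = '--with-config log_file_path "`pwd`/{run_log_file}"'
    print(template.format(run_log_file="hpcflow_run.log"))
    # --with-config log_file_path "`pwd`/hpcflow_run.log"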