hpcflow_new2-0.2.0a159-py3-none-any.whl → hpcflow_new2-0.2.0a161-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/_version.py +1 -1
- hpcflow/sdk/__init__.py +2 -0
- hpcflow/sdk/app.py +194 -41
- hpcflow/sdk/cli.py +18 -0
- hpcflow/sdk/cli_common.py +16 -0
- hpcflow/sdk/core/actions.py +14 -6
- hpcflow/sdk/core/command_files.py +22 -14
- hpcflow/sdk/core/element.py +15 -16
- hpcflow/sdk/core/run_dir_files.py +63 -0
- hpcflow/sdk/core/task.py +34 -35
- hpcflow/sdk/core/utils.py +37 -0
- hpcflow/sdk/core/workflow.py +144 -49
- hpcflow/sdk/demo/cli.py +12 -0
- hpcflow/sdk/log.py +2 -2
- hpcflow/sdk/persistence/base.py +140 -12
- hpcflow/sdk/persistence/json.py +84 -63
- hpcflow/sdk/persistence/pending.py +21 -7
- hpcflow/sdk/persistence/zarr.py +143 -108
- hpcflow/sdk/submission/jobscript.py +22 -4
- hpcflow/sdk/submission/shells/bash.py +2 -2
- hpcflow/sdk/submission/shells/powershell.py +2 -2
- hpcflow/sdk/submission/submission.py +20 -7
- hpcflow/tests/scripts/test_main_scripts.py +40 -0
- hpcflow/tests/unit/test_submission.py +1 -0
- hpcflow/tests/unit/test_utils.py +28 -0
- {hpcflow_new2-0.2.0a159.dist-info → hpcflow_new2-0.2.0a161.dist-info}/METADATA +1 -1
- {hpcflow_new2-0.2.0a159.dist-info → hpcflow_new2-0.2.0a161.dist-info}/RECORD +29 -28
- {hpcflow_new2-0.2.0a159.dist-info → hpcflow_new2-0.2.0a161.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a159.dist-info → hpcflow_new2-0.2.0a161.dist-info}/entry_points.txt +0 -0
hpcflow/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.2.0a159"
+__version__ = "0.2.0a161"
hpcflow/sdk/__init__.py
CHANGED
@@ -1,4 +1,5 @@
 """Sub-package to define an extensible hpcflow application."""
+
 import logging
 import os
 import sys
@@ -90,6 +91,7 @@ sdk_classes = {
     "SlurmPosix": "hpcflow.sdk.submission.schedulers.slurm",
     "SGEPosix": "hpcflow.sdk.submission.schedulers.sge",
     "OutputLabel": "hpcflow.sdk.core.task",
+    "RunDirAppFiles": "hpcflow.sdk.core.run_dir_files",
 }

 # these are defined as `BaseApp` methods with an underscore prefix:
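
Note: the new `RunDirAppFiles` entry extends the existing `sdk_classes` registry, which maps an SDK class name to the module that defines it. A minimal sketch of how such a name-to-module registry can back lazy imports (the loader function below is illustrative, not hpcflow's actual mechanism):

    from importlib import import_module

    # Registry in the style of `sdk_classes`: class name -> defining module.
    sdk_classes = {
        "OutputLabel": "hpcflow.sdk.core.task",
        "RunDirAppFiles": "hpcflow.sdk.core.run_dir_files",
    }

    def get_sdk_class(name: str):
        """Import the defining module on first access and return the class."""
        module = import_module(sdk_classes[name])
        return getattr(module, name)
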
hpcflow/sdk/app.py
CHANGED
@@ -20,6 +20,7 @@ import warnings
 import zipfile
 from platformdirs import user_cache_path, user_data_dir
 from reretry import retry
+import rich
 from rich.console import Console, Group
 from rich.syntax import Syntax
 from rich.table import Table, box
@@ -939,7 +940,15 @@ class BaseApp(metaclass=Singleton):
         return self.user_data_hostname_dir / self.known_subs_file_name

     def _format_known_submissions_line(
-        self,
+        self,
+        local_id,
+        workflow_id,
+        submit_time,
+        sub_idx,
+        is_active,
+        wk_path,
+        start_time,
+        end_time,
     ):
         line = [
             str(local_id),
@@ -948,23 +957,35 @@ class BaseApp(metaclass=Singleton):
             str(sub_idx),
             submit_time,
             str(wk_path),
+            start_time,
+            end_time,
         ]
         return self._known_subs_file_sep.join(line) + "\n"

     def _parse_known_submissions_line(self, line: str) -> Dict:
-
-
-
+        (
+            local_id,
+            workflow_id,
+            is_active,
+            sub_idx,
+            submit_time,
+            path_i,
+            start_time,
+            end_time,
+        ) = line.split(self._known_subs_file_sep, maxsplit=7)
         item = {
             "local_id": int(local_id),
             "workflow_id": workflow_id,
             "is_active": bool(int(is_active)),
-            "submit_time": submit_time,
             "sub_idx": int(sub_idx),
-            "
+            "submit_time": submit_time,
+            "path": path_i,
+            "start_time": start_time,
+            "end_time": end_time.strip(),
         }
         return item

+    @TimeIt.decorator
     def read_known_submissions_file(self) -> List[Dict]:
         """Retrieve existing workflows that *might* be running."""
         known = []
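
Note: as the parse method above shows, each record in the known-submissions file is a single separator-joined line with eight fields: local ID, workflow ID, active flag, submission index, submit time, workflow path, and (new in this release) start and end times. A standalone sketch of the parse step, assuming a tab separator (the real `_known_subs_file_sep` value is not shown in this diff):

    KNOWN_SUBS_SEP = "\t"  # assumed separator, for illustration only

    def parse_known_submissions_line(line: str) -> dict:
        # Eight fields; maxsplit=7 keeps any stray separator characters in the last field.
        (local_id, workflow_id, is_active, sub_idx, submit_time,
         path, start_time, end_time) = line.split(KNOWN_SUBS_SEP, maxsplit=7)
        return {
            "local_id": int(local_id),
            "workflow_id": workflow_id,
            "is_active": bool(int(is_active)),
            "sub_idx": int(sub_idx),
            "submit_time": submit_time,
            "path": path,
            "start_time": start_time,
            "end_time": end_time.strip(),  # drop the trailing newline
        }
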
@@ -1014,6 +1035,8 @@ class BaseApp(metaclass=Singleton):
             submit_time=sub_time,
             sub_idx=sub_idx,
             wk_path=wk_path,
+            start_time="",
+            end_time="",
         )
         with self.known_subs_file_path.open("at", newline="\n") as fh:
             # TODO: check wk_path is an absolute path? what about if a remote fsspec path?
@@ -1024,16 +1047,22 @@ class BaseApp(metaclass=Singleton):

         return next_id

-
-
+    @TimeIt.decorator
+    def update_known_subs_file(
+        self,
+        inactive_IDs: List[int],
+        start_times: Dict[int, str],
+        end_times: Dict[int, str],
+    ):
+        """Update submission records in the known-submission file.

         Note we aim for atomicity to help with the scenario where a new workflow
         submission is adding itself to the file at the same time as we have decided an
         existing workflow should no longer be part of this file. Ideally, such a scenario
         should not arise because both operations should only ever be interactively
-        initiated by the single user (`Workflow.submit` and `App.get_known_submissions`).
-        operation is atomic, then at least the known-submissions file should be
-        usable (but inaccurate) state.
+        initiated by the single user (`Workflow.submit` and `App.get_known_submissions`).
+        If this operation is atomic, then at least the known-submissions file should be
+        left in a usable (but inaccurate) state.

         Returns
         -------
@@ -1063,30 +1092,41 @@ class BaseApp(metaclass=Singleton):
                     continue
                 item = self._parse_known_submissions_line(line)
                 line_IDs.append(item["local_id"])
-
+                shows_as_active = item["is_active"]
+                is_inactive = item["local_id"] in inactive_IDs
+                start_time = item["start_time"] or start_times.get(item["local_id"], "")
+                end_time = item["end_time"] or end_times.get(item["local_id"], "")

-
-
-
+                update_inactive = is_inactive and shows_as_active
+                update_start = item["local_id"] in start_times
+                update_end = item["local_id"] in end_times
+
+                if update_inactive or update_start or update_end:
+
+                    updated = self._format_known_submissions_line(
                         local_id=item["local_id"],
                         workflow_id=item["workflow_id"],
-                        is_active=
+                        is_active=not is_inactive,
                         submit_time=item["submit_time"],
                         sub_idx=item["sub_idx"],
                         wk_path=item["path"],
+                        start_time=start_time,
+                        end_time=end_time,
                     )
-                    new_lines.append(
-
+                    new_lines.append(updated)
+
                     self.submission_logger.debug(
-                        f"
-                        f"
+                        f"Updating (workflow, submission) from the known-submissions file: "
+                        f"{'set to inactive; ' if update_inactive else ''}"
+                        f"{f'set start_time: {start_time!r}; ' if update_start else ''}"
+                        f"{f'set end_time: {end_time!r}; ' if update_end else ''}"
                         f"({item['path']}, {item['sub_idx']})"
                     )
                 else:
                     # leave this one alone:
                     new_lines.append(line + "\n")

-                if
+                if is_inactive:
                     line_date[ln_idx] = item["submit_time"]

         ld_srt_idx = list(dict(sorted(line_date.items(), key=lambda i: i[1])).keys())
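
Note: the rewrite loop above updates records in place: a record is flagged inactive only if it currently shows as active, and start/end times are filled in once they become known while existing values are preserved. A simplified sketch of that per-record merge (names mirror the diff; file locking and rewriting are omitted):

    def merge_record(item: dict, inactive_IDs, start_times, end_times) -> dict:
        """Return an updated copy of one parsed known-submissions record."""
        local_id = item["local_id"]
        updated = dict(item)
        # Only flip the flag if the record still claims to be active:
        if local_id in inactive_IDs and item["is_active"]:
            updated["is_active"] = False
        # Prefer an existing timestamp; otherwise take a newly discovered one, if any:
        updated["start_time"] = item["start_time"] or start_times.get(local_id, "")
        updated["end_time"] = item["end_time"] or end_times.get(local_id, "")
        return updated
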
@@ -1141,6 +1181,7 @@ class BaseApp(metaclass=Singleton):
         ts_name_fmt: Optional[str] = None,
         store_kwargs: Optional[Dict] = None,
         variables: Optional[Dict[str, str]] = None,
+        status: Optional[bool] = True,
     ) -> get_app_attribute("Workflow"):
         """Generate a new {app_name} workflow from a file or string containing a workflow
         template parametrisation.
@@ -1177,10 +1218,17 @@ class BaseApp(metaclass=Singleton):
             Keyword arguments to pass to the store's `write_empty_workflow` method.
         variables
             String variables to substitute in `template_file_or_str`.
+        status
+            If True, display a live status to track workflow creation progress.
         """

         self.API_logger.info("make_workflow called")

+        if status:
+            console = rich.console.Console()
+            status = console.status("Making persistent workflow...")
+            status.start()
+
         common = {
             "path": path,
             "name": name,
@@ -1190,6 +1238,7 @@ class BaseApp(metaclass=Singleton):
             "ts_name_fmt": ts_name_fmt,
             "store_kwargs": store_kwargs,
             "variables": variables,
+            "status": status,
         }

         if not is_string:
@@ -1200,10 +1249,24 @@ class BaseApp(metaclass=Singleton):
             )

         elif template_format == "json":
-
+            try:
+                wk = self.Workflow.from_JSON_string(
+                    JSON_str=template_file_or_str, **common
+                )
+            except Exception:
+                if status:
+                    status.stop()
+                raise

         elif template_format == "yaml":
-
+            try:
+                wk = self.Workflow.from_YAML_string(
+                    YAML_str=template_file_or_str, **common
+                )
+            except Exception:
+                if status:
+                    status.stop()
+                raise

         elif not template_format:
             raise ValueError(
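
Note: the `status` handling above relies on rich's live status spinner, started before the slow work and stopped on both the success and error paths. A minimal standalone sketch of the same pattern (the work function is a placeholder):

    import time
    import rich.console

    def make_persistent_workflow():
        time.sleep(1)  # stand-in for the real work

    status = rich.console.Console().status("Making persistent workflow...")
    status.start()
    try:
        make_persistent_workflow()
    except Exception:
        status.stop()  # stop the live display before re-raising, as in the diff
        raise
    else:
        status.stop()
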
@@ -1216,6 +1279,10 @@ class BaseApp(metaclass=Singleton):
                 f"Template format {template_format!r} not understood. Available template "
                 f"formats are {ALL_TEMPLATE_FORMATS!r}."
             )
+
+        if status:
+            status.stop()
+
         return wk

     def _make_and_submit_workflow(
@@ -1236,6 +1303,8 @@ class BaseApp(metaclass=Singleton):
         add_to_known: Optional[bool] = True,
         return_idx: Optional[bool] = False,
         tasks: Optional[List[int]] = None,
+        cancel: Optional[bool] = False,
+        status: Optional[bool] = True,
     ) -> Dict[int, int]:
         """Generate and submit a new {app_name} workflow from a file or string containing a
         workflow template parametrisation.
@@ -1288,6 +1357,11 @@ class BaseApp(metaclass=Singleton):
         tasks
             List of task indices to include in this submission. By default all tasks are
             included.
+        cancel
+            Immediately cancel the submission. Useful for testing and benchmarking.
+        status
+            If True, display a live status to track workflow creation and submission
+            progress.
         """

         self.API_logger.info("make_and_submit_workflow called")
@@ -1304,6 +1378,7 @@ class BaseApp(metaclass=Singleton):
             ts_name_fmt=ts_name_fmt,
             store_kwargs=store_kwargs,
             variables=variables,
+            status=status,
         )
         return wk.submit(
             JS_parallelism=JS_parallelism,
@@ -1311,6 +1386,8 @@ class BaseApp(metaclass=Singleton):
             add_to_known=add_to_known,
             return_idx=return_idx,
             tasks=tasks,
+            cancel=cancel,
+            status=status,
         )

     def _make_demo_workflow(
@@ -1325,6 +1402,7 @@ class BaseApp(metaclass=Singleton):
         ts_name_fmt: Optional[str] = None,
         store_kwargs: Optional[Dict] = None,
         variables: Optional[Dict[str, str]] = None,
+        status: Optional[bool] = True,
     ) -> get_app_attribute("Workflow"):
         """Generate a new {app_name} workflow from a builtin demo workflow template.

@@ -1358,10 +1436,17 @@ class BaseApp(metaclass=Singleton):
             Keyword arguments to pass to the store's `write_empty_workflow` method.
         variables
             String variables to substitute in the demo workflow template file.
+        status
+            If True, display a live status to track workflow creation progress.
         """

         self.API_logger.info("make_demo_workflow called")

+        if status:
+            console = rich.console.Console()
+            status = console.status("Making persistent workflow...")
+            status.start()
+
         with self.get_demo_workflow_template_file(workflow_name) as template_path:
             wk = self.Workflow.from_file(
                 template_path=template_path,
@@ -1374,7 +1459,10 @@ class BaseApp(metaclass=Singleton):
                 ts_name_fmt=ts_name_fmt,
                 store_kwargs=store_kwargs,
                 variables=variables,
+                status=status,
             )
+        if status:
+            status.stop()
         return wk

     def _make_and_submit_demo_workflow(
@@ -1394,6 +1482,8 @@ class BaseApp(metaclass=Singleton):
         add_to_known: Optional[bool] = True,
         return_idx: Optional[bool] = False,
         tasks: Optional[List[int]] = None,
+        cancel: Optional[bool] = False,
+        status: Optional[bool] = True,
     ) -> Dict[int, int]:
         """Generate and submit a new {app_name} workflow from a file or string containing a
         workflow template parametrisation.
@@ -1443,6 +1533,10 @@ class BaseApp(metaclass=Singleton):
         tasks
             List of task indices to include in this submission. By default all tasks are
             included.
+        cancel
+            Immediately cancel the submission. Useful for testing and benchmarking.
+        status
+            If True, display a live status to track submission progress.
         """

         self.API_logger.info("make_and_submit_demo_workflow called")
@@ -1465,6 +1559,8 @@ class BaseApp(metaclass=Singleton):
             add_to_known=add_to_known,
             return_idx=return_idx,
             tasks=tasks,
+            cancel=cancel,
+            status=status,
         )

     def _submit_workflow(
@@ -1556,8 +1652,13 @@ class BaseApp(metaclass=Singleton):
         )
         return shell.get_version_info(exclude_os)

+    @TimeIt.decorator
     def _get_known_submissions(
-        self,
+        self,
+        max_recent: int = 3,
+        no_update: bool = False,
+        as_json: bool = False,
+        status: Optional[Any] = None,
     ):
         """Retrieve information about active and recently inactive finished {app_name}
         workflows.
@@ -1580,8 +1681,14 @@ class BaseApp(metaclass=Singleton):

         out = []
         inactive_IDs = []
+        start_times = {}
+        end_times = {}
+
+        ts_fmt = self._submission_ts_fmt

         try:
+            if status:
+                status.update("Reading known submissions file...")
             known_subs = self.read_known_submissions_file()
         except FileNotFoundError:
             known_subs = []
@@ -1592,14 +1699,31 @@ class BaseApp(metaclass=Singleton):
         # loop in reverse so we process more-recent submissions first:
         for file_dat_i in known_subs[::-1]:
             submit_time_str = file_dat_i["submit_time"]
-            submit_time_obj = datetime.strptime(submit_time_str,
+            submit_time_obj = datetime.strptime(submit_time_str, ts_fmt)
             submit_time_obj = submit_time_obj.replace(tzinfo=timezone.utc).astimezone()
+
+            start_time_str = file_dat_i["start_time"]
+            start_time_obj = None
+            if start_time_str:
+                start_time_obj = datetime.strptime(start_time_str, ts_fmt)
+                start_time_obj = start_time_obj.replace(tzinfo=timezone.utc).astimezone()
+
+            end_time_str = file_dat_i["end_time"]
+            end_time_obj = None
+            if end_time_str:
+                end_time_obj = datetime.strptime(end_time_str, ts_fmt)
+                end_time_obj = end_time_obj.replace(tzinfo=timezone.utc).astimezone()
+
             out_item = {
                 "local_id": file_dat_i["local_id"],
                 "workflow_id": file_dat_i["workflow_id"],
                 "workflow_path": file_dat_i["path"],
                 "submit_time": submit_time_str,
                 "submit_time_obj": submit_time_obj,
+                "start_time": start_time_str,
+                "start_time_obj": start_time_obj,
+                "end_time": end_time_str,
+                "end_time_obj": end_time_obj,
                 "sub_idx": file_dat_i["sub_idx"],
                 "jobscripts": [],
                 "active_jobscripts": {},
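
Note: submit, start, and end times are stored as UTC strings and converted to local time for display, using the pattern shown above. A self-contained sketch of that conversion (the format string here is an assumption; the real one comes from `self._submission_ts_fmt`):

    from datetime import datetime, timezone

    TS_FMT = "%Y-%m-%d %H:%M:%S"  # assumed timestamp format, for illustration

    def parse_submission_timestamp(value: str):
        """Return a local-time datetime, or None for an empty field."""
        if not value:
            return None
        obj = datetime.strptime(value, TS_FMT)
        return obj.replace(tzinfo=timezone.utc).astimezone()

    print(parse_submission_timestamp("2024-01-02 13:45:00"))
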
@@ -1614,7 +1738,11 @@ class BaseApp(metaclass=Singleton):
             out_item["deleted"] = not path_exists
             if path_exists:
                 try:
+                    if status:
+                        status.update(f"Inspecting workflow {file_dat_i['path']!r}.")
                     wk_i = self.Workflow(file_dat_i["path"])
+                except KeyboardInterrupt:
+                    raise
                 except Exception:
                     wk_i = None
                     self.submission_logger.info(
@@ -1640,15 +1768,36 @@ class BaseApp(metaclass=Singleton):
                     out_item["deleted"] = True

                 else:
-
+                    if status:
+                        status.update(
+                            f"Reading workflow {file_dat_i['path']!r} submission info..."
+                        )
+                    with wk_i._store.cache_ctx():
+                        sub = wk_i.submissions[file_dat_i["sub_idx"]]
+
+                        all_jobscripts = sub._submission_parts[submit_time_str]
+                        out_item.update(
+                            {
+                                "jobscripts": all_jobscripts,
+                                "submission": sub,
+                            }
+                        )
+                        if not out_item["start_time"]:
+                            start_time_obj = sub.start_time
+                            if start_time_obj:
+                                start_time = datetime.strftime(start_time_obj, ts_fmt)
+                                out_item["start_time"] = start_time
+                                start_times[file_dat_i["local_id"]] = start_time
+                                out_item["start_time_obj"] = start_time_obj
+
+                        if not out_item["end_time"]:
+                            end_time_obj = sub.end_time
+                            if end_time_obj:
+                                end_time = datetime.strftime(end_time_obj, ts_fmt)
+                                out_item["end_time"] = end_time
+                                end_times[file_dat_i["local_id"]] = end_time
+                                out_item["end_time_obj"] = end_time_obj

-                    all_jobscripts = sub._submission_parts[submit_time_str]
-                    out_item.update(
-                        {
-                            "jobscripts": all_jobscripts,
-                            "submission": sub,
-                        }
-                    )
                     if file_dat_i["is_active"]:
                         # check it really is active:
                         run_key = (file_dat_i["path"], file_dat_i["sub_idx"])
@@ -1657,6 +1806,8 @@ class BaseApp(metaclass=Singleton):
                         else:
                             try:
                                 act_i_js = sub.get_active_jobscripts(as_json=as_json)
+                            except KeyboardInterrupt:
+                                raise
                             except Exception:
                                 self.submission_logger.info(
                                     f"failed to retrieve active jobscripts from workflow "
@@ -1679,8 +1830,10 @@ class BaseApp(metaclass=Singleton):

             out.append(out_item)

-        if inactive_IDs and not no_update:
-            removed_IDs = self.
+        if (inactive_IDs or start_times or end_times) and not no_update:
+            removed_IDs = self.update_known_subs_file(
+                inactive_IDs, start_times, end_times
+            )
             # remove these from the output, to avoid confusion (if kept, they would not
             # appear in the next invocation of this method):
             out = [i for i in out if i["local_id"] not in removed_IDs]
@@ -1694,9 +1847,7 @@ class BaseApp(metaclass=Singleton):
         out_access = sorted(
             out_access,
             key=lambda i: (
-                i["
-                or i["submission"].start_time
-                or i["submit_time_obj"]
+                i["end_time_obj"] or i["start_time_obj"] or i["submit_time_obj"]
             ),
             reverse=True,
         )
@@ -1756,6 +1907,7 @@ class BaseApp(metaclass=Singleton):
         )
         rich_print(group)

+    @TimeIt.decorator
     def _show(
         self,
         max_recent: int = 3,
@@ -1826,8 +1978,9 @@ class BaseApp(metaclass=Singleton):
                 run_dat = self._get_known_submissions(
                     max_recent=max_recent,
                     no_update=no_update,
+                    status=status,
                 )
-            except Exception:
+            except (Exception, KeyboardInterrupt):
                 status.stop()
                 raise
             else:
@@ -1878,8 +2031,8 @@ class BaseApp(metaclass=Singleton):

             start_time, end_time = None, None
             if not no_access:
-                start_time = dat_i["
-                end_time = dat_i["
+                start_time = dat_i["start_time_obj"]
+                end_time = dat_i["end_time_obj"]

             if "actions" in columns:
                 if not no_access:
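
Note: with start and end times now carried on each record, the `show` output orders submissions by the most recent known event, preferring end time, then start time, then submit time. A small runnable sketch of that sort key (the records here are fabricated for illustration):

    from datetime import datetime, timedelta

    now = datetime.now().astimezone()
    records = [
        {"end_time_obj": None, "start_time_obj": now - timedelta(hours=1),
         "submit_time_obj": now - timedelta(hours=2)},
        {"end_time_obj": now, "start_time_obj": now - timedelta(hours=3),
         "submit_time_obj": now - timedelta(hours=4)},
    ]
    # Most recently "touched" first, mirroring the sort key in `_get_known_submissions`:
    records.sort(
        key=lambda i: i["end_time_obj"] or i["start_time_obj"] or i["submit_time_obj"],
        reverse=True,
    )
    print([r["end_time_obj"] for r in records])
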
hpcflow/sdk/cli.py
CHANGED
@@ -25,6 +25,9 @@ from hpcflow.sdk.cli_common import (
     add_to_known_opt,
     print_idx_opt,
     tasks_opt,
+    cancel_opt,
+    submit_status_opt,
+    make_status_opt,
     zip_path_opt,
     zip_overwrite_opt,
     zip_log_opt,
@@ -71,6 +74,7 @@ def _make_API_CLI(app):
     @ts_fmt_option
     @ts_name_fmt_option
     @variables_option
+    @make_status_opt
     def make_workflow(
         template_file_or_str,
         string,
@@ -82,6 +86,7 @@ def _make_API_CLI(app):
         ts_fmt=None,
         ts_name_fmt=None,
         variables=None,
+        status=True,
     ):
         """Generate a new {app_name} workflow.

@@ -100,6 +105,7 @@ def _make_API_CLI(app):
             ts_fmt=ts_fmt,
             ts_name_fmt=ts_name_fmt,
             variables=dict(variables),
+            status=status,
         )
         click.echo(wk.path)

@@ -119,6 +125,8 @@ def _make_API_CLI(app):
     @add_to_known_opt
     @print_idx_opt
     @tasks_opt
+    @cancel_opt
+    @submit_status_opt
     def make_and_submit_workflow(
         template_file_or_str,
         string,
@@ -135,6 +143,8 @@ def _make_API_CLI(app):
         add_to_known=True,
         print_idx=False,
         tasks=None,
+        cancel=False,
+        status=True,
     ):
         """Generate and submit a new {app_name} workflow.

@@ -159,6 +169,8 @@ def _make_API_CLI(app):
             add_to_known=add_to_known,
             return_idx=print_idx,
             tasks=tasks,
+            cancel=cancel,
+            status=status,
         )
         if print_idx:
             click.echo(out)
@@ -320,6 +332,8 @@ def _make_workflow_CLI(app):
     @add_to_known_opt
     @print_idx_opt
     @tasks_opt
+    @cancel_opt
+    @submit_status_opt
     @click.pass_context
     def submit_workflow(
         ctx,
@@ -328,6 +342,8 @@ def _make_workflow_CLI(app):
         add_to_known=True,
         print_idx=False,
         tasks=None,
+        cancel=False,
+        status=True,
     ):
         """Submit the workflow."""
         out = ctx.obj["workflow"].submit(
@@ -336,6 +352,8 @@ def _make_workflow_CLI(app):
             add_to_known=add_to_known,
             return_idx=print_idx,
             tasks=tasks,
+            cancel=cancel,
+            status=status,
         )
         if print_idx:
             click.echo(out)
hpcflow/sdk/cli_common.py
CHANGED
@@ -109,6 +109,22 @@ tasks_opt = click.option(
     ),
     callback=sub_tasks_callback,
 )
+cancel_opt = click.option(
+    "--cancel",
+    help="Immediately cancel the submission. Useful for testing and benchmarking.",
+    is_flag=True,
+    default=False,
+)
+submit_status_opt = click.option(
+    "--status/--no-status",
+    help="If True, display a live status to track submission progress.",
+    default=True,
+)
+make_status_opt = click.option(
+    "--status/--no-status",
+    help="If True, display a live status to track workflow creation progress.",
+    default=True,
+)
 zip_path_opt = click.option(
     "--path",
     default=".",
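
Note: these shared options are ordinary click option decorators; cli.py stacks them onto the relevant commands (for example `@cancel_opt` and `@submit_status_opt` on `make_and_submit_workflow` and `submit_workflow`). A minimal self-contained sketch of the same pattern, reusing the definitions above (the command name and body are illustrative only):

    import click

    cancel_opt = click.option(
        "--cancel",
        help="Immediately cancel the submission. Useful for testing and benchmarking.",
        is_flag=True,
        default=False,
    )
    submit_status_opt = click.option(
        "--status/--no-status",
        help="If True, display a live status to track submission progress.",
        default=True,
    )

    @click.command(name="demo-submit")  # hypothetical command
    @cancel_opt
    @submit_status_opt
    def demo_submit(cancel, status):
        click.echo(f"cancel={cancel}, status={status}")

    if __name__ == "__main__":
        demo_submit()
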