hpcflow-new2 0.2.0a158__py3-none-any.whl → 0.2.0a160__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/_version.py +1 -1
- hpcflow/app.py +0 -3
- hpcflow/sdk/__init__.py +2 -0
- hpcflow/sdk/app.py +91 -18
- hpcflow/sdk/cli.py +18 -0
- hpcflow/sdk/cli_common.py +16 -0
- hpcflow/sdk/config/config.py +0 -4
- hpcflow/sdk/core/actions.py +20 -7
- hpcflow/sdk/core/command_files.py +4 -4
- hpcflow/sdk/core/element.py +15 -16
- hpcflow/sdk/core/rule.py +2 -0
- hpcflow/sdk/core/run_dir_files.py +63 -0
- hpcflow/sdk/core/task.py +34 -35
- hpcflow/sdk/core/utils.py +37 -15
- hpcflow/sdk/core/workflow.py +147 -49
- hpcflow/sdk/data/config_schema.yaml +0 -6
- hpcflow/sdk/demo/cli.py +12 -0
- hpcflow/sdk/log.py +2 -2
- hpcflow/sdk/persistence/base.py +142 -12
- hpcflow/sdk/persistence/json.py +84 -63
- hpcflow/sdk/persistence/pending.py +21 -7
- hpcflow/sdk/persistence/utils.py +2 -1
- hpcflow/sdk/persistence/zarr.py +143 -108
- hpcflow/sdk/runtime.py +0 -12
- hpcflow/sdk/submission/jobscript.py +25 -4
- hpcflow/sdk/submission/schedulers/sge.py +3 -0
- hpcflow/sdk/submission/schedulers/slurm.py +3 -0
- hpcflow/sdk/submission/shells/bash.py +2 -2
- hpcflow/sdk/submission/shells/powershell.py +2 -2
- hpcflow/sdk/submission/submission.py +24 -7
- hpcflow/tests/scripts/test_main_scripts.py +40 -0
- hpcflow/tests/unit/test_utils.py +28 -0
- {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/METADATA +1 -2
- {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/RECORD +36 -35
- {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a158.dist-info → hpcflow_new2-0.2.0a160.dist-info}/entry_points.txt +0 -0
hpcflow/_version.py
CHANGED
@@ -1 +1 @@
-__version__ = "0.2.0a158"
+__version__ = "0.2.0a160"
hpcflow/app.py
CHANGED
@@ -14,9 +14,6 @@ __dir__ = sdk_app.get_app_module_dir()
 config_options = ConfigOptions(
     directory_env_var="HPCFLOW_CONFIG_DIR",
     default_directory="~/.hpcflow-new",
-    sentry_DSN="https://2463b288fd1a40f4bada9f5ff53f6811@o1180430.ingest.sentry.io/6293231",
-    sentry_traces_sample_rate=1.0,
-    sentry_env="main" if "a" in __version__ else "develop",
 )

 # load built in template components (in this case, for demonstration purposes):
hpcflow/sdk/__init__.py
CHANGED
@@ -1,4 +1,5 @@
 """Sub-package to define an extensible hpcflow application."""
+
 import logging
 import os
 import sys
@@ -90,6 +91,7 @@ sdk_classes = {
     "SlurmPosix": "hpcflow.sdk.submission.schedulers.slurm",
     "SGEPosix": "hpcflow.sdk.submission.schedulers.sge",
     "OutputLabel": "hpcflow.sdk.core.task",
+    "RunDirAppFiles": "hpcflow.sdk.core.run_dir_files",
 }

 # these are defined as `BaseApp` methods with an underscore prefix:
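Note: the new `RunDirAppFiles` entry extends `sdk_classes`, a registry that maps an SDK class name to the module that defines it. The sketch below shows how such a name-to-module mapping can drive lazy imports; `get_sdk_class` is an illustrative helper, not hpcflow API, and the assumption that hpcflow resolves these entries on attribute access is mine.

from importlib import import_module

# Two entries copied from the `sdk_classes` registry shown above.
sdk_classes = {
    "OutputLabel": "hpcflow.sdk.core.task",
    "RunDirAppFiles": "hpcflow.sdk.core.run_dir_files",
}

def get_sdk_class(name: str):
    # Import the defining module only when the class is first requested.
    module = import_module(sdk_classes[name])
    return getattr(module, name)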
hpcflow/sdk/app.py
CHANGED
@@ -20,6 +20,7 @@ import warnings
 import zipfile
 from platformdirs import user_cache_path, user_data_dir
 from reretry import retry
+import rich
 from rich.console import Console, Group
 from rich.syntax import Syntax
 from rich.table import Table, box
@@ -31,8 +32,6 @@ from fsspec.core import url_to_fs
 from fsspec.implementations.local import LocalFileSystem


-from setuptools import find_packages
-
 from hpcflow import __version__
 from hpcflow.sdk.core.actions import EARStatus
 from hpcflow.sdk.core.errors import WorkflowNotFoundError
@@ -730,12 +729,13 @@ class BaseApp(metaclass=Singleton):

     @TimeIt.decorator
     def _load_scripts(self):
+        from setuptools import find_packages
+
         # TODO: load custom directories / custom functions (via decorator)

         app_module = import_module(self.package_name)
         root_scripts_dir = self.scripts_dir

-        # TODO: setuptools.find_packages takes a long time to import
         packages = find_packages(
             where=str(Path(app_module.__path__[0], *root_scripts_dir.split(".")))
         )
@@ -966,6 +966,7 @@ class BaseApp(metaclass=Singleton):
         }
         return item

+    @TimeIt.decorator
     def read_known_submissions_file(self) -> List[Dict]:
         """Retrieve existing workflows that *might* be running."""
         known = []
@@ -1025,6 +1026,7 @@ class BaseApp(metaclass=Singleton):

         return next_id

+    @TimeIt.decorator
     def set_inactive_in_known_subs_file(self, inactive_IDs: List[int]):
         """Set workflows in the known-submissions file to the non-running state.

@@ -1142,6 +1144,7 @@ class BaseApp(metaclass=Singleton):
         ts_name_fmt: Optional[str] = None,
         store_kwargs: Optional[Dict] = None,
         variables: Optional[Dict[str, str]] = None,
+        status: Optional[bool] = True,
     ) -> get_app_attribute("Workflow"):
         """Generate a new {app_name} workflow from a file or string containing a workflow
         template parametrisation.
@@ -1178,10 +1181,17 @@ class BaseApp(metaclass=Singleton):
             Keyword arguments to pass to the store's `write_empty_workflow` method.
         variables
             String variables to substitute in `template_file_or_str`.
+        status
+            If True, display a live status to track workflow creation progress.
         """

         self.API_logger.info("make_workflow called")

+        if status:
+            console = rich.console.Console()
+            status = console.status("Making persistent workflow...")
+            status.start()
+
         common = {
             "path": path,
             "name": name,
@@ -1191,6 +1201,7 @@ class BaseApp(metaclass=Singleton):
             "ts_name_fmt": ts_name_fmt,
             "store_kwargs": store_kwargs,
             "variables": variables,
+            "status": status,
         }

         if not is_string:
@@ -1201,10 +1212,24 @@ class BaseApp(metaclass=Singleton):
             )

         elif template_format == "json":
-            wk = self.Workflow.from_JSON_string(JSON_str=template_file_or_str, **common)
+            try:
+                wk = self.Workflow.from_JSON_string(
+                    JSON_str=template_file_or_str, **common
+                )
+            except Exception:
+                if status:
+                    status.stop()
+                raise

         elif template_format == "yaml":
-            wk = self.Workflow.from_YAML_string(YAML_str=template_file_or_str, **common)
+            try:
+                wk = self.Workflow.from_YAML_string(
+                    YAML_str=template_file_or_str, **common
+                )
+            except Exception:
+                if status:
+                    status.stop()
+                raise

         elif not template_format:
             raise ValueError(
@@ -1217,6 +1242,10 @@ class BaseApp(metaclass=Singleton):
                 f"Template format {template_format!r} not understood. Available template "
                 f"formats are {ALL_TEMPLATE_FORMATS!r}."
             )
+
+        if status:
+            status.stop()
+
         return wk

     def _make_and_submit_workflow(
@@ -1237,6 +1266,8 @@ class BaseApp(metaclass=Singleton):
         add_to_known: Optional[bool] = True,
         return_idx: Optional[bool] = False,
         tasks: Optional[List[int]] = None,
+        cancel: Optional[bool] = False,
+        status: Optional[bool] = True,
     ) -> Dict[int, int]:
         """Generate and submit a new {app_name} workflow from a file or string containing a
         workflow template parametrisation.
@@ -1289,6 +1320,11 @@ class BaseApp(metaclass=Singleton):
         tasks
             List of task indices to include in this submission. By default all tasks are
             included.
+        cancel
+            Immediately cancel the submission. Useful for testing and benchmarking.
+        status
+            If True, display a live status to track workflow creation and submission
+            progress.
         """

         self.API_logger.info("make_and_submit_workflow called")
@@ -1305,6 +1341,7 @@ class BaseApp(metaclass=Singleton):
             ts_name_fmt=ts_name_fmt,
             store_kwargs=store_kwargs,
             variables=variables,
+            status=status,
         )
         return wk.submit(
             JS_parallelism=JS_parallelism,
@@ -1312,6 +1349,8 @@ class BaseApp(metaclass=Singleton):
             add_to_known=add_to_known,
             return_idx=return_idx,
             tasks=tasks,
+            cancel=cancel,
+            status=status,
         )

     def _make_demo_workflow(
@@ -1326,6 +1365,7 @@ class BaseApp(metaclass=Singleton):
         ts_name_fmt: Optional[str] = None,
         store_kwargs: Optional[Dict] = None,
         variables: Optional[Dict[str, str]] = None,
+        status: Optional[bool] = True,
     ) -> get_app_attribute("Workflow"):
         """Generate a new {app_name} workflow from a builtin demo workflow template.

@@ -1359,10 +1399,17 @@ class BaseApp(metaclass=Singleton):
             Keyword arguments to pass to the store's `write_empty_workflow` method.
         variables
             String variables to substitute in the demo workflow template file.
+        status
+            If True, display a live status to track workflow creation progress.
         """

         self.API_logger.info("make_demo_workflow called")

+        if status:
+            console = rich.console.Console()
+            status = console.status("Making persistent workflow...")
+            status.start()
+
         with self.get_demo_workflow_template_file(workflow_name) as template_path:
             wk = self.Workflow.from_file(
                 template_path=template_path,
@@ -1375,7 +1422,10 @@ class BaseApp(metaclass=Singleton):
                 ts_name_fmt=ts_name_fmt,
                 store_kwargs=store_kwargs,
                 variables=variables,
+                status=status,
             )
+        if status:
+            status.stop()
         return wk

     def _make_and_submit_demo_workflow(
@@ -1395,6 +1445,8 @@ class BaseApp(metaclass=Singleton):
         add_to_known: Optional[bool] = True,
         return_idx: Optional[bool] = False,
         tasks: Optional[List[int]] = None,
+        cancel: Optional[bool] = False,
+        status: Optional[bool] = True,
     ) -> Dict[int, int]:
         """Generate and submit a new {app_name} workflow from a file or string containing a
         workflow template parametrisation.
@@ -1444,6 +1496,10 @@ class BaseApp(metaclass=Singleton):
         tasks
             List of task indices to include in this submission. By default all tasks are
             included.
+        cancel
+            Immediately cancel the submission. Useful for testing and benchmarking.
+        status
+            If True, display a live status to track submission progress.
         """

         self.API_logger.info("make_and_submit_demo_workflow called")
@@ -1466,6 +1522,8 @@ class BaseApp(metaclass=Singleton):
             add_to_known=add_to_known,
             return_idx=return_idx,
             tasks=tasks,
+            cancel=cancel,
+            status=status,
         )

     def _submit_workflow(
@@ -1557,8 +1615,13 @@ class BaseApp(metaclass=Singleton):
         )
         return shell.get_version_info(exclude_os)

+    @TimeIt.decorator
     def _get_known_submissions(
-        self,
+        self,
+        max_recent: int = 3,
+        no_update: bool = False,
+        as_json: bool = False,
+        status: Optional[Any] = None,
     ):
         """Retrieve information about active and recently inactive finished {app_name}
         workflows.
@@ -1583,6 +1646,8 @@ class BaseApp(metaclass=Singleton):
         inactive_IDs = []

         try:
+            if status:
+                status.update("Reading known submissions file...")
             known_subs = self.read_known_submissions_file()
         except FileNotFoundError:
             known_subs = []
@@ -1615,6 +1680,8 @@ class BaseApp(metaclass=Singleton):
             out_item["deleted"] = not path_exists
             if path_exists:
                 try:
+                    if status:
+                        status.update(f"Inspecting workflow {file_dat_i['path']!r}.")
                     wk_i = self.Workflow(file_dat_i["path"])
                 except Exception:
                     wk_i = None
@@ -1641,15 +1708,22 @@ class BaseApp(metaclass=Singleton):
                 out_item["deleted"] = True

             else:
-
-
-
-
-
-
-
-
-
+                if status:
+                    status.update(
+                        f"Reading workflow {file_dat_i['path']!r} submission info..."
+                    )
+                with wk_i._store.cache_ctx():
+                    sub = wk_i.submissions[file_dat_i["sub_idx"]]
+
+                all_jobscripts = sub._submission_parts[submit_time_str]
+                out_item.update(
+                    {
+                        "jobscripts": all_jobscripts,
+                        "submission": sub,
+                        "sub_start_time": sub.start_time,
+                        "sub_end_time": sub.end_time,
+                    }
+                )
             if file_dat_i["is_active"]:
                 # check it really is active:
                 run_key = (file_dat_i["path"], file_dat_i["sub_idx"])
@@ -1695,9 +1769,7 @@ class BaseApp(metaclass=Singleton):
         out_access = sorted(
             out_access,
             key=lambda i: (
-                i["submission"].end_time
-                or i["submission"].start_time
-                or i["submit_time_obj"]
+                i["sub_end_time"] or i["sub_start_time"] or i["submit_time_obj"]
             ),
             reverse=True,
         )
@@ -1827,6 +1899,7 @@ class BaseApp(metaclass=Singleton):
             run_dat = self._get_known_submissions(
                 max_recent=max_recent,
                 no_update=no_update,
+                status=status,
             )
         except Exception:
             status.stop()
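Note: the `status` handling added to `_make_workflow`, `_make_demo_workflow`, and `_get_known_submissions` follows one pattern: start a `rich` status spinner, hand the `Status` object down so callees can call `.update()`, and always stop the spinner before returning or re-raising. A minimal standalone sketch of that pattern, assuming a hypothetical `build_workflow` stand-in for the real creation step:

import rich.console

def build_workflow(status=None):
    # Hypothetical stand-in for the real workflow-creation work.
    if status:
        status.update("Writing persistent workflow...")
    return "workflow"

def make_workflow(status: bool = True):
    status_obj = None
    if status:
        console = rich.console.Console()
        status_obj = console.status("Making persistent workflow...")
        status_obj.start()
    try:
        wk = build_workflow(status=status_obj)
    except Exception:
        if status_obj:
            status_obj.stop()  # stop the spinner so the traceback prints cleanly
        raise
    if status_obj:
        status_obj.stop()
    return wk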
hpcflow/sdk/cli.py
CHANGED
@@ -25,6 +25,9 @@ from hpcflow.sdk.cli_common import (
     add_to_known_opt,
     print_idx_opt,
     tasks_opt,
+    cancel_opt,
+    submit_status_opt,
+    make_status_opt,
     zip_path_opt,
     zip_overwrite_opt,
     zip_log_opt,
@@ -71,6 +74,7 @@ def _make_API_CLI(app):
     @ts_fmt_option
     @ts_name_fmt_option
     @variables_option
+    @make_status_opt
     def make_workflow(
         template_file_or_str,
         string,
@@ -82,6 +86,7 @@ def _make_API_CLI(app):
         ts_fmt=None,
         ts_name_fmt=None,
         variables=None,
+        status=True,
     ):
         """Generate a new {app_name} workflow.

@@ -100,6 +105,7 @@ def _make_API_CLI(app):
             ts_fmt=ts_fmt,
             ts_name_fmt=ts_name_fmt,
             variables=dict(variables),
+            status=status,
         )
         click.echo(wk.path)

@@ -119,6 +125,8 @@ def _make_API_CLI(app):
     @add_to_known_opt
     @print_idx_opt
     @tasks_opt
+    @cancel_opt
+    @submit_status_opt
     def make_and_submit_workflow(
         template_file_or_str,
         string,
@@ -135,6 +143,8 @@ def _make_API_CLI(app):
         add_to_known=True,
         print_idx=False,
         tasks=None,
+        cancel=False,
+        status=True,
     ):
         """Generate and submit a new {app_name} workflow.

@@ -159,6 +169,8 @@ def _make_API_CLI(app):
             add_to_known=add_to_known,
             return_idx=print_idx,
             tasks=tasks,
+            cancel=cancel,
+            status=status,
         )
         if print_idx:
             click.echo(out)
@@ -320,6 +332,8 @@ def _make_workflow_CLI(app):
     @add_to_known_opt
     @print_idx_opt
     @tasks_opt
+    @cancel_opt
+    @submit_status_opt
     @click.pass_context
     def submit_workflow(
         ctx,
@@ -328,6 +342,8 @@ def _make_workflow_CLI(app):
         add_to_known=True,
         print_idx=False,
         tasks=None,
+        cancel=False,
+        status=True,
     ):
         """Submit the workflow."""
         out = ctx.obj["workflow"].submit(
@@ -336,6 +352,8 @@ def _make_workflow_CLI(app):
             add_to_known=add_to_known,
             return_idx=print_idx,
             tasks=tasks,
+            cancel=cancel,
+            status=status,
         )
         if print_idx:
             click.echo(out)
hpcflow/sdk/cli_common.py
CHANGED
@@ -109,6 +109,22 @@ tasks_opt = click.option(
     ),
     callback=sub_tasks_callback,
 )
+cancel_opt = click.option(
+    "--cancel",
+    help="Immediately cancel the submission. Useful for testing and benchmarking.",
+    is_flag=True,
+    default=False,
+)
+submit_status_opt = click.option(
+    "--status/--no-status",
+    help="If True, display a live status to track submission progress.",
+    default=True,
+)
+make_status_opt = click.option(
+    "--status/--no-status",
+    help="If True, display a live status to track workflow creation progress.",
+    default=True,
+)
 zip_path_opt = click.option(
     "--path",
     default=".",
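Note: the three new options are defined once here and applied as decorators to the relevant subcommands in `cli.py`, so every command gets identical flags and help text. `--status/--no-status` uses Click's paired boolean flag syntax: the flag defaults to on and can be switched off explicitly. A toy sketch of the same composition (the `submit` command below is hypothetical, not an hpcflow command):

import click

cancel_opt = click.option(
    "--cancel",
    help="Immediately cancel the submission.",
    is_flag=True,
    default=False,
)
status_opt = click.option(
    "--status/--no-status",  # paired flag: --status -> True, --no-status -> False
    help="Display a live status during submission.",
    default=True,
)

@click.command()
@cancel_opt
@status_opt
def submit(cancel, status):
    click.echo(f"cancel={cancel}, status={status}")

if __name__ == "__main__":
    submit()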
hpcflow/sdk/config/config.py
CHANGED
@@ -65,7 +65,6 @@ DEFAULT_CONFIG = {
     "invocation": {"environment_setup": None, "match": {}},
     "config": {
         "machine": socket.gethostname(),
-        "telemetry": True,
         "log_file_path": "logs/<<app_name>>_v<<app_version>>.log",
         "environment_sources": [],
         "task_schema_sources": [],
@@ -85,9 +84,6 @@ class ConfigOptions:

     default_directory: Union[Path, str]
     directory_env_var: str
-    sentry_DSN: str
-    sentry_traces_sample_rate: float
-    sentry_env: str
     default_config: Optional[Dict] = field(
         default_factory=lambda: deepcopy(DEFAULT_CONFIG)
     )
hpcflow/sdk/core/actions.py
CHANGED
@@ -4,7 +4,6 @@ from dataclasses import dataclass
 from datetime import datetime
 import enum
 import json
-import h5py
 from pathlib import Path
 import re
 from textwrap import indent, dedent
@@ -30,6 +29,7 @@ from hpcflow.sdk.core.utils import (
     swap_nested_dict_keys,
 )
 from hpcflow.sdk.log import TimeIt
+from hpcflow.sdk.core.run_dir_files import RunDirAppFiles


 ACTION_SCOPE_REGEX = r"(\w*)(?:\[(.*)\])?"
@@ -252,13 +252,16 @@ class ElementActionRun:
     @property
     def snapshot_start(self):
         if self._ss_start_obj is None and self._snapshot_start:
-            self._ss_start_obj = JSONLikeDirSnapShot(**self._snapshot_start)
+            self._ss_start_obj = JSONLikeDirSnapShot(
+                root_path=".",
+                **self._snapshot_start,
+            )
         return self._ss_start_obj

     @property
     def snapshot_end(self):
         if self._ss_end_obj is None and self._snapshot_end:
-            self._ss_end_obj = JSONLikeDirSnapShot(**self._snapshot_end)
+            self._ss_end_obj = JSONLikeDirSnapShot(root_path=".", **self._snapshot_end)
         return self._ss_end_obj

     @property
@@ -332,6 +335,7 @@ class ElementActionRun:
             run_idx=self.index,
         )

+    @TimeIt.decorator
     def get_parameter_sources(
         self,
         path: str = None,
@@ -364,6 +368,7 @@ class ElementActionRun:
             raise_on_unset=raise_on_unset,
         )

+    @TimeIt.decorator
     def get_EAR_dependencies(self, as_objects=False):
         """Get EARs that this EAR depends on."""

@@ -435,6 +440,7 @@ class ElementActionRun:
         return self._outputs

     @property
+    @TimeIt.decorator
     def resources(self):
         if not self._resources:
             self._resources = self.app.ElementResources(**self.get_resources())
@@ -452,6 +458,7 @@ class ElementActionRun:
             self._output_files = self.app.ElementOutputFiles(element_action_run=self)
         return self._output_files

+    @TimeIt.decorator
     def get_resources(self):
         """Resolve specific resources for this EAR, considering all applicable scopes and
         template-level resources."""
@@ -592,6 +599,8 @@ class ElementActionRun:
         return outputs

     def write_source(self, js_idx: int, js_act_idx: int):
+        import h5py
+
         for fmt, ins in self.action.script_data_in_grouped.items():
             if fmt == "json":
                 in_vals = self.get_input_values(inputs=ins, label_dict=False)
@@ -627,6 +636,8 @@ class ElementActionRun:
     def _param_save(self, js_idx: int, js_act_idx: int):
         """Save script-generated parameters that are stored within the supported script
         data output formats (HDF5, JSON, etc)."""
+        import h5py
+
         for fmt in self.action.script_data_out_grouped:
             if fmt == "json":
                 load_path = self.action.get_param_load_file_path_JSON(js_idx, js_act_idx)
@@ -994,6 +1005,7 @@ class ActionRule(JSONLike):
                 return True
         return False

+    @TimeIt.decorator
     def test(self, element_iteration: app.ElementIteration) -> bool:
         return self.rule.test(element_like=element_iteration, action=self.action)

@@ -1453,11 +1465,11 @@ class Action(JSONLike):

     @staticmethod
     def get_param_dump_file_stem(js_idx: int, js_act_idx: int):
-        return
+        return RunDirAppFiles.get_run_param_dump_file_prefix(js_idx, js_act_idx)

     @staticmethod
     def get_param_load_file_stem(js_idx: int, js_act_idx: int):
-        return
+        return RunDirAppFiles.get_run_param_load_file_prefix(js_idx, js_act_idx)

     def get_param_dump_file_path_JSON(self, js_idx: int, js_act_idx: int):
         return Path(self.get_param_dump_file_stem(js_idx, js_act_idx) + ".json")
@@ -1896,6 +1908,7 @@ class Action(JSONLike):
             if typ in (OFP.inputs or []):
                 return True

+    @TimeIt.decorator
     def test_rules(self, element_iter) -> List[bool]:
         """Test all rules against the specified element iteration."""
         return [i.test(element_iteration=element_iter) for i in self.rules]
@@ -1940,7 +1953,7 @@ class Action(JSONLike):
             """\
                 import {app_module} as app
                 app.load_config(
-                    log_file_path=Path("{
+                    log_file_path=Path("{run_log_file}").resolve(),
                     config_dir=r"{cfg_dir}",
                     config_key=r"{cfg_invoc_key}",
                 )
@@ -1949,7 +1962,7 @@ class Action(JSONLike):
                 EAR = wk.get_EARs_from_IDs([EAR_ID])[0]
             """
         ).format(
-
+            run_log_file=self.app.RunDirAppFiles.get_log_file_name(),
             app_module=self.app.module,
             cfg_dir=self.app.config.config_directory,
             cfg_invoc_key=self.app.config.config_key,
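Note: this file also moves the module-level `import h5py` into `write_source` and `_param_save`, matching the removal of the old TODO about `setuptools.find_packages` being slow to import in `sdk/app.py`: heavyweight imports are deferred to the few code paths that need them so that importing the package stays fast. A sketch of the deferred-import pattern (illustrative only; `load_hdf5_params` is a hypothetical helper, not hpcflow API):

def load_hdf5_params(path):
    import h5py  # deferred: only paid for when HDF5 I/O is actually needed

    with h5py.File(path, "r") as f:
        # Read every top-level dataset into a plain dict.
        return {
            name: obj[()] for name, obj in f.items() if isinstance(obj, h5py.Dataset)
        }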
hpcflow/sdk/core/command_files.py
CHANGED
@@ -159,7 +159,7 @@ class InputFileGenerator(JSONLike):
                 from pathlib import Path
                 import {app_module} as app
                 app.load_config(
-                    log_file_path=Path("{
+                    log_file_path=Path("{run_log_file}").resolve(),
                     config_dir=r"{cfg_dir}",
                     config_key=r"{cfg_invoc_key}",
                 )
@@ -171,7 +171,7 @@ class InputFileGenerator(JSONLike):
             """
         )
         main_block = main_block.format(
-
+            run_log_file=self.app.RunDirAppFiles.get_log_file_name(),
             app_module=self.app.module,
             cfg_dir=self.app.config.config_directory,
             cfg_invoc_key=self.app.config.config_key,
@@ -303,7 +303,7 @@ class OutputFileParser(JSONLike):
                 from pathlib import Path
                 import {app_module} as app
                 app.load_config(
-                    log_file_path=Path("{
+                    log_file_path=Path("{run_log_file}").resolve(),
                     config_dir=r"{cfg_dir}",
                     config_key=r"{cfg_invoc_key}",
                 )
@@ -321,7 +321,7 @@ class OutputFileParser(JSONLike):
             """
         )
         main_block = main_block.format(
-
+            run_log_file=self.app.RunDirAppFiles.get_log_file_name(),
            app_module=self.app.module,
            cfg_dir=self.app.config.config_directory,
            cfg_invoc_key=self.app.config.config_key,
|