hpcflow-new2 0.2.0a190__py3-none-any.whl → 0.2.0a199__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
Files changed (130)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +1 -0
  2. hpcflow/_version.py +1 -1
  3. hpcflow/data/scripts/bad_script.py +2 -0
  4. hpcflow/data/scripts/do_nothing.py +2 -0
  5. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  6. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  7. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  8. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  9. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  10. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  11. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  12. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  13. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  14. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  15. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  16. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  17. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  18. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  19. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  20. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  21. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  22. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  23. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  24. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  25. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  26. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  27. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  28. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  29. hpcflow/data/scripts/script_exit_test.py +5 -0
  30. hpcflow/data/template_components/environments.yaml +1 -1
  31. hpcflow/sdk/__init__.py +5 -0
  32. hpcflow/sdk/app.py +150 -89
  33. hpcflow/sdk/cli.py +263 -84
  34. hpcflow/sdk/cli_common.py +99 -5
  35. hpcflow/sdk/config/callbacks.py +38 -1
  36. hpcflow/sdk/config/config.py +102 -13
  37. hpcflow/sdk/config/errors.py +19 -5
  38. hpcflow/sdk/config/types.py +3 -0
  39. hpcflow/sdk/core/__init__.py +25 -1
  40. hpcflow/sdk/core/actions.py +914 -262
  41. hpcflow/sdk/core/cache.py +76 -34
  42. hpcflow/sdk/core/command_files.py +14 -128
  43. hpcflow/sdk/core/commands.py +35 -6
  44. hpcflow/sdk/core/element.py +122 -50
  45. hpcflow/sdk/core/errors.py +58 -2
  46. hpcflow/sdk/core/execute.py +207 -0
  47. hpcflow/sdk/core/loop.py +408 -50
  48. hpcflow/sdk/core/loop_cache.py +4 -4
  49. hpcflow/sdk/core/parameters.py +382 -37
  50. hpcflow/sdk/core/run_dir_files.py +13 -40
  51. hpcflow/sdk/core/skip_reason.py +7 -0
  52. hpcflow/sdk/core/task.py +119 -30
  53. hpcflow/sdk/core/task_schema.py +68 -0
  54. hpcflow/sdk/core/test_utils.py +66 -27
  55. hpcflow/sdk/core/types.py +54 -1
  56. hpcflow/sdk/core/utils.py +78 -7
  57. hpcflow/sdk/core/workflow.py +1538 -336
  58. hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
  59. hpcflow/sdk/demo/cli.py +7 -0
  60. hpcflow/sdk/helper/cli.py +1 -0
  61. hpcflow/sdk/log.py +42 -15
  62. hpcflow/sdk/persistence/base.py +405 -53
  63. hpcflow/sdk/persistence/json.py +177 -52
  64. hpcflow/sdk/persistence/pending.py +237 -69
  65. hpcflow/sdk/persistence/store_resource.py +3 -2
  66. hpcflow/sdk/persistence/types.py +15 -4
  67. hpcflow/sdk/persistence/zarr.py +928 -81
  68. hpcflow/sdk/submission/jobscript.py +1408 -489
  69. hpcflow/sdk/submission/schedulers/__init__.py +40 -5
  70. hpcflow/sdk/submission/schedulers/direct.py +33 -19
  71. hpcflow/sdk/submission/schedulers/sge.py +51 -16
  72. hpcflow/sdk/submission/schedulers/slurm.py +44 -16
  73. hpcflow/sdk/submission/schedulers/utils.py +7 -2
  74. hpcflow/sdk/submission/shells/base.py +68 -20
  75. hpcflow/sdk/submission/shells/bash.py +222 -129
  76. hpcflow/sdk/submission/shells/powershell.py +200 -150
  77. hpcflow/sdk/submission/submission.py +852 -119
  78. hpcflow/sdk/submission/types.py +18 -21
  79. hpcflow/sdk/typing.py +24 -5
  80. hpcflow/sdk/utils/arrays.py +71 -0
  81. hpcflow/sdk/utils/deferred_file.py +55 -0
  82. hpcflow/sdk/utils/hashing.py +16 -0
  83. hpcflow/sdk/utils/patches.py +12 -0
  84. hpcflow/sdk/utils/strings.py +33 -0
  85. hpcflow/tests/api/test_api.py +32 -0
  86. hpcflow/tests/conftest.py +19 -0
  87. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  88. hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
  89. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  90. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  91. hpcflow/tests/scripts/test_main_scripts.py +821 -70
  92. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  93. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  94. hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -0
  95. hpcflow/tests/unit/test_action.py +176 -0
  96. hpcflow/tests/unit/test_app.py +20 -0
  97. hpcflow/tests/unit/test_cache.py +46 -0
  98. hpcflow/tests/unit/test_cli.py +133 -0
  99. hpcflow/tests/unit/test_config.py +122 -1
  100. hpcflow/tests/unit/test_element_iteration.py +47 -0
  101. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  102. hpcflow/tests/unit/test_loop.py +1332 -27
  103. hpcflow/tests/unit/test_meta_task.py +325 -0
  104. hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
  105. hpcflow/tests/unit/test_parameter.py +13 -0
  106. hpcflow/tests/unit/test_persistence.py +190 -8
  107. hpcflow/tests/unit/test_run.py +109 -3
  108. hpcflow/tests/unit/test_run_directories.py +29 -0
  109. hpcflow/tests/unit/test_shell.py +20 -0
  110. hpcflow/tests/unit/test_submission.py +5 -76
  111. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  112. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  113. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  114. hpcflow/tests/unit/utils/test_patches.py +5 -0
  115. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  116. hpcflow/tests/workflows/__init__.py +0 -0
  117. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  118. hpcflow/tests/workflows/test_jobscript.py +332 -0
  119. hpcflow/tests/workflows/test_run_status.py +198 -0
  120. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  121. hpcflow/tests/workflows/test_submission.py +140 -0
  122. hpcflow/tests/workflows/test_workflows.py +142 -2
  123. hpcflow/tests/workflows/test_zip.py +18 -0
  124. hpcflow/viz_demo.ipynb +6587 -3
  125. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +7 -4
  126. hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
  127. hpcflow_new2-0.2.0a190.dist-info/RECORD +0 -165
  128. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
  129. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
  130. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/persistence/json.py (+177 -52)

@@ -12,6 +12,8 @@ from typing import cast, TYPE_CHECKING
 from typing_extensions import override
 
 from fsspec import filesystem, AbstractFileSystem  # type: ignore
+import numpy as np
+from hpcflow.sdk.core import RUN_DIR_ARR_DTYPE, RUN_DIR_ARR_FILL
 from hpcflow.sdk.core.errors import (
     MissingParameterData,
     MissingStoreEARError,
@@ -28,14 +30,17 @@ from hpcflow.sdk.persistence.base import (
     StoreTask,
     update_param_source_dict,
 )
+from hpcflow.sdk.submission.submission import JOBSCRIPT_SUBMIT_TIME_KEYS
 from hpcflow.sdk.persistence.pending import CommitResourceMap
 from hpcflow.sdk.persistence.store_resource import JSONFileStoreResource
+from hpcflow.sdk.typing import DataIndex
 
 if TYPE_CHECKING:
     from collections.abc import Iterable, Iterator, Mapping, Sequence
     from datetime import datetime
-    from typing import Any, ClassVar
+    from typing import Any, ClassVar, Literal
     from typing_extensions import Self
+    from numpy.typing import NDArray
     from ..app import BaseApp
     from ..core.json_like import JSONed, JSONDocument
     from ..core.workflow import Workflow
@@ -142,6 +147,7 @@ class JsonStoreEAR(StoreEAR["RunMeta", None]):
             "commands_idx": self.commands_idx,
             "data_idx": self.data_idx,
             "submission_idx": self.submission_idx,
+            "commands_file_ID": self.commands_file_ID,
             "success": self.success,
             "skip": self.skip,
             "start_time": self._encode_datetime(self.start_time, ts_fmt),
@@ -151,6 +157,7 @@ class JsonStoreEAR(StoreEAR["RunMeta", None]):
             "exit_code": self.exit_code,
             "metadata": self.metadata,
             "run_hostname": self.run_hostname,
+            "port_number": self.port_number,
         }
 
     @override
@@ -195,11 +202,13 @@ class JSONPersistentStore(
     _meta_res: ClassVar[str] = "metadata"
     _params_res: ClassVar[str] = "parameters"
     _subs_res: ClassVar[str] = "submissions"
+    _runs_res: ClassVar[str] = "runs"
 
     _res_file_names: ClassVar[Mapping[str, str]] = {
         _meta_res: "metadata.json",
         _params_res: "parameters.json",
         _subs_res: "submissions.json",
+        _runs_res: "runs.json",
     }
 
     _res_map: ClassVar[CommitResourceMap] = CommitResourceMap(
@@ -208,23 +217,27 @@ class JSONPersistentStore(
         commit_loop_num_iters=(_meta_res,),
         commit_loop_parents=(_meta_res,),
         commit_submissions=(_subs_res,),
-        commit_submission_parts=(_subs_res,),
+        commit_at_submit_metadata=(_subs_res,),
         commit_js_metadata=(_subs_res,),
         commit_elem_IDs=(_meta_res,),
         commit_elements=(_meta_res,),
+        commit_element_sets=(_meta_res,),
         commit_elem_iter_IDs=(_meta_res,),
         commit_elem_iters=(_meta_res,),
         commit_loop_indices=(_meta_res,),
         commit_elem_iter_EAR_IDs=(_meta_res,),
         commit_EARs_initialised=(_meta_res,),
-        commit_EARs=(_meta_res,),
-        commit_EAR_submission_indices=(_meta_res,),
-        commit_EAR_skips=(_meta_res,),
-        commit_EAR_starts=(_meta_res,),
-        commit_EAR_ends=(_meta_res,),
+        commit_EARs=(_runs_res,),
+        commit_EAR_submission_indices=(_runs_res,),
+        commit_EAR_skips=(_runs_res,),
+        commit_EAR_starts=(_runs_res,),
+        commit_EAR_ends=(_runs_res,),
         commit_template_components=(_meta_res,),
         commit_parameters=(_params_res,),
         commit_param_sources=(_params_res,),
+        commit_set_run_dirs=(_runs_res,),
+        commit_iter_data_idx=(_meta_res,),
+        commit_run_data_idx=(_runs_res,),
     )
 
     @classmethod
@@ -254,14 +267,63 @@ class JSONPersistentStore(
             self._meta_res: self._get_store_resource(app, "metadata", path, fs),
             self._params_res: self._get_store_resource(app, "parameters", path, fs),
             self._subs_res: self._get_store_resource(app, "submissions", path, fs),
+            self._runs_res: self._get_store_resource(app, "runs", path, fs),
         }
         super().__init__(app, workflow, path, fs)
 
+        # store-specific cache data, assigned in `using_resource()` when
+        # `_use_parameters_metadata_cache` is True, and set back to None when exiting the
+        # `parameters_metadata_cache` context manager.
+        self._parameters_file_dat: dict[str, dict[str, Any]] | None = None
+
     @contextmanager
     def cached_load(self) -> Iterator[None]:
         """Context manager to cache the metadata."""
         with self.using_resource("metadata", "read"):
+            with self.using_resource("runs", "read"):
+                yield
+
+    @contextmanager
+    def using_resource(
+        self,
+        res_label: Literal["metadata", "submissions", "parameters", "attrs", "runs"],
+        action: str,
+    ) -> Iterator[Any]:
+        """Context manager for managing `StoreResource` objects associated with the store.
+
+        Notes
+        -----
+        This overridden method facilitates easier use of the
+        `JSONPersistentStore`-specific implementation of the `parameters_metadata_cache`,
+        which in this case is just a copy of the `parameters.json` file data.
+
+        """
+
+        if (
+            self._use_parameters_metadata_cache
+            and res_label == "parameters"
+            and action == "read"
+        ):
+            if not self._parameters_file_dat:
+                with super().using_resource(
+                    cast("Literal['parameters']", res_label), action
+                ) as res:
+                    self._parameters_file_dat = res
+            yield self._parameters_file_dat
+
+        else:
+            with super().using_resource(res_label, action) as res:
+                yield res
+
+    @contextmanager
+    def parameters_metadata_cache(self) -> Iterator[None]:
+        """Context manager for using the parameters-metadata cache."""
+        self._use_parameters_metadata_cache = True
+        try:
             yield
+        finally:
+            self._use_parameters_metadata_cache = False
+            self._parameters_file_dat = None  # clear cache data
 
     def remove_replaced_dir(self) -> None:
         """
@@ -332,16 +394,20 @@ class JSONPersistentStore(
             "tasks": [],
             "elements": [],
             "iters": [],
-            "runs": [],
             "num_added_tasks": 0,
             "loops": [],
         }
+        runs: dict[str, list] = {
+            "runs": [],
+            "run_dirs": [],
+        }
         if replaced_wk:
             metadata["replaced_workflow"] = replaced_wk
 
         cls._get_store_resource(app, "metadata", wk_path, fs)._dump(metadata)
         cls._get_store_resource(app, "parameters", wk_path, fs)._dump(parameters)
         cls._get_store_resource(app, "submissions", wk_path, fs)._dump(submissions)
+        cls._get_store_resource(app, "runs", wk_path, fs)._dump(runs)
 
     def _append_tasks(self, tasks: Iterable[StoreTask]):
         with self.using_resource("metadata", action="update") as md:
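With this hunk a freshly created workflow is written as four JSON resource documents instead of three, with run (EAR) data split out of `metadata.json` into `runs.json`. A sketch of the initial on-disk contents, based on `write_empty_workflow` above (the `parameters.json` keys are assumptions inferred from the `params["data"]` access later in this diff):

    # illustrative initial contents of each resource file for a new workflow
    resource_files = {
        "metadata.json": {
            "tasks": [],
            "elements": [],
            "iters": [],
            "num_added_tasks": 0,
            "loops": [],
        },
        "parameters.json": {"data": {}},  # plus parameter-source records (assumed)
        "submissions.json": [],           # one document appended per submission
        "runs.json": {"runs": [], "run_dirs": []},  # new in this version
    }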
@@ -360,12 +426,13 @@ class JSONPersistentStore(
                 {
                     "num_added_iterations": loop["num_added_iterations"],
                     "iterable_parameters": loop["iterable_parameters"],
+                    "output_parameters": loop["output_parameters"],
                     "parents": loop["parents"],
                 }
             )
             md["template"]["loops"].append(loop["loop_template"])
 
-    def _append_submissions(self, subs: dict[int, JSONDocument]):
+    def _append_submissions(self, subs: dict[int, Mapping[str, JSONed]]):
         with self.using_resource("submissions", action="update") as subs_res:
             subs_res.extend(subs.values())
 
@@ -409,18 +476,19 @@ class JSONPersistentStore(
             assert "iters" in md
             md["iters"][iter_ID]["EARs_initialised"] = True
 
-    def _append_submission_parts(self, sub_parts: dict[int, dict[str, list[int]]]):
+    def _update_at_submit_metadata(self, at_submit_metadata: dict[int, dict[str, Any]]):
         with self.using_resource("submissions", action="update") as subs_res:
-            for sub_idx, sub_i_parts in sub_parts.items():
+            for sub_idx, metadata_i in at_submit_metadata.items():
                 sub = subs_res[sub_idx]
                 assert isinstance(sub, dict)
-                for dt_str, parts_j in sub_i_parts.items():
-                    sub["submission_parts"][dt_str] = parts_j
+                for dt_str, parts_j in metadata_i["submission_parts"].items():
+                    sub["at_submit_metadata"]["submission_parts"][dt_str] = parts_j
 
-    def _update_loop_index(self, iter_ID: int, loop_idx: Mapping[str, int]):
+    def _update_loop_index(self, loop_indices: dict[int, dict[str, int]]):
         with self.using_resource("metadata", action="update") as md:
             assert "iters" in md
-            md["iters"][iter_ID]["loop_idx"].update(loop_idx)
+            for iter_ID, loop_idx in loop_indices.items():
+                md["iters"][iter_ID]["loop_idx"].update(loop_idx)
 
     def _update_loop_num_iters(self, index: int, num_iters: list[list[list[int] | int]]):
         with self.using_resource("metadata", action="update") as md:
@@ -432,53 +500,87 @@ class JSONPersistentStore(
             assert "loops" in md
             md["loops"][index]["parents"] = parents
 
-    def _append_EARs(self, EARs: Sequence[JsonStoreEAR]):
+    def _update_iter_data_indices(self, iter_data_indices: dict[int, DataIndex]):
         with self.using_resource("metadata", action="update") as md:
+            assert "iters" in md
+            for iter_ID, dat_idx in iter_data_indices.items():
+                md["iters"][iter_ID]["data_idx"].update(dat_idx)
+
+    def _update_run_data_indices(self, run_data_indices: dict[int, DataIndex]):
+        with self.using_resource("runs", action="update") as md:
             assert "runs" in md
-            md["runs"].extend(ear.encode(self.ts_fmt, None) for ear in EARs)
+            for run_ID, dat_idx in run_data_indices.items():
+                md["runs"][run_ID]["data_idx"].update(dat_idx)
 
-    def _update_EAR_submission_indices(self, sub_indices: Mapping[int, int]):
-        with self.using_resource("metadata", action="update") as md:
+    def _append_EARs(self, EARs: Sequence[JsonStoreEAR]):
+        with self.using_resource("runs", action="update") as md:
+            assert "runs" in md
+            assert "run_dirs" in md
+            md["runs"].extend(i.encode(self.ts_fmt, None) for i in EARs)
+            md["run_dirs"].extend([None] * len(EARs))
+
+    def _set_run_dirs(self, run_dir_arr: np.ndarray, run_idx: np.ndarray):
+        with self.using_resource("runs", action="update") as md:
+            assert "run_dirs" in md
+            dirs_lst = md["run_dirs"]
+            for idx, r_idx in enumerate(run_idx):
+                dirs_lst[r_idx] = run_dir_arr[idx].item()
+            md["run_dirs"] = dirs_lst
+
+    def _update_EAR_submission_data(self, sub_data: Mapping[int, tuple[int, int | None]]):
+        with self.using_resource("runs", action="update") as md:
             assert "runs" in md
-            for EAR_ID_i, sub_idx_i in sub_indices.items():
+            for EAR_ID_i, (sub_idx_i, cmd_file_ID) in sub_data.items():
                 md["runs"][EAR_ID_i]["submission_idx"] = sub_idx_i
+                md["runs"][EAR_ID_i]["commands_file_ID"] = cmd_file_ID
 
     def _update_EAR_start(
-        self, EAR_id: int, s_time: datetime, s_snap: dict[str, Any], s_hn: str
+        self,
+        run_starts: dict[int, tuple[datetime, dict[str, Any] | None, str, int | None]],
     ):
-        with self.using_resource("metadata", action="update") as md:
+        with self.using_resource("runs", action="update") as md:
             assert "runs" in md
-            md["runs"][EAR_id]["start_time"] = s_time.strftime(self.ts_fmt)
-            md["runs"][EAR_id]["snapshot_start"] = s_snap
-            md["runs"][EAR_id]["run_hostname"] = s_hn
+            for run_id, (s_time, s_snap, s_hn, port_number) in run_starts.items():
+                md["runs"][run_id]["start_time"] = s_time.strftime(self.ts_fmt)
+                md["runs"][run_id]["snapshot_start"] = s_snap
+                md["runs"][run_id]["run_hostname"] = s_hn
+                md["runs"][run_id]["port_number"] = port_number
 
     def _update_EAR_end(
-        self,
-        EAR_id: int,
-        e_time: datetime,
-        e_snap: dict[str, Any],
-        ext_code: int,
-        success: bool,
+        self, run_ends: dict[int, tuple[datetime, dict[str, Any] | None, int, bool]]
     ):
-        with self.using_resource("metadata", action="update") as md:
+        with self.using_resource("runs", action="update") as md:
             assert "runs" in md
-            md["runs"][EAR_id]["end_time"] = e_time.strftime(self.ts_fmt)
-            md["runs"][EAR_id]["snapshot_end"] = e_snap
-            md["runs"][EAR_id]["exit_code"] = ext_code
-            md["runs"][EAR_id]["success"] = success
-
-    def _update_EAR_skip(self, EAR_id: int):
-        with self.using_resource("metadata", action="update") as md:
+            for run_id, (e_time, e_snap, ext_code, success) in run_ends.items():
+                md["runs"][run_id]["end_time"] = e_time.strftime(self.ts_fmt)
+                md["runs"][run_id]["snapshot_end"] = e_snap
+                md["runs"][run_id]["exit_code"] = ext_code
+                md["runs"][run_id]["success"] = success
+
+    def _update_EAR_skip(self, skips: dict[int, int]):
+        with self.using_resource("runs", action="update") as md:
             assert "runs" in md
-            md["runs"][EAR_id]["skip"] = True
+            for run_ID, reason in skips.items():
+                md["runs"][run_ID]["skip"] = reason
 
     def _update_js_metadata(self, js_meta: dict[int, dict[int, dict[str, Any]]]):
         with self.using_resource("submissions", action="update") as sub_res:
             for sub_idx, all_js_md in js_meta.items():
                 sub = cast("dict[str, list[dict[str, Any]]]", sub_res[sub_idx])
                 for js_idx, js_meta_i in all_js_md.items():
-                    sub_i = sub["jobscripts"][js_idx]
-                    sub_i.update(**js_meta_i)
+                    self.logger.info(
+                        f"updating jobscript metadata for (sub={sub_idx}, js={js_idx}): "
+                        f"{js_meta_i!r}."
+                    )
+                    _at_submit_md = {
+                        k: js_meta_i.pop(k)
+                        for k in JOBSCRIPT_SUBMIT_TIME_KEYS
+                        if k in js_meta_i
+                    }
+                    sub["jobscripts"][js_idx].update(**js_meta_i)
+                    sub["jobscripts"][js_idx]["at_submit_metadata"].update(
+                        **_at_submit_md
+                    )
 
     def _append_parameters(self, params: Sequence[StoreParameter]):
         with self.using_resource("parameters", "update") as params_u:
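The run-state setters above all change shape the same way: instead of taking a single run's values, each now takes a mapping keyed by run ID and applies the whole batch inside one `using_resource(..., action="update")` context, i.e. one read-modify-write of `runs.json` rather than one per run. A toy sketch of the batching pattern (the file handling is illustrative, not hpcflow's):

    import json
    from contextlib import contextmanager
    from pathlib import Path
    from typing import Any, Iterator


    @contextmanager
    def updating_json(path: Path) -> Iterator[dict[str, Any]]:
        """Read-modify-write a JSON document as a single unit of work."""
        doc = json.loads(path.read_text())
        yield doc
        path.write_text(json.dumps(doc))


    def update_run_skips(path: Path, skips: dict[int, int]) -> None:
        # one file round-trip for the whole batch, not one per run
        with updating_json(path) as md:
            for run_id, reason in skips.items():
                md["runs"][run_id]["skip"] = reason

For a JSON store the whole document is rewritten on every update, so batching N run updates into one context turns N parse/serialise cycles into one.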
@@ -552,7 +654,7 @@ class JSONPersistentStore(
         if self.use_cache and self.num_EARs_cache is not None:
             num = self.num_EARs_cache
         else:
-            with self.using_resource("metadata", action="read") as md:
+            with self.using_resource("runs", action="read") as md:
                 assert "runs" in md
                 num = len(md["runs"])
         if self.use_cache and self.num_EARs_cache is None:
@@ -560,8 +662,15 @@ class JSONPersistentStore(
         return num
 
     def _get_num_persistent_parameters(self) -> int:
-        with self.using_resource("parameters", "read") as params:
-            return len(params["data"])
+        if self.use_cache and self.num_params_cache is not None:
+            num = self.num_params_cache
+        else:
+            with self.using_resource("parameters", "read") as params:
+                assert "data" in params
+                num = len(params["data"])
+        if self.use_cache and self.num_params_cache is None:
+            self.num_params_cache = num
+        return num
 
     def _get_num_persistent_added_tasks(self) -> int:
         with self.using_resource("metadata", "read") as md:
@@ -620,7 +729,7 @@ class JSONPersistentStore(
             assert "tasks" in md
             new_tasks = {
                 i["id_"]: JsonStoreTask.decode({**i, "index": idx})
-                for idx, i in enumerate(md["tasks"])
+                for idx, i in enumerate(cast("Sequence[TaskMeta]", md["tasks"]))
                 if id_lst is None or i["id_"] in id_lst
             }
             self.task_cache.update(new_tasks)
@@ -640,7 +749,7 @@ class JSONPersistentStore(
 
     def _get_persistent_submissions(
         self, id_lst: Iterable[int] | None = None
-    ) -> dict[int, JSONDocument]:
+    ) -> dict[int, Mapping[str, JSONed]]:
         with self.using_resource("submissions", "read") as sub_res:
             subs_dat = copy.deepcopy(
                 {
@@ -649,13 +758,16 @@ class JSONPersistentStore(
                     if id_lst is None or idx in id_lst
                 }
             )
-            # cast jobscript submit-times and jobscript `task_elements` keys:
+            # cast jobscript `task_elements` keys:
            for sub in subs_dat.values():
-                js: dict[str, dict[str | int, Any]]
+                js: dict[str, Any]
                 assert isinstance(sub, dict)
                 for js in sub["jobscripts"]:
-                    for key in list(te := js["task_elements"]):
-                        te[int(key)] = te.pop(key)
+                    blk: dict[str, Any]
+                    assert isinstance(js, dict)
+                    for blk in js["blocks"]:
+                        for key in list(te := blk["task_elements"]):
+                            te[int(key)] = te.pop(key)
 
         return subs_dat
 
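The key-casting loop above exists because JSON object keys are always strings: the integer `task_elements` keys come back as "0", "1", ... after a round-trip and must be restored, now one level deeper under each jobscript's `blocks`. The pattern in isolation:

    import json

    task_elements = {0: [1, 2], 1: [3]}
    te = json.loads(json.dumps(task_elements))  # keys become "0", "1"
    for key in list(te):
        te[int(key)] = te.pop(key)              # restore integer keys in place
    assert te == task_elements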
@@ -701,7 +813,7 @@ class JSONPersistentStore(
     def _get_persistent_EARs(self, id_lst: Iterable[int]) -> dict[int, JsonStoreEAR]:
         runs, id_lst_ = self._get_cached_persistent_EARs(id_lst)
         if id_lst_:
-            with self.using_resource("metadata", action="read") as md:
+            with self.using_resource("runs", action="read") as md:
                 try:
                     if "runs" not in md:
                         raise KeyError
@@ -824,3 +936,16 @@ class JSONPersistentStore(
         status: bool = True,
     ) -> Any:
         raise TypeError("unsupported operation: rechunk-json")
+
+    def get_dirs_array(self) -> NDArray:
+        """
+        Retrieve the run directories array.
+        """
+        with self.using_resource("runs", action="read") as md:
+            dirs_lst = md["run_dirs"]
+            dirs_arr = np.zeros(len(dirs_lst), dtype=RUN_DIR_ARR_DTYPE)
+            dirs_arr[:] = RUN_DIR_ARR_FILL
+            for idx, i in enumerate(dirs_lst):
+                if i is not None:
+                    dirs_arr[idx] = tuple(i)
+        return dirs_arr
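`get_dirs_array` materialises the JSON `run_dirs` list as a NumPy structured array, pre-filled with a sentinel record for runs whose directory is not yet set. The real dtype and fill value come from `hpcflow.sdk.core`; the stand-ins below are hypothetical but show the fill-then-overwrite pattern:

    import numpy as np

    # hypothetical stand-ins for RUN_DIR_ARR_DTYPE / RUN_DIR_ARR_FILL
    RUN_DIR_ARR_DTYPE = [("elem", "u4"), ("iter", "u4"), ("run", "u4")]
    RUN_DIR_ARR_FILL = (2**32 - 1, 2**32 - 1, 2**32 - 1)  # "unset" sentinel

    run_dirs = [None, [0, 1, 0], None, [2, 0, 1]]  # as stored in runs.json

    dirs_arr = np.zeros(len(run_dirs), dtype=RUN_DIR_ARR_DTYPE)
    dirs_arr[:] = RUN_DIR_ARR_FILL       # every record starts as the sentinel
    for idx, dirs in enumerate(run_dirs):
        if dirs is not None:
            dirs_arr[idx] = tuple(dirs)  # structured records assign from tuples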