hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a199__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as published to a supported public registry. It is provided for informational purposes only.
- hpcflow/__pyinstaller/hook-hpcflow.py +9 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/environments.yaml +1 -1
- hpcflow/sdk/__init__.py +26 -15
- hpcflow/sdk/app.py +2192 -768
- hpcflow/sdk/cli.py +506 -296
- hpcflow/sdk/cli_common.py +105 -7
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +115 -43
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +674 -318
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +125 -84
- hpcflow/sdk/config/types.py +148 -0
- hpcflow/sdk/core/__init__.py +25 -1
- hpcflow/sdk/core/actions.py +1771 -1059
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +139 -79
- hpcflow/sdk/core/command_files.py +263 -287
- hpcflow/sdk/core/commands.py +145 -112
- hpcflow/sdk/core/element.py +828 -535
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +455 -52
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +751 -347
- hpcflow/sdk/core/loop_cache.py +164 -47
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +1100 -627
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +21 -37
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +1649 -1339
- hpcflow/sdk/core/task_schema.py +308 -196
- hpcflow/sdk/core/test_utils.py +191 -114
- hpcflow/sdk/core/types.py +440 -0
- hpcflow/sdk/core/utils.py +485 -309
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +2544 -1178
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
- hpcflow/sdk/demo/cli.py +53 -33
- hpcflow/sdk/helper/cli.py +18 -15
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +122 -71
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +1360 -606
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +568 -188
- hpcflow/sdk/persistence/pending.py +382 -179
- hpcflow/sdk/persistence/store_resource.py +39 -23
- hpcflow/sdk/persistence/types.py +318 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +1337 -433
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +1651 -692
- hpcflow/sdk/submission/schedulers/__init__.py +167 -39
- hpcflow/sdk/submission/schedulers/direct.py +121 -81
- hpcflow/sdk/submission/schedulers/sge.py +170 -129
- hpcflow/sdk/submission/schedulers/slurm.py +291 -268
- hpcflow/sdk/submission/schedulers/utils.py +12 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +150 -29
- hpcflow/sdk/submission/shells/bash.py +283 -173
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +228 -170
- hpcflow/sdk/submission/submission.py +1014 -335
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +182 -12
- hpcflow/sdk/utils/arrays.py +71 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +12 -0
- hpcflow/sdk/utils/strings.py +33 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +27 -6
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +866 -85
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +12 -4
- hpcflow/tests/unit/test_action.py +262 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +33 -6
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +134 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +142 -16
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +50 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +1396 -84
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +29 -7
- hpcflow/tests/unit/test_persistence.py +237 -42
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +117 -6
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +23 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +38 -89
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +334 -1
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +160 -15
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6587 -3
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +8 -4
- hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
--- a/hpcflow/sdk/persistence/pending.py
+++ b/hpcflow/sdk/persistence/pending.py
@@ -5,17 +5,53 @@ Class to hold the state that is waiting to be committed to disk.
 from __future__ import annotations
 
 from collections import defaultdict
+from collections.abc import Callable
 import contextlib
 from dataclasses import dataclass, field, fields
-from
+from functools import wraps
+import copy
 
-from typing import Any,
-
-from hpcflow.sdk.log import TimeIt
+from typing import Any, Generic, TYPE_CHECKING, TypeVar
+from typing_extensions import ParamSpec
 
+import numpy as np
 
-
-
+from hpcflow.sdk.log import TimeIt
+from hpcflow.sdk.submission.submission import SUBMISSION_SUBMIT_TIME_KEYS
+from hpcflow.sdk.typing import DataIndex
+from hpcflow.sdk.utils.strings import shorten_list_str
+
+
+from hpcflow.sdk.persistence.types import (
+    AnySTask,
+    AnySElement,
+    AnySElementIter,
+    AnySEAR,
+    AnySParameter,
+)
+
+if TYPE_CHECKING:
+    from collections.abc import Mapping, Sequence
+    from datetime import datetime
+    from logging import Logger
+    from .base import PersistentStore, FileDescriptor, LoopDescriptor
+    from ..app import BaseApp
+    from ..typing import ParamSource
+    from ..core.json_like import JSONed
+
+P = ParamSpec("P")
+T = TypeVar("T")
+
+_commit_method_data_map: dict[str, list[str]] = defaultdict(
+    list
+)  # note: this is updated at module-import time.
+
+
+class PendingChanges(
+    Generic[AnySTask, AnySElement, AnySElementIter, AnySEAR, AnySParameter]
+):
+    """
+    Class to store pending changes and merge them into a persistent store.
 
     Parameters
     ----------
@@ -27,70 +63,104 @@ class PendingChanges:
         Map of resources, used when processing commits.
     """
 
-
-
+    # These would be in the docstring except they render really wrongly!
+    # Type Parameters
+    # ---------------
+    # AnySTask
+    #     The type of stored tasks.
+    # AnySElement
+    #     The type of stored elements.
+    # AnySElementIter
+    #     The type of stored element iterations.
+    # AnySEAR
+    #     The type of stored EARs.
+    # AnySParameter
+    #     The type of stored parameters.
+
+    def __init__(
+        self,
+        app: BaseApp,
+        store: PersistentStore[
+            AnySTask, AnySElement, AnySElementIter, AnySEAR, AnySParameter
+        ],
+        resource_map: CommitResourceMap,
+    ):
+        self._app = app
         self.store = store
         self.resource_map = resource_map
 
         #: Keys are new task IDs.
-        self.add_tasks:
+        self.add_tasks: dict[int, AnySTask] = {}
         #: Keys are loop IDs, values are loop descriptors.
-        self.add_loops:
+        self.add_loops: dict[int, LoopDescriptor] = {}
         #: Keys are submission IDs, values are submission descriptors.
-        self.add_submissions:
+        self.add_submissions: dict[int, Mapping[str, JSONed]] = {}
         #: Keys are element IDs.
-        self.add_elements:
+        self.add_elements: dict[int, AnySElement] = {}
         #: Keys are element iteration IDs.
-        self.add_elem_iters:
+        self.add_elem_iters: dict[int, AnySElementIter] = {}
         #: Keys are element action run IDs.
-        self.add_EARs:
+        self.add_EARs: dict[int, AnySEAR] = {}
         #: Keys are parameter indices and values are tuples whose first element is data
         #: to add and whose second element is the source dict for the new data.
-        self.add_parameters:
+        self.add_parameters: dict[int, AnySParameter] = {}
         #: Workflow-related files (inputs, outputs) added to the persistent store.
-        self.add_files:
+        self.add_files: list[FileDescriptor] = []
         #: Template components to add.
-        self.add_template_components:
+        self.add_template_components: dict[str, dict[str, dict]] = {}
        #: Keys are element set IDs, values are descriptors.
-        self.add_element_sets:
+        self.add_element_sets: dict[int, list[Mapping]] = {}
 
         #: Keys are task IDs, and values are element IDs to add to that task.
-        self.add_elem_IDs:
+        self.add_elem_IDs: dict[int, list[int]] = {}
         #: Keys are element IDs, and values are iteration IDs to add to that element.
-        self.add_elem_iter_IDs:
+        self.add_elem_iter_IDs: dict[int, list[int]] = {}
         #: Keys are element iteration IDs, then EAR action index, and values are EAR IDs.
         #: This is a list of EAR IDs to add to a given element iteration action.
-        self.add_elem_iter_EAR_IDs:
-        #: Submission
-        self.
+        self.add_elem_iter_EAR_IDs: dict[int, dict[int, list[int]]] = {}
+        #: Submission metadata added at submit-time, including submission parts.
+        self.update_at_submit_metadata: dict[int, dict[str, Any]] = {}
 
         #: IDs of EARs to mark as initialised.
-        self.set_EARs_initialised:
-        #: Submission IDs to attach to EARs.
-        self.
+        self.set_EARs_initialised: list[int] = []
+        #: Submission IDs and commands file IDs to attach to EARs.
+        self.set_EAR_submission_data: dict[int, tuple[int, int | None]] = {}
         #: IDs of EARs to mark as skipped.
-        self.set_EAR_skips:
-        #: Keys are EAR IDs and values are tuples of start time,
-
+        self.set_EAR_skips: dict[int, int] = {}
+        #: Keys are EAR IDs and values are tuples of start time, start dir snapshot, run
+        #: hostname, and port number.
+        self.set_EAR_starts: dict[
+            int, tuple[datetime, dict[str, Any] | None, str, int | None]
+        ] = {}
         #: Keys are EAR IDs and values are tuples of end time, end dir snapshot, exit
         #: code, and success boolean.
-        self.set_EAR_ends:
+        self.set_EAR_ends: dict[
+            int, tuple[datetime, dict[str, Any] | None, int, bool]
+        ] = {}
+        #: Each list item is a tuple of two arrays, the first of which is a run directory
+        #: indices array, and the second of which is an integer array indicating with
+        #: which run ID each run directory is associated.
+        self.set_run_dirs: list[tuple[np.ndarray, np.ndarray]] = []
 
         #: Keys are IDs of jobscripts.
-        self.set_js_metadata:
+        self.set_js_metadata: dict[int, dict[int, dict[str, Any]]] = {}
 
-        #: Keys are IDs of parameters to add or modify
-
+        #: Keys are IDs of parameters to add or modify, and values are tuples of the
+        #: parameter value, and whether the parameter is a file.
+        self.set_parameters: dict[int, tuple[Any, bool]] = {}
 
         #: Keys are parameter indices and values are dict parameter sources to merge
         #: with existing source of that parameter.
-        self.update_param_sources:
+        self.update_param_sources: dict[int, ParamSource] = {}
         #: Keys are indices of loops, values are descriptions of what to update.
-        self.update_loop_indices:
+        self.update_loop_indices: dict[int, dict[str, int]] = {}
         #: Keys are indices of loops, values are number of iterations.
-        self.update_loop_num_iters:
+        self.update_loop_num_iters: dict[int, list[list[list[int] | int]]] = {}
        #: Keys are indices of loops, values are list of parent names.
-        self.update_loop_parents:
+        self.update_loop_parents: dict[int, list[str]] = {}
+
+        self.update_iter_data_idx: dict[int, DataIndex] = {}
+        self.update_run_data_idx: dict[int, DataIndex] = {}
 
         self.reset(is_init=True)  # set up initial data structures
 
@@ -106,43 +176,99 @@ class PendingChanges:
             or bool(self.add_elem_IDs)
             or bool(self.add_elem_iter_IDs)
             or bool(self.add_elem_iter_EAR_IDs)
-            or bool(self.
+            or bool(self.update_at_submit_metadata)
             or bool(self.add_parameters)
             or bool(self.add_files)
             or bool(self.add_template_components)
             or bool(self.add_element_sets)
             or bool(self.set_EARs_initialised)
-            or bool(self.
+            or bool(self.set_EAR_submission_data)
             or bool(self.set_EAR_starts)
             or bool(self.set_EAR_ends)
             or bool(self.set_EAR_skips)
+            or bool(self.set_run_dirs)
             or bool(self.set_js_metadata)
             or bool(self.set_parameters)
             or bool(self.update_param_sources)
             or bool(self.update_loop_indices)
             or bool(self.update_loop_num_iters)
             or bool(self.update_loop_parents)
+            or bool(self.update_iter_data_idx)
+            or bool(self.update_run_data_idx)
         )
 
-    def where_pending(self) ->
+    def where_pending(self) -> list[str]:
         """
         Get the list of items for which there is some outstanding pending items.
         """
-
-
-            for k, v in self.__dict__.items()
-            if k not in ("app", "store", "resource_map") and bool(v)
-        ]
+        excluded = {"app", "store", "resource_map"}
+        return [k for k, v in self.__dict__.items() if k not in excluded and bool(v)]
 
     @property
-    def logger(self):
+    def logger(self) -> Logger:
         """
         The logger.
         """
-        return self.
+        return self._app.persistence_logger
+
+    def commits_data(*data_list: str):
+        """Decorator that wraps `PendingChanges.commit_*` methods with arguments listing
+        which `PendingChanges` attributes must have non-trivial data in them for the method's
+        invocation to be required.
+
+        Notes
+        -----
+        This essentially provides a mapping between `PendingChanges` attributes and
+        `commit_*` methods. This allows us to only open the resources that need updating
+        in `PendingChanges.commit_all`.
+
+        We use a decorator rather than an explicitly declaring the map in
+        `_commit_method_data_map` to make the mapping obvious near the commit methods, and
+        hopefully avoid us forgetting to update `_commit_method_data_map` when we modify
+        or add commit methods in future!
+
+        """
+
+        def decorator(func: Callable[P, T]) -> Callable[P, T]:
+
+            _commit_method_data_map[func.__name__].extend(data_list)
+
+            @wraps(func)
+            def inner(*args, **kwargs) -> T:
+                return func(*args, **kwargs)
+
+            return inner
+
+        return decorator
+
+    def get_pending_resource_map_groups(self) -> dict[tuple[str, ...], list[str]]:
+        """Retrive resource map groups, where values are filtered to include only those
+        commit methods that must be invoked, due to pending data associated with those
+        methods.
+
+        Notes
+        -----
+        This method allows us to open only those resources that need to be updated, given
+        the state of pending data.
+        """
+
+        where_pending = self.where_pending()
+        pending_groups = {}
+        for res_names, methods in self.resource_map.groups.items():
+            req_methods = [
+                meth_i
+                for meth_i in methods
+                if any(
+                    dat_j in where_pending for dat_j in _commit_method_data_map[meth_i]
+                )
+            ]
+            if req_methods:
+                pending_groups[res_names] = req_methods
+
+        return pending_groups
 
     @TimeIt.decorator
-    def commit_all(self):
+    def commit_all(self) -> None:
         """Commit all pending changes to disk."""
         self.logger.info(f"committing all pending changes: {self.where_pending()}")
 
@@ -150,23 +276,25 @@ class PendingChanges:
             self.logger.debug("commit: no pending changes to commit.")
             return
 
-        for resources, methods in self.
+        for resources, methods in self.get_pending_resource_map_groups().items():
             # for each resource, enter `using_resource` context manager in "update" mode:
             with contextlib.ExitStack() as stack:
                 for res in resources:
-
-
+                    stack.enter_context(
+                        self.store.using_resource(res, "update")  # type: ignore[call-overload]
+                    )
                 for meth in methods:
                     getattr(self, meth)()
 
         assert not (self)
 
     @TimeIt.decorator
+    @commits_data("add_tasks")
     def commit_tasks(self) -> None:
         """Commit pending tasks to disk."""
         if self.add_tasks:
             tasks = self.store.get_tasks_by_IDs(self.add_tasks)
-            task_ids =
+            task_ids = set(self.add_tasks)
             self.logger.debug(f"commit: adding pending tasks with IDs: {task_ids!r}")
             self.store._append_tasks(tasks)
             self.store.num_tasks_cache = None  # invalidate cache
@@ -177,12 +305,13 @@ class PendingChanges:
         self._clear_add_tasks()
 
     @TimeIt.decorator
+    @commits_data("add_loops")
     def commit_loops(self) -> None:
         """Commit pending loops to disk."""
         if self.add_loops:
             # retrieve pending loops, including pending changes to num_added_iterations:
             loops = self.store.get_loops_by_IDs(self.add_loops)
-            loop_ids =
+            loop_ids = set(self.add_loops)
             self.logger.debug(f"commit: adding pending loops with indices {loop_ids!r}")
             self.store._append_loops(loops)
 
@@ -198,12 +327,15 @@ class PendingChanges:
         self._clear_add_loops()
 
     @TimeIt.decorator
+    @commits_data("add_submissions")
     def commit_submissions(self) -> None:
         """Commit pending submissions to disk."""
         if self.add_submissions:
             # retrieve pending submissions:
-            subs = self.store.get_submissions_by_ID(
-
+            subs = self.store.get_submissions_by_ID(
+                self.add_submissions
+            )  # TODO: I think this just returns add_submissions?
+            sub_ids = set(self.add_submissions)
             self.logger.debug(
                 f"commit: adding pending submissions with indices {sub_ids!r}"
             )
@@ -211,16 +343,18 @@ class PendingChanges:
             self._clear_add_submissions()
 
     @TimeIt.decorator
-
+    @commits_data("update_at_submit_metadata")
+    def commit_at_submit_metadata(self) -> None:
         """
-        Commit pending submission parts
+        Commit to disk pending at-submit-time metadata, including submission parts.
         """
-        if self.
-            self.logger.debug(
-            self.store.
-            self.
+        if self.update_at_submit_metadata:
+            self.logger.debug("commit: adding pending at-submit metadata")
+            self.store._update_at_submit_metadata(self.update_at_submit_metadata)
+            self._clear_at_submit_metadata()
 
     @TimeIt.decorator
+    @commits_data("add_elem_IDs")
     def commit_elem_IDs(self) -> None:
         """
         Commit pending element ID updates to disk.
@@ -235,13 +369,14 @@ class PendingChanges:
         self._clear_add_elem_IDs()
 
     @TimeIt.decorator
+    @commits_data("add_elements")
     def commit_elements(self) -> None:
         """
         Commit pending elements to disk.
         """
         if self.add_elements:
             elems = self.store.get_elements(self.add_elements)
-            elem_ids =
+            elem_ids = set(self.add_elements)
             self.logger.debug(f"commit: adding pending elements with IDs: {elem_ids!r}")
             self.store._append_elements(elems)
             # pending iter IDs that belong to pending elements are now committed:
@@ -251,17 +386,19 @@ class PendingChanges:
         self._clear_add_elements()
 
     @TimeIt.decorator
+    @commits_data("add_element_sets")
     def commit_element_sets(self) -> None:
         """
         Commit pending element sets to disk.
         """
         # TODO: could be batched up?
         for task_id, es_js in self.add_element_sets.items():
-            self.logger.debug(
+            self.logger.debug("commit: adding pending element sets.")
             self.store._append_element_sets(task_id, es_js)
         self._clear_add_element_sets()
 
     @TimeIt.decorator
+    @commits_data("add_elem_iter_IDs")
     def commit_elem_iter_IDs(self) -> None:
         """
         Commit pending element iteration ID updates to disk.
@@ -277,21 +414,27 @@ class PendingChanges:
         self._clear_add_elem_iter_IDs()
 
     @TimeIt.decorator
+    @commits_data("add_elem_iters")
     def commit_elem_iters(self) -> None:
         """
         Commit pending element iterations to disk.
         """
         if self.add_elem_iters:
-            iters = self.store.get_element_iterations(self.add_elem_iters
-            iter_ids =
+            iters = self.store.get_element_iterations(self.add_elem_iters)
+            iter_ids = set(self.add_elem_iters)
             self.logger.debug(
                 f"commit: adding pending element iterations with IDs: {iter_ids!r}"
             )
             self.store._append_elem_iters(iters)
             # pending EAR IDs that belong to pending iters are now committed:
-            self.add_elem_iter_EAR_IDs
-
-
+            add_elem_iter_EAR_IDs_cur = copy.deepcopy(self.add_elem_iter_EAR_IDs)
+            self._clear_add_elem_iter_EAR_IDs()  # reset to empty nested defaultdict
+            for iter_id, all_run_IDs in add_elem_iter_EAR_IDs_cur.items():
+                # only re-assign iter_IDs that have not been comitted above:
+                if iter_id not in iter_ids:
+                    for act_idx, run_IDs in all_run_IDs.items():
+                        self.add_elem_iter_EAR_IDs[iter_id][act_idx].extend(run_IDs)
+
             # pending EARs_initialised that belong to pending iters are now committed:
             self.set_EARs_initialised = [
                 i for i in self.set_EARs_initialised if i not in iter_ids
@@ -299,6 +442,7 @@ class PendingChanges:
         self._clear_add_elem_iters()
 
     @TimeIt.decorator
+    @commits_data("add_elem_iter_EAR_IDs")
     def commit_elem_iter_EAR_IDs(self) -> None:
         """
         Commit pending element action run ID updates to disk.
@@ -315,24 +459,25 @@ class PendingChanges:
         self._clear_add_elem_iter_EAR_IDs()
 
     @TimeIt.decorator
+    @commits_data("add_EARs")
     def commit_EARs(self) -> None:
         """
         Commit pending element action runs to disk.
         """
         if self.add_EARs:
             EARs = self.store.get_EARs(self.add_EARs)
-            EAR_ids = list(self.add_EARs
+            EAR_ids = list(self.add_EARs)
             self.logger.debug(f"commit: adding pending EARs with IDs: {EAR_ids!r}")
             self.store._append_EARs(EARs)
             self.store.num_EARs_cache = None  # invalidate cache
             # pending start/end times/snapshots, submission indices, and skips that belong
             # to pending EARs are now committed (accounted for in `get_EARs` above):
-            self.
-                k: v
-
-
+            self.set_EAR_submission_data = {
+                k: v for k, v in self.set_EAR_submission_data.items() if k not in EAR_ids
+            }
+            self.set_EAR_skips = {
+                k: v for k, v in self.set_EAR_skips.items() if k not in EAR_ids
             }
-            self.set_EAR_skips = [i for i in self.set_EAR_skips if i not in EAR_ids]
             self.set_EAR_starts = {
                 k: v for k, v in self.set_EAR_starts.items() if k not in EAR_ids
             }
@@ -343,6 +488,17 @@ class PendingChanges:
         self._clear_add_EARs()
 
     @TimeIt.decorator
+    @commits_data("set_run_dirs")
+    def commit_set_run_dirs(self) -> None:
+        """
+        Commit pending run directory indices.
+        """
+        for run_dir_arr, run_idx in self.set_run_dirs:
+            self.store._set_run_dirs(run_dir_arr, run_idx)
+        self._clear_set_run_dirs()
+
+    @TimeIt.decorator
+    @commits_data("set_EARs_initialised")
     def commit_EARs_initialised(self) -> None:
         """
         Commit pending element action run init state updates to disk.
@@ -360,63 +516,71 @@ class PendingChanges:
         self._clear_set_EARs_initialised()
 
     @TimeIt.decorator
+    @commits_data("set_EAR_submission_data")
     def commit_EAR_submission_indices(self) -> None:
         """
         Commit pending element action run submission index updates to disk.
         """
-        if self.
+        if self.set_EAR_submission_data:
             self.logger.debug(
-                f"commit: updating submission
-                f"{self.set_EAR_submission_indices!r}."
+                f"commit: updating submission data: {self.set_EAR_submission_data!r}."
             )
-            self.store.
-            for EAR_ID_i in self.
+            self.store._update_EAR_submission_data(self.set_EAR_submission_data)
+            for EAR_ID_i in self.set_EAR_submission_data:
                 self.store.EAR_cache.pop(EAR_ID_i, None)  # invalidate cache
-            self.
+            self._clear_EAR_submission_data()
 
     @TimeIt.decorator
+    @commits_data("set_EAR_starts")
     def commit_EAR_starts(self) -> None:
         """
         Commit pending element action run start information to disk.
         """
-
-
+        updates = self.set_EAR_starts
+        if updates:
             self.logger.debug(
-                f"commit:
-                f"(
+                f"commit: registering {len(updates)} run(s) as started: "
+                f"{shorten_list_str(updates)}."
             )
-            self.store._update_EAR_start(
-
+            self.store._update_EAR_start(updates)
+            for run_id in updates:
+                self.store.EAR_cache.pop(run_id, None)  # invalidate cache
         self._clear_set_EAR_starts()
 
     @TimeIt.decorator
+    @commits_data("set_EAR_ends")
     def commit_EAR_ends(self) -> None:
         """
         Commit pending element action run finish information to disk.
         """
-
-
+        updates = self.set_EAR_ends
+        if updates:
             self.logger.debug(
-                f"commit:
-                f"
+                f"commit: registering {len(updates)} run(s) as ended: "
+                f"{shorten_list_str(updates)}, with exit codes: "
+                f"{shorten_list_str([i[2] for i in updates.values()])}."
             )
-            self.store._update_EAR_end(
-
+            self.store._update_EAR_end(updates)
+            for run_id in updates:
+                self.store.EAR_cache.pop(run_id, None)  # invalidate cache
         self._clear_set_EAR_ends()
 
     @TimeIt.decorator
+    @commits_data("set_EAR_skips")
     def commit_EAR_skips(self) -> None:
         """
         Commit pending element action skip flags to disk.
         """
-
-
-            self.logger.debug(f"commit: setting
-            self.store._update_EAR_skip(
-
+        updates = self.set_EAR_skips
+        if updates:
+            self.logger.debug(f"commit: setting {len(updates)} run IDs as skipped.")
+            self.store._update_EAR_skip(updates)
+            for run_ID in updates:
+                self.store.EAR_cache.pop(run_ID, None)  # invalidate cache
         self._clear_set_EAR_skips()
 
     @TimeIt.decorator
+    @commits_data("set_js_metadata")
     def commit_js_metadata(self) -> None:
         """
         Commit pending jobscript metadata changes to disk.
@@ -429,47 +593,50 @@ class PendingChanges:
         self._clear_set_js_metadata()
 
     @TimeIt.decorator
+    @commits_data("add_parameters", "set_parameters")
     def commit_parameters(self) -> None:
         """Make pending parameters persistent."""
         if self.add_parameters:
             params = self.store.get_parameters(self.add_parameters)
-            param_ids = list(self.add_parameters
+            param_ids = list(self.add_parameters)
             self.logger.debug(f"commit: adding pending parameters IDs: {param_ids!r}")
             self.store._append_parameters(params)
             self._clear_add_parameters()
 
         if self.set_parameters:
-            param_ids = list(self.set_parameters
+            param_ids = list(self.set_parameters)
             self.logger.debug(f"commit: setting values of parameter IDs {param_ids!r}.")
             self.store._set_parameter_values(self.set_parameters)
             for id_i in param_ids:
                 self.store.parameter_cache.pop(id_i, None)
-
             self._clear_set_parameters()
 
     @TimeIt.decorator
+    @commits_data("add_files")
     def commit_files(self) -> None:
         """Add pending files to the files directory."""
         if self.add_files:
-            self.logger.debug(
+            self.logger.debug("commit: adding pending files to the files directory.")
             self.store._append_files(self.add_files)
         self._clear_add_files()
 
     @TimeIt.decorator
+    @commits_data("add_template_components")
     def commit_template_components(self) -> None:
         """
         Commit pending template components to disk.
         """
         if self.add_template_components:
-            self.logger.debug(
+            self.logger.debug("commit: adding template components.")
            self.store._update_template_components(self.store.get_template_components())
         self._clear_add_template_components()
 
     @TimeIt.decorator
+    @commits_data("update_param_sources")
     def commit_param_sources(self) -> None:
         """Make pending changes to parameter sources persistent."""
         if self.update_param_sources:
-            param_ids = list(self.update_param_sources
+            param_ids = list(self.update_param_sources)
             self.logger.debug(f"commit: updating sources of parameter IDs {param_ids!r}.")
             self.store._update_parameter_sources(self.update_param_sources)
             for id_i in param_ids:
@@ -477,19 +644,21 @@ class PendingChanges:
         self._clear_update_param_sources()
 
     @TimeIt.decorator
+    @commits_data("update_loop_indices")
     def commit_loop_indices(self) -> None:
         """Make pending update to element iteration loop indices persistent."""
-
-
+        updates = self.update_loop_indices
+        if updates:
             self.logger.debug(
-                f"commit: updating loop indices of
-                f"{loop_idx!r}."
+                f"commit: updating loop indices of {len(updates)} iteration(s)."
             )
-            self.store._update_loop_index(
-
+            self.store._update_loop_index(updates)
+            for iter_ID in updates:
+                self.store.element_iter_cache.pop(iter_ID, None)  # invalidate cache
         self._clear_update_loop_indices()
 
     @TimeIt.decorator
+    @commits_data("update_loop_num_iters")
     def commit_loop_num_iters(self) -> None:
         """Make pending update to the number of loop iterations."""
         for index, num_iters in self.update_loop_num_iters.items():
@@ -500,6 +669,7 @@ class PendingChanges:
         self._clear_update_loop_num_iters()
 
     @TimeIt.decorator
+    @commits_data("update_loop_parents")
     def commit_loop_parents(self) -> None:
         """Make pending update to additional loop parents."""
         for index, parents in self.update_loop_parents.items():
@@ -507,82 +677,107 @@ class PendingChanges:
             self.store._update_loop_parents(index, parents)
         self._clear_update_loop_parents()
 
-
+    @TimeIt.decorator
+    @commits_data("update_iter_data_idx")
+    def commit_iter_data_idx(self) -> None:
+        if self.update_iter_data_idx:
+            self.store._update_iter_data_indices(self.update_iter_data_idx)
+            self._clear_update_iter_data_idx()
+
+    @TimeIt.decorator
+    @commits_data("update_run_data_idx")
+    def commit_run_data_idx(self) -> None:
+        if self.update_run_data_idx:
+            self.store._update_run_data_indices(self.update_run_data_idx)
+            self._clear_update_run_data_idx()
+
+    def _clear_add_tasks(self) -> None:
         self.add_tasks = {}
 
-    def _clear_add_loops(self):
+    def _clear_add_loops(self) -> None:
         self.add_loops = {}
 
-    def _clear_add_submissions(self):
+    def _clear_add_submissions(self) -> None:
         self.add_submissions = {}
 
-    def
-        self.
+    def _clear_at_submit_metadata(self) -> None:
+        self.update_at_submit_metadata = defaultdict(
+            lambda: {i: None for i in SUBMISSION_SUBMIT_TIME_KEYS}
+        )
 
-    def _clear_add_elements(self):
+    def _clear_add_elements(self) -> None:
         self.add_elements = {}
 
-    def _clear_add_element_sets(self):
+    def _clear_add_element_sets(self) -> None:
         self.add_element_sets = defaultdict(list)
 
-    def _clear_add_elem_iters(self):
+    def _clear_add_elem_iters(self) -> None:
         self.add_elem_iters = {}
 
-    def _clear_add_EARs(self):
+    def _clear_add_EARs(self) -> None:
         self.add_EARs = {}
 
-    def
+    def _clear_set_run_dirs(self):
+        self.set_run_dirs = []
+
+    def _clear_add_elem_IDs(self) -> None:
         self.add_elem_IDs = defaultdict(list)
 
-    def _clear_add_elem_iter_IDs(self):
+    def _clear_add_elem_iter_IDs(self) -> None:
         self.add_elem_iter_IDs = defaultdict(list)
 
-    def _clear_add_elem_iter_EAR_IDs(self):
+    def _clear_add_elem_iter_EAR_IDs(self) -> None:
         self.add_elem_iter_EAR_IDs = defaultdict(lambda: defaultdict(list))
 
-    def _clear_set_EARs_initialised(self):
+    def _clear_set_EARs_initialised(self) -> None:
         self.set_EARs_initialised = []
 
-    def
-        self.
+    def _clear_EAR_submission_data(self) -> None:
+        self.set_EAR_submission_data = {}
 
-    def _clear_set_EAR_starts(self):
+    def _clear_set_EAR_starts(self) -> None:
         self.set_EAR_starts = {}
 
-    def _clear_set_EAR_ends(self):
+    def _clear_set_EAR_ends(self) -> None:
         self.set_EAR_ends = {}
 
-    def _clear_set_EAR_skips(self):
-        self.set_EAR_skips =
+    def _clear_set_EAR_skips(self) -> None:
+        self.set_EAR_skips = {}
 
-    def _clear_set_js_metadata(self):
+    def _clear_set_js_metadata(self) -> None:
         self.set_js_metadata = defaultdict(lambda: defaultdict(dict))
 
-    def _clear_add_parameters(self):
+    def _clear_add_parameters(self) -> None:
         self.add_parameters = {}
 
-    def _clear_add_files(self):
+    def _clear_add_files(self) -> None:
         self.add_files = []
 
-    def _clear_add_template_components(self):
+    def _clear_add_template_components(self) -> None:
         self.add_template_components = defaultdict(dict)
 
-    def _clear_set_parameters(self):
+    def _clear_set_parameters(self) -> None:
         self.set_parameters = {}
 
-    def _clear_update_param_sources(self):
+    def _clear_update_param_sources(self) -> None:
         self.update_param_sources = {}
 
-    def _clear_update_loop_indices(self):
+    def _clear_update_loop_indices(self) -> None:
         self.update_loop_indices = defaultdict(dict)
 
-    def _clear_update_loop_num_iters(self):
+    def _clear_update_loop_num_iters(self) -> None:
         self.update_loop_num_iters = {}
 
-    def _clear_update_loop_parents(self):
+    def _clear_update_loop_parents(self) -> None:
         self.update_loop_parents = {}
 
-    def
+    def _clear_update_iter_data_idx(self):
+        self.update_iter_data_idx = defaultdict(dict)
+
+    def _clear_update_run_data_idx(self):
+        self.update_run_data_idx = defaultdict(dict)
+
+    def reset(self, is_init: bool = False) -> None:
         """Clear all pending data and prepare to accept new pending data."""
 
         if not is_init and not self:
@@ -595,11 +790,12 @@ class PendingChanges:
         self._clear_add_tasks()
         self._clear_add_loops()
         self._clear_add_submissions()
-        self.
+        self._clear_at_submit_metadata()
         self._clear_add_elements()
         self._clear_add_element_sets()
         self._clear_add_elem_iters()
         self._clear_add_EARs()
+        self._clear_set_run_dirs()
 
         self._clear_set_EARs_initialised()
         self._clear_add_elem_IDs()
@@ -610,7 +806,7 @@ class PendingChanges:
         self._clear_add_files()
         self._clear_add_template_components()
 
-        self.
+        self._clear_EAR_submission_data()
         self._clear_set_EAR_starts()
         self._clear_set_EAR_ends()
         self._clear_set_EAR_skips()
@@ -622,6 +818,8 @@ class PendingChanges:
         self._clear_update_loop_indices()
         self._clear_update_loop_num_iters()
         self._clear_update_loop_parents()
+        self._clear_update_iter_data_idx()
+        self._clear_update_run_data_idx()
 
 
 @dataclass
@@ -640,65 +838,74 @@ class CommitResourceMap:
     """
 
     #: Resources for :py:meth:`~.PendingChanges.commit_tasks`.
-    commit_tasks:
+    commit_tasks: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_loops`.
-    commit_loops:
+    commit_loops: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_submissions`.
-    commit_submissions:
-    #: Resources for :py:meth:`~.PendingChanges.
-
+    commit_submissions: tuple[str, ...] | None = tuple()
+    #: Resources for :py:meth:`~.PendingChanges.commit_at_submit_metadata`.
+    commit_at_submit_metadata: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_elem_IDs`.
-    commit_elem_IDs:
+    commit_elem_IDs: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_elements`.
-    commit_elements:
+    commit_elements: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_element_sets`.
-    commit_element_sets:
+    commit_element_sets: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_elem_iter_IDs`.
-    commit_elem_iter_IDs:
+    commit_elem_iter_IDs: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_elem_iters`.
-    commit_elem_iters:
+    commit_elem_iters: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_elem_iter_EAR_IDs`.
-    commit_elem_iter_EAR_IDs:
+    commit_elem_iter_EAR_IDs: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_EARs_initialised`.
-    commit_EARs_initialised:
+    commit_EARs_initialised: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_EARs`.
-    commit_EARs:
+    commit_EARs: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_EAR_submission_indices`.
-    commit_EAR_submission_indices:
+    commit_EAR_submission_indices: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_EAR_skips`.
-    commit_EAR_skips:
+    commit_EAR_skips: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_EAR_starts`.
-    commit_EAR_starts:
+    commit_EAR_starts: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_EAR_ends`.
-    commit_EAR_ends:
+    commit_EAR_ends: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_js_metadata`.
-    commit_js_metadata:
+    commit_js_metadata: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_parameters`.
-    commit_parameters:
+    commit_parameters: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_files`.
-    commit_files:
+    commit_files: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_template_components`.
-    commit_template_components:
+    commit_template_components: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_param_sources`.
-    commit_param_sources:
+    commit_param_sources: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_loop_indices`.
-    commit_loop_indices:
+    commit_loop_indices: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_loop_num_iters`.
-    commit_loop_num_iters:
+    commit_loop_num_iters: tuple[str, ...] | None = tuple()
     #: Resources for :py:meth:`~.PendingChanges.commit_loop_parents`.
-    commit_loop_parents:
+    commit_loop_parents: tuple[str, ...] | None = tuple()
+    #: Resources for :py:meth:`~.PendingChanges.commit_set_run_dirs`.
+    commit_set_run_dirs: tuple[str, ...] | None = tuple()
+    #: Resources for :py:meth:`~.PendingChanges.commit_iter_data_idx`.
+    commit_iter_data_idx: tuple[str, ...] | None = tuple()
+    #: Resources for :py:meth:`~.PendingChanges.commit_run_data_idx`.
+    commit_run_data_idx: tuple[str, ...] | None = tuple()
+
     #: A dict whose keys are tuples of resource labels and whose values are lists
     #: of :py:class:`PendingChanges` commit method names that require those resources.
     #:
     #: This grouping allows us to batch up commit methods by resource requirements,
     #: which in turn means we can potentially minimise, e.g., the number of network
     #: requests.
-    groups:
+    groups: Mapping[tuple[str, ...], Sequence[str]] = field(
+        init=False, repr=False, compare=False
+    )
 
     def __post_init__(self):
         self.groups = self._group_by_resource()
 
-    def _group_by_resource(self) ->
+    def _group_by_resource(self) -> Mapping[tuple[str, ...], Sequence[str]]:
         """
         Get a dict whose keys are tuples of resource labels and whose values are
         lists of :py:class:`PendingChanges` commit method names that require those
@@ -708,8 +915,9 @@ class CommitResourceMap:
         which in turn means we can potentially minimise e.g. the number of network
         requests.
         """
-        groups = {}
-
+        groups: dict[tuple[str, ...], list[str]] = {}
+        # The dicts are pretending to be insertion-ordered sets
+        cur_res_group: tuple[dict[str, None], list[str]] | None = None
         for fld in fields(self):
             if not fld.name.startswith("commit_"):
                 continue
@@ -718,28 +926,23 @@ class CommitResourceMap:
             if not cur_res_group:
                 # start a new resource group: a mapping between resource labels and the
                 # commit methods that require those resources:
-                cur_res_group =
+                cur_res_group = (dict.fromkeys(res_labels), [fld.name])
 
             elif not res_labels or set(res_labels).intersection(cur_res_group[0]):
                 # there is some overlap between resource labels required in the current
                 # group and this commit method, so we merge resource labels and add the
                 # new commit method:
-                cur_res_group[0]
+                cur_res_group[0].update(dict.fromkeys(res_labels))
                 cur_res_group[1].append(fld.name)
 
             else:
                 # no overlap between resource labels required in the current group and
                 # those required by this commit method, so append the current group, and
                 # start a new group for this commit method:
-
-
-                groups[tuple(cur_res_group[0])].extend(cur_res_group[1])
-                cur_res_group = [list(res_labels), [fld.name]]
+                groups.setdefault(tuple(cur_res_group[0]), []).extend(cur_res_group[1])
+                cur_res_group = (dict.fromkeys(res_labels), [fld.name])
 
         if cur_res_group:
-
-            groups[tuple(cur_res_group[0])] = []
-
-            groups[tuple(cur_res_group[0])].extend(cur_res_group[1])
+            groups.setdefault(tuple(cur_res_group[0]), []).extend(cur_res_group[1])
 
         return groups