hpcflow 0.1.15__py3-none-any.whl → 0.2.0a271__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__init__.py +2 -11
- hpcflow/__pyinstaller/__init__.py +5 -0
- hpcflow/__pyinstaller/hook-hpcflow.py +40 -0
- hpcflow/_version.py +1 -1
- hpcflow/app.py +43 -0
- hpcflow/cli.py +2 -461
- hpcflow/data/demo_data_manifest/__init__.py +3 -0
- hpcflow/data/demo_data_manifest/demo_data_manifest.json +6 -0
- hpcflow/data/jinja_templates/test/test_template.txt +8 -0
- hpcflow/data/programs/hello_world/README.md +1 -0
- hpcflow/data/programs/hello_world/hello_world.c +87 -0
- hpcflow/data/programs/hello_world/linux/hello_world +0 -0
- hpcflow/data/programs/hello_world/macos/hello_world +0 -0
- hpcflow/data/programs/hello_world/win/hello_world.exe +0 -0
- hpcflow/data/scripts/__init__.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_1.py +8 -0
- hpcflow/data/scripts/demo_task_1_generate_t1_infile_2.py +8 -0
- hpcflow/data/scripts/demo_task_1_parse_p3.py +7 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/generate_t1_file_01.py +7 -0
- hpcflow/data/scripts/import_future_script.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py +15 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_env_spec.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_group.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +11 -0
- hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_and_direct_out.py +17 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out.py +14 -0
- hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_json_in_obj.py +12 -0
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_json_out_obj.py +10 -0
- hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py +16 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/parse_t1_file_01.py +4 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/__init__.py +1 -0
- hpcflow/data/template_components/command_files.yaml +26 -0
- hpcflow/data/template_components/environments.yaml +13 -0
- hpcflow/data/template_components/parameters.yaml +14 -0
- hpcflow/data/template_components/task_schemas.yaml +139 -0
- hpcflow/data/workflows/workflow_1.yaml +5 -0
- hpcflow/examples.ipynb +1037 -0
- hpcflow/sdk/__init__.py +149 -0
- hpcflow/sdk/app.py +4266 -0
- hpcflow/sdk/cli.py +1479 -0
- hpcflow/sdk/cli_common.py +385 -0
- hpcflow/sdk/config/__init__.py +5 -0
- hpcflow/sdk/config/callbacks.py +246 -0
- hpcflow/sdk/config/cli.py +388 -0
- hpcflow/sdk/config/config.py +1410 -0
- hpcflow/sdk/config/config_file.py +501 -0
- hpcflow/sdk/config/errors.py +272 -0
- hpcflow/sdk/config/types.py +150 -0
- hpcflow/sdk/core/__init__.py +38 -0
- hpcflow/sdk/core/actions.py +3857 -0
- hpcflow/sdk/core/app_aware.py +25 -0
- hpcflow/sdk/core/cache.py +224 -0
- hpcflow/sdk/core/command_files.py +814 -0
- hpcflow/sdk/core/commands.py +424 -0
- hpcflow/sdk/core/element.py +2071 -0
- hpcflow/sdk/core/enums.py +221 -0
- hpcflow/sdk/core/environment.py +256 -0
- hpcflow/sdk/core/errors.py +1043 -0
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +809 -0
- hpcflow/sdk/core/loop.py +1320 -0
- hpcflow/sdk/core/loop_cache.py +282 -0
- hpcflow/sdk/core/object_list.py +933 -0
- hpcflow/sdk/core/parameters.py +3371 -0
- hpcflow/sdk/core/rule.py +196 -0
- hpcflow/sdk/core/run_dir_files.py +57 -0
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +3792 -0
- hpcflow/sdk/core/task_schema.py +993 -0
- hpcflow/sdk/core/test_utils.py +538 -0
- hpcflow/sdk/core/types.py +447 -0
- hpcflow/sdk/core/utils.py +1207 -0
- hpcflow/sdk/core/validation.py +87 -0
- hpcflow/sdk/core/values.py +477 -0
- hpcflow/sdk/core/workflow.py +4820 -0
- hpcflow/sdk/core/zarr_io.py +206 -0
- hpcflow/sdk/data/__init__.py +13 -0
- hpcflow/sdk/data/config_file_schema.yaml +34 -0
- hpcflow/sdk/data/config_schema.yaml +260 -0
- hpcflow/sdk/data/environments_spec_schema.yaml +21 -0
- hpcflow/sdk/data/files_spec_schema.yaml +5 -0
- hpcflow/sdk/data/parameters_spec_schema.yaml +7 -0
- hpcflow/sdk/data/task_schema_spec_schema.yaml +3 -0
- hpcflow/sdk/data/workflow_spec_schema.yaml +22 -0
- hpcflow/sdk/demo/__init__.py +3 -0
- hpcflow/sdk/demo/cli.py +242 -0
- hpcflow/sdk/helper/__init__.py +3 -0
- hpcflow/sdk/helper/cli.py +137 -0
- hpcflow/sdk/helper/helper.py +300 -0
- hpcflow/sdk/helper/watcher.py +192 -0
- hpcflow/sdk/log.py +288 -0
- hpcflow/sdk/persistence/__init__.py +18 -0
- hpcflow/sdk/persistence/base.py +2817 -0
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +39 -0
- hpcflow/sdk/persistence/json.py +954 -0
- hpcflow/sdk/persistence/pending.py +948 -0
- hpcflow/sdk/persistence/store_resource.py +203 -0
- hpcflow/sdk/persistence/types.py +309 -0
- hpcflow/sdk/persistence/utils.py +73 -0
- hpcflow/sdk/persistence/zarr.py +2388 -0
- hpcflow/sdk/runtime.py +320 -0
- hpcflow/sdk/submission/__init__.py +3 -0
- hpcflow/sdk/submission/enums.py +70 -0
- hpcflow/sdk/submission/jobscript.py +2379 -0
- hpcflow/sdk/submission/schedulers/__init__.py +281 -0
- hpcflow/sdk/submission/schedulers/direct.py +233 -0
- hpcflow/sdk/submission/schedulers/sge.py +376 -0
- hpcflow/sdk/submission/schedulers/slurm.py +598 -0
- hpcflow/sdk/submission/schedulers/utils.py +25 -0
- hpcflow/sdk/submission/shells/__init__.py +52 -0
- hpcflow/sdk/submission/shells/base.py +229 -0
- hpcflow/sdk/submission/shells/bash.py +504 -0
- hpcflow/sdk/submission/shells/os_version.py +115 -0
- hpcflow/sdk/submission/shells/powershell.py +352 -0
- hpcflow/sdk/submission/submission.py +1402 -0
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +194 -0
- hpcflow/sdk/utils/arrays.py +69 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +31 -0
- hpcflow/sdk/utils/strings.py +69 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +123 -0
- hpcflow/tests/data/__init__.py +0 -0
- hpcflow/tests/data/benchmark_N_elements.yaml +6 -0
- hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_1.json +10 -0
- hpcflow/tests/data/workflow_1.yaml +5 -0
- hpcflow/tests/data/workflow_1_slurm.yaml +8 -0
- hpcflow/tests/data/workflow_1_wsl.yaml +8 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +42 -0
- hpcflow/tests/jinja_templates/test_jinja_templates.py +161 -0
- hpcflow/tests/programs/test_programs.py +180 -0
- hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py +12 -0
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +14 -0
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +1361 -0
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +14 -0
- hpcflow/tests/unit/test_action.py +1066 -0
- hpcflow/tests/unit/test_action_rule.py +24 -0
- hpcflow/tests/unit/test_app.py +132 -0
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +172 -0
- hpcflow/tests/unit/test_command.py +377 -0
- hpcflow/tests/unit/test_config.py +195 -0
- hpcflow/tests/unit/test_config_file.py +162 -0
- hpcflow/tests/unit/test_element.py +666 -0
- hpcflow/tests/unit/test_element_iteration.py +88 -0
- hpcflow/tests/unit/test_element_set.py +158 -0
- hpcflow/tests/unit/test_group.py +115 -0
- hpcflow/tests/unit/test_input_source.py +1479 -0
- hpcflow/tests/unit/test_input_value.py +398 -0
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +1247 -0
- hpcflow/tests/unit/test_loop.py +2674 -0
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +259 -0
- hpcflow/tests/unit/test_object_list.py +116 -0
- hpcflow/tests/unit/test_parameter.py +243 -0
- hpcflow/tests/unit/test_persistence.py +664 -0
- hpcflow/tests/unit/test_resources.py +243 -0
- hpcflow/tests/unit/test_run.py +286 -0
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +9 -0
- hpcflow/tests/unit/test_schema_input.py +372 -0
- hpcflow/tests/unit/test_shell.py +129 -0
- hpcflow/tests/unit/test_slurm.py +39 -0
- hpcflow/tests/unit/test_submission.py +502 -0
- hpcflow/tests/unit/test_task.py +2560 -0
- hpcflow/tests/unit/test_task_schema.py +182 -0
- hpcflow/tests/unit/test_utils.py +616 -0
- hpcflow/tests/unit/test_value_sequence.py +549 -0
- hpcflow/tests/unit/test_values.py +91 -0
- hpcflow/tests/unit/test_workflow.py +827 -0
- hpcflow/tests/unit/test_workflow_template.py +186 -0
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/unit/utils/test_strings.py +97 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +355 -0
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +564 -0
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6794 -0
- hpcflow-0.2.0a271.dist-info/LICENSE +375 -0
- hpcflow-0.2.0a271.dist-info/METADATA +65 -0
- hpcflow-0.2.0a271.dist-info/RECORD +237 -0
- {hpcflow-0.1.15.dist-info → hpcflow-0.2.0a271.dist-info}/WHEEL +4 -5
- hpcflow-0.2.0a271.dist-info/entry_points.txt +6 -0
- hpcflow/api.py +0 -490
- hpcflow/archive/archive.py +0 -307
- hpcflow/archive/cloud/cloud.py +0 -45
- hpcflow/archive/cloud/errors.py +0 -9
- hpcflow/archive/cloud/providers/dropbox.py +0 -427
- hpcflow/archive/errors.py +0 -5
- hpcflow/base_db.py +0 -4
- hpcflow/config.py +0 -233
- hpcflow/copytree.py +0 -66
- hpcflow/data/examples/_config.yml +0 -14
- hpcflow/data/examples/damask/demo/1.run.yml +0 -4
- hpcflow/data/examples/damask/demo/2.process.yml +0 -29
- hpcflow/data/examples/damask/demo/geom.geom +0 -2052
- hpcflow/data/examples/damask/demo/load.load +0 -1
- hpcflow/data/examples/damask/demo/material.config +0 -185
- hpcflow/data/examples/damask/inputs/geom.geom +0 -2052
- hpcflow/data/examples/damask/inputs/load.load +0 -1
- hpcflow/data/examples/damask/inputs/material.config +0 -185
- hpcflow/data/examples/damask/profiles/_variable_lookup.yml +0 -21
- hpcflow/data/examples/damask/profiles/damask.yml +0 -4
- hpcflow/data/examples/damask/profiles/damask_process.yml +0 -8
- hpcflow/data/examples/damask/profiles/damask_run.yml +0 -5
- hpcflow/data/examples/damask/profiles/default.yml +0 -6
- hpcflow/data/examples/thinking.yml +0 -177
- hpcflow/errors.py +0 -2
- hpcflow/init_db.py +0 -37
- hpcflow/models.py +0 -2595
- hpcflow/nesting.py +0 -9
- hpcflow/profiles.py +0 -455
- hpcflow/project.py +0 -81
- hpcflow/scheduler.py +0 -322
- hpcflow/utils.py +0 -103
- hpcflow/validation.py +0 -166
- hpcflow/variables.py +0 -543
- hpcflow-0.1.15.dist-info/METADATA +0 -168
- hpcflow-0.1.15.dist-info/RECORD +0 -45
- hpcflow-0.1.15.dist-info/entry_points.txt +0 -8
- hpcflow-0.1.15.dist-info/top_level.txt +0 -1
- /hpcflow/{archive → data/jinja_templates}/__init__.py +0 -0
- /hpcflow/{archive/cloud → data/programs}/__init__.py +0 -0
- /hpcflow/{archive/cloud/providers → data/workflows}/__init__.py +0 -0
|
@@ -0,0 +1,948 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Class to hold the state that is waiting to be committed to disk.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from __future__ import annotations
|
|
6
|
+
|
|
7
|
+
from collections import defaultdict
|
|
8
|
+
from collections.abc import Callable
|
|
9
|
+
import contextlib
|
|
10
|
+
from dataclasses import dataclass, field, fields
|
|
11
|
+
from functools import wraps
|
|
12
|
+
import copy
|
|
13
|
+
|
|
14
|
+
from typing import Any, Generic, TYPE_CHECKING, TypeVar
|
|
15
|
+
from typing_extensions import ParamSpec
|
|
16
|
+
|
|
17
|
+
import numpy as np
|
|
18
|
+
|
|
19
|
+
from hpcflow.sdk.log import TimeIt
|
|
20
|
+
from hpcflow.sdk.submission.submission import SUBMISSION_SUBMIT_TIME_KEYS
|
|
21
|
+
from hpcflow.sdk.typing import DataIndex
|
|
22
|
+
from hpcflow.sdk.utils.strings import shorten_list_str
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
from hpcflow.sdk.persistence.types import (
|
|
26
|
+
AnySTask,
|
|
27
|
+
AnySElement,
|
|
28
|
+
AnySElementIter,
|
|
29
|
+
AnySEAR,
|
|
30
|
+
AnySParameter,
|
|
31
|
+
)
|
|
32
|
+
|
|
33
|
+
if TYPE_CHECKING:
|
|
34
|
+
from collections.abc import Mapping, Sequence
|
|
35
|
+
from datetime import datetime
|
|
36
|
+
from logging import Logger
|
|
37
|
+
from .base import PersistentStore, FileDescriptor, LoopDescriptor
|
|
38
|
+
from ..app import BaseApp
|
|
39
|
+
from ..typing import ParamSource
|
|
40
|
+
from ..core.json_like import JSONed
|
|
41
|
+
|
|
42
|
+
P = ParamSpec("P")
|
|
43
|
+
T = TypeVar("T")
|
|
44
|
+
|
|
45
|
+
_commit_method_data_map: dict[str, list[str]] = defaultdict(
|
|
46
|
+
list
|
|
47
|
+
) # note: this is updated at module-import time.
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
class PendingChanges(
|
|
51
|
+
Generic[AnySTask, AnySElement, AnySElementIter, AnySEAR, AnySParameter]
|
|
52
|
+
):
|
|
53
|
+
"""
|
|
54
|
+
Class to store pending changes and merge them into a persistent store.
|
|
55
|
+
|
|
56
|
+
Parameters
|
|
57
|
+
----------
|
|
58
|
+
app: App
|
|
59
|
+
The main application context.
|
|
60
|
+
store: PersistentStore
|
|
61
|
+
The persistent store that owns this object
|
|
62
|
+
resource_map: CommitResourceMap
|
|
63
|
+
Map of resources, used when processing commits.
|
|
64
|
+
"""
|
|
65
|
+
|
|
66
|
+
# These would be in the docstring except they render really wrongly!
|
|
67
|
+
# Type Parameters
|
|
68
|
+
# ---------------
|
|
69
|
+
# AnySTask
|
|
70
|
+
# The type of stored tasks.
|
|
71
|
+
# AnySElement
|
|
72
|
+
# The type of stored elements.
|
|
73
|
+
# AnySElementIter
|
|
74
|
+
# The type of stored element iterations.
|
|
75
|
+
# AnySEAR
|
|
76
|
+
# The type of stored EARs.
|
|
77
|
+
# AnySParameter
|
|
78
|
+
# The type of stored parameters.
|
|
79
|
+
|
|
80
|
+
def __init__(
|
|
81
|
+
self,
|
|
82
|
+
app: BaseApp,
|
|
83
|
+
store: PersistentStore[
|
|
84
|
+
AnySTask, AnySElement, AnySElementIter, AnySEAR, AnySParameter
|
|
85
|
+
],
|
|
86
|
+
resource_map: CommitResourceMap,
|
|
87
|
+
):
|
|
88
|
+
self._app = app
|
|
89
|
+
self.store = store
|
|
90
|
+
self.resource_map = resource_map
|
|
91
|
+
|
|
92
|
+
#: Keys are new task IDs.
|
|
93
|
+
self.add_tasks: dict[int, AnySTask] = {}
|
|
94
|
+
#: Keys are loop IDs, values are loop descriptors.
|
|
95
|
+
self.add_loops: dict[int, LoopDescriptor] = {}
|
|
96
|
+
#: Keys are submission IDs, values are submission descriptors.
|
|
97
|
+
self.add_submissions: dict[int, Mapping[str, JSONed]] = {}
|
|
98
|
+
#: Keys are element IDs.
|
|
99
|
+
self.add_elements: dict[int, AnySElement] = {}
|
|
100
|
+
#: Keys are element iteration IDs.
|
|
101
|
+
self.add_elem_iters: dict[int, AnySElementIter] = {}
|
|
102
|
+
#: Keys are element action run IDs.
|
|
103
|
+
self.add_EARs: dict[int, AnySEAR] = {}
|
|
104
|
+
#: Keys are parameter indices and values are tuples whose first element is data
|
|
105
|
+
#: to add and whose second element is the source dict for the new data.
|
|
106
|
+
self.add_parameters: dict[int, AnySParameter] = {}
|
|
107
|
+
#: Workflow-related files (inputs, outputs) added to the persistent store.
|
|
108
|
+
self.add_files: list[FileDescriptor] = []
|
|
109
|
+
#: Template components to add.
|
|
110
|
+
self.add_template_components: dict[str, dict[str, dict]] = {}
|
|
111
|
+
#: Keys are element set IDs, values are descriptors.
|
|
112
|
+
self.add_element_sets: dict[int, list[Mapping]] = {}
|
|
113
|
+
|
|
114
|
+
#: Keys are task IDs, and values are element IDs to add to that task.
|
|
115
|
+
self.add_elem_IDs: dict[int, list[int]] = {}
|
|
116
|
+
#: Keys are element IDs, and values are iteration IDs to add to that element.
|
|
117
|
+
self.add_elem_iter_IDs: dict[int, list[int]] = {}
|
|
118
|
+
#: Keys are element iteration IDs, then EAR action index, and values are EAR IDs.
|
|
119
|
+
#: This is a list of EAR IDs to add to a given element iteration action.
|
|
120
|
+
self.add_elem_iter_EAR_IDs: dict[int, dict[int, list[int]]] = {}
|
|
121
|
+
#: Submission metadata added at submit-time, including submission parts.
|
|
122
|
+
self.update_at_submit_metadata: dict[int, dict[str, Any]] = {}
|
|
123
|
+
|
|
124
|
+
#: IDs of EARs to mark as initialised.
|
|
125
|
+
self.set_EARs_initialised: list[int] = []
|
|
126
|
+
#: Submission IDs and commands file IDs to attach to EARs.
|
|
127
|
+
self.set_EAR_submission_data: dict[int, tuple[int, int | None]] = {}
|
|
128
|
+
#: IDs of EARs to mark as skipped.
|
|
129
|
+
self.set_EAR_skips: dict[int, int] = {}
|
|
130
|
+
#: Keys are EAR IDs and values are tuples of start time, start dir snapshot, run
|
|
131
|
+
#: hostname, and port number.
|
|
132
|
+
self.set_EAR_starts: dict[
|
|
133
|
+
int, tuple[datetime, dict[str, Any] | None, str, int | None]
|
|
134
|
+
] = {}
|
|
135
|
+
#: Keys are EAR IDs and values are tuples of end time, end dir snapshot, exit
|
|
136
|
+
#: code, and success boolean.
|
|
137
|
+
self.set_EAR_ends: dict[
|
|
138
|
+
int, tuple[datetime, dict[str, Any] | None, int, bool]
|
|
139
|
+
] = {}
|
|
140
|
+
#: Each list item is a tuple of two arrays, the first of which is a run directory
|
|
141
|
+
#: indices array, and the second of which is an integer array indicating with
|
|
142
|
+
#: which run ID each run directory is associated.
|
|
143
|
+
self.set_run_dirs: list[tuple[np.ndarray, np.ndarray]] = []
|
|
144
|
+
|
|
145
|
+
#: Keys are IDs of jobscripts.
|
|
146
|
+
self.set_js_metadata: dict[int, dict[int, dict[str, Any]]] = {}
|
|
147
|
+
|
|
148
|
+
#: Keys are IDs of parameters to add or modify, and values are tuples of the
|
|
149
|
+
#: parameter value, and whether the parameter is a file.
|
|
150
|
+
self.set_parameters: dict[int, tuple[Any, bool]] = {}
|
|
151
|
+
|
|
152
|
+
#: Keys are parameter indices and values are dict parameter sources to merge
|
|
153
|
+
#: with existing source of that parameter.
|
|
154
|
+
self.update_param_sources: dict[int, ParamSource] = {}
|
|
155
|
+
#: Keys are indices of loops, values are descriptions of what to update.
|
|
156
|
+
self.update_loop_indices: dict[int, dict[str, int]] = {}
|
|
157
|
+
#: Keys are indices of loops, values are number of iterations.
|
|
158
|
+
self.update_loop_num_iters: dict[int, list[list[list[int] | int]]] = {}
|
|
159
|
+
#: Keys are indices of loops, values are list of parent names.
|
|
160
|
+
self.update_loop_parents: dict[int, list[str]] = {}
|
|
161
|
+
|
|
162
|
+
self.update_iter_data_idx: dict[int, DataIndex] = {}
|
|
163
|
+
self.update_run_data_idx: dict[int, DataIndex] = {}
|
|
164
|
+
|
|
165
|
+
self.reset(is_init=True) # set up initial data structures
|
|
166
|
+
|
|
167
|
+
def __bool__(self):
|
|
168
|
+
"""Returns True if there are any outstanding pending items."""
|
|
169
|
+
return (
|
|
170
|
+
bool(self.add_tasks)
|
|
171
|
+
or bool(self.add_loops)
|
|
172
|
+
or bool(self.add_submissions)
|
|
173
|
+
or bool(self.add_elements)
|
|
174
|
+
or bool(self.add_elem_iters)
|
|
175
|
+
or bool(self.add_EARs)
|
|
176
|
+
or bool(self.add_elem_IDs)
|
|
177
|
+
or bool(self.add_elem_iter_IDs)
|
|
178
|
+
or bool(self.add_elem_iter_EAR_IDs)
|
|
179
|
+
or bool(self.update_at_submit_metadata)
|
|
180
|
+
or bool(self.add_parameters)
|
|
181
|
+
or bool(self.add_files)
|
|
182
|
+
or bool(self.add_template_components)
|
|
183
|
+
or bool(self.add_element_sets)
|
|
184
|
+
or bool(self.set_EARs_initialised)
|
|
185
|
+
or bool(self.set_EAR_submission_data)
|
|
186
|
+
or bool(self.set_EAR_starts)
|
|
187
|
+
or bool(self.set_EAR_ends)
|
|
188
|
+
or bool(self.set_EAR_skips)
|
|
189
|
+
or bool(self.set_run_dirs)
|
|
190
|
+
or bool(self.set_js_metadata)
|
|
191
|
+
or bool(self.set_parameters)
|
|
192
|
+
or bool(self.update_param_sources)
|
|
193
|
+
or bool(self.update_loop_indices)
|
|
194
|
+
or bool(self.update_loop_num_iters)
|
|
195
|
+
or bool(self.update_loop_parents)
|
|
196
|
+
or bool(self.update_iter_data_idx)
|
|
197
|
+
or bool(self.update_run_data_idx)
|
|
198
|
+
)
|
|
199
|
+
|
|
200
|
+
def where_pending(self) -> list[str]:
|
|
201
|
+
"""
|
|
202
|
+
Get the list of items for which there is some outstanding pending items.
|
|
203
|
+
"""
|
|
204
|
+
excluded = {"app", "store", "resource_map"}
|
|
205
|
+
return [k for k, v in self.__dict__.items() if k not in excluded and bool(v)]
|
|
206
|
+
|
|
207
|
+
@property
|
|
208
|
+
def logger(self) -> Logger:
|
|
209
|
+
"""
|
|
210
|
+
The logger.
|
|
211
|
+
"""
|
|
212
|
+
return self._app.persistence_logger
|
|
213
|
+
|
|
214
|
+
def commits_data(*data_list: str):
|
|
215
|
+
"""Decorator that wraps `PendingChanges.commit_*` methods with arguments listing
|
|
216
|
+
which `PendingChanges` attributes must have non-trivial data in them for the method's
|
|
217
|
+
invocation to be required.
|
|
218
|
+
|
|
219
|
+
Notes
|
|
220
|
+
-----
|
|
221
|
+
This essentially provides a mapping between `PendingChanges` attributes and
|
|
222
|
+
`commit_*` methods. This allows us to only open the resources that need updating
|
|
223
|
+
in `PendingChanges.commit_all`.
|
|
224
|
+
|
|
225
|
+
We use a decorator rather than an explicitly declaring the map in
|
|
226
|
+
`_commit_method_data_map` to make the mapping obvious near the commit methods, and
|
|
227
|
+
hopefully avoid us forgetting to update `_commit_method_data_map` when we modify
|
|
228
|
+
or add commit methods in future!
|
|
229
|
+
|
|
230
|
+
"""
|
|
231
|
+
|
|
232
|
+
def decorator(func: Callable[P, T]) -> Callable[P, T]:
|
|
233
|
+
|
|
234
|
+
_commit_method_data_map[func.__name__].extend(data_list)
|
|
235
|
+
|
|
236
|
+
@wraps(func)
|
|
237
|
+
def inner(*args, **kwargs) -> T:
|
|
238
|
+
return func(*args, **kwargs)
|
|
239
|
+
|
|
240
|
+
return inner
|
|
241
|
+
|
|
242
|
+
return decorator
|
|
243
|
+
|
|
244
|
+
def get_pending_resource_map_groups(self) -> dict[tuple[str, ...], list[str]]:
|
|
245
|
+
"""Retrive resource map groups, where values are filtered to include only those
|
|
246
|
+
commit methods that must be invoked, due to pending data associated with those
|
|
247
|
+
methods.
|
|
248
|
+
|
|
249
|
+
Notes
|
|
250
|
+
-----
|
|
251
|
+
This method allows us to open only those resources that need to be updated, given
|
|
252
|
+
the state of pending data.
|
|
253
|
+
"""
|
|
254
|
+
|
|
255
|
+
where_pending = self.where_pending()
|
|
256
|
+
pending_groups = {}
|
|
257
|
+
for res_names, methods in self.resource_map.groups.items():
|
|
258
|
+
req_methods = [
|
|
259
|
+
meth_i
|
|
260
|
+
for meth_i in methods
|
|
261
|
+
if any(
|
|
262
|
+
dat_j in where_pending for dat_j in _commit_method_data_map[meth_i]
|
|
263
|
+
)
|
|
264
|
+
]
|
|
265
|
+
if req_methods:
|
|
266
|
+
pending_groups[res_names] = req_methods
|
|
267
|
+
|
|
268
|
+
return pending_groups
|
|
269
|
+
|
|
270
|
+
@TimeIt.decorator
|
|
271
|
+
def commit_all(self) -> None:
|
|
272
|
+
"""Commit all pending changes to disk."""
|
|
273
|
+
self.logger.info(f"committing all pending changes: {self.where_pending()}")
|
|
274
|
+
|
|
275
|
+
if not self:
|
|
276
|
+
self.logger.debug("commit: no pending changes to commit.")
|
|
277
|
+
return
|
|
278
|
+
|
|
279
|
+
for resources, methods in self.get_pending_resource_map_groups().items():
|
|
280
|
+
# for each resource, enter `using_resource` context manager in "update" mode:
|
|
281
|
+
with contextlib.ExitStack() as stack:
|
|
282
|
+
for res in resources:
|
|
283
|
+
stack.enter_context(
|
|
284
|
+
self.store.using_resource(res, "update") # type: ignore[call-overload]
|
|
285
|
+
)
|
|
286
|
+
for meth in methods:
|
|
287
|
+
getattr(self, meth)()
|
|
288
|
+
|
|
289
|
+
assert not (self)
|
|
290
|
+
|
|
291
|
+
@TimeIt.decorator
|
|
292
|
+
@commits_data("add_tasks")
|
|
293
|
+
def commit_tasks(self) -> None:
|
|
294
|
+
"""Commit pending tasks to disk."""
|
|
295
|
+
if self.add_tasks:
|
|
296
|
+
tasks = self.store.get_tasks_by_IDs(self.add_tasks)
|
|
297
|
+
task_ids = set(self.add_tasks)
|
|
298
|
+
self.logger.debug(f"commit: adding pending tasks with IDs: {task_ids!r}")
|
|
299
|
+
self.store._append_tasks(tasks)
|
|
300
|
+
self.store.num_tasks_cache = None # invalidate cache
|
|
301
|
+
# pending element IDs that belong to pending tasks are now committed:
|
|
302
|
+
self.add_elem_IDs = {
|
|
303
|
+
k: v for k, v in self.add_elem_IDs.items() if k not in task_ids
|
|
304
|
+
}
|
|
305
|
+
self._clear_add_tasks()
|
|
306
|
+
|
|
307
|
+
@TimeIt.decorator
|
|
308
|
+
@commits_data("add_loops")
|
|
309
|
+
def commit_loops(self) -> None:
|
|
310
|
+
"""Commit pending loops to disk."""
|
|
311
|
+
if self.add_loops:
|
|
312
|
+
# retrieve pending loops, including pending changes to num_added_iterations:
|
|
313
|
+
loops = self.store.get_loops_by_IDs(self.add_loops)
|
|
314
|
+
loop_ids = set(self.add_loops)
|
|
315
|
+
self.logger.debug(f"commit: adding pending loops with indices {loop_ids!r}")
|
|
316
|
+
self.store._append_loops(loops)
|
|
317
|
+
|
|
318
|
+
# pending num_added_iters and parents that belong to pending loops are now
|
|
319
|
+
# committed:
|
|
320
|
+
self.update_loop_num_iters = {
|
|
321
|
+
k: v for k, v in self.update_loop_num_iters.items() if k not in loop_ids
|
|
322
|
+
}
|
|
323
|
+
self.update_loop_parents = {
|
|
324
|
+
k: v for k, v in self.update_loop_parents.items() if k not in loop_ids
|
|
325
|
+
}
|
|
326
|
+
|
|
327
|
+
self._clear_add_loops()
|
|
328
|
+
|
|
329
|
+
@TimeIt.decorator
|
|
330
|
+
@commits_data("add_submissions")
|
|
331
|
+
def commit_submissions(self) -> None:
|
|
332
|
+
"""Commit pending submissions to disk."""
|
|
333
|
+
if self.add_submissions:
|
|
334
|
+
# retrieve pending submissions:
|
|
335
|
+
subs = self.store.get_submissions_by_ID(
|
|
336
|
+
self.add_submissions
|
|
337
|
+
) # TODO: I think this just returns add_submissions?
|
|
338
|
+
sub_ids = set(self.add_submissions)
|
|
339
|
+
self.logger.debug(
|
|
340
|
+
f"commit: adding pending submissions with indices {sub_ids!r}"
|
|
341
|
+
)
|
|
342
|
+
self.store._append_submissions(subs)
|
|
343
|
+
self._clear_add_submissions()
|
|
344
|
+
|
|
345
|
+
@TimeIt.decorator
|
|
346
|
+
@commits_data("update_at_submit_metadata")
|
|
347
|
+
def commit_at_submit_metadata(self) -> None:
|
|
348
|
+
"""
|
|
349
|
+
Commit to disk pending at-submit-time metadata, including submission parts.
|
|
350
|
+
"""
|
|
351
|
+
if self.update_at_submit_metadata:
|
|
352
|
+
self.logger.debug("commit: adding pending at-submit metadata")
|
|
353
|
+
self.store._update_at_submit_metadata(self.update_at_submit_metadata)
|
|
354
|
+
self._clear_at_submit_metadata()
|
|
355
|
+
|
|
356
|
+
@TimeIt.decorator
|
|
357
|
+
@commits_data("add_elem_IDs")
|
|
358
|
+
def commit_elem_IDs(self) -> None:
|
|
359
|
+
"""
|
|
360
|
+
Commit pending element ID updates to disk.
|
|
361
|
+
"""
|
|
362
|
+
# TODO: could be batched up?
|
|
363
|
+
for task_ID, elem_IDs in self.add_elem_IDs.items():
|
|
364
|
+
self.logger.debug(
|
|
365
|
+
f"commit: adding pending element IDs to task {task_ID!r}: {elem_IDs!r}."
|
|
366
|
+
)
|
|
367
|
+
self.store._append_task_element_IDs(task_ID, elem_IDs)
|
|
368
|
+
self.store.task_cache.pop(task_ID, None) # invalidate cache
|
|
369
|
+
self._clear_add_elem_IDs()
|
|
370
|
+
|
|
371
|
+
@TimeIt.decorator
|
|
372
|
+
@commits_data("add_elements")
|
|
373
|
+
def commit_elements(self) -> None:
|
|
374
|
+
"""
|
|
375
|
+
Commit pending elements to disk.
|
|
376
|
+
"""
|
|
377
|
+
if self.add_elements:
|
|
378
|
+
elems = self.store.get_elements(self.add_elements)
|
|
379
|
+
elem_ids = set(self.add_elements)
|
|
380
|
+
self.logger.debug(f"commit: adding pending elements with IDs: {elem_ids!r}")
|
|
381
|
+
self.store._append_elements(elems)
|
|
382
|
+
# pending iter IDs that belong to pending elements are now committed:
|
|
383
|
+
self.add_elem_iter_IDs = {
|
|
384
|
+
k: v for k, v in self.add_elem_iter_IDs.items() if k not in elem_ids
|
|
385
|
+
}
|
|
386
|
+
self._clear_add_elements()
|
|
387
|
+
|
|
388
|
+
@TimeIt.decorator
|
|
389
|
+
@commits_data("add_element_sets")
|
|
390
|
+
def commit_element_sets(self) -> None:
|
|
391
|
+
"""
|
|
392
|
+
Commit pending element sets to disk.
|
|
393
|
+
"""
|
|
394
|
+
# TODO: could be batched up?
|
|
395
|
+
for task_id, es_js in self.add_element_sets.items():
|
|
396
|
+
self.logger.debug("commit: adding pending element sets.")
|
|
397
|
+
self.store._append_element_sets(task_id, es_js)
|
|
398
|
+
self._clear_add_element_sets()
|
|
399
|
+
|
|
400
|
+
@TimeIt.decorator
|
|
401
|
+
@commits_data("add_elem_iter_IDs")
|
|
402
|
+
def commit_elem_iter_IDs(self) -> None:
|
|
403
|
+
"""
|
|
404
|
+
Commit pending element iteration ID updates to disk.
|
|
405
|
+
"""
|
|
406
|
+
# TODO: could be batched up?
|
|
407
|
+
for elem_ID, iter_IDs in self.add_elem_iter_IDs.items():
|
|
408
|
+
self.logger.debug(
|
|
409
|
+
f"commit: adding pending element iteration IDs to element {elem_ID!r}: "
|
|
410
|
+
f"{iter_IDs!r}."
|
|
411
|
+
)
|
|
412
|
+
self.store._append_elem_iter_IDs(elem_ID, iter_IDs)
|
|
413
|
+
self.store.element_cache.pop(elem_ID, None) # invalidate cache
|
|
414
|
+
self._clear_add_elem_iter_IDs()
|
|
415
|
+
|
|
416
|
+
@TimeIt.decorator
@commits_data("add_elem_iters")
def commit_elem_iters(self) -> None:
    """Persist pending element iterations and prune now-committed sub-state."""
    if not self.add_elem_iters:
        return
    committed_ids = set(self.add_elem_iters)
    pending_iters = self.store.get_element_iterations(self.add_elem_iters)
    self.logger.debug(
        f"commit: adding pending element iterations with IDs: {committed_ids!r}"
    )
    self.store._append_elem_iters(pending_iters)
    # EAR-ID assignments for the iterations just written are committed as part
    # of those iterations; keep only assignments for still-pending iterations:
    previous_EAR_IDs = copy.deepcopy(self.add_elem_iter_EAR_IDs)
    self._clear_add_elem_iter_EAR_IDs()  # fresh empty nested defaultdict
    for iter_id, runs_by_action in previous_EAR_IDs.items():
        if iter_id in committed_ids:
            continue  # committed above; nothing left to re-assign
        for act_idx, run_ids in runs_by_action.items():
            self.add_elem_iter_EAR_IDs[iter_id][act_idx].extend(run_ids)

    # likewise, `EARs_initialised` flags for committed iterations are now
    # persistent, so retain only those for still-pending iterations:
    self.set_EARs_initialised = [
        iter_id for iter_id in self.set_EARs_initialised if iter_id not in committed_ids
    ]
    self._clear_add_elem_iters()
|
|
443
|
+
|
|
444
|
+
@TimeIt.decorator
@commits_data("add_elem_iter_EAR_IDs")
def commit_elem_iter_EAR_IDs(self) -> None:
    """Persist pending run-ID assignments for each element iteration."""
    # TODO: could be batched up?
    for iteration_id, runs_by_action in self.add_elem_iter_EAR_IDs.items():
        self.logger.debug(
            f"commit: adding pending EAR IDs to element iteration {iteration_id!r}: "
            f"{dict(runs_by_action)!r}."
        )
        for action_idx, run_ids in runs_by_action.items():
            self.store._append_elem_iter_EAR_IDs(iteration_id, action_idx, run_ids)
        # the stored iteration record changed; drop any cached copy:
        self.store.element_iter_cache.pop(iteration_id, None)
    self._clear_add_elem_iter_EAR_IDs()
|
|
460
|
+
|
|
461
|
+
@TimeIt.decorator
@commits_data("add_EARs")
def commit_EARs(self) -> None:
    """
    Commit pending element action runs to disk.

    Appends pending EARs to the store, then discards any pending
    submission/skip/start/end updates that targeted those (now persistent)
    EARs, since `get_EARs` already folded them into the appended records.
    """
    if self.add_EARs:
        EARs = self.store.get_EARs(self.add_EARs)
        EAR_ids = list(self.add_EARs)
        # use a set for the membership tests below; testing against the list
        # was O(n) per key (and inconsistent with `commit_elements`):
        EAR_id_set = set(EAR_ids)
        self.logger.debug(f"commit: adding pending EARs with IDs: {EAR_ids!r}")
        self.store._append_EARs(EARs)
        self.store.num_EARs_cache = None  # invalidate cache
        # pending start/end times/snapshots, submission indices, and skips that belong
        # to pending EARs are now committed (accounted for in `get_EARs` above):
        self.set_EAR_submission_data = {
            k: v for k, v in self.set_EAR_submission_data.items() if k not in EAR_id_set
        }
        self.set_EAR_skips = {
            k: v for k, v in self.set_EAR_skips.items() if k not in EAR_id_set
        }
        self.set_EAR_starts = {
            k: v for k, v in self.set_EAR_starts.items() if k not in EAR_id_set
        }
        self.set_EAR_ends = {
            k: v for k, v in self.set_EAR_ends.items() if k not in EAR_id_set
        }

        self._clear_add_EARs()
|
|
489
|
+
|
|
490
|
+
@TimeIt.decorator
@commits_data("set_run_dirs")
def commit_set_run_dirs(self) -> None:
    """Write each pending (run-directory array, run index) pair to the store."""
    for dir_array, run_index in self.set_run_dirs:
        self.store._set_run_dirs(dir_array, run_index)
    self._clear_set_run_dirs()
|
|
499
|
+
|
|
500
|
+
@TimeIt.decorator
@commits_data("set_EARs_initialised")
def commit_EARs_initialised(self) -> None:
    """Persist pending ``EARs_initialised`` flags for element iterations."""
    iter_ids = self.set_EARs_initialised
    if iter_ids:
        self.logger.debug(
            f"commit: setting pending `EARs_initialised` for iteration IDs: "
            f"{iter_ids!r}."
        )
        # TODO: could be batched up?
        for iteration_id in iter_ids:
            self.store._update_elem_iter_EARs_initialised(iteration_id)
            # stored iteration changed; drop any stale cached copy:
            self.store.element_iter_cache.pop(iteration_id, None)
        self._clear_set_EARs_initialised()
|
|
517
|
+
|
|
518
|
+
@TimeIt.decorator
@commits_data("set_EAR_submission_data")
def commit_EAR_submission_indices(self) -> None:
    """Persist pending run submission data and invalidate the affected runs' cache."""
    pending = self.set_EAR_submission_data
    if not pending:
        return
    self.logger.debug(
        f"commit: updating submission data: {pending!r}."
    )
    self.store._update_EAR_submission_data(pending)
    for run_id in pending:
        self.store.EAR_cache.pop(run_id, None)  # stored run changed; uncache
    self._clear_EAR_submission_data()
|
|
532
|
+
|
|
533
|
+
@TimeIt.decorator
@commits_data("set_EAR_starts")
def commit_EAR_starts(self) -> None:
    """Persist pending run start information and invalidate the runs' cache."""
    pending = self.set_EAR_starts
    if not pending:
        return
    self.logger.debug(
        f"commit: registering {len(pending)} run(s) as started: "
        f"{shorten_list_str(pending)}."
    )
    self.store._update_EAR_start(pending)
    for run_id in pending:
        self.store.EAR_cache.pop(run_id, None)  # stored run changed; uncache
    self._clear_set_EAR_starts()
|
|
549
|
+
|
|
550
|
+
@TimeIt.decorator
@commits_data("set_EAR_ends")
def commit_EAR_ends(self) -> None:
    """Persist pending run end information and invalidate the runs' cache."""
    pending = self.set_EAR_ends
    if not pending:
        return
    self.logger.debug(
        f"commit: registering {len(pending)} run(s) as ended: "
        f"{shorten_list_str(pending)}, with exit codes: "
        f"{shorten_list_str([entry[2] for entry in pending.values()])}."
    )
    self.store._update_EAR_end(pending)
    for run_id in pending:
        self.store.EAR_cache.pop(run_id, None)  # stored run changed; uncache
    self._clear_set_EAR_ends()
|
|
567
|
+
|
|
568
|
+
@TimeIt.decorator
@commits_data("set_EAR_skips")
def commit_EAR_skips(self) -> None:
    """Persist pending run skip flags and invalidate the runs' cache."""
    pending = self.set_EAR_skips
    if not pending:
        return
    self.logger.debug(f"commit: setting {len(pending)} run IDs as skipped.")
    self.store._update_EAR_skip(pending)
    for run_id in pending:
        self.store.EAR_cache.pop(run_id, None)  # stored run changed; uncache
    self._clear_set_EAR_skips()
|
|
581
|
+
|
|
582
|
+
@TimeIt.decorator
@commits_data("set_js_metadata")
def commit_js_metadata(self) -> None:
    """Persist pending jobscript metadata changes."""
    if not self.set_js_metadata:
        return
    self.logger.debug(
        f"commit: setting jobscript metadata: {self.set_js_metadata!r}"
    )
    self.store._update_js_metadata(self.set_js_metadata)
    self._clear_set_js_metadata()
|
|
594
|
+
|
|
595
|
+
@TimeIt.decorator
@commits_data("add_parameters", "set_parameters")
def commit_parameters(self) -> None:
    """Make pending parameters persistent: first additions, then value updates."""
    if self.add_parameters:
        new_ids = list(self.add_parameters)
        new_params = self.store.get_parameters(self.add_parameters)
        self.logger.debug(f"commit: adding pending parameters IDs: {new_ids!r}")
        self.store._append_parameters(new_params)
        self._clear_add_parameters()

    if self.set_parameters:
        set_ids = list(self.set_parameters)
        self.logger.debug(f"commit: setting values of parameter IDs {set_ids!r}.")
        self.store._set_parameter_values(self.set_parameters)
        for param_id in set_ids:
            # stored value changed; drop any cached copy
            self.store.parameter_cache.pop(param_id, None)
        self._clear_set_parameters()
|
|
613
|
+
|
|
614
|
+
@TimeIt.decorator
@commits_data("add_files")
def commit_files(self) -> None:
    """Add pending files to the files directory."""
    if not self.add_files:
        return
    self.logger.debug("commit: adding pending files to the files directory.")
    self.store._append_files(self.add_files)
    self._clear_add_files()
|
|
622
|
+
|
|
623
|
+
@TimeIt.decorator
@commits_data("add_template_components")
def commit_template_components(self) -> None:
    """Persist pending template components."""
    if not self.add_template_components:
        return
    self.logger.debug("commit: adding template components.")
    # write the merged (persistent + pending) view back to the store:
    self.store._update_template_components(self.store.get_template_components())
    self._clear_add_template_components()
|
|
633
|
+
|
|
634
|
+
@TimeIt.decorator
@commits_data("update_param_sources")
def commit_param_sources(self) -> None:
    """Make pending changes to parameter sources persistent."""
    pending = self.update_param_sources
    if not pending:
        return
    param_ids = list(pending)
    self.logger.debug(f"commit: updating sources of parameter IDs {param_ids!r}.")
    self.store._update_parameter_sources(pending)
    for param_id in param_ids:
        # stored source changed; drop any cached copy
        self.store.param_sources_cache.pop(param_id, None)
    self._clear_update_param_sources()
|
|
645
|
+
|
|
646
|
+
@TimeIt.decorator
@commits_data("update_loop_indices")
def commit_loop_indices(self) -> None:
    """Make pending element-iteration loop-index updates persistent."""
    pending = self.update_loop_indices
    if not pending:
        return
    self.logger.debug(
        f"commit: updating loop indices of {len(pending)} iteration(s)."
    )
    self.store._update_loop_index(pending)
    for iteration_id in pending:
        # stored iteration changed; drop any cached copy
        self.store.element_iter_cache.pop(iteration_id, None)
    self._clear_update_loop_indices()
|
|
659
|
+
|
|
660
|
+
@TimeIt.decorator
@commits_data("update_loop_num_iters")
def commit_loop_num_iters(self) -> None:
    """Make pending loop iteration-count updates persistent."""
    for loop_index, count in self.update_loop_num_iters.items():
        self.logger.debug(
            f"commit: updating loop {loop_index!r} number of iterations to {count!r}."
        )
        self.store._update_loop_num_iters(loop_index, count)
    self._clear_update_loop_num_iters()
|
|
670
|
+
|
|
671
|
+
@TimeIt.decorator
@commits_data("update_loop_parents")
def commit_loop_parents(self) -> None:
    """Make pending additional-loop-parent updates persistent."""
    for loop_index, new_parents in self.update_loop_parents.items():
        self.logger.debug(f"commit: updating loop {loop_index!r} parents to {new_parents!r}.")
        self.store._update_loop_parents(loop_index, new_parents)
    self._clear_update_loop_parents()
|
|
679
|
+
|
|
680
|
+
@TimeIt.decorator
@commits_data("update_iter_data_idx")
def commit_iter_data_idx(self) -> None:
    """Persist pending element-iteration data-index updates."""
    if not self.update_iter_data_idx:
        return
    self.store._update_iter_data_indices(self.update_iter_data_idx)
    self._clear_update_iter_data_idx()
|
|
686
|
+
|
|
687
|
+
@TimeIt.decorator
@commits_data("update_run_data_idx")
def commit_run_data_idx(self) -> None:
    """Persist pending run data-index updates."""
    if not self.update_run_data_idx:
        return
    self.store._update_run_data_indices(self.update_run_data_idx)
    self._clear_update_run_data_idx()
|
|
693
|
+
|
|
694
|
+
def _clear_add_tasks(self) -> None:
|
|
695
|
+
self.add_tasks = {}
|
|
696
|
+
|
|
697
|
+
def _clear_add_loops(self) -> None:
|
|
698
|
+
self.add_loops = {}
|
|
699
|
+
|
|
700
|
+
def _clear_add_submissions(self) -> None:
|
|
701
|
+
self.add_submissions = {}
|
|
702
|
+
|
|
703
|
+
def _clear_at_submit_metadata(self) -> None:
    """Reset pending at-submit-time metadata: each submission defaults to a
    mapping of every submit-time key to ``None``."""
    self.update_at_submit_metadata = defaultdict(
        lambda: {i: None for i in SUBMISSION_SUBMIT_TIME_KEYS}
    )
|
|
707
|
+
|
|
708
|
+
def _clear_add_elements(self) -> None:
|
|
709
|
+
self.add_elements = {}
|
|
710
|
+
|
|
711
|
+
def _clear_add_element_sets(self) -> None:
    """Reset pending element sets to an empty per-task mapping of lists."""
    self.add_element_sets = defaultdict(list)
|
|
713
|
+
|
|
714
|
+
def _clear_add_elem_iters(self) -> None:
|
|
715
|
+
self.add_elem_iters = {}
|
|
716
|
+
|
|
717
|
+
def _clear_add_EARs(self) -> None:
|
|
718
|
+
self.add_EARs = {}
|
|
719
|
+
|
|
720
|
+
def _clear_set_run_dirs(self):
|
|
721
|
+
self.set_run_dirs = []
|
|
722
|
+
|
|
723
|
+
def _clear_add_elem_IDs(self) -> None:
    """Reset pending element-ID assignments to an empty per-task mapping of lists."""
    self.add_elem_IDs = defaultdict(list)
|
|
725
|
+
|
|
726
|
+
def _clear_add_elem_iter_IDs(self) -> None:
    """Reset pending iteration-ID assignments to an empty per-element mapping of lists."""
    self.add_elem_iter_IDs = defaultdict(list)
|
|
728
|
+
|
|
729
|
+
def _clear_add_elem_iter_EAR_IDs(self) -> None:
    """Reset pending EAR-ID assignments to an empty nested mapping
    (iteration ID -> action index -> list of run IDs)."""
    self.add_elem_iter_EAR_IDs = defaultdict(lambda: defaultdict(list))
|
|
731
|
+
|
|
732
|
+
def _clear_set_EARs_initialised(self) -> None:
|
|
733
|
+
self.set_EARs_initialised = []
|
|
734
|
+
|
|
735
|
+
def _clear_EAR_submission_data(self) -> None:
|
|
736
|
+
self.set_EAR_submission_data = {}
|
|
737
|
+
|
|
738
|
+
def _clear_set_EAR_starts(self) -> None:
|
|
739
|
+
self.set_EAR_starts = {}
|
|
740
|
+
|
|
741
|
+
def _clear_set_EAR_ends(self) -> None:
|
|
742
|
+
self.set_EAR_ends = {}
|
|
743
|
+
|
|
744
|
+
def _clear_set_EAR_skips(self) -> None:
|
|
745
|
+
self.set_EAR_skips = {}
|
|
746
|
+
|
|
747
|
+
def _clear_set_js_metadata(self) -> None:
    """Reset pending jobscript metadata to an empty doubly-nested mapping."""
    self.set_js_metadata = defaultdict(lambda: defaultdict(dict))
|
|
749
|
+
|
|
750
|
+
def _clear_add_parameters(self) -> None:
|
|
751
|
+
self.add_parameters = {}
|
|
752
|
+
|
|
753
|
+
def _clear_add_files(self) -> None:
|
|
754
|
+
self.add_files = []
|
|
755
|
+
|
|
756
|
+
def _clear_add_template_components(self) -> None:
    """Reset pending template components to an empty per-key mapping of dicts."""
    self.add_template_components = defaultdict(dict)
|
|
758
|
+
|
|
759
|
+
def _clear_set_parameters(self) -> None:
|
|
760
|
+
self.set_parameters = {}
|
|
761
|
+
|
|
762
|
+
def _clear_update_param_sources(self) -> None:
|
|
763
|
+
self.update_param_sources = {}
|
|
764
|
+
|
|
765
|
+
def _clear_update_loop_indices(self) -> None:
    """Reset pending loop-index updates to an empty per-iteration mapping of dicts."""
    self.update_loop_indices = defaultdict(dict)
|
|
767
|
+
|
|
768
|
+
def _clear_update_loop_num_iters(self) -> None:
|
|
769
|
+
self.update_loop_num_iters = {}
|
|
770
|
+
|
|
771
|
+
def _clear_update_loop_parents(self) -> None:
|
|
772
|
+
self.update_loop_parents = {}
|
|
773
|
+
|
|
774
|
+
def _clear_update_iter_data_idx(self):
    """Reset pending iteration data-index updates to an empty per-iteration mapping."""
    self.update_iter_data_idx = defaultdict(dict)
|
|
776
|
+
|
|
777
|
+
def _clear_update_run_data_idx(self):
    """Reset pending run data-index updates to an empty per-run mapping."""
    self.update_run_data_idx = defaultdict(dict)
|
|
779
|
+
|
|
780
|
+
def reset(self, is_init: bool = False) -> None:
    """Clear all pending data and prepare to accept new pending data."""

    if not is_init:
        if not self:
            return  # no pending changes; nothing to reset
        self.logger.info("resetting pending changes.")

    # additions:
    self._clear_add_tasks()
    self._clear_add_loops()
    self._clear_add_submissions()
    self._clear_at_submit_metadata()
    self._clear_add_elements()
    self._clear_add_element_sets()
    self._clear_add_elem_iters()
    self._clear_add_EARs()
    self._clear_set_run_dirs()

    # ID/flag assignments:
    self._clear_set_EARs_initialised()
    self._clear_add_elem_IDs()
    self._clear_add_elem_iter_IDs()
    self._clear_add_elem_iter_EAR_IDs()

    # parameters, files, template components:
    self._clear_add_parameters()
    self._clear_add_files()
    self._clear_add_template_components()

    # run lifecycle updates:
    self._clear_EAR_submission_data()
    self._clear_set_EAR_starts()
    self._clear_set_EAR_ends()
    self._clear_set_EAR_skips()

    self._clear_set_js_metadata()
    self._clear_set_parameters()

    # miscellaneous updates:
    self._clear_update_param_sources()
    self._clear_update_loop_indices()
    self._clear_update_loop_num_iters()
    self._clear_update_loop_parents()
    self._clear_update_iter_data_idx()
    self._clear_update_run_data_idx()
|
|
823
|
+
|
|
824
|
+
|
|
825
|
+
@dataclass
class CommitResourceMap:
    """
    Map of :py:class:`PendingChanges` commit method names to store resource labels,
    representing the store resources required by each ``commit_*`` method, for a given
    :py:class:`~.PersistentStore`.

    When :py:meth:`PendingChanges.commit_all` is called, the resources specified will be
    opened in "update" mode, for each ``commit_*`` method.

    Notes
    -----
    Normally only of interest to implementations of persistent stores.
    """

    #: Resources for :py:meth:`~.PendingChanges.commit_tasks`.
    commit_tasks: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_loops`.
    commit_loops: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_submissions`.
    commit_submissions: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_at_submit_metadata`.
    commit_at_submit_metadata: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_elem_IDs`.
    commit_elem_IDs: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_elements`.
    commit_elements: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_element_sets`.
    commit_element_sets: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_elem_iter_IDs`.
    commit_elem_iter_IDs: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_elem_iters`.
    commit_elem_iters: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_elem_iter_EAR_IDs`.
    commit_elem_iter_EAR_IDs: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_EARs_initialised`.
    commit_EARs_initialised: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_EARs`.
    commit_EARs: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_EAR_submission_indices`.
    commit_EAR_submission_indices: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_EAR_skips`.
    commit_EAR_skips: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_EAR_starts`.
    commit_EAR_starts: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_EAR_ends`.
    commit_EAR_ends: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_js_metadata`.
    commit_js_metadata: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_parameters`.
    commit_parameters: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_files`.
    commit_files: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_template_components`.
    commit_template_components: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_param_sources`.
    commit_param_sources: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_loop_indices`.
    commit_loop_indices: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_loop_num_iters`.
    commit_loop_num_iters: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_loop_parents`.
    commit_loop_parents: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_set_run_dirs`.
    commit_set_run_dirs: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_iter_data_idx`.
    commit_iter_data_idx: tuple[str, ...] | None = tuple()
    #: Resources for :py:meth:`~.PendingChanges.commit_run_data_idx`.
    commit_run_data_idx: tuple[str, ...] | None = tuple()

    #: A dict whose keys are tuples of resource labels and whose values are lists
    #: of :py:class:`PendingChanges` commit method names that require those resources.
    #:
    #: This grouping allows us to batch up commit methods by resource requirements,
    #: which in turn means we can potentially minimise, e.g., the number of network
    #: requests.
    groups: Mapping[tuple[str, ...], Sequence[str]] = field(
        init=False, repr=False, compare=False
    )

    def __post_init__(self) -> None:
        # `groups` is derived from the `commit_*` fields, not passed by callers.
        self.groups = self._group_by_resource()

    def _group_by_resource(self) -> Mapping[tuple[str, ...], Sequence[str]]:
        """
        Get a dict whose keys are tuples of resource labels and whose values are
        lists of :py:class:`PendingChanges` commit method names that require those
        resource.

        This grouping allows us to batch up commit methods by resource requirements,
        which in turn means we can potentially minimise e.g. the number of network
        requests.

        Notes
        -----
        Grouping is order-sensitive: dataclass field order determines which adjacent
        methods can merge into one group. A method with no resource labels merges
        into whatever group is currently open.
        """
        groups: dict[tuple[str, ...], list[str]] = {}
        # The dicts are pretending to be insertion-ordered sets
        cur_res_group: tuple[dict[str, None], list[str]] | None = None
        for fld in fields(self):
            # only the `commit_*` fields carry resource labels (skips e.g. `groups`):
            if not fld.name.startswith("commit_"):
                continue
            res_labels = getattr(self, fld.name)

            if not cur_res_group:
                # start a new resource group: a mapping between resource labels and the
                # commit methods that require those resources:
                cur_res_group = (dict.fromkeys(res_labels), [fld.name])

            elif not res_labels or set(res_labels).intersection(cur_res_group[0]):
                # there is some overlap between resource labels required in the current
                # group and this commit method, so we merge resource labels and add the
                # new commit method:
                cur_res_group[0].update(dict.fromkeys(res_labels))
                cur_res_group[1].append(fld.name)

            else:
                # no overlap between resource labels required in the current group and
                # those required by this commit method, so append the current group, and
                # start a new group for this commit method:
                groups.setdefault(tuple(cur_res_group[0]), []).extend(cur_res_group[1])
                cur_res_group = (dict.fromkeys(res_labels), [fld.name])

        # flush the final open group:
        if cur_res_group:
            groups.setdefault(tuple(cur_res_group[0]), []).extend(cur_res_group[1])

        return groups
|