hpcflow-new2 0.2.0a179__py3-none-any.whl → 0.2.0a180__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. hpcflow/_version.py +1 -1
  2. hpcflow/data/demo_data_manifest/__init__.py +3 -0
  3. hpcflow/sdk/__init__.py +4 -1
  4. hpcflow/sdk/app.py +160 -15
  5. hpcflow/sdk/cli.py +14 -0
  6. hpcflow/sdk/cli_common.py +83 -0
  7. hpcflow/sdk/config/__init__.py +4 -0
  8. hpcflow/sdk/config/callbacks.py +25 -2
  9. hpcflow/sdk/config/cli.py +4 -1
  10. hpcflow/sdk/config/config.py +188 -14
  11. hpcflow/sdk/config/config_file.py +91 -3
  12. hpcflow/sdk/config/errors.py +33 -0
  13. hpcflow/sdk/core/__init__.py +2 -0
  14. hpcflow/sdk/core/actions.py +492 -35
  15. hpcflow/sdk/core/cache.py +22 -0
  16. hpcflow/sdk/core/command_files.py +221 -5
  17. hpcflow/sdk/core/commands.py +57 -0
  18. hpcflow/sdk/core/element.py +407 -8
  19. hpcflow/sdk/core/environment.py +92 -0
  20. hpcflow/sdk/core/errors.py +245 -61
  21. hpcflow/sdk/core/json_like.py +72 -14
  22. hpcflow/sdk/core/loop.py +122 -21
  23. hpcflow/sdk/core/loop_cache.py +34 -9
  24. hpcflow/sdk/core/object_list.py +172 -26
  25. hpcflow/sdk/core/parallel.py +14 -0
  26. hpcflow/sdk/core/parameters.py +478 -25
  27. hpcflow/sdk/core/rule.py +31 -1
  28. hpcflow/sdk/core/run_dir_files.py +12 -2
  29. hpcflow/sdk/core/task.py +407 -80
  30. hpcflow/sdk/core/task_schema.py +70 -9
  31. hpcflow/sdk/core/test_utils.py +35 -0
  32. hpcflow/sdk/core/utils.py +101 -4
  33. hpcflow/sdk/core/validation.py +13 -1
  34. hpcflow/sdk/core/workflow.py +316 -96
  35. hpcflow/sdk/core/zarr_io.py +23 -0
  36. hpcflow/sdk/data/__init__.py +13 -0
  37. hpcflow/sdk/demo/__init__.py +3 -0
  38. hpcflow/sdk/helper/__init__.py +3 -0
  39. hpcflow/sdk/helper/cli.py +9 -0
  40. hpcflow/sdk/helper/helper.py +28 -0
  41. hpcflow/sdk/helper/watcher.py +33 -0
  42. hpcflow/sdk/log.py +40 -0
  43. hpcflow/sdk/persistence/__init__.py +14 -4
  44. hpcflow/sdk/persistence/base.py +289 -23
  45. hpcflow/sdk/persistence/json.py +29 -0
  46. hpcflow/sdk/persistence/pending.py +217 -107
  47. hpcflow/sdk/persistence/store_resource.py +58 -2
  48. hpcflow/sdk/persistence/utils.py +8 -0
  49. hpcflow/sdk/persistence/zarr.py +68 -1
  50. hpcflow/sdk/runtime.py +52 -10
  51. hpcflow/sdk/submission/__init__.py +3 -0
  52. hpcflow/sdk/submission/jobscript.py +198 -9
  53. hpcflow/sdk/submission/jobscript_info.py +13 -0
  54. hpcflow/sdk/submission/schedulers/__init__.py +60 -0
  55. hpcflow/sdk/submission/schedulers/direct.py +53 -0
  56. hpcflow/sdk/submission/schedulers/sge.py +45 -7
  57. hpcflow/sdk/submission/schedulers/slurm.py +45 -8
  58. hpcflow/sdk/submission/schedulers/utils.py +4 -0
  59. hpcflow/sdk/submission/shells/__init__.py +11 -1
  60. hpcflow/sdk/submission/shells/base.py +32 -1
  61. hpcflow/sdk/submission/shells/bash.py +36 -1
  62. hpcflow/sdk/submission/shells/os_version.py +18 -6
  63. hpcflow/sdk/submission/shells/powershell.py +22 -0
  64. hpcflow/sdk/submission/submission.py +88 -3
  65. hpcflow/sdk/typing.py +10 -1
  66. {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/METADATA +1 -1
  67. {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/RECORD +70 -70
  68. {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/LICENSE +0 -0
  69. {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/WHEEL +0 -0
  70. {hpcflow_new2-0.2.0a179.dist-info → hpcflow_new2-0.2.0a180.dist-info}/entry_points.txt +0 -0
hpcflow/sdk/core/json_like.py CHANGED
@@ -1,3 +1,7 @@
+ """
+ Serialization and deserialization mechanism intended to map between a complex
+ graph of objects and either JSON or YAML.
+ """
  from __future__ import annotations

  import copy
@@ -10,7 +14,7 @@ from .utils import classproperty, get_md5_hash
  from .validation import get_schema
  from .errors import ToJSONLikeChildReferenceError

-
+ #: Primitive types supported by the serialization mechanism.
  PRIMITIVES = (
  int,
  float,
@@ -22,6 +26,10 @@ _SDK_logger = get_SDK_logger(__name__)


  def to_json_like(obj, shared_data=None, parent_refs=None, path=None):
+ """
+ Convert the object to a JSON-like basic value tree.
+ Such trees are trivial to serialize as JSON or YAML.
+ """
  path = path or []

  if len(path) > 50:
@@ -81,32 +89,54 @@ def to_json_like(obj, shared_data=None, parent_refs=None, path=None):

  @dataclass
  class ChildObjectSpec:
+ """
+ Used to describe what the child structure of a class is so that the generic
+ deserializer can build the structure.
+ """
+
+ #: The name of the attribute.
  name: str
+ #: The name of the class (or class of members of a list) used to deserialize the
+ #: attribute.
  class_name: Optional[str] = None
+ #: The class (or class of members of a list) used to deserialize the
+ #: attribute.
  class_obj: Optional[
  Type
  ] = None # TODO: no need for class_obj/class_name if shared data?
+ #: The name of the key used in the JSON document, if different from the attribute
+ #: name.
  json_like_name: Optional[str] = None
+ #: If true, the attribute is really a list of instances,
+ #: or a dictionary if :attr:`dict_key_attr` is set.
  is_multiple: Optional[bool] = False
+ #: If set, the name of an attribute of the object to use as a dictionary key.
+ #: Requires that :attr:`is_multiple` be set as well.
  dict_key_attr: Optional[str] = None
+ #: If set, the name of an attribute of the object to use as a dictionary value.
+ #: If not set but :attr:`dict_key_attr` is set, the whole object is the value.
+ #: Requires that :attr:`dict_key_attr` be set as well.
  dict_val_attr: Optional[str] = None
+ #: If set, the attribute of the child object that contains a reference to its parent.
  parent_ref: Optional[
  str
  ] = None # TODO: do parent refs make sense when from shared? Prob not.
- is_single_attribute: Optional[
- bool
- ] = False # if True, obj is not represented as a dict of attr name-values, but just a value.
- is_enum: Optional[
- bool
- ] = False # if true, we don't invoke to/from_json_like on the data/Enum
- is_dict_values: Optional[
- bool
- ] = False # if True, the child object is a dict, whose values are of the specified class. The dict structure will remain.
- is_dict_values_ensure_list: Optional[
- bool
- ] = False # if True, values that are not lists are cast to lists and multiple child objects are instantiated for each dict value
-
+ #: If true, the object is not represented as a dict of attr name-values, but just a value.
+ is_single_attribute: Optional[bool] = False
+ #: If true, the object is an enum member and should use special serialization rules.
+ is_enum: Optional[bool] = False
+ #: If true, the child object is a dict, whose values are of the specified class.
+ #: The dict structure will remain.
+ is_dict_values: Optional[bool] = False
+ #: If true, values that are not lists are cast to lists and multiple child objects
+ #: are instantiated for each dict value.
+ is_dict_values_ensure_list: Optional[bool] = False
+ #: What key to look values up under in the shared data cache.
+ #: If unspecified, the shared data cache is ignored.
  shared_data_name: Optional[str] = None
+ #: What attribute provides the value of the key into the shared data cache.
+ #: If unspecified, a hash of the object dictionary is used.
+ #: Ignored if :py:attr:`~.shared_data_name` is unspecified.
  shared_data_primary_key: Optional[str] = None
  # shared_data_secondary_keys: Optional[Tuple[str]] = None # TODO: what's the point?

@@ -155,6 +185,8 @@ class ChildObjectSpec:

  class BaseJSONLike:
  """
+ An object that has a serialization as JSON or YAML.
+
  Parameters
  ----------
  _class_namespace : namespace
@@ -200,6 +232,21 @@ class BaseJSONLike:
  json_like: Union[Dict, List],
  shared_data: Optional[Dict[str, ObjectList]] = None,
  ):
+ """
+ Make an instance of this class from JSON (or YAML) data.
+
+ Parameters
+ ----------
+ json_like:
+ The data to deserialise.
+ shared_data:
+ Shared context data.
+
+ Returns
+ -------
+ The deserialised object.
+ """
+
  def _from_json_like_item(child_obj_spec, json_like_i):
  if not (
  child_obj_spec.class_name
@@ -403,12 +450,20 @@ class BaseJSONLike:
  return get_md5_hash(json_like)

  def to_dict(self):
+ """
+ Serialize this object as a dictionary.
+ """
  if hasattr(self, "__dict__"):
  return dict(self.__dict__)
  elif hasattr(self, "__slots__"):
  return {k: getattr(self, k) for k in self.__slots__}

  def to_json_like(self, dct=None, shared_data=None, exclude=None, path=None):
+ """
+ Serialize this object as an object structure that can be trivially converted
+ to JSON. Note that YAML can also be produced from the result of this method;
+ it just requires a different final serialization step.
+ """
  if dct is None:
  dct = {k: v for k, v in self.to_dict().items() if k not in (exclude or [])}

@@ -475,6 +530,9 @@ class JSONLike(BaseJSONLike):
  return getattr(cls, cls._app_attr)

  def to_dict(self):
+ """
+ Serialize this object as a dictionary.
+ """
  out = super().to_dict()

  # remove parent references:
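
The json_like.py hunks above are documentation-only, but they outline how the serialization machinery is used: a class lists its child structure as a tuple of ChildObjectSpec entries, and the generic to_json_like/from_json_like methods use those specs to rebuild the object graph. A minimal sketch using only fields documented in this diff; the "rules" attribute name is hypothetical, while loop.py below shows a real spec (`_child_objects = (ChildObjectSpec(name="termination", class_name="Rule"),)`):

    from hpcflow.sdk.core.json_like import ChildObjectSpec

    # Hypothetical spec: rebuild the "rules" attribute as a list of Rule objects,
    # reading it from the "rules" key of the JSON/YAML document.
    rules_spec = ChildObjectSpec(
        name="rules",            # attribute name on the Python object
        class_name="Rule",       # class used to deserialize each member
        json_like_name="rules",  # key in the JSON document
        is_multiple=True,        # the attribute holds a list of child objects
    )

Classes deriving from JSONLike collect such specs in a `_child_objects` tuple; instances then round-trip through the `to_json_like()` and `from_json_like()` methods documented above.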
hpcflow/sdk/core/loop.py CHANGED
@@ -1,3 +1,9 @@
+ """
+ A looping construct for a workflow.
+ There are multiple types of loop,
+ notably looping over a set of values or until a condition holds.
+ """
+
  from __future__ import annotations

  import copy
@@ -26,11 +32,29 @@ from hpcflow.sdk.log import TimeIt
  # @dataclass
  # class Loop:
  # parameter: Parameter
- # stopping_criteria: StoppingCriterion # TODO: should be a logical combination of these (maybe provide a superclass in valida to re-use some logic there?)
+ # stopping_criteria: StoppingCriterion
+ # # TODO: should be a logical combination of these (maybe provide a superclass in valida to re-use some logic there?)
  # maximum_iterations: int


  class Loop(JSONLike):
+ """
+ A loop in a workflow template.
+
+ Parameters
+ ----------
+ tasks: list[int | ~hpcflow.app.WorkflowTask]
+ List of task insert IDs or workflow tasks.
+ num_iterations:
+ Number of iterations to perform.
+ name: str
+ Loop name.
+ non_iterable_parameters: list[str]
+ Specify input parameters that should not iterate.
+ termination: ~hpcflow.app.Rule
+ Stopping criterion, expressed as a rule.
+ """
+
  _app_attr = "app"
  _child_objects = (ChildObjectSpec(name="termination", class_name="Rule"),)

@@ -42,21 +66,6 @@ class Loop(JSONLike):
  non_iterable_parameters: Optional[List[str]] = None,
  termination: Optional[app.Rule] = None,
  ) -> None:
- """
-
- Parameters
- ----------
- name
- Loop name, optional
- tasks
- List of task insert IDs or WorkflowTask objects
- non_iterable_parameters
- Specify input parameters that should not iterate.
- termination
- Stopping criterion, expressed as a rule.
-
- """
-
  _task_insert_IDs = []
  for task in tasks:
  if isinstance(task, WorkflowTask):
@@ -98,22 +107,37 @@ class Loop(JSONLike):

  @property
  def name(self):
+ """
+ The name of the loop, if one was provided.
+ """
  return self._name

  @property
  def num_iterations(self):
+ """
+ The number of loop iterations to do.
+ """
  return self._num_iterations

  @property
  def non_iterable_parameters(self):
+ """
+ Which parameters are not iterable.
+ """
  return self._non_iterable_parameters

  @property
  def termination(self):
+ """
+ A termination rule for the loop, if one is provided.
+ """
  return self._termination

  @property
  def workflow_template(self):
+ """
+ The workflow template that contains this loop.
+ """
  return self._workflow_template

  @workflow_template.setter
@@ -123,6 +147,9 @@ class Loop(JSONLike):

  @property
  def task_objects(self) -> Tuple[app.WorkflowTask]:
+ """
+ The tasks in the loop.
+ """
  if not self.workflow_template:
  raise RuntimeError(
  "Workflow template must be assigned to retrieve task objects of the loop."
@@ -169,7 +196,25 @@


  class WorkflowLoop:
- """Class to represent a Loop that is bound to a Workflow."""
+ """
+ Class to represent a :py:class:`.Loop` that is bound to a
+ :py:class:`~hpcflow.app.Workflow`.
+
+ Parameters
+ ----------
+ index: int
+ The index of this loop in the workflow.
+ workflow: ~hpcflow.app.Workflow
+ The workflow containing this loop.
+ template: Loop
+ The loop that this was generated from.
+ num_added_iterations:
+ Description of what iterations have been added.
+ iterable_parameters:
+ Description of what parameters are being iterated over.
+ parents: list[str]
+ The paths to the parent entities of this loop.
+ """

  _app_attr = "app"

@@ -229,7 +274,9 @@ class WorkflowLoop:

  @property
  def num_added_iterations(self):
-
+ """
+ The number of added iterations.
+ """
  if self._pending_num_added_iterations:
  return self._pending_num_added_iterations
  else:
@@ -281,53 +328,82 @@ class WorkflowLoop:

  @property
  def index(self):
+ """
+ The index of this loop within its workflow.
+ """
  return self._index

  @property
  def task_insert_IDs(self):
+ """
+ The insertion IDs of the tasks inside this loop.
+ """
  return self.template.task_insert_IDs

  @property
  def task_objects(self):
+ """
+ The tasks in this loop.
+ """
  return self.template.task_objects

  @property
  def task_indices(self) -> Tuple[int]:
- """Get the list of task indices that define the extent of the loop."""
+ """
+ The list of task indices that define the extent of the loop.
+ """
  return tuple(i.index for i in self.task_objects)

  @property
  def workflow(self):
+ """
+ The workflow containing this loop.
+ """
  return self._workflow

  @property
  def template(self):
+ """
+ The loop template for this loop.
+ """
  return self._template

  @property
  def parents(self) -> List[str]:
+ """
+ The parents of this loop.
+ """
  return self._parents + self._pending_parents

  @property
  def name(self):
+ """
+ The name of this loop, if one is defined.
+ """
  return self.template.name

  @property
  def iterable_parameters(self):
+ """
+ The parameters that are being iterated over.
+ """
  return self._iterable_parameters

  @property
  def num_iterations(self):
+ """
+ The number of iterations.
+ """
  return self.template.num_iterations

  @property
  def downstream_tasks(self) -> List[app.WorkflowLoop]:
- """Return tasks that are not part of the loop, and downstream from this loop."""
+ """Tasks that are not part of the loop, and downstream from this loop."""
  return self.workflow.tasks[self.task_objects[-1].index + 1 :]

  @property
  def upstream_tasks(self) -> List[app.WorkflowLoop]:
- """Return tasks that are not part of the loop, and upstream from this loop."""
+ """Tasks that are not part of the loop, and upstream from this loop."""
  return self.workflow.tasks[: self.task_objects[0].index]

  @staticmethod
@@ -367,6 +443,20 @@ class WorkflowLoop:
  template: app.Loop,
  iter_loop_idx: List[Dict],
  ) -> Tuple[app.WorkflowLoop, List[Dict[str, int]]]:
+ """
+ Make a new empty loop.
+
+ Parameters
+ ----------
+ index: int
+ The index of the loop to create.
+ workflow: ~hpcflow.app.Workflow
+ The workflow that will contain the loop.
+ template: Loop
+ The template for the loop.
+ iter_loop_idx: list[dict]
+ Iteration information from parent loops.
+ """
  parent_loops = cls._get_parent_loops(index, workflow, template)
  parent_names = [i.name for i in parent_loops]
  num_added_iters = {}
@@ -436,6 +526,17 @@ class WorkflowLoop:

  @TimeIt.decorator
  def add_iteration(self, parent_loop_indices=None, cache: Optional[LoopCache] = None):
+ """
+ Add an iteration to this loop.
+
+ Parameters
+ ----------
+ parent_loop_indices:
+ Where have any parent loops got up to?
+ cache:
+ A cache used to make adding the iteration more efficient.
+ One will be created if it is not supplied.
+ """
  if not cache:
  cache = LoopCache.build(self.workflow)
  parent_loops = self.get_parent_loops()
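
The new Loop docstring above doubles as a constructor reference. A minimal sketch of declaring a loop template with those parameters; the `import hpcflow.app as hf` entry point and the `hf.Loop` attribute are conventional hpcflow usage rather than part of this diff, and the parameter values are hypothetical:

    import hpcflow.app as hf

    # Loop over the tasks with insert IDs 0 and 1 for three iterations.
    # WorkflowTask objects may be passed instead of insert IDs.
    loop = hf.Loop(
        name="outer",
        tasks=[0, 1],
        num_iterations=3,
        non_iterable_parameters=["p_fixed"],  # hypothetical parameter name
    )

Once the loop is bound to a workflow, further iterations are added via the add_iteration method documented in the final hunk above, optionally with a pre-built cache (see the loop_cache.py section below).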
hpcflow/sdk/core/loop_cache.py CHANGED
@@ -1,3 +1,7 @@
+ """
+ Cache of loop statuses.
+ """
+
  from dataclasses import dataclass
  from collections import defaultdict
  from typing import Dict, List, Optional, Tuple
@@ -10,40 +14,55 @@ from hpcflow.sdk.core.cache import DependencyCache

  @dataclass
  class LoopCache:
- """Class to store a cache for use in `Workflow.add_empty_loop` and
- `WorkflowLoop.add_iterations`.
+ """Class to store a cache for use in :py:meth:`.Workflow.add_empty_loop` and
+ :py:meth:`.WorkflowLoop.add_iterations`. Use :py:meth:`build` to get a new instance.

- Attributes
+ Parameters
  ----------
- element_dependents
+ element_dependents:
  Keys are element IDs, values are dicts whose keys are element IDs that depend on
  the key element ID (via `Element.get_dependent_elements_recursively`), and whose
  values are dicts with keys: `group_names`, which is a tuple of the string group
  names associated with the dependent element's element set.
- elements
+ elements:
  Keys are element IDs, values are dicts with keys: `input_statuses`,
  `input_sources`, and `task_insert_ID`.
- zeroth_iters
+ zeroth_iters:
  Keys are element IDs, values are data associated with the zeroth iteration of that
  element, namely a tuple of iteration ID and `ElementIteration.data_idx`.
- data_idx
+ data_idx:
  Keys are element IDs, values are data associated with all iterations of that
  element, namely a dict whose keys are the iteration loop index as a tuple, and
  whose values are data indices via `ElementIteration.get_data_idx()`.
- iterations
+ iterations:
  Keys are iteration IDs, values are tuples of element ID and iteration index within
  that element.
- task_iterations
+ task_iterations:
  Keys are task insert IDs, values are list of all iteration IDs associated with
  that task.

  """

+ #: Keys are element IDs, values are dicts whose keys are element IDs that depend on
+ #: the key element ID (via `Element.get_dependent_elements_recursively`), and whose
+ #: values are dicts with keys: `group_names`, which is a tuple of the string group
+ #: names associated with the dependent element's element set.
  element_dependents: Dict[int, Dict]
+ #: Keys are element IDs, values are dicts with keys: `input_statuses`,
+ #: `input_sources`, and `task_insert_ID`.
  elements: Dict[int, Dict]
+ #: Keys are element IDs, values are data associated with the zeroth iteration of that
+ #: element, namely a tuple of iteration ID and `ElementIteration.data_idx`.
  zeroth_iters: Dict[int, Tuple]
+ #: Keys are element IDs, values are data associated with all iterations of that
+ #: element, namely a dict whose keys are the iteration loop index as a tuple, and
+ #: whose values are data indices via `ElementIteration.get_data_idx()`.
  data_idx: Dict[int, Dict]
+ #: Keys are iteration IDs, values are tuples of element ID and iteration index within
+ #: that element.
  iterations: Dict[int, Tuple]
+ #: Keys are task insert IDs, values are list of all iteration IDs associated with
+ #: that task.
  task_iterations: Dict[int, List[int]]

  @TimeIt.decorator
@@ -53,6 +72,9 @@ class LoopCache:

  @TimeIt.decorator
  def get_iter_loop_indices(self, iter_IDs: List[int]) -> List[Dict[str, int]]:
+ """
+ Retrieve the mapping from element to loop index for each given iteration.
+ """
  iter_loop_idx = []
  for i in iter_IDs:
  elem_id, idx = self.iterations[i]
@@ -61,6 +83,9 @@ class LoopCache:

  @TimeIt.decorator
  def update_loop_indices(self, new_loop_name: str, iter_IDs: List[int]):
+ """
+ Set the loop indices for a named loop to the given list of iteration IDs.
+ """
  elem_ids = {v[0] for k, v in self.iterations.items() if k in iter_IDs}
  for i in elem_ids:
  new_item = {}
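
The add_iteration hunk in loop.py shows the intended pattern for this cache: build one LoopCache per workflow and reuse it when adding several iterations, rather than letting each call rebuild it. A minimal sketch, assuming an existing `workflow` object whose bound loops are reachable via `workflow.loops` (an access path not shown in this diff):

    from hpcflow.sdk.core.loop_cache import LoopCache

    # LoopCache.build(workflow) is the same call WorkflowLoop.add_iteration
    # makes internally when no cache is supplied.
    cache = LoopCache.build(workflow)

    # Reusing the cache amortises the element-dependency lookups across loops.
    for wf_loop in workflow.loops:
        wf_loop.add_iteration(cache=cache)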