hpcflow-new2 0.2.0a144__py3-none-any.whl → 0.2.0a147__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
hpcflow/_version.py CHANGED
@@ -1 +1 @@
1
- __version__ = "0.2.0a144"
1
+ __version__ = "0.2.0a147"
@@ -0,0 +1,6 @@
1
+ def main_script_test_direct_sub_param_in_direct_out(a):
2
+ # process
3
+ p2 = a + 100
4
+
5
+ # return outputs
6
+ return {"p2": p2}
@@ -0,0 +1,16 @@
1
+ import json
2
+
3
+
4
+ def main_script_test_json_sub_param_in_json_out_labels(_input_files, _output_files):
5
+ # read inputs
6
+ with _input_files["json"].open("rt") as fh:
7
+ inp_dat = json.load(fh)
8
+ a = int(inp_dat["a"])
9
+ p1_2 = int(inp_dat["p1[two]"])
10
+
11
+ # process
12
+ p2 = a + p1_2
13
+
14
+ # save outputs
15
+ with _output_files["json"].open("wt") as fh:
16
+ json.dump({"p2": p2}, fh)
@@ -148,6 +148,7 @@ class ElementActionRun:
148
148
  skip: bool,
149
149
  exit_code: Union[int, None],
150
150
  metadata: Dict,
151
+ run_hostname: Union[str, None],
151
152
  ) -> None:
152
153
  self._id = id_
153
154
  self._is_pending = is_pending
@@ -163,6 +164,7 @@ class ElementActionRun:
163
164
  self._snapshot_end = snapshot_end
164
165
  self._exit_code = exit_code
165
166
  self._metadata = metadata
167
+ self._run_hostname = run_hostname
166
168
 
167
169
  # assigned on first access of corresponding properties:
168
170
  self._inputs = None
@@ -222,6 +224,10 @@ class ElementActionRun:
222
224
  def metadata(self):
223
225
  return self._metadata
224
226
 
227
+ @property
228
+ def run_hostname(self):
229
+ return self._run_hostname
230
+
225
231
  @property
226
232
  def start_time(self):
227
233
  return self._start_time
@@ -478,7 +484,7 @@ class ElementActionRun:
478
484
  Parameters
479
485
  ----------
480
486
  inputs
481
- If specified, a list of input parameter types to include, of a dict whose keys
487
+ If specified, a list of input parameter types to include, or a dict whose keys
482
488
  are input parameter types to include. For schema inputs that have
483
489
  `multiple=True`, the input type should be labelled. If a dict is passed, and
484
490
  the key "all_iterations` is present and `True`, the return for that input
@@ -489,10 +495,11 @@ class ElementActionRun:
489
495
  keys.
490
496
 
491
497
  """
498
+ if not inputs:
499
+ inputs = self.get_parameter_names("inputs")
500
+
492
501
  out = {}
493
- for inp_name in self.get_parameter_names("inputs"):
494
- if inputs and inp_name not in inputs:
495
- continue
502
+ for inp_name in inputs:
496
503
  path_i, label_i = split_param_label(inp_name)
497
504
 
498
505
  try:
@@ -512,12 +519,20 @@ class ElementActionRun:
512
519
  else:
513
520
  val_i = self.get(f"inputs.{inp_name}")
514
521
 
522
+ key = inp_name
523
+ if label_dict and label_i:
524
+ key = path_i # exclude label from key
525
+
526
+ if "." in key:
527
+ # for sub-parameters, take only the final part as the dict key:
528
+ key = key.split(".")[-1]
529
+
515
530
  if label_dict and label_i:
516
- if path_i not in out:
517
- out[path_i] = {}
518
- out[path_i][label_i] = val_i
531
+ if key not in out:
532
+ out[key] = {}
533
+ out[key][label_i] = val_i
519
534
  else:
520
- out[inp_name] = val_i
535
+ out[key] = val_i
521
536
 
522
537
  return out
523
538
 
@@ -1163,8 +1178,8 @@ class Action(JSONLike):
1163
1178
  # validation:
1164
1179
  allowed_keys = ("format", "all_iterations")
1165
1180
  for k, v in all_params.items():
1166
- # validate parameter name:
1167
- if k not in param_names:
1181
+ # validate parameter name (sub-parameters are allowed):
1182
+ if k.split(".")[0] not in param_names:
1168
1183
  raise UnknownScriptDataParameter(
1169
1184
  f"Script data parameter {k!r} is not a known parameter of the "
1170
1185
  f"action. Parameters ({prefix}) are: {param_names!r}."
@@ -274,7 +274,7 @@ class ElementResources(JSONLike):
274
274
  def get_env_instance_filterable_attributes() -> Tuple[str]:
275
275
  """Get a tuple of resource attributes that are used to filter environment
276
276
  executable instances at submit- and run-time."""
277
- return ("num_cores", "parallel_mode")
277
+ return ("num_cores",) # TODO: filter on `parallel_mode` later
278
278
 
279
279
  @staticmethod
280
280
  def get_default_os_name():
hpcflow/sdk/core/loop.py CHANGED
@@ -423,7 +423,15 @@ class WorkflowLoop:
423
423
 
424
424
  elif orig_inp_src.source_type is InputSourceType.DEFAULT:
425
425
  # keep default value from original element
426
- inp_dat_idx = element.iterations[0].get_data_idx()[inp_key]
426
+ inp_dat_idx_iter_0 = element.iterations[0].get_data_idx()
427
+ try:
428
+ inp_dat_idx = inp_dat_idx_iter_0[inp_key]
429
+ except KeyError:
430
+ # if this input is required by a conditional action, and
431
+ # that condition is not met, then this input will not
432
+ # exist in the action-run data index, so use the initial
433
+ # iteration data index:
434
+ inp_dat_idx = element.iterations[0].data_idx[inp_key]
427
435
 
428
436
  elif orig_inp_src.source_type is InputSourceType.TASK:
429
437
  if orig_inp_src.task_ref not in self.task_insert_IDs:
@@ -486,7 +494,16 @@ class WorkflowLoop:
486
494
  inp_status_inps = set([f"inputs.{i}" for i in inp_statuses])
487
495
  sub_params = inp_status_inps - set(new_data_idx.keys())
488
496
  for sub_param_i in sub_params:
489
- sub_param_data_idx = element.iterations[0].get_data_idx()[sub_param_i]
497
+ sub_param_data_idx_iter_0 = element.iterations[0].get_data_idx()
498
+ try:
499
+ sub_param_data_idx = sub_param_data_idx_iter_0[sub_param_i]
500
+ except KeyError:
501
+ # as before, if this input is required by a conditional action,
502
+ # and that condition is not met, then this input will not exist in
503
+ # the action-run data index, so use the initial iteration data
504
+ # index:
505
+ sub_param_data_idx = element.iterations[0].data_idx[sub_param_i]
506
+
490
507
  new_data_idx[sub_param_i] = sub_param_data_idx
491
508
 
492
509
  for out in task.template.all_schema_outputs:
@@ -10,6 +10,7 @@ import enum
10
10
  import os
11
11
  from pathlib import Path
12
12
  import shutil
13
+ import socket
13
14
  import time
14
15
  from typing import Any, Dict, Iterable, List, Optional, Tuple, TypeVar, Union
15
16
 
@@ -306,6 +307,7 @@ class StoreEAR:
306
307
  snapshot_end: Optional[Dict] = None
307
308
  exit_code: Optional[int] = None
308
309
  metadata: Dict[str, Any] = None
310
+ run_hostname: Optional[str] = None
309
311
 
310
312
  @staticmethod
311
313
  def _encode_datetime(dt: Union[datetime, None], ts_fmt: str) -> str:
@@ -331,6 +333,7 @@ class StoreEAR:
331
333
  "snapshot_end": self.snapshot_end,
332
334
  "exit_code": self.exit_code,
333
335
  "metadata": self.metadata,
336
+ "run_hostname": self.run_hostname,
334
337
  }
335
338
 
336
339
  @classmethod
@@ -365,6 +368,7 @@ class StoreEAR:
365
368
  "snapshot_end": self.snapshot_end,
366
369
  "exit_code": self.exit_code,
367
370
  "metadata": self.metadata,
371
+ "run_hostname": self.run_hostname,
368
372
  }
369
373
 
370
374
  def update(
@@ -377,6 +381,7 @@ class StoreEAR:
377
381
  snapshot_start: Optional[Dict] = None,
378
382
  snapshot_end: Optional[Dict] = None,
379
383
  exit_code: Optional[int] = None,
384
+ run_hostname: Optional[str] = None,
380
385
  ) -> AnySEAR:
381
386
  """Return a shallow copy, with specified data updated."""
382
387
 
@@ -388,6 +393,7 @@ class StoreEAR:
388
393
  snap_s = snapshot_start if snapshot_start is not None else self.snapshot_start
389
394
  snap_e = snapshot_end if snapshot_end is not None else self.snapshot_end
390
395
  exit_code = exit_code if exit_code is not None else self.exit_code
396
+ run_hn = run_hostname if run_hostname is not None else self.run_hostname
391
397
 
392
398
  return self.__class__(
393
399
  id_=self.id_,
@@ -404,6 +410,7 @@ class StoreEAR:
404
410
  snapshot_start=snap_s,
405
411
  snapshot_end=snap_e,
406
412
  exit_code=exit_code,
413
+ run_hostname=run_hn,
407
414
  )
408
415
 
409
416
 
@@ -961,7 +968,8 @@ class PersistentStore(ABC):
961
968
  snapshot = JSONLikeDirSnapShot()
962
969
  snapshot.take(".")
963
970
  ss_js = snapshot.to_json_like()
964
- self._pending.set_EAR_starts[EAR_ID] = (dt, ss_js)
971
+ run_hostname = socket.gethostname()
972
+ self._pending.set_EAR_starts[EAR_ID] = (dt, ss_js, run_hostname)
965
973
  if save:
966
974
  self.save()
967
975
  return dt
@@ -1419,7 +1427,7 @@ class PersistentStore(ABC):
1419
1427
  pend_end = self._pending.set_EAR_ends.get(EAR_i.id_)
1420
1428
  pend_skip = True if EAR_i.id_ in self._pending.set_EAR_skips else None
1421
1429
 
1422
- p_st, p_ss = pend_start if pend_start else (None, None)
1430
+ p_st, p_ss, p_hn = pend_start if pend_start else (None, None, None)
1423
1431
  p_et, p_se, p_ex, p_sx = pend_end if pend_end else (None, None, None, None)
1424
1432
 
1425
1433
  updates = {
@@ -1431,6 +1439,7 @@ class PersistentStore(ABC):
1431
1439
  "snapshot_start": p_ss,
1432
1440
  "snapshot_end": p_se,
1433
1441
  "exit_code": p_ex,
1442
+ "run_hostname": p_hn,
1434
1443
  }
1435
1444
  if any(i is not None for i in updates.values()):
1436
1445
  EAR_i = EAR_i.update(**updates)
@@ -232,10 +232,11 @@ class JSONPersistentStore(PersistentStore):
232
232
  with self.using_resource("metadata", action="update") as md:
233
233
  md["runs"][EAR_id]["submission_idx"] = sub_idx
234
234
 
235
- def _update_EAR_start(self, EAR_id: int, s_time: datetime, s_snap: Dict):
235
+ def _update_EAR_start(self, EAR_id: int, s_time: datetime, s_snap: Dict, s_hn: str):
236
236
  with self.using_resource("metadata", action="update") as md:
237
237
  md["runs"][EAR_id]["start_time"] = s_time.strftime(self.ts_fmt)
238
238
  md["runs"][EAR_id]["snapshot_start"] = s_snap
239
+ md["runs"][EAR_id]["run_hostname"] = s_hn
239
240
 
240
241
  def _update_EAR_end(
241
242
  self, EAR_id: int, e_time: datetime, e_snap: Dict, ext_code: int, success: bool
@@ -59,7 +59,7 @@ class PendingChanges:
59
59
  self.set_EARs_initialised: List[int] = None
60
60
  self.set_EAR_submission_indices: Dict[int, int] = None
61
61
  self.set_EAR_skips: List[int] = None
62
- self.set_EAR_starts: Dict[int, Tuple[datetime, Dict]] = None
62
+ self.set_EAR_starts: Dict[int, Tuple[datetime, Dict], str] = None
63
63
  self.set_EAR_ends: Dict[int, Tuple[datetime, Dict, int, bool]] = None
64
64
 
65
65
  self.set_js_metadata: Dict[int, Dict[int, Any]] = None
@@ -286,12 +286,12 @@ class PendingChanges:
286
286
 
287
287
  def commit_EAR_starts(self) -> None:
288
288
  # TODO: could be batched up?
289
- for EAR_id, (time, snap) in self.set_EAR_starts.items():
289
+ for EAR_id, (time, snap, hostname) in self.set_EAR_starts.items():
290
290
  self.logger.debug(
291
- f"commit: adding pending start time ({time!r}) and "
292
- f"directory snapshot to EAR ID {EAR_id!r}."
291
+ f"commit: adding pending start time ({time!r}), run hostname "
292
+ f"({hostname!r}), and directory snapshot to EAR ID {EAR_id!r}."
293
293
  )
294
- self.store._update_EAR_start(EAR_id, time, snap)
294
+ self.store._update_EAR_start(EAR_id, time, snap, hostname)
295
295
  self.clear_set_EAR_starts()
296
296
 
297
297
  def commit_EAR_ends(self) -> None:
@@ -230,6 +230,7 @@ class ZarrStoreEAR(StoreEAR):
230
230
  self.snapshot_end,
231
231
  self.exit_code,
232
232
  self.metadata,
233
+ self.run_hostname,
233
234
  ]
234
235
  return EAR_enc
235
236
 
@@ -250,6 +251,7 @@ class ZarrStoreEAR(StoreEAR):
250
251
  "snapshot_end": EAR_dat[10],
251
252
  "exit_code": EAR_dat[11],
252
253
  "metadata": EAR_dat[12],
254
+ "run_hostname": EAR_dat[13],
253
255
  }
254
256
  return cls(is_pending=False, **obj_dat)
255
257
 
@@ -598,7 +600,7 @@ class ZarrPersistentStore(PersistentStore):
598
600
  if attrs != attrs_orig:
599
601
  arr.attrs.put(attrs)
600
602
 
601
- def _update_EAR_start(self, EAR_id: int, s_time: datetime, s_snap: Dict):
603
+ def _update_EAR_start(self, EAR_id: int, s_time: datetime, s_snap: Dict, s_hn: str):
602
604
  arr = self._get_EARs_arr(mode="r+")
603
605
  attrs_orig = arr.attrs.asdict()
604
606
  attrs = copy.deepcopy(attrs_orig)
@@ -607,6 +609,7 @@ class ZarrPersistentStore(PersistentStore):
607
609
  EAR_i = EAR_i.update(
608
610
  start_time=s_time,
609
611
  snapshot_start=s_snap,
612
+ run_hostname=s_hn,
610
613
  )
611
614
  arr[EAR_id] = EAR_i.encode(attrs, self.ts_fmt)
612
615
 
@@ -10,7 +10,6 @@ class NullScheduler:
10
10
 
11
11
  def __init__(
12
12
  self,
13
- submit_cmd=None,
14
13
  shell_args=None,
15
14
  shebang_args=None,
16
15
  options=None,
@@ -19,6 +18,10 @@ class NullScheduler:
19
18
  self.shell_args = shell_args or self.DEFAULT_SHELL_ARGS
20
19
  self.options = options or []
21
20
 
21
+ @property
22
+ def unique_properties(self):
23
+ return (self.__class__.__name__,)
24
+
22
25
  def __eq__(self, other) -> bool:
23
26
  if type(self) != type(other):
24
27
  return False
@@ -64,6 +67,10 @@ class Scheduler(NullScheduler):
64
67
  self.array_switch = array_switch or self.DEFAULT_ARRAY_SWITCH
65
68
  self.array_item_var = array_item_var or self.DEFAULT_ARRAY_ITEM_VAR
66
69
 
70
+ @property
71
+ def unique_properties(self):
72
+ return (self.__class__.__name__, self.submit_cmd, self.show_cmd, self.del_cmd)
73
+
67
74
  def format_switch(self, switch):
68
75
  return f"{self.js_cmd} {switch}"
69
76
 
@@ -416,7 +416,11 @@ class SlurmPosix(Scheduler):
416
416
  _arr_idx = []
417
417
  for i_range_str in arr_idx.strip("[]").split(","):
418
418
  if "-" in i_range_str:
419
- i_args = [int(j) - 1 for j in i_range_str.split("-")]
419
+ range_parts = i_range_str.split("-")
420
+ if "%" in range_parts[1]:
421
+ # indicates max concurrent array items; not needed
422
+ range_parts[1] = range_parts[1].split("%")[0]
423
+ i_args = [int(j) - 1 for j in range_parts]
420
424
  _arr_idx.extend(list(range(i_args[0], i_args[1] + 1)))
421
425
  else:
422
426
  _arr_idx.append(int(i_range_str) - 1)
@@ -123,7 +123,12 @@ class Submission(JSONLike):
123
123
  filter_exec = {j: getattr(js_j.resources, j) for j in filterable}
124
124
  exec_instances = exec_i.filter_instances(**filter_exec)
125
125
  if not exec_instances:
126
- raise MissingEnvironmentExecutableInstanceError
126
+ raise MissingEnvironmentExecutableInstanceError(
127
+ f"No matching executable instances found for executable "
128
+ f"{exec_i_lab!r} of environment {env_lab!r} for jobscript "
129
+ f"index {js_idx_j!r} with requested resources "
130
+ f"{filter_exec!r}."
131
+ )
127
132
 
128
133
  # save env definitions to the environments attribute:
129
134
  self._environments = self.app.EnvironmentsList(envs)
@@ -327,14 +332,24 @@ class Submission(JSONLike):
327
332
  def get_unique_schedulers_of_jobscripts(
328
333
  jobscripts: List[Jobscript],
329
334
  ) -> Dict[Tuple[Tuple[int, int]], Scheduler]:
330
- """Get unique schedulers and which of the passed jobscripts they correspond to."""
335
+ """Get unique schedulers and which of the passed jobscripts they correspond to.
336
+
337
+ Uniqueness is determined only by the `Scheduler.unique_properties` tuple.
338
+
339
+ """
331
340
  js_idx = []
332
341
  schedulers = []
342
+
343
+ # list of tuples of scheduler properties we consider to determine "uniqueness",
344
+ # with the first string being the scheduler type (class name):
345
+ seen_schedulers = []
346
+
333
347
  for js in jobscripts:
334
- if js.scheduler not in schedulers:
348
+ if js.scheduler.unique_properties not in seen_schedulers:
349
+ seen_schedulers.append(js.scheduler.unique_properties)
335
350
  schedulers.append(js.scheduler)
336
351
  js_idx.append([])
337
- sched_idx = schedulers.index(js.scheduler)
352
+ sched_idx = seen_schedulers.index(js.scheduler.unique_properties)
338
353
  js_idx[sched_idx].append((js.submission.index, js.index))
339
354
 
340
355
  sched_js_idx = dict(zip((tuple(i) for i in js_idx), schedulers))
@@ -33,6 +33,32 @@ def test_script_direct_in_direct_out(null_config, tmp_path):
33
33
  assert wk.tasks[0].elements[0].outputs.p2.value == p1_val + 100
34
34
 
35
35
 
36
+ @pytest.mark.integration
37
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
38
+ def test_script_direct_sub_param_in_direct_out(null_config, tmp_path):
39
+ s1 = hf.TaskSchema(
40
+ objective="t1",
41
+ inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
42
+ outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
43
+ actions=[
44
+ hf.Action(
45
+ script="<<script:main_script_test_direct_sub_param_in_direct_out.py>>",
46
+ script_data_in={"p1.a": "direct"},
47
+ script_data_out="direct",
48
+ script_exe="python_script",
49
+ environments=[hf.ActionEnvironment(environment="python_env")],
50
+ )
51
+ ],
52
+ )
53
+ p1_val = {"a": 101}
54
+ t1 = hf.Task(schema=s1, inputs={"p1": p1_val})
55
+ wk = hf.Workflow.from_template_data(
56
+ tasks=[t1], template_name="main_script_test", path=tmp_path
57
+ )
58
+ wk.submit(wait=True, add_to_known=False)
59
+ assert wk.tasks[0].elements[0].outputs.p2.value == p1_val["a"] + 100
60
+
61
+
36
62
  @pytest.mark.integration
37
63
  @pytest.mark.skipif("hf.run_time_info.is_frozen")
38
64
  def test_script_direct_in_direct_out_single_label(null_config, tmp_path):
@@ -170,6 +196,47 @@ def test_script_json_in_json_out_labels(null_config, tmp_path):
170
196
  assert wk.tasks[0].elements[0].outputs.p2.value == p1_1_val + p1_2_val
171
197
 
172
198
 
199
+ @pytest.mark.integration
200
+ @pytest.mark.skipif("hf.run_time_info.is_frozen")
201
+ def test_script_json_sub_param_in_json_out_labels(null_config, tmp_path):
202
+ p1_label_1 = "one"
203
+ p1_label_2 = "two"
204
+ s1 = hf.TaskSchema(
205
+ objective="t1",
206
+ inputs=[
207
+ hf.SchemaInput(
208
+ parameter=hf.Parameter("p1"),
209
+ labels={p1_label_1: {}, p1_label_2: {}},
210
+ multiple=True,
211
+ )
212
+ ],
213
+ outputs=[hf.SchemaOutput(parameter=hf.Parameter("p2"))],
214
+ actions=[
215
+ hf.Action(
216
+ script="<<script:main_script_test_json_sub_param_in_json_out_labels.py>>",
217
+ script_data_in={"p1[one].a": "json", "p1[two]": "json"},
218
+ script_data_out="json",
219
+ script_exe="python_script",
220
+ environments=[hf.ActionEnvironment(environment="python_env")],
221
+ )
222
+ ],
223
+ )
224
+ a_val = 101
225
+ p1_2_val = 201
226
+ t1 = hf.Task(
227
+ schema=s1,
228
+ inputs={
229
+ f"p1[{p1_label_1}]": {"a": a_val},
230
+ f"p1[{p1_label_2}]": p1_2_val,
231
+ },
232
+ )
233
+ wk = hf.Workflow.from_template_data(
234
+ tasks=[t1], template_name="main_script_test", path=tmp_path
235
+ )
236
+ wk.submit(wait=True, add_to_known=False)
237
+ assert wk.tasks[0].elements[0].outputs.p2.value == a_val + p1_2_val
238
+
239
+
173
240
  @pytest.mark.integration
174
241
  @pytest.mark.skipif("hf.run_time_info.is_frozen")
175
242
  def test_script_json_and_direct_in_json_out(null_config, tmp_path):
@@ -1,4 +1,8 @@
1
1
  import pytest
2
+
3
+ from valida.conditions import Value
4
+
5
+
2
6
  from hpcflow.app import app as hf
3
7
  from hpcflow.sdk.core.errors import LoopAlreadyExistsError
4
8
  from hpcflow.sdk.core.test_utils import P1_parameter_cls, make_workflow
@@ -381,6 +385,54 @@ def test_wk_loop_input_sources_iterable_param_default(null_config, tmp_path):
381
385
  assert t1_iter_2["inputs.p1"] == t1_iter_1["outputs.p1"]
382
386
 
383
387
 
388
+ def test_wk_loop_input_sources_iterable_param_default_conditional_action(
389
+ null_config, tmp_path
390
+ ):
391
+ act_env = hf.ActionEnvironment("null_env")
392
+ ts1 = hf.TaskSchema(
393
+ objective="t1",
394
+ inputs=[
395
+ hf.SchemaInput("p1", default_value=1),
396
+ hf.SchemaInput("p2", default_value=None),
397
+ ],
398
+ outputs=[hf.SchemaOutput("p1")],
399
+ actions=[
400
+ hf.Action(
401
+ commands=[
402
+ hf.Command(
403
+ "Write-Output ((<<parameter:p1>> + 10))",
404
+ stdout="<<int(parameter:p1)>>",
405
+ )
406
+ ],
407
+ environments=[act_env],
408
+ ),
409
+ hf.Action(
410
+ commands=[hf.Command("Write-Output ((<<parameter:p2>> + 10))")],
411
+ environments=[act_env],
412
+ rules=[
413
+ hf.ActionRule(path="inputs.p2", condition=Value.not_equal_to(None))
414
+ ],
415
+ ),
416
+ ],
417
+ )
418
+ wk = hf.Workflow.from_template_data(
419
+ template_name="test_loop",
420
+ path=tmp_path,
421
+ tasks=[hf.Task(schema=ts1, inputs={"p1": 101})],
422
+ )
423
+ wk.add_loop(hf.Loop(tasks=[0], num_iterations=3))
424
+ # first iteration should be the default value, second and third iterations should
425
+ # be from previous iteration outputs:
426
+ t1_iter_0 = wk.tasks.t1.elements[0].iterations[0].get_data_idx()
427
+ t1_iter_1 = wk.tasks.t1.elements[0].iterations[1].get_data_idx()
428
+ t1_iter_2 = wk.tasks.t1.elements[0].iterations[2].get_data_idx()
429
+
430
+ assert t1_iter_0["inputs.p1"] != t1_iter_1["inputs.p1"]
431
+ assert t1_iter_1["inputs.p1"] != t1_iter_2["inputs.p1"]
432
+ assert t1_iter_1["inputs.p1"] == t1_iter_0["outputs.p1"]
433
+ assert t1_iter_2["inputs.p1"] == t1_iter_1["outputs.p1"]
434
+
435
+
384
436
  def test_wk_loop_input_sources_including_non_iteration_task_source_with_groups(
385
437
  null_config, tmp_path
386
438
  ):
@@ -192,7 +192,7 @@ def test_get_input_values_subset(null_config, tmp_path):
192
192
  template_name="temp",
193
193
  )
194
194
  run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
195
- assert run.get_input_values(inputs=("p1")) == {"p1": 101}
195
+ assert run.get_input_values(inputs=("p1",)) == {"p1": 101}
196
196
 
197
197
 
198
198
  def test_get_input_values_subset_labelled_label_dict_False(null_config, tmp_path):
@@ -226,7 +226,7 @@ def test_get_input_values_subset_labelled_label_dict_False(null_config, tmp_path
226
226
  template_name="temp",
227
227
  )
228
228
  run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
229
- assert run.get_input_values(inputs=("p1[one]"), label_dict=False) == {"p1[one]": 101}
229
+ assert run.get_input_values(inputs=("p1[one]",), label_dict=False) == {"p1[one]": 101}
230
230
 
231
231
 
232
232
  def test_get_input_values_subset_labelled_label_dict_True(null_config, tmp_path):
@@ -260,7 +260,7 @@ def test_get_input_values_subset_labelled_label_dict_True(null_config, tmp_path)
260
260
  template_name="temp",
261
261
  )
262
262
  run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
263
- assert run.get_input_values(inputs=("p1[one]"), label_dict=True) == {
263
+ assert run.get_input_values(inputs=("p1[one]",), label_dict=True) == {
264
264
  "p1": {"one": 101}
265
265
  }
266
266
 
@@ -25,3 +25,14 @@ def test_parse_job_ID_array_simple_mixed_range():
25
25
  "30627658",
26
26
  [4, 7, 8, 9],
27
27
  )
28
+
29
+
30
+ def test_parse_job_ID_array_simple_range_with_max_concurrent():
31
+ assert SlurmPosix._parse_job_IDs("3397752_[9-11%2]") == ("3397752", [8, 9, 10])
32
+
33
+
34
+ def test_parse_job_ID_array_simple_multiple_range_max_concurrent():
35
+ assert SlurmPosix._parse_job_IDs("49203_[3-5%1,9-11%2]") == (
36
+ "49203",
37
+ [2, 3, 4, 8, 9, 10],
38
+ )
@@ -402,3 +402,117 @@ def test_abort_EARs_file_update_with_existing_abort(null_config, tmp_path):
402
402
 
403
403
  lines_exp = ["0", "1", "1"]
404
404
  assert lines == "\n".join(lines_exp) + "\n"
405
+
406
+
407
+ def test_unique_schedulers_one_direct(new_null_config, tmp_path):
408
+ t1 = hf.Task(
409
+ schema=hf.task_schemas.test_t1_conditional_OS,
410
+ inputs={"p1": 1},
411
+ )
412
+ t2 = hf.Task(
413
+ schema=hf.task_schemas.test_t1_conditional_OS,
414
+ inputs={"p1": 1},
415
+ )
416
+ wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
417
+ wk = hf.Workflow.from_template(
418
+ template=wkt,
419
+ path=tmp_path,
420
+ )
421
+ sub = wk.add_submission()
422
+ scheds = sub.get_unique_schedulers()
423
+
424
+ assert len(scheds) == 1
425
+
426
+
427
+ def test_unique_schedulers_one_direct_distinct_resources(new_null_config, tmp_path):
428
+ t1 = hf.Task(
429
+ schema=hf.task_schemas.test_t1_conditional_OS,
430
+ inputs={"p1": 1},
431
+ resources={"any": {"num_cores": 1}},
432
+ )
433
+ t2 = hf.Task(
434
+ schema=hf.task_schemas.test_t1_conditional_OS,
435
+ inputs={"p1": 1},
436
+ resources={"any": {"num_cores": 2}},
437
+ )
438
+ wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
439
+ wk = hf.Workflow.from_template(
440
+ template=wkt,
441
+ path=tmp_path,
442
+ )
443
+ sub = wk.add_submission()
444
+ scheds = sub.get_unique_schedulers()
445
+
446
+ assert len(scheds) == 1
447
+
448
+
449
+ @pytest.mark.slurm
450
+ def test_unique_schedulers_one_SLURM(new_null_config, tmp_path):
451
+ hf.config.add_scheduler("slurm")
452
+ t1 = hf.Task(
453
+ schema=hf.task_schemas.test_t1_conditional_OS,
454
+ inputs={"p1": 1},
455
+ resources={"any": {"scheduler": "slurm"}},
456
+ )
457
+ t2 = hf.Task(
458
+ schema=hf.task_schemas.test_t1_conditional_OS,
459
+ inputs={"p1": 1},
460
+ resources={"any": {"scheduler": "slurm"}},
461
+ )
462
+ wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
463
+ wk = hf.Workflow.from_template(
464
+ template=wkt,
465
+ path=tmp_path,
466
+ )
467
+ sub = wk.add_submission()
468
+ scheds = sub.get_unique_schedulers()
469
+
470
+ assert len(scheds) == 1
471
+
472
+
473
+ @pytest.mark.slurm
474
+ def test_unique_schedulers_one_SLURM_distinct_resources(new_null_config, tmp_path):
475
+ hf.config.add_scheduler("slurm")
476
+ t1 = hf.Task(
477
+ schema=hf.task_schemas.test_t1_conditional_OS,
478
+ inputs={"p1": 1},
479
+ resources={"any": {"scheduler": "slurm", "num_cores": 1}},
480
+ )
481
+ t2 = hf.Task(
482
+ schema=hf.task_schemas.test_t1_conditional_OS,
483
+ inputs={"p1": 1},
484
+ resources={"any": {"scheduler": "slurm", "num_cores": 2}},
485
+ )
486
+ wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
487
+ wk = hf.Workflow.from_template(
488
+ template=wkt,
489
+ path=tmp_path,
490
+ )
491
+ sub = wk.add_submission()
492
+ scheds = sub.get_unique_schedulers()
493
+
494
+ assert len(scheds) == 1
495
+
496
+
497
+ @pytest.mark.slurm
498
+ def test_unique_schedulers_two_direct_and_SLURM(new_null_config, tmp_path):
499
+ hf.config.add_scheduler("slurm")
500
+ t1 = hf.Task(
501
+ schema=hf.task_schemas.test_t1_conditional_OS,
502
+ inputs={"p1": 1},
503
+ resources={"any": {"scheduler": "direct"}},
504
+ )
505
+ t2 = hf.Task(
506
+ schema=hf.task_schemas.test_t1_conditional_OS,
507
+ inputs={"p1": 1},
508
+ resources={"any": {"scheduler": "slurm"}},
509
+ )
510
+ wkt = hf.WorkflowTemplate(name="temp", tasks=[t1, t2])
511
+ wk = hf.Workflow.from_template(
512
+ template=wkt,
513
+ path=tmp_path,
514
+ )
515
+ sub = wk.add_submission()
516
+ scheds = sub.get_unique_schedulers()
517
+
518
+ assert len(scheds) == 2
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: hpcflow-new2
3
- Version: 0.2.0a144
3
+ Version: 0.2.0a147
4
4
  Summary: Computational workflow management
5
5
  License: MIT
6
6
  Author: aplowman
@@ -1,7 +1,7 @@
1
1
  hpcflow/__init__.py,sha256=WIETuRHeOp2SqUqHUzpjQ-lk9acbYv-6aWOhZPRdlhs,64
2
2
  hpcflow/__pyinstaller/__init__.py,sha256=YOzBlPSck6slucv6lJM9K80JtsJWxXRL00cv6tRj3oc,98
3
3
  hpcflow/__pyinstaller/hook-hpcflow.py,sha256=SeMopsPkhCyd9gqIrzwFNRj3ZlkUlUYl-74QYz61mo4,1089
4
- hpcflow/_version.py,sha256=o1AqdGt48zT7wydmWzEDr9WDxvnyNLXQyyL88_tFzCE,26
4
+ hpcflow/_version.py,sha256=LHmkRRiVDd7QVfxPhXIqphmBf8p2Cl9Q2FLfe8IJSM8,26
5
5
  hpcflow/app.py,sha256=GQsMq_sjjXxMLLiIPF1ZvyapW_7IgsxALCCwMiqmC8I,1520
6
6
  hpcflow/cli.py,sha256=G2J3D9v6MnMWOWMMWK6UEKLn_6wnV9lT_qygEBBxg-I,66
7
7
  hpcflow/data/demo_data_manifest/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -14,6 +14,7 @@ hpcflow/data/scripts/generate_t1_file_01.py,sha256=QtrtIC-lDZynzhIUxjJPTSAjLffSu
14
14
  hpcflow/data/scripts/main_script_test_direct_in_direct_out.py,sha256=5zKBGKdkY1vPnu2-_rsENRqyJeiCKiH9IjBbAnmeLHg,123
15
15
  hpcflow/data/scripts/main_script_test_direct_in_direct_out_all_iters_test.py,sha256=p22arJAO3rMJjfEA7s0Acm0l0L_XDICJeMoNXpmk79A,463
16
16
  hpcflow/data/scripts/main_script_test_direct_in_direct_out_labels.py,sha256=ORVsD5ZVzPUI42kqp2ndDNgYt0yGYc4DgyhNvZZToI0,175
17
+ hpcflow/data/scripts/main_script_test_direct_sub_param_in_direct_out.py,sha256=pQ-BD1FmqTIHeCaAakCr1M61cWb1Fjinzr1qA3HX3ik,131
17
18
  hpcflow/data/scripts/main_script_test_hdf5_in_obj.py,sha256=ztL0mms6bmB-fuzRpW7bTFRar3fFHP_kIM0e4iX53As,233
18
19
  hpcflow/data/scripts/main_script_test_hdf5_out_obj.py,sha256=yios8XxvnkXe6nY5GNW3u947b4XNnp5KXIajcgCDzhI,267
19
20
  hpcflow/data/scripts/main_script_test_json_and_direct_in_json_out.py,sha256=Y1kpKu59DbjqKW2i4QcmfHzdXYmiAEvNzqIqBU_Z4fg,336
@@ -22,6 +23,7 @@ hpcflow/data/scripts/main_script_test_json_in_json_out.py,sha256=1VRajQQdtpWSudw
22
23
  hpcflow/data/scripts/main_script_test_json_in_json_out_labels.py,sha256=3lJHyrwKfqe9ReUyUAmVihR6XlKkkews4oMRT9227pU,404
23
24
  hpcflow/data/scripts/main_script_test_json_in_obj.py,sha256=pHvaEAlKuk7Baabmaw-T6OUIhf712kbow5AESiyKdQI,225
24
25
  hpcflow/data/scripts/main_script_test_json_out_obj.py,sha256=9715rSsbslIB3ZuF22pBhTm-ux9eVV3a7Qj_f8tc6GA,214
26
+ hpcflow/data/scripts/main_script_test_json_sub_param_in_json_out_labels.py,sha256=GkKqWb-A32ahgX5L_uDWZFfjC6IhBOKKGGP34N2O2tA,402
25
27
  hpcflow/data/scripts/parse_t1_file_01.py,sha256=2BVotqY3sMc3FXSUPCvnzSHfymyGI9wYYOYrTrXngyI,125
26
28
  hpcflow/data/template_components/__init__.py,sha256=qv0IzZGQho4aBR2eld5rNFiYlb8nDcTvkAwa982jDWs,83
27
29
  hpcflow/data/template_components/command_files.yaml,sha256=9y82MoHkn6Vgi8V_SQgCWTLm09h5SlfbD8InRoJB3Qo,356
@@ -42,14 +44,14 @@ hpcflow/sdk/config/config.py,sha256=9DZi7sebFJUU4gGfJIrKijNAOtWy015Rc8my8hF1mXo,
42
44
  hpcflow/sdk/config/config_file.py,sha256=JlMcprj0aujFVk8552ahP2f8EXB0tglMaHwzbcGZH6w,12373
43
45
  hpcflow/sdk/config/errors.py,sha256=2D7HJ1dbyeoD3xk4MuaGSsbJsUyQzyw8kaThEBZfP2I,6876
44
46
  hpcflow/sdk/core/__init__.py,sha256=GcIklEsXy3M5PWpmxyhd2KoI0u6HjXRIjD_aR1bgRjo,215
45
- hpcflow/sdk/core/actions.py,sha256=fwlCEazZohZdhnMnQGUrSbe6xZs9WnBJTTbq1AALl9s,73691
47
+ hpcflow/sdk/core/actions.py,sha256=GPUchju1f1BI0Ju8lPeiK3EUp1yHERcegVsZf0ZEcGY,74127
46
48
  hpcflow/sdk/core/command_files.py,sha256=oEW6g6f_cQFmRAgP1DTWPZPhufXcRi56yJZWaS8fU28,18161
47
49
  hpcflow/sdk/core/commands.py,sha256=-Tiu7zVVwWr1xiTXVB9oH3E4g09ebRRtHSRrMdFDCRY,12060
48
- hpcflow/sdk/core/element.py,sha256=0iSSpgYKpVEvKv7xj9gJgo6y9ZB4pILTH3vTUfc73Oo,45383
50
+ hpcflow/sdk/core/element.py,sha256=nKKvUDP-rLECq-1nP4Fv0OfUA7GKmE3df37ku0ZyKzQ,45408
49
51
  hpcflow/sdk/core/environment.py,sha256=DGUz1NvliKh6opP0IueGHD69rn_8wFLhDsq6kAmEgM4,4849
50
52
  hpcflow/sdk/core/errors.py,sha256=wtoeyAaYPvRPc4W9AatdPqmOedShJgu__CLO8r7ngdI,8651
51
53
  hpcflow/sdk/core/json_like.py,sha256=LRZsUd1tn8zXC8fESeiXs7Eko-VdnB8zcXiqixKVcZM,18874
52
- hpcflow/sdk/core/loop.py,sha256=0f31GxqyJAR_Fg6Z-XbiF7oA5GW3cAxRY1MWSQ-oZIY,20780
54
+ hpcflow/sdk/core/loop.py,sha256=5Ai_HHCzM21_IOvtgobE8yi_b9dc9jWQijt2LyI3PlM,21865
53
55
  hpcflow/sdk/core/object_list.py,sha256=bhJc-U4BpGDQMW4x0sQlVOlgNH2XIeVS4Re0u_x0l80,19866
54
56
  hpcflow/sdk/core/parallel.py,sha256=LI-g-qOuOR1oaEUWVT0qW0hmiP9hsJyUP8_IfSTKYYo,95
55
57
  hpcflow/sdk/core/parameters.py,sha256=U4nh7rSZlafgq_zR1f6qn0qqBh5tSjNZ45Yp2gseb0Q,64086
@@ -77,27 +79,27 @@ hpcflow/sdk/helper/helper.py,sha256=MkjYKHox1F4XOpy-20sCCDUTWUbQY84QpWZkcpSq9n8,
77
79
  hpcflow/sdk/helper/watcher.py,sha256=hLqgwXtZw-6ihNUUcWYnZw8TCyD_AdhYE7abOrO2r_0,4003
78
80
  hpcflow/sdk/log.py,sha256=6bqL67oCSLgEBE3xb3N46QWEuuVJLF4wXYu2Ys0p3ss,1925
79
81
  hpcflow/sdk/persistence/__init__.py,sha256=IzWycfiO6rDn_7Kocw4Df5ETe9BSoaqqxG7Yp4FW_ls,900
80
- hpcflow/sdk/persistence/base.py,sha256=s-Bkm9CR1bkAZmLCOytcV5A17ou35imyRP3c1ao-j9g,55793
81
- hpcflow/sdk/persistence/json.py,sha256=DAWNYRM5dPFwEIUK8oCXMihj-6q4dt6H_U-F-IbjrBE,19400
82
- hpcflow/sdk/persistence/pending.py,sha256=PzM4IiKxNuBVOd1nk7X8gRhAX6pqV7opBzdk3oIG16A,22879
82
+ hpcflow/sdk/persistence/base.py,sha256=dHJuSVKY1UuenOLBKeuE62AqsuGPnRu7ayg_w476e98,56206
83
+ hpcflow/sdk/persistence/json.py,sha256=aWj5R9PsGzudkD5N85W3vQF2tifekgnAbAtw-OK6ixg,19465
84
+ hpcflow/sdk/persistence/pending.py,sha256=8XaxuOB2QAodoYP61d3j0cxJtZ4Ek1qYV0KgPj5cUTs,22934
83
85
  hpcflow/sdk/persistence/store_resource.py,sha256=oEyocRqa8Uym-57UFosrwate-Xw9O7i2FM82TxHc4m0,4307
84
86
  hpcflow/sdk/persistence/utils.py,sha256=woLFUXYkdZQqXeBcoDjVuPEq2xdaWVhGO0YP6Qvc2Ww,1517
85
- hpcflow/sdk/persistence/zarr.py,sha256=CkDDFFKYWOjP5Xh2Due9Q9sNa7BNJxLoxKMQq5L0lyE,41420
87
+ hpcflow/sdk/persistence/zarr.py,sha256=SA2z7zeZcrwmlxKd3B5y23Jv3-Ih0t6bsUMzoNfiPHY,41534
86
88
  hpcflow/sdk/runtime.py,sha256=-n8OHcbhSVCGGlyWcJvadpsUAIJzzuWVXkZav1RQSio,9555
87
89
  hpcflow/sdk/submission/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
88
90
  hpcflow/sdk/submission/jobscript.py,sha256=XoFiD6qbWOVG30bRtiAfys-erTbv4g6PWGRxSux0kP4,44170
89
91
  hpcflow/sdk/submission/jobscript_info.py,sha256=PsaOENLpp2OUKTQO3wRO64TPOgvzdFh9gfNu56X-uBw,1164
90
- hpcflow/sdk/submission/schedulers/__init__.py,sha256=GmswLUpzVb_t_CdUgEMfhatWwT-_zWm8NidjgDYA3uo,2436
92
+ hpcflow/sdk/submission/schedulers/__init__.py,sha256=4DJBmYNXC9GWuQXC3Ank64V6TBURLqtngb_okLuI9GY,2636
91
93
  hpcflow/sdk/submission/schedulers/direct.py,sha256=J4naNvWJ_4UfjiXu46cdM4qI19p2mKSGngLaD2wKU3s,5539
92
94
  hpcflow/sdk/submission/schedulers/sge.py,sha256=k9K8Ai5IIXj8-nZn75jj7a3Hv7bklSLTETbfE4MLvi0,10612
93
- hpcflow/sdk/submission/schedulers/slurm.py,sha256=vXE3h7ecv1FUOg8z71XumavDaKY7mOtSi5P_u1wiVmE,21863
95
+ hpcflow/sdk/submission/schedulers/slurm.py,sha256=tN3-92Nh0DjDnEEhWN8iu6eBrA0t8Gs_IfQlR8rYRgw,22116
94
96
  hpcflow/sdk/submission/schedulers/utils.py,sha256=Ar3DYO6pmS9S-gZWBCsB6afHvgaReqgAaQ719NWGd2U,364
95
97
  hpcflow/sdk/submission/shells/__init__.py,sha256=dN5pg-5OoeTlqOMtK-0N4ZxbLUgzjIm__dnPnKxAA1k,1169
96
98
  hpcflow/sdk/submission/shells/base.py,sha256=AszYb14J7QMHlttRFdM9GJkzf6USERhfWJ10jwppAb8,2302
97
99
  hpcflow/sdk/submission/shells/bash.py,sha256=L29a3nYM3OqhyYJfBMlltGjTa_vBgrVFI2K6Ujs2aHM,10920
98
100
  hpcflow/sdk/submission/shells/os_version.py,sha256=o185IsrF6JsBMr9bZU99ZrDguH342DeHWeclUuNrgeU,3182
99
101
  hpcflow/sdk/submission/shells/powershell.py,sha256=pNoj8mUbUXpWKv7i4Y1BL5EFgA6h631ckXEJMZDdZFc,9066
100
- hpcflow/sdk/submission/submission.py,sha256=9JfZixZD1jlbrD6LRzgMJSLDmifnfsA7MFthCkk-LY4,20044
102
+ hpcflow/sdk/submission/submission.py,sha256=Y5sc8kf9WNTJ0uLM9wRCKkiLodewq2oOsrlLYQlBamw,20769
101
103
  hpcflow/sdk/typing.py,sha256=p1duIXcWh5FRNZIGUjsTcnqjGDg2-nCpfNicrut-VPk,327
102
104
  hpcflow/tests/conftest.py,sha256=38FCWeZdwoGI1Nh1cHG9afp2K8HJQ4sUE_h3gE26Qe4,3479
103
105
  hpcflow/tests/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -108,7 +110,7 @@ hpcflow/tests/data/workflow_1_wsl.yaml,sha256=-n17Ab2-jj03weAeyYOfafrE0eKdPT0YMt
108
110
  hpcflow/tests/data/workflow_test_run_abort.yaml,sha256=K0-uOqL_H1ebgrk7ohrcLGcY_VRYw1F16YL_RIwd1PM,1277
109
111
  hpcflow/tests/schedulers/direct_linux/test_direct_linux_submission.py,sha256=pgHHG4iak0tx-1JTtpo8sCIvcZF2XayzEysjqWa_9LM,456
110
112
  hpcflow/tests/schedulers/slurm/test_slurm_submission.py,sha256=IpLq4TBwhK8_3KrvsySctIn4rA_1oyyWLFonzL28o4Q,403
111
- hpcflow/tests/scripts/test_main_scripts.py,sha256=vfzCtav_vcd7Jh9Qt2DhG2HI8Op2rVV2tca7mbsymP0,12542
113
+ hpcflow/tests/scripts/test_main_scripts.py,sha256=KZLI0q6mphD7aCwXvKPyu7hhXvoftDtYGesP7e6RONE,14891
112
114
  hpcflow/tests/shells/wsl/test_wsl_submission.py,sha256=IrpvsxVfsQCUmS8KKn7w9DiVFR8z_ak_IWyAd1E0KKc,516
113
115
  hpcflow/tests/unit/test_action.py,sha256=HYscdTgbh_ExO2dk_Sapo4Cz8SirQtEz5B9uKFYohkw,19907
114
116
  hpcflow/tests/unit/test_action_rule.py,sha256=vX7hMo_9AO5iUGWdDF8uP8rM4jghZidusiY4ZvNcEKo,556
@@ -123,17 +125,17 @@ hpcflow/tests/unit/test_element_set.py,sha256=-aPBcHgubEMvI-lFfKatGm6jZqFfs8zQY9
123
125
  hpcflow/tests/unit/test_input_source.py,sha256=QKAqIpMk-idbSvoWMhjFLroXzBtsWFL4yEG3Frkvq70,20467
124
126
  hpcflow/tests/unit/test_input_value.py,sha256=hHoe9rlCbfFvqqkAu4ylu95YVaAhinthXqTCUNEnSm8,5454
125
127
  hpcflow/tests/unit/test_json_like.py,sha256=aGCiGfT-tNiFu3yzW6d_T-oDc5QLwSUgq3pN3jFhyF0,29939
126
- hpcflow/tests/unit/test_loop.py,sha256=ENSa2APGO5p25FdFYHSr4uzzqNcMWNvdWbNg1ftm8oA,19227
128
+ hpcflow/tests/unit/test_loop.py,sha256=vBnCnoweiFIi2T68bu6PCQ4yH829pxK6Oe4Comg0bHo,21060
127
129
  hpcflow/tests/unit/test_object_list.py,sha256=nDpbRpCu4XqoYxKMr1_QmDS1s2_6nQOpIEBRHSAXoVg,3049
128
130
  hpcflow/tests/unit/test_parameter.py,sha256=rckhftwuJGBm5BSN-MuCBpaSD0-cUs6SBi-MppxLiwg,6056
129
131
  hpcflow/tests/unit/test_persistence.py,sha256=UEAaDamuOdmb3nJEJ2v3S23mFG8u_Nai2AED2Ydwv5U,8005
130
132
  hpcflow/tests/unit/test_resources.py,sha256=FycwRm34woa5H-UcmXzOHzCCnRmO1U6OWg5z_dk58ic,7896
131
133
  hpcflow/tests/unit/test_run.py,sha256=uvG2BbVOD0JJAJCbdh0MMRJME8tVzOm7H4PTLzPLWZY,2613
132
134
  hpcflow/tests/unit/test_runtime.py,sha256=HjHPTS3UkX1LcwheFgpp4px_VlRis8KAE2HoeqxRbA8,322
133
- hpcflow/tests/unit/test_schema_input.py,sha256=bojnj1VaGi73WBpVzzBO2navbXcSqcmYwsgaRT10tP8,12081
135
+ hpcflow/tests/unit/test_schema_input.py,sha256=spkTtvNuheh-y29Tsx7YRX6y3dV80vXx0hcg0jVfMp4,12084
134
136
  hpcflow/tests/unit/test_shell.py,sha256=FDtQ9fHRhSKiVtxMJ8BRisoeSvvk8zmJndTB4LlhqGc,3442
135
- hpcflow/tests/unit/test_slurm.py,sha256=EVyg7uHhDWL6v37D2wnZxYrdkMlnTC2fHgFlR3hFG14,726
136
- hpcflow/tests/unit/test_submission.py,sha256=m1z0igppis8D1JgyhUG82zTp2LQxpnI9WiLvpb8-GbM,12501
137
+ hpcflow/tests/unit/test_slurm.py,sha256=ewfNuXXUEEelAxcd7MBbAQ-RCvU8xBenHTAyfXYF-R0,1064
138
+ hpcflow/tests/unit/test_submission.py,sha256=E8ku48TeCpAQlYDci30D-hf0YvzbT3jpm-emAaS02Is,15764
137
139
  hpcflow/tests/unit/test_task.py,sha256=94TwyjlhKMRRXTQjys2a1PiK7A-rCzhnvrkk4vRz39I,70000
138
140
  hpcflow/tests/unit/test_task_schema.py,sha256=7a7o42gQhrZPMXfH0a6sGzFCJnuFrbDEl9u3u_bFsgw,3624
139
141
  hpcflow/tests/unit/test_utils.py,sha256=0qYbEC6vbVZizlyZgjDoEOrSAWQ-OYYl4ZEJVuEcg1o,11940
@@ -143,7 +145,7 @@ hpcflow/tests/unit/test_workflow_template.py,sha256=uDaZMwSEW6yj7R1u3SG25UM7TPdw
143
145
  hpcflow/tests/workflows/test_jobscript.py,sha256=9sp1o0g72JZbv2QlOl5v7wCZEFjotxiIKGNUxVaFgaA,724
144
146
  hpcflow/tests/workflows/test_workflows.py,sha256=xai6FRtGqG4lStJk6KmsqPUSuvqs9FrsBOxMVALshIs,13400
145
147
  hpcflow/viz_demo.ipynb,sha256=1QdnVsk72vihv2L6hOGyk318uEa22ZSgGxQCa7hW2oo,6238
146
- hpcflow_new2-0.2.0a144.dist-info/METADATA,sha256=xlqmfgn5ROFkqIegIoG5sX804_rg-OrP0fJfQCTbSfQ,1921
147
- hpcflow_new2-0.2.0a144.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
148
- hpcflow_new2-0.2.0a144.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
149
- hpcflow_new2-0.2.0a144.dist-info/RECORD,,
148
+ hpcflow_new2-0.2.0a147.dist-info/METADATA,sha256=KhBTQbwfUdPHbvEW7H2e8K5RvckVK6ajfX2lmtpstco,1921
149
+ hpcflow_new2-0.2.0a147.dist-info/WHEEL,sha256=kLuE8m1WYU0Ig0_YEGrXyTtiJvKPpLpDEiChiNyei5Y,88
150
+ hpcflow_new2-0.2.0a147.dist-info/entry_points.txt,sha256=aoGtCnFdfPcXfBdu2zZyMOJoz6fPgdR0elqsgrE-USU,106
151
+ hpcflow_new2-0.2.0a147.dist-info/RECORD,,