hpcflow-new2 0.2.0a190__py3-none-any.whl → 0.2.0a200__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (132)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +1 -0
  2. hpcflow/_version.py +1 -1
  3. hpcflow/data/scripts/bad_script.py +2 -0
  4. hpcflow/data/scripts/do_nothing.py +2 -0
  5. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  6. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  7. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  8. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  9. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  10. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  11. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  12. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  13. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  14. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  15. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  16. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  17. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  18. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  19. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  20. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  21. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  22. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  23. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  24. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  25. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  26. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  27. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  28. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  29. hpcflow/data/scripts/script_exit_test.py +5 -0
  30. hpcflow/data/template_components/environments.yaml +1 -1
  31. hpcflow/sdk/__init__.py +5 -0
  32. hpcflow/sdk/app.py +166 -92
  33. hpcflow/sdk/cli.py +263 -84
  34. hpcflow/sdk/cli_common.py +99 -5
  35. hpcflow/sdk/config/callbacks.py +38 -1
  36. hpcflow/sdk/config/config.py +102 -13
  37. hpcflow/sdk/config/errors.py +19 -5
  38. hpcflow/sdk/config/types.py +3 -0
  39. hpcflow/sdk/core/__init__.py +25 -1
  40. hpcflow/sdk/core/actions.py +914 -262
  41. hpcflow/sdk/core/cache.py +76 -34
  42. hpcflow/sdk/core/command_files.py +14 -128
  43. hpcflow/sdk/core/commands.py +35 -6
  44. hpcflow/sdk/core/element.py +122 -50
  45. hpcflow/sdk/core/errors.py +58 -2
  46. hpcflow/sdk/core/execute.py +207 -0
  47. hpcflow/sdk/core/loop.py +408 -50
  48. hpcflow/sdk/core/loop_cache.py +4 -4
  49. hpcflow/sdk/core/parameters.py +382 -37
  50. hpcflow/sdk/core/run_dir_files.py +13 -40
  51. hpcflow/sdk/core/skip_reason.py +7 -0
  52. hpcflow/sdk/core/task.py +119 -30
  53. hpcflow/sdk/core/task_schema.py +68 -0
  54. hpcflow/sdk/core/test_utils.py +66 -27
  55. hpcflow/sdk/core/types.py +54 -1
  56. hpcflow/sdk/core/utils.py +136 -19
  57. hpcflow/sdk/core/workflow.py +1587 -356
  58. hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
  59. hpcflow/sdk/demo/cli.py +7 -0
  60. hpcflow/sdk/helper/cli.py +1 -0
  61. hpcflow/sdk/log.py +42 -15
  62. hpcflow/sdk/persistence/base.py +405 -53
  63. hpcflow/sdk/persistence/json.py +177 -52
  64. hpcflow/sdk/persistence/pending.py +237 -69
  65. hpcflow/sdk/persistence/store_resource.py +3 -2
  66. hpcflow/sdk/persistence/types.py +15 -4
  67. hpcflow/sdk/persistence/zarr.py +928 -81
  68. hpcflow/sdk/submission/jobscript.py +1408 -489
  69. hpcflow/sdk/submission/schedulers/__init__.py +40 -5
  70. hpcflow/sdk/submission/schedulers/direct.py +33 -19
  71. hpcflow/sdk/submission/schedulers/sge.py +51 -16
  72. hpcflow/sdk/submission/schedulers/slurm.py +44 -16
  73. hpcflow/sdk/submission/schedulers/utils.py +7 -2
  74. hpcflow/sdk/submission/shells/base.py +68 -20
  75. hpcflow/sdk/submission/shells/bash.py +222 -129
  76. hpcflow/sdk/submission/shells/powershell.py +200 -150
  77. hpcflow/sdk/submission/submission.py +852 -119
  78. hpcflow/sdk/submission/types.py +18 -21
  79. hpcflow/sdk/typing.py +24 -5
  80. hpcflow/sdk/utils/arrays.py +71 -0
  81. hpcflow/sdk/utils/deferred_file.py +55 -0
  82. hpcflow/sdk/utils/hashing.py +16 -0
  83. hpcflow/sdk/utils/patches.py +12 -0
  84. hpcflow/sdk/utils/strings.py +33 -0
  85. hpcflow/tests/api/test_api.py +32 -0
  86. hpcflow/tests/conftest.py +19 -0
  87. hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
  88. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  89. hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
  90. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  91. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  92. hpcflow/tests/scripts/test_main_scripts.py +821 -70
  93. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  94. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  95. hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -0
  96. hpcflow/tests/unit/test_action.py +176 -0
  97. hpcflow/tests/unit/test_app.py +20 -0
  98. hpcflow/tests/unit/test_cache.py +46 -0
  99. hpcflow/tests/unit/test_cli.py +133 -0
  100. hpcflow/tests/unit/test_config.py +122 -1
  101. hpcflow/tests/unit/test_element_iteration.py +47 -0
  102. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  103. hpcflow/tests/unit/test_loop.py +1332 -27
  104. hpcflow/tests/unit/test_meta_task.py +325 -0
  105. hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
  106. hpcflow/tests/unit/test_parameter.py +13 -0
  107. hpcflow/tests/unit/test_persistence.py +190 -8
  108. hpcflow/tests/unit/test_run.py +109 -3
  109. hpcflow/tests/unit/test_run_directories.py +29 -0
  110. hpcflow/tests/unit/test_shell.py +20 -0
  111. hpcflow/tests/unit/test_submission.py +5 -76
  112. hpcflow/tests/unit/test_workflow_template.py +31 -0
  113. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  114. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  115. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  116. hpcflow/tests/unit/utils/test_patches.py +5 -0
  117. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  118. hpcflow/tests/workflows/__init__.py +0 -0
  119. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  120. hpcflow/tests/workflows/test_jobscript.py +332 -0
  121. hpcflow/tests/workflows/test_run_status.py +198 -0
  122. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  123. hpcflow/tests/workflows/test_submission.py +140 -0
  124. hpcflow/tests/workflows/test_workflows.py +142 -2
  125. hpcflow/tests/workflows/test_zip.py +18 -0
  126. hpcflow/viz_demo.ipynb +6587 -3
  127. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/METADATA +7 -4
  128. hpcflow_new2-0.2.0a200.dist-info/RECORD +222 -0
  129. hpcflow_new2-0.2.0a190.dist-info/RECORD +0 -165
  130. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/LICENSE +0 -0
  131. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/WHEEL +0 -0
  132. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/entry_points.txt +0 -0
hpcflow/tests/unit/test_persistence.py
@@ -1,16 +1,20 @@
 from __future__ import annotations
 from pathlib import Path
+import sys
+from typing import Any, Dict, List, Tuple
 from typing import cast, TYPE_CHECKING
 import numpy as np
 import zarr  # type: ignore
 import pytest
-from hpcflow.sdk.core.test_utils import make_test_data_YAML_workflow
+from hpcflow.sdk.core.test_utils import make_test_data_YAML_workflow, make_workflow
 from hpcflow.sdk.persistence.json import (
     JSONPersistentStore,
     JsonStoreElement,
     JsonStoreElementIter,
     JsonStoreEAR,
 )
+from hpcflow.sdk.persistence.zarr import ZarrPersistentStore
+
 from hpcflow.app import app as hf

 if TYPE_CHECKING:
@@ -258,6 +262,9 @@ def test_make_zarr_store_no_compressor(null_config, tmp_path: Path):


 @pytest.mark.integration
+@pytest.mark.skipif(
+    sys.version_info < (3, 9), reason="Python 3.8 support is being removed anyway."
+)
 def test_zarr_rechunk_data_equivalent(null_config, tmp_path: Path):
     t1 = hf.Task(
         schema=hf.task_schemas.test_t1_conditional_OS,
@@ -276,10 +283,10 @@ def test_zarr_rechunk_data_equivalent(null_config, tmp_path: Path):
     arr = cast("ZarrPersistentStore", wk._store)._get_EARs_arr()
     assert arr.chunks == arr.shape

-    bak_path = (Path(wk.path) / arr.path).with_suffix(".bak")
+    bak_path = (Path(arr.store.path) / arr.path).with_suffix(".bak")
     arr_bak = zarr.open(bak_path)

-    assert arr_bak.chunks == (1,)
+    assert arr_bak.chunks == (1, 1)  # runs array is 2D

     # check backup and new runs data are equal:
     assert np.all(arr[:] == arr_bak[:])
@@ -289,6 +296,9 @@ def test_zarr_rechunk_data_equivalent(null_config, tmp_path: Path):


 @pytest.mark.integration
+@pytest.mark.skipif(
+    sys.version_info < (3, 9), reason="Python 3.8 support is being removed anyway."
+)
 def test_zarr_rechunk_data_equivalent_custom_chunk_size(null_config, tmp_path: Path):
     t1 = hf.Task(
         schema=hf.task_schemas.test_t1_conditional_OS,
@@ -305,12 +315,12 @@ def test_zarr_rechunk_data_equivalent_custom_chunk_size(null_config, tmp_path: Path):
     wk.rechunk_runs(backup=True, status=False, chunk_size=2)

     arr = cast("ZarrPersistentStore", wk._store)._get_EARs_arr()
-    assert arr.chunks == (2,)
+    assert arr.chunks == (2, 2)  # runs array is 2D

-    bak_path = (Path(wk.path) / arr.path).with_suffix(".bak")
+    bak_path = (Path(arr.store.path) / arr.path).with_suffix(".bak")
     arr_bak = zarr.open(bak_path)

-    assert arr_bak.chunks == (1,)
+    assert arr_bak.chunks == (1, 1)  # runs array is 2D

     # check backup and new runs data are equal:
     assert np.all(arr[:] == arr_bak[:])
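Note: the chunk-shape assertions above lean on zarr's `Array.chunks`/`shape` semantics. A minimal standalone illustration of the two states being asserted (zarr v2 API; the array values here are arbitrary, not package data):

```python
import numpy as np
import zarr

# one element per chunk, as for the pre-rechunk 2D runs array:
z = zarr.array(np.arange(16).reshape(4, 4), chunks=(1, 1))
assert z.chunks == (1, 1)

# a single chunk spanning the whole array, matching the post-rechunk
# assertion `arr.chunks == arr.shape`:
z2 = zarr.array(z[:], chunks=z.shape)
assert z2.chunks == z2.shape
```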
@@ -334,7 +344,7 @@ def test_zarr_rechunk_data_no_backup_load_runs(null_config, tmp_path: Path):

     arr = cast("ZarrPersistentStore", wk._store)._get_EARs_arr()

-    bak_path = (Path(wk.path) / arr.path).with_suffix(".bak")
+    bak_path = (Path(arr.store.path) / arr.path).with_suffix(".bak")
     assert not bak_path.is_file()

     # check we can load runs:
@@ -358,14 +368,186 @@ def test_zarr_rechunk_data_no_backup_load_parameter_base(null_config, tmp_path: Path):
         path=tmp_path,
     )
     wk.submit(wait=True, status=False, add_to_known=False)
+
+    params_old = wk.get_all_parameter_data()
     wk.rechunk_parameter_base(backup=False, status=False)

+    wk = wk.reload()
+    params_new = wk.get_all_parameter_data()
+    assert params_new == params_old
+
     arr = cast("ZarrPersistentStore", wk._store)._get_parameter_base_array()

-    bak_path = (Path(wk.path) / arr.path).with_suffix(".bak")
+    bak_path = (Path(arr.store.path) / arr.path).with_suffix(".bak")
     assert not bak_path.is_file()

     # check we can load parameters:
     param_IDs = []
     for i in wk.get_all_parameters():
         param_IDs.append(i.id_)
+
+
+def test_get_parameter_sources_duplicate_ids(null_config, tmp_path):
+    wk = make_workflow(
+        schemas_spec=[[{"p1": None}, ("p1",), "t1"]],
+        local_inputs={0: ("p1",)},
+        path=tmp_path,
+    )
+    id_lst = [0, 1, 1, 2, 0]
+    src = wk._store.get_parameter_sources(id_lst)
+    assert len(src) == len(id_lst)
+    assert src[0] == src[4]
+    assert src[1] == src[2]
+
+
+def _transform_jobscript_dependencies_to_encodable(
+    deps: dict[tuple[int, int], dict[tuple[int, int], dict[str, Any]]],
+) -> dict[str, list[dict[str, Any]]]:
+    """Transform a dict of jobscript dependencies written in a more testing-friendly/
+    convenient format into the format expected by the method
+    `ZarrPersistentStore._encode_jobscript_block_dependencies`.
+    """
+    max_js_idx = max(i[0] for i in deps)
+    sub_js: dict[str, list[dict[str, Any]]] = {
+        "jobscripts": [
+            {"blocks": [], "index": js_idx} for js_idx in range(max_js_idx + 1)
+        ]
+    }
+    for (js_idx, blk_idx), deps_i in deps.items():
+        sub_js["jobscripts"][js_idx]["blocks"].append(
+            {
+                "dependencies": [[[k[0], k[1]], v] for k, v in deps_i.items()],
+                "index": blk_idx,
+            }
+        )
+    return sub_js
+
+
+def test_zarr_encode_jobscript_block_dependencies_element_mapping_array_non_array_equivalence():
+    deps_1 = {
+        (0, 0): {},
+        (1, 0): {(0, 0): {"js_element_mapping": {0: [0]}, "is_array": True}},
+    }
+    deps_2 = {
+        (0, 0): {},
+        (1, 0): {(0, 0): {"js_element_mapping": {0: np.array([0])}, "is_array": True}},
+    }
+    deps_1 = _transform_jobscript_dependencies_to_encodable(deps_1)
+    deps_2 = _transform_jobscript_dependencies_to_encodable(deps_2)
+    arr_1 = ZarrPersistentStore._encode_jobscript_block_dependencies(deps_1)
+    arr_2 = ZarrPersistentStore._encode_jobscript_block_dependencies(deps_2)
+    assert np.array_equal(arr_1, arr_2)
+
+
+def test_zarr_encode_decode_jobscript_block_dependencies():
+
+    deps = {
+        (0, 0): {},
+        (1, 0): {
+            (0, 0): {
+                "js_element_mapping": {0: [0], 1: [1]},
+                "is_array": True,
+            }
+        },
+        (2, 0): {
+            (1, 0): {
+                "js_element_mapping": {0: [0, 1], 1: [0, 1]},
+                "is_array": False,
+            }
+        },
+        (2, 1): {
+            (0, 0): {"js_element_mapping": {0: [0, 1]}, "is_array": False},
+            (2, 0): {"js_element_mapping": {0: [0, 1]}, "is_array": False},
+        },
+    }
+    deps_t = _transform_jobscript_dependencies_to_encodable(deps)
+    arr = ZarrPersistentStore._encode_jobscript_block_dependencies(deps_t)
+    assert np.array_equal(
+        arr,
+        np.array(
+            [
+                2, 0, 0, 12, 1, 0, 9, 0, 0, 1, 2, 0, 0, 2, 1, 1, 14, 2, 0, 11,
+                1, 0, 0, 3, 0, 0, 1, 3, 1, 0, 1, 18, 2, 1, 7, 0, 0, 0, 3, 0,
+                0, 1, 7, 2, 0, 0, 3, 0, 0, 1,
+            ]
+        ),
+    )
+    deps_rt = ZarrPersistentStore._decode_jobscript_block_dependencies(arr)
+    assert deps_rt == deps
+
+
+def test_zarr_encode_decode_jobscript_block_dependencies_large_many_to_one():
+    deps = {
+        (0, 0): {},
+        (1, 0): {
+            (0, 0): {"js_element_mapping": {0: list(range(1_000_000))}, "is_array": False}
+        },
+    }
+    deps_t = _transform_jobscript_dependencies_to_encodable(deps)
+    arr = ZarrPersistentStore._encode_jobscript_block_dependencies(deps_t)
+    deps_rt = ZarrPersistentStore._decode_jobscript_block_dependencies(arr)
+    assert deps_rt == deps
+
+
+def test_zarr_encode_decode_jobscript_block_dependencies_large_one_to_one():
+    deps = {
+        (0, 0): {},
+        (1, 0): {
+            (0, 0): {
+                "js_element_mapping": {i: [i] for i in range(1_000_000)},
+                "is_array": False,
+            }
+        },
+    }
+    deps_t = _transform_jobscript_dependencies_to_encodable(deps)
+    arr = ZarrPersistentStore._encode_jobscript_block_dependencies(deps_t)
+    deps_rt = ZarrPersistentStore._decode_jobscript_block_dependencies(arr)
+    assert deps_rt == deps
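For reference, a hand-traced example of what the `_transform_jobscript_dependencies_to_encodable` helper above produces, derived directly from the helper's own code (this is not additional package API):

```python
deps = {
    (0, 0): {},
    (1, 0): {(0, 0): {"js_element_mapping": {0: [0]}, "is_array": True}},
}
# _transform_jobscript_dependencies_to_encodable(deps) yields:
# {
#     "jobscripts": [
#         {"index": 0, "blocks": [{"index": 0, "dependencies": []}]},
#         {"index": 1, "blocks": [{
#             "index": 0,
#             "dependencies": [
#                 [[0, 0], {"js_element_mapping": {0: [0]}, "is_array": True}]
#             ],
#         }]},
#     ]
# }
```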
hpcflow/tests/unit/test_run.py
@@ -1,6 +1,13 @@
 from __future__ import annotations
+
+import os
+
+import pytest
+
 from pathlib import Path
 from hpcflow.app import app as hf
+from hpcflow.sdk.core.actions import SkipReason
+from hpcflow.sdk.core.test_utils import make_workflow_to_run_command


 def test_compose_commands_no_shell_var(null_config, tmp_path: Path):
@@ -17,7 +24,7 @@ def test_compose_commands_no_shell_var(null_config, tmp_path: Path):
     assert sub is not None
     js = sub.jobscripts[0]
     run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
-    _, shell_vars = run.compose_commands(jobscript=js, JS_action_idx=0)
+    _, shell_vars = run.compose_commands(environments=sub.environments, shell=js.shell)
     assert shell_vars == {0: []}

@@ -46,7 +53,7 @@ def test_compose_commands_single_shell_var(null_config, tmp_path: Path):
     assert sub is not None
     js = sub.jobscripts[0]
     run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
-    _, shell_vars = run.compose_commands(jobscript=js, JS_action_idx=0)
+    _, shell_vars = run.compose_commands(environments=sub.environments, shell=js.shell)
     assert shell_vars == {0: [("outputs.p1", "parameter_p1", "stdout")]}

@@ -76,5 +83,104 @@ def test_compose_commands_multi_single_shell_var(null_config, tmp_path: Path):
     assert sub is not None
     js = sub.jobscripts[0]
     run = wk.tasks[0].elements[0].iterations[0].action_runs[0]
-    _, shell_vars = run.compose_commands(jobscript=js, JS_action_idx=0)
+    _, shell_vars = run.compose_commands(environments=sub.environments, shell=js.shell)
     assert shell_vars == {0: [], 1: [("outputs.p1", "parameter_p1", "stdout")]}
+
+
+@pytest.mark.integration
+def test_run_dir_diff_new_file(null_config, tmp_path):
+    if os.name == "nt":
+        command = "New-Item -Path 'new_file.txt' -ItemType File"
+    else:
+        command = "touch new_file.txt"
+    wk = make_workflow_to_run_command(
+        command=command,
+        requires_dir=True,
+        path=tmp_path,
+        name="w2",
+        overwrite=True,
+    )
+    wk.submit(wait=True, add_to_known=False, status=False)
+    assert wk.get_all_EARs()[0].dir_diff.files_created == ["new_file.txt"]
+
+
+@pytest.mark.integration
+def test_run_skip_reason_upstream_failure(null_config, tmp_path):
+    ts = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaInput("p2")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command="echo $(( <<parameter:p1>> + 100 ))",
+                        stdout="<<parameter:p2>>",
+                    ),
+                    hf.Command(command="exit 1"),
+                ]
+            ),
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command="echo $(( <<parameter:p2>> + 100 ))",
+                        stdout="<<parameter:p2>>",
+                    ),
+                ]
+            ),  # should be skipped due to failure of action 0
+        ],
+    )
+    wk = hf.Workflow.from_template_data(
+        template_name="test_skip_reason",
+        path=tmp_path,
+        tasks=[hf.Task(schema=ts, inputs={"p1": 100})],
+    )
+    wk.submit(wait=True, add_to_known=False, status=False)
+    runs = wk.get_all_EARs()
+    assert not runs[0].success
+    assert not runs[1].success
+    assert runs[0].skip_reason is SkipReason.NOT_SKIPPED
+    assert runs[1].skip_reason is SkipReason.UPSTREAM_FAILURE
+
+
+@pytest.mark.integration
+def test_run_skip_reason_loop_termination(null_config, tmp_path):
+    ts = hf.TaskSchema(
+        objective="t1",
+        inputs=[hf.SchemaInput("p1")],
+        outputs=[hf.SchemaInput("p1")],
+        actions=[
+            hf.Action(
+                commands=[
+                    hf.Command(
+                        command="echo $(( <<parameter:p1>> + 100 ))",
+                        stdout="<<int(parameter:p1)>>",
+                    ),
+                ]
+            ),
+        ],
+    )
+    loop_term = hf.Rule(path="outputs.p1", condition={"value.equal_to": 300})
+    wk = hf.Workflow.from_template_data(
+        template_name="test_skip_reason",
+        path=tmp_path,
+        tasks=[hf.Task(schema=ts, inputs={"p1": 100})],
+        loops=[
+            hf.Loop(name="my_loop", tasks=[0], termination=loop_term, num_iterations=3)
+        ],
+    )
+    # loop should terminate after the second iteration
+    wk.submit(wait=True, add_to_known=False, status=False)
+    runs = wk.get_all_EARs()
+
+    assert runs[0].get("outputs.p1") == 200
+    assert runs[1].get("outputs.p1") == 300
+    assert not runs[2].get("outputs.p1")
+
+    assert runs[0].success
+    assert runs[1].success
+    assert not runs[2].success
+
+    assert runs[0].skip_reason is SkipReason.NOT_SKIPPED
+    assert runs[1].skip_reason is SkipReason.NOT_SKIPPED
+    assert runs[2].skip_reason is SkipReason.LOOP_TERMINATION
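`SkipReason` is imported from `hpcflow.sdk.core.actions` above, and the file list shows a new `hpcflow/sdk/core/skip_reason.py` (+7 lines) that plausibly defines the enum and is re-exported. A minimal sketch consistent with these tests — the member names appear in the assertions, but the concrete values are assumptions:

```python
from enum import Enum


class SkipReason(Enum):
    # names taken from the assertions above; the integer values are assumed
    NOT_SKIPPED = 0
    UPSTREAM_FAILURE = 1
    LOOP_TERMINATION = 2
```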
hpcflow/tests/unit/test_run_directories.py (new file)
@@ -0,0 +1,29 @@
+from pathlib import Path
+import pytest
+from hpcflow.app import app as hf
+from hpcflow.sdk.core.test_utils import make_workflow
+
+
+@pytest.mark.parametrize("store", ["json", "zarr"])
+def test_run_directories(null_config, tmp_path, store):
+    wk = make_workflow(
+        schemas_spec=[
+            [{"p1": None}, ("p1",), "t1"],
+            [{"p2": None}, ("p2",), "t2", {"requires_dir": True}],
+        ],
+        local_inputs={0: ("p1",)},
+        local_sequences={1: [("inputs.p2", 2, 0)]},
+        path=tmp_path,
+        store=store,
+    )
+    lp_0 = hf.Loop(name="my_loop", tasks=[1], num_iterations=2)
+    wk.add_loop(lp_0)
+    sub = wk.add_submission()  # populates run directories
+
+    run_dirs = wk.get_run_directories()
+
+    assert run_dirs[0] is None
+    assert str(run_dirs[1]) == str(Path(wk.path).joinpath("execute/t_1/e_0/i_0"))
+    assert str(run_dirs[2]) == str(Path(wk.path).joinpath("execute/t_1/e_1/i_0"))
+    assert str(run_dirs[3]) == str(Path(wk.path).joinpath("execute/t_1/e_0/i_1"))
+    assert str(run_dirs[4]) == str(Path(wk.path).joinpath("execute/t_1/e_1/i_1"))
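The expected paths encode the run-directory layout as `execute/t_<task>/e_<element>/i_<iteration>` under the workflow root. A hypothetical helper mirroring the assertions above (`run_dir` is illustrative only, and whether `t_<n>` uses the task index or the task insert ID is an assumption):

```python
from pathlib import Path


def run_dir(wk_path: str, task: int, element: int, iteration: int) -> Path:
    # mirrors the layout asserted above, e.g. execute/t_1/e_0/i_0
    return Path(wk_path) / "execute" / f"t_{task}" / f"e_{element}" / f"i_{iteration}"
```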
hpcflow/tests/unit/test_shell.py
@@ -77,3 +77,23 @@ def test_process_JS_header_args_app_invoc_bash() -> None:
     for i, j in zip(app_invocs, expected):
         processed = shell.process_JS_header_args({"app_invoc": i})
         assert processed["app_invoc"] == j
+
+
+def test_format_array_powershell():
+    shell = ALL_SHELLS["powershell"]["nt"]()
+    assert shell.format_array([1, 2, 3]) == "@(1, 2, 3)"
+
+
+def test_format_array_get_item_powershell():
+    shell = ALL_SHELLS["powershell"]["nt"]()
+    assert shell.format_array_get_item("my_arr", 3) == "$my_arr[3]"
+
+
+def test_format_array_bash():
+    shell = ALL_SHELLS["bash"]["posix"]()
+    assert shell.format_array([1, 2, 3]) == "(1 2 3)"
+
+
+def test_format_array_get_item_bash():
+    shell = ALL_SHELLS["bash"]["posix"]()
+    assert shell.format_array_get_item("my_arr", 3) == r"${my_arr[3]}"
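The expected strings imply implementations along these lines — a sketch inferred from the tests, not the package's actual `format_array` code:

```python
def format_array_bash(values: list[int]) -> str:
    # bash array literal, e.g. "(1 2 3)"
    return "(" + " ".join(str(v) for v in values) + ")"


def format_array_powershell(values: list[int]) -> str:
    # PowerShell array literal, e.g. "@(1, 2, 3)"
    return "@(" + ", ".join(str(v) for v in values) + ")"
```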
hpcflow/tests/unit/test_submission.py
@@ -247,6 +247,8 @@ def test_raise_missing_matching_env_executable(new_null_config, tmp_path) -> None:
     with pytest.raises(MissingEnvironmentExecutableInstanceError):
         wk.add_submission()

+    hf.reload_template_components()  # remove extra envs
+

 def test_no_raise_matching_env_executable(new_null_config, tmp_path) -> None:
     env_name = "my_hpcflow_env"
@@ -284,6 +286,8 @@ def test_no_raise_matching_env_executable(new_null_config, tmp_path) -> None:
     wk = hf.Workflow.from_template(wkt, path=tmp_path)
     wk.add_submission()

+    hf.reload_template_components()  # remove extra envs
+

 def test_raise_missing_env(new_null_config, tmp_path) -> None:
     env_name = "my_hpcflow_env"
@@ -336,82 +340,7 @@ def test_custom_env_and_executable(new_null_config, tmp_path) -> None:
     wk = hf.Workflow.from_template(wkt, path=tmp_path)
     wk.add_submission()

-
-def test_abort_EARs_file_creation(null_config, tmp_path) -> None:
-    wk_name = "temp"
-    t1 = hf.Task(
-        schema=hf.task_schemas.test_t1_conditional_OS,
-        sequences=[hf.ValueSequence("inputs.p1", values=[1, 2, 3])],
-    )
-    wkt = hf.WorkflowTemplate(name=wk_name, tasks=[t1])
-    wk = hf.Workflow.from_template(
-        template=wkt,
-        path=tmp_path,
-    )
-    sub = wk.add_submission()
-    assert sub is not None
-    wk.submissions_path.mkdir(exist_ok=True, parents=True)
-    sub.path.mkdir(exist_ok=True)
-    sub._write_abort_EARs_file()
-    with sub.abort_EARs_file_path.open("rt") as fp:
-        lines = fp.read()
-
-    assert lines == "0\n0\n0\n"
-
-
-@pytest.mark.parametrize("run_id", [0, 1, 2])
-def test_abort_EARs_file_update(null_config, tmp_path, run_id) -> None:
-    wk_name = "temp"
-    t1 = hf.Task(
-        schema=hf.task_schemas.test_t1_conditional_OS,
-        sequences=[hf.ValueSequence("inputs.p1", values=[1, 2, 3])],
-    )
-    wkt = hf.WorkflowTemplate(name=wk_name, tasks=[t1])
-    wk = hf.Workflow.from_template(
-        template=wkt,
-        path=tmp_path,
-    )
-    sub = wk.add_submission()
-    assert sub is not None
-    wk.submissions_path.mkdir(exist_ok=True, parents=True)
-    sub.path.mkdir(exist_ok=True)
-    sub._write_abort_EARs_file()
-
-    sub._set_run_abort(run_ID=run_id)
-
-    with sub.abort_EARs_file_path.open("rt") as fp:
-        lines = fp.read()
-
-    lines_exp = ["0", "0", "0"]
-    lines_exp[run_id] = "1"
-    assert lines == "\n".join(lines_exp) + "\n"
-
-
-def test_abort_EARs_file_update_with_existing_abort(null_config, tmp_path) -> None:
-    wk_name = "temp"
-    t1 = hf.Task(
-        schema=hf.task_schemas.test_t1_conditional_OS,
-        sequences=[hf.ValueSequence("inputs.p1", values=[1, 2, 3])],
-    )
-    wkt = hf.WorkflowTemplate(name=wk_name, tasks=[t1])
-    wk = hf.Workflow.from_template(
-        template=wkt,
-        path=tmp_path,
-    )
-    sub = wk.add_submission()
-    assert sub is not None
-    wk.submissions_path.mkdir(exist_ok=True, parents=True)
-    sub.path.mkdir(exist_ok=True)
-    sub._write_abort_EARs_file()
-
-    sub._set_run_abort(run_ID=1)
-    sub._set_run_abort(run_ID=2)
-
-    with sub.abort_EARs_file_path.open("rt") as fp:
-        lines = fp.read()
-
-    lines_exp = ["0", "1", "1"]
-    assert lines == "\n".join(lines_exp) + "\n"
+    hf.reload_template_components()  # remove extra envs


 def test_unique_schedulers_one_direct(new_null_config, tmp_path) -> None:
hpcflow/tests/unit/test_workflow_template.py
@@ -1,5 +1,7 @@
+from textwrap import dedent
 import pytest
 from hpcflow.app import app as hf
+from hpcflow.sdk.core.errors import MissingVariableSubstitutionError
 from hpcflow.sdk.core.test_utils import (
     make_test_data_YAML_workflow_template,
 )
@@ -38,6 +40,35 @@ def test_workflow_template_vars(tmp_path, new_null_config):
     assert wkt.tasks[0].element_sets[0].repeats[0]["number"] == num_repeats


+def test_workflow_template_vars_raise_no_vars(tmp_path, new_null_config):
+    # no default value for the variable is provided in `benchmark_N_elements`, so this
+    # should raise if the variables dict is not passed:
+    with pytest.raises(MissingVariableSubstitutionError):
+        make_test_data_YAML_workflow_template("benchmark_N_elements.yaml")
+
+
+def test_workflow_template_vars_defaults_used(tmp_path, new_null_config):
+    # `benchmark_script_runner` contains a default value for the variable `N`, so that
+    # should be used, since we don't pass any variables:
+    wkt = make_test_data_YAML_workflow_template("benchmark_script_runner.yaml")
+    assert wkt.tasks[0].element_sets[0].repeats[0]["number"] == 1
+
+
+def test_workflow_template_vars_False_no_substitution(tmp_path, new_null_config):
+    # read a YAML template and check variables are not substituted when `variables=False`:
+    wkt_yaml = dedent(
+        """\
+        name: workflow_1
+        tasks:
+        - schema: test_t1_conditional_OS
+          inputs:
+            p1: <<var:my_var>>
+        """
+    )
+    wkt = hf.WorkflowTemplate.from_YAML_string(wkt_yaml, variables=False)
+    assert wkt.tasks[0].element_sets[0].inputs[0].value == "<<var:my_var>>"
+
+
 def test_env_preset_merge_simple(null_config):
     s1 = hf.TaskSchema(
         objective="s1",
hpcflow/tests/unit/utils/test_arrays.py (new file)
@@ -0,0 +1,40 @@
+import numpy as np
+
+from hpcflow.sdk.utils.arrays import get_1D_idx, get_2D_idx, split_arr
+
+
+def test_get_2D_idx():
+    assert get_2D_idx(0, num_cols=10) == (0, 0)
+    assert get_2D_idx(4, num_cols=10) == (0, 4)
+    assert get_2D_idx(9, num_cols=10) == (0, 9)
+    assert get_2D_idx(10, num_cols=10) == (1, 0)
+    assert get_2D_idx(13, num_cols=10) == (1, 3)
+    assert get_2D_idx(20, num_cols=10) == (2, 0)
+    arr_r, arr_c = get_2D_idx(np.array([0, 4, 9, 10, 13, 20]), num_cols=10)
+    assert np.array_equal(arr_r, np.array([0, 0, 0, 1, 1, 2]))
+    assert np.array_equal(arr_c, np.array([0, 4, 9, 0, 3, 0]))
+
+
+def test_get_1D_idx():
+    assert get_1D_idx(*(0, 0), num_cols=10) == 0
+    assert get_1D_idx(*(0, 4), num_cols=10) == 4
+    assert get_1D_idx(*(0, 9), num_cols=10) == 9
+    assert get_1D_idx(*(1, 0), num_cols=10) == 10
+    assert get_1D_idx(*(1, 3), num_cols=10) == 13
+    assert get_1D_idx(*(2, 0), num_cols=10) == 20
+
+    assert np.array_equal(
+        get_1D_idx(
+            np.array([0, 0, 0, 1, 1, 2]), np.array([0, 4, 9, 0, 3, 0]), num_cols=10
+        ),
+        np.array([0, 4, 9, 10, 13, 20]),
+    )
+
+
+def test_split_arr():
+    splt = split_arr(np.array([4, 0, 1, 2, 3, 4, 1, 4, 5, 6]), metadata_size=1)
+    assert len(splt) == 2
+    assert np.array_equal(splt[0][0], np.array([0]))
+    assert np.array_equal(splt[0][1], np.array([1, 2, 3]))
+    assert np.array_equal(splt[1][0], np.array([1]))
+    assert np.array_equal(splt[1][1], np.array([4, 5, 6]))
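Minimal implementations consistent with these tests (a sketch; the real `hpcflow.sdk.utils.arrays` module is 71 lines and may differ in details such as dtype handling):

```python
import numpy as np


def get_2D_idx(idx, num_cols):
    # row-major flat index -> (row, col); works on scalars and numpy arrays
    return idx // num_cols, idx % num_cols


def get_1D_idx(row_idx, col_idx, num_cols):
    # inverse of get_2D_idx
    return row_idx * num_cols + col_idx


def split_arr(arr, metadata_size):
    # assumed layout: repeated chunks of [length, *metadata, *data]
    out, i = [], 0
    while i < len(arr):
        n = arr[i]
        chunk = arr[i + 1 : i + 1 + n]
        out.append((chunk[:metadata_size], chunk[metadata_size:]))
        i += 1 + n
    return out
```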
hpcflow/tests/unit/utils/test_deferred_file_writer.py (new file)
@@ -0,0 +1,34 @@
+from hpcflow.sdk.utils.deferred_file import DeferredFileWriter
+
+
+def test_file_not_created(tmp_path):
+    file_name = tmp_path / "test.txt"
+    assert not file_name.is_file()
+    with DeferredFileWriter(file_name, mode="w") as fp:
+        assert not fp._is_open
+    assert not file_name.is_file()
+
+
+def test_append_file_not_opened(tmp_path):
+    file_name = tmp_path / "test.txt"
+    with DeferredFileWriter(file_name, mode="a") as fp:
+        assert not fp._is_open
+    assert not file_name.is_file()
+
+
+def test_file_created_write(tmp_path):
+    file_name = tmp_path / "test.txt"
+    assert not file_name.is_file()
+    with DeferredFileWriter(file_name, mode="w") as fp:
+        fp.write("contents\n")
+        assert fp._is_open
+    assert file_name.is_file()
+
+
+def test_file_created_writelines(tmp_path):
+    file_name = tmp_path / "test.txt"
+    assert not file_name.is_file()
+    with DeferredFileWriter(file_name, mode="w") as fp:
+        fp.writelines(["contents\n"])
+        assert fp._is_open
+    assert file_name.is_file()
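A minimal sketch of the semantics these tests pin down — opening is deferred until the first `write`/`writelines`, so entering the context manager alone never creates the file. The real `DeferredFileWriter` (55 lines) may differ; only `_is_open` and the constructor signature are taken from the tests:

```python
from pathlib import Path


class DeferredFileWriter:
    def __init__(self, file_name, mode="w"):
        self.file_name = Path(file_name)
        self.mode = mode
        self._fp = None

    @property
    def _is_open(self) -> bool:
        return self._fp is not None

    def _ensure_open(self):
        # the file is only created/opened on the first write
        if self._fp is None:
            self._fp = self.file_name.open(self.mode)

    def write(self, data):
        self._ensure_open()
        self._fp.write(data)

    def writelines(self, lines):
        self._ensure_open()
        self._fp.writelines(lines)

    def __enter__(self):
        return self

    def __exit__(self, *exc_info):
        if self._fp is not None:
            self._fp.close()
```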