hpcflow-new2 0.2.0a190__py3-none-any.whl → 0.2.0a200__py3-none-any.whl

This diff compares the contents of two package versions as publicly released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in that registry.
Files changed (132)
  1. hpcflow/__pyinstaller/hook-hpcflow.py +1 -0
  2. hpcflow/_version.py +1 -1
  3. hpcflow/data/scripts/bad_script.py +2 -0
  4. hpcflow/data/scripts/do_nothing.py +2 -0
  5. hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
  6. hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
  7. hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
  8. hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
  9. hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
  10. hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
  11. hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
  12. hpcflow/data/scripts/input_file_generator_basic.py +3 -0
  13. hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
  14. hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
  15. hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
  16. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
  17. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
  18. hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
  19. hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
  20. hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
  21. hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
  22. hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
  23. hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
  24. hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
  25. hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
  26. hpcflow/data/scripts/output_file_parser_basic.py +3 -0
  27. hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
  28. hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
  29. hpcflow/data/scripts/script_exit_test.py +5 -0
  30. hpcflow/data/template_components/environments.yaml +1 -1
  31. hpcflow/sdk/__init__.py +5 -0
  32. hpcflow/sdk/app.py +166 -92
  33. hpcflow/sdk/cli.py +263 -84
  34. hpcflow/sdk/cli_common.py +99 -5
  35. hpcflow/sdk/config/callbacks.py +38 -1
  36. hpcflow/sdk/config/config.py +102 -13
  37. hpcflow/sdk/config/errors.py +19 -5
  38. hpcflow/sdk/config/types.py +3 -0
  39. hpcflow/sdk/core/__init__.py +25 -1
  40. hpcflow/sdk/core/actions.py +914 -262
  41. hpcflow/sdk/core/cache.py +76 -34
  42. hpcflow/sdk/core/command_files.py +14 -128
  43. hpcflow/sdk/core/commands.py +35 -6
  44. hpcflow/sdk/core/element.py +122 -50
  45. hpcflow/sdk/core/errors.py +58 -2
  46. hpcflow/sdk/core/execute.py +207 -0
  47. hpcflow/sdk/core/loop.py +408 -50
  48. hpcflow/sdk/core/loop_cache.py +4 -4
  49. hpcflow/sdk/core/parameters.py +382 -37
  50. hpcflow/sdk/core/run_dir_files.py +13 -40
  51. hpcflow/sdk/core/skip_reason.py +7 -0
  52. hpcflow/sdk/core/task.py +119 -30
  53. hpcflow/sdk/core/task_schema.py +68 -0
  54. hpcflow/sdk/core/test_utils.py +66 -27
  55. hpcflow/sdk/core/types.py +54 -1
  56. hpcflow/sdk/core/utils.py +136 -19
  57. hpcflow/sdk/core/workflow.py +1587 -356
  58. hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
  59. hpcflow/sdk/demo/cli.py +7 -0
  60. hpcflow/sdk/helper/cli.py +1 -0
  61. hpcflow/sdk/log.py +42 -15
  62. hpcflow/sdk/persistence/base.py +405 -53
  63. hpcflow/sdk/persistence/json.py +177 -52
  64. hpcflow/sdk/persistence/pending.py +237 -69
  65. hpcflow/sdk/persistence/store_resource.py +3 -2
  66. hpcflow/sdk/persistence/types.py +15 -4
  67. hpcflow/sdk/persistence/zarr.py +928 -81
  68. hpcflow/sdk/submission/jobscript.py +1408 -489
  69. hpcflow/sdk/submission/schedulers/__init__.py +40 -5
  70. hpcflow/sdk/submission/schedulers/direct.py +33 -19
  71. hpcflow/sdk/submission/schedulers/sge.py +51 -16
  72. hpcflow/sdk/submission/schedulers/slurm.py +44 -16
  73. hpcflow/sdk/submission/schedulers/utils.py +7 -2
  74. hpcflow/sdk/submission/shells/base.py +68 -20
  75. hpcflow/sdk/submission/shells/bash.py +222 -129
  76. hpcflow/sdk/submission/shells/powershell.py +200 -150
  77. hpcflow/sdk/submission/submission.py +852 -119
  78. hpcflow/sdk/submission/types.py +18 -21
  79. hpcflow/sdk/typing.py +24 -5
  80. hpcflow/sdk/utils/arrays.py +71 -0
  81. hpcflow/sdk/utils/deferred_file.py +55 -0
  82. hpcflow/sdk/utils/hashing.py +16 -0
  83. hpcflow/sdk/utils/patches.py +12 -0
  84. hpcflow/sdk/utils/strings.py +33 -0
  85. hpcflow/tests/api/test_api.py +32 -0
  86. hpcflow/tests/conftest.py +19 -0
  87. hpcflow/tests/data/benchmark_script_runner.yaml +26 -0
  88. hpcflow/tests/data/multi_path_sequences.yaml +29 -0
  89. hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
  90. hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
  91. hpcflow/tests/scripts/test_input_file_generators.py +282 -0
  92. hpcflow/tests/scripts/test_main_scripts.py +821 -70
  93. hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
  94. hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
  95. hpcflow/tests/shells/wsl/test_wsl_submission.py +6 -0
  96. hpcflow/tests/unit/test_action.py +176 -0
  97. hpcflow/tests/unit/test_app.py +20 -0
  98. hpcflow/tests/unit/test_cache.py +46 -0
  99. hpcflow/tests/unit/test_cli.py +133 -0
  100. hpcflow/tests/unit/test_config.py +122 -1
  101. hpcflow/tests/unit/test_element_iteration.py +47 -0
  102. hpcflow/tests/unit/test_jobscript_unit.py +757 -0
  103. hpcflow/tests/unit/test_loop.py +1332 -27
  104. hpcflow/tests/unit/test_meta_task.py +325 -0
  105. hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
  106. hpcflow/tests/unit/test_parameter.py +13 -0
  107. hpcflow/tests/unit/test_persistence.py +190 -8
  108. hpcflow/tests/unit/test_run.py +109 -3
  109. hpcflow/tests/unit/test_run_directories.py +29 -0
  110. hpcflow/tests/unit/test_shell.py +20 -0
  111. hpcflow/tests/unit/test_submission.py +5 -76
  112. hpcflow/tests/unit/test_workflow_template.py +31 -0
  113. hpcflow/tests/unit/utils/test_arrays.py +40 -0
  114. hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
  115. hpcflow/tests/unit/utils/test_hashing.py +65 -0
  116. hpcflow/tests/unit/utils/test_patches.py +5 -0
  117. hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
  118. hpcflow/tests/workflows/__init__.py +0 -0
  119. hpcflow/tests/workflows/test_directory_structure.py +31 -0
  120. hpcflow/tests/workflows/test_jobscript.py +332 -0
  121. hpcflow/tests/workflows/test_run_status.py +198 -0
  122. hpcflow/tests/workflows/test_skip_downstream.py +696 -0
  123. hpcflow/tests/workflows/test_submission.py +140 -0
  124. hpcflow/tests/workflows/test_workflows.py +142 -2
  125. hpcflow/tests/workflows/test_zip.py +18 -0
  126. hpcflow/viz_demo.ipynb +6587 -3
  127. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/METADATA +7 -4
  128. hpcflow_new2-0.2.0a200.dist-info/RECORD +222 -0
  129. hpcflow_new2-0.2.0a190.dist-info/RECORD +0 -165
  130. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/LICENSE +0 -0
  131. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/WHEEL +0 -0
  132. {hpcflow_new2-0.2.0a190.dist-info → hpcflow_new2-0.2.0a200.dist-info}/entry_points.txt +0 -0
hpcflow/tests/workflows/test_submission.py
@@ -0,0 +1,140 @@
+ import os
+ from pathlib import Path
+ import pytest
+ from hpcflow.app import app as hf
+
+
+ @pytest.mark.integration
+ def test_zarr_metadata_file_modification_times_many_jobscripts(null_config, tmp_path):
+     """Test that root group attributes are modified first, then individual jobscript
+     at-submit-metadata chunk files, then the submission at-submit-metadata group
+     attributes."""
+
+     num_js = 30
+     t1 = hf.Task(
+         schema=hf.task_schemas.test_t1_conditional_OS,
+         inputs={"p1": 100},
+         sequences=[
+             hf.ValueSequence(
+                 path="resources.any.resources_id", values=list(range(num_js))
+             )
+         ],
+     )
+     wk = hf.Workflow.from_template_data(
+         template_name="test_zarr_metadata_attrs_modified_times",
+         path=tmp_path,
+         tasks=[t1],
+         store="zarr",
+     )
+     wk.submit(add_to_known=False, status=False, cancel=True)
+
+     mtime_meta_group = Path(wk.path).joinpath(".zattrs").stat().st_mtime
+     mtime_mid_jobscript_chunk = (
+         wk._store._get_jobscripts_at_submit_metadata_arr_path(0)
+         .joinpath(str(int(num_js / 2)))
+         .stat()
+         .st_mtime
+     )
+     mtime_submission_group = (
+         wk._store._get_submission_metadata_group_path(0)
+         .joinpath(".zattrs")
+         .stat()
+         .st_mtime
+     )
+     assert mtime_meta_group < mtime_mid_jobscript_chunk < mtime_submission_group
+
+
+ @pytest.mark.integration
+ def test_json_metadata_file_modification_times_many_jobscripts(null_config, tmp_path):
+     """Test that the metadata.json file is modified first, then the submissions.json
+     file."""
+
+     num_js = 30
+     t1 = hf.Task(
+         schema=hf.task_schemas.test_t1_conditional_OS,
+         inputs={"p1": 100},
+         sequences=[
+             hf.ValueSequence(
+                 path="resources.any.resources_id", values=list(range(num_js))
+             )
+         ],
+     )
+     wk = hf.Workflow.from_template_data(
+         template_name="test_zarr_metadata_attrs_modified_times",
+         path=tmp_path,
+         tasks=[t1],
+         store="json",
+     )
+     wk.submit(add_to_known=False, status=False, cancel=True)
+
+     mtime_meta = Path(wk.path).joinpath("metadata.json").stat().st_mtime
+     mtime_subs = Path(wk.path).joinpath("submissions.json").stat().st_mtime
+     assert mtime_meta < mtime_subs
+
+
+ @pytest.mark.integration
+ def test_subission_start_end_times_equal_to_first_and_last_jobscript_start_end_times(
+     null_config, tmp_path
+ ):
+     num_js = 2
+     t1 = hf.Task(
+         schema=hf.task_schemas.test_t1_conditional_OS,
+         inputs={"p1": 100},
+         sequences=[
+             hf.ValueSequence(
+                 path="resources.any.resources_id", values=list(range(num_js))
+             )
+         ],
+     )
+     wk = hf.Workflow.from_template_data(
+         template_name="test_subission_start_end_times",
+         path=tmp_path,
+         tasks=[t1],
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     sub = wk.submissions[0]
+     jobscripts = sub.jobscripts
+
+     assert len(jobscripts) == num_js
+
+     # submission has two jobscripts, so start time should be start time of first jobscript:
+     assert sub.start_time == jobscripts[0].start_time
+
+     # ...and end time should be end time of second jobscript:
+     assert sub.end_time == jobscripts[1].end_time
+
+
+ @pytest.mark.integration
+ def test_multiple_jobscript_functions_files(null_config, tmp_path):
+     if os.name == "nt":
+         shell_exes = ["powershell.exe", "pwsh.exe", "pwsh.exe"]
+     else:
+         shell_exes = ["/bin/bash", "bash", "bash"]
+     t1 = hf.Task(
+         schema=hf.task_schemas.test_t1_conditional_OS,
+         inputs={"p1": 100},
+         sequences=[
+             hf.ValueSequence(
+                 path="resources.any.shell_args.executable",
+                 values=shell_exes,
+             )
+         ],
+     )
+     wk = hf.Workflow.from_template_data(
+         template_name="test_multi_js_funcs_files",
+         path=tmp_path,
+         tasks=[t1],
+         store="json",
+     )
+     wk.submit(add_to_known=True, status=False, cancel=True)
+
+     sub_js = wk.submissions[0].jobscripts
+     assert len(sub_js) == 2
+
+     funcs_0 = sub_js[0].jobscript_functions_path
+     funcs_1 = sub_js[1].jobscript_functions_path
+
+     assert funcs_0.is_file()
+     assert funcs_1.is_file()
+     assert funcs_0 != funcs_1
hpcflow/tests/workflows/test_workflows.py
@@ -1,9 +1,11 @@
  import os
+ import sys
  from pathlib import Path
  import time
  import pytest
  from hpcflow.app import app as hf
  from hpcflow.sdk.core.enums import EARStatus
+ from hpcflow.sdk.core.skip_reason import SkipReason
  from hpcflow.sdk.core.test_utils import (
      P1_parameter_cls as P1,
      P1_sub_parameter_cls as P1_sub,
@@ -32,8 +34,8 @@ def test_workflow_1_with_working_dir_with_spaces(tmp_path: Path, new_null_config


  @pytest.mark.integration
- @pytest.mark.skip(
-     reason="Sometimes fails on MacOS GHAs runner; too slow on Windows + Linux"
+ @pytest.mark.skipif(
+     sys.platform == "darwin", reason="fails/too slow; need to investigate"
  )
  def test_run_abort(tmp_path: Path, new_null_config):
      wk = make_test_data_YAML_workflow("workflow_test_run_abort.yaml", path=tmp_path)
@@ -367,6 +369,7 @@ def test_loop_simple(null_config, tmp_path: Path):


  @pytest.mark.integration
+ @pytest.mark.skip(reason="need to fix loop termination for multiple elements")
  def test_loop_termination_multi_element(null_config, tmp_path: Path):
      if os.name == "nt":
          cmds = [
@@ -422,3 +425,140 @@ def test_loop_termination_multi_element(null_config, tmp_path: Path):
      assert elem_1.iterations[0].action_runs[0].status is EARStatus.success
      assert elem_1.iterations[1].action_runs[0].status is EARStatus.success
      assert elem_1.iterations[2].action_runs[0].status is EARStatus.skipped
+
+
+ @pytest.mark.integration
+ def test_input_file_generator_no_errors_on_skip(null_config, tmp_path):
+     """i.e. we don't try to save a file that hasn't been created because the run was
+     skipped"""
+
+     inp_file = hf.FileSpec(label="my_input_file", name="my_input_file.txt")
+
+     if os.name == "nt":
+         cmds = (
+             "Write-Output ((<<parameter:p0>> + 1))",
+             "Get-Content <<file:my_input_file>>",
+         )
+     else:
+         cmds = ('echo "$((<<parameter:p0>> + 1))"', "cat <<file:my_input_file>>")
+
+     s1 = hf.TaskSchema(
+         objective="t1",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p0"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p1"))],
+         actions=[
+             hf.Action(
+                 commands=[hf.Command(command=cmds[0], stdout="<<parameter:p1>>")],
+             )
+         ],
+     )
+
+     s2 = hf.TaskSchema(
+         objective="t2",
+         inputs=[hf.SchemaInput(parameter=hf.Parameter("p1"))],
+         outputs=[hf.SchemaOutput(parameter=hf.Parameter("p0"))],
+         actions=[
+             hf.Action(
+                 commands=[hf.Command(cmds[1], stdout="<<int(parameter:p0)>>")],
+                 input_file_generators=[
+                     hf.InputFileGenerator(
+                         input_file=inp_file,
+                         inputs=[hf.Parameter("p1")],
+                         script="<<script:input_file_generator_basic.py>>",
+                     ),
+                 ],
+                 environments=[hf.ActionEnvironment(environment="python_env")],
+             )
+         ],
+     )
+     p0_val = 100
+     t1 = hf.Task(schema=s1, inputs={"p0": p0_val})
+     t2 = hf.Task(schema=s2)
+     wk = hf.Workflow.from_template_data(
+         tasks=[t1, t2],
+         loops=[
+             hf.Loop(
+                 tasks=[0, 1],
+                 num_iterations=2,
+                 termination={"path": "outputs.p0", "condition": {"value.equal_to": 101}},
+             )
+         ],
+         template_name="input_file_generator_skip_test",
+         path=tmp_path,
+     )
+
+     wk.submit(wait=True, add_to_known=False)
+
+     # check correct runs are set to skip due to loop termination:
+     runs = wk.get_all_EARs()
+     assert runs[0].skip_reason is SkipReason.NOT_SKIPPED
+     assert runs[1].skip_reason is SkipReason.NOT_SKIPPED
+     assert runs[2].skip_reason is SkipReason.NOT_SKIPPED
+     assert runs[3].skip_reason is SkipReason.LOOP_TERMINATION
+     assert runs[4].skip_reason is SkipReason.LOOP_TERMINATION
+     assert runs[5].skip_reason is SkipReason.LOOP_TERMINATION
+
+     # run 4 is the input file generator of the second iteration, which should be skipped
+     # check no error from trying to save the input file:
+     std_stream_path = runs[4].get_app_std_path()
+     if std_stream_path.is_file():
+         assert "FileNotFoundError" not in std_stream_path.read_text()
+
+
+ @pytest.mark.integration
+ @pytest.mark.parametrize("store", ["zarr", "json"])
+ def test_get_text_file(null_config, tmp_path, store):
+
+     s1 = hf.TaskSchema("t1", actions=[hf.Action(commands=[hf.Command("echo 'hi!'")])])
+     wk = hf.Workflow.from_template_data(
+         tasks=[hf.Task(s1)], template_name="print_stdout", path=tmp_path, store=store
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     combine = wk.submissions[0].jobscripts[0].resources.combine_jobscript_std
+     filename = "js_0_std.log" if combine else "js_0_stdout.log"
+     rel_path = f"artifacts/submissions/0/js_std/0/{filename}"
+     abs_path = f"{wk.url}/{rel_path}"
+
+     assert wk.get_text_file(rel_path) == "hi!\n"
+     assert wk.get_text_file(abs_path) == "hi!\n"
+
+
+ @pytest.mark.integration
+ def test_get_text_file_zarr_zip(null_config, tmp_path):
+
+     s1 = hf.TaskSchema("t1", actions=[hf.Action(commands=[hf.Command("echo 'hi!'")])])
+     wk = hf.Workflow.from_template_data(
+         tasks=[hf.Task(s1)], template_name="print_stdout", path=tmp_path, store="zarr"
+     )
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     wkz = hf.Workflow(wk.zip())
+
+     combine = wkz.submissions[0].jobscripts[0].resources.combine_jobscript_std
+     filename = "js_0_std.log" if combine else "js_0_stdout.log"
+     rel_path = f"artifacts/submissions/0/js_std/0/{filename}"
+     abs_path = f"{wkz.url}/{rel_path}"
+
+     assert wkz.get_text_file(rel_path) == "hi!\n"
+     assert wkz.get_text_file(abs_path) == "hi!\n"
+
+
+ @pytest.mark.parametrize("store", ["zarr", "json"])
+ def test_get_text_file_file_not_found(null_config, tmp_path, store):
+     s1 = hf.TaskSchema("t1", actions=[hf.Action(commands=[hf.Command("echo 'hi!'")])])
+     wk = hf.Workflow.from_template_data(
+         tasks=[hf.Task(s1)], template_name="print_stdout", path=tmp_path, store=store
+     )
+     with pytest.raises(FileNotFoundError):
+         wk.get_text_file("non_existent_file.txt")
+
+
+ def test_get_text_file_file_not_found_zarr_zip(null_config, tmp_path):
+     s1 = hf.TaskSchema("t1", actions=[hf.Action(commands=[hf.Command("echo 'hi!'")])])
+     wk = hf.Workflow.from_template_data(
+         tasks=[hf.Task(s1)], template_name="print_stdout", path=tmp_path, store="zarr"
+     )
+     wkz = hf.Workflow(wk.zip())
+     with pytest.raises(FileNotFoundError):
+         wkz.get_text_file("non_existent_file.txt")
hpcflow/tests/workflows/test_zip.py
@@ -0,0 +1,18 @@
+ import sys
+ import pytest
+
+ from hpcflow.app import app as hf
+ from hpcflow.sdk.core.test_utils import make_test_data_YAML_workflow
+
+
+ @pytest.mark.integration
+ @pytest.mark.skipif(
+     sys.version_info < (3, 9), reason="Python 3.8 support is being removed anyway."
+ )
+ def test_workflow_1_zip(tmp_path, new_null_config):
+     wk = make_test_data_YAML_workflow("workflow_1.yaml", path=tmp_path)
+     wk.submit(wait=True, add_to_known=False, status=False)
+
+     zip_path = wk.zip(path=tmp_path)
+     wkz = hf.Workflow(zip_path)
+     assert wkz.tasks[0].elements[0].outputs.p2.value == "201"
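
Note: most of the new workflow tests shown above are decorated with @pytest.mark.integration, so a default pytest run deselects them. A minimal sketch of selecting them explicitly, assuming a local checkout with the test dependencies installed (the marker name comes from the diff; the invocation itself is only illustrative, not part of the package):

# Illustrative only: run the integration-marked tests in the new workflow test modules.
import pytest

pytest.main(["-m", "integration", "hpcflow/tests/workflows"])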