hpcflow-new2 0.2.0a189__py3-none-any.whl → 0.2.0a199__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- hpcflow/__pyinstaller/hook-hpcflow.py +9 -6
- hpcflow/_version.py +1 -1
- hpcflow/app.py +1 -0
- hpcflow/data/scripts/bad_script.py +2 -0
- hpcflow/data/scripts/do_nothing.py +2 -0
- hpcflow/data/scripts/env_specifier_test/input_file_generator_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/main_script_test_pass_env_spec.py +8 -0
- hpcflow/data/scripts/env_specifier_test/output_file_parser_pass_env_spec.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/input_file_generator_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v1/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/env_specifier_test/v1/output_file_parser_basic.py +4 -0
- hpcflow/data/scripts/env_specifier_test/v2/main_script_test_direct_in_direct_out.py +7 -0
- hpcflow/data/scripts/input_file_generator_basic.py +3 -0
- hpcflow/data/scripts/input_file_generator_basic_FAIL.py +3 -0
- hpcflow/data/scripts/input_file_generator_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/main_script_test_direct_in.py +3 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_2_fail_allowed_group.py +7 -0
- hpcflow/data/scripts/main_script_test_direct_in_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_direct_in_group_one_fail_direct_out_3.py +6 -0
- hpcflow/data/scripts/main_script_test_hdf5_in_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_hdf5_in_obj_2.py +12 -0
- hpcflow/data/scripts/main_script_test_hdf5_out_obj.py +1 -1
- hpcflow/data/scripts/main_script_test_json_out_FAIL.py +3 -0
- hpcflow/data/scripts/main_script_test_shell_env_vars.py +12 -0
- hpcflow/data/scripts/main_script_test_std_out_std_err.py +6 -0
- hpcflow/data/scripts/output_file_parser_basic.py +3 -0
- hpcflow/data/scripts/output_file_parser_basic_FAIL.py +7 -0
- hpcflow/data/scripts/output_file_parser_test_stdout_stderr.py +8 -0
- hpcflow/data/scripts/script_exit_test.py +5 -0
- hpcflow/data/template_components/environments.yaml +1 -1
- hpcflow/sdk/__init__.py +26 -15
- hpcflow/sdk/app.py +2192 -768
- hpcflow/sdk/cli.py +506 -296
- hpcflow/sdk/cli_common.py +105 -7
- hpcflow/sdk/config/__init__.py +1 -1
- hpcflow/sdk/config/callbacks.py +115 -43
- hpcflow/sdk/config/cli.py +126 -103
- hpcflow/sdk/config/config.py +674 -318
- hpcflow/sdk/config/config_file.py +131 -95
- hpcflow/sdk/config/errors.py +125 -84
- hpcflow/sdk/config/types.py +148 -0
- hpcflow/sdk/core/__init__.py +25 -1
- hpcflow/sdk/core/actions.py +1771 -1059
- hpcflow/sdk/core/app_aware.py +24 -0
- hpcflow/sdk/core/cache.py +139 -79
- hpcflow/sdk/core/command_files.py +263 -287
- hpcflow/sdk/core/commands.py +145 -112
- hpcflow/sdk/core/element.py +828 -535
- hpcflow/sdk/core/enums.py +192 -0
- hpcflow/sdk/core/environment.py +74 -93
- hpcflow/sdk/core/errors.py +455 -52
- hpcflow/sdk/core/execute.py +207 -0
- hpcflow/sdk/core/json_like.py +540 -272
- hpcflow/sdk/core/loop.py +751 -347
- hpcflow/sdk/core/loop_cache.py +164 -47
- hpcflow/sdk/core/object_list.py +370 -207
- hpcflow/sdk/core/parameters.py +1100 -627
- hpcflow/sdk/core/rule.py +59 -41
- hpcflow/sdk/core/run_dir_files.py +21 -37
- hpcflow/sdk/core/skip_reason.py +7 -0
- hpcflow/sdk/core/task.py +1649 -1339
- hpcflow/sdk/core/task_schema.py +308 -196
- hpcflow/sdk/core/test_utils.py +191 -114
- hpcflow/sdk/core/types.py +440 -0
- hpcflow/sdk/core/utils.py +485 -309
- hpcflow/sdk/core/validation.py +82 -9
- hpcflow/sdk/core/workflow.py +2544 -1178
- hpcflow/sdk/core/zarr_io.py +98 -137
- hpcflow/sdk/data/workflow_spec_schema.yaml +2 -0
- hpcflow/sdk/demo/cli.py +53 -33
- hpcflow/sdk/helper/cli.py +18 -15
- hpcflow/sdk/helper/helper.py +75 -63
- hpcflow/sdk/helper/watcher.py +61 -28
- hpcflow/sdk/log.py +122 -71
- hpcflow/sdk/persistence/__init__.py +8 -31
- hpcflow/sdk/persistence/base.py +1360 -606
- hpcflow/sdk/persistence/defaults.py +6 -0
- hpcflow/sdk/persistence/discovery.py +38 -0
- hpcflow/sdk/persistence/json.py +568 -188
- hpcflow/sdk/persistence/pending.py +382 -179
- hpcflow/sdk/persistence/store_resource.py +39 -23
- hpcflow/sdk/persistence/types.py +318 -0
- hpcflow/sdk/persistence/utils.py +14 -11
- hpcflow/sdk/persistence/zarr.py +1337 -433
- hpcflow/sdk/runtime.py +44 -41
- hpcflow/sdk/submission/{jobscript_info.py → enums.py} +39 -12
- hpcflow/sdk/submission/jobscript.py +1651 -692
- hpcflow/sdk/submission/schedulers/__init__.py +167 -39
- hpcflow/sdk/submission/schedulers/direct.py +121 -81
- hpcflow/sdk/submission/schedulers/sge.py +170 -129
- hpcflow/sdk/submission/schedulers/slurm.py +291 -268
- hpcflow/sdk/submission/schedulers/utils.py +12 -2
- hpcflow/sdk/submission/shells/__init__.py +14 -15
- hpcflow/sdk/submission/shells/base.py +150 -29
- hpcflow/sdk/submission/shells/bash.py +283 -173
- hpcflow/sdk/submission/shells/os_version.py +31 -30
- hpcflow/sdk/submission/shells/powershell.py +228 -170
- hpcflow/sdk/submission/submission.py +1014 -335
- hpcflow/sdk/submission/types.py +140 -0
- hpcflow/sdk/typing.py +182 -12
- hpcflow/sdk/utils/arrays.py +71 -0
- hpcflow/sdk/utils/deferred_file.py +55 -0
- hpcflow/sdk/utils/hashing.py +16 -0
- hpcflow/sdk/utils/patches.py +12 -0
- hpcflow/sdk/utils/strings.py +33 -0
- hpcflow/tests/api/test_api.py +32 -0
- hpcflow/tests/conftest.py +27 -6
- hpcflow/tests/data/multi_path_sequences.yaml +29 -0
- hpcflow/tests/data/workflow_test_run_abort.yaml +34 -35
- hpcflow/tests/schedulers/sge/test_sge_submission.py +36 -0
- hpcflow/tests/schedulers/slurm/test_slurm_submission.py +5 -2
- hpcflow/tests/scripts/test_input_file_generators.py +282 -0
- hpcflow/tests/scripts/test_main_scripts.py +866 -85
- hpcflow/tests/scripts/test_non_snippet_script.py +46 -0
- hpcflow/tests/scripts/test_ouput_file_parsers.py +353 -0
- hpcflow/tests/shells/wsl/test_wsl_submission.py +12 -4
- hpcflow/tests/unit/test_action.py +262 -75
- hpcflow/tests/unit/test_action_rule.py +9 -4
- hpcflow/tests/unit/test_app.py +33 -6
- hpcflow/tests/unit/test_cache.py +46 -0
- hpcflow/tests/unit/test_cli.py +134 -1
- hpcflow/tests/unit/test_command.py +71 -54
- hpcflow/tests/unit/test_config.py +142 -16
- hpcflow/tests/unit/test_config_file.py +21 -18
- hpcflow/tests/unit/test_element.py +58 -62
- hpcflow/tests/unit/test_element_iteration.py +50 -1
- hpcflow/tests/unit/test_element_set.py +29 -19
- hpcflow/tests/unit/test_group.py +4 -2
- hpcflow/tests/unit/test_input_source.py +116 -93
- hpcflow/tests/unit/test_input_value.py +29 -24
- hpcflow/tests/unit/test_jobscript_unit.py +757 -0
- hpcflow/tests/unit/test_json_like.py +44 -35
- hpcflow/tests/unit/test_loop.py +1396 -84
- hpcflow/tests/unit/test_meta_task.py +325 -0
- hpcflow/tests/unit/test_multi_path_sequences.py +229 -0
- hpcflow/tests/unit/test_object_list.py +17 -12
- hpcflow/tests/unit/test_parameter.py +29 -7
- hpcflow/tests/unit/test_persistence.py +237 -42
- hpcflow/tests/unit/test_resources.py +20 -18
- hpcflow/tests/unit/test_run.py +117 -6
- hpcflow/tests/unit/test_run_directories.py +29 -0
- hpcflow/tests/unit/test_runtime.py +2 -1
- hpcflow/tests/unit/test_schema_input.py +23 -15
- hpcflow/tests/unit/test_shell.py +23 -2
- hpcflow/tests/unit/test_slurm.py +8 -7
- hpcflow/tests/unit/test_submission.py +38 -89
- hpcflow/tests/unit/test_task.py +352 -247
- hpcflow/tests/unit/test_task_schema.py +33 -20
- hpcflow/tests/unit/test_utils.py +9 -11
- hpcflow/tests/unit/test_value_sequence.py +15 -12
- hpcflow/tests/unit/test_workflow.py +114 -83
- hpcflow/tests/unit/test_workflow_template.py +0 -1
- hpcflow/tests/unit/utils/test_arrays.py +40 -0
- hpcflow/tests/unit/utils/test_deferred_file_writer.py +34 -0
- hpcflow/tests/unit/utils/test_hashing.py +65 -0
- hpcflow/tests/unit/utils/test_patches.py +5 -0
- hpcflow/tests/unit/utils/test_redirect_std.py +50 -0
- hpcflow/tests/workflows/__init__.py +0 -0
- hpcflow/tests/workflows/test_directory_structure.py +31 -0
- hpcflow/tests/workflows/test_jobscript.py +334 -1
- hpcflow/tests/workflows/test_run_status.py +198 -0
- hpcflow/tests/workflows/test_skip_downstream.py +696 -0
- hpcflow/tests/workflows/test_submission.py +140 -0
- hpcflow/tests/workflows/test_workflows.py +160 -15
- hpcflow/tests/workflows/test_zip.py +18 -0
- hpcflow/viz_demo.ipynb +6587 -3
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/METADATA +8 -4
- hpcflow_new2-0.2.0a199.dist-info/RECORD +221 -0
- hpcflow/sdk/core/parallel.py +0 -21
- hpcflow_new2-0.2.0a189.dist-info/RECORD +0 -158
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/LICENSE +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/WHEEL +0 -0
- {hpcflow_new2-0.2.0a189.dist-info → hpcflow_new2-0.2.0a199.dist-info}/entry_points.txt +0 -0
@@ -1,7 +1,9 @@
|
|
1
|
+
from __future__ import annotations
|
1
2
|
import copy
|
2
3
|
from dataclasses import dataclass
|
3
4
|
from pathlib import Path
|
4
5
|
from textwrap import dedent
|
6
|
+
from typing import TYPE_CHECKING
|
5
7
|
|
6
8
|
import pytest
|
7
9
|
|
@@ -11,16 +13,23 @@ from hpcflow.sdk.core.errors import (
|
|
11
13
|
WorkflowBatchUpdateFailedError,
|
12
14
|
WorkflowNotFoundError,
|
13
15
|
)
|
14
|
-
from hpcflow.sdk.core.parameters import ParameterValue
|
15
16
|
from hpcflow.sdk.core.test_utils import (
|
16
17
|
make_workflow,
|
17
18
|
P1_parameter_cls as P1,
|
18
19
|
make_test_data_YAML_workflow,
|
19
20
|
)
|
20
21
|
|
22
|
+
if TYPE_CHECKING:
|
23
|
+
from collections.abc import Iterator
|
24
|
+
from hpcflow.sdk.core.actions import Action, ActionEnvironment
|
25
|
+
from hpcflow.sdk.core.command_files import FileSpec
|
26
|
+
from hpcflow.sdk.core.parameters import Parameter
|
27
|
+
from hpcflow.sdk.core.task_schema import TaskSchema
|
28
|
+
from hpcflow.sdk.core.workflow import Workflow
|
29
|
+
|
21
30
|
|
22
31
|
@pytest.fixture
|
23
|
-
def persistent_workflow(null_config):
|
32
|
+
def persistent_workflow(null_config) -> Iterator[Workflow]:
|
24
33
|
tmp_dir = hf._ensure_user_runtime_dir().joinpath("test_data")
|
25
34
|
tmp_dir.mkdir(exist_ok=True)
|
26
35
|
wk = make_test_data_YAML_workflow("workflow_1.yaml", path=tmp_dir, overwrite=True)
|
@@ -44,16 +53,18 @@ def test_workflow_zip(persistent_workflow):
|
|
44
53
|
Path(zip_path).unlink()
|
45
54
|
|
46
55
|
|
47
|
-
def modify_workflow_metadata_on_disk(workflow):
|
56
|
+
def modify_workflow_metadata_on_disk(workflow: Workflow):
|
48
57
|
"""Make a non-sense change to the on-disk metadata."""
|
49
58
|
assert workflow.store_format == "zarr"
|
50
|
-
wk_md = workflow._store.load_metadata()
|
59
|
+
wk_md = workflow._store.load_metadata() # type: ignore
|
51
60
|
changed_md = copy.deepcopy(wk_md)
|
52
61
|
changed_md["new_key"] = "new_value"
|
53
|
-
workflow._store._get_root_group(mode="r+").attrs.put(changed_md)
|
62
|
+
workflow._store._get_root_group(mode="r+").attrs.put(changed_md) # type: ignore
|
54
63
|
|
55
64
|
|
56
|
-
def make_workflow_w1_with_config_kwargs(
|
65
|
+
def make_workflow_w1_with_config_kwargs(
|
66
|
+
config_kwargs, path, param_p1: Parameter, param_p2: Parameter
|
67
|
+
) -> Workflow:
|
57
68
|
hf.load_config(**config_kwargs)
|
58
69
|
s1 = hf.TaskSchema("ts1", actions=[], inputs=[param_p1], outputs=[param_p2])
|
59
70
|
t1 = hf.Task(schema=s1, inputs=[hf.InputValue(param_p1, 101)])
|
@@ -68,37 +79,37 @@ def null_config(tmp_path):
|
|
68
79
|
|
69
80
|
|
70
81
|
@pytest.fixture
|
71
|
-
def empty_workflow(null_config, tmp_path):
|
82
|
+
def empty_workflow(null_config, tmp_path) -> Workflow:
|
72
83
|
return hf.Workflow.from_template(hf.WorkflowTemplate(name="w1"), path=tmp_path)
|
73
84
|
|
74
85
|
|
75
86
|
@pytest.fixture
|
76
|
-
def param_p1(null_config):
|
87
|
+
def param_p1(null_config) -> Parameter:
|
77
88
|
return hf.Parameter("p1")
|
78
89
|
|
79
90
|
|
80
91
|
@pytest.fixture
|
81
|
-
def param_p1c(null_config):
|
92
|
+
def param_p1c(null_config) -> Parameter:
|
82
93
|
return hf.Parameter("p1c")
|
83
94
|
|
84
95
|
|
85
96
|
@pytest.fixture
|
86
|
-
def param_p2():
|
97
|
+
def param_p2() -> Parameter:
|
87
98
|
return hf.Parameter("p2")
|
88
99
|
|
89
100
|
|
90
101
|
@pytest.fixture
|
91
|
-
def param_p3(null_config):
|
102
|
+
def param_p3(null_config) -> Parameter:
|
92
103
|
return hf.Parameter("p3")
|
93
104
|
|
94
105
|
|
95
106
|
@pytest.fixture
|
96
|
-
def act_env_1(null_config):
|
107
|
+
def act_env_1(null_config) -> ActionEnvironment:
|
97
108
|
return hf.ActionEnvironment("env_1")
|
98
109
|
|
99
110
|
|
100
111
|
@pytest.fixture
|
101
|
-
def act_1(null_config, act_env_1):
|
112
|
+
def act_1(null_config, act_env_1: ActionEnvironment) -> Action:
|
102
113
|
return hf.Action(
|
103
114
|
commands=[hf.Command("<<parameter:p1>>")],
|
104
115
|
environments=[act_env_1],
|
@@ -106,7 +117,7 @@ def act_1(null_config, act_env_1):
|
|
106
117
|
|
107
118
|
|
108
119
|
@pytest.fixture
|
109
|
-
def act_2(null_config, act_env_1):
|
120
|
+
def act_2(null_config, act_env_1: ActionEnvironment) -> Action:
|
110
121
|
return hf.Action(
|
111
122
|
commands=[hf.Command("<<parameter:p2>> <<parameter:p3>>")],
|
112
123
|
environments=[act_env_1],
|
@@ -116,12 +127,17 @@ def act_2(null_config, act_env_1):
|
|
116
127
|
@pytest.fixture
|
117
128
|
def file_spec_fs1(
|
118
129
|
null_config,
|
119
|
-
):
|
130
|
+
) -> FileSpec:
|
120
131
|
return hf.FileSpec(label="file1", name="file1.txt")
|
121
132
|
|
122
133
|
|
123
134
|
@pytest.fixture
|
124
|
-
def act_3(
|
135
|
+
def act_3(
|
136
|
+
null_config,
|
137
|
+
act_env_1: ActionEnvironment,
|
138
|
+
param_p2: Parameter,
|
139
|
+
file_spec_fs1: FileSpec,
|
140
|
+
) -> Action:
|
125
141
|
return hf.Action(
|
126
142
|
commands=[hf.Command("<<parameter:p1>>")],
|
127
143
|
output_file_parsers=[
|
@@ -132,25 +148,29 @@ def act_3(null_config, act_env_1, param_p2, file_spec_fs1):
|
|
132
148
|
|
133
149
|
|
134
150
|
@pytest.fixture
|
135
|
-
def schema_s1(null_config, param_p1, act_1):
|
151
|
+
def schema_s1(null_config, param_p1: Parameter, act_1: Action) -> TaskSchema:
|
136
152
|
return hf.TaskSchema("ts1", actions=[act_1], inputs=[param_p1])
|
137
153
|
|
138
154
|
|
139
155
|
@pytest.fixture
|
140
|
-
def schema_s2(
|
156
|
+
def schema_s2(
|
157
|
+
null_config, param_p2: Parameter, param_p3: Parameter, act_2: Action
|
158
|
+
) -> TaskSchema:
|
141
159
|
return hf.TaskSchema("ts2", actions=[act_2], inputs=[param_p2, param_p3])
|
142
160
|
|
143
161
|
|
144
162
|
@pytest.fixture
|
145
|
-
def schema_s3(
|
163
|
+
def schema_s3(
|
164
|
+
null_config, param_p1: Parameter, param_p2: Parameter, act_3: Action
|
165
|
+
) -> TaskSchema:
|
146
166
|
return hf.TaskSchema("ts1", actions=[act_3], inputs=[param_p1], outputs=[param_p2])
|
147
167
|
|
148
168
|
|
149
169
|
@pytest.fixture
|
150
170
|
def schema_s4(
|
151
171
|
null_config,
|
152
|
-
param_p1,
|
153
|
-
):
|
172
|
+
param_p1: Parameter,
|
173
|
+
) -> TaskSchema:
|
154
174
|
return hf.TaskSchema(
|
155
175
|
objective="t1",
|
156
176
|
inputs=[hf.SchemaInput(parameter=param_p1)],
|
@@ -166,8 +186,8 @@ def schema_s4(
|
|
166
186
|
@pytest.fixture
|
167
187
|
def schema_s4c(
|
168
188
|
null_config,
|
169
|
-
param_p1c,
|
170
|
-
):
|
189
|
+
param_p1c: Parameter,
|
190
|
+
) -> TaskSchema:
|
171
191
|
return hf.TaskSchema(
|
172
192
|
objective="t1",
|
173
193
|
inputs=[hf.SchemaInput(parameter=param_p1c)],
|
@@ -181,41 +201,47 @@ def schema_s4c(
|
|
181
201
|
|
182
202
|
|
183
203
|
@pytest.fixture
|
184
|
-
def workflow_w1(
|
204
|
+
def workflow_w1(
|
205
|
+
null_config, tmp_path: Path, schema_s3: TaskSchema, param_p1: Parameter
|
206
|
+
) -> Workflow:
|
185
207
|
t1 = hf.Task(schema=schema_s3, inputs=[hf.InputValue(param_p1, 101)])
|
186
208
|
wkt = hf.WorkflowTemplate(name="w1", tasks=[t1])
|
187
209
|
return hf.Workflow.from_template(wkt, path=tmp_path)
|
188
210
|
|
189
211
|
|
190
|
-
def test_make_empty_workflow(null_config, empty_workflow):
|
212
|
+
def test_make_empty_workflow(null_config, empty_workflow: Workflow):
|
191
213
|
assert empty_workflow.path is not None
|
192
214
|
|
193
215
|
|
194
|
-
def test_raise_on_missing_workflow(null_config, tmp_path):
|
216
|
+
def test_raise_on_missing_workflow(null_config, tmp_path: Path):
|
195
217
|
with pytest.raises(WorkflowNotFoundError):
|
196
218
|
hf.Workflow(tmp_path)
|
197
219
|
|
198
220
|
|
199
|
-
def test_add_empty_task(empty_workflow, schema_s1):
|
221
|
+
def test_add_empty_task(empty_workflow: Workflow, schema_s1: TaskSchema):
|
200
222
|
t1 = hf.Task(schema=schema_s1)
|
201
223
|
wk_t1 = empty_workflow._add_empty_task(t1)
|
202
224
|
assert len(empty_workflow.tasks) == 1 and wk_t1.index == 0 and wk_t1.name == "ts1"
|
203
225
|
|
204
226
|
|
205
|
-
def test_raise_on_missing_inputs_add_first_task(
|
227
|
+
def test_raise_on_missing_inputs_add_first_task(
|
228
|
+
empty_workflow: Workflow, schema_s1: TaskSchema, param_p1: Parameter
|
229
|
+
):
|
206
230
|
t1 = hf.Task(schema=schema_s1)
|
207
231
|
with pytest.raises(MissingInputs) as exc_info:
|
208
232
|
empty_workflow.add_task(t1)
|
209
233
|
|
210
|
-
assert exc_info.value.missing_inputs ==
|
234
|
+
assert exc_info.value.missing_inputs == (param_p1.typ,)
|
211
235
|
|
212
236
|
|
213
|
-
def test_raise_on_missing_inputs_add_second_task(
|
237
|
+
def test_raise_on_missing_inputs_add_second_task(
|
238
|
+
workflow_w1: Workflow, schema_s2: TaskSchema, param_p3: Parameter
|
239
|
+
):
|
214
240
|
t2 = hf.Task(schema=schema_s2)
|
215
241
|
with pytest.raises(MissingInputs) as exc_info:
|
216
242
|
workflow_w1.add_task(t2)
|
217
243
|
|
218
|
-
assert exc_info.value.missing_inputs ==
|
244
|
+
assert exc_info.value.missing_inputs == (param_p3.typ,) # p2 comes from existing task
|
219
245
|
|
220
246
|
|
221
247
|
@pytest.mark.skip(reason="TODO: Not implemented.")
|
@@ -301,42 +327,45 @@ def test_WorkflowTemplate_from_YAML_string_with_and_without_element_sets_equival
|
|
301
327
|
assert wkt_1 == wkt_2
|
302
328
|
|
303
329
|
|
304
|
-
def test_store_has_pending_during_add_task(
|
330
|
+
def test_store_has_pending_during_add_task(
|
331
|
+
workflow_w1: Workflow, schema_s2: TaskSchema, param_p3: Parameter
|
332
|
+
):
|
305
333
|
t2 = hf.Task(schema=schema_s2, inputs=[hf.InputValue(param_p3, 301)])
|
306
334
|
with workflow_w1.batch_update():
|
307
335
|
workflow_w1.add_task(t2)
|
308
336
|
assert workflow_w1._store.has_pending
|
309
337
|
|
310
338
|
|
311
|
-
def test_empty_batch_update_does_nothing(workflow_w1):
|
339
|
+
def test_empty_batch_update_does_nothing(workflow_w1: Workflow):
|
312
340
|
with workflow_w1.batch_update():
|
313
341
|
assert not workflow_w1._store.has_pending
|
314
342
|
|
315
343
|
|
316
344
|
@pytest.mark.skip("need to re-implement `is_modified_on_disk`")
|
317
|
-
def test_is_modified_on_disk_when_metadata_changed(workflow_w1):
|
345
|
+
def test_is_modified_on_disk_when_metadata_changed(workflow_w1: Workflow):
|
318
346
|
# this is ZarrPersistentStore-specific; might want to consider a refactor later
|
319
347
|
with workflow_w1._store.cached_load():
|
320
348
|
modify_workflow_metadata_on_disk(workflow_w1)
|
321
|
-
assert workflow_w1._store.is_modified_on_disk()
|
349
|
+
assert workflow_w1._store.is_modified_on_disk() # type: ignore
|
322
350
|
|
323
351
|
|
324
352
|
@pytest.mark.skip("need to re-implement `is_modified_on_disk`")
|
325
|
-
def test_batch_update_abort_if_modified_on_disk(
|
353
|
+
def test_batch_update_abort_if_modified_on_disk(
|
354
|
+
workflow_w1: Workflow, schema_s2: TaskSchema, param_p3: Parameter
|
355
|
+
):
|
326
356
|
t2 = hf.Task(schema=schema_s2, inputs=[hf.InputValue(param_p3, 301)])
|
327
357
|
with pytest.raises(WorkflowBatchUpdateFailedError):
|
328
|
-
with workflow_w1._store.cached_load():
|
329
|
-
|
330
|
-
|
331
|
-
modify_workflow_metadata_on_disk(workflow_w1)
|
358
|
+
with workflow_w1._store.cached_load(), workflow_w1.batch_update():
|
359
|
+
workflow_w1.add_task(t2)
|
360
|
+
modify_workflow_metadata_on_disk(workflow_w1)
|
332
361
|
|
333
362
|
|
334
|
-
def test_closest_task_input_source_chosen(null_config, tmp_path):
|
363
|
+
def test_closest_task_input_source_chosen(null_config, tmp_path: Path):
|
335
364
|
wk = make_workflow(
|
336
365
|
schemas_spec=[
|
337
|
-
|
338
|
-
|
339
|
-
|
366
|
+
({"p1": None}, ("p1",), "t1"),
|
367
|
+
({"p1": None}, ("p1",), "t2"),
|
368
|
+
({"p1": None}, ("p1",), "t3"),
|
340
369
|
],
|
341
370
|
local_inputs={0: ("p1",)},
|
342
371
|
path=tmp_path,
|
@@ -366,10 +395,10 @@ def test_WorkflowTemplate_from_JSON_string_without_element_sets(null_config):
|
|
366
395
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
367
396
|
def test_equivalent_element_input_parameter_value_class_and_kwargs(
|
368
397
|
null_config,
|
369
|
-
tmp_path,
|
370
|
-
store,
|
371
|
-
schema_s4c,
|
372
|
-
param_p1c,
|
398
|
+
tmp_path: Path,
|
399
|
+
store: str,
|
400
|
+
schema_s4c: TaskSchema,
|
401
|
+
param_p1c: Parameter,
|
373
402
|
):
|
374
403
|
a_value = 101
|
375
404
|
t1_1 = hf.Task(
|
@@ -395,10 +424,10 @@ def test_equivalent_element_input_parameter_value_class_and_kwargs(
|
|
395
424
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
396
425
|
def test_equivalent_element_input_parameter_value_class_method_and_kwargs(
|
397
426
|
null_config,
|
398
|
-
tmp_path,
|
399
|
-
store,
|
400
|
-
schema_s4c,
|
401
|
-
param_p1c,
|
427
|
+
tmp_path: Path,
|
428
|
+
store: str,
|
429
|
+
schema_s4c: TaskSchema,
|
430
|
+
param_p1c: Parameter,
|
402
431
|
):
|
403
432
|
b_val = 50
|
404
433
|
c_val = 51
|
@@ -432,7 +461,7 @@ def test_equivalent_element_input_parameter_value_class_method_and_kwargs(
|
|
432
461
|
|
433
462
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
434
463
|
def test_input_value_class_expected_value(
|
435
|
-
null_config, tmp_path, store, schema_s4c, param_p1c
|
464
|
+
null_config, tmp_path: Path, store: str, schema_s4c: TaskSchema, param_p1c: Parameter
|
436
465
|
):
|
437
466
|
a_value = 101
|
438
467
|
t1_value_exp = P1(a=a_value)
|
@@ -459,7 +488,7 @@ def test_input_value_class_expected_value(
|
|
459
488
|
|
460
489
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
461
490
|
def test_input_value_class_method_expected_value(
|
462
|
-
null_config, tmp_path, store, schema_s4c, param_p1c
|
491
|
+
null_config, tmp_path: Path, store: str, schema_s4c: TaskSchema, param_p1c: Parameter
|
463
492
|
):
|
464
493
|
b_val = 50
|
465
494
|
c_val = 51
|
@@ -493,10 +522,10 @@ def test_input_value_class_method_expected_value(
|
|
493
522
|
|
494
523
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
495
524
|
def test_equivalent_element_input_sequence_parameter_value_class_and_kwargs(
|
496
|
-
null_config, tmp_path, store, schema_s4c
|
525
|
+
null_config, tmp_path: Path, store: str, schema_s4c: TaskSchema
|
497
526
|
):
|
498
527
|
data = {"a": 101}
|
499
|
-
obj = P1(**data)
|
528
|
+
obj = P1(**data) # type: ignore[arg-type] # python/mypy#15317
|
500
529
|
t1_1 = hf.Task(
|
501
530
|
schema=[schema_s4c],
|
502
531
|
sequences=[hf.ValueSequence(path="inputs.p1c", values=[obj], nesting_order=0)],
|
@@ -518,7 +547,7 @@ def test_equivalent_element_input_sequence_parameter_value_class_and_kwargs(
|
|
518
547
|
|
519
548
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
520
549
|
def test_equivalent_element_input_sequence_parameter_value_class_method_and_kwargs(
|
521
|
-
null_config, tmp_path, store, schema_s4c
|
550
|
+
null_config, tmp_path: Path, store: str, schema_s4c: TaskSchema
|
522
551
|
):
|
523
552
|
data = {"b": 50, "c": 51}
|
524
553
|
obj = P1.from_data(**data)
|
@@ -549,9 +578,11 @@ def test_equivalent_element_input_sequence_parameter_value_class_method_and_kwar
|
|
549
578
|
|
550
579
|
|
551
580
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
552
|
-
def test_sequence_value_class_expected_value(
|
581
|
+
def test_sequence_value_class_expected_value(
|
582
|
+
null_config, tmp_path: Path, store: str, schema_s4c: TaskSchema
|
583
|
+
):
|
553
584
|
data = {"a": 101}
|
554
|
-
obj = P1(**data)
|
585
|
+
obj = P1(**data) # type: ignore[arg-type] # python/mypy#15317
|
555
586
|
t1_1 = hf.Task(
|
556
587
|
schema=[schema_s4c],
|
557
588
|
sequences=[hf.ValueSequence(path="inputs.p1c", values=[obj], nesting_order=0)],
|
@@ -574,7 +605,7 @@ def test_sequence_value_class_expected_value(null_config, tmp_path, store, schem
|
|
574
605
|
|
575
606
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
576
607
|
def test_sequence_value_class_method_expected_value(
|
577
|
-
null_config, tmp_path, store, schema_s4c
|
608
|
+
null_config, tmp_path: Path, store: str, schema_s4c: TaskSchema
|
578
609
|
):
|
579
610
|
data = {"b": 50, "c": 51}
|
580
611
|
obj = P1.from_data(**data)
|
@@ -607,7 +638,7 @@ def test_sequence_value_class_method_expected_value(
|
|
607
638
|
|
608
639
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
609
640
|
def test_expected_element_input_parameter_value_class_merge_sequence(
|
610
|
-
null_config, tmp_path, store, schema_s4c, param_p1c
|
641
|
+
null_config, tmp_path: Path, store: str, schema_s4c: TaskSchema, param_p1c: Parameter
|
611
642
|
):
|
612
643
|
a_val = 101
|
613
644
|
d_val = 201
|
@@ -632,7 +663,7 @@ def test_expected_element_input_parameter_value_class_merge_sequence(
|
|
632
663
|
|
633
664
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
634
665
|
def test_expected_element_input_parameter_value_class_method_merge_sequence(
|
635
|
-
null_config, tmp_path, store, schema_s4c, param_p1c
|
666
|
+
null_config, tmp_path: Path, store: str, schema_s4c: TaskSchema, param_p1c: Parameter
|
636
667
|
):
|
637
668
|
b_val = 50
|
638
669
|
c_val = 51
|
@@ -661,14 +692,10 @@ def test_expected_element_input_parameter_value_class_method_merge_sequence(
|
|
661
692
|
|
662
693
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
663
694
|
def test_upstream_input_source_merge_with_current_input_modification(
|
664
|
-
null_config, tmp_path, store
|
695
|
+
null_config, tmp_path: Path, store: str, param_p2: Parameter
|
665
696
|
):
|
666
|
-
s1 = hf.TaskSchema(
|
667
|
-
|
668
|
-
)
|
669
|
-
s2 = hf.TaskSchema(
|
670
|
-
objective="t2", inputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))]
|
671
|
-
)
|
697
|
+
s1 = hf.TaskSchema(objective="t1", inputs=[hf.SchemaInput(parameter=param_p2)])
|
698
|
+
s2 = hf.TaskSchema(objective="t2", inputs=[hf.SchemaInput(parameter=param_p2)])
|
672
699
|
tasks = [
|
673
700
|
hf.Task(schema=s1, inputs=[hf.InputValue("p2", {"a": 101})]),
|
674
701
|
hf.Task(schema=s2, inputs=[hf.InputValue("p2", value=102, path="b")]),
|
@@ -679,17 +706,17 @@ def test_upstream_input_source_merge_with_current_input_modification(
|
|
679
706
|
template_name="temp",
|
680
707
|
store=store,
|
681
708
|
)
|
682
|
-
|
709
|
+
p2 = wk.tasks[1].elements[0].inputs.p2
|
710
|
+
assert isinstance(p2, hf.ElementParameter)
|
711
|
+
assert p2.value == {"a": 101, "b": 102}
|
683
712
|
|
684
713
|
|
685
714
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
686
|
-
def test_upstream_input_source_with_sub_parameter(
|
687
|
-
|
688
|
-
|
689
|
-
)
|
690
|
-
s2 = hf.TaskSchema(
|
691
|
-
objective="t2", inputs=[hf.SchemaInput(parameter=hf.Parameter("p2"))]
|
692
|
-
)
|
715
|
+
def test_upstream_input_source_with_sub_parameter(
|
716
|
+
null_config, tmp_path: Path, store: str, param_p2: Parameter
|
717
|
+
):
|
718
|
+
s1 = hf.TaskSchema(objective="t1", inputs=[hf.SchemaInput(parameter=param_p2)])
|
719
|
+
s2 = hf.TaskSchema(objective="t2", inputs=[hf.SchemaInput(parameter=param_p2)])
|
693
720
|
tasks = [
|
694
721
|
hf.Task(
|
695
722
|
schema=s1,
|
@@ -706,11 +733,13 @@ def test_upstream_input_source_with_sub_parameter(null_config, tmp_path, store):
|
|
706
733
|
template_name="temp",
|
707
734
|
store=store,
|
708
735
|
)
|
709
|
-
|
736
|
+
p2 = wk.tasks[1].elements[0].inputs.p2
|
737
|
+
assert isinstance(p2, hf.ElementParameter)
|
738
|
+
assert p2.value == {"a": 101, "b": 102}
|
710
739
|
|
711
740
|
|
712
741
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
713
|
-
def test_from_template_data_workflow_reload(null_config, tmp_path, store):
|
742
|
+
def test_from_template_data_workflow_reload(null_config, tmp_path: Path, store: str):
|
714
743
|
wk_name = "temp"
|
715
744
|
t1 = hf.Task(schema=hf.task_schemas.test_t1_ps, inputs=[hf.InputValue("p1", 101)])
|
716
745
|
wk = hf.Workflow.from_template_data(
|
@@ -727,7 +756,7 @@ def test_from_template_data_workflow_reload(null_config, tmp_path, store):
|
|
727
756
|
|
728
757
|
|
729
758
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
730
|
-
def test_from_template_workflow_reload(null_config, tmp_path, store):
|
759
|
+
def test_from_template_workflow_reload(null_config, tmp_path: Path, store: str):
|
731
760
|
wk_name = "temp"
|
732
761
|
t1 = hf.Task(schema=hf.task_schemas.test_t1_ps, inputs=[hf.InputValue("p1", 101)])
|
733
762
|
wkt = hf.WorkflowTemplate(name=wk_name, tasks=[t1])
|
@@ -744,7 +773,7 @@ def test_from_template_workflow_reload(null_config, tmp_path, store):
|
|
744
773
|
|
745
774
|
|
746
775
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
747
|
-
def test_from_YAML_str_template_workflow_reload(null_config, tmp_path, store):
|
776
|
+
def test_from_YAML_str_template_workflow_reload(null_config, tmp_path: Path, store: str):
|
748
777
|
yaml_str = dedent(
|
749
778
|
"""
|
750
779
|
name: temp
|
@@ -767,7 +796,7 @@ def test_from_YAML_str_template_workflow_reload(null_config, tmp_path, store):
|
|
767
796
|
|
768
797
|
|
769
798
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
770
|
-
def test_from_template_workflow_add_task_reload(null_config, tmp_path, store):
|
799
|
+
def test_from_template_workflow_add_task_reload(null_config, tmp_path: Path, store: str):
|
771
800
|
wk_name = "temp"
|
772
801
|
t1 = hf.Task(schema=hf.task_schemas.test_t1_ps, inputs=[hf.InputValue("p1", 101)])
|
773
802
|
wkt = hf.WorkflowTemplate(name=wk_name)
|
@@ -785,7 +814,9 @@ def test_from_template_workflow_add_task_reload(null_config, tmp_path, store):
|
|
785
814
|
|
786
815
|
|
787
816
|
@pytest.mark.parametrize("store", ["json", "zarr"])
|
788
|
-
def test_batch_update_mode_false_after_empty_workflow_init(
|
817
|
+
def test_batch_update_mode_false_after_empty_workflow_init(
|
818
|
+
null_config, tmp_path: Path, store: str
|
819
|
+
):
|
789
820
|
wk_name = "temp"
|
790
821
|
wk = hf.Workflow.from_template_data(
|
791
822
|
tasks=[],
|
@@ -0,0 +1,40 @@
|
|
1
|
+
import numpy as np
|
2
|
+
|
3
|
+
from hpcflow.sdk.utils.arrays import get_1D_idx, get_2D_idx, split_arr
|
4
|
+
|
5
|
+
|
6
|
+
def test_get_2D_idx():
|
7
|
+
assert get_2D_idx(0, num_cols=10) == (0, 0)
|
8
|
+
assert get_2D_idx(4, num_cols=10) == (0, 4)
|
9
|
+
assert get_2D_idx(9, num_cols=10) == (0, 9)
|
10
|
+
assert get_2D_idx(10, num_cols=10) == (1, 0)
|
11
|
+
assert get_2D_idx(13, num_cols=10) == (1, 3)
|
12
|
+
assert get_2D_idx(20, num_cols=10) == (2, 0)
|
13
|
+
arr_r, arr_c = get_2D_idx(np.array([0, 4, 9, 10, 13, 20]), num_cols=10)
|
14
|
+
assert np.array_equal(arr_r, np.array([0, 0, 0, 1, 1, 2]))
|
15
|
+
assert np.array_equal(arr_c, np.array([0, 4, 9, 0, 3, 0]))
|
16
|
+
|
17
|
+
|
18
|
+
def test_get_1D_idx():
|
19
|
+
assert get_1D_idx(*(0, 0), num_cols=10) == 0
|
20
|
+
assert get_1D_idx(*(0, 4), num_cols=10) == 4
|
21
|
+
assert get_1D_idx(*(0, 9), num_cols=10) == 9
|
22
|
+
assert get_1D_idx(*(1, 0), num_cols=10) == 10
|
23
|
+
assert get_1D_idx(*(1, 3), num_cols=10) == 13
|
24
|
+
assert get_1D_idx(*(2, 0), num_cols=10) == 20
|
25
|
+
|
26
|
+
assert np.array_equal(
|
27
|
+
get_1D_idx(
|
28
|
+
np.array([0, 0, 0, 1, 1, 2]), np.array([0, 4, 9, 0, 3, 0]), num_cols=10
|
29
|
+
),
|
30
|
+
np.array([0, 4, 9, 10, 13, 20]),
|
31
|
+
)
|
32
|
+
|
33
|
+
|
34
|
+
def test_split_arr():
    """A packed 1D array splits into per-section (metadata, data) array pairs."""
    packed = np.array([4, 0, 1, 2, 3, 4, 1, 4, 5, 6])
    sections = split_arr(packed, metadata_size=1)
    assert len(sections) == 2
    first, second = sections
    assert np.array_equal(first[0], np.array([0]))
    assert np.array_equal(first[1], np.array([1, 2, 3]))
    assert np.array_equal(second[0], np.array([1]))
    assert np.array_equal(second[1], np.array([4, 5, 6]))
|
@@ -0,0 +1,34 @@
|
|
1
|
+
from hpcflow.sdk.utils.deferred_file import DeferredFileWriter
|
2
|
+
|
3
|
+
|
4
|
+
def test_file_not_created(tmp_path):
    """In write mode with no writes, the underlying file is never opened or created."""
    target = tmp_path / "test.txt"
    assert not target.is_file()
    with DeferredFileWriter(target, mode="w") as writer:
        # No write has happened yet, so opening is still deferred.
        assert not writer._is_open
    assert not target.is_file()
|
10
|
+
|
11
|
+
|
12
|
+
def test_append_file_not_opened(tmp_path):
    """In append mode with no writes, the underlying file is never opened or created."""
    target = tmp_path / "test.txt"
    with DeferredFileWriter(target, mode="a") as writer:
        # Opening is deferred until the first write, even for append mode.
        assert not writer._is_open
    assert not target.is_file()
|
17
|
+
|
18
|
+
|
19
|
+
def test_file_created_write(tmp_path):
    """The first `write` call opens (and thus creates) the underlying file."""
    target = tmp_path / "test.txt"
    assert not target.is_file()
    with DeferredFileWriter(target, mode="w") as writer:
        writer.write("contents\n")
        # Writing triggers the deferred open.
        assert writer._is_open
    assert target.is_file()
|
26
|
+
|
27
|
+
|
28
|
+
def test_file_created_writelines(tmp_path):
    """The first `writelines` call opens (and thus creates) the underlying file."""
    target = tmp_path / "test.txt"
    assert not target.is_file()
    with DeferredFileWriter(target, mode="w") as writer:
        writer.writelines(["contents\n"])
        # writelines must trigger the deferred open just like write.
        assert writer._is_open
    assert target.is_file()
|
@@ -0,0 +1,65 @@
|
|
1
|
+
from hpcflow.sdk.utils.hashing import get_hash
|
2
|
+
|
3
|
+
|
4
|
+
def test_get_hash_simple_types_is_int():
    """get_hash returns an int for scalar builtin values."""
    for value in (1, 3.2, "a", "abc"):
        assert isinstance(get_hash(value), int)
|
9
|
+
|
10
|
+
|
11
|
+
def test_get_hash_compound_types_is_int():
    """get_hash returns an int for builtin container values."""
    samples = (
        [1, 2, 3],
        (1, 2, 3),
        {1, 2, 3},
        {"a": 1, "b": 2, "c": 3},
    )
    for value in samples:
        assert isinstance(get_hash(value), int)
|
16
|
+
|
17
|
+
|
18
|
+
def test_get_hash_nested_dict_is_int():
    """get_hash returns an int for deeply nested mixed containers."""
    nested = {"a": {"b": {"c": [1, 2, 3, ("4", 5, 6)]}}}
    assert isinstance(get_hash(nested), int)
|
20
|
+
|
21
|
+
|
22
|
+
def test_get_hash_distinct_simple_types():
    """Unequal scalar values must hash to different values."""
    unequal_pairs = [(1, 2), (2.2, 2.3), ("a", "b"), ("abc", "ABC")]
    for left, right in unequal_pairs:
        assert get_hash(left) != get_hash(right)
|
27
|
+
|
28
|
+
|
29
|
+
def test_get_hash_distinct_compound_types():
    """Compound values that differ in a single element must hash differently."""
    assert get_hash([1, 2, 3]) != get_hash([1, 2, 4])
    assert get_hash((1, 2, 3)) != get_hash((1, 2, 4))
    assert get_hash({1, 2, 3}) != get_hash({1, 2, 4})
    assert get_hash({"a": 1, "b": 2, "c": 3}) != get_hash({"a": 1, "b": 2, "c": 4})
    # A single differing leaf deep inside a nested structure must change the
    # hash. (Previously this line asserted *equality* of two identical nested
    # dicts, which duplicated `test_get_hash_equal_compound_types` and tested
    # nothing about distinctness.)
    assert get_hash({"a": {"b": {"c": [1, 2, 3, ("4", 5, 7)]}}}) != get_hash(
        {"a": {"b": {"c": [1, 2, 3, ("4", 5, 6)]}}}
    )
    # Different types wrapping the same leaf value must also hash differently.
    assert get_hash({"a": 1}) != get_hash(1) != get_hash("a")
|
38
|
+
|
39
|
+
|
40
|
+
def test_get_hash_equal_simple_types():
    """Equal scalar values must hash identically."""
    for value in (1, 2.2, "a", "abc"):
        assert get_hash(value) == get_hash(value)
|
45
|
+
|
46
|
+
|
47
|
+
def test_get_hash_equal_compound_types():
    """Structurally equal containers must hash identically."""
    samples = (
        [1, 2, 3],
        (1, 2, 3),
        {1, 2, 3},
        {"a": 1, "b": 2, "c": 3},
        {"a": {"b": {"c": [1, 2, 3, ("4", 5, 6)]}}},
    )
    for value in samples:
        # Hash separately-constructed but equal structures, not the same object.
        assert get_hash(value) == get_hash(copy.deepcopy(value))
|
55
|
+
|
56
|
+
|
57
|
+
def test_get_hash_order_insensitivity():
    """Unordered containers (dict, set) hash the same regardless of item order."""
    assert get_hash({"a": 1, "b": 2}) == get_hash({"b": 2, "a": 1})
    assert get_hash({1, 2, 3}) == get_hash({2, 3, 1})
|
60
|
+
|
61
|
+
|
62
|
+
def test_get_hash_order_sensitivity():
    """Ordered containers (list, tuple, str) hash differently when reordered."""
    ordered_pairs = [
        ([1, 2, 3], [2, 3, 1]),
        ((1, 2, 3), (2, 3, 1)),
        ("abc", "cba"),
    ]
    for forward, shuffled in ordered_pairs:
        assert get_hash(forward) != get_hash(shuffled)
|