floodmodeller-api 0.4.2.post1__py3-none-any.whl → 0.4.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- floodmodeller_api/__init__.py +8 -9
- floodmodeller_api/_base.py +169 -176
- floodmodeller_api/backup.py +273 -273
- floodmodeller_api/dat.py +889 -831
- floodmodeller_api/diff.py +136 -119
- floodmodeller_api/ied.py +302 -306
- floodmodeller_api/ief.py +553 -637
- floodmodeller_api/ief_flags.py +253 -253
- floodmodeller_api/inp.py +260 -266
- floodmodeller_api/libs/libifcoremd.dll +0 -0
- floodmodeller_api/libs/libifcoremt.so.5 +0 -0
- floodmodeller_api/libs/libifport.so.5 +0 -0
- floodmodeller_api/{libmmd.dll → libs/libimf.so} +0 -0
- floodmodeller_api/libs/libintlc.so.5 +0 -0
- floodmodeller_api/libs/libmmd.dll +0 -0
- floodmodeller_api/libs/libsvml.so +0 -0
- floodmodeller_api/libs/libzzn_read.so +0 -0
- floodmodeller_api/libs/zzn_read.dll +0 -0
- floodmodeller_api/logs/__init__.py +2 -2
- floodmodeller_api/logs/lf.py +364 -312
- floodmodeller_api/logs/lf_helpers.py +354 -352
- floodmodeller_api/logs/lf_params.py +643 -529
- floodmodeller_api/mapping.py +84 -0
- floodmodeller_api/test/__init__.py +4 -4
- floodmodeller_api/test/conftest.py +16 -8
- floodmodeller_api/test/test_backup.py +117 -117
- floodmodeller_api/test/test_conveyance.py +107 -0
- floodmodeller_api/test/test_dat.py +222 -92
- floodmodeller_api/test/test_data/All Units 4_6.DAT +1081 -1081
- floodmodeller_api/test/test_data/All Units 4_6.feb +1081 -1081
- floodmodeller_api/test/test_data/BRIDGE.DAT +926 -926
- floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.dat +36 -36
- floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.feb +36 -36
- floodmodeller_api/test/test_data/DamBreakADI.xml +52 -52
- floodmodeller_api/test/test_data/DamBreakFAST.xml +58 -58
- floodmodeller_api/test/test_data/DamBreakFAST_dy.xml +53 -53
- floodmodeller_api/test/test_data/DamBreakTVD.xml +55 -55
- floodmodeller_api/test/test_data/DefenceBreach.xml +53 -53
- floodmodeller_api/test/test_data/DefenceBreachFAST.xml +60 -60
- floodmodeller_api/test/test_data/DefenceBreachFAST_dy.xml +55 -55
- floodmodeller_api/test/test_data/Domain1+2_QH.xml +76 -76
- floodmodeller_api/test/test_data/Domain1_H.xml +41 -41
- floodmodeller_api/test/test_data/Domain1_Q.xml +41 -41
- floodmodeller_api/test/test_data/Domain1_Q_FAST.xml +48 -48
- floodmodeller_api/test/test_data/Domain1_Q_FAST_dy.xml +48 -48
- floodmodeller_api/test/test_data/Domain1_Q_xml_expected.json +263 -0
- floodmodeller_api/test/test_data/Domain1_W.xml +41 -41
- floodmodeller_api/test/test_data/EX1.DAT +321 -321
- floodmodeller_api/test/test_data/EX1.ext +107 -107
- floodmodeller_api/test/test_data/EX1.feb +320 -320
- floodmodeller_api/test/test_data/EX1.gxy +107 -107
- floodmodeller_api/test/test_data/EX17.DAT +421 -422
- floodmodeller_api/test/test_data/EX17.ext +213 -213
- floodmodeller_api/test/test_data/EX17.feb +422 -422
- floodmodeller_api/test/test_data/EX18.DAT +375 -375
- floodmodeller_api/test/test_data/EX18_DAT_expected.json +3876 -0
- floodmodeller_api/test/test_data/EX2.DAT +302 -302
- floodmodeller_api/test/test_data/EX3.DAT +926 -926
- floodmodeller_api/test/test_data/EX3_DAT_expected.json +16235 -0
- floodmodeller_api/test/test_data/EX3_IEF_expected.json +61 -0
- floodmodeller_api/test/test_data/EX6.DAT +2084 -2084
- floodmodeller_api/test/test_data/EX6.ext +532 -532
- floodmodeller_api/test/test_data/EX6.feb +2084 -2084
- floodmodeller_api/test/test_data/EX6_DAT_expected.json +31647 -0
- floodmodeller_api/test/test_data/Event Data Example.DAT +336 -336
- floodmodeller_api/test/test_data/Event Data Example.ext +107 -107
- floodmodeller_api/test/test_data/Event Data Example.feb +336 -336
- floodmodeller_api/test/test_data/Linked1D2D.xml +52 -52
- floodmodeller_api/test/test_data/Linked1D2DFAST.xml +53 -53
- floodmodeller_api/test/test_data/Linked1D2DFAST_dy.xml +48 -48
- floodmodeller_api/test/test_data/Linked1D2D_xml_expected.json +313 -0
- floodmodeller_api/test/test_data/blockage.dat +50 -50
- floodmodeller_api/test/test_data/blockage.ext +45 -45
- floodmodeller_api/test/test_data/blockage.feb +9 -9
- floodmodeller_api/test/test_data/blockage.gxy +71 -71
- floodmodeller_api/test/test_data/conveyance_test.dat +165 -0
- floodmodeller_api/test/test_data/conveyance_test.feb +116 -0
- floodmodeller_api/test/test_data/conveyance_test.gxy +85 -0
- floodmodeller_api/test/test_data/defaultUnits.dat +127 -127
- floodmodeller_api/test/test_data/defaultUnits.ext +45 -45
- floodmodeller_api/test/test_data/defaultUnits.feb +9 -9
- floodmodeller_api/test/test_data/defaultUnits.fmpx +58 -58
- floodmodeller_api/test/test_data/defaultUnits.gxy +85 -85
- floodmodeller_api/test/test_data/ex3.ief +20 -20
- floodmodeller_api/test/test_data/ex3.lf1 +2800 -2800
- floodmodeller_api/test/test_data/ex4.DAT +1374 -1374
- floodmodeller_api/test/test_data/ex4_changed.DAT +1374 -1374
- floodmodeller_api/test/test_data/example1.inp +329 -329
- floodmodeller_api/test/test_data/example2.inp +158 -158
- floodmodeller_api/test/test_data/example3.inp +297 -297
- floodmodeller_api/test/test_data/example4.inp +388 -388
- floodmodeller_api/test/test_data/example5.inp +147 -147
- floodmodeller_api/test/test_data/example6.inp +154 -154
- floodmodeller_api/test/test_data/expected_conveyance.csv +60 -0
- floodmodeller_api/test/test_data/jump.dat +176 -176
- floodmodeller_api/test/test_data/network.dat +1374 -1374
- floodmodeller_api/test/test_data/network.ext +45 -45
- floodmodeller_api/test/test_data/network.exy +1 -1
- floodmodeller_api/test/test_data/network.feb +45 -45
- floodmodeller_api/test/test_data/network.ied +45 -45
- floodmodeller_api/test/test_data/network.ief +20 -20
- floodmodeller_api/test/test_data/network.inp +147 -147
- floodmodeller_api/test/test_data/network.pxy +57 -57
- floodmodeller_api/test/test_data/network.zzd +122 -122
- floodmodeller_api/test/test_data/network_dat_expected.json +21837 -0
- floodmodeller_api/test/test_data/network_from_tabularCSV.csv +87 -87
- floodmodeller_api/test/test_data/network_ied_expected.json +287 -0
- floodmodeller_api/test/test_data/rnweir.dat +9 -9
- floodmodeller_api/test/test_data/rnweir.ext +45 -45
- floodmodeller_api/test/test_data/rnweir.feb +9 -9
- floodmodeller_api/test/test_data/rnweir.gxy +45 -45
- floodmodeller_api/test/test_data/rnweir_default.dat +74 -74
- floodmodeller_api/test/test_data/rnweir_default.ext +45 -45
- floodmodeller_api/test/test_data/rnweir_default.feb +9 -9
- floodmodeller_api/test/test_data/rnweir_default.fmpx +58 -58
- floodmodeller_api/test/test_data/rnweir_default.gxy +53 -53
- floodmodeller_api/test/test_data/unit checks.dat +16 -16
- floodmodeller_api/test/test_ied.py +29 -29
- floodmodeller_api/test/test_ief.py +136 -24
- floodmodeller_api/test/test_inp.py +47 -48
- floodmodeller_api/test/test_json.py +114 -0
- floodmodeller_api/test/test_logs_lf.py +102 -51
- floodmodeller_api/test/test_tool.py +165 -152
- floodmodeller_api/test/test_toolbox_structure_log.py +234 -239
- floodmodeller_api/test/test_xml2d.py +151 -156
- floodmodeller_api/test/test_zzn.py +36 -34
- floodmodeller_api/to_from_json.py +230 -0
- floodmodeller_api/tool.py +332 -329
- floodmodeller_api/toolbox/__init__.py +5 -5
- floodmodeller_api/toolbox/example_tool.py +45 -45
- floodmodeller_api/toolbox/model_build/__init__.py +2 -2
- floodmodeller_api/toolbox/model_build/add_siltation_definition.py +100 -98
- floodmodeller_api/toolbox/model_build/structure_log/__init__.py +1 -1
- floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +287 -289
- floodmodeller_api/toolbox/model_build/structure_log_definition.py +76 -76
- floodmodeller_api/units/__init__.py +10 -10
- floodmodeller_api/units/_base.py +214 -212
- floodmodeller_api/units/boundaries.py +467 -467
- floodmodeller_api/units/comment.py +52 -55
- floodmodeller_api/units/conduits.py +382 -402
- floodmodeller_api/units/conveyance.py +301 -0
- floodmodeller_api/units/helpers.py +123 -131
- floodmodeller_api/units/iic.py +107 -101
- floodmodeller_api/units/losses.py +305 -306
- floodmodeller_api/units/sections.py +465 -446
- floodmodeller_api/units/structures.py +1690 -1683
- floodmodeller_api/units/units.py +93 -104
- floodmodeller_api/units/unsupported.py +44 -44
- floodmodeller_api/units/variables.py +87 -89
- floodmodeller_api/urban1d/__init__.py +11 -11
- floodmodeller_api/urban1d/_base.py +188 -179
- floodmodeller_api/urban1d/conduits.py +93 -85
- floodmodeller_api/urban1d/general_parameters.py +58 -58
- floodmodeller_api/urban1d/junctions.py +81 -79
- floodmodeller_api/urban1d/losses.py +81 -74
- floodmodeller_api/urban1d/outfalls.py +114 -110
- floodmodeller_api/urban1d/raingauges.py +111 -111
- floodmodeller_api/urban1d/subsections.py +92 -98
- floodmodeller_api/urban1d/xsections.py +147 -144
- floodmodeller_api/util.py +119 -21
- floodmodeller_api/validation/parameters.py +660 -660
- floodmodeller_api/validation/urban_parameters.py +388 -404
- floodmodeller_api/validation/validation.py +110 -108
- floodmodeller_api/version.py +1 -1
- floodmodeller_api/xml2d.py +632 -673
- floodmodeller_api/xml2d_template.py +37 -37
- floodmodeller_api/zzn.py +414 -363
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/LICENSE.txt +13 -13
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/METADATA +85 -82
- floodmodeller_api-0.4.4.dist-info/RECORD +185 -0
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/WHEEL +1 -1
- floodmodeller_api/libifcoremd.dll +0 -0
- floodmodeller_api/test/test_data/EX3.bmp +0 -0
- floodmodeller_api/test/test_data/test_output.csv +0 -87
- floodmodeller_api/zzn_read.dll +0 -0
- floodmodeller_api-0.4.2.post1.dist-info/RECORD +0 -164
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/entry_points.txt +0 -0
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/top_level.txt +0 -0

floodmodeller_api/mapping.py (new file)

@@ -0,0 +1,84 @@
+from typing import Any
+
+from . import DAT, IED, IEF, INP, LF1, LF2, XML2D, ZZN
+from .backup import File
+from .units import (
+    BLOCKAGE,
+    BRIDGE,
+    COMMENT,
+    CONDUIT,
+    CRUMP,
+    CULVERT,
+    FLAT_V_WEIR,
+    HTBDY,
+    IIC,
+    INTERPOLATE,
+    ORIFICE,
+    OUTFALL,
+    QHBDY,
+    QTBDY,
+    REFHBDY,
+    REPLICATE,
+    RIVER,
+    RNWEIR,
+    SLUICE,
+    SPILL,
+    UNSUPPORTED,
+    WEIR,
+    Variables,
+)
+from .urban1d.conduits import CONDUIT as CONDUIT_URBAN
+from .urban1d.conduits import CONDUITS as CONDUITS_URBAN
+from .urban1d.junctions import JUNCTION, JUNCTIONS
+from .urban1d.losses import LOSS, LOSSES
+from .urban1d.outfalls import OUTFALL as OUTFALL_URBAN
+from .urban1d.outfalls import OUTFALLS as OUTFALLS_URBAN
+from .urban1d.raingauges import RAINGAUGE, RAINGAUGES
+from .urban1d.xsections import XSECTION, XSECTIONS
+
+api_class_mapping: dict[str, Any] = {
+    "floodmodeller_api.dat.DAT": DAT,
+    "floodmodeller_api.ied.IED": IED,
+    "floodmodeller_api.ief.IEF": IEF,
+    "floodmodeller_api.inp.INP": INP,
+    "floodmodeller_api.lf.LF1": LF1,
+    "floodmodeller_api.lf.LF2": LF2,
+    "floodmodeller_api.xml2d.XML2D": XML2D,
+    "floodmodeller_api.zzn.ZZN": ZZN,
+    "floodmodeller_api.backup.File": File,
+    "floodmodeller_api.urban1d.junctions.JUNCTIONS": JUNCTIONS,
+    "floodmodeller_api.urban1d.junctions.JUNCTION": JUNCTION,
+    "floodmodeller_api.urban1d.outfalls.OUTFALLS": OUTFALLS_URBAN,
+    "floodmodeller_api.urban1d.outfalls.OUTFALL": OUTFALL_URBAN,
+    "floodmodeller_api.urban1d.raingauges.RAINGAUGES": RAINGAUGES,
+    "floodmodeller_api.urban1d.raingauges.RAINGAUGE": RAINGAUGE,
+    "floodmodeller_api.urban1d.conduits.CONDUITS": CONDUITS_URBAN,
+    "floodmodeller_api.urban1d.conduits.CONDUIT": CONDUIT_URBAN,
+    "floodmodeller_api.urban1d.losses.LOSSES": LOSSES,
+    "floodmodeller_api.urban1d.losses.LOSS": LOSS,
+    "floodmodeller_api.urban1d.xsections.XSECTIONS": XSECTIONS,
+    "floodmodeller_api.urban1d.xsections.XSECTION": XSECTION,
+    "floodmodeller_api.units.boundaries.HTBDY": HTBDY,
+    "floodmodeller_api.units.boundaries.QHBDY": QHBDY,
+    "floodmodeller_api.units.boundaries.QTBDY": QTBDY,
+    "floodmodeller_api.units.boundaries.REFHBDY": REFHBDY,
+    "floodmodeller_api.units.comment.COMMENT": COMMENT,
+    "floodmodeller_api.units.conduits.CONDUIT": CONDUIT,
+    "floodmodeller_api.units.iic.IIC": IIC,
+    "floodmodeller_api.units.losses.BLOCKAGE": BLOCKAGE,
+    "floodmodeller_api.units.losses.CULVERT": CULVERT,
+    "floodmodeller_api.units.sections.INTERPOLATE": INTERPOLATE,
+    "floodmodeller_api.units.sections.REPLICATE": REPLICATE,
+    "floodmodeller_api.units.sections.RIVER": RIVER,
+    "floodmodeller_api.units.structures.BRIDGE": BRIDGE,
+    "floodmodeller_api.units.structures.CRUMP": CRUMP,
+    "floodmodeller_api.units.structures.FLAT_V_WEIR": FLAT_V_WEIR,
+    "floodmodeller_api.units.structures.ORIFICE": ORIFICE,
+    "floodmodeller_api.units.structures.OUTFALL": OUTFALL,
+    "floodmodeller_api.units.structures.RNWEIR": RNWEIR,
+    "floodmodeller_api.units.structures.SLUICE": SLUICE,
+    "floodmodeller_api.units.structures.SPILL": SPILL,
+    "floodmodeller_api.units.structures.WEIR": WEIR,
+    "floodmodeller_api.units.unsupported.UNSUPPORTED": UNSUPPORTED,
+    "floodmodeller_api.units.variables.Variables": Variables,
+}
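
The new mapping.py module contains a single lookup table, api_class_mapping, keyed by fully qualified class paths. As a minimal sketch (assumed usage, not taken from the package source), such a table lets a dotted class name recorded in serialised output be resolved back to the class itself; the model path below is purely illustrative:

from floodmodeller_api.mapping import api_class_mapping

cls = api_class_mapping["floodmodeller_api.dat.DAT"]  # resolve the dotted name to the DAT class
dat = cls("path/to/model.dat")  # instantiate it exactly as you would DAT directly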

floodmodeller_api/test/__init__.py

@@ -1,4 +1,4 @@
-import os
-import sys
-
-sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
+import os
+import sys
+
+sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))

floodmodeller_api/test/conftest.py

@@ -1,8 +1,16 @@
-import os
-
-import
-
-
-
-
-
+import os
+from pathlib import Path
+from unittest.mock import patch
+
+import pytest
+
+
+@pytest.fixture(scope="session")
+def test_workspace():
+    return Path(os.path.dirname(__file__), "test_data")
+
+
+@pytest.fixture()
+def log_timeout():
+    with patch("floodmodeller_api.logs.lf.LOG_TIMEOUT", new=0):
+        yield
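
The reworked conftest.py moves the shared test-data path into a session-scoped test_workspace fixture and adds log_timeout, which patches floodmodeller_api.logs.lf.LOG_TIMEOUT to zero so log-reading tests do not wait on a live simulation. A hypothetical test using both fixtures (the test name and assertion are illustrative only) might look like:

from floodmodeller_api import LF1


def test_lf1_parses_without_waiting(test_workspace, log_timeout):
    lf1 = LF1(test_workspace / "ex3.lf1")  # test_workspace is a pathlib.Path
    assert lf1 is not None  # placeholder assertion for the sketch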

floodmodeller_api/test/test_backup.py

@@ -1,117 +1,117 @@
-from pathlib import Path
-
-import pandas as pd
-import pytest
-
-from floodmodeller_api.backup import BackupControl, File
-
-
-@pytest.fixture
-def backup_control():
-    # Use a different directory for testing
-    return BackupControl()
-
-
-@pytest.fixture
-def file(test_workspace):
-    test_file = Path(test_workspace, "EX1.DAT")
-    file = File(test_file)
-    # Make a backup to clear in test
-    file.backup()
-    return file
-
-
-def test_init_backup(backup_control):
-    """Has the backup been initialised correctly?"""
-    assert backup_control.backup_dir.exists()
-    assert backup_control.backup_csv_path.exists()
-
-
-def test_generate_file_id(file, test_workspace):
-    """Does this generate a consistent file ID for the same file on disk?"""
-    # Test that the file ID is the same for the same path input
-    file1 = File(Path(test_workspace, "EX1.DAT"))
-    file2 = File(Path(test_workspace, "EX1.DAT"))
-    assert file1.file_id == file2.file_id
-
-
-def test_clear_backup(file, test_workspace):
-    """
-    Does the the clear_backup method work correctly
-    """
-    # Clearing backup -------------------
-    # Load a different file to check it isn't affected by the
-    other_file = File(Path(test_workspace, "EX3.DAT"))
-    # Assert there is a backup for the other file
-    other_file.backup()
-    # Clear the backups for the file to test backup functionality
-    file.clear_backup()
-    # Assert that clearing the backup has worked - there aren't any backups for the file
-    assert len(file.list_backups()) == 0
-    # And that clearing it hasn't affected backups for the other file
-    assert len(other_file.list_backups()) > 0
-
-
-def test_backup_locations(file):
-    """
-    Does it make a backup in the right place?
-    """
-    # Making a backup --------------------
-    file.clear_backup()
-    # make a backup and check if file exists
-    file.backup()
-    backup_file_path = Path(file.backup_dir, file.backup_filename)
-    assert backup_file_path.exists()
-    # check if contents of backup file match the original file
-    with open(backup_file_path) as f1, open(file.path) as f2:
-        assert f1.read() == f2.read()
-
-
-def test_no_duplicate_backup(file, test_workspace):
-    """The backup method should only backup if the file has changed"""
-    # Don't Make Duplicate -------------------
-    # Check that the file isn't backed up again if it hasn't changed
-    the_same_file = File(Path(test_workspace, "EX1.DAT"))
-    # Append something to the dttm string to ensure the filename is different to the previous backup
-    # If the two File objects are created in the same second then then will have identical file names
-    # The function should check for equivalence between file contents.
-    the_same_file.dttm_str = the_same_file.dttm_str + "_1"
-    # Generate a new file name
-    the_same_file._generate_file_name()
-    # Attempt a backup
-    the_same_file.backup()
-    # Check that the file hasn't been created
-    duplicate_backup_path = Path(the_same_file.backup_dir, the_same_file.backup_filename)
-    assert not duplicate_backup_path.exists()
-
-
-def test_backup_logs(file):
-    """Are backups being logged in the CSV?"""
-    # Clear the backup
-    file.clear_backup()
-    # There shouldn't be any edits in the csv
-    backup_logs = pd.read_csv(file.backup_csv_path)
-    backup_count = backup_logs[
-        (backup_logs.file_id == file.file_id) & (backup_logs.dttm == file.dttm_str)
-    ].shape[0]
-    assert backup_count == 0
-    # Make a backup and assert it is in the CSV
-    file.backup()
-    # Check edits to the backup CSV
-    # Check a row has been added to the csv for the file & version
-    backup_logs = pd.read_csv(file.backup_csv_path)
-    backup_count = backup_logs[
-        (backup_logs.file_id == file.file_id) & (backup_logs.dttm == file.dttm_str)
-    ].shape[0]
-    assert backup_count == 1
-
-
-def test_list_backups(file):
-    """Does the list backups method work correctly?"""
-    # First clear any backups that exist
-    file.clear_backup()
-    # make a backup and check if it appears in the backup list
-    file.backup()
-    backups = file.list_backups()
-    expected_backup = Path(file.backup_dir, file.backup_filename)
-    assert expected_backup in [backup.path for backup in backups]
+from pathlib import Path
+
+import pandas as pd
+import pytest
+
+from floodmodeller_api.backup import BackupControl, File
+
+
+@pytest.fixture
+def backup_control():
+    # Use a different directory for testing
+    return BackupControl()
+
+
+@pytest.fixture
+def file(test_workspace):
+    test_file = Path(test_workspace, "EX1.DAT")
+    file = File(test_file)
+    # Make a backup to clear in test
+    file.backup()
+    return file
+
+
+def test_init_backup(backup_control):
+    """Has the backup been initialised correctly?"""
+    assert backup_control.backup_dir.exists()
+    assert backup_control.backup_csv_path.exists()
+
+
+def test_generate_file_id(file, test_workspace):
+    """Does this generate a consistent file ID for the same file on disk?"""
+    # Test that the file ID is the same for the same path input
+    file1 = File(Path(test_workspace, "EX1.DAT"))
+    file2 = File(Path(test_workspace, "EX1.DAT"))
+    assert file1.file_id == file2.file_id
+
+
+def test_clear_backup(file, test_workspace):
+    """
+    Does the the clear_backup method work correctly
+    """
+    # Clearing backup -------------------
+    # Load a different file to check it isn't affected by the
+    other_file = File(Path(test_workspace, "EX3.DAT"))
+    # Assert there is a backup for the other file
+    other_file.backup()
+    # Clear the backups for the file to test backup functionality
+    file.clear_backup()
+    # Assert that clearing the backup has worked - there aren't any backups for the file
+    assert len(file.list_backups()) == 0
+    # And that clearing it hasn't affected backups for the other file
+    assert len(other_file.list_backups()) > 0
+
+
+def test_backup_locations(file):
+    """
+    Does it make a backup in the right place?
+    """
+    # Making a backup --------------------
+    file.clear_backup()
+    # make a backup and check if file exists
+    file.backup()
+    backup_file_path = Path(file.backup_dir, file.backup_filename)
+    assert backup_file_path.exists()
+    # check if contents of backup file match the original file
+    with open(backup_file_path) as f1, open(file.path) as f2:
+        assert f1.read() == f2.read()
+
+
+def test_no_duplicate_backup(file, test_workspace):
+    """The backup method should only backup if the file has changed"""
+    # Don't Make Duplicate -------------------
+    # Check that the file isn't backed up again if it hasn't changed
+    the_same_file = File(Path(test_workspace, "EX1.DAT"))
+    # Append something to the dttm string to ensure the filename is different to the previous backup
+    # If the two File objects are created in the same second then then will have identical file names
+    # The function should check for equivalence between file contents.
+    the_same_file.dttm_str = the_same_file.dttm_str + "_1"
+    # Generate a new file name
+    the_same_file._generate_file_name()
+    # Attempt a backup
+    the_same_file.backup()
+    # Check that the file hasn't been created
+    duplicate_backup_path = Path(the_same_file.backup_dir, the_same_file.backup_filename)
+    assert not duplicate_backup_path.exists()
+
+
+def test_backup_logs(file):
+    """Are backups being logged in the CSV?"""
+    # Clear the backup
+    file.clear_backup()
+    # There shouldn't be any edits in the csv
+    backup_logs = pd.read_csv(file.backup_csv_path)
+    backup_count = backup_logs[
+        (backup_logs.file_id == file.file_id) & (backup_logs.dttm == file.dttm_str)
+    ].shape[0]
+    assert backup_count == 0
+    # Make a backup and assert it is in the CSV
+    file.backup()
+    # Check edits to the backup CSV
+    # Check a row has been added to the csv for the file & version
+    backup_logs = pd.read_csv(file.backup_csv_path)
+    backup_count = backup_logs[
+        (backup_logs.file_id == file.file_id) & (backup_logs.dttm == file.dttm_str)
+    ].shape[0]
+    assert backup_count == 1
+
+
+def test_list_backups(file):
+    """Does the list backups method work correctly?"""
+    # First clear any backups that exist
+    file.clear_backup()
+    # make a backup and check if it appears in the backup list
+    file.backup()
+    backups = file.list_backups()
+    expected_backup = Path(file.backup_dir, file.backup_filename)
+    assert expected_backup in [backup.path for backup in backups]
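
Outside pytest, the backup workflow exercised above uses the same File API shown in the fixtures; a short sketch (the path is illustrative):

from pathlib import Path

from floodmodeller_api.backup import File

dat_file = File(Path("EX1.DAT"))
dat_file.backup()            # writes a copy only if the contents have changed
print(len(dat_file.list_backups()))
dat_file.clear_backup()      # removes the backups recorded for this file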

floodmodeller_api/test/test_conveyance.py (new file)

@@ -0,0 +1,107 @@
+from pathlib import Path
+
+import numpy as np
+import pandas as pd
+import pytest
+from scipy.spatial.distance import directed_hausdorff
+from shapely.geometry import LineString, Polygon
+
+from floodmodeller_api import DAT
+from floodmodeller_api.units.conveyance import (
+    calculate_conveyance_by_panel,
+    calculate_conveyance_part,
+    calculate_cross_section_conveyance,
+    insert_intermediate_wls,
+)
+
+
+def test_calculate_cross_section_conveyance():
+    x = np.array([0, 1, 2, 3, 4])
+    y = np.array([5, 3, 1, 2, 6])
+    n = np.array([0.03, 0.03, 0.03, 0.03, 0.03])
+    panel_markers = np.array([False, False, False, False, False])
+    rpl = np.array([1.0, 1.0, 1.0, 1.0, 1.0])
+    result = calculate_cross_section_conveyance(x, y, n, rpl, panel_markers)
+
+    assert isinstance(result, pd.Series), "Result should be a pandas Series"
+    assert not result.empty, "Result should not be empty"
+
+
+def test_calculate_conveyance_by_panel():
+    x = np.array([0, 1, 2])
+    y = np.array([5, 3, 1])
+    n = np.array([0.03, 0.03])
+    rpl = 1.0
+    wls = np.array([1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0])
+
+    result = calculate_conveyance_by_panel(x, y, n, rpl, wls)
+
+    assert isinstance(result, list), "Result should be a list"
+    assert len(result) == len(wls), "Result length should match the length of water levels"
+    assert all(isinstance(val, float) for val in result), "All conveyance values should be floats"
+
+
+def test_calculate_conveyance_part():
+    wetted_polygon = Polygon([(1, 3), (2, 1), (3, 2), (4, 6), (1, 3)])
+    water_plane = LineString([(0, 3), (5, 3)])
+    glass_walls = LineString([(1, 3), (1, 7)]), LineString([(4, 6), (4, 7)])
+    x = np.array([1, 2, 3, 4])
+    n = np.array([0.03, 0.03, 0.03, 0.03])
+    rpl = 1.0
+
+    result = calculate_conveyance_part(wetted_polygon, water_plane, glass_walls, x, n, rpl)
+
+    assert isinstance(result, float), "Result should be a float"
+    assert result >= 0, "Conveyance should be non-negative"
+
+
+def test_insert_intermediate_wls():
+    arr = np.array([1.0, 2.0, 3.0])
+    threshold = 0.5
+
+    result = insert_intermediate_wls(arr, threshold)
+
+    assert isinstance(result, np.ndarray), "Result should be a numpy array"
+    assert result[0] == 1.0 and result[-1] == 3.0, "First and last elements should match the input"
+    assert all(np.diff(result) <= threshold), "All gaps should be <= to the threshold"
+
+
+@pytest.fixture(scope="module")
+def dat(test_workspace: Path):
+    return DAT(test_workspace / "conveyance_test.dat")
+
+
+@pytest.fixture(scope="module")
+def from_gui(test_workspace: Path):
+    return pd.read_csv(test_workspace / "expected_conveyance.csv")
+
+
+@pytest.mark.parametrize("section", ("a", "a2", "b", "b2", "c", "d", "d2", "e", "e2", "e3"))
+def test_results_close_to_gui(section: str, dat: DAT, from_gui: pd.DataFrame):
+    threshold = 6
+
+    actual = dat.sections[section].conveyance
+    expected = (
+        from_gui.set_index(f"{section}_stage")[f"{section}_conveyance"].dropna().drop_duplicates()
+    )
+    common_index = sorted(set(actual.index).union(expected.index))
+    actual_interpolated = actual.reindex(common_index).interpolate(method="slinear")
+    expected_interpolated = expected.reindex(common_index).interpolate(method="slinear")
+
+    u = np.array(list(zip(actual_interpolated.index, actual_interpolated)))
+    v = np.array(list(zip(expected_interpolated.index, expected_interpolated)))
+    hausdorff_distance = directed_hausdorff(u, v)[0]
+
+    assert hausdorff_distance < threshold
+
+
+@pytest.mark.parametrize("section", ("a", "a2", "b", "b2", "c", "d", "d2", "e", "e2", "e3"))
+def test_results_match_gui_at_shared_points(section: str, dat: DAT, from_gui: pd.DataFrame):
+    tolerance = 1e-2  # 0.001
+    actual = dat.sections[section].conveyance
+    expected = (
+        from_gui.set_index(f"{section}_stage")[f"{section}_conveyance"].dropna().drop_duplicates()
+    )
+    shared_index = sorted(set(actual.index).intersection(expected.index))
+    diff = expected[shared_index] - actual[shared_index]
+    assert (abs(diff) < tolerance).all()  # asserts all conveyance values within 0.001 difference
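
These tests exercise the conveyance calculation added in units/conveyance.py through the conveyance property on river sections, comparing it against curves exported from the Flood Modeller GUI. A rough sketch of reading a section's conveyance curve from a model directly (the file and section names are taken from the test data and are only illustrative):

from floodmodeller_api import DAT

dat = DAT("conveyance_test.dat")
curve = dat.sections["a"].conveyance  # pandas Series of conveyance indexed by stage
print(curve.head())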
|