floodmodeller-api 0.4.4__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- floodmodeller_api/__init__.py +1 -0
- floodmodeller_api/dat.py +117 -96
- floodmodeller_api/hydrology_plus/__init__.py +2 -0
- floodmodeller_api/hydrology_plus/helper.py +23 -0
- floodmodeller_api/hydrology_plus/hydrology_plus_export.py +333 -0
- floodmodeller_api/ied.py +93 -90
- floodmodeller_api/ief.py +233 -50
- floodmodeller_api/ief_flags.py +1 -0
- floodmodeller_api/logs/lf.py +5 -1
- floodmodeller_api/mapping.py +2 -0
- floodmodeller_api/test/test_conveyance.py +23 -32
- floodmodeller_api/test/test_data/7082.ief +28 -0
- floodmodeller_api/test/test_data/BaseModel_2D_Q100.ief +28 -0
- floodmodeller_api/test/test_data/Baseline_unchecked.csv +77 -0
- floodmodeller_api/test/test_data/Constant QT.ief +19 -0
- floodmodeller_api/test/test_data/Domain1_Q_xml_expected.json +7 -7
- floodmodeller_api/test/test_data/EX18_DAT_expected.json +54 -38
- floodmodeller_api/test/test_data/EX3_DAT_expected.json +246 -166
- floodmodeller_api/test/test_data/EX3_IEF_expected.json +25 -20
- floodmodeller_api/test/test_data/EX6_DAT_expected.json +522 -350
- floodmodeller_api/test/test_data/FEH boundary.ief +23 -0
- floodmodeller_api/test/test_data/Linked1D2D_xml_expected.json +7 -7
- floodmodeller_api/test/test_data/P3Panels_UNsteady.ief +25 -0
- floodmodeller_api/test/test_data/QT in dat file.ief +20 -0
- floodmodeller_api/test/test_data/T10.ief +25 -0
- floodmodeller_api/test/test_data/T2.ief +25 -0
- floodmodeller_api/test/test_data/T5.ief +25 -0
- floodmodeller_api/test/test_data/df_flows_hplus.csv +56 -0
- floodmodeller_api/test/test_data/event_hplus.csv +56 -0
- floodmodeller_api/test/test_data/ex4.ief +20 -0
- floodmodeller_api/test/test_data/ex6.ief +21 -0
- floodmodeller_api/test/test_data/example_h+_export.csv +77 -0
- floodmodeller_api/test/test_data/hplus_export_example_1.csv +72 -0
- floodmodeller_api/test/test_data/hplus_export_example_10.csv +77 -0
- floodmodeller_api/test/test_data/hplus_export_example_2.csv +79 -0
- floodmodeller_api/test/test_data/hplus_export_example_3.csv +77 -0
- floodmodeller_api/test/test_data/hplus_export_example_4.csv +131 -0
- floodmodeller_api/test/test_data/hplus_export_example_5.csv +77 -0
- floodmodeller_api/test/test_data/hplus_export_example_6.csv +131 -0
- floodmodeller_api/test/test_data/hplus_export_example_7.csv +131 -0
- floodmodeller_api/test/test_data/hplus_export_example_8.csv +131 -0
- floodmodeller_api/test/test_data/hplus_export_example_9.csv +131 -0
- floodmodeller_api/test/test_data/network_dat_expected.json +312 -210
- floodmodeller_api/test/test_data/network_ied_expected.json +6 -6
- floodmodeller_api/test/test_data/network_with_comments.ied +55 -0
- floodmodeller_api/test/test_flowtimeprofile.py +133 -0
- floodmodeller_api/test/test_hydrology_plus_export.py +210 -0
- floodmodeller_api/test/test_ied.py +12 -0
- floodmodeller_api/test/test_ief.py +49 -9
- floodmodeller_api/test/test_json.py +6 -1
- floodmodeller_api/test/test_read_file.py +27 -0
- floodmodeller_api/test/test_river.py +169 -0
- floodmodeller_api/to_from_json.py +7 -1
- floodmodeller_api/tool.py +6 -10
- floodmodeller_api/units/__init__.py +11 -1
- floodmodeller_api/units/boundaries.py +6 -0
- floodmodeller_api/units/conveyance.py +101 -212
- floodmodeller_api/units/sections.py +120 -39
- floodmodeller_api/util.py +2 -0
- floodmodeller_api/version.py +1 -1
- floodmodeller_api/xml2d.py +20 -13
- floodmodeller_api/xsd_backup.xml +738 -0
- {floodmodeller_api-0.4.4.dist-info → floodmodeller_api-0.5.0.dist-info}/METADATA +2 -1
- {floodmodeller_api-0.4.4.dist-info → floodmodeller_api-0.5.0.dist-info}/RECORD +68 -34
- {floodmodeller_api-0.4.4.dist-info → floodmodeller_api-0.5.0.dist-info}/WHEEL +1 -1
- {floodmodeller_api-0.4.4.dist-info → floodmodeller_api-0.5.0.dist-info}/LICENSE.txt +0 -0
- {floodmodeller_api-0.4.4.dist-info → floodmodeller_api-0.5.0.dist-info}/entry_points.txt +0 -0
- {floodmodeller_api-0.4.4.dist-info → floodmodeller_api-0.5.0.dist-info}/top_level.txt +0 -0
floodmodeller_api/test/test_river.py
ADDED
@@ -0,0 +1,169 @@
+import pandas as pd
+import pytest
+
+from floodmodeller_api.units.sections import RIVER
+
+river_unit_data_cases = [
+    (
+        [
+            "RIVER normal case",
+            "SECTION",
+            "SomeUnit",
+            " 0.000 0.000100 1000.000",
+            " 5",
+            " 0.000 10 0.030 0.000 0.0 0.0 ",
+            " 1.000 9 0.030 0.000 0.0 0.0 LEFT",
+            " 2.000 5 0.030 0.000 0.0 0.0 ",
+            " 3.000 6 0.030 0.000 0.0 0.0 RIGHT",
+            " 4.000 10 0.030 0.000 0.0 0.0 ",
+        ],
+        3,
+    ),
+    (
+        [
+            "RIVER close together",
+            "SECTION",
+            "AnotherUnit",
+            " 0.000 0.000100 1000.000",
+            " 3",
+            " 0.000 15 0.040 0.000 0.0 0.0 ",
+            " 1.500 8 0.040 0.000 0.0 0.0 LEFT",
+            " 3.000 12 0.040 0.000 0.0 0.0 RIGHT",
+        ],
+        2,
+    ),
+    (
+        [
+            "RIVER double markers",
+            "SECTION",
+            "AnotherUnit",
+            " 0.000 0.000100 1000.000",
+            " 3",
+            " 0.000 15 0.040 0.000 0.0 0.0 ",
+            " 1.500 8 0.040 0.000 0.0 0.0 LEFT",
+            " 3.000 12 0.040 0.000 0.0 0.0 ",
+            " 4.000 13 0.040 0.000 0.0 0.0 LEFT",
+            " 5.000 2 0.040 0.000 0.0 0.0 ",
+            " 6.000 1 0.040 0.000 0.0 0.0 ",
+            " 7.000 254 0.040 0.000 0.0 0.0 RIGHT",
+            " 8.000 21 0.040 0.000 0.0 0.0 ",
+            " 9.000 76 0.040 0.000 0.0 0.0 RIGHT",
+        ],
+        4,
+    ),
+]
+
+
+@pytest.mark.parametrize(("river_unit_data", "_"), river_unit_data_cases)
+def test_read_write(river_unit_data, _):
+    river_section_1 = RIVER(river_unit_data)
+    river_section_2 = RIVER(river_section_1._write())
+    assert river_section_1 == river_section_2
+
+
+@pytest.mark.parametrize(("river_unit_data", "expected_len"), river_unit_data_cases)
+def test_river_active_data(river_unit_data, expected_len):
+    river_section = RIVER(river_unit_data)
+    active_data = river_section.active_data
+
+    assert len(active_data) == expected_len
+    assert active_data.iloc[0].Deactivation == "LEFT"
+    assert active_data.iloc[-1].Deactivation == "RIGHT"
+    assert "LEFT" not in active_data.iloc[1:-1].Deactivation.to_list()
+    assert "RIGHT" not in active_data.iloc[1:-1].Deactivation.to_list()
+
+
+def test_edit_active_data():
+    unit = RIVER(
+        [
+            "RIVER normal case",
+            "SECTION",
+            "SomeUnit",
+            " 0.000 0.000100 1000.000",
+            " 5",
+            " 0.000 10 0.030 0.000 0.0 0.0 ",
+            " 1.000 9 0.030 0.000 0.0 0.0 LEFT",
+            " 2.000 5 0.030 0.000 0.0 0.0 ",
+            " 3.000 6 0.030 0.000 0.0 0.0 RIGHT",
+            " 4.000 10 0.030 0.000 0.0 0.0 ",
+        ],
+    )
+    unit.active_data.iloc[0, 1] = 99
+    assert unit.data.iloc[1, 1] == 99
+    expected_row = "1.000 99.000 0.030 0.000 0.000 0.000 LEFT"
+    assert expected_row in str(unit)
+
+
+def test_active_data_with_no_markers():
+    unit = RIVER(
+        [
+            "RIVER normal case",
+            "SECTION",
+            "SomeUnit",
+            " 0.000 0.000100 1000.000",
+            " 5",
+            " 0.000 10 0.030",
+            " 1.000 9 0.030",
+            " 2.000 5 0.030",
+            " 3.000 6 0.030",
+            " 4.000 10 0.030",
+        ],
+    )
+    assert len(unit.active_data) == 5
+    unit.data.iloc[1, 8] = "LEFT"
+    unit.data.iloc[3, 8] = "RIGHT"
+    assert len(unit.active_data) == 3
+
+
+def test_create_from_blank():
+    blank_unit = RIVER()
+    assert len(blank_unit.data) == 0
+    assert len(blank_unit.active_data) == 0
+    assert blank_unit._write() == [
+        "RIVER ",
+        "SECTION",
+        "new_section ",
+        " 0.000 0.000100 1000.000",
+        " 0",
+    ]
+
+
+def test_create_from_blank_with_params():
+    unit = RIVER(
+        name="for_test",
+        comment="testing",
+        spill1="t",
+        spill2="e",
+        lat1="s",
+        lat2="t",
+        lat3="i",
+        lat4="ng",
+        dist_to_next=55,
+        slope=0.00015,
+        density=1010.0,
+        data=pd.DataFrame(
+            {
+                "X": [0.0, 1.0, 2.0],
+                "Y": [5.0, 2.0, 5.0],
+                "Mannings n": [0.01, 0.01, 0.01],
+                "Panel": ["", "", ""],
+                "RPL": [0.0, 0.0, 0.0],
+                "Marker": ["", "", ""],
+                "Easting": [0.0, 0.0, 0.0],
+                "Northing": [0.0, 0.0, 0.0],
+                "Deactivation": ["", "", ""],
+                "SP. Marker": ["", "", ""],
+            },
+        ),
+    )
+
+    assert unit._write() == [
+        "RIVER testing",
+        "SECTION",
+        "for_test t e s t i ng ",
+        " 55.000 0.000150 1010.000",
+        " 3",
+        " 0.000 5.000 0.010 0.000 0.000 0.000 ",
+        " 1.000 2.000 0.010 0.000 0.000 0.000 ",
+        " 2.000 5.000 0.010 0.000 0.000 0.000 ",
+    ]
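
Taken together, the assertions above pin down the marker rule behind RIVER.active_data: the active rows run from the last LEFT deactivation marker to the first RIGHT marker, falling back to the whole section when either marker is absent. A standalone sketch of that rule, using a hypothetical helper name (the real implementation lives in floodmodeller_api/units/sections.py):

import pandas as pd


def active_slice(data: pd.DataFrame) -> pd.DataFrame:
    # Hypothetical re-statement of the rule exercised by the tests above,
    # not the package's own code.
    markers = data["Deactivation"].tolist()
    # start at the last LEFT marker, or the first row if there is none
    start = len(markers) - 1 - markers[::-1].index("LEFT") if "LEFT" in markers else 0
    # stop at the first RIGHT marker, or the last row if there is none
    end = markers.index("RIGHT") if "RIGHT" in markers else len(markers) - 1
    return data.iloc[start : end + 1]

Applied to the "double markers" case above, this keeps the four rows between the second LEFT and the first RIGHT, matching the expected length of 4.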

floodmodeller_api/to_from_json.py
CHANGED
@@ -77,6 +77,7 @@ def recursive_to_json(obj: Any, is_top_level: bool = True) -> Any: # noqa: PLR0
     """
     from ._base import FMFile
     from .backup import File
+    from .ief import FlowTimeProfile
     from .units import IIC
     from .units._base import Unit
     from .urban1d._base import UrbanSubsection, UrbanUnit
@@ -105,7 +106,10 @@ def recursive_to_json(obj: Any, is_top_level: bool = True) -> Any: # noqa: PLR0
         return {key: recursive_to_json(value, is_top_level=False) for key, value in obj.items()}

     # Either a type of FM API Class
-    if isinstance(
+    if isinstance(
+        obj,
+        (FMFile, Unit, IIC, File, UrbanSubsection, UrbanUnit, FlowTimeProfile),
+    ):
         # Information from the flood modeller object will be included in the JSON output
         # slicing undertaken to remove quotation marks
         return_dict: dict[str, Any] = {"API Class": str(obj.__class__)[8:-2]}
@@ -118,6 +122,8 @@ def recursive_to_json(obj: Any, is_top_level: bool = True) -> Any: # noqa: PLR0

         return return_dict

+    return None
+

 def from_json(obj: str | dict) -> dict:
     """
floodmodeller_api/tool.py
CHANGED
@@ -54,9 +54,7 @@ def validate_int(value):
     """
     if value.isdigit():
         return True
-
-        return True
-    return False
+    return value == ""


 def validate_float(value):
@@ -72,9 +70,7 @@ def validate_float(value):
         float(value)
         return True
     except ValueError:
-
-        return True
-    return False
+        return value == ""


 class Gui:
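
Both fixes above collapse a pair of unreachable returns into a single fallback: a keystroke is accepted if the resulting text parses, or if it is empty, which is what Tkinter hands to the validator via "%P" when the field is cleared. A self-contained illustration of the resulting behaviour (re-stated here rather than imported, and assuming the try: that opens the block just above the second excerpt):

def validate_int(value):
    # digits are accepted; so is an empty field, so the entry can be cleared
    if value.isdigit():
        return True
    return value == ""


def validate_float(value):
    # anything float() accepts is valid; an empty field is also allowed
    try:
        float(value)
        return True
    except ValueError:
        return value == ""


assert validate_int("42") and validate_int("") and not validate_int("4.2")
assert validate_float("4.2") and validate_float("") and not validate_float("abc")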

@@ -92,7 +88,7 @@ class Gui:

     """

-    def __init__(
+    def __init__(
         self,
         master: tk.Tk,
         title: str,
@@ -145,12 +141,12 @@ class Gui:
         label.pack()
         # Conditional stuff to add validation for different data types.
         # This ensures that you can't enter text if the input should be a number, etc.
-        if data_type
+        if data_type is str:
             entry = tk.Entry(self.master)
-        elif data_type
+        elif data_type is int:
             entry = tk.Entry(self.master, validate="key")
             entry.config(validatecommand=(entry.register(validate_int), "%P"))
-        elif data_type
+        elif data_type is float:
             entry = tk.Entry(self.master, validate="key")
             entry.config(validatecommand=(entry.register(validate_float), "%P"))
         else:

floodmodeller_api/units/__init__.py
CHANGED
@@ -4,7 +4,17 @@ from .conduits import CONDUIT
 from .iic import IIC
 from .losses import BLOCKAGE, CULVERT
 from .sections import INTERPOLATE, REPLICATE, RIVER
-from .structures import
+from .structures import (
+    BRIDGE,
+    CRUMP,
+    FLAT_V_WEIR,
+    ORIFICE,
+    OUTFALL,
+    RNWEIR,
+    SLUICE,
+    SPILL,
+    WEIR,
+)
 from .units import ALL_UNIT_TYPES, SUPPORTED_UNIT_TYPES, UNSUPPORTED_UNIT_TYPES
 from .unsupported import UNSUPPORTED
 from .variables import Variables

floodmodeller_api/units/boundaries.py
CHANGED
@@ -17,6 +17,7 @@ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London
 import pandas as pd

 from floodmodeller_api.validation import _validate_unit
+from floodmodeller_api.validation.parameters import parameter_options

 from ._base import Unit
 from .helpers import (
@@ -212,6 +213,11 @@ class HTBDY(Unit):
         self.data = self.data.set_index("Time")
         self.data = self.data["Stage"] # Convert to series

+        # Fix legacy Flood Modeller bug where timeunit is present in extendmethod place
+        if self.extendmethod in parameter_options["timeunit"]["options"][1]:
+            self.timeunit = self.extendmethod
+            self.extendmethod = "EXTEND"
+
     def _write(self):
         """Function to write a valid HTBDY block"""
         _validate_unit(self) # Function to check the params are valid for HTBDY

floodmodeller_api/units/conveyance.py
CHANGED
@@ -1,31 +1,34 @@
 from __future__ import annotations

 from functools import lru_cache
+from typing import TYPE_CHECKING

 import numpy as np
 import pandas as pd
-
+
+if TYPE_CHECKING:
+    from numpy.typing import NDArray

 MINIMUM_PERIMETER_THRESHOLD = 1e-8


 def calculate_cross_section_conveyance(
-    x: np.
-    y: np.
-    n: np.
-    rpl: np.
-    panel_markers: np.
+    x: NDArray[np.float64],
+    y: NDArray[np.float64],
+    n: NDArray[np.float64],
+    rpl: NDArray[np.float64],
+    panel_markers: NDArray[np.float64],
 ) -> pd.Series:
     """
     Calculate the conveyance of a cross-section by summing the conveyance
     across all panels defined by panel markers.

     Args:
-        x (np.
-        y (np.
-        n (np.
-        rpl (np.
-        panel_markers (np.
+        x (NDArray[np.float64]): The x-coordinates of the cross-section.
+        y (NDArray[np.float64]): The y-coordinates of the cross-section.
+        n (NDArray[np.float64]): Manning's n values for each segment.
+        rpl (NDArray[np.float64]): Relative Path Length values for each segment.
+        panel_markers (NDArray[np.float64]): Boolean array indicating the start of each panel.

     Returns:
         pd.Series: A pandas Series containing the conveyance values indexed by water levels.
@@ -41,164 +44,115 @@ def calculate_cross_section_conveyance(
         result = calculate_cross_section_conveyance(x, y, n, rpl, panel_markers)
         print(result)
     """
-
-
-
-
-    # Panel markers are forced true to the bounds to make the process work
-    panel_markers = np.array([True, *panel_markers[1:-1], True])
-    panel_indices = np.where(panel_markers)[0]
-    conveyance_by_panel = []
-    for panel_start, panel_end in zip(panel_indices[:-1], panel_indices[1:] + 1):
-        panel_x = x[panel_start:panel_end]
-        panel_y = y[panel_start:panel_end]
-        panel_n = n[panel_start:panel_end]
-        # RPL value is only valid for the start of a panel, and set to 1 if it's zero
-        panel_rpl = (
-            1.0
-            if (panel_start == 0 and not panel_markers[0]) or rpl[panel_start] == 0
-            else float(rpl[panel_start])
-        )
-        conveyance_by_panel.append(
-            calculate_conveyance_by_panel(panel_x, panel_y, panel_n, panel_rpl, wls),
-        )
-
-    # Sum conveyance across panels
-    conveyance_values = [sum(values) for values in zip(*conveyance_by_panel)]
-
-    return pd.Series(data=conveyance_values, index=wls)
-
-
-def calculate_conveyance_by_panel(
-    x: np.ndarray,
-    y: np.ndarray,
-    n: np.ndarray,
-    rpl: float,
-    wls: np.ndarray,
-) -> list[float]:
-    """
-    Calculate the conveyance for a single panel of a cross-section at specified water levels.
-
-    Args:
-        x (np.ndarray): The x-coordinates of the panel.
-        y (np.ndarray): The y-coordinates of the panel.
-        n (np.ndarray): Manning's n values for each segment in the panel.
-        rpl (float): Relative Path Length for each segment in the panel.
-        wls (np.ndarray): The water levels at which to calculate conveyance.
-
-    Returns:
-        list[float]: A list of conveyance values for each water level.
-    """
+    water_levels = insert_intermediate_wls(np.unique(y), threshold=0.05)
+    area, length, mannings = calculate_geometry(x, y, n, water_levels)
+    panel = panel_markers.cumsum()[:-1]

-
-
+    intersection = (y[:-2] < water_levels[:, np.newaxis]) & (y[1:-1] >= water_levels[:, np.newaxis])
+    section_markers = np.hstack([np.full((intersection.shape[0], 1), False), intersection])
+    section = section_markers.cumsum(axis=1)

-
-    x = np.array([x[0], *x, x[-1]])
-    y = np.array([max_y, *y, max_y])
-    n = np.array([0, *n, 0])
+    conveyance = np.zeros_like(water_levels)

-
-
-
-
-    # Define linestring geometries representing glass walls, so they can be subtracted later
-    glass_walls = (
-        LineString(zip([x[0], x[1]], [y[0], y[1]])), # left
-        LineString(zip([x[-2], x[-1]], [y[-2], y[-1]])), # right
-    )
-
-    # Remove glass wall sections from coords
-    x, y, n = x[1:-1], y[1:-1], n[1:-1]
-
-    conveyance_values = []
-    for wl in wls:
-        if wl <= np.min(y):
-            # no channel capacity (essentially zero depth) so no need to calculate
-            conveyance_values.append(0.0)
+    for i in range(panel.max() + 1):
+        in_panel = panel == i
+        if not in_panel.any():
             continue

-
-
-        water_plane = intersection(channel_polygon, LineString(zip([start, end], [wl, wl])))
-        wetted_polygon = intersection(channel_polygon, water_surface)
+        rpl_panel = np.sqrt(rpl[:-1][in_panel][0])
+        rpl_panel = 1 if rpl_panel == 0 else rpl_panel

-
-
+        for j in range(section.max() + 1):
+            in_section = section == j
+            in_panel_and_section = in_panel & in_section
+            if not in_panel_and_section.any():
+                continue

-
+            total_area = np.where(in_panel_and_section, area, 0).sum(axis=1)
+            total_length = np.where(in_panel_and_section, length, 0).sum(axis=1)
+            total_mannings = np.where(in_panel_and_section, mannings, 0).sum(axis=1)

-
-
-
-
-
+            conveyance += np.where(
+                total_length >= MINIMUM_PERIMETER_THRESHOLD,
+                total_area ** (5 / 3) * total_length ** (1 / 3) / (total_mannings * rpl_panel),
+                0,
+            )

-    return
+    return pd.Series(conveyance, index=water_levels)


-def
-
-
-
-
-
-    rpl: float,
-) -> float:
+def calculate_geometry(
+    x: NDArray[np.float64],
+    y: NDArray[np.float64],
+    n: NDArray[np.float64],
+    water_levels: NDArray[np.float64],
+) -> tuple[NDArray[np.float64], NDArray[np.float64], NDArray[np.float64]]:
     """
-    Calculate
+    Calculate area, length, weighted mannings for piecewise linear curve (x, y) below water_level.

     Args:
-
-
-
-
-        x (np.ndarray): 1D array of channel chainage
-        n (np.ndarray): 1D array of channel mannings
-        rpl (float): Relative path length of panel
+        x (NDArray[np.float64]): 1D array of x-coordinates.
+        y (NDArray[np.float64]): 1D array of y-coordinates.
+        n (NDArray[np.float64]): 1D array to integrate over the length.
+        water_levels (NDArray[np.float64]): The horizontal reference line.

     Returns:
-
+        NDArray[np.float64]: The area above the curve and under the reference line.
+        NDArray[np.float64]: The length of the curve under the reference line.
+        NDArray[np.float64]: Manning's n integrated along the curve under the reference line.
     """
-
-
-
-
-
-
-
-
+    h = water_levels[:, np.newaxis] - y
+
+    x1 = x[:-1]
+    x2 = x[1:]
+    h1 = h[:, :-1]
+    h2 = h[:, 1:]
+    n1 = n[:-1]
+
+    dx = x2 - x1
+
+    is_submerged = (h1 > 0) & (h2 > 0)
+    is_submerged_on_left = (h1 > 0) & (h2 <= 0)
+    is_submerged_on_right = (h1 <= 0) & (h2 > 0)
+    conditions = [is_submerged, is_submerged_on_left, is_submerged_on_right]
+
+    # needed for partially submerged sections
+    dx_left = dx * h1 / (h1 - h2)
+    dx_right = dx * h2 / (h2 - h1)
+
+    area = np.select(
+        conditions,
+        [
+            0.5 * dx * (h1 + h2),
+            0.5 * dx_left * h1,
+            0.5 * dx_right * h2,
+        ],
+        default=0,
     )
-
-
-
-
-
-
-
-
-
-        wetted_polyline: LineString = (
-            wetted_polygon.exterior.difference(water_plane_clip)
-            .difference(glass_wall_left_clip)
-            .difference(glass_wall_right_clip)
+    length = np.select(
+        conditions,
+        [
+            np.sqrt((h2 - h1) ** 2 + dx**2),
+            np.sqrt(h1**2 + dx_left**2),
+            np.sqrt(h2**2 + dx_right**2),
+        ],
+        default=0,
     )
-    weighted_mannings =
+    weighted_mannings = n1 * length

-
-    return (area ** (5 / 3) / wetted_perimeter ** (2 / 3)) * (wetted_perimeter / weighted_mannings)
+    return area, length, weighted_mannings


-def insert_intermediate_wls(arr: np.
+def insert_intermediate_wls(arr: NDArray[np.float64], threshold: float) -> NDArray[np.float64]:
     """
     Insert intermediate water levels into an array based on a threshold.

     Args:
-        arr (np.
+        arr (NDArray[np.float64]): The array of original water levels.
         threshold (float): The maximum allowed gap between water levels.

     Returns:
-        np.
+        NDArray[np.float64]: The array with intermediate water levels inserted.
     """
     # Calculate gaps between consecutive elements
     gaps = np.diff(arr)
@@ -207,81 +161,16 @@ def insert_intermediate_wls(arr: np.ndarray, threshold: float):
     num_points = (gaps // threshold).astype(int)

     # Prepare lists to hold the new points and results
-    new_points = [
-
-
-
-
-
-        new_points.extend(points)
-        new_points.append(end)
-
-    # Combine the original starting point with the new points
-    return np.array([arr[0]] + new_points)
-
-
-def calculate_weighted_mannings(
-    x: np.ndarray,
-    n: np.ndarray,
-    rpl: float,
-    wetted_polyline: LineString,
-) -> float:
-    """Calculate the weighted Manning's n value for a wetted polyline."""
-
-    # We want the polyline to be split into each individual segment
-    segments = line_to_segments(wetted_polyline)
-    weighted_mannings = 0
-    for segment in segments:
-        mannings_value = get_mannings_by_segment_x_coords(
-            x,
-            n,
-            segment.coords[0][0],
-            segment.coords[1][0],
-        )
-        weighted_mannings += mannings_value * segment.length * np.sqrt(rpl)
-
-    return weighted_mannings
-
-
-def line_to_segments(line: LineString | MultiLineString) -> list[LineString]:
-    """Convert a LineString or MultiLineString into a list of LineString segments."""
-    if isinstance(line, LineString):
-        segments = []
-        for start, end in zip(line.coords[:-1], line.coords[1:]):
-            points = sorted([start, end], key=lambda x: x[0])
-            segments.append(LineString(points))
-        return segments
-    if isinstance(line, MultiLineString):
-        segments = []
-        for linestring in line.geoms:
-            segments.extend(line_to_segments(linestring))
-        return segments
-    raise TypeError("Input must be a LineString or MultiLineString")
-
-
-def get_mannings_by_segment_x_coords(
-    x: np.ndarray,
-    n: np.ndarray,
-    start_x: float,
-    end_x: float,
-) -> float:
-    """Get the Manning's n or RPL value for a segment based on its start x-coordinate."""
-
-    # This method doesn't handle cases where we have multiple manning's values at a vertical section
-    # and will always just take the first at any verticle, but it is probably quite rare for this
-    # not to be the case
-    if start_x == end_x:
-        # Vertical segment take first x match
-        index = np.searchsorted(x, start_x) - (start_x not in x)
-    else:
-        # Otherwise non-vertical segment, take last match
-        index = np.searchsorted(x, start_x, side="right") - 1
-
-    return n[index]
+    new_points = [
+        np.linspace(start, end, num + 2, endpoint=False)
+        for start, end, num in zip(arr[:-1], arr[1:], num_points)
+    ]
+    end = np.array([arr[-1]])
+    return np.concatenate([*new_points, end])


 @lru_cache
-def
+def calculate_cross_section_conveyance_cached(
     x: tuple[float],
     y: tuple[float],
     n: tuple[float],