floodmodeller-api 0.5.1__py3-none-any.whl → 0.5.2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- floodmodeller_api/__init__.py +10 -0
- floodmodeller_api/_base.py +29 -20
- floodmodeller_api/backup.py +12 -10
- floodmodeller_api/dat.py +162 -91
- floodmodeller_api/diff.py +1 -1
- floodmodeller_api/hydrology_plus/hydrology_plus_export.py +1 -1
- floodmodeller_api/ied.py +2 -4
- floodmodeller_api/ief.py +29 -17
- floodmodeller_api/ief_flags.py +1 -1
- floodmodeller_api/inp.py +4 -6
- floodmodeller_api/logs/lf.py +18 -12
- floodmodeller_api/logs/lf_helpers.py +2 -2
- floodmodeller_api/logs/lf_params.py +1 -5
- floodmodeller_api/mapping.py +9 -2
- floodmodeller_api/test/test_conveyance.py +9 -4
- floodmodeller_api/test/test_dat.py +166 -18
- floodmodeller_api/test/test_data/EX18_DAT_expected.json +164 -144
- floodmodeller_api/test/test_data/EX3_DAT_expected.json +6 -2
- floodmodeller_api/test/test_data/EX6_DAT_expected.json +12 -46
- floodmodeller_api/test/test_data/encoding_test_cp1252.dat +1081 -0
- floodmodeller_api/test/test_data/encoding_test_utf8.dat +1081 -0
- floodmodeller_api/test/test_data/integrated_bridge/AR_NoSP_NoBl_2O_NO_OneFRC.ied +33 -0
- floodmodeller_api/test/test_data/integrated_bridge/AR_vSP_25pc_1O.ied +32 -0
- floodmodeller_api/test/test_data/integrated_bridge/PL_vSP_25pc_1O.ied +34 -0
- floodmodeller_api/test/test_data/integrated_bridge/SBTwoFRCsStaggered.IED +32 -0
- floodmodeller_api/test/test_data/integrated_bridge/US_NoSP_NoBl_OR_RN.ied +28 -0
- floodmodeller_api/test/test_data/integrated_bridge/US_SP_NoBl_OR_frc_PT2-5_RN.ied +34 -0
- floodmodeller_api/test/test_data/integrated_bridge/US_fSP_NoBl_1O.ied +30 -0
- floodmodeller_api/test/test_data/integrated_bridge/US_nSP_NoBl_1O.ied +49 -0
- floodmodeller_api/test/test_data/integrated_bridge/US_vSP_NoBl_2O_Para.ied +35 -0
- floodmodeller_api/test/test_data/integrated_bridge.dat +40 -0
- floodmodeller_api/test/test_data/network.ied +2 -2
- floodmodeller_api/test/test_data/network_dat_expected.json +141 -243
- floodmodeller_api/test/test_data/network_ied_expected.json +2 -2
- floodmodeller_api/test/test_data/network_with_comments.ied +2 -2
- floodmodeller_api/test/test_ied.py +1 -1
- floodmodeller_api/test/test_ief.py +10 -2
- floodmodeller_api/test/test_integrated_bridge.py +159 -0
- floodmodeller_api/test/test_json.py +9 -3
- floodmodeller_api/test/test_logs_lf.py +45 -24
- floodmodeller_api/test/test_river.py +1 -1
- floodmodeller_api/test/test_toolbox_structure_log.py +0 -1
- floodmodeller_api/test/test_xml2d.py +5 -5
- floodmodeller_api/to_from_json.py +1 -1
- floodmodeller_api/tool.py +3 -5
- floodmodeller_api/toolbox/model_build/add_siltation_definition.py +1 -1
- floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +12 -8
- floodmodeller_api/units/__init__.py +15 -0
- floodmodeller_api/units/_base.py +73 -10
- floodmodeller_api/units/_helpers.py +343 -0
- floodmodeller_api/units/boundaries.py +59 -71
- floodmodeller_api/units/comment.py +1 -1
- floodmodeller_api/units/conduits.py +57 -54
- floodmodeller_api/units/connectors.py +112 -0
- floodmodeller_api/units/controls.py +107 -0
- floodmodeller_api/units/iic.py +2 -9
- floodmodeller_api/units/losses.py +42 -42
- floodmodeller_api/units/sections.py +40 -43
- floodmodeller_api/units/structures.py +360 -530
- floodmodeller_api/units/units.py +25 -26
- floodmodeller_api/units/unsupported.py +5 -7
- floodmodeller_api/units/variables.py +2 -2
- floodmodeller_api/urban1d/_base.py +7 -8
- floodmodeller_api/urban1d/conduits.py +11 -21
- floodmodeller_api/urban1d/general_parameters.py +1 -1
- floodmodeller_api/urban1d/junctions.py +7 -11
- floodmodeller_api/urban1d/losses.py +13 -17
- floodmodeller_api/urban1d/outfalls.py +16 -21
- floodmodeller_api/urban1d/raingauges.py +3 -9
- floodmodeller_api/urban1d/subsections.py +3 -4
- floodmodeller_api/urban1d/xsections.py +11 -15
- floodmodeller_api/util.py +7 -4
- floodmodeller_api/validation/parameters.py +7 -3
- floodmodeller_api/validation/urban_parameters.py +1 -4
- floodmodeller_api/validation/validation.py +9 -4
- floodmodeller_api/version.py +1 -1
- floodmodeller_api/xml2d.py +9 -11
- floodmodeller_api/xml2d_template.py +1 -1
- floodmodeller_api/zz.py +7 -6
- {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/LICENSE.txt +1 -1
- {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/METADATA +11 -3
- {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/RECORD +85 -70
- {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/WHEEL +1 -1
- floodmodeller_api/units/helpers.py +0 -121
- {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/entry_points.txt +0 -0
- {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/top_level.txt +0 -0
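The headline refactor in 0.5.2 is the removal of floodmodeller_api/units/helpers.py (-121 lines) in favour of the new floodmodeller_api/units/_helpers.py (+343 lines), with the underscore-prefixed helpers renamed to plain public names. A minimal migration sketch for code that imported the old module directly; the old import path is inferred from the deleted file above and the import hunk further down, so treat it as an assumption:

# 0.5.1 (module deleted in 0.5.2, path assumed from the file list):
#     from floodmodeller_api.units.helpers import _to_float, _to_str
# 0.5.2 (renamed module and underscore-free function names):
from floodmodeller_api.units._helpers import to_float, to_str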
--- /dev/null
+++ b/floodmodeller_api/units/_helpers.py
@@ -0,0 +1,343 @@
+"""
+Flood Modeller Python API
+Copyright (C) 2025 Jacobs U.K. Limited
+
+This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
+as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
+of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
+
+You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
+
+If you have any query about this program or this License, please contact us at support@floodmodeller.com or write to the following
+address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London, SE1 2QG, United Kingdom.
+"""
+
+from __future__ import annotations
+
+import copy
+from itertools import chain
+from typing import Any, Callable
+
+import pandas as pd
+
+NOTATION_THRESHOLD = 10
+
+
+def split_10_char(line: str) -> list[str]:
+    return split_n_char(line, 10)
+
+
+def split_12_char(line: str) -> list[str]:
+    return split_n_char(line, 12)
+
+
+def split_n_char(line: str, n: int) -> list[str]:
+    return [line[i : i + n].strip() for i in range(0, len(line), n)]
+
+
+def join_10_char(*itms, dp=3):
+    """Joins a set of values with a 10 character buffer and right-justified"""
+    string = ""
+    for itm in itms:
+        if itm is None:
+            itm = ""
+        if isinstance(itm, float):
+            # save to 3 dp
+            # Use scientific notation if number greater than NOTATION_THRESHOLD characters
+            itm = f"{itm:.{dp}e}" if len(f"{itm:.{dp}f}") > NOTATION_THRESHOLD else f"{itm:.{dp}f}"
+        itm = str(itm)
+        itm = itm[:10]
+        string += f"{itm:>10}"
+    return string
+
+
+def join_12_char_ljust(*itms, dp=3):
+    """Joins a set of values with a 12 character buffer and left-justified"""
+    return join_n_char_ljust(12, *itms, dp=dp)
+
+
+def join_n_char_ljust(n, *itms, dp=3):
+    """Joins a set of values with a n character buffer and left-justified"""
+    string = ""
+    for itm in itms:
+        if itm is None:
+            itm = ""
+        if isinstance(itm, float):
+            # save to 3 dp
+            # Use scientific notation if number greater than 10 characters
+            itm = f"{itm:.{dp}e}" if len(f"{itm:.{dp}f}") > NOTATION_THRESHOLD else f"{itm:.{dp}f}"
+        itm = str(itm)
+        itm = itm[:n]
+        string += f"{itm:<{n}}"
+    return string
+
+
+def to_float(itm, default=0.0):
+    try:
+        return float(itm)
+    except ValueError:
+        return default
+
+
+def to_int(itm, default=0):
+    try:
+        return int(itm)
+    except ValueError:
+        return default
+
+
+def to_str(itm, default, check_float=False):
+    if check_float:
+        try:
+            return float(itm)
+        except ValueError:
+            pass
+    if itm == "":
+        return default
+    return itm
+
+
+def to_data_list(block: list[str], num_cols: int | None = None, date_col: int | None = None):
+    if num_cols is not None:
+        num_cols += 1 if date_col is not None else 0
+    data_list = []
+    for row in block:
+        row_split = split_10_char(row) if num_cols is None else split_10_char(row)[:num_cols]
+        if date_col is not None:
+            date_time = " ".join(row_split[date_col : date_col + 2])
+            row_split = [
+                to_float(itm)
+                for idx, itm in enumerate(row_split)
+                if idx not in (date_col, date_col + 1)
+            ]
+            row_split.insert(date_col, date_time)
+        else:
+            row_split = [to_float(itm) for itm in row_split]
+
+        row_list = list(row_split)
+        data_list.append(row_list)
+    return data_list
+
+
+def set_bridge_params(obj: Any, line: str, *, include_pier: bool = True) -> None:
+    params = split_10_char(f"{line:<90}")
+    obj.calibration_coefficient = to_float(params[0], 1.0)
+    obj.skew = to_float(params[1])
+    obj.bridge_width_dual = to_float(params[2])
+    obj.bridge_dist_dual = to_float(params[3])
+    if include_pier:
+        obj.total_pier_width = to_float(params[4])
+    obj.orifice_flow = params[5] == "ORIFICE"
+    obj.orifice_lower_transition_dist = to_float(params[6])
+    obj.orifice_upper_transition_dist = to_float(params[7])
+    obj.orifice_discharge_coefficient = to_float(params[8], 1.0)
+
+
+def set_pier_params(obj: Any, line: str) -> None:
+    pier_info = split_10_char(line)
+    if int(pier_info[0]) > 0:
+        obj.specify_piers = True
+        obj.npiers = int(pier_info[0])
+        if pier_info[1] == "COEFF":
+            obj.pier_use_calibration_coeff = True
+            obj.pier_calibration_coeff = to_float(pier_info[3])
+        else:
+            obj.pier_use_calibration_coeff = False
+            obj.pier_shape = pier_info[1]
+            obj.pier_faces = pier_info[2]
+    else:
+        obj.specify_piers = False
+        obj.soffit_shape = pier_info[1]
+
+
+def read_dataframe_from_lines(
+    all_lines: list[str],
+    end_idx: int,
+    read_lines: Callable[[list[str]], pd.DataFrame],
+    *args,
+    **kwargs,
+) -> tuple[int, int, pd.DataFrame]:
+    nrows = get_int(all_lines[end_idx])
+    start_idx = end_idx + 1
+    end_idx = start_idx + nrows
+    data = read_lines(all_lines[start_idx:end_idx], *args, **kwargs)
+    return nrows, end_idx, data
+
+
+def read_bridge_cross_sections(
+    lines: list[str],
+    *,
+    include_panel_marker: bool = False,
+    include_top_level: bool = False,
+) -> pd.DataFrame:
+    data_list = []
+    for line in lines:
+        line_split = split_10_char(f"{line:<50}")
+        df_row = [
+            to_float(line_split[0]),
+            to_float(line_split[1]),
+            to_float(line_split[2]),
+        ]
+
+        if include_panel_marker:
+            df_row.append(line_split[3])
+
+        df_row.append(line_split[4])
+
+        if include_top_level:
+            df_row.append(line_split[5])
+
+        data_list.append(df_row)
+
+    columns = ["X", "Y", "Mannings n"]
+
+    if include_panel_marker:
+        columns.append("Panel")
+
+    columns.append("Embankments")
+
+    if include_top_level:
+        columns.append("Top Level")
+    return pd.DataFrame(data_list, columns=columns)
+
+
+def read_bridge_opening_data(lines: list[str]) -> pd.DataFrame:
+    data_list = []
+    for line in lines:
+        line_split = split_10_char(f"{line:<40}")
+        start = to_float(line_split[0])
+        finish = to_float(line_split[1])
+        spring = to_float(line_split[2])
+        soffit = to_float(line_split[3])
+        data_list.append([start, finish, spring, soffit])
+    return pd.DataFrame(data_list, columns=["Start", "Finish", "Springing Level", "Soffit Level"])
+
+
+def read_bridge_culvert_data(lines: list[str]) -> pd.DataFrame:
+    data_list = []
+    for line in lines:
+        line_split = split_10_char(f"{line:<70}")
+        invert = to_float(line_split[0])
+        soffit = to_float(line_split[1])
+        area = to_float(line_split[2])
+        cd_part = to_float(line_split[3])
+        cd_full = to_float(line_split[4])
+        dlinen = to_float(line_split[5])
+        x = to_float(line_split[6])
+        data_list.append([invert, soffit, area, cd_part, cd_full, dlinen, x])
+    return pd.DataFrame(
+        data_list,
+        columns=[
+            "Invert",
+            "Soffit",
+            "Section Area",
+            "Cd Part Full",
+            "Cd Full",
+            "Drowning Coefficient",
+            "X",
+        ],
+    )
+
+
+def read_bridge_pier_locations(lines: list[str]) -> pd.DataFrame:
+    data_list = []
+    for line in lines:
+        line_split = split_10_char(f"{line:<40}")
+        l_x = to_float(line_split[0])
+        l_top_level = to_float(line_split[1])
+        r_x = to_float(line_split[2])
+        r_top_level = to_float(line_split[3])
+        data_list.append([l_x, l_top_level, r_x, r_top_level])
+    return pd.DataFrame(
+        data_list,
+        columns=["Left X", "Left Top Level", "Right X", "Right Top Level"],
+    )
+
+
+def read_spill_section_data(lines: list[str]) -> pd.DataFrame:
+    data_list = []
+    for line in lines:
+        line_split = split_10_char(f"{line:<40}")
+        chainage = to_float(line_split[0])
+        elevation = to_float(line_split[1])
+        easting = to_float(line_split[2])
+        northing = to_float(line_split[3])
+        data_list.append([chainage, elevation, easting, northing])
+    return pd.DataFrame(data_list, columns=["X", "Y", "Easting", "Northing"])
+
+
+def read_superbridge_opening_data(lines: list[str]) -> pd.DataFrame:
+    data_list = []
+    for line in lines:
+        line_split = split_10_char(f"{line:<20}")
+        x = to_float(line_split[0])
+        z = to_float(line_split[1])
+        data_list.append([x, z])
+    return pd.DataFrame(data_list, columns=["X", "Z"])
+
+
+def read_superbridge_block_data(lines: list[str]) -> pd.DataFrame:
+    data_list = []
+    for line in lines:
+        line_split = split_10_char(f"{line:<30}")
+        percentage = to_int(line_split[0])
+        time = to_float(line_split[1])
+        datetime = to_float(line_split[2])
+        data_list.append([percentage, time, datetime])
+    return pd.DataFrame(data_list, columns=["percentage", "time", "datetime"])
+
+
+def read_lateral_data(lines: list[str]) -> pd.DataFrame:
+    data_list = []
+    for line in lines:
+        line_split = split_12_char(f"{line:<36}")
+        label = line_split[0]
+        factor = to_float(line_split[1])
+        flag = line_split[2]
+        data_list.append([label, factor, flag])
+    columns = ["Node Label", "Custom Weight Factor", "Use Weight Factor"]
+    return pd.DataFrame(data_list, columns=columns)
+
+
+def read_reservoir_data(lines: list[str]) -> pd.DataFrame:
+    data_list = []
+    for line in lines:
+        line_split = split_10_char(f"{line:<20}")
+        elevation = to_float(line_split[0])
+        area = to_float(line_split[1])
+        data_list.append([elevation, area])
+    columns = ["Elevation", "Plan Area"]
+    return pd.DataFrame(data_list, columns=columns)
+
+
+def get_int(line: str) -> int:
+    return int(float(split_10_char(line)[0]))
+
+
+def write_dataframe(
+    header: int | str | None,
+    df: pd.DataFrame,
+    empty: int | None = None,
+    n: int = 10,
+) -> list[str]:
+    df_to_use = copy.deepcopy(df)
+    if empty is not None:
+        df_to_use.insert(empty, "_", [None] * len(df_to_use))
+    lines = [join_n_char_ljust(n, *x) for x in df_to_use.itertuples(index=False)]
+    if header is not None:
+        lines = [str(header), *lines]
+    return lines
+
+
+def write_dataframes(
+    header: int | str | None,
+    subheaders: list[int],
+    df_list: list[pd.DataFrame],
+) -> list[str]:
+    list_of_lists = [write_dataframe(x, y) for x, y in zip(subheaders, df_list)]
+    lines = list(chain.from_iterable(list_of_lists))
+    if header is not None:
+        lines = [str(header), *lines]
+    return lines
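The helpers above all revolve around Flood Modeller's fixed-width DAT format, where each value occupies a 10- or 12-character column rather than a delimited field. A minimal usage sketch of the round trip, using only functions defined in this module (the field values are illustrative):

from floodmodeller_api.units._helpers import join_10_char, split_10_char, to_float

# Serialise mixed values into 10-char right-justified columns;
# None becomes a blank field and floats are written to 3 dp.
line = join_10_char(1.5, 100.0, None, "ORIFICE")

# Split back into stripped 10-char fields, padding first so that
# trailing columns exist even when the stored line is short.
fields = split_10_char(f"{line:<50}")  # ['1.500', '100.000', '', 'ORIFICE', '']

# Blank fields fall back to a default rather than raising.
assert to_float(fields[2]) == 0.0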
--- a/floodmodeller_api/units/boundaries.py
+++ b/floodmodeller_api/units/boundaries.py
@@ -1,6 +1,6 @@
 """
 Flood Modeller Python API
-Copyright (C)
+Copyright (C) 2025 Jacobs U.K. Limited
 
 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
 as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -20,14 +20,13 @@ from floodmodeller_api.validation import _validate_unit
 from floodmodeller_api.validation.parameters import parameter_options
 
 from ._base import Unit
-from .
-    _to_data_list,
-    _to_float,
-    _to_int,
-    _to_str,
+from ._helpers import (
     join_10_char,
     join_n_char_ljust,
     split_10_char,
+    to_data_list,
+    to_float,
+    to_str,
 )
 
 
@@ -82,13 +81,6 @@ class QTBDY(Unit):
         }.items():
             setattr(self, param, val)
 
-        # AL Since this is most likely used when building a model,
-        # AL it would be nice to have a "name generator" to create
-        # AL a unique name with each call (ie new_qtbdy_12345 then new_qtbdy_02508)
-        # JP Yes this is a good idea, although I'm not sure how it would be best implemented
-        # since any two instances of the class being initialised would be unaware of each other?
-        # There is always the option to pass a name when constrcuting the class which may be better
-
         self.data = (
             data
             if isinstance(data, pd.Series)
@@ -98,21 +90,21 @@ class QTBDY(Unit):
     def _read(self, qtbdy_block):
         """Function to read a given QTBDY block and store data as class attributes"""
         self.name = qtbdy_block[1][: self._label_len].strip()
-        self.comment = qtbdy_block[0]
+        self.comment = self._remove_unit_name(qtbdy_block[0])
         qtbdy_params = split_10_char(f"{qtbdy_block[2]:<90}")
         self.nrows = int(qtbdy_params[0])
-        self.timeoffset =
-        self._something =
-        self.timeunit =
-        self.extendmethod =
-        self.interpmethod =
-        self.flowmultiplier =
-        self.minflow =
-        self.allow_override =
+        self.timeoffset = to_float(qtbdy_params[1])
+        self._something = to_float(qtbdy_params[2])
+        self.timeunit = to_str(qtbdy_params[3], "HOURS", check_float=True)
+        self.extendmethod = to_str(qtbdy_params[4], "EXTEND")
+        self.interpmethod = to_str(qtbdy_params[5], "LINEAR")
+        self.flowmultiplier = to_float(qtbdy_params[6])
+        self.minflow = to_float(qtbdy_params[7])
+        self.allow_override = to_str(qtbdy_params[8], "OVERRIDE")  # ''/OVERRIDE or NOOVERRIDE
         data_list = (
-
+            to_data_list(qtbdy_block[3:], date_col=1)
             if self.timeunit == "DATES"
-            else
+            else to_data_list(qtbdy_block[3:])
         )
 
         self.data = pd.DataFrame(data_list, columns=["Flow", "Time"])
@@ -122,7 +114,7 @@ class QTBDY(Unit):
     def _write(self):
        """Function to write a valid QTBDY block"""
         _validate_unit(self)  # Function to check the params are valid for QTBDY
-        header =
+        header = self._create_header()
         name = self.name[: self._label_len]
         self.nrows = len(self.data)
 
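For context on the DATES branch in the QTBDY reader above: when timeunit is "DATES", each data row carries its date and time in two adjacent 10-character fields, which to_data_list(..., date_col=1) merges back into a single string column. A small sketch using the new helper (the date/time format shown is illustrative, not necessarily Flood Modeller's exact format):

from floodmodeller_api.units._helpers import join_10_char, to_data_list

# One data row: a flow value, then date and time in two 10-char fields.
row = join_10_char(1.25) + "01/01/2000" + "     12:00"
print(to_data_list([row], date_col=1))
# [[1.25, '01/01/2000 12:00']] - the two fields merge into one datetime string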
@@ -195,18 +187,18 @@ class HTBDY(Unit):
     def _read(self, htbdy_block):
         """Function to read a given HTBDY block and store data as class attributes"""
         self.name = htbdy_block[1][: self._label_len].strip()
-        self.comment = htbdy_block[0]
+        self.comment = self._remove_unit_name(htbdy_block[0])
         htbdy_params = split_10_char(f"{htbdy_block[2]:<50}")
         self.nrows = int(htbdy_params[0])
-        self._something =
-        self.timeunit =
-        self.extendmethod =
-        self.interpmethod =
+        self._something = to_str(htbdy_params[1], "")
+        self.timeunit = to_str(htbdy_params[2], "HOURS", check_float=True)
+        self.extendmethod = to_str(htbdy_params[3], "EXTEND")
+        self.interpmethod = to_str(htbdy_params[4], "LINEAR")
 
         data_list = (
-
+            to_data_list(htbdy_block[3:], date_col=1)
             if self.timeunit == "DATES"
-            else
+            else to_data_list(htbdy_block[3:])
         )
 
         self.data = pd.DataFrame(data_list, columns=["Stage", "Time"])
@@ -221,7 +213,7 @@ class HTBDY(Unit):
     def _write(self):
         """Function to write a valid HTBDY block"""
         _validate_unit(self)  # Function to check the params are valid for HTBDY
-        header =
+        header = self._create_header()
         name = self.name
         self.nrows = len(self.data)
 
@@ -273,12 +265,12 @@ class QHBDY(Unit):
     def _read(self, qhbdy_block):
         """Function to read a given QHBDY block and store data as class attributes"""
         self.name = qhbdy_block[1][: self._label_len].strip()
-        self.comment = qhbdy_block[0]
+        self.comment = self._remove_unit_name(qhbdy_block[0])
         qhbdy_params = split_10_char(f"{qhbdy_block[2]:<30}")
         self.nrows = int(qhbdy_params[0])
-        self.interpmethod =
+        self.interpmethod = to_str(qhbdy_params[2], "LINEAR")
 
-        data_list =
+        data_list = to_data_list(qhbdy_block[3:])
 
         self.data = pd.DataFrame(data_list, columns=["Flow", "Stage"])
         self.data = self.data.set_index("Stage")
@@ -287,7 +279,7 @@ class QHBDY(Unit):
     def _write(self):
         """Function to write a valid QHBDY block"""
         _validate_unit(self)  # Function to check the params are valid for QHBDY
-        header =
+        header = self._create_header()
         name = self.name
         self.nrows = len(self.data)
 
@@ -348,43 +340,40 @@ class REFHBDY(Unit):
         """Function to read a given REFHBDY block and store data as class attributes"""
         # line 1 & 2
         # Extract comment and revision number
-
-        self._revision = _to_int(b[0], 1)
-        self.comment = b[1:].strip()
+        self._revision, self.comment = self._get_revision_and_comment(refhbdy_block[0])
         self.name = refhbdy_block[1][: self._label_len].strip()
 
         # line 3
         refhbdy_params1 = split_10_char(refhbdy_block[2])
-
-        self._something = _to_float(refhbdy_params1[0])
+        self._unknown_param_1 = to_float(refhbdy_params1[0])
         self.easting = int(float(refhbdy_params1[1]))
         self.northing = int(float(refhbdy_params1[2]))
 
         # line 4
         refhbdy_opts = split_10_char(f"{refhbdy_block[3]:<90}")
-        self.time_delay =
+        self.time_delay = to_float(refhbdy_opts[0])
         # SD / timestep must be odd interval
-        self.timestep =
+        self.timestep = to_float(refhbdy_opts[1])
         # '' : Full hydrograph, 'pfonly' : peak flow, 'bfonly' : baseflow only
         self.sim_type = refhbdy_opts[2]
-        self.scale_method =
-        self.scale_value =
-        self.boundary_type =
-        self.scale_type =
-        self.minflow =
+        self.scale_method = to_str(refhbdy_opts[3], "SCALEFACT")  # PEAKVALUE or SCALEFACT
+        self.scale_value = to_float(refhbdy_opts[4], 1.0)
+        self.boundary_type = to_str(refhbdy_opts[5], "HYDROGRAPH")  # HYDROGRAPH or HYETOGRAPH
+        self.scale_type = to_str(refhbdy_opts[6], "FULL")  # FULL or RUNOFF
+        self.minflow = to_float(refhbdy_opts[7])
         self.allow_override = refhbdy_opts[8]  # ''/OVERRIDE or NOOVERRIDE
 
         # line 5
         refhbdy_params2 = split_10_char(f"{refhbdy_block[4]:<60}")
-        self.area =
+        self.area = to_float(refhbdy_params2[0])
         try:
             # Maintain SAAR as integer if already is, else use float
             self.saar = int(refhbdy_params2[1])
         except ValueError:
             self.saar = float(refhbdy_params2[1])
-        self.urbext =
-        self.season =
-        self.calc_source =
+        self.urbext = to_float(refhbdy_params2[2])
+        self.season = to_str(refhbdy_params2[3], "DEFAULT")  # DEFAULT, SUMMER or WINTER
+        self.calc_source = to_str(refhbdy_params2[4], "DLL")  # DLL or REPORT
         self.use_urban_subdivisions = refhbdy_params2[5] != ""
         if self.use_urban_subdivisions:
             # Just keeping this raw for now as unlikely to be used.
@@ -399,37 +388,36 @@ class REFHBDY(Unit):
 
         # line 6
         rainfall_params1 = split_10_char(rainfall_params1)
-        self.storm_area =
-        self.storm_duration =
-
-        self._something2 = _to_float(rainfall_params1[2])
+        self.storm_area = to_float(rainfall_params1[0])
+        self.storm_duration = to_float(rainfall_params1[1])
+        self._unknown_param_2 = to_float(rainfall_params1[2])
 
         # line 7
         self.rainfall_comment = rainfall_params2[20:]
         rainfall_params2 = split_10_char(rainfall_params2[:20])
         self.arf_method = rainfall_params2[1]
-        self.
+        self._unknown_param_3 = rainfall_params2[0]
 
         # line 8
         rainfall_params3 = split_10_char(rainfall_params3)
-        self.observed_rainfall_depth =
-        self.return_period =
-        self.arf =
-        self.ddf_c =
-        self.ddf_d1 =
-        self.ddf_d2 =
-        self.ddf_d3 =
-        self.ddf_e =
-        self.ddf_f =
+        self.observed_rainfall_depth = to_float(rainfall_params3[0])
+        self.return_period = to_float(rainfall_params3[1])
+        self.arf = to_float(rainfall_params3[2])
+        self.ddf_c = to_float(rainfall_params3[3])
+        self.ddf_d1 = to_float(rainfall_params3[4])
+        self.ddf_d2 = to_float(rainfall_params3[5])
+        self.ddf_d3 = to_float(rainfall_params3[6])
+        self.ddf_e = to_float(rainfall_params3[7])
+        self.ddf_f = to_float(rainfall_params3[8])
 
     def _write(self):
         """Function to write a valid REFHBDY block"""
         _validate_unit(self)  # Function to check the params are valid for QTBDY
-        header =
+        header = self._create_header(include_revision=True)
         name = self.name[: self._label_len]
 
         refhbdy_block = [header, name]
-        line3 = join_10_char(self.
+        line3 = join_10_char(self._unknown_param_1, self.easting, self.northing)
         self.sim_type = (
             "" if self.sim_type.upper() == "FULL" else self.sim_type
         )  # Allow 'full' as an option
@@ -454,8 +442,8 @@ class REFHBDY(Unit):
         if self.use_urban_subdivisions:
             refhbdy_block.extend(self._urban_refh_data)
 
-        line6 = join_10_char(self.storm_area, self.storm_duration, self.
-        line7 = join_10_char(self.
+        line6 = join_10_char(self.storm_area, self.storm_duration, self._unknown_param_2)
+        line7 = join_10_char(self._unknown_param_3, self.arf_method) + self.rainfall_comment
         line8 = join_10_char(
             self.observed_rainfall_depth,
             self.return_period,
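Across all four boundary units the pattern is the same: blank fixed-width fields now resolve to explicit defaults via to_float/to_str instead of being stored as empty strings. A minimal sketch of that behaviour with a hypothetical QTBDY parameter line where only nrows is populated:

from floodmodeller_api.units._helpers import split_10_char, to_float, to_str

params = split_10_char(f"{'         3':<90}")  # only the first field is set
nrows = int(params[0])                         # 3
timeoffset = to_float(params[1])               # blank -> 0.0
timeunit = to_str(params[3], "HOURS", check_float=True)  # blank -> 'HOURS'
extendmethod = to_str(params[4], "EXTEND")     # blank -> 'EXTEND'
# Note: check_float=True means a numeric field would come back as a float instead.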
@@ -1,6 +1,6 @@
 """
 Flood Modeller Python API
-Copyright (C)
+Copyright (C) 2025 Jacobs U.K. Limited
 
 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
 as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.