floodmodeller-api 0.4.2.post1__py3-none-any.whl → 0.4.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- floodmodeller_api/__init__.py +8 -9
- floodmodeller_api/_base.py +169 -176
- floodmodeller_api/backup.py +273 -273
- floodmodeller_api/dat.py +889 -831
- floodmodeller_api/diff.py +136 -119
- floodmodeller_api/ied.py +302 -306
- floodmodeller_api/ief.py +553 -637
- floodmodeller_api/ief_flags.py +253 -253
- floodmodeller_api/inp.py +260 -266
- floodmodeller_api/libs/libifcoremd.dll +0 -0
- floodmodeller_api/libs/libifcoremt.so.5 +0 -0
- floodmodeller_api/libs/libifport.so.5 +0 -0
- floodmodeller_api/{libmmd.dll → libs/libimf.so} +0 -0
- floodmodeller_api/libs/libintlc.so.5 +0 -0
- floodmodeller_api/libs/libmmd.dll +0 -0
- floodmodeller_api/libs/libsvml.so +0 -0
- floodmodeller_api/libs/libzzn_read.so +0 -0
- floodmodeller_api/libs/zzn_read.dll +0 -0
- floodmodeller_api/logs/__init__.py +2 -2
- floodmodeller_api/logs/lf.py +364 -312
- floodmodeller_api/logs/lf_helpers.py +354 -352
- floodmodeller_api/logs/lf_params.py +643 -529
- floodmodeller_api/mapping.py +84 -0
- floodmodeller_api/test/__init__.py +4 -4
- floodmodeller_api/test/conftest.py +16 -8
- floodmodeller_api/test/test_backup.py +117 -117
- floodmodeller_api/test/test_conveyance.py +107 -0
- floodmodeller_api/test/test_dat.py +222 -92
- floodmodeller_api/test/test_data/All Units 4_6.DAT +1081 -1081
- floodmodeller_api/test/test_data/All Units 4_6.feb +1081 -1081
- floodmodeller_api/test/test_data/BRIDGE.DAT +926 -926
- floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.dat +36 -36
- floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.feb +36 -36
- floodmodeller_api/test/test_data/DamBreakADI.xml +52 -52
- floodmodeller_api/test/test_data/DamBreakFAST.xml +58 -58
- floodmodeller_api/test/test_data/DamBreakFAST_dy.xml +53 -53
- floodmodeller_api/test/test_data/DamBreakTVD.xml +55 -55
- floodmodeller_api/test/test_data/DefenceBreach.xml +53 -53
- floodmodeller_api/test/test_data/DefenceBreachFAST.xml +60 -60
- floodmodeller_api/test/test_data/DefenceBreachFAST_dy.xml +55 -55
- floodmodeller_api/test/test_data/Domain1+2_QH.xml +76 -76
- floodmodeller_api/test/test_data/Domain1_H.xml +41 -41
- floodmodeller_api/test/test_data/Domain1_Q.xml +41 -41
- floodmodeller_api/test/test_data/Domain1_Q_FAST.xml +48 -48
- floodmodeller_api/test/test_data/Domain1_Q_FAST_dy.xml +48 -48
- floodmodeller_api/test/test_data/Domain1_Q_xml_expected.json +263 -0
- floodmodeller_api/test/test_data/Domain1_W.xml +41 -41
- floodmodeller_api/test/test_data/EX1.DAT +321 -321
- floodmodeller_api/test/test_data/EX1.ext +107 -107
- floodmodeller_api/test/test_data/EX1.feb +320 -320
- floodmodeller_api/test/test_data/EX1.gxy +107 -107
- floodmodeller_api/test/test_data/EX17.DAT +421 -422
- floodmodeller_api/test/test_data/EX17.ext +213 -213
- floodmodeller_api/test/test_data/EX17.feb +422 -422
- floodmodeller_api/test/test_data/EX18.DAT +375 -375
- floodmodeller_api/test/test_data/EX18_DAT_expected.json +3876 -0
- floodmodeller_api/test/test_data/EX2.DAT +302 -302
- floodmodeller_api/test/test_data/EX3.DAT +926 -926
- floodmodeller_api/test/test_data/EX3_DAT_expected.json +16235 -0
- floodmodeller_api/test/test_data/EX3_IEF_expected.json +61 -0
- floodmodeller_api/test/test_data/EX6.DAT +2084 -2084
- floodmodeller_api/test/test_data/EX6.ext +532 -532
- floodmodeller_api/test/test_data/EX6.feb +2084 -2084
- floodmodeller_api/test/test_data/EX6_DAT_expected.json +31647 -0
- floodmodeller_api/test/test_data/Event Data Example.DAT +336 -336
- floodmodeller_api/test/test_data/Event Data Example.ext +107 -107
- floodmodeller_api/test/test_data/Event Data Example.feb +336 -336
- floodmodeller_api/test/test_data/Linked1D2D.xml +52 -52
- floodmodeller_api/test/test_data/Linked1D2DFAST.xml +53 -53
- floodmodeller_api/test/test_data/Linked1D2DFAST_dy.xml +48 -48
- floodmodeller_api/test/test_data/Linked1D2D_xml_expected.json +313 -0
- floodmodeller_api/test/test_data/blockage.dat +50 -50
- floodmodeller_api/test/test_data/blockage.ext +45 -45
- floodmodeller_api/test/test_data/blockage.feb +9 -9
- floodmodeller_api/test/test_data/blockage.gxy +71 -71
- floodmodeller_api/test/test_data/conveyance_test.dat +165 -0
- floodmodeller_api/test/test_data/conveyance_test.feb +116 -0
- floodmodeller_api/test/test_data/conveyance_test.gxy +85 -0
- floodmodeller_api/test/test_data/defaultUnits.dat +127 -127
- floodmodeller_api/test/test_data/defaultUnits.ext +45 -45
- floodmodeller_api/test/test_data/defaultUnits.feb +9 -9
- floodmodeller_api/test/test_data/defaultUnits.fmpx +58 -58
- floodmodeller_api/test/test_data/defaultUnits.gxy +85 -85
- floodmodeller_api/test/test_data/ex3.ief +20 -20
- floodmodeller_api/test/test_data/ex3.lf1 +2800 -2800
- floodmodeller_api/test/test_data/ex4.DAT +1374 -1374
- floodmodeller_api/test/test_data/ex4_changed.DAT +1374 -1374
- floodmodeller_api/test/test_data/example1.inp +329 -329
- floodmodeller_api/test/test_data/example2.inp +158 -158
- floodmodeller_api/test/test_data/example3.inp +297 -297
- floodmodeller_api/test/test_data/example4.inp +388 -388
- floodmodeller_api/test/test_data/example5.inp +147 -147
- floodmodeller_api/test/test_data/example6.inp +154 -154
- floodmodeller_api/test/test_data/expected_conveyance.csv +60 -0
- floodmodeller_api/test/test_data/jump.dat +176 -176
- floodmodeller_api/test/test_data/network.dat +1374 -1374
- floodmodeller_api/test/test_data/network.ext +45 -45
- floodmodeller_api/test/test_data/network.exy +1 -1
- floodmodeller_api/test/test_data/network.feb +45 -45
- floodmodeller_api/test/test_data/network.ied +45 -45
- floodmodeller_api/test/test_data/network.ief +20 -20
- floodmodeller_api/test/test_data/network.inp +147 -147
- floodmodeller_api/test/test_data/network.pxy +57 -57
- floodmodeller_api/test/test_data/network.zzd +122 -122
- floodmodeller_api/test/test_data/network_dat_expected.json +21837 -0
- floodmodeller_api/test/test_data/network_from_tabularCSV.csv +87 -87
- floodmodeller_api/test/test_data/network_ied_expected.json +287 -0
- floodmodeller_api/test/test_data/rnweir.dat +9 -9
- floodmodeller_api/test/test_data/rnweir.ext +45 -45
- floodmodeller_api/test/test_data/rnweir.feb +9 -9
- floodmodeller_api/test/test_data/rnweir.gxy +45 -45
- floodmodeller_api/test/test_data/rnweir_default.dat +74 -74
- floodmodeller_api/test/test_data/rnweir_default.ext +45 -45
- floodmodeller_api/test/test_data/rnweir_default.feb +9 -9
- floodmodeller_api/test/test_data/rnweir_default.fmpx +58 -58
- floodmodeller_api/test/test_data/rnweir_default.gxy +53 -53
- floodmodeller_api/test/test_data/unit checks.dat +16 -16
- floodmodeller_api/test/test_ied.py +29 -29
- floodmodeller_api/test/test_ief.py +136 -24
- floodmodeller_api/test/test_inp.py +47 -48
- floodmodeller_api/test/test_json.py +114 -0
- floodmodeller_api/test/test_logs_lf.py +102 -51
- floodmodeller_api/test/test_tool.py +165 -152
- floodmodeller_api/test/test_toolbox_structure_log.py +234 -239
- floodmodeller_api/test/test_xml2d.py +151 -156
- floodmodeller_api/test/test_zzn.py +36 -34
- floodmodeller_api/to_from_json.py +230 -0
- floodmodeller_api/tool.py +332 -329
- floodmodeller_api/toolbox/__init__.py +5 -5
- floodmodeller_api/toolbox/example_tool.py +45 -45
- floodmodeller_api/toolbox/model_build/__init__.py +2 -2
- floodmodeller_api/toolbox/model_build/add_siltation_definition.py +100 -98
- floodmodeller_api/toolbox/model_build/structure_log/__init__.py +1 -1
- floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +287 -289
- floodmodeller_api/toolbox/model_build/structure_log_definition.py +76 -76
- floodmodeller_api/units/__init__.py +10 -10
- floodmodeller_api/units/_base.py +214 -212
- floodmodeller_api/units/boundaries.py +467 -467
- floodmodeller_api/units/comment.py +52 -55
- floodmodeller_api/units/conduits.py +382 -402
- floodmodeller_api/units/conveyance.py +301 -0
- floodmodeller_api/units/helpers.py +123 -131
- floodmodeller_api/units/iic.py +107 -101
- floodmodeller_api/units/losses.py +305 -306
- floodmodeller_api/units/sections.py +465 -446
- floodmodeller_api/units/structures.py +1690 -1683
- floodmodeller_api/units/units.py +93 -104
- floodmodeller_api/units/unsupported.py +44 -44
- floodmodeller_api/units/variables.py +87 -89
- floodmodeller_api/urban1d/__init__.py +11 -11
- floodmodeller_api/urban1d/_base.py +188 -179
- floodmodeller_api/urban1d/conduits.py +93 -85
- floodmodeller_api/urban1d/general_parameters.py +58 -58
- floodmodeller_api/urban1d/junctions.py +81 -79
- floodmodeller_api/urban1d/losses.py +81 -74
- floodmodeller_api/urban1d/outfalls.py +114 -110
- floodmodeller_api/urban1d/raingauges.py +111 -111
- floodmodeller_api/urban1d/subsections.py +92 -98
- floodmodeller_api/urban1d/xsections.py +147 -144
- floodmodeller_api/util.py +119 -21
- floodmodeller_api/validation/parameters.py +660 -660
- floodmodeller_api/validation/urban_parameters.py +388 -404
- floodmodeller_api/validation/validation.py +110 -108
- floodmodeller_api/version.py +1 -1
- floodmodeller_api/xml2d.py +632 -673
- floodmodeller_api/xml2d_template.py +37 -37
- floodmodeller_api/zzn.py +414 -363
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/LICENSE.txt +13 -13
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/METADATA +85 -82
- floodmodeller_api-0.4.4.dist-info/RECORD +185 -0
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/WHEEL +1 -1
- floodmodeller_api/libifcoremd.dll +0 -0
- floodmodeller_api/test/test_data/EX3.bmp +0 -0
- floodmodeller_api/test/test_data/test_output.csv +0 -87
- floodmodeller_api/zzn_read.dll +0 -0
- floodmodeller_api-0.4.2.post1.dist-info/RECORD +0 -164
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/entry_points.txt +0 -0
- {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/top_level.txt +0 -0
floodmodeller_api/dat.py
CHANGED
|
@@ -1,831 +1,889 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Flood Modeller Python API
|
|
3
|
-
Copyright (C)
|
|
4
|
-
|
|
5
|
-
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
|
|
6
|
-
as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
|
|
7
|
-
|
|
8
|
-
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
|
|
9
|
-
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
|
10
|
-
|
|
11
|
-
You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
|
|
12
|
-
|
|
13
|
-
If you have any query about this program or this License, please contact us at support@floodmodeller.com or write to the following
|
|
14
|
-
address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London, SE1 2QG, United Kingdom.
|
|
15
|
-
"""
|
|
16
|
-
|
|
17
|
-
from
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
from
|
|
21
|
-
|
|
22
|
-
from .
|
|
23
|
-
from .
|
|
24
|
-
from .
|
|
25
|
-
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
|
|
29
|
-
|
|
30
|
-
|
|
31
|
-
|
|
32
|
-
|
|
33
|
-
|
|
34
|
-
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
|
|
38
|
-
|
|
39
|
-
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
|
|
49
|
-
|
|
50
|
-
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
|
|
56
|
-
|
|
57
|
-
|
|
58
|
-
|
|
59
|
-
|
|
60
|
-
|
|
61
|
-
|
|
62
|
-
|
|
63
|
-
|
|
64
|
-
|
|
65
|
-
|
|
66
|
-
|
|
67
|
-
|
|
68
|
-
|
|
69
|
-
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
|
|
76
|
-
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
def
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
|
|
95
|
-
|
|
96
|
-
the
|
|
97
|
-
|
|
98
|
-
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
|
|
102
|
-
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
|
|
125
|
-
|
|
126
|
-
|
|
127
|
-
|
|
128
|
-
|
|
129
|
-
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
|
|
136
|
-
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
|
|
140
|
-
|
|
141
|
-
return self._name_label_match(unit)
|
|
142
|
-
|
|
143
|
-
|
|
144
|
-
self.
|
|
145
|
-
|
|
146
|
-
|
|
147
|
-
|
|
148
|
-
|
|
149
|
-
|
|
150
|
-
|
|
151
|
-
|
|
152
|
-
|
|
153
|
-
|
|
154
|
-
|
|
155
|
-
|
|
156
|
-
|
|
157
|
-
|
|
158
|
-
|
|
159
|
-
|
|
160
|
-
|
|
161
|
-
|
|
162
|
-
|
|
163
|
-
|
|
164
|
-
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
|
|
172
|
-
|
|
173
|
-
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
if
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
|
|
205
|
-
|
|
206
|
-
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
|
|
210
|
-
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
|
|
214
|
-
|
|
215
|
-
|
|
216
|
-
|
|
217
|
-
|
|
218
|
-
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
|
|
227
|
-
|
|
228
|
-
|
|
229
|
-
|
|
230
|
-
|
|
231
|
-
|
|
232
|
-
|
|
233
|
-
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
|
|
242
|
-
|
|
243
|
-
|
|
244
|
-
|
|
245
|
-
|
|
246
|
-
|
|
247
|
-
|
|
248
|
-
|
|
249
|
-
|
|
250
|
-
|
|
251
|
-
|
|
252
|
-
|
|
253
|
-
|
|
254
|
-
|
|
255
|
-
|
|
256
|
-
|
|
257
|
-
|
|
258
|
-
|
|
259
|
-
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
|
|
263
|
-
|
|
264
|
-
|
|
265
|
-
|
|
266
|
-
|
|
267
|
-
|
|
268
|
-
|
|
269
|
-
|
|
270
|
-
|
|
271
|
-
|
|
272
|
-
|
|
273
|
-
|
|
274
|
-
|
|
275
|
-
|
|
276
|
-
|
|
277
|
-
|
|
278
|
-
|
|
279
|
-
|
|
280
|
-
|
|
281
|
-
|
|
282
|
-
|
|
283
|
-
|
|
284
|
-
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
290
|
-
|
|
291
|
-
|
|
292
|
-
|
|
293
|
-
|
|
294
|
-
|
|
295
|
-
|
|
296
|
-
|
|
297
|
-
|
|
298
|
-
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
302
|
-
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
306
|
-
self.
|
|
307
|
-
|
|
308
|
-
|
|
309
|
-
|
|
310
|
-
|
|
311
|
-
|
|
312
|
-
|
|
313
|
-
|
|
314
|
-
|
|
315
|
-
|
|
316
|
-
|
|
317
|
-
|
|
318
|
-
|
|
319
|
-
|
|
320
|
-
|
|
321
|
-
|
|
322
|
-
|
|
323
|
-
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
332
|
-
|
|
333
|
-
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
|
|
337
|
-
"",
|
|
338
|
-
"
|
|
339
|
-
"
|
|
340
|
-
"
|
|
341
|
-
"
|
|
342
|
-
"",
|
|
343
|
-
"
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
349
|
-
|
|
350
|
-
self._gxy_data =
|
|
351
|
-
|
|
352
|
-
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
|
|
356
|
-
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
self.general_parameters["
|
|
367
|
-
self.general_parameters["
|
|
368
|
-
self.general_parameters["
|
|
369
|
-
self.
|
|
370
|
-
self.general_parameters["
|
|
371
|
-
self.
|
|
372
|
-
self.general_parameters["
|
|
373
|
-
self.general_parameters["
|
|
374
|
-
self.general_parameters["
|
|
375
|
-
self.general_parameters["
|
|
376
|
-
self.general_parameters["
|
|
377
|
-
self.general_parameters["
|
|
378
|
-
self.general_parameters["
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
|
|
382
|
-
|
|
383
|
-
|
|
384
|
-
|
|
385
|
-
|
|
386
|
-
self.general_parameters["
|
|
387
|
-
self.general_parameters["
|
|
388
|
-
self.general_parameters["
|
|
389
|
-
self.
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
|
397
|
-
self.general_parameters["
|
|
398
|
-
self.general_parameters["
|
|
399
|
-
self.general_parameters["
|
|
400
|
-
self.general_parameters["
|
|
401
|
-
self.general_parameters["
|
|
402
|
-
|
|
403
|
-
|
|
404
|
-
|
|
405
|
-
|
|
406
|
-
|
|
407
|
-
|
|
408
|
-
|
|
409
|
-
(self.
|
|
410
|
-
(self.
|
|
411
|
-
(self.
|
|
412
|
-
|
|
413
|
-
|
|
414
|
-
|
|
415
|
-
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
|
|
419
|
-
|
|
420
|
-
|
|
421
|
-
|
|
422
|
-
|
|
423
|
-
|
|
424
|
-
|
|
425
|
-
|
|
426
|
-
#
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
431
|
-
|
|
432
|
-
|
|
433
|
-
|
|
434
|
-
unit.
|
|
435
|
-
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
|
|
439
|
-
|
|
440
|
-
|
|
441
|
-
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
445
|
-
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
|
|
456
|
-
"
|
|
457
|
-
"
|
|
458
|
-
"
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
|
|
462
|
-
|
|
463
|
-
|
|
464
|
-
|
|
465
|
-
|
|
466
|
-
|
|
467
|
-
|
|
468
|
-
block["start"]
|
|
469
|
-
|
|
470
|
-
|
|
471
|
-
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
|
|
475
|
-
|
|
476
|
-
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
|
|
480
|
-
|
|
481
|
-
|
|
482
|
-
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
|
|
487
|
-
|
|
488
|
-
|
|
489
|
-
|
|
490
|
-
|
|
491
|
-
|
|
492
|
-
|
|
493
|
-
unit_name = unit_data[
|
|
494
|
-
|
|
495
|
-
|
|
496
|
-
|
|
497
|
-
|
|
498
|
-
|
|
499
|
-
|
|
500
|
-
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
|
|
504
|
-
|
|
505
|
-
|
|
506
|
-
|
|
507
|
-
|
|
508
|
-
|
|
509
|
-
|
|
510
|
-
|
|
511
|
-
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
|
|
515
|
-
|
|
516
|
-
|
|
517
|
-
|
|
518
|
-
|
|
519
|
-
|
|
520
|
-
|
|
521
|
-
|
|
522
|
-
|
|
523
|
-
self.
|
|
524
|
-
self.
|
|
525
|
-
self.
|
|
526
|
-
self.
|
|
527
|
-
self.
|
|
528
|
-
self.
|
|
529
|
-
self.
|
|
530
|
-
|
|
531
|
-
|
|
532
|
-
|
|
533
|
-
|
|
534
|
-
|
|
535
|
-
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
|
|
541
|
-
|
|
542
|
-
|
|
543
|
-
|
|
544
|
-
|
|
545
|
-
|
|
546
|
-
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
|
|
552
|
-
|
|
553
|
-
|
|
554
|
-
|
|
555
|
-
|
|
556
|
-
|
|
557
|
-
|
|
558
|
-
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
)
|
|
563
|
-
|
|
564
|
-
|
|
565
|
-
|
|
566
|
-
|
|
567
|
-
|
|
568
|
-
|
|
569
|
-
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
573
|
-
|
|
574
|
-
|
|
575
|
-
|
|
576
|
-
|
|
577
|
-
|
|
578
|
-
|
|
579
|
-
|
|
580
|
-
)
|
|
581
|
-
|
|
582
|
-
|
|
583
|
-
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
592
|
-
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
|
|
596
|
-
|
|
597
|
-
|
|
598
|
-
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
|
|
602
|
-
|
|
603
|
-
|
|
604
|
-
general_block
|
|
605
|
-
|
|
606
|
-
|
|
607
|
-
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
comment_n
|
|
615
|
-
|
|
616
|
-
|
|
617
|
-
|
|
618
|
-
|
|
619
|
-
|
|
620
|
-
|
|
621
|
-
|
|
622
|
-
|
|
623
|
-
|
|
624
|
-
|
|
625
|
-
|
|
626
|
-
|
|
627
|
-
|
|
628
|
-
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
|
|
632
|
-
|
|
633
|
-
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
|
|
637
|
-
|
|
638
|
-
|
|
639
|
-
|
|
640
|
-
|
|
641
|
-
|
|
642
|
-
|
|
643
|
-
|
|
644
|
-
|
|
645
|
-
|
|
646
|
-
|
|
647
|
-
|
|
648
|
-
|
|
649
|
-
)
|
|
650
|
-
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
|
|
654
|
-
|
|
655
|
-
|
|
656
|
-
|
|
657
|
-
|
|
658
|
-
|
|
659
|
-
|
|
660
|
-
|
|
661
|
-
|
|
662
|
-
|
|
663
|
-
#
|
|
664
|
-
|
|
665
|
-
unit_block =
|
|
666
|
-
|
|
667
|
-
|
|
668
|
-
|
|
669
|
-
|
|
670
|
-
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
|
|
674
|
-
|
|
675
|
-
|
|
676
|
-
|
|
677
|
-
|
|
678
|
-
|
|
679
|
-
|
|
680
|
-
|
|
681
|
-
|
|
682
|
-
|
|
683
|
-
|
|
684
|
-
|
|
685
|
-
|
|
686
|
-
|
|
687
|
-
|
|
688
|
-
|
|
689
|
-
|
|
690
|
-
|
|
691
|
-
|
|
692
|
-
|
|
693
|
-
|
|
694
|
-
|
|
695
|
-
|
|
696
|
-
|
|
697
|
-
|
|
698
|
-
|
|
699
|
-
|
|
700
|
-
|
|
701
|
-
|
|
702
|
-
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
709
|
-
|
|
710
|
-
|
|
711
|
-
""
|
|
712
|
-
|
|
713
|
-
|
|
714
|
-
|
|
715
|
-
|
|
716
|
-
|
|
717
|
-
|
|
718
|
-
|
|
719
|
-
|
|
720
|
-
|
|
721
|
-
|
|
722
|
-
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
728
|
-
|
|
729
|
-
|
|
730
|
-
|
|
731
|
-
|
|
732
|
-
|
|
733
|
-
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
|
|
737
|
-
|
|
738
|
-
|
|
739
|
-
|
|
740
|
-
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
751
|
-
|
|
752
|
-
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
756
|
-
|
|
757
|
-
|
|
758
|
-
|
|
759
|
-
|
|
760
|
-
|
|
761
|
-
|
|
762
|
-
|
|
763
|
-
|
|
764
|
-
|
|
765
|
-
|
|
766
|
-
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
|
|
770
|
-
|
|
771
|
-
|
|
772
|
-
|
|
773
|
-
|
|
774
|
-
|
|
775
|
-
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
782
|
-
|
|
783
|
-
|
|
784
|
-
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
792
|
-
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
798
|
-
|
|
799
|
-
|
|
800
|
-
|
|
801
|
-
|
|
802
|
-
|
|
803
|
-
|
|
804
|
-
|
|
805
|
-
|
|
806
|
-
|
|
807
|
-
|
|
808
|
-
|
|
809
|
-
|
|
810
|
-
|
|
811
|
-
|
|
812
|
-
|
|
813
|
-
|
|
814
|
-
|
|
815
|
-
|
|
816
|
-
|
|
817
|
-
|
|
818
|
-
|
|
819
|
-
|
|
820
|
-
|
|
821
|
-
|
|
822
|
-
|
|
823
|
-
|
|
824
|
-
|
|
825
|
-
|
|
826
|
-
|
|
827
|
-
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
|
|
831
|
-
|
|
1
|
+
"""
|
|
2
|
+
Flood Modeller Python API
|
|
3
|
+
Copyright (C) 2024 Jacobs U.K. Limited
|
|
4
|
+
|
|
5
|
+
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
|
|
6
|
+
as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
|
|
7
|
+
|
|
8
|
+
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
|
|
9
|
+
of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
|
|
10
|
+
|
|
11
|
+
You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
|
|
12
|
+
|
|
13
|
+
If you have any query about this program or this License, please contact us at support@floodmodeller.com or write to the following
|
|
14
|
+
address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London, SE1 2QG, United Kingdom.
|
|
15
|
+
"""
|
|
16
|
+
|
|
17
|
+
from __future__ import annotations
|
|
18
|
+
|
|
19
|
+
from pathlib import Path
|
|
20
|
+
from typing import Any
|
|
21
|
+
|
|
22
|
+
from . import units
|
|
23
|
+
from ._base import FMFile
|
|
24
|
+
from .units._base import Unit
|
|
25
|
+
from .units.helpers import _to_float, _to_int
|
|
26
|
+
from .util import handle_exception
|
|
27
|
+
from .validation.validation import _validate_unit
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
class DAT(FMFile):
|
|
31
|
+
"""Reads and write Flood Modeller datafile format '.dat'
|
|
32
|
+
|
|
33
|
+
Args:
|
|
34
|
+
dat_filepath (str, optional): Full filepath to dat file. If not specified, a new DAT class will be created. Defaults to None.
|
|
35
|
+
|
|
36
|
+
Output:
|
|
37
|
+
Initiates 'DAT' class object
|
|
38
|
+
|
|
39
|
+
Raises:
|
|
40
|
+
TypeError: Raised if dat_filepath does not point to a .dat file
|
|
41
|
+
FileNotFoundError: Raised if dat_filepath points to a file which does not exist
|
|
42
|
+
"""
|
|
43
|
+
|
|
44
|
+
_filetype: str = "DAT"
|
|
45
|
+
_suffix: str = ".dat"
|
|
46
|
+
|
|
47
|
+
@handle_exception(when="read")
|
|
48
|
+
def __init__(
|
|
49
|
+
self,
|
|
50
|
+
dat_filepath: str | Path | None = None,
|
|
51
|
+
with_gxy: bool = False,
|
|
52
|
+
from_json: bool = False,
|
|
53
|
+
) -> None:
|
|
54
|
+
if from_json:
|
|
55
|
+
return
|
|
56
|
+
if dat_filepath is not None:
|
|
57
|
+
FMFile.__init__(self, dat_filepath)
|
|
58
|
+
self._read()
|
|
59
|
+
|
|
60
|
+
else:
|
|
61
|
+
self._create_from_blank(with_gxy)
|
|
62
|
+
|
|
63
|
+
self._get_general_parameters()
|
|
64
|
+
self._get_unit_definitions()
|
|
65
|
+
|
|
66
|
+
def update(self) -> None:
|
|
67
|
+
"""Updates the existing DAT based on any altered attributes"""
|
|
68
|
+
self._update()
|
|
69
|
+
self._write_gxy(self._gxy_filepath)
|
|
70
|
+
|
|
71
|
+
def save(self, filepath: str | Path) -> None:
|
|
72
|
+
"""Saves the DAT to the given location, if pointing to an existing file it will be overwritten.
|
|
73
|
+
Once saved, the DAT() class will continue working from the saved location, therefore any further calls to DAT.update() will
|
|
74
|
+
update in the latest saved location rather than the original source DAT used to construct the class
|
|
75
|
+
|
|
76
|
+
Args:
|
|
77
|
+
filepath (str): Filepath to new save location including the name and '.dat' extension
|
|
78
|
+
|
|
79
|
+
Raises:
|
|
80
|
+
TypeError: Raised if given filepath doesn't point to a file suffixed '.dat'
|
|
81
|
+
"""
|
|
82
|
+
filepath = Path(filepath).absolute()
|
|
83
|
+
self._save(filepath)
|
|
84
|
+
self._write_gxy(filepath)
|
|
85
|
+
|
|
86
|
+
def _write_gxy(self, filepath):
|
|
87
|
+
if self._gxy_data is not None:
|
|
88
|
+
gxy_string = self._gxy_data
|
|
89
|
+
new_gxy_path = filepath.with_suffix(".gxy")
|
|
90
|
+
with open(new_gxy_path, "w") as gxy_file:
|
|
91
|
+
gxy_file.write(gxy_string)
|
|
92
|
+
self._gxy_filepath = new_gxy_path
|
|
93
|
+
|
|
94
|
+
def diff(self, other: DAT, force_print: bool = False) -> None:
|
|
95
|
+
"""Compares the DAT class against another DAT class to check whether they are
|
|
96
|
+
equivalent, or if not, what the differences are. Two instances of a DAT class are
|
|
97
|
+
deemed equivalent if all of their attributes are equal except for the filepath and
|
|
98
|
+
raw data. For example, two DAT files from different filepaths that had the same
|
|
99
|
+
data except maybe some differences in decimal places and some default parameters
|
|
100
|
+
ommitted, would be classed as equivalent as they would produce the same DAT instance
|
|
101
|
+
and write the exact same data.
|
|
102
|
+
|
|
103
|
+
The result is printed to the console. If you need to access the returned data, use
|
|
104
|
+
the method ``DAT._get_diff()``
|
|
105
|
+
|
|
106
|
+
Args:
|
|
107
|
+
other (floodmodeller_api.DAT): Other instance of a DAT class
|
|
108
|
+
force_print (bool): Forces the API to print every difference found, rather than
|
|
109
|
+
just the first 25 differences. Defaults to False.
|
|
110
|
+
"""
|
|
111
|
+
self._diff(other, force_print=force_print)
|
|
112
|
+
|
|
113
|
+
# def _get_unit_from_connectivity(self, method) #use this as method prev and next
|
|
114
|
+
|
|
115
|
+
@handle_exception(when="calculate next unit in")
|
|
116
|
+
def next(self, unit: Unit) -> Unit | list[Unit] | None:
|
|
117
|
+
"""Finds next unit in the reach.
|
|
118
|
+
|
|
119
|
+
Next unit in reach can be infered by:
|
|
120
|
+
The next unit in the .dat file structure - such as when a river section has a positive distance to next
|
|
121
|
+
The units with the exact same name - such as a junction unit
|
|
122
|
+
The next unit as described in the ds_label - such as with Bridge units
|
|
123
|
+
|
|
124
|
+
Args:
|
|
125
|
+
unit (Unit): flood modeller unit input.
|
|
126
|
+
|
|
127
|
+
Returns:
|
|
128
|
+
Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
|
|
129
|
+
"""
|
|
130
|
+
# Needs to handle same name match outside dist to next (e.g. inflow)
|
|
131
|
+
if hasattr(unit, "dist_to_next"):
|
|
132
|
+
# Case 1a - positive distance to next
|
|
133
|
+
if unit.dist_to_next != 0:
|
|
134
|
+
return self._next_in_dat_struct(unit)
|
|
135
|
+
|
|
136
|
+
# Case 1b - distance to next = 0
|
|
137
|
+
return self._name_label_match(unit)
|
|
138
|
+
|
|
139
|
+
# Case 2: next unit is in ds_label
|
|
140
|
+
if hasattr(unit, "ds_label"):
|
|
141
|
+
return self._name_label_match(unit, name_override=unit.ds_label)
|
|
142
|
+
|
|
143
|
+
if unit._unit == "JUNCTION":
|
|
144
|
+
return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
|
|
145
|
+
|
|
146
|
+
if unit._unit in ("QHBDY", "NCDBDY", "TIDBDY"):
|
|
147
|
+
return None
|
|
148
|
+
|
|
149
|
+
return self._name_label_match(unit)
|
|
150
|
+
|
|
151
|
+
@handle_exception(when="calculate previous unit in")
|
|
152
|
+
def prev(self, unit: Unit) -> Unit | list[Unit] | None:
|
|
153
|
+
"""Finds previous unit in the reach.
|
|
154
|
+
|
|
155
|
+
Previous unit in reach can be infered by:
|
|
156
|
+
The previous unit in the .dat file structure - such as when the previous river section has a positive distance to next.
|
|
157
|
+
The units with the exact same name - such as a junction unit
|
|
158
|
+
The previous unit as linked through upstream and downstream labels - such as with Bridge units
|
|
159
|
+
|
|
160
|
+
Args:
|
|
161
|
+
unit (Unit): flood modeller unit input.
|
|
162
|
+
|
|
163
|
+
Returns:
|
|
164
|
+
Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
|
|
165
|
+
"""
|
|
166
|
+
# Case 1: Unit is input boundary condition
|
|
167
|
+
if unit._unit in (
|
|
168
|
+
"QTBDY",
|
|
169
|
+
"HTBDY",
|
|
170
|
+
"REFHBDY",
|
|
171
|
+
"FEHBDY",
|
|
172
|
+
"FRQSIM",
|
|
173
|
+
"FSRBDY",
|
|
174
|
+
"FSSR16BDY",
|
|
175
|
+
"GERRBDY",
|
|
176
|
+
"REBDY",
|
|
177
|
+
"REFH2BDY",
|
|
178
|
+
"SCSBDY",
|
|
179
|
+
):
|
|
180
|
+
return None
|
|
181
|
+
|
|
182
|
+
if unit._unit == "JUNCTION":
|
|
183
|
+
return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
|
|
184
|
+
|
|
185
|
+
prev_units = []
|
|
186
|
+
_prev_in_dat = self._prev_in_dat_struct(unit)
|
|
187
|
+
_name_match = self._name_label_match(unit)
|
|
188
|
+
_ds_label_match = self._ds_label_match(unit)
|
|
189
|
+
_junction_match = [
|
|
190
|
+
junction
|
|
191
|
+
for junction in self._all_units
|
|
192
|
+
if junction._unit == "JUNCTION" and unit.name in junction.labels
|
|
193
|
+
]
|
|
194
|
+
|
|
195
|
+
# Case 2: Previous unit has positive distance to next
|
|
196
|
+
if (
|
|
197
|
+
_prev_in_dat
|
|
198
|
+
and hasattr(_prev_in_dat, "dist_to_next")
|
|
199
|
+
and _prev_in_dat.dist_to_next != 0
|
|
200
|
+
):
|
|
201
|
+
prev_units.append(_prev_in_dat)
|
|
202
|
+
_name_match = None # Name match does apply if upstream section exists
|
|
203
|
+
|
|
204
|
+
# All other matches added (matching name, matching name to ds_label and junciton)
|
|
205
|
+
for match in [_name_match, _ds_label_match, _junction_match]:
|
|
206
|
+
if isinstance(match, list):
|
|
207
|
+
prev_units.extend(match)
|
|
208
|
+
elif match:
|
|
209
|
+
prev_units.append(match)
|
|
210
|
+
|
|
211
|
+
if len(prev_units) == 0:
|
|
212
|
+
return None
|
|
213
|
+
if len(prev_units) == 1:
|
|
214
|
+
return prev_units[0]
|
|
215
|
+
return prev_units
|
|
216
|
+
|
|
217
|
+
def _next_in_dat_struct(self, current_unit: Unit) -> Unit | None:
|
|
218
|
+
"""Finds next unit in the dat file using the index position.
|
|
219
|
+
|
|
220
|
+
Returns:
|
|
221
|
+
Unit with all associated data
|
|
222
|
+
"""
|
|
223
|
+
|
|
224
|
+
for idx, unit in enumerate(self._all_units):
|
|
225
|
+
# Names checked first to speed up comparison
|
|
226
|
+
if unit.name == current_unit.name and unit == current_unit:
|
|
227
|
+
try:
|
|
228
|
+
return self._all_units[idx + 1]
|
|
229
|
+
except IndexError:
|
|
230
|
+
return None
|
|
231
|
+
|
|
232
|
+
return None
|
|
233
|
+
|
|
234
|
+
def _prev_in_dat_struct(self, current_unit: Unit) -> Unit | None:
|
|
235
|
+
"""Finds previous unit in the dat file using the index position.
|
|
236
|
+
|
|
237
|
+
Returns:
|
|
238
|
+
Unit with all associated data
|
|
239
|
+
"""
|
|
240
|
+
for idx, unit in enumerate(self._all_units):
|
|
241
|
+
# Names checked first to speed up comparison
|
|
242
|
+
if unit.name == current_unit.name and unit == current_unit:
|
|
243
|
+
if idx == 0:
|
|
244
|
+
return None
|
|
245
|
+
return self._all_units[idx - 1]
|
|
246
|
+
|
|
247
|
+
return None
|
|
248
|
+
|
|
249
|
+
def _ds_label_match(self, current_unit: Unit) -> Unit | list[Unit] | None:
|
|
250
|
+
"""Pulls out all units with ds label that matches the input unit.
|
|
251
|
+
|
|
252
|
+
Returns:
|
|
253
|
+
Union[Unit, list[Unit], None]: Either a singular unit or list of units with ds_label matching, if none exist returns none.
|
|
254
|
+
"""
|
|
255
|
+
|
|
256
|
+
_ds_list = []
|
|
257
|
+
for item in self._all_units:
|
|
258
|
+
try:
|
|
259
|
+
if item.ds_label == current_unit.name:
|
|
260
|
+
_ds_list.append(item)
|
|
261
|
+
except AttributeError:
|
|
262
|
+
continue
|
|
263
|
+
|
|
264
|
+
if len(_ds_list) == 0:
|
|
265
|
+
return None
|
|
266
|
+
if len(_ds_list) == 1:
|
|
267
|
+
return _ds_list[0]
|
|
268
|
+
return _ds_list
|
|
269
|
+
|
|
270
|
+
def _name_label_match(
|
|
271
|
+
self,
|
|
272
|
+
current_unit: Unit,
|
|
273
|
+
name_override: str | None = None,
|
|
274
|
+
) -> Unit | list[Unit] | None:
|
|
275
|
+
"""Pulls out all units with same name as the input unit.
|
|
276
|
+
|
|
277
|
+
Returns:
|
|
278
|
+
Union[Unit, list[Unit], None]: Either a singular unit or list of units with matching names, if none exist returns none. Does not return itself
|
|
279
|
+
"""
|
|
280
|
+
|
|
281
|
+
_name = name_override or str(current_unit.name)
|
|
282
|
+
_name_list = []
|
|
283
|
+
for item in self._all_units:
|
|
284
|
+
if item.name == _name and item != current_unit:
|
|
285
|
+
_name_list.append(item)
|
|
286
|
+
else:
|
|
287
|
+
pass
|
|
288
|
+
|
|
289
|
+
if len(_name_list) == 0:
|
|
290
|
+
return None
|
|
291
|
+
if len(_name_list) == 1:
|
|
292
|
+
return _name_list[0]
|
|
293
|
+
return _name_list
|
|
294
|
+
|
|
295
|
+
def _read(self):
|
|
296
|
+
# Read DAT data
|
|
297
|
+
with open(self._filepath) as dat_file:
|
|
298
|
+
self._raw_data = [line.rstrip("\n") for line in dat_file.readlines()]
|
|
299
|
+
|
|
300
|
+
# Generate DAT structure
|
|
301
|
+
self._update_dat_struct()
|
|
302
|
+
|
|
303
|
+
# Get network .gxy if present
|
|
304
|
+
gxy_path = self._filepath.with_suffix(".gxy")
|
|
305
|
+
if gxy_path.exists():
|
|
306
|
+
self._gxy_filepath = gxy_path
|
|
307
|
+
with open(self._gxy_filepath) as gxy_file:
|
|
308
|
+
self._gxy_data = gxy_file.read()
|
|
309
|
+
else:
|
|
310
|
+
self._gxy_filepath = None
|
|
311
|
+
self._gxy_data = None
|
|
312
|
+
|
|
313
|
+
@handle_exception(when="write")
def _write(self) -> str:
    """Build the full text of the DAT file in its current in-memory state.

    Returns:
        str: Complete DAT contents, including changes not yet saved to disk,
        terminated by a trailing newline.
    """
    # Refresh every derived view of the data before serialising
    self._update_raw_data()
    self._update_general_parameters()
    self._update_dat_struct()
    self._update_unit_names()

    dat_text = "\n".join(self._raw_data)
    return f"{dat_text}\n"
|
|
326
|
+
|
|
327
|
+
def _create_from_blank(self, with_gxy: bool = False) -> None:
|
|
328
|
+
# No filepath specified, create new 'blank' DAT in memory
|
|
329
|
+
# ** Update these to have minimal data needed (general header, empty IC header)
|
|
330
|
+
self._dat_struct = [
|
|
331
|
+
{"start": 0, "Type": "GENERAL", "end": 6},
|
|
332
|
+
{"Type": "INITIAL CONDITIONS", "start": 7, "end": 8},
|
|
333
|
+
]
|
|
334
|
+
self._raw_data = [
|
|
335
|
+
"",
|
|
336
|
+
"#REVISION#1",
|
|
337
|
+
" 0 0.750 0.900 0.100 0.001 12SI",
|
|
338
|
+
" 10.000 0.010 0.010 0.700 0.100 0.700 0.000",
|
|
339
|
+
"RAD FILE",
|
|
340
|
+
"",
|
|
341
|
+
"END GENERAL",
|
|
342
|
+
"INITIAL CONDITIONS",
|
|
343
|
+
" label ? flow stage froude no velocity umode ustate z",
|
|
344
|
+
]
|
|
345
|
+
|
|
346
|
+
self._gxy_filepath = None
|
|
347
|
+
if with_gxy:
|
|
348
|
+
self._gxy_data = ""
|
|
349
|
+
else:
|
|
350
|
+
self._gxy_data = None
|
|
351
|
+
|
|
352
|
+
def _get_general_parameters(self) -> None:
    """Parse the GENERAL header block into self.title, self._label_len and
    self.general_parameters."""
    split_10_char = units.helpers.split_10_char

    self.title = self._raw_data[0]

    # First parameter row, padded to 70 chars so all seven fields are present
    params = split_10_char(f"{self._raw_data[2]:<70}")
    if params[6] == "":
        # Adds the measurements unit as DEFAULT if not specified
        params[6] = "DEFAULT"
    # Second parameter row appended onto the same list
    params.extend(split_10_char(f"{self._raw_data[3]:<70}"))

    self._label_len = _to_int(params[5], 12)  # label length
    self.general_parameters = {
        "Node Count": _to_int(params[0], 0),
        "Lower Froude": _to_float(params[1], 0.75),
        "Upper Froude": _to_float(params[2], 0.9),
        "Min Depth": _to_float(params[3], 0.1),
        "Convergence Direct": _to_float(params[4], 0.001),
        "Units": params[6],  # "DEFAULT" substituted above when blank
        "Water Temperature": _to_float(params[7], 10.0),
        "Convergence Flow": _to_float(params[8], 0.01),
        "Convergence Head": _to_float(params[9], 0.01),
        "Mathematical Damping": _to_float(params[10], 0.7),
        "Pivotal Choice": _to_float(params[11], 0.1),
        "Under-relaxation": _to_float(params[12], 0.7),
        "Matrix Dummy": _to_float(params[13], 0.0),
        "RAD File": self._raw_data[5],  # No default, optional
    }
|
|
379
|
+
|
|
380
|
+
def _update_general_parameters(self) -> None:
    """Write the title and general parameters back into the raw GENERAL lines."""
    gp = self.general_parameters
    join_10_char = units.helpers.join_10_char

    self._raw_data[0] = self.title
    self._raw_data[5] = gp["RAD File"]

    # Row one: node count, froude limits, depth/convergence settings and label
    # length, with the units string appended un-padded at the end
    row_one = join_10_char(
        gp["Node Count"],
        gp["Lower Froude"],
        gp["Upper Froude"],
        gp["Min Depth"],
        gp["Convergence Direct"],
        self._label_len,
    )
    self._raw_data[2] = row_one + gp["Units"]

    # Row two: the remaining solver tuning parameters
    self._raw_data[3] = join_10_char(
        gp["Water Temperature"],
        gp["Convergence Flow"],
        gp["Convergence Head"],
        gp["Mathematical Damping"],
        gp["Pivotal Choice"],
        gp["Under-relaxation"],
        gp["Matrix Dummy"],
    )
|
|
404
|
+
|
|
405
|
+
def _update_unit_names(self):
    """Propagate in-memory unit renames through every related data store.

    For each unit whose ``unit.name`` no longer matches its dict key, the key
    is updated in its unit group and the label is renamed in the initial
    conditions table (except for boundaries/losses), the GISINFO block and the
    GXY data. Finally the IC table lines in ``self._raw_data`` are rewritten.

    Raises:
        Exception: If the new name already exists in the same unit group.
    """
    for unit_group, unit_group_name in [
        (self.boundaries, "boundaries"),
        (self.sections, "sections"),
        (self.structures, "structures"),
        (self.conduits, "conduits"),
        (self.losses, "losses"),
    ]:
        # Iterate over a copy: renaming mutates the group dict mid-loop
        for name, unit in unit_group.copy().items():
            if name != unit.name:
                # Check if new name already exists as a label
                if unit.name in unit_group:
                    raise Exception(
                        f'Error: Cannot update label "{name}" to "{unit.name}" because "{unit.name}" already exists in the Network {unit_group_name} group',
                    )
                unit_group[unit.name] = unit
                del unit_group[name]
                # Update label in ICs (skipped for boundaries and losses groups)
                if unit_group_name not in ["boundaries", "losses"]:
                    # TODO: Need to do a more thorough check for whether a unit is one in the ICs
                    # e.g. Culvert inlet and river section may have same label, but only river
                    # section label should update in ICs
                    self.initial_conditions.update_label(name, unit.name)

                # Update label in GISINFO and GXY data
                self._update_gisinfo_label(
                    unit._unit,
                    unit._subtype,
                    name,
                    unit.name,
                    unit_group_name
                    in ["boundaries", "losses"],  # if True it ignores second lbl
                )
                self._update_gxy_label(unit._unit, unit._subtype, name, unit.name)

    # Update IC table names in raw_data if any name changes
    ic_start, ic_end = next(
        (unit["start"], unit["end"])
        for unit in self._dat_struct
        if unit["Type"] == "INITIAL CONDITIONS"
    )
    self._raw_data[ic_start : ic_end + 1] = self.initial_conditions._write()
|
|
447
|
+
|
|
448
|
+
def _update_raw_data(self):
    """Rewrite self._raw_data so every block reflects its unit's current state.

    Walks self._dat_struct in order, re-serialising each supported block from
    its in-memory unit object (or splicing in newly inserted blocks), while
    ``block_shift`` tracks how much earlier edits have moved later blocks.
    """
    block_shift = 0  # net change in line count from blocks already processed
    comment_tracker = 0  # index of the next COMMENT unit to serialise
    comment_units = [unit for unit in self._all_units if unit._unit == "COMMENT"]
    prev_block_end = self._dat_struct[0]["end"]
    # NOTE(review): populated below but never read within this method — confirm
    # whether it is still needed
    existing_units = {
        "boundaries": [],
        "structures": [],
        "sections": [],
        "conduits": [],
        "losses": [],
    }

    for block in self._dat_struct:
        # Check for all supported boundary types
        if block["Type"] in units.SUPPORTED_UNIT_TYPES:
            # clause for when unit has been inserted into the dat file:
            # splice its lines in directly after the previous block and
            # record its real start/end positions
            if "new_insert" in block:
                block["start"] = prev_block_end + 1
                block["end"] = block["start"] + len(block["new_insert"]) - 1
                self._raw_data[block["start"] : block["start"]] = block["new_insert"]
                block_shift += len(block["new_insert"])
                prev_block_end = block["end"]
                del block["new_insert"]

            else:
                # Existing block: slice out its current lines, offset by the
                # shift accumulated from edits earlier in this pass
                unit_data = self._raw_data[
                    block["start"] + block_shift : block["end"] + 1 + block_shift
                ]
                prev_block_len = len(unit_data)

                if block["Type"] == "INITIAL CONDITIONS":
                    new_unit_data = self.initial_conditions._write()
                elif block["Type"] == "COMMENT":
                    # COMMENT blocks carry no label, so pair them with their
                    # unit objects by position
                    comment = comment_units[comment_tracker]
                    new_unit_data = comment._write()
                    comment_tracker += 1

                elif block["Type"] == "VARIABLES":
                    new_unit_data = self.variables._write()

                else:
                    # Subtyped units carry their label on line 3, others line 2
                    if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
                        unit_name = unit_data[2][: self._label_len].strip()
                    else:
                        unit_name = unit_data[1][: self._label_len].strip()

                    # Get unit object
                    unit_group = getattr(
                        self,
                        units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"],
                    )
                    if unit_name in unit_group:
                        # block still exists
                        new_unit_data = unit_group[unit_name]._write()
                        existing_units[
                            units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
                        ].append(unit_name)
                    else:
                        # Bdy block has been deleted — drop its lines entirely
                        new_unit_data = []

                new_block_len = len(new_unit_data)
                self._raw_data[
                    block["start"] + block_shift : block["end"] + 1 + block_shift
                ] = new_unit_data
                # adjust block shift for change in number of lines in bdy block
                block_shift += new_block_len - prev_block_len
                prev_block_end = (
                    block["end"] + block_shift
                )  # add in to keep a record of the last block read in
|
|
519
|
+
|
|
520
|
+
def _get_unit_definitions(self):  # noqa: C901
    """Parse self._raw_data into unit objects.

    Populates the unit group dicts (sections, boundaries, structures, conduits,
    losses), self._unsupported, and the ordered self._all_units list, plus the
    dedicated self.initial_conditions and self.variables objects.

    Raises:
        Exception: If a duplicate label occurs within a unit category, or an
            unexpected unit type is encountered.
    """
    # Get unit definitions
    self.sections = {}
    self.boundaries = {}
    self.structures = {}
    self.conduits = {}
    self.losses = {}
    self._unsupported = {}
    self._all_units = []
    for block in self._dat_struct:
        unit_data = self._raw_data[block["start"] : block["end"] + 1]
        block_type = block["Type"]
        if block_type in units.SUPPORTED_UNIT_TYPES:
            # Initial conditions, comments and variables are special-cased:
            # they are not label-keyed unit groups
            if block_type == "INITIAL CONDITIONS":
                self.initial_conditions = units.IIC(unit_data, n=self._label_len)
                continue

            if block_type == "COMMENT":
                self._all_units.append(units.COMMENT(unit_data, n=self._label_len))
                continue

            if block_type == "VARIABLES":
                self.variables = units.Variables(unit_data)
                continue

            # Check to see whether unit type has associated subtypes so that
            # unit name can be correctly assigned (label on line 3 vs line 2)
            if units.SUPPORTED_UNIT_TYPES[block_type]["has_subtype"]:
                unit_name = unit_data[2][: self._label_len].strip()
            else:
                unit_name = unit_data[1][: self._label_len].strip()

            # Create instance of unit and add to relevant group
            unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[block_type]["group"])
            if unit_name in unit_group:
                raise Exception(
                    f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block_type]["group"]}',
                )
            # Class names replace spaces/dashes with underscores, eg Flat-V Weir
            unit_type = block_type.replace(" ", "_").replace("-", "_")
            # Look the unit class up directly instead of eval-ing a generated
            # code string: safer, faster, and not dependent on repr() of
            # unit_data round-tripping through the parser
            unit_class = getattr(units, unit_type)
            unit_group[unit_name] = unit_class(unit_data, self._label_len)
            self._all_units.append(unit_group[unit_name])

        elif block_type in units.UNSUPPORTED_UNIT_TYPES:
            # Check to see whether unit type has associated subtypes so that
            # unit name can be correctly assigned
            if units.UNSUPPORTED_UNIT_TYPES[block_type]["has_subtype"]:
                unit_name = unit_data[2][: self._label_len].strip()
                subtype = True
            else:
                unit_name = unit_data[1][: self._label_len].strip()
                subtype = False

            # Unsupported units are wrapped in a generic holder, keyed as
            # "name (TYPE)" so labels cannot clash across unit types
            self._unsupported[f"{unit_name} ({block_type})"] = units.UNSUPPORTED(
                unit_data,
                self._label_len,
                unit_name=unit_name,
                unit_type=block_type,
                subtype=subtype,
            )
            self._all_units.append(self._unsupported[f"{unit_name} ({block_type})"])

        elif block_type not in ("GENERAL", "GISINFO"):
            raise Exception(f"Unexpected unit type encountered: {block_type}")
|
|
584
|
+
|
|
585
|
+
def _update_dat_struct(self) -> None:  # noqa: C901, PLR0912
    """Internal method used to update self._dat_struct which details the overall structure of the dat file as a list of blocks, each of which
    are a dictionary containing the 'start', 'end' and 'Type' of the block.

    Scans self._raw_data line by line: the GENERAL header is closed at
    "END GENERAL"; COMMENT blocks are consumed by their declared line count
    (since their text could contain unit keywords); everything after "GISINFO"
    belongs to the GISINFO block; otherwise a line starting with a one- or
    two-word unit keyword opens a new unit block.
    """
    # Generate DAT structure
    dat_struct = []
    in_block = False
    in_general = True
    in_comment = False
    comment_n = None  # Used as counter for number of lines in a comment block
    gisinfo_block = False
    general_block = {"start": 0, "Type": "GENERAL"}
    unit_block: dict[str, Any] = {}
    for idx, line in enumerate(self._raw_data):
        # Deal with 'general' header
        if in_general is True:
            if line == "END GENERAL":
                general_block["end"] = idx
                dat_struct.append(general_block)
                in_general = False
            continue

        # Deal with comment blocks explicitly as they could contain unit keywords
        if in_comment and comment_n is None:
            # Line immediately after "COMMENT" declares the comment length
            comment_n = int(line.strip())
            continue
        if in_comment:
            comment_n -= 1
            if comment_n <= 0:
                unit_block["end"] = idx + comment_n  # add ending index
                # append existing bdy block to the dat_struct
                dat_struct.append(unit_block)
                unit_block = {}  # reset bdy block
                in_comment = False
                in_block = False
                comment_n = None
            continue  # move onto next line as still in comment block

        if line == "COMMENT":
            in_comment = True
            unit_block, in_block = self._close_struct_block(
                dat_struct,
                "COMMENT",
                unit_block,
                in_block,
                idx,
            )
            continue

        if line == "GISINFO":
            gisinfo_block = True
            unit_block, in_block = self._close_struct_block(
                dat_struct,
                "GISINFO",
                unit_block,
                in_block,
                idx,
            )

        if not gisinfo_block:
            if line.split(" ")[0] in units.ALL_UNIT_TYPES:
                # The " " is needed here in case of empty string
                unit_type = line.split()[0]
            elif " ".join(line.split()[:2]) in units.ALL_UNIT_TYPES:
                # Two-word unit keywords, eg "FLAT-V WEIR"-style types
                unit_type = " ".join(line.split()[:2])
            else:
                continue

            unit_block, in_block = self._close_struct_block(
                dat_struct,
                unit_type,
                unit_block,
                in_block,
                idx,
            )

    if len(unit_block) != 0:
        # Only adds end block if there is a block present (i.e. an empty DAT stays empty)
        # add ending index for final block
        unit_block["end"] = len(self._raw_data) - 1
        dat_struct.append(unit_block)  # add final block

    self._dat_struct = dat_struct
|
|
669
|
+
|
|
670
|
+
def _close_struct_block( # noqa: PLR0913
|
|
671
|
+
self,
|
|
672
|
+
dat_struct: list[dict],
|
|
673
|
+
unit_type: str,
|
|
674
|
+
unit_block: dict,
|
|
675
|
+
in_block: bool,
|
|
676
|
+
idx: int,
|
|
677
|
+
) -> tuple[dict, bool]:
|
|
678
|
+
"""Helper method to close block in dat struct"""
|
|
679
|
+
if in_block is True:
|
|
680
|
+
unit_block["end"] = idx - 1 # add ending index
|
|
681
|
+
# append existing bdy block to the dat_struct
|
|
682
|
+
dat_struct.append(unit_block)
|
|
683
|
+
unit_block = {} # reset bdy block
|
|
684
|
+
in_block = True
|
|
685
|
+
unit_block["Type"] = unit_type # start new bdy block
|
|
686
|
+
unit_block["start"] = idx # add starting index
|
|
687
|
+
|
|
688
|
+
return unit_block, in_block
|
|
689
|
+
|
|
690
|
+
@handle_exception(when="remove unit from")
def remove_unit(self, unit: Unit) -> None:
    """Remove a unit from the dat file.

    Removes the unit from the network list, the dat structure, the raw file
    lines, its unit group and the initial conditions table, then decrements
    the node count.

    Args:
        unit (Unit): flood modeller unit input.

    Raises:
        TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
    """
    # catch if not valid unit
    if not isinstance(unit, Unit):
        raise TypeError("unit isn't a unit")

    # remove from all units
    index = self._all_units.index(unit)
    del self._all_units[index]
    # remove from dat_struct
    # NOTE(review): index + 1 assumes _dat_struct holds exactly one leading
    # non-unit block (GENERAL) ahead of the blocks mirroring _all_units —
    # confirm for files where other non-unit blocks precede this unit
    dat_struct_unit = self._dat_struct[index + 1]
    del self._dat_struct[index + 1]
    # remove from raw data
    del self._raw_data[dat_struct_unit["start"] : dat_struct_unit["end"] + 1]
    # remove from unit group
    unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
    unit_group = getattr(self, unit_group_name)
    del unit_group[unit.name]
    # remove from ICs by keeping only rows whose label differs
    self.initial_conditions.data = self.initial_conditions.data.loc[
        self.initial_conditions.data["label"] != unit.name
    ]

    self._update_dat_struct()
    self.general_parameters["Node Count"] -= 1
|
|
723
|
+
|
|
724
|
+
@handle_exception(when="insert unit into")
def insert_unit(  # noqa: C901, PLR0912, PLR0913
    self,
    unit: Unit,
    add_before: Unit | None = None,
    add_after: Unit | None = None,
    add_at: int | None = None,
    defer_update: bool = False,
) -> None:
    """Inserts a unit into the dat file.

    Args:
        unit (Unit): FloodModeller unit input.
        add_before (Unit): FloodModeller unit to add before.
        add_after (Unit): FloodModeller unit to add after.
        add_at (integer): Positional argument (starting at 0) of where to add in
            the dat file. To add at the end of the network you can use -1.
        defer_update (bool): When True, skip the raw-data/struct refresh so a
            caller inserting many units can refresh once at the end.

    Raises:
        SyntaxError: Raised if no positional argument is given.
        TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
        NameError: Raised if unit name already appears in unit group.
    """
    # catch errors: exactly one of add_before/add_after/add_at must be given
    provided_params = sum(arg is not None for arg in (add_before, add_after, add_at))
    if provided_params == 0:
        raise SyntaxError(
            "No positional argument given. Please provide either add_before, add_at or add_after",
        )
    if provided_params > 1:
        raise SyntaxError("Only one of add_at, add_before, or add_after required")
    if not isinstance(unit, Unit):
        raise TypeError("unit isn't a unit")
    if add_at is None and not (isinstance(add_before, Unit) or isinstance(add_after, Unit)):
        raise TypeError(
            "add_before or add_after argument must be a Flood Modeller Unit type",
        )

    unit_class = unit._unit
    # COMMENT units carry no label, group membership or IC entry, so all
    # group-level checks and updates below are skipped for them
    if unit_class != "COMMENT":
        _validate_unit(unit)
        unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
        unit_group = getattr(self, unit_group_name)
        if unit.name in unit_group:
            raise NameError(
                "Name already appears in unit group. Cannot have two units with same name in same group",
            )

    # positional argument
    if add_at is not None:
        insert_index = add_at
        if insert_index < 0:
            # Negative indices count from the end (-1 == append)
            insert_index += len(self._all_units) + 1
            if insert_index < 0:
                raise Exception(f"invalid add_at index: {add_at}")
    else:
        # Locate the anchor unit; insert at its slot, or just after for add_after
        check_unit = add_before or add_after
        for index, thing in enumerate(self._all_units):
            if thing == check_unit:
                insert_index = index
                insert_index += 1 if add_after else 0
                break
        else:
            raise Exception(
                f"{check_unit} not found in dat network, so cannot be used to add before/after",
            )

    unit_data = unit._write()
    self._all_units.insert(insert_index, unit)
    if unit._unit != "COMMENT":
        unit_group[unit.name] = unit
    # NOTE(review): the +1 offset assumes _dat_struct has exactly one block
    # (GENERAL) ahead of the blocks mirroring _all_units — confirm
    self._dat_struct.insert(
        insert_index + 1,
        {"Type": unit_class, "new_insert": unit_data},
    )  # add to dat struct without unit.name

    if unit._unit != "COMMENT":
        # update the iic's tables with a default initial-conditions row
        iic_data = [unit.name, "y", 00.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
        self.initial_conditions.data.loc[len(self.initial_conditions.data)] = iic_data  # flaged

    # update all
    if unit._unit != "COMMENT":
        self.general_parameters["Node Count"] += 1  # flag no update for comments

    if not defer_update:
        self._update_raw_data()
        self._update_dat_struct()
|
|
812
|
+
|
|
813
|
+
def insert_units(
    self,
    units: list[Unit],
    add_before: Unit | None = None,
    add_after: Unit | None = None,
    add_at: int | None = None,
) -> None:
    """Insert a list of units into the dat file.

    Args:
        units (list[Unit]): List of FloodModeller units.
        add_before (Unit): FloodModeller unit to add before.
        add_after (Unit): FloodModeller unit to add after.
        add_at (integer): Positional argument (starting at 0) of where to add in
            the dat file. To add at the end of the network you can use -1.
    """
    # Repeated insertion at a fixed position reverses the list, so flip it
    # first unless the insertion point naturally preserves order (add_before
    # semantics, or a negative add_at which tracks the growing end).
    keeps_order = (add_at is None and add_after is None) or (
        isinstance(add_at, int) and add_at < 0
    )
    for unit in units if keeps_order else reversed(units):
        self.insert_unit(unit, add_before, add_after, add_at, defer_update=True)
    # Refresh derived data once, after all units are in place
    self._update_raw_data()
    self._update_dat_struct()
|
|
835
|
+
|
|
836
|
+
def _update_gisinfo_label( # noqa: PLR0913
|
|
837
|
+
self,
|
|
838
|
+
unit_type,
|
|
839
|
+
unit_subtype,
|
|
840
|
+
prev_lbl,
|
|
841
|
+
new_lbl,
|
|
842
|
+
ignore_second,
|
|
843
|
+
):
|
|
844
|
+
"""Update labels in GISINFO block if unit is renamed"""
|
|
845
|
+
|
|
846
|
+
start, end = next(
|
|
847
|
+
(block["start"], block["end"])
|
|
848
|
+
for block in self._dat_struct
|
|
849
|
+
if block["Type"] == "GISINFO"
|
|
850
|
+
)
|
|
851
|
+
gisinfo_block = self._raw_data[start : end + 1]
|
|
852
|
+
|
|
853
|
+
prefix = unit_type if unit_subtype is None else f"{unit_type} {unit_subtype}"
|
|
854
|
+
|
|
855
|
+
new_gisinfo_block = []
|
|
856
|
+
for line in gisinfo_block:
|
|
857
|
+
# Replace first label
|
|
858
|
+
if line.startswith(f"{prefix} {prev_lbl} "):
|
|
859
|
+
# found matching line (space at the end is important to ignore node
|
|
860
|
+
# lables with similar starting chars)
|
|
861
|
+
line = line.replace(f"{prefix} {prev_lbl} ", f"{prefix} {new_lbl} ")
|
|
862
|
+
|
|
863
|
+
# Replace second label
|
|
864
|
+
if not ignore_second and line.startswith(
|
|
865
|
+
f"{prev_lbl} ",
|
|
866
|
+
): # space at the end important again
|
|
867
|
+
line = line.replace(f"{prev_lbl} ", f"{new_lbl} ", 1)
|
|
868
|
+
|
|
869
|
+
new_gisinfo_block.append(line)
|
|
870
|
+
|
|
871
|
+
self._raw_data[start : end + 1] = new_gisinfo_block
|
|
872
|
+
|
|
873
|
+
def _update_gxy_label(
|
|
874
|
+
self,
|
|
875
|
+
unit_type: str,
|
|
876
|
+
unit_subtype: str,
|
|
877
|
+
prev_lbl: str,
|
|
878
|
+
new_lbl: str,
|
|
879
|
+
) -> None:
|
|
880
|
+
"""Update labels in GXY file if unit is renamed"""
|
|
881
|
+
|
|
882
|
+
if self._gxy_data is not None:
|
|
883
|
+
if unit_subtype is None:
|
|
884
|
+
unit_subtype = ""
|
|
885
|
+
|
|
886
|
+
old = f"{unit_type}_{unit_subtype}_{prev_lbl}"
|
|
887
|
+
new = f"{unit_type}_{unit_subtype}_{new_lbl}"
|
|
888
|
+
|
|
889
|
+
self._gxy_data = self._gxy_data.replace(old, new)
|