floodmodeller-api 0.4.2.post1__py3-none-any.whl → 0.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (172) hide show
  1. floodmodeller_api/__init__.py +8 -9
  2. floodmodeller_api/_base.py +184 -176
  3. floodmodeller_api/backup.py +273 -273
  4. floodmodeller_api/dat.py +909 -831
  5. floodmodeller_api/diff.py +136 -119
  6. floodmodeller_api/ied.py +307 -306
  7. floodmodeller_api/ief.py +647 -637
  8. floodmodeller_api/ief_flags.py +253 -253
  9. floodmodeller_api/inp.py +266 -266
  10. floodmodeller_api/libs/libifcoremd.dll +0 -0
  11. floodmodeller_api/libs/libifcoremt.so.5 +0 -0
  12. floodmodeller_api/libs/libifport.so.5 +0 -0
  13. floodmodeller_api/{libmmd.dll → libs/libimf.so} +0 -0
  14. floodmodeller_api/libs/libintlc.so.5 +0 -0
  15. floodmodeller_api/libs/libmmd.dll +0 -0
  16. floodmodeller_api/libs/libsvml.so +0 -0
  17. floodmodeller_api/libs/libzzn_read.so +0 -0
  18. floodmodeller_api/libs/zzn_read.dll +0 -0
  19. floodmodeller_api/logs/__init__.py +2 -2
  20. floodmodeller_api/logs/lf.py +320 -312
  21. floodmodeller_api/logs/lf_helpers.py +354 -352
  22. floodmodeller_api/logs/lf_params.py +643 -529
  23. floodmodeller_api/mapping.py +84 -0
  24. floodmodeller_api/test/__init__.py +4 -4
  25. floodmodeller_api/test/conftest.py +9 -8
  26. floodmodeller_api/test/test_backup.py +117 -117
  27. floodmodeller_api/test/test_dat.py +221 -92
  28. floodmodeller_api/test/test_data/All Units 4_6.DAT +1081 -1081
  29. floodmodeller_api/test/test_data/All Units 4_6.feb +1081 -1081
  30. floodmodeller_api/test/test_data/BRIDGE.DAT +926 -926
  31. floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.dat +36 -36
  32. floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.feb +36 -36
  33. floodmodeller_api/test/test_data/DamBreakADI.xml +52 -52
  34. floodmodeller_api/test/test_data/DamBreakFAST.xml +58 -58
  35. floodmodeller_api/test/test_data/DamBreakFAST_dy.xml +53 -53
  36. floodmodeller_api/test/test_data/DamBreakTVD.xml +55 -55
  37. floodmodeller_api/test/test_data/DefenceBreach.xml +53 -53
  38. floodmodeller_api/test/test_data/DefenceBreachFAST.xml +60 -60
  39. floodmodeller_api/test/test_data/DefenceBreachFAST_dy.xml +55 -55
  40. floodmodeller_api/test/test_data/Domain1+2_QH.xml +76 -76
  41. floodmodeller_api/test/test_data/Domain1_H.xml +41 -41
  42. floodmodeller_api/test/test_data/Domain1_Q.xml +41 -41
  43. floodmodeller_api/test/test_data/Domain1_Q_FAST.xml +48 -48
  44. floodmodeller_api/test/test_data/Domain1_Q_FAST_dy.xml +48 -48
  45. floodmodeller_api/test/test_data/Domain1_Q_xml_expected.json +263 -0
  46. floodmodeller_api/test/test_data/Domain1_W.xml +41 -41
  47. floodmodeller_api/test/test_data/EX1.DAT +321 -321
  48. floodmodeller_api/test/test_data/EX1.ext +107 -107
  49. floodmodeller_api/test/test_data/EX1.feb +320 -320
  50. floodmodeller_api/test/test_data/EX1.gxy +107 -107
  51. floodmodeller_api/test/test_data/EX17.DAT +421 -422
  52. floodmodeller_api/test/test_data/EX17.ext +213 -213
  53. floodmodeller_api/test/test_data/EX17.feb +422 -422
  54. floodmodeller_api/test/test_data/EX18.DAT +375 -375
  55. floodmodeller_api/test/test_data/EX18_DAT_expected.json +3876 -0
  56. floodmodeller_api/test/test_data/EX2.DAT +302 -302
  57. floodmodeller_api/test/test_data/EX3.DAT +926 -926
  58. floodmodeller_api/test/test_data/EX3_DAT_expected.json +16235 -0
  59. floodmodeller_api/test/test_data/EX3_IEF_expected.json +61 -0
  60. floodmodeller_api/test/test_data/EX6.DAT +2084 -2084
  61. floodmodeller_api/test/test_data/EX6.ext +532 -532
  62. floodmodeller_api/test/test_data/EX6.feb +2084 -2084
  63. floodmodeller_api/test/test_data/EX6_DAT_expected.json +31647 -0
  64. floodmodeller_api/test/test_data/Event Data Example.DAT +336 -336
  65. floodmodeller_api/test/test_data/Event Data Example.ext +107 -107
  66. floodmodeller_api/test/test_data/Event Data Example.feb +336 -336
  67. floodmodeller_api/test/test_data/Linked1D2D.xml +52 -52
  68. floodmodeller_api/test/test_data/Linked1D2DFAST.xml +53 -53
  69. floodmodeller_api/test/test_data/Linked1D2DFAST_dy.xml +48 -48
  70. floodmodeller_api/test/test_data/Linked1D2D_xml_expected.json +313 -0
  71. floodmodeller_api/test/test_data/blockage.dat +50 -50
  72. floodmodeller_api/test/test_data/blockage.ext +45 -45
  73. floodmodeller_api/test/test_data/blockage.feb +9 -9
  74. floodmodeller_api/test/test_data/blockage.gxy +71 -71
  75. floodmodeller_api/test/test_data/defaultUnits.dat +127 -127
  76. floodmodeller_api/test/test_data/defaultUnits.ext +45 -45
  77. floodmodeller_api/test/test_data/defaultUnits.feb +9 -9
  78. floodmodeller_api/test/test_data/defaultUnits.fmpx +58 -58
  79. floodmodeller_api/test/test_data/defaultUnits.gxy +85 -85
  80. floodmodeller_api/test/test_data/ex3.ief +20 -20
  81. floodmodeller_api/test/test_data/ex3.lf1 +2800 -2800
  82. floodmodeller_api/test/test_data/ex4.DAT +1374 -1374
  83. floodmodeller_api/test/test_data/ex4_changed.DAT +1374 -1374
  84. floodmodeller_api/test/test_data/example1.inp +329 -329
  85. floodmodeller_api/test/test_data/example2.inp +158 -158
  86. floodmodeller_api/test/test_data/example3.inp +297 -297
  87. floodmodeller_api/test/test_data/example4.inp +388 -388
  88. floodmodeller_api/test/test_data/example5.inp +147 -147
  89. floodmodeller_api/test/test_data/example6.inp +154 -154
  90. floodmodeller_api/test/test_data/jump.dat +176 -176
  91. floodmodeller_api/test/test_data/network.dat +1374 -1374
  92. floodmodeller_api/test/test_data/network.ext +45 -45
  93. floodmodeller_api/test/test_data/network.exy +1 -1
  94. floodmodeller_api/test/test_data/network.feb +45 -45
  95. floodmodeller_api/test/test_data/network.ied +45 -45
  96. floodmodeller_api/test/test_data/network.ief +20 -20
  97. floodmodeller_api/test/test_data/network.inp +147 -147
  98. floodmodeller_api/test/test_data/network.pxy +57 -57
  99. floodmodeller_api/test/test_data/network.zzd +122 -122
  100. floodmodeller_api/test/test_data/network_dat_expected.json +21837 -0
  101. floodmodeller_api/test/test_data/network_from_tabularCSV.csv +87 -87
  102. floodmodeller_api/test/test_data/network_ied_expected.json +287 -0
  103. floodmodeller_api/test/test_data/rnweir.dat +9 -9
  104. floodmodeller_api/test/test_data/rnweir.ext +45 -45
  105. floodmodeller_api/test/test_data/rnweir.feb +9 -9
  106. floodmodeller_api/test/test_data/rnweir.gxy +45 -45
  107. floodmodeller_api/test/test_data/rnweir_default.dat +74 -74
  108. floodmodeller_api/test/test_data/rnweir_default.ext +45 -45
  109. floodmodeller_api/test/test_data/rnweir_default.feb +9 -9
  110. floodmodeller_api/test/test_data/rnweir_default.fmpx +58 -58
  111. floodmodeller_api/test/test_data/rnweir_default.gxy +53 -53
  112. floodmodeller_api/test/test_data/unit checks.dat +16 -16
  113. floodmodeller_api/test/test_ied.py +29 -29
  114. floodmodeller_api/test/test_ief.py +125 -24
  115. floodmodeller_api/test/test_inp.py +47 -48
  116. floodmodeller_api/test/test_json.py +114 -0
  117. floodmodeller_api/test/test_logs_lf.py +48 -51
  118. floodmodeller_api/test/test_tool.py +165 -152
  119. floodmodeller_api/test/test_toolbox_structure_log.py +234 -239
  120. floodmodeller_api/test/test_xml2d.py +151 -156
  121. floodmodeller_api/test/test_zzn.py +36 -34
  122. floodmodeller_api/to_from_json.py +218 -0
  123. floodmodeller_api/tool.py +332 -329
  124. floodmodeller_api/toolbox/__init__.py +5 -5
  125. floodmodeller_api/toolbox/example_tool.py +45 -45
  126. floodmodeller_api/toolbox/model_build/__init__.py +2 -2
  127. floodmodeller_api/toolbox/model_build/add_siltation_definition.py +100 -98
  128. floodmodeller_api/toolbox/model_build/structure_log/__init__.py +1 -1
  129. floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +287 -289
  130. floodmodeller_api/toolbox/model_build/structure_log_definition.py +76 -76
  131. floodmodeller_api/units/__init__.py +10 -10
  132. floodmodeller_api/units/_base.py +214 -212
  133. floodmodeller_api/units/boundaries.py +467 -467
  134. floodmodeller_api/units/comment.py +52 -55
  135. floodmodeller_api/units/conduits.py +382 -402
  136. floodmodeller_api/units/helpers.py +123 -131
  137. floodmodeller_api/units/iic.py +107 -101
  138. floodmodeller_api/units/losses.py +305 -306
  139. floodmodeller_api/units/sections.py +444 -446
  140. floodmodeller_api/units/structures.py +1690 -1683
  141. floodmodeller_api/units/units.py +93 -104
  142. floodmodeller_api/units/unsupported.py +44 -44
  143. floodmodeller_api/units/variables.py +87 -89
  144. floodmodeller_api/urban1d/__init__.py +11 -11
  145. floodmodeller_api/urban1d/_base.py +188 -179
  146. floodmodeller_api/urban1d/conduits.py +93 -85
  147. floodmodeller_api/urban1d/general_parameters.py +58 -58
  148. floodmodeller_api/urban1d/junctions.py +81 -79
  149. floodmodeller_api/urban1d/losses.py +81 -74
  150. floodmodeller_api/urban1d/outfalls.py +114 -110
  151. floodmodeller_api/urban1d/raingauges.py +111 -111
  152. floodmodeller_api/urban1d/subsections.py +92 -98
  153. floodmodeller_api/urban1d/xsections.py +147 -144
  154. floodmodeller_api/util.py +77 -21
  155. floodmodeller_api/validation/parameters.py +660 -660
  156. floodmodeller_api/validation/urban_parameters.py +388 -404
  157. floodmodeller_api/validation/validation.py +110 -108
  158. floodmodeller_api/version.py +1 -1
  159. floodmodeller_api/xml2d.py +688 -673
  160. floodmodeller_api/xml2d_template.py +37 -37
  161. floodmodeller_api/zzn.py +387 -363
  162. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.3.dist-info}/LICENSE.txt +13 -13
  163. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.3.dist-info}/METADATA +82 -82
  164. floodmodeller_api-0.4.3.dist-info/RECORD +179 -0
  165. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.3.dist-info}/WHEEL +1 -1
  166. floodmodeller_api/libifcoremd.dll +0 -0
  167. floodmodeller_api/test/test_data/EX3.bmp +0 -0
  168. floodmodeller_api/test/test_data/test_output.csv +0 -87
  169. floodmodeller_api/zzn_read.dll +0 -0
  170. floodmodeller_api-0.4.2.post1.dist-info/RECORD +0 -164
  171. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.3.dist-info}/entry_points.txt +0 -0
  172. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.3.dist-info}/top_level.txt +0 -0
floodmodeller_api/dat.py CHANGED
@@ -1,831 +1,909 @@
1
- """
2
- Flood Modeller Python API
3
- Copyright (C) 2023 Jacobs U.K. Limited
4
-
5
- This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
- as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
7
-
8
- This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
9
- of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
10
-
11
- You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
12
-
13
- If you have any query about this program or this License, please contact us at support@floodmodeller.com or write to the following
14
- address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London, SE1 2QG, United Kingdom.
15
- """
16
-
17
- from pathlib import Path
18
- from typing import List, Optional, Union
19
-
20
- from . import units # Import for using as package
21
- from ._base import FMFile
22
- from .units._base import Unit
23
- from .units.helpers import _to_float, _to_int
24
- from .validation.validation import _validate_unit
25
-
26
-
27
- class DAT(FMFile):
28
 - """Reads and writes Flood Modeller datafile format '.dat'
29
-
30
- Args:
31
- dat_filepath (str, optional): Full filepath to dat file. If not specified, a new DAT class will be created. Defaults to None.
32
-
33
- Output:
34
- Initiates 'DAT' class object
35
-
36
- Raises:
37
- TypeError: Raised if dat_filepath does not point to a .dat file
38
- FileNotFoundError: Raised if dat_filepath points to a file which does not exist
39
- """
40
-
41
- _filetype: str = "DAT"
42
- _suffix: str = ".dat"
43
-
44
- def __init__(self, dat_filepath: Optional[Union[str, Path]] = None, with_gxy: bool = False):
45
- try:
46
- if dat_filepath is not None:
47
- FMFile.__init__(self, dat_filepath)
48
- self._read()
49
-
50
- else:
51
- self._create_from_blank(with_gxy)
52
-
53
- self._get_general_parameters()
54
- self._get_unit_definitions()
55
- except Exception as e:
56
- self._handle_exception(e, when="read")
57
-
58
- def update(self) -> None:
59
- """Updates the existing DAT based on any altered attributes"""
60
- self._update()
61
- self._write_gxy(self._gxy_filepath)
62
-
63
- def save(self, filepath: Union[str, Path]) -> None:
64
- """Saves the DAT to the given location, if pointing to an existing file it will be overwritten.
65
- Once saved, the DAT() class will continue working from the saved location, therefore any further calls to DAT.update() will
66
- update in the latest saved location rather than the original source DAT used to construct the class
67
-
68
- Args:
69
- filepath (str): Filepath to new save location including the name and '.dat' extension
70
-
71
- Raises:
72
- TypeError: Raised if given filepath doesn't point to a file suffixed '.dat'
73
- """
74
- filepath = Path(filepath).absolute()
75
- self._save(filepath)
76
- self._write_gxy(filepath)
77
-
78
- def _write_gxy(self, filepath):
79
- if self._gxy_data is not None:
80
- gxy_string = self._gxy_data
81
- new_gxy_path = filepath.with_suffix(".gxy")
82
- with open(new_gxy_path, "w") as gxy_file:
83
- gxy_file.write(gxy_string)
84
- self._gxy_filepath = new_gxy_path
85
-
86
- def diff(self, other: "DAT", force_print: bool = False) -> None:
87
- """Compares the DAT class against another DAT class to check whether they are
88
- equivalent, or if not, what the differences are. Two instances of a DAT class are
89
- deemed equivalent if all of their attributes are equal except for the filepath and
90
- raw data. For example, two DAT files from different filepaths that had the same
91
- data except maybe some differences in decimal places and some default parameters
92
 - ommitted, would be classed as equivalent as they would produce the same DAT instance
93
- and write the exact same data.
94
-
95
- The result is printed to the console. If you need to access the returned data, use
96
- the method ``DAT._get_diff()``
97
-
98
- Args:
99
- other (floodmodeller_api.DAT): Other instance of a DAT class
100
- force_print (bool): Forces the API to print every difference found, rather than
101
- just the first 25 differences. Defaults to False.
102
- """
103
- self._diff(other, force_print=force_print)
104
-
105
- # def _get_unit_from_connectivity(self, method) #use this as method prev and next
106
-
107
- def next(self, unit: Unit) -> Union[Unit, List[Unit], None]:
108
- """Finds next unit in the reach.
109
-
110
 - Next unit in reach can be inferred by:
111
- The next unit in the .dat file structure - such as when a river section has a positive distance to next
112
- The units with the exact same name - such as a junction unit
113
- The next unit as described in the ds_label - such as with Bridge units
114
-
115
- Args:
116
- unit (Unit): flood modeller unit input.
117
-
118
- Returns:
119
- Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
120
- """
121
- # Needs to handle same name match outside dist to next (e.g. inflow)
122
- try:
123
- if hasattr(unit, "dist_to_next"):
124
- # Case 1a - positive distance to next
125
- if unit.dist_to_next != 0:
126
- return self._next_in_dat_struct(unit)
127
-
128
- # Case 1b - distance to next = 0
129
- return self._name_label_match(unit)
130
-
131
- # Case 2: next unit is in ds_label
132
- if hasattr(unit, "ds_label"):
133
- return self._name_label_match(unit, name_override=unit.ds_label)
134
-
135
- if unit._unit == "JUNCTION":
136
- return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
137
-
138
- if unit._unit in ("QHBDY", "NCDBDY", "TIDBDY"):
139
- return None
140
-
141
- return self._name_label_match(unit)
142
-
143
- except Exception as e:
144
- self._handle_exception(e, when="calculating next unit")
145
-
146
- def prev(self, unit: Unit) -> Union[Unit, List[Unit], None]: # noqa: C901
147
- """Finds previous unit in the reach.
148
-
149
 - Previous unit in reach can be inferred by:
150
- The previous unit in the .dat file structure - such as when the previous river section has a positive distance to next.
151
- The units with the exact same name - such as a junction unit
152
- The previous unit as linked through upstream and downstream labels - such as with Bridge units
153
-
154
- Args:
155
- unit (Unit): flood modeller unit input.
156
-
157
- Returns:
158
- Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
159
- """
160
-
161
- try:
162
- # Case 1: Unit is input boundary condition
163
- if unit._unit in (
164
- "QTBDY",
165
- "HTBDY",
166
- "REFHBDY",
167
- "FEHBDY",
168
- "FRQSIM",
169
- "FSRBDY",
170
- "FSSR16BDY",
171
- "GERRBDY",
172
- "REBDY",
173
- "REFH2BDY",
174
- "SCSBDY",
175
- ):
176
- return None
177
-
178
- if unit._unit == "JUNCTION":
179
- return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
180
-
181
- prev_units = []
182
- _prev_in_dat = self._prev_in_dat_struct(unit)
183
- _name_match = self._name_label_match(unit)
184
- _ds_label_match = self._ds_label_match(unit)
185
- _junction_match = [
186
- junction
187
- for junction in self._all_units
188
- if junction._unit == "JUNCTION" and unit.name in junction.labels
189
- ]
190
-
191
- # Case 2: Previous unit has positive distance to next
192
- if (
193
- _prev_in_dat
194
- and hasattr(_prev_in_dat, "dist_to_next")
195
- and _prev_in_dat.dist_to_next != 0
196
- ):
197
- prev_units.append(_prev_in_dat)
198
 - _name_match = None  # Name match does not apply if upstream section exists
199
-
200
 - # All other matches added (matching name, matching name to ds_label and junction)
201
- for match in [_name_match, _ds_label_match, _junction_match]:
202
- if isinstance(match, list):
203
- prev_units.extend(match)
204
- elif match:
205
- prev_units.append(match)
206
-
207
- if len(prev_units) == 0:
208
- return None
209
- if len(prev_units) == 1:
210
- return prev_units[0]
211
- return prev_units
212
-
213
- except Exception as e:
214
- self._handle_exception(e, when="calculating next unit")
215
-
216
- def _next_in_dat_struct(self, current_unit) -> Optional[Unit]:
217
- """Finds next unit in the dat file using the index position.
218
-
219
- Returns:
220
- Unit with all associated data
221
- """
222
-
223
- for idx, unit in enumerate(self._all_units):
224
- # Names checked first to speed up comparison
225
- if unit.name == current_unit.name and unit == current_unit:
226
- try:
227
- return self._all_units[idx + 1]
228
- except IndexError:
229
- return None
230
-
231
- return None
232
-
233
- def _prev_in_dat_struct(self, current_unit) -> Optional[Unit]:
234
- """Finds previous unit in the dat file using the index position.
235
-
236
- Returns:
237
- Unit with all associated data
238
- """
239
- for idx, unit in enumerate(self._all_units):
240
- # Names checked first to speed up comparison
241
- if unit.name == current_unit.name and unit == current_unit:
242
- if idx == 0:
243
- return None
244
- return self._all_units[idx - 1]
245
-
246
- return None
247
-
248
- def _ds_label_match(self, current_unit) -> Union[Unit, List[Unit], None]:
249
- """Pulls out all units with ds label that matches the input unit.
250
-
251
- Returns:
252
- Union[Unit, list[Unit], None]: Either a singular unit or list of units with ds_label matching, if none exist returns none.
253
- """
254
-
255
- _ds_list = []
256
- for item in self._all_units:
257
- try:
258
- if item.ds_label == current_unit.name:
259
- _ds_list.append(item)
260
- except AttributeError:
261
- continue
262
-
263
- if len(_ds_list) == 0:
264
- return None
265
- if len(_ds_list) == 1:
266
- return _ds_list[0]
267
- return _ds_list
268
-
269
- def _name_label_match(self, current_unit, name_override=None) -> Union[Unit, List[Unit], None]:
270
- """Pulls out all units with same name as the input unit.
271
-
272
- Returns:
273
- Union[Unit, list[Unit], None]: Either a singular unit or list of units with matching names, if none exist returns none. Does not return itself
274
- """
275
-
276
- _name = name_override or str(current_unit.name)
277
- _name_list = []
278
- for item in self._all_units:
279
- if item.name == _name and item != current_unit:
280
- _name_list.append(item)
281
- else:
282
- pass
283
-
284
- if len(_name_list) == 0:
285
- return None
286
- if len(_name_list) == 1:
287
- return _name_list[0]
288
- return _name_list
289
-
290
- def _read(self):
291
- # Read DAT data
292
- with open(self._filepath, "r") as dat_file:
293
- self._raw_data = [line.rstrip("\n") for line in dat_file.readlines()]
294
-
295
- # Generate DAT structure
296
- self._update_dat_struct()
297
-
298
- # Get network .gxy if present
299
- gxy_path = self._filepath.with_suffix(".gxy")
300
- if gxy_path.exists():
301
- self._gxy_filepath = gxy_path
302
- with open(self._gxy_filepath, "r") as gxy_file:
303
- self._gxy_data = gxy_file.read()
304
- else:
305
- self._gxy_filepath = None
306
- self._gxy_data = None
307
-
308
- def _write(self) -> str:
309
- """Returns string representation of the current DAT data
310
-
311
- Returns:
312
- str: Full string representation of DAT in its most recent state (including changes not yet saved to disk)
313
- """
314
- try:
315
- self._update_raw_data()
316
- self._update_general_parameters()
317
- self._update_dat_struct()
318
- self._update_unit_names()
319
-
320
- dat_string = ""
321
- for line in self._raw_data:
322
- dat_string += line + "\n"
323
-
324
- return dat_string
325
-
326
- except Exception as e:
327
- self._handle_exception(e, when="write")
328
-
329
- def _create_from_blank(self, with_gxy=False):
330
- # No filepath specified, create new 'blank' DAT in memory
331
- # ** Update these to have minimal data needed (general header, empty IC header)
332
- self._dat_struct = [
333
- {"start": 0, "Type": "GENERAL", "end": 6},
334
- {"Type": "INITIAL CONDITIONS", "start": 7, "end": 8},
335
- ]
336
- self._raw_data = [
337
- "",
338
- "#REVISION#1",
339
- " 0 0.750 0.900 0.100 0.001 12SI",
340
- " 10.000 0.010 0.010 0.700 0.100 0.700 0.000",
341
- "RAD FILE",
342
- "",
343
- "END GENERAL",
344
- "INITIAL CONDITIONS",
345
- " label ? flow stage froude no velocity umode ustate z",
346
- ]
347
-
348
- self._gxy_filepath = None
349
- if with_gxy:
350
- self._gxy_data = ""
351
- else:
352
- self._gxy_data = None
353
-
354
- def _get_general_parameters(self):
355
- # ** Get general parameters here
356
- self.title = self._raw_data[0]
357
- self.general_parameters = {}
358
- line = f"{self._raw_data[2]:<70}"
359
- params = units.helpers.split_10_char(line)
360
- if params[6] == "":
361
- # Adds the measurements unit as DEFAULT if not specified
362
- params[6] = "DEFAULT"
363
- line = f"{self._raw_data[3]:<70}"
364
- params.extend(units.helpers.split_10_char(line))
365
-
366
- self.general_parameters["Node Count"] = _to_int(params[0], 0)
367
- self.general_parameters["Lower Froude"] = _to_float(params[1], 0.75)
368
- self.general_parameters["Upper Froude"] = _to_float(params[2], 0.9)
369
- self.general_parameters["Min Depth"] = _to_float(params[3], 0.1)
370
- self.general_parameters["Convergence Direct"] = _to_float(params[4], 0.001)
371
- self._label_len = _to_int(params[5], 12) # label length
372
- self.general_parameters["Units"] = params[6] # "DEFAULT" set during read above.
373
- self.general_parameters["Water Temperature"] = _to_float(params[7], 10.0)
374
- self.general_parameters["Convergence Flow"] = _to_float(params[8], 0.01)
375
- self.general_parameters["Convergence Head"] = _to_float(params[9], 0.01)
376
- self.general_parameters["Mathematical Damping"] = _to_float(params[10], 0.7)
377
- self.general_parameters["Pivotal Choice"] = _to_float(params[11], 0.1)
378
- self.general_parameters["Under-relaxation"] = _to_float(params[12], 0.7)
379
- self.general_parameters["Matrix Dummy"] = _to_float(params[13], 0.0)
380
- self.general_parameters["RAD File"] = self._raw_data[5] # No default, optional
381
-
382
- def _update_general_parameters(self):
383
- self._raw_data[0] = self.title
384
- self._raw_data[5] = self.general_parameters["RAD File"]
385
- general_params_1 = units.helpers.join_10_char(
386
- self.general_parameters["Node Count"],
387
- self.general_parameters["Lower Froude"],
388
- self.general_parameters["Upper Froude"],
389
- self.general_parameters["Min Depth"],
390
- self.general_parameters["Convergence Direct"],
391
- self._label_len,
392
- )
393
- general_params_1 += self.general_parameters["Units"]
394
- self._raw_data[2] = general_params_1
395
-
396
- general_params_2 = units.helpers.join_10_char(
397
- self.general_parameters["Water Temperature"],
398
- self.general_parameters["Convergence Flow"],
399
- self.general_parameters["Convergence Head"],
400
- self.general_parameters["Mathematical Damping"],
401
- self.general_parameters["Pivotal Choice"],
402
- self.general_parameters["Under-relaxation"],
403
- self.general_parameters["Matrix Dummy"],
404
- )
405
- self._raw_data[3] = general_params_2
406
-
407
- def _update_unit_names(self):
408
- for unit_group, unit_group_name in [
409
- (self.boundaries, "boundaries"),
410
- (self.sections, "sections"),
411
- (self.structures, "structures"),
412
- (self.conduits, "conduits"),
413
- (self.losses, "losses"),
414
- ]:
415
- for name, unit in unit_group.copy().items():
416
- if name != unit.name:
417
- # Check if new name already exists as a label
418
- if unit.name in unit_group:
419
- raise Exception(
420
- f'Error: Cannot update label "{name}" to "{unit.name}" because "{unit.name}" already exists in the Network {unit_group_name} group'
421
- )
422
- unit_group[unit.name] = unit
423
- del unit_group[name]
424
- # Update label in ICs
425
- if unit_group_name not in ["boundaries", "losses"]:
426
- # TODO: Need to do a more thorough check for whether a unit is one in the ICs
427
- # e.g. Culvert inlet and river section may have same label, but only river
428
- # section label should update in ICs
429
- self.initial_conditions.update_label(name, unit.name)
430
-
431
- # Update label in GISINFO and GXY data
432
- self._update_gisinfo_label(
433
- unit._unit,
434
- unit._subtype,
435
- name,
436
- unit.name,
437
- unit_group_name
438
- in ["boundaries", "losses"], # if True it ignores second lbl
439
- )
440
- self._update_gxy_label(unit._unit, unit._subtype, name, unit.name)
441
-
442
- # Update IC table names in raw_data if any name changes
443
- ic_start, ic_end = next(
444
- (unit["start"], unit["end"])
445
- for unit in self._dat_struct
446
- if unit["Type"] == "INITIAL CONDITIONS"
447
- )
448
- self._raw_data[ic_start : ic_end + 1] = self.initial_conditions._write()
449
-
450
- def _update_raw_data(self):
451
- block_shift = 0
452
- comment_tracker = 0
453
- comment_units = [unit for unit in self._all_units if unit._unit == "COMMENT"]
454
- prev_block_end = self._dat_struct[0]["end"]
455
- existing_units = {
456
- "boundaries": [],
457
- "structures": [],
458
- "sections": [],
459
- "conduits": [],
460
- "losses": [],
461
- }
462
-
463
- for block in self._dat_struct:
464
- # Check for all supported boundary types
465
- if block["Type"] in units.SUPPORTED_UNIT_TYPES:
466
- # clause for when unit has been inserted into the dat file
467
- if "new_insert" in block:
468
- block["start"] = prev_block_end + 1
469
- block["end"] = block["start"] + len(block["new_insert"]) - 1
470
- self._raw_data[block["start"] : block["start"]] = block["new_insert"]
471
- block_shift += len(block["new_insert"])
472
- prev_block_end = block["end"]
473
- del block["new_insert"]
474
-
475
- else:
476
- unit_data = self._raw_data[
477
- block["start"] + block_shift : block["end"] + 1 + block_shift
478
- ]
479
- prev_block_len = len(unit_data)
480
-
481
- if block["Type"] == "INITIAL CONDITIONS":
482
- new_unit_data = self.initial_conditions._write()
483
- elif block["Type"] == "COMMENT":
484
- comment = comment_units[comment_tracker]
485
- new_unit_data = comment._write()
486
- comment_tracker += 1
487
-
488
- elif block["Type"] == "VARIABLES":
489
- new_unit_data = self.variables._write()
490
-
491
- else:
492
- if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
493
- unit_name = unit_data[2][: self._label_len].strip()
494
- else:
495
- unit_name = unit_data[1][: self._label_len].strip()
496
-
497
- # Get unit object
498
- unit_group = getattr(
499
- self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
500
- )
501
- if unit_name in unit_group:
502
- # block still exists
503
- new_unit_data = unit_group[unit_name]._write()
504
- existing_units[
505
- units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
506
- ].append(unit_name)
507
- else:
508
- # Bdy block has been deleted
509
- new_unit_data = []
510
-
511
- new_block_len = len(new_unit_data)
512
- self._raw_data[
513
- block["start"] + block_shift : block["end"] + 1 + block_shift
514
- ] = new_unit_data
515
- # adjust block shift for change in number of lines in bdy block
516
- block_shift += new_block_len - prev_block_len
517
- prev_block_end = (
518
- block["end"] + block_shift
519
- ) # add in to keep a record of the last block read in
520
-
521
- def _get_unit_definitions(self): # noqa: C901
522
- # Get unit definitions
523
- self.sections = {}
524
- self.boundaries = {}
525
- self.structures = {}
526
- self.conduits = {}
527
- self.losses = {}
528
- self._unsupported = {}
529
- self._all_units = []
530
- for block in self._dat_struct:
531
- unit_data = self._raw_data[block["start"] : block["end"] + 1]
532
- if block["Type"] in units.SUPPORTED_UNIT_TYPES:
533
- # Deal with initial conditions block
534
- if block["Type"] == "INITIAL CONDITIONS":
535
- self.initial_conditions = units.IIC(unit_data, n=self._label_len)
536
- continue
537
-
538
- if block["Type"] == "COMMENT":
539
- self._all_units.append(units.COMMENT(unit_data, n=self._label_len))
540
- continue
541
-
542
- if block["Type"] == "VARIABLES":
543
- self.variables = units.Variables(unit_data)
544
- continue
545
-
546
- # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
547
- if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
548
- unit_name = unit_data[2][: self._label_len].strip()
549
- else:
550
- unit_name = unit_data[1][: self._label_len].strip()
551
-
552
- # Create instance of unit and add to relevant group
553
- unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"])
554
- if unit_name in unit_group:
555
- raise Exception(
556
- f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]}'
557
- )
558
- # Changes done to account for unit types with spaces/dashes eg Flat-V Weir
559
- unit_type = block["Type"].replace(" ", "_").replace("-", "_")
560
- unit_group[unit_name] = eval(
561
- f"units.{unit_type}({unit_data}, {self._label_len})" # append to our _all._units as well???
562
- )
563
- self._all_units.append(unit_group[unit_name])
564
-
565
- elif block["Type"] in units.UNSUPPORTED_UNIT_TYPES:
566
- # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
567
- if units.UNSUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
568
- unit_name = unit_data[2][: self._label_len].strip()
569
- subtype = True
570
- else:
571
- unit_name = unit_data[1][: self._label_len].strip()
572
- subtype = False
573
-
574
- self._unsupported[f"{unit_name} ({block['Type']})"] = units.UNSUPPORTED(
575
- unit_data,
576
- self._label_len,
577
- unit_name=unit_name,
578
- unit_type=block["Type"],
579
- subtype=subtype,
580
- )
581
- self._all_units.append(self._unsupported[f"{unit_name} ({block['Type']})"])
582
-
583
- elif block["Type"] not in ("GENERAL", "GISINFO"):
584
- raise Exception(f"Unexpected unit type encountered: {block['Type']}")
585
-
586
- def _update_dat_struct(self): # noqa: C901
587
- """Internal method used to update self._dat_struct which details the overall structure of the dat file as a list of blocks, each of which
588
- are a dictionary containing the 'start', 'end' and 'type' of the block.
589
-
590
- """
591
- # Generate DAT structure
592
- dat_struct = []
593
- in_block = False
594
- in_general = True
595
- in_comment = False
596
- comment_n = None # Used as counter for number of lines in a comment block
597
- gisinfo_block = False
598
- general_block = {"start": 0, "Type": "GENERAL"}
599
- unit_block = {}
600
- for idx, line in enumerate(self._raw_data):
601
- # Deal with 'general' header
602
- if in_general is True:
603
- if line == "END GENERAL":
604
- general_block["end"] = idx
605
- dat_struct.append(general_block)
606
- in_general = False
607
- continue
608
-
609
- # Deal with comment blocks explicitly as they could contain unit keywords
610
- if in_comment and comment_n is None:
611
- comment_n = int(line.strip())
612
- continue
613
- if in_comment:
614
- comment_n -= 1
615
- if comment_n == 0:
616
- unit_block["end"] = idx # add ending index
617
- # append existing bdy block to the dat_struct
618
- dat_struct.append(unit_block)
619
- unit_block = {} # reset bdy block
620
- in_comment = False
621
- in_block = False
622
- comment_n = None
623
- continue # move onto next line as still in comment block
624
-
625
- if line == "COMMENT":
626
- in_comment = True
627
- unit_block, in_block = self._close_struct_block(
628
- dat_struct, "COMMENT", unit_block, in_block, idx
629
- )
630
- continue
631
-
632
- if line == "GISINFO":
633
- gisinfo_block = True
634
- unit_block, in_block = self._close_struct_block(
635
- dat_struct, "GISINFO", unit_block, in_block, idx
636
- )
637
-
638
- if not gisinfo_block:
639
- if line.split(" ")[0] in units.ALL_UNIT_TYPES:
640
- # The " " is needed here in case of empty string
641
- unit_type = line.split()[0]
642
- elif " ".join(line.split()[:2]) in units.ALL_UNIT_TYPES:
643
- unit_type = " ".join(line.split()[:2])
644
- else:
645
- continue
646
-
647
- unit_block, in_block = self._close_struct_block(
648
- dat_struct, unit_type, unit_block, in_block, idx
649
- )
650
-
651
- if len(unit_block) != 0:
652
- # Only adds end block if there is a block present (i.e. an empty DAT stays empty)
653
- # add ending index for final block
654
- unit_block["end"] = len(self._raw_data) - 1
655
- dat_struct.append(unit_block) # add final block
656
-
657
- self._dat_struct = dat_struct
658
-
659
- def _close_struct_block(self, dat_struct, unit_type, unit_block, in_block, idx):
660
- """Helper method to close block in dat struct"""
661
- if in_block is True:
662
- unit_block["end"] = idx - 1 # add ending index
663
- # append existing bdy block to the dat_struct
664
- dat_struct.append(unit_block)
665
- unit_block = {} # reset bdy block
666
- in_block = True
667
- unit_block["Type"] = unit_type # start new bdy block
668
- unit_block["start"] = idx # add starting index
669
-
670
- return unit_block, in_block
671
-
672
- def remove_unit(self, unit):
673
- """Remove a unit from the dat file.
674
-
675
- Args:
676
- unit (Unit): flood modeller unit input.
677
-
678
- Raises:
679
- TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
680
- """
681
-
682
- try:
683
- # catch if not valid unit
684
- if not isinstance(unit, Unit):
685
- raise TypeError("unit isn't a unit")
686
-
687
- # remove from all units
688
- index = self._all_units.index(unit)
689
- del self._all_units[index]
690
- # remove from dat_struct
691
- dat_struct_unit = self._dat_struct[index + 1]
692
- del self._dat_struct[index + 1]
693
- # remove from raw data
694
- del self._raw_data[dat_struct_unit["start"] : dat_struct_unit["end"] + 1]
695
- # remove from unit group
696
- unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
697
- unit_group = getattr(self, unit_group_name)
698
- del unit_group[unit.name]
699
- # remove from ICs
700
- self.initial_conditions.data = self.initial_conditions.data.loc[
701
- self.initial_conditions.data["label"] != unit.name
702
- ]
703
-
704
- self._update_dat_struct()
705
- self.general_parameters["Node Count"] -= 1
706
-
707
- except Exception as e:
708
- self._handle_exception(e, when="remove unit")
709
-
710
- def insert_unit(self, unit, add_before=None, add_after=None, add_at=None): # noqa: C901
711
- """Inserts a unit into the dat file.
712
-
713
- Args:
714
- unit (Unit): FloodModeller unit input.
715
- add_before (Unit): FloodModeller unit to add before.
716
- add_after (Unit): FloodModeller unit to add after.
717
- add_at (interger): Positional argument (starting at 0) of where to add in
718
- the dat file. To add at the end of the network you can use -1.
719
-
720
- Raises:
721
- SyntaxError: Raised if no positional argument is given.
722
- TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
723
- NameError: Raised if unit name already appears in unit group.
724
- """
725
- try:
726
- # catch errors
727
- if all(arg is None for arg in (add_before, add_after, add_at)):
728
- raise SyntaxError(
729
- "No possitional argument given. Please provide either add_before, add_at or add_after"
730
- )
731
- if not isinstance(unit, Unit):
732
- raise TypeError("unit isn't a unit")
733
- if add_at is None and not (isinstance(add_before, Unit) or isinstance(add_after, Unit)):
734
- raise TypeError(
735
- "add_before or add_after argument must be a Flood Modeller Unit type"
736
- )
737
-
738
- unit_class = unit._unit
739
- if unit_class != "COMMENT":
740
- _validate_unit(unit)
741
- unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"] # get rid
742
- unit_group = getattr(self, unit_group_name)
743
- # unit_class = unit._unit
744
- if unit.name in unit_group:
745
- raise NameError(
746
- "Name already appears in unit group. Cannot have two units with same name in same group"
747
- )
748
-
749
- # positional argument
750
- if add_at is not None:
751
- insert_index = add_at
752
- if insert_index < 0:
753
- insert_index += len(self._all_units) + 1
754
- if insert_index < 0:
755
- raise Exception(f"invalid add_at index: {add_at}")
756
- else:
757
- check_unit = add_before or add_after
758
- for index, thing in enumerate(self._all_units):
759
- if thing == check_unit:
760
- insert_index = index
761
- insert_index += 1 if add_after else 0
762
- break
763
- else:
764
- raise Exception(
765
- f"{check_unit} not found in dat network, so cannot be used to add before/after"
766
- )
767
-
768
- unit_data = unit._write()
769
- self._all_units.insert(insert_index, unit)
770
- if unit._unit != "COMMENT":
771
- unit_group[unit.name] = unit
772
- self._dat_struct.insert(
773
- insert_index + 1, {"Type": unit_class, "new_insert": unit_data}
774
- ) # add to dat struct without unit.name
775
-
776
- if unit._unit != "COMMENT":
777
- # update the iic's tables
778
- iic_data = [unit.name, "y", 00.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
779
- self.initial_conditions.data.loc[
780
- len(self.initial_conditions.data)
781
- ] = iic_data # flaged
782
-
783
- # update all
784
- if unit._unit != "COMMENT":
785
- self.general_parameters["Node Count"] += 1 # flag no update for comments
786
- self._update_raw_data()
787
- self._update_dat_struct()
788
-
789
- except Exception as e:
790
- self._handle_exception(e, when="insert unit")
791
-
792
- def _update_gisinfo_label(self, unit_type, unit_subtype, prev_lbl, new_lbl, ignore_second):
793
- """Update labels in GISINFO block if unit is renamed"""
794
-
795
- start, end = next(
796
- (block["start"], block["end"])
797
- for block in self._dat_struct
798
- if block["Type"] == "GISINFO"
799
- )
800
- gisinfo_block = self._raw_data[start : end + 1]
801
-
802
- prefix = unit_type if unit_subtype is None else f"{unit_type} {unit_subtype}"
803
-
804
- new_gisinfo_block = []
805
- for line in gisinfo_block:
806
- # Replace first label
807
- if line.startswith(f"{prefix} {prev_lbl} "):
808
- # found matching line (space at the end is important to ignore node
809
- # lables with similar starting chars)
810
- line = line.replace(f"{prefix} {prev_lbl} ", f"{prefix} {new_lbl} ")
811
-
812
- # Replace second label
813
- if not ignore_second:
814
- if line.startswith(f"{prev_lbl} "): # space at the end important again
815
- line = line.replace(f"{prev_lbl} ", f"{new_lbl} ", 1)
816
-
817
- new_gisinfo_block.append(line)
818
-
819
- self._raw_data[start : end + 1] = new_gisinfo_block
820
-
821
- def _update_gxy_label(self, unit_type, unit_subtype, prev_lbl, new_lbl):
822
- """Update labels in GXY file if unit is renamed"""
823
-
824
- if self._gxy_data is not None:
825
- if unit_subtype is None:
826
- unit_subtype = ""
827
-
828
- old = f"{unit_type}_{unit_subtype}_{prev_lbl}"
829
- new = f"{unit_type}_{unit_subtype}_{new_lbl}"
830
-
831
- self._gxy_data = self._gxy_data.replace(old, new)
1
+ """
2
+ Flood Modeller Python API
3
+ Copyright (C) 2024 Jacobs U.K. Limited
4
+
5
+ This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
+ as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
7
+
8
+ This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
9
+ of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
10
+
11
+ You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
12
+
13
+ If you have any query about this program or this License, please contact us at support@floodmodeller.com or write to the following
14
+ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London, SE1 2QG, United Kingdom.
15
+ """
16
+
17
+ from __future__ import annotations
18
+
19
+ from pathlib import Path
20
+ from typing import Any
21
+
22
+ from . import units
23
+ from ._base import FMFile
24
+ from .units._base import Unit
25
+ from .units.helpers import _to_float, _to_int
26
+ from .validation.validation import _validate_unit
27
+
28
+
29
+ class DAT(FMFile):
30
+ """Reads and write Flood Modeller datafile format '.dat'
31
+
32
+ Args:
33
+ dat_filepath (str, optional): Full filepath to dat file. If not specified, a new DAT class will be created. Defaults to None.
34
+
35
+ Output:
36
+ Initiates 'DAT' class object
37
+
38
+ Raises:
39
+ TypeError: Raised if dat_filepath does not point to a .dat file
40
+ FileNotFoundError: Raised if dat_filepath points to a file which does not exist
41
+ """
42
+
43
+ _filetype: str = "DAT"
44
+ _suffix: str = ".dat"
45
+
46
+ def __init__(
47
+ self,
48
+ dat_filepath: str | Path | None = None,
49
+ with_gxy: bool = False,
50
+ from_json: bool = False,
51
+ ) -> None:
52
+ try:
53
+ if from_json:
54
+ return
55
+ if dat_filepath is not None:
56
+ FMFile.__init__(self, dat_filepath)
57
+ self._read()
58
+
59
+ else:
60
+ self._create_from_blank(with_gxy)
61
+
62
+ self._get_general_parameters()
63
+ self._get_unit_definitions()
64
+ except Exception as e:
65
+ self._handle_exception(e, when="read")
66
+
67
+ def update(self) -> None:
68
+ """Updates the existing DAT based on any altered attributes"""
69
+ self._update()
70
+ self._write_gxy(self._gxy_filepath)
71
+
72
+ def save(self, filepath: str | Path) -> None:
73
+ """Saves the DAT to the given location, if pointing to an existing file it will be overwritten.
74
+ Once saved, the DAT() class will continue working from the saved location, therefore any further calls to DAT.update() will
75
+ update in the latest saved location rather than the original source DAT used to construct the class
76
+
77
+ Args:
78
+ filepath (str): Filepath to new save location including the name and '.dat' extension
79
+
80
+ Raises:
81
+ TypeError: Raised if given filepath doesn't point to a file suffixed '.dat'
82
+ """
83
+ filepath = Path(filepath).absolute()
84
+ self._save(filepath)
85
+ self._write_gxy(filepath)
86
+
87
+ def _write_gxy(self, filepath):
88
+ if self._gxy_data is not None:
89
+ gxy_string = self._gxy_data
90
+ new_gxy_path = filepath.with_suffix(".gxy")
91
+ with open(new_gxy_path, "w") as gxy_file:
92
+ gxy_file.write(gxy_string)
93
+ self._gxy_filepath = new_gxy_path
94
+
95
+ def diff(self, other: DAT, force_print: bool = False) -> None:
96
+ """Compares the DAT class against another DAT class to check whether they are
97
+ equivalent, or if not, what the differences are. Two instances of a DAT class are
98
+ deemed equivalent if all of their attributes are equal except for the filepath and
99
+ raw data. For example, two DAT files from different filepaths that had the same
100
+ data except maybe some differences in decimal places and some default parameters
101
+ ommitted, would be classed as equivalent as they would produce the same DAT instance
102
+ and write the exact same data.
103
+
104
+ The result is printed to the console. If you need to access the returned data, use
105
+ the method ``DAT._get_diff()``
106
+
107
+ Args:
108
+ other (floodmodeller_api.DAT): Other instance of a DAT class
109
+ force_print (bool): Forces the API to print every difference found, rather than
110
+ just the first 25 differences. Defaults to False.
111
+ """
112
+ self._diff(other, force_print=force_print)
113
+
114
+ # def _get_unit_from_connectivity(self, method) #use this as method prev and next
115
+
116
+ def next(self, unit: Unit) -> Unit | list[Unit] | None:
117
+ """Finds next unit in the reach.
118
+
119
+ Next unit in reach can be infered by:
120
+ The next unit in the .dat file structure - such as when a river section has a positive distance to next
121
+ The units with the exact same name - such as a junction unit
122
+ The next unit as described in the ds_label - such as with Bridge units
123
+
124
+ Args:
125
+ unit (Unit): flood modeller unit input.
126
+
127
+ Returns:
128
+ Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
129
+ """
130
+ # Needs to handle same name match outside dist to next (e.g. inflow)
131
+ try:
132
+ if hasattr(unit, "dist_to_next"):
133
+ # Case 1a - positive distance to next
134
+ if unit.dist_to_next != 0:
135
+ return self._next_in_dat_struct(unit)
136
+
137
+ # Case 1b - distance to next = 0
138
+ return self._name_label_match(unit)
139
+
140
+ # Case 2: next unit is in ds_label
141
+ if hasattr(unit, "ds_label"):
142
+ return self._name_label_match(unit, name_override=unit.ds_label)
143
+
144
+ if unit._unit == "JUNCTION":
145
+ return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
146
+
147
+ if unit._unit in ("QHBDY", "NCDBDY", "TIDBDY"):
148
+ return None
149
+
150
+ return self._name_label_match(unit)
151
+
152
+ except Exception as e:
153
+ self._handle_exception(e, when="calculating next unit")
154
+
155
+ def prev(self, unit: Unit) -> Unit | list[Unit] | None:
156
+ """Finds previous unit in the reach.
157
+
158
+ Previous unit in reach can be infered by:
159
+ The previous unit in the .dat file structure - such as when the previous river section has a positive distance to next.
160
+ The units with the exact same name - such as a junction unit
161
+ The previous unit as linked through upstream and downstream labels - such as with Bridge units
162
+
163
+ Args:
164
+ unit (Unit): flood modeller unit input.
165
+
166
+ Returns:
167
+ Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
168
+ """
169
+
170
+ try:
171
+ # Case 1: Unit is input boundary condition
172
+ if unit._unit in (
173
+ "QTBDY",
174
+ "HTBDY",
175
+ "REFHBDY",
176
+ "FEHBDY",
177
+ "FRQSIM",
178
+ "FSRBDY",
179
+ "FSSR16BDY",
180
+ "GERRBDY",
181
+ "REBDY",
182
+ "REFH2BDY",
183
+ "SCSBDY",
184
+ ):
185
+ return None
186
+
187
+ if unit._unit == "JUNCTION":
188
+ return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
189
+
190
+ prev_units = []
191
+ _prev_in_dat = self._prev_in_dat_struct(unit)
192
+ _name_match = self._name_label_match(unit)
193
+ _ds_label_match = self._ds_label_match(unit)
194
+ _junction_match = [
195
+ junction
196
+ for junction in self._all_units
197
+ if junction._unit == "JUNCTION" and unit.name in junction.labels
198
+ ]
199
+
200
+ # Case 2: Previous unit has positive distance to next
201
+ if (
202
+ _prev_in_dat
203
+ and hasattr(_prev_in_dat, "dist_to_next")
204
+ and _prev_in_dat.dist_to_next != 0
205
+ ):
206
+ prev_units.append(_prev_in_dat)
207
+ _name_match = None # Name match does apply if upstream section exists
208
+
209
+ # All other matches added (matching name, matching name to ds_label and junciton)
210
+ for match in [_name_match, _ds_label_match, _junction_match]:
211
+ if isinstance(match, list):
212
+ prev_units.extend(match)
213
+ elif match:
214
+ prev_units.append(match)
215
+
216
+ if len(prev_units) == 0:
217
+ return None
218
+ if len(prev_units) == 1:
219
+ return prev_units[0]
220
+ return prev_units
221
+
222
+ except Exception as e:
223
+ self._handle_exception(e, when="calculating next unit")
224
+
225
+ def _next_in_dat_struct(self, current_unit: Unit) -> Unit | None:
226
+ """Finds next unit in the dat file using the index position.
227
+
228
+ Returns:
229
+ Unit with all associated data
230
+ """
231
+
232
+ for idx, unit in enumerate(self._all_units):
233
+ # Names checked first to speed up comparison
234
+ if unit.name == current_unit.name and unit == current_unit:
235
+ try:
236
+ return self._all_units[idx + 1]
237
+ except IndexError:
238
+ return None
239
+
240
+ return None
241
+
242
+ def _prev_in_dat_struct(self, current_unit: Unit) -> Unit | None:
243
+ """Finds previous unit in the dat file using the index position.
244
+
245
+ Returns:
246
+ Unit with all associated data
247
+ """
248
+ for idx, unit in enumerate(self._all_units):
249
+ # Names checked first to speed up comparison
250
+ if unit.name == current_unit.name and unit == current_unit:
251
+ if idx == 0:
252
+ return None
253
+ return self._all_units[idx - 1]
254
+
255
+ return None
256
+
257
+ def _ds_label_match(self, current_unit: Unit) -> Unit | list[Unit] | None:
258
+ """Pulls out all units with ds label that matches the input unit.
259
+
260
+ Returns:
261
+ Union[Unit, list[Unit], None]: Either a singular unit or list of units with ds_label matching, if none exist returns none.
262
+ """
263
+
264
+ _ds_list = []
265
+ for item in self._all_units:
266
+ try:
267
+ if item.ds_label == current_unit.name:
268
+ _ds_list.append(item)
269
+ except AttributeError:
270
+ continue
271
+
272
+ if len(_ds_list) == 0:
273
+ return None
274
+ if len(_ds_list) == 1:
275
+ return _ds_list[0]
276
+ return _ds_list
277
+
278
+ def _name_label_match(
279
+ self,
280
+ current_unit: Unit,
281
+ name_override: str | None = None,
282
+ ) -> Unit | list[Unit] | None:
283
+ """Pulls out all units with same name as the input unit.
284
+
285
+ Returns:
286
+ Union[Unit, list[Unit], None]: Either a singular unit or list of units with matching names, if none exist returns none. Does not return itself
287
+ """
288
+
289
+ _name = name_override or str(current_unit.name)
290
+ _name_list = []
291
+ for item in self._all_units:
292
+ if item.name == _name and item != current_unit:
293
+ _name_list.append(item)
294
+ else:
295
+ pass
296
+
297
+ if len(_name_list) == 0:
298
+ return None
299
+ if len(_name_list) == 1:
300
+ return _name_list[0]
301
+ return _name_list
302
+
303
+ def _read(self):
304
+ # Read DAT data
305
+ with open(self._filepath) as dat_file:
306
+ self._raw_data = [line.rstrip("\n") for line in dat_file.readlines()]
307
+
308
+ # Generate DAT structure
309
+ self._update_dat_struct()
310
+
311
+ # Get network .gxy if present
312
+ gxy_path = self._filepath.with_suffix(".gxy")
313
+ if gxy_path.exists():
314
+ self._gxy_filepath = gxy_path
315
+ with open(self._gxy_filepath) as gxy_file:
316
+ self._gxy_data = gxy_file.read()
317
+ else:
318
+ self._gxy_filepath = None
319
+ self._gxy_data = None
320
+
321
+ def _write(self) -> str:
322
+ """Returns string representation of the current DAT data
323
+
324
+ Returns:
325
+ str: Full string representation of DAT in its most recent state (including changes not yet saved to disk)
326
+ """
327
+ try:
328
+ self._update_raw_data()
329
+ self._update_general_parameters()
330
+ self._update_dat_struct()
331
+ self._update_unit_names()
332
+
333
+ return "\n".join(self._raw_data) + "\n"
334
+
335
+ except Exception as e:
336
+ self._handle_exception(e, when="write")
337
+
338
+ def _create_from_blank(self, with_gxy: bool = False) -> None:
339
+ # No filepath specified, create new 'blank' DAT in memory
340
+ # ** Update these to have minimal data needed (general header, empty IC header)
341
+ self._dat_struct = [
342
+ {"start": 0, "Type": "GENERAL", "end": 6},
343
+ {"Type": "INITIAL CONDITIONS", "start": 7, "end": 8},
344
+ ]
345
+ self._raw_data = [
346
+ "",
347
+ "#REVISION#1",
348
+ " 0 0.750 0.900 0.100 0.001 12SI",
349
+ " 10.000 0.010 0.010 0.700 0.100 0.700 0.000",
350
+ "RAD FILE",
351
+ "",
352
+ "END GENERAL",
353
+ "INITIAL CONDITIONS",
354
+ " label ? flow stage froude no velocity umode ustate z",
355
+ ]
356
+
357
+ self._gxy_filepath = None
358
+ if with_gxy:
359
+ self._gxy_data = ""
360
+ else:
361
+ self._gxy_data = None
362
+
363
+ def _get_general_parameters(self) -> None:
364
+ # ** Get general parameters here
365
+ self.title = self._raw_data[0]
366
+ self.general_parameters = {}
367
+ line = f"{self._raw_data[2]:<70}"
368
+ params = units.helpers.split_10_char(line)
369
+ if params[6] == "":
370
+ # Adds the measurements unit as DEFAULT if not specified
371
+ params[6] = "DEFAULT"
372
+ line = f"{self._raw_data[3]:<70}"
373
+ params.extend(units.helpers.split_10_char(line))
374
+
375
+ self.general_parameters["Node Count"] = _to_int(params[0], 0)
376
+ self.general_parameters["Lower Froude"] = _to_float(params[1], 0.75)
377
+ self.general_parameters["Upper Froude"] = _to_float(params[2], 0.9)
378
+ self.general_parameters["Min Depth"] = _to_float(params[3], 0.1)
379
+ self.general_parameters["Convergence Direct"] = _to_float(params[4], 0.001)
380
+ self._label_len = _to_int(params[5], 12) # label length
381
+ self.general_parameters["Units"] = params[6] # "DEFAULT" set during read above.
382
+ self.general_parameters["Water Temperature"] = _to_float(params[7], 10.0)
383
+ self.general_parameters["Convergence Flow"] = _to_float(params[8], 0.01)
384
+ self.general_parameters["Convergence Head"] = _to_float(params[9], 0.01)
385
+ self.general_parameters["Mathematical Damping"] = _to_float(params[10], 0.7)
386
+ self.general_parameters["Pivotal Choice"] = _to_float(params[11], 0.1)
387
+ self.general_parameters["Under-relaxation"] = _to_float(params[12], 0.7)
388
+ self.general_parameters["Matrix Dummy"] = _to_float(params[13], 0.0)
389
+ self.general_parameters["RAD File"] = self._raw_data[5] # No default, optional
390
+
391
+ def _update_general_parameters(self) -> None:
392
+ self._raw_data[0] = self.title
393
+ self._raw_data[5] = self.general_parameters["RAD File"]
394
+ general_params_1 = units.helpers.join_10_char(
395
+ self.general_parameters["Node Count"],
396
+ self.general_parameters["Lower Froude"],
397
+ self.general_parameters["Upper Froude"],
398
+ self.general_parameters["Min Depth"],
399
+ self.general_parameters["Convergence Direct"],
400
+ self._label_len,
401
+ )
402
+ general_params_1 += self.general_parameters["Units"]
403
+ self._raw_data[2] = general_params_1
404
+
405
+ general_params_2 = units.helpers.join_10_char(
406
+ self.general_parameters["Water Temperature"],
407
+ self.general_parameters["Convergence Flow"],
408
+ self.general_parameters["Convergence Head"],
409
+ self.general_parameters["Mathematical Damping"],
410
+ self.general_parameters["Pivotal Choice"],
411
+ self.general_parameters["Under-relaxation"],
412
+ self.general_parameters["Matrix Dummy"],
413
+ )
414
+ self._raw_data[3] = general_params_2
415
+
416
+ def _update_unit_names(self):
417
+ for unit_group, unit_group_name in [
418
+ (self.boundaries, "boundaries"),
419
+ (self.sections, "sections"),
420
+ (self.structures, "structures"),
421
+ (self.conduits, "conduits"),
422
+ (self.losses, "losses"),
423
+ ]:
424
+ for name, unit in unit_group.copy().items():
425
+ if name != unit.name:
426
+ # Check if new name already exists as a label
427
+ if unit.name in unit_group:
428
+ raise Exception(
429
+ f'Error: Cannot update label "{name}" to "{unit.name}" because "{unit.name}" already exists in the Network {unit_group_name} group',
430
+ )
431
+ unit_group[unit.name] = unit
432
+ del unit_group[name]
433
+ # Update label in ICs
434
+ if unit_group_name not in ["boundaries", "losses"]:
435
+ # TODO: Need to do a more thorough check for whether a unit is one in the ICs
436
+ # e.g. Culvert inlet and river section may have same label, but only river
437
+ # section label should update in ICs
438
+ self.initial_conditions.update_label(name, unit.name)
439
+
440
+ # Update label in GISINFO and GXY data
441
+ self._update_gisinfo_label(
442
+ unit._unit,
443
+ unit._subtype,
444
+ name,
445
+ unit.name,
446
+ unit_group_name
447
+ in ["boundaries", "losses"], # if True it ignores second lbl
448
+ )
449
+ self._update_gxy_label(unit._unit, unit._subtype, name, unit.name)
450
+
451
+ # Update IC table names in raw_data if any name changes
452
+ ic_start, ic_end = next(
453
+ (unit["start"], unit["end"])
454
+ for unit in self._dat_struct
455
+ if unit["Type"] == "INITIAL CONDITIONS"
456
+ )
457
+ self._raw_data[ic_start : ic_end + 1] = self.initial_conditions._write()
458
+
459
+ def _update_raw_data(self):
460
+ block_shift = 0
461
+ comment_tracker = 0
462
+ comment_units = [unit for unit in self._all_units if unit._unit == "COMMENT"]
463
+ prev_block_end = self._dat_struct[0]["end"]
464
+ existing_units = {
465
+ "boundaries": [],
466
+ "structures": [],
467
+ "sections": [],
468
+ "conduits": [],
469
+ "losses": [],
470
+ }
471
+
472
+ for block in self._dat_struct:
473
+ # Check for all supported boundary types
474
+ if block["Type"] in units.SUPPORTED_UNIT_TYPES:
475
+ # clause for when unit has been inserted into the dat file
476
+ if "new_insert" in block:
477
+ block["start"] = prev_block_end + 1
478
+ block["end"] = block["start"] + len(block["new_insert"]) - 1
479
+ self._raw_data[block["start"] : block["start"]] = block["new_insert"]
480
+ block_shift += len(block["new_insert"])
481
+ prev_block_end = block["end"]
482
+ del block["new_insert"]
483
+
484
+ else:
485
+ unit_data = self._raw_data[
486
+ block["start"] + block_shift : block["end"] + 1 + block_shift
487
+ ]
488
+ prev_block_len = len(unit_data)
489
+
490
+ if block["Type"] == "INITIAL CONDITIONS":
491
+ new_unit_data = self.initial_conditions._write()
492
+ elif block["Type"] == "COMMENT":
493
+ comment = comment_units[comment_tracker]
494
+ new_unit_data = comment._write()
495
+ comment_tracker += 1
496
+
497
+ elif block["Type"] == "VARIABLES":
498
+ new_unit_data = self.variables._write()
499
+
500
+ else:
501
+ if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
502
+ unit_name = unit_data[2][: self._label_len].strip()
503
+ else:
504
+ unit_name = unit_data[1][: self._label_len].strip()
505
+
506
+ # Get unit object
507
+ unit_group = getattr(
508
+ self,
509
+ units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"],
510
+ )
511
+ if unit_name in unit_group:
512
+ # block still exists
513
+ new_unit_data = unit_group[unit_name]._write()
514
+ existing_units[
515
+ units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
516
+ ].append(unit_name)
517
+ else:
518
+ # Bdy block has been deleted
519
+ new_unit_data = []
520
+
521
+ new_block_len = len(new_unit_data)
522
+ self._raw_data[
523
+ block["start"] + block_shift : block["end"] + 1 + block_shift
524
+ ] = new_unit_data
525
+ # adjust block shift for change in number of lines in bdy block
526
+ block_shift += new_block_len - prev_block_len
527
+ prev_block_end = (
528
+ block["end"] + block_shift
529
+ ) # add in to keep a record of the last block read in
530
+
531
+ def _get_unit_definitions(self): # noqa: C901
532
+ # Get unit definitions
533
+ self.sections = {}
534
+ self.boundaries = {}
535
+ self.structures = {}
536
+ self.conduits = {}
537
+ self.losses = {}
538
+ self._unsupported = {}
539
+ self._all_units = []
540
+ for block in self._dat_struct:
541
+ unit_data = self._raw_data[block["start"] : block["end"] + 1]
542
+ if block["Type"] in units.SUPPORTED_UNIT_TYPES:
543
+ # Deal with initial conditions block
544
+ if block["Type"] == "INITIAL CONDITIONS":
545
+ self.initial_conditions = units.IIC(unit_data, n=self._label_len)
546
+ continue
547
+
548
+ if block["Type"] == "COMMENT":
549
+ self._all_units.append(units.COMMENT(unit_data, n=self._label_len))
550
+ continue
551
+
552
+ if block["Type"] == "VARIABLES":
553
+ self.variables = units.Variables(unit_data)
554
+ continue
555
+
556
+ # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
557
+ if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
558
+ unit_name = unit_data[2][: self._label_len].strip()
559
+ else:
560
+ unit_name = unit_data[1][: self._label_len].strip()
561
+
562
+ # Create instance of unit and add to relevant group
563
+ unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"])
564
+ if unit_name in unit_group:
565
+ raise Exception(
566
+ f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]}',
567
+ )
568
+ # Changes done to account for unit types with spaces/dashes eg Flat-V Weir
569
+ unit_type = block["Type"].replace(" ", "_").replace("-", "_")
570
+ unit_group[unit_name] = eval(
571
+ f"units.{unit_type}({unit_data}, {self._label_len})", # append to our _all._units as well???
572
+ )
573
+ self._all_units.append(unit_group[unit_name])
574
+
575
+ elif block["Type"] in units.UNSUPPORTED_UNIT_TYPES:
576
+ # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
577
+ if units.UNSUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
578
+ unit_name = unit_data[2][: self._label_len].strip()
579
+ subtype = True
580
+ else:
581
+ unit_name = unit_data[1][: self._label_len].strip()
582
+ subtype = False
583
+
584
+ self._unsupported[f"{unit_name} ({block['Type']})"] = units.UNSUPPORTED(
585
+ unit_data,
586
+ self._label_len,
587
+ unit_name=unit_name,
588
+ unit_type=block["Type"],
589
+ subtype=subtype,
590
+ )
591
+ self._all_units.append(self._unsupported[f"{unit_name} ({block['Type']})"])
592
+
593
+ elif block["Type"] not in ("GENERAL", "GISINFO"):
594
+ raise Exception(f"Unexpected unit type encountered: {block['Type']}")
595
+
596
    def _update_dat_struct(self) -> None:  # noqa: C901, PLR0912
        """Internal method used to update self._dat_struct which details the overall structure of the dat file as a list of blocks, each of which
        are a dictionary containing the 'start', 'end' and 'type' of the block.

        """
        # Generate DAT structure
        dat_struct = []
        in_block = False  # True while the lines of an open unit block are being scanned
        in_general = True  # DAT files start with a GENERAL header, scanned first
        in_comment = False  # True while consuming the body of a COMMENT block
        comment_n = None  # Used as counter for number of lines in a comment block
        gisinfo_block = False  # Once GISINFO starts, unit keywords are no longer matched
        general_block = {"start": 0, "Type": "GENERAL"}
        unit_block: dict[str, Any] = {}
        for idx, line in enumerate(self._raw_data):
            # Deal with 'general' header
            if in_general is True:
                if line == "END GENERAL":
                    general_block["end"] = idx
                    dat_struct.append(general_block)
                    in_general = False
                continue

            # Deal with comment blocks explicitly as they could contain unit keywords
            if in_comment and comment_n is None:
                # The line after the "COMMENT" keyword holds the comment line count
                comment_n = int(line.strip())
                continue
            if in_comment:
                comment_n -= 1
                if comment_n <= 0:
                    unit_block["end"] = idx + comment_n  # add ending index
                    # append existing bdy block to the dat_struct
                    dat_struct.append(unit_block)
                    unit_block = {}  # reset bdy block
                    in_comment = False
                    in_block = False
                    comment_n = None
                continue  # move onto next line as still in comment block

            if line == "COMMENT":
                in_comment = True
                # Close any open block and start a new COMMENT block at this line
                unit_block, in_block = self._close_struct_block(
                    dat_struct,
                    "COMMENT",
                    unit_block,
                    in_block,
                    idx,
                )
                continue

            if line == "GISINFO":
                gisinfo_block = True
                unit_block, in_block = self._close_struct_block(
                    dat_struct,
                    "GISINFO",
                    unit_block,
                    in_block,
                    idx,
                )

            if not gisinfo_block:
                if line.split(" ")[0] in units.ALL_UNIT_TYPES:
                    # The " " is needed here in case of empty string
                    unit_type = line.split()[0]
                elif " ".join(line.split()[:2]) in units.ALL_UNIT_TYPES:
                    # Some unit type keywords span two words
                    unit_type = " ".join(line.split()[:2])
                else:
                    continue

                unit_block, in_block = self._close_struct_block(
                    dat_struct,
                    unit_type,
                    unit_block,
                    in_block,
                    idx,
                )

        if len(unit_block) != 0:
            # Only adds end block if there is a block present (i.e. an empty DAT stays empty)
            # add ending index for final block
            unit_block["end"] = len(self._raw_data) - 1
            dat_struct.append(unit_block)  # add final block

        self._dat_struct = dat_struct
+
681
+ def _close_struct_block( # noqa: PLR0913
682
+ self,
683
+ dat_struct: list[dict],
684
+ unit_type: str,
685
+ unit_block: dict,
686
+ in_block: bool,
687
+ idx: int,
688
+ ) -> tuple[dict, bool]:
689
+ """Helper method to close block in dat struct"""
690
+ if in_block is True:
691
+ unit_block["end"] = idx - 1 # add ending index
692
+ # append existing bdy block to the dat_struct
693
+ dat_struct.append(unit_block)
694
+ unit_block = {} # reset bdy block
695
+ in_block = True
696
+ unit_block["Type"] = unit_type # start new bdy block
697
+ unit_block["start"] = idx # add starting index
698
+
699
+ return unit_block, in_block
700
+
701
+ def remove_unit(self, unit: Unit) -> None:
702
+ """Remove a unit from the dat file.
703
+
704
+ Args:
705
+ unit (Unit): flood modeller unit input.
706
+
707
+ Raises:
708
+ TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
709
+ """
710
+
711
+ try:
712
+ # catch if not valid unit
713
+ if not isinstance(unit, Unit):
714
+ raise TypeError("unit isn't a unit")
715
+
716
+ # remove from all units
717
+ index = self._all_units.index(unit)
718
+ del self._all_units[index]
719
+ # remove from dat_struct
720
+ dat_struct_unit = self._dat_struct[index + 1]
721
+ del self._dat_struct[index + 1]
722
+ # remove from raw data
723
+ del self._raw_data[dat_struct_unit["start"] : dat_struct_unit["end"] + 1]
724
+ # remove from unit group
725
+ unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
726
+ unit_group = getattr(self, unit_group_name)
727
+ del unit_group[unit.name]
728
+ # remove from ICs
729
+ self.initial_conditions.data = self.initial_conditions.data.loc[
730
+ self.initial_conditions.data["label"] != unit.name
731
+ ]
732
+
733
+ self._update_dat_struct()
734
+ self.general_parameters["Node Count"] -= 1
735
+
736
+ except Exception as e:
737
+ self._handle_exception(e, when="remove unit")
738
+
739
    def insert_unit(  # noqa: C901, PLR0912, PLR0913
        self,
        unit: Unit,
        add_before: Unit | None = None,
        add_after: Unit | None = None,
        add_at: int | None = None,
        defer_update: bool = False,
    ) -> None:
        """Inserts a unit into the dat file.

        Args:
            unit (Unit): FloodModeller unit input.
            add_before (Unit): FloodModeller unit to add before.
            add_after (Unit): FloodModeller unit to add after.
            add_at (integer): Positional argument (starting at 0) of where to add in
                the dat file. To add at the end of the network you can use -1.
            defer_update (bool): If True, skip rebuilding the raw data and dat
                structure after inserting (the caller must trigger the rebuild
                itself, as insert_units does after a batch of inserts).

        Raises:
            SyntaxError: Raised if no positional argument is given.
            TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
            NameError: Raised if unit name already appears in unit group.
        """
        try:
            # catch errors
            # Exactly one of add_before / add_after / add_at must be supplied
            provided_params = sum(arg is not None for arg in (add_before, add_after, add_at))
            if provided_params == 0:
                raise SyntaxError(
                    "No positional argument given. Please provide either add_before, add_at or add_after",
                )
            if provided_params > 1:
                raise SyntaxError("Only one of add_at, add_before, or add_after required")
            if not isinstance(unit, Unit):
                raise TypeError("unit isn't a unit")
            if add_at is None and not (isinstance(add_before, Unit) or isinstance(add_after, Unit)):
                raise TypeError(
                    "add_before or add_after argument must be a Flood Modeller Unit type",
                )

            unit_class = unit._unit
            if unit_class != "COMMENT":
                # COMMENT units carry no name/group, so validation and the
                # duplicate-name check only apply to real units
                _validate_unit(unit)
                unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
                unit_group = getattr(self, unit_group_name)
                if unit.name in unit_group:
                    raise NameError(
                        "Name already appears in unit group. Cannot have two units with same name in same group",
                    )

            # positional argument
            if add_at is not None:
                insert_index = add_at
                if insert_index < 0:
                    # Negative indices count from the end (-1 == append)
                    insert_index += len(self._all_units) + 1
                if insert_index < 0:
                    raise Exception(f"invalid add_at index: {add_at}")
            else:
                # Find the reference unit in the network to insert before/after
                check_unit = add_before or add_after
                for index, thing in enumerate(self._all_units):
                    if thing == check_unit:
                        insert_index = index
                        insert_index += 1 if add_after else 0
                        break
                else:
                    raise Exception(
                        f"{check_unit} not found in dat network, so cannot be used to add before/after",
                    )

            unit_data = unit._write()
            self._all_units.insert(insert_index, unit)
            if unit._unit != "COMMENT":
                unit_group[unit.name] = unit
            # +1 offset: self._dat_struct[0] is the GENERAL block; the
            # "new_insert" key holds the pre-rendered lines until the raw
            # data is rebuilt
            self._dat_struct.insert(
                insert_index + 1,
                {"Type": unit_class, "new_insert": unit_data},
            )  # add to dat struct without unit.name
            if unit._unit != "COMMENT":
                # update the iic's tables
                # NOTE(review): row presumably follows the initial conditions
                # column order (label, "y" flag, then numeric defaults) --
                # confirm against the IIC table schema
                iic_data = [unit.name, "y", 00.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
                self.initial_conditions.data.loc[len(self.initial_conditions.data)] = (
                    iic_data  # flagged
                )

            # update all
            if unit._unit != "COMMENT":
                self.general_parameters["Node Count"] += 1  # flag no update for comments

            if not defer_update:
                self._update_raw_data()
                self._update_dat_struct()

        except Exception as e:
            self._handle_exception(e, when="insert unit")
+
833
+ def insert_units(
834
+ self,
835
+ units: list[Unit],
836
+ add_before: Unit | None = None,
837
+ add_after: Unit | None = None,
838
+ add_at: int | None = None,
839
+ ) -> None:
840
+ """Inserts a list of units into the dat file.
841
+
842
+ Args:
843
+ units (list[Unit]): List of FloodModeller units.
844
+ add_before (Unit): FloodModeller unit to add before.
845
+ add_after (Unit): FloodModeller unit to add after.
846
+ add_at (integer): Positional argument (starting at 0) of where to add in
847
+ the dat file. To add at the end of the network you can use -1.
848
+ """
849
+ ordered = (add_at is None and add_after is None) or (isinstance(add_at, int) and add_at < 0)
850
+ ordered_units = units if ordered else units[::-1]
851
+ for unit in ordered_units:
852
+ self.insert_unit(unit, add_before, add_after, add_at, defer_update=True)
853
+ self._update_raw_data()
854
+ self._update_dat_struct()
855
+
856
+ def _update_gisinfo_label( # noqa: PLR0913
857
+ self,
858
+ unit_type,
859
+ unit_subtype,
860
+ prev_lbl,
861
+ new_lbl,
862
+ ignore_second,
863
+ ):
864
+ """Update labels in GISINFO block if unit is renamed"""
865
+
866
+ start, end = next(
867
+ (block["start"], block["end"])
868
+ for block in self._dat_struct
869
+ if block["Type"] == "GISINFO"
870
+ )
871
+ gisinfo_block = self._raw_data[start : end + 1]
872
+
873
+ prefix = unit_type if unit_subtype is None else f"{unit_type} {unit_subtype}"
874
+
875
+ new_gisinfo_block = []
876
+ for line in gisinfo_block:
877
+ # Replace first label
878
+ if line.startswith(f"{prefix} {prev_lbl} "):
879
+ # found matching line (space at the end is important to ignore node
880
+ # lables with similar starting chars)
881
+ line = line.replace(f"{prefix} {prev_lbl} ", f"{prefix} {new_lbl} ")
882
+
883
+ # Replace second label
884
+ if not ignore_second and line.startswith(
885
+ f"{prev_lbl} ",
886
+ ): # space at the end important again
887
+ line = line.replace(f"{prev_lbl} ", f"{new_lbl} ", 1)
888
+
889
+ new_gisinfo_block.append(line)
890
+
891
+ self._raw_data[start : end + 1] = new_gisinfo_block
892
+
893
+ def _update_gxy_label(
894
+ self,
895
+ unit_type: str,
896
+ unit_subtype: str,
897
+ prev_lbl: str,
898
+ new_lbl: str,
899
+ ) -> None:
900
+ """Update labels in GXY file if unit is renamed"""
901
+
902
+ if self._gxy_data is not None:
903
+ if unit_subtype is None:
904
+ unit_subtype = ""
905
+
906
+ old = f"{unit_type}_{unit_subtype}_{prev_lbl}"
907
+ new = f"{unit_type}_{unit_subtype}_{new_lbl}"
908
+
909
+ self._gxy_data = self._gxy_data.replace(old, new)