floodmodeller-api 0.4.2__py3-none-any.whl → 0.4.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178) hide show
  1. floodmodeller_api/__init__.py +8 -9
  2. floodmodeller_api/_base.py +184 -176
  3. floodmodeller_api/backup.py +273 -273
  4. floodmodeller_api/dat.py +909 -838
  5. floodmodeller_api/diff.py +136 -119
  6. floodmodeller_api/ied.py +307 -311
  7. floodmodeller_api/ief.py +647 -646
  8. floodmodeller_api/ief_flags.py +253 -253
  9. floodmodeller_api/inp.py +266 -268
  10. floodmodeller_api/libs/libifcoremd.dll +0 -0
  11. floodmodeller_api/libs/libifcoremt.so.5 +0 -0
  12. floodmodeller_api/libs/libifport.so.5 +0 -0
  13. floodmodeller_api/{libmmd.dll → libs/libimf.so} +0 -0
  14. floodmodeller_api/libs/libintlc.so.5 +0 -0
  15. floodmodeller_api/libs/libmmd.dll +0 -0
  16. floodmodeller_api/libs/libsvml.so +0 -0
  17. floodmodeller_api/libs/libzzn_read.so +0 -0
  18. floodmodeller_api/libs/zzn_read.dll +0 -0
  19. floodmodeller_api/logs/__init__.py +2 -2
  20. floodmodeller_api/logs/lf.py +320 -314
  21. floodmodeller_api/logs/lf_helpers.py +354 -346
  22. floodmodeller_api/logs/lf_params.py +643 -529
  23. floodmodeller_api/mapping.py +84 -0
  24. floodmodeller_api/test/__init__.py +4 -4
  25. floodmodeller_api/test/conftest.py +9 -8
  26. floodmodeller_api/test/test_backup.py +117 -117
  27. floodmodeller_api/test/test_dat.py +221 -92
  28. floodmodeller_api/test/test_data/All Units 4_6.DAT +1081 -1081
  29. floodmodeller_api/test/test_data/All Units 4_6.feb +1081 -1081
  30. floodmodeller_api/test/test_data/BRIDGE.DAT +926 -926
  31. floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.dat +36 -36
  32. floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.feb +36 -36
  33. floodmodeller_api/test/test_data/DamBreakADI.xml +52 -52
  34. floodmodeller_api/test/test_data/DamBreakFAST.xml +58 -58
  35. floodmodeller_api/test/test_data/DamBreakFAST_dy.xml +53 -53
  36. floodmodeller_api/test/test_data/DamBreakTVD.xml +55 -55
  37. floodmodeller_api/test/test_data/DefenceBreach.xml +53 -53
  38. floodmodeller_api/test/test_data/DefenceBreachFAST.xml +60 -60
  39. floodmodeller_api/test/test_data/DefenceBreachFAST_dy.xml +55 -55
  40. floodmodeller_api/test/test_data/Domain1+2_QH.xml +76 -76
  41. floodmodeller_api/test/test_data/Domain1_H.xml +41 -41
  42. floodmodeller_api/test/test_data/Domain1_Q.xml +41 -41
  43. floodmodeller_api/test/test_data/Domain1_Q_FAST.xml +48 -48
  44. floodmodeller_api/test/test_data/Domain1_Q_FAST_dy.xml +48 -48
  45. floodmodeller_api/test/test_data/Domain1_Q_xml_expected.json +263 -0
  46. floodmodeller_api/test/test_data/Domain1_W.xml +41 -41
  47. floodmodeller_api/test/test_data/EX1.DAT +321 -321
  48. floodmodeller_api/test/test_data/EX1.ext +107 -107
  49. floodmodeller_api/test/test_data/EX1.feb +320 -320
  50. floodmodeller_api/test/test_data/EX1.gxy +107 -107
  51. floodmodeller_api/test/test_data/EX17.DAT +421 -422
  52. floodmodeller_api/test/test_data/EX17.ext +213 -213
  53. floodmodeller_api/test/test_data/EX17.feb +422 -422
  54. floodmodeller_api/test/test_data/EX18.DAT +375 -375
  55. floodmodeller_api/test/test_data/EX18_DAT_expected.json +3876 -0
  56. floodmodeller_api/test/test_data/EX2.DAT +302 -302
  57. floodmodeller_api/test/test_data/EX3.DAT +926 -926
  58. floodmodeller_api/test/test_data/EX3_DAT_expected.json +16235 -0
  59. floodmodeller_api/test/test_data/EX3_IEF_expected.json +61 -0
  60. floodmodeller_api/test/test_data/EX6.DAT +2084 -2084
  61. floodmodeller_api/test/test_data/EX6.ext +532 -532
  62. floodmodeller_api/test/test_data/EX6.feb +2084 -2084
  63. floodmodeller_api/test/test_data/EX6_DAT_expected.json +31647 -0
  64. floodmodeller_api/test/test_data/Event Data Example.DAT +336 -336
  65. floodmodeller_api/test/test_data/Event Data Example.ext +107 -107
  66. floodmodeller_api/test/test_data/Event Data Example.feb +336 -336
  67. floodmodeller_api/test/test_data/Linked1D2D.xml +52 -52
  68. floodmodeller_api/test/test_data/Linked1D2DFAST.xml +53 -53
  69. floodmodeller_api/test/test_data/Linked1D2DFAST_dy.xml +48 -48
  70. floodmodeller_api/test/test_data/Linked1D2D_xml_expected.json +313 -0
  71. floodmodeller_api/test/test_data/blockage.dat +50 -50
  72. floodmodeller_api/test/test_data/blockage.ext +45 -45
  73. floodmodeller_api/test/test_data/blockage.feb +9 -9
  74. floodmodeller_api/test/test_data/blockage.gxy +71 -71
  75. floodmodeller_api/test/test_data/defaultUnits.dat +127 -127
  76. floodmodeller_api/test/test_data/defaultUnits.ext +45 -45
  77. floodmodeller_api/test/test_data/defaultUnits.feb +9 -9
  78. floodmodeller_api/test/test_data/defaultUnits.fmpx +58 -58
  79. floodmodeller_api/test/test_data/defaultUnits.gxy +85 -85
  80. floodmodeller_api/test/test_data/ex3.ief +20 -20
  81. floodmodeller_api/test/test_data/ex3.lf1 +2800 -2800
  82. floodmodeller_api/test/test_data/ex4.DAT +1374 -1374
  83. floodmodeller_api/test/test_data/ex4_changed.DAT +1374 -1374
  84. floodmodeller_api/test/test_data/example1.inp +329 -329
  85. floodmodeller_api/test/test_data/example2.inp +158 -158
  86. floodmodeller_api/test/test_data/example3.inp +297 -297
  87. floodmodeller_api/test/test_data/example4.inp +388 -388
  88. floodmodeller_api/test/test_data/example5.inp +147 -147
  89. floodmodeller_api/test/test_data/example6.inp +154 -154
  90. floodmodeller_api/test/test_data/jump.dat +176 -176
  91. floodmodeller_api/test/test_data/network.dat +1374 -1374
  92. floodmodeller_api/test/test_data/network.ext +45 -45
  93. floodmodeller_api/test/test_data/network.exy +1 -1
  94. floodmodeller_api/test/test_data/network.feb +45 -45
  95. floodmodeller_api/test/test_data/network.ied +45 -45
  96. floodmodeller_api/test/test_data/network.ief +20 -20
  97. floodmodeller_api/test/test_data/network.inp +147 -147
  98. floodmodeller_api/test/test_data/network.pxy +57 -57
  99. floodmodeller_api/test/test_data/network.zzd +122 -122
  100. floodmodeller_api/test/test_data/network_dat_expected.json +21837 -0
  101. floodmodeller_api/test/test_data/network_from_tabularCSV.csv +87 -87
  102. floodmodeller_api/test/test_data/network_ied_expected.json +287 -0
  103. floodmodeller_api/test/test_data/rnweir.dat +9 -9
  104. floodmodeller_api/test/test_data/rnweir.ext +45 -45
  105. floodmodeller_api/test/test_data/rnweir.feb +9 -9
  106. floodmodeller_api/test/test_data/rnweir.gxy +45 -45
  107. floodmodeller_api/test/test_data/rnweir_default.dat +74 -74
  108. floodmodeller_api/test/test_data/rnweir_default.ext +45 -45
  109. floodmodeller_api/test/test_data/rnweir_default.feb +9 -9
  110. floodmodeller_api/test/test_data/rnweir_default.fmpx +58 -58
  111. floodmodeller_api/test/test_data/rnweir_default.gxy +53 -53
  112. floodmodeller_api/test/test_data/unit checks.dat +16 -16
  113. floodmodeller_api/test/test_ied.py +29 -29
  114. floodmodeller_api/test/test_ief.py +125 -24
  115. floodmodeller_api/test/test_inp.py +47 -48
  116. floodmodeller_api/test/test_json.py +114 -0
  117. floodmodeller_api/test/test_logs_lf.py +48 -51
  118. floodmodeller_api/test/test_tool.py +165 -154
  119. floodmodeller_api/test/test_toolbox_structure_log.py +234 -239
  120. floodmodeller_api/test/test_xml2d.py +151 -156
  121. floodmodeller_api/test/test_zzn.py +36 -34
  122. floodmodeller_api/to_from_json.py +218 -0
  123. floodmodeller_api/tool.py +332 -330
  124. floodmodeller_api/toolbox/__init__.py +5 -5
  125. floodmodeller_api/toolbox/example_tool.py +45 -45
  126. floodmodeller_api/toolbox/model_build/__init__.py +2 -2
  127. floodmodeller_api/toolbox/model_build/add_siltation_definition.py +100 -94
  128. floodmodeller_api/toolbox/model_build/structure_log/__init__.py +1 -1
  129. floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +287 -289
  130. floodmodeller_api/toolbox/model_build/structure_log_definition.py +76 -72
  131. floodmodeller_api/units/__init__.py +10 -10
  132. floodmodeller_api/units/_base.py +214 -209
  133. floodmodeller_api/units/boundaries.py +467 -469
  134. floodmodeller_api/units/comment.py +52 -55
  135. floodmodeller_api/units/conduits.py +382 -403
  136. floodmodeller_api/units/helpers.py +123 -132
  137. floodmodeller_api/units/iic.py +107 -101
  138. floodmodeller_api/units/losses.py +305 -308
  139. floodmodeller_api/units/sections.py +444 -445
  140. floodmodeller_api/units/structures.py +1690 -1684
  141. floodmodeller_api/units/units.py +93 -102
  142. floodmodeller_api/units/unsupported.py +44 -44
  143. floodmodeller_api/units/variables.py +87 -89
  144. floodmodeller_api/urban1d/__init__.py +11 -11
  145. floodmodeller_api/urban1d/_base.py +188 -177
  146. floodmodeller_api/urban1d/conduits.py +93 -85
  147. floodmodeller_api/urban1d/general_parameters.py +58 -58
  148. floodmodeller_api/urban1d/junctions.py +81 -79
  149. floodmodeller_api/urban1d/losses.py +81 -74
  150. floodmodeller_api/urban1d/outfalls.py +114 -107
  151. floodmodeller_api/urban1d/raingauges.py +111 -108
  152. floodmodeller_api/urban1d/subsections.py +92 -93
  153. floodmodeller_api/urban1d/xsections.py +147 -141
  154. floodmodeller_api/util.py +77 -21
  155. floodmodeller_api/validation/parameters.py +660 -660
  156. floodmodeller_api/validation/urban_parameters.py +388 -404
  157. floodmodeller_api/validation/validation.py +110 -112
  158. floodmodeller_api/version.py +1 -1
  159. floodmodeller_api/xml2d.py +688 -684
  160. floodmodeller_api/xml2d_template.py +37 -37
  161. floodmodeller_api/zzn.py +387 -365
  162. {floodmodeller_api-0.4.2.dist-info → floodmodeller_api-0.4.3.dist-info}/LICENSE.txt +13 -13
  163. {floodmodeller_api-0.4.2.dist-info → floodmodeller_api-0.4.3.dist-info}/METADATA +82 -82
  164. floodmodeller_api-0.4.3.dist-info/RECORD +179 -0
  165. {floodmodeller_api-0.4.2.dist-info → floodmodeller_api-0.4.3.dist-info}/WHEEL +1 -1
  166. floodmodeller_api-0.4.3.dist-info/entry_points.txt +3 -0
  167. floodmodeller_api/libifcoremd.dll +0 -0
  168. floodmodeller_api/test/test_data/EX3.bmp +0 -0
  169. floodmodeller_api/test/test_data/test_output.csv +0 -87
  170. floodmodeller_api/zzn_read.dll +0 -0
  171. floodmodeller_api-0.4.2.data/scripts/fmapi-add_siltation.bat +0 -2
  172. floodmodeller_api-0.4.2.data/scripts/fmapi-add_siltation.py +0 -3
  173. floodmodeller_api-0.4.2.data/scripts/fmapi-structure_log.bat +0 -2
  174. floodmodeller_api-0.4.2.data/scripts/fmapi-structure_log.py +0 -3
  175. floodmodeller_api-0.4.2.data/scripts/fmapi-toolbox.bat +0 -2
  176. floodmodeller_api-0.4.2.data/scripts/fmapi-toolbox.py +0 -41
  177. floodmodeller_api-0.4.2.dist-info/RECORD +0 -169
  178. {floodmodeller_api-0.4.2.dist-info → floodmodeller_api-0.4.3.dist-info}/top_level.txt +0 -0
floodmodeller_api/dat.py CHANGED
@@ -1,838 +1,909 @@
1
- """
2
- Flood Modeller Python API
3
- Copyright (C) 2023 Jacobs U.K. Limited
4
-
5
- This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
- as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
7
-
8
- This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
9
- of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
10
-
11
- You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
12
-
13
- If you have any query about this program or this License, please contact us at support@floodmodeller.com or write to the following
14
- address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London, SE1 2QG, United Kingdom.
15
- """
16
-
17
- from pathlib import Path
18
- from typing import Optional, Union
19
-
20
- from . import units # Import for using as package
21
- from ._base import FMFile
22
- from .units._base import Unit
23
- from .units.helpers import _to_float, _to_int
24
- from .validation.validation import _validate_unit
25
-
26
-
27
- class DAT(FMFile):
28
 - """Reads and writes Flood Modeller datafile format '.dat'
29
-
30
- Args:
31
- dat_filepath (str, optional): Full filepath to dat file. If not specified, a new DAT class will be created. Defaults to None.
32
-
33
- Output:
34
- Initiates 'DAT' class object
35
-
36
- Raises:
37
- TypeError: Raised if dat_filepath does not point to a .dat file
38
- FileNotFoundError: Raised if dat_filepath points to a file which does not exist
39
- """
40
-
41
- _filetype: str = "DAT"
42
- _suffix: str = ".dat"
43
-
44
- def __init__(self, dat_filepath: Optional[Union[str, Path]] = None, with_gxy: bool = False):
45
- try:
46
- self._filepath = dat_filepath
47
- if self._filepath is not None:
48
- FMFile.__init__(self)
49
- self._read()
50
-
51
- else:
52
- self._create_from_blank(with_gxy)
53
-
54
- self._get_general_parameters()
55
- self._get_unit_definitions()
56
- except Exception as e:
57
- self._handle_exception(e, when="read")
58
-
59
- def update(self) -> None:
60
- """Updates the existing DAT based on any altered attributes"""
61
- self._update()
62
- self._write_gxy(self._gxy_filepath)
63
-
64
- def save(self, filepath: Union[str, Path]) -> None:
65
- """Saves the DAT to the given location, if pointing to an existing file it will be overwritten.
66
- Once saved, the DAT() class will continue working from the saved location, therefore any further calls to DAT.update() will
67
- update in the latest saved location rather than the original source DAT used to construct the class
68
-
69
- Args:
70
- filepath (str): Filepath to new save location including the name and '.dat' extension
71
-
72
- Raises:
73
- TypeError: Raised if given filepath doesn't point to a file suffixed '.dat'
74
- """
75
- filepath = Path(filepath).absolute()
76
- self._save(filepath)
77
- self._write_gxy(filepath)
78
-
79
- def _write_gxy(self, filepath):
80
- if self._gxy_data is not None:
81
- gxy_string = self._gxy_data
82
- new_gxy_path = filepath.with_suffix(".gxy")
83
- with open(new_gxy_path, "w") as gxy_file:
84
- gxy_file.write(gxy_string)
85
- self._gxy_filepath = new_gxy_path
86
-
87
- def diff(self, other: "DAT", force_print: bool = False) -> None:
88
- """Compares the DAT class against another DAT class to check whether they are
89
- equivalent, or if not, what the differences are. Two instances of a DAT class are
90
- deemed equivalent if all of their attributes are equal except for the filepath and
91
- raw data. For example, two DAT files from different filepaths that had the same
92
- data except maybe some differences in decimal places and some default parameters
93
 - omitted, would be classed as equivalent as they would produce the same DAT instance
94
- and write the exact same data.
95
-
96
- The result is printed to the console. If you need to access the returned data, use
97
- the method ``DAT._get_diff()``
98
-
99
- Args:
100
- other (floodmodeller_api.DAT): Other instance of a DAT class
101
- force_print (bool): Forces the API to print every difference found, rather than
102
- just the first 25 differences. Defaults to False.
103
- """
104
- self._diff(other, force_print=force_print)
105
-
106
- # def _get_unit_from_connectivity(self, method) #use this as method prev and next
107
-
108
- def next(self, unit: Unit) -> Union[Unit, list[Unit], None]:
109
- """Finds next unit in the reach.
110
-
111
 - Next unit in reach can be inferred by:
112
- The next unit in the .dat file structure - such as when a river section has a positive distance to next
113
- The units with the exact same name - such as a junction unit
114
- The next unit as described in the ds_label - such as with Bridge units
115
-
116
- Args:
117
- unit (Unit): flood modeller unit input.
118
-
119
- Returns:
120
- Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
121
- """
122
- # Needs to handle same name match outside dist to next (e.g. inflow)
123
- try:
124
- if hasattr(unit, "dist_to_next"):
125
- # Case 1a - positive distance to next
126
- if unit.dist_to_next != 0:
127
- return self._next_in_dat_struct(unit)
128
-
129
- # Case 1b - distance to next = 0
130
- else:
131
- return self._name_label_match(unit)
132
-
133
- # Case 2: next unit is in ds_label
134
- elif hasattr(unit, "ds_label"):
135
- return self._name_label_match(unit, name_override=unit.ds_label)
136
-
137
- elif unit._unit == "JUNCTION":
138
- return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels]
139
-
140
- elif unit._unit in ("QHBDY", "NCDBDY", "TIDBDY"):
141
- return None
142
-
143
- else:
144
- return self._name_label_match(unit)
145
-
146
- except Exception as e:
147
- self._handle_exception(e, when="calculating next unit")
148
-
149
- def prev(self, unit: Unit) -> Union[Unit, list[Unit], None]:
150
- """Finds previous unit in the reach.
151
-
152
 - Previous unit in reach can be inferred by:
153
- The previous unit in the .dat file structure - such as when the previous river section has a positive distance to next.
154
- The units with the exact same name - such as a junction unit
155
- The previous unit as linked through upstream and downstream labels - such as with Bridge units
156
-
157
- Args:
158
- unit (Unit): flood modeller unit input.
159
-
160
- Returns:
161
- Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
162
- """
163
-
164
- try:
165
- # Case 1: Unit is input boundary condition
166
- if unit._unit in (
167
- "QTBDY",
168
- "HTBDY",
169
- "REFHBDY",
170
- "FEHBDY",
171
- "FRQSIM",
172
- "FSRBDY",
173
- "FSSR16BDY",
174
- "GERRBDY",
175
- "REBDY",
176
- "REFH2BDY",
177
- "SCSBDY",
178
- ):
179
- return None
180
-
181
- elif unit._unit == "JUNCTION":
182
- return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels]
183
-
184
- prev_units = []
185
- _prev_in_dat = self._prev_in_dat_struct(unit)
186
- _name_match = self._name_label_match(unit)
187
- _ds_label_match = self._ds_label_match(unit)
188
- _junction_match = [
189
- junction
190
- for junction in self._all_units
191
- if junction._unit == "JUNCTION" and unit.name in junction.labels
192
- ]
193
-
194
- # Case 2: Previous unit has positive distance to next
195
- if hasattr(_prev_in_dat, "dist_to_next") and _prev_in_dat.dist_to_next != 0:
196
- prev_units.append(_prev_in_dat)
197
 - _name_match = None  # Name match does not apply if upstream section exists
198
-
199
 - # All other matches added (matching name, matching name to ds_label and junction)
200
- for match in [_name_match, _ds_label_match, _junction_match]:
201
- if isinstance(match, list):
202
- prev_units.extend(match)
203
- else:
204
- prev_units.append(match)
205
-
206
- # Filter out 'None' matches
207
- prev_units = [_unit for _unit in prev_units if _unit is not None]
208
-
209
- if len(prev_units) == 0:
210
- return None
211
- elif len(prev_units) == 1:
212
- return prev_units[0]
213
- else:
214
- return prev_units
215
-
216
- except Exception as e:
217
- self._handle_exception(e, when="calculating next unit")
218
-
219
- def _next_in_dat_struct(self, current_unit) -> Unit:
220
- """Finds next unit in the dat file using the index position.
221
-
222
- Returns:
223
- Unit with all associated data
224
- """
225
-
226
- for idx, unit in enumerate(self._all_units):
227
- # Names checked first to speed up comparison
228
- if unit.name == current_unit.name and unit == current_unit:
229
- try:
230
- return self._all_units[idx + 1]
231
- except IndexError:
232
- return None
233
-
234
- def _prev_in_dat_struct(self, current_unit) -> Unit:
235
- """Finds previous unit in the dat file using the index position.
236
-
237
- Returns:
238
- Unit with all associated data
239
- """
240
- for idx, unit in enumerate(self._all_units):
241
- # Names checked first to speed up comparison
242
- if unit.name == current_unit.name and unit == current_unit:
243
- if idx == 0:
244
- return None
245
- else:
246
- return self._all_units[idx - 1]
247
-
248
- def _ds_label_match(self, current_unit) -> Union[Unit, list[Unit], None]:
249
- """Pulls out all units with ds label that matches the input unit.
250
-
251
- Returns:
252
- Union[Unit, list[Unit], None]: Either a singular unit or list of units with ds_label matching, if none exist returns none.
253
- """
254
-
255
- _ds_list = []
256
- for item in self._all_units:
257
- try:
258
- if item.ds_label == current_unit.name:
259
- _ds_list.append(item)
260
- except AttributeError:
261
- continue
262
-
263
- if len(_ds_list) == 0:
264
- return None
265
- elif len(_ds_list) == 1:
266
- return _ds_list[0]
267
- else:
268
- return _ds_list
269
-
270
- def _name_label_match(self, current_unit, name_override=None) -> Union[Unit, list[Unit], None]:
271
- """Pulls out all units with same name as the input unit.
272
-
273
- Returns:
274
- Union[Unit, list[Unit], None]: Either a singular unit or list of units with matching names, if none exist returns none. Does not return itself
275
- """
276
-
277
- _name = name_override or str(current_unit.name)
278
- _name_list = []
279
- for item in self._all_units:
280
- if item.name == _name and item != current_unit:
281
- _name_list.append(item)
282
- else:
283
- pass
284
-
285
- if len(_name_list) == 0:
286
- return None
287
- elif len(_name_list) == 1:
288
- return _name_list[0]
289
- else:
290
- return _name_list
291
-
292
- def _read(self):
293
- # Read DAT data
294
- with open(self._filepath, "r") as dat_file:
295
- self._raw_data = [line.rstrip("\n") for line in dat_file.readlines()]
296
-
297
- # Generate DAT structure
298
- self._update_dat_struct()
299
-
300
- # Get network .gxy if present
301
- gxy_path = self._filepath.with_suffix(".gxy")
302
- if gxy_path.exists():
303
- self._gxy_filepath = gxy_path
304
- with open(self._gxy_filepath, "r") as gxy_file:
305
- self._gxy_data = gxy_file.read()
306
- else:
307
- self._gxy_filepath = None
308
- self._gxy_data = None
309
-
310
- def _write(self) -> str:
311
- """Returns string representation of the current DAT data
312
-
313
- Returns:
314
- str: Full string representation of DAT in its most recent state (including changes not yet saved to disk)
315
- """
316
- try:
317
- self._update_raw_data()
318
- self._update_general_parameters()
319
- self._update_dat_struct()
320
- self._update_unit_names()
321
-
322
- dat_string = ""
323
- for line in self._raw_data:
324
- dat_string += line + "\n"
325
-
326
- return dat_string
327
-
328
- except Exception as e:
329
- self._handle_exception(e, when="write")
330
-
331
- def _create_from_blank(self, with_gxy=False):
332
- # No filepath specified, create new 'blank' DAT in memory
333
- # ** Update these to have minimal data needed (general header, empty IC header)
334
- self._dat_struct = [
335
- {"start": 0, "Type": "GENERAL", "end": 6},
336
- {"Type": "INITIAL CONDITIONS", "start": 7, "end": 8},
337
- ]
338
- self._raw_data = [
339
- "",
340
- "#REVISION#1",
341
- " 0 0.750 0.900 0.100 0.001 12SI",
342
- " 10.000 0.010 0.010 0.700 0.100 0.700 0.000",
343
- "RAD FILE",
344
- "",
345
- "END GENERAL",
346
- "INITIAL CONDITIONS",
347
- " label ? flow stage froude no velocity umode ustate z",
348
- ]
349
-
350
- self._gxy_filepath = None
351
- if with_gxy:
352
- self._gxy_data = ""
353
- else:
354
- self._gxy_data = None
355
-
356
- def _get_general_parameters(self):
357
- # ** Get general parameters here
358
- self.title = self._raw_data[0]
359
- self.general_parameters = {}
360
- line = f"{self._raw_data[2]:<70}"
361
- params = units.helpers.split_10_char(line)
362
- if params[6] == "":
363
- # Adds the measurements unit as DEFAULT if not specified
364
- params[6] = "DEFAULT"
365
- line = f"{self._raw_data[3]:<70}"
366
- params.extend(units.helpers.split_10_char(line))
367
-
368
- self.general_parameters["Node Count"] = _to_int(params[0], 0)
369
- self.general_parameters["Lower Froude"] = _to_float(params[1], 0.75)
370
- self.general_parameters["Upper Froude"] = _to_float(params[2], 0.9)
371
- self.general_parameters["Min Depth"] = _to_float(params[3], 0.1)
372
- self.general_parameters["Convergence Direct"] = _to_float(params[4], 0.001)
373
- self._label_len = _to_int(params[5], 12) # label length
374
- self.general_parameters["Units"] = params[6] # "DEFAULT" set during read above.
375
- self.general_parameters["Water Temperature"] = _to_float(params[7], 10.0)
376
- self.general_parameters["Convergence Flow"] = _to_float(params[8], 0.01)
377
- self.general_parameters["Convergence Head"] = _to_float(params[9], 0.01)
378
- self.general_parameters["Mathematical Damping"] = _to_float(params[10], 0.7)
379
- self.general_parameters["Pivotal Choice"] = _to_float(params[11], 0.1)
380
- self.general_parameters["Under-relaxation"] = _to_float(params[12], 0.7)
381
- self.general_parameters["Matrix Dummy"] = _to_float(params[13], 0.0)
382
- self.general_parameters["RAD File"] = self._raw_data[5] # No default, optional
383
-
384
- def _update_general_parameters(self):
385
- self._raw_data[0] = self.title
386
- self._raw_data[5] = self.general_parameters["RAD File"]
387
- general_params_1 = units.helpers.join_10_char(
388
- self.general_parameters["Node Count"],
389
- self.general_parameters["Lower Froude"],
390
- self.general_parameters["Upper Froude"],
391
- self.general_parameters["Min Depth"],
392
- self.general_parameters["Convergence Direct"],
393
- self._label_len,
394
- )
395
- general_params_1 += self.general_parameters["Units"]
396
- self._raw_data[2] = general_params_1
397
-
398
- general_params_2 = units.helpers.join_10_char(
399
- self.general_parameters["Water Temperature"],
400
- self.general_parameters["Convergence Flow"],
401
- self.general_parameters["Convergence Head"],
402
- self.general_parameters["Mathematical Damping"],
403
- self.general_parameters["Pivotal Choice"],
404
- self.general_parameters["Under-relaxation"],
405
- self.general_parameters["Matrix Dummy"],
406
- )
407
- self._raw_data[3] = general_params_2
408
-
409
- def _update_unit_names(self):
410
- for unit_group, unit_group_name in [
411
- (self.boundaries, "boundaries"),
412
- (self.sections, "sections"),
413
- (self.structures, "structures"),
414
- (self.conduits, "conduits"),
415
- (self.losses, "losses"),
416
- ]:
417
- for name, unit in unit_group.copy().items():
418
- if name != unit.name:
419
- # Check if new name already exists as a label
420
- if unit.name in unit_group:
421
- raise Exception(
422
- f'Error: Cannot update label "{name}" to "{unit.name}" because "{unit.name}" already exists in the Network {unit_group_name} group'
423
- )
424
- unit_group[unit.name] = unit
425
- del unit_group[name]
426
- # Update label in ICs
427
- if unit_group_name not in ["boundaries", "losses"]:
428
- # TODO: Need to do a more thorough check for whether a unit is one in the ICs
429
- # e.g. Culvert inlet and river section may have same label, but only river
430
- # section label should update in ICs
431
- self.initial_conditions.update_label(name, unit.name)
432
-
433
- # Update label in GISINFO and GXY data
434
- self._update_gisinfo_label(
435
- unit._unit,
436
- unit._subtype,
437
- name,
438
- unit.name,
439
- unit_group_name
440
- in ["boundaries", "losses"], # if True it ignores second lbl
441
- )
442
- self._update_gxy_label(unit._unit, unit._subtype, name, unit.name)
443
-
444
- # Update IC table names in raw_data if any name changes
445
- ic_start, ic_end = next(
446
- (unit["start"], unit["end"])
447
- for unit in self._dat_struct
448
- if unit["Type"] == "INITIAL CONDITIONS"
449
- )
450
- self._raw_data[ic_start : ic_end + 1] = self.initial_conditions._write()
451
-
452
- def _update_raw_data(self):
453
- block_shift = 0
454
- comment_tracker = 0
455
- comment_units = [unit for unit in self._all_units if unit._unit == "COMMENT"]
456
- prev_block_end = self._dat_struct[0]["end"]
457
- existing_units = {
458
- "boundaries": [],
459
- "structures": [],
460
- "sections": [],
461
- "conduits": [],
462
- "losses": [],
463
- }
464
-
465
- for block in self._dat_struct:
466
- # Check for all supported boundary types
467
- if block["Type"] in units.SUPPORTED_UNIT_TYPES:
468
- # clause for when unit has been inserted into the dat file
469
- if "new_insert" in block.keys():
470
- block["start"] = prev_block_end + 1
471
- block["end"] = block["start"] + len(block["new_insert"]) - 1
472
- self._raw_data[block["start"] : block["start"]] = block["new_insert"]
473
- block_shift += len(block["new_insert"])
474
- prev_block_end = block["end"]
475
- del block["new_insert"]
476
-
477
- else:
478
- unit_data = self._raw_data[
479
- block["start"] + block_shift : block["end"] + 1 + block_shift
480
- ]
481
- prev_block_len = len(unit_data)
482
-
483
- if block["Type"] == "INITIAL CONDITIONS":
484
- new_unit_data = self.initial_conditions._write()
485
- elif block["Type"] == "COMMENT":
486
- comment = comment_units[comment_tracker]
487
- new_unit_data = comment._write()
488
- comment_tracker += 1
489
-
490
- elif block["Type"] == "VARIABLES":
491
- new_unit_data = self.variables._write()
492
-
493
- else:
494
- if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
495
- unit_name = unit_data[2][: self._label_len].strip()
496
- else:
497
- unit_name = unit_data[1][: self._label_len].strip()
498
-
499
- # Get unit object
500
- unit_group = getattr(
501
- self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
502
- )
503
- if unit_name in unit_group:
504
- # block still exists
505
- new_unit_data = unit_group[unit_name]._write()
506
- existing_units[
507
- units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
508
- ].append(unit_name)
509
- else:
510
- # Bdy block has been deleted
511
- new_unit_data = []
512
-
513
- new_block_len = len(new_unit_data)
514
- self._raw_data[
515
- block["start"] + block_shift : block["end"] + 1 + block_shift
516
- ] = new_unit_data
517
- # adjust block shift for change in number of lines in bdy block
518
- block_shift += new_block_len - prev_block_len
519
- prev_block_end = (
520
- block["end"] + block_shift
521
- ) # add in to keep a record of the last block read in
522
-
523
- def _get_unit_definitions(self): # noqa: C901
524
- # Get unit definitions
525
- self.sections = {}
526
- self.boundaries = {}
527
- self.structures = {}
528
- self.conduits = {}
529
- self.losses = {}
530
- self._unsupported = {}
531
- self._all_units = []
532
- for block in self._dat_struct:
533
- unit_data = self._raw_data[block["start"] : block["end"] + 1]
534
- if block["Type"] in units.SUPPORTED_UNIT_TYPES:
535
- # Deal with initial conditions block
536
- if block["Type"] == "INITIAL CONDITIONS":
537
- self.initial_conditions = units.IIC(unit_data, n=self._label_len)
538
- continue
539
-
540
- if block["Type"] == "COMMENT":
541
- self._all_units.append(units.COMMENT(unit_data, n=self._label_len))
542
- continue
543
-
544
- if block["Type"] == "VARIABLES":
545
- self.variables = units.Variables(unit_data)
546
- continue
547
-
548
- # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
549
- if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
550
- unit_name = unit_data[2][: self._label_len].strip()
551
- else:
552
- unit_name = unit_data[1][: self._label_len].strip()
553
-
554
- # Create instance of unit and add to relevant group
555
- unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"])
556
- if unit_name in unit_group:
557
- raise Exception(
558
- f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]}'
559
- )
560
- else:
561
- # Changes done to account for unit types with spaces/dashes eg Flat-V Weir
562
- unit_type = block["Type"].replace(" ", "_").replace("-", "_")
563
- unit_group[unit_name] = eval(
564
- f"units.{unit_type}({unit_data}, {self._label_len})" # append to our _all._units as well???
565
- )
566
- self._all_units.append(unit_group[unit_name])
567
-
568
- elif block["Type"] in units.UNSUPPORTED_UNIT_TYPES:
569
- # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
570
- if units.UNSUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
571
- unit_name = unit_data[2][: self._label_len].strip()
572
- subtype = True
573
- else:
574
- unit_name = unit_data[1][: self._label_len].strip()
575
- subtype = False
576
-
577
- self._unsupported[f"{unit_name} ({block['Type']})"] = units.UNSUPPORTED(
578
- unit_data,
579
- self._label_len,
580
- unit_name=unit_name,
581
- unit_type=block["Type"],
582
- subtype=subtype,
583
- )
584
- self._all_units.append(self._unsupported[f"{unit_name} ({block['Type']})"])
585
-
586
- elif block["Type"] not in ("GENERAL", "GISINFO"):
587
- raise Exception(f"Unexpected unit type encountered: {block['Type']}")
588
-
589
- def _update_dat_struct(self): # noqa: C901
590
- """Internal method used to update self._dat_struct which details the overall structure of the dat file as a list of blocks, each of which
591
- are a dictionary containing the 'start', 'end' and 'type' of the block.
592
-
593
- """
594
- # Generate DAT structure
595
- dat_struct = []
596
- in_block = False
597
- in_general = True
598
- in_comment = False
599
- comment_n = None # Used as counter for number of lines in a comment block
600
- gisinfo_block = False
601
- general_block = {"start": 0, "Type": "GENERAL"}
602
- unit_block = {}
603
- for idx, line in enumerate(self._raw_data):
604
- # Deal with 'general' header
605
- if in_general is True:
606
- if line == "END GENERAL":
607
- general_block["end"] = idx
608
- dat_struct.append(general_block)
609
- in_general = False
610
- continue
611
- else:
612
- continue
613
-
614
- # Deal with comment blocks explicitly as they could contain unit keywords
615
- if in_comment and comment_n is None:
616
- comment_n = int(line.strip())
617
- continue
618
- elif in_comment:
619
- comment_n -= 1
620
- if comment_n == 0:
621
- unit_block["end"] = idx # add ending index
622
- # append existing bdy block to the dat_struct
623
- dat_struct.append(unit_block)
624
- unit_block = {} # reset bdy block
625
- in_comment = False
626
- in_block = False
627
- comment_n = None
628
- continue
629
- else:
630
- continue # move onto next line as still in comment block
631
-
632
- if line == "COMMENT":
633
- in_comment = True
634
- unit_block, in_block = self._close_struct_block(
635
- dat_struct, "COMMENT", unit_block, in_block, idx
636
- )
637
- continue
638
-
639
- if line == "GISINFO":
640
- gisinfo_block = True
641
- unit_block, in_block = self._close_struct_block(
642
- dat_struct, "GISINFO", unit_block, in_block, idx
643
- )
644
-
645
- if not gisinfo_block:
646
- if line.split(" ")[0] in units.ALL_UNIT_TYPES:
647
- # The " " is needed here in case of empty string
648
- unit_type = line.split()[0]
649
- elif " ".join(line.split()[:2]) in units.ALL_UNIT_TYPES:
650
- unit_type = " ".join(line.split()[:2])
651
- else:
652
- continue
653
-
654
- unit_block, in_block = self._close_struct_block(
655
- dat_struct, unit_type, unit_block, in_block, idx
656
- )
657
-
658
- if len(unit_block) != 0:
659
- # Only adds end block if there is a block present (i.e. an empty DAT stays empty)
660
- # add ending index for final block
661
- unit_block["end"] = len(self._raw_data) - 1
662
- dat_struct.append(unit_block) # add final block
663
-
664
- self._dat_struct = dat_struct
665
-
666
- def _close_struct_block(self, dat_struct, unit_type, unit_block, in_block, idx):
667
- """Helper method to close block in dat struct"""
668
- if in_block is True:
669
- unit_block["end"] = idx - 1 # add ending index
670
- # append existing bdy block to the dat_struct
671
- dat_struct.append(unit_block)
672
- unit_block = {} # reset bdy block
673
- in_block = True
674
- unit_block["Type"] = unit_type # start new bdy block
675
- unit_block["start"] = idx # add starting index
676
-
677
- return unit_block, in_block
678
-
679
- def remove_unit(self, unit):
680
- """Remove a unit from the dat file.
681
-
682
- Args:
683
- unit (Unit): flood modeller unit input.
684
-
685
- Raises:
686
- TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
687
- """
688
-
689
- try:
690
- # catch if not valid unit
691
- if not isinstance(unit, Unit):
692
- raise TypeError("unit isn't a unit")
693
-
694
- # remove from all units
695
- index = self._all_units.index(unit)
696
- del self._all_units[index]
697
- # remove from dat_struct
698
- dat_struct_unit = self._dat_struct[index + 1]
699
- del self._dat_struct[index + 1]
700
- # remove from raw data
701
- del self._raw_data[dat_struct_unit["start"] : dat_struct_unit["end"] + 1]
702
- # remove from unit group
703
- unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
704
- unit_group = getattr(self, unit_group_name)
705
- del unit_group[unit.name]
706
- # remove from ICs
707
- self.initial_conditions.data = self.initial_conditions.data.loc[
708
- self.initial_conditions.data["label"] != unit.name
709
- ]
710
-
711
- self._update_dat_struct()
712
- self.general_parameters["Node Count"] -= 1
713
-
714
- except Exception as e:
715
- self._handle_exception(e, when="remove unit")
716
-
717
- def insert_unit(self, unit, add_before=None, add_after=None, add_at=None): # noqa: C901
718
- """Inserts a unit into the dat file.
719
-
720
- Args:
721
- unit (Unit): FloodModeller unit input.
722
- add_before (Unit): FloodModeller unit to add before.
723
- add_after (Unit): FloodModeller unit to add after.
724
- add_at (interger): Positional argument (starting at 0) of where to add in
725
- the dat file. To add at the end of the network you can use -1.
726
-
727
- Raises:
728
- SyntaxError: Raised if no positional argument is given.
729
- TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
730
- NameError: Raised if unit name already appears in unit group.
731
- """
732
- try:
733
- # catch errors
734
- if all(arg is None for arg in (add_before, add_after, add_at)):
735
- raise SyntaxError(
736
- "No possitional argument given. Please provide either add_before, add_at or add_after"
737
- )
738
- if not isinstance(unit, Unit):
739
- raise TypeError("unit isn't a unit")
740
- if add_at is None and not (isinstance(add_before, Unit) or isinstance(add_after, Unit)):
741
- raise TypeError(
742
- "add_before or add_after argument must be a Flood Modeller Unit type"
743
- )
744
-
745
- unit_class = unit._unit
746
- if unit_class != "COMMENT":
747
- _validate_unit(unit)
748
- unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"] # get rid
749
- unit_group = getattr(self, unit_group_name)
750
- # unit_class = unit._unit
751
- if unit.name in unit_group:
752
- raise NameError(
753
- "Name already appears in unit group. Cannot have two units with same name in same group"
754
- )
755
-
756
- # positional argument
757
- if add_at is not None:
758
- insert_index = add_at
759
- if insert_index < 0:
760
- insert_index += len(self._all_units) + 1
761
- if insert_index < 0:
762
- raise Exception(f"invalid add_at index: {add_at}")
763
- else:
764
- check_unit = add_before or add_after
765
- for index, thing in enumerate(self._all_units):
766
- if thing == check_unit:
767
- insert_index = index
768
- insert_index += 1 if add_after else 0
769
- break
770
- else:
771
- raise Exception(
772
- f"{check_unit} not found in dat network, so cannot be used to add before/after"
773
- )
774
-
775
- unit_data = unit._write()
776
- self._all_units.insert(insert_index, unit)
777
- if unit._unit != "COMMENT":
778
- unit_group[unit.name] = unit
779
- self._dat_struct.insert(
780
- insert_index + 1, {"Type": unit_class, "new_insert": unit_data}
781
- ) # add to dat struct without unit.name
782
-
783
- if unit._unit != "COMMENT":
784
- # update the iic's tables
785
- iic_data = [unit.name, "y", 00.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
786
- self.initial_conditions.data.loc[
787
- len(self.initial_conditions.data)
788
- ] = iic_data # flaged
789
-
790
- # update all
791
- if unit._unit != "COMMENT":
792
- self.general_parameters["Node Count"] += 1 # flag no update for comments
793
- self._update_raw_data()
794
- self._update_dat_struct()
795
-
796
- except Exception as e:
797
- self._handle_exception(e, when="insert unit")
798
-
799
- def _update_gisinfo_label(self, unit_type, unit_subtype, prev_lbl, new_lbl, ignore_second):
800
- """Update labels in GISINFO block if unit is renamed"""
801
-
802
- start, end = next(
803
- (block["start"], block["end"])
804
- for block in self._dat_struct
805
- if block["Type"] == "GISINFO"
806
- )
807
- gisinfo_block = self._raw_data[start : end + 1]
808
-
809
- prefix = unit_type if unit_subtype is None else f"{unit_type} {unit_subtype}"
810
-
811
- new_gisinfo_block = []
812
- for line in gisinfo_block:
813
- # Replace first label
814
- if line.startswith(f"{prefix} {prev_lbl} "):
815
- # found matching line (space at the end is important to ignore node
816
- # lables with similar starting chars)
817
- line = line.replace(f"{prefix} {prev_lbl} ", f"{prefix} {new_lbl} ")
818
-
819
- # Replace second label
820
- if not ignore_second:
821
- if line.startswith(f"{prev_lbl} "): # space at the end important again
822
- line = line.replace(f"{prev_lbl} ", f"{new_lbl} ", 1)
823
-
824
- new_gisinfo_block.append(line)
825
-
826
- self._raw_data[start : end + 1] = new_gisinfo_block
827
-
828
- def _update_gxy_label(self, unit_type, unit_subtype, prev_lbl, new_lbl):
829
- """Update labels in GXY file if unit is renamed"""
830
-
831
- if self._gxy_data is not None:
832
- if unit_subtype is None:
833
- unit_subtype = ""
834
-
835
- old = f"{unit_type}_{unit_subtype}_{prev_lbl}"
836
- new = f"{unit_type}_{unit_subtype}_{new_lbl}"
837
-
838
- self._gxy_data = self._gxy_data.replace(old, new)
1
+ """
2
+ Flood Modeller Python API
3
+ Copyright (C) 2024 Jacobs U.K. Limited
4
+
5
+ This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
+ as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
7
+
8
+ This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
9
+ of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
10
+
11
+ You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
12
+
13
+ If you have any query about this program or this License, please contact us at support@floodmodeller.com or write to the following
14
+ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London, SE1 2QG, United Kingdom.
15
+ """
16
+
17
+ from __future__ import annotations
18
+
19
+ from pathlib import Path
20
+ from typing import Any
21
+
22
+ from . import units
23
+ from ._base import FMFile
24
+ from .units._base import Unit
25
+ from .units.helpers import _to_float, _to_int
26
+ from .validation.validation import _validate_unit
27
+
28
+
29
+ class DAT(FMFile):
30
+ """Reads and write Flood Modeller datafile format '.dat'
31
+
32
+ Args:
33
+ dat_filepath (str, optional): Full filepath to dat file. If not specified, a new DAT class will be created. Defaults to None.
34
+
35
+ Output:
36
+ Initiates 'DAT' class object
37
+
38
+ Raises:
39
+ TypeError: Raised if dat_filepath does not point to a .dat file
40
+ FileNotFoundError: Raised if dat_filepath points to a file which does not exist
41
+ """
42
+
43
+ _filetype: str = "DAT"
44
+ _suffix: str = ".dat"
45
+
46
+ def __init__(
47
+ self,
48
+ dat_filepath: str | Path | None = None,
49
+ with_gxy: bool = False,
50
+ from_json: bool = False,
51
+ ) -> None:
52
+ try:
53
+ if from_json:
54
+ return
55
+ if dat_filepath is not None:
56
+ FMFile.__init__(self, dat_filepath)
57
+ self._read()
58
+
59
+ else:
60
+ self._create_from_blank(with_gxy)
61
+
62
+ self._get_general_parameters()
63
+ self._get_unit_definitions()
64
+ except Exception as e:
65
+ self._handle_exception(e, when="read")
66
+
67
+ def update(self) -> None:
68
+ """Updates the existing DAT based on any altered attributes"""
69
+ self._update()
70
+ self._write_gxy(self._gxy_filepath)
71
+
72
+ def save(self, filepath: str | Path) -> None:
73
+ """Saves the DAT to the given location, if pointing to an existing file it will be overwritten.
74
+ Once saved, the DAT() class will continue working from the saved location, therefore any further calls to DAT.update() will
75
+ update in the latest saved location rather than the original source DAT used to construct the class
76
+
77
+ Args:
78
+ filepath (str): Filepath to new save location including the name and '.dat' extension
79
+
80
+ Raises:
81
+ TypeError: Raised if given filepath doesn't point to a file suffixed '.dat'
82
+ """
83
+ filepath = Path(filepath).absolute()
84
+ self._save(filepath)
85
+ self._write_gxy(filepath)
86
+
87
+ def _write_gxy(self, filepath):
88
+ if self._gxy_data is not None:
89
+ gxy_string = self._gxy_data
90
+ new_gxy_path = filepath.with_suffix(".gxy")
91
+ with open(new_gxy_path, "w") as gxy_file:
92
+ gxy_file.write(gxy_string)
93
+ self._gxy_filepath = new_gxy_path
94
+
95
+ def diff(self, other: DAT, force_print: bool = False) -> None:
96
+ """Compares the DAT class against another DAT class to check whether they are
97
+ equivalent, or if not, what the differences are. Two instances of a DAT class are
98
+ deemed equivalent if all of their attributes are equal except for the filepath and
99
+ raw data. For example, two DAT files from different filepaths that had the same
100
+ data except maybe some differences in decimal places and some default parameters
101
+ ommitted, would be classed as equivalent as they would produce the same DAT instance
102
+ and write the exact same data.
103
+
104
+ The result is printed to the console. If you need to access the returned data, use
105
+ the method ``DAT._get_diff()``
106
+
107
+ Args:
108
+ other (floodmodeller_api.DAT): Other instance of a DAT class
109
+ force_print (bool): Forces the API to print every difference found, rather than
110
+ just the first 25 differences. Defaults to False.
111
+ """
112
+ self._diff(other, force_print=force_print)
113
+
114
+ # def _get_unit_from_connectivity(self, method) #use this as method prev and next
115
+
116
+ def next(self, unit: Unit) -> Unit | list[Unit] | None:
117
+ """Finds next unit in the reach.
118
+
119
+ Next unit in reach can be infered by:
120
+ The next unit in the .dat file structure - such as when a river section has a positive distance to next
121
+ The units with the exact same name - such as a junction unit
122
+ The next unit as described in the ds_label - such as with Bridge units
123
+
124
+ Args:
125
+ unit (Unit): flood modeller unit input.
126
+
127
+ Returns:
128
+ Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
129
+ """
130
+ # Needs to handle same name match outside dist to next (e.g. inflow)
131
+ try:
132
+ if hasattr(unit, "dist_to_next"):
133
+ # Case 1a - positive distance to next
134
+ if unit.dist_to_next != 0:
135
+ return self._next_in_dat_struct(unit)
136
+
137
+ # Case 1b - distance to next = 0
138
+ return self._name_label_match(unit)
139
+
140
+ # Case 2: next unit is in ds_label
141
+ if hasattr(unit, "ds_label"):
142
+ return self._name_label_match(unit, name_override=unit.ds_label)
143
+
144
+ if unit._unit == "JUNCTION":
145
+ return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
146
+
147
+ if unit._unit in ("QHBDY", "NCDBDY", "TIDBDY"):
148
+ return None
149
+
150
+ return self._name_label_match(unit)
151
+
152
+ except Exception as e:
153
+ self._handle_exception(e, when="calculating next unit")
154
+
155
+ def prev(self, unit: Unit) -> Unit | list[Unit] | None:
156
+ """Finds previous unit in the reach.
157
+
158
+ Previous unit in reach can be infered by:
159
+ The previous unit in the .dat file structure - such as when the previous river section has a positive distance to next.
160
+ The units with the exact same name - such as a junction unit
161
+ The previous unit as linked through upstream and downstream labels - such as with Bridge units
162
+
163
+ Args:
164
+ unit (Unit): flood modeller unit input.
165
+
166
+ Returns:
167
+ Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
168
+ """
169
+
170
+ try:
171
+ # Case 1: Unit is input boundary condition
172
+ if unit._unit in (
173
+ "QTBDY",
174
+ "HTBDY",
175
+ "REFHBDY",
176
+ "FEHBDY",
177
+ "FRQSIM",
178
+ "FSRBDY",
179
+ "FSSR16BDY",
180
+ "GERRBDY",
181
+ "REBDY",
182
+ "REFH2BDY",
183
+ "SCSBDY",
184
+ ):
185
+ return None
186
+
187
+ if unit._unit == "JUNCTION":
188
+ return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
189
+
190
+ prev_units = []
191
+ _prev_in_dat = self._prev_in_dat_struct(unit)
192
+ _name_match = self._name_label_match(unit)
193
+ _ds_label_match = self._ds_label_match(unit)
194
+ _junction_match = [
195
+ junction
196
+ for junction in self._all_units
197
+ if junction._unit == "JUNCTION" and unit.name in junction.labels
198
+ ]
199
+
200
+ # Case 2: Previous unit has positive distance to next
201
+ if (
202
+ _prev_in_dat
203
+ and hasattr(_prev_in_dat, "dist_to_next")
204
+ and _prev_in_dat.dist_to_next != 0
205
+ ):
206
+ prev_units.append(_prev_in_dat)
207
+ _name_match = None # Name match does apply if upstream section exists
208
+
209
+ # All other matches added (matching name, matching name to ds_label and junciton)
210
+ for match in [_name_match, _ds_label_match, _junction_match]:
211
+ if isinstance(match, list):
212
+ prev_units.extend(match)
213
+ elif match:
214
+ prev_units.append(match)
215
+
216
+ if len(prev_units) == 0:
217
+ return None
218
+ if len(prev_units) == 1:
219
+ return prev_units[0]
220
+ return prev_units
221
+
222
+ except Exception as e:
223
+ self._handle_exception(e, when="calculating next unit")
224
+
225
+ def _next_in_dat_struct(self, current_unit: Unit) -> Unit | None:
226
+ """Finds next unit in the dat file using the index position.
227
+
228
+ Returns:
229
+ Unit with all associated data
230
+ """
231
+
232
+ for idx, unit in enumerate(self._all_units):
233
+ # Names checked first to speed up comparison
234
+ if unit.name == current_unit.name and unit == current_unit:
235
+ try:
236
+ return self._all_units[idx + 1]
237
+ except IndexError:
238
+ return None
239
+
240
+ return None
241
+
242
+ def _prev_in_dat_struct(self, current_unit: Unit) -> Unit | None:
243
+ """Finds previous unit in the dat file using the index position.
244
+
245
+ Returns:
246
+ Unit with all associated data
247
+ """
248
+ for idx, unit in enumerate(self._all_units):
249
+ # Names checked first to speed up comparison
250
+ if unit.name == current_unit.name and unit == current_unit:
251
+ if idx == 0:
252
+ return None
253
+ return self._all_units[idx - 1]
254
+
255
+ return None
256
+
257
+ def _ds_label_match(self, current_unit: Unit) -> Unit | list[Unit] | None:
258
+ """Pulls out all units with ds label that matches the input unit.
259
+
260
+ Returns:
261
+ Union[Unit, list[Unit], None]: Either a singular unit or list of units with ds_label matching, if none exist returns none.
262
+ """
263
+
264
+ _ds_list = []
265
+ for item in self._all_units:
266
+ try:
267
+ if item.ds_label == current_unit.name:
268
+ _ds_list.append(item)
269
+ except AttributeError:
270
+ continue
271
+
272
+ if len(_ds_list) == 0:
273
+ return None
274
+ if len(_ds_list) == 1:
275
+ return _ds_list[0]
276
+ return _ds_list
277
+
278
+ def _name_label_match(
279
+ self,
280
+ current_unit: Unit,
281
+ name_override: str | None = None,
282
+ ) -> Unit | list[Unit] | None:
283
+ """Pulls out all units with same name as the input unit.
284
+
285
+ Returns:
286
+ Union[Unit, list[Unit], None]: Either a singular unit or list of units with matching names, if none exist returns none. Does not return itself
287
+ """
288
+
289
+ _name = name_override or str(current_unit.name)
290
+ _name_list = []
291
+ for item in self._all_units:
292
+ if item.name == _name and item != current_unit:
293
+ _name_list.append(item)
294
+ else:
295
+ pass
296
+
297
+ if len(_name_list) == 0:
298
+ return None
299
+ if len(_name_list) == 1:
300
+ return _name_list[0]
301
+ return _name_list
302
+
303
+ def _read(self):
304
+ # Read DAT data
305
+ with open(self._filepath) as dat_file:
306
+ self._raw_data = [line.rstrip("\n") for line in dat_file.readlines()]
307
+
308
+ # Generate DAT structure
309
+ self._update_dat_struct()
310
+
311
+ # Get network .gxy if present
312
+ gxy_path = self._filepath.with_suffix(".gxy")
313
+ if gxy_path.exists():
314
+ self._gxy_filepath = gxy_path
315
+ with open(self._gxy_filepath) as gxy_file:
316
+ self._gxy_data = gxy_file.read()
317
+ else:
318
+ self._gxy_filepath = None
319
+ self._gxy_data = None
320
+
321
+ def _write(self) -> str:
322
+ """Returns string representation of the current DAT data
323
+
324
+ Returns:
325
+ str: Full string representation of DAT in its most recent state (including changes not yet saved to disk)
326
+ """
327
+ try:
328
+ self._update_raw_data()
329
+ self._update_general_parameters()
330
+ self._update_dat_struct()
331
+ self._update_unit_names()
332
+
333
+ return "\n".join(self._raw_data) + "\n"
334
+
335
+ except Exception as e:
336
+ self._handle_exception(e, when="write")
337
+
338
+ def _create_from_blank(self, with_gxy: bool = False) -> None:
339
+ # No filepath specified, create new 'blank' DAT in memory
340
+ # ** Update these to have minimal data needed (general header, empty IC header)
341
+ self._dat_struct = [
342
+ {"start": 0, "Type": "GENERAL", "end": 6},
343
+ {"Type": "INITIAL CONDITIONS", "start": 7, "end": 8},
344
+ ]
345
+ self._raw_data = [
346
+ "",
347
+ "#REVISION#1",
348
+ " 0 0.750 0.900 0.100 0.001 12SI",
349
+ " 10.000 0.010 0.010 0.700 0.100 0.700 0.000",
350
+ "RAD FILE",
351
+ "",
352
+ "END GENERAL",
353
+ "INITIAL CONDITIONS",
354
+ " label ? flow stage froude no velocity umode ustate z",
355
+ ]
356
+
357
+ self._gxy_filepath = None
358
+ if with_gxy:
359
+ self._gxy_data = ""
360
+ else:
361
+ self._gxy_data = None
362
+
363
+ def _get_general_parameters(self) -> None:
364
+ # ** Get general parameters here
365
+ self.title = self._raw_data[0]
366
+ self.general_parameters = {}
367
+ line = f"{self._raw_data[2]:<70}"
368
+ params = units.helpers.split_10_char(line)
369
+ if params[6] == "":
370
+ # Adds the measurements unit as DEFAULT if not specified
371
+ params[6] = "DEFAULT"
372
+ line = f"{self._raw_data[3]:<70}"
373
+ params.extend(units.helpers.split_10_char(line))
374
+
375
+ self.general_parameters["Node Count"] = _to_int(params[0], 0)
376
+ self.general_parameters["Lower Froude"] = _to_float(params[1], 0.75)
377
+ self.general_parameters["Upper Froude"] = _to_float(params[2], 0.9)
378
+ self.general_parameters["Min Depth"] = _to_float(params[3], 0.1)
379
+ self.general_parameters["Convergence Direct"] = _to_float(params[4], 0.001)
380
+ self._label_len = _to_int(params[5], 12) # label length
381
+ self.general_parameters["Units"] = params[6] # "DEFAULT" set during read above.
382
+ self.general_parameters["Water Temperature"] = _to_float(params[7], 10.0)
383
+ self.general_parameters["Convergence Flow"] = _to_float(params[8], 0.01)
384
+ self.general_parameters["Convergence Head"] = _to_float(params[9], 0.01)
385
+ self.general_parameters["Mathematical Damping"] = _to_float(params[10], 0.7)
386
+ self.general_parameters["Pivotal Choice"] = _to_float(params[11], 0.1)
387
+ self.general_parameters["Under-relaxation"] = _to_float(params[12], 0.7)
388
+ self.general_parameters["Matrix Dummy"] = _to_float(params[13], 0.0)
389
+ self.general_parameters["RAD File"] = self._raw_data[5] # No default, optional
390
+
391
+ def _update_general_parameters(self) -> None:
392
+ self._raw_data[0] = self.title
393
+ self._raw_data[5] = self.general_parameters["RAD File"]
394
+ general_params_1 = units.helpers.join_10_char(
395
+ self.general_parameters["Node Count"],
396
+ self.general_parameters["Lower Froude"],
397
+ self.general_parameters["Upper Froude"],
398
+ self.general_parameters["Min Depth"],
399
+ self.general_parameters["Convergence Direct"],
400
+ self._label_len,
401
+ )
402
+ general_params_1 += self.general_parameters["Units"]
403
+ self._raw_data[2] = general_params_1
404
+
405
+ general_params_2 = units.helpers.join_10_char(
406
+ self.general_parameters["Water Temperature"],
407
+ self.general_parameters["Convergence Flow"],
408
+ self.general_parameters["Convergence Head"],
409
+ self.general_parameters["Mathematical Damping"],
410
+ self.general_parameters["Pivotal Choice"],
411
+ self.general_parameters["Under-relaxation"],
412
+ self.general_parameters["Matrix Dummy"],
413
+ )
414
+ self._raw_data[3] = general_params_2
415
+
416
+ def _update_unit_names(self):
417
+ for unit_group, unit_group_name in [
418
+ (self.boundaries, "boundaries"),
419
+ (self.sections, "sections"),
420
+ (self.structures, "structures"),
421
+ (self.conduits, "conduits"),
422
+ (self.losses, "losses"),
423
+ ]:
424
+ for name, unit in unit_group.copy().items():
425
+ if name != unit.name:
426
+ # Check if new name already exists as a label
427
+ if unit.name in unit_group:
428
+ raise Exception(
429
+ f'Error: Cannot update label "{name}" to "{unit.name}" because "{unit.name}" already exists in the Network {unit_group_name} group',
430
+ )
431
+ unit_group[unit.name] = unit
432
+ del unit_group[name]
433
+ # Update label in ICs
434
+ if unit_group_name not in ["boundaries", "losses"]:
435
+ # TODO: Need to do a more thorough check for whether a unit is one in the ICs
436
+ # e.g. Culvert inlet and river section may have same label, but only river
437
+ # section label should update in ICs
438
+ self.initial_conditions.update_label(name, unit.name)
439
+
440
+ # Update label in GISINFO and GXY data
441
+ self._update_gisinfo_label(
442
+ unit._unit,
443
+ unit._subtype,
444
+ name,
445
+ unit.name,
446
+ unit_group_name
447
+ in ["boundaries", "losses"], # if True it ignores second lbl
448
+ )
449
+ self._update_gxy_label(unit._unit, unit._subtype, name, unit.name)
450
+
451
+ # Update IC table names in raw_data if any name changes
452
+ ic_start, ic_end = next(
453
+ (unit["start"], unit["end"])
454
+ for unit in self._dat_struct
455
+ if unit["Type"] == "INITIAL CONDITIONS"
456
+ )
457
+ self._raw_data[ic_start : ic_end + 1] = self.initial_conditions._write()
458
+
459
+ def _update_raw_data(self):
460
+ block_shift = 0
461
+ comment_tracker = 0
462
+ comment_units = [unit for unit in self._all_units if unit._unit == "COMMENT"]
463
+ prev_block_end = self._dat_struct[0]["end"]
464
+ existing_units = {
465
+ "boundaries": [],
466
+ "structures": [],
467
+ "sections": [],
468
+ "conduits": [],
469
+ "losses": [],
470
+ }
471
+
472
+ for block in self._dat_struct:
473
+ # Check for all supported boundary types
474
+ if block["Type"] in units.SUPPORTED_UNIT_TYPES:
475
+ # clause for when unit has been inserted into the dat file
476
+ if "new_insert" in block:
477
+ block["start"] = prev_block_end + 1
478
+ block["end"] = block["start"] + len(block["new_insert"]) - 1
479
+ self._raw_data[block["start"] : block["start"]] = block["new_insert"]
480
+ block_shift += len(block["new_insert"])
481
+ prev_block_end = block["end"]
482
+ del block["new_insert"]
483
+
484
+ else:
485
+ unit_data = self._raw_data[
486
+ block["start"] + block_shift : block["end"] + 1 + block_shift
487
+ ]
488
+ prev_block_len = len(unit_data)
489
+
490
+ if block["Type"] == "INITIAL CONDITIONS":
491
+ new_unit_data = self.initial_conditions._write()
492
+ elif block["Type"] == "COMMENT":
493
+ comment = comment_units[comment_tracker]
494
+ new_unit_data = comment._write()
495
+ comment_tracker += 1
496
+
497
+ elif block["Type"] == "VARIABLES":
498
+ new_unit_data = self.variables._write()
499
+
500
+ else:
501
+ if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
502
+ unit_name = unit_data[2][: self._label_len].strip()
503
+ else:
504
+ unit_name = unit_data[1][: self._label_len].strip()
505
+
506
+ # Get unit object
507
+ unit_group = getattr(
508
+ self,
509
+ units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"],
510
+ )
511
+ if unit_name in unit_group:
512
+ # block still exists
513
+ new_unit_data = unit_group[unit_name]._write()
514
+ existing_units[
515
+ units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
516
+ ].append(unit_name)
517
+ else:
518
+ # Bdy block has been deleted
519
+ new_unit_data = []
520
+
521
+ new_block_len = len(new_unit_data)
522
+ self._raw_data[
523
+ block["start"] + block_shift : block["end"] + 1 + block_shift
524
+ ] = new_unit_data
525
+ # adjust block shift for change in number of lines in bdy block
526
+ block_shift += new_block_len - prev_block_len
527
+ prev_block_end = (
528
+ block["end"] + block_shift
529
+ ) # add in to keep a record of the last block read in
530
+
531
+ def _get_unit_definitions(self): # noqa: C901
532
+ # Get unit definitions
533
+ self.sections = {}
534
+ self.boundaries = {}
535
+ self.structures = {}
536
+ self.conduits = {}
537
+ self.losses = {}
538
+ self._unsupported = {}
539
+ self._all_units = []
540
+ for block in self._dat_struct:
541
+ unit_data = self._raw_data[block["start"] : block["end"] + 1]
542
+ if block["Type"] in units.SUPPORTED_UNIT_TYPES:
543
+ # Deal with initial conditions block
544
+ if block["Type"] == "INITIAL CONDITIONS":
545
+ self.initial_conditions = units.IIC(unit_data, n=self._label_len)
546
+ continue
547
+
548
+ if block["Type"] == "COMMENT":
549
+ self._all_units.append(units.COMMENT(unit_data, n=self._label_len))
550
+ continue
551
+
552
+ if block["Type"] == "VARIABLES":
553
+ self.variables = units.Variables(unit_data)
554
+ continue
555
+
556
+ # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
557
+ if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
558
+ unit_name = unit_data[2][: self._label_len].strip()
559
+ else:
560
+ unit_name = unit_data[1][: self._label_len].strip()
561
+
562
+ # Create instance of unit and add to relevant group
563
+ unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"])
564
+ if unit_name in unit_group:
565
+ raise Exception(
566
+ f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]}',
567
+ )
568
+ # Changes done to account for unit types with spaces/dashes eg Flat-V Weir
569
+ unit_type = block["Type"].replace(" ", "_").replace("-", "_")
570
+ unit_group[unit_name] = eval(
571
+ f"units.{unit_type}({unit_data}, {self._label_len})", # append to our _all._units as well???
572
+ )
573
+ self._all_units.append(unit_group[unit_name])
574
+
575
+ elif block["Type"] in units.UNSUPPORTED_UNIT_TYPES:
576
+ # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
577
+ if units.UNSUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
578
+ unit_name = unit_data[2][: self._label_len].strip()
579
+ subtype = True
580
+ else:
581
+ unit_name = unit_data[1][: self._label_len].strip()
582
+ subtype = False
583
+
584
+ self._unsupported[f"{unit_name} ({block['Type']})"] = units.UNSUPPORTED(
585
+ unit_data,
586
+ self._label_len,
587
+ unit_name=unit_name,
588
+ unit_type=block["Type"],
589
+ subtype=subtype,
590
+ )
591
+ self._all_units.append(self._unsupported[f"{unit_name} ({block['Type']})"])
592
+
593
+ elif block["Type"] not in ("GENERAL", "GISINFO"):
594
+ raise Exception(f"Unexpected unit type encountered: {block['Type']}")
595
+
596
    def _update_dat_struct(self) -> None:  # noqa: C901, PLR0912
        """Internal method used to update self._dat_struct which details the overall
        structure of the dat file as a list of blocks, each of which is a dictionary
        containing the 'start', 'end' and 'Type' of the block.

        Scans ``self._raw_data`` line by line, tracking the GENERAL header,
        COMMENT blocks (whose contents must be skipped because they may contain
        unit keywords), the trailing GISINFO section, and unit keyword lines.
        """
        # Generate DAT structure
        dat_struct = []
        in_block = False  # True while a unit block is open and awaiting its end index
        in_general = True  # file starts inside the GENERAL header
        in_comment = False
        comment_n = None  # Used as counter for number of lines in a comment block
        gisinfo_block = False  # once True, remaining lines are GIS info, not units
        general_block = {"start": 0, "Type": "GENERAL"}
        unit_block: dict[str, Any] = {}
        for idx, line in enumerate(self._raw_data):
            # Deal with 'general' header
            if in_general is True:
                if line == "END GENERAL":
                    general_block["end"] = idx
                    dat_struct.append(general_block)
                    in_general = False
                continue

            # Deal with comment blocks explicitly as they could contain unit keywords
            if in_comment and comment_n is None:
                # The line directly after "COMMENT" holds the comment line count
                comment_n = int(line.strip())
                continue
            if in_comment:
                comment_n -= 1
                if comment_n <= 0:
                    # NOTE(review): if a comment declares 0 lines, comment_n is -1
                    # here and the current non-comment line is still consumed by the
                    # continue below — confirm zero-length comment blocks can't occur.
                    unit_block["end"] = idx + comment_n  # add ending index
                    # append existing bdy block to the dat_struct
                    dat_struct.append(unit_block)
                    unit_block = {}  # reset bdy block
                    in_comment = False
                    in_block = False
                    comment_n = None
                continue  # move onto next line as still in comment block

            if line == "COMMENT":
                in_comment = True
                unit_block, in_block = self._close_struct_block(
                    dat_struct,
                    "COMMENT",
                    unit_block,
                    in_block,
                    idx,
                )
                continue

            if line == "GISINFO":
                gisinfo_block = True
                unit_block, in_block = self._close_struct_block(
                    dat_struct,
                    "GISINFO",
                    unit_block,
                    in_block,
                    idx,
                )

            if not gisinfo_block:
                if line.split(" ")[0] in units.ALL_UNIT_TYPES:
                    # The " " is needed here in case of empty string
                    unit_type = line.split()[0]
                elif " ".join(line.split()[:2]) in units.ALL_UNIT_TYPES:
                    # Unit keywords made up of two words
                    unit_type = " ".join(line.split()[:2])
                else:
                    continue  # not a unit keyword line; still inside current block

                unit_block, in_block = self._close_struct_block(
                    dat_struct,
                    unit_type,
                    unit_block,
                    in_block,
                    idx,
                )

        if len(unit_block) != 0:
            # Only adds end block if there is a block present (i.e. an empty DAT stays empty)
            # add ending index for final block
            unit_block["end"] = len(self._raw_data) - 1
            dat_struct.append(unit_block)  # add final block

        self._dat_struct = dat_struct
+ def _close_struct_block( # noqa: PLR0913
682
+ self,
683
+ dat_struct: list[dict],
684
+ unit_type: str,
685
+ unit_block: dict,
686
+ in_block: bool,
687
+ idx: int,
688
+ ) -> tuple[dict, bool]:
689
+ """Helper method to close block in dat struct"""
690
+ if in_block is True:
691
+ unit_block["end"] = idx - 1 # add ending index
692
+ # append existing bdy block to the dat_struct
693
+ dat_struct.append(unit_block)
694
+ unit_block = {} # reset bdy block
695
+ in_block = True
696
+ unit_block["Type"] = unit_type # start new bdy block
697
+ unit_block["start"] = idx # add starting index
698
+
699
+ return unit_block, in_block
700
+
701
+ def remove_unit(self, unit: Unit) -> None:
702
+ """Remove a unit from the dat file.
703
+
704
+ Args:
705
+ unit (Unit): flood modeller unit input.
706
+
707
+ Raises:
708
+ TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
709
+ """
710
+
711
+ try:
712
+ # catch if not valid unit
713
+ if not isinstance(unit, Unit):
714
+ raise TypeError("unit isn't a unit")
715
+
716
+ # remove from all units
717
+ index = self._all_units.index(unit)
718
+ del self._all_units[index]
719
+ # remove from dat_struct
720
+ dat_struct_unit = self._dat_struct[index + 1]
721
+ del self._dat_struct[index + 1]
722
+ # remove from raw data
723
+ del self._raw_data[dat_struct_unit["start"] : dat_struct_unit["end"] + 1]
724
+ # remove from unit group
725
+ unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
726
+ unit_group = getattr(self, unit_group_name)
727
+ del unit_group[unit.name]
728
+ # remove from ICs
729
+ self.initial_conditions.data = self.initial_conditions.data.loc[
730
+ self.initial_conditions.data["label"] != unit.name
731
+ ]
732
+
733
+ self._update_dat_struct()
734
+ self.general_parameters["Node Count"] -= 1
735
+
736
+ except Exception as e:
737
+ self._handle_exception(e, when="remove unit")
738
+
739
    def insert_unit(  # noqa: C901, PLR0912, PLR0913
        self,
        unit: Unit,
        add_before: Unit | None = None,
        add_after: Unit | None = None,
        add_at: int | None = None,
        defer_update: bool = False,
    ) -> None:
        """Inserts a unit into the dat file.

        Args:
            unit (Unit): FloodModeller unit input.
            add_before (Unit): FloodModeller unit to add before.
            add_after (Unit): FloodModeller unit to add after.
            add_at (integer): Positional argument (starting at 0) of where to add in
                the dat file. To add at the end of the network you can use -1.
            defer_update (bool): If True, skip rebuilding the raw data and dat
                struct (used by ``insert_units`` to batch insertions).

        Raises:
            SyntaxError: Raised if no positional argument is given.
            TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
            NameError: Raised if unit name already appears in unit group.
        """
        try:
            # Exactly one of add_before / add_after / add_at must be provided
            provided_params = sum(arg is not None for arg in (add_before, add_after, add_at))
            if provided_params == 0:
                raise SyntaxError(
                    "No positional argument given. Please provide either add_before, add_at or add_after",
                )
            if provided_params > 1:
                raise SyntaxError("Only one of add_at, add_before, or add_after required")
            if not isinstance(unit, Unit):
                raise TypeError("unit isn't a unit")
            if add_at is None and not (isinstance(add_before, Unit) or isinstance(add_after, Unit)):
                raise TypeError(
                    "add_before or add_after argument must be a Flood Modeller Unit type",
                )

            unit_class = unit._unit
            if unit_class != "COMMENT":
                # Comments have no name/group, so validation and duplicate-name
                # checks only apply to real units
                _validate_unit(unit)
                unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
                unit_group = getattr(self, unit_group_name)
                if unit.name in unit_group:
                    raise NameError(
                        "Name already appears in unit group. Cannot have two units with same name in same group",
                    )

            # Resolve the insertion index in self._all_units
            if add_at is not None:
                insert_index = add_at
                if insert_index < 0:
                    # Negative indices count from the end (-1 == append)
                    insert_index += len(self._all_units) + 1
                    if insert_index < 0:
                        raise Exception(f"invalid add_at index: {add_at}")
            else:
                check_unit = add_before or add_after
                for index, thing in enumerate(self._all_units):
                    if thing == check_unit:
                        insert_index = index
                        insert_index += 1 if add_after else 0
                        break
                else:
                    raise Exception(
                        f"{check_unit} not found in dat network, so cannot be used to add before/after",
                    )

            unit_data = unit._write()
            self._all_units.insert(insert_index, unit)
            if unit._unit != "COMMENT":
                unit_group[unit.name] = unit
            self._dat_struct.insert(
                insert_index + 1,
                {"Type": unit_class, "new_insert": unit_data},
            )  # add to dat struct without unit.name

            if unit._unit != "COMMENT":
                # Append a default row for the new unit to the initial conditions table
                iic_data = [unit.name, "y", 00.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
                self.initial_conditions.data.loc[len(self.initial_conditions.data)] = (
                    iic_data  # new row appended at index == current length
                )

            # Comments do not count towards the node count
            if unit._unit != "COMMENT":
                self.general_parameters["Node Count"] += 1

            if not defer_update:
                self._update_raw_data()
                self._update_dat_struct()

        except Exception as e:
            self._handle_exception(e, when="insert unit")
+ def insert_units(
834
+ self,
835
+ units: list[Unit],
836
+ add_before: Unit | None = None,
837
+ add_after: Unit | None = None,
838
+ add_at: int | None = None,
839
+ ) -> None:
840
+ """Inserts a list of units into the dat file.
841
+
842
+ Args:
843
+ units (list[Unit]): List of FloodModeller units.
844
+ add_before (Unit): FloodModeller unit to add before.
845
+ add_after (Unit): FloodModeller unit to add after.
846
+ add_at (integer): Positional argument (starting at 0) of where to add in
847
+ the dat file. To add at the end of the network you can use -1.
848
+ """
849
+ ordered = (add_at is None and add_after is None) or (isinstance(add_at, int) and add_at < 0)
850
+ ordered_units = units if ordered else units[::-1]
851
+ for unit in ordered_units:
852
+ self.insert_unit(unit, add_before, add_after, add_at, defer_update=True)
853
+ self._update_raw_data()
854
+ self._update_dat_struct()
855
+
856
+ def _update_gisinfo_label( # noqa: PLR0913
857
+ self,
858
+ unit_type,
859
+ unit_subtype,
860
+ prev_lbl,
861
+ new_lbl,
862
+ ignore_second,
863
+ ):
864
+ """Update labels in GISINFO block if unit is renamed"""
865
+
866
+ start, end = next(
867
+ (block["start"], block["end"])
868
+ for block in self._dat_struct
869
+ if block["Type"] == "GISINFO"
870
+ )
871
+ gisinfo_block = self._raw_data[start : end + 1]
872
+
873
+ prefix = unit_type if unit_subtype is None else f"{unit_type} {unit_subtype}"
874
+
875
+ new_gisinfo_block = []
876
+ for line in gisinfo_block:
877
+ # Replace first label
878
+ if line.startswith(f"{prefix} {prev_lbl} "):
879
+ # found matching line (space at the end is important to ignore node
880
+ # lables with similar starting chars)
881
+ line = line.replace(f"{prefix} {prev_lbl} ", f"{prefix} {new_lbl} ")
882
+
883
+ # Replace second label
884
+ if not ignore_second and line.startswith(
885
+ f"{prev_lbl} ",
886
+ ): # space at the end important again
887
+ line = line.replace(f"{prev_lbl} ", f"{new_lbl} ", 1)
888
+
889
+ new_gisinfo_block.append(line)
890
+
891
+ self._raw_data[start : end + 1] = new_gisinfo_block
892
+
893
+ def _update_gxy_label(
894
+ self,
895
+ unit_type: str,
896
+ unit_subtype: str,
897
+ prev_lbl: str,
898
+ new_lbl: str,
899
+ ) -> None:
900
+ """Update labels in GXY file if unit is renamed"""
901
+
902
+ if self._gxy_data is not None:
903
+ if unit_subtype is None:
904
+ unit_subtype = ""
905
+
906
+ old = f"{unit_type}_{unit_subtype}_{prev_lbl}"
907
+ new = f"{unit_type}_{unit_subtype}_{new_lbl}"
908
+
909
+ self._gxy_data = self._gxy_data.replace(old, new)