floodmodeller-api 0.4.2.post1__py3-none-any.whl → 0.4.4__py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
Files changed (178)
  1. floodmodeller_api/__init__.py +8 -9
  2. floodmodeller_api/_base.py +169 -176
  3. floodmodeller_api/backup.py +273 -273
  4. floodmodeller_api/dat.py +889 -831
  5. floodmodeller_api/diff.py +136 -119
  6. floodmodeller_api/ied.py +302 -306
  7. floodmodeller_api/ief.py +553 -637
  8. floodmodeller_api/ief_flags.py +253 -253
  9. floodmodeller_api/inp.py +260 -266
  10. floodmodeller_api/libs/libifcoremd.dll +0 -0
  11. floodmodeller_api/libs/libifcoremt.so.5 +0 -0
  12. floodmodeller_api/libs/libifport.so.5 +0 -0
  13. floodmodeller_api/{libmmd.dll → libs/libimf.so} +0 -0
  14. floodmodeller_api/libs/libintlc.so.5 +0 -0
  15. floodmodeller_api/libs/libmmd.dll +0 -0
  16. floodmodeller_api/libs/libsvml.so +0 -0
  17. floodmodeller_api/libs/libzzn_read.so +0 -0
  18. floodmodeller_api/libs/zzn_read.dll +0 -0
  19. floodmodeller_api/logs/__init__.py +2 -2
  20. floodmodeller_api/logs/lf.py +364 -312
  21. floodmodeller_api/logs/lf_helpers.py +354 -352
  22. floodmodeller_api/logs/lf_params.py +643 -529
  23. floodmodeller_api/mapping.py +84 -0
  24. floodmodeller_api/test/__init__.py +4 -4
  25. floodmodeller_api/test/conftest.py +16 -8
  26. floodmodeller_api/test/test_backup.py +117 -117
  27. floodmodeller_api/test/test_conveyance.py +107 -0
  28. floodmodeller_api/test/test_dat.py +222 -92
  29. floodmodeller_api/test/test_data/All Units 4_6.DAT +1081 -1081
  30. floodmodeller_api/test/test_data/All Units 4_6.feb +1081 -1081
  31. floodmodeller_api/test/test_data/BRIDGE.DAT +926 -926
  32. floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.dat +36 -36
  33. floodmodeller_api/test/test_data/Culvert_Inlet_Outlet.feb +36 -36
  34. floodmodeller_api/test/test_data/DamBreakADI.xml +52 -52
  35. floodmodeller_api/test/test_data/DamBreakFAST.xml +58 -58
  36. floodmodeller_api/test/test_data/DamBreakFAST_dy.xml +53 -53
  37. floodmodeller_api/test/test_data/DamBreakTVD.xml +55 -55
  38. floodmodeller_api/test/test_data/DefenceBreach.xml +53 -53
  39. floodmodeller_api/test/test_data/DefenceBreachFAST.xml +60 -60
  40. floodmodeller_api/test/test_data/DefenceBreachFAST_dy.xml +55 -55
  41. floodmodeller_api/test/test_data/Domain1+2_QH.xml +76 -76
  42. floodmodeller_api/test/test_data/Domain1_H.xml +41 -41
  43. floodmodeller_api/test/test_data/Domain1_Q.xml +41 -41
  44. floodmodeller_api/test/test_data/Domain1_Q_FAST.xml +48 -48
  45. floodmodeller_api/test/test_data/Domain1_Q_FAST_dy.xml +48 -48
  46. floodmodeller_api/test/test_data/Domain1_Q_xml_expected.json +263 -0
  47. floodmodeller_api/test/test_data/Domain1_W.xml +41 -41
  48. floodmodeller_api/test/test_data/EX1.DAT +321 -321
  49. floodmodeller_api/test/test_data/EX1.ext +107 -107
  50. floodmodeller_api/test/test_data/EX1.feb +320 -320
  51. floodmodeller_api/test/test_data/EX1.gxy +107 -107
  52. floodmodeller_api/test/test_data/EX17.DAT +421 -422
  53. floodmodeller_api/test/test_data/EX17.ext +213 -213
  54. floodmodeller_api/test/test_data/EX17.feb +422 -422
  55. floodmodeller_api/test/test_data/EX18.DAT +375 -375
  56. floodmodeller_api/test/test_data/EX18_DAT_expected.json +3876 -0
  57. floodmodeller_api/test/test_data/EX2.DAT +302 -302
  58. floodmodeller_api/test/test_data/EX3.DAT +926 -926
  59. floodmodeller_api/test/test_data/EX3_DAT_expected.json +16235 -0
  60. floodmodeller_api/test/test_data/EX3_IEF_expected.json +61 -0
  61. floodmodeller_api/test/test_data/EX6.DAT +2084 -2084
  62. floodmodeller_api/test/test_data/EX6.ext +532 -532
  63. floodmodeller_api/test/test_data/EX6.feb +2084 -2084
  64. floodmodeller_api/test/test_data/EX6_DAT_expected.json +31647 -0
  65. floodmodeller_api/test/test_data/Event Data Example.DAT +336 -336
  66. floodmodeller_api/test/test_data/Event Data Example.ext +107 -107
  67. floodmodeller_api/test/test_data/Event Data Example.feb +336 -336
  68. floodmodeller_api/test/test_data/Linked1D2D.xml +52 -52
  69. floodmodeller_api/test/test_data/Linked1D2DFAST.xml +53 -53
  70. floodmodeller_api/test/test_data/Linked1D2DFAST_dy.xml +48 -48
  71. floodmodeller_api/test/test_data/Linked1D2D_xml_expected.json +313 -0
  72. floodmodeller_api/test/test_data/blockage.dat +50 -50
  73. floodmodeller_api/test/test_data/blockage.ext +45 -45
  74. floodmodeller_api/test/test_data/blockage.feb +9 -9
  75. floodmodeller_api/test/test_data/blockage.gxy +71 -71
  76. floodmodeller_api/test/test_data/conveyance_test.dat +165 -0
  77. floodmodeller_api/test/test_data/conveyance_test.feb +116 -0
  78. floodmodeller_api/test/test_data/conveyance_test.gxy +85 -0
  79. floodmodeller_api/test/test_data/defaultUnits.dat +127 -127
  80. floodmodeller_api/test/test_data/defaultUnits.ext +45 -45
  81. floodmodeller_api/test/test_data/defaultUnits.feb +9 -9
  82. floodmodeller_api/test/test_data/defaultUnits.fmpx +58 -58
  83. floodmodeller_api/test/test_data/defaultUnits.gxy +85 -85
  84. floodmodeller_api/test/test_data/ex3.ief +20 -20
  85. floodmodeller_api/test/test_data/ex3.lf1 +2800 -2800
  86. floodmodeller_api/test/test_data/ex4.DAT +1374 -1374
  87. floodmodeller_api/test/test_data/ex4_changed.DAT +1374 -1374
  88. floodmodeller_api/test/test_data/example1.inp +329 -329
  89. floodmodeller_api/test/test_data/example2.inp +158 -158
  90. floodmodeller_api/test/test_data/example3.inp +297 -297
  91. floodmodeller_api/test/test_data/example4.inp +388 -388
  92. floodmodeller_api/test/test_data/example5.inp +147 -147
  93. floodmodeller_api/test/test_data/example6.inp +154 -154
  94. floodmodeller_api/test/test_data/expected_conveyance.csv +60 -0
  95. floodmodeller_api/test/test_data/jump.dat +176 -176
  96. floodmodeller_api/test/test_data/network.dat +1374 -1374
  97. floodmodeller_api/test/test_data/network.ext +45 -45
  98. floodmodeller_api/test/test_data/network.exy +1 -1
  99. floodmodeller_api/test/test_data/network.feb +45 -45
  100. floodmodeller_api/test/test_data/network.ied +45 -45
  101. floodmodeller_api/test/test_data/network.ief +20 -20
  102. floodmodeller_api/test/test_data/network.inp +147 -147
  103. floodmodeller_api/test/test_data/network.pxy +57 -57
  104. floodmodeller_api/test/test_data/network.zzd +122 -122
  105. floodmodeller_api/test/test_data/network_dat_expected.json +21837 -0
  106. floodmodeller_api/test/test_data/network_from_tabularCSV.csv +87 -87
  107. floodmodeller_api/test/test_data/network_ied_expected.json +287 -0
  108. floodmodeller_api/test/test_data/rnweir.dat +9 -9
  109. floodmodeller_api/test/test_data/rnweir.ext +45 -45
  110. floodmodeller_api/test/test_data/rnweir.feb +9 -9
  111. floodmodeller_api/test/test_data/rnweir.gxy +45 -45
  112. floodmodeller_api/test/test_data/rnweir_default.dat +74 -74
  113. floodmodeller_api/test/test_data/rnweir_default.ext +45 -45
  114. floodmodeller_api/test/test_data/rnweir_default.feb +9 -9
  115. floodmodeller_api/test/test_data/rnweir_default.fmpx +58 -58
  116. floodmodeller_api/test/test_data/rnweir_default.gxy +53 -53
  117. floodmodeller_api/test/test_data/unit checks.dat +16 -16
  118. floodmodeller_api/test/test_ied.py +29 -29
  119. floodmodeller_api/test/test_ief.py +136 -24
  120. floodmodeller_api/test/test_inp.py +47 -48
  121. floodmodeller_api/test/test_json.py +114 -0
  122. floodmodeller_api/test/test_logs_lf.py +102 -51
  123. floodmodeller_api/test/test_tool.py +165 -152
  124. floodmodeller_api/test/test_toolbox_structure_log.py +234 -239
  125. floodmodeller_api/test/test_xml2d.py +151 -156
  126. floodmodeller_api/test/test_zzn.py +36 -34
  127. floodmodeller_api/to_from_json.py +230 -0
  128. floodmodeller_api/tool.py +332 -329
  129. floodmodeller_api/toolbox/__init__.py +5 -5
  130. floodmodeller_api/toolbox/example_tool.py +45 -45
  131. floodmodeller_api/toolbox/model_build/__init__.py +2 -2
  132. floodmodeller_api/toolbox/model_build/add_siltation_definition.py +100 -98
  133. floodmodeller_api/toolbox/model_build/structure_log/__init__.py +1 -1
  134. floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +287 -289
  135. floodmodeller_api/toolbox/model_build/structure_log_definition.py +76 -76
  136. floodmodeller_api/units/__init__.py +10 -10
  137. floodmodeller_api/units/_base.py +214 -212
  138. floodmodeller_api/units/boundaries.py +467 -467
  139. floodmodeller_api/units/comment.py +52 -55
  140. floodmodeller_api/units/conduits.py +382 -402
  141. floodmodeller_api/units/conveyance.py +301 -0
  142. floodmodeller_api/units/helpers.py +123 -131
  143. floodmodeller_api/units/iic.py +107 -101
  144. floodmodeller_api/units/losses.py +305 -306
  145. floodmodeller_api/units/sections.py +465 -446
  146. floodmodeller_api/units/structures.py +1690 -1683
  147. floodmodeller_api/units/units.py +93 -104
  148. floodmodeller_api/units/unsupported.py +44 -44
  149. floodmodeller_api/units/variables.py +87 -89
  150. floodmodeller_api/urban1d/__init__.py +11 -11
  151. floodmodeller_api/urban1d/_base.py +188 -179
  152. floodmodeller_api/urban1d/conduits.py +93 -85
  153. floodmodeller_api/urban1d/general_parameters.py +58 -58
  154. floodmodeller_api/urban1d/junctions.py +81 -79
  155. floodmodeller_api/urban1d/losses.py +81 -74
  156. floodmodeller_api/urban1d/outfalls.py +114 -110
  157. floodmodeller_api/urban1d/raingauges.py +111 -111
  158. floodmodeller_api/urban1d/subsections.py +92 -98
  159. floodmodeller_api/urban1d/xsections.py +147 -144
  160. floodmodeller_api/util.py +119 -21
  161. floodmodeller_api/validation/parameters.py +660 -660
  162. floodmodeller_api/validation/urban_parameters.py +388 -404
  163. floodmodeller_api/validation/validation.py +110 -108
  164. floodmodeller_api/version.py +1 -1
  165. floodmodeller_api/xml2d.py +632 -673
  166. floodmodeller_api/xml2d_template.py +37 -37
  167. floodmodeller_api/zzn.py +414 -363
  168. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/LICENSE.txt +13 -13
  169. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/METADATA +85 -82
  170. floodmodeller_api-0.4.4.dist-info/RECORD +185 -0
  171. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/WHEEL +1 -1
  172. floodmodeller_api/libifcoremd.dll +0 -0
  173. floodmodeller_api/test/test_data/EX3.bmp +0 -0
  174. floodmodeller_api/test/test_data/test_output.csv +0 -87
  175. floodmodeller_api/zzn_read.dll +0 -0
  176. floodmodeller_api-0.4.2.post1.dist-info/RECORD +0 -164
  177. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/entry_points.txt +0 -0
  178. {floodmodeller_api-0.4.2.post1.dist-info → floodmodeller_api-0.4.4.dist-info}/top_level.txt +0 -0
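
Before the dat.py diff below, a minimal usage sketch of the DAT class whose rewrite dominates this release. It is based only on the method signatures and docstrings visible in the diff; the file path reuses the EX1.DAT name from the test data listed above, and the commented-out insert call is a placeholder, not part of the package:

    from floodmodeller_api import DAT

    dat = DAT("EX1.DAT")  # placeholder path - point at any existing .dat model file

    # Unit groups are exposed as dicts keyed by label: sections, structures,
    # conduits, boundaries, losses
    for label, section in dat.sections.items():
        # next()/prev() infer reach connectivity from distance-to-next values,
        # shared labels and ds_label links; each returns a Unit, a list of
        # Units, or None at the end of a reach
        downstream = dat.next(section)
        upstream = dat.prev(section)

    # insert_unit() requires exactly one of add_before, add_after or add_at
    # (a 0-based index, where -1 appends to the end of the network), e.g.:
    # dat.insert_unit(some_unit, add_at=-1)

    dat.save("EX1_modified.dat")  # later dat.update() calls write to this location

In 0.4.4 these calls report errors through the new @handle_exception decorator rather than per-method try/except blocks, but the public behaviour sketched above is unchanged.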
floodmodeller_api/dat.py CHANGED
@@ -1,831 +1,889 @@
-"""
-Flood Modeller Python API
-Copyright (C) 2023 Jacobs U.K. Limited
-
-This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
-as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
-
-This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
-of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
-
-If you have any query about this program or this License, please contact us at support@floodmodeller.com or write to the following
-address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London, SE1 2QG, United Kingdom.
-"""
-
-from pathlib import Path
-from typing import List, Optional, Union
-
-from . import units  # Import for using as package
-from ._base import FMFile
-from .units._base import Unit
-from .units.helpers import _to_float, _to_int
-from .validation.validation import _validate_unit
-
-
-class DAT(FMFile):
-    """Reads and write Flood Modeller datafile format '.dat'
-
-    Args:
-        dat_filepath (str, optional): Full filepath to dat file. If not specified, a new DAT class will be created. Defaults to None.
-
-    Output:
-        Initiates 'DAT' class object
-
-    Raises:
-        TypeError: Raised if dat_filepath does not point to a .dat file
-        FileNotFoundError: Raised if dat_filepath points to a file which does not exist
-    """
-
-    _filetype: str = "DAT"
-    _suffix: str = ".dat"
-
-    def __init__(self, dat_filepath: Optional[Union[str, Path]] = None, with_gxy: bool = False):
-        try:
-            if dat_filepath is not None:
-                FMFile.__init__(self, dat_filepath)
-                self._read()
-
-            else:
-                self._create_from_blank(with_gxy)
-
-            self._get_general_parameters()
-            self._get_unit_definitions()
-        except Exception as e:
-            self._handle_exception(e, when="read")
-
-    def update(self) -> None:
-        """Updates the existing DAT based on any altered attributes"""
-        self._update()
-        self._write_gxy(self._gxy_filepath)
-
-    def save(self, filepath: Union[str, Path]) -> None:
-        """Saves the DAT to the given location, if pointing to an existing file it will be overwritten.
-        Once saved, the DAT() class will continue working from the saved location, therefore any further calls to DAT.update() will
-        update in the latest saved location rather than the original source DAT used to construct the class
-
-        Args:
-            filepath (str): Filepath to new save location including the name and '.dat' extension
-
-        Raises:
-            TypeError: Raised if given filepath doesn't point to a file suffixed '.dat'
-        """
-        filepath = Path(filepath).absolute()
-        self._save(filepath)
-        self._write_gxy(filepath)
-
-    def _write_gxy(self, filepath):
-        if self._gxy_data is not None:
-            gxy_string = self._gxy_data
-            new_gxy_path = filepath.with_suffix(".gxy")
-            with open(new_gxy_path, "w") as gxy_file:
-                gxy_file.write(gxy_string)
-            self._gxy_filepath = new_gxy_path
-
-    def diff(self, other: "DAT", force_print: bool = False) -> None:
-        """Compares the DAT class against another DAT class to check whether they are
-        equivalent, or if not, what the differences are. Two instances of a DAT class are
-        deemed equivalent if all of their attributes are equal except for the filepath and
-        raw data. For example, two DAT files from different filepaths that had the same
-        data except maybe some differences in decimal places and some default parameters
-        ommitted, would be classed as equivalent as they would produce the same DAT instance
-        and write the exact same data.
-
-        The result is printed to the console. If you need to access the returned data, use
-        the method ``DAT._get_diff()``
-
-        Args:
-            other (floodmodeller_api.DAT): Other instance of a DAT class
-            force_print (bool): Forces the API to print every difference found, rather than
-                just the first 25 differences. Defaults to False.
-        """
-        self._diff(other, force_print=force_print)
-
-    # def _get_unit_from_connectivity(self, method) #use this as method prev and next
-
-    def next(self, unit: Unit) -> Union[Unit, List[Unit], None]:
-        """Finds next unit in the reach.
-
-        Next unit in reach can be infered by:
-            The next unit in the .dat file structure - such as when a river section has a positive distance to next
-            The units with the exact same name - such as a junction unit
-            The next unit as described in the ds_label - such as with Bridge units
-
-        Args:
-            unit (Unit): flood modeller unit input.
-
-        Returns:
-            Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
-        """
-        # Needs to handle same name match outside dist to next (e.g. inflow)
-        try:
-            if hasattr(unit, "dist_to_next"):
-                # Case 1a - positive distance to next
-                if unit.dist_to_next != 0:
-                    return self._next_in_dat_struct(unit)
-
-                # Case 1b - distance to next = 0
-                return self._name_label_match(unit)
-
-            # Case 2: next unit is in ds_label
-            if hasattr(unit, "ds_label"):
-                return self._name_label_match(unit, name_override=unit.ds_label)
-
-            if unit._unit == "JUNCTION":
-                return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels]  # type: ignore[misc, attr-defined]
-
-            if unit._unit in ("QHBDY", "NCDBDY", "TIDBDY"):
-                return None
-
-            return self._name_label_match(unit)
-
-        except Exception as e:
-            self._handle_exception(e, when="calculating next unit")
-
-    def prev(self, unit: Unit) -> Union[Unit, List[Unit], None]:  # noqa: C901
-        """Finds previous unit in the reach.
-
-        Previous unit in reach can be infered by:
-            The previous unit in the .dat file structure - such as when the previous river section has a positive distance to next.
-            The units with the exact same name - such as a junction unit
-            The previous unit as linked through upstream and downstream labels - such as with Bridge units
-
-        Args:
-            unit (Unit): flood modeller unit input.
-
-        Returns:
-            Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
-        """
-
-        try:
-            # Case 1: Unit is input boundary condition
-            if unit._unit in (
-                "QTBDY",
-                "HTBDY",
-                "REFHBDY",
-                "FEHBDY",
-                "FRQSIM",
-                "FSRBDY",
-                "FSSR16BDY",
-                "GERRBDY",
-                "REBDY",
-                "REFH2BDY",
-                "SCSBDY",
-            ):
-                return None
-
-            if unit._unit == "JUNCTION":
-                return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels]  # type: ignore[misc, attr-defined]
-
-            prev_units = []
-            _prev_in_dat = self._prev_in_dat_struct(unit)
-            _name_match = self._name_label_match(unit)
-            _ds_label_match = self._ds_label_match(unit)
-            _junction_match = [
-                junction
-                for junction in self._all_units
-                if junction._unit == "JUNCTION" and unit.name in junction.labels
-            ]
-
-            # Case 2: Previous unit has positive distance to next
-            if (
-                _prev_in_dat
-                and hasattr(_prev_in_dat, "dist_to_next")
-                and _prev_in_dat.dist_to_next != 0
-            ):
-                prev_units.append(_prev_in_dat)
-                _name_match = None  # Name match does apply if upstream section exists
-
-            # All other matches added (matching name, matching name to ds_label and junciton)
-            for match in [_name_match, _ds_label_match, _junction_match]:
-                if isinstance(match, list):
-                    prev_units.extend(match)
-                elif match:
-                    prev_units.append(match)
-
-            if len(prev_units) == 0:
-                return None
-            if len(prev_units) == 1:
-                return prev_units[0]
-            return prev_units
-
-        except Exception as e:
-            self._handle_exception(e, when="calculating next unit")
-
-    def _next_in_dat_struct(self, current_unit) -> Optional[Unit]:
-        """Finds next unit in the dat file using the index position.
-
-        Returns:
-            Unit with all associated data
-        """
-
-        for idx, unit in enumerate(self._all_units):
-            # Names checked first to speed up comparison
-            if unit.name == current_unit.name and unit == current_unit:
-                try:
-                    return self._all_units[idx + 1]
-                except IndexError:
-                    return None
-
-        return None
-
-    def _prev_in_dat_struct(self, current_unit) -> Optional[Unit]:
-        """Finds previous unit in the dat file using the index position.
-
-        Returns:
-            Unit with all associated data
-        """
-        for idx, unit in enumerate(self._all_units):
-            # Names checked first to speed up comparison
-            if unit.name == current_unit.name and unit == current_unit:
-                if idx == 0:
-                    return None
-                return self._all_units[idx - 1]
-
-        return None
-
-    def _ds_label_match(self, current_unit) -> Union[Unit, List[Unit], None]:
-        """Pulls out all units with ds label that matches the input unit.
-
-        Returns:
-            Union[Unit, list[Unit], None]: Either a singular unit or list of units with ds_label matching, if none exist returns none.
-        """
-
-        _ds_list = []
-        for item in self._all_units:
-            try:
-                if item.ds_label == current_unit.name:
-                    _ds_list.append(item)
-            except AttributeError:
-                continue
-
-        if len(_ds_list) == 0:
-            return None
-        if len(_ds_list) == 1:
-            return _ds_list[0]
-        return _ds_list
-
-    def _name_label_match(self, current_unit, name_override=None) -> Union[Unit, List[Unit], None]:
-        """Pulls out all units with same name as the input unit.
-
-        Returns:
-            Union[Unit, list[Unit], None]: Either a singular unit or list of units with matching names, if none exist returns none. Does not return itself
-        """
-
-        _name = name_override or str(current_unit.name)
-        _name_list = []
-        for item in self._all_units:
-            if item.name == _name and item != current_unit:
-                _name_list.append(item)
-            else:
-                pass
-
-        if len(_name_list) == 0:
-            return None
-        if len(_name_list) == 1:
-            return _name_list[0]
-        return _name_list
-
-    def _read(self):
-        # Read DAT data
-        with open(self._filepath, "r") as dat_file:
-            self._raw_data = [line.rstrip("\n") for line in dat_file.readlines()]
-
-        # Generate DAT structure
-        self._update_dat_struct()
-
-        # Get network .gxy if present
-        gxy_path = self._filepath.with_suffix(".gxy")
-        if gxy_path.exists():
-            self._gxy_filepath = gxy_path
-            with open(self._gxy_filepath, "r") as gxy_file:
-                self._gxy_data = gxy_file.read()
-        else:
-            self._gxy_filepath = None
-            self._gxy_data = None
-
-    def _write(self) -> str:
-        """Returns string representation of the current DAT data
-
-        Returns:
-            str: Full string representation of DAT in its most recent state (including changes not yet saved to disk)
-        """
-        try:
-            self._update_raw_data()
-            self._update_general_parameters()
-            self._update_dat_struct()
-            self._update_unit_names()
-
-            dat_string = ""
-            for line in self._raw_data:
-                dat_string += line + "\n"
-
-            return dat_string
-
-        except Exception as e:
-            self._handle_exception(e, when="write")
-
-    def _create_from_blank(self, with_gxy=False):
-        # No filepath specified, create new 'blank' DAT in memory
-        # ** Update these to have minimal data needed (general header, empty IC header)
-        self._dat_struct = [
-            {"start": 0, "Type": "GENERAL", "end": 6},
-            {"Type": "INITIAL CONDITIONS", "start": 7, "end": 8},
-        ]
-        self._raw_data = [
-            "",
-            "#REVISION#1",
-            "         0     0.750     0.900     0.100     0.001        12SI",
-            "    10.000     0.010     0.010     0.700     0.100     0.700     0.000",
-            "RAD FILE",
-            "",
-            "END GENERAL",
-            "INITIAL CONDITIONS",
-            " label   ?      flow     stage froude no  velocity     umode    ustate         z",
-        ]
-
-        self._gxy_filepath = None
-        if with_gxy:
-            self._gxy_data = ""
-        else:
-            self._gxy_data = None
-
-    def _get_general_parameters(self):
-        # ** Get general parameters here
-        self.title = self._raw_data[0]
-        self.general_parameters = {}
-        line = f"{self._raw_data[2]:<70}"
-        params = units.helpers.split_10_char(line)
-        if params[6] == "":
-            # Adds the measurements unit as DEFAULT if not specified
-            params[6] = "DEFAULT"
-        line = f"{self._raw_data[3]:<70}"
-        params.extend(units.helpers.split_10_char(line))
-
-        self.general_parameters["Node Count"] = _to_int(params[0], 0)
-        self.general_parameters["Lower Froude"] = _to_float(params[1], 0.75)
-        self.general_parameters["Upper Froude"] = _to_float(params[2], 0.9)
-        self.general_parameters["Min Depth"] = _to_float(params[3], 0.1)
-        self.general_parameters["Convergence Direct"] = _to_float(params[4], 0.001)
-        self._label_len = _to_int(params[5], 12)  # label length
-        self.general_parameters["Units"] = params[6]  # "DEFAULT" set during read above.
-        self.general_parameters["Water Temperature"] = _to_float(params[7], 10.0)
-        self.general_parameters["Convergence Flow"] = _to_float(params[8], 0.01)
-        self.general_parameters["Convergence Head"] = _to_float(params[9], 0.01)
-        self.general_parameters["Mathematical Damping"] = _to_float(params[10], 0.7)
-        self.general_parameters["Pivotal Choice"] = _to_float(params[11], 0.1)
-        self.general_parameters["Under-relaxation"] = _to_float(params[12], 0.7)
-        self.general_parameters["Matrix Dummy"] = _to_float(params[13], 0.0)
-        self.general_parameters["RAD File"] = self._raw_data[5]  # No default, optional
-
-    def _update_general_parameters(self):
-        self._raw_data[0] = self.title
-        self._raw_data[5] = self.general_parameters["RAD File"]
-        general_params_1 = units.helpers.join_10_char(
-            self.general_parameters["Node Count"],
-            self.general_parameters["Lower Froude"],
-            self.general_parameters["Upper Froude"],
-            self.general_parameters["Min Depth"],
-            self.general_parameters["Convergence Direct"],
-            self._label_len,
-        )
-        general_params_1 += self.general_parameters["Units"]
-        self._raw_data[2] = general_params_1
-
-        general_params_2 = units.helpers.join_10_char(
-            self.general_parameters["Water Temperature"],
-            self.general_parameters["Convergence Flow"],
-            self.general_parameters["Convergence Head"],
-            self.general_parameters["Mathematical Damping"],
-            self.general_parameters["Pivotal Choice"],
-            self.general_parameters["Under-relaxation"],
-            self.general_parameters["Matrix Dummy"],
-        )
-        self._raw_data[3] = general_params_2
-
-    def _update_unit_names(self):
-        for unit_group, unit_group_name in [
-            (self.boundaries, "boundaries"),
-            (self.sections, "sections"),
-            (self.structures, "structures"),
-            (self.conduits, "conduits"),
-            (self.losses, "losses"),
-        ]:
-            for name, unit in unit_group.copy().items():
-                if name != unit.name:
-                    # Check if new name already exists as a label
-                    if unit.name in unit_group:
-                        raise Exception(
-                            f'Error: Cannot update label "{name}" to "{unit.name}" because "{unit.name}" already exists in the Network {unit_group_name} group'
-                        )
-                    unit_group[unit.name] = unit
-                    del unit_group[name]
-                    # Update label in ICs
-                    if unit_group_name not in ["boundaries", "losses"]:
-                        # TODO: Need to do a more thorough check for whether a unit is one in the ICs
-                        # e.g. Culvert inlet and river section may have same label, but only river
-                        # section label should update in ICs
-                        self.initial_conditions.update_label(name, unit.name)
-
-                    # Update label in GISINFO and GXY data
-                    self._update_gisinfo_label(
-                        unit._unit,
-                        unit._subtype,
-                        name,
-                        unit.name,
-                        unit_group_name
-                        in ["boundaries", "losses"],  # if True it ignores second lbl
-                    )
-                    self._update_gxy_label(unit._unit, unit._subtype, name, unit.name)
-
-        # Update IC table names in raw_data if any name changes
-        ic_start, ic_end = next(
-            (unit["start"], unit["end"])
-            for unit in self._dat_struct
-            if unit["Type"] == "INITIAL CONDITIONS"
-        )
-        self._raw_data[ic_start : ic_end + 1] = self.initial_conditions._write()
-
-    def _update_raw_data(self):
-        block_shift = 0
-        comment_tracker = 0
-        comment_units = [unit for unit in self._all_units if unit._unit == "COMMENT"]
-        prev_block_end = self._dat_struct[0]["end"]
-        existing_units = {
-            "boundaries": [],
-            "structures": [],
-            "sections": [],
-            "conduits": [],
-            "losses": [],
-        }
-
-        for block in self._dat_struct:
-            # Check for all supported boundary types
-            if block["Type"] in units.SUPPORTED_UNIT_TYPES:
-                # clause for when unit has been inserted into the dat file
-                if "new_insert" in block:
-                    block["start"] = prev_block_end + 1
-                    block["end"] = block["start"] + len(block["new_insert"]) - 1
-                    self._raw_data[block["start"] : block["start"]] = block["new_insert"]
-                    block_shift += len(block["new_insert"])
-                    prev_block_end = block["end"]
-                    del block["new_insert"]
-
-                else:
-                    unit_data = self._raw_data[
-                        block["start"] + block_shift : block["end"] + 1 + block_shift
-                    ]
-                    prev_block_len = len(unit_data)
-
-                    if block["Type"] == "INITIAL CONDITIONS":
-                        new_unit_data = self.initial_conditions._write()
-                    elif block["Type"] == "COMMENT":
-                        comment = comment_units[comment_tracker]
-                        new_unit_data = comment._write()
-                        comment_tracker += 1
-
-                    elif block["Type"] == "VARIABLES":
-                        new_unit_data = self.variables._write()
-
-                    else:
-                        if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
-                            unit_name = unit_data[2][: self._label_len].strip()
-                        else:
-                            unit_name = unit_data[1][: self._label_len].strip()
-
-                        # Get unit object
-                        unit_group = getattr(
-                            self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
-                        )
-                        if unit_name in unit_group:
-                            # block still exists
-                            new_unit_data = unit_group[unit_name]._write()
-                            existing_units[
-                                units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
-                            ].append(unit_name)
-                        else:
-                            # Bdy block has been deleted
-                            new_unit_data = []
-
-                    new_block_len = len(new_unit_data)
-                    self._raw_data[
-                        block["start"] + block_shift : block["end"] + 1 + block_shift
-                    ] = new_unit_data
-                    # adjust block shift for change in number of lines in bdy block
-                    block_shift += new_block_len - prev_block_len
-                    prev_block_end = (
-                        block["end"] + block_shift
-                    )  # add in to keep a record of the last block read in
-
-    def _get_unit_definitions(self):  # noqa: C901
-        # Get unit definitions
-        self.sections = {}
-        self.boundaries = {}
-        self.structures = {}
-        self.conduits = {}
-        self.losses = {}
-        self._unsupported = {}
-        self._all_units = []
-        for block in self._dat_struct:
-            unit_data = self._raw_data[block["start"] : block["end"] + 1]
-            if block["Type"] in units.SUPPORTED_UNIT_TYPES:
-                # Deal with initial conditions block
-                if block["Type"] == "INITIAL CONDITIONS":
-                    self.initial_conditions = units.IIC(unit_data, n=self._label_len)
-                    continue
-
-                if block["Type"] == "COMMENT":
-                    self._all_units.append(units.COMMENT(unit_data, n=self._label_len))
-                    continue
-
-                if block["Type"] == "VARIABLES":
-                    self.variables = units.Variables(unit_data)
-                    continue
-
-                # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
-                if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
-                    unit_name = unit_data[2][: self._label_len].strip()
-                else:
-                    unit_name = unit_data[1][: self._label_len].strip()
-
-                # Create instance of unit and add to relevant group
-                unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"])
-                if unit_name in unit_group:
-                    raise Exception(
-                        f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]}'
-                    )
-                # Changes done to account for unit types with spaces/dashes eg Flat-V Weir
-                unit_type = block["Type"].replace(" ", "_").replace("-", "_")
-                unit_group[unit_name] = eval(
-                    f"units.{unit_type}({unit_data}, {self._label_len})"  # append to our _all._units as well???
-                )
-                self._all_units.append(unit_group[unit_name])
-
-            elif block["Type"] in units.UNSUPPORTED_UNIT_TYPES:
-                # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
-                if units.UNSUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
-                    unit_name = unit_data[2][: self._label_len].strip()
-                    subtype = True
-                else:
-                    unit_name = unit_data[1][: self._label_len].strip()
-                    subtype = False
-
-                self._unsupported[f"{unit_name} ({block['Type']})"] = units.UNSUPPORTED(
-                    unit_data,
-                    self._label_len,
-                    unit_name=unit_name,
-                    unit_type=block["Type"],
-                    subtype=subtype,
-                )
-                self._all_units.append(self._unsupported[f"{unit_name} ({block['Type']})"])
-
-            elif block["Type"] not in ("GENERAL", "GISINFO"):
-                raise Exception(f"Unexpected unit type encountered: {block['Type']}")
-
-    def _update_dat_struct(self):  # noqa: C901
-        """Internal method used to update self._dat_struct which details the overall structure of the dat file as a list of blocks, each of which
-        are a dictionary containing the 'start', 'end' and 'type' of the block.
-
-        """
-        # Generate DAT structure
-        dat_struct = []
-        in_block = False
-        in_general = True
-        in_comment = False
-        comment_n = None  # Used as counter for number of lines in a comment block
-        gisinfo_block = False
-        general_block = {"start": 0, "Type": "GENERAL"}
-        unit_block = {}
-        for idx, line in enumerate(self._raw_data):
-            # Deal with 'general' header
-            if in_general is True:
-                if line == "END GENERAL":
-                    general_block["end"] = idx
-                    dat_struct.append(general_block)
-                    in_general = False
-                continue
-
-            # Deal with comment blocks explicitly as they could contain unit keywords
-            if in_comment and comment_n is None:
-                comment_n = int(line.strip())
-                continue
-            if in_comment:
-                comment_n -= 1
-                if comment_n == 0:
-                    unit_block["end"] = idx  # add ending index
-                    # append existing bdy block to the dat_struct
-                    dat_struct.append(unit_block)
-                    unit_block = {}  # reset bdy block
-                    in_comment = False
-                    in_block = False
-                    comment_n = None
-                continue  # move onto next line as still in comment block
-
-            if line == "COMMENT":
-                in_comment = True
-                unit_block, in_block = self._close_struct_block(
-                    dat_struct, "COMMENT", unit_block, in_block, idx
-                )
-                continue
-
-            if line == "GISINFO":
-                gisinfo_block = True
-                unit_block, in_block = self._close_struct_block(
-                    dat_struct, "GISINFO", unit_block, in_block, idx
-                )
-
-            if not gisinfo_block:
-                if line.split(" ")[0] in units.ALL_UNIT_TYPES:
-                    # The " " is needed here in case of empty string
-                    unit_type = line.split()[0]
-                elif " ".join(line.split()[:2]) in units.ALL_UNIT_TYPES:
-                    unit_type = " ".join(line.split()[:2])
-                else:
-                    continue
-
-                unit_block, in_block = self._close_struct_block(
-                    dat_struct, unit_type, unit_block, in_block, idx
-                )
-
-        if len(unit_block) != 0:
-            # Only adds end block if there is a block present (i.e. an empty DAT stays empty)
-            # add ending index for final block
-            unit_block["end"] = len(self._raw_data) - 1
-            dat_struct.append(unit_block)  # add final block
-
-        self._dat_struct = dat_struct
-
-    def _close_struct_block(self, dat_struct, unit_type, unit_block, in_block, idx):
-        """Helper method to close block in dat struct"""
-        if in_block is True:
-            unit_block["end"] = idx - 1  # add ending index
-            # append existing bdy block to the dat_struct
-            dat_struct.append(unit_block)
-            unit_block = {}  # reset bdy block
-        in_block = True
-        unit_block["Type"] = unit_type  # start new bdy block
-        unit_block["start"] = idx  # add starting index
-
-        return unit_block, in_block
-
-    def remove_unit(self, unit):
-        """Remove a unit from the dat file.
-
-        Args:
-            unit (Unit): flood modeller unit input.
-
-        Raises:
-            TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
-        """
-
-        try:
-            # catch if not valid unit
-            if not isinstance(unit, Unit):
-                raise TypeError("unit isn't a unit")
-
-            # remove from all units
-            index = self._all_units.index(unit)
-            del self._all_units[index]
-            # remove from dat_struct
-            dat_struct_unit = self._dat_struct[index + 1]
-            del self._dat_struct[index + 1]
-            # remove from raw data
-            del self._raw_data[dat_struct_unit["start"] : dat_struct_unit["end"] + 1]
-            # remove from unit group
-            unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
-            unit_group = getattr(self, unit_group_name)
-            del unit_group[unit.name]
-            # remove from ICs
-            self.initial_conditions.data = self.initial_conditions.data.loc[
-                self.initial_conditions.data["label"] != unit.name
-            ]
-
-            self._update_dat_struct()
-            self.general_parameters["Node Count"] -= 1
-
-        except Exception as e:
-            self._handle_exception(e, when="remove unit")
-
-    def insert_unit(self, unit, add_before=None, add_after=None, add_at=None):  # noqa: C901
-        """Inserts a unit into the dat file.
-
-        Args:
-            unit (Unit): FloodModeller unit input.
-            add_before (Unit): FloodModeller unit to add before.
-            add_after (Unit): FloodModeller unit to add after.
-            add_at (interger): Positional argument (starting at 0) of where to add in
-                the dat file. To add at the end of the network you can use -1.
-
-        Raises:
-            SyntaxError: Raised if no positional argument is given.
-            TypeError: Raised if given unit isn't an instance of FloodModeller Unit.
-            NameError: Raised if unit name already appears in unit group.
-        """
-        try:
-            # catch errors
-            if all(arg is None for arg in (add_before, add_after, add_at)):
-                raise SyntaxError(
-                    "No possitional argument given. Please provide either add_before, add_at or add_after"
-                )
-            if not isinstance(unit, Unit):
-                raise TypeError("unit isn't a unit")
-            if add_at is None and not (isinstance(add_before, Unit) or isinstance(add_after, Unit)):
-                raise TypeError(
-                    "add_before or add_after argument must be a Flood Modeller Unit type"
-                )
-
-            unit_class = unit._unit
-            if unit_class != "COMMENT":
-                _validate_unit(unit)
-                unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]  # get rid
-                unit_group = getattr(self, unit_group_name)
-                # unit_class = unit._unit
-                if unit.name in unit_group:
-                    raise NameError(
-                        "Name already appears in unit group. Cannot have two units with same name in same group"
-                    )
-
-            # positional argument
-            if add_at is not None:
-                insert_index = add_at
-                if insert_index < 0:
-                    insert_index += len(self._all_units) + 1
-                    if insert_index < 0:
-                        raise Exception(f"invalid add_at index: {add_at}")
-            else:
-                check_unit = add_before or add_after
-                for index, thing in enumerate(self._all_units):
-                    if thing == check_unit:
-                        insert_index = index
-                        insert_index += 1 if add_after else 0
-                        break
-                else:
-                    raise Exception(
-                        f"{check_unit} not found in dat network, so cannot be used to add before/after"
-                    )
-
-            unit_data = unit._write()
-            self._all_units.insert(insert_index, unit)
-            if unit._unit != "COMMENT":
-                unit_group[unit.name] = unit
-            self._dat_struct.insert(
-                insert_index + 1, {"Type": unit_class, "new_insert": unit_data}
-            )  # add to dat struct without unit.name
-
-            if unit._unit != "COMMENT":
-                # update the iic's tables
-                iic_data = [unit.name, "y", 00.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
-                self.initial_conditions.data.loc[
-                    len(self.initial_conditions.data)
-                ] = iic_data  # flaged
-
-            # update all
-            if unit._unit != "COMMENT":
-                self.general_parameters["Node Count"] += 1  # flag no update for comments
-            self._update_raw_data()
-            self._update_dat_struct()
-
-        except Exception as e:
-            self._handle_exception(e, when="insert unit")
-
-    def _update_gisinfo_label(self, unit_type, unit_subtype, prev_lbl, new_lbl, ignore_second):
-        """Update labels in GISINFO block if unit is renamed"""
-
-        start, end = next(
-            (block["start"], block["end"])
-            for block in self._dat_struct
-            if block["Type"] == "GISINFO"
-        )
-        gisinfo_block = self._raw_data[start : end + 1]
-
-        prefix = unit_type if unit_subtype is None else f"{unit_type} {unit_subtype}"
-
-        new_gisinfo_block = []
-        for line in gisinfo_block:
-            # Replace first label
-            if line.startswith(f"{prefix} {prev_lbl} "):
-                # found matching line (space at the end is important to ignore node
-                # lables with similar starting chars)
-                line = line.replace(f"{prefix} {prev_lbl} ", f"{prefix} {new_lbl} ")
-
-            # Replace second label
-            if not ignore_second:
-                if line.startswith(f"{prev_lbl} "):  # space at the end important again
-                    line = line.replace(f"{prev_lbl} ", f"{new_lbl} ", 1)
-
-            new_gisinfo_block.append(line)
-
-        self._raw_data[start : end + 1] = new_gisinfo_block
-
-    def _update_gxy_label(self, unit_type, unit_subtype, prev_lbl, new_lbl):
-        """Update labels in GXY file if unit is renamed"""
-
-        if self._gxy_data is not None:
-            if unit_subtype is None:
-                unit_subtype = ""
-
-            old = f"{unit_type}_{unit_subtype}_{prev_lbl}"
-            new = f"{unit_type}_{unit_subtype}_{new_lbl}"
-            self._gxy_data = self._gxy_data.replace(old, new)
1
+ """
2
+ Flood Modeller Python API
3
+ Copyright (C) 2024 Jacobs U.K. Limited
4
+
5
+ This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
+ as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
7
+
8
+ This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty
9
+ of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
10
+
11
+ You should have received a copy of the GNU General Public License along with this program. If not, see https://www.gnu.org/licenses/.
12
+
13
+ If you have any query about this program or this License, please contact us at support@floodmodeller.com or write to the following
14
+ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London, SE1 2QG, United Kingdom.
15
+ """
16
+
17
+ from __future__ import annotations
18
+
19
+ from pathlib import Path
20
+ from typing import Any
21
+
22
+ from . import units
23
+ from ._base import FMFile
24
+ from .units._base import Unit
25
+ from .units.helpers import _to_float, _to_int
26
+ from .util import handle_exception
27
+ from .validation.validation import _validate_unit
28
+
29
+
30
+ class DAT(FMFile):
31
+ """Reads and write Flood Modeller datafile format '.dat'
32
+
33
+ Args:
34
+ dat_filepath (str, optional): Full filepath to dat file. If not specified, a new DAT class will be created. Defaults to None.
35
+
36
+ Output:
37
+ Initiates 'DAT' class object
38
+
39
+ Raises:
40
+ TypeError: Raised if dat_filepath does not point to a .dat file
41
+ FileNotFoundError: Raised if dat_filepath points to a file which does not exist
42
+ """
43
+
44
+ _filetype: str = "DAT"
45
+ _suffix: str = ".dat"
46
+
47
+ @handle_exception(when="read")
48
+ def __init__(
49
+ self,
50
+ dat_filepath: str | Path | None = None,
51
+ with_gxy: bool = False,
52
+ from_json: bool = False,
53
+ ) -> None:
54
+ if from_json:
55
+ return
56
+ if dat_filepath is not None:
57
+ FMFile.__init__(self, dat_filepath)
58
+ self._read()
59
+
60
+ else:
61
+ self._create_from_blank(with_gxy)
62
+
63
+ self._get_general_parameters()
64
+ self._get_unit_definitions()
65
+
66
+ def update(self) -> None:
67
+ """Updates the existing DAT based on any altered attributes"""
68
+ self._update()
69
+ self._write_gxy(self._gxy_filepath)
70
+
71
+ def save(self, filepath: str | Path) -> None:
72
+ """Saves the DAT to the given location, if pointing to an existing file it will be overwritten.
73
+ Once saved, the DAT() class will continue working from the saved location, therefore any further calls to DAT.update() will
74
+ update in the latest saved location rather than the original source DAT used to construct the class
75
+
76
+ Args:
77
+ filepath (str): Filepath to new save location including the name and '.dat' extension
78
+
79
+ Raises:
80
+ TypeError: Raised if given filepath doesn't point to a file suffixed '.dat'
81
+ """
82
+ filepath = Path(filepath).absolute()
83
+ self._save(filepath)
84
+ self._write_gxy(filepath)
85
+
86
+ def _write_gxy(self, filepath):
87
+ if self._gxy_data is not None:
88
+ gxy_string = self._gxy_data
89
+ new_gxy_path = filepath.with_suffix(".gxy")
90
+ with open(new_gxy_path, "w") as gxy_file:
91
+ gxy_file.write(gxy_string)
92
+ self._gxy_filepath = new_gxy_path
93
+
94
+ def diff(self, other: DAT, force_print: bool = False) -> None:
95
+ """Compares the DAT class against another DAT class to check whether they are
96
+ equivalent, or if not, what the differences are. Two instances of a DAT class are
97
+ deemed equivalent if all of their attributes are equal except for the filepath and
98
+ raw data. For example, two DAT files from different filepaths that had the same
99
+ data except maybe some differences in decimal places and some default parameters
100
+ ommitted, would be classed as equivalent as they would produce the same DAT instance
101
+ and write the exact same data.
102
+
103
+ The result is printed to the console. If you need to access the returned data, use
104
+ the method ``DAT._get_diff()``
105
+
106
+ Args:
107
+ other (floodmodeller_api.DAT): Other instance of a DAT class
108
+ force_print (bool): Forces the API to print every difference found, rather than
109
+ just the first 25 differences. Defaults to False.
110
+ """
111
+ self._diff(other, force_print=force_print)
112
+
113
+ # def _get_unit_from_connectivity(self, method) #use this as method prev and next
114
+
115
+ @handle_exception(when="calculate next unit in")
116
+ def next(self, unit: Unit) -> Unit | list[Unit] | None:
117
+ """Finds next unit in the reach.
118
+
119
+ Next unit in reach can be infered by:
120
+ The next unit in the .dat file structure - such as when a river section has a positive distance to next
121
+ The units with the exact same name - such as a junction unit
122
+ The next unit as described in the ds_label - such as with Bridge units
123
+
124
+ Args:
125
+ unit (Unit): flood modeller unit input.
126
+
127
+ Returns:
128
+ Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
129
+ """
130
+ # Needs to handle same name match outside dist to next (e.g. inflow)
131
+ if hasattr(unit, "dist_to_next"):
132
+ # Case 1a - positive distance to next
133
+ if unit.dist_to_next != 0:
134
+ return self._next_in_dat_struct(unit)
135
+
136
+ # Case 1b - distance to next = 0
137
+ return self._name_label_match(unit)
138
+
139
+ # Case 2: next unit is in ds_label
140
+ if hasattr(unit, "ds_label"):
141
+ return self._name_label_match(unit, name_override=unit.ds_label)
142
+
143
+ if unit._unit == "JUNCTION":
144
+ return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
145
+
146
+ if unit._unit in ("QHBDY", "NCDBDY", "TIDBDY"):
147
+ return None
148
+
149
+ return self._name_label_match(unit)
150
+
151
+ @handle_exception(when="calculate previous unit in")
152
+ def prev(self, unit: Unit) -> Unit | list[Unit] | None:
153
+ """Finds previous unit in the reach.
154
+
155
+ Previous unit in reach can be infered by:
156
+ The previous unit in the .dat file structure - such as when the previous river section has a positive distance to next.
157
+ The units with the exact same name - such as a junction unit
158
+ The previous unit as linked through upstream and downstream labels - such as with Bridge units
159
+
160
+ Args:
161
+ unit (Unit): flood modeller unit input.
162
+
163
+ Returns:
164
+ Union[Unit, list[Unit], None]: Flood modeller unit either on its own or in a list if more than one follows in reach.
165
+ """
166
+ # Case 1: Unit is input boundary condition
167
+ if unit._unit in (
168
+ "QTBDY",
169
+ "HTBDY",
170
+ "REFHBDY",
171
+ "FEHBDY",
172
+ "FRQSIM",
173
+ "FSRBDY",
174
+ "FSSR16BDY",
175
+ "GERRBDY",
176
+ "REBDY",
177
+ "REFH2BDY",
178
+ "SCSBDY",
179
+ ):
180
+ return None
181
+
182
+ if unit._unit == "JUNCTION":
183
+ return [self._name_label_match(unit, name_override=lbl) for lbl in unit.labels] # type: ignore[misc, attr-defined]
184
+
185
+ prev_units = []
186
+ _prev_in_dat = self._prev_in_dat_struct(unit)
187
+ _name_match = self._name_label_match(unit)
188
+ _ds_label_match = self._ds_label_match(unit)
189
+ _junction_match = [
190
+ junction
191
+ for junction in self._all_units
192
+ if junction._unit == "JUNCTION" and unit.name in junction.labels
193
+ ]
194
+
195
+ # Case 2: Previous unit has positive distance to next
196
+ if (
197
+ _prev_in_dat
198
+ and hasattr(_prev_in_dat, "dist_to_next")
199
+ and _prev_in_dat.dist_to_next != 0
200
+ ):
201
+ prev_units.append(_prev_in_dat)
202
+ _name_match = None # Name match does apply if upstream section exists
203
+
204
+ # All other matches added (matching name, matching name to ds_label and junciton)
205
+ for match in [_name_match, _ds_label_match, _junction_match]:
206
+ if isinstance(match, list):
207
+ prev_units.extend(match)
208
+ elif match:
209
+ prev_units.append(match)
210
+
211
+ if len(prev_units) == 0:
212
+ return None
213
+ if len(prev_units) == 1:
214
+ return prev_units[0]
215
+ return prev_units
216
+
217
+ def _next_in_dat_struct(self, current_unit: Unit) -> Unit | None:
218
+ """Finds next unit in the dat file using the index position.
219
+
220
+ Returns:
221
+ Unit with all associated data
222
+ """
223
+
224
+ for idx, unit in enumerate(self._all_units):
225
+ # Names checked first to speed up comparison
226
+ if unit.name == current_unit.name and unit == current_unit:
227
+ try:
228
+ return self._all_units[idx + 1]
229
+ except IndexError:
230
+ return None
231
+
232
+ return None
233
+
234
+ def _prev_in_dat_struct(self, current_unit: Unit) -> Unit | None:
235
+ """Finds previous unit in the dat file using the index position.
236
+
237
+ Returns:
238
+ Unit with all associated data
239
+ """
240
+ for idx, unit in enumerate(self._all_units):
241
+ # Names checked first to speed up comparison
242
+ if unit.name == current_unit.name and unit == current_unit:
243
+ if idx == 0:
244
+ return None
245
+ return self._all_units[idx - 1]
246
+
247
+ return None
248
+
249
+ def _ds_label_match(self, current_unit: Unit) -> Unit | list[Unit] | None:
250
+ """Pulls out all units with ds label that matches the input unit.
251
+
252
+ Returns:
253
+ Union[Unit, list[Unit], None]: Either a singular unit or list of units with ds_label matching, if none exist returns none.
254
+ """
255
+
256
+ _ds_list = []
257
+ for item in self._all_units:
258
+ try:
259
+ if item.ds_label == current_unit.name:
260
+ _ds_list.append(item)
261
+ except AttributeError:
262
+ continue
263
+
264
+ if len(_ds_list) == 0:
265
+ return None
266
+ if len(_ds_list) == 1:
267
+ return _ds_list[0]
268
+ return _ds_list
269
+
270
+ def _name_label_match(
271
+ self,
272
+ current_unit: Unit,
273
+ name_override: str | None = None,
274
+ ) -> Unit | list[Unit] | None:
275
+ """Pulls out all units with same name as the input unit.
276
+
277
+ Returns:
278
+ Union[Unit, list[Unit], None]: Either a singular unit or list of units with matching names, if none exist returns none. Does not return itself
279
+ """
280
+
281
+ _name = name_override or str(current_unit.name)
282
+ _name_list = []
283
+ for item in self._all_units:
284
+ if item.name == _name and item != current_unit:
285
+ _name_list.append(item)
286
+ else:
287
+ pass
288
+
289
+ if len(_name_list) == 0:
290
+ return None
291
+ if len(_name_list) == 1:
292
+ return _name_list[0]
293
+ return _name_list
294
+
295
+ def _read(self):
296
+ # Read DAT data
297
+ with open(self._filepath) as dat_file:
298
+ self._raw_data = [line.rstrip("\n") for line in dat_file.readlines()]
299
+
300
+ # Generate DAT structure
301
+ self._update_dat_struct()
302
+
303
+ # Get network .gxy if present
304
+ gxy_path = self._filepath.with_suffix(".gxy")
305
+ if gxy_path.exists():
306
+ self._gxy_filepath = gxy_path
307
+ with open(self._gxy_filepath) as gxy_file:
308
+ self._gxy_data = gxy_file.read()
309
+ else:
310
+ self._gxy_filepath = None
311
+ self._gxy_data = None
312
+
313
+ @handle_exception(when="write")
314
+ def _write(self) -> str:
315
+ """Returns string representation of the current DAT data
316
+
317
+ Returns:
318
+ str: Full string representation of DAT in its most recent state (including changes not yet saved to disk)
319
+ """
320
+ self._update_raw_data()
321
+ self._update_general_parameters()
322
+ self._update_dat_struct()
323
+ self._update_unit_names()
324
+
325
+ return "\n".join(self._raw_data) + "\n"
326
+
327
+ def _create_from_blank(self, with_gxy: bool = False) -> None:
328
+ # No filepath specified, create new 'blank' DAT in memory
329
+ # ** Update these to have minimal data needed (general header, empty IC header)
330
+ self._dat_struct = [
331
+ {"start": 0, "Type": "GENERAL", "end": 6},
332
+ {"Type": "INITIAL CONDITIONS", "start": 7, "end": 8},
333
+ ]
334
+ self._raw_data = [
335
+ "",
336
+ "#REVISION#1",
337
+ " 0 0.750 0.900 0.100 0.001 12SI",
338
+ " 10.000 0.010 0.010 0.700 0.100 0.700 0.000",
339
+ "RAD FILE",
340
+ "",
341
+ "END GENERAL",
342
+ "INITIAL CONDITIONS",
343
+ " label ? flow stage froude no velocity umode ustate z",
344
+ ]
345
+
346
+ self._gxy_filepath = None
347
+ if with_gxy:
348
+ self._gxy_data = ""
349
+ else:
350
+ self._gxy_data = None
351
+
352
+ def _get_general_parameters(self) -> None:
353
+ # ** Get general parameters here
354
+ self.title = self._raw_data[0]
355
+ self.general_parameters = {}
356
+ line = f"{self._raw_data[2]:<70}"
357
+ params = units.helpers.split_10_char(line)
358
+ if params[6] == "":
359
+ # Adds the measurements unit as DEFAULT if not specified
360
+ params[6] = "DEFAULT"
361
+ line = f"{self._raw_data[3]:<70}"
362
+ params.extend(units.helpers.split_10_char(line))
363
+
364
+ self.general_parameters["Node Count"] = _to_int(params[0], 0)
365
+ self.general_parameters["Lower Froude"] = _to_float(params[1], 0.75)
366
+ self.general_parameters["Upper Froude"] = _to_float(params[2], 0.9)
367
+ self.general_parameters["Min Depth"] = _to_float(params[3], 0.1)
368
+ self.general_parameters["Convergence Direct"] = _to_float(params[4], 0.001)
369
+ self._label_len = _to_int(params[5], 12) # label length
370
+ self.general_parameters["Units"] = params[6] # "DEFAULT" set during read above.
371
+ self.general_parameters["Water Temperature"] = _to_float(params[7], 10.0)
372
+ self.general_parameters["Convergence Flow"] = _to_float(params[8], 0.01)
373
+ self.general_parameters["Convergence Head"] = _to_float(params[9], 0.01)
374
+ self.general_parameters["Mathematical Damping"] = _to_float(params[10], 0.7)
375
+ self.general_parameters["Pivotal Choice"] = _to_float(params[11], 0.1)
376
+ self.general_parameters["Under-relaxation"] = _to_float(params[12], 0.7)
377
+ self.general_parameters["Matrix Dummy"] = _to_float(params[13], 0.0)
378
+ self.general_parameters["RAD File"] = self._raw_data[5] # No default, optional
379
+
380
+    def _update_general_parameters(self) -> None:
+        self._raw_data[0] = self.title
+        self._raw_data[5] = self.general_parameters["RAD File"]
+        general_params_1 = units.helpers.join_10_char(
+            self.general_parameters["Node Count"],
+            self.general_parameters["Lower Froude"],
+            self.general_parameters["Upper Froude"],
+            self.general_parameters["Min Depth"],
+            self.general_parameters["Convergence Direct"],
+            self._label_len,
+        )
+        general_params_1 += self.general_parameters["Units"]
+        self._raw_data[2] = general_params_1
+
+        general_params_2 = units.helpers.join_10_char(
+            self.general_parameters["Water Temperature"],
+            self.general_parameters["Convergence Flow"],
+            self.general_parameters["Convergence Head"],
+            self.general_parameters["Mathematical Damping"],
+            self.general_parameters["Pivotal Choice"],
+            self.general_parameters["Under-relaxation"],
+            self.general_parameters["Matrix Dummy"],
+        )
+        self._raw_data[3] = general_params_2
+
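An illustrative counterpart for the writing direction, right-justifying each value in a 10-character column (a sketch only; the real `join_10_char` helper handles more cases):

    def join_10_char(*values) -> str:
        """Right-justify each value in a 10-character column (illustrative only)."""
        out = ""
        for value in values:
            text = f"{value:>10.3f}" if isinstance(value, float) else f"{value:>10}"
            out += text[:10]
        return out

    print(join_10_char(0, 0.75, 0.9, 0.1, 0.001, 12) + "SI")
    #          0     0.750     0.900     0.100     0.001        12SI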
+    def _update_unit_names(self):
+        for unit_group, unit_group_name in [
+            (self.boundaries, "boundaries"),
+            (self.sections, "sections"),
+            (self.structures, "structures"),
+            (self.conduits, "conduits"),
+            (self.losses, "losses"),
+        ]:
+            for name, unit in unit_group.copy().items():
+                if name != unit.name:
+                    # Check whether the new name already exists as a label
+                    if unit.name in unit_group:
+                        raise Exception(
+                            f'Cannot update label "{name}" to "{unit.name}" because '
+                            f'"{unit.name}" already exists in the network {unit_group_name} group',
+                        )
+                    unit_group[unit.name] = unit
+                    del unit_group[name]
+                    # Update label in initial conditions
+                    if unit_group_name not in ["boundaries", "losses"]:
+                        # TODO: Need a more thorough check for whether a unit appears in
+                        # the ICs, e.g. a culvert inlet and a river section may share a
+                        # label, but only the river section label should update in ICs
+                        self.initial_conditions.update_label(name, unit.name)
+
+                    # Update label in GISINFO and GXY data
+                    self._update_gisinfo_label(
+                        unit._unit,
+                        unit._subtype,
+                        name,
+                        unit.name,
+                        unit_group_name in ["boundaries", "losses"],  # if True, ignore second label
+                    )
+                    self._update_gxy_label(unit._unit, unit._subtype, name, unit.name)
+
+        # Update IC table names in raw_data if any name changes
+        ic_start, ic_end = next(
+            (block["start"], block["end"])
+            for block in self._dat_struct
+            if block["Type"] == "INITIAL CONDITIONS"
+        )
+        self._raw_data[ic_start : ic_end + 1] = self.initial_conditions._write()
+
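A sketch of the rename flow this method supports: change a unit's `.name` in memory and, on the next write, the dictionary key, initial conditions row and GIS labels are all reconciled (labels and paths are placeholders):

    from floodmodeller_api import DAT

    dat = DAT("network.dat")
    dat.sections["S1"].name = "S1_US"  # rename in memory
    dat.save("network_renamed.dat")    # _update_unit_names reconciles the rest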
+    def _update_raw_data(self):
+        block_shift = 0
+        comment_tracker = 0
+        comment_units = [unit for unit in self._all_units if unit._unit == "COMMENT"]
+        prev_block_end = self._dat_struct[0]["end"]
+        existing_units = {
+            "boundaries": [],
+            "structures": [],
+            "sections": [],
+            "conduits": [],
+            "losses": [],
+        }
+
+        for block in self._dat_struct:
+            # Check for all supported unit types
+            if block["Type"] in units.SUPPORTED_UNIT_TYPES:
+                # Clause for when a unit has been newly inserted into the DAT file
+                if "new_insert" in block:
+                    block["start"] = prev_block_end + 1
+                    block["end"] = block["start"] + len(block["new_insert"]) - 1
+                    self._raw_data[block["start"] : block["start"]] = block["new_insert"]
+                    block_shift += len(block["new_insert"])
+                    prev_block_end = block["end"]
+                    del block["new_insert"]
+
+                else:
+                    unit_data = self._raw_data[
+                        block["start"] + block_shift : block["end"] + 1 + block_shift
+                    ]
+                    prev_block_len = len(unit_data)
+
+                    if block["Type"] == "INITIAL CONDITIONS":
+                        new_unit_data = self.initial_conditions._write()
+                    elif block["Type"] == "COMMENT":
+                        comment = comment_units[comment_tracker]
+                        new_unit_data = comment._write()
+                        comment_tracker += 1
+
+                    elif block["Type"] == "VARIABLES":
+                        new_unit_data = self.variables._write()
+
+                    else:
+                        if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
+                            unit_name = unit_data[2][: self._label_len].strip()
+                        else:
+                            unit_name = unit_data[1][: self._label_len].strip()
+
+                        # Get unit object
+                        unit_group = getattr(
+                            self,
+                            units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"],
+                        )
+                        if unit_name in unit_group:
+                            # Block still exists
+                            new_unit_data = unit_group[unit_name]._write()
+                            existing_units[
+                                units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
+                            ].append(unit_name)
+                        else:
+                            # Unit block has been deleted
+                            new_unit_data = []
+
+                    new_block_len = len(new_unit_data)
+                    self._raw_data[
+                        block["start"] + block_shift : block["end"] + 1 + block_shift
+                    ] = new_unit_data
+                    # Adjust block_shift for the change in line count of this block
+                    block_shift += new_block_len - prev_block_len
+                    # Keep a record of where the last block read in ends
+                    prev_block_end = block["end"] + block_shift
+
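A toy illustration (not the library code) of the `block_shift` bookkeeping above: once a rewritten block changes length, every later block's recorded `start`/`end` is stale by the cumulative difference:

    raw = ["A1", "A2", "B1", "B2", "B3"]
    blocks = [{"start": 0, "end": 1}, {"start": 2, "end": 4}]

    block_shift = 0
    new_a = ["A1", "A2", "A3"]  # block A grows by one line
    prev_len = blocks[0]["end"] - blocks[0]["start"] + 1
    raw[blocks[0]["start"] : blocks[0]["end"] + 1] = new_a
    block_shift += len(new_a) - prev_len  # now 1

    # Block B's stored indices (2..4) are stale; apply the shift when slicing:
    print(raw[blocks[1]["start"] + block_shift : blocks[1]["end"] + 1 + block_shift])
    # ['B1', 'B2', 'B3']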
+    def _get_unit_definitions(self):  # noqa: C901
+        # Get unit definitions
+        self.sections = {}
+        self.boundaries = {}
+        self.structures = {}
+        self.conduits = {}
+        self.losses = {}
+        self._unsupported = {}
+        self._all_units = []
+        for block in self._dat_struct:
+            unit_data = self._raw_data[block["start"] : block["end"] + 1]
+            if block["Type"] in units.SUPPORTED_UNIT_TYPES:
+                # Deal with initial conditions block
+                if block["Type"] == "INITIAL CONDITIONS":
+                    self.initial_conditions = units.IIC(unit_data, n=self._label_len)
+                    continue
+
+                if block["Type"] == "COMMENT":
+                    self._all_units.append(units.COMMENT(unit_data, n=self._label_len))
+                    continue
+
+                if block["Type"] == "VARIABLES":
+                    self.variables = units.Variables(unit_data)
+                    continue
+
+                # Check whether the unit type has associated subtypes so that the
+                # unit name can be correctly assigned
+                if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
+                    unit_name = unit_data[2][: self._label_len].strip()
+                else:
+                    unit_name = unit_data[1][: self._label_len].strip()
+
+                # Create an instance of the unit and add it to the relevant group
+                unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"])
+                if unit_name in unit_group:
+                    raise Exception(
+                        f'Duplicate label ({unit_name}) encountered within category: '
+                        f'{units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]}',
+                    )
+                # Unit type names with spaces/dashes (e.g. Flat-V Weir) map to
+                # underscore-separated class names
+                unit_type = block["Type"].replace(" ", "_").replace("-", "_")
+                unit_group[unit_name] = eval(
+                    f"units.{unit_type}({unit_data}, {self._label_len})",
+                )
+                self._all_units.append(unit_group[unit_name])
+
+            elif block["Type"] in units.UNSUPPORTED_UNIT_TYPES:
+                # Check whether the unit type has associated subtypes so that the
+                # unit name can be correctly assigned
+                if units.UNSUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
+                    unit_name = unit_data[2][: self._label_len].strip()
+                    subtype = True
+                else:
+                    unit_name = unit_data[1][: self._label_len].strip()
+                    subtype = False
+
+                self._unsupported[f"{unit_name} ({block['Type']})"] = units.UNSUPPORTED(
+                    unit_data,
+                    self._label_len,
+                    unit_name=unit_name,
+                    unit_type=block["Type"],
+                    subtype=subtype,
+                )
+                self._all_units.append(self._unsupported[f"{unit_name} ({block['Type']})"])
+
+            elif block["Type"] not in ("GENERAL", "GISINFO"):
+                raise Exception(f"Unexpected unit type encountered: {block['Type']}")
+
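As an aside, the `eval` call above could be written without `eval`; a sketch of a drop-in equivalent, assuming each unit class accepts `(unit_data, label_len)` positionally as the eval string implies:

    unit_type = block["Type"].replace(" ", "_").replace("-", "_")
    unit_cls = getattr(units, unit_type)  # e.g. units.RIVER, units.FLAT_V_WEIR
    unit_group[unit_name] = unit_cls(unit_data, self._label_len)

This would also avoid round-tripping `unit_data` through its string repr, which the eval form does.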
+    def _update_dat_struct(self) -> None:  # noqa: C901, PLR0912
+        """Internal method used to update self._dat_struct, which details the overall
+        structure of the dat file as a list of blocks, each of which is a dictionary
+        containing the 'start', 'end' and 'Type' of the block.
+        """
+        # Generate DAT structure
+        dat_struct = []
+        in_block = False
+        in_general = True
+        in_comment = False
+        comment_n = None  # Used as a counter for the number of lines in a comment block
+        gisinfo_block = False
+        general_block = {"start": 0, "Type": "GENERAL"}
+        unit_block: dict[str, Any] = {}
+        for idx, line in enumerate(self._raw_data):
+            # Deal with the 'general' header
+            if in_general is True:
+                if line == "END GENERAL":
+                    general_block["end"] = idx
+                    dat_struct.append(general_block)
+                    in_general = False
+                continue
+
+            # Deal with comment blocks explicitly as they could contain unit keywords
+            if in_comment and comment_n is None:
+                comment_n = int(line.strip())
+                continue
+            if in_comment:
+                comment_n -= 1
+                if comment_n <= 0:
+                    unit_block["end"] = idx + comment_n  # add ending index
+                    # Append the completed comment block to the dat_struct
+                    dat_struct.append(unit_block)
+                    unit_block = {}  # reset unit block
+                    in_comment = False
+                    in_block = False
+                    comment_n = None
+                continue  # move onto the next line while still in the comment block
+
+            if line == "COMMENT":
+                in_comment = True
+                unit_block, in_block = self._close_struct_block(
+                    dat_struct,
+                    "COMMENT",
+                    unit_block,
+                    in_block,
+                    idx,
+                )
+                continue
+
+            if line == "GISINFO":
+                gisinfo_block = True
+                unit_block, in_block = self._close_struct_block(
+                    dat_struct,
+                    "GISINFO",
+                    unit_block,
+                    in_block,
+                    idx,
+                )
+
+            if not gisinfo_block:
+                if line.split(" ")[0] in units.ALL_UNIT_TYPES:
+                    # Splitting on " " (rather than any whitespace) avoids an
+                    # IndexError on empty lines
+                    unit_type = line.split()[0]
+                elif " ".join(line.split()[:2]) in units.ALL_UNIT_TYPES:
+                    unit_type = " ".join(line.split()[:2])
+                else:
+                    continue
+
+                unit_block, in_block = self._close_struct_block(
+                    dat_struct,
+                    unit_type,
+                    unit_block,
+                    in_block,
+                    idx,
+                )
+
+        if len(unit_block) != 0:
+            # Only add an end index if a block is present (i.e. an empty DAT stays empty)
+            unit_block["end"] = len(self._raw_data) - 1
+            dat_struct.append(unit_block)  # add final block
+
+        self._dat_struct = dat_struct
+
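For reference, the structure rebuilt here takes roughly this shape for a small model (indices are line positions in `_raw_data`; the values here are made up):

    [
        {"Type": "GENERAL", "start": 0, "end": 6},
        {"Type": "RIVER", "start": 7, "end": 28},
        {"Type": "INITIAL CONDITIONS", "start": 29, "end": 32},
        {"Type": "GISINFO", "start": 33, "end": 40},
    ]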
+    def _close_struct_block(  # noqa: PLR0913
+        self,
+        dat_struct: list[dict],
+        unit_type: str,
+        unit_block: dict,
+        in_block: bool,
+        idx: int,
+    ) -> tuple[dict, bool]:
+        """Helper method to close a block in the dat struct"""
+        if in_block is True:
+            unit_block["end"] = idx - 1  # add ending index
+            # Append the just-finished block to the dat_struct
+            dat_struct.append(unit_block)
+            unit_block = {}  # reset unit block
+        in_block = True
+        unit_block["Type"] = unit_type  # start new block
+        unit_block["start"] = idx  # add starting index
+
+        return unit_block, in_block
+
+    @handle_exception(when="remove unit from")
+    def remove_unit(self, unit: Unit) -> None:
+        """Remove a unit from the dat file.
+
+        Args:
+            unit (Unit): Flood Modeller unit input.
+
+        Raises:
+            TypeError: Raised if the given unit isn't an instance of a Flood Modeller Unit.
+        """
+        # Catch anything that is not a valid unit
+        if not isinstance(unit, Unit):
+            raise TypeError("unit must be a Flood Modeller Unit instance")
+
+        # Remove from all units
+        index = self._all_units.index(unit)
+        del self._all_units[index]
+        # Remove from dat_struct (offset by one for the GENERAL block)
+        dat_struct_unit = self._dat_struct[index + 1]
+        del self._dat_struct[index + 1]
+        # Remove from raw data
+        del self._raw_data[dat_struct_unit["start"] : dat_struct_unit["end"] + 1]
+        # Remove from unit group
+        unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
+        unit_group = getattr(self, unit_group_name)
+        del unit_group[unit.name]
+        # Remove from initial conditions
+        self.initial_conditions.data = self.initial_conditions.data.loc[
+            self.initial_conditions.data["label"] != unit.name
+        ]
+
+        self._update_dat_struct()
+        self.general_parameters["Node Count"] -= 1
+
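Typical usage (label and paths are placeholders):

    from floodmodeller_api import DAT

    dat = DAT("network.dat")
    dat.remove_unit(dat.sections["S1"])  # drops the unit's raw lines and its IC row
    dat.save("network_trimmed.dat")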
+    @handle_exception(when="insert unit into")
+    def insert_unit(  # noqa: C901, PLR0912, PLR0913
+        self,
+        unit: Unit,
+        add_before: Unit | None = None,
+        add_after: Unit | None = None,
+        add_at: int | None = None,
+        defer_update: bool = False,
+    ) -> None:
+        """Inserts a unit into the dat file.
+
+        Args:
+            unit (Unit): Flood Modeller unit input.
+            add_before (Unit): Flood Modeller unit to add before.
+            add_after (Unit): Flood Modeller unit to add after.
+            add_at (integer): Positional argument (starting at 0) of where to add in
+                the dat file. To add at the end of the network you can use -1.
+
+        Raises:
+            SyntaxError: Raised if none, or more than one, of add_before, add_after
+                and add_at is given.
+            TypeError: Raised if the given unit isn't an instance of a Flood Modeller Unit.
+            NameError: Raised if the unit name already appears in the unit group.
+        """
+        # Catch errors
+        provided_params = sum(arg is not None for arg in (add_before, add_after, add_at))
+        if provided_params == 0:
+            raise SyntaxError(
+                "No positional argument given. Please provide one of add_before, add_at or add_after",
+            )
+        if provided_params > 1:
+            raise SyntaxError("Only one of add_at, add_before or add_after is required")
+        if not isinstance(unit, Unit):
+            raise TypeError("unit must be a Flood Modeller Unit instance")
+        if add_at is None and not (isinstance(add_before, Unit) or isinstance(add_after, Unit)):
+            raise TypeError(
+                "add_before or add_after argument must be a Flood Modeller Unit type",
+            )
+
+        unit_class = unit._unit
+        if unit_class != "COMMENT":
+            _validate_unit(unit)
+            unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
+            unit_group = getattr(self, unit_group_name)
+            if unit.name in unit_group:
+                raise NameError(
+                    "Name already appears in unit group. Cannot have two units with the same name in the same group",
+                )
+
+        # Positional argument
+        if add_at is not None:
+            insert_index = add_at
+            if insert_index < 0:
+                insert_index += len(self._all_units) + 1
+                if insert_index < 0:
+                    raise Exception(f"Invalid add_at index: {add_at}")
+        else:
+            check_unit = add_before or add_after
+            for index, thing in enumerate(self._all_units):
+                if thing == check_unit:
+                    insert_index = index
+                    insert_index += 1 if add_after else 0
+                    break
+            else:
+                raise Exception(
+                    f"{check_unit} not found in dat network, so cannot be used to add before/after",
+                )
+
+        unit_data = unit._write()
+        self._all_units.insert(insert_index, unit)
+        if unit._unit != "COMMENT":
+            unit_group[unit.name] = unit
+        self._dat_struct.insert(
+            insert_index + 1,
+            {"Type": unit_class, "new_insert": unit_data},
+        )  # add to dat_struct; start/end are set on the next _update_raw_data
+
+        if unit._unit != "COMMENT":
+            # Add a default row for the new unit to the initial conditions table
+            iic_data = [unit.name, "y", 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
+            self.initial_conditions.data.loc[len(self.initial_conditions.data)] = iic_data
+
+            # Node count is not updated for comments
+            self.general_parameters["Node Count"] += 1
+
+        if not defer_update:
+            self._update_raw_data()
+            self._update_dat_struct()
+
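Typical usage, assuming a donor DAT provides the unit instance to insert (all labels and paths are placeholders):

    from floodmodeller_api import DAT

    dat = DAT("network.dat")
    donor = DAT("donor.dat")
    dat.insert_unit(donor.sections["S10"], add_after=dat.sections["S2"])
    dat.save("network_extended.dat")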
+    def insert_units(
+        self,
+        units: list[Unit],
+        add_before: Unit | None = None,
+        add_after: Unit | None = None,
+        add_at: int | None = None,
+    ) -> None:
+        """Inserts a list of units into the dat file.
+
+        Args:
+            units (list[Unit]): List of Flood Modeller units.
+            add_before (Unit): Flood Modeller unit to add before.
+            add_after (Unit): Flood Modeller unit to add after.
+            add_at (integer): Positional argument (starting at 0) of where to add in
+                the dat file. To add at the end of the network you can use -1.
+        """
+        # Insert in reverse when adding at a fixed non-negative index or after a
+        # unit, so the units keep their original order
+        ordered = (add_at is None and add_after is None) or (isinstance(add_at, int) and add_at < 0)
+        ordered_units = units if ordered else units[::-1]
+        for unit in ordered_units:
+            self.insert_unit(unit, add_before, add_after, add_at, defer_update=True)
+        self._update_raw_data()
+        self._update_dat_struct()
+
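The reversal is what keeps the inserted units in their given order; inserting repeatedly at a fixed position would otherwise reverse them, as this toy example shows:

    seq = ["x", "y"]
    for item in ["A", "B"][::-1]:  # insert in reverse at a fixed index
        seq.insert(1, item)
    print(seq)  # ['x', 'A', 'B', 'y'] - original order preserved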
+    def _update_gisinfo_label(  # noqa: PLR0913
+        self,
+        unit_type,
+        unit_subtype,
+        prev_lbl,
+        new_lbl,
+        ignore_second,
+    ):
+        """Update labels in the GISINFO block if a unit is renamed"""
+
+        start, end = next(
+            (block["start"], block["end"])
+            for block in self._dat_struct
+            if block["Type"] == "GISINFO"
+        )
+        gisinfo_block = self._raw_data[start : end + 1]
+
+        prefix = unit_type if unit_subtype is None else f"{unit_type} {unit_subtype}"
+
+        new_gisinfo_block = []
+        for line in gisinfo_block:
+            # Replace first label (the trailing space is important so that node
+            # labels with similar starting characters are not matched)
+            if line.startswith(f"{prefix} {prev_lbl} "):
+                line = line.replace(f"{prefix} {prev_lbl} ", f"{prefix} {new_lbl} ")
+
+            # Replace second label (trailing space important again)
+            if not ignore_second and line.startswith(f"{prev_lbl} "):
+                line = line.replace(f"{prev_lbl} ", f"{new_lbl} ", 1)
+
+            new_gisinfo_block.append(line)
+
+        self._raw_data[start : end + 1] = new_gisinfo_block
+
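A toy illustration of why the trailing space matters when matching labels: `S1` must not match a longer label such as `S1_long`:

    prefix = "RIVER SECTION"
    line_a = "RIVER SECTION S1 100.0 200.0"
    line_b = "RIVER SECTION S1_long 100.0 200.0"
    print(line_a.startswith(f"{prefix} S1 "))  # True  - exact label
    print(line_b.startswith(f"{prefix} S1 "))  # False - longer label left alone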
+    def _update_gxy_label(
+        self,
+        unit_type: str,
+        unit_subtype: str,
+        prev_lbl: str,
+        new_lbl: str,
+    ) -> None:
+        """Update labels in the GXY file if a unit is renamed"""
+
+        if self._gxy_data is not None:
+            if unit_subtype is None:
+                unit_subtype = ""
+
+            old = f"{unit_type}_{unit_subtype}_{prev_lbl}"
+            new = f"{unit_type}_{unit_subtype}_{new_lbl}"
+
+            self._gxy_data = self._gxy_data.replace(old, new)
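A toy illustration of the GXY rename, assuming node records are keyed as `<TYPE>_<SUBTYPE>_<label>` (placeholder data, not a real GXY file):

    gxy = "[RIVER_SECTION_S1]\nx=123.4\ny=567.8\n"
    print(gxy.replace("RIVER_SECTION_S1", "RIVER_SECTION_S1_US"))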