floodmodeller-api 0.5.1__py3-none-any.whl → 0.5.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. floodmodeller_api/__init__.py +10 -0
  2. floodmodeller_api/_base.py +29 -20
  3. floodmodeller_api/backup.py +12 -10
  4. floodmodeller_api/dat.py +162 -91
  5. floodmodeller_api/diff.py +1 -1
  6. floodmodeller_api/hydrology_plus/hydrology_plus_export.py +1 -1
  7. floodmodeller_api/ied.py +2 -4
  8. floodmodeller_api/ief.py +29 -17
  9. floodmodeller_api/ief_flags.py +1 -1
  10. floodmodeller_api/inp.py +4 -6
  11. floodmodeller_api/logs/lf.py +18 -12
  12. floodmodeller_api/logs/lf_helpers.py +2 -2
  13. floodmodeller_api/logs/lf_params.py +1 -5
  14. floodmodeller_api/mapping.py +9 -2
  15. floodmodeller_api/test/test_conveyance.py +9 -4
  16. floodmodeller_api/test/test_dat.py +166 -18
  17. floodmodeller_api/test/test_data/EX18_DAT_expected.json +164 -144
  18. floodmodeller_api/test/test_data/EX3_DAT_expected.json +6 -2
  19. floodmodeller_api/test/test_data/EX6_DAT_expected.json +12 -46
  20. floodmodeller_api/test/test_data/encoding_test_cp1252.dat +1081 -0
  21. floodmodeller_api/test/test_data/encoding_test_utf8.dat +1081 -0
  22. floodmodeller_api/test/test_data/integrated_bridge/AR_NoSP_NoBl_2O_NO_OneFRC.ied +33 -0
  23. floodmodeller_api/test/test_data/integrated_bridge/AR_vSP_25pc_1O.ied +32 -0
  24. floodmodeller_api/test/test_data/integrated_bridge/PL_vSP_25pc_1O.ied +34 -0
  25. floodmodeller_api/test/test_data/integrated_bridge/SBTwoFRCsStaggered.IED +32 -0
  26. floodmodeller_api/test/test_data/integrated_bridge/US_NoSP_NoBl_OR_RN.ied +28 -0
  27. floodmodeller_api/test/test_data/integrated_bridge/US_SP_NoBl_OR_frc_PT2-5_RN.ied +34 -0
  28. floodmodeller_api/test/test_data/integrated_bridge/US_fSP_NoBl_1O.ied +30 -0
  29. floodmodeller_api/test/test_data/integrated_bridge/US_nSP_NoBl_1O.ied +49 -0
  30. floodmodeller_api/test/test_data/integrated_bridge/US_vSP_NoBl_2O_Para.ied +35 -0
  31. floodmodeller_api/test/test_data/integrated_bridge.dat +40 -0
  32. floodmodeller_api/test/test_data/network.ied +2 -2
  33. floodmodeller_api/test/test_data/network_dat_expected.json +141 -243
  34. floodmodeller_api/test/test_data/network_ied_expected.json +2 -2
  35. floodmodeller_api/test/test_data/network_with_comments.ied +2 -2
  36. floodmodeller_api/test/test_ied.py +1 -1
  37. floodmodeller_api/test/test_ief.py +10 -2
  38. floodmodeller_api/test/test_integrated_bridge.py +159 -0
  39. floodmodeller_api/test/test_json.py +9 -3
  40. floodmodeller_api/test/test_logs_lf.py +45 -24
  41. floodmodeller_api/test/test_river.py +1 -1
  42. floodmodeller_api/test/test_toolbox_structure_log.py +0 -1
  43. floodmodeller_api/test/test_xml2d.py +5 -5
  44. floodmodeller_api/to_from_json.py +1 -1
  45. floodmodeller_api/tool.py +3 -5
  46. floodmodeller_api/toolbox/model_build/add_siltation_definition.py +1 -1
  47. floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +12 -8
  48. floodmodeller_api/units/__init__.py +15 -0
  49. floodmodeller_api/units/_base.py +73 -10
  50. floodmodeller_api/units/_helpers.py +343 -0
  51. floodmodeller_api/units/boundaries.py +59 -71
  52. floodmodeller_api/units/comment.py +1 -1
  53. floodmodeller_api/units/conduits.py +57 -54
  54. floodmodeller_api/units/connectors.py +112 -0
  55. floodmodeller_api/units/controls.py +107 -0
  56. floodmodeller_api/units/iic.py +2 -9
  57. floodmodeller_api/units/losses.py +42 -42
  58. floodmodeller_api/units/sections.py +40 -43
  59. floodmodeller_api/units/structures.py +360 -530
  60. floodmodeller_api/units/units.py +25 -26
  61. floodmodeller_api/units/unsupported.py +5 -7
  62. floodmodeller_api/units/variables.py +2 -2
  63. floodmodeller_api/urban1d/_base.py +7 -8
  64. floodmodeller_api/urban1d/conduits.py +11 -21
  65. floodmodeller_api/urban1d/general_parameters.py +1 -1
  66. floodmodeller_api/urban1d/junctions.py +7 -11
  67. floodmodeller_api/urban1d/losses.py +13 -17
  68. floodmodeller_api/urban1d/outfalls.py +16 -21
  69. floodmodeller_api/urban1d/raingauges.py +3 -9
  70. floodmodeller_api/urban1d/subsections.py +3 -4
  71. floodmodeller_api/urban1d/xsections.py +11 -15
  72. floodmodeller_api/util.py +7 -4
  73. floodmodeller_api/validation/parameters.py +7 -3
  74. floodmodeller_api/validation/urban_parameters.py +1 -4
  75. floodmodeller_api/validation/validation.py +9 -4
  76. floodmodeller_api/version.py +1 -1
  77. floodmodeller_api/xml2d.py +9 -11
  78. floodmodeller_api/xml2d_template.py +1 -1
  79. floodmodeller_api/zz.py +7 -6
  80. {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/LICENSE.txt +1 -1
  81. {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/METADATA +11 -3
  82. {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/RECORD +85 -70
  83. {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/WHEEL +1 -1
  84. floodmodeller_api/units/helpers.py +0 -121
  85. {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/entry_points.txt +0 -0
  86. {floodmodeller_api-0.5.1.dist-info → floodmodeller_api-0.5.2.dist-info}/top_level.txt +0 -0
@@ -1,3 +1,6 @@
1
+ import logging
2
+ import sys
3
+
1
4
  from .dat import DAT
2
5
  from .ied import IED
3
6
  from .ief import IEF
@@ -7,3 +10,10 @@ from .util import read_file
7
10
  from .version import __version__
8
11
  from .xml2d import XML2D
9
12
  from .zz import ZZN, ZZX
13
+
14
+ logging.basicConfig(
15
+ stream=sys.stdout,
16
+ level=logging.INFO,
17
+ format="%(asctime)s - %(levelname)s - %(message)s",
18
+ datefmt="%H:%M:%S",
19
+ )
@@ -1,6 +1,6 @@
1
1
  """
2
2
  Flood Modeller Python API
3
- Copyright (C) 2024 Jacobs U.K. Limited
3
+ Copyright (C) 2025 Jacobs U.K. Limited
4
4
 
5
5
  This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
6
  as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -18,6 +18,7 @@ from __future__ import annotations
18
18
 
19
19
  """ Holds the base file class for API file classes """
20
20
 
21
+ import logging
21
22
  from pathlib import Path
22
23
  from typing import NoReturn
23
24
 
@@ -36,6 +37,7 @@ class FMFile(Jsonable):
36
37
  _filetype: str
37
38
  _suffix: str
38
39
  MAX_DIFF = 25
40
+ ENCODING = "cp1252"
39
41
 
40
42
  def __init__(self, filepath: str | Path | None = None, **kwargs):
41
43
  if filepath is not None:
@@ -88,9 +90,9 @@ class FMFile(Jsonable):
88
90
  raise UserWarning(msg)
89
91
 
90
92
  string = self._write()
91
- with open(self._filepath, "w") as _file:
93
+ with open(self._filepath, "w", encoding=self.ENCODING) as _file:
92
94
  _file.write(string)
93
- print(f"{self._filetype} File Updated!")
95
+ logging.info("%s File Updated!", self._filepath)
94
96
 
95
97
  def _save(self, filepath):
96
98
  filepath = Path(filepath).absolute()
@@ -102,40 +104,47 @@ class FMFile(Jsonable):
102
104
  Path.mkdir(filepath.parent)
103
105
 
104
106
  string = self._write()
105
- with open(filepath, "w") as _file:
107
+ with open(filepath, "w", encoding=self.ENCODING) as _file:
106
108
  _file.write(string)
107
109
  self._filepath = filepath # Updates the filepath attribute to the given path
108
110
 
109
- print(f"{self._filetype} File Saved to: {filepath}")
111
+ logging.info("%s File Saved to: %s", self._filetype, filepath)
110
112
 
111
113
  @handle_exception(when="compare")
112
- def _diff(self, other, force_print=False):
114
+ def _diff(self, other, force_print=False) -> None:
115
+ def _format_diff(diff_list, max_items=None) -> str:
116
+ return "\n".join(
117
+ f" {name}: {reason}"
118
+ for name, reason in (diff_list[:max_items] if max_items else diff_list)
119
+ )
120
+
113
121
  if self._filetype != other._filetype:
114
122
  msg = "Cannot compare objects of different filetypes"
115
123
  raise TypeError(msg)
116
124
  diff = self._get_diff(other)
117
125
  if diff[0]:
118
- print("No difference, files are equivalent")
119
- else:
120
- print(f"Files not equivalent, {len(diff[1])} difference(s) found:")
121
- if len(diff[1]) > self.MAX_DIFF and not force_print:
122
- print(f"[Showing first {self.MAX_DIFF} differences...] ")
123
- print(
124
- "\n".join(
125
- [f" {name}: {reason}" for name, reason in diff[1][: self.MAX_DIFF]],
126
- ),
127
- )
128
- print("\n...To see full list of all differences add force_print=True")
129
- else:
130
- print("\n".join([f" {name}: {reason}" for name, reason in diff[1]]))
126
+ logging.info("No difference, files are equivalent")
127
+ return
128
+ differences = (
129
+ f"[Showing first {self.MAX_DIFF} differences...]\n"
130
+ f"{_format_diff(diff[1], self.MAX_DIFF)}\n"
131
+ "...To see full list of all differences add force_print=True"
132
+ if len(diff[1]) > self.MAX_DIFF and not force_print
133
+ else _format_diff(diff[1])
134
+ )
135
+ logging.info("Files not equivalent, %s difference(s) found:\n%s", len(diff[1]), differences)
131
136
 
132
137
  def _get_diff(self, other):
133
138
  return self.__eq__(other, return_diff=True) # pylint: disable=unnecessary-dunder-call
134
139
 
135
140
  def _handle_exception(self, err, when) -> NoReturn:
136
- raise FloodModellerAPIError(err, when, self._filetype, self._filepath) from err
141
+ filepath_or_none = self._filepath if hasattr(self, "_filepath") else None
142
+ raise FloodModellerAPIError(err, when, self._filetype, filepath_or_none) from err
137
143
 
138
144
  def __eq__(self, other, return_diff=False):
145
+ if not isinstance(other, FMFile):
146
+ return NotImplemented if not return_diff else (False, ["Type mismatch"])
147
+
139
148
  result = True
140
149
  diff = []
141
150
  try:
@@ -1,6 +1,6 @@
1
1
  """
2
2
  Flood Modeller Python API
3
- Copyright (C) 2024 Jacobs U.K. Limited
3
+ Copyright (C) 2025 Jacobs U.K. Limited
4
4
 
5
5
  This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
6
  as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -17,6 +17,7 @@ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London
17
17
  from __future__ import annotations
18
18
 
19
19
  import filecmp
20
+ import logging
20
21
  import re
21
22
  import tempfile
22
23
  from datetime import datetime
@@ -60,6 +61,8 @@ class BackupControl(Jsonable):
60
61
  backup.clear_backup()
61
62
  """
62
63
 
64
+ ENCODING = "cp1252"
65
+
63
66
  def __init__(self):
64
67
  """
65
68
  Initialises a new BackUp object.
@@ -80,11 +83,15 @@ class BackupControl(Jsonable):
80
83
  # Create the backup directory if it doesn't exist
81
84
  if not self.backup_dir.exists():
82
85
  self.backup_dir.mkdir()
83
- print(f"{self.__class__.__name__}: Initialised backup directory at {self.backup_dir}")
86
+ logging.info(
87
+ "%s: Initialised backup directory at %s",
88
+ self.__class__.__name__,
89
+ self.backup_dir,
90
+ )
84
91
 
85
92
  # Create the backup CSV file if it doesn't exist
86
93
  if not self.backup_csv_path.exists():
87
- with open(self.backup_csv_path, "w") as f:
94
+ with open(self.backup_csv_path, "w", encoding=self.ENCODING) as f:
88
95
  f.write("path,file_id,dttm\n")
89
96
 
90
97
  def clear_backup(self, file_id="*"):
@@ -191,9 +198,8 @@ class File(BackupControl):
191
198
  """
192
199
 
193
200
  def __init__(self, path: str | Path = "", from_json: bool = False, **args):
194
- # TODO: Make protected properties so they can't be manipulated
195
201
  self.path = Path(path)
196
- # Check if the file exists
202
+ # Check if the file exists
197
203
  if not self.path.exists():
198
204
  msg = "File not found!"
199
205
  raise OSError(msg)
@@ -212,7 +218,6 @@ class File(BackupControl):
212
218
  Generate the file's unique identifier as using a hash of the absolute file path
213
219
  """
214
220
  # hash the absolute path because the same file name / directory structure may be mirrored across projects
215
- # TODO: Use a function that produces a shorter has to make interpretation of the directory easier
216
221
  fp_bytes = str(self.path.absolute()).encode()
217
222
  self.file_id = sha1(fp_bytes).hexdigest()
218
223
 
@@ -232,9 +237,8 @@ class File(BackupControl):
232
237
  backup_filepath = Path(self.backup_dir, self.backup_filename)
233
238
  copy(self.path, backup_filepath)
234
239
  # Log an entry to the csv to make it easy to find the file
235
- # TODO: Only log file_id and poath, don't log duplicate lines. Needs to be fast so it doesn't slow FMFile down
236
240
  log_str = f"{self.path!s},{self.file_id},{self.dttm_str}\n"
237
- with open(self.backup_csv_path, "a") as f:
241
+ with open(self.backup_csv_path, "a", encoding=self.ENCODING) as f:
238
242
  f.write(log_str)
239
243
 
240
244
  def list_backups(self) -> list:
@@ -259,8 +263,6 @@ class File(BackupControl):
259
263
  if len(backups) == 0 or not filecmp.cmp(self.path, backups[0].path):
260
264
  self._make_backup()
261
265
  # If the file doesn't match the last backup then do a back up
262
- # TODO: Use FloodModeller API implemented equivalence testing. This is implemented at a higher level than FMFile where this method is called.
263
- # TODO: Return the file path?
264
266
 
265
267
  def clear_backup(self):
266
268
  """
floodmodeller_api/dat.py CHANGED
@@ -1,6 +1,6 @@
1
1
  """
2
2
  Flood Modeller Python API
3
- Copyright (C) 2024 Jacobs U.K. Limited
3
+ Copyright (C) 2025 Jacobs U.K. Limited
4
4
 
5
5
  This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
6
  as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -16,13 +16,14 @@ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London
16
16
 
17
17
  from __future__ import annotations
18
18
 
19
+ from collections import defaultdict
19
20
  from pathlib import Path
20
21
  from typing import Any
21
22
 
22
23
  from . import units
23
24
  from ._base import FMFile
24
25
  from .units._base import Unit
25
- from .units.helpers import _to_float, _to_int
26
+ from .units._helpers import join_10_char, split_10_char, to_float, to_int
26
27
  from .util import handle_exception
27
28
  from .validation.validation import _validate_unit
28
29
 
@@ -110,8 +111,6 @@ class DAT(FMFile):
110
111
  """
111
112
  self._diff(other, force_print=force_print)
112
113
 
113
- # def _get_unit_from_connectivity(self, method) #use this as method prev and next
114
-
115
114
  @handle_exception(when="calculate next unit in")
116
115
  def next(self, unit: Unit) -> Unit | list[Unit] | None:
117
116
  """Finds next unit in the reach.
@@ -189,7 +188,7 @@ class DAT(FMFile):
189
188
  _junction_match = [
190
189
  junction
191
190
  for junction in self._all_units
192
- if junction._unit == "JUNCTION" and unit.name in junction.labels
191
+ if junction._unit == "JUNCTION" and unit.name in junction.labels # type: ignore
193
192
  ]
194
193
 
195
194
  # Case 2: Previous unit has positive distance to next
@@ -292,7 +291,7 @@ class DAT(FMFile):
292
291
 
293
292
  def _read(self) -> None:
294
293
  # Read DAT data
295
- with open(self._filepath) as dat_file:
294
+ with open(self._filepath, encoding=self.ENCODING) as dat_file:
296
295
  self._raw_data: list[str] = [line.rstrip("\n") for line in dat_file]
297
296
 
298
297
  # Generate DAT structure
@@ -352,33 +351,33 @@ class DAT(FMFile):
352
351
  self.title = self._raw_data[0]
353
352
  self.general_parameters = {}
354
353
  line = f"{self._raw_data[2]:<70}"
355
- params = units.helpers.split_10_char(line)
354
+ params = split_10_char(line)
356
355
  if params[6] == "":
357
356
  # Adds the measurements unit as DEFAULT if not specified
358
357
  params[6] = "DEFAULT"
359
358
  line = f"{self._raw_data[3]:<70}"
360
- params.extend(units.helpers.split_10_char(line))
361
-
362
- self.general_parameters["Node Count"] = _to_int(params[0], 0)
363
- self.general_parameters["Lower Froude"] = _to_float(params[1], 0.75)
364
- self.general_parameters["Upper Froude"] = _to_float(params[2], 0.9)
365
- self.general_parameters["Min Depth"] = _to_float(params[3], 0.1)
366
- self.general_parameters["Convergence Direct"] = _to_float(params[4], 0.001)
367
- self._label_len = _to_int(params[5], 12) # label length
359
+ params.extend(split_10_char(line))
360
+
361
+ self.general_parameters["Node Count"] = to_int(params[0], 0)
362
+ self.general_parameters["Lower Froude"] = to_float(params[1], 0.75)
363
+ self.general_parameters["Upper Froude"] = to_float(params[2], 0.9)
364
+ self.general_parameters["Min Depth"] = to_float(params[3], 0.1)
365
+ self.general_parameters["Convergence Direct"] = to_float(params[4], 0.001)
366
+ self._label_len = to_int(params[5], 12) # label length
368
367
  self.general_parameters["Units"] = params[6] # "DEFAULT" set during read above.
369
- self.general_parameters["Water Temperature"] = _to_float(params[7], 10.0)
370
- self.general_parameters["Convergence Flow"] = _to_float(params[8], 0.01)
371
- self.general_parameters["Convergence Head"] = _to_float(params[9], 0.01)
372
- self.general_parameters["Mathematical Damping"] = _to_float(params[10], 0.7)
373
- self.general_parameters["Pivotal Choice"] = _to_float(params[11], 0.1)
374
- self.general_parameters["Under-relaxation"] = _to_float(params[12], 0.7)
375
- self.general_parameters["Matrix Dummy"] = _to_float(params[13], 0.0)
368
+ self.general_parameters["Water Temperature"] = to_float(params[7], 10.0)
369
+ self.general_parameters["Convergence Flow"] = to_float(params[8], 0.01)
370
+ self.general_parameters["Convergence Head"] = to_float(params[9], 0.01)
371
+ self.general_parameters["Mathematical Damping"] = to_float(params[10], 0.7)
372
+ self.general_parameters["Pivotal Choice"] = to_float(params[11], 0.1)
373
+ self.general_parameters["Under-relaxation"] = to_float(params[12], 0.7)
374
+ self.general_parameters["Matrix Dummy"] = to_float(params[13], 0.0)
376
375
  self.general_parameters["RAD File"] = self._raw_data[5] # No default, optional
377
376
 
378
377
  def _update_general_parameters(self) -> None:
379
378
  self._raw_data[0] = self.title
380
379
  self._raw_data[5] = self.general_parameters["RAD File"]
381
- general_params_1 = units.helpers.join_10_char(
380
+ general_params_1 = join_10_char(
382
381
  self.general_parameters["Node Count"],
383
382
  self.general_parameters["Lower Froude"],
384
383
  self.general_parameters["Upper Froude"],
@@ -389,7 +388,7 @@ class DAT(FMFile):
389
388
  general_params_1 += self.general_parameters["Units"]
390
389
  self._raw_data[2] = general_params_1
391
390
 
392
- general_params_2 = units.helpers.join_10_char(
391
+ general_params_2 = join_10_char(
393
392
  self.general_parameters["Water Temperature"],
394
393
  self.general_parameters["Convergence Flow"],
395
394
  self.general_parameters["Convergence Head"],
@@ -407,6 +406,8 @@ class DAT(FMFile):
407
406
  (self.structures, "structures"),
408
407
  (self.conduits, "conduits"),
409
408
  (self.losses, "losses"),
409
+ (self.connectors, "connectors"),
410
+ (self.controls, "controls"),
410
411
  ]:
411
412
  for name, unit in unit_group.copy().items():
412
413
  if name != unit.name:
@@ -418,9 +419,6 @@ class DAT(FMFile):
418
419
  del unit_group[name]
419
420
  # Update label in ICs
420
421
  if unit_group_name not in ["boundaries", "losses"]:
421
- # TODO: Need to do a more thorough check for whether a unit is one in the ICs
422
- # e.g. Culvert inlet and river section may have same label, but only river
423
- # section label should update in ICs
424
422
  self.initial_conditions.update_label(name, unit.name)
425
423
 
426
424
  # Update label in GISINFO and GXY data
@@ -453,66 +451,65 @@ class DAT(FMFile):
453
451
  "sections": [],
454
452
  "conduits": [],
455
453
  "losses": [],
454
+ "connectors": [],
455
+ "controls": [],
456
456
  }
457
457
 
458
458
  for block in self._dat_struct:
459
459
  # Check for all supported boundary types
460
- if block["Type"] in units.SUPPORTED_UNIT_TYPES:
461
- # clause for when unit has been inserted into the dat file
462
- if "new_insert" in block:
463
- block["start"] = prev_block_end + 1
464
- block["end"] = block["start"] + len(block["new_insert"]) - 1
465
- self._raw_data[block["start"] : block["start"]] = block["new_insert"]
466
- block_shift += len(block["new_insert"])
467
- prev_block_end = block["end"]
468
- del block["new_insert"]
460
+ if block["Type"] not in units.SUPPORTED_UNIT_TYPES:
461
+ continue
462
+ # clause for when unit has been inserted into the dat file
463
+ if "new_insert" in block:
464
+ block["start"] = prev_block_end + 1
465
+ block["end"] = block["start"] + len(block["new_insert"]) - 1
466
+ self._raw_data[block["start"] : block["start"]] = block["new_insert"]
467
+ block_shift += len(block["new_insert"])
468
+ prev_block_end = block["end"]
469
+ del block["new_insert"]
469
470
 
470
- else:
471
- unit_data = self._raw_data[
472
- block["start"] + block_shift : block["end"] + 1 + block_shift
473
- ]
474
- prev_block_len = len(unit_data)
471
+ else:
472
+ unit_data = self._raw_data[
473
+ block["start"] + block_shift : block["end"] + 1 + block_shift
474
+ ]
475
+ prev_block_len = len(unit_data)
475
476
 
476
- if block["Type"] == "INITIAL CONDITIONS":
477
- new_unit_data = self.initial_conditions._write()
478
- elif block["Type"] == "COMMENT":
479
- comment = comment_units[comment_tracker]
480
- new_unit_data = comment._write()
481
- comment_tracker += 1
477
+ if block["Type"] == "INITIAL CONDITIONS":
478
+ new_unit_data = self.initial_conditions._write()
479
+ elif block["Type"] == "COMMENT":
480
+ comment = comment_units[comment_tracker]
481
+ new_unit_data = comment._write()
482
+ comment_tracker += 1
482
483
 
483
- elif block["Type"] == "VARIABLES":
484
- new_unit_data = self.variables._write()
484
+ elif block["Type"] == "VARIABLES":
485
+ new_unit_data = self.variables._write()
485
486
 
487
+ else:
488
+ if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
489
+ unit_name = unit_data[2][: self._label_len].strip()
486
490
  else:
487
- if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
488
- unit_name = unit_data[2][: self._label_len].strip()
489
- else:
490
- unit_name = unit_data[1][: self._label_len].strip()
491
-
492
- # Get unit object
493
- unit_group = getattr(
494
- self,
495
- units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"],
496
- )
497
- if unit_name in unit_group:
498
- # block still exists
499
- new_unit_data = unit_group[unit_name]._write()
500
- existing_units[
501
- units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
502
- ].append(unit_name)
503
- else:
504
- # Bdy block has been deleted
505
- new_unit_data = []
506
-
507
- new_block_len = len(new_unit_data)
508
- self._raw_data[
509
- block["start"] + block_shift : block["end"] + 1 + block_shift
510
- ] = new_unit_data
511
- # adjust block shift for change in number of lines in bdy block
512
- block_shift += new_block_len - prev_block_len
513
- prev_block_end = (
514
- block["end"] + block_shift
515
- ) # add in to keep a record of the last block read in
491
+ unit_name = unit_data[1][: self._label_len].strip()
492
+
493
+ # Get unit object
494
+ unit_group_str = units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
495
+ unit_group = getattr(self, unit_group_str)
496
+ if unit_name in unit_group:
497
+ # block still exists
498
+ new_unit_data = unit_group[unit_name]._write()
499
+ existing_units[unit_group_str].append(unit_name)
500
+ else:
501
+ # Bdy block has been deleted
502
+ new_unit_data = []
503
+
504
+ new_block_len = len(new_unit_data)
505
+ self._raw_data[block["start"] + block_shift : block["end"] + 1 + block_shift] = (
506
+ new_unit_data
507
+ )
508
+ # adjust block shift for change in number of lines in bdy block
509
+ block_shift += new_block_len - prev_block_len
510
+ prev_block_end = (
511
+ block["end"] + block_shift
512
+ ) # add in to keep a record of the last block read in
516
513
 
517
514
  def _get_unit_definitions(self):
518
515
  self._initialize_collections()
@@ -528,17 +525,19 @@ class DAT(FMFile):
528
525
  msg = f"Unexpected unit type encountered: {unit_type}"
529
526
  raise Exception(msg)
530
527
 
531
- def _initialize_collections(self):
528
+ def _initialize_collections(self) -> None:
532
529
  # Initialize unit collections
533
- self.sections = {}
534
- self.boundaries = {}
535
- self.structures = {}
536
- self.conduits = {}
537
- self.losses = {}
538
- self._unsupported = {}
539
- self._all_units = []
540
-
541
- def _process_supported_unit(self, unit_type, unit_data):
530
+ self.sections: dict[str, units.TSections] = {}
531
+ self.boundaries: dict[str, units.TBoundaries] = {}
532
+ self.structures: dict[str, units.TStructures] = {}
533
+ self.conduits: dict[str, units.TConduits] = {}
534
+ self.losses: dict[str, units.TLosses] = {}
535
+ self.connectors: dict[str, units.TConnectors] = {}
536
+ self.controls: dict[str, units.TControls] = {}
537
+ self._unsupported: dict[str, units.TUnsupported] = {}
538
+ self._all_units: list[Unit] = []
539
+
540
+ def _process_supported_unit(self, unit_type, unit_data) -> None:
542
541
  # Handle initial conditions block
543
542
  if unit_type == "INITIAL CONDITIONS":
544
543
  self.initial_conditions = units.IIC(unit_data, n=self._label_len)
@@ -559,7 +558,13 @@ class DAT(FMFile):
559
558
  return unit_data[2][: self._label_len].strip()
560
559
  return unit_data[1][: self._label_len].strip()
561
560
 
562
- def _add_unit_to_group(self, unit_group, unit_type, unit_name, unit_data):
561
+ def _add_unit_to_group(
562
+ self,
563
+ unit_group,
564
+ unit_type: str,
565
+ unit_name: str,
566
+ unit_data: list[str],
567
+ ) -> None:
563
568
  # Raise exception if a duplicate label is encountered
564
569
  if unit_name in unit_group:
565
570
  msg = f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[unit_type]["group"]}'
@@ -571,7 +576,7 @@ class DAT(FMFile):
571
576
  )
572
577
  self._all_units.append(unit_group[unit_name])
573
578
 
574
- def _process_unsupported_unit(self, unit_type, unit_data):
579
+ def _process_unsupported_unit(self, unit_type, unit_data) -> None:
575
580
  # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
576
581
  unit_name, subtype = self._get_unsupported_unit_name(unit_type, unit_data)
577
582
  self._unsupported[f"{unit_name} ({unit_type})"] = units.UNSUPPORTED(
@@ -583,7 +588,7 @@ class DAT(FMFile):
583
588
  )
584
589
  self._all_units.append(self._unsupported[f"{unit_name} ({unit_type})"])
585
590
 
586
- def _get_unsupported_unit_name(self, unit_type, unit_data):
591
+ def _get_unsupported_unit_name(self, unit_type: str, unit_data: list[str]) -> tuple[str, bool]:
587
592
  # Check if the unit type has associated subtypes
588
593
  if units.UNSUPPORTED_UNIT_TYPES[unit_type]["has_subtype"]:
589
594
  return unit_data[2][: self._label_len].strip(), True
@@ -907,3 +912,69 @@ class DAT(FMFile):
907
912
  new = f"{unit_type}_{unit_subtype}_{new_lbl}"
908
913
 
909
914
  self._gxy_data = self._gxy_data.replace(old, new)
915
+
916
+ def get_network(self) -> tuple[list[Unit], list[tuple[Unit, Unit]]]:
917
+ """Generates a network representation of units and their connections.
918
+
919
+ This method creates a directed network where nodes represent units
920
+ and edges represent labeled connections between them. The edges are
921
+ directional, determined by the order of appearance in the `.dat` file.
922
+
923
+ Raises:
924
+ ValueError: If a unit has no name when an implicit label is assigned.
925
+ RuntimeError: If the constructed network contains labels that do not
926
+ form valid two-unit connections.
927
+
928
+ Returns:
929
+ tuple[list[Unit], list[tuple[Unit, Unit]]]:
930
+ - A list of `Unit` objects representing the nodes.
931
+ - A list of tuples, each containing two `Unit` objects representing
932
+ a directed edge."""
933
+
934
+ # collect all relevant units and labels
935
+ units = [unit for unit in self._all_units if unit._unit != "COMMENT"]
936
+ label_lists = [list(unit.all_labels) for unit in units]
937
+
938
+ # connect units for each label
939
+ label_to_unit_list: dict[str, list[Unit]] = defaultdict(list)
940
+ for idx, (unit, label_list) in enumerate(zip(units, label_lists)):
941
+ in_reach = hasattr(unit, "dist_to_next") and unit.dist_to_next > 0
942
+ if in_reach: # has implicit downstream labels
943
+ next_unit = units[idx + 1]
944
+ next_next_unit = units[idx + 2]
945
+
946
+ if next_unit.name is None:
947
+ msg = "Unit has no name."
948
+ raise ValueError(msg)
949
+
950
+ end_of_reach = (
951
+ (not hasattr(next_unit, "dist_to_next"))
952
+ or (next_unit.dist_to_next == 0)
953
+ or (not hasattr(next_next_unit, "dist_to_next"))
954
+ )
955
+
956
+ if end_of_reach:
957
+ renamed_label = next_unit.name + "_dummy"
958
+ label_list.append(renamed_label)
959
+ label_lists[idx + 1].append(renamed_label) # why label_lists is made first
960
+ else:
961
+ label_list.append(next_unit.name)
962
+
963
+ for label in label_list:
964
+ label_to_unit_list[label].append(unit)
965
+
966
+ # check validity of network
967
+ units_per_edge = 2
968
+ invalid_labels = [k for k, v in label_to_unit_list.items() if len(v) != units_per_edge]
969
+ no_invalid_labels = len(invalid_labels)
970
+ no_labels = len(label_to_unit_list)
971
+ if no_invalid_labels > 0:
972
+ msg = (
973
+ "Unable to create a valid network with the current algorithm and/or data."
974
+ f" {no_invalid_labels}/{no_labels} labels do not join two units: {invalid_labels}."
975
+ )
976
+ raise RuntimeError(msg)
977
+
978
+ # the labels themselves are no longer needed
979
+ unit_pairs = [(unit_pair[0], unit_pair[1]) for unit_pair in label_to_unit_list.values()]
980
+ return units, unit_pairs
floodmodeller_api/diff.py CHANGED
@@ -1,6 +1,6 @@
1
1
  """
2
2
  Flood Modeller Python API
3
- Copyright (C) 2024 Jacobs U.K. Limited
3
+ Copyright (C) 2025 Jacobs U.K. Limited
4
4
 
5
5
  This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
6
  as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -1,6 +1,6 @@
1
1
  """
2
2
  Flood Modeller Python API
3
- Copyright (C) 2024 Jacobs U.K. Limited
3
+ Copyright (C) 2025 Jacobs U.K. Limited
4
4
 
5
5
  This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
6
  as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
floodmodeller_api/ied.py CHANGED
@@ -1,6 +1,6 @@
1
1
  """
2
2
  Flood Modeller Python API
3
- Copyright (C) 2024 Jacobs U.K. Limited
3
+ Copyright (C) 2025 Jacobs U.K. Limited
4
4
 
5
5
  This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
6
6
  as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -60,7 +60,7 @@ class IED(FMFile):
60
60
 
61
61
  def _read(self):
62
62
  # Read IED data
63
- with open(self._filepath) as ied_file:
63
+ with open(self._filepath, encoding=self.ENCODING) as ied_file:
64
64
  self._raw_data = [line.rstrip("\n") for line in ied_file]
65
65
 
66
66
  # Generate IED structure
@@ -206,8 +206,6 @@ class IED(FMFile):
206
206
  )
207
207
  self._all_units.append(self._unsupported[f"{unit_name} ({block['Type']})"])
208
208
 
209
- print()
210
-
211
209
  def _update_ied_struct(self): # noqa: C901, PLR0912
212
210
  # Generate IED structure
213
211
  ied_struct = []