floodmodeller-api 0.5.0.post1__py3-none-any.whl → 0.5.2__py3-none-any.whl

This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective registries.
Files changed (119)
  1. floodmodeller_api/__init__.py +11 -1
  2. floodmodeller_api/_base.py +55 -36
  3. floodmodeller_api/backup.py +15 -12
  4. floodmodeller_api/dat.py +191 -121
  5. floodmodeller_api/diff.py +4 -4
  6. floodmodeller_api/hydrology_plus/hydrology_plus_export.py +15 -14
  7. floodmodeller_api/ied.py +8 -10
  8. floodmodeller_api/ief.py +56 -42
  9. floodmodeller_api/ief_flags.py +1 -1
  10. floodmodeller_api/inp.py +7 -10
  11. floodmodeller_api/logs/lf.py +25 -26
  12. floodmodeller_api/logs/lf_helpers.py +20 -20
  13. floodmodeller_api/logs/lf_params.py +1 -5
  14. floodmodeller_api/mapping.py +11 -2
  15. floodmodeller_api/test/__init__.py +2 -2
  16. floodmodeller_api/test/conftest.py +2 -3
  17. floodmodeller_api/test/test_backup.py +2 -2
  18. floodmodeller_api/test/test_conveyance.py +13 -7
  19. floodmodeller_api/test/test_dat.py +168 -20
  20. floodmodeller_api/test/test_data/EX18_DAT_expected.json +164 -144
  21. floodmodeller_api/test/test_data/EX3_DAT_expected.json +6 -2
  22. floodmodeller_api/test/test_data/EX6_DAT_expected.json +12 -46
  23. floodmodeller_api/test/test_data/encoding_test_cp1252.dat +1081 -0
  24. floodmodeller_api/test/test_data/encoding_test_utf8.dat +1081 -0
  25. floodmodeller_api/test/test_data/integrated_bridge/AR_NoSP_NoBl_2O_NO_OneFRC.ied +33 -0
  26. floodmodeller_api/test/test_data/integrated_bridge/AR_vSP_25pc_1O.ied +32 -0
  27. floodmodeller_api/test/test_data/integrated_bridge/PL_vSP_25pc_1O.ied +34 -0
  28. floodmodeller_api/test/test_data/integrated_bridge/SBTwoFRCsStaggered.IED +32 -0
  29. floodmodeller_api/test/test_data/integrated_bridge/US_NoSP_NoBl_OR_RN.ied +28 -0
  30. floodmodeller_api/test/test_data/integrated_bridge/US_SP_NoBl_OR_frc_PT2-5_RN.ied +34 -0
  31. floodmodeller_api/test/test_data/integrated_bridge/US_fSP_NoBl_1O.ied +30 -0
  32. floodmodeller_api/test/test_data/integrated_bridge/US_nSP_NoBl_1O.ied +49 -0
  33. floodmodeller_api/test/test_data/integrated_bridge/US_vSP_NoBl_2O_Para.ied +35 -0
  34. floodmodeller_api/test/test_data/integrated_bridge.dat +40 -0
  35. floodmodeller_api/test/test_data/network.ied +2 -2
  36. floodmodeller_api/test/test_data/network_dat_expected.json +141 -243
  37. floodmodeller_api/test/test_data/network_ied_expected.json +2 -2
  38. floodmodeller_api/test/test_data/network_with_comments.ied +2 -2
  39. floodmodeller_api/test/test_data/structure_logs/EX17_expected.csv +4 -0
  40. floodmodeller_api/test/test_data/structure_logs/EX17_expected.json +69 -0
  41. floodmodeller_api/test/test_data/structure_logs/EX18_expected.csv +20 -0
  42. floodmodeller_api/test/test_data/structure_logs/EX18_expected.json +292 -0
  43. floodmodeller_api/test/test_data/structure_logs/EX6_expected.csv +4 -0
  44. floodmodeller_api/test/test_data/structure_logs/EX6_expected.json +35 -0
  45. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_flow.csv +182 -0
  46. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_fr.csv +182 -0
  47. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_mode.csv +182 -0
  48. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_stage.csv +182 -0
  49. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_state.csv +182 -0
  50. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_velocity.csv +182 -0
  51. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_left_fp_h.csv +182 -0
  52. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_left_fp_mode.csv +182 -0
  53. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_link_inflow.csv +182 -0
  54. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_max.csv +87 -0
  55. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_right_fp_h.csv +182 -0
  56. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_right_fp_mode.csv +182 -0
  57. floodmodeller_api/test/test_flowtimeprofile.py +2 -2
  58. floodmodeller_api/test/test_hydrology_plus_export.py +4 -2
  59. floodmodeller_api/test/test_ied.py +3 -3
  60. floodmodeller_api/test/test_ief.py +12 -4
  61. floodmodeller_api/test/test_inp.py +2 -2
  62. floodmodeller_api/test/test_integrated_bridge.py +159 -0
  63. floodmodeller_api/test/test_json.py +14 -13
  64. floodmodeller_api/test/test_logs_lf.py +50 -29
  65. floodmodeller_api/test/test_read_file.py +1 -0
  66. floodmodeller_api/test/test_river.py +12 -12
  67. floodmodeller_api/test/test_tool.py +8 -5
  68. floodmodeller_api/test/test_toolbox_structure_log.py +148 -158
  69. floodmodeller_api/test/test_xml2d.py +14 -16
  70. floodmodeller_api/test/test_zz.py +143 -0
  71. floodmodeller_api/to_from_json.py +9 -9
  72. floodmodeller_api/tool.py +15 -11
  73. floodmodeller_api/toolbox/example_tool.py +5 -1
  74. floodmodeller_api/toolbox/model_build/add_siltation_definition.py +13 -9
  75. floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +500 -194
  76. floodmodeller_api/toolbox/model_build/structure_log_definition.py +5 -1
  77. floodmodeller_api/units/__init__.py +15 -0
  78. floodmodeller_api/units/_base.py +87 -20
  79. floodmodeller_api/units/_helpers.py +343 -0
  80. floodmodeller_api/units/boundaries.py +59 -71
  81. floodmodeller_api/units/comment.py +1 -1
  82. floodmodeller_api/units/conduits.py +57 -54
  83. floodmodeller_api/units/connectors.py +112 -0
  84. floodmodeller_api/units/controls.py +107 -0
  85. floodmodeller_api/units/conveyance.py +1 -1
  86. floodmodeller_api/units/iic.py +2 -9
  87. floodmodeller_api/units/losses.py +44 -45
  88. floodmodeller_api/units/sections.py +52 -51
  89. floodmodeller_api/units/structures.py +361 -531
  90. floodmodeller_api/units/units.py +27 -26
  91. floodmodeller_api/units/unsupported.py +5 -7
  92. floodmodeller_api/units/variables.py +2 -2
  93. floodmodeller_api/urban1d/_base.py +13 -17
  94. floodmodeller_api/urban1d/conduits.py +11 -21
  95. floodmodeller_api/urban1d/general_parameters.py +1 -1
  96. floodmodeller_api/urban1d/junctions.py +7 -11
  97. floodmodeller_api/urban1d/losses.py +13 -17
  98. floodmodeller_api/urban1d/outfalls.py +18 -22
  99. floodmodeller_api/urban1d/raingauges.py +5 -10
  100. floodmodeller_api/urban1d/subsections.py +5 -4
  101. floodmodeller_api/urban1d/xsections.py +14 -17
  102. floodmodeller_api/util.py +23 -6
  103. floodmodeller_api/validation/parameters.py +7 -3
  104. floodmodeller_api/validation/urban_parameters.py +1 -4
  105. floodmodeller_api/validation/validation.py +11 -5
  106. floodmodeller_api/version.py +1 -1
  107. floodmodeller_api/xml2d.py +27 -31
  108. floodmodeller_api/xml2d_template.py +1 -1
  109. floodmodeller_api/zz.py +539 -0
  110. {floodmodeller_api-0.5.0.post1.dist-info → floodmodeller_api-0.5.2.dist-info}/LICENSE.txt +1 -1
  111. {floodmodeller_api-0.5.0.post1.dist-info → floodmodeller_api-0.5.2.dist-info}/METADATA +30 -16
  112. {floodmodeller_api-0.5.0.post1.dist-info → floodmodeller_api-0.5.2.dist-info}/RECORD +116 -83
  113. {floodmodeller_api-0.5.0.post1.dist-info → floodmodeller_api-0.5.2.dist-info}/WHEEL +1 -1
  114. floodmodeller_api/test/test_zzn.py +0 -36
  115. floodmodeller_api/units/helpers.py +0 -123
  116. floodmodeller_api/zzn.py +0 -414
  117. /floodmodeller_api/test/test_data/{network_from_tabularCSV.csv → tabular_csv_outputs/network_zzn_max.csv} +0 -0
  118. {floodmodeller_api-0.5.0.post1.dist-info → floodmodeller_api-0.5.2.dist-info}/entry_points.txt +0 -0
  119. {floodmodeller_api-0.5.0.post1.dist-info → floodmodeller_api-0.5.2.dist-info}/top_level.txt +0 -0
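Note: entries 79 and 115 show the private helpers module moving from units/helpers.py to units/_helpers.py, with the conversion functions renamed from _to_float/_to_int to to_float/to_int (see the dat.py import change below). A minimal migration sketch for downstream code that imported the old module; the fall-back-to-default behaviour shown is an assumption inferred from how dat.py calls these helpers, not a documented contract:

    # 0.5.0.post1 (module removed in 0.5.2):
    # from floodmodeller_api.units.helpers import _to_float, _to_int

    # 0.5.2:
    from floodmodeller_api.units._helpers import join_10_char, split_10_char, to_float, to_int

    fields = split_10_char(f"{'     12.5':<70}")  # fixed-width 10-character fields
    lower_froude = to_float(fields[1], 0.75)      # blank field -> assumed default 0.75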
floodmodeller_api/dat.py CHANGED
@@ -1,6 +1,6 @@
 """
 Flood Modeller Python API
-Copyright (C) 2024 Jacobs U.K. Limited
+Copyright (C) 2025 Jacobs U.K. Limited
 
 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
 as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -16,13 +16,14 @@ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London
 
 from __future__ import annotations
 
+from collections import defaultdict
 from pathlib import Path
 from typing import Any
 
 from . import units
 from ._base import FMFile
 from .units._base import Unit
-from .units.helpers import _to_float, _to_int
+from .units._helpers import join_10_char, split_10_char, to_float, to_int
 from .util import handle_exception
 from .validation.validation import _validate_unit
 
@@ -110,8 +111,6 @@ class DAT(FMFile):
         """
         self._diff(other, force_print=force_print)
 
-    # def _get_unit_from_connectivity(self, method) #use this as method prev and next
-
     @handle_exception(when="calculate next unit in")
     def next(self, unit: Unit) -> Unit | list[Unit] | None:
         """Finds next unit in the reach.
@@ -189,7 +188,7 @@ class DAT(FMFile):
         _junction_match = [
             junction
             for junction in self._all_units
-            if junction._unit == "JUNCTION" and unit.name in junction.labels
+            if junction._unit == "JUNCTION" and unit.name in junction.labels  # type: ignore
         ]
 
         # Case 2: Previous unit has positive distance to next
@@ -253,13 +252,11 @@ class DAT(FMFile):
             Union[Unit, list[Unit], None]: Either a singular unit or list of units with ds_label matching, if none exist returns none.
         """
 
-        _ds_list = []
-        for item in self._all_units:
-            try:
-                if item.ds_label == current_unit.name:
-                    _ds_list.append(item)
-            except AttributeError:
-                continue
+        _ds_list = [
+            item
+            for item in self._all_units
+            if hasattr(item, "ds_label") and item.ds_label == current_unit.name
+        ]
 
         if len(_ds_list) == 0:
             return None
@@ -294,8 +291,8 @@ class DAT(FMFile):
 
     def _read(self) -> None:
         # Read DAT data
-        with open(self._filepath) as dat_file:
-            self._raw_data: list[str] = [line.rstrip("\n") for line in dat_file.readlines()]
+        with open(self._filepath, encoding=self.ENCODING) as dat_file:
+            self._raw_data: list[str] = [line.rstrip("\n") for line in dat_file]
 
         # Generate DAT structure
         self._update_dat_struct()
@@ -354,33 +351,33 @@ class DAT(FMFile):
         self.title = self._raw_data[0]
         self.general_parameters = {}
         line = f"{self._raw_data[2]:<70}"
-        params = units.helpers.split_10_char(line)
+        params = split_10_char(line)
         if params[6] == "":
             # Adds the measurements unit as DEFAULT if not specified
             params[6] = "DEFAULT"
         line = f"{self._raw_data[3]:<70}"
-        params.extend(units.helpers.split_10_char(line))
-
-        self.general_parameters["Node Count"] = _to_int(params[0], 0)
-        self.general_parameters["Lower Froude"] = _to_float(params[1], 0.75)
-        self.general_parameters["Upper Froude"] = _to_float(params[2], 0.9)
-        self.general_parameters["Min Depth"] = _to_float(params[3], 0.1)
-        self.general_parameters["Convergence Direct"] = _to_float(params[4], 0.001)
-        self._label_len = _to_int(params[5], 12)  # label length
+        params.extend(split_10_char(line))
+
+        self.general_parameters["Node Count"] = to_int(params[0], 0)
+        self.general_parameters["Lower Froude"] = to_float(params[1], 0.75)
+        self.general_parameters["Upper Froude"] = to_float(params[2], 0.9)
+        self.general_parameters["Min Depth"] = to_float(params[3], 0.1)
+        self.general_parameters["Convergence Direct"] = to_float(params[4], 0.001)
+        self._label_len = to_int(params[5], 12)  # label length
         self.general_parameters["Units"] = params[6]  # "DEFAULT" set during read above.
-        self.general_parameters["Water Temperature"] = _to_float(params[7], 10.0)
-        self.general_parameters["Convergence Flow"] = _to_float(params[8], 0.01)
-        self.general_parameters["Convergence Head"] = _to_float(params[9], 0.01)
-        self.general_parameters["Mathematical Damping"] = _to_float(params[10], 0.7)
-        self.general_parameters["Pivotal Choice"] = _to_float(params[11], 0.1)
-        self.general_parameters["Under-relaxation"] = _to_float(params[12], 0.7)
-        self.general_parameters["Matrix Dummy"] = _to_float(params[13], 0.0)
+        self.general_parameters["Water Temperature"] = to_float(params[7], 10.0)
+        self.general_parameters["Convergence Flow"] = to_float(params[8], 0.01)
+        self.general_parameters["Convergence Head"] = to_float(params[9], 0.01)
+        self.general_parameters["Mathematical Damping"] = to_float(params[10], 0.7)
+        self.general_parameters["Pivotal Choice"] = to_float(params[11], 0.1)
+        self.general_parameters["Under-relaxation"] = to_float(params[12], 0.7)
+        self.general_parameters["Matrix Dummy"] = to_float(params[13], 0.0)
         self.general_parameters["RAD File"] = self._raw_data[5]  # No default, optional
 
     def _update_general_parameters(self) -> None:
         self._raw_data[0] = self.title
         self._raw_data[5] = self.general_parameters["RAD File"]
-        general_params_1 = units.helpers.join_10_char(
+        general_params_1 = join_10_char(
             self.general_parameters["Node Count"],
             self.general_parameters["Lower Froude"],
             self.general_parameters["Upper Froude"],
@@ -391,7 +388,7 @@ class DAT(FMFile):
         general_params_1 += self.general_parameters["Units"]
         self._raw_data[2] = general_params_1
 
-        general_params_2 = units.helpers.join_10_char(
+        general_params_2 = join_10_char(
             self.general_parameters["Water Temperature"],
             self.general_parameters["Convergence Flow"],
             self.general_parameters["Convergence Head"],
@@ -409,21 +406,19 @@ class DAT(FMFile):
             (self.structures, "structures"),
             (self.conduits, "conduits"),
             (self.losses, "losses"),
+            (self.connectors, "connectors"),
+            (self.controls, "controls"),
         ]:
             for name, unit in unit_group.copy().items():
                 if name != unit.name:
                     # Check if new name already exists as a label
                     if unit.name in unit_group:
-                        raise Exception(
-                            f'Error: Cannot update label "{name}" to "{unit.name}" because "{unit.name}" already exists in the Network {unit_group_name} group',
-                        )
+                        msg = f'Error: Cannot update label "{name}" to "{unit.name}" because "{unit.name}" already exists in the Network {unit_group_name} group'
+                        raise Exception(msg)
                     unit_group[unit.name] = unit
                     del unit_group[name]
                     # Update label in ICs
                     if unit_group_name not in ["boundaries", "losses"]:
-                        # TODO: Need to do a more thorough check for whether a unit is one in the ICs
-                        # e.g. Culvert inlet and river section may have same label, but only river
-                        # section label should update in ICs
                         self.initial_conditions.update_label(name, unit.name)
 
         # Update label in GISINFO and GXY data
@@ -456,66 +451,65 @@ class DAT(FMFile):
             "sections": [],
             "conduits": [],
             "losses": [],
+            "connectors": [],
+            "controls": [],
         }
 
         for block in self._dat_struct:
             # Check for all supported boundary types
-            if block["Type"] in units.SUPPORTED_UNIT_TYPES:
-                # clause for when unit has been inserted into the dat file
-                if "new_insert" in block:
-                    block["start"] = prev_block_end + 1
-                    block["end"] = block["start"] + len(block["new_insert"]) - 1
-                    self._raw_data[block["start"] : block["start"]] = block["new_insert"]
-                    block_shift += len(block["new_insert"])
-                    prev_block_end = block["end"]
-                    del block["new_insert"]
+            if block["Type"] not in units.SUPPORTED_UNIT_TYPES:
+                continue
+            # clause for when unit has been inserted into the dat file
+            if "new_insert" in block:
+                block["start"] = prev_block_end + 1
+                block["end"] = block["start"] + len(block["new_insert"]) - 1
+                self._raw_data[block["start"] : block["start"]] = block["new_insert"]
+                block_shift += len(block["new_insert"])
+                prev_block_end = block["end"]
+                del block["new_insert"]
 
-                else:
-                    unit_data = self._raw_data[
-                        block["start"] + block_shift : block["end"] + 1 + block_shift
-                    ]
-                    prev_block_len = len(unit_data)
+            else:
+                unit_data = self._raw_data[
+                    block["start"] + block_shift : block["end"] + 1 + block_shift
+                ]
+                prev_block_len = len(unit_data)
 
-                    if block["Type"] == "INITIAL CONDITIONS":
-                        new_unit_data = self.initial_conditions._write()
-                    elif block["Type"] == "COMMENT":
-                        comment = comment_units[comment_tracker]
-                        new_unit_data = comment._write()
-                        comment_tracker += 1
+                if block["Type"] == "INITIAL CONDITIONS":
+                    new_unit_data = self.initial_conditions._write()
+                elif block["Type"] == "COMMENT":
+                    comment = comment_units[comment_tracker]
+                    new_unit_data = comment._write()
+                    comment_tracker += 1
 
-                    elif block["Type"] == "VARIABLES":
-                        new_unit_data = self.variables._write()
+                elif block["Type"] == "VARIABLES":
+                    new_unit_data = self.variables._write()
 
+                else:
+                    if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
+                        unit_name = unit_data[2][: self._label_len].strip()
                     else:
-                        if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
-                            unit_name = unit_data[2][: self._label_len].strip()
-                        else:
-                            unit_name = unit_data[1][: self._label_len].strip()
-
-                        # Get unit object
-                        unit_group = getattr(
-                            self,
-                            units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"],
-                        )
-                        if unit_name in unit_group:
-                            # block still exists
-                            new_unit_data = unit_group[unit_name]._write()
-                            existing_units[
-                                units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
-                            ].append(unit_name)
-                        else:
-                            # Bdy block has been deleted
-                            new_unit_data = []
-
-                    new_block_len = len(new_unit_data)
-                    self._raw_data[
-                        block["start"] + block_shift : block["end"] + 1 + block_shift
-                    ] = new_unit_data
-                    # adjust block shift for change in number of lines in bdy block
-                    block_shift += new_block_len - prev_block_len
-                    prev_block_end = (
-                        block["end"] + block_shift
-                    )  # add in to keep a record of the last block read in
+                        unit_name = unit_data[1][: self._label_len].strip()
+
+                    # Get unit object
+                    unit_group_str = units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
+                    unit_group = getattr(self, unit_group_str)
+                    if unit_name in unit_group:
+                        # block still exists
+                        new_unit_data = unit_group[unit_name]._write()
+                        existing_units[unit_group_str].append(unit_name)
+                    else:
+                        # Bdy block has been deleted
+                        new_unit_data = []
+
+                new_block_len = len(new_unit_data)
+                self._raw_data[block["start"] + block_shift : block["end"] + 1 + block_shift] = (
+                    new_unit_data
+                )
+                # adjust block shift for change in number of lines in bdy block
+                block_shift += new_block_len - prev_block_len
+                prev_block_end = (
+                    block["end"] + block_shift
+                )  # add in to keep a record of the last block read in
 
     def _get_unit_definitions(self):
         self._initialize_collections()
@@ -528,19 +522,22 @@ class DAT(FMFile):
         elif unit_type in units.UNSUPPORTED_UNIT_TYPES:
             self._process_unsupported_unit(unit_type, unit_data)
         elif unit_type not in ("GENERAL", "GISINFO"):
-            raise Exception(f"Unexpected unit type encountered: {unit_type}")
+            msg = f"Unexpected unit type encountered: {unit_type}"
+            raise Exception(msg)
 
-    def _initialize_collections(self):
+    def _initialize_collections(self) -> None:
         # Initialize unit collections
-        self.sections = {}
-        self.boundaries = {}
-        self.structures = {}
-        self.conduits = {}
-        self.losses = {}
-        self._unsupported = {}
-        self._all_units = []
-
-    def _process_supported_unit(self, unit_type, unit_data):
+        self.sections: dict[str, units.TSections] = {}
+        self.boundaries: dict[str, units.TBoundaries] = {}
+        self.structures: dict[str, units.TStructures] = {}
+        self.conduits: dict[str, units.TConduits] = {}
+        self.losses: dict[str, units.TLosses] = {}
+        self.connectors: dict[str, units.TConnectors] = {}
+        self.controls: dict[str, units.TControls] = {}
+        self._unsupported: dict[str, units.TUnsupported] = {}
+        self._all_units: list[Unit] = []
+
+    def _process_supported_unit(self, unit_type, unit_data) -> None:
         # Handle initial conditions block
         if unit_type == "INITIAL CONDITIONS":
             self.initial_conditions = units.IIC(unit_data, n=self._label_len)
@@ -561,12 +558,17 @@ class DAT(FMFile):
             return unit_data[2][: self._label_len].strip()
         return unit_data[1][: self._label_len].strip()
 
-    def _add_unit_to_group(self, unit_group, unit_type, unit_name, unit_data):
+    def _add_unit_to_group(
+        self,
+        unit_group,
+        unit_type: str,
+        unit_name: str,
+        unit_data: list[str],
+    ) -> None:
         # Raise exception if a duplicate label is encountered
         if unit_name in unit_group:
-            raise Exception(
-                f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[unit_type]["group"]}',
-            )
+            msg = f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[unit_type]["group"]}'
+            raise Exception(msg)
         # Changes done to account for unit types with spaces/dashes eg Flat-V Weir
         unit_type_safe = unit_type.replace(" ", "_").replace("-", "_")
         unit_group[unit_name] = eval(
@@ -574,7 +576,7 @@
         )
         self._all_units.append(unit_group[unit_name])
 
-    def _process_unsupported_unit(self, unit_type, unit_data):
+    def _process_unsupported_unit(self, unit_type, unit_data) -> None:
         # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
         unit_name, subtype = self._get_unsupported_unit_name(unit_type, unit_data)
         self._unsupported[f"{unit_name} ({unit_type})"] = units.UNSUPPORTED(
@@ -586,7 +588,7 @@
         )
         self._all_units.append(self._unsupported[f"{unit_name} ({unit_type})"])
 
-    def _get_unsupported_unit_name(self, unit_type, unit_data):
+    def _get_unsupported_unit_name(self, unit_type: str, unit_data: list[str]) -> tuple[str, bool]:
         # Check if the unit type has associated subtypes
         if units.UNSUPPORTED_UNIT_TYPES[unit_type]["has_subtype"]:
             return unit_data[2][: self._label_len].strip(), True
@@ -720,7 +722,8 @@
         """
         # catch if not valid unit
         if not isinstance(unit, Unit):
-            raise TypeError("unit isn't a unit")
+            msg = "unit isn't a unit"
+            raise TypeError(msg)
 
         # remove from all units
         index = self._all_units.index(unit)
@@ -768,17 +771,17 @@
         # catch errors
         provided_params = sum(arg is not None for arg in (add_before, add_after, add_at))
         if provided_params == 0:
-            raise SyntaxError(
-                "No positional argument given. Please provide either add_before, add_at or add_after",
-            )
+            msg = "No positional argument given. Please provide either add_before, add_at or add_after"
+            raise SyntaxError(msg)
         if provided_params > 1:
-            raise SyntaxError("Only one of add_at, add_before, or add_after required")
+            msg = "Only one of add_at, add_before, or add_after required"
+            raise SyntaxError(msg)
         if not isinstance(unit, Unit):
-            raise TypeError("unit isn't a unit")
+            msg = "unit isn't a unit"
+            raise TypeError(msg)
         if add_at is None and not (isinstance(add_before, Unit) or isinstance(add_after, Unit)):
-            raise TypeError(
-                "add_before or add_after argument must be a Flood Modeller Unit type",
-            )
+            msg = "add_before or add_after argument must be a Flood Modeller Unit type"
+            raise TypeError(msg)
 
         unit_class = unit._unit
         if unit_class != "COMMENT":
@@ -786,9 +789,8 @@
             unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
             unit_group = getattr(self, unit_group_name)
             if unit.name in unit_group:
-                raise NameError(
-                    "Name already appears in unit group. Cannot have two units with same name in same group",
-                )
+                msg = "Name already appears in unit group. Cannot have two units with same name in same group"
+                raise NameError(msg)
 
         # positional argument
         if add_at is not None:
@@ -796,7 +798,8 @@
             if insert_index < 0:
                 insert_index += len(self._all_units) + 1
             if insert_index < 0:
-                raise Exception(f"invalid add_at index: {add_at}")
+                msg = f"invalid add_at index: {add_at}"
+                raise Exception(msg)
         else:
             check_unit = add_before or add_after
             for index, thing in enumerate(self._all_units):
@@ -805,9 +808,10 @@
                     insert_index += 1 if add_after else 0
                     break
             else:
-                raise Exception(
-                    f"{check_unit} not found in dat network, so cannot be used to add before/after",
+                msg = (
+                    f"{check_unit} not found in dat network, so cannot be used to add before/after"
                 )
+                raise Exception(msg)
 
         unit_data = unit._write()
         self._all_units.insert(insert_index, unit)
@@ -908,3 +912,69 @@
         new = f"{unit_type}_{unit_subtype}_{new_lbl}"
 
         self._gxy_data = self._gxy_data.replace(old, new)
+
+    def get_network(self) -> tuple[list[Unit], list[tuple[Unit, Unit]]]:
+        """Generates a network representation of units and their connections.
+
+        This method creates a directed network where nodes represent units
+        and edges represent labeled connections between them. The edges are
+        directional, determined by the order of appearance in the `.dat` file.
+
+        Raises:
+            ValueError: If a unit has no name when an implicit label is assigned.
+            RuntimeError: If the constructed network contains labels that do not
+                form valid two-unit connections.
+
+        Returns:
+            tuple[list[Unit], list[tuple[Unit, Unit]]]:
+                - A list of `Unit` objects representing the nodes.
+                - A list of tuples, each containing two `Unit` objects representing
+                  a directed edge."""
+
+        # collect all relevant units and labels
+        units = [unit for unit in self._all_units if unit._unit != "COMMENT"]
+        label_lists = [list(unit.all_labels) for unit in units]
+
+        # connect units for each label
+        label_to_unit_list: dict[str, list[Unit]] = defaultdict(list)
+        for idx, (unit, label_list) in enumerate(zip(units, label_lists)):
+            in_reach = hasattr(unit, "dist_to_next") and unit.dist_to_next > 0
+            if in_reach:  # has implicit downstream labels
+                next_unit = units[idx + 1]
+                next_next_unit = units[idx + 2]
+
+                if next_unit.name is None:
+                    msg = "Unit has no name."
+                    raise ValueError(msg)
+
+                end_of_reach = (
+                    (not hasattr(next_unit, "dist_to_next"))
+                    or (next_unit.dist_to_next == 0)
+                    or (not hasattr(next_next_unit, "dist_to_next"))
+                )
+
+                if end_of_reach:
+                    renamed_label = next_unit.name + "_dummy"
+                    label_list.append(renamed_label)
+                    label_lists[idx + 1].append(renamed_label)  # why label_lists is made first
+                else:
+                    label_list.append(next_unit.name)
+
+            for label in label_list:
+                label_to_unit_list[label].append(unit)
+
+        # check validity of network
+        units_per_edge = 2
+        invalid_labels = [k for k, v in label_to_unit_list.items() if len(v) != units_per_edge]
+        no_invalid_labels = len(invalid_labels)
+        no_labels = len(label_to_unit_list)
+        if no_invalid_labels > 0:
+            msg = (
+                "Unable to create a valid network with the current algorithm and/or data."
+                f" {no_invalid_labels}/{no_labels} labels do not join two units: {invalid_labels}."
+            )
+            raise RuntimeError(msg)
+
+        # the labels themselves are no longer needed
+        unit_pairs = [(unit_pair[0], unit_pair[1]) for unit_pair in label_to_unit_list.values()]
+        return units, unit_pairs
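Note: the new DAT.get_network method (added above) exposes the model as node and edge lists. A usage sketch based only on the signature shown in this diff; the file name is hypothetical, and the degree count is just one example of what you might do with the edges:

    from collections import Counter

    from floodmodeller_api import DAT

    dat = DAT("network.dat")  # hypothetical .dat model
    nodes, edges = dat.get_network()

    # count connections per unit label, e.g. to spot confluences
    degree = Counter()
    for upstream, downstream in edges:
        degree[upstream.name] += 1
        degree[downstream.name] += 1
    print(degree.most_common(5))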
floodmodeller_api/diff.py CHANGED
@@ -1,6 +1,6 @@
 """
 Flood Modeller Python API
-Copyright (C) 2024 Jacobs U.K. Limited
+Copyright (C) 2025 Jacobs U.K. Limited
 
 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
 as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -59,8 +59,8 @@ def check_item_with_dataframe_equal(  # noqa: C901, PLR0912
         row_diffs = []
         for row in rows:
             for col in df_diff.columns:
-                if True not in df_diff.loc[row, col].duplicated().values:
-                    vals = df_diff.loc[row, col].values
+                if True not in df_diff.loc[row, col].duplicated().to_numpy():
+                    vals = df_diff.loc[row, col].to_numpy()
                     row_diffs.append(
                         f" Row: {row}, Col: '{col}' - left: {vals[0]}, right: {vals[1]}",
                     )
@@ -96,7 +96,7 @@ def check_dict_with_dataframe_equal(dict_a, dict_b, name, diff, special_types):
                 )
                 if not _result:
                     result = False
-            except KeyError as ke:
+            except KeyError as ke:  # noqa: PERF203
                 result = False
                 diff.append((name, f"Key: '{ke.args[0]}' missing in other"))
                 continue
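Note: the .values to .to_numpy() swap in check_item_with_dataframe_equal follows the pandas recommendation: Series.to_numpy() always returns a numpy.ndarray, whereas .values can return an extension array for some dtypes. A standalone illustration of the membership test used above (not the API's own code):

    import pandas as pd

    col = pd.Series([1.2, 1.2], index=["left", "right"])
    has_duplicates = True in col.duplicated().to_numpy()  # True: second value repeats
    vals = col.to_numpy()                                 # plain numpy.ndarray
    print(has_duplicates, vals[0], vals[1])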
floodmodeller_api/hydrology_plus/hydrology_plus_export.py CHANGED
@@ -1,6 +1,6 @@
 """
 Flood Modeller Python API
-Copyright (C) 2024 Jacobs U.K. Limited
+Copyright (C) 2025 Jacobs U.K. Limited
 
 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
 as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -51,7 +51,8 @@ class HydrologyPlusExport(FMFile):
         with self._filepath.open("r") as file:
             header = file.readline().strip(" ,\n\r")
         if header != "Flood Modeller Hydrology+ hydrograph file":
-            raise ValueError("Input file is not the correct format for Hydrology+ export data.")
+            msg = "Input file is not the correct format for Hydrology+ export data."
+            raise ValueError(msg)
 
         self._data_file = pd.read_csv(self._filepath)
         self._metadata = self._get_metadata()
@@ -85,20 +86,21 @@ class HydrologyPlusExport(FMFile):
             return next(col for col in self.data.columns if col.lower().startswith(event.lower()))
 
         if not (return_period and storm_duration and scenario):
-            raise ValueError(
+            msg = (
                 "Missing required inputs to find event, if no event string is passed then a "
                 "return_period, storm_duration and scenario are needed. You provided: "
-                f"{return_period=}, {storm_duration=}, {scenario=}",
+                f"{return_period=}, {storm_duration=}, {scenario=}"
             )
+            raise ValueError(msg)
         for column in self.data.columns:
             s, sd, rp, *_ = column.split(" - ")
             if s == scenario and float(sd) == storm_duration and float(rp) == return_period:
                 return column
-        else:
-            raise ValueError(
-                "No matching event was found based on "
-                f"{return_period=}, {storm_duration=}, {scenario=}",
-            )
+        msg = (
+            "No matching event was found based on "
+            f"{return_period=}, {storm_duration=}, {scenario=}"
+        )
+        raise ValueError(msg)
 
     def get_event_flow(
         self,
@@ -197,11 +199,10 @@ class HydrologyPlusExport(FMFile):
         elif isinstance(template_ief, (Path, str)):
             template_ief = IEF(template_ief)
 
-        generated_iefs = []
-        for column in self.data.columns:
-            generated_iefs.append(self.generate_ief(node_label, template_ief, event=column))
-
-        return generated_iefs
+        return [
+            self.generate_ief(node_label, template_ief, event=column)
+            for column in self.data.columns
+        ]
 
     def generate_ief(  # noqa: PLR0913
         self,
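Note: the rewritten lookup in get_event (above) drops the redundant for-else and matches columns by splitting each header on " - " into scenario, storm duration and return period. A standalone sketch of that matching rule; the column name below is a made-up example, not taken from a real export:

    def matches(column: str, scenario: str, storm_duration: float, return_period: float) -> bool:
        # headers look like "<scenario> - <storm duration> - <return period> - ..."
        s, sd, rp, *_ = column.split(" - ")
        return s == scenario and float(sd) == storm_duration and float(rp) == return_period

    print(matches("Reservoir - 6.5 - 100 - Flow", "Reservoir", 6.5, 100))  # True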
floodmodeller_api/ied.py CHANGED
@@ -1,6 +1,6 @@
 """
 Flood Modeller Python API
-Copyright (C) 2024 Jacobs U.K. Limited
+Copyright (C) 2025 Jacobs U.K. Limited
 
 This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License
 as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
@@ -60,8 +60,8 @@ class IED(FMFile):
 
     def _read(self):
         # Read IED data
-        with open(self._filepath) as ied_file:
-            self._raw_data = [line.rstrip("\n") for line in ied_file.readlines()]
+        with open(self._filepath, encoding=self.ENCODING) as ied_file:
+            self._raw_data = [line.rstrip("\n") for line in ied_file]
 
         # Generate IED structure
         self._update_ied_struct()
@@ -87,10 +87,11 @@ class IED(FMFile):
             if name != unit.name:
                 # Check if new name already exists as a label
                 if unit.name in unit_group:
-                    raise Exception(
+                    msg = (
                         f'Error: Cannot update label "{name}" to "{unit.name}" because '
-                        f'"{unit.name}" already exists in the Network {unit_group_name} group',
+                        f'"{unit.name}" already exists in the Network {unit_group_name} group'
                     )
+                    raise Exception(msg)
                 unit_group[unit.name] = unit
                 del unit_group[name]
 
@@ -180,9 +181,8 @@
             # Create instance of unit and add to relevant group
             unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"])
             if unit_name in unit_group:
-                raise Exception(
-                    f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]}',
-                )
+                msg = f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]}'
+                raise Exception(msg)
             unit_group[unit_name] = eval(f'units.{block["Type"]}({unit_data})')
 
             self._all_units.append(unit_group[unit_name])
@@ -206,8 +206,6 @@
         )
         self._all_units.append(self._unsupported[f"{unit_name} ({block['Type']})"])
 
-        print()
-
    def _update_ied_struct(self):  # noqa: C901, PLR0912
        # Generate IED structure
        ied_struct = []
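Note: both DAT._read and IED._read now pass an explicit encoding to open() instead of relying on the platform default (the new encoding_test_cp1252.dat and encoding_test_utf8.dat fixtures in the file list exercise this). The actual value of self.ENCODING is not shown in this diff; the cp1252 codec below is only an illustration of why an explicit codec matters:

    from pathlib import Path

    path = Path("encoding_demo.dat")
    path.write_bytes("Château weir §1\n".encode("cp1252"))  # bytes as a Windows tool might write them

    # An explicit codec reads the same on every platform; decoding these
    # bytes with a UTF-8 locale default would fail or mangle the text.
    with open(path, encoding="cp1252") as f:
        lines = [line.rstrip("\n") for line in f]
    print(lines)  # ['Château weir §1']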