floodmodeller-api 0.5.3.post1__py3-none-any.whl → 0.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. floodmodeller_api/dat.py +140 -53
  2. floodmodeller_api/hydrology_plus/hydrology_plus_export.py +1 -2
  3. floodmodeller_api/ied.py +1 -1
  4. floodmodeller_api/test/test_dat.py +89 -2
  5. floodmodeller_api/test/test_data/All Units 4_6.DAT +0 -2
  6. floodmodeller_api/test/test_data/All Units 4_6.feb +0 -2
  7. floodmodeller_api/test/test_data/River_Bridge.dat +1453 -0
  8. floodmodeller_api/test/test_data/River_Bridge.gxy +221 -0
  9. floodmodeller_api/test/test_data/River_Bridge_DAT_expected.json +27273 -0
  10. floodmodeller_api/test/test_data/River_Bridge_no_gxy.dat +1453 -0
  11. floodmodeller_api/test/test_data/River_Bridge_no_gxy_DAT_expected.json +26853 -0
  12. floodmodeller_api/test/test_data/duplicate_unit_test.dat +18 -0
  13. floodmodeller_api/test/test_data/duplicate_unit_test_unsupported.dat +28 -0
  14. floodmodeller_api/test/test_data/encoding_test_cp1252.dat +0 -2
  15. floodmodeller_api/test/test_data/encoding_test_utf8.dat +0 -2
  16. floodmodeller_api/test/test_data/remove_dummy_test.dat +19 -0
  17. floodmodeller_api/test/test_gxy.py +98 -0
  18. floodmodeller_api/test/test_json.py +40 -2
  19. floodmodeller_api/test/test_read_file.py +3 -0
  20. floodmodeller_api/test/test_unit.py +12 -0
  21. floodmodeller_api/to_from_json.py +16 -2
  22. floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +8 -8
  23. floodmodeller_api/units/_base.py +30 -0
  24. floodmodeller_api/units/boundaries.py +4 -1
  25. floodmodeller_api/units/conduits.py +1 -1
  26. floodmodeller_api/units/losses.py +2 -2
  27. floodmodeller_api/units/sections.py +36 -0
  28. floodmodeller_api/units/structures.py +60 -13
  29. floodmodeller_api/units/units.py +1 -0
  30. floodmodeller_api/units/unsupported.py +2 -2
  31. floodmodeller_api/validation/validation.py +6 -6
  32. floodmodeller_api/version.py +1 -1
  33. {floodmodeller_api-0.5.3.post1.dist-info → floodmodeller_api-0.5.5.dist-info}/METADATA +1 -1
  34. {floodmodeller_api-0.5.3.post1.dist-info → floodmodeller_api-0.5.5.dist-info}/RECORD +38 -29
  35. {floodmodeller_api-0.5.3.post1.dist-info → floodmodeller_api-0.5.5.dist-info}/WHEEL +0 -0
  36. {floodmodeller_api-0.5.3.post1.dist-info → floodmodeller_api-0.5.5.dist-info}/entry_points.txt +0 -0
  37. {floodmodeller_api-0.5.3.post1.dist-info → floodmodeller_api-0.5.5.dist-info}/licenses/LICENSE.txt +0 -0
  38. {floodmodeller_api-0.5.3.post1.dist-info → floodmodeller_api-0.5.5.dist-info}/top_level.txt +0 -0
floodmodeller_api/dat.py CHANGED
@@ -63,6 +63,8 @@ class DAT(FMFile):
63
63
 
64
64
  self._get_general_parameters()
65
65
  self._get_unit_definitions()
66
+ if self._gxy_data:
67
+ self._get_unit_locations()
66
68
 
67
69
  def update(self) -> None:
68
70
  """Updates the existing DAT based on any altered attributes"""
@@ -453,12 +455,14 @@ class DAT(FMFile):
453
455
  "losses": [],
454
456
  "connectors": [],
455
457
  "controls": [],
458
+ "_unsupported": [],
456
459
  }
457
460
 
458
461
  for block in self._dat_struct:
459
462
  # Check for all supported boundary types
460
- if block["Type"] not in units.SUPPORTED_UNIT_TYPES:
463
+ if block["Type"] not in units.ALL_UNIT_TYPES:
461
464
  continue
465
+ unit_type = block["Type"]
462
466
  # clause for when unit has been inserted into the dat file
463
467
  if "new_insert" in block:
464
468
  block["start"] = prev_block_end + 1
@@ -474,24 +478,26 @@ class DAT(FMFile):
474
478
  ]
475
479
  prev_block_len = len(unit_data)
476
480
 
477
- if block["Type"] == "INITIAL CONDITIONS":
481
+ if unit_type == "INITIAL CONDITIONS":
478
482
  new_unit_data = self.initial_conditions._write()
479
- elif block["Type"] == "COMMENT":
483
+ elif unit_type == "COMMENT":
480
484
  comment = comment_units[comment_tracker]
481
485
  new_unit_data = comment._write()
482
486
  comment_tracker += 1
483
487
 
484
- elif block["Type"] == "VARIABLES":
488
+ elif unit_type == "VARIABLES":
485
489
  new_unit_data = self.variables._write()
486
490
 
487
491
  else:
488
- if units.SUPPORTED_UNIT_TYPES[block["Type"]]["has_subtype"]:
489
- unit_name = unit_data[2][: self._label_len].strip()
492
+ if unit_type in units.SUPPORTED_UNIT_TYPES:
493
+ unit_name = self._get_supported_unit_name(unit_type, unit_data)
494
+ unit_group_str = units.SUPPORTED_UNIT_TYPES[unit_type]["group"]
490
495
  else:
491
- unit_name = unit_data[1][: self._label_len].strip()
496
+ unit_name, _ = self._get_unsupported_unit_name(unit_type, unit_data)
497
+ unit_name = f"{unit_name} ({unit_type})"
498
+ unit_group_str = "_unsupported"
492
499
 
493
500
  # Get unit object
494
- unit_group_str = units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]
495
501
  unit_group = getattr(self, unit_group_str)
496
502
  if unit_name in unit_group:
497
503
  # block still exists
@@ -525,6 +531,33 @@ class DAT(FMFile):
525
531
  msg = f"Unexpected unit type encountered: {unit_type}"
526
532
  raise Exception(msg)
527
533
 
534
+ def _get_unit_locations(self):
535
+ # use gxy data to assign locations to units.
536
+ gxy_lines = self._gxy_data.splitlines()
537
+ line = 0
538
+ gxy_dict = {}
539
+ while True:
540
+ header = gxy_lines[line][1:-1].split("_", 2)
541
+
542
+ # header format for a unit is [TYPE_SUBTYPE_NAME], so simple check that our header is a unit is check split length is 3
543
+ if len(header) != 3: # noqa: PLR2004
544
+ break
545
+
546
+ x = float(gxy_lines[line + 1][2:].strip())
547
+ y = float(gxy_lines[line + 2][2:].strip())
548
+
549
+ # key should match ._unique_name attributes
550
+ gxy_dict[f"{header[0]}_{header[2]}"] = (x, y)
551
+
552
+ line += 4
553
+
554
+ for unit in self._all_units:
555
+ if unit.unit in ("COMMENT",):
556
+ break
557
+
558
+ if unit.unique_name in gxy_dict:
559
+ unit.set_cached_location_from_gxy(gxy_dict.pop(unit.unique_name))
560
+
528
561
  def _initialize_collections(self) -> None:
529
562
  # Initialize unit collections
530
563
  self.sections: dict[str, units.TSections] = {}
@@ -548,8 +581,11 @@ class DAT(FMFile):
548
581
  else:
549
582
  # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
550
583
  unit_name = self._get_unit_name(unit_type, unit_data)
551
- # Create instance of unit and add to relevant group
584
+
585
+ # fetch the relevant group that the unit belongs in
552
586
  unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[unit_type]["group"])
587
+
588
+ # Create instance of unit and add to group
553
589
  self._add_unit_to_group(unit_group, unit_type, unit_name, unit_data)
554
590
 
555
591
  def _get_unit_name(self, unit_type, unit_data):
@@ -567,24 +603,40 @@ class DAT(FMFile):
567
603
  ) -> None:
568
604
  # Raise exception if a duplicate label is encountered
569
605
  if unit_name in unit_group:
570
- msg = f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[unit_type]["group"]}'
606
+ msg = f"Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[unit_type]['group']}"
571
607
  raise Exception(msg)
572
608
  # Changes done to account for unit types with spaces/dashes eg Flat-V Weir
573
609
  unit_type_safe = unit_type.replace(" ", "_").replace("-", "_")
574
- unit_group[unit_name] = getattr(units, unit_type_safe)(unit_data, self._label_len)
575
- self._all_units.append(unit_group[unit_name])
610
+
611
+ # Get class object from unit type and instantiate unit with block data & length.
612
+ unit = getattr(units, unit_type_safe)(unit_data, self._label_len)
613
+
614
+ # Add unit to group, and to all units list.
615
+ unit_group[unit_name] = unit
616
+ self._all_units.append(unit)
576
617
 
577
618
  def _process_unsupported_unit(self, unit_type, unit_data) -> None:
578
619
  # Check to see whether unit type has associated subtypes so that unit name can be correctly assigned
579
620
  unit_name, subtype = self._get_unsupported_unit_name(unit_type, unit_data)
580
- self._unsupported[f"{unit_name} ({unit_type})"] = units.UNSUPPORTED(
621
+ unit_name_and_type = f"{unit_name} ({unit_type})"
622
+ if unit_name_and_type in self._unsupported:
623
+ msg = (
624
+ f"Duplicate label ({unit_name_and_type}) encountered within category: _unsupported"
625
+ )
626
+ raise Exception(msg)
627
+ self._unsupported[unit_name_and_type] = units.UNSUPPORTED(
581
628
  unit_data,
582
629
  self._label_len,
583
630
  unit_name=unit_name,
584
631
  unit_type=unit_type,
585
632
  subtype=subtype,
586
633
  )
587
- self._all_units.append(self._unsupported[f"{unit_name} ({unit_type})"])
634
+ self._all_units.append(self._unsupported[unit_name_and_type])
635
+
636
+ def _get_supported_unit_name(self, unit_type: str, unit_data: list[str]) -> str:
637
+ if units.SUPPORTED_UNIT_TYPES[unit_type]["has_subtype"]:
638
+ return unit_data[2][: self._label_len].strip()
639
+ return unit_data[1][: self._label_len].strip()
588
640
 
589
641
  def _get_unsupported_unit_name(self, unit_type: str, unit_data: list[str]) -> tuple[str, bool]:
590
642
  # Check if the unit type has associated subtypes
@@ -708,6 +760,19 @@ class DAT(FMFile):
708
760
 
709
761
  return unit_block, in_block
710
762
 
763
+ @property
764
+ def node_labels(self):
765
+ all_labels = set()
766
+ for unit in self._all_units:
767
+ all_labels.update(unit.all_labels)
768
+ return all_labels
769
+
770
+ def _get_unit_group_name(self, unit: Unit) -> str:
771
+ unit_type = unit.unit
772
+ if unit_type in units.SUPPORTED_UNIT_TYPES:
773
+ return units.SUPPORTED_UNIT_TYPES[unit_type]["group"]
774
+ return "_unsupported"
775
+
711
776
  @handle_exception(when="remove unit from")
712
777
  def remove_unit(self, unit: Unit) -> None:
713
778
  """Remove a unit from the dat file.
@@ -732,19 +797,23 @@ class DAT(FMFile):
732
797
  # remove from raw data
733
798
  del self._raw_data[dat_struct_unit["start"] : dat_struct_unit["end"] + 1]
734
799
  # remove from unit group
735
- unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
800
+ unit_group_name = self._get_unit_group_name(unit)
736
801
  unit_group = getattr(self, unit_group_name)
737
- del unit_group[unit.name]
738
- # remove from ICs
739
- self.initial_conditions.data = self.initial_conditions.data.loc[
740
- self.initial_conditions.data["label"] != unit.name
741
- ]
802
+ if unit_group_name == "_unsupported":
803
+ del unit_group[f"{unit.name} ({unit.unit})"]
804
+ else:
805
+ del unit_group[unit.name]
806
+ # remove from ICs if no more labels
807
+ if unit.name not in self.node_labels:
808
+ self.initial_conditions.data = self.initial_conditions.data.loc[
809
+ self.initial_conditions.data["label"] != unit.name
810
+ ]
811
+ self.general_parameters["Node Count"] -= 1
742
812
 
743
813
  self._update_dat_struct()
744
- self.general_parameters["Node Count"] -= 1
745
814
 
746
815
  @handle_exception(when="insert unit into")
747
- def insert_unit( # noqa: C901, PLR0912
816
+ def insert_unit(
748
817
  self,
749
818
  unit: Unit,
750
819
  add_before: Unit | None = None,
@@ -767,6 +836,48 @@ class DAT(FMFile):
767
836
  NameError: Raised if unit name already appears in unit group.
768
837
  """
769
838
  # catch errors
839
+ self._validate_insert_unit_params(unit, add_before, add_after, add_at)
840
+
841
+ unit_class = unit._unit
842
+ if unit_class != "COMMENT":
843
+ _validate_unit(unit)
844
+ unit_group_name = self._get_unit_group_name(unit)
845
+ unit_group = getattr(self, unit_group_name)
846
+ if unit.name in unit_group:
847
+ msg = "Name already appears in unit group. Cannot have two units with same name in same group"
848
+ raise NameError(msg)
849
+
850
+ insert_index = self._get_insert_index(add_before, add_after, add_at)
851
+
852
+ unit_data = unit._write()
853
+ if unit._unit != "COMMENT":
854
+ if unit_group_name == "_unsupported":
855
+ unit_group[f"{unit.name} ({unit.unit})"] = unit
856
+ else:
857
+ unit_group[unit.name] = unit
858
+ self._dat_struct.insert(
859
+ insert_index + 1,
860
+ {"Type": unit_class, "new_insert": unit_data},
861
+ ) # add to dat struct without unit.name
862
+
863
+ if unit._unit != "COMMENT" and unit.name not in self.node_labels:
864
+ # update the iic's tables
865
+ iic_data = [unit.name, "y", 00.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
866
+ self.initial_conditions.data.loc[len(self.initial_conditions.data)] = iic_data # flaged
867
+ self.general_parameters["Node Count"] += 1 # flag no update for comments
868
+
869
+ self._all_units.insert(insert_index, unit)
870
+ if not defer_update:
871
+ self._update_raw_data()
872
+ self._update_dat_struct()
873
+
874
+ def _validate_insert_unit_params(
875
+ self,
876
+ unit: Unit,
877
+ add_before: Unit | None,
878
+ add_after: Unit | None,
879
+ add_at: int | None,
880
+ ):
770
881
  provided_params = sum(arg is not None for arg in (add_before, add_after, add_at))
771
882
  if provided_params == 0:
772
883
  msg = "No positional argument given. Please provide either add_before, add_at or add_after"
@@ -781,15 +892,12 @@ class DAT(FMFile):
781
892
  msg = "add_before or add_after argument must be a Flood Modeller Unit type"
782
893
  raise TypeError(msg)
783
894
 
784
- unit_class = unit._unit
785
- if unit_class != "COMMENT":
786
- _validate_unit(unit)
787
- unit_group_name = units.SUPPORTED_UNIT_TYPES[unit._unit]["group"]
788
- unit_group = getattr(self, unit_group_name)
789
- if unit.name in unit_group:
790
- msg = "Name already appears in unit group. Cannot have two units with same name in same group"
791
- raise NameError(msg)
792
-
895
+ def _get_insert_index(
896
+ self,
897
+ add_before: Unit | None,
898
+ add_after: Unit | None,
899
+ add_at: int | None,
900
+ ) -> int:
793
901
  # positional argument
794
902
  if add_at is not None:
795
903
  insert_index = add_at
@@ -810,28 +918,7 @@ class DAT(FMFile):
810
918
  f"{check_unit} not found in dat network, so cannot be used to add before/after"
811
919
  )
812
920
  raise Exception(msg)
813
-
814
- unit_data = unit._write()
815
- self._all_units.insert(insert_index, unit)
816
- if unit._unit != "COMMENT":
817
- unit_group[unit.name] = unit
818
- self._dat_struct.insert(
819
- insert_index + 1,
820
- {"Type": unit_class, "new_insert": unit_data},
821
- ) # add to dat struct without unit.name
822
-
823
- if unit._unit != "COMMENT":
824
- # update the iic's tables
825
- iic_data = [unit.name, "y", 00.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
826
- self.initial_conditions.data.loc[len(self.initial_conditions.data)] = iic_data # flaged
827
-
828
- # update all
829
- if unit._unit != "COMMENT":
830
- self.general_parameters["Node Count"] += 1 # flag no update for comments
831
-
832
- if not defer_update:
833
- self._update_raw_data()
834
- self._update_dat_struct()
921
+ return insert_index
835
922
 
836
923
  def insert_units(
837
924
  self,
@@ -97,8 +97,7 @@ class HydrologyPlusExport(FMFile):
97
97
  if s == scenario and float(sd) == storm_duration and float(rp) == return_period:
98
98
  return column
99
99
  msg = (
100
- "No matching event was found based on "
101
- f"{return_period=}, {storm_duration=}, {scenario=}"
100
+ f"No matching event was found based on {return_period=}, {storm_duration=}, {scenario=}"
102
101
  )
103
102
  raise ValueError(msg)
104
103
 
floodmodeller_api/ied.py CHANGED
@@ -181,7 +181,7 @@ class IED(FMFile):
181
181
  # Create instance of unit and add to relevant group
182
182
  unit_group = getattr(self, units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"])
183
183
  if unit_name in unit_group:
184
- msg = f'Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block["Type"]]["group"]}'
184
+ msg = f"Duplicate label ({unit_name}) encountered within category: {units.SUPPORTED_UNIT_TYPES[block['Type']]['group']}"
185
185
  raise Exception(msg)
186
186
  unit_group[unit_name] = getattr(units, block["Type"])(unit_data)
187
187
 
@@ -5,7 +5,7 @@ from unittest.mock import patch
5
5
  import pytest
6
6
 
7
7
  from floodmodeller_api import DAT
8
- from floodmodeller_api.units import JUNCTION, LATERAL, QTBDY, RESERVOIR
8
+ from floodmodeller_api.units import JUNCTION, LATERAL, QTBDY, RESERVOIR, UNSUPPORTED
9
9
  from floodmodeller_api.util import FloodModellerAPIError
10
10
 
11
11
 
@@ -42,6 +42,28 @@ def dat_ex6(test_workspace):
42
42
  yield dat
43
43
 
44
44
 
45
+ @pytest.fixture()
46
+ def unsupported_dummy_unit():
47
+ data = [
48
+ "APITESTDUMMY Dummy unnsupported unit for testing purposes",
49
+ "LBL001 LBL002",
50
+ "arbitrary data",
51
+ " table01234",
52
+ " -0.500 0.000 0.000 0.000091000000.0",
53
+ " 0.000 1.000 1.000 0.0000 910000000",
54
+ " 1.000 2.000 2.000 0.000091000000.0",
55
+ " 2.000 3.000 3.000 0.000091000000.0",
56
+ " 5.000 3.000 3.000 0.000091000000.0",
57
+ ]
58
+ return UNSUPPORTED(
59
+ data,
60
+ 12,
61
+ unit_name="LBL001",
62
+ unit_type="APITESTDUMMY",
63
+ subtype=False,
64
+ )
65
+
66
+
45
67
  def test_changing_section_and_dist_works(dat_fp, data_before):
46
68
  """DAT: Test changing and reverting section name and dist to next makes no changes"""
47
69
  dat = DAT(dat_fp)
@@ -74,15 +96,33 @@ def test_changing_and_reverting_qtbdy_hydrograph_works(dat_fp, data_before):
74
96
  def test_dat_read_doesnt_change_data(test_workspace, tmp_path):
75
97
  """DAT: Check all '.dat' files in folder by reading the _write() output into a new DAT instance and checking it stays the same."""
76
98
  for datfile in Path(test_workspace).glob("*.dat"):
99
+ if datfile.name.startswith("duplicate_unit_test"):
100
+ # Skipping as invalid DAT (duplicate units)
101
+ continue
102
+
77
103
  dat = DAT(datfile)
78
104
  first_output = dat._write()
79
105
  new_path = tmp_path / "tmp.dat"
80
106
  dat.save(new_path)
81
107
  second_dat = DAT(new_path)
82
- assert dat == second_dat, f"dat objects not equal for {datfile=}"
108
+ assert dat == second_dat, f"dat objects not equal for {datfile=}\n{dat.diff(second_dat)}"
83
109
  second_output = second_dat._write()
84
110
  assert first_output == second_output, f"dat outputs not equal for {datfile=}"
85
111
 
112
+ gxy_path = datfile.with_suffix(".gxy")
113
+ if gxy_path.exists():
114
+ second_gxy_path = new_path.with_suffix(".gxy")
115
+ assert second_gxy_path.exists(), f"updated .gxy not found when testing {datfile=}"
116
+
117
+ # note filecmp.cmp() doesnt work here because input/output data has different eol sequences.
118
+ assert (
119
+ gxy_path.read_text() == second_gxy_path.read_text()
120
+ ), f".gxy file content not identical for {datfile=}"
121
+
122
+ new_path.unlink()
123
+ if gxy_path.exists():
124
+ second_gxy_path.unlink()
125
+
86
126
 
87
127
  def test_insert_unit_before(units, dat_ex6):
88
128
  dat_ex6.insert_unit(units[0], add_before=dat_ex6.sections["P4000"])
@@ -380,3 +420,50 @@ def test_encoding(test_workspace: Path, dat_str: str, label: str, tmp_path: Path
380
420
 
381
421
  assert label in dat_read.sections
382
422
  assert label in dat_write.sections # remains as \xc3\xa5 even for utf8
423
+
424
+
425
+ def test_insert_unsupported_unit(tmp_path: Path, unsupported_dummy_unit):
426
+ new_dat = DAT()
427
+ new_dat.insert_unit(unsupported_dummy_unit, add_at=-1)
428
+ assert unsupported_dummy_unit in new_dat._unsupported.values()
429
+ assert len(new_dat._all_units) == 1
430
+ filepath = tmp_path / "insert_dummy_test.dat"
431
+ new_dat.save(filepath)
432
+
433
+ dat = DAT(filepath)
434
+ assert unsupported_dummy_unit in dat._unsupported.values()
435
+ assert len(dat._all_units) == 1
436
+
437
+
438
+ def test_remove_unsupported_unit(test_workspace, unsupported_dummy_unit):
439
+ dat = DAT(test_workspace / "remove_dummy_test.dat")
440
+ assert len(dat._all_units) == 1
441
+ assert len(dat._dat_struct) == 3
442
+ assert len(dat.initial_conditions.data) == 1
443
+ assert "LBL001 (APITESTDUMMY)" in dat._unsupported
444
+ dat.remove_unit(unsupported_dummy_unit)
445
+ assert len(dat._all_units) == 0
446
+ assert len(dat._dat_struct) == 2
447
+ assert len(dat.initial_conditions.data) == 0
448
+ assert "LBL001 (APITESTDUMMY)" not in dat._unsupported
449
+ dat._write()
450
+ assert len(dat._all_units) == 0
451
+ assert len(dat._dat_struct) == 2
452
+ assert len(dat.initial_conditions.data) == 0
453
+ assert "LBL001 (APITESTDUMMY)" not in dat._unsupported
454
+
455
+
456
+ def test_duplicate_unit_raises_error(test_workspace):
457
+ msg = (
458
+ r"\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
459
+ r"\nAPI Error: Problem encountered when trying to read DAT file .*\."
460
+ r"\n"
461
+ r"\nDetails: .*-floodmodeller_api/dat\.py-\d+"
462
+ r"\nMsg: Duplicate label (.*) encountered within category: .*"
463
+ r"\n"
464
+ r"\nFor additional support, go to: https://github\.com/People-Places-Solutions/floodmodeller-api"
465
+ )
466
+ with pytest.raises(FloodModellerAPIError, match=msg):
467
+ DAT(test_workspace / "duplicate_unit_test.dat")
468
+ with pytest.raises(FloodModellerAPIError, match=msg):
469
+ DAT(test_workspace / "duplicate_unit_test_unsupported.dat")
@@ -715,8 +715,6 @@ YARNELL
715
715
  4.000 5.000 6.000 5.000
716
716
  GERRBDY
717
717
  Gerry46
718
- GERRBDY
719
- Gerry46
720
718
  #COMMENT
721
719
 
722
720
  ##CATCHMENT DETAILS
@@ -715,8 +715,6 @@ YARNELL
715
715
  4.000 5.000 6.000 5.000
716
716
  GERRBDY
717
717
  Gerry46
718
- GERRBDY
719
- Gerry46
720
718
  #COMMENT
721
719
 
722
720
  ##CATCHMENT DETAILS