floodmodeller-api 0.4.4.post1__py3-none-any.whl → 0.5.0.post1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. floodmodeller_api/__init__.py +1 -0
  2. floodmodeller_api/dat.py +117 -96
  3. floodmodeller_api/hydrology_plus/__init__.py +2 -0
  4. floodmodeller_api/hydrology_plus/helper.py +23 -0
  5. floodmodeller_api/hydrology_plus/hydrology_plus_export.py +333 -0
  6. floodmodeller_api/ied.py +93 -90
  7. floodmodeller_api/ief.py +233 -50
  8. floodmodeller_api/ief_flags.py +1 -0
  9. floodmodeller_api/logs/lf.py +5 -1
  10. floodmodeller_api/mapping.py +2 -0
  11. floodmodeller_api/test/test_conveyance.py +23 -32
  12. floodmodeller_api/test/test_data/7082.ief +28 -0
  13. floodmodeller_api/test/test_data/BaseModel_2D_Q100.ief +28 -0
  14. floodmodeller_api/test/test_data/Baseline_unchecked.csv +77 -0
  15. floodmodeller_api/test/test_data/Constant QT.ief +19 -0
  16. floodmodeller_api/test/test_data/Domain1_Q_xml_expected.json +7 -7
  17. floodmodeller_api/test/test_data/EX18_DAT_expected.json +54 -38
  18. floodmodeller_api/test/test_data/EX3_DAT_expected.json +246 -166
  19. floodmodeller_api/test/test_data/EX3_IEF_expected.json +25 -20
  20. floodmodeller_api/test/test_data/EX6_DAT_expected.json +522 -350
  21. floodmodeller_api/test/test_data/FEH boundary.ief +23 -0
  22. floodmodeller_api/test/test_data/Linked1D2D_xml_expected.json +7 -7
  23. floodmodeller_api/test/test_data/P3Panels_UNsteady.ief +25 -0
  24. floodmodeller_api/test/test_data/QT in dat file.ief +20 -0
  25. floodmodeller_api/test/test_data/T10.ief +25 -0
  26. floodmodeller_api/test/test_data/T2.ief +25 -0
  27. floodmodeller_api/test/test_data/T5.ief +25 -0
  28. floodmodeller_api/test/test_data/df_flows_hplus.csv +56 -0
  29. floodmodeller_api/test/test_data/event_hplus.csv +56 -0
  30. floodmodeller_api/test/test_data/ex4.ief +20 -0
  31. floodmodeller_api/test/test_data/ex6.ief +21 -0
  32. floodmodeller_api/test/test_data/example_h+_export.csv +77 -0
  33. floodmodeller_api/test/test_data/hplus_export_example_1.csv +72 -0
  34. floodmodeller_api/test/test_data/hplus_export_example_10.csv +77 -0
  35. floodmodeller_api/test/test_data/hplus_export_example_2.csv +79 -0
  36. floodmodeller_api/test/test_data/hplus_export_example_3.csv +77 -0
  37. floodmodeller_api/test/test_data/hplus_export_example_4.csv +131 -0
  38. floodmodeller_api/test/test_data/hplus_export_example_5.csv +77 -0
  39. floodmodeller_api/test/test_data/hplus_export_example_6.csv +131 -0
  40. floodmodeller_api/test/test_data/hplus_export_example_7.csv +131 -0
  41. floodmodeller_api/test/test_data/hplus_export_example_8.csv +131 -0
  42. floodmodeller_api/test/test_data/hplus_export_example_9.csv +131 -0
  43. floodmodeller_api/test/test_data/network_dat_expected.json +312 -210
  44. floodmodeller_api/test/test_data/network_ied_expected.json +6 -6
  45. floodmodeller_api/test/test_data/network_with_comments.ied +55 -0
  46. floodmodeller_api/test/test_flowtimeprofile.py +133 -0
  47. floodmodeller_api/test/test_hydrology_plus_export.py +210 -0
  48. floodmodeller_api/test/test_ied.py +12 -0
  49. floodmodeller_api/test/test_ief.py +49 -9
  50. floodmodeller_api/test/test_json.py +6 -1
  51. floodmodeller_api/test/test_read_file.py +27 -0
  52. floodmodeller_api/test/test_river.py +246 -0
  53. floodmodeller_api/to_from_json.py +7 -1
  54. floodmodeller_api/tool.py +6 -10
  55. floodmodeller_api/units/__init__.py +11 -1
  56. floodmodeller_api/units/conveyance.py +103 -212
  57. floodmodeller_api/units/sections.py +120 -39
  58. floodmodeller_api/util.py +2 -0
  59. floodmodeller_api/version.py +1 -1
  60. floodmodeller_api/xml2d.py +20 -13
  61. floodmodeller_api/xsd_backup.xml +738 -0
  62. {floodmodeller_api-0.4.4.post1.dist-info → floodmodeller_api-0.5.0.post1.dist-info}/METADATA +2 -1
  63. {floodmodeller_api-0.4.4.post1.dist-info → floodmodeller_api-0.5.0.post1.dist-info}/RECORD +67 -33
  64. {floodmodeller_api-0.4.4.post1.dist-info → floodmodeller_api-0.5.0.post1.dist-info}/WHEEL +1 -1
  65. {floodmodeller_api-0.4.4.post1.dist-info → floodmodeller_api-0.5.0.post1.dist-info}/LICENSE.txt +0 -0
  66. {floodmodeller_api-0.4.4.post1.dist-info → floodmodeller_api-0.5.0.post1.dist-info}/entry_points.txt +0 -0
  67. {floodmodeller_api-0.4.4.post1.dist-info → floodmodeller_api-0.5.0.post1.dist-info}/top_level.txt +0 -0
floodmodeller_api/ief.py CHANGED
@@ -16,9 +16,11 @@ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London
16
16
 
17
17
  from __future__ import annotations
18
18
 
19
+ import csv
19
20
  import os
20
21
  import subprocess
21
22
  import time
23
+ from io import StringIO
22
24
  from pathlib import Path
23
25
  from subprocess import Popen
24
26
  from typing import Callable
@@ -27,24 +29,38 @@ import pandas as pd
27
29
  from tqdm import trange
28
30
 
29
31
  from ._base import FMFile
32
+ from .diff import check_item_with_dataframe_equal
30
33
  from .ief_flags import flags
31
34
  from .logs import LF1, create_lf
35
+ from .to_from_json import Jsonable
32
36
  from .util import handle_exception
33
37
  from .zzn import ZZN
34
38
 
35
39
 
40
+ def try_numeric(value: str) -> str | int | float:
41
+ """Attempt to parse value as float or int if valid, else return the original string"""
42
+ try:
43
+ return int(value)
44
+ except ValueError:
45
+ try:
46
+ return float(value)
47
+ except ValueError:
48
+ return value
49
+
50
+
36
51
  class IEF(FMFile):
37
52
  """Reads and write Flood Modeller event file format '.ief'
38
53
 
39
54
  Args:
40
- ief_filepath (str, optional): Full filepath to ief file. If not specified, a new IEF class will be created.. Defaults to None.
55
+ ief_filepath (str, optional): Full filepath to ief file. If not specified, a new IEF class
56
+ will be created.. Defaults to None.
41
57
 
42
58
  Raises:
43
59
  TypeError: Raised if ief_filepath not pointing to valide IEF file
44
60
  FileNotFoundError: Raised if ief_filepath points to a non-existent location
45
61
 
46
62
  Output:
47
- Initiates 'IEF' class object
63
+ Initiates 'IEF' class object
48
64
  """
49
65
 
50
66
  _filetype: str = "IEF"
@@ -70,7 +86,9 @@ class IEF(FMFile):
70
86
  # Clean data and add as class properties
71
87
  # Create a list to store the properties which are to be saved in IEF, so as to ignore any temp properties.
72
88
  prev_comment = None
73
- self._ief_properties = []
89
+ self._ief_properties: list[str] = []
90
+ self.EventData: dict[str, str] = {}
91
+ self.flowtimeprofiles: list[FlowTimeProfile] = []
74
92
  for line in raw_data:
75
93
  # Handle any comments here (prefixed with ;)
76
94
  if line.lstrip().startswith(";"):
@@ -89,23 +107,35 @@ class IEF(FMFile):
89
107
  event_data_title = value
90
108
  else:
91
109
  event_data_title = prev_comment
92
- if hasattr(self, "EventData"):
93
- # Append event data to list so multiple can be specified
94
- self.EventData[event_data_title] = value
95
- else:
96
- self.EventData = {event_data_title: value}
110
+ self.eventdata[event_data_title] = value
97
111
  self._ief_properties.append("EventData")
98
112
 
113
+ elif prop.upper().startswith("FLOWTIMEPROFILE"):
114
+ self.flowtimeprofiles.append(
115
+ FlowTimeProfile(value, ief_filepath=self._filepath),
116
+ )
117
+ self._ief_properties.append(prop)
99
118
  else:
100
119
  # Sets the property and value as class properties so they can be edited.
101
- setattr(self, prop, value)
120
+ setattr(self, prop, try_numeric(value))
102
121
  self._ief_properties.append(prop)
103
122
  prev_comment = None
104
123
  else:
105
124
  # This should add the [] bound headers
106
125
  self._ief_properties.append(line)
107
126
  prev_comment = None
108
- del raw_data
127
+
128
+ self._check_formatting(raw_data)
129
+ self._update_ief_properties() # call this here to ensure ief properties is correct
130
+
131
+ def _check_formatting(self, raw_data: list[str]) -> None:
132
+ """Check to see if ief formatted with line breaks between groups and spaces around '='."""
133
+ self._format_group_line_breaks = False
134
+ self._format_equals_spaced = False
135
+ if "" in raw_data[:-1]:
136
+ self._format_group_line_breaks = True
137
+ if any(" = " in line for line in raw_data):
138
+ self._format_equals_spaced = True
109
139
 
110
140
  @handle_exception(when="write")
111
141
  def _write(self) -> str:
@@ -118,36 +148,50 @@ class IEF(FMFile):
118
148
  self._update_ief_properties()
119
149
 
120
150
  ief_string = ""
121
- event = 0 # Used as a counter for multiple eventdata files
151
+ event_index = 0 # Used as a counter for multiple eventdata files
152
+ ftp_index = 0 # Counter for flowtimeprofiles
153
+ eq = " = " if self._format_equals_spaced else "="
154
+ section_newline = "\n" if self._format_group_line_breaks else ""
122
155
  for idx, prop in enumerate(self._ief_properties):
123
156
  if prop.startswith("["):
124
157
  # writes the [] bound headers to ief string
125
- ief_string += prop + "\n"
158
+ if idx > 0:
159
+ ief_string += section_newline + prop + "\n"
160
+ else:
161
+ ief_string += prop + "\n"
162
+
126
163
  elif prop.lstrip().startswith(";"):
127
164
  if self._ief_properties[idx + 1].lower() != "eventdata":
128
165
  # Only write comment if not preceding event data
129
166
  ief_string += prop + "\n"
167
+
130
168
  elif prop.lower() == "eventdata":
131
169
  event_data = getattr(self, prop)
132
170
  # Add multiple EventData if present
133
- for event_idx, key in enumerate(event_data):
134
- if event_idx == event:
135
- ief_string += f";{key}\n{prop}={str(event_data[key])}\n"
171
+ for idx, key in enumerate(event_data):
172
+ if idx == event_index:
173
+ ief_string += f";{key}\nEventData{eq}{str(event_data[key])}\n"
136
174
  break
137
- event += 1
175
+ event_index += 1
176
+
177
+ elif prop.lower().startswith("flowtimeprofile"):
178
+ flowtimeprofile = self.flowtimeprofiles[ftp_index]
179
+ ief_string += f"{prop}{eq}{flowtimeprofile}\n"
180
+ ftp_index += 1
138
181
 
139
182
  else:
140
183
  # writes property and value to ief string
141
- ief_string += f"{prop}={str(getattr(self, prop))}\n"
184
+ ief_string += f"{prop}{eq}{str(getattr(self, prop))}\n"
185
+
142
186
  return ief_string
143
187
 
144
188
  def _create_from_blank(self):
145
189
  # No filepath specified, create new 'blank' IEF in memory
146
190
  blank_ief = [
147
191
  "[ISIS Event Header]",
148
- 'Title=""',
149
- 'Datafile=""',
150
- 'Results=""',
192
+ "Title=",
193
+ "Datafile=",
194
+ "Results=",
151
195
  "[ISIS Event Details]",
152
196
  "RunType=Steady",
153
197
  "Start=0",
@@ -155,23 +199,32 @@ class IEF(FMFile):
155
199
  ]
156
200
 
157
201
  # Create a list to store the properties which are to be saved in IEF, so as to ignore any temp properties.
202
+ self._filepath = None
158
203
  self._ief_properties = []
204
+ self._format_group_line_breaks = False
205
+ self._format_equals_spaced = False
206
+ self.EventData: dict[str, str] = {}
207
+ self.flowtimeprofiles: list[FlowTimeProfile] = []
159
208
  for line in blank_ief:
160
209
  if "=" in line:
161
210
  prop, value = line.split("=")
162
211
  # Sets the property and value as class properties so they can be edited.
163
- setattr(self, prop, value)
212
+ setattr(self, prop, try_numeric(value))
164
213
  self._ief_properties.append(prop)
165
214
  else:
166
215
  # This should add the [] bound headers
167
216
  self._ief_properties.append(line)
168
217
  del blank_ief
169
218
 
170
- def _update_ief_properties(self): # noqa: C901
219
+ def _update_ief_properties(self):
171
220
  """Updates the list of properties included in the IEF file"""
172
221
  # Add new properties
173
222
  for prop, val in self.__dict__.copy().items():
174
- if (prop not in self._ief_properties) and (not prop.startswith("_")) and prop != "file":
223
+ if (
224
+ (prop not in self._ief_properties)
225
+ and (not prop.startswith("_"))
226
+ and prop not in ["file", "flowtimeprofiles"]
227
+ ):
175
228
  # Check if valid flag
176
229
  if prop.upper() not in flags:
177
230
  print(
@@ -186,8 +239,8 @@ class IEF(FMFile):
186
239
  # exist, this stops it being deleted
187
240
  # Add new values to EventData flag
188
241
  delattr(self, prop)
189
- self.EventData = val
190
- prop = "EventData"
242
+ self.eventdata = val
243
+ prop = "eventdata"
191
244
 
192
245
  # Check ief group header
193
246
  group = f"[{flags[prop.upper()]}]"
@@ -218,15 +271,13 @@ class IEF(FMFile):
218
271
  ]
219
272
 
220
273
  # Rearrange order of Flow Time Profiles group if present * Currently assuming all relevent flags included
221
- if "[Flow Time Profiles]" in self._ief_properties:
222
- self._update_flowtimeprofile_info()
274
+ self._update_flowtimeprofile_info()
223
275
 
224
276
  # Ensure number of EventData entries is equal to length of EventData attribute
225
- if hasattr(self, "EventData"):
226
- self._update_eventdata_info()
277
+ self._update_eventdata_info()
227
278
 
228
279
  def _update_eventdata_info(self): # noqa: C901
229
- if not isinstance(self.EventData, dict):
280
+ if not isinstance(self.eventdata, dict):
230
281
  # If attribute not a dict, adds the value as a single entry in list
231
282
  raise AttributeError(
232
283
  "The 'EventData' attribute should be a dictionary with keys defining the event"
@@ -236,7 +287,7 @@ class IEF(FMFile):
236
287
  # Number of 'EventData' flags in ief
237
288
  event_properties = self._ief_properties.count("EventData")
238
289
  # Number of event data specified in class
239
- events = len(self.EventData)
290
+ events = len(self.eventdata)
240
291
  if event_properties < events:
241
292
  # Need to add additional event properties to IEF to match number of events specified
242
293
  to_add = events - event_properties
@@ -266,35 +317,67 @@ class IEF(FMFile):
266
317
  if removed == to_remove:
267
318
  break
268
319
 
269
- def _update_flowtimeprofile_info(self):
320
+ def _update_flowtimeprofile_info(self) -> None:
321
+ """Update the flowtimeprofile data stored in ief properties"""
322
+ if not hasattr(self, "flowtimeprofiles") or len(self.flowtimeprofiles) == 0:
323
+ self._remove_flowtimeprofile_info()
324
+ return
325
+
326
+ # Update properties
327
+ self.NoOfFlowTimeProfiles = len(self.flowtimeprofiles)
328
+ try:
329
+ self.NoOfFlowTimeSeries = sum([ftp.count_series() for ftp in self.flowtimeprofiles])
330
+ except FileNotFoundError as err:
331
+ raise UserWarning(
332
+ "Failed to read csv referenced in flowtimeprofile, file either does not exist or is"
333
+ "unable to be found due to relative path from IEF file. NoOfFlowTimeSeries has not"
334
+ "been updated.",
335
+ ) from err
336
+
270
337
  end_index = None
271
- start_index = self._ief_properties.index("[Flow Time Profiles]")
338
+ start_index = (
339
+ self._ief_properties.index("[Flow Time Profiles]")
340
+ if "[Flow Time Profiles]" in self._ief_properties
341
+ else len(self._ief_properties)
342
+ )
272
343
  for idx, item in enumerate(self._ief_properties[start_index:]):
273
344
  if idx != 0 and item.startswith("["):
274
345
  end_index = idx + start_index
275
346
  break
276
- flow_time_list = self._ief_properties[start_index:end_index]
277
- flow_time_list = [
347
+
348
+ flowtimeprofile_list = [
278
349
  "[Flow Time Profiles]",
279
350
  "NoOfFlowTimeProfiles",
280
351
  "NoOfFlowTimeSeries",
281
- ] + [i for i in flow_time_list if i.lower().startswith("flowtimeprofile")]
282
-
283
- # sort list to ensure the flow time profiles are in order
284
- def flow_sort(itm):
285
- try:
286
- num = int(itm.upper().replace("FLOWTIMEPROFILE", ""))
287
- return (1, num)
288
- except ValueError:
289
- return (0, itm)
352
+ ]
353
+ for idx, _ in enumerate(self.flowtimeprofiles):
354
+ flowtimeprofile_list.append(f"FlowTimeProfile{idx}")
290
355
 
291
- flow_time_list[3:] = sorted(flow_time_list[3:], key=flow_sort)
356
+ # Replace existing slice of ief properties with new slice
357
+ self._ief_properties[start_index:end_index] = flowtimeprofile_list
292
358
 
293
- # Replace existing slice of ief properties with new reordered slice
294
- self._ief_properties[start_index:end_index] = flow_time_list
359
+ def _remove_flowtimeprofile_info(self) -> None:
360
+ """Delete flowtimeprofile data from ief properties and any attributes present"""
361
+ # Remove flowtimeprofile info from IEF properties
362
+ self._ief_properties = [
363
+ line
364
+ for line in self._ief_properties
365
+ if (
366
+ line.lower()
367
+ not in [
368
+ "[flow time profiles]",
369
+ "noofflowtimeprofiles",
370
+ "noofflowtimeseries",
371
+ ]
372
+ )
373
+ and (not line.lower().startswith("flowtimeprofile"))
374
+ ]
375
+ if hasattr(self, "noofflowtimeprofiles"):
376
+ del self.NoOfFlowTimeProfiles
377
+ if hasattr(self, "noofflowtimeseries"):
378
+ del self.NoOfFlowTimeSeries
295
379
 
296
- # Update NoOfFlowTimeSeries
297
- self.NoOfFlowTimeProfiles = str(len(flow_time_list[3:]))
380
+ self.flowtimeprofiles = []
298
381
 
299
382
  def __getattr__(self, name):
300
383
  for attr in self.__dict__.copy():
@@ -448,7 +531,7 @@ class IEF(FMFile):
448
531
  return None
449
532
 
450
533
  def _get_result_filepath(self, suffix):
451
- if hasattr(self, "Results") and self.Results != '""': # because blank IEF has 'Results=""'
534
+ if hasattr(self, "Results") and self.Results != "":
452
535
  path = Path(self.Results).with_suffix("." + suffix)
453
536
  if not path.is_absolute():
454
537
  # set cwd to ief location and resolve path
@@ -551,3 +634,103 @@ class IEF(FMFile):
551
634
  return 1, f"Simulation Failed! - {details}"
552
635
 
553
636
  return 0, f"Simulation Completed! - {details}"
637
+
638
+
639
+ class FlowTimeProfile(Jsonable):
640
+ """Handles defining and formatting flow time profiles in IEF files
641
+
642
+ Args:
643
+ raw_string (Optional[str]): A raw CSV-formatted string to initialize the profile attributes.
644
+
645
+ Keyword Args:
646
+ labels (list[str]): A list of string labels for the profile headers.
647
+ columns (list[int]): A list of integers (1-indexed) for the column indices of the profile.
648
+ start_row (int): The starting row index (1-indexed) for reading data from the CSV.
649
+ csv_filepath (str): The file path to the CSV file containing flow data.
650
+ file_type (str): The type of the file format, e.g. fm1, fm2, hplus, refh2.
651
+ profile (str): A description or identifier for the profile.
652
+ comment (str): An optional comment or note related to the profile.
653
+ ief_filepath (str): The base directory path for resolving the CSV file.
654
+
655
+ Raises:
656
+ ValueError: If neither a `raw_string` nor keyword arguments are provided.
657
+ """
658
+
659
+ labels: list[str]
660
+ columns: list[int]
661
+ start_row: int
662
+ csv_filepath: str
663
+ file_type: str
664
+ profile: str
665
+ comment: str
666
+
667
+ def __init__(self, raw_string: str | None = None, **kwargs) -> None:
668
+ """Initializes the FlowTimeProfile instance from either a raw string or keyword arguments."""
669
+ if raw_string is not None:
670
+ self._parse_raw_string(raw_string)
671
+
672
+ elif kwargs:
673
+ self.labels = kwargs.get("labels", [])
674
+ self.columns = kwargs.get("columns", [])
675
+ self.start_row = kwargs.get("start_row", 0)
676
+ self.csv_filepath = kwargs.get("csv_filepath", "")
677
+ self.file_type = kwargs.get("file_type", "")
678
+ self.profile = kwargs.get("profile", "")
679
+ self.comment = kwargs.get("comment", "")
680
+ else:
681
+ raise ValueError(
682
+ "You must provide either a single raw string argument or keyword arguments.",
683
+ )
684
+
685
+ base_path = Path(kwargs.get("ief_filepath", ""))
686
+ self._csvfile = (base_path / self.csv_filepath.strip('"')).resolve()
687
+
688
+ for attr in ["csv_filepath", "comment"]:
689
+ value = getattr(self, attr)
690
+ if "," in value:
691
+ # Ensure string wrapped in quotes if containing comma
692
+ setattr(self, attr, f'"{value}"'.replace('""', '"'))
693
+
694
+ def _parse_raw_string(self, raw_string: str) -> None:
695
+ """Parses a raw string of comma separated values and stores as attributes"""
696
+ csv_reader = csv.reader(StringIO(raw_string), skipinitialspace=True, quotechar='"')
697
+ parts = next(csv_reader) # Read the first (and only) line as a list of fields
698
+ self.labels = [label for label in parts[0].split(" ") if label != ""]
699
+ self.columns = [int(col) for col in parts[1].split(" ") if col != ""]
700
+ self.start_row = int(parts[2])
701
+ self.csv_filepath = parts[3]
702
+ self.file_type = parts[4]
703
+ self.profile, self.comment = (parts[5:] + ["", ""])[:2]
704
+
705
+ def __str__(self) -> str:
706
+ """Converts the flow time profile into a valid comma separated ief string"""
707
+ return (
708
+ f"{' '.join(self.labels)},{' '.join(map(str, self.columns))},{self.start_row},"
709
+ f"{self.csv_filepath},{self.file_type},{self.profile},{self.comment}"
710
+ )
711
+
712
+ def __repr__(self) -> str:
713
+ return (
714
+ f"<floodmodeller_api FlowTimeProfile(\n\tlabels={self.labels},\n\t"
715
+ f"columns={self.columns},\n\tstart_row={self.start_row},\n\t"
716
+ f"csv_filepath={self.csv_filepath},\n\tfile_type={self.file_type},\n\t"
717
+ f"profile={self.profile},\n\tcomment={self.comment}\n)>"
718
+ )
719
+
720
+ def __eq__(self, other, return_diff=False):
721
+ result = True
722
+ diff = []
723
+ result, diff = check_item_with_dataframe_equal(
724
+ {key: value for key, value in self.__dict__.items() if key != "_csvfile"},
725
+ {key: value for key, value in other.__dict__.items() if key != "_csvfile"},
726
+ name="FlowTimeProfile",
727
+ diff=diff,
728
+ )
729
+ return (result, diff) if return_diff else result
730
+
731
+ def count_series(self) -> int:
732
+ if self.file_type.lower() == "fm1":
733
+ # read csv and count series
734
+ return len(pd.read_csv(self._csvfile, skiprows=self.start_row - 1, index_col=0).columns)
735
+
736
+ return len(self.columns)
@@ -230,6 +230,7 @@ flags = {
230
230
  "OVERRIDEURBANTIMES": "ISIS Event Details",
231
231
  "NOOFFLOWTIMEPROFILES": "Flow Time Profiles",
232
232
  "NOOFFLOWTIMESERIES": "Flow Time Profiles",
233
+ "FLOWTIMEPROFILE0": "Flow Time Profiles",
233
234
  "FLOWTIMEPROFILE1": "Flow Time Profiles",
234
235
  "FLOWTIMEPROFILE2": "Flow Time Profiles",
235
236
  "FLOWTIMEPROFILE3": "Flow Time Profiles",
@@ -25,7 +25,11 @@ import pandas as pd
25
25
  from .._base import FMFile
26
26
  from ..util import handle_exception
27
27
  from .lf_helpers import state_factory
28
- from .lf_params import lf1_steady_data_to_extract, lf1_unsteady_data_to_extract, lf2_data_to_extract
28
+ from .lf_params import (
29
+ lf1_steady_data_to_extract,
30
+ lf1_unsteady_data_to_extract,
31
+ lf2_data_to_extract,
32
+ )
29
33
 
30
34
  if TYPE_CHECKING:
31
35
  from pathlib import Path
@@ -2,6 +2,7 @@ from typing import Any
2
2
 
3
3
  from . import DAT, IED, IEF, INP, LF1, LF2, XML2D, ZZN
4
4
  from .backup import File
5
+ from .ief import FlowTimeProfile
5
6
  from .units import (
6
7
  BLOCKAGE,
7
8
  BRIDGE,
@@ -40,6 +41,7 @@ api_class_mapping: dict[str, Any] = {
40
41
  "floodmodeller_api.dat.DAT": DAT,
41
42
  "floodmodeller_api.ied.IED": IED,
42
43
  "floodmodeller_api.ief.IEF": IEF,
44
+ "floodmodeller_api.ief.FlowTimeProfile": FlowTimeProfile,
43
45
  "floodmodeller_api.inp.INP": INP,
44
46
  "floodmodeller_api.lf.LF1": LF1,
45
47
  "floodmodeller_api.lf.LF2": LF2,
@@ -1,19 +1,23 @@
1
- from pathlib import Path
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING
2
4
 
3
5
  import numpy as np
4
6
  import pandas as pd
5
7
  import pytest
8
+ from numpy.testing import assert_array_almost_equal
6
9
  from scipy.spatial.distance import directed_hausdorff
7
- from shapely.geometry import LineString, Polygon
8
10
 
9
11
  from floodmodeller_api import DAT
10
12
  from floodmodeller_api.units.conveyance import (
11
- calculate_conveyance_by_panel,
12
- calculate_conveyance_part,
13
13
  calculate_cross_section_conveyance,
14
+ calculate_geometry,
14
15
  insert_intermediate_wls,
15
16
  )
16
17
 
18
+ if TYPE_CHECKING:
19
+ from pathlib import Path
20
+
17
21
 
18
22
  def test_calculate_cross_section_conveyance():
19
23
  x = np.array([0, 1, 2, 3, 4])
@@ -27,34 +31,6 @@ def test_calculate_cross_section_conveyance():
27
31
  assert not result.empty, "Result should not be empty"
28
32
 
29
33
 
30
- def test_calculate_conveyance_by_panel():
31
- x = np.array([0, 1, 2])
32
- y = np.array([5, 3, 1])
33
- n = np.array([0.03, 0.03])
34
- rpl = 1.0
35
- wls = np.array([1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0])
36
-
37
- result = calculate_conveyance_by_panel(x, y, n, rpl, wls)
38
-
39
- assert isinstance(result, list), "Result should be a list"
40
- assert len(result) == len(wls), "Result length should match the length of water levels"
41
- assert all(isinstance(val, float) for val in result), "All conveyance values should be floats"
42
-
43
-
44
- def test_calculate_conveyance_part():
45
- wetted_polygon = Polygon([(1, 3), (2, 1), (3, 2), (4, 6), (1, 3)])
46
- water_plane = LineString([(0, 3), (5, 3)])
47
- glass_walls = LineString([(1, 3), (1, 7)]), LineString([(4, 6), (4, 7)])
48
- x = np.array([1, 2, 3, 4])
49
- n = np.array([0.03, 0.03, 0.03, 0.03])
50
- rpl = 1.0
51
-
52
- result = calculate_conveyance_part(wetted_polygon, water_plane, glass_walls, x, n, rpl)
53
-
54
- assert isinstance(result, float), "Result should be a float"
55
- assert result >= 0, "Conveyance should be non-negative"
56
-
57
-
58
34
  def test_insert_intermediate_wls():
59
35
  arr = np.array([1.0, 2.0, 3.0])
60
36
  threshold = 0.5
@@ -105,3 +81,18 @@ def test_results_match_gui_at_shared_points(section: str, dat: DAT, from_gui: pd
105
81
  shared_index = sorted(set(actual.index).intersection(expected.index))
106
82
  diff = expected[shared_index] - actual[shared_index]
107
83
  assert (abs(diff) < tolerance).all() # asserts all conveyance values within 0.001 difference
84
+
85
+
86
+ def test_calculate_geometry():
87
+ # area example from https://blogs.sas.com/content/iml/2022/11/21/area-under-curve.html
88
+ x = np.array([1, 2, 3.5, 4, 5, 6, 6.5, 7, 8, 10, 12, 15])
89
+ y = np.array([-0.5, -0.1, 0.2, 0.7, 0.8, -0.2, 0.3, 0.6, 0.3, 0.1, -0.4, -0.6])
90
+ n = np.array([1, 2, 3.5, 0, 0, 0, 0, 0, 0, 5, 6, 7])
91
+ water_levels = np.array([-1, 0, 1])
92
+ area, length, mannings = calculate_geometry(x, y, n, water_levels)
93
+ total_area = np.sum(area, axis=1)
94
+ total_length = np.sum(length, axis=1)
95
+ total_mannings = np.sum(mannings, axis=1)
96
+ assert_array_almost_equal(total_area, np.array([0, 2.185, 13.65]))
97
+ assert_array_almost_equal(total_length, np.array([0, 6.808522, 15.145467]))
98
+ assert_array_almost_equal(total_mannings, np.array([0, 28.383004, 34.959038]))
@@ -0,0 +1,28 @@
1
+ [ISIS Event Header]
2
+ Title = 7082
3
+ Datafile = ..\..\networks\BridgeSpill1.dat
4
+ Results = Output\7082
5
+
6
+ [ISIS Event Details]
7
+ RunType = Unsteady
8
+ TimeZero = 0000hrs; 23/05/2024
9
+ Start = 0500hrs; 23/05/2024
10
+ Finish = 80
11
+ Timestep = 300
12
+ SaveInterval = 300
13
+ ICsFrom = 1
14
+ RefineBridgeSecProps = 0
15
+ SolveDHEqualsZeroAtStart = 1
16
+ RulesAtTimeZero = 1
17
+ RulesOnFirstIteration = 0
18
+ ResetTimesAfterPos = 1
19
+ UseFPSModularLimit = 1
20
+ OutputUnitSummary = 1
21
+ UseRemoteQ = 1
22
+ 2DFLOW = 1
23
+
24
+ [Flow Time Profiles]
25
+ NoOfFlowTimeProfiles = 2
26
+ NoOfFlowTimeSeries = 2
27
+ FlowTimeProfile0 = SEV19005,19,23,..\..\associated_data\Baseline unchecked.csv,hplus,100 - 11 - 2020 Upper,
28
+ FlowTimeProfile1 = randomInflow,9,23,..\..\associated_data\Baseline unchecked.csv,hplus,100 - 11 - Reconciled Baseline,
@@ -0,0 +1,28 @@
1
+ [ISIS Event Header]
2
+
3
+ ;WIBUContainer=140-1474647971
4
+ ;WIBUContainer=140-1474647971
5
+ Title = BaseModel_2D_Q100
6
+ Datafile = ..\..\networks\1D_Model_002.dat
7
+ Results = Output\BaseModel_2D_Q100
8
+
9
+ [ISIS Event Details]
10
+ RunType = Unsteady
11
+ Start = 0
12
+ Finish = 150
13
+ Timestep = 5
14
+ SaveInterval = 300
15
+ ICsFrom = 1
16
+ RefineBridgeSecProps = 0
17
+ SolveDHEqualsZeroAtStart = 1
18
+ RulesAtTimeZero = 1
19
+ RulesOnFirstIteration = 0
20
+ ResetTimesAfterPos = 1
21
+ UseFPSModularLimit = 1
22
+ UseRemoteQ = 1
23
+ 2DFLOW = 1
24
+
25
+ [Flow Time Profiles]
26
+ NoOfFlowTimeProfiles = 1
27
+ NoOfFlowTimeSeries = 1
28
+ FlowTimeProfile0 = Inflow1,65,2,..\..\associated_data\Severn_FLow_P01.csv,refh2,Total flow m3/s (100 year)- urbanised model,