floodmodeller-api 0.4.3__py3-none-any.whl → 0.4.4.post1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. floodmodeller_api/_base.py +22 -37
  2. floodmodeller_api/dat.py +165 -185
  3. floodmodeller_api/ied.py +82 -87
  4. floodmodeller_api/ief.py +92 -186
  5. floodmodeller_api/inp.py +64 -70
  6. floodmodeller_api/logs/__init__.py +1 -1
  7. floodmodeller_api/logs/lf.py +61 -17
  8. floodmodeller_api/test/conftest.py +7 -0
  9. floodmodeller_api/test/test_conveyance.py +107 -0
  10. floodmodeller_api/test/test_dat.py +5 -4
  11. floodmodeller_api/test/test_data/conveyance_test.dat +165 -0
  12. floodmodeller_api/test/test_data/conveyance_test.feb +116 -0
  13. floodmodeller_api/test/test_data/conveyance_test.gxy +85 -0
  14. floodmodeller_api/test/test_data/expected_conveyance.csv +60 -0
  15. floodmodeller_api/test/test_ief.py +26 -15
  16. floodmodeller_api/test/test_logs_lf.py +54 -0
  17. floodmodeller_api/to_from_json.py +24 -12
  18. floodmodeller_api/units/boundaries.py +6 -0
  19. floodmodeller_api/units/conveyance.py +301 -0
  20. floodmodeller_api/units/sections.py +21 -0
  21. floodmodeller_api/util.py +42 -0
  22. floodmodeller_api/version.py +1 -1
  23. floodmodeller_api/xml2d.py +80 -136
  24. floodmodeller_api/zzn.py +166 -139
  25. {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/METADATA +4 -1
  26. {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/RECORD +30 -24
  27. {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/WHEEL +1 -1
  28. {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/LICENSE.txt +0 -0
  29. {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/entry_points.txt +0 -0
  30. {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/top_level.txt +0 -0
floodmodeller_api/units/conveyance.py ADDED
@@ -0,0 +1,301 @@
+ from __future__ import annotations
+
+ from functools import lru_cache
+
+ import numpy as np
+ import pandas as pd
+ from shapely import LineString, MultiLineString, Polygon, intersection
+
+ MINIMUM_PERIMETER_THRESHOLD = 1e-8
+
+
+ def calculate_cross_section_conveyance(
+     x: np.ndarray,
+     y: np.ndarray,
+     n: np.ndarray,
+     rpl: np.ndarray,
+     panel_markers: np.ndarray,
+ ) -> pd.Series:
+     """
+     Calculate the conveyance of a cross-section by summing the conveyance
+     across all panels defined by panel markers.
+
+     Args:
+         x (np.ndarray): The x-coordinates of the cross-section.
+         y (np.ndarray): The y-coordinates of the cross-section.
+         n (np.ndarray): Manning's n values for each segment.
+         rpl (np.ndarray): Relative Path Length values for each segment.
+         panel_markers (np.ndarray): Boolean array indicating the start of each panel.
+
+     Returns:
+         pd.Series: A pandas Series containing the conveyance values indexed by water levels.
+
+     Example:
+         .. code-block:: python
+
+             x = np.array([0, 1, 2, 3, 4])
+             y = np.array([1, 2, 1, 2, 1])
+             n = np.array([0.03, 0.03, 0.03, 0.03, 0.03])
+             rpl = np.array([1., 1., 1., 1., 1.])
+             panel_markers = np.array([True, False, True, False, True])
+             result = calculate_cross_section_conveyance(x, y, n, rpl, panel_markers)
+             print(result)
+     """
+     # Create a set of water levels to calculate conveyance at,
+     # currently using 50mm minimum increments plus WLs at every data point
+     wls = insert_intermediate_wls(np.unique(y), threshold=0.05)
+
+     # Panel markers are forced true to the bounds to make the process work
+     panel_markers = np.array([True, *panel_markers[1:-1], True])
+     panel_indices = np.where(panel_markers)[0]
+     conveyance_by_panel = []
+     for panel_start, panel_end in zip(panel_indices[:-1], panel_indices[1:] + 1):
+         panel_x = x[panel_start:panel_end]
+         panel_y = y[panel_start:panel_end]
+         panel_n = n[panel_start:panel_end]
+         # RPL value is only valid for the start of a panel, and set to 1 if it's zero
+         panel_rpl = (
+             1.0
+             if (panel_start == 0 and not panel_markers[0]) or rpl[panel_start] == 0
+             else float(rpl[panel_start])
+         )
+         conveyance_by_panel.append(
+             calculate_conveyance_by_panel(panel_x, panel_y, panel_n, panel_rpl, wls),
+         )
+
+     # Sum conveyance across panels
+     conveyance_values = [sum(values) for values in zip(*conveyance_by_panel)]
+
+     return pd.Series(data=conveyance_values, index=wls)
+
+
+ def calculate_conveyance_by_panel(
+     x: np.ndarray,
+     y: np.ndarray,
+     n: np.ndarray,
+     rpl: float,
+     wls: np.ndarray,
+ ) -> list[float]:
+     """
+     Calculate the conveyance for a single panel of a cross-section at specified water levels.
+
+     Args:
+         x (np.ndarray): The x-coordinates of the panel.
+         y (np.ndarray): The y-coordinates of the panel.
+         n (np.ndarray): Manning's n values for each segment in the panel.
+         rpl (float): Relative Path Length for each segment in the panel.
+         wls (np.ndarray): The water levels at which to calculate conveyance.
+
+     Returns:
+         list[float]: A list of conveyance values for each water level.
+     """
+
+     max_y = np.max(wls) + 1
+     min_y = np.min(wls) - 1
+
+     # insert additional start/end points to represent the glass wall sides
+     x = np.array([x[0], *x, x[-1]])
+     y = np.array([max_y, *y, max_y])
+     n = np.array([0, *n, 0])
+
+     # Define a polygon for the channel including artificial sides and top
+     channel_polygon = Polygon(zip(x, y))
+     start, end = x[0] - 0.1, x[-1] + 0.1  # Useful points enclosing the x bounds with small buffer
+
+     # Define linestring geometries representing glass walls, so they can be subtracted later
+     glass_walls = (
+         LineString(zip([x[0], x[1]], [y[0], y[1]])),  # left
+         LineString(zip([x[-2], x[-1]], [y[-2], y[-1]])),  # right
+     )
+
+     # Remove glass wall sections from coords
+     x, y, n = x[1:-1], y[1:-1], n[1:-1]
+
+     conveyance_values = []
+     for wl in wls:
+         if wl <= np.min(y):
+             # no channel capacity (essentially zero depth) so no need to calculate
+             conveyance_values.append(0.0)
+             continue
+
+         # Some geometries to represent the channel at a given water level
+         water_surface = Polygon(zip([start, start, end, end], [wl, min_y, min_y, wl]))
+         water_plane = intersection(channel_polygon, LineString(zip([start, end], [wl, wl])))
+         wetted_polygon = intersection(channel_polygon, water_surface)
+
+         multiple_parts = wetted_polygon.geom_type in ["GeometryCollection", "MultiPolygon"]
+         parts = wetted_polygon.geoms if multiple_parts else [wetted_polygon]
+
+         conveyance = 0.0
+
+         # 'parts' here refers to when a water level results in 2 separate channel sections,
+         # e.g. where the cross section has a 'peak' part way through
+         for part in parts:
+             conveyance += calculate_conveyance_part(part, water_plane, glass_walls, x, n, rpl)
+         conveyance_values.append(conveyance)
+
+     return conveyance_values
+
+
+ def calculate_conveyance_part(  # noqa: PLR0913
+     wetted_polygon: Polygon,
+     water_plane: LineString,
+     glass_walls: tuple[LineString, LineString],
+     x: np.ndarray,
+     n: np.ndarray,
+     rpl: float,
+ ) -> float:
+     """
+     Calculate the conveyance for a part of the wetted area.
+
+     Args:
+         wetted_polygon (Polygon): The polygon representing the wetted area.
+         water_plane (LineString): The line representing the water plane.
+         glass_wall_left (LineString): The left boundary of the channel.
+         glass_wall_right (LineString): The right boundary of the channel.
+         x (np.ndarray): 1D array of channel chainage
+         n (np.ndarray): 1D array of channel mannings
+         rpl (float): Relative path length of panel
+
+     Returns:
+         float: The conveyance value for the wetted part.
+     """
+     water_plane_clip: LineString = intersection(water_plane, wetted_polygon)
+     glass_wall_left_clip: LineString = intersection(glass_walls[0], wetted_polygon)
+     glass_wall_right_clip: LineString = intersection(glass_walls[1], wetted_polygon)
+
+     # wetted perimeter should only account for actual section of channel, so we need to remove any
+     # length related to the water surface and any glass walls due to panel
+     perimeter_loss = (
+         water_plane_clip.length + glass_wall_left_clip.length + glass_wall_right_clip.length
+     )
+
+     wetted_perimeter = wetted_polygon.boundary.length - perimeter_loss
+     if wetted_perimeter < MINIMUM_PERIMETER_THRESHOLD:
+         # Would occur if water level is above lowest point on section, but intersects a near-zero
+         # perimeter, e.g. touching the bottom of an elevated side channel
+         return 0.0
+
+     area = wetted_polygon.area
+
+     wetted_polyline: LineString = (
+         wetted_polygon.exterior.difference(water_plane_clip)
+         .difference(glass_wall_left_clip)
+         .difference(glass_wall_right_clip)
+     )
+     weighted_mannings = calculate_weighted_mannings(x, n, rpl, wetted_polyline)
+
+     # apply conveyance equation
+     return (area ** (5 / 3) / wetted_perimeter ** (2 / 3)) * (wetted_perimeter / weighted_mannings)
+
+
+ def insert_intermediate_wls(arr: np.ndarray, threshold: float):
+     """
+     Insert intermediate water levels into an array based on a threshold.
+
+     Args:
+         arr (np.ndarray): The array of original water levels.
+         threshold (float): The maximum allowed gap between water levels.
+
+     Returns:
+         np.ndarray: The array with intermediate water levels inserted.
+     """
+     # Calculate gaps between consecutive elements
+     gaps = np.diff(arr)
+
+     # Calculate the number of points needed for each gap
+     num_points = (gaps // threshold).astype(int)
+
+     # Prepare lists to hold the new points and results
+     new_points = []
+
+     for i, start in enumerate(arr[:-1]):
+         end = arr[i + 1]
+         if num_points[i] > 0:
+             points = np.linspace(start, end, num_points[i] + 2)[1:-1]
+             new_points.extend(points)
+         new_points.append(end)
+
+     # Combine the original starting point with the new points
+     return np.array([arr[0]] + new_points)
+
+
+ def calculate_weighted_mannings(
+     x: np.ndarray,
+     n: np.ndarray,
+     rpl: float,
+     wetted_polyline: LineString,
+ ) -> float:
+     """Calculate the weighted Manning's n value for a wetted polyline."""
+
+     # We want the polyline to be split into each individual segment
+     segments = line_to_segments(wetted_polyline)
+     weighted_mannings = 0
+     for segment in segments:
+         mannings_value = get_mannings_by_segment_x_coords(
+             x,
+             n,
+             segment.coords[0][0],
+             segment.coords[1][0],
+         )
+         weighted_mannings += mannings_value * segment.length * np.sqrt(rpl)
+
+     return weighted_mannings
+
+
+ def line_to_segments(line: LineString | MultiLineString) -> list[LineString]:
+     """Convert a LineString or MultiLineString into a list of LineString segments."""
+     if isinstance(line, LineString):
+         segments = []
+         for start, end in zip(line.coords[:-1], line.coords[1:]):
+             points = sorted([start, end], key=lambda x: x[0])
+             segments.append(LineString(points))
+         return segments
+     if isinstance(line, MultiLineString):
+         segments = []
+         for linestring in line.geoms:
+             segments.extend(line_to_segments(linestring))
+         return segments
+     raise TypeError("Input must be a LineString or MultiLineString")
+
+
+ def get_mannings_by_segment_x_coords(
+     x: np.ndarray,
+     n: np.ndarray,
+     start_x: float,
+     end_x: float,
+ ) -> float:
+     """Get the Manning's n or RPL value for a segment based on its start x-coordinate."""
+
+     # This method doesn't handle cases where we have multiple manning's values at a vertical section
+     # and will always just take the first at any verticle, but it is probably quite rare for this
+     # not to be the case
+     if start_x == end_x:
+         # Vertical segment take first x match
+         index = np.searchsorted(x, start_x) - (start_x not in x)
+     else:
+         # Otherwise non-vertical segment, take last match
+         index = np.searchsorted(x, start_x, side="right") - 1
+
+     return n[index]
+
+
+ @lru_cache
+ def calculate_cross_section_conveyance_chached(
+     x: tuple[float],
+     y: tuple[float],
+     n: tuple[float],
+     rpl: tuple[float],
+     panel_markers: tuple[float],
+ ) -> pd.Series:
+     """Dummy function to allow for caching of the conveyance function as numpy arrays are not
+     hashable
+     """
+
+     return calculate_cross_section_conveyance(
+         np.array(x),
+         np.array(y),
+         np.array(n),
+         np.array(rpl),
+         np.array(panel_markers),
+     )
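
For illustration, here is a minimal sketch of calling the cached entry point directly. The import path is assumed from the new file location `floodmodeller_api/units/conveyance.py`, and the channel coordinates are made-up values; the key point is that arguments are passed as tuples because `lru_cache` requires hashable inputs, and the wrapper converts them back to arrays before delegating to `calculate_cross_section_conveyance`.

```python
# Assumed import path, based on the new file floodmodeller_api/units/conveyance.py
from floodmodeller_api.units.conveyance import calculate_cross_section_conveyance_chached

# Hypothetical simple trapezoidal channel treated as a single panel
x = (0.0, 1.0, 2.0, 3.0, 4.0)
y = (1.0, 0.0, -0.5, 0.0, 1.0)
n = (0.03, 0.03, 0.03, 0.03, 0.03)
rpl = (1.0, 1.0, 1.0, 1.0, 1.0)
panel_markers = (True, False, False, False, False)

# Tuples keep the arguments hashable so repeated calls hit the lru_cache
conveyance = calculate_cross_section_conveyance_chached(x, y, n, rpl, panel_markers)
print(conveyance)  # pd.Series of conveyance values indexed by water level
```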
floodmodeller_api/units/sections.py CHANGED
@@ -19,6 +19,7 @@ import pandas as pd
  from floodmodeller_api.validation import _validate_unit
 
  from ._base import Unit
+ from .conveyance import calculate_cross_section_conveyance_chached
  from .helpers import (
      _to_float,
      _to_int,
@@ -244,6 +245,26 @@ class RIVER(Unit):
 
          return self._raw_block
 
+     @property
+     def conveyance(self) -> pd.Series:
+         """Calculate and return the conveyance curve of the cross-section.
+
+         Note:
+             This uses the same method as applied in Flood Modeller so will be able to pick out any
+             undesirable spikes in conveyance. The only difference compared with Flood Modeller may
+             be the number of sampled points.
+
+         Returns:
+             pd.Series: A pandas Series containing the conveyance values indexed by water levels.
+         """
+         return calculate_cross_section_conveyance_chached(
+             x=tuple(self.data.X.values),
+             y=tuple(self.data.Y.values),
+             n=tuple(self.data["Mannings n"].values),
+             rpl=tuple(self.data.RPL.values),
+             panel_markers=tuple(self.data.Panel.values),
+         )
+
 
  class INTERPOLATE(Unit):
      """Class to hold and process INTERPOLATE unit type
floodmodeller_api/util.py CHANGED
@@ -18,10 +18,15 @@ from __future__ import annotations
 
  import sys
  import webbrowser
+ from functools import wraps
  from pathlib import Path
  from typing import TYPE_CHECKING
 
+ from .version import __version__
+
  if TYPE_CHECKING:
+     from typing import Callable
+
      from ._base import FMFile
 
 
@@ -75,3 +80,40 @@ def read_file(filepath: str | Path) -> FMFile:
 
 
  def is_windows() -> bool:
      return sys.platform.startswith("win")
+
+
+ def handle_exception(when: str) -> Callable:
+     """Decorator factory to wrap a method with exception handling."""
+
+     def decorator(method: Callable) -> Callable:
+         @wraps(method)
+         def wrapped_method(self: FMFile, *args, **kwargs):
+             try:
+                 return method(self, *args, **kwargs)
+             except Exception as e:
+                 self._handle_exception(e, when)
+
+         return wrapped_method
+
+     return decorator
+
+
+ class FloodModellerAPIError(Exception):
+     """Custom exception class for Flood Modeller API errors."""
+
+     def __init__(self, original_exception, when, filetype, filepath) -> None:
+         tb = original_exception.__traceback__
+         while tb.tb_next is not None:
+             tb = tb.tb_next
+         line_no = tb.tb_lineno
+         tb_path = Path(tb.tb_frame.f_code.co_filename)
+         fname = "/".join(tb_path.parts[-2:])
+
+         message = (
+             "\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
+             f"\nAPI Error: Problem encountered when trying to {when} {filetype} file {filepath}."
+             f"\n\nDetails: {__version__}-{fname}-{line_no}"
+             f"\nMsg: {original_exception}"
+             "\n\nFor additional support, go to: https://github.com/People-Places-Solutions/floodmodeller-api"
+         )
+         super().__init__(message)
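
To show how these two additions are intended to work together, here is a minimal sketch with a hypothetical `Example` class standing in for an `FMFile` subclass. The decorator routes any failure in the wrapped method through the instance's `_handle_exception` hook, which is assumed here to raise the formatted `FloodModellerAPIError`.

```python
from floodmodeller_api.util import FloodModellerAPIError, handle_exception


class Example:
    # Hypothetical stand-in for an FMFile subclass
    _filetype = "EXAMPLE"
    _filepath = "model.example"

    def _handle_exception(self, err: Exception, when: str) -> None:
        # Assumed behaviour: wrap the original error with file/version context
        raise FloodModellerAPIError(err, when, self._filetype, self._filepath) from err

    @handle_exception(when="read")
    def _read(self) -> None:
        raise ValueError("unexpected keyword in file")


Example()._read()  # raises FloodModellerAPIError with a formatted message
```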
floodmodeller_api/version.py CHANGED
@@ -1 +1 @@
- __version__ = "0.4.3"
+ __version__ = "0.4.4.post1"
floodmodeller_api/xml2d.py CHANGED
@@ -16,7 +16,6 @@ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London
 
  from __future__ import annotations
 
- import datetime as dt
  import io
  import os
  import time
@@ -30,7 +29,8 @@ from tqdm import trange
 
  from floodmodeller_api._base import FMFile
 
- from .logs import error_2d_dict, lf_factory
+ from .logs import LF2, create_lf, error_2d_dict
+ from .util import handle_exception
  from .xml2d_template import xml2d_template
 
 
@@ -70,19 +70,16 @@ class XML2D(FMFile):
      OLD_FILE = 5
      GOOD_EXIT_CODE = 100
 
+     @handle_exception(when="read")
      def __init__(self, xml_filepath: str | Path | None = None, from_json: bool = False):
-         try:
-             if from_json:
-                 return
-             if xml_filepath is not None:
-                 FMFile.__init__(self, xml_filepath)
-                 self._read()
-                 self._log_path = self._filepath.with_suffix(".lf2")
-             else:
-                 self._read(from_blank=True)
-
-         except Exception as e:
-             self._handle_exception(e, when="read")
+         if from_json:
+             return
+         if xml_filepath is not None:
+             FMFile.__init__(self, xml_filepath)
+             self._read()
+             self._log_path = self._filepath.with_suffix(".lf2")
+         else:
+             self._read(from_blank=True)
 
      def _read(self, from_blank=False):
          # Read xml data
@@ -387,26 +384,21 @@
          except AttributeError:
              self._data[attr] = None
 
+     @handle_exception(when="write")
      def _write(self) -> str:
-         orig_xml_tree = deepcopy(self._xmltree)
+         self._update_dict()
+         self._recursive_update_xml(self._data, self._raw_data, "ROOT")
+         self._recursive_remove_data_xml(self._data, self._xmltree.getroot())
+         etree.indent(self._xmltree, space=" ")
          try:
-             self._update_dict()
-             self._recursive_update_xml(self._data, self._raw_data, "ROOT")
-             self._recursive_remove_data_xml(self._data, self._xmltree.getroot())
-             etree.indent(self._xmltree, space=" ")
-             try:
-                 self._validate()
-             except Exception:
-                 self._recursive_reorder_xml()
-                 self._validate()
-
-             self._raw_data = deepcopy(self._data)  # reset raw data to equal data
+             self._validate()
+         except Exception:
+             self._recursive_reorder_xml()
+             self._validate()
 
-             return f'<?xml version="1.0" standalone="yes"?>\n{etree.tostring(self._xmltree.getroot()).decode()}'
+         self._raw_data = deepcopy(self._data)  # reset raw data to equal data
 
-         except Exception as e:
-             self._xmltree = orig_xml_tree
-             self._handle_exception(e, when="write")
+         return f'<?xml version="1.0" standalone="yes"?>\n{etree.tostring(self._xmltree.getroot()).decode()}'
 
      def _get_multi_value_keys(self):
          self._multi_value_keys = []
@@ -460,6 +452,7 @@
          self._read()
          self._log_path = self._filepath.with_suffix(".lf2")
 
+     @handle_exception(when="simulate")
      def simulate(  # noqa: C901, PLR0912, PLR0913
          self,
          method: str = "WAIT",
@@ -508,78 +501,74 @@
          self.range_function = range_function
          self.range_settings = range_settings if range_settings else {}
 
-         try:
-             if self._filepath is None:
-                 raise UserWarning(
-                     "xml2D must be saved to a specific filepath before simulate() can be called.",
-                 )
-             if precision.upper() == "DEFAULT":
-                 precision = "SINGLE"  # defaults to single precision
-                 for _, domain in self.domains.items():
-                     if domain["run_data"].get("double_precision") == "required":
-                         precision = "DOUBLE"
-                         break
-
-             if enginespath == "":
-                 # Default location
-                 _enginespath = r"C:\Program Files\Flood Modeller\bin"
-             else:
-                 _enginespath = enginespath
-                 if not Path(_enginespath).exists():
-                     raise Exception(
-                         f"Flood Modeller non-default engine path not found! {str(_enginespath)}",
-                     )
-
-             # checking if all schemes used are fast, if so will use FAST.exe
-             # TODO: Add in option to choose to use or not to use if you can
-             is_fast = True
+         if self._filepath is None:
+             raise UserWarning(
+                 "xml2D must be saved to a specific filepath before simulate() can be called.",
+             )
+         if precision.upper() == "DEFAULT":
+             precision = "SINGLE"  # defaults to single precision
              for _, domain in self.domains.items():
-                 if domain["run_data"]["scheme"] != "FAST":
-                     is_fast = False
+                 if domain["run_data"].get("double_precision") == "required":
+                     precision = "DOUBLE"
                      break
 
-             if is_fast is True:
-                 isis2d_fp = str(Path(_enginespath, "FAST.exe"))
-             elif precision.upper() == "SINGLE":
-                 isis2d_fp = str(Path(_enginespath, "ISIS2d.exe"))
-             else:
-                 isis2d_fp = str(Path(_enginespath, "ISIS2d_DP.exe"))
+         if enginespath == "":
+             # Default location
+             _enginespath = r"C:\Program Files\Flood Modeller\bin"
+         else:
+             _enginespath = enginespath
+             if not Path(_enginespath).exists():
+                 raise Exception(
+                     f"Flood Modeller non-default engine path not found! {str(_enginespath)}",
+                 )
 
-             if not Path(isis2d_fp).exists():
-                 raise Exception(f"Flood Modeller engine not found! Expected location: {isis2d_fp}")
+         # checking if all schemes used are fast, if so will use FAST.exe
+         # TODO: Add in option to choose to use or not to use if you can
+         is_fast = True
+         for _, domain in self.domains.items():
+             if domain["run_data"]["scheme"] != "FAST":
+                 is_fast = False
+                 break
+
+         if is_fast is True:
+             isis2d_fp = str(Path(_enginespath, "FAST.exe"))
+         elif precision.upper() == "SINGLE":
+             isis2d_fp = str(Path(_enginespath, "ISIS2d.exe"))
+         else:
+             isis2d_fp = str(Path(_enginespath, "ISIS2d_DP.exe"))
 
-             console_output = console_output.lower()
-             run_command = (
-                 f'"{isis2d_fp}" {"-q" if console_output != "detailed" else ""} "{self._filepath}"'
-             )
-             stdout = DEVNULL if console_output == "simple" else None
+         if not Path(isis2d_fp).exists():
+             raise Exception(f"Flood Modeller engine not found! Expected location: {isis2d_fp}")
 
-             if method.upper() == "WAIT":
-                 print("Executing simulation ... ")
-                 # execute simulation
-                 process = Popen(run_command, cwd=os.path.dirname(self._filepath), stdout=stdout)
+         console_output = console_output.lower()
+         run_command = (
+             f'"{isis2d_fp}" {"-q" if console_output != "detailed" else ""} "{self._filepath}"'
+         )
+         stdout = DEVNULL if console_output == "simple" else None
 
-                 # progress bar based on log files:
-                 if console_output == "simple":
-                     self._init_log_file()
-                     self._update_progress_bar(process)
+         if method.upper() == "WAIT":
+             print("Executing simulation ... ")
+             # execute simulation
+             process = Popen(run_command, cwd=os.path.dirname(self._filepath), stdout=stdout)
 
-                 while process.poll() is None:
-                     # process is still running
-                     time.sleep(1)
+             # progress bar based on log files:
+             if console_output == "simple":
+                 self._lf = create_lf(self._log_path, "lf2")
+                 self._update_progress_bar(process)
 
-                 exitcode = process.returncode
-                 self._interpret_exit_code(exitcode, raise_on_failure)
+             while process.poll() is None:
+                 # process is still running
+                 time.sleep(1)
 
-             elif method.upper() == "RETURN_PROCESS":
-                 print("Executing simulation ...")
-                 # execute simulation
-                 return Popen(run_command, cwd=os.path.dirname(self._filepath), stdout=stdout)
+             exitcode = process.returncode
+             self._interpret_exit_code(exitcode, raise_on_failure)
 
-             return None
+         elif method.upper() == "RETURN_PROCESS":
+             print("Executing simulation ...")
+             # execute simulation
+             return Popen(run_command, cwd=os.path.dirname(self._filepath), stdout=stdout)
 
-         except Exception as e:
-             self._handle_exception(e, when="simulate")
+         return None
 
      def get_log(self):
          """If log files for the simulation exist, this function returns them as a LF2 class object
@@ -590,52 +579,7 @@
          if not self._log_path.exists():
              raise FileNotFoundError("Log file (LF2) not found")
 
-         return lf_factory(self._log_path, "lf2", False)
-
-     def _init_log_file(self):
-         """Checks for a new log file, waiting for its creation if necessary"""
-         # wait for log file to exist
-         log_file_exists = False
-         max_time = time.time() + 10
-
-         while not log_file_exists:
-             time.sleep(0.1)
-             log_file_exists = self._log_path.is_file()
-
-             # timeout
-             if time.time() > max_time:
-                 self._no_log_file("log file is expected but not detected")
-                 self._lf = None
-                 return
-
-         # wait for new log file
-         old_log_file = True
-         max_time = time.time() + 10
-
-         while old_log_file:
-             time.sleep(0.1)
-
-             # difference between now and when log file was last modified
-             last_modified_timestamp = self._log_path.stat().st_mtime
-             last_modified = dt.datetime.fromtimestamp(last_modified_timestamp)
-             time_diff_sec = (dt.datetime.now() - last_modified).total_seconds()
-
-             # it's old if it's over self.OLD_FILE seconds old (TODO: is this robust?)
-             old_log_file = time_diff_sec > self.OLD_FILE
-
-             # timeout
-             if time.time() > max_time:
-                 self._no_log_file("log file is from previous run")
-                 self._lf = None
-                 return
-
-         # create LF instance
-         self._lf = lf_factory(self._log_path, "lf2", False)
-
-     def _no_log_file(self, reason):
-         """Warning that there will be no progress bar"""
-
-         print("No progress bar as " + reason + ". Simulation will continue as usual.")
+         return LF2(self._log_path)
 
      def _update_progress_bar(self, process: Popen):
          """Updates progress bar based on log file"""