floodmodeller-api 0.4.3__py3-none-any.whl → 0.4.4.post1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- floodmodeller_api/_base.py +22 -37
- floodmodeller_api/dat.py +165 -185
- floodmodeller_api/ied.py +82 -87
- floodmodeller_api/ief.py +92 -186
- floodmodeller_api/inp.py +64 -70
- floodmodeller_api/logs/__init__.py +1 -1
- floodmodeller_api/logs/lf.py +61 -17
- floodmodeller_api/test/conftest.py +7 -0
- floodmodeller_api/test/test_conveyance.py +107 -0
- floodmodeller_api/test/test_dat.py +5 -4
- floodmodeller_api/test/test_data/conveyance_test.dat +165 -0
- floodmodeller_api/test/test_data/conveyance_test.feb +116 -0
- floodmodeller_api/test/test_data/conveyance_test.gxy +85 -0
- floodmodeller_api/test/test_data/expected_conveyance.csv +60 -0
- floodmodeller_api/test/test_ief.py +26 -15
- floodmodeller_api/test/test_logs_lf.py +54 -0
- floodmodeller_api/to_from_json.py +24 -12
- floodmodeller_api/units/boundaries.py +6 -0
- floodmodeller_api/units/conveyance.py +301 -0
- floodmodeller_api/units/sections.py +21 -0
- floodmodeller_api/util.py +42 -0
- floodmodeller_api/version.py +1 -1
- floodmodeller_api/xml2d.py +80 -136
- floodmodeller_api/zzn.py +166 -139
- {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/METADATA +4 -1
- {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/RECORD +30 -24
- {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/WHEEL +1 -1
- {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/LICENSE.txt +0 -0
- {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/entry_points.txt +0 -0
- {floodmodeller_api-0.4.3.dist-info → floodmodeller_api-0.4.4.post1.dist-info}/top_level.txt +0 -0
floodmodeller_api/inp.py
CHANGED
@@ -23,6 +23,7 @@ from ._base import FMFile
 from .units.helpers import _to_str, join_n_char_ljust
 from .urban1d import subsections
 from .urban1d.general_parameters import DEFAULT_OPTIONS
+from .util import handle_exception
 from .validation import _validate_unit


@@ -43,20 +44,18 @@ class INP(FMFile):
     _filetype: str = "INP"
     _suffix: str = ".inp"

+    @handle_exception(when="read")
     def __init__(self, inp_filepath: str | Path | None = None, from_json: bool = False):
-
-
-
-
-
-                self._read()
+        if from_json:
+            return
+        if inp_filepath is not None:
+            FMFile.__init__(self, inp_filepath)
+            self._read()

-
-
+        else:
+            self._create_from_blank()

-
-        except Exception as e:
-            self._handle_exception(e, when="read")
+        self._get_section_definitions()

     def _read(self):
         # Read INP file
@@ -66,73 +65,68 @@ class INP(FMFile):
         # Generate INP file structure
         self._update_inp_struct()

+    @handle_exception(when="write")
     def _write(self) -> str:
         """Returns string representation of the current INP data

         Returns:
             str: Full string representation of INP in its most recent state (including changes not yet saved to disk)
         """
-
-            _validate_unit(self, urban=True)
+        _validate_unit(self, urban=True)

-
+        block_shift = 0  # Used to allow changes in the length of subsections.

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-                )
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            # Write _raw_data out to INP file.
-            return "\n".join(self._raw_data) + "\n"
-
-        except Exception as e:
-            self._handle_exception(e, when="write")
+        for block in self._inp_struct:
+            if block["Subsection_Type"] in subsections.SUPPORTED_SUBSECTIONS:
+                subsection_data = self._raw_data[
+                    block["start"] + block_shift : block["end"] + 1 + block_shift
+                ]
+                prev_block_len = len(subsection_data)
+
+                if (
+                    subsections.SUPPORTED_SUBSECTIONS[block["Subsection_Type"]]["group"]
+                    == "general"
+                ):
+                    # General parameters
+
+                    if block["Subsection_Type"] == "[OPTIONS]":
+                        # Options subsection
+
+                        new_subsection_data = [
+                            "[OPTIONS]",
+                            ";;Option Value",
+                        ]
+
+                        for param, value in self.options.items():
+                            if value is not None:
+                                option_line = join_n_char_ljust(21, param.upper(), value)
+                                new_subsection_data.append(option_line)
+
+                        new_subsection_data.append("")  # blank line before next section
+
+                else:  # Of unit type
+                    subsection = getattr(
+                        self,
+                        subsections.SUPPORTED_SUBSECTIONS[block["Subsection_Type"]]["attribute"],
+                    )  # Get unit object
+                    new_subsection_data = (
+                        subsection._write()
+                    )  # String representation of unit object
+
+                new_block_len = len(new_subsection_data)
+
+                self._raw_data[block["start"] + block_shift : block["end"] + 1 + block_shift] = (
+                    new_subsection_data  # Replace existing subsection with new subsection string
+                )
+                block_shift += (
+                    new_block_len - prev_block_len
+                )  # adjust block shift for change in number of lines in block
+
+        # Regenerate INP file structure
+        self._update_inp_struct()
+
+        # Write _raw_data out to INP file.
+        return "\n".join(self._raw_data) + "\n"

     def _create_from_blank(self):
         raise NotImplementedError(
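
Note: the try/except blocks that previously wrapped __init__ and _write are replaced above by a handle_exception decorator imported from floodmodeller_api/util.py (util.py gains 42 lines in this release but its contents are not shown in this diff). The sketch below is only an illustration of the general pattern, assuming the decorator simply routes any exception to the instance's existing _handle_exception method; the real implementation may differ.

# Illustrative sketch only -- the real decorator lives in floodmodeller_api/util.py
# and is not shown in this diff.
import functools


def handle_exception(when: str):
    """Route any exception raised by the wrapped method to self._handle_exception."""

    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            try:
                return func(self, *args, **kwargs)
            except Exception as e:  # mirrors the old blanket except blocks
                self._handle_exception(e, when=when)

        return wrapper

    return decorator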

floodmodeller_api/logs/__init__.py
CHANGED

@@ -1,2 +1,2 @@
-from .lf import LF1, LF2,
+from .lf import LF1, LF2, create_lf
 from .lf_params import error_2d_dict

floodmodeller_api/logs/lf.py
CHANGED

@@ -16,11 +16,14 @@ address: Jacobs UK Limited, Flood Modeller, Cottons Centre, Cottons Lane, London

 from __future__ import annotations

+import datetime as dt
+import time
 from typing import TYPE_CHECKING

 import pandas as pd

 from .._base import FMFile
+from ..util import handle_exception
 from .lf_helpers import state_factory
 from .lf_params import lf1_steady_data_to_extract, lf1_unsteady_data_to_extract, lf2_data_to_extract

@@ -28,6 +31,10 @@ if TYPE_CHECKING:
     from pathlib import Path


+OLD_FILE = 5
+LOG_TIMEOUT = 10
+
+
 class LF(FMFile):
     """Reads and processes Flood Modeller log file

@@ -40,24 +47,21 @@ class LF(FMFile):
     Initiates 'LF' class object
     """

+    @handle_exception(when="read")
     def __init__(
         self,
         lf_filepath: str | Path | None,
         data_to_extract: dict,
         steady: bool = False,
     ):
-
-            FMFile.__init__(self, lf_filepath)
-
-            self._data_to_extract = data_to_extract
-            self._init_counters()
-            self._init_parsers()
-            self._state = state_factory(steady, self._extracted_data)
+        FMFile.__init__(self, lf_filepath)

-
+        self._data_to_extract = data_to_extract
+        self._init_counters()
+        self._init_parsers()
+        self._state = state_factory(steady, self._extracted_data)

-
-            self._handle_exception(e, when="read")
+        self._read()

     def _read(self, force_reread: bool = False, suppress_final_step: bool = False):
         # Read LF file
@@ -311,10 +315,50 @@ class LF2(LF):
         super().__init__(lf_filepath, data_to_extract, steady=False)


-def
-
-
-
-
-
+def create_lf(filepath: Path, suffix: str) -> LF1 | LF2 | None:
+    """Checks for a new log file, waiting for its creation if necessary"""
+
+    def _no_log_file(reason: str) -> None:
+        print(f"No progress bar as {reason}. Simulation will continue as usual.")
+
+    # ensure progress bar is supported
+    if suffix not in {"lf1", "lf2"}:
+        _no_log_file("log file must have suffix lf1 or lf2")
+        return None
+
+    # wait for log file to exist
+    log_file_exists = False
+    max_time = time.time() + LOG_TIMEOUT
+
+    while not log_file_exists:
+        time.sleep(0.1)
+
+        log_file_exists = filepath.is_file()
+
+        # timeout
+        if (not log_file_exists) and (time.time() > max_time):
+            _no_log_file("log file is expected but not detected")
+            return None
+
+    # wait for new log file
+    old_log_file = True
+    max_time = time.time() + LOG_TIMEOUT
+
+    while old_log_file:
+        time.sleep(0.1)
+
+        # difference between now and when log file was last modified
+        last_modified_timestamp = filepath.stat().st_mtime
+        last_modified = dt.datetime.fromtimestamp(last_modified_timestamp)
+        time_diff_sec = (dt.datetime.now() - last_modified).total_seconds()
+
+        # it's old if it's over OLD_FILE seconds old (TODO: is this robust?)
+        old_log_file = time_diff_sec > OLD_FILE
+
+        # timeout
+        if old_log_file and (time.time() > max_time):
+            _no_log_file("log file is from previous run")
+            return None
+
+    # create LF instance
+    return LF1(filepath) if suffix == "lf1" else LF2(filepath)
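
For context, create_lf is the helper now exported from floodmodeller_api.logs (see the __init__.py change above). A caller that has just started a simulation could poll for the fresh log file like this; the file path used here is purely illustrative:

from pathlib import Path

from floodmodeller_api.logs import create_lf

# Poll for a freshly written .lf1 log file; create_lf waits up to LOG_TIMEOUT seconds
# for the file to appear and to look newer than OLD_FILE seconds, then returns an
# LF1/LF2 instance, or None (after printing a "No progress bar ..." message).
log = create_lf(Path("example_model.lf1"), "lf1")  # illustrative path
if log is None:
    print("Continuing without log-based progress reporting")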

floodmodeller_api/test/conftest.py
CHANGED

@@ -1,5 +1,6 @@
 import os
 from pathlib import Path
+from unittest.mock import patch

 import pytest

@@ -7,3 +8,9 @@ import pytest
 @pytest.fixture(scope="session")
 def test_workspace():
     return Path(os.path.dirname(__file__), "test_data")
+
+
+@pytest.fixture()
+def log_timeout():
+    with patch("floodmodeller_api.logs.lf.LOG_TIMEOUT", new=0):
+        yield
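
The new log_timeout fixture patches LOG_TIMEOUT to zero so the waiting loops in create_lf give up immediately. The contents of the new test_logs_lf.py (+54 lines) are not shown in this section; a hypothetical test using the fixture might look like the sketch below.

# Hypothetical example only -- not taken from test_logs_lf.py.
from pathlib import Path

from floodmodeller_api.logs import create_lf


def test_create_lf_returns_none_when_no_log_file_appears(log_timeout, tmp_path: Path):
    # With LOG_TIMEOUT patched to 0, the "wait for log file to exist" loop times out
    # on its first pass and create_lf falls back to returning None.
    assert create_lf(tmp_path / "missing.lf1", "lf1") is None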

floodmodeller_api/test/test_conveyance.py
ADDED

@@ -0,0 +1,107 @@
+from pathlib import Path
+
+import numpy as np
+import pandas as pd
+import pytest
+from scipy.spatial.distance import directed_hausdorff
+from shapely.geometry import LineString, Polygon
+
+from floodmodeller_api import DAT
+from floodmodeller_api.units.conveyance import (
+    calculate_conveyance_by_panel,
+    calculate_conveyance_part,
+    calculate_cross_section_conveyance,
+    insert_intermediate_wls,
+)
+
+
+def test_calculate_cross_section_conveyance():
+    x = np.array([0, 1, 2, 3, 4])
+    y = np.array([5, 3, 1, 2, 6])
+    n = np.array([0.03, 0.03, 0.03, 0.03, 0.03])
+    panel_markers = np.array([False, False, False, False, False])
+    rpl = np.array([1.0, 1.0, 1.0, 1.0, 1.0])
+    result = calculate_cross_section_conveyance(x, y, n, rpl, panel_markers)
+
+    assert isinstance(result, pd.Series), "Result should be a pandas Series"
+    assert not result.empty, "Result should not be empty"
+
+
+def test_calculate_conveyance_by_panel():
+    x = np.array([0, 1, 2])
+    y = np.array([5, 3, 1])
+    n = np.array([0.03, 0.03])
+    rpl = 1.0
+    wls = np.array([1.0, 1.5, 2.0, 2.5, 3.0, 3.5, 4.0, 4.5, 5.0])
+
+    result = calculate_conveyance_by_panel(x, y, n, rpl, wls)
+
+    assert isinstance(result, list), "Result should be a list"
+    assert len(result) == len(wls), "Result length should match the length of water levels"
+    assert all(isinstance(val, float) for val in result), "All conveyance values should be floats"
+
+
+def test_calculate_conveyance_part():
+    wetted_polygon = Polygon([(1, 3), (2, 1), (3, 2), (4, 6), (1, 3)])
+    water_plane = LineString([(0, 3), (5, 3)])
+    glass_walls = LineString([(1, 3), (1, 7)]), LineString([(4, 6), (4, 7)])
+    x = np.array([1, 2, 3, 4])
+    n = np.array([0.03, 0.03, 0.03, 0.03])
+    rpl = 1.0
+
+    result = calculate_conveyance_part(wetted_polygon, water_plane, glass_walls, x, n, rpl)
+
+    assert isinstance(result, float), "Result should be a float"
+    assert result >= 0, "Conveyance should be non-negative"
+
+
+def test_insert_intermediate_wls():
+    arr = np.array([1.0, 2.0, 3.0])
+    threshold = 0.5
+
+    result = insert_intermediate_wls(arr, threshold)
+
+    assert isinstance(result, np.ndarray), "Result should be a numpy array"
+    assert result[0] == 1.0 and result[-1] == 3.0, "First and last elements should match the input"
+    assert all(np.diff(result) <= threshold), "All gaps should be <= to the threshold"
+
+
+@pytest.fixture(scope="module")
+def dat(test_workspace: Path):
+    return DAT(test_workspace / "conveyance_test.dat")
+
+
+@pytest.fixture(scope="module")
+def from_gui(test_workspace: Path):
+    return pd.read_csv(test_workspace / "expected_conveyance.csv")
+
+
+@pytest.mark.parametrize("section", ("a", "a2", "b", "b2", "c", "d", "d2", "e", "e2", "e3"))
+def test_results_close_to_gui(section: str, dat: DAT, from_gui: pd.DataFrame):
+    threshold = 6
+
+    actual = dat.sections[section].conveyance
+    expected = (
+        from_gui.set_index(f"{section}_stage")[f"{section}_conveyance"].dropna().drop_duplicates()
+    )
+    common_index = sorted(set(actual.index).union(expected.index))
+    actual_interpolated = actual.reindex(common_index).interpolate(method="slinear")
+    expected_interpolated = expected.reindex(common_index).interpolate(method="slinear")
+
+    u = np.array(list(zip(actual_interpolated.index, actual_interpolated)))
+    v = np.array(list(zip(expected_interpolated.index, expected_interpolated)))
+    hausdorff_distance = directed_hausdorff(u, v)[0]
+
+    assert hausdorff_distance < threshold
+
+
+@pytest.mark.parametrize("section", ("a", "a2", "b", "b2", "c", "d", "d2", "e", "e2", "e3"))
+def test_results_match_gui_at_shared_points(section: str, dat: DAT, from_gui: pd.DataFrame):
+    tolerance = 1e-2  # 0.001
+    actual = dat.sections[section].conveyance
+    expected = (
+        from_gui.set_index(f"{section}_stage")[f"{section}_conveyance"].dropna().drop_duplicates()
+    )
+    shared_index = sorted(set(actual.index).intersection(expected.index))
+    diff = expected[shared_index] - actual[shared_index]
+    assert (abs(diff) < tolerance).all()  # asserts all conveyance values within 0.001 difference
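
The test_results_close_to_gui test above scores the similarity of the API-computed stage-conveyance curve against the GUI export with SciPy's directed Hausdorff distance, i.e. the largest distance from any point on one curve to its nearest point on the other. A self-contained illustration of that call, using made-up sample points:

import numpy as np
from scipy.spatial.distance import directed_hausdorff

# Two stage-conveyance curves sampled as (stage, conveyance) points.
u = np.array([[0.0, 0.0], [0.5, 4.0], [1.0, 10.0]])
v = np.array([[0.0, 0.2], [0.5, 4.5], [1.0, 9.5]])

# directed_hausdorff returns (distance, index_in_u, index_in_v);
# the test keeps only the distance and asserts it is below a threshold.
distance = directed_hausdorff(u, v)[0]
print(f"directed Hausdorff distance: {distance:.2f}")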

floodmodeller_api/test/test_dat.py
CHANGED

@@ -5,6 +5,7 @@ import pytest

 from floodmodeller_api import DAT
 from floodmodeller_api.units import QTBDY
+from floodmodeller_api.util import FloodModellerAPIError


 @pytest.fixture
@@ -125,28 +126,28 @@ def test_insert_unit_at(units, dat_ex6):
 def test_insert_unit_too_many_arguments_error(dat_ex6, units, kwargs):
     msg = (
         r"\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
-        r"\nAPI Error: Problem encountered when trying to insert unit DAT file .*\."
+        r"\nAPI Error: Problem encountered when trying to insert unit into DAT file .*\."
         r"\n"
         r"\nDetails: .*-floodmodeller_api/dat\.py-\d+"
         r"\nMsg: Only one of add_at, add_before, or add_after required"
         r"\n"
         r"\nFor additional support, go to: https://github\.com/People-Places-Solutions/floodmodeller-api"
     )
-    with pytest.raises(
+    with pytest.raises(FloodModellerAPIError, match=msg):
         dat_ex6.insert_unit(units[0], **kwargs)


 def test_insert_unit_no_arguments_error(dat_ex6, units):
     msg = (
         r"\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
-        r"\nAPI Error: Problem encountered when trying to insert unit DAT file .*\."
+        r"\nAPI Error: Problem encountered when trying to insert unit into DAT file .*\."
         r"\n"
         r"\nDetails: .*-floodmodeller_api/dat\.py-\d+"
         r"\nMsg: No positional argument given\. Please provide either add_before, add_at or add_after"
         r"\n"
         r"\nFor additional support, go to: https://github\.com/People-Places-Solutions/floodmodeller-api"
     )
-    with pytest.raises(
+    with pytest.raises(FloodModellerAPIError, match=msg):
         dat_ex6.insert_unit(units[0])

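
The updated tests expect a concrete FloodModellerAPIError, newly importable from floodmodeller_api.util, rather than a bare Exception. Assuming the same error type is what user code sees when an API call fails, it can be caught explicitly; a hypothetical example:

from floodmodeller_api import DAT
from floodmodeller_api.util import FloodModellerAPIError

try:
    dat = DAT("network.dat")  # hypothetical path; assumes read failures raise FloodModellerAPIError
except FloodModellerAPIError as error:
    print(error)  # the formatted "API Error: ..." message asserted in the tests above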

floodmodeller_api/test/test_data/conveyance_test.dat
ADDED

@@ -0,0 +1,165 @@
+
+#REVISION#1
+10 0.750 0.900 0.100 0.001 12SI
+10.000 0.010 0.010 0.700 0.100 0.700 0.000
+RAD FILE
+
+END GENERAL
+RIVER
+SECTION
+a
+0.000 0.0001 1000
+6
+0.000 3.000 0.030 1.000 0.000 0.000
+1.000 2.500 0.030 1.000 0.000 0.000
+2.000 2.200 0.030 1.000 0.000 0.000
+3.000 1.800 0.030 1.000 0.000 0.000
+4.000 3.500 0.030 1.000 0.000 0.000
+5.000 4.000 0.030 1.000 0.000 0.000
+RIVER
+SECTION
+a2
+0.000 0.0001 1000
+6
+0.000 3.000 0.030* 1.000 0.000 0.000
+1.000 2.500 0.030* 1.000 0.000 0.000
+2.000 2.200 0.030* 1.000 0.000 0.000
+3.000 1.800 0.030* 1.000 0.000 0.000
+4.000 3.500 0.030* 1.000 0.000 0.000
+5.000 4.000 0.030* 1.000 0.000 0.000
+RIVER
+SECTION
+b
+0.000 0.0001 1000
+7
+0.000 3.000 0.020 1.000 0.000 0.000
+0.000 2.300 0.040 1.000 0.000 0.000
+2.000 2.200 0.015 1.000 0.000 0.000
+3.000 1.800 0.015 1.000 0.000 0.000
+3.500 2.200 0.030 1.000 0.000 0.000
+4.000 3.500 0.030 1.000 0.000 0.000
+4.000 4.000 0.030 1.000 0.000 0.000
+RIVER
+SECTION
+b2
+0.000 0.0001 1000
+7
+0.000 3.000 0.020 1.000 0.000 0.000
+0.000 2.300 0.040 1.000 0.000 0.000
+2.000 2.200 0.015* 1.000 0.000 0.000
+3.000 1.800 0.015 1.000 0.000 0.000
+3.500 2.200 0.030 1.000 0.000 0.000
+4.000 3.500 0.030 1.000 0.000 0.000
+4.000 4.000 0.030 1.000 0.000 0.000
+RIVER
+SECTION
+c
+0.000 0.0001 1000
+5
+0.000 4.000 0.080 1.000 0.000 0.000
+1.000 3.000 0.050 1.000 0.000 0.000
+2.000 2.000 0.015 1.000 0.000 0.000
+3.000 3.000 0.100 1.000 0.000 0.000
+4.000 4.000 0.500 1.000 0.000 0.000
+RIVER
+SECTION
+d
+0.000 0.0001 1000
+13
+0.000 5.000 0.040 1.000 0.000 0.000
+1.000 2.000 0.040 1.000 0.000 0.000
+2.000 1.000 0.040 1.000 0.000 0.000
+3.000 0.500 0.040 1.000 0.000 0.000
+4.000 1.000 0.040 1.000 0.000 0.000
+5.000 4.000 0.100 1.000 0.000 0.000
+5.200 4.000 0.070 1.000 0.000 0.000
+6.000 1.000 0.070 1.000 0.000 0.000
+7.000 1.500 0.070 1.000 0.000 0.000
+8.000 2.000 0.070 1.000 0.000 0.000
+9.000 3.000 0.070 1.000 0.000 0.000
+10.000 4.500 0.070 1.000 0.000 0.000
+11.000 5.000 0.070 1.000 0.000 0.000
+RIVER
+SECTION
+d2
+0.000 0.0001 1000
+13
+0.000 5.000 0.040 1.000 0.000 0.000
+1.000 2.000 0.040 1.000 0.000 0.000
+2.000 1.000 0.040 1.000 0.000 0.000
+3.000 0.500 0.040 1.000 0.000 0.000
+4.000 1.000 0.040 1.000 0.000 0.000
+5.000 4.000 0.100* 1.000 0.000 0.000
+5.200 4.000 0.070 1.000 0.000 0.000
+6.000 1.000 0.070 1.000 0.000 0.000
+7.000 1.500 0.070 1.000 0.000 0.000
+8.000 2.000 0.070 1.000 0.000 0.000
+9.000 3.000 0.070 1.000 0.000 0.000
+10.000 4.500 0.070 1.000 0.000 0.000
+11.000 5.000 0.070 1.000 0.000 0.000
+RIVER
+SECTION
+e
+0.000 0.0001 1000
+7
+30.000 42.000 0.030 1.000 0.000 0.000
+31.000 40.000 0.030 1.000 0.000 0.000
+33.000 39.800 0.035 1.000 0.000 0.000
+36.000 39.700 0.035 1.100 0.000 0.000
+40.000 39.500 0.040 1.150 0.000 0.000
+45.000 41.000 0.045 3.000 0.000 0.000
+75.000 42.200 0.045 3.000 0.000 0.000
+RIVER
+SECTION
+e2
+0.000 0.0001 1000
+7
+30.000 42.000 0.030 1.000 0.000 0.000
+31.000 40.000 0.030 1.000 0.000 0.000
+33.000 39.800 0.035 1.000 0.000 0.000
+36.000 39.700 0.035 1.100 0.000 0.000
+40.000 39.500 0.040 1.150 0.000 0.000
+45.000 41.000 0.045* 3.000 0.000 0.000
+75.000 42.200 0.045 3.000 0.000 0.000
+RIVER
+SECTION
+e3
+0.000 0.0001 1000
+7
+30.000 42.000 0.030* 1.000 0.000 0.000
+31.000 40.000 0.030 1.000 0.000 0.000
+33.000 39.800 0.035 1.000 0.000 0.000
+36.000 39.700 0.035* 1.100 0.000 0.000
+40.000 39.500 0.040* 1.150 0.000 0.000
+45.000 41.000 0.045* 3.000 0.000 0.000
+75.000 42.200 0.045 3.000 0.000 0.000
+INITIAL CONDITIONS
+label ? flow stage froude no velocity umode ustate z
+a y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+a2 y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+b y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+b2 y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+c y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+d y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+d2 y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+e y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+e2 y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+e3 y 0.000 0.000 0.000 0.000 0.000 0.000 0.000
+GISINFO
+RIVER SECTION a 0 0 0 0 0
+RIVER SECTION a2 0 0 0 0 0
+RIVER SECTION b 0 0 0 0 0
+RIVER SECTION b2 0 0 0 0 0
+RIVER SECTION c 0 0 0 0 0
+RIVER SECTION d 0 0 0 0 0
+RIVER SECTION d2 0 0 0 0 0
+RIVER SECTION e 0 0 0 0 0
+RIVER SECTION e2 0 0 0 0 0
+RIVER SECTION e3 0 0 0 0 0
+a 0 0 0 0 0
+a2 0 0 0 0 0
+b 0 0 0 0 0
+b2 0 0 0 0 0
+c 0 0 0 0 0
+d 0 0 0 0 0
+d2 0 0 0 0 0