floodmodeller-api 0.5.0__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. floodmodeller_api/__init__.py +1 -1
  2. floodmodeller_api/_base.py +26 -16
  3. floodmodeller_api/backup.py +3 -2
  4. floodmodeller_api/dat.py +29 -30
  5. floodmodeller_api/diff.py +3 -3
  6. floodmodeller_api/hydrology_plus/hydrology_plus_export.py +14 -13
  7. floodmodeller_api/ied.py +6 -6
  8. floodmodeller_api/ief.py +27 -25
  9. floodmodeller_api/inp.py +3 -4
  10. floodmodeller_api/logs/lf.py +9 -16
  11. floodmodeller_api/logs/lf_helpers.py +18 -18
  12. floodmodeller_api/mapping.py +2 -0
  13. floodmodeller_api/test/__init__.py +2 -2
  14. floodmodeller_api/test/conftest.py +2 -3
  15. floodmodeller_api/test/test_backup.py +2 -2
  16. floodmodeller_api/test/test_conveyance.py +4 -3
  17. floodmodeller_api/test/test_dat.py +2 -2
  18. floodmodeller_api/test/test_data/structure_logs/EX17_expected.csv +4 -0
  19. floodmodeller_api/test/test_data/structure_logs/EX17_expected.json +69 -0
  20. floodmodeller_api/test/test_data/structure_logs/EX18_expected.csv +20 -0
  21. floodmodeller_api/test/test_data/structure_logs/EX18_expected.json +292 -0
  22. floodmodeller_api/test/test_data/structure_logs/EX6_expected.csv +4 -0
  23. floodmodeller_api/test/test_data/structure_logs/EX6_expected.json +35 -0
  24. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_flow.csv +182 -0
  25. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_fr.csv +182 -0
  26. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_mode.csv +182 -0
  27. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_stage.csv +182 -0
  28. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_state.csv +182 -0
  29. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzn_velocity.csv +182 -0
  30. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_left_fp_h.csv +182 -0
  31. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_left_fp_mode.csv +182 -0
  32. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_link_inflow.csv +182 -0
  33. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_max.csv +87 -0
  34. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_right_fp_h.csv +182 -0
  35. floodmodeller_api/test/test_data/tabular_csv_outputs/network_zzx_right_fp_mode.csv +182 -0
  36. floodmodeller_api/test/test_flowtimeprofile.py +2 -2
  37. floodmodeller_api/test/test_hydrology_plus_export.py +4 -2
  38. floodmodeller_api/test/test_ied.py +2 -2
  39. floodmodeller_api/test/test_ief.py +2 -2
  40. floodmodeller_api/test/test_inp.py +2 -2
  41. floodmodeller_api/test/test_json.py +5 -10
  42. floodmodeller_api/test/test_logs_lf.py +6 -6
  43. floodmodeller_api/test/test_read_file.py +1 -0
  44. floodmodeller_api/test/test_river.py +79 -2
  45. floodmodeller_api/test/test_tool.py +8 -5
  46. floodmodeller_api/test/test_toolbox_structure_log.py +149 -158
  47. floodmodeller_api/test/test_xml2d.py +9 -11
  48. floodmodeller_api/test/test_zz.py +143 -0
  49. floodmodeller_api/to_from_json.py +8 -8
  50. floodmodeller_api/tool.py +12 -6
  51. floodmodeller_api/toolbox/example_tool.py +5 -1
  52. floodmodeller_api/toolbox/model_build/add_siltation_definition.py +12 -8
  53. floodmodeller_api/toolbox/model_build/structure_log/structure_log.py +498 -196
  54. floodmodeller_api/toolbox/model_build/structure_log_definition.py +5 -1
  55. floodmodeller_api/units/_base.py +14 -10
  56. floodmodeller_api/units/conveyance.py +10 -8
  57. floodmodeller_api/units/helpers.py +1 -3
  58. floodmodeller_api/units/losses.py +2 -3
  59. floodmodeller_api/units/sections.py +15 -11
  60. floodmodeller_api/units/structures.py +9 -9
  61. floodmodeller_api/units/units.py +2 -0
  62. floodmodeller_api/urban1d/_base.py +6 -9
  63. floodmodeller_api/urban1d/outfalls.py +2 -1
  64. floodmodeller_api/urban1d/raingauges.py +2 -1
  65. floodmodeller_api/urban1d/subsections.py +2 -0
  66. floodmodeller_api/urban1d/xsections.py +3 -2
  67. floodmodeller_api/util.py +16 -2
  68. floodmodeller_api/validation/validation.py +2 -1
  69. floodmodeller_api/version.py +1 -1
  70. floodmodeller_api/xml2d.py +18 -20
  71. floodmodeller_api/zz.py +538 -0
  72. {floodmodeller_api-0.5.0.dist-info → floodmodeller_api-0.5.1.dist-info}/METADATA +20 -14
  73. {floodmodeller_api-0.5.0.dist-info → floodmodeller_api-0.5.1.dist-info}/RECORD +78 -60
  74. {floodmodeller_api-0.5.0.dist-info → floodmodeller_api-0.5.1.dist-info}/WHEEL +1 -1
  75. floodmodeller_api/test/test_zzn.py +0 -36
  76. floodmodeller_api/zzn.py +0 -414
  77. /floodmodeller_api/test/test_data/{network_from_tabularCSV.csv → tabular_csv_outputs/network_zzn_max.csv} +0 -0
  78. {floodmodeller_api-0.5.0.dist-info → floodmodeller_api-0.5.1.dist-info}/LICENSE.txt +0 -0
  79. {floodmodeller_api-0.5.0.dist-info → floodmodeller_api-0.5.1.dist-info}/entry_points.txt +0 -0
  80. {floodmodeller_api-0.5.0.dist-info → floodmodeller_api-0.5.1.dist-info}/top_level.txt +0 -0
floodmodeller_api/test/test_toolbox_structure_log.py CHANGED
@@ -1,22 +1,28 @@
  import copy
  import csv
+ import json
+ import subprocess
  from pathlib import Path

  import pandas as pd
  import pytest

  from floodmodeller_api import DAT
- from floodmodeller_api.toolbox.model_build.structure_log import StructureLogBuilder
+ from floodmodeller_api.toolbox import StructureLog
+ from floodmodeller_api.toolbox.model_build.structure_log.structure_log import (
+     StructureLogBuilder,
+     serialise_keys,
+ )
  from floodmodeller_api.units.conduits import CONDUIT
  from floodmodeller_api.units.structures import ORIFICE


- @pytest.fixture
+ @pytest.fixture()
  def slb():
      return StructureLogBuilder("", "")


- @pytest.fixture
+ @pytest.fixture()
  def conduit_empty():
      c = CONDUIT()
      c.dist_to_next = 0
@@ -27,6 +33,7 @@ def conduit_empty():
      c.height = 0
      c.width = 0
      c.diameter = 0
+     c.elevation_invert = 0
      c.friction_on_invert = 0
      c.friction_on_soffit = 0
      c.friction_on_walls = 0
@@ -40,7 +47,7 @@ def conduit_empty():
      return c


- @pytest.fixture
+ @pytest.fixture()
  def conduit_filled():
      c = CONDUIT()
      c.dist_to_next = 0
@@ -51,6 +58,7 @@ def conduit_filled():
      c.height = 25.45
      c.width = 3
      c.diameter = 6
+     c.elevation_invert = 3
      c.friction_on_invert = 1.876
      c.friction_on_soffit = 1.34
      c.friction_on_walls = 1.8
@@ -64,171 +72,154 @@ def conduit_filled():
      return c


- @pytest.fixture
+ @pytest.fixture()
  def no_length():
      return 0


- @pytest.fixture
+ @pytest.fixture()
  def with_length():
      return 4.973


- @pytest.fixture
+ @pytest.fixture()
  def structure():
      return ORIFICE()


- def test_conduit_data(slb, conduit_empty):
-     slb._dat = DAT()
-     output = slb._conduit_data(conduit_empty)
-     assert output == [0.0, "", ""]
-
-
- def test_culvert_loss_data(slb):
-     output = slb._culvert_loss_data("", "")
-     assert output == ""
-     output = slb._culvert_loss_data("TEST1", "TEST2")
-     assert output == "Ki: TEST1, Ko: TEST2"
-
-
- def test_circular_data(slb, conduit_empty, conduit_filled, no_length, with_length):
-     slb._dat = DAT()
-     output = slb._circular_data(conduit_empty, no_length)
-     assert output == ["Mannings: 0", "dia: 0.00 x l: 0.00"]
-     output = slb._circular_data(conduit_filled, with_length)
-     assert output == [
-         "Mannings: [min: 1.453345, max: 3.435]",
-         "dia: 6.00 x l: 4.97",
-     ]
-
-
- def test_sprungarch_data(slb, conduit_empty, conduit_filled, no_length, with_length):
-     output = slb._sprungarch_data(conduit_empty, no_length)
-     assert output == [
-         "Mannings: 0",
-         "(Springing: 0.00, Crown: 0.00) x w: 0.00 x l: 0.00",
-     ]
-     output = slb._sprungarch_data(conduit_filled, with_length)
-     assert output == [
-         "Mannings: [min: 1.34, max: 1.876]",
-         "(Springing: 23.10, Crown: 5.40) x w: 3.00 x l: 4.97",
-     ]
-
-
- def test_rectangular_data(slb, conduit_empty, conduit_filled, no_length, with_length):
-     output = slb._rectangular_data(conduit_empty, no_length)
-     assert output == ["Mannings: 0", "h: 0.00 x w: 0.00 x l: 0.00"]
-     output = slb._rectangular_data(conduit_filled, with_length)
-     assert output == [
-         "Mannings: [min: 1.34, max: 1.876]",
-         "h: 25.45 x w: 3.00 x l: 4.97",
-     ]
-
-
- def test_section_data(slb, conduit_empty, conduit_filled, no_length, with_length):
-     output = slb._section_data(conduit_empty, no_length)
-     assert output == ["Colebrook-White: 0", "h: 0.00 x w: 0.00 x l: 0.00"]
-     output = slb._section_data(conduit_filled, with_length)
-     assert output == [
-         "Colebrook-White: [min: 0.0, max: 4.0]",
-         "h: 65.00 x w: 150.00 x l: 4.97",
-     ]
-
-
- def test_sprung_data(slb, conduit_empty, conduit_filled, no_length, with_length):
-     output = slb._sprung_data(conduit_empty, no_length)
-     assert output == [
-         "Mannings: 0",
-         "(Springing: 0.00, Crown: 0.00) x w: 0.00 x l: 0.00",
-     ]
-     output = slb._sprung_data(conduit_filled, with_length)
-     assert output == [
-         "Mannings: [min: 1.34, max: 1.876]",
-         "(Springing: 23.10, Crown: 5.40) x w: 3.00 x l: 4.97",
-     ]
-
-
- def test_orifice_dimensions(slb, structure):
-     structure.invert = 1
-     output = slb._orifice_dimensions(structure)
-     assert output == "h: -1.00 x w: -1.00"
-
-
- def test_spill_data(slb, structure):
-     structure.data = pd.DataFrame(data={"X": [0, 0], "Y": [0, 0]})
-     structure.weir_coefficient = 0
-     output = slb._spill_data(structure)
-     assert output == ["Elevation: 0.00 x w: 0.00", 0]
-
-
- def test_bridge_data(slb, structure):
-     structure.section_data = pd.DataFrame(data={"X": [0, 0], "Y": [0, 0], "Mannings n": [0, 0]})
-     structure.opening_data = pd.DataFrame(
-         data={"Start": 0, "Finish": 0, "Springing Level": 0, "Soffit Level": 0},
-         index=[0],
-     )
-     output = slb._bridge_data(structure)
-     assert output == ["Mannings: 0", "h: 0.00 x w: 0.00"]
-
-
- def test_add_conduits(slb, conduit_filled, tmpdir):
-     slb._dat = DAT()
-     prev_c = copy.deepcopy(conduit_filled)
-     prev_c.dist_to_next = 0
-     prev_c.name = "prev"
-     slb._dat.conduits["prev"] = prev_c
-     conduit_filled.dist_to_next = 5
-     slb._dat.conduits["test_conduit"] = conduit_filled
-     next_c = copy.deepcopy(conduit_filled)
-     next_c.dist_to_next = 0
-     slb._dat.conduits["next"] = next_c
-     slb._dat._all_units = [prev_c, conduit_filled, next_c]
-     conduit_non_subtype = copy.deepcopy(conduit_filled)
-     conduit_non_subtype._subtype = "NON_SUBTYPE"
-     slb._dat.conduits["test_conduit_NON_SUBTYPE"] = conduit_non_subtype
-
-     tmp_csv = Path(tmpdir) / "temp_structure_data.csv"
-     with tmp_csv.open("w") as file:
+ @pytest.fixture()
+ def conduit_chain_dat(conduit_filled):
+     dat = DAT()
+     names = ["first", "second", "third", "fourth"]
+     for name in names:
+         cond = copy.deepcopy(conduit_filled)
+         cond.dist_to_next = 10
+         cond.name = name
+         dat.conduits[name] = cond
+         dat._all_units.append(cond)
+
+     cond = copy.deepcopy(conduit_filled)
+     cond.dist_to_next = 0
+     cond.name = "fifth"
+     dat.conduits["fifth"] = cond
+     dat._all_units.append(cond)
+     return dat
+
+
+ @pytest.fixture()
+ def ex18_dat_path(test_workspace):
+     # TODO: Source a better test case that can be opened to public repo.
+     return Path(test_workspace, "EX18.DAT")
+
+
+ @pytest.fixture()
+ def ex18_dat_expected():
+     # This is about the limit of what can be pasted in code, if any larger test material is found then read from csv.
+     return """Unit Name,Unit Type,Unit Subtype,Comment,Friction,Dimensions (m),Weir Coefficient,Culvert Inlet/Outlet Loss
+ C2,CONDUIT,CIRCULAR,,"Mannings: [min: 0.015, max: 0.020]",dia: 1.00 x l: 100.00 (Total conduit length: 500.00),,Ki: 0.6
+ C2_R1,REPLICATE,,,,,,
+ C2_R2,REPLICATE,,,,,,
+ C2_R3,REPLICATE,,,,,,
+ C2_R4,REPLICATE,,,,,,
+ C2m,CONDUIT,CIRCULAR,,"Mannings: [min: 0.015, max: 0.020]",dia: 1.00 x l: 0.00,,
+ C2md,CONDUIT,CIRCULAR,,"Mannings: [min: 0.015, max: 0.020]",dia: 1.00 x l: 100.00 (Total conduit length: 700.00),,
+ C2_R5,REPLICATE,,,,,,
+ C2_R6,REPLICATE,,,,,,
+ C2_R7,REPLICATE,,,,,,
+ C2_R8,REPLICATE,,,,,,
+ C2_R9,REPLICATE,,,,,,
+ C2_R10,REPLICATE,,,,,,
+ C2d,CONDUIT,CIRCULAR,,"Mannings: [min: 0.015, max: 0.020]",dia: 1.00 x l: 0.00,,
+ S0,WEIR,,,,Crest Elevation: 21.00 x w: 1.50,,
+ C2d,WEIR,,,,Crest Elevation: 18.00 x w: 0.60,,
+ S4,WEIR,,,,Crest Elevation: 17.90 x w: 2.00,,
+ S8,WEIR,,,,Crest Elevation: 17.70 x w: 2.00,,
+ S3LS,SPILL,,,,Elevation: 20.00 x w: 100.00,1.7,
+ """
+
+
+ def test_empty_conduit(slb, conduit_empty):
+     slb.dat = DAT()
+     output, _ = slb._conduit_data(conduit_empty)
+     assert output == {
+         "length": 0.0,
+         "total_length": 0.0,
+     }
+
+
+ def test_multi_conduits(slb, conduit_chain_dat, tmp_path):
+     expected = """Unit Name,Unit Type,Unit Subtype,Comment,Friction,Dimensions (m),Weir Coefficient,Culvert Inlet/Outlet Loss
+ first,CONDUIT,SECTION,,"Colebrook-White: [min: 0.000, max: 4.000]",h: 65.00 x w: 156.00 x l: 10.00 (Total conduit length: 40.00),,
+ second,CONDUIT,SECTION,,"Colebrook-White: [min: 0.000, max: 4.000]",h: 65.00 x w: 156.00 x l: 10.00,,
+ third,CONDUIT,SECTION,,"Colebrook-White: [min: 0.000, max: 4.000]",h: 65.00 x w: 156.00 x l: 10.00,,
+ fourth,CONDUIT,SECTION,,"Colebrook-White: [min: 0.000, max: 4.000]",h: 65.00 x w: 156.00 x l: 10.00,,
+ fifth,CONDUIT,SECTION,,"Colebrook-White: [min: 0.000, max: 4.000]",h: 65.00 x w: 156.00 x l: 0.00,,
+ """
+
+     slb.dat = conduit_chain_dat
+     tmp_csv = tmp_path / "test_multi_conduits.csv"
+     with tmp_csv.open("w", newline="") as file:
          slb._writer = csv.writer(file)
-         slb._add_conduits()
-
-
- def test_add_structures(slb, structure, tmpdir):
-     slb._dat = DAT()
-     structure.soffit = 3
-     structure.weir_coefficient = 1
-     structure.data = pd.DataFrame(data={"X": [0, 0], "Y": [0, 0]})
-     structure.section_data = pd.DataFrame(data={"X": [0, 0], "Y": [0, 0], "Mannings n": [0, 0]})
-     structure.opening_data = pd.DataFrame(
-         data={"Start": 0, "Finish": 0, "Springing Level": 0, "Soffit Level": 0},
-         index=[0],
+         slb.add_conduits()
+         slb.write_csv_output(file)
+
+     with open(tmp_csv) as read_file:
+         text = read_file.read()
+
+     assert text == expected
+
+
+ @pytest.mark.parametrize(
+     ("filename"),
+     [
+         ("EX18"),
+         ("EX6"),
+         ("EX17"),
+     ],
+ )
+ def test_multiple_dats(filename, test_workspace, tmp_path):
+     dat_path = Path(test_workspace, f"{filename}.DAT")
+     expected_csv_path = Path(test_workspace / "structure_logs", f"{filename}_expected.csv")
+     expected_json_path = Path(test_workspace / "structure_logs", f"{filename}_expected.json")
+     test_csv_path = tmp_path / f"test_multiple_dats_{filename}.csv"
+     slb = StructureLogBuilder(dat_path, test_csv_path)
+     slb.dat = DAT(slb.dat_file_path)
+     slb.add_conduits()
+     slb.add_structures()
+
+     with expected_json_path.open("r") as file:
+         expected_json_data = json.load(file)
+
+     assert serialise_keys(slb.unit_store) == expected_json_data
+
+     with open(slb.csv_output_path, "w", newline="") as file:
+         slb.write_csv_output(file)
+
+     with expected_csv_path.open("r") as file:
+         expected_csv_data = file.read()
+     with test_csv_path.open("r") as file:
+         test_csv_data = file.read()
+
+     assert test_csv_data == expected_csv_data
+
+
+ def test_full_dat_from_python(slb, tmp_path, ex18_dat_path, ex18_dat_expected):
+     # these two tests should be as described in the toolbox documentation
+     tmp_csv = tmp_path / "test_full_dat_from_python.csv"
+     StructureLog.run(input_path=ex18_dat_path, output_path=tmp_csv)
+
+     with open(tmp_csv) as read_file:
+         text = read_file.read()
+     assert text == ex18_dat_expected
+
+
+ def test_full_dat_from_commandline(slb, tmp_path, ex18_dat_path, ex18_dat_expected):
+     # these two tests should be as described in the toolbox documentation
+     tmp_csv = tmp_path / "test_full_dat_from_python.csv"
+     subprocess.call(
+         ["fmapi-structure_log", "--input_path", str(ex18_dat_path), "--output_path", str(tmp_csv)],
      )
-     structure.crest_elevation = 1
-     structure.weir_breadth = 1
-     structure.weir_length = 1
-     structure.weir_elevation = 1
-     slb._dat.structures["test_structure_orifice"] = structure
-     struc_spill = copy.deepcopy(structure)
-     struc_spill._unit = "SPILL"
-     slb._dat.structures["test_structure_spill"] = struc_spill
-     struc_sluice = copy.deepcopy(structure)
-     struc_sluice._unit = "SLUICE"
-     slb._dat.structures["test_structure_sluice"] = struc_sluice
-     struc_rnweir = copy.deepcopy(structure)
-     struc_rnweir._unit = "RNWEIR"
-     slb._dat.structures["test_structure_rnweir"] = struc_rnweir
-     struc_weir = copy.deepcopy(structure)
-     struc_weir._unit = "WEIR"
-     slb._dat.structures["test_structure_weir"] = struc_weir
-     struc_bridge = copy.deepcopy(structure)
-     struc_bridge._unit = "BRIDGE"
-     slb._dat.structures["test_structure_bridge"] = struc_bridge
-     struc_none = copy.deepcopy(structure)
-     struc_none._unit = "NONE"
-     slb._dat.structures["test_structure_none"] = struc_none
-
-     tmp_csv = Path(tmpdir) / "temp_structure_data.csv"
-     with tmp_csv.open("w") as file:
-         slb._writer = csv.writer(file)
-         slb._add_structures()
+
+     with open(tmp_csv) as read_file:
+         text = read_file.read()
+     assert text == ex18_dat_expected
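
Taken together, the new tests exercise the structure log tool both from Python and from the command line. A minimal usage sketch based only on the calls made above (the paths are placeholders for a real DAT model and output CSV):

    from floodmodeller_api.toolbox import StructureLog

    # Python entry point, as used in test_full_dat_from_python
    StructureLog.run(input_path="EX18.DAT", output_path="structure_log.csv")

    # Command-line entry point, as used in test_full_dat_from_commandline:
    #   fmapi-structure_log --input_path EX18.DAT --output_path structure_log.csv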
floodmodeller_api/test/test_xml2d.py CHANGED
@@ -5,12 +5,12 @@ import pytest
  from floodmodeller_api import XML2D


- @pytest.fixture
+ @pytest.fixture()
  def xml_fp(test_workspace):
      return Path(test_workspace, "Domain1_Q.xml")


- @pytest.fixture
+ @pytest.fixture()
  def data_before(xml_fp):
      return XML2D(xml_fp)._write()

@@ -25,7 +25,7 @@ def test_xml2d_link_dtm_changes(xml_fp, data_before):
      """XML2D: Test changing and reverting link1d file and dtm makes no changes"""
      x2d = XML2D(xml_fp)
      prev_link = x2d.link1d[0]["link"]
-     domain = list(x2d.domains)[0]
+     domain = next(iter(x2d.domains))
      prev_dtm = x2d.domains[domain]["topography"]

      x2d.link1d[0]["link"] = ["new_link"]
@@ -56,10 +56,8 @@ def test_xml2d_change_revert_elem_topography():
      """XML2D: Check that when we change an existing element
      that it is actually adding it and that it is being reverted."""
      x2d = XML2D()
-     domain = list(x2d.domains)[0]
-     orig_topography = []
-     for item in x2d.domains[domain]["topography"]:
-         orig_topography.append(str(item))
+     domain = next(iter(x2d.domains))
+     orig_topography = [str(item) for item in x2d.domains[domain]["topography"]]
      orig_xml = x2d._write()
      x2d.domains[domain]["topography"][0] = "my/new/topography"

@@ -74,7 +72,7 @@ def test_xml2d_add_remove_branch_roughness():
      """XML2D: Check that we can actually add a branch and that
      it is being added and passes validation (i.e write)"""
      x2d = XML2D()
-     domain = list(x2d.domains)[0]
+     domain = next(iter(x2d.domains))
      orig_xml = x2d._write()
      x2d.domains[domain]["roughness"] = []
      x2d.domains[domain]["roughness"].append(
@@ -91,7 +89,7 @@ def test_xml2d_append_remove_branch_roughness():
      """XML2D: Check that we can append an extra branch to preexisting branch
      so that it passes validation"""
      x2d = XML2D()
-     domain = list(x2d.domains)[0]
+     domain = next(iter(x2d.domains))
      x2d.domains[domain]["roughness"] = []
      x2d.domains[domain]["roughness"].append(
          {"type": "file", "law": "manning", "value": "my/roughness/file.shp"},
@@ -115,7 +113,7 @@ def test_xml2d_append_remove_branch_roughness():
  def test_xml2d_reorder_elem_computational_area_wrong_position():
      """XML2D: Check that if we add ??? in the wrong position does it reorder"""
      x2d = XML2D()
-     domain = list(x2d.domains)[0]
+     domain = next(iter(x2d.domains))
      x2d.domains[domain]["computational_area"] = {
          "yll": ...,
          "xll": ...,
@@ -145,7 +143,7 @@ def test_xml2d_reorder_elem_computational_area_wrong_position():
  def test_xml2d_update_value(xml_fp, data_before):
      """XML2D: Test changing and reverting link1d file and dtm makes no changes"""
      x2d = XML2D(xml_fp)
-     domain = list(x2d.domains)[0]
+     domain = next(iter(x2d.domains))
      x2d.domains[domain]["run_data"]["scheme"] = "TVD"

      assert x2d._write()
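
The recurring edit in this file replaces `list(x2d.domains)[0]` with `next(iter(x2d.domains))`, which fetches the first key of the domains mapping without building a throwaway list. A standalone illustration of the idiom (plain Python, unrelated to any XML2D internals):

    domains = {"Domain1": {}, "Domain2": {}}
    assert list(domains)[0] == next(iter(domains))  # same first key, no temporary list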
floodmodeller_api/test/test_zz.py ADDED
@@ -0,0 +1,143 @@
+ # type: ignore
+ # ignored because the output from _ZZ.to_dataframe() is only a series in special cases
+
+ from pathlib import Path
+
+ import pandas as pd
+ import pytest
+
+ from floodmodeller_api import IEF, ZZN, ZZX
+
+
+ @pytest.fixture()
+ def zzn(test_workspace: Path) -> ZZN:
+     path = test_workspace / "network.zzn"
+     return ZZN(path)
+
+
+ @pytest.fixture()
+ def zzx(test_workspace: Path) -> ZZX:
+     path = test_workspace / "network.zzx"
+     return ZZX(path)
+
+
+ @pytest.fixture()
+ def ief(test_workspace: Path) -> IEF:
+     path = test_workspace / "network.ief"
+     return IEF(path)
+
+
+ @pytest.fixture()
+ def folder(test_workspace: Path) -> Path:
+     return test_workspace / "tabular_csv_outputs"
+
+
+ @pytest.mark.parametrize(
+     ("csv", "file"),
+     [
+         ("network_zzn_max.csv", "zzn"),
+         ("network_zzx_max.csv", "zzx"),
+     ],
+ )
+ def test_max(zzn: ZZN, zzx: ZZX, folder: Path, csv: str, file: str):
+     file_obj = zzn if file == "zzn" else zzx
+     expected = pd.read_csv(folder / csv, index_col=0)
+
+     actual = file_obj.to_dataframe(result_type="max")
+     pd.testing.assert_frame_equal(actual, expected, atol=1e-3, check_dtype=False)
+
+
+ @pytest.mark.parametrize(
+     ("variable", "csv", "file"),
+     [
+         # zzn
+         ("Flow", "network_zzn_flow.csv", "zzn"),
+         ("Stage", "network_zzn_stage.csv", "zzn"),
+         ("Froude", "network_zzn_fr.csv", "zzn"),
+         ("Velocity", "network_zzn_velocity.csv", "zzn"),
+         ("Mode", "network_zzn_mode.csv", "zzn"),
+         ("State", "network_zzn_state.csv", "zzn"),
+         # zzx
+         ("Left FP h", "network_zzx_left_fp_h.csv", "zzx"),
+         ("Link inflow", "network_zzx_link_inflow.csv", "zzx"),
+         ("Right FP h", "network_zzx_right_fp_h.csv", "zzx"),
+         ("Right FP mode", "network_zzx_right_fp_mode.csv", "zzx"),
+         ("Left FP mode", "network_zzx_left_fp_mode.csv", "zzx"),
+     ],
+ )
+ def test_all_timesteps(zzn: ZZN, zzx: ZZX, folder: Path, variable: str, csv: str, file: str):
+     file_obj = zzn if file == "zzn" else zzx
+     suffix = f"_{variable}"
+     expected = pd.read_csv(folder / csv, index_col=0)
+
+     actual_1 = file_obj.to_dataframe(variable=variable)
+     actual_1.index = actual_1.index.round(3)
+     pd.testing.assert_frame_equal(actual_1, expected, atol=1e-3, check_dtype=False)
+
+     actual_2 = file_obj.to_dataframe()[variable]
+     actual_2.index = actual_2.index.round(3)
+     pd.testing.assert_frame_equal(actual_2, expected, atol=1e-3, check_dtype=False)
+
+     actual_3 = file_obj.to_dataframe(multilevel_header=False).filter(like=suffix, axis=1)
+     actual_3.index = actual_3.index.round(3)
+     actual_3.columns = [x.removesuffix(suffix) for x in actual_3.columns]
+     pd.testing.assert_frame_equal(actual_3, expected, atol=1e-3, check_dtype=False)
+
+     actual_4 = file_obj.to_dataframe(variable=variable, multilevel_header=False)
+     actual_4.index = actual_4.index.round(3)
+     actual_4.columns = [x.removesuffix(suffix) for x in actual_4.columns]
+     pd.testing.assert_frame_equal(actual_4, expected, atol=1e-3, check_dtype=False)
+
+
+ def test_zzn_include_time(zzn: ZZN):
+     zzn_df = zzn.to_dataframe(result_type="max", variable="flow", include_time=True)
+     actual = zzn_df.loc["resin", ["Max Flow", "Max Flow Time(hrs)"]].to_numpy()
+     assert actual[0] == pytest.approx(7.296, abs=0.001)
+     assert actual[1] == 9
+
+
+ def test_zzn_from_ief(zzn: ZZN, ief: IEF):
+     zzn_df = zzn.to_dataframe()
+     zzn_from_ief = ief.get_results().to_dataframe()
+     pd.testing.assert_frame_equal(zzn_df, zzn_from_ief)
+
+
+ def test_zzn_to_csv(zzn: ZZN, tmp_path: Path, test_workspace: Path):
+     # default
+     zzn.export_to_csv()
+     path = test_workspace / "network.csv"
+     assert path.exists()
+     path.unlink()
+
+     # absolute
+     zzn.export_to_csv(tmp_path / "test.csv")
+     path = tmp_path / "test.csv"
+     assert path.exists()
+     path.unlink()
+
+     # relative
+     zzn.export_to_csv("test.csv")
+     path = zzn.filepath.parent / "test.csv"
+     assert path.exists()
+     path.unlink()
+
+     # folder
+     zzn.export_to_csv(tmp_path)
+     path = tmp_path / "network.csv"
+     assert path.exists()
+     path.unlink()
+
+     # doesn't exist
+     zzn.export_to_csv("test")
+     path = test_workspace / "test/network.csv"
+     assert path.exists()
+     path.unlink()
+
+
+ def test_meta_is_read_only(zzx: ZZN):
+     assert dict(zzx.meta) == zzx._meta
+
+     with pytest.raises(TypeError):
+         zzx.meta["variables"] = "hi"
+
+     zzx._meta["variables"] = "hi"
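
The new test_zz.py covers the ZZN/ZZX results classes that replace the removed zzn.py module. A minimal usage sketch based only on the calls exercised above (the file path is a placeholder for a real simulation output):

    from floodmodeller_api import ZZN

    zzn = ZZN("network.zzn")  # placeholder path
    peaks = zzn.to_dataframe(result_type="max", variable="flow", include_time=True)
    flows = zzn.to_dataframe(variable="Flow")  # all timesteps for a single variable
    flat = zzn.to_dataframe(multilevel_header=False)  # single-level columns suffixed with the variable name
    zzn.export_to_csv("results.csv")  # also accepts a folder, a relative name, or no argument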
floodmodeller_api/to_from_json.py CHANGED
@@ -162,15 +162,15 @@ def recursive_from_json(obj: dict | Any) -> Any:
          return api_class_mapping[class_type].from_json(obj)

      if "class" in obj and obj["class"] == "pandas.DataFrame":
-         df = pd.DataFrame.from_dict(obj["object"])
-         df.index = convert_dataframe_index(df.index)
-         return df
+         reconstructed_df = pd.DataFrame.from_dict(obj["object"])
+         reconstructed_df.index = convert_dataframe_index(reconstructed_df.index)
+         return reconstructed_df
      if "class" in obj and obj["class"] == "pandas.Series":
-         sr = pd.Series(obj["object"])
-         sr.index = convert_dataframe_index(sr.index)
-         sr.index.name = obj["index_name"]
-         sr.name = obj["variable_name"]
-         return sr
+         reconstructed_sr = pd.Series(obj["object"])
+         reconstructed_sr.index = convert_dataframe_index(reconstructed_sr.index)
+         reconstructed_sr.index.name = obj["index_name"]
+         reconstructed_sr.name = obj["variable_name"]
+         return reconstructed_sr

      if "python_set" in obj:
          return set(obj["python_set"])
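
The rename above is cosmetic, but the surrounding branch shows how recursive_from_json rebuilds pandas objects from their serialised dictionaries. A standalone sketch of the pandas.Series case, using the keys visible in the diff ("object", "index_name", "variable_name"); the values are hypothetical and the library's convert_dataframe_index step is omitted:

    import pandas as pd

    obj = {
        "class": "pandas.Series",
        "object": {"0.0": 1.2, "0.5": 1.4},  # hypothetical serialised values
        "index_name": "Time (hr)",
        "variable_name": "Flow",
    }
    sr = pd.Series(obj["object"])
    sr.index.name = obj["index_name"]
    sr.name = obj["variable_name"]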
floodmodeller_api/tool.py CHANGED
@@ -4,6 +4,7 @@ import argparse
  import sys
  import tkinter as tk
  from dataclasses import dataclass
+ from typing import ClassVar


  @dataclass()
@@ -150,7 +151,8 @@ class Gui:
              entry = tk.Entry(self.master, validate="key")
              entry.config(validatecommand=(entry.register(validate_float), "%P"))
          else:
-             raise ValueError("Invalid data type")
+             msg = "Invalid data type"
+             raise ValueError(msg)
          entry.pack()
          self.root_entries[name] = entry

@@ -209,28 +211,31 @@ class FMTool:

      """

-     parameters: list[Parameter] = []
+     parameters: ClassVar[list[Parameter]] = []

      @property
      def name(self):
          """
          A property method to ensure a tool name is provided in child class. Overwritten by child.
          """
-         raise NotImplementedError("Tools need a name")
+         msg = "Tools need a name"
+         raise NotImplementedError(msg)

      @property
      def description(self):
          """
          A property method to ensure a tool description is provided in child class. Overwritten by child.
          """
-         raise NotImplementedError("Tools need a description")
+         msg = "Tools need a description"
+         raise NotImplementedError(msg)

      @property
      def tool_function(self):
          """
          A property method to ensure an tool_function is provided in child class. Overwritten by child.
          """
-         raise NotImplementedError("You must provide an entry point function")
+         msg = "You must provide an entry point function"
+         raise NotImplementedError(msg)

      def __init__(self):
          self.check_parameters()
@@ -248,7 +253,8 @@ class FMTool:
          params = []
          for parameter in self.parameters:
              if parameter.name in params:
-                 raise ValueError("Parameter names must be unique")
+                 msg = "Parameter names must be unique"
+                 raise ValueError(msg)
              params.append(parameter.name)

      # This is defined as a class method because of the use of **kwargs
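
The FMTool change annotates the mutable class-level parameters list with ClassVar, marking it as shared class state rather than a per-instance field (the pattern type checkers and linters otherwise flag). A generic sketch of the annotation, not the exact FMTool definition:

    from typing import ClassVar


    class ExampleTool:
        # Shared across all instances; ClassVar documents that this is class-level state.
        parameters: ClassVar[list[str]] = []

        def __init__(self, name: str) -> None:
            self.name = name  # per-instance state needs no ClassVar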