flood-adapt 0.3.9__py3-none-any.whl → 0.3.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flood_adapt/__init__.py +26 -22
- flood_adapt/adapter/__init__.py +9 -9
- flood_adapt/adapter/fiat_adapter.py +1541 -1541
- flood_adapt/adapter/interface/hazard_adapter.py +70 -70
- flood_adapt/adapter/interface/impact_adapter.py +36 -36
- flood_adapt/adapter/interface/model_adapter.py +89 -89
- flood_adapt/adapter/interface/offshore.py +19 -19
- flood_adapt/adapter/sfincs_adapter.py +1848 -1848
- flood_adapt/adapter/sfincs_offshore.py +193 -193
- flood_adapt/config/config.py +248 -248
- flood_adapt/config/fiat.py +219 -219
- flood_adapt/config/gui.py +331 -331
- flood_adapt/config/sfincs.py +481 -336
- flood_adapt/config/site.py +129 -129
- flood_adapt/database_builder/database_builder.py +2210 -2210
- flood_adapt/database_builder/templates/default_units/imperial.toml +9 -9
- flood_adapt/database_builder/templates/default_units/metric.toml +9 -9
- flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -10
- flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -90
- flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -57
- flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -121
- flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -65
- flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -45
- flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -126
- flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -60
- flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -121
- flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -65
- flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -45
- flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -4
- flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -143
- flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -153
- flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -127
- flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -57
- flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -4
- flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -191
- flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -153
- flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -178
- flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -57
- flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -9
- flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -65
- flood_adapt/database_builder/templates/output_layers/bin_colors.toml +5 -5
- flood_adapt/database_builder.py +16 -16
- flood_adapt/dbs_classes/__init__.py +21 -21
- flood_adapt/dbs_classes/database.py +495 -684
- flood_adapt/dbs_classes/dbs_benefit.py +77 -76
- flood_adapt/dbs_classes/dbs_event.py +61 -59
- flood_adapt/dbs_classes/dbs_measure.py +112 -111
- flood_adapt/dbs_classes/dbs_projection.py +34 -34
- flood_adapt/dbs_classes/dbs_scenario.py +137 -137
- flood_adapt/dbs_classes/dbs_static.py +274 -273
- flood_adapt/dbs_classes/dbs_strategy.py +130 -129
- flood_adapt/dbs_classes/dbs_template.py +279 -278
- flood_adapt/dbs_classes/interface/database.py +107 -139
- flood_adapt/dbs_classes/interface/element.py +121 -121
- flood_adapt/dbs_classes/interface/static.py +47 -47
- flood_adapt/flood_adapt.py +1207 -1178
- flood_adapt/misc/database_user.py +16 -16
- flood_adapt/misc/exceptions.py +22 -0
- flood_adapt/misc/log.py +183 -183
- flood_adapt/misc/path_builder.py +54 -54
- flood_adapt/misc/utils.py +185 -185
- flood_adapt/objects/__init__.py +82 -82
- flood_adapt/objects/benefits/benefits.py +61 -61
- flood_adapt/objects/events/event_factory.py +135 -135
- flood_adapt/objects/events/event_set.py +88 -84
- flood_adapt/objects/events/events.py +234 -234
- flood_adapt/objects/events/historical.py +58 -58
- flood_adapt/objects/events/hurricane.py +68 -67
- flood_adapt/objects/events/synthetic.py +46 -50
- flood_adapt/objects/forcing/__init__.py +92 -92
- flood_adapt/objects/forcing/csv.py +68 -68
- flood_adapt/objects/forcing/discharge.py +66 -66
- flood_adapt/objects/forcing/forcing.py +150 -150
- flood_adapt/objects/forcing/forcing_factory.py +182 -182
- flood_adapt/objects/forcing/meteo_handler.py +93 -93
- flood_adapt/objects/forcing/netcdf.py +40 -40
- flood_adapt/objects/forcing/plotting.py +453 -429
- flood_adapt/objects/forcing/rainfall.py +98 -98
- flood_adapt/objects/forcing/tide_gauge.py +191 -191
- flood_adapt/objects/forcing/time_frame.py +90 -90
- flood_adapt/objects/forcing/timeseries.py +564 -564
- flood_adapt/objects/forcing/unit_system.py +580 -580
- flood_adapt/objects/forcing/waterlevels.py +108 -108
- flood_adapt/objects/forcing/wind.py +124 -124
- flood_adapt/objects/measures/measure_factory.py +92 -92
- flood_adapt/objects/measures/measures.py +529 -529
- flood_adapt/objects/object_model.py +74 -68
- flood_adapt/objects/projections/projections.py +103 -103
- flood_adapt/objects/scenarios/scenarios.py +22 -22
- flood_adapt/objects/strategies/strategies.py +89 -89
- flood_adapt/workflows/benefit_runner.py +579 -554
- flood_adapt/workflows/floodmap.py +85 -85
- flood_adapt/workflows/impacts_integrator.py +85 -85
- flood_adapt/workflows/scenario_runner.py +70 -70
- {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.10.dist-info}/LICENSE +674 -674
- {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.10.dist-info}/METADATA +866 -865
- flood_adapt-0.3.10.dist-info/RECORD +140 -0
- flood_adapt-0.3.9.dist-info/RECORD +0 -139
- {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.10.dist-info}/WHEEL +0 -0
- {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.10.dist-info}/top_level.txt +0 -0
|
@@ -1,1848 +1,1848 @@
|
|
|
1
|
-
import logging
|
|
2
|
-
import math
|
|
3
|
-
import os
|
|
4
|
-
import shutil
|
|
5
|
-
import subprocess
|
|
6
|
-
import tempfile
|
|
7
|
-
from pathlib import Path
|
|
8
|
-
from typing import Optional, Union
|
|
9
|
-
|
|
10
|
-
import geopandas as gpd
|
|
11
|
-
import hydromt_sfincs.utils as utils
|
|
12
|
-
import numpy as np
|
|
13
|
-
import pandas as pd
|
|
14
|
-
import plotly.express as px
|
|
15
|
-
import pyproj
|
|
16
|
-
import shapely
|
|
17
|
-
import xarray as xr
|
|
18
|
-
from cht_cyclones.tropical_cyclone import TropicalCyclone
|
|
19
|
-
from cht_tide.read_bca import SfincsBoundary
|
|
20
|
-
from cht_tide.tide_predict import predict
|
|
21
|
-
from hydromt_sfincs import SfincsModel as HydromtSfincsModel
|
|
22
|
-
from hydromt_sfincs.quadtree import QuadtreeGrid
|
|
23
|
-
from numpy import matlib
|
|
24
|
-
from shapely.affinity import translate
|
|
25
|
-
|
|
26
|
-
from flood_adapt.adapter.interface.hazard_adapter import IHazardAdapter
|
|
27
|
-
from flood_adapt.config.config import Settings
|
|
28
|
-
from flood_adapt.config.site import Site
|
|
29
|
-
from flood_adapt.misc.log import FloodAdaptLogging
|
|
30
|
-
from flood_adapt.misc.path_builder import (
|
|
31
|
-
ObjectDir,
|
|
32
|
-
TopLevelDir,
|
|
33
|
-
db_path,
|
|
34
|
-
)
|
|
35
|
-
from flood_adapt.misc.utils import cd, resolve_filepath
|
|
36
|
-
from flood_adapt.objects.events.event_set import EventSet
|
|
37
|
-
from flood_adapt.objects.events.events import Event, Mode, Template
|
|
38
|
-
from flood_adapt.objects.events.historical import HistoricalEvent
|
|
39
|
-
from flood_adapt.objects.events.hurricane import TranslationModel
|
|
40
|
-
from flood_adapt.objects.forcing import unit_system as us
|
|
41
|
-
from flood_adapt.objects.forcing.discharge import (
|
|
42
|
-
DischargeConstant,
|
|
43
|
-
DischargeCSV,
|
|
44
|
-
DischargeSynthetic,
|
|
45
|
-
)
|
|
46
|
-
from flood_adapt.objects.forcing.forcing import (
|
|
47
|
-
ForcingSource,
|
|
48
|
-
ForcingType,
|
|
49
|
-
IDischarge,
|
|
50
|
-
IForcing,
|
|
51
|
-
IRainfall,
|
|
52
|
-
IWaterlevel,
|
|
53
|
-
IWind,
|
|
54
|
-
)
|
|
55
|
-
from flood_adapt.objects.forcing.meteo_handler import MeteoHandler
|
|
56
|
-
from flood_adapt.objects.forcing.rainfall import (
|
|
57
|
-
RainfallConstant,
|
|
58
|
-
RainfallCSV,
|
|
59
|
-
RainfallMeteo,
|
|
60
|
-
RainfallNetCDF,
|
|
61
|
-
RainfallSynthetic,
|
|
62
|
-
RainfallTrack,
|
|
63
|
-
)
|
|
64
|
-
from flood_adapt.objects.forcing.time_frame import TimeFrame
|
|
65
|
-
from flood_adapt.objects.forcing.waterlevels import (
|
|
66
|
-
WaterlevelCSV,
|
|
67
|
-
WaterlevelGauged,
|
|
68
|
-
WaterlevelModel,
|
|
69
|
-
WaterlevelSynthetic,
|
|
70
|
-
)
|
|
71
|
-
from flood_adapt.objects.forcing.wind import (
|
|
72
|
-
WindConstant,
|
|
73
|
-
WindCSV,
|
|
74
|
-
WindMeteo,
|
|
75
|
-
WindNetCDF,
|
|
76
|
-
WindSynthetic,
|
|
77
|
-
WindTrack,
|
|
78
|
-
)
|
|
79
|
-
from flood_adapt.objects.measures.measures import (
|
|
80
|
-
FloodWall,
|
|
81
|
-
GreenInfrastructure,
|
|
82
|
-
Measure,
|
|
83
|
-
Pump,
|
|
84
|
-
)
|
|
85
|
-
from flood_adapt.objects.projections.projections import (
|
|
86
|
-
PhysicalProjection,
|
|
87
|
-
Projection,
|
|
88
|
-
)
|
|
89
|
-
from flood_adapt.objects.scenarios.scenarios import Scenario
|
|
90
|
-
|
|
91
|
-
|
|
92
|
-
class SfincsAdapter(IHazardAdapter):
|
|
93
|
-
"""Adapter for the SFINCS model.
|
|
94
|
-
|
|
95
|
-
This class is used to run the SFINCS model and process the results.
|
|
96
|
-
|
|
97
|
-
Attributes
|
|
98
|
-
----------
|
|
99
|
-
settings : SfincsModel
|
|
100
|
-
The settings for the SFINCS model.
|
|
101
|
-
"""
|
|
102
|
-
|
|
103
|
-
logger = FloodAdaptLogging.getLogger("SfincsAdapter")
|
|
104
|
-
_site: Site
|
|
105
|
-
_model: HydromtSfincsModel
|
|
106
|
-
|
|
107
|
-
###############
|
|
108
|
-
### PUBLIC ####
|
|
109
|
-
###############
|
|
110
|
-
|
|
111
|
-
### HAZARD ADAPTER METHODS ###
|
|
112
|
-
def __init__(self, model_root: Path):
|
|
113
|
-
"""Load overland sfincs model based on a root directory.
|
|
114
|
-
|
|
115
|
-
Parameters
|
|
116
|
-
----------
|
|
117
|
-
model_root : Path
|
|
118
|
-
Root directory of overland sfincs model.
|
|
119
|
-
"""
|
|
120
|
-
self.settings = self.database.site.sfincs
|
|
121
|
-
self.units = self.database.site.gui.units
|
|
122
|
-
self.sfincs_logger = self._setup_sfincs_logger(model_root)
|
|
123
|
-
self._model = HydromtSfincsModel(
|
|
124
|
-
root=str(model_root.resolve()), mode="r", logger=self.sfincs_logger
|
|
125
|
-
)
|
|
126
|
-
self._model.read()
|
|
127
|
-
|
|
128
|
-
def read(self, path: Path):
|
|
129
|
-
"""Read the sfincs model from the current model root."""
|
|
130
|
-
if Path(self._model.root).resolve() != Path(path).resolve():
|
|
131
|
-
self._model.set_root(root=str(path), mode="r")
|
|
132
|
-
self._model.read()
|
|
133
|
-
|
|
134
|
-
def write(self, path_out: Union[str, os.PathLike], overwrite: bool = True):
|
|
135
|
-
"""Write the sfincs model configuration to a directory."""
|
|
136
|
-
root = self.get_model_root()
|
|
137
|
-
if not isinstance(path_out, Path):
|
|
138
|
-
path_out = Path(path_out).resolve()
|
|
139
|
-
|
|
140
|
-
if not path_out.exists():
|
|
141
|
-
path_out.mkdir(parents=True)
|
|
142
|
-
|
|
143
|
-
if root != path_out:
|
|
144
|
-
shutil.copytree(root, path_out, dirs_exist_ok=True)
|
|
145
|
-
|
|
146
|
-
write_mode = "w+" if overwrite else "w"
|
|
147
|
-
with cd(path_out):
|
|
148
|
-
self._model.set_root(root=str(path_out), mode=write_mode)
|
|
149
|
-
self._model.write()
|
|
150
|
-
|
|
151
|
-
def close_files(self):
|
|
152
|
-
"""Close all open files and clean up file handles."""
|
|
153
|
-
for logger in [self.logger, self.sfincs_logger]:
|
|
154
|
-
if hasattr(logger, "handlers"):
|
|
155
|
-
for handler in logger.handlers:
|
|
156
|
-
if isinstance(handler, logging.FileHandler):
|
|
157
|
-
handler.close()
|
|
158
|
-
logger.removeHandler(handler)
|
|
159
|
-
|
|
160
|
-
def __enter__(self) -> "SfincsAdapter":
|
|
161
|
-
return self
|
|
162
|
-
|
|
163
|
-
def __exit__(self, exc_type, exc_value, traceback) -> bool:
|
|
164
|
-
self.close_files()
|
|
165
|
-
return False
|
|
166
|
-
|
|
167
|
-
def has_run(self, scenario: Scenario) -> bool:
|
|
168
|
-
"""Check if the model has been run."""
|
|
169
|
-
event = self.database.events.get(scenario.event)
|
|
170
|
-
if event.mode == Mode.risk:
|
|
171
|
-
sim_paths = [
|
|
172
|
-
self._get_simulation_path(scenario, sub_event=sub_event)
|
|
173
|
-
for sub_event in event.sub_events
|
|
174
|
-
]
|
|
175
|
-
# No need to check postprocessing for risk scenarios
|
|
176
|
-
return all(self.sfincs_completed(sim_path) for sim_path in sim_paths)
|
|
177
|
-
else:
|
|
178
|
-
return self.sfincs_completed(self._get_simulation_path(scenario))
|
|
179
|
-
|
|
180
|
-
def execute(self, path: Path, strict: bool = True) -> bool:
|
|
181
|
-
"""
|
|
182
|
-
Run the sfincs executable in the specified path.
|
|
183
|
-
|
|
184
|
-
Parameters
|
|
185
|
-
----------
|
|
186
|
-
path : str
|
|
187
|
-
Path to the simulation folder.
|
|
188
|
-
Default is None, in which case the model root is used.
|
|
189
|
-
strict : bool, optional
|
|
190
|
-
True: raise an error if the model fails to run.
|
|
191
|
-
False: log a warning.
|
|
192
|
-
Default is True.
|
|
193
|
-
|
|
194
|
-
Returns
|
|
195
|
-
-------
|
|
196
|
-
bool
|
|
197
|
-
True if the model ran successfully, False otherwise.
|
|
198
|
-
|
|
199
|
-
"""
|
|
200
|
-
with cd(path):
|
|
201
|
-
self.logger.info(f"Running SFINCS in {path}")
|
|
202
|
-
process = subprocess.run(
|
|
203
|
-
str(Settings().sfincs_bin_path),
|
|
204
|
-
stdout=subprocess.PIPE,
|
|
205
|
-
stderr=subprocess.PIPE,
|
|
206
|
-
text=True,
|
|
207
|
-
)
|
|
208
|
-
self.sfincs_logger.info(process.stdout)
|
|
209
|
-
self.logger.debug(process.stdout)
|
|
210
|
-
|
|
211
|
-
self._cleanup_simulation_folder(path)
|
|
212
|
-
|
|
213
|
-
if process.returncode != 0:
|
|
214
|
-
if Settings().delete_crashed_runs:
|
|
215
|
-
# Remove all files in the simulation folder except for the log files
|
|
216
|
-
for subdir, dirs, files in os.walk(path, topdown=False):
|
|
217
|
-
for file in files:
|
|
218
|
-
if not file.endswith(".log"):
|
|
219
|
-
os.remove(os.path.join(subdir, file))
|
|
220
|
-
|
|
221
|
-
if not os.listdir(subdir):
|
|
222
|
-
os.rmdir(subdir)
|
|
223
|
-
|
|
224
|
-
if strict:
|
|
225
|
-
raise RuntimeError(f"SFINCS model failed to run in {path}.")
|
|
226
|
-
else:
|
|
227
|
-
self.logger.error(f"SFINCS model failed to run in {path}.")
|
|
228
|
-
|
|
229
|
-
return process.returncode == 0
|
|
230
|
-
|
|
231
|
-
def run(self, scenario: Scenario):
|
|
232
|
-
"""Run the whole workflow (Preprocess, process and postprocess) for a given scenario."""
|
|
233
|
-
self._ensure_no_existing_forcings()
|
|
234
|
-
event = self.database.events.get(scenario.event)
|
|
235
|
-
|
|
236
|
-
if event.mode == Mode.risk:
|
|
237
|
-
self._run_risk_scenario(scenario=scenario)
|
|
238
|
-
else:
|
|
239
|
-
self._run_single_event(scenario=scenario, event=event)
|
|
240
|
-
|
|
241
|
-
def preprocess(self, scenario: Scenario, event: Event):
|
|
242
|
-
"""
|
|
243
|
-
Preprocess the SFINCS model for a given scenario.
|
|
244
|
-
|
|
245
|
-
Parameters
|
|
246
|
-
----------
|
|
247
|
-
scenario : Scenario
|
|
248
|
-
Scenario to preprocess.
|
|
249
|
-
event : Event, optional
|
|
250
|
-
Event to preprocess, by default None.
|
|
251
|
-
"""
|
|
252
|
-
# I dont like this due to it being state based and might break if people use functions in the wrong order
|
|
253
|
-
# Currently only used to pass projection + event stuff to WaterlevelModel
|
|
254
|
-
|
|
255
|
-
sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
|
|
256
|
-
sim_path.mkdir(parents=True, exist_ok=True)
|
|
257
|
-
template_path = (
|
|
258
|
-
self.database.static.get_overland_sfincs_model().get_model_root()
|
|
259
|
-
)
|
|
260
|
-
shutil.copytree(template_path, sim_path, dirs_exist_ok=True)
|
|
261
|
-
|
|
262
|
-
with SfincsAdapter(model_root=sim_path) as model:
|
|
263
|
-
model._load_scenario_objects(scenario, event)
|
|
264
|
-
is_risk = "Probabilistic " if model._event_set is not None else ""
|
|
265
|
-
self.logger.info(
|
|
266
|
-
f"Preprocessing Scenario `{model._scenario.name}`: {is_risk}Event `{model._event.name}`, Strategy `{model._strategy.name}`, Projection `{model._projection.name}`"
|
|
267
|
-
)
|
|
268
|
-
# Write template model to output path and set it as the model root so focings can write to it
|
|
269
|
-
model.set_timing(model._event.time)
|
|
270
|
-
model.write(sim_path)
|
|
271
|
-
|
|
272
|
-
# Event
|
|
273
|
-
for forcing in model._event.get_forcings():
|
|
274
|
-
model.add_forcing(forcing)
|
|
275
|
-
|
|
276
|
-
if self.rainfall is not None:
|
|
277
|
-
model.rainfall *= model._event.rainfall_multiplier
|
|
278
|
-
else:
|
|
279
|
-
model.logger.warning(
|
|
280
|
-
"Failed to add event rainfall multiplier, no rainfall forcing found in the model."
|
|
281
|
-
)
|
|
282
|
-
|
|
283
|
-
# Measures
|
|
284
|
-
for measure in model._strategy.get_hazard_measures():
|
|
285
|
-
model.add_measure(measure)
|
|
286
|
-
|
|
287
|
-
# Projection
|
|
288
|
-
model.add_projection(model._projection)
|
|
289
|
-
|
|
290
|
-
# Output
|
|
291
|
-
model.add_obs_points()
|
|
292
|
-
|
|
293
|
-
# Save any changes made to disk as well
|
|
294
|
-
model.write(path_out=sim_path)
|
|
295
|
-
|
|
296
|
-
def process(self, scenario: Scenario, event: Event):
|
|
297
|
-
if event.mode != Mode.single_event:
|
|
298
|
-
raise ValueError(f"Unsupported event mode: {event.mode}.")
|
|
299
|
-
|
|
300
|
-
sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
|
|
301
|
-
self.logger.info(f"Running SFINCS for single event Scenario `{scenario.name}`")
|
|
302
|
-
self.execute(sim_path)
|
|
303
|
-
|
|
304
|
-
def postprocess(self, scenario: Scenario, event: Event):
|
|
305
|
-
if event.mode != Mode.single_event:
|
|
306
|
-
raise ValueError(f"Unsupported event mode: {event.mode}.")
|
|
307
|
-
|
|
308
|
-
self.logger.info(f"Postprocessing SFINCS for Scenario `{scenario.name}`")
|
|
309
|
-
if not self.sfincs_completed(
|
|
310
|
-
self._get_simulation_path(scenario, sub_event=event)
|
|
311
|
-
):
|
|
312
|
-
raise RuntimeError("SFINCS was not run successfully!")
|
|
313
|
-
|
|
314
|
-
self.write_floodmap_geotiff(scenario)
|
|
315
|
-
self.plot_wl_obs(scenario)
|
|
316
|
-
self.write_water_level_map(scenario)
|
|
317
|
-
|
|
318
|
-
def set_timing(self, time: TimeFrame):
|
|
319
|
-
"""Set model reference times."""
|
|
320
|
-
self.logger.info(f"Setting timing for the SFINCS model: `{time}`")
|
|
321
|
-
self._model.set_config("tref", time.start_time)
|
|
322
|
-
self._model.set_config("tstart", time.start_time)
|
|
323
|
-
self._model.set_config("tstop", time.end_time)
|
|
324
|
-
|
|
325
|
-
def add_forcing(self, forcing: IForcing):
|
|
326
|
-
"""Get forcing data and add it."""
|
|
327
|
-
if forcing is None:
|
|
328
|
-
return
|
|
329
|
-
|
|
330
|
-
self.logger.info(
|
|
331
|
-
f"Adding {forcing.type.capitalize()}: {forcing.source.capitalize()}"
|
|
332
|
-
)
|
|
333
|
-
if isinstance(forcing, IRainfall):
|
|
334
|
-
self._add_forcing_rain(forcing)
|
|
335
|
-
elif isinstance(forcing, IWind):
|
|
336
|
-
self._add_forcing_wind(forcing)
|
|
337
|
-
elif isinstance(forcing, IDischarge):
|
|
338
|
-
self._add_forcing_discharge(forcing)
|
|
339
|
-
elif isinstance(forcing, IWaterlevel):
|
|
340
|
-
self._add_forcing_waterlevels(forcing)
|
|
341
|
-
else:
|
|
342
|
-
self.logger.warning(
|
|
343
|
-
f"Skipping unsupported forcing type {forcing.__class__.__name__}"
|
|
344
|
-
)
|
|
345
|
-
|
|
346
|
-
def add_measure(self, measure: Measure):
|
|
347
|
-
"""Get measure data and add it."""
|
|
348
|
-
self.logger.info(
|
|
349
|
-
f"Adding {measure.__class__.__name__.capitalize()} `{measure.name}`"
|
|
350
|
-
)
|
|
351
|
-
|
|
352
|
-
if isinstance(measure, FloodWall):
|
|
353
|
-
self._add_measure_floodwall(measure)
|
|
354
|
-
elif isinstance(measure, GreenInfrastructure):
|
|
355
|
-
self._add_measure_greeninfra(measure)
|
|
356
|
-
elif isinstance(measure, Pump):
|
|
357
|
-
self._add_measure_pump(measure)
|
|
358
|
-
else:
|
|
359
|
-
self.logger.warning(
|
|
360
|
-
f"Skipping unsupported measure type {measure.__class__.__name__}"
|
|
361
|
-
)
|
|
362
|
-
|
|
363
|
-
def add_projection(self, projection: Projection):
|
|
364
|
-
"""Get forcing data currently in the sfincs model and add the projection it."""
|
|
365
|
-
self.logger.info(f"Adding Projection `{projection.name}`")
|
|
366
|
-
phys_projection = projection.physical_projection
|
|
367
|
-
|
|
368
|
-
if phys_projection.sea_level_rise:
|
|
369
|
-
self.logger.info(
|
|
370
|
-
f"Adding projected sea level rise `{phys_projection.sea_level_rise}`"
|
|
371
|
-
)
|
|
372
|
-
if self.waterlevels is not None:
|
|
373
|
-
self.waterlevels += phys_projection.sea_level_rise.convert(
|
|
374
|
-
us.UnitTypesLength.meters
|
|
375
|
-
)
|
|
376
|
-
else:
|
|
377
|
-
self.logger.warning(
|
|
378
|
-
"Failed to add sea level rise, no water level forcing found in the model."
|
|
379
|
-
)
|
|
380
|
-
|
|
381
|
-
if phys_projection.rainfall_multiplier:
|
|
382
|
-
self.logger.info(
|
|
383
|
-
f"Adding projected rainfall multiplier `{phys_projection.rainfall_multiplier}`"
|
|
384
|
-
)
|
|
385
|
-
if self.rainfall is not None:
|
|
386
|
-
self.rainfall *= phys_projection.rainfall_multiplier
|
|
387
|
-
else:
|
|
388
|
-
self.logger.warning(
|
|
389
|
-
"Failed to add projected rainfall multiplier, no rainfall forcing found in the model."
|
|
390
|
-
)
|
|
391
|
-
|
|
392
|
-
### GETTERS ###
|
|
393
|
-
def get_model_time(self) -> TimeFrame:
|
|
394
|
-
t0, t1 = self._model.get_model_time()
|
|
395
|
-
return TimeFrame(start_time=t0, end_time=t1)
|
|
396
|
-
|
|
397
|
-
def get_model_root(self) -> Path:
|
|
398
|
-
return Path(self._model.root)
|
|
399
|
-
|
|
400
|
-
def get_mask(self):
|
|
401
|
-
"""Get mask with inactive cells from model."""
|
|
402
|
-
mask = self._model.grid["msk"]
|
|
403
|
-
return mask
|
|
404
|
-
|
|
405
|
-
def get_bedlevel(self):
|
|
406
|
-
"""Get bed level from model."""
|
|
407
|
-
self._model.read_results()
|
|
408
|
-
zb = self._model.results["zb"]
|
|
409
|
-
return zb
|
|
410
|
-
|
|
411
|
-
def get_model_boundary(self) -> gpd.GeoDataFrame:
|
|
412
|
-
"""Get bounding box from model."""
|
|
413
|
-
return self._model.region
|
|
414
|
-
|
|
415
|
-
def get_model_grid(self) -> QuadtreeGrid:
|
|
416
|
-
"""Get grid from model.
|
|
417
|
-
|
|
418
|
-
Returns
|
|
419
|
-
-------
|
|
420
|
-
QuadtreeGrid
|
|
421
|
-
QuadtreeGrid with the model grid
|
|
422
|
-
"""
|
|
423
|
-
return self._model.quadtree
|
|
424
|
-
|
|
425
|
-
# Forcing properties
|
|
426
|
-
@property
|
|
427
|
-
def waterlevels(self) -> xr.Dataset | xr.DataArray | None:
|
|
428
|
-
return self._model.forcing.get("bzs")
|
|
429
|
-
|
|
430
|
-
@waterlevels.setter
|
|
431
|
-
def waterlevels(self, waterlevels: xr.Dataset | xr.DataArray):
|
|
432
|
-
if self.waterlevels is None or self.waterlevels.size == 0:
|
|
433
|
-
raise ValueError("No water level forcing found in the model.")
|
|
434
|
-
self._model.forcing["bzs"] = waterlevels
|
|
435
|
-
|
|
436
|
-
@property
|
|
437
|
-
def discharge(self) -> xr.Dataset | xr.DataArray | None:
|
|
438
|
-
return self._model.forcing.get("dis")
|
|
439
|
-
|
|
440
|
-
@discharge.setter
|
|
441
|
-
def discharge(self, discharge: xr.Dataset | xr.DataArray):
|
|
442
|
-
if self.discharge is None or self.discharge.size == 0:
|
|
443
|
-
raise ValueError("No discharge forcing found in the model.")
|
|
444
|
-
self._model.forcing["dis"] = discharge
|
|
445
|
-
|
|
446
|
-
@property
|
|
447
|
-
def rainfall(self) -> xr.Dataset | xr.DataArray | None:
|
|
448
|
-
names = ["precip", "precip_2d"]
|
|
449
|
-
in_model = [name for name in names if name in self._model.forcing]
|
|
450
|
-
if len(in_model) == 0:
|
|
451
|
-
return None
|
|
452
|
-
elif len(in_model) == 1:
|
|
453
|
-
return self._model.forcing[in_model[0]]
|
|
454
|
-
else:
|
|
455
|
-
raise ValueError("Multiple rainfall forcings found in the model.")
|
|
456
|
-
|
|
457
|
-
@rainfall.setter
|
|
458
|
-
def rainfall(self, rainfall: xr.Dataset | xr.DataArray):
|
|
459
|
-
if self.rainfall is None or self.rainfall.size == 0:
|
|
460
|
-
raise ValueError("No rainfall forcing found in the model.")
|
|
461
|
-
elif "precip_2d" in self._model.forcing:
|
|
462
|
-
self._model.forcing["precip_2d"] = rainfall
|
|
463
|
-
elif "precip" in self._model.forcing:
|
|
464
|
-
self._model.forcing["precip"] = rainfall
|
|
465
|
-
else:
|
|
466
|
-
raise ValueError("Unsupported rainfall forcing in the model.")
|
|
467
|
-
|
|
468
|
-
@property
|
|
469
|
-
def wind(self) -> xr.Dataset | xr.DataArray | None:
|
|
470
|
-
wind_names = ["wnd", "wind_2d", "wind", "wind10_u", "wind10_v"]
|
|
471
|
-
wind_in_model = [name for name in wind_names if name in self._model.forcing]
|
|
472
|
-
if len(wind_in_model) == 0:
|
|
473
|
-
return None
|
|
474
|
-
elif len(wind_in_model) == 1:
|
|
475
|
-
return self._model.forcing[wind_in_model[0]]
|
|
476
|
-
elif len(wind_in_model) == 2:
|
|
477
|
-
if not ("wind10_u" in wind_in_model and "wind10_v" in wind_in_model):
|
|
478
|
-
raise ValueError(
|
|
479
|
-
"Multiple wind forcings found in the model. Both should be wind10_u and wind10_v or a singular wind forcing."
|
|
480
|
-
)
|
|
481
|
-
return xr.Dataset(
|
|
482
|
-
{
|
|
483
|
-
"wind10_u": self._model.forcing["wind10_u"],
|
|
484
|
-
"wind10_v": self._model.forcing["wind10_v"],
|
|
485
|
-
}
|
|
486
|
-
)
|
|
487
|
-
else:
|
|
488
|
-
raise ValueError("Multiple wind forcings found in the model.")
|
|
489
|
-
|
|
490
|
-
@wind.setter
|
|
491
|
-
def wind(self, wind: xr.Dataset | xr.DataArray):
|
|
492
|
-
if (not self.wind) or (self.wind.size == 0):
|
|
493
|
-
raise ValueError("No wind forcing found in the model.")
|
|
494
|
-
|
|
495
|
-
elif "wind_2d" in self._model.forcing:
|
|
496
|
-
self._model.forcing["wind_2d"] = wind
|
|
497
|
-
elif "wind" in self._model.forcing:
|
|
498
|
-
self._model.forcing["wind"] = wind
|
|
499
|
-
elif "wnd" in self._model.forcing:
|
|
500
|
-
self._model.forcing["wnd"] = wind
|
|
501
|
-
elif "wind10_u" in self._model.forcing and "wind10_v" in self._model.forcing:
|
|
502
|
-
self._model.forcing["wind10_u"] = wind["wind10_u"]
|
|
503
|
-
self._model.forcing["wind10_v"] = wind["wind10_v"]
|
|
504
|
-
else:
|
|
505
|
-
raise ValueError("Unsupported wind forcing in the model.")
|
|
506
|
-
|
|
507
|
-
### OUTPUT ###
|
|
508
|
-
def run_completed(self, scenario: Scenario) -> bool:
|
|
509
|
-
"""Check if the entire model run has been completed successfully by checking if all flood maps exist that are created in postprocess().
|
|
510
|
-
|
|
511
|
-
Returns
|
|
512
|
-
-------
|
|
513
|
-
bool : True if all flood maps exist, False otherwise.
|
|
514
|
-
|
|
515
|
-
"""
|
|
516
|
-
any_floodmap = len(self._get_flood_map_paths(scenario)) > 0
|
|
517
|
-
all_exist = all(
|
|
518
|
-
floodmap.exists() for floodmap in self._get_flood_map_paths(scenario)
|
|
519
|
-
)
|
|
520
|
-
return any_floodmap and all_exist
|
|
521
|
-
|
|
522
|
-
def sfincs_completed(self, sim_path: Path) -> bool:
|
|
523
|
-
"""Check if the sfincs executable has been run successfully by checking if the output files exist in the simulation folder.
|
|
524
|
-
|
|
525
|
-
Parameters
|
|
526
|
-
----------
|
|
527
|
-
sim_path : Path
|
|
528
|
-
Path to the simulation folder to check.
|
|
529
|
-
|
|
530
|
-
Returns
|
|
531
|
-
-------
|
|
532
|
-
bool: True if the sfincs executable has been run successfully, False otherwise.
|
|
533
|
-
|
|
534
|
-
"""
|
|
535
|
-
SFINCS_OUTPUT_FILES = ["sfincs_map.nc"]
|
|
536
|
-
|
|
537
|
-
if self.settings.obs_point is not None:
|
|
538
|
-
SFINCS_OUTPUT_FILES.append("sfincs_his.nc")
|
|
539
|
-
|
|
540
|
-
to_check = [Path(sim_path) / file for file in SFINCS_OUTPUT_FILES]
|
|
541
|
-
return all(output.exists() for output in to_check)
|
|
542
|
-
|
|
543
|
-
def write_floodmap_geotiff(
|
|
544
|
-
self, scenario: Scenario, sim_path: Optional[Path] = None
|
|
545
|
-
):
|
|
546
|
-
"""
|
|
547
|
-
Read simulation results from SFINCS and saves a geotiff with the maximum water levels.
|
|
548
|
-
|
|
549
|
-
Produced floodmap is in the units defined in the sfincs config settings.
|
|
550
|
-
|
|
551
|
-
Parameters
|
|
552
|
-
----------
|
|
553
|
-
scenario : Scenario
|
|
554
|
-
Scenario for which to create the floodmap.
|
|
555
|
-
sim_path : Path, optional
|
|
556
|
-
Path to the simulation folder, by default None.
|
|
557
|
-
"""
|
|
558
|
-
self.logger.info("Writing flood maps to geotiff")
|
|
559
|
-
results_path = self._get_result_path(scenario)
|
|
560
|
-
sim_path = sim_path or self._get_simulation_path(scenario)
|
|
561
|
-
demfile = self.database.static_path / "dem" / self.settings.dem.filename
|
|
562
|
-
|
|
563
|
-
with SfincsAdapter(model_root=sim_path) as model:
|
|
564
|
-
zsmax = model._get_zsmax()
|
|
565
|
-
|
|
566
|
-
dem = model._model.data_catalog.get_rasterdataset(demfile)
|
|
567
|
-
|
|
568
|
-
# convert dem from dem units to floodmap units
|
|
569
|
-
dem_conversion = us.UnitfulLength(
|
|
570
|
-
value=1.0, units=self.settings.dem.units
|
|
571
|
-
).convert(self.settings.config.floodmap_units)
|
|
572
|
-
|
|
573
|
-
floodmap_fn = results_path / f"FloodMap_{scenario.name}.tif"
|
|
574
|
-
|
|
575
|
-
# convert zsmax from meters to floodmap units
|
|
576
|
-
floodmap_conversion = us.UnitfulLength(
|
|
577
|
-
value=1.0, units=us.UnitTypesLength.meters
|
|
578
|
-
).convert(self.settings.config.floodmap_units)
|
|
579
|
-
|
|
580
|
-
utils.downscale_floodmap(
|
|
581
|
-
zsmax=floodmap_conversion * zsmax,
|
|
582
|
-
dep=dem_conversion * dem,
|
|
583
|
-
hmin=0.01,
|
|
584
|
-
floodmap_fn=str(floodmap_fn),
|
|
585
|
-
)
|
|
586
|
-
|
|
587
|
-
def write_water_level_map(
|
|
588
|
-
self, scenario: Scenario, sim_path: Optional[Path] = None
|
|
589
|
-
):
|
|
590
|
-
"""Read simulation results from SFINCS and saves a netcdf with the maximum water levels."""
|
|
591
|
-
self.logger.info("Writing water level map to netcdf")
|
|
592
|
-
results_path = self._get_result_path(scenario)
|
|
593
|
-
sim_path = sim_path or self._get_simulation_path(scenario)
|
|
594
|
-
|
|
595
|
-
with SfincsAdapter(model_root=sim_path) as model:
|
|
596
|
-
zsmax = model._get_zsmax()
|
|
597
|
-
zsmax.to_netcdf(results_path / "max_water_level_map.nc")
|
|
598
|
-
|
|
599
|
-
def plot_wl_obs(
|
|
600
|
-
self,
|
|
601
|
-
scenario: Scenario,
|
|
602
|
-
):
|
|
603
|
-
"""Plot water levels at SFINCS observation points as html.
|
|
604
|
-
|
|
605
|
-
Only for single event scenarios, or for a specific simulation path containing the written and processed sfincs model.
|
|
606
|
-
"""
|
|
607
|
-
if not self.settings.obs_point:
|
|
608
|
-
self.logger.warning("No observation points provided in config.")
|
|
609
|
-
return
|
|
610
|
-
|
|
611
|
-
self.logger.info("Plotting water levels at observation points")
|
|
612
|
-
sim_path = self._get_simulation_path(scenario)
|
|
613
|
-
|
|
614
|
-
# read SFINCS model
|
|
615
|
-
with SfincsAdapter(model_root=sim_path) as model:
|
|
616
|
-
df, gdf = model._get_zs_points()
|
|
617
|
-
|
|
618
|
-
gui_units = us.UnitTypesLength(
|
|
619
|
-
self.database.site.gui.units.default_length_units
|
|
620
|
-
)
|
|
621
|
-
conversion_factor = us.UnitfulLength(
|
|
622
|
-
value=1.0, units=us.UnitTypesLength("meters")
|
|
623
|
-
).convert(gui_units)
|
|
624
|
-
|
|
625
|
-
overland_reference_height = self.settings.water_level.get_datum(
|
|
626
|
-
self.settings.config.overland_model.reference
|
|
627
|
-
).height.convert(gui_units)
|
|
628
|
-
|
|
629
|
-
for ii, col in enumerate(df.columns):
|
|
630
|
-
# Plot actual thing
|
|
631
|
-
fig = px.line(
|
|
632
|
-
df[col] * conversion_factor
|
|
633
|
-
+ overland_reference_height # convert to reference datum for plotting
|
|
634
|
-
)
|
|
635
|
-
|
|
636
|
-
fig.add_hline(
|
|
637
|
-
y=0,
|
|
638
|
-
line_dash="dash",
|
|
639
|
-
line_color="#000000",
|
|
640
|
-
annotation_text=self.settings.water_level.reference,
|
|
641
|
-
annotation_position="bottom right",
|
|
642
|
-
)
|
|
643
|
-
|
|
644
|
-
# plot reference water levels
|
|
645
|
-
for wl_ref in self.settings.water_level.datums:
|
|
646
|
-
if (
|
|
647
|
-
wl_ref.name == self.settings.config.overland_model.reference
|
|
648
|
-
or wl_ref.name in self.database.site.gui.plotting.excluded_datums
|
|
649
|
-
):
|
|
650
|
-
continue
|
|
651
|
-
fig.add_hline(
|
|
652
|
-
y=wl_ref.height.convert(gui_units),
|
|
653
|
-
line_dash="dash",
|
|
654
|
-
line_color="#3ec97c",
|
|
655
|
-
annotation_text=wl_ref.name,
|
|
656
|
-
annotation_position="bottom right",
|
|
657
|
-
)
|
|
658
|
-
|
|
659
|
-
fig.update_layout(
|
|
660
|
-
autosize=False,
|
|
661
|
-
height=100 * 2,
|
|
662
|
-
width=280 * 2,
|
|
663
|
-
margin={"r": 0, "l": 0, "b": 0, "t": 20},
|
|
664
|
-
font={"size": 10, "color": "black", "family": "Arial"},
|
|
665
|
-
title={
|
|
666
|
-
"text": gdf.iloc[ii]["Description"],
|
|
667
|
-
"font": {"size": 12, "color": "black", "family": "Arial"},
|
|
668
|
-
"x": 0.5,
|
|
669
|
-
"xanchor": "center",
|
|
670
|
-
},
|
|
671
|
-
xaxis_title="Time",
|
|
672
|
-
yaxis_title=f"Water level [{gui_units.value}] above {self.settings.water_level.reference}",
|
|
673
|
-
yaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
|
|
674
|
-
xaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
|
|
675
|
-
showlegend=False,
|
|
676
|
-
)
|
|
677
|
-
|
|
678
|
-
event = self.database.events.get(scenario.event)
|
|
679
|
-
if self.settings.obs_point[ii].name == self.settings.tide_gauge.name:
|
|
680
|
-
self._add_tide_gauge_plot(fig, event, units=gui_units)
|
|
681
|
-
|
|
682
|
-
# write html to results folder
|
|
683
|
-
station_name = gdf.iloc[ii]["Name"]
|
|
684
|
-
results_path = self._get_result_path(scenario)
|
|
685
|
-
fig.write_html(results_path / f"{station_name}_timeseries.html")
|
|
686
|
-
|
|
687
|
-
def add_obs_points(self):
|
|
688
|
-
"""Add observation points provided in the site toml to SFINCS model."""
|
|
689
|
-
if self.settings.obs_point is None:
|
|
690
|
-
return
|
|
691
|
-
self.logger.info("Adding observation points to the overland flood model")
|
|
692
|
-
|
|
693
|
-
obs_points = self.settings.obs_point
|
|
694
|
-
names = []
|
|
695
|
-
lat = []
|
|
696
|
-
lon = []
|
|
697
|
-
for pt in obs_points:
|
|
698
|
-
names.append(pt.name)
|
|
699
|
-
lat.append(pt.lat)
|
|
700
|
-
lon.append(pt.lon)
|
|
701
|
-
|
|
702
|
-
# create GeoDataFrame from obs_points in site file
|
|
703
|
-
df = pd.DataFrame({"name": names})
|
|
704
|
-
gdf = gpd.GeoDataFrame(
|
|
705
|
-
df, geometry=gpd.points_from_xy(lon, lat), crs="EPSG:4326"
|
|
706
|
-
)
|
|
707
|
-
|
|
708
|
-
# Add locations to SFINCS file
|
|
709
|
-
self._model.setup_observation_points(locations=gdf, merge=False)
|
|
710
|
-
|
|
711
|
-
def get_wl_df_from_offshore_his_results(self) -> pd.DataFrame:
|
|
712
|
-
"""Create a pd.Dataframe with waterlevels from the offshore model at the bnd locations of the overland model.
|
|
713
|
-
|
|
714
|
-
Returns
|
|
715
|
-
-------
|
|
716
|
-
wl_df: pd.DataFrame
|
|
717
|
-
time series of water level.
|
|
718
|
-
"""
|
|
719
|
-
self.logger.info("Reading water levels from offshore model")
|
|
720
|
-
ds_his = utils.read_sfincs_his_results(
|
|
721
|
-
Path(self._model.root) / "sfincs_his.nc",
|
|
722
|
-
crs=self._model.crs.to_epsg(),
|
|
723
|
-
)
|
|
724
|
-
wl_df = pd.DataFrame(
|
|
725
|
-
data=ds_his.point_zs.to_numpy(),
|
|
726
|
-
index=ds_his.time.to_numpy(),
|
|
727
|
-
columns=np.arange(1, ds_his.point_zs.to_numpy().shape[1] + 1, 1),
|
|
728
|
-
)
|
|
729
|
-
return wl_df
|
|
730
|
-
|
|
731
|
-
## RISK EVENTS ##
|
|
732
|
-
def calculate_rp_floodmaps(self, scenario: Scenario):
|
|
733
|
-
"""Calculate flood risk maps from a set of (currently) SFINCS water level outputs using linear interpolation.
|
|
734
|
-
|
|
735
|
-
It would be nice to make it more widely applicable and move the loading of the SFINCS results to self.postprocess_sfincs().
|
|
736
|
-
|
|
737
|
-
generates return period water level maps in netcdf format to be used by FIAT
|
|
738
|
-
generates return period water depth maps in geotiff format as product for users
|
|
739
|
-
|
|
740
|
-
TODO: make this robust and more efficient for bigger datasets.
|
|
741
|
-
"""
|
|
742
|
-
event: EventSet = self.database.events.get(scenario.event)
|
|
743
|
-
if not isinstance(event, EventSet):
|
|
744
|
-
raise ValueError("This function is only available for risk scenarios.")
|
|
745
|
-
|
|
746
|
-
result_path = self._get_result_path(scenario)
|
|
747
|
-
sim_paths = [
|
|
748
|
-
self._get_simulation_path(scenario, sub_event=sub_event)
|
|
749
|
-
for sub_event in event._events
|
|
750
|
-
]
|
|
751
|
-
|
|
752
|
-
phys_proj = self.database.projections.get(
|
|
753
|
-
scenario.projection
|
|
754
|
-
).physical_projection
|
|
755
|
-
|
|
756
|
-
floodmap_rp = self.database.site.fiat.risk.return_periods
|
|
757
|
-
frequencies = [sub_event.frequency for sub_event in event.sub_events]
|
|
758
|
-
|
|
759
|
-
# adjust storm frequency for hurricane events
|
|
760
|
-
if not math.isclose(phys_proj.storm_frequency_increase, 0, abs_tol=1e-9):
|
|
761
|
-
storminess_increase = phys_proj.storm_frequency_increase / 100.0
|
|
762
|
-
for ii, event in enumerate(event._events):
|
|
763
|
-
if event.template == Template.Hurricane:
|
|
764
|
-
frequencies[ii] = frequencies[ii] * (1 + storminess_increase)
|
|
765
|
-
|
|
766
|
-
with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
|
|
767
|
-
# read mask and bed level
|
|
768
|
-
mask = dummymodel.get_mask().stack(z=("x", "y"))
|
|
769
|
-
zb = dummymodel.get_bedlevel().stack(z=("x", "y")).to_numpy()
|
|
770
|
-
|
|
771
|
-
zs_maps = []
|
|
772
|
-
for simulation_path in sim_paths:
|
|
773
|
-
# read zsmax data from overland sfincs model
|
|
774
|
-
with SfincsAdapter(model_root=simulation_path) as sim:
|
|
775
|
-
zsmax = sim._get_zsmax().load()
|
|
776
|
-
zs_stacked = zsmax.stack(z=("x", "y"))
|
|
777
|
-
zs_maps.append(zs_stacked)
|
|
778
|
-
|
|
779
|
-
# Create RP flood maps
|
|
780
|
-
|
|
781
|
-
# 1a: make a table of all water levels and associated frequencies
|
|
782
|
-
zs = xr.concat(zs_maps, pd.Index(frequencies, name="frequency"))
|
|
783
|
-
# Get the indices of columns with all NaN values
|
|
784
|
-
nan_cells = np.where(np.all(np.isnan(zs), axis=0))[0]
|
|
785
|
-
# fill nan values with minimum bed levels in each grid cell, np.interp cannot ignore nan values
|
|
786
|
-
zs = xr.where(np.isnan(zs), np.tile(zb, (zs.shape[0], 1)), zs)
|
|
787
|
-
# Get table of frequencies
|
|
788
|
-
freq = np.tile(frequencies, (zs.shape[1], 1)).transpose()
|
|
789
|
-
|
|
790
|
-
# 1b: sort water levels in descending order and include the frequencies in the sorting process
|
|
791
|
-
# (i.e. each h-value should be linked to the same p-values as in step 1a)
|
|
792
|
-
sort_index = zs.argsort(axis=0)
|
|
793
|
-
sorted_prob = np.flipud(np.take_along_axis(freq, sort_index, axis=0))
|
|
794
|
-
sorted_zs = np.flipud(np.take_along_axis(zs.values, sort_index, axis=0))
|
|
795
|
-
|
|
796
|
-
# 1c: Compute exceedance probabilities of water depths
|
|
797
|
-
# Method: accumulate probabilities from top to bottom
|
|
798
|
-
prob_exceed = np.cumsum(sorted_prob, axis=0)
|
|
799
|
-
|
|
800
|
-
# 1d: Compute return periods of water depths
|
|
801
|
-
# Method: simply take the inverse of the exceedance probability (1/Pex)
|
|
802
|
-
rp_zs = 1.0 / prob_exceed
|
|
803
|
-
|
|
804
|
-
# For each return period (T) of interest do the following:
|
|
805
|
-
# For each grid cell do the following:
|
|
806
|
-
# Use the table from step [1d] as a “lookup-table” to derive the T-year water depth. Use a 1-d interpolation technique:
|
|
807
|
-
# h(T) = interp1 (log(T*), h*, log(T))
|
|
808
|
-
# in which t* and h* are the values from the table and T is the return period (T) of interest
|
|
809
|
-
# The resulting T-year water depths for all grids combined form the T-year hazard map
|
|
810
|
-
rp_da = xr.DataArray(rp_zs, dims=zs.dims)
|
|
811
|
-
|
|
812
|
-
# no_data_value = -999 # in SFINCS
|
|
813
|
-
# sorted_zs = xr.where(sorted_zs == no_data_value, np.nan, sorted_zs)
|
|
814
|
-
|
|
815
|
-
valid_cells = np.where(mask == 1)[
|
|
816
|
-
0
|
|
817
|
-
] # only loop over cells where model is not masked
|
|
818
|
-
h = matlib.repmat(
|
|
819
|
-
np.copy(zb), len(floodmap_rp), 1
|
|
820
|
-
) # if not flooded (i.e. not in valid_cells) revert to bed_level, read from SFINCS results so it is the minimum bed level in a grid cell
|
|
821
|
-
|
|
822
|
-
self.logger.info("Calculating flood risk maps, this may take some time")
|
|
823
|
-
for jj in valid_cells: # looping over all non-masked cells.
|
|
824
|
-
# linear interpolation for all return periods to evaluate
|
|
825
|
-
h[:, jj] = np.interp(
|
|
826
|
-
np.log10(floodmap_rp),
|
|
827
|
-
np.log10(rp_da[::-1, jj]),
|
|
828
|
-
sorted_zs[::-1, jj],
|
|
829
|
-
left=0,
|
|
830
|
-
)
|
|
831
|
-
|
|
832
|
-
# Re-fill locations that had nan water level for all simulations with nans
|
|
833
|
-
h[:, nan_cells] = np.full(h[:, nan_cells].shape, np.nan)
|
|
834
|
-
|
|
835
|
-
# If a cell has the same water-level as the bed elevation it should be dry (turn to nan)
|
|
836
|
-
diff = h - np.tile(zb, (h.shape[0], 1))
|
|
837
|
-
dry = (
|
|
838
|
-
diff < 10e-10
|
|
839
|
-
) # here we use a small number instead of zero for rounding errors
|
|
840
|
-
h[dry] = np.nan
|
|
841
|
-
|
|
842
|
-
for ii, rp in enumerate(floodmap_rp):
|
|
843
|
-
# #create single nc
|
|
844
|
-
zs_rp_single = xr.DataArray(
|
|
845
|
-
data=h[ii, :], coords={"z": zs["z"]}, attrs={"units": "meters"}
|
|
846
|
-
).unstack()
|
|
847
|
-
zs_rp_single = zs_rp_single.rio.write_crs(
|
|
848
|
-
zsmax.raster.crs
|
|
849
|
-
) # , inplace=True)
|
|
850
|
-
zs_rp_single = zs_rp_single.to_dataset(name="risk_map")
|
|
851
|
-
fn_rp = result_path / f"RP_{rp:04d}_maps.nc"
|
|
852
|
-
zs_rp_single.to_netcdf(fn_rp)
|
|
853
|
-
|
|
854
|
-
# write geotiff
|
|
855
|
-
# dem file for high resolution flood depth map
|
|
856
|
-
demfile = self.database.static_path / "dem" / self.settings.dem.filename
|
|
857
|
-
|
|
858
|
-
# writing the geotiff to the scenario results folder
|
|
859
|
-
with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
|
|
860
|
-
dem = dummymodel._model.data_catalog.get_rasterdataset(demfile)
|
|
861
|
-
zsmax = zs_rp_single.to_array().squeeze().transpose()
|
|
862
|
-
floodmap_fn = fn_rp.with_suffix(".tif")
|
|
863
|
-
|
|
864
|
-
# convert dem from dem units to floodmap units
|
|
865
|
-
dem_conversion = us.UnitfulLength(
|
|
866
|
-
value=1.0, units=self.settings.dem.units
|
|
867
|
-
).convert(self.settings.config.floodmap_units)
|
|
868
|
-
|
|
869
|
-
# convert zsmax from meters to floodmap units
|
|
870
|
-
floodmap_conversion = us.UnitfulLength(
|
|
871
|
-
value=1.0, units=us.UnitTypesLength.meters
|
|
872
|
-
).convert(self.settings.config.floodmap_units)
|
|
873
|
-
|
|
874
|
-
utils.downscale_floodmap(
|
|
875
|
-
zsmax=floodmap_conversion * zsmax,
|
|
876
|
-
dep=dem_conversion * dem,
|
|
877
|
-
hmin=0.01,
|
|
878
|
-
floodmap_fn=str(floodmap_fn),
|
|
879
|
-
)
|
|
880
|
-
|
|
881
|
-
######################################
|
|
882
|
-
### PRIVATE - use at your own risk ###
|
|
883
|
-
######################################
|
|
884
|
-
def _run_single_event(self, scenario: Scenario, event: Event):
|
|
885
|
-
self.preprocess(scenario, event)
|
|
886
|
-
self.process(scenario, event)
|
|
887
|
-
self.postprocess(scenario, event)
|
|
888
|
-
shutil.rmtree(
|
|
889
|
-
self._get_simulation_path(scenario, sub_event=event), ignore_errors=True
|
|
890
|
-
)
|
|
891
|
-
|
|
892
|
-
def _run_risk_scenario(self, scenario: Scenario):
|
|
893
|
-
"""Run the whole workflow for a risk scenario.
|
|
894
|
-
|
|
895
|
-
This means preprocessing and running the SFINCS model for each event in the event set, and then postprocessing the results.
|
|
896
|
-
"""
|
|
897
|
-
event_set: EventSet = self.database.events.get(scenario.event)
|
|
898
|
-
total = len(event_set._events)
|
|
899
|
-
|
|
900
|
-
for i, sub_event in enumerate(event_set._events):
|
|
901
|
-
sim_path = self._get_simulation_path(scenario, sub_event=sub_event)
|
|
902
|
-
|
|
903
|
-
# Preprocess
|
|
904
|
-
self.preprocess(scenario, event=sub_event)
|
|
905
|
-
self.logger.info(
|
|
906
|
-
f"Running SFINCS for Eventset Scenario `{scenario.name}`, Event `{sub_event.name}` ({i + 1}/{total})"
|
|
907
|
-
)
|
|
908
|
-
self.execute(sim_path)
|
|
909
|
-
|
|
910
|
-
# Postprocess
|
|
911
|
-
self.calculate_rp_floodmaps(scenario)
|
|
912
|
-
|
|
913
|
-
# Cleanup
|
|
914
|
-
for i, sub_event in enumerate(event_set._events):
|
|
915
|
-
shutil.rmtree(
|
|
916
|
-
self._get_simulation_path(scenario, sub_event=sub_event),
|
|
917
|
-
ignore_errors=True,
|
|
918
|
-
)
|
|
919
|
-
|
|
920
|
-
def _ensure_no_existing_forcings(self):
|
|
921
|
-
"""Check for existing forcings in the model and raise an error if any are found."""
|
|
922
|
-
all_forcings = {
|
|
923
|
-
"waterlevel": self.waterlevels,
|
|
924
|
-
"rainfall": self.rainfall,
|
|
925
|
-
"wind": self.wind,
|
|
926
|
-
"discharge": self.discharge,
|
|
927
|
-
}
|
|
928
|
-
contains_forcings = ", ".join(
|
|
929
|
-
[
|
|
930
|
-
f"{name.capitalize()}"
|
|
931
|
-
for name, forcing in all_forcings.items()
|
|
932
|
-
if forcing is not None
|
|
933
|
-
]
|
|
934
|
-
)
|
|
935
|
-
if contains_forcings:
|
|
936
|
-
raise ValueError(
|
|
937
|
-
f"{contains_forcings} forcing(s) should not exists in the SFINCS template model. Remove it from the SFINCS model located at: {self.get_model_root()}. For more information on SFINCS and its input files, see the SFINCS documentation at: `https://sfincs.readthedocs.io/en/latest/input.html`"
|
|
938
|
-
)
|
|
939
|
-
|
|
940
|
-
### FORCING ###
|
|
941
|
-
def _add_forcing_wind(
|
|
942
|
-
self,
|
|
943
|
-
wind: IWind,
|
|
944
|
-
):
|
|
945
|
-
"""Add spatially constant wind forcing to sfincs model. Use timeseries or a constant magnitude and direction.
|
|
946
|
-
|
|
947
|
-
Parameters
|
|
948
|
-
----------
|
|
949
|
-
timeseries : Union[str, os.PathLike], optional
|
|
950
|
-
path to file of timeseries file (.csv) which has three columns: time, magnitude and direction, by default None
|
|
951
|
-
const_mag : float, optional
|
|
952
|
-
magnitude of time-invariant wind forcing [m/s], by default None
|
|
953
|
-
const_dir : float, optional
|
|
954
|
-
direction of time-invariant wind forcing [deg], by default None
|
|
955
|
-
"""
|
|
956
|
-
time_frame = self.get_model_time()
|
|
957
|
-
if isinstance(wind, WindConstant):
|
|
958
|
-
# HydroMT function: set wind forcing from constant magnitude and direction
|
|
959
|
-
self._model.setup_wind_forcing(
|
|
960
|
-
timeseries=None,
|
|
961
|
-
magnitude=wind.speed.convert(us.UnitTypesVelocity.mps),
|
|
962
|
-
direction=wind.direction.value,
|
|
963
|
-
)
|
|
964
|
-
elif isinstance(wind, WindSynthetic):
|
|
965
|
-
df = wind.to_dataframe(time_frame=time_frame)
|
|
966
|
-
df["mag"] *= us.UnitfulVelocity(
|
|
967
|
-
value=1.0, units=self.units.default_velocity_units
|
|
968
|
-
).convert(us.UnitTypesVelocity.mps)
|
|
969
|
-
|
|
970
|
-
tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
|
|
971
|
-
df.to_csv(tmp_path)
|
|
972
|
-
|
|
973
|
-
# HydroMT function: set wind forcing from timeseries
|
|
974
|
-
self._model.setup_wind_forcing(
|
|
975
|
-
timeseries=tmp_path, magnitude=None, direction=None
|
|
976
|
-
)
|
|
977
|
-
elif isinstance(wind, WindMeteo):
|
|
978
|
-
ds = MeteoHandler().read(time_frame)
|
|
979
|
-
# data already in metric units so no conversion needed
|
|
980
|
-
|
|
981
|
-
# HydroMT function: set wind forcing from grid
|
|
982
|
-
self._model.setup_wind_forcing_from_grid(wind=ds)
|
|
983
|
-
elif isinstance(wind, WindTrack):
|
|
984
|
-
# data already in metric units so no conversion needed
|
|
985
|
-
self._add_forcing_spw(wind)
|
|
986
|
-
elif isinstance(wind, WindNetCDF):
|
|
987
|
-
ds = wind.read()
|
|
988
|
-
# time slicing to time_frame not needed, hydromt-sfincs handles it
|
|
989
|
-
conversion = us.UnitfulVelocity(value=1.0, units=wind.units).convert(
|
|
990
|
-
us.UnitTypesVelocity.mps
|
|
991
|
-
)
|
|
992
|
-
ds *= conversion
|
|
993
|
-
self._model.setup_wind_forcing_from_grid(wind=ds)
|
|
994
|
-
elif isinstance(wind, WindCSV):
|
|
995
|
-
df = wind.to_dataframe(time_frame=time_frame)
|
|
996
|
-
|
|
997
|
-
conversion = us.UnitfulVelocity(
|
|
998
|
-
value=1.0, units=wind.units["speed"]
|
|
999
|
-
).convert(us.UnitTypesVelocity.mps)
|
|
1000
|
-
df *= conversion
|
|
1001
|
-
|
|
1002
|
-
tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
|
|
1003
|
-
df.to_csv(tmp_path)
|
|
1004
|
-
|
|
1005
|
-
# HydroMT function: set wind forcing from timeseries
|
|
1006
|
-
self._model.setup_wind_forcing(
|
|
1007
|
-
timeseries=tmp_path,
|
|
1008
|
-
magnitude=None,
|
|
1009
|
-
direction=None,
|
|
1010
|
-
)
|
|
1011
|
-
else:
|
|
1012
|
-
self.logger.warning(
|
|
1013
|
-
f"Unsupported wind forcing type: {wind.__class__.__name__}"
|
|
1014
|
-
)
|
|
1015
|
-
return
|
|
1016
|
-
|
|
1017
|
-
def _add_forcing_rain(self, rainfall: IRainfall):
|
|
1018
|
-
"""Add spatially constant rain forcing to sfincs model. Use timeseries or a constant magnitude.
|
|
1019
|
-
|
|
1020
|
-
Parameters
|
|
1021
|
-
----------
|
|
1022
|
-
timeseries : Union[str, os.PathLike], optional
|
|
1023
|
-
path to file of timeseries file (.csv) which has two columns: time and precipitation, by default None
|
|
1024
|
-
const_intensity : float, optional
|
|
1025
|
-
time-invariant precipitation intensity [mm_hr], by default None
|
|
1026
|
-
"""
|
|
1027
|
-
time_frame = self.get_model_time()
|
|
1028
|
-
if isinstance(rainfall, RainfallConstant):
|
|
1029
|
-
self._model.setup_precip_forcing(
|
|
1030
|
-
timeseries=None,
|
|
1031
|
-
magnitude=rainfall.intensity.convert(us.UnitTypesIntensity.mm_hr),
|
|
1032
|
-
)
|
|
1033
|
-
elif isinstance(rainfall, RainfallCSV):
|
|
1034
|
-
df = rainfall.to_dataframe(time_frame=time_frame)
|
|
1035
|
-
conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
|
|
1036
|
-
us.UnitTypesIntensity.mm_hr
|
|
1037
|
-
)
|
|
1038
|
-
df *= conversion
|
|
1039
|
-
|
|
1040
|
-
tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
|
|
1041
|
-
df.to_csv(tmp_path)
|
|
1042
|
-
|
|
1043
|
-
self._model.setup_precip_forcing(timeseries=tmp_path)
|
|
1044
|
-
elif isinstance(rainfall, RainfallSynthetic):
|
|
1045
|
-
df = rainfall.to_dataframe(time_frame=time_frame)
|
|
1046
|
-
|
|
1047
|
-
if rainfall.timeseries.cumulative is not None: # scs
|
|
1048
|
-
conversion = us.UnitfulLength(
|
|
1049
|
-
value=1.0, units=rainfall.timeseries.cumulative.units
|
|
1050
|
-
).convert(us.UnitTypesLength.millimeters)
|
|
1051
|
-
else:
|
|
1052
|
-
conversion = us.UnitfulIntensity(
|
|
1053
|
-
value=1.0, units=rainfall.timeseries.peak_value.units
|
|
1054
|
-
).convert(us.UnitTypesIntensity.mm_hr)
|
|
1055
|
-
|
|
1056
|
-
df *= conversion
|
|
1057
|
-
tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
|
|
1058
|
-
df.to_csv(tmp_path)
|
|
1059
|
-
|
|
1060
|
-
self._model.setup_precip_forcing(timeseries=tmp_path)
|
|
1061
|
-
elif isinstance(rainfall, RainfallMeteo):
|
|
1062
|
-
ds = MeteoHandler().read(time_frame)
|
|
1063
|
-
# MeteoHandler always return metric so no conversion needed
|
|
1064
|
-
self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
|
|
1065
|
-
elif isinstance(rainfall, RainfallTrack):
|
|
1066
|
-
# data already in metric units so no conversion needed
|
|
1067
|
-
self._add_forcing_spw(rainfall)
|
|
1068
|
-
elif isinstance(rainfall, RainfallNetCDF):
|
|
1069
|
-
ds = rainfall.read()
|
|
1070
|
-
# time slicing to time_frame not needed, hydromt-sfincs handles it
|
|
1071
|
-
conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
|
|
1072
|
-
us.UnitTypesIntensity.mm_hr
|
|
1073
|
-
)
|
|
1074
|
-
ds *= conversion
|
|
1075
|
-
self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
|
|
1076
|
-
else:
|
|
1077
|
-
self.logger.warning(
|
|
1078
|
-
f"Unsupported rainfall forcing type: {rainfall.__class__.__name__}"
|
|
1079
|
-
)
|
|
1080
|
-
return
|
|
1081
|
-
|
|
1082
|
-
def _add_forcing_discharge(self, forcing: IDischarge):
|
|
1083
|
-
"""Add spatially constant discharge forcing to sfincs model. Use timeseries or a constant magnitude.
|
|
1084
|
-
|
|
1085
|
-
Parameters
|
|
1086
|
-
----------
|
|
1087
|
-
forcing : IDischarge
|
|
1088
|
-
-            The discharge forcing to add to the model.
-            Can be a constant, synthetic or from a csv file.
-            Also contains the river information.
-        """
-        if isinstance(forcing, (DischargeConstant, DischargeCSV, DischargeSynthetic)):
-            self._set_single_river_forcing(discharge=forcing)
-        else:
-            self.logger.warning(
-                f"Unsupported discharge forcing type: {forcing.__class__.__name__}"
-            )
-
-    def _add_forcing_waterlevels(self, forcing: IWaterlevel):
-        time_frame = self.get_model_time()
-        if isinstance(forcing, WaterlevelSynthetic):
-            df_ts = forcing.to_dataframe(time_frame=time_frame)
-
-            conversion = us.UnitfulLength(
-                value=1.0, units=forcing.surge.timeseries.peak_value.units
-            ).convert(us.UnitTypesLength.meters)
-            datum_correction = self.settings.water_level.get_datum(
-                self.database.site.gui.plotting.synthetic_tide.datum
-            ).height.convert(us.UnitTypesLength.meters)
-
-            df_ts = df_ts * conversion + datum_correction
-
-            self._set_waterlevel_forcing(df_ts)
-        elif isinstance(forcing, WaterlevelGauged):
-            if self.settings.tide_gauge is None:
-                raise ValueError("No tide gauge defined for this site.")
-
-            df_ts = self.settings.tide_gauge.get_waterlevels_in_time_frame(
-                time=time_frame,
-            )
-            conversion = us.UnitfulLength(
-                value=1.0, units=self.settings.tide_gauge.units
-            ).convert(us.UnitTypesLength.meters)
-
-            datum_height = self.settings.water_level.get_datum(
-                self.settings.tide_gauge.reference
-            ).height.convert(us.UnitTypesLength.meters)
-
-            df_ts = conversion * df_ts + datum_height
-
-            self._set_waterlevel_forcing(df_ts)
-        elif isinstance(forcing, WaterlevelCSV):
-            df_ts = forcing.to_dataframe(time_frame=time_frame)
-
-            if df_ts is None:
-                raise ValueError("Failed to get waterlevel data.")
-            conversion = us.UnitfulLength(value=1.0, units=forcing.units).convert(
-                us.UnitTypesLength.meters
-            )
-            df_ts *= conversion
-            self._set_waterlevel_forcing(df_ts)
-
-        elif isinstance(forcing, WaterlevelModel):
-            from flood_adapt.adapter.sfincs_offshore import OffshoreSfincsHandler
-
-            if self.settings.config.offshore_model is None:
-                raise ValueError("Offshore model configuration is missing.")
-            if self._scenario is None or self._event is None:
-                raise ValueError(
-                    "Scenario and event must be provided to run the offshore model."
-                )
-
-            df_ts = OffshoreSfincsHandler(
-                scenario=self._scenario, event=self._event
-            ).get_resulting_waterlevels()
-            if df_ts is None:
-                raise ValueError("Failed to get waterlevel data.")
-
-            # Datum
-            datum_correction = self.settings.water_level.get_datum(
-                self.settings.config.offshore_model.reference
-            ).height.convert(us.UnitTypesLength.meters)
-            df_ts += datum_correction
-
-            # Already in meters since it was produced by SFINCS so no conversion needed
-            self._set_waterlevel_forcing(df_ts)
-            self._turn_off_bnd_press_correction()
-        else:
-            self.logger.warning(
-                f"Unsupported waterlevel forcing type: {forcing.__class__.__name__}"
-            )
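
Note: the synthetic and gauged branches above share one pattern: scale the series into meters, then shift it by the source datum height so all values sit on the model's main reference. A minimal sketch of that arithmetic, with made-up unit factor and datum height:

    import pandas as pd

    # Hypothetical inputs: a surge series in feet and a datum 0.3 m above the
    # main reference. The factor mirrors us.UnitfulLength(value=1.0, ...).convert(...).
    df_ts = pd.DataFrame({"surge": [1.0, 2.5, 1.2]})
    feet_to_meters = 0.3048      # 1.0 ft expressed in meters
    datum_correction = 0.3       # datum height in meters (illustrative)

    df_ts = df_ts * feet_to_meters + datum_correction
    # df_ts is now in meters, relative to the main reference datum
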
-
-    # SPIDERWEB
-    def _add_forcing_spw(self, forcing: Union[RainfallTrack, WindTrack]):
-        """Add spiderweb forcing."""
-        if forcing.source != ForcingSource.TRACK:
-            raise ValueError("Forcing source should be TRACK.")
-
-        if forcing.path is None:
-            raise ValueError("No path to track file provided.")
-
-        if not forcing.path.exists():
-            # Check if the file is in the database
-            in_db = self._get_event_input_path(self._event) / forcing.path.name
-            if not in_db.exists():
-                raise FileNotFoundError(
-                    f"Input file for track forcing not found: {forcing.path}"
-                )
-            forcing.path = in_db
-
-        if forcing.path.suffix == ".cyc":
-            forcing.path = self._create_spw_file_from_track(
-                track_forcing=forcing,
-                hurricane_translation=self._event.hurricane_translation,
-                name=self._event.name,
-                output_dir=forcing.path.parent,
-                include_rainfall=bool(self._event.forcings.get(ForcingType.RAINFALL)),
-                recreate=False,
-            )
-
-        if forcing.path.suffix != ".spw":
-            raise ValueError(
-                "Track files should be in one of [spw, ddb_cyc] file format and must have [.spw, .cyc] extension."
-            )
-
-        sim_path = self.get_model_root()
-        self.logger.info(f"Adding spiderweb forcing to Sfincs model: {sim_path.name}")
-
-        # prevent SameFileError
-        output_spw_path = sim_path / forcing.path.name
-        if forcing.path == output_spw_path:
-            raise ValueError(
-                "Add a different SPW file than the one already in the model."
-            )
-
-        if output_spw_path.exists():
-            os.remove(output_spw_path)
-        shutil.copy2(forcing.path, output_spw_path)
-
-        self._model.set_config("spwfile", output_spw_path.name)
-
-    ### MEASURES ###
-    def _add_measure_floodwall(self, floodwall: FloodWall):
-        """Add floodwall to sfincs model.
-
-        Parameters
-        ----------
-        floodwall : FloodWall
-            floodwall information
-        """
-        polygon_file = resolve_filepath(
-            object_dir=ObjectDir.measure,
-            obj_name=floodwall.name,
-            path=floodwall.polygon_file,
-        )
-
-        # HydroMT function: get geodataframe from filename
-        gdf_floodwall = self._model.data_catalog.get_geodataframe(
-            polygon_file, geom=self._model.region, crs=self._model.crs
-        )
-
-        # Add floodwall attributes to geodataframe
-        gdf_floodwall["name"] = floodwall.name
-        if (gdf_floodwall.geometry.type == "MultiLineString").any():
-            gdf_floodwall = gdf_floodwall.explode()
-
-        try:
-            heights = [
-                float(
-                    us.UnitfulLength(
-                        value=float(height),
-                        units=self.database.site.gui.units.default_length_units,
-                    ).convert(us.UnitTypesLength("meters"))
-                )
-                for height in gdf_floodwall["z"]
-            ]
-            gdf_floodwall["z"] = heights
-            self.logger.info("Using floodwall height from shape file.")
-        except Exception:
-            self.logger.warning(
-                f"Could not use height data from file due to missing `z` column or missing values therein. Using uniform height of {floodwall.elevation} instead."
-            )
-            gdf_floodwall["z"] = floodwall.elevation.convert(
-                us.UnitTypesLength(us.UnitTypesLength.meters)
-            )
-
-        # par1 is the overflow coefficient for weirs
-        gdf_floodwall["par1"] = 0.6
-
-        # HydroMT function: create floodwall
-        self._model.setup_structures(structures=gdf_floodwall, stype="weir", merge=True)
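
Note: the try/except converts the per-feature `z` heights element by element. A vectorized sketch of the same conversion, assuming a `z` column in the site's default length units (factor shown for feet):

    import pandas as pd

    ft_to_m = 0.3048
    z = pd.Series([3.0, 4.5, 5.0])          # stand-in for gdf_floodwall["z"]
    heights_m = z.astype(float) * ft_to_m   # same result as the list comprehension
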
-
-    def _add_measure_greeninfra(self, green_infrastructure: GreenInfrastructure):
-        # HydroMT function: get geodataframe from filename
-        if green_infrastructure.selection_type == "polygon":
-            polygon_file = resolve_filepath(
-                ObjectDir.measure,
-                green_infrastructure.name,
-                green_infrastructure.polygon_file,
-            )
-        elif green_infrastructure.selection_type == "aggregation_area":
-            # TODO this logic already exists in the Database controller but cannot be used due to cyclic imports
-            # Loop through available aggregation area types
-            for aggr_dict in self.database.site.fiat.config.aggregation:
-                # check which one is used in measure
-                if not aggr_dict.name == green_infrastructure.aggregation_area_type:
-                    continue
-                # load geodataframe
-                aggr_areas = gpd.read_file(
-                    db_path(TopLevelDir.static) / aggr_dict.file,
-                    engine="pyogrio",
-                ).to_crs(4326)
-                # keep only aggregation area chosen
-                polygon_file = aggr_areas.loc[
-                    aggr_areas[aggr_dict.field_name]
-                    == green_infrastructure.aggregation_area_name,
-                    ["geometry"],
-                ].reset_index(drop=True)
-        else:
-            raise ValueError(
-                f"The selection type: {green_infrastructure.selection_type} is not valid"
-            )
-
-        gdf_green_infra = self._model.data_catalog.get_geodataframe(
-            polygon_file,
-            geom=self._model.region,
-            crs=self._model.crs,
-        )
-
-        # Make sure no multipolygons are there
-        gdf_green_infra = gdf_green_infra.explode()
-
-        # HydroMT function: create storage volume
-        self._model.setup_storage_volume(
-            storage_locs=gdf_green_infra,
-            volume=green_infrastructure.volume.convert(us.UnitTypesVolume.m3),
-            merge=True,
-        )
-
-    def _add_measure_pump(self, pump: Pump):
-        """Add pump to sfincs model.
-
-        Parameters
-        ----------
-        pump : Pump
-            pump information
-        """
-        polygon_file = resolve_filepath(ObjectDir.measure, pump.name, pump.polygon_file)
-        # HydroMT function: get geodataframe from filename
-        gdf_pump = self._model.data_catalog.get_geodataframe(
-            polygon_file, geom=self._model.region, crs=self._model.crs
-        )
-
-        # HydroMT function: create floodwall
-        self._model.setup_drainage_structures(
-            structures=gdf_pump,
-            stype="pump",
-            discharge=pump.discharge.convert(us.UnitTypesDischarge.cms),
-            merge=True,
-        )
-
-    ### SFINCS SETTERS ###
-    def _set_single_river_forcing(self, discharge: IDischarge):
-        """Add discharge to overland sfincs model.
-
-        Parameters
-        ----------
-        discharge : IDischarge
-            Discharge object with discharge timeseries data and river information.
-        """
-        if not isinstance(
-            discharge, (DischargeConstant, DischargeSynthetic, DischargeCSV)
-        ):
-            self.logger.warning(
-                f"Unsupported discharge forcing type: {discharge.__class__.__name__}"
-            )
-            return
-
-        self.logger.info(f"Setting discharge forcing for river: {discharge.river.name}")
-
-        time_frame = self.get_model_time()
-        model_rivers = self._read_river_locations()
-
-        # Check that the river is defined in the model and that the coordinates match
-        river_loc = shapely.Point(
-            discharge.river.x_coordinate, discharge.river.y_coordinate
-        )
-        tolerance = 0.001  # in degrees, ~111 meters at the equator. (0.0001: 11 meters at the equator)
-        river_gdf = model_rivers[model_rivers.distance(river_loc) <= tolerance]
-        river_inds = river_gdf.index.to_list()
-        if len(river_inds) != 1:
-            raise ValueError(
-                f"River {discharge.river.name} is not defined in the sfincs model. Please ensure the river coordinates in the site.toml match the coordinates for rivers in the SFINCS model."
-            )
-
-        # Create a geodataframe with the river coordinates, the timeseries data and rename the column to the river index defined in the model
-        if isinstance(discharge, DischargeCSV):
-            df = discharge.to_dataframe(time_frame)
-            conversion = us.UnitfulDischarge(value=1.0, units=discharge.units).convert(
-                us.UnitTypesDischarge.cms
-            )
-        elif isinstance(discharge, DischargeConstant):
-            df = discharge.to_dataframe(time_frame)
-            conversion = us.UnitfulDischarge(
-                value=1.0, units=discharge.discharge.units
-            ).convert(us.UnitTypesDischarge.cms)
-        elif isinstance(discharge, DischargeSynthetic):
-            df = discharge.to_dataframe(time_frame)
-            conversion = us.UnitfulDischarge(
-                value=1.0, units=discharge.timeseries.peak_value.units
-            ).convert(us.UnitTypesDischarge.cms)
-        else:
-            raise ValueError(
-                f"Unsupported discharge forcing type: {discharge.__class__}"
-            )
-
-        df *= conversion
-
-        df = df.rename(columns={df.columns[0]: river_inds[0]})
-
-        # HydroMT function: set discharge forcing from time series and river coordinates
-        self._model.setup_discharge_forcing(
-            locations=river_gdf,
-            timeseries=df,
-            merge=True,
-        )
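
Note: the river lookup above requires exactly one model source point within 0.001 degrees (~111 m at the equator) of the configured coordinates. A sketch of that matching with made-up coordinates (on a geographic CRS the distances are in degrees):

    import geopandas as gpd
    import shapely

    model_rivers = gpd.GeoDataFrame(
        {"geometry": [shapely.Point(4.30, 52.05), shapely.Point(4.45, 52.10)]},
        crs="EPSG:4326",
    )
    river_loc = shapely.Point(4.3001, 52.0501)
    matches = model_rivers[model_rivers.distance(river_loc) <= 0.001]
    assert len(matches) == 1  # the river must match exactly one source point
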
-
-    def _turn_off_bnd_press_correction(self):
-        """Turn off the boundary pressure correction in the sfincs model."""
-        self.logger.info(
-            "Turning off boundary pressure correction in the offshore model"
-        )
-        self._model.set_config("pavbnd", -9999)
-
-    def _set_waterlevel_forcing(self, df_ts: pd.DataFrame):
-        """
-        Add water level forcing to sfincs model.
-
-        Values in the timeseries are expected to be relative to the main reference datum: `self.settings.water_level.reference`.
-        The overland model reference: `self.settings.config.overland_model.reference` is used to convert the water levels to the reference of the overland model.
-
-        Parameters
-        ----------
-        df_ts : pd.DataFrame
-            Time series of water levels with the first column as the time index.
-
-        """
-        # Determine bnd points from reference overland model
-        gdf_locs = self._read_waterlevel_boundary_locations()
-
-        if len(df_ts.columns) == 1:
-            # Go from 1 timeseries to timeseries for all boundary points
-            name = df_ts.columns[0]
-            for i in range(1, len(gdf_locs)):
-                df_ts[i + 1] = df_ts[name]
-            df_ts.columns = list(range(1, len(gdf_locs) + 1))
-
-        # Datum
-        sfincs_overland_reference_height = self.settings.water_level.get_datum(
-            self.settings.config.overland_model.reference
-        ).height.convert(us.UnitTypesLength.meters)
-
-        df_ts -= sfincs_overland_reference_height
-
-        # HydroMT function: set waterlevel forcing from time series
-        self._model.set_forcing_1d(
-            name="bzs", df_ts=df_ts, gdf_locs=gdf_locs, merge=False
-        )
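
Note: when one timeseries is supplied, it is broadcast to every boundary point; SFINCS expects one column per point, numbered 1..N. The same loop, isolated with a toy DataFrame:

    import pandas as pd

    df_ts = pd.DataFrame({"wl": [0.1, 0.4, 0.2]})
    n_points = 3                      # stand-in for len(gdf_locs)
    name = df_ts.columns[0]
    for i in range(1, n_points):
        df_ts[i + 1] = df_ts[name]    # duplicate the series for each bnd point
    df_ts.columns = list(range(1, n_points + 1))
    # df_ts now has identical columns 1, 2, 3
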
-
-    # OFFSHORE
-    def _add_pressure_forcing_from_grid(self, ds: xr.DataArray):
-        """Add spatially varying barometric pressure to sfincs model.
-
-        Parameters
-        ----------
-        ds : xr.DataArray
-            - Required variables: ['press_msl' (Pa)]
-            - Required coordinates: ['time', 'y', 'x']
-            - spatial_ref: CRS
-        """
-        self.logger.info("Adding pressure forcing to the offshore model")
-        self._model.setup_pressure_forcing_from_grid(press=ds)
-
-    def _add_bzs_from_bca(self, event: Event, physical_projection: PhysicalProjection):
-        # ONLY offshore models
-        """Convert tidal constituents from bca file to waterlevel timeseries that can be read in by hydromt_sfincs."""
-        if self.settings.config.offshore_model is None:
-            raise ValueError("No offshore model found in sfincs config.")
-
-        self.logger.info("Adding water level forcing to the offshore model")
-        sb = SfincsBoundary()
-        sb.read_flow_boundary_points(self.get_model_root() / "sfincs.bnd")
-        sb.read_astro_boundary_conditions(self.get_model_root() / "sfincs.bca")
-
-        times = pd.date_range(
-            start=event.time.start_time,
-            end=event.time.end_time,
-            freq="10T",
-        )
-
-        # Predict tidal signal and add SLR
-        if not sb.flow_boundary_points:
-            raise ValueError("No flow boundary points found.")
-
-        if self.settings.config.offshore_model.vertical_offset:
-            correction = self.settings.config.offshore_model.vertical_offset.convert(
-                us.UnitTypesLength.meters
-            )
-        else:
-            correction = 0.0
-
-        for bnd_ii in range(len(sb.flow_boundary_points)):
-            tide_ii = (
-                predict(sb.flow_boundary_points[bnd_ii].astro, times)
-                + correction
-                + physical_projection.sea_level_rise.convert(us.UnitTypesLength.meters)
-            )
-
-            if bnd_ii == 0:
-                wl_df = pd.DataFrame(data={1: tide_ii}, index=times)
-            else:
-                wl_df[bnd_ii + 1] = tide_ii
-
-        # Determine bnd points from reference overland model
-        gdf_locs = self._read_waterlevel_boundary_locations()
-
-        # HydroMT function: set waterlevel forcing from time series
-        self._model.set_forcing_1d(
-            name="bzs", df_ts=wl_df, gdf_locs=gdf_locs, merge=False
-        )
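
Note: per boundary point the method predicts a 10-minute tidal signal (the `"10T"` alias is the same as `"10min"`, which newer pandas releases prefer) and stacks the results column-wise. A sketch with the predictor stubbed out:

    import pandas as pd

    times = pd.date_range("2025-01-01", "2025-01-02", freq="10min")

    def predict_stub(_astro, t):
        # Stand-in for cht_tide.tide_predict.predict; returns a flat signal.
        return pd.Series(0.0, index=t)

    wl_df = None
    for bnd_ii in range(3):
        tide_ii = predict_stub(None, times) + 0.0  # + offset + sea level rise
        if bnd_ii == 0:
            wl_df = pd.DataFrame(data={1: tide_ii}, index=times)
        else:
            wl_df[bnd_ii + 1] = tide_ii
    # wl_df: one column per boundary point, numbered 1..3
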
-
-    ### PRIVATE GETTERS ###
-    def _get_result_path(self, scenario: Scenario) -> Path:
-        """Return the path to store the results."""
-        return self.database.scenarios.output_path / scenario.name / "Flooding"
-
-    def _get_simulation_path(
-        self, scenario: Scenario, sub_event: Optional[Event] = None
-    ) -> Path:
-        """
-        Return the path to the simulation results.
-
-        Parameters
-        ----------
-        scenario : Scenario
-            The scenario for which to get the simulation path.
-        sub_event : Optional[Event], optional
-            The sub-event for which to get the simulation path, by default None.
-            Is only used when the event associated with the scenario is an EventSet.
-        """
-        base_path = (
-            self._get_result_path(scenario)
-            / "simulations"
-            / self.settings.config.overland_model.name
-        )
-        event = self.database.events.get(scenario.event)
-
-        if isinstance(event, EventSet):
-            if sub_event is None:
-                raise ValueError("Event must be provided when scenario is an EventSet.")
-            return base_path.parent / sub_event.name / base_path.name
-        elif isinstance(event, Event):
-            return base_path
-        else:
-            raise ValueError(f"Unsupported mode: {event.mode}")
-
-    def _get_simulation_path_offshore(
-        self, scenario: Scenario, sub_event: Optional[Event] = None
-    ) -> Path:
-        # Get the path to the offshore model (will not be used if offshore model is not created)
-        if self.settings.config.offshore_model is None:
-            raise ValueError("No offshore model found in sfincs config.")
-        base_path = (
-            self._get_result_path(scenario)
-            / "simulations"
-            / self.settings.config.offshore_model.name
-        )
-        event = self.database.events.get(scenario.event)
-        if isinstance(event, EventSet):
-            return base_path.parent / sub_event.name / base_path.name
-        elif isinstance(event, Event):
-            return base_path
-        else:
-            raise ValueError(f"Unsupported mode: {event.mode}")
-
-    def _get_flood_map_paths(self, scenario: Scenario) -> list[Path]:
-        """Return the paths to the flood maps that running this scenario should produce."""
-        results_path = self._get_result_path(scenario)
-        event = self.database.events.get(scenario.event)
-
-        if isinstance(event, EventSet):
-            map_fn = []
-            for rp in self.database.site.fiat.risk.return_periods:
-                map_fn.append(results_path / f"RP_{rp:04d}_maps.nc")
-        elif isinstance(event, Event):
-            map_fn = [results_path / "max_water_level_map.nc"]
-        else:
-            raise ValueError(f"Unsupported mode: {event.mode}")
-
-        return map_fn
-
-    def _get_event_input_path(self, event: Event) -> Path:
-        """Return the path to the event input directory."""
-        return self.database.events.input_path / event.name
-
-    def _get_zsmax(self):
-        """Read zsmax file and return absolute maximum water level over entire simulation."""
-        self._model.read_results()
-        zsmax = self._model.results["zsmax"].max(dim="timemax")
-        zsmax.attrs["units"] = "m"
-        return zsmax
-
-    def _get_zs_points(self):
-        """Read water level (zs) timeseries at observation points.
-
-        Names are allocated from the site.toml.
-        See also add_obs_points() above.
-        """
-        self._model.read_results()
-        da = self._model.results["point_zs"]
-        df = pd.DataFrame(index=pd.DatetimeIndex(da.time), data=da.to_numpy())
-
-        names = []
-        descriptions = []
-        # get station names from site.toml
-        if self.settings.obs_point is not None:
-            obs_points = self.settings.obs_point
-            for pt in obs_points:
-                names.append(pt.name)
-                descriptions.append(pt.description)
-
-        pt_df = pd.DataFrame({"Name": names, "Description": descriptions})
-        gdf = gpd.GeoDataFrame(
-            pt_df,
-            geometry=gpd.points_from_xy(da.point_x.values, da.point_y.values),
-            crs=self._model.crs,
-        )
-        return df, gdf
-
-    def _create_spw_file_from_track(
-        self,
-        track_forcing: Union[RainfallTrack, WindTrack],
-        hurricane_translation: TranslationModel,
-        name: str,
-        output_dir: Path,
-        include_rainfall: bool = False,
-        recreate: bool = False,
-    ):
-        """
-        Create a spiderweb file from a given TropicalCyclone track and save it to the event's input directory.
-
-        Providing the output_dir argument allows to save the spiderweb file in a different directory.
-
-        Parameters
-        ----------
-        output_dir : Path
-            The directory where the spiderweb file is saved (or copied to if it already exists and recreate is False)
-        recreate : bool, optional
-            If True, the spiderweb file is recreated even if it already exists, by default False
-
-        Returns
-        -------
-        Path
-            the path to the created spiderweb file
-        """
-        if track_forcing.path is None:
-            raise ValueError("No path to track file provided.")
-
-        # Check file format
-        match track_forcing.path.suffix:
-            case ".spw":
-                if recreate:
-                    raise ValueError(
-                        "Recreating spiderweb files from existing spiderweb files is not supported. Provide a track file instead."
-                    )
-
-                if track_forcing.path.exists():
-                    return track_forcing.path
-
-                elif (output_dir / track_forcing.path.name).exists():
-                    return output_dir / track_forcing.path.name
-
-                else:
-                    raise FileNotFoundError(f"SPW file not found: {track_forcing.path}")
-            case ".cyc":
-                pass
-            case _:
-                raise ValueError(
-                    "Track files should be in the DDB_CYC file format and must have .cyc extension, or in the SPW file format and must have .spw extension"
-                )
-
-        # Check if the spiderweb file already exists
-        spw_file = output_dir / track_forcing.path.with_suffix(".spw").name
-        if spw_file.exists():
-            if recreate:
-                os.remove(spw_file)
-            else:
-                return spw_file
-
-        # Initialize the tropical cyclone
-        tc = TropicalCyclone()
-        tc.read_track(filename=str(track_forcing.path), fmt="ddb_cyc")
-
-        # Alter the track of the tc if necessary
-        tc = self._translate_tc_track(
-            tc=tc, hurricane_translation=hurricane_translation
-        )
-
-        # Rainfall
-        start = "Including" if include_rainfall else "Excluding"
-        self.logger.info(f"{start} rainfall in the spiderweb file")
-        tc.include_rainfall = include_rainfall
-
-        self.logger.info(
-            f"Creating spiderweb file for hurricane event `{name}`. This may take a while."
-        )
-
-        # Create spiderweb file from the track
-        tc.to_spiderweb(spw_file)
-
-        return spw_file
-
-    def _translate_tc_track(
-        self, tc: TropicalCyclone, hurricane_translation: TranslationModel
-    ):
-        if math.isclose(
-            hurricane_translation.eastwest_translation.value, 0, abs_tol=1e-6
-        ) and math.isclose(
-            hurricane_translation.northsouth_translation.value, 0, abs_tol=1e-6
-        ):
-            return tc
-
-        self.logger.info(f"Translating the track of the tropical cyclone `{tc.name}`")
-        # First convert geodataframe to the local coordinate system
-        crs = pyproj.CRS.from_string(self.settings.config.csname)
-        tc.track = tc.track.to_crs(crs)
-
-        # Translate the track in the local coordinate system
-        tc.track["geometry"] = tc.track["geometry"].apply(
-            lambda geom: translate(
-                geom,
-                xoff=hurricane_translation.eastwest_translation.convert(
-                    us.UnitTypesLength.meters
-                ),
-                yoff=hurricane_translation.northsouth_translation.convert(
-                    us.UnitTypesLength.meters
-                ),
-            )
-        )
-
-        # Convert the geodataframe to lat/lon
-        tc.track = tc.track.to_crs(epsg=4326)
-
-        return tc
-
-    # @gundula do we keep this func, its not used anywhere?
-    def _downscale_hmax(self, zsmax, demfile: Path):
-        # read DEM and convert units to metric units used by SFINCS
-        demfile_units = self.settings.dem.units
-        dem_conversion = us.UnitfulLength(value=1.0, units=demfile_units).convert(
-            us.UnitTypesLength("meters")
-        )
-        dem = dem_conversion * self._model.data_catalog.get_rasterdataset(demfile)
-        dem = dem.rio.reproject(self._model.crs)
-
-        # determine conversion factor for output floodmap
-        floodmap_units = self.settings.config.floodmap_units
-        floodmap_conversion = us.UnitfulLength(
-            value=1.0, units=us.UnitTypesLength.meters
-        ).convert(floodmap_units)
-
-        hmax = utils.downscale_floodmap(
-            zsmax=floodmap_conversion * zsmax,
-            dep=floodmap_conversion * dem,
-            hmin=0.01,
-        )
-        return hmax
-
-    def _read_river_locations(self) -> gpd.GeoDataFrame:
-        path = self.get_model_root() / "sfincs.src"
-
-        with open(path) as f:
-            lines = f.readlines()
-        coords = [(float(line.split()[0]), float(line.split()[1])) for line in lines]
-        points = [shapely.Point(coord) for coord in coords]
-
-        return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)
-
-    def _read_waterlevel_boundary_locations(self) -> gpd.GeoDataFrame:
-        with open(self.get_model_root() / "sfincs.bnd") as f:
-            lines = f.readlines()
-        coords = [(float(line.split()[0]), float(line.split()[1])) for line in lines]
-        points = [shapely.Point(coord) for coord in coords]
-
-        return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)
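
Note: both `sfincs.src` and `sfincs.bnd` are whitespace-separated "x y" coordinate files, so the two readers above share one parse. A self-contained sketch with sample contents inlined (CRS illustrative):

    import geopandas as gpd
    import shapely

    lines = ["595000.0 5748000.0\n", "596250.0 5749100.0\n"]
    coords = [(float(ln.split()[0]), float(ln.split()[1])) for ln in lines]
    points = [shapely.Point(c) for c in coords]
    gdf = gpd.GeoDataFrame({"geometry": points}, crs="EPSG:32631")
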
-
-    def _setup_sfincs_logger(self, model_root: Path) -> logging.Logger:
-        """Initialize the logger for the SFINCS model."""
-        # Create a logger for the SFINCS model manually
-        sfincs_logger = logging.getLogger("SfincsModel")
-        for handler in sfincs_logger.handlers[:]:
-            sfincs_logger.removeHandler(handler)
-
-        # Add a file handler
-        file_handler = logging.FileHandler(
-            filename=model_root.resolve() / "sfincs_model.log",
-            mode="w",
-        )
-        sfincs_logger.setLevel(logging.DEBUG)
-        sfincs_logger.addHandler(file_handler)
-        return sfincs_logger
-
-    def _cleanup_simulation_folder(
-        self,
-        path: Path,
-        extensions: list[str] = [".spw"],
-    ):
-        """Remove all files with the given extensions in the given path."""
-        if not path.exists():
-            return
-
-        for ext in extensions:
-            for file in path.glob(f"*{ext}"):
-                file.unlink()
-
-    def _load_scenario_objects(self, scenario: Scenario, event: Event) -> None:
-        self._scenario = scenario
-        self._projection = self.database.projections.get(scenario.projection)
-        self._strategy = self.database.strategies.get(scenario.strategy)
-        self._event = event
-
-        _event = self.database.events.get(scenario.event)
-        if isinstance(_event, EventSet):
-            self._event_set = _event
-        else:
-            self._event_set = None
-
-    def _add_tide_gauge_plot(
-        self, fig, event: Event, units: us.UnitTypesLength
-    ) -> None:
-        # check if event is historic
-        if not isinstance(event, HistoricalEvent):
-            return
-        if self.settings.tide_gauge is None:
-            return
-        df_gauge = self.settings.tide_gauge.get_waterlevels_in_time_frame(
-            time=TimeFrame(
-                start_time=event.time.start_time,
-                end_time=event.time.end_time,
-            ),
-            units=us.UnitTypesLength(units),
-        )
-
-        if df_gauge is not None:
-            gauge_reference_height = self.settings.water_level.get_datum(
-                self.settings.tide_gauge.reference
-            ).height.convert(units)
-
-            waterlevel = df_gauge.iloc[:, 0] + gauge_reference_height
-
-            # If data is available, add to plot
-            fig.add_trace(
-                px.line(waterlevel, color_discrete_sequence=["#ea6404"]).data[0]
-            )
-            fig["data"][0]["name"] = "model"
-            fig["data"][1]["name"] = "measurement"
-            fig.update_layout(showlegend=True)
+import logging
+import math
+import os
+import shutil
+import subprocess
+import tempfile
+from pathlib import Path
+from typing import Optional, Union
+
+import geopandas as gpd
+import hydromt_sfincs.utils as utils
+import numpy as np
+import pandas as pd
+import plotly.express as px
+import pyproj
+import shapely
+import xarray as xr
+from cht_cyclones.tropical_cyclone import TropicalCyclone
+from cht_tide.read_bca import SfincsBoundary
+from cht_tide.tide_predict import predict
+from hydromt_sfincs import SfincsModel as HydromtSfincsModel
+from hydromt_sfincs.quadtree import QuadtreeGrid
+from numpy import matlib
+from shapely.affinity import translate
+
+from flood_adapt.adapter.interface.hazard_adapter import IHazardAdapter
+from flood_adapt.config.config import Settings
+from flood_adapt.config.site import Site
+from flood_adapt.misc.log import FloodAdaptLogging
+from flood_adapt.misc.path_builder import (
+    ObjectDir,
+    TopLevelDir,
+    db_path,
+)
+from flood_adapt.misc.utils import cd, resolve_filepath
+from flood_adapt.objects.events.event_set import EventSet
+from flood_adapt.objects.events.events import Event, Mode, Template
+from flood_adapt.objects.events.historical import HistoricalEvent
+from flood_adapt.objects.events.hurricane import TranslationModel
+from flood_adapt.objects.forcing import unit_system as us
+from flood_adapt.objects.forcing.discharge import (
+    DischargeConstant,
+    DischargeCSV,
+    DischargeSynthetic,
+)
+from flood_adapt.objects.forcing.forcing import (
+    ForcingSource,
+    ForcingType,
+    IDischarge,
+    IForcing,
+    IRainfall,
+    IWaterlevel,
+    IWind,
+)
+from flood_adapt.objects.forcing.meteo_handler import MeteoHandler
+from flood_adapt.objects.forcing.rainfall import (
+    RainfallConstant,
+    RainfallCSV,
+    RainfallMeteo,
+    RainfallNetCDF,
+    RainfallSynthetic,
+    RainfallTrack,
+)
+from flood_adapt.objects.forcing.time_frame import TimeFrame
+from flood_adapt.objects.forcing.waterlevels import (
+    WaterlevelCSV,
+    WaterlevelGauged,
+    WaterlevelModel,
+    WaterlevelSynthetic,
+)
+from flood_adapt.objects.forcing.wind import (
+    WindConstant,
+    WindCSV,
+    WindMeteo,
+    WindNetCDF,
+    WindSynthetic,
+    WindTrack,
+)
+from flood_adapt.objects.measures.measures import (
+    FloodWall,
+    GreenInfrastructure,
+    Measure,
+    Pump,
+)
+from flood_adapt.objects.projections.projections import (
+    PhysicalProjection,
+    Projection,
+)
+from flood_adapt.objects.scenarios.scenarios import Scenario
+
+
+class SfincsAdapter(IHazardAdapter):
+    """Adapter for the SFINCS model.
+
+    This class is used to run the SFINCS model and process the results.
+
+    Attributes
+    ----------
+    settings : SfincsModel
+        The settings for the SFINCS model.
+    """
+
+    logger = FloodAdaptLogging.getLogger("SfincsAdapter")
+    _site: Site
+    _model: HydromtSfincsModel
+
+    ###############
+    ### PUBLIC ####
+    ###############
+
+    ### HAZARD ADAPTER METHODS ###
+    def __init__(self, model_root: Path):
+        """Load overland sfincs model based on a root directory.
+
+        Parameters
+        ----------
+        model_root : Path
+            Root directory of overland sfincs model.
+        """
+        self.settings = self.database.site.sfincs
+        self.units = self.database.site.gui.units
+        self.sfincs_logger = self._setup_sfincs_logger(model_root)
+        self._model = HydromtSfincsModel(
+            root=str(model_root.resolve()), mode="r", logger=self.sfincs_logger
+        )
+        self._model.read()
+
+    def read(self, path: Path):
+        """Read the sfincs model from the current model root."""
+        if Path(self._model.root).resolve() != Path(path).resolve():
+            self._model.set_root(root=str(path), mode="r")
+        self._model.read()
+
+    def write(self, path_out: Union[str, os.PathLike], overwrite: bool = True):
+        """Write the sfincs model configuration to a directory."""
+        root = self.get_model_root()
+        if not isinstance(path_out, Path):
+            path_out = Path(path_out).resolve()
+
+        if not path_out.exists():
+            path_out.mkdir(parents=True)
+
+        if root != path_out:
+            shutil.copytree(root, path_out, dirs_exist_ok=True)
+
+        write_mode = "w+" if overwrite else "w"
+        with cd(path_out):
+            self._model.set_root(root=str(path_out), mode=write_mode)
+            self._model.write()
+
+    def close_files(self):
+        """Close all open files and clean up file handles."""
+        for logger in [self.logger, self.sfincs_logger]:
+            if hasattr(logger, "handlers"):
+                for handler in logger.handlers:
+                    if isinstance(handler, logging.FileHandler):
+                        handler.close()
+                        logger.removeHandler(handler)
+
+    def __enter__(self) -> "SfincsAdapter":
+        return self
+
+    def __exit__(self, exc_type, exc_value, traceback) -> bool:
+        self.close_files()
+        return False
+
+    def has_run(self, scenario: Scenario) -> bool:
+        """Check if the model has been run."""
+        event = self.database.events.get(scenario.event)
+        if event.mode == Mode.risk:
+            sim_paths = [
+                self._get_simulation_path(scenario, sub_event=sub_event)
+                for sub_event in event.sub_events
+            ]
+            # No need to check postprocessing for risk scenarios
+            return all(self.sfincs_completed(sim_path) for sim_path in sim_paths)
+        else:
+            return self.sfincs_completed(self._get_simulation_path(scenario))
+
+    def execute(self, path: Path, strict: bool = True) -> bool:
+        """
+        Run the sfincs executable in the specified path.
+
+        Parameters
+        ----------
+        path : str
+            Path to the simulation folder.
+            Default is None, in which case the model root is used.
+        strict : bool, optional
+            True: raise an error if the model fails to run.
+            False: log a warning.
+            Default is True.
+
+        Returns
+        -------
+        bool
+            True if the model ran successfully, False otherwise.
+
+        """
+        with cd(path):
+            self.logger.info(f"Running SFINCS in {path}")
+            process = subprocess.run(
+                str(Settings().sfincs_bin_path),
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                text=True,
+            )
+            self.sfincs_logger.info(process.stdout)
+            self.logger.debug(process.stdout)
+
+        self._cleanup_simulation_folder(path)
+
+        if process.returncode != 0:
+            if Settings().delete_crashed_runs:
+                # Remove all files in the simulation folder except for the log files
+                for subdir, dirs, files in os.walk(path, topdown=False):
+                    for file in files:
+                        if not file.endswith(".log"):
+                            os.remove(os.path.join(subdir, file))
+
+                    if not os.listdir(subdir):
+                        os.rmdir(subdir)
+
+            if strict:
+                raise RuntimeError(f"SFINCS model failed to run in {path}.")
+            else:
+                self.logger.error(f"SFINCS model failed to run in {path}.")
+
+        return process.returncode == 0
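
Note: the run pattern above reduces to running the SFINCS binary inside the simulation folder and capturing stdout for the model log. A standalone sketch; the binary and folder paths are placeholders:

    import subprocess
    from pathlib import Path

    sim_dir = Path("scenario/Flooding/simulations/overland")
    proc = subprocess.run(
        "/opt/sfincs/bin/sfincs",   # stand-in for Settings().sfincs_bin_path
        cwd=sim_dir,                # equivalent to the cd() context manager
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        text=True,
    )
    ok = proc.returncode == 0
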
+
+    def run(self, scenario: Scenario):
+        """Run the whole workflow (Preprocess, process and postprocess) for a given scenario."""
+        self._ensure_no_existing_forcings()
+        event = self.database.events.get(scenario.event)
+
+        if event.mode == Mode.risk:
+            self._run_risk_scenario(scenario=scenario)
+        else:
+            self._run_single_event(scenario=scenario, event=event)
+
+    def preprocess(self, scenario: Scenario, event: Event):
+        """
+        Preprocess the SFINCS model for a given scenario.
+
+        Parameters
+        ----------
+        scenario : Scenario
+            Scenario to preprocess.
+        event : Event, optional
+            Event to preprocess, by default None.
+        """
+        # I dont like this due to it being state based and might break if people use functions in the wrong order
+        # Currently only used to pass projection + event stuff to WaterlevelModel
+
+        sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
+        sim_path.mkdir(parents=True, exist_ok=True)
+        template_path = (
+            self.database.static.get_overland_sfincs_model().get_model_root()
+        )
+        shutil.copytree(template_path, sim_path, dirs_exist_ok=True)
+
+        with SfincsAdapter(model_root=sim_path) as model:
+            model._load_scenario_objects(scenario, event)
+            is_risk = "Probabilistic " if model._event_set is not None else ""
+            self.logger.info(
+                f"Preprocessing Scenario `{model._scenario.name}`: {is_risk}Event `{model._event.name}`, Strategy `{model._strategy.name}`, Projection `{model._projection.name}`"
+            )
+            # Write template model to output path and set it as the model root so focings can write to it
+            model.set_timing(model._event.time)
+            model.write(sim_path)
+
+            # Event
+            for forcing in model._event.get_forcings():
+                model.add_forcing(forcing)
+
+            if self.rainfall is not None:
+                model.rainfall *= model._event.rainfall_multiplier
+            else:
+                model.logger.warning(
+                    "Failed to add event rainfall multiplier, no rainfall forcing found in the model."
+                )
+
+            # Measures
+            for measure in model._strategy.get_hazard_measures():
+                model.add_measure(measure)
+
+            # Projection
+            model.add_projection(model._projection)
+
+            # Output
+            model.add_obs_points()
+
+            # Save any changes made to disk as well
+            model.write(path_out=sim_path)
+
+    def process(self, scenario: Scenario, event: Event):
+        if event.mode != Mode.single_event:
+            raise ValueError(f"Unsupported event mode: {event.mode}.")
+
+        sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
+        self.logger.info(f"Running SFINCS for single event Scenario `{scenario.name}`")
+        self.execute(sim_path)
+
+    def postprocess(self, scenario: Scenario, event: Event):
+        if event.mode != Mode.single_event:
+            raise ValueError(f"Unsupported event mode: {event.mode}.")
+
+        self.logger.info(f"Postprocessing SFINCS for Scenario `{scenario.name}`")
+        if not self.sfincs_completed(
+            self._get_simulation_path(scenario, sub_event=event)
+        ):
+            raise RuntimeError("SFINCS was not run successfully!")
+
+        self.write_floodmap_geotiff(scenario)
+        self.plot_wl_obs(scenario)
+        self.write_water_level_map(scenario)
+
+    def set_timing(self, time: TimeFrame):
+        """Set model reference times."""
+        self.logger.info(f"Setting timing for the SFINCS model: `{time}`")
+        self._model.set_config("tref", time.start_time)
+        self._model.set_config("tstart", time.start_time)
+        self._model.set_config("tstop", time.end_time)
+
+    def add_forcing(self, forcing: IForcing):
+        """Get forcing data and add it."""
+        if forcing is None:
+            return
+
+        self.logger.info(
+            f"Adding {forcing.type.capitalize()}: {forcing.source.capitalize()}"
+        )
+        if isinstance(forcing, IRainfall):
+            self._add_forcing_rain(forcing)
+        elif isinstance(forcing, IWind):
+            self._add_forcing_wind(forcing)
+        elif isinstance(forcing, IDischarge):
+            self._add_forcing_discharge(forcing)
+        elif isinstance(forcing, IWaterlevel):
+            self._add_forcing_waterlevels(forcing)
+        else:
+            self.logger.warning(
+                f"Skipping unsupported forcing type {forcing.__class__.__name__}"
+            )
+
+    def add_measure(self, measure: Measure):
+        """Get measure data and add it."""
+        self.logger.info(
+            f"Adding {measure.__class__.__name__.capitalize()} `{measure.name}`"
+        )
+
+        if isinstance(measure, FloodWall):
+            self._add_measure_floodwall(measure)
+        elif isinstance(measure, GreenInfrastructure):
+            self._add_measure_greeninfra(measure)
+        elif isinstance(measure, Pump):
+            self._add_measure_pump(measure)
+        else:
+            self.logger.warning(
+                f"Skipping unsupported measure type {measure.__class__.__name__}"
+            )
+
+    def add_projection(self, projection: Projection):
+        """Get forcing data currently in the sfincs model and add the projection it."""
+        self.logger.info(f"Adding Projection `{projection.name}`")
+        phys_projection = projection.physical_projection
+
+        if phys_projection.sea_level_rise:
+            self.logger.info(
+                f"Adding projected sea level rise `{phys_projection.sea_level_rise}`"
+            )
+            if self.waterlevels is not None:
+                self.waterlevels += phys_projection.sea_level_rise.convert(
+                    us.UnitTypesLength.meters
+                )
+            else:
+                self.logger.warning(
+                    "Failed to add sea level rise, no water level forcing found in the model."
+                )
+
+        if phys_projection.rainfall_multiplier:
+            self.logger.info(
+                f"Adding projected rainfall multiplier `{phys_projection.rainfall_multiplier}`"
+            )
+            if self.rainfall is not None:
+                self.rainfall *= phys_projection.rainfall_multiplier
+            else:
+                self.logger.warning(
+                    "Failed to add projected rainfall multiplier, no rainfall forcing found in the model."
+                )
+
+    ### GETTERS ###
+    def get_model_time(self) -> TimeFrame:
+        t0, t1 = self._model.get_model_time()
+        return TimeFrame(start_time=t0, end_time=t1)
+
+    def get_model_root(self) -> Path:
+        return Path(self._model.root)
+
+    def get_mask(self):
+        """Get mask with inactive cells from model."""
+        mask = self._model.grid["msk"]
+        return mask
+
+    def get_bedlevel(self):
+        """Get bed level from model."""
+        self._model.read_results()
+        zb = self._model.results["zb"]
+        return zb
+
+    def get_model_boundary(self) -> gpd.GeoDataFrame:
+        """Get bounding box from model."""
+        return self._model.region
+
+    def get_model_grid(self) -> QuadtreeGrid:
+        """Get grid from model.
+
+        Returns
+        -------
+        QuadtreeGrid
+            QuadtreeGrid with the model grid
+        """
+        return self._model.quadtree
+
+    # Forcing properties
+    @property
+    def waterlevels(self) -> xr.Dataset | xr.DataArray | None:
+        return self._model.forcing.get("bzs")
+
+    @waterlevels.setter
+    def waterlevels(self, waterlevels: xr.Dataset | xr.DataArray):
+        if self.waterlevels is None or self.waterlevels.size == 0:
+            raise ValueError("No water level forcing found in the model.")
+        self._model.forcing["bzs"] = waterlevels
+
+    @property
+    def discharge(self) -> xr.Dataset | xr.DataArray | None:
+        return self._model.forcing.get("dis")
+
+    @discharge.setter
+    def discharge(self, discharge: xr.Dataset | xr.DataArray):
+        if self.discharge is None or self.discharge.size == 0:
+            raise ValueError("No discharge forcing found in the model.")
+        self._model.forcing["dis"] = discharge
+
+    @property
+    def rainfall(self) -> xr.Dataset | xr.DataArray | None:
+        names = ["precip", "precip_2d"]
+        in_model = [name for name in names if name in self._model.forcing]
+        if len(in_model) == 0:
+            return None
+        elif len(in_model) == 1:
+            return self._model.forcing[in_model[0]]
+        else:
+            raise ValueError("Multiple rainfall forcings found in the model.")
+
+    @rainfall.setter
+    def rainfall(self, rainfall: xr.Dataset | xr.DataArray):
+        if self.rainfall is None or self.rainfall.size == 0:
+            raise ValueError("No rainfall forcing found in the model.")
+        elif "precip_2d" in self._model.forcing:
+            self._model.forcing["precip_2d"] = rainfall
+        elif "precip" in self._model.forcing:
+            self._model.forcing["precip"] = rainfall
+        else:
+            raise ValueError("Unsupported rainfall forcing in the model.")
+
+    @property
+    def wind(self) -> xr.Dataset | xr.DataArray | None:
+        wind_names = ["wnd", "wind_2d", "wind", "wind10_u", "wind10_v"]
+        wind_in_model = [name for name in wind_names if name in self._model.forcing]
+        if len(wind_in_model) == 0:
+            return None
+        elif len(wind_in_model) == 1:
+            return self._model.forcing[wind_in_model[0]]
+        elif len(wind_in_model) == 2:
+            if not ("wind10_u" in wind_in_model and "wind10_v" in wind_in_model):
+                raise ValueError(
+                    "Multiple wind forcings found in the model. Both should be wind10_u and wind10_v or a singular wind forcing."
+                )
+            return xr.Dataset(
+                {
+                    "wind10_u": self._model.forcing["wind10_u"],
+                    "wind10_v": self._model.forcing["wind10_v"],
+                }
+            )
+        else:
+            raise ValueError("Multiple wind forcings found in the model.")
+
+    @wind.setter
+    def wind(self, wind: xr.Dataset | xr.DataArray):
+        if (not self.wind) or (self.wind.size == 0):
+            raise ValueError("No wind forcing found in the model.")
+
+        elif "wind_2d" in self._model.forcing:
+            self._model.forcing["wind_2d"] = wind
+        elif "wind" in self._model.forcing:
+            self._model.forcing["wind"] = wind
+        elif "wnd" in self._model.forcing:
+            self._model.forcing["wnd"] = wind
+        elif "wind10_u" in self._model.forcing and "wind10_v" in self._model.forcing:
+            self._model.forcing["wind10_u"] = wind["wind10_u"]
+            self._model.forcing["wind10_v"] = wind["wind10_v"]
+        else:
+            raise ValueError("Unsupported wind forcing in the model.")
+
+    ### OUTPUT ###
+    def run_completed(self, scenario: Scenario) -> bool:
+        """Check if the entire model run has been completed successfully by checking if all flood maps exist that are created in postprocess().
+
+        Returns
+        -------
+        bool : True if all flood maps exist, False otherwise.
+
+        """
+        any_floodmap = len(self._get_flood_map_paths(scenario)) > 0
+        all_exist = all(
+            floodmap.exists() for floodmap in self._get_flood_map_paths(scenario)
+        )
+        return any_floodmap and all_exist
+
+    def sfincs_completed(self, sim_path: Path) -> bool:
+        """Check if the sfincs executable has been run successfully by checking if the output files exist in the simulation folder.
+
+        Parameters
+        ----------
+        sim_path : Path
+            Path to the simulation folder to check.
+
+        Returns
+        -------
+        bool: True if the sfincs executable has been run successfully, False otherwise.
+
+        """
+        SFINCS_OUTPUT_FILES = ["sfincs_map.nc"]
+
+        if self.settings.obs_point is not None:
+            SFINCS_OUTPUT_FILES.append("sfincs_his.nc")
+
+        to_check = [Path(sim_path) / file for file in SFINCS_OUTPUT_FILES]
+        return all(output.exists() for output in to_check)
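
Note: completion here simply means the expected NetCDF outputs exist ("sfincs_his.nc" only when observation points are configured). The same check, reduced to a free function for illustration:

    from pathlib import Path

    def outputs_exist(sim_path: Path, with_obs: bool) -> bool:
        # Name and signature are illustrative, not part of the package.
        required = ["sfincs_map.nc"] + (["sfincs_his.nc"] if with_obs else [])
        return all((Path(sim_path) / f).exists() for f in required)
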
|
|
542
|
+
|
|
543
|
+
def write_floodmap_geotiff(
|
|
544
|
+
self, scenario: Scenario, sim_path: Optional[Path] = None
|
|
545
|
+
):
|
|
546
|
+
"""
|
|
547
|
+
Read simulation results from SFINCS and saves a geotiff with the maximum water levels.
|
|
548
|
+
|
|
549
|
+
Produced floodmap is in the units defined in the sfincs config settings.
|
|
550
|
+
|
|
551
|
+
Parameters
|
|
552
|
+
----------
|
|
553
|
+
scenario : Scenario
|
|
554
|
+
Scenario for which to create the floodmap.
|
|
555
|
+
sim_path : Path, optional
|
|
556
|
+
Path to the simulation folder, by default None.
|
|
557
|
+
"""
|
|
558
|
+
self.logger.info("Writing flood maps to geotiff")
|
|
559
|
+
results_path = self._get_result_path(scenario)
|
|
560
|
+
sim_path = sim_path or self._get_simulation_path(scenario)
|
|
561
|
+
demfile = self.database.static_path / "dem" / self.settings.dem.filename
|
|
562
|
+
|
|
563
|
+
with SfincsAdapter(model_root=sim_path) as model:
|
|
564
|
+
zsmax = model._get_zsmax()
|
|
565
|
+
|
|
566
|
+
dem = model._model.data_catalog.get_rasterdataset(demfile)
|
|
567
|
+
|
|
568
|
+
# convert dem from dem units to floodmap units
|
|
569
|
+
dem_conversion = us.UnitfulLength(
|
|
570
|
+
value=1.0, units=self.settings.dem.units
|
|
571
|
+
).convert(self.settings.config.floodmap_units)
|
|
572
|
+
|
|
573
|
+
floodmap_fn = results_path / f"FloodMap_{scenario.name}.tif"
|
|
574
|
+
|
|
575
|
+
# convert zsmax from meters to floodmap units
|
|
576
|
+
floodmap_conversion = us.UnitfulLength(
|
|
577
|
+
value=1.0, units=us.UnitTypesLength.meters
|
|
578
|
+
).convert(self.settings.config.floodmap_units)
|
|
579
|
+
|
|
580
|
+
utils.downscale_floodmap(
|
|
581
|
+
zsmax=floodmap_conversion * zsmax,
|
|
582
|
+
dep=dem_conversion * dem,
|
|
583
|
+
hmin=0.01,
|
|
584
|
+
floodmap_fn=str(floodmap_fn),
|
|
585
|
+
)
|
|
586
|
+
|
|
587
|
+
def write_water_level_map(
|
|
588
|
+
self, scenario: Scenario, sim_path: Optional[Path] = None
|
|
589
|
+
):
|
|
590
|
+
"""Read simulation results from SFINCS and saves a netcdf with the maximum water levels."""
|
|
591
|
+
self.logger.info("Writing water level map to netcdf")
|
|
592
|
+
results_path = self._get_result_path(scenario)
|
|
593
|
+
sim_path = sim_path or self._get_simulation_path(scenario)
|
|
594
|
+
|
|
595
|
+
with SfincsAdapter(model_root=sim_path) as model:
|
|
596
|
+
zsmax = model._get_zsmax()
|
|
597
|
+
zsmax.to_netcdf(results_path / "max_water_level_map.nc")
|
|
598
|
+
|
|
599
|
    def plot_wl_obs(
        self,
        scenario: Scenario,
    ):
        """Plot water levels at SFINCS observation points as HTML.

        Only for single-event scenarios, or for a specific simulation path containing the written and processed SFINCS model.
        """
        if not self.settings.obs_point:
            self.logger.warning("No observation points provided in config.")
            return

        self.logger.info("Plotting water levels at observation points")
        sim_path = self._get_simulation_path(scenario)

        # read SFINCS model
        with SfincsAdapter(model_root=sim_path) as model:
            df, gdf = model._get_zs_points()

        gui_units = us.UnitTypesLength(
            self.database.site.gui.units.default_length_units
        )
        conversion_factor = us.UnitfulLength(
            value=1.0, units=us.UnitTypesLength("meters")
        ).convert(gui_units)

        overland_reference_height = self.settings.water_level.get_datum(
            self.settings.config.overland_model.reference
        ).height.convert(gui_units)

        for ii, col in enumerate(df.columns):
            # plot the water level timeseries at this station
            fig = px.line(
                df[col] * conversion_factor
                + overland_reference_height  # convert to reference datum for plotting
            )

            fig.add_hline(
                y=0,
                line_dash="dash",
                line_color="#000000",
                annotation_text=self.settings.water_level.reference,
                annotation_position="bottom right",
            )

            # plot reference water levels
            for wl_ref in self.settings.water_level.datums:
                if (
                    wl_ref.name == self.settings.config.overland_model.reference
                    or wl_ref.name in self.database.site.gui.plotting.excluded_datums
                ):
                    continue
                fig.add_hline(
                    y=wl_ref.height.convert(gui_units),
                    line_dash="dash",
                    line_color="#3ec97c",
                    annotation_text=wl_ref.name,
                    annotation_position="bottom right",
                )

            fig.update_layout(
                autosize=False,
                height=100 * 2,
                width=280 * 2,
                margin={"r": 0, "l": 0, "b": 0, "t": 20},
                font={"size": 10, "color": "black", "family": "Arial"},
                title={
                    "text": gdf.iloc[ii]["Description"],
                    "font": {"size": 12, "color": "black", "family": "Arial"},
                    "x": 0.5,
                    "xanchor": "center",
                },
                xaxis_title="Time",
                yaxis_title=f"Water level [{gui_units.value}] above {self.settings.water_level.reference}",
                yaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
                xaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
                showlegend=False,
            )

            event = self.database.events.get(scenario.event)
            if (
                self.settings.tide_gauge is not None
                and self.settings.obs_point[ii].name == self.settings.tide_gauge.name
            ):
                self._add_tide_gauge_plot(fig, event, units=gui_units)

            # write HTML to results folder
            station_name = gdf.iloc[ii]["Name"]
            results_path = self._get_result_path(scenario)
            fig.write_html(results_path / f"{station_name}_timeseries.html")
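    # A minimal sketch (editorial illustration) of the unit-conversion pattern
    # used above: `us.UnitfulLength(value=1.0, units=A).convert(B)` yields the
    # scalar factor that maps values in unit A to unit B:
    #
    #     factor = us.UnitfulLength(
    #         value=1.0, units=us.UnitTypesLength("meters")
    #     ).convert(us.UnitTypesLength("feet"))
    #     # factor ~= 3.2808, so water levels in meters * factor are in feet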
    def add_obs_points(self):
        """Add observation points provided in the site toml to SFINCS model."""
        if self.settings.obs_point is None:
            return
        self.logger.info("Adding observation points to the overland flood model")

        obs_points = self.settings.obs_point
        names = []
        lat = []
        lon = []
        for pt in obs_points:
            names.append(pt.name)
            lat.append(pt.lat)
            lon.append(pt.lon)

        # create GeoDataFrame from obs_points in site file
        df = pd.DataFrame({"name": names})
        gdf = gpd.GeoDataFrame(
            df, geometry=gpd.points_from_xy(lon, lat), crs="EPSG:4326"
        )

        # Add locations to SFINCS file
        self._model.setup_observation_points(locations=gdf, merge=False)

    def get_wl_df_from_offshore_his_results(self) -> pd.DataFrame:
        """Create a pd.DataFrame with water levels from the offshore model at the bnd locations of the overland model.

        Returns
        -------
        wl_df: pd.DataFrame
            time series of water level.
        """
        self.logger.info("Reading water levels from offshore model")
        ds_his = utils.read_sfincs_his_results(
            Path(self._model.root) / "sfincs_his.nc",
            crs=self._model.crs.to_epsg(),
        )
        wl_df = pd.DataFrame(
            data=ds_his.point_zs.to_numpy(),
            index=ds_his.time.to_numpy(),
            columns=np.arange(1, ds_his.point_zs.to_numpy().shape[1] + 1, 1),
        )
        return wl_df
    ## RISK EVENTS ##
    def calculate_rp_floodmaps(self, scenario: Scenario):
        """Calculate flood risk maps from a set of (currently) SFINCS water level outputs using linear interpolation.

        It would be nice to make it more widely applicable and move the loading of the SFINCS results to self.postprocess_sfincs().

        Generates return period water level maps in NetCDF format to be used by FIAT,
        and return period water depth maps in GeoTIFF format as a product for users.

        TODO: make this robust and more efficient for bigger datasets.
        """
        event: EventSet = self.database.events.get(scenario.event, load_all=True)
        if not isinstance(event, EventSet):
            raise ValueError("This function is only available for risk scenarios.")

        result_path = self._get_result_path(scenario)
        sim_paths = [
            self._get_simulation_path(scenario, sub_event=sub_event)
            for sub_event in event._events
        ]

        phys_proj = self.database.projections.get(
            scenario.projection
        ).physical_projection

        floodmap_rp = self.database.site.fiat.risk.return_periods
        frequencies = [sub_event.frequency for sub_event in event.sub_events]

        # adjust storm frequency for hurricane events
        if not math.isclose(phys_proj.storm_frequency_increase, 0, abs_tol=1e-9):
            storminess_increase = phys_proj.storm_frequency_increase / 100.0
            for ii, sub_event in enumerate(event._events):
                if sub_event.template == Template.Hurricane:
                    frequencies[ii] = frequencies[ii] * (1 + storminess_increase)

        with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
            # read mask and bed level
            mask = dummymodel.get_mask().stack(z=("x", "y"))
            zb = dummymodel.get_bedlevel().stack(z=("x", "y")).to_numpy()

        zs_maps = []
        for simulation_path in sim_paths:
            # read zsmax data from overland sfincs model
            with SfincsAdapter(model_root=simulation_path) as sim:
                zsmax = sim._get_zsmax().load()
                zs_stacked = zsmax.stack(z=("x", "y"))
                zs_maps.append(zs_stacked)

        # Create RP flood maps

        # 1a: make a table of all water levels and associated frequencies
        zs = xr.concat(zs_maps, pd.Index(frequencies, name="frequency"))
        # Get the indices of columns with all NaN values
        nan_cells = np.where(np.all(np.isnan(zs), axis=0))[0]
        # fill nan values with minimum bed levels in each grid cell, np.interp cannot ignore nan values
        zs = xr.where(np.isnan(zs), np.tile(zb, (zs.shape[0], 1)), zs)
        # Get table of frequencies
        freq = np.tile(frequencies, (zs.shape[1], 1)).transpose()

        # 1b: sort water levels in descending order and include the frequencies in the sorting process
        # (i.e. each h-value should be linked to the same p-values as in step 1a)
        sort_index = zs.argsort(axis=0)
        sorted_prob = np.flipud(np.take_along_axis(freq, sort_index, axis=0))
        sorted_zs = np.flipud(np.take_along_axis(zs.values, sort_index, axis=0))

        # 1c: Compute exceedance probabilities of water depths
        # Method: accumulate probabilities from top to bottom
        prob_exceed = np.cumsum(sorted_prob, axis=0)

        # 1d: Compute return periods of water depths
        # Method: simply take the inverse of the exceedance probability (1/Pex)
        rp_zs = 1.0 / prob_exceed

        # For each return period (T) of interest do the following:
        # For each grid cell do the following:
        # Use the table from step [1d] as a "lookup-table" to derive the T-year water depth. Use a 1-d interpolation technique:
        # h(T) = interp1 (log(T*), h*, log(T))
        # in which T* and h* are the values from the table and T is the return period (T) of interest
        # The resulting T-year water depths for all grids combined form the T-year hazard map
        rp_da = xr.DataArray(rp_zs, dims=zs.dims)

        # no_data_value = -999 # in SFINCS
        # sorted_zs = xr.where(sorted_zs == no_data_value, np.nan, sorted_zs)

        valid_cells = np.where(mask == 1)[
            0
        ]  # only loop over cells where model is not masked
        h = matlib.repmat(
            np.copy(zb), len(floodmap_rp), 1
        )  # if not flooded (i.e. not in valid_cells) revert to bed_level, read from SFINCS results so it is the minimum bed level in a grid cell

        self.logger.info("Calculating flood risk maps, this may take some time")
        for jj in valid_cells:  # looping over all non-masked cells.
            # linear interpolation for all return periods to evaluate
            h[:, jj] = np.interp(
                np.log10(floodmap_rp),
                np.log10(rp_da[::-1, jj]),
                sorted_zs[::-1, jj],
                left=0,
            )

        # Re-fill locations that had nan water level for all simulations with nans
        h[:, nan_cells] = np.full(h[:, nan_cells].shape, np.nan)

        # If a cell has the same water level as the bed elevation it should be dry (turn to nan)
        diff = h - np.tile(zb, (h.shape[0], 1))
        dry = (
            diff < 10e-10
        )  # here we use a small number instead of zero for rounding errors
        h[dry] = np.nan

        for ii, rp in enumerate(floodmap_rp):
            # create single nc
            zs_rp_single = xr.DataArray(
                data=h[ii, :], coords={"z": zs["z"]}, attrs={"units": "meters"}
            ).unstack()
            zs_rp_single = zs_rp_single.rio.write_crs(
                zsmax.raster.crs
            )  # , inplace=True)
            zs_rp_single = zs_rp_single.to_dataset(name="risk_map")
            fn_rp = result_path / f"RP_{rp:04d}_maps.nc"
            zs_rp_single.to_netcdf(fn_rp)

            # write geotiff
            # dem file for high resolution flood depth map
            demfile = self.database.static_path / "dem" / self.settings.dem.filename

            # writing the geotiff to the scenario results folder
            with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
                dem = dummymodel._model.data_catalog.get_rasterdataset(demfile)
                zsmax = zs_rp_single.to_array().squeeze().transpose()
                floodmap_fn = fn_rp.with_suffix(".tif")

                # convert dem from dem units to floodmap units
                dem_conversion = us.UnitfulLength(
                    value=1.0, units=self.settings.dem.units
                ).convert(self.settings.config.floodmap_units)

                # convert zsmax from meters to floodmap units
                floodmap_conversion = us.UnitfulLength(
                    value=1.0, units=us.UnitTypesLength.meters
                ).convert(self.settings.config.floodmap_units)

                utils.downscale_floodmap(
                    zsmax=floodmap_conversion * zsmax,
                    dep=dem_conversion * dem,
                    hmin=0.01,
                    floodmap_fn=str(floodmap_fn),
                )
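    # A worked numeric sketch (editorial illustration) of steps 1a-1d above for
    # a single grid cell with three events (frequencies in 1/yr, levels in m):
    #
    #     import numpy as np
    #     freq = np.array([0.5, 0.1, 0.02])      # per-event frequencies
    #     zs = np.array([1.2, 2.0, 2.8])         # water levels, ascending
    #     order = np.argsort(zs)[::-1]           # sort levels descending
    #     prob_exceed = np.cumsum(freq[order])   # 0.02, 0.12, 0.62
    #     rp = 1.0 / prob_exceed                 # 50, 8.3, 1.6 years
    #     # 10-year level via log-linear interpolation (x must be increasing):
    #     h10 = np.interp(np.log10(10.0), np.log10(rp[::-1]), zs[order][::-1])
    #     # h10 ~= 2.08 m, between the 8.3-year (2.0 m) and 50-year (2.8 m) levels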
    ######################################
    ### PRIVATE - use at your own risk ###
    ######################################
    def _run_single_event(self, scenario: Scenario, event: Event):
        self.preprocess(scenario, event)
        self.process(scenario, event)
        self.postprocess(scenario, event)
        shutil.rmtree(
            self._get_simulation_path(scenario, sub_event=event), ignore_errors=True
        )

    def _run_risk_scenario(self, scenario: Scenario):
        """Run the whole workflow for a risk scenario.

        This means preprocessing and running the SFINCS model for each event in the event set, and then postprocessing the results.
        """
        event_set: EventSet = self.database.events.get(scenario.event, load_all=True)
        total = len(event_set._events)

        for i, sub_event in enumerate(event_set._events):
            sim_path = self._get_simulation_path(scenario, sub_event=sub_event)

            # Preprocess
            self.preprocess(scenario, event=sub_event)
            self.logger.info(
                f"Running SFINCS for Eventset Scenario `{scenario.name}`, Event `{sub_event.name}` ({i + 1}/{total})"
            )
            self.execute(sim_path)

        # Postprocess
        self.calculate_rp_floodmaps(scenario)

        # Cleanup
        for sub_event in event_set._events:
            shutil.rmtree(
                self._get_simulation_path(scenario, sub_event=sub_event),
                ignore_errors=True,
            )

    def _ensure_no_existing_forcings(self):
        """Check for existing forcings in the model and raise an error if any are found."""
        all_forcings = {
            "waterlevel": self.waterlevels,
            "rainfall": self.rainfall,
            "wind": self.wind,
            "discharge": self.discharge,
        }
        contains_forcings = ", ".join(
            name.capitalize()
            for name, forcing in all_forcings.items()
            if forcing is not None
        )
        if contains_forcings:
            raise ValueError(
                f"{contains_forcings} forcing(s) should not exist in the SFINCS template model. "
                f"Remove them from the SFINCS model located at: {self.get_model_root()}. "
                "For more information on SFINCS and its input files, see the SFINCS documentation at: "
                "`https://sfincs.readthedocs.io/en/latest/input.html`"
            )
    ### FORCING ###
    def _add_forcing_wind(
        self,
        wind: IWind,
    ):
        """Add spatially constant wind forcing to the SFINCS model, from a timeseries or a constant magnitude and direction.

        Parameters
        ----------
        wind : IWind
            The wind forcing to add to the model. Timeseries input (.csv) is
            expected to have three columns: time, magnitude [m/s] and
            direction [deg].
        """
        time_frame = self.get_model_time()
        if isinstance(wind, WindConstant):
            # HydroMT function: set wind forcing from constant magnitude and direction
            self._model.setup_wind_forcing(
                timeseries=None,
                magnitude=wind.speed.convert(us.UnitTypesVelocity.mps),
                direction=wind.direction.value,
            )
        elif isinstance(wind, WindSynthetic):
            df = wind.to_dataframe(time_frame=time_frame)
            df["mag"] *= us.UnitfulVelocity(
                value=1.0, units=self.units.default_velocity_units
            ).convert(us.UnitTypesVelocity.mps)

            tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
            df.to_csv(tmp_path)

            # HydroMT function: set wind forcing from timeseries
            self._model.setup_wind_forcing(
                timeseries=tmp_path, magnitude=None, direction=None
            )
        elif isinstance(wind, WindMeteo):
            ds = MeteoHandler().read(time_frame)
            # data already in metric units so no conversion needed

            # HydroMT function: set wind forcing from grid
            self._model.setup_wind_forcing_from_grid(wind=ds)
        elif isinstance(wind, WindTrack):
            # data already in metric units so no conversion needed
            self._add_forcing_spw(wind)
        elif isinstance(wind, WindNetCDF):
            ds = wind.read()
            # time slicing to time_frame not needed, hydromt-sfincs handles it
            conversion = us.UnitfulVelocity(value=1.0, units=wind.units).convert(
                us.UnitTypesVelocity.mps
            )
            ds *= conversion
            self._model.setup_wind_forcing_from_grid(wind=ds)
        elif isinstance(wind, WindCSV):
            df = wind.to_dataframe(time_frame=time_frame)

            conversion = us.UnitfulVelocity(
                value=1.0, units=wind.units["speed"]
            ).convert(us.UnitTypesVelocity.mps)
            df *= conversion

            tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
            df.to_csv(tmp_path)

            # HydroMT function: set wind forcing from timeseries
            self._model.setup_wind_forcing(
                timeseries=tmp_path,
                magnitude=None,
                direction=None,
            )
        else:
            self.logger.warning(
                f"Unsupported wind forcing type: {wind.__class__.__name__}"
            )
            return
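    # A minimal sketch (editorial illustration) of the wind timeseries CSV that
    # the branches above write to a temporary file; the column names are an
    # assumption based on the "mag" column used for WindSynthetic:
    #
    #     import pandas as pd
    #     times = pd.date_range("2023-01-01", periods=3, freq="h")
    #     df = pd.DataFrame(
    #         {"mag": [5.0, 7.5, 6.0], "dir": [270.0, 280.0, 290.0]}, index=times
    #     )
    #     df.to_csv("wind.csv")  # time index, magnitude [m/s], direction [deg]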
    def _add_forcing_rain(self, rainfall: IRainfall):
        """Add spatially constant rain forcing to the SFINCS model, from a timeseries or a constant magnitude.

        Parameters
        ----------
        rainfall : IRainfall
            The rainfall forcing to add to the model. Timeseries input (.csv)
            is expected to have two columns: time and precipitation [mm/hr].
        """
        time_frame = self.get_model_time()
        if isinstance(rainfall, RainfallConstant):
            self._model.setup_precip_forcing(
                timeseries=None,
                magnitude=rainfall.intensity.convert(us.UnitTypesIntensity.mm_hr),
            )
        elif isinstance(rainfall, RainfallCSV):
            df = rainfall.to_dataframe(time_frame=time_frame)
            conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
                us.UnitTypesIntensity.mm_hr
            )
            df *= conversion

            tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
            df.to_csv(tmp_path)

            self._model.setup_precip_forcing(timeseries=tmp_path)
        elif isinstance(rainfall, RainfallSynthetic):
            df = rainfall.to_dataframe(time_frame=time_frame)

            if rainfall.timeseries.cumulative is not None:  # scs
                conversion = us.UnitfulLength(
                    value=1.0, units=rainfall.timeseries.cumulative.units
                ).convert(us.UnitTypesLength.millimeters)
            else:
                conversion = us.UnitfulIntensity(
                    value=1.0, units=rainfall.timeseries.peak_value.units
                ).convert(us.UnitTypesIntensity.mm_hr)

            df *= conversion
            tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
            df.to_csv(tmp_path)

            self._model.setup_precip_forcing(timeseries=tmp_path)
        elif isinstance(rainfall, RainfallMeteo):
            ds = MeteoHandler().read(time_frame)
            # MeteoHandler always returns metric so no conversion needed
            self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
        elif isinstance(rainfall, RainfallTrack):
            # data already in metric units so no conversion needed
            self._add_forcing_spw(rainfall)
        elif isinstance(rainfall, RainfallNetCDF):
            ds = rainfall.read()
            # time slicing to time_frame not needed, hydromt-sfincs handles it
            conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
                us.UnitTypesIntensity.mm_hr
            )
            ds *= conversion
            self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
        else:
            self.logger.warning(
                f"Unsupported rainfall forcing type: {rainfall.__class__.__name__}"
            )
            return
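    # A small sketch (editorial illustration) of the intensity conversion used
    # above, mapping imperial rainfall input to the mm/hr that SFINCS expects;
    # the imperial member name `inch_hr` is an assumption:
    #
    #     conversion = us.UnitfulIntensity(
    #         value=1.0, units=us.UnitTypesIntensity.inch_hr
    #     ).convert(us.UnitTypesIntensity.mm_hr)
    #     # conversion == 25.4, so a 2 inch/hr series becomes 50.8 mm/hr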
    def _add_forcing_discharge(self, forcing: IDischarge):
        """Add discharge forcing for a single river to the SFINCS model, from a timeseries or a constant magnitude.

        Parameters
        ----------
        forcing : IDischarge
            The discharge forcing to add to the model.
            Can be a constant, synthetic or from a csv file.
            Also contains the river information.
        """
        if isinstance(forcing, (DischargeConstant, DischargeCSV, DischargeSynthetic)):
            self._set_single_river_forcing(discharge=forcing)
        else:
            self.logger.warning(
                f"Unsupported discharge forcing type: {forcing.__class__.__name__}"
            )

    def _add_forcing_waterlevels(self, forcing: IWaterlevel):
        time_frame = self.get_model_time()
        if isinstance(forcing, WaterlevelSynthetic):
            df_ts = forcing.to_dataframe(time_frame=time_frame)

            conversion = us.UnitfulLength(
                value=1.0, units=forcing.surge.timeseries.peak_value.units
            ).convert(us.UnitTypesLength.meters)
            datum_correction = self.settings.water_level.get_datum(
                self.database.site.gui.plotting.synthetic_tide.datum
            ).height.convert(us.UnitTypesLength.meters)

            df_ts = df_ts * conversion + datum_correction

            self._set_waterlevel_forcing(df_ts)
        elif isinstance(forcing, WaterlevelGauged):
            if self.settings.tide_gauge is None:
                raise ValueError("No tide gauge defined for this site.")

            df_ts = self.settings.tide_gauge.get_waterlevels_in_time_frame(
                time=time_frame,
            )
            conversion = us.UnitfulLength(
                value=1.0, units=self.settings.tide_gauge.units
            ).convert(us.UnitTypesLength.meters)

            datum_height = self.settings.water_level.get_datum(
                self.settings.tide_gauge.reference
            ).height.convert(us.UnitTypesLength.meters)

            df_ts = conversion * df_ts + datum_height

            self._set_waterlevel_forcing(df_ts)
        elif isinstance(forcing, WaterlevelCSV):
            df_ts = forcing.to_dataframe(time_frame=time_frame)

            if df_ts is None:
                raise ValueError("Failed to get waterlevel data.")
            conversion = us.UnitfulLength(value=1.0, units=forcing.units).convert(
                us.UnitTypesLength.meters
            )
            df_ts *= conversion
            self._set_waterlevel_forcing(df_ts)

        elif isinstance(forcing, WaterlevelModel):
            from flood_adapt.adapter.sfincs_offshore import OffshoreSfincsHandler

            if self.settings.config.offshore_model is None:
                raise ValueError("Offshore model configuration is missing.")
            if self._scenario is None or self._event is None:
                raise ValueError(
                    "Scenario and event must be provided to run the offshore model."
                )

            df_ts = OffshoreSfincsHandler(
                scenario=self._scenario, event=self._event
            ).get_resulting_waterlevels()
            if df_ts is None:
                raise ValueError("Failed to get waterlevel data.")

            # Datum
            datum_correction = self.settings.water_level.get_datum(
                self.settings.config.offshore_model.reference
            ).height.convert(us.UnitTypesLength.meters)
            df_ts += datum_correction

            # Already in meters since it was produced by SFINCS so no conversion needed
            self._set_waterlevel_forcing(df_ts)
            self._turn_off_bnd_press_correction()
        else:
            self.logger.warning(
                f"Unsupported waterlevel forcing type: {forcing.__class__.__name__}"
            )
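    # A short sketch (editorial illustration) of the datum arithmetic used in
    # the branches above: gauge/synthetic series are first scaled to meters and
    # then shifted by the height of their reference datum, so that all water
    # levels end up relative to the site's main reference:
    #
    #     level_gauge_ft = 3.0    # level relative to the gauge's own datum
    #     to_meters = 0.3048      # feet -> meters conversion factor
    #     datum_height_m = 0.2    # gauge datum height above the main reference
    #     level_main_ref_m = level_gauge_ft * to_meters + datum_height_m
    #     # level_main_ref_m == 1.1144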
    # SPIDERWEB
    def _add_forcing_spw(self, forcing: Union[RainfallTrack, WindTrack]):
        """Add spiderweb forcing."""
        if forcing.source != ForcingSource.TRACK:
            raise ValueError("Forcing source should be TRACK.")

        if forcing.path is None:
            raise ValueError("No path to track file provided.")

        if not forcing.path.exists():
            # Check if the file is in the database
            in_db = self._get_event_input_path(self._event) / forcing.path.name
            if not in_db.exists():
                raise FileNotFoundError(
                    f"Input file for track forcing not found: {forcing.path}"
                )
            forcing.path = in_db

        if forcing.path.suffix == ".cyc":
            forcing.path = self._create_spw_file_from_track(
                track_forcing=forcing,
                hurricane_translation=self._event.hurricane_translation,
                name=self._event.name,
                output_dir=forcing.path.parent,
                include_rainfall=bool(self._event.forcings.get(ForcingType.RAINFALL)),
                recreate=False,
            )

        if forcing.path.suffix != ".spw":
            raise ValueError(
                "Track files should be in one of the [spw, ddb_cyc] file formats and must have a [.spw, .cyc] extension."
            )

        sim_path = self.get_model_root()
        self.logger.info(f"Adding spiderweb forcing to Sfincs model: {sim_path.name}")

        # prevent SameFileError
        output_spw_path = sim_path / forcing.path.name
        if forcing.path == output_spw_path:
            raise ValueError(
                "Add a different SPW file than the one already in the model."
            )

        if output_spw_path.exists():
            os.remove(output_spw_path)
        shutil.copy2(forcing.path, output_spw_path)

        self._model.set_config("spwfile", output_spw_path.name)
    ### MEASURES ###
    def _add_measure_floodwall(self, floodwall: FloodWall):
        """Add floodwall to sfincs model.

        Parameters
        ----------
        floodwall : FloodWall
            floodwall information
        """
        polygon_file = resolve_filepath(
            object_dir=ObjectDir.measure,
            obj_name=floodwall.name,
            path=floodwall.polygon_file,
        )

        # HydroMT function: get geodataframe from filename
        gdf_floodwall = self._model.data_catalog.get_geodataframe(
            polygon_file, geom=self._model.region, crs=self._model.crs
        )

        # Add floodwall attributes to geodataframe
        gdf_floodwall["name"] = floodwall.name
        if (gdf_floodwall.geometry.type == "MultiLineString").any():
            gdf_floodwall = gdf_floodwall.explode()

        try:
            heights = [
                float(
                    us.UnitfulLength(
                        value=float(height),
                        units=self.database.site.gui.units.default_length_units,
                    ).convert(us.UnitTypesLength("meters"))
                )
                for height in gdf_floodwall["z"]
            ]
            gdf_floodwall["z"] = heights
            self.logger.info("Using floodwall height from shape file.")
        except Exception:
            self.logger.warning(
                f"Could not use height data from file due to missing `z` column or missing values therein. Using uniform height of {floodwall.elevation} instead."
            )
            gdf_floodwall["z"] = floodwall.elevation.convert(
                us.UnitTypesLength(us.UnitTypesLength.meters)
            )

        # par1 is the overflow coefficient for weirs
        gdf_floodwall["par1"] = 0.6

        # HydroMT function: create floodwall
        self._model.setup_structures(structures=gdf_floodwall, stype="weir", merge=True)
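    # A minimal sketch (editorial illustration) of the GeoDataFrame consumed by
    # `setup_structures` above: one line geometry per wall segment with a crest
    # level `z` in meters and the weir overflow coefficient `par1`:
    #
    #     import geopandas as gpd
    #     from shapely.geometry import LineString
    #     gdf = gpd.GeoDataFrame(
    #         {
    #             "name": ["seawall"],
    #             "z": [2.5],      # crest height [m]
    #             "par1": [0.6],   # weir overflow coefficient
    #         },
    #         geometry=[LineString([(0.0, 0.0), (100.0, 0.0)])],
    #         crs="EPSG:32617",    # any projected CRS matching the model
    #     )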
    def _add_measure_greeninfra(self, green_infrastructure: GreenInfrastructure):
        # HydroMT function: get geodataframe from filename
        if green_infrastructure.selection_type == "polygon":
            polygon_file = resolve_filepath(
                ObjectDir.measure,
                green_infrastructure.name,
                green_infrastructure.polygon_file,
            )
        elif green_infrastructure.selection_type == "aggregation_area":
            # TODO this logic already exists in the Database controller but cannot be used due to cyclic imports
            # Loop through available aggregation area types
            for aggr_dict in self.database.site.fiat.config.aggregation:
                # check which one is used in measure
                if not aggr_dict.name == green_infrastructure.aggregation_area_type:
                    continue
                # load geodataframe
                aggr_areas = gpd.read_file(
                    db_path(TopLevelDir.static) / aggr_dict.file,
                    engine="pyogrio",
                ).to_crs(4326)
                # keep only aggregation area chosen
                polygon_file = aggr_areas.loc[
                    aggr_areas[aggr_dict.field_name]
                    == green_infrastructure.aggregation_area_name,
                    ["geometry"],
                ].reset_index(drop=True)
        else:
            raise ValueError(
                f"The selection type: {green_infrastructure.selection_type} is not valid"
            )

        gdf_green_infra = self._model.data_catalog.get_geodataframe(
            polygon_file,
            geom=self._model.region,
            crs=self._model.crs,
        )

        # Make sure no multipolygons are there
        gdf_green_infra = gdf_green_infra.explode()

        # HydroMT function: create storage volume
        self._model.setup_storage_volume(
            storage_locs=gdf_green_infra,
            volume=green_infrastructure.volume.convert(us.UnitTypesVolume.m3),
            merge=True,
        )

    def _add_measure_pump(self, pump: Pump):
        """Add pump to sfincs model.

        Parameters
        ----------
        pump : Pump
            pump information
        """
        polygon_file = resolve_filepath(ObjectDir.measure, pump.name, pump.polygon_file)
        # HydroMT function: get geodataframe from filename
        gdf_pump = self._model.data_catalog.get_geodataframe(
            polygon_file, geom=self._model.region, crs=self._model.crs
        )

        # HydroMT function: create pump as a drainage structure
        self._model.setup_drainage_structures(
            structures=gdf_pump,
            stype="pump",
            discharge=pump.discharge.convert(us.UnitTypesDischarge.cms),
            merge=True,
        )
    ### SFINCS SETTERS ###
    def _set_single_river_forcing(self, discharge: IDischarge):
        """Add discharge to overland sfincs model.

        Parameters
        ----------
        discharge : IDischarge
            Discharge object with discharge timeseries data and river information.
        """
        if not isinstance(
            discharge, (DischargeConstant, DischargeSynthetic, DischargeCSV)
        ):
            self.logger.warning(
                f"Unsupported discharge forcing type: {discharge.__class__.__name__}"
            )
            return

        self.logger.info(f"Setting discharge forcing for river: {discharge.river.name}")

        time_frame = self.get_model_time()
        model_rivers = self._read_river_locations()

        # Check that the river is defined in the model and that the coordinates match
        river_loc = shapely.Point(
            discharge.river.x_coordinate, discharge.river.y_coordinate
        )
        tolerance = 0.001  # in degrees, ~111 meters at the equator. (0.0001: 11 meters at the equator)
        river_gdf = model_rivers[model_rivers.distance(river_loc) <= tolerance]
        river_inds = river_gdf.index.to_list()
        if len(river_inds) != 1:
            raise ValueError(
                f"River {discharge.river.name} is not defined in the sfincs model. Please ensure the river coordinates in the site.toml match the coordinates for rivers in the SFINCS model."
            )

        # Create a geodataframe with the river coordinates, the timeseries data and rename the column to the river index defined in the model
        if isinstance(discharge, DischargeCSV):
            df = discharge.to_dataframe(time_frame)
            conversion = us.UnitfulDischarge(value=1.0, units=discharge.units).convert(
                us.UnitTypesDischarge.cms
            )
        elif isinstance(discharge, DischargeConstant):
            df = discharge.to_dataframe(time_frame)
            conversion = us.UnitfulDischarge(
                value=1.0, units=discharge.discharge.units
            ).convert(us.UnitTypesDischarge.cms)
        elif isinstance(discharge, DischargeSynthetic):
            df = discharge.to_dataframe(time_frame)
            conversion = us.UnitfulDischarge(
                value=1.0, units=discharge.timeseries.peak_value.units
            ).convert(us.UnitTypesDischarge.cms)
        else:
            raise ValueError(
                f"Unsupported discharge forcing type: {discharge.__class__}"
            )

        df *= conversion

        df = df.rename(columns={df.columns[0]: river_inds[0]})

        # HydroMT function: set discharge forcing from time series and river coordinates
        self._model.setup_discharge_forcing(
            locations=river_gdf,
            timeseries=df,
            merge=True,
        )
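    # A small sketch (editorial illustration) of the point matching used above:
    # river locations parsed from sfincs.src are compared against the configured
    # river coordinate within a fixed tolerance, and exactly one must match:
    #
    #     import geopandas as gpd
    #     import shapely
    #     model_rivers = gpd.GeoDataFrame(
    #         geometry=[shapely.Point(-79.95, 32.78), shapely.Point(-79.90, 32.80)],
    #         crs="EPSG:4326",
    #     )
    #     river_loc = shapely.Point(-79.9500004, 32.7800001)
    #     matches = model_rivers[model_rivers.distance(river_loc) <= 0.001]
    #     assert len(matches) == 1  # exactly one source point should match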
    def _turn_off_bnd_press_correction(self):
        """Turn off the boundary pressure correction in the sfincs model."""
        self.logger.info(
            "Turning off boundary pressure correction in the offshore model"
        )
        self._model.set_config("pavbnd", -9999)

    def _set_waterlevel_forcing(self, df_ts: pd.DataFrame):
        """
        Add water level forcing to sfincs model.

        Values in the timeseries are expected to be relative to the main reference datum: `self.settings.water_level.reference`.
        The overland model reference: `self.settings.config.overland_model.reference` is used to convert the water levels to the reference of the overland model.

        Parameters
        ----------
        df_ts : pd.DataFrame
            Time series of water levels with the first column as the time index.
        """
        # Determine bnd points from reference overland model
        gdf_locs = self._read_waterlevel_boundary_locations()

        if len(df_ts.columns) == 1:
            # Go from 1 timeseries to timeseries for all boundary points
            name = df_ts.columns[0]
            for i in range(1, len(gdf_locs)):
                df_ts[i + 1] = df_ts[name]
            df_ts.columns = list(range(1, len(gdf_locs) + 1))

        # Datum
        sfincs_overland_reference_height = self.settings.water_level.get_datum(
            self.settings.config.overland_model.reference
        ).height.convert(us.UnitTypesLength.meters)

        df_ts -= sfincs_overland_reference_height

        # HydroMT function: set waterlevel forcing from time series
        self._model.set_forcing_1d(
            name="bzs", df_ts=df_ts, gdf_locs=gdf_locs, merge=False
        )
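    # A compact sketch (editorial illustration) of the column broadcast above:
    # a single water level series is duplicated to every boundary point and the
    # columns renumbered 1..N as SFINCS expects:
    #
    #     import pandas as pd
    #     times = pd.date_range("2023-01-01", periods=4, freq="h")
    #     df_ts = pd.DataFrame({"wl": [0.1, 0.4, 0.8, 0.5]}, index=times)
    #     n_bnd = 3
    #     df_ts = pd.concat([df_ts["wl"]] * n_bnd, axis=1)
    #     df_ts.columns = list(range(1, n_bnd + 1))  # columns 1, 2, 3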
    # OFFSHORE
    def _add_pressure_forcing_from_grid(self, ds: xr.DataArray):
        """Add spatially varying barometric pressure to sfincs model.

        Parameters
        ----------
        ds : xr.DataArray
            - Required variables: ['press_msl' (Pa)]
            - Required coordinates: ['time', 'y', 'x']
            - spatial_ref: CRS
        """
        self.logger.info("Adding pressure forcing to the offshore model")
        self._model.setup_pressure_forcing_from_grid(press=ds)

    def _add_bzs_from_bca(self, event: Event, physical_projection: PhysicalProjection):
        """Convert tidal constituents from bca file to waterlevel timeseries that can be read in by hydromt_sfincs."""
        # ONLY offshore models
        if self.settings.config.offshore_model is None:
            raise ValueError("No offshore model found in sfincs config.")

        self.logger.info("Adding water level forcing to the offshore model")
        sb = SfincsBoundary()
        sb.read_flow_boundary_points(self.get_model_root() / "sfincs.bnd")
        sb.read_astro_boundary_conditions(self.get_model_root() / "sfincs.bca")

        times = pd.date_range(
            start=event.time.start_time,
            end=event.time.end_time,
            freq="10T",
        )

        # Predict tidal signal and add SLR
        if not sb.flow_boundary_points:
            raise ValueError("No flow boundary points found.")

        if self.settings.config.offshore_model.vertical_offset:
            correction = self.settings.config.offshore_model.vertical_offset.convert(
                us.UnitTypesLength.meters
            )
        else:
            correction = 0.0

        for bnd_ii in range(len(sb.flow_boundary_points)):
            tide_ii = (
                predict(sb.flow_boundary_points[bnd_ii].astro, times)
                + correction
                + physical_projection.sea_level_rise.convert(us.UnitTypesLength.meters)
            )

            if bnd_ii == 0:
                wl_df = pd.DataFrame(data={1: tide_ii}, index=times)
            else:
                wl_df[bnd_ii + 1] = tide_ii

        # Determine bnd points from reference overland model
        gdf_locs = self._read_waterlevel_boundary_locations()

        # HydroMT function: set waterlevel forcing from time series
        self._model.set_forcing_1d(
            name="bzs", df_ts=wl_df, gdf_locs=gdf_locs, merge=False
        )
    ### PRIVATE GETTERS ###
    def _get_result_path(self, scenario: Scenario) -> Path:
        """Return the path to store the results."""
        return self.database.scenarios.output_path / scenario.name / "Flooding"

    def _get_simulation_path(
        self, scenario: Scenario, sub_event: Optional[Event] = None
    ) -> Path:
        """
        Return the path to the simulation results.

        Parameters
        ----------
        scenario : Scenario
            The scenario for which to get the simulation path.
        sub_event : Optional[Event], optional
            The sub-event for which to get the simulation path, by default None.
            Is only used when the event associated with the scenario is an EventSet.
        """
        base_path = (
            self._get_result_path(scenario)
            / "simulations"
            / self.settings.config.overland_model.name
        )
        event = self.database.events.get(scenario.event)

        if isinstance(event, EventSet):
            if sub_event is None:
                raise ValueError("Event must be provided when scenario is an EventSet.")
            return base_path.parent / sub_event.name / base_path.name
        elif isinstance(event, Event):
            return base_path
        else:
            raise ValueError(f"Unsupported mode: {event.mode}")

    def _get_simulation_path_offshore(
        self, scenario: Scenario, sub_event: Optional[Event] = None
    ) -> Path:
        # Get the path to the offshore model (will not be used if offshore model is not created)
        if self.settings.config.offshore_model is None:
            raise ValueError("No offshore model found in sfincs config.")
        base_path = (
            self._get_result_path(scenario)
            / "simulations"
            / self.settings.config.offshore_model.name
        )
        event = self.database.events.get(scenario.event)
        if isinstance(event, EventSet):
            if sub_event is None:
                raise ValueError("Event must be provided when scenario is an EventSet.")
            return base_path.parent / sub_event.name / base_path.name
        elif isinstance(event, Event):
            return base_path
        else:
            raise ValueError(f"Unsupported mode: {event.mode}")

    def _get_flood_map_paths(self, scenario: Scenario) -> list[Path]:
        """Return the paths to the flood maps that running this scenario should produce."""
        results_path = self._get_result_path(scenario)
        event = self.database.events.get(scenario.event)

        if isinstance(event, EventSet):
            map_fn = []
            for rp in self.database.site.fiat.risk.return_periods:
                map_fn.append(results_path / f"RP_{rp:04d}_maps.nc")
        elif isinstance(event, Event):
            map_fn = [results_path / "max_water_level_map.nc"]
        else:
            raise ValueError(f"Unsupported mode: {event.mode}")

        return map_fn
    def _get_event_input_path(self, event: Event) -> Path:
        """Return the path to the event input directory."""
        return self.database.events.input_path / event.name

    def _get_zsmax(self):
        """Read zsmax file and return absolute maximum water level over entire simulation."""
        self._model.read_results()
        zsmax = self._model.results["zsmax"].max(dim="timemax")
        zsmax.attrs["units"] = "m"
        return zsmax

    def _get_zs_points(self):
        """Read water level (zs) timeseries at observation points.

        Names are allocated from the site.toml.
        See also add_obs_points() above.
        """
        self._model.read_results()
        da = self._model.results["point_zs"]
        df = pd.DataFrame(index=pd.DatetimeIndex(da.time), data=da.to_numpy())

        names = []
        descriptions = []
        # get station names from site.toml
        if self.settings.obs_point is not None:
            obs_points = self.settings.obs_point
            for pt in obs_points:
                names.append(pt.name)
                descriptions.append(pt.description)

        pt_df = pd.DataFrame({"Name": names, "Description": descriptions})
        gdf = gpd.GeoDataFrame(
            pt_df,
            geometry=gpd.points_from_xy(da.point_x.values, da.point_y.values),
            crs=self._model.crs,
        )
        return df, gdf
    def _create_spw_file_from_track(
        self,
        track_forcing: Union[RainfallTrack, WindTrack],
        hurricane_translation: TranslationModel,
        name: str,
        output_dir: Path,
        include_rainfall: bool = False,
        recreate: bool = False,
    ):
        """
        Create a spiderweb file from a given TropicalCyclone track and save it to the event's input directory.

        Providing the output_dir argument allows saving the spiderweb file in a different directory.

        Parameters
        ----------
        output_dir : Path
            The directory where the spiderweb file is saved (or copied to if it already exists and recreate is False)
        recreate : bool, optional
            If True, the spiderweb file is recreated even if it already exists, by default False

        Returns
        -------
        Path
            the path to the created spiderweb file
        """
        if track_forcing.path is None:
            raise ValueError("No path to track file provided.")

        # Check file format
        match track_forcing.path.suffix:
            case ".spw":
                if recreate:
                    raise ValueError(
                        "Recreating spiderweb files from existing spiderweb files is not supported. Provide a track file instead."
                    )

                if track_forcing.path.exists():
                    return track_forcing.path

                elif (output_dir / track_forcing.path.name).exists():
                    return output_dir / track_forcing.path.name

                else:
                    raise FileNotFoundError(f"SPW file not found: {track_forcing.path}")
            case ".cyc":
                pass
            case _:
                raise ValueError(
                    "Track files should be in the DDB_CYC file format and have a .cyc extension, or in the SPW file format and have a .spw extension."
                )

        # Check if the spiderweb file already exists
        spw_file = output_dir / track_forcing.path.with_suffix(".spw").name
        if spw_file.exists():
            if recreate:
                os.remove(spw_file)
            else:
                return spw_file

        # Initialize the tropical cyclone
        tc = TropicalCyclone()
        tc.read_track(filename=str(track_forcing.path), fmt="ddb_cyc")

        # Alter the track of the tc if necessary
        tc = self._translate_tc_track(
            tc=tc, hurricane_translation=hurricane_translation
        )

        # Rainfall
        start = "Including" if include_rainfall else "Excluding"
        self.logger.info(f"{start} rainfall in the spiderweb file")
        tc.include_rainfall = include_rainfall

        self.logger.info(
            f"Creating spiderweb file for hurricane event `{name}`. This may take a while."
        )

        # Create spiderweb file from the track
        tc.to_spiderweb(spw_file)

        return spw_file
    def _translate_tc_track(
        self, tc: TropicalCyclone, hurricane_translation: TranslationModel
    ):
        if math.isclose(
            hurricane_translation.eastwest_translation.value, 0, abs_tol=1e-6
        ) and math.isclose(
            hurricane_translation.northsouth_translation.value, 0, abs_tol=1e-6
        ):
            return tc

        self.logger.info(f"Translating the track of the tropical cyclone `{tc.name}`")
        # First convert geodataframe to the local coordinate system
        crs = pyproj.CRS.from_string(self.settings.config.csname)
        tc.track = tc.track.to_crs(crs)

        # Translate the track in the local coordinate system
        tc.track["geometry"] = tc.track["geometry"].apply(
            lambda geom: translate(
                geom,
                xoff=hurricane_translation.eastwest_translation.convert(
                    us.UnitTypesLength.meters
                ),
                yoff=hurricane_translation.northsouth_translation.convert(
                    us.UnitTypesLength.meters
                ),
            )
        )

        # Convert the geodataframe to lat/lon
        tc.track = tc.track.to_crs(epsg=4326)

        return tc

    # @gundula do we keep this func, it's not used anywhere?
    def _downscale_hmax(self, zsmax, demfile: Path):
        # read DEM and convert units to metric units used by SFINCS
        demfile_units = self.settings.dem.units
        dem_conversion = us.UnitfulLength(value=1.0, units=demfile_units).convert(
            us.UnitTypesLength("meters")
        )
        dem = dem_conversion * self._model.data_catalog.get_rasterdataset(demfile)
        dem = dem.rio.reproject(self._model.crs)

        # determine conversion factor for output floodmap
        floodmap_units = self.settings.config.floodmap_units
        floodmap_conversion = us.UnitfulLength(
            value=1.0, units=us.UnitTypesLength.meters
        ).convert(floodmap_units)

        hmax = utils.downscale_floodmap(
            zsmax=floodmap_conversion * zsmax,
            dep=floodmap_conversion * dem,
            hmin=0.01,
        )
        return hmax

    def _read_river_locations(self) -> gpd.GeoDataFrame:
        path = self.get_model_root() / "sfincs.src"

        with open(path) as f:
            lines = f.readlines()
        coords = [(float(line.split()[0]), float(line.split()[1])) for line in lines]
        points = [shapely.Point(coord) for coord in coords]

        return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)

    def _read_waterlevel_boundary_locations(self) -> gpd.GeoDataFrame:
        with open(self.get_model_root() / "sfincs.bnd") as f:
            lines = f.readlines()
        coords = [(float(line.split()[0]), float(line.split()[1])) for line in lines]
        points = [shapely.Point(coord) for coord in coords]

        return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)
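    # A small sketch (editorial illustration) of the whitespace-delimited point
    # files parsed by the two readers above; sfincs.src and sfincs.bnd both hold
    # one x/y pair per line in the model CRS, e.g.:
    #
    #     595000.0  3290000.0
    #     596250.0  3291500.0
    #
    # which the list comprehension turns into shapely Points for a GeoDataFrame.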
    def _setup_sfincs_logger(self, model_root: Path) -> logging.Logger:
        """Initialize the logger for the SFINCS model."""
        # Create a logger for the SFINCS model manually
        sfincs_logger = logging.getLogger("SfincsModel")
        for handler in sfincs_logger.handlers[:]:
            sfincs_logger.removeHandler(handler)

        # Add a file handler
        file_handler = logging.FileHandler(
            filename=model_root.resolve() / "sfincs_model.log",
            mode="w",
        )
        sfincs_logger.setLevel(logging.DEBUG)
        sfincs_logger.addHandler(file_handler)
        return sfincs_logger

    def _cleanup_simulation_folder(
        self,
        path: Path,
        extensions: list[str] = [".spw"],
    ):
        """Remove all files with the given extensions in the given path."""
        if not path.exists():
            return

        for ext in extensions:
            for file in path.glob(f"*{ext}"):
                file.unlink()

    def _load_scenario_objects(self, scenario: Scenario, event: Event) -> None:
        self._scenario = scenario
        self._projection = self.database.projections.get(scenario.projection)
        self._strategy = self.database.strategies.get(scenario.strategy)
        self._event = event

        _event = self.database.events.get(scenario.event)
        if isinstance(_event, EventSet):
            self._event_set = _event
        else:
            self._event_set = None

    def _add_tide_gauge_plot(
        self, fig, event: Event, units: us.UnitTypesLength
    ) -> None:
        # check if event is historic
        if not isinstance(event, HistoricalEvent):
            return
        if self.settings.tide_gauge is None:
            return
        df_gauge = self.settings.tide_gauge.get_waterlevels_in_time_frame(
            time=TimeFrame(
                start_time=event.time.start_time,
                end_time=event.time.end_time,
            ),
            units=us.UnitTypesLength(units),
        )

        if df_gauge is not None:
            gauge_reference_height = self.settings.water_level.get_datum(
                self.settings.tide_gauge.reference
            ).height.convert(units)

            waterlevel = df_gauge.iloc[:, 0] + gauge_reference_height

            # If data is available, add to plot
            fig.add_trace(
                px.line(waterlevel, color_discrete_sequence=["#ea6404"]).data[0]
            )
            fig["data"][0]["name"] = "model"
            fig["data"][1]["name"] = "measurement"
            fig.update_layout(showlegend=True)