flood-adapt 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139) hide show
  1. flood_adapt/__init__.py +22 -0
  2. flood_adapt/adapter/__init__.py +9 -0
  3. flood_adapt/adapter/fiat_adapter.py +1502 -0
  4. flood_adapt/adapter/interface/__init__.py +0 -0
  5. flood_adapt/adapter/interface/hazard_adapter.py +70 -0
  6. flood_adapt/adapter/interface/impact_adapter.py +36 -0
  7. flood_adapt/adapter/interface/model_adapter.py +89 -0
  8. flood_adapt/adapter/interface/offshore.py +19 -0
  9. flood_adapt/adapter/sfincs_adapter.py +1857 -0
  10. flood_adapt/adapter/sfincs_offshore.py +193 -0
  11. flood_adapt/config/__init__.py +0 -0
  12. flood_adapt/config/config.py +245 -0
  13. flood_adapt/config/fiat.py +219 -0
  14. flood_adapt/config/gui.py +224 -0
  15. flood_adapt/config/sfincs.py +336 -0
  16. flood_adapt/config/site.py +124 -0
  17. flood_adapt/database_builder/__init__.py +0 -0
  18. flood_adapt/database_builder/database_builder.py +2175 -0
  19. flood_adapt/database_builder/templates/default_units/imperial.toml +9 -0
  20. flood_adapt/database_builder/templates/default_units/metric.toml +9 -0
  21. flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -0
  22. flood_adapt/database_builder/templates/icons/black_down_48x48.png +0 -0
  23. flood_adapt/database_builder/templates/icons/black_left_48x48.png +0 -0
  24. flood_adapt/database_builder/templates/icons/black_right_48x48.png +0 -0
  25. flood_adapt/database_builder/templates/icons/black_up_48x48.png +0 -0
  26. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_down.png +0 -0
  27. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_left.png +0 -0
  28. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_right.png +0 -0
  29. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_up.png +0 -0
  30. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_down.png +0 -0
  31. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_left.png +0 -0
  32. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_right.png +0 -0
  33. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_up.png +0 -0
  34. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_left.png +0 -0
  35. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_right.png +0 -0
  36. flood_adapt/database_builder/templates/icons/white_down_48x48.png +0 -0
  37. flood_adapt/database_builder/templates/icons/white_left_48x48.png +0 -0
  38. flood_adapt/database_builder/templates/icons/white_right_48x48.png +0 -0
  39. flood_adapt/database_builder/templates/icons/white_up_48x48.png +0 -0
  40. flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -0
  41. flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -0
  42. flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -0
  43. flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -0
  44. flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -0
  45. flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -0
  46. flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -0
  47. flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -0
  48. flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -0
  49. flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -0
  50. flood_adapt/database_builder/templates/infographics/images/ambulance.png +0 -0
  51. flood_adapt/database_builder/templates/infographics/images/car.png +0 -0
  52. flood_adapt/database_builder/templates/infographics/images/cart.png +0 -0
  53. flood_adapt/database_builder/templates/infographics/images/firetruck.png +0 -0
  54. flood_adapt/database_builder/templates/infographics/images/hospital.png +0 -0
  55. flood_adapt/database_builder/templates/infographics/images/house.png +0 -0
  56. flood_adapt/database_builder/templates/infographics/images/info.png +0 -0
  57. flood_adapt/database_builder/templates/infographics/images/money.png +0 -0
  58. flood_adapt/database_builder/templates/infographics/images/person.png +0 -0
  59. flood_adapt/database_builder/templates/infographics/images/school.png +0 -0
  60. flood_adapt/database_builder/templates/infographics/images/truck.png +0 -0
  61. flood_adapt/database_builder/templates/infographics/images/walking_person.png +0 -0
  62. flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -0
  63. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -0
  64. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -0
  65. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -0
  66. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -0
  67. flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -0
  68. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -0
  69. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -0
  70. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -0
  71. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -0
  72. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -0
  73. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -0
  74. flood_adapt/database_builder/templates/mapbox_layers/bin_colors.toml +5 -0
  75. flood_adapt/database_builder.py +16 -0
  76. flood_adapt/dbs_classes/__init__.py +21 -0
  77. flood_adapt/dbs_classes/database.py +716 -0
  78. flood_adapt/dbs_classes/dbs_benefit.py +97 -0
  79. flood_adapt/dbs_classes/dbs_event.py +91 -0
  80. flood_adapt/dbs_classes/dbs_measure.py +103 -0
  81. flood_adapt/dbs_classes/dbs_projection.py +52 -0
  82. flood_adapt/dbs_classes/dbs_scenario.py +150 -0
  83. flood_adapt/dbs_classes/dbs_static.py +261 -0
  84. flood_adapt/dbs_classes/dbs_strategy.py +147 -0
  85. flood_adapt/dbs_classes/dbs_template.py +302 -0
  86. flood_adapt/dbs_classes/interface/database.py +147 -0
  87. flood_adapt/dbs_classes/interface/element.py +137 -0
  88. flood_adapt/dbs_classes/interface/static.py +47 -0
  89. flood_adapt/flood_adapt.py +1371 -0
  90. flood_adapt/misc/__init__.py +0 -0
  91. flood_adapt/misc/database_user.py +16 -0
  92. flood_adapt/misc/log.py +183 -0
  93. flood_adapt/misc/path_builder.py +54 -0
  94. flood_adapt/misc/utils.py +185 -0
  95. flood_adapt/objects/__init__.py +59 -0
  96. flood_adapt/objects/benefits/__init__.py +0 -0
  97. flood_adapt/objects/benefits/benefits.py +61 -0
  98. flood_adapt/objects/events/__init__.py +0 -0
  99. flood_adapt/objects/events/event_factory.py +135 -0
  100. flood_adapt/objects/events/event_set.py +84 -0
  101. flood_adapt/objects/events/events.py +221 -0
  102. flood_adapt/objects/events/historical.py +55 -0
  103. flood_adapt/objects/events/hurricane.py +64 -0
  104. flood_adapt/objects/events/synthetic.py +48 -0
  105. flood_adapt/objects/forcing/__init__.py +0 -0
  106. flood_adapt/objects/forcing/csv.py +68 -0
  107. flood_adapt/objects/forcing/discharge.py +66 -0
  108. flood_adapt/objects/forcing/forcing.py +142 -0
  109. flood_adapt/objects/forcing/forcing_factory.py +182 -0
  110. flood_adapt/objects/forcing/meteo_handler.py +93 -0
  111. flood_adapt/objects/forcing/netcdf.py +40 -0
  112. flood_adapt/objects/forcing/plotting.py +428 -0
  113. flood_adapt/objects/forcing/rainfall.py +98 -0
  114. flood_adapt/objects/forcing/tide_gauge.py +191 -0
  115. flood_adapt/objects/forcing/time_frame.py +77 -0
  116. flood_adapt/objects/forcing/timeseries.py +552 -0
  117. flood_adapt/objects/forcing/unit_system.py +580 -0
  118. flood_adapt/objects/forcing/waterlevels.py +108 -0
  119. flood_adapt/objects/forcing/wind.py +124 -0
  120. flood_adapt/objects/measures/__init__.py +0 -0
  121. flood_adapt/objects/measures/measure_factory.py +92 -0
  122. flood_adapt/objects/measures/measures.py +506 -0
  123. flood_adapt/objects/object_model.py +68 -0
  124. flood_adapt/objects/projections/__init__.py +0 -0
  125. flood_adapt/objects/projections/projections.py +89 -0
  126. flood_adapt/objects/scenarios/__init__.py +0 -0
  127. flood_adapt/objects/scenarios/scenarios.py +22 -0
  128. flood_adapt/objects/strategies/__init__.py +0 -0
  129. flood_adapt/objects/strategies/strategies.py +68 -0
  130. flood_adapt/workflows/__init__.py +0 -0
  131. flood_adapt/workflows/benefit_runner.py +541 -0
  132. flood_adapt/workflows/floodmap.py +85 -0
  133. flood_adapt/workflows/impacts_integrator.py +82 -0
  134. flood_adapt/workflows/scenario_runner.py +69 -0
  135. flood_adapt-0.3.0.dist-info/LICENSE +21 -0
  136. flood_adapt-0.3.0.dist-info/METADATA +183 -0
  137. flood_adapt-0.3.0.dist-info/RECORD +139 -0
  138. flood_adapt-0.3.0.dist-info/WHEEL +5 -0
  139. flood_adapt-0.3.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,1857 @@
1
+ import logging
2
+ import math
3
+ import os
4
+ import shutil
5
+ import subprocess
6
+ import tempfile
7
+ from pathlib import Path
8
+ from typing import Optional, Union
9
+
10
+ import geopandas as gpd
11
+ import hydromt_sfincs.utils as utils
12
+ import numpy as np
13
+ import pandas as pd
14
+ import plotly.express as px
15
+ import plotly.graph_objects as go
16
+ import pyproj
17
+ import shapely
18
+ import xarray as xr
19
+ from cht_cyclones.tropical_cyclone import TropicalCyclone
20
+ from cht_tide.read_bca import SfincsBoundary
21
+ from cht_tide.tide_predict import predict
22
+ from hydromt_sfincs import SfincsModel as HydromtSfincsModel
23
+ from hydromt_sfincs.quadtree import QuadtreeGrid
24
+ from numpy import matlib
25
+ from shapely.affinity import translate
26
+
27
+ from flood_adapt.adapter.interface.hazard_adapter import IHazardAdapter
28
+ from flood_adapt.config.config import Settings
29
+ from flood_adapt.config.site import Site
30
+ from flood_adapt.misc.log import FloodAdaptLogging
31
+ from flood_adapt.misc.path_builder import (
32
+ ObjectDir,
33
+ TopLevelDir,
34
+ db_path,
35
+ )
36
+ from flood_adapt.misc.utils import cd, resolve_filepath
37
+ from flood_adapt.objects.events.event_set import EventSet
38
+ from flood_adapt.objects.events.events import Event, Mode, Template
39
+ from flood_adapt.objects.events.historical import HistoricalEvent
40
+ from flood_adapt.objects.events.hurricane import TranslationModel
41
+ from flood_adapt.objects.forcing import unit_system as us
42
+ from flood_adapt.objects.forcing.discharge import (
43
+ DischargeConstant,
44
+ DischargeCSV,
45
+ DischargeSynthetic,
46
+ )
47
+ from flood_adapt.objects.forcing.forcing import (
48
+ ForcingSource,
49
+ ForcingType,
50
+ IDischarge,
51
+ IForcing,
52
+ IRainfall,
53
+ IWaterlevel,
54
+ IWind,
55
+ )
56
+ from flood_adapt.objects.forcing.meteo_handler import MeteoHandler
57
+ from flood_adapt.objects.forcing.rainfall import (
58
+ RainfallConstant,
59
+ RainfallCSV,
60
+ RainfallMeteo,
61
+ RainfallNetCDF,
62
+ RainfallSynthetic,
63
+ RainfallTrack,
64
+ )
65
+ from flood_adapt.objects.forcing.time_frame import TimeFrame
66
+ from flood_adapt.objects.forcing.waterlevels import (
67
+ WaterlevelCSV,
68
+ WaterlevelGauged,
69
+ WaterlevelModel,
70
+ WaterlevelSynthetic,
71
+ )
72
+ from flood_adapt.objects.forcing.wind import (
73
+ WindConstant,
74
+ WindCSV,
75
+ WindMeteo,
76
+ WindNetCDF,
77
+ WindSynthetic,
78
+ WindTrack,
79
+ )
80
+ from flood_adapt.objects.measures.measures import (
81
+ FloodWall,
82
+ GreenInfrastructure,
83
+ Measure,
84
+ Pump,
85
+ )
86
+ from flood_adapt.objects.projections.projections import (
87
+ PhysicalProjection,
88
+ Projection,
89
+ )
90
+ from flood_adapt.objects.scenarios.scenarios import Scenario
91
+
92
+
93
+ class SfincsAdapter(IHazardAdapter):
94
+ """Adapter for the SFINCS model.
95
+
96
+ This class is used to run the SFINCS model and process the results.
97
+
98
+ Attributes
99
+ ----------
100
+ settings : SfincsModel
101
+ The settings for the SFINCS model.
102
+ """
103
+
104
+ logger = FloodAdaptLogging.getLogger("SfincsAdapter")
105
+ _site: Site
106
+ _model: HydromtSfincsModel
107
+
108
+ ###############
109
+ ### PUBLIC ####
110
+ ###############
111
+
112
+ ### HAZARD ADAPTER METHODS ###
113
+ def __init__(self, model_root: Path):
114
+ """Load overland sfincs model based on a root directory.
115
+
116
+ Parameters
117
+ ----------
118
+ model_root : Path
119
+ Root directory of overland sfincs model.
120
+ """
121
+ self.settings = self.database.site.sfincs
122
+ self.units = self.database.site.gui.units
123
+ self.sfincs_logger = self._setup_sfincs_logger(model_root)
124
+ self._model = HydromtSfincsModel(
125
+ root=str(model_root.resolve()), mode="r", logger=self.sfincs_logger
126
+ )
127
+ self._model.read()
128
+
129
+ def read(self, path: Path):
130
+ """Read the sfincs model from the current model root."""
131
+ if Path(self._model.root).resolve() != Path(path).resolve():
132
+ self._model.set_root(root=str(path), mode="r")
133
+ self._model.read()
134
+
135
+ def write(self, path_out: Union[str, os.PathLike], overwrite: bool = True):
136
+ """Write the sfincs model configuration to a directory."""
137
+ root = self.get_model_root()
138
+ if not isinstance(path_out, Path):
139
+ path_out = Path(path_out).resolve()
140
+
141
+ if not path_out.exists():
142
+ path_out.mkdir(parents=True)
143
+
144
+ if root != path_out:
145
+ shutil.copytree(root, path_out, dirs_exist_ok=True)
146
+
147
+ write_mode = "w+" if overwrite else "w"
148
+ with cd(path_out):
149
+ self._model.set_root(root=str(path_out), mode=write_mode)
150
+ self._model.write()
151
+
152
+ def close_files(self):
153
+ """Close all open files and clean up file handles."""
154
+ for logger in [self.logger, self.sfincs_logger]:
155
+ if hasattr(logger, "handlers"):
156
+ for handler in logger.handlers:
157
+ if isinstance(handler, logging.FileHandler):
158
+ handler.close()
159
+ logger.removeHandler(handler)
160
+
161
+ def __enter__(self) -> "SfincsAdapter":
162
+ return self
163
+
164
+ def __exit__(self, exc_type, exc_value, traceback) -> bool:
165
+ self.close_files()
166
+ return False
167
+
168
+ def has_run(self, scenario: Scenario) -> bool:
169
+ """Check if the model has been run."""
170
+ event = self.database.events.get(scenario.event)
171
+ if event.mode == Mode.risk:
172
+ sim_paths = [
173
+ self._get_simulation_path(scenario, sub_event=sub_event)
174
+ for sub_event in event.sub_events
175
+ ]
176
+ # No need to check postprocessing for risk scenarios
177
+ return all(self.sfincs_completed(sim_path) for sim_path in sim_paths)
178
+ else:
179
+ return self.sfincs_completed(
180
+ self._get_simulation_path(scenario)
181
+ ) and self.run_completed(scenario)
182
+
183
+ def execute(self, path: Path, strict: bool = True) -> bool:
184
+ """
185
+ Run the sfincs executable in the specified path.
186
+
187
+ Parameters
188
+ ----------
189
+ path : str
190
+ Path to the simulation folder.
191
+ Default is None, in which case the model root is used.
192
+ strict : bool, optional
193
+ True: raise an error if the model fails to run.
194
+ False: log a warning.
195
+ Default is True.
196
+
197
+ Returns
198
+ -------
199
+ bool
200
+ True if the model ran successfully, False otherwise.
201
+
202
+ """
203
+ with cd(path):
204
+ self.logger.info(f"Running SFINCS in {path}")
205
+ process = subprocess.run(
206
+ str(Settings().sfincs_path),
207
+ stdout=subprocess.PIPE,
208
+ stderr=subprocess.PIPE,
209
+ text=True,
210
+ )
211
+ self.sfincs_logger.info(process.stdout)
212
+ self.logger.debug(process.stdout)
213
+
214
+ self._cleanup_simulation_folder(path)
215
+
216
+ if process.returncode != 0:
217
+ if Settings().delete_crashed_runs:
218
+ # Remove all files in the simulation folder except for the log files
219
+ for subdir, dirs, files in os.walk(path, topdown=False):
220
+ for file in files:
221
+ if not file.endswith(".log"):
222
+ os.remove(os.path.join(subdir, file))
223
+
224
+ if not os.listdir(subdir):
225
+ os.rmdir(subdir)
226
+
227
+ if strict:
228
+ raise RuntimeError(f"SFINCS model failed to run in {path}.")
229
+ else:
230
+ self.logger.error(f"SFINCS model failed to run in {path}.")
231
+
232
+ return process.returncode == 0
233
+
234
+ def run(self, scenario: Scenario):
235
+ """Run the whole workflow (Preprocess, process and postprocess) for a given scenario."""
236
+ self._ensure_no_existing_forcings()
237
+ event = self.database.events.get(scenario.event)
238
+
239
+ if event.mode == Mode.risk:
240
+ self._run_risk_scenario(scenario=scenario)
241
+ else:
242
+ self._run_single_event(scenario=scenario, event=event)
243
+
244
+ def preprocess(self, scenario: Scenario, event: Event):
245
+ """
246
+ Preprocess the SFINCS model for a given scenario.
247
+
248
+ Parameters
249
+ ----------
250
+ scenario : Scenario
251
+ Scenario to preprocess.
252
+ event : Event, optional
253
+ Event to preprocess, by default None.
254
+ """
255
+ # I dont like this due to it being state based and might break if people use functions in the wrong order
256
+ # Currently only used to pass projection + event stuff to WaterlevelModel
257
+
258
+ sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
259
+ sim_path.mkdir(parents=True, exist_ok=True)
260
+ template_path = (
261
+ self.database.static.get_overland_sfincs_model().get_model_root()
262
+ )
263
+ shutil.copytree(template_path, sim_path, dirs_exist_ok=True)
264
+
265
+ with SfincsAdapter(model_root=sim_path) as model:
266
+ model._load_scenario_objects(scenario, event)
267
+ is_risk = "Probabilistic " if model._event_set is not None else ""
268
+ self.logger.info(
269
+ f"Preprocessing Scenario `{model._scenario.name}`: {is_risk}Event `{model._event.name}`, Strategy `{model._strategy.name}`, Projection `{model._projection.name}`"
270
+ )
271
+ # Write template model to output path and set it as the model root so focings can write to it
272
+ model.set_timing(model._event.time)
273
+ model.write(sim_path)
274
+
275
+ # Event
276
+ for forcing in model._event.get_forcings():
277
+ model.add_forcing(forcing)
278
+
279
+ if self.rainfall is not None:
280
+ model.rainfall *= model._event.rainfall_multiplier
281
+ else:
282
+ model.logger.warning(
283
+ "Failed to add event rainfall multiplier, no rainfall forcing found in the model."
284
+ )
285
+
286
+ # Measures
287
+ for measure in model._strategy.get_hazard_measures():
288
+ model.add_measure(measure)
289
+
290
+ # Projection
291
+ model.add_projection(model._projection)
292
+
293
+ # Output
294
+ model.add_obs_points()
295
+
296
+ # Save any changes made to disk as well
297
+ model.write(path_out=sim_path)
298
+
299
+ def process(self, scenario: Scenario, event: Event):
300
+ if event.mode != Mode.single_event:
301
+ raise ValueError(f"Unsupported event mode: {event.mode}.")
302
+
303
+ sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
304
+ self.logger.info(f"Running SFINCS for single event Scenario `{scenario.name}`")
305
+ self.execute(sim_path)
306
+
307
+ def postprocess(self, scenario: Scenario, event: Event):
308
+ if event.mode != Mode.single_event:
309
+ raise ValueError(f"Unsupported event mode: {event.mode}.")
310
+
311
+ self.logger.info(f"Postprocessing SFINCS for Scenario `{scenario.name}`")
312
+ if not self.sfincs_completed(
313
+ self._get_simulation_path(scenario, sub_event=event)
314
+ ):
315
+ raise RuntimeError("SFINCS was not run successfully!")
316
+
317
+ self.write_floodmap_geotiff(scenario)
318
+ self.plot_wl_obs(scenario)
319
+ self.write_water_level_map(scenario)
320
+
321
+ def set_timing(self, time: TimeFrame):
322
+ """Set model reference times."""
323
+ self.logger.info(f"Setting timing for the SFINCS model: `{time}`")
324
+ self._model.set_config("tref", time.start_time)
325
+ self._model.set_config("tstart", time.start_time)
326
+ self._model.set_config("tstop", time.end_time)
327
+
328
+ def add_forcing(self, forcing: IForcing):
329
+ """Get forcing data and add it."""
330
+ if forcing is None:
331
+ return
332
+
333
+ self.logger.info(
334
+ f"Adding {forcing.type.capitalize()}: {forcing.source.capitalize()}"
335
+ )
336
+ if isinstance(forcing, IRainfall):
337
+ self._add_forcing_rain(forcing)
338
+ elif isinstance(forcing, IWind):
339
+ self._add_forcing_wind(forcing)
340
+ elif isinstance(forcing, IDischarge):
341
+ self._add_forcing_discharge(forcing)
342
+ elif isinstance(forcing, IWaterlevel):
343
+ self._add_forcing_waterlevels(forcing)
344
+ else:
345
+ self.logger.warning(
346
+ f"Skipping unsupported forcing type {forcing.__class__.__name__}"
347
+ )
348
+
349
+ def add_measure(self, measure: Measure):
350
+ """Get measure data and add it."""
351
+ self.logger.info(
352
+ f"Adding {measure.__class__.__name__.capitalize()} `{measure.name}`"
353
+ )
354
+
355
+ if isinstance(measure, FloodWall):
356
+ self._add_measure_floodwall(measure)
357
+ elif isinstance(measure, GreenInfrastructure):
358
+ self._add_measure_greeninfra(measure)
359
+ elif isinstance(measure, Pump):
360
+ self._add_measure_pump(measure)
361
+ else:
362
+ self.logger.warning(
363
+ f"Skipping unsupported measure type {measure.__class__.__name__}"
364
+ )
365
+
366
+ def add_projection(self, projection: Projection):
367
+ """Get forcing data currently in the sfincs model and add the projection it."""
368
+ self.logger.info(f"Adding Projection `{projection.name}`")
369
+ phys_projection = projection.physical_projection
370
+
371
+ if phys_projection.sea_level_rise:
372
+ self.logger.info(
373
+ f"Adding projected sea level rise `{phys_projection.sea_level_rise}`"
374
+ )
375
+ if self.waterlevels is not None:
376
+ self.waterlevels += phys_projection.sea_level_rise.convert(
377
+ us.UnitTypesLength.meters
378
+ )
379
+ else:
380
+ self.logger.warning(
381
+ "Failed to add sea level rise, no water level forcing found in the model."
382
+ )
383
+
384
+ if phys_projection.rainfall_multiplier:
385
+ self.logger.info(
386
+ f"Adding projected rainfall multiplier `{phys_projection.rainfall_multiplier}`"
387
+ )
388
+ if self.rainfall is not None:
389
+ self.rainfall *= phys_projection.rainfall_multiplier
390
+ else:
391
+ self.logger.warning(
392
+ "Failed to add projected rainfall multiplier, no rainfall forcing found in the model."
393
+ )
394
+
395
+ ### GETTERS ###
396
+ def get_model_time(self) -> TimeFrame:
397
+ t0, t1 = self._model.get_model_time()
398
+ return TimeFrame(start_time=t0, end_time=t1)
399
+
400
+ def get_model_root(self) -> Path:
401
+ return Path(self._model.root)
402
+
403
+ def get_mask(self):
404
+ """Get mask with inactive cells from model."""
405
+ mask = self._model.grid["msk"]
406
+ return mask
407
+
408
+ def get_bedlevel(self):
409
+ """Get bed level from model."""
410
+ self._model.read_results()
411
+ zb = self._model.results["zb"]
412
+ return zb
413
+
414
+ def get_model_boundary(self) -> gpd.GeoDataFrame:
415
+ """Get bounding box from model."""
416
+ return self._model.region
417
+
418
+ def get_model_grid(self) -> QuadtreeGrid:
419
+ """Get grid from model.
420
+
421
+ Returns
422
+ -------
423
+ QuadtreeGrid
424
+ QuadtreeGrid with the model grid
425
+ """
426
+ return self._model.quadtree
427
+
428
+ # Forcing properties
429
+ @property
430
+ def waterlevels(self) -> xr.Dataset | xr.DataArray | None:
431
+ return self._model.forcing.get("bzs")
432
+
433
+ @waterlevels.setter
434
+ def waterlevels(self, waterlevels: xr.Dataset | xr.DataArray):
435
+ if self.waterlevels is None or self.waterlevels.size == 0:
436
+ raise ValueError("No water level forcing found in the model.")
437
+ self._model.forcing["bzs"] = waterlevels
438
+
439
+ @property
440
+ def discharge(self) -> xr.Dataset | xr.DataArray | None:
441
+ return self._model.forcing.get("dis")
442
+
443
+ @discharge.setter
444
+ def discharge(self, discharge: xr.Dataset | xr.DataArray):
445
+ if self.discharge is None or self.discharge.size == 0:
446
+ raise ValueError("No discharge forcing found in the model.")
447
+ self._model.forcing["dis"] = discharge
448
+
449
+ @property
450
+ def rainfall(self) -> xr.Dataset | xr.DataArray | None:
451
+ names = ["precip", "precip_2d"]
452
+ in_model = [name for name in names if name in self._model.forcing]
453
+ if len(in_model) == 0:
454
+ return None
455
+ elif len(in_model) == 1:
456
+ return self._model.forcing[in_model[0]]
457
+ else:
458
+ raise ValueError("Multiple rainfall forcings found in the model.")
459
+
460
+ @rainfall.setter
461
+ def rainfall(self, rainfall: xr.Dataset | xr.DataArray):
462
+ if self.rainfall is None or self.rainfall.size == 0:
463
+ raise ValueError("No rainfall forcing found in the model.")
464
+ elif "precip_2d" in self._model.forcing:
465
+ self._model.forcing["precip_2d"] = rainfall
466
+ elif "precip" in self._model.forcing:
467
+ self._model.forcing["precip"] = rainfall
468
+ else:
469
+ raise ValueError("Unsupported rainfall forcing in the model.")
470
+
471
+ @property
472
+ def wind(self) -> xr.Dataset | xr.DataArray | None:
473
+ wind_names = ["wnd", "wind_2d", "wind", "wind10_u", "wind10_v"]
474
+ wind_in_model = [name for name in wind_names if name in self._model.forcing]
475
+ if len(wind_in_model) == 0:
476
+ return None
477
+ elif len(wind_in_model) == 1:
478
+ return self._model.forcing[wind_in_model[0]]
479
+ elif len(wind_in_model) == 2:
480
+ if not ("wind10_u" in wind_in_model and "wind10_v" in wind_in_model):
481
+ raise ValueError(
482
+ "Multiple wind forcings found in the model. Both should be wind10_u and wind10_v or a singular wind forcing."
483
+ )
484
+ return xr.Dataset(
485
+ {
486
+ "wind10_u": self._model.forcing["wind10_u"],
487
+ "wind10_v": self._model.forcing["wind10_v"],
488
+ }
489
+ )
490
+ else:
491
+ raise ValueError("Multiple wind forcings found in the model.")
492
+
493
+ @wind.setter
494
+ def wind(self, wind: xr.Dataset | xr.DataArray):
495
+ if (not self.wind) or (self.wind.size == 0):
496
+ raise ValueError("No wind forcing found in the model.")
497
+
498
+ elif "wind_2d" in self._model.forcing:
499
+ self._model.forcing["wind_2d"] = wind
500
+ elif "wind" in self._model.forcing:
501
+ self._model.forcing["wind"] = wind
502
+ elif "wnd" in self._model.forcing:
503
+ self._model.forcing["wnd"] = wind
504
+ elif "wind10_u" in self._model.forcing and "wind10_v" in self._model.forcing:
505
+ self._model.forcing["wind10_u"] = wind["wind10_u"]
506
+ self._model.forcing["wind10_v"] = wind["wind10_v"]
507
+ else:
508
+ raise ValueError("Unsupported wind forcing in the model.")
509
+
510
+ ### OUTPUT ###
511
+ def run_completed(self, scenario: Scenario) -> bool:
512
+ """Check if the entire model run has been completed successfully by checking if all flood maps exist that are created in postprocess().
513
+
514
+ Returns
515
+ -------
516
+ bool : True if all flood maps exist, False otherwise.
517
+
518
+ """
519
+ any_floodmap = len(self._get_flood_map_paths(scenario)) > 0
520
+ all_exist = all(
521
+ floodmap.exists() for floodmap in self._get_flood_map_paths(scenario)
522
+ )
523
+ return any_floodmap and all_exist
524
+
525
+ def sfincs_completed(self, sim_path: Path) -> bool:
526
+ """Check if the sfincs executable has been run successfully by checking if the output files exist in the simulation folder.
527
+
528
+ Parameters
529
+ ----------
530
+ sim_path : Path
531
+ Path to the simulation folder to check.
532
+
533
+ Returns
534
+ -------
535
+ bool: True if the sfincs executable has been run successfully, False otherwise.
536
+
537
+ """
538
+ SFINCS_OUTPUT_FILES = ["sfincs_map.nc"]
539
+
540
+ if self.settings.obs_point is not None:
541
+ SFINCS_OUTPUT_FILES.append("sfincs_his.nc")
542
+
543
+ to_check = [Path(sim_path) / file for file in SFINCS_OUTPUT_FILES]
544
+ return all(output.exists() for output in to_check)
545
+
546
+ def write_floodmap_geotiff(
547
+ self, scenario: Scenario, sim_path: Optional[Path] = None
548
+ ):
549
+ """
550
+ Read simulation results from SFINCS and saves a geotiff with the maximum water levels.
551
+
552
+ Produced floodmap is in the units defined in the sfincs config settings.
553
+
554
+ Parameters
555
+ ----------
556
+ scenario : Scenario
557
+ Scenario for which to create the floodmap.
558
+ sim_path : Path, optional
559
+ Path to the simulation folder, by default None.
560
+ """
561
+ self.logger.info("Writing flood maps to geotiff")
562
+ results_path = self._get_result_path(scenario)
563
+ sim_path = sim_path or self._get_simulation_path(scenario)
564
+ demfile = self.database.static_path / "dem" / self.settings.dem.filename
565
+
566
+ with SfincsAdapter(model_root=sim_path) as model:
567
+ zsmax = model._get_zsmax()
568
+
569
+ dem = model._model.data_catalog.get_rasterdataset(demfile)
570
+
571
+ # convert dem from dem units to floodmap units
572
+ dem_conversion = us.UnitfulLength(
573
+ value=1.0, units=self.settings.dem.units
574
+ ).convert(self.settings.config.floodmap_units)
575
+
576
+ floodmap_fn = results_path / f"FloodMap_{scenario.name}.tif"
577
+
578
+ # convert zsmax from meters to floodmap units
579
+ floodmap_conversion = us.UnitfulLength(
580
+ value=1.0, units=us.UnitTypesLength.meters
581
+ ).convert(self.settings.config.floodmap_units)
582
+
583
+ utils.downscale_floodmap(
584
+ zsmax=floodmap_conversion * zsmax,
585
+ dep=dem_conversion * dem,
586
+ hmin=0.01,
587
+ floodmap_fn=str(floodmap_fn),
588
+ )
589
+
590
+ def write_water_level_map(
591
+ self, scenario: Scenario, sim_path: Optional[Path] = None
592
+ ):
593
+ """Read simulation results from SFINCS and saves a netcdf with the maximum water levels."""
594
+ self.logger.info("Writing water level map to netcdf")
595
+ results_path = self._get_result_path(scenario)
596
+ sim_path = sim_path or self._get_simulation_path(scenario)
597
+
598
+ with SfincsAdapter(model_root=sim_path) as model:
599
+ zsmax = model._get_zsmax()
600
+ zsmax.to_netcdf(results_path / "max_water_level_map.nc")
601
+
602
    def plot_wl_obs(
        self,
        scenario: Scenario,
    ):
        """Plot water levels at SFINCS observation points as html.

        Only for single event scenarios, or for a specific simulation path containing the written and processed sfincs model.

        Writes one ``{station_name}_timeseries.html`` file per observation
        point into the scenario results folder. Does nothing (with a warning)
        when the site config defines no observation points.
        """
        if not self.settings.obs_point:
            self.logger.warning("No observation points provided in config.")
            return

        self.logger.info("Plotting water levels at observation points")
        sim_path = self._get_simulation_path(scenario)

        # read SFINCS model
        with SfincsAdapter(model_root=sim_path) as model:
            # df: water level timeseries per point; gdf: point metadata (Name, Description)
            df, gdf = model._get_zs_points()

        # Convert from SFINCS output (meters) to the GUI's display units
        gui_units = us.UnitTypesLength(
            self.database.site.gui.units.default_length_units
        )
        conversion_factor = us.UnitfulLength(
            value=1.0, units=us.UnitTypesLength("meters")
        ).convert(gui_units)

        # Height of the overland model's datum above the main reference datum,
        # used to express plotted levels relative to the main reference.
        overland_reference_height = self.settings.water_level.get_datum(
            self.settings.config.overland_model.reference
        ).height.convert(gui_units)

        for ii, col in enumerate(df.columns):
            # Plot actual thing
            fig = px.line(
                df[col] * conversion_factor
                + overland_reference_height  # convert to reference datum for plotting
            )

            # Zero line marks the main reference datum itself
            fig.add_hline(
                y=0,
                line_dash="dash",
                line_color="#000000",
                annotation_text=self.settings.water_level.reference,
                annotation_position="bottom right",
            )

            # plot reference water levels (skip the overland model's own datum
            # and any datums explicitly excluded in the GUI config)
            for wl_ref in self.settings.water_level.datums:
                if (
                    wl_ref.name == self.settings.config.overland_model.reference
                    or wl_ref.name in self.database.site.gui.plotting.excluded_datums
                ):
                    continue
                fig.add_hline(
                    y=wl_ref.height.convert(gui_units),
                    line_dash="dash",
                    line_color="#3ec97c",
                    annotation_text=wl_ref.name,
                    annotation_position="bottom right",
                )

            fig.update_layout(
                autosize=False,
                height=100 * 2,
                width=280 * 2,
                margin={"r": 0, "l": 0, "b": 0, "t": 20},
                font={"size": 10, "color": "black", "family": "Arial"},
                title={
                    "text": gdf.iloc[ii]["Description"],
                    "font": {"size": 12, "color": "black", "family": "Arial"},
                    "x": 0.5,
                    "xanchor": "center",
                },
                xaxis_title="Time",
                yaxis_title=f"Water level [{gui_units.value}] above {self.settings.water_level.reference}",
                yaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
                xaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
                showlegend=False,
            )

            # Overlay observed tide gauge data for comparison, if available
            event = self.database.events.get(scenario.event)
            self._add_tide_gauge_plot(fig, event, units=gui_units)

            # write html to results folder
            station_name = gdf.iloc[ii]["Name"]
            results_path = self._get_result_path(scenario)
            fig.write_html(results_path / f"{station_name}_timeseries.html")
688
+
689
+ def add_obs_points(self):
690
+ """Add observation points provided in the site toml to SFINCS model."""
691
+ if self.settings.obs_point is not None:
692
+ self.logger.info("Adding observation points to the overland flood model")
693
+
694
+ obs_points = self.settings.obs_point
695
+ names = []
696
+ lat = []
697
+ lon = []
698
+ for pt in obs_points:
699
+ names.append(pt.name)
700
+ lat.append(pt.lat)
701
+ lon.append(pt.lon)
702
+
703
+ # create GeoDataFrame from obs_points in site file
704
+ df = pd.DataFrame({"name": names})
705
+ gdf = gpd.GeoDataFrame(
706
+ df, geometry=gpd.points_from_xy(lon, lat), crs="EPSG:4326"
707
+ )
708
+
709
+ # Add locations to SFINCS file
710
+ self._model.setup_observation_points(locations=gdf, merge=False)
711
+
712
+ def get_wl_df_from_offshore_his_results(self) -> pd.DataFrame:
713
+ """Create a pd.Dataframe with waterlevels from the offshore model at the bnd locations of the overland model.
714
+
715
+ Returns
716
+ -------
717
+ wl_df: pd.DataFrame
718
+ time series of water level.
719
+ """
720
+ self.logger.info("Reading water levels from offshore model")
721
+ ds_his = utils.read_sfincs_his_results(
722
+ Path(self._model.root) / "sfincs_his.nc",
723
+ crs=self._model.crs.to_epsg(),
724
+ )
725
+ wl_df = pd.DataFrame(
726
+ data=ds_his.point_zs.to_numpy(),
727
+ index=ds_his.time.to_numpy(),
728
+ columns=np.arange(1, ds_his.point_zs.to_numpy().shape[1] + 1, 1),
729
+ )
730
+ return wl_df
731
+
732
+ ## RISK EVENTS ##
733
+ def calculate_rp_floodmaps(self, scenario: Scenario):
734
+ """Calculate flood risk maps from a set of (currently) SFINCS water level outputs using linear interpolation.
735
+
736
+ It would be nice to make it more widely applicable and move the loading of the SFINCS results to self.postprocess_sfincs().
737
+
738
+ generates return period water level maps in netcdf format to be used by FIAT
739
+ generates return period water depth maps in geotiff format as product for users
740
+
741
+ TODO: make this robust and more efficient for bigger datasets.
742
+ """
743
+ event: EventSet = self.database.events.get(scenario.event)
744
+ if not isinstance(event, EventSet):
745
+ raise ValueError("This function is only available for risk scenarios.")
746
+
747
+ result_path = self._get_result_path(scenario)
748
+ sim_paths = [
749
+ self._get_simulation_path(scenario, sub_event=sub_event)
750
+ for sub_event in event._events
751
+ ]
752
+
753
+ phys_proj = self.database.projections.get(
754
+ scenario.projection
755
+ ).physical_projection
756
+
757
+ floodmap_rp = self.database.site.fiat.risk.return_periods
758
+ frequencies = [sub_event.frequency for sub_event in event.sub_events]
759
+
760
+ # adjust storm frequency for hurricane events
761
+ if not math.isclose(phys_proj.storm_frequency_increase, 0, abs_tol=1e-9):
762
+ storminess_increase = phys_proj.storm_frequency_increase / 100.0
763
+ for ii, event in enumerate(event._events):
764
+ if event.template == Template.Hurricane:
765
+ frequencies[ii] = frequencies[ii] * (1 + storminess_increase)
766
+
767
+ with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
768
+ # read mask and bed level
769
+ mask = dummymodel.get_mask().stack(z=("x", "y"))
770
+ zb = dummymodel.get_bedlevel().stack(z=("x", "y")).to_numpy()
771
+
772
+ zs_maps = []
773
+ for simulation_path in sim_paths:
774
+ # read zsmax data from overland sfincs model
775
+ with SfincsAdapter(model_root=simulation_path) as sim:
776
+ zsmax = sim._get_zsmax().load()
777
+ zs_stacked = zsmax.stack(z=("x", "y"))
778
+ zs_maps.append(zs_stacked)
779
+
780
+ # Create RP flood maps
781
+
782
+ # 1a: make a table of all water levels and associated frequencies
783
+ zs = xr.concat(zs_maps, pd.Index(frequencies, name="frequency"))
784
+ # Get the indices of columns with all NaN values
785
+ nan_cells = np.where(np.all(np.isnan(zs), axis=0))[0]
786
+ # fill nan values with minimum bed levels in each grid cell, np.interp cannot ignore nan values
787
+ zs = xr.where(np.isnan(zs), np.tile(zb, (zs.shape[0], 1)), zs)
788
+ # Get table of frequencies
789
+ freq = np.tile(frequencies, (zs.shape[1], 1)).transpose()
790
+
791
+ # 1b: sort water levels in descending order and include the frequencies in the sorting process
792
+ # (i.e. each h-value should be linked to the same p-values as in step 1a)
793
+ sort_index = zs.argsort(axis=0)
794
+ sorted_prob = np.flipud(np.take_along_axis(freq, sort_index, axis=0))
795
+ sorted_zs = np.flipud(np.take_along_axis(zs.values, sort_index, axis=0))
796
+
797
+ # 1c: Compute exceedance probabilities of water depths
798
+ # Method: accumulate probabilities from top to bottom
799
+ prob_exceed = np.cumsum(sorted_prob, axis=0)
800
+
801
+ # 1d: Compute return periods of water depths
802
+ # Method: simply take the inverse of the exceedance probability (1/Pex)
803
+ rp_zs = 1.0 / prob_exceed
804
+
805
+ # For each return period (T) of interest do the following:
806
+ # For each grid cell do the following:
807
+ # Use the table from step [1d] as a “lookup-table” to derive the T-year water depth. Use a 1-d interpolation technique:
808
+ # h(T) = interp1 (log(T*), h*, log(T))
809
+ # in which t* and h* are the values from the table and T is the return period (T) of interest
810
+ # The resulting T-year water depths for all grids combined form the T-year hazard map
811
+ rp_da = xr.DataArray(rp_zs, dims=zs.dims)
812
+
813
+ # no_data_value = -999 # in SFINCS
814
+ # sorted_zs = xr.where(sorted_zs == no_data_value, np.nan, sorted_zs)
815
+
816
+ valid_cells = np.where(mask == 1)[
817
+ 0
818
+ ] # only loop over cells where model is not masked
819
+ h = matlib.repmat(
820
+ np.copy(zb), len(floodmap_rp), 1
821
+ ) # if not flooded (i.e. not in valid_cells) revert to bed_level, read from SFINCS results so it is the minimum bed level in a grid cell
822
+
823
+ self.logger.info("Calculating flood risk maps, this may take some time")
824
+ for jj in valid_cells: # looping over all non-masked cells.
825
+ # linear interpolation for all return periods to evaluate
826
+ h[:, jj] = np.interp(
827
+ np.log10(floodmap_rp),
828
+ np.log10(rp_da[::-1, jj]),
829
+ sorted_zs[::-1, jj],
830
+ left=0,
831
+ )
832
+
833
+ # Re-fill locations that had nan water level for all simulations with nans
834
+ h[:, nan_cells] = np.full(h[:, nan_cells].shape, np.nan)
835
+
836
+ # If a cell has the same water-level as the bed elevation it should be dry (turn to nan)
837
+ diff = h - np.tile(zb, (h.shape[0], 1))
838
+ dry = (
839
+ diff < 10e-10
840
+ ) # here we use a small number instead of zero for rounding errors
841
+ h[dry] = np.nan
842
+
843
+ for ii, rp in enumerate(floodmap_rp):
844
+ # #create single nc
845
+ zs_rp_single = xr.DataArray(
846
+ data=h[ii, :], coords={"z": zs["z"]}, attrs={"units": "meters"}
847
+ ).unstack()
848
+ zs_rp_single = zs_rp_single.rio.write_crs(
849
+ zsmax.raster.crs
850
+ ) # , inplace=True)
851
+ zs_rp_single = zs_rp_single.to_dataset(name="risk_map")
852
+ fn_rp = result_path / f"RP_{rp:04d}_maps.nc"
853
+ zs_rp_single.to_netcdf(fn_rp)
854
+
855
+ # write geotiff
856
+ # dem file for high resolution flood depth map
857
+ demfile = self.database.static_path / "dem" / self.settings.dem.filename
858
+
859
+ # writing the geotiff to the scenario results folder
860
+ with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
861
+ dem = dummymodel._model.data_catalog.get_rasterdataset(demfile)
862
+ zsmax = zs_rp_single.to_array().squeeze().transpose()
863
+ floodmap_fn = fn_rp.with_suffix(".tif")
864
+
865
+ # convert dem from dem units to floodmap units
866
+ dem_conversion = us.UnitfulLength(
867
+ value=1.0, units=self.settings.dem.units
868
+ ).convert(self.settings.config.floodmap_units)
869
+
870
+ # convert zsmax from meters to floodmap units
871
+ floodmap_conversion = us.UnitfulLength(
872
+ value=1.0, units=us.UnitTypesLength.meters
873
+ ).convert(self.settings.config.floodmap_units)
874
+
875
+ utils.downscale_floodmap(
876
+ zsmax=floodmap_conversion * zsmax,
877
+ dep=dem_conversion * dem,
878
+ hmin=0.01,
879
+ floodmap_fn=str(floodmap_fn),
880
+ )
881
+
882
+ ######################################
883
+ ### PRIVATE - use at your own risk ###
884
+ ######################################
885
+ def _run_single_event(self, scenario: Scenario, event: Event):
886
+ self.preprocess(scenario, event)
887
+ self.process(scenario, event)
888
+ self.postprocess(scenario, event)
889
+ shutil.rmtree(
890
+ self._get_simulation_path(scenario, sub_event=event), ignore_errors=True
891
+ )
892
+
893
+ def _run_risk_scenario(self, scenario: Scenario):
894
+ """Run the whole workflow for a risk scenario.
895
+
896
+ This means preprocessing and running the SFINCS model for each event in the event set, and then postprocessing the results.
897
+ """
898
+ event_set: EventSet = self.database.events.get(scenario.event)
899
+ total = len(event_set._events)
900
+
901
+ for i, sub_event in enumerate(event_set._events):
902
+ sim_path = self._get_simulation_path(scenario, sub_event=sub_event)
903
+
904
+ # Preprocess
905
+ self.preprocess(scenario, event=sub_event)
906
+ self.logger.info(
907
+ f"Running SFINCS for Eventset Scenario `{scenario.name}`, Event `{sub_event.name}` ({i + 1}/{total})"
908
+ )
909
+ self.execute(sim_path)
910
+
911
+ # Postprocess
912
+ self.calculate_rp_floodmaps(scenario)
913
+
914
+ # Cleanup
915
+ for i, sub_event in enumerate(event_set._events):
916
+ shutil.rmtree(
917
+ self._get_simulation_path(scenario, sub_event=sub_event),
918
+ ignore_errors=True,
919
+ )
920
+
921
+ def _ensure_no_existing_forcings(self):
922
+ """Check for existing forcings in the model and raise an error if any are found."""
923
+ all_forcings = {
924
+ "waterlevel": self.waterlevels,
925
+ "rainfall": self.rainfall,
926
+ "wind": self.wind,
927
+ "discharge": self.discharge,
928
+ }
929
+ contains_forcings = ", ".join(
930
+ [
931
+ f"{name.capitalize()}"
932
+ for name, forcing in all_forcings.items()
933
+ if forcing is not None
934
+ ]
935
+ )
936
+ if contains_forcings:
937
+ raise ValueError(
938
+ f"{contains_forcings} forcing(s) should not exists in the SFINCS template model. Remove it from the SFINCS model located at: {self.get_model_root()}. For more information on SFINCS and its input files, see the SFINCS documentation at: `https://sfincs.readthedocs.io/en/latest/input.html`"
939
+ )
940
+
941
+ ### FORCING ###
942
    def _add_forcing_wind(
        self,
        wind: IWind,
    ):
        """Add wind forcing to the sfincs model.

        Parameters
        ----------
        wind : IWind
            The wind forcing to add. Supported types:
            - WindConstant: time-invariant magnitude (converted to m/s) and direction (deg)
            - WindSynthetic: synthetic timeseries, written to a temporary csv
            - WindMeteo: gridded meteo data (already metric)
            - WindTrack: cyclone track, applied as a spiderweb file
            - WindNetCDF: gridded netcdf data, converted to m/s
            - WindCSV: timeseries csv, converted to m/s
            Unsupported types are logged as a warning and skipped.
        """
        time_frame = self.get_model_time()
        if isinstance(wind, WindConstant):
            # HydroMT function: set wind forcing from constant magnitude and direction
            self._model.setup_wind_forcing(
                timeseries=None,
                magnitude=wind.speed.convert(us.UnitTypesVelocity.mps),
                direction=wind.direction.value,
            )
        elif isinstance(wind, WindSynthetic):
            df = wind.to_dataframe(time_frame=time_frame)
            # Convert magnitude from the default velocity units to m/s for SFINCS
            df["mag"] *= us.UnitfulVelocity(
                value=1.0, units=self.units.default_velocity_units
            ).convert(us.UnitTypesVelocity.mps)

            # NOTE(review): fixed filename in the shared temp dir — assumes runs
            # are not concurrent on the same machine; TODO confirm.
            tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
            df.to_csv(tmp_path)

            # HydroMT function: set wind forcing from timeseries
            self._model.setup_wind_forcing(
                timeseries=tmp_path, magnitude=None, direction=None
            )
        elif isinstance(wind, WindMeteo):
            ds = MeteoHandler().read(time_frame)
            # data already in metric units so no conversion needed

            # HydroMT function: set wind forcing from grid
            self._model.setup_wind_forcing_from_grid(wind=ds)
        elif isinstance(wind, WindTrack):
            # data already in metric units so no conversion needed
            self._add_forcing_spw(wind)
        elif isinstance(wind, WindNetCDF):
            ds = wind.read()
            # time slicing to time_frame not needed, hydromt-sfincs handles it
            conversion = us.UnitfulVelocity(value=1.0, units=wind.units).convert(
                us.UnitTypesVelocity.mps
            )
            ds *= conversion
            self._model.setup_wind_forcing_from_grid(wind=ds)
        elif isinstance(wind, WindCSV):
            df = wind.to_dataframe(time_frame=time_frame)

            # Convert from the csv's speed units to m/s for SFINCS
            conversion = us.UnitfulVelocity(
                value=1.0, units=wind.units["speed"]
            ).convert(us.UnitTypesVelocity.mps)
            df *= conversion

            tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
            df.to_csv(tmp_path)

            # HydroMT function: set wind forcing from timeseries
            self._model.setup_wind_forcing(
                timeseries=tmp_path,
                magnitude=None,
                direction=None,
            )
        else:
            self.logger.warning(
                f"Unsupported wind forcing type: {wind.__class__.__name__}"
            )
            return
1017
+
1018
    def _add_forcing_rain(self, rainfall: IRainfall):
        """Add rainfall forcing to the sfincs model.

        Parameters
        ----------
        rainfall : IRainfall
            The rainfall forcing to add. Supported types:
            - RainfallConstant: time-invariant intensity (converted to mm/hr)
            - RainfallCSV: timeseries csv, converted to mm/hr
            - RainfallSynthetic: synthetic timeseries (cumulative SCS in mm, otherwise intensity in mm/hr)
            - RainfallMeteo: gridded meteo data (already metric)
            - RainfallTrack: cyclone track, applied as a spiderweb file
            - RainfallNetCDF: gridded netcdf data, converted to mm/hr
            Unsupported types are logged as a warning and skipped.
        """
        time_frame = self.get_model_time()
        if isinstance(rainfall, RainfallConstant):
            self._model.setup_precip_forcing(
                timeseries=None,
                magnitude=rainfall.intensity.convert(us.UnitTypesIntensity.mm_hr),
            )
        elif isinstance(rainfall, RainfallCSV):
            df = rainfall.to_dataframe(time_frame=time_frame)
            # Convert from the csv's intensity units to mm/hr for SFINCS
            conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
                us.UnitTypesIntensity.mm_hr
            )
            df *= conversion

            tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
            df.to_csv(tmp_path)

            self._model.setup_precip_forcing(timeseries=tmp_path)
        elif isinstance(rainfall, RainfallSynthetic):
            df = rainfall.to_dataframe(time_frame=time_frame)

            if rainfall.timeseries.cumulative is not None:  # scs
                # Cumulative (SCS) timeseries carry length units -> millimeters
                conversion = us.UnitfulLength(
                    value=1.0, units=rainfall.timeseries.cumulative.units
                ).convert(us.UnitTypesLength.millimeters)
            else:
                # Intensity timeseries -> mm/hr
                conversion = us.UnitfulIntensity(
                    value=1.0, units=rainfall.timeseries.peak_value.units
                ).convert(us.UnitTypesIntensity.mm_hr)

            df *= conversion
            tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
            df.to_csv(tmp_path)

            self._model.setup_precip_forcing(timeseries=tmp_path)
        elif isinstance(rainfall, RainfallMeteo):
            ds = MeteoHandler().read(time_frame)
            # MeteoHandler always return metric so no conversion needed
            self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
        elif isinstance(rainfall, RainfallTrack):
            # data already in metric units so no conversion needed
            self._add_forcing_spw(rainfall)
        elif isinstance(rainfall, RainfallNetCDF):
            ds = rainfall.read()
            # time slicing to time_frame not needed, hydromt-sfincs handles it
            conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
                us.UnitTypesIntensity.mm_hr
            )
            ds *= conversion
            self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
        else:
            self.logger.warning(
                f"Unsupported rainfall forcing type: {rainfall.__class__.__name__}"
            )
            return
1082
+
1083
+ def _add_forcing_discharge(self, forcing: IDischarge):
1084
+ """Add spatially constant discharge forcing to sfincs model. Use timeseries or a constant magnitude.
1085
+
1086
+ Parameters
1087
+ ----------
1088
+ forcing : IDischarge
1089
+ The discharge forcing to add to the model.
1090
+ Can be a constant, synthetic or from a csv file.
1091
+ Also contains the river information.
1092
+ """
1093
+ if isinstance(forcing, (DischargeConstant, DischargeCSV, DischargeSynthetic)):
1094
+ self._set_single_river_forcing(discharge=forcing)
1095
+ else:
1096
+ self.logger.warning(
1097
+ f"Unsupported discharge forcing type: {forcing.__class__.__name__}"
1098
+ )
1099
+
1100
    def _add_forcing_waterlevels(self, forcing: IWaterlevel):
        """Add water level forcing to the sfincs model, converted to meters and to the main reference datum.

        Parameters
        ----------
        forcing : IWaterlevel
            Supported types:
            - WaterlevelSynthetic: tide + surge timeseries, corrected with the synthetic tide datum
            - WaterlevelGauged: observed tide gauge data (requires a tide gauge in the site settings)
            - WaterlevelCSV: timeseries from csv
            - WaterlevelModel: water levels produced by the offshore SFINCS model
            Unsupported types are logged as a warning and skipped.

        Raises
        ------
        ValueError
            If required configuration or data is missing (tide gauge, offshore
            model config, scenario/event context, or the timeseries itself).
        """
        time_frame = self.get_model_time()
        if isinstance(forcing, WaterlevelSynthetic):
            df_ts = forcing.to_dataframe(time_frame=time_frame)

            # Convert surge units to meters
            conversion = us.UnitfulLength(
                value=1.0, units=forcing.surge.timeseries.peak_value.units
            ).convert(us.UnitTypesLength.meters)
            # Shift from the synthetic tide's datum to the main reference datum
            datum_correction = self.settings.water_level.get_datum(
                self.database.site.gui.plotting.synthetic_tide.datum
            ).height.convert(us.UnitTypesLength.meters)

            df_ts = df_ts * conversion + datum_correction

            self._set_waterlevel_forcing(df_ts)
        elif isinstance(forcing, WaterlevelGauged):
            if self.settings.tide_gauge is None:
                raise ValueError("No tide gauge defined for this site.")

            df_ts = self.settings.tide_gauge.get_waterlevels_in_time_frame(
                time=time_frame,
            )
            # Convert gauge units to meters
            conversion = us.UnitfulLength(
                value=1.0, units=self.settings.tide_gauge.units
            ).convert(us.UnitTypesLength.meters)

            # Shift from the gauge's own datum to the main reference datum
            datum_height = self.settings.water_level.get_datum(
                self.settings.tide_gauge.reference
            ).height.convert(us.UnitTypesLength.meters)

            df_ts = conversion * df_ts + datum_height

            self._set_waterlevel_forcing(df_ts)
        elif isinstance(forcing, WaterlevelCSV):
            df_ts = forcing.to_dataframe(time_frame=time_frame)

            if df_ts is None:
                raise ValueError("Failed to get waterlevel data.")
            conversion = us.UnitfulLength(value=1.0, units=forcing.units).convert(
                us.UnitTypesLength.meters
            )
            df_ts *= conversion
            self._set_waterlevel_forcing(df_ts)

        elif isinstance(forcing, WaterlevelModel):
            # Local import to avoid a circular import at module load time
            from flood_adapt.adapter.sfincs_offshore import OffshoreSfincsHandler

            if self.settings.config.offshore_model is None:
                raise ValueError("Offshore model configuration is missing.")
            if self._scenario is None or self._event is None:
                raise ValueError(
                    "Scenario and event must be provided to run the offshore model."
                )

            df_ts = OffshoreSfincsHandler(
                scenario=self._scenario, event=self._event
            ).get_resulting_waterlevels()
            if df_ts is None:
                raise ValueError("Failed to get waterlevel data.")

            # Datum: shift from the offshore model's reference to the main reference datum
            datum_correction = self.settings.water_level.get_datum(
                self.settings.config.offshore_model.reference
            ).height.convert(us.UnitTypesLength.meters)
            df_ts += datum_correction

            # Already in meters since it was produced by SFINCS so no conversion needed
            self._set_waterlevel_forcing(df_ts)
            self._turn_off_bnd_press_correction()
        else:
            self.logger.warning(
                f"Unsupported waterlevel forcing type: {forcing.__class__.__name__}"
            )
1173
+
1174
+ # SPIDERWEB
1175
    def _add_forcing_spw(self, forcing: Union[RainfallTrack, WindTrack]):
        """Add spiderweb (cyclone track) forcing to the sfincs model.

        Resolves the track file (locally or from the event's database folder),
        converts a ``.cyc`` track to a ``.spw`` spiderweb file if needed, copies
        the spw file into the model folder and registers it in the sfincs config.

        Parameters
        ----------
        forcing : Union[RainfallTrack, WindTrack]
            Track-based forcing; its `path` attribute may be rewritten to the
            resolved/converted file as a side effect.

        Raises
        ------
        ValueError
            If the forcing source is not TRACK, no path is set, the file has an
            unsupported extension, or the spw file is already the model's own.
        FileNotFoundError
            If the track file cannot be found locally nor in the database.
        """
        if forcing.source != ForcingSource.TRACK:
            raise ValueError("Forcing source should be TRACK.")

        if forcing.path is None:
            raise ValueError("No path to track file provided.")

        if not forcing.path.exists():
            # Check if the file is in the database
            in_db = self._get_event_input_path(self._event) / forcing.path.name
            if not in_db.exists():
                raise FileNotFoundError(
                    f"Input file for track forcing not found: {forcing.path}"
                )
            forcing.path = in_db

        if forcing.path.suffix == ".cyc":
            # Convert the cyclone track to a spiderweb file next to the track file
            forcing.path = self._create_spw_file_from_track(
                track_forcing=forcing,
                hurricane_translation=self._event.hurricane_translation,
                name=self._event.name,
                output_dir=forcing.path.parent,
                include_rainfall=bool(self._event.forcings.get(ForcingType.RAINFALL)),
                recreate=False,
            )

        if forcing.path.suffix != ".spw":
            raise ValueError(
                "Track files should be in one of [spw, ddb_cyc] file format and must have [.spw, .cyc] extension."
            )

        sim_path = self.get_model_root()
        self.logger.info(f"Adding spiderweb forcing to Sfincs model: {sim_path.name}")

        # prevent SameFileError
        output_spw_path = sim_path / forcing.path.name
        if forcing.path == output_spw_path:
            raise ValueError(
                "Add a different SPW file than the one already in the model."
            )

        # Replace any previously copied spw file of the same name
        if output_spw_path.exists():
            os.remove(output_spw_path)
        shutil.copy2(forcing.path, output_spw_path)

        # Only the filename is stored; SFINCS resolves it relative to the model root
        self._model.set_config("spwfile", output_spw_path.name)
1222
+
1223
+ ### MEASURES ###
1224
    def _add_measure_floodwall(self, floodwall: FloodWall):
        """Add floodwall to sfincs model as a weir structure.

        Wall heights are taken from the `z` column of the polygon file when
        present (converted to meters); otherwise the measure's uniform
        elevation is used as a fallback.

        Parameters
        ----------
        floodwall : FloodWall
            floodwall information
        """
        polygon_file = resolve_filepath(
            object_dir=ObjectDir.measure,
            obj_name=floodwall.name,
            path=floodwall.polygon_file,
        )

        # HydroMT function: get geodataframe from filename
        gdf_floodwall = self._model.data_catalog.get_geodataframe(
            polygon_file, geom=self._model.region, crs=self._model.crs
        )

        # Add floodwall attributes to geodataframe
        gdf_floodwall["name"] = floodwall.name
        # Split multi-part lines into single line segments
        if (gdf_floodwall.geometry.type == "MultiLineString").any():
            gdf_floodwall = gdf_floodwall.explode()

        # Deliberate broad fallback: any failure reading per-vertex heights
        # (missing `z` column, non-numeric values) falls back to the uniform
        # measure elevation instead of aborting.
        try:
            heights = [
                float(
                    us.UnitfulLength(
                        value=float(height),
                        units=self.database.site.gui.units.default_length_units,
                    ).convert(us.UnitTypesLength("meters"))
                )
                for height in gdf_floodwall["z"]
            ]
            gdf_floodwall["z"] = heights
            self.logger.info("Using floodwall height from shape file.")
        except Exception:
            self.logger.warning(
                f"Could not use height data from file due to missing `z` column or missing values therein. Using uniform height of {floodwall.elevation} instead."
            )
            gdf_floodwall["z"] = floodwall.elevation.convert(
                us.UnitTypesLength(us.UnitTypesLength.meters)
            )

        # par1 is the overflow coefficient for weirs
        gdf_floodwall["par1"] = 0.6

        # HydroMT function: create floodwall
        self._model.setup_structures(structures=gdf_floodwall, stype="weir", merge=True)
1273
+
1274
+ def _add_measure_greeninfra(self, green_infrastructure: GreenInfrastructure):
1275
+ # HydroMT function: get geodataframe from filename
1276
+ if green_infrastructure.selection_type == "polygon":
1277
+ polygon_file = resolve_filepath(
1278
+ ObjectDir.measure,
1279
+ green_infrastructure.name,
1280
+ green_infrastructure.polygon_file,
1281
+ )
1282
+ elif green_infrastructure.selection_type == "aggregation_area":
1283
+ # TODO this logic already exists in the Database controller but cannot be used due to cyclic imports
1284
+ # Loop through available aggregation area types
1285
+ for aggr_dict in self.database.site.fiat.config.aggregation:
1286
+ # check which one is used in measure
1287
+ if not aggr_dict.name == green_infrastructure.aggregation_area_type:
1288
+ continue
1289
+ # load geodataframe
1290
+ aggr_areas = gpd.read_file(
1291
+ db_path(TopLevelDir.static) / aggr_dict.file,
1292
+ engine="pyogrio",
1293
+ ).to_crs(4326)
1294
+ # keep only aggregation area chosen
1295
+ polygon_file = aggr_areas.loc[
1296
+ aggr_areas[aggr_dict.field_name]
1297
+ == green_infrastructure.aggregation_area_name,
1298
+ ["geometry"],
1299
+ ].reset_index(drop=True)
1300
+ else:
1301
+ raise ValueError(
1302
+ f"The selection type: {green_infrastructure.selection_type} is not valid"
1303
+ )
1304
+
1305
+ gdf_green_infra = self._model.data_catalog.get_geodataframe(
1306
+ polygon_file,
1307
+ geom=self._model.region,
1308
+ crs=self._model.crs,
1309
+ )
1310
+
1311
+ # Make sure no multipolygons are there
1312
+ gdf_green_infra = gdf_green_infra.explode()
1313
+
1314
+ # Volume is always already calculated and is converted to m3 for SFINCS
1315
+ height = None
1316
+ volume = green_infrastructure.volume.convert(
1317
+ us.UnitTypesVolume(us.UnitTypesVolume.m3)
1318
+ )
1319
+
1320
+ # HydroMT function: create storage volume
1321
+ self._model.setup_storage_volume(
1322
+ storage_locs=gdf_green_infra, volume=volume, height=height, merge=True
1323
+ )
1324
+
1325
+ def _add_measure_pump(self, pump: Pump):
1326
+ """Add pump to sfincs model.
1327
+
1328
+ Parameters
1329
+ ----------
1330
+ pump : Pump
1331
+ pump information
1332
+ """
1333
+ polygon_file = resolve_filepath(ObjectDir.measure, pump.name, pump.polygon_file)
1334
+ # HydroMT function: get geodataframe from filename
1335
+ gdf_pump = self._model.data_catalog.get_geodataframe(
1336
+ polygon_file, geom=self._model.region, crs=self._model.crs
1337
+ )
1338
+
1339
+ # HydroMT function: create floodwall
1340
+ self._model.setup_drainage_structures(
1341
+ structures=gdf_pump,
1342
+ stype="pump",
1343
+ discharge=pump.discharge.convert(us.UnitTypesDischarge.cms),
1344
+ merge=True,
1345
+ )
1346
+
1347
+ ### SFINCS SETTERS ###
1348
    def _set_single_river_forcing(self, discharge: IDischarge):
        """Add discharge to overland sfincs model.

        The river must already exist in the SFINCS model; it is matched to the
        site configuration by coordinates within a small tolerance.

        Parameters
        ----------
        discharge : IDischarge
            Discharge object with discharge timeseries data and river information.

        Raises
        ------
        ValueError
            If the river cannot be matched to exactly one river in the SFINCS
            model, or (defensively) if the forcing type is unsupported.
        """
        if not isinstance(
            discharge, (DischargeConstant, DischargeSynthetic, DischargeCSV)
        ):
            self.logger.warning(
                f"Unsupported discharge forcing type: {discharge.__class__.__name__}"
            )
            return

        self.logger.info(f"Setting discharge forcing for river: {discharge.river.name}")

        time_frame = self.get_model_time()
        model_rivers = self._read_river_locations()

        # Check that the river is defined in the model and that the coordinates match
        river_loc = shapely.Point(
            discharge.river.x_coordinate, discharge.river.y_coordinate
        )
        tolerance = 0.001  # in degrees, ~111 meters at the equator. (0.0001: 11 meters at the equator)
        river_gdf = model_rivers[model_rivers.distance(river_loc) <= tolerance]
        river_inds = river_gdf.index.to_list()
        # Require a unique match: zero matches means a config/model mismatch,
        # more than one means ambiguous river locations.
        if len(river_inds) != 1:
            raise ValueError(
                f"River {discharge.river.name} is not defined in the sfincs model. Please ensure the river coordinates in the site.toml match the coordinates for rivers in the SFINCS model."
            )

        # Create a geodataframe with the river coordinates, the timeseries data and rename the column to the river index defined in the model
        # All discharge values are converted to m3/s (cms) for SFINCS.
        if isinstance(discharge, DischargeCSV):
            df = discharge.to_dataframe(time_frame)
            conversion = us.UnitfulDischarge(value=1.0, units=discharge.units).convert(
                us.UnitTypesDischarge.cms
            )
        elif isinstance(discharge, DischargeConstant):
            df = discharge.to_dataframe(time_frame)
            conversion = us.UnitfulDischarge(
                value=1.0, units=discharge.discharge.units
            ).convert(us.UnitTypesDischarge.cms)
        elif isinstance(discharge, DischargeSynthetic):
            df = discharge.to_dataframe(time_frame)
            conversion = us.UnitfulDischarge(
                value=1.0, units=discharge.timeseries.peak_value.units
            ).convert(us.UnitTypesDischarge.cms)
        else:
            # Unreachable given the isinstance guard above; kept as a safety net.
            raise ValueError(
                f"Unsupported discharge forcing type: {discharge.__class__}"
            )

        df *= conversion

        # SFINCS links the timeseries to the river via the model's river index
        df = df.rename(columns={df.columns[0]: river_inds[0]})

        # HydroMT function: set discharge forcing from time series and river coordinates
        self._model.setup_discharge_forcing(
            locations=river_gdf,
            timeseries=df,
            merge=True,
        )
1412
+
1413
+ def _turn_off_bnd_press_correction(self):
1414
+ """Turn off the boundary pressure correction in the sfincs model."""
1415
+ self.logger.info(
1416
+ "Turning off boundary pressure correction in the offshore model"
1417
+ )
1418
+ self._model.set_config("pavbnd", -9999)
1419
+
1420
def _set_waterlevel_forcing(self, df_ts: pd.DataFrame):
    """
    Add water level forcing to sfincs model.

    Values in the timeseries are expected to be relative to the main reference datum: `self.settings.water_level.reference`.
    The overland model reference: `self.settings.config.overland_model.reference` is used to convert the water levels to the reference of the overland model.

    Parameters
    ----------
    df_ts : pd.DataFrame
        Time series of water levels with the first column as the time index.
    """
    # Determine bnd points from reference overland model
    gdf_locs = self._read_waterlevel_boundary_locations()

    # Work on a copy: adding columns, renaming and `-=` below would otherwise
    # mutate the caller's DataFrame in place.
    df_ts = df_ts.copy()

    if len(df_ts.columns) == 1:
        # Go from 1 timeseries to timeseries for all boundary points (1-based columns)
        name = df_ts.columns[0]
        for i in range(1, len(gdf_locs)):
            df_ts[i + 1] = df_ts[name]
        df_ts.columns = list(range(1, len(gdf_locs) + 1))

    # Datum: shift from the main reference datum to the overland model's datum
    sfincs_overland_reference_height = self.settings.water_level.get_datum(
        self.settings.config.overland_model.reference
    ).height.convert(us.UnitTypesLength.meters)

    df_ts -= sfincs_overland_reference_height

    # HydroMT function: set waterlevel forcing from time series
    self._model.set_forcing_1d(
        name="bzs", df_ts=df_ts, gdf_locs=gdf_locs, merge=False
    )
1455
+
1456
+ # OFFSHORE
1457
def _add_pressure_forcing_from_grid(self, ds: xr.DataArray):
    """Add spatially varying barometric pressure to sfincs model.

    Parameters
    ----------
    ds : xr.DataArray
        - Required variables: ['press_msl' (Pa)]
        - Required coordinates: ['time', 'y', 'x']
        - spatial_ref: CRS
    """
    msg = "Adding pressure forcing to the offshore model"
    self.logger.info(msg)
    # HydroMT function: gridded pressure forcing
    self._model.setup_pressure_forcing_from_grid(press=ds)
1469
+
1470
def _add_bzs_from_bca(self, event: Event, physical_projection: PhysicalProjection):
    # ONLY offshore models
    """Convert tidal constituents from bca file to waterlevel timeseries that can be read in by hydromt_sfincs.

    Parameters
    ----------
    event : Event
        Event providing the simulation time frame.
    physical_projection : PhysicalProjection
        Projection whose sea level rise is added to the predicted tide.

    Raises
    ------
    ValueError
        If no offshore model is configured or no flow boundary points are found.
    """
    if self.settings.config.offshore_model is None:
        raise ValueError("No offshore model found in sfincs config.")

    self.logger.info("Adding water level forcing to the offshore model")
    sb = SfincsBoundary()
    sb.read_flow_boundary_points(self.get_model_root() / "sfincs.bnd")
    sb.read_astro_boundary_conditions(self.get_model_root() / "sfincs.bca")

    # 10-minute output interval. "10min" replaces the "T" alias, which is
    # deprecated since pandas 2.2.
    times = pd.date_range(
        start=event.time.start_time,
        end=event.time.end_time,
        freq="10min",
    )

    # Predict tidal signal and add SLR
    if not sb.flow_boundary_points:
        raise ValueError("No flow boundary points found.")

    # Static vertical datum offset of the offshore model (in meters)
    if self.settings.config.offshore_model.vertical_offset:
        correction = self.settings.config.offshore_model.vertical_offset.convert(
            us.UnitTypesLength.meters
        )
    else:
        correction = 0.0

    # One wl_df column per boundary point (1-based): tide + offset + sea level rise
    for bnd_ii in range(len(sb.flow_boundary_points)):
        tide_ii = (
            predict(sb.flow_boundary_points[bnd_ii].astro, times)
            + correction
            + physical_projection.sea_level_rise.convert(us.UnitTypesLength.meters)
        )

        if bnd_ii == 0:
            wl_df = pd.DataFrame(data={1: tide_ii}, index=times)
        else:
            wl_df[bnd_ii + 1] = tide_ii

    # Determine bnd points from reference overland model
    gdf_locs = self._read_waterlevel_boundary_locations()

    # HydroMT function: set waterlevel forcing from time series
    self._model.set_forcing_1d(
        name="bzs", df_ts=wl_df, gdf_locs=gdf_locs, merge=False
    )
1517
+
1518
+ ### PRIVATE GETTERS ###
1519
def _get_result_path(self, scenario: Scenario) -> Path:
    """Return the path to store the results."""
    output_root = self.database.scenarios.output_path
    return output_root / scenario.name / "Flooding"
1522
+
1523
def _get_simulation_path(
    self, scenario: Scenario, sub_event: Optional[Event] = None
) -> Path:
    """
    Return the path to the simulation results.

    Parameters
    ----------
    scenario : Scenario
        The scenario for which to get the simulation path.
    sub_event : Optional[Event], optional
        The sub-event for which to get the simulation path, by default None.
        Is only used when the event associated with the scenario is an EventSet.
    """
    overland_name = self.settings.config.overland_model.name
    base_path = self._get_result_path(scenario) / "simulations" / overland_name
    event = self.database.events.get(scenario.event)

    if isinstance(event, EventSet):
        if sub_event is None:
            raise ValueError("Event must be provided when scenario is an EventSet.")
        # Event sets nest each sub-event between the simulations dir and the model dir
        return base_path.parent / sub_event.name / base_path.name

    if isinstance(event, Event):
        return base_path

    raise ValueError(f"Unsupported mode: {event.mode}")
1552
+
1553
def _get_simulation_path_offshore(
    self, scenario: Scenario, sub_event: Optional[Event] = None
) -> Path:
    """Return the path to the offshore simulation directory for a scenario.

    Parameters
    ----------
    scenario : Scenario
        The scenario for which to get the offshore simulation path.
    sub_event : Optional[Event], optional
        The sub-event for which to get the simulation path, by default None.
        Required when the event associated with the scenario is an EventSet.

    Raises
    ------
    ValueError
        If no offshore model is configured, if the event is an EventSet but no
        sub_event is given, or if the event type is unsupported.
    """
    # Get the path to the offshore model (will not be used if offshore model is not created)
    if self.settings.config.offshore_model is None:
        raise ValueError("No offshore model found in sfincs config.")
    base_path = (
        self._get_result_path(scenario)
        / "simulations"
        / self.settings.config.offshore_model.name
    )
    event = self.database.events.get(scenario.event)
    if isinstance(event, EventSet):
        # Mirror _get_simulation_path: without this guard, `sub_event.name`
        # below raises an opaque AttributeError when sub_event is None.
        if sub_event is None:
            raise ValueError("Event must be provided when scenario is an EventSet.")
        return base_path.parent / sub_event.name / base_path.name
    elif isinstance(event, Event):
        return base_path
    else:
        raise ValueError(f"Unsupported mode: {event.mode}")
1571
+
1572
def _get_flood_map_paths(self, scenario: Scenario) -> list[Path]:
    """Return the paths to the flood maps that running this scenario should produce."""
    results_path = self._get_result_path(scenario)
    event = self.database.events.get(scenario.event)

    if isinstance(event, EventSet):
        # One risk map per configured return period
        return [
            results_path / f"RP_{rp:04d}_maps.nc"
            for rp in self.database.site.fiat.risk.return_periods
        ]
    if isinstance(event, Event):
        return [results_path / "max_water_level_map.nc"]
    raise ValueError(f"Unsupported mode: {event.mode}")
1587
+
1588
def _get_event_input_path(self, event: Event) -> Path:
    """Return the path to the event input directory."""
    events_root = self.database.events.input_path
    return events_root / event.name
1591
+
1592
+ def _get_zsmax(self):
1593
+ """Read zsmax file and return absolute maximum water level over entire simulation."""
1594
+ self._model.read_results()
1595
+ zsmax = self._model.results["zsmax"].max(dim="timemax")
1596
+ zsmax.attrs["units"] = "m"
1597
+ return zsmax
1598
+
1599
def _get_zs_points(self):
    """Read water level (zs) timeseries at observation points.

    Names are allocated from the site.toml.
    See also add_obs_points() above.
    """
    self._model.read_results()
    da = self._model.results["point_zs"]
    df = pd.DataFrame(index=pd.DatetimeIndex(da.time), data=da.to_numpy())

    # Station names/descriptions come from the site configuration (site.toml)
    if self.settings.obs_point is not None:
        names = [pt.name for pt in self.settings.obs_point]
        descriptions = [pt.description for pt in self.settings.obs_point]
    else:
        names = []
        descriptions = []

    pt_df = pd.DataFrame({"Name": names, "Description": descriptions})
    gdf = gpd.GeoDataFrame(
        pt_df,
        geometry=gpd.points_from_xy(da.point_x.values, da.point_y.values),
        crs=self._model.crs,
    )
    return df, gdf
1625
+
1626
def _create_spw_file_from_track(
    self,
    track_forcing: Union[RainfallTrack, WindTrack],
    hurricane_translation: TranslationModel,
    name: str,
    output_dir: Path,
    include_rainfall: bool = False,
    recreate: bool = False,
):
    """
    Create a spiderweb file from a given TropicalCyclone track and save it to the event's input directory.

    Providing the output_dir argument allows to save the spiderweb file in a different directory.

    Parameters
    ----------
    track_forcing : Union[RainfallTrack, WindTrack]
        Forcing whose `path` points to a track file (.cyc) or an existing spiderweb file (.spw).
    hurricane_translation : TranslationModel
        East/west and north/south offsets applied to the track before creating the spiderweb.
    name : str
        Event name, used for logging only.
    output_dir : Path
        The directory where the spiderweb file is saved (or copied to if it already exists and recreate is False)
    include_rainfall : bool, optional
        If True, rainfall is included in the spiderweb file, by default False
    recreate : bool, optional
        If True, the spiderweb file is recreated even if it already exists, by default False

    Returns
    -------
    Path
        the path to the created spiderweb file

    Raises
    ------
    ValueError
        If no track path is set, recreate is requested for a .spw input,
        or the file extension is neither .cyc nor .spw.
    FileNotFoundError
        If a .spw path is given but no such file exists.
    """
    if track_forcing.path is None:
        raise ValueError("No path to track file provided.")

    # Check file format
    match track_forcing.path.suffix:
        case ".spw":
            # A ready-made spiderweb file cannot be regenerated from itself
            if recreate:
                raise ValueError(
                    "Recreating spiderweb files from existing spiderweb files is not supported. Provide a track file instead."
                )

            # Use the given file, or a copy already present in output_dir
            if track_forcing.path.exists():
                return track_forcing.path

            elif (output_dir / track_forcing.path.name).exists():
                return output_dir / track_forcing.path.name

            else:
                raise FileNotFoundError(f"SPW file not found: {track_forcing.path}")
        case ".cyc":
            pass
        case _:
            raise ValueError(
                "Track files should be in the DDB_CYC file format and must have .cyc extension, or in the SPW file format and must have .spw extension"
            )

    # Check if the spiderweb file already exists
    spw_file = output_dir / track_forcing.path.with_suffix(".spw").name
    if spw_file.exists():
        if recreate:
            os.remove(spw_file)
        else:
            # Reuse the previously generated file
            return spw_file

    self.logger.info(
        f"Creating spiderweb file for hurricane event `{name}`. This may take a while."
    )

    # Initialize the tropical cyclone
    tc = TropicalCyclone()
    tc.read_track(filename=str(track_forcing.path), fmt="ddb_cyc")

    # Alter the track of the tc if necessary
    tc = self._translate_tc_track(
        tc=tc, hurricane_translation=hurricane_translation
    )

    # Rainfall
    start = "Including" if include_rainfall else "Excluding"
    self.logger.info(f"{start} rainfall in the spiderweb file")
    tc.include_rainfall = include_rainfall

    # Create spiderweb file from the track
    tc.to_spiderweb(spw_file)

    return spw_file
1708
+
1709
def _translate_tc_track(
    self, tc: TropicalCyclone, hurricane_translation: TranslationModel
):
    """Shift a tropical cyclone track by the configured east/west and north/south offsets."""
    no_eastwest = math.isclose(
        hurricane_translation.eastwest_translation.value, 0, abs_tol=1e-6
    )
    no_northsouth = math.isclose(
        hurricane_translation.northsouth_translation.value, 0, abs_tol=1e-6
    )
    if no_eastwest and no_northsouth:
        # Both offsets are (numerically) zero: nothing to translate
        return tc

    self.logger.info(f"Translating the track of the tropical cyclone `{tc.name}`")
    # First convert geodataframe to the local coordinate system
    local_crs = pyproj.CRS.from_string(self.settings.config.csname)
    tc.track = tc.track.to_crs(local_crs)

    # Translate the track in the local coordinate system (offsets in meters)
    xoff = hurricane_translation.eastwest_translation.convert(
        us.UnitTypesLength.meters
    )
    yoff = hurricane_translation.northsouth_translation.convert(
        us.UnitTypesLength.meters
    )
    tc.track["geometry"] = tc.track["geometry"].apply(
        lambda geom: translate(geom, xoff=xoff, yoff=yoff)
    )

    # Convert the geodataframe back to lat/lon
    tc.track = tc.track.to_crs(epsg=4326)

    return tc
1741
+
1742
# NOTE(review): this helper does not appear to be called anywhere in the adapter —
# candidate for removal (original TODO by @gundula).
def _downscale_hmax(self, zsmax, demfile: Path):
    """Downscale a coarse zsmax water-level grid onto the DEM and return the flood depth map."""
    # Read DEM and convert units to the metric units used by SFINCS
    dem_conversion = us.UnitfulLength(
        value=1.0, units=self.settings.dem.units
    ).convert(us.UnitTypesLength("meters"))
    dem = dem_conversion * self._model.data_catalog.get_rasterdataset(demfile)
    dem = dem.rio.reproject(self._model.crs)

    # Conversion factor from meters to the configured floodmap output units
    floodmap_conversion = us.UnitfulLength(
        value=1.0, units=us.UnitTypesLength.meters
    ).convert(self.settings.config.floodmap_units)

    # Cells shallower than hmin (in floodmap units) are masked out
    return utils.downscale_floodmap(
        zsmax=floodmap_conversion * zsmax,
        dep=floodmap_conversion * dem,
        hmin=0.01,
    )
1764
+
1765
def _read_river_locations(self) -> gpd.GeoDataFrame:
    """Parse the river source points from the model's sfincs.src file."""
    src_file = self.get_model_root() / "sfincs.src"

    # Each line holds an x and a y coordinate separated by whitespace
    with open(src_file) as f:
        points = [
            shapely.Point(float(parts[0]), float(parts[1]))
            for parts in (line.split() for line in f.readlines())
        ]

    return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)
1774
+
1775
def _read_waterlevel_boundary_locations(self) -> gpd.GeoDataFrame:
    """Parse the water-level boundary points from the model's sfincs.bnd file."""
    # Each line holds an x and a y coordinate separated by whitespace
    with open(self.get_model_root() / "sfincs.bnd") as f:
        points = [
            shapely.Point(float(parts[0]), float(parts[1]))
            for parts in (line.split() for line in f.readlines())
        ]

    return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)
1782
+
1783
+ def _setup_sfincs_logger(self, model_root: Path) -> logging.Logger:
1784
+ """Initialize the logger for the SFINCS model."""
1785
+ # Create a logger for the SFINCS model manually
1786
+ sfincs_logger = logging.getLogger("SfincsModel")
1787
+ for handler in sfincs_logger.handlers[:]:
1788
+ sfincs_logger.removeHandler(handler)
1789
+
1790
+ # Add a file handler
1791
+ file_handler = logging.FileHandler(
1792
+ filename=model_root.resolve() / "sfincs_model.log",
1793
+ mode="w",
1794
+ )
1795
+ sfincs_logger.setLevel(logging.DEBUG)
1796
+ sfincs_logger.addHandler(file_handler)
1797
+ return sfincs_logger
1798
+
1799
+ def _cleanup_simulation_folder(
1800
+ self,
1801
+ path: Path,
1802
+ extensions: list[str] = [".spw"],
1803
+ ):
1804
+ """Remove all files with the given extensions in the given path."""
1805
+ if not path.exists():
1806
+ return
1807
+
1808
+ for ext in extensions:
1809
+ for file in path.glob(f"*{ext}"):
1810
+ file.unlink()
1811
+
1812
def _load_scenario_objects(self, scenario: Scenario, event: Event) -> None:
    """Cache the scenario and its related database objects on the adapter."""
    self._scenario = scenario
    self._projection = self.database.projections.get(scenario.projection)
    self._strategy = self.database.strategies.get(scenario.strategy)
    self._event = event

    # Keep a handle to the event set only when the scenario's event is one
    maybe_event_set = self.database.events.get(scenario.event)
    self._event_set = (
        maybe_event_set if isinstance(maybe_event_set, EventSet) else None
    )
1823
+
1824
def _add_tide_gauge_plot(
    self, fig, event: Event, units: us.UnitTypesLength
) -> None:
    """Overlay measured tide-gauge water levels on a model water-level figure.

    Only applies to historical events with a configured tide gauge; otherwise
    the figure is left untouched. Gauge readings are shifted to the main
    reference datum and the two traces are labelled "model" and "measurement".

    Parameters
    ----------
    fig
        Plotly figure; assumed to already contain the model water-level trace
        at index 0 — TODO confirm against callers.
    event : Event
        Event whose time frame is used to query the gauge.
    units : us.UnitTypesLength
        Length units of the plot; gauge data is requested in these units.
    """
    # check if event is historic
    if not isinstance(event, HistoricalEvent):
        return
    if self.settings.tide_gauge is None:
        return
    df_gauge = self.settings.tide_gauge.get_waterlevels_in_time_frame(
        time=TimeFrame(
            start_time=event.time.start_time,
            end_time=event.time.end_time,
        ),
        units=us.UnitTypesLength(units),
    )

    if df_gauge is not None:
        # Gauge readings are relative to the gauge's own reference; shift them
        # to the main datum by adding the gauge reference height.
        gauge_reference_height = self.settings.water_level.get_datum(
            self.settings.tide_gauge.reference
        ).height.convert(units)

        waterlevel = df_gauge.iloc[:, 0] + gauge_reference_height

        # If data is available, add to plot
        fig.add_trace(
            go.Scatter(
                x=pd.DatetimeIndex(df_gauge.index),
                y=waterlevel,
                line_color="#ea6404",
            )
        )
        fig["data"][0]["name"] = "model"
        fig["data"][1]["name"] = "measurement"
        fig.update_layout(showlegend=True)