flood-adapt 0.3.9__py3-none-any.whl → 0.3.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100) hide show
  1. flood_adapt/__init__.py +26 -22
  2. flood_adapt/adapter/__init__.py +9 -9
  3. flood_adapt/adapter/fiat_adapter.py +1541 -1541
  4. flood_adapt/adapter/interface/hazard_adapter.py +70 -70
  5. flood_adapt/adapter/interface/impact_adapter.py +36 -36
  6. flood_adapt/adapter/interface/model_adapter.py +89 -89
  7. flood_adapt/adapter/interface/offshore.py +19 -19
  8. flood_adapt/adapter/sfincs_adapter.py +1853 -1848
  9. flood_adapt/adapter/sfincs_offshore.py +187 -193
  10. flood_adapt/config/config.py +248 -248
  11. flood_adapt/config/fiat.py +219 -219
  12. flood_adapt/config/gui.py +331 -331
  13. flood_adapt/config/sfincs.py +481 -336
  14. flood_adapt/config/site.py +129 -129
  15. flood_adapt/database_builder/database_builder.py +2210 -2210
  16. flood_adapt/database_builder/templates/default_units/imperial.toml +9 -9
  17. flood_adapt/database_builder/templates/default_units/metric.toml +9 -9
  18. flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -10
  19. flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -90
  20. flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -57
  21. flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -121
  22. flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -65
  23. flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -45
  24. flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -126
  25. flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -60
  26. flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -121
  27. flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -65
  28. flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -45
  29. flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -4
  30. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -143
  31. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -153
  32. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -127
  33. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -57
  34. flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -4
  35. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -191
  36. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -153
  37. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -178
  38. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -57
  39. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -9
  40. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -65
  41. flood_adapt/database_builder/templates/output_layers/bin_colors.toml +5 -5
  42. flood_adapt/database_builder.py +16 -16
  43. flood_adapt/dbs_classes/__init__.py +21 -21
  44. flood_adapt/dbs_classes/database.py +533 -684
  45. flood_adapt/dbs_classes/dbs_benefit.py +77 -76
  46. flood_adapt/dbs_classes/dbs_event.py +61 -59
  47. flood_adapt/dbs_classes/dbs_measure.py +112 -111
  48. flood_adapt/dbs_classes/dbs_projection.py +34 -34
  49. flood_adapt/dbs_classes/dbs_scenario.py +137 -137
  50. flood_adapt/dbs_classes/dbs_static.py +274 -273
  51. flood_adapt/dbs_classes/dbs_strategy.py +130 -129
  52. flood_adapt/dbs_classes/dbs_template.py +279 -278
  53. flood_adapt/dbs_classes/interface/database.py +107 -139
  54. flood_adapt/dbs_classes/interface/element.py +121 -121
  55. flood_adapt/dbs_classes/interface/static.py +47 -47
  56. flood_adapt/flood_adapt.py +1229 -1178
  57. flood_adapt/misc/database_user.py +16 -16
  58. flood_adapt/misc/exceptions.py +22 -0
  59. flood_adapt/misc/log.py +183 -183
  60. flood_adapt/misc/path_builder.py +54 -54
  61. flood_adapt/misc/utils.py +185 -185
  62. flood_adapt/objects/__init__.py +82 -82
  63. flood_adapt/objects/benefits/benefits.py +61 -61
  64. flood_adapt/objects/events/event_factory.py +135 -135
  65. flood_adapt/objects/events/event_set.py +88 -84
  66. flood_adapt/objects/events/events.py +236 -234
  67. flood_adapt/objects/events/historical.py +58 -58
  68. flood_adapt/objects/events/hurricane.py +68 -67
  69. flood_adapt/objects/events/synthetic.py +46 -50
  70. flood_adapt/objects/forcing/__init__.py +92 -92
  71. flood_adapt/objects/forcing/csv.py +68 -68
  72. flood_adapt/objects/forcing/discharge.py +66 -66
  73. flood_adapt/objects/forcing/forcing.py +150 -150
  74. flood_adapt/objects/forcing/forcing_factory.py +182 -182
  75. flood_adapt/objects/forcing/meteo_handler.py +93 -93
  76. flood_adapt/objects/forcing/netcdf.py +40 -40
  77. flood_adapt/objects/forcing/plotting.py +453 -429
  78. flood_adapt/objects/forcing/rainfall.py +98 -98
  79. flood_adapt/objects/forcing/tide_gauge.py +191 -191
  80. flood_adapt/objects/forcing/time_frame.py +90 -90
  81. flood_adapt/objects/forcing/timeseries.py +564 -564
  82. flood_adapt/objects/forcing/unit_system.py +580 -580
  83. flood_adapt/objects/forcing/waterlevels.py +108 -108
  84. flood_adapt/objects/forcing/wind.py +124 -124
  85. flood_adapt/objects/measures/measure_factory.py +92 -92
  86. flood_adapt/objects/measures/measures.py +551 -529
  87. flood_adapt/objects/object_model.py +74 -68
  88. flood_adapt/objects/projections/projections.py +103 -103
  89. flood_adapt/objects/scenarios/scenarios.py +22 -22
  90. flood_adapt/objects/strategies/strategies.py +89 -89
  91. flood_adapt/workflows/benefit_runner.py +579 -554
  92. flood_adapt/workflows/floodmap.py +85 -85
  93. flood_adapt/workflows/impacts_integrator.py +85 -85
  94. flood_adapt/workflows/scenario_runner.py +70 -70
  95. {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.11.dist-info}/LICENSE +674 -674
  96. {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.11.dist-info}/METADATA +867 -865
  97. flood_adapt-0.3.11.dist-info/RECORD +140 -0
  98. flood_adapt-0.3.9.dist-info/RECORD +0 -139
  99. {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.11.dist-info}/WHEEL +0 -0
  100. {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.11.dist-info}/top_level.txt +0 -0
@@ -1,1848 +1,1853 @@
1
- import logging
2
- import math
3
- import os
4
- import shutil
5
- import subprocess
6
- import tempfile
7
- from pathlib import Path
8
- from typing import Optional, Union
9
-
10
- import geopandas as gpd
11
- import hydromt_sfincs.utils as utils
12
- import numpy as np
13
- import pandas as pd
14
- import plotly.express as px
15
- import pyproj
16
- import shapely
17
- import xarray as xr
18
- from cht_cyclones.tropical_cyclone import TropicalCyclone
19
- from cht_tide.read_bca import SfincsBoundary
20
- from cht_tide.tide_predict import predict
21
- from hydromt_sfincs import SfincsModel as HydromtSfincsModel
22
- from hydromt_sfincs.quadtree import QuadtreeGrid
23
- from numpy import matlib
24
- from shapely.affinity import translate
25
-
26
- from flood_adapt.adapter.interface.hazard_adapter import IHazardAdapter
27
- from flood_adapt.config.config import Settings
28
- from flood_adapt.config.site import Site
29
- from flood_adapt.misc.log import FloodAdaptLogging
30
- from flood_adapt.misc.path_builder import (
31
- ObjectDir,
32
- TopLevelDir,
33
- db_path,
34
- )
35
- from flood_adapt.misc.utils import cd, resolve_filepath
36
- from flood_adapt.objects.events.event_set import EventSet
37
- from flood_adapt.objects.events.events import Event, Mode, Template
38
- from flood_adapt.objects.events.historical import HistoricalEvent
39
- from flood_adapt.objects.events.hurricane import TranslationModel
40
- from flood_adapt.objects.forcing import unit_system as us
41
- from flood_adapt.objects.forcing.discharge import (
42
- DischargeConstant,
43
- DischargeCSV,
44
- DischargeSynthetic,
45
- )
46
- from flood_adapt.objects.forcing.forcing import (
47
- ForcingSource,
48
- ForcingType,
49
- IDischarge,
50
- IForcing,
51
- IRainfall,
52
- IWaterlevel,
53
- IWind,
54
- )
55
- from flood_adapt.objects.forcing.meteo_handler import MeteoHandler
56
- from flood_adapt.objects.forcing.rainfall import (
57
- RainfallConstant,
58
- RainfallCSV,
59
- RainfallMeteo,
60
- RainfallNetCDF,
61
- RainfallSynthetic,
62
- RainfallTrack,
63
- )
64
- from flood_adapt.objects.forcing.time_frame import TimeFrame
65
- from flood_adapt.objects.forcing.waterlevels import (
66
- WaterlevelCSV,
67
- WaterlevelGauged,
68
- WaterlevelModel,
69
- WaterlevelSynthetic,
70
- )
71
- from flood_adapt.objects.forcing.wind import (
72
- WindConstant,
73
- WindCSV,
74
- WindMeteo,
75
- WindNetCDF,
76
- WindSynthetic,
77
- WindTrack,
78
- )
79
- from flood_adapt.objects.measures.measures import (
80
- FloodWall,
81
- GreenInfrastructure,
82
- Measure,
83
- Pump,
84
- )
85
- from flood_adapt.objects.projections.projections import (
86
- PhysicalProjection,
87
- Projection,
88
- )
89
- from flood_adapt.objects.scenarios.scenarios import Scenario
90
-
91
-
92
- class SfincsAdapter(IHazardAdapter):
93
- """Adapter for the SFINCS model.
94
-
95
- This class is used to run the SFINCS model and process the results.
96
-
97
- Attributes
98
- ----------
99
- settings : SfincsModel
100
- The settings for the SFINCS model.
101
- """
102
-
103
- logger = FloodAdaptLogging.getLogger("SfincsAdapter")
104
- _site: Site
105
- _model: HydromtSfincsModel
106
-
107
- ###############
108
- ### PUBLIC ####
109
- ###############
110
-
111
- ### HAZARD ADAPTER METHODS ###
112
- def __init__(self, model_root: Path):
113
- """Load overland sfincs model based on a root directory.
114
-
115
- Parameters
116
- ----------
117
- model_root : Path
118
- Root directory of overland sfincs model.
119
- """
120
- self.settings = self.database.site.sfincs
121
- self.units = self.database.site.gui.units
122
- self.sfincs_logger = self._setup_sfincs_logger(model_root)
123
- self._model = HydromtSfincsModel(
124
- root=str(model_root.resolve()), mode="r", logger=self.sfincs_logger
125
- )
126
- self._model.read()
127
-
128
- def read(self, path: Path):
129
- """Read the sfincs model from the current model root."""
130
- if Path(self._model.root).resolve() != Path(path).resolve():
131
- self._model.set_root(root=str(path), mode="r")
132
- self._model.read()
133
-
134
- def write(self, path_out: Union[str, os.PathLike], overwrite: bool = True):
135
- """Write the sfincs model configuration to a directory."""
136
- root = self.get_model_root()
137
- if not isinstance(path_out, Path):
138
- path_out = Path(path_out).resolve()
139
-
140
- if not path_out.exists():
141
- path_out.mkdir(parents=True)
142
-
143
- if root != path_out:
144
- shutil.copytree(root, path_out, dirs_exist_ok=True)
145
-
146
- write_mode = "w+" if overwrite else "w"
147
- with cd(path_out):
148
- self._model.set_root(root=str(path_out), mode=write_mode)
149
- self._model.write()
150
-
151
- def close_files(self):
152
- """Close all open files and clean up file handles."""
153
- for logger in [self.logger, self.sfincs_logger]:
154
- if hasattr(logger, "handlers"):
155
- for handler in logger.handlers:
156
- if isinstance(handler, logging.FileHandler):
157
- handler.close()
158
- logger.removeHandler(handler)
159
-
160
- def __enter__(self) -> "SfincsAdapter":
161
- return self
162
-
163
- def __exit__(self, exc_type, exc_value, traceback) -> bool:
164
- self.close_files()
165
- return False
166
-
167
- def has_run(self, scenario: Scenario) -> bool:
168
- """Check if the model has been run."""
169
- event = self.database.events.get(scenario.event)
170
- if event.mode == Mode.risk:
171
- sim_paths = [
172
- self._get_simulation_path(scenario, sub_event=sub_event)
173
- for sub_event in event.sub_events
174
- ]
175
- # No need to check postprocessing for risk scenarios
176
- return all(self.sfincs_completed(sim_path) for sim_path in sim_paths)
177
- else:
178
- return self.sfincs_completed(self._get_simulation_path(scenario))
179
-
180
- def execute(self, path: Path, strict: bool = True) -> bool:
181
- """
182
- Run the sfincs executable in the specified path.
183
-
184
- Parameters
185
- ----------
186
- path : str
187
- Path to the simulation folder.
188
- Default is None, in which case the model root is used.
189
- strict : bool, optional
190
- True: raise an error if the model fails to run.
191
- False: log a warning.
192
- Default is True.
193
-
194
- Returns
195
- -------
196
- bool
197
- True if the model ran successfully, False otherwise.
198
-
199
- """
200
- with cd(path):
201
- self.logger.info(f"Running SFINCS in {path}")
202
- process = subprocess.run(
203
- str(Settings().sfincs_bin_path),
204
- stdout=subprocess.PIPE,
205
- stderr=subprocess.PIPE,
206
- text=True,
207
- )
208
- self.sfincs_logger.info(process.stdout)
209
- self.logger.debug(process.stdout)
210
-
211
- self._cleanup_simulation_folder(path)
212
-
213
- if process.returncode != 0:
214
- if Settings().delete_crashed_runs:
215
- # Remove all files in the simulation folder except for the log files
216
- for subdir, dirs, files in os.walk(path, topdown=False):
217
- for file in files:
218
- if not file.endswith(".log"):
219
- os.remove(os.path.join(subdir, file))
220
-
221
- if not os.listdir(subdir):
222
- os.rmdir(subdir)
223
-
224
- if strict:
225
- raise RuntimeError(f"SFINCS model failed to run in {path}.")
226
- else:
227
- self.logger.error(f"SFINCS model failed to run in {path}.")
228
-
229
- return process.returncode == 0
230
-
231
- def run(self, scenario: Scenario):
232
- """Run the whole workflow (Preprocess, process and postprocess) for a given scenario."""
233
- self._ensure_no_existing_forcings()
234
- event = self.database.events.get(scenario.event)
235
-
236
- if event.mode == Mode.risk:
237
- self._run_risk_scenario(scenario=scenario)
238
- else:
239
- self._run_single_event(scenario=scenario, event=event)
240
-
241
- def preprocess(self, scenario: Scenario, event: Event):
242
- """
243
- Preprocess the SFINCS model for a given scenario.
244
-
245
- Parameters
246
- ----------
247
- scenario : Scenario
248
- Scenario to preprocess.
249
- event : Event, optional
250
- Event to preprocess, by default None.
251
- """
252
- # I dont like this due to it being state based and might break if people use functions in the wrong order
253
- # Currently only used to pass projection + event stuff to WaterlevelModel
254
-
255
- sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
256
- sim_path.mkdir(parents=True, exist_ok=True)
257
- template_path = (
258
- self.database.static.get_overland_sfincs_model().get_model_root()
259
- )
260
- shutil.copytree(template_path, sim_path, dirs_exist_ok=True)
261
-
262
- with SfincsAdapter(model_root=sim_path) as model:
263
- model._load_scenario_objects(scenario, event)
264
- is_risk = "Probabilistic " if model._event_set is not None else ""
265
- self.logger.info(
266
- f"Preprocessing Scenario `{model._scenario.name}`: {is_risk}Event `{model._event.name}`, Strategy `{model._strategy.name}`, Projection `{model._projection.name}`"
267
- )
268
- # Write template model to output path and set it as the model root so focings can write to it
269
- model.set_timing(model._event.time)
270
- model.write(sim_path)
271
-
272
- # Event
273
- for forcing in model._event.get_forcings():
274
- model.add_forcing(forcing)
275
-
276
- if self.rainfall is not None:
277
- model.rainfall *= model._event.rainfall_multiplier
278
- else:
279
- model.logger.warning(
280
- "Failed to add event rainfall multiplier, no rainfall forcing found in the model."
281
- )
282
-
283
- # Measures
284
- for measure in model._strategy.get_hazard_measures():
285
- model.add_measure(measure)
286
-
287
- # Projection
288
- model.add_projection(model._projection)
289
-
290
- # Output
291
- model.add_obs_points()
292
-
293
- # Save any changes made to disk as well
294
- model.write(path_out=sim_path)
295
-
296
- def process(self, scenario: Scenario, event: Event):
297
- if event.mode != Mode.single_event:
298
- raise ValueError(f"Unsupported event mode: {event.mode}.")
299
-
300
- sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
301
- self.logger.info(f"Running SFINCS for single event Scenario `{scenario.name}`")
302
- self.execute(sim_path)
303
-
304
- def postprocess(self, scenario: Scenario, event: Event):
305
- if event.mode != Mode.single_event:
306
- raise ValueError(f"Unsupported event mode: {event.mode}.")
307
-
308
- self.logger.info(f"Postprocessing SFINCS for Scenario `{scenario.name}`")
309
- if not self.sfincs_completed(
310
- self._get_simulation_path(scenario, sub_event=event)
311
- ):
312
- raise RuntimeError("SFINCS was not run successfully!")
313
-
314
- self.write_floodmap_geotiff(scenario)
315
- self.plot_wl_obs(scenario)
316
- self.write_water_level_map(scenario)
317
-
318
- def set_timing(self, time: TimeFrame):
319
- """Set model reference times."""
320
- self.logger.info(f"Setting timing for the SFINCS model: `{time}`")
321
- self._model.set_config("tref", time.start_time)
322
- self._model.set_config("tstart", time.start_time)
323
- self._model.set_config("tstop", time.end_time)
324
-
325
- def add_forcing(self, forcing: IForcing):
326
- """Get forcing data and add it."""
327
- if forcing is None:
328
- return
329
-
330
- self.logger.info(
331
- f"Adding {forcing.type.capitalize()}: {forcing.source.capitalize()}"
332
- )
333
- if isinstance(forcing, IRainfall):
334
- self._add_forcing_rain(forcing)
335
- elif isinstance(forcing, IWind):
336
- self._add_forcing_wind(forcing)
337
- elif isinstance(forcing, IDischarge):
338
- self._add_forcing_discharge(forcing)
339
- elif isinstance(forcing, IWaterlevel):
340
- self._add_forcing_waterlevels(forcing)
341
- else:
342
- self.logger.warning(
343
- f"Skipping unsupported forcing type {forcing.__class__.__name__}"
344
- )
345
-
346
- def add_measure(self, measure: Measure):
347
- """Get measure data and add it."""
348
- self.logger.info(
349
- f"Adding {measure.__class__.__name__.capitalize()} `{measure.name}`"
350
- )
351
-
352
- if isinstance(measure, FloodWall):
353
- self._add_measure_floodwall(measure)
354
- elif isinstance(measure, GreenInfrastructure):
355
- self._add_measure_greeninfra(measure)
356
- elif isinstance(measure, Pump):
357
- self._add_measure_pump(measure)
358
- else:
359
- self.logger.warning(
360
- f"Skipping unsupported measure type {measure.__class__.__name__}"
361
- )
362
-
363
- def add_projection(self, projection: Projection):
364
- """Get forcing data currently in the sfincs model and add the projection it."""
365
- self.logger.info(f"Adding Projection `{projection.name}`")
366
- phys_projection = projection.physical_projection
367
-
368
- if phys_projection.sea_level_rise:
369
- self.logger.info(
370
- f"Adding projected sea level rise `{phys_projection.sea_level_rise}`"
371
- )
372
- if self.waterlevels is not None:
373
- self.waterlevels += phys_projection.sea_level_rise.convert(
374
- us.UnitTypesLength.meters
375
- )
376
- else:
377
- self.logger.warning(
378
- "Failed to add sea level rise, no water level forcing found in the model."
379
- )
380
-
381
- if phys_projection.rainfall_multiplier:
382
- self.logger.info(
383
- f"Adding projected rainfall multiplier `{phys_projection.rainfall_multiplier}`"
384
- )
385
- if self.rainfall is not None:
386
- self.rainfall *= phys_projection.rainfall_multiplier
387
- else:
388
- self.logger.warning(
389
- "Failed to add projected rainfall multiplier, no rainfall forcing found in the model."
390
- )
391
-
392
- ### GETTERS ###
393
- def get_model_time(self) -> TimeFrame:
394
- t0, t1 = self._model.get_model_time()
395
- return TimeFrame(start_time=t0, end_time=t1)
396
-
397
- def get_model_root(self) -> Path:
398
- return Path(self._model.root)
399
-
400
- def get_mask(self):
401
- """Get mask with inactive cells from model."""
402
- mask = self._model.grid["msk"]
403
- return mask
404
-
405
- def get_bedlevel(self):
406
- """Get bed level from model."""
407
- self._model.read_results()
408
- zb = self._model.results["zb"]
409
- return zb
410
-
411
- def get_model_boundary(self) -> gpd.GeoDataFrame:
412
- """Get bounding box from model."""
413
- return self._model.region
414
-
415
- def get_model_grid(self) -> QuadtreeGrid:
416
- """Get grid from model.
417
-
418
- Returns
419
- -------
420
- QuadtreeGrid
421
- QuadtreeGrid with the model grid
422
- """
423
- return self._model.quadtree
424
-
425
- # Forcing properties
426
- @property
427
- def waterlevels(self) -> xr.Dataset | xr.DataArray | None:
428
- return self._model.forcing.get("bzs")
429
-
430
- @waterlevels.setter
431
- def waterlevels(self, waterlevels: xr.Dataset | xr.DataArray):
432
- if self.waterlevels is None or self.waterlevels.size == 0:
433
- raise ValueError("No water level forcing found in the model.")
434
- self._model.forcing["bzs"] = waterlevels
435
-
436
- @property
437
- def discharge(self) -> xr.Dataset | xr.DataArray | None:
438
- return self._model.forcing.get("dis")
439
-
440
- @discharge.setter
441
- def discharge(self, discharge: xr.Dataset | xr.DataArray):
442
- if self.discharge is None or self.discharge.size == 0:
443
- raise ValueError("No discharge forcing found in the model.")
444
- self._model.forcing["dis"] = discharge
445
-
446
- @property
447
- def rainfall(self) -> xr.Dataset | xr.DataArray | None:
448
- names = ["precip", "precip_2d"]
449
- in_model = [name for name in names if name in self._model.forcing]
450
- if len(in_model) == 0:
451
- return None
452
- elif len(in_model) == 1:
453
- return self._model.forcing[in_model[0]]
454
- else:
455
- raise ValueError("Multiple rainfall forcings found in the model.")
456
-
457
- @rainfall.setter
458
- def rainfall(self, rainfall: xr.Dataset | xr.DataArray):
459
- if self.rainfall is None or self.rainfall.size == 0:
460
- raise ValueError("No rainfall forcing found in the model.")
461
- elif "precip_2d" in self._model.forcing:
462
- self._model.forcing["precip_2d"] = rainfall
463
- elif "precip" in self._model.forcing:
464
- self._model.forcing["precip"] = rainfall
465
- else:
466
- raise ValueError("Unsupported rainfall forcing in the model.")
467
-
468
- @property
469
- def wind(self) -> xr.Dataset | xr.DataArray | None:
470
- wind_names = ["wnd", "wind_2d", "wind", "wind10_u", "wind10_v"]
471
- wind_in_model = [name for name in wind_names if name in self._model.forcing]
472
- if len(wind_in_model) == 0:
473
- return None
474
- elif len(wind_in_model) == 1:
475
- return self._model.forcing[wind_in_model[0]]
476
- elif len(wind_in_model) == 2:
477
- if not ("wind10_u" in wind_in_model and "wind10_v" in wind_in_model):
478
- raise ValueError(
479
- "Multiple wind forcings found in the model. Both should be wind10_u and wind10_v or a singular wind forcing."
480
- )
481
- return xr.Dataset(
482
- {
483
- "wind10_u": self._model.forcing["wind10_u"],
484
- "wind10_v": self._model.forcing["wind10_v"],
485
- }
486
- )
487
- else:
488
- raise ValueError("Multiple wind forcings found in the model.")
489
-
490
- @wind.setter
491
- def wind(self, wind: xr.Dataset | xr.DataArray):
492
- if (not self.wind) or (self.wind.size == 0):
493
- raise ValueError("No wind forcing found in the model.")
494
-
495
- elif "wind_2d" in self._model.forcing:
496
- self._model.forcing["wind_2d"] = wind
497
- elif "wind" in self._model.forcing:
498
- self._model.forcing["wind"] = wind
499
- elif "wnd" in self._model.forcing:
500
- self._model.forcing["wnd"] = wind
501
- elif "wind10_u" in self._model.forcing and "wind10_v" in self._model.forcing:
502
- self._model.forcing["wind10_u"] = wind["wind10_u"]
503
- self._model.forcing["wind10_v"] = wind["wind10_v"]
504
- else:
505
- raise ValueError("Unsupported wind forcing in the model.")
506
-
507
- ### OUTPUT ###
508
- def run_completed(self, scenario: Scenario) -> bool:
509
- """Check if the entire model run has been completed successfully by checking if all flood maps exist that are created in postprocess().
510
-
511
- Returns
512
- -------
513
- bool : True if all flood maps exist, False otherwise.
514
-
515
- """
516
- any_floodmap = len(self._get_flood_map_paths(scenario)) > 0
517
- all_exist = all(
518
- floodmap.exists() for floodmap in self._get_flood_map_paths(scenario)
519
- )
520
- return any_floodmap and all_exist
521
-
522
- def sfincs_completed(self, sim_path: Path) -> bool:
523
- """Check if the sfincs executable has been run successfully by checking if the output files exist in the simulation folder.
524
-
525
- Parameters
526
- ----------
527
- sim_path : Path
528
- Path to the simulation folder to check.
529
-
530
- Returns
531
- -------
532
- bool: True if the sfincs executable has been run successfully, False otherwise.
533
-
534
- """
535
- SFINCS_OUTPUT_FILES = ["sfincs_map.nc"]
536
-
537
- if self.settings.obs_point is not None:
538
- SFINCS_OUTPUT_FILES.append("sfincs_his.nc")
539
-
540
- to_check = [Path(sim_path) / file for file in SFINCS_OUTPUT_FILES]
541
- return all(output.exists() for output in to_check)
542
-
543
- def write_floodmap_geotiff(
544
- self, scenario: Scenario, sim_path: Optional[Path] = None
545
- ):
546
- """
547
- Read simulation results from SFINCS and saves a geotiff with the maximum water levels.
548
-
549
- Produced floodmap is in the units defined in the sfincs config settings.
550
-
551
- Parameters
552
- ----------
553
- scenario : Scenario
554
- Scenario for which to create the floodmap.
555
- sim_path : Path, optional
556
- Path to the simulation folder, by default None.
557
- """
558
- self.logger.info("Writing flood maps to geotiff")
559
- results_path = self._get_result_path(scenario)
560
- sim_path = sim_path or self._get_simulation_path(scenario)
561
- demfile = self.database.static_path / "dem" / self.settings.dem.filename
562
-
563
- with SfincsAdapter(model_root=sim_path) as model:
564
- zsmax = model._get_zsmax()
565
-
566
- dem = model._model.data_catalog.get_rasterdataset(demfile)
567
-
568
- # convert dem from dem units to floodmap units
569
- dem_conversion = us.UnitfulLength(
570
- value=1.0, units=self.settings.dem.units
571
- ).convert(self.settings.config.floodmap_units)
572
-
573
- floodmap_fn = results_path / f"FloodMap_{scenario.name}.tif"
574
-
575
- # convert zsmax from meters to floodmap units
576
- floodmap_conversion = us.UnitfulLength(
577
- value=1.0, units=us.UnitTypesLength.meters
578
- ).convert(self.settings.config.floodmap_units)
579
-
580
- utils.downscale_floodmap(
581
- zsmax=floodmap_conversion * zsmax,
582
- dep=dem_conversion * dem,
583
- hmin=0.01,
584
- floodmap_fn=str(floodmap_fn),
585
- )
586
-
587
- def write_water_level_map(
588
- self, scenario: Scenario, sim_path: Optional[Path] = None
589
- ):
590
- """Read simulation results from SFINCS and saves a netcdf with the maximum water levels."""
591
- self.logger.info("Writing water level map to netcdf")
592
- results_path = self._get_result_path(scenario)
593
- sim_path = sim_path or self._get_simulation_path(scenario)
594
-
595
- with SfincsAdapter(model_root=sim_path) as model:
596
- zsmax = model._get_zsmax()
597
- zsmax.to_netcdf(results_path / "max_water_level_map.nc")
598
-
599
- def plot_wl_obs(
600
- self,
601
- scenario: Scenario,
602
- ):
603
- """Plot water levels at SFINCS observation points as html.
604
-
605
- Only for single event scenarios, or for a specific simulation path containing the written and processed sfincs model.
606
- """
607
- if not self.settings.obs_point:
608
- self.logger.warning("No observation points provided in config.")
609
- return
610
-
611
- self.logger.info("Plotting water levels at observation points")
612
- sim_path = self._get_simulation_path(scenario)
613
-
614
- # read SFINCS model
615
- with SfincsAdapter(model_root=sim_path) as model:
616
- df, gdf = model._get_zs_points()
617
-
618
- gui_units = us.UnitTypesLength(
619
- self.database.site.gui.units.default_length_units
620
- )
621
- conversion_factor = us.UnitfulLength(
622
- value=1.0, units=us.UnitTypesLength("meters")
623
- ).convert(gui_units)
624
-
625
- overland_reference_height = self.settings.water_level.get_datum(
626
- self.settings.config.overland_model.reference
627
- ).height.convert(gui_units)
628
-
629
- for ii, col in enumerate(df.columns):
630
- # Plot actual thing
631
- fig = px.line(
632
- df[col] * conversion_factor
633
- + overland_reference_height # convert to reference datum for plotting
634
- )
635
-
636
- fig.add_hline(
637
- y=0,
638
- line_dash="dash",
639
- line_color="#000000",
640
- annotation_text=self.settings.water_level.reference,
641
- annotation_position="bottom right",
642
- )
643
-
644
- # plot reference water levels
645
- for wl_ref in self.settings.water_level.datums:
646
- if (
647
- wl_ref.name == self.settings.config.overland_model.reference
648
- or wl_ref.name in self.database.site.gui.plotting.excluded_datums
649
- ):
650
- continue
651
- fig.add_hline(
652
- y=wl_ref.height.convert(gui_units),
653
- line_dash="dash",
654
- line_color="#3ec97c",
655
- annotation_text=wl_ref.name,
656
- annotation_position="bottom right",
657
- )
658
-
659
- fig.update_layout(
660
- autosize=False,
661
- height=100 * 2,
662
- width=280 * 2,
663
- margin={"r": 0, "l": 0, "b": 0, "t": 20},
664
- font={"size": 10, "color": "black", "family": "Arial"},
665
- title={
666
- "text": gdf.iloc[ii]["Description"],
667
- "font": {"size": 12, "color": "black", "family": "Arial"},
668
- "x": 0.5,
669
- "xanchor": "center",
670
- },
671
- xaxis_title="Time",
672
- yaxis_title=f"Water level [{gui_units.value}] above {self.settings.water_level.reference}",
673
- yaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
674
- xaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
675
- showlegend=False,
676
- )
677
-
678
- event = self.database.events.get(scenario.event)
679
- if self.settings.obs_point[ii].name == self.settings.tide_gauge.name:
680
- self._add_tide_gauge_plot(fig, event, units=gui_units)
681
-
682
- # write html to results folder
683
- station_name = gdf.iloc[ii]["Name"]
684
- results_path = self._get_result_path(scenario)
685
- fig.write_html(results_path / f"{station_name}_timeseries.html")
686
-
687
- def add_obs_points(self):
688
- """Add observation points provided in the site toml to SFINCS model."""
689
- if self.settings.obs_point is None:
690
- return
691
- self.logger.info("Adding observation points to the overland flood model")
692
-
693
- obs_points = self.settings.obs_point
694
- names = []
695
- lat = []
696
- lon = []
697
- for pt in obs_points:
698
- names.append(pt.name)
699
- lat.append(pt.lat)
700
- lon.append(pt.lon)
701
-
702
- # create GeoDataFrame from obs_points in site file
703
- df = pd.DataFrame({"name": names})
704
- gdf = gpd.GeoDataFrame(
705
- df, geometry=gpd.points_from_xy(lon, lat), crs="EPSG:4326"
706
- )
707
-
708
- # Add locations to SFINCS file
709
- self._model.setup_observation_points(locations=gdf, merge=False)
710
-
711
- def get_wl_df_from_offshore_his_results(self) -> pd.DataFrame:
712
- """Create a pd.Dataframe with waterlevels from the offshore model at the bnd locations of the overland model.
713
-
714
- Returns
715
- -------
716
- wl_df: pd.DataFrame
717
- time series of water level.
718
- """
719
- self.logger.info("Reading water levels from offshore model")
720
- ds_his = utils.read_sfincs_his_results(
721
- Path(self._model.root) / "sfincs_his.nc",
722
- crs=self._model.crs.to_epsg(),
723
- )
724
- wl_df = pd.DataFrame(
725
- data=ds_his.point_zs.to_numpy(),
726
- index=ds_his.time.to_numpy(),
727
- columns=np.arange(1, ds_his.point_zs.to_numpy().shape[1] + 1, 1),
728
- )
729
- return wl_df
730
-
731
- ## RISK EVENTS ##
732
- def calculate_rp_floodmaps(self, scenario: Scenario):
733
- """Calculate flood risk maps from a set of (currently) SFINCS water level outputs using linear interpolation.
734
-
735
- It would be nice to make it more widely applicable and move the loading of the SFINCS results to self.postprocess_sfincs().
736
-
737
- generates return period water level maps in netcdf format to be used by FIAT
738
- generates return period water depth maps in geotiff format as product for users
739
-
740
- TODO: make this robust and more efficient for bigger datasets.
741
- """
742
- event: EventSet = self.database.events.get(scenario.event)
743
- if not isinstance(event, EventSet):
744
- raise ValueError("This function is only available for risk scenarios.")
745
-
746
- result_path = self._get_result_path(scenario)
747
- sim_paths = [
748
- self._get_simulation_path(scenario, sub_event=sub_event)
749
- for sub_event in event._events
750
- ]
751
-
752
- phys_proj = self.database.projections.get(
753
- scenario.projection
754
- ).physical_projection
755
-
756
- floodmap_rp = self.database.site.fiat.risk.return_periods
757
- frequencies = [sub_event.frequency for sub_event in event.sub_events]
758
-
759
- # adjust storm frequency for hurricane events
760
- if not math.isclose(phys_proj.storm_frequency_increase, 0, abs_tol=1e-9):
761
- storminess_increase = phys_proj.storm_frequency_increase / 100.0
762
- for ii, event in enumerate(event._events):
763
- if event.template == Template.Hurricane:
764
- frequencies[ii] = frequencies[ii] * (1 + storminess_increase)
765
-
766
- with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
767
- # read mask and bed level
768
- mask = dummymodel.get_mask().stack(z=("x", "y"))
769
- zb = dummymodel.get_bedlevel().stack(z=("x", "y")).to_numpy()
770
-
771
- zs_maps = []
772
- for simulation_path in sim_paths:
773
- # read zsmax data from overland sfincs model
774
- with SfincsAdapter(model_root=simulation_path) as sim:
775
- zsmax = sim._get_zsmax().load()
776
- zs_stacked = zsmax.stack(z=("x", "y"))
777
- zs_maps.append(zs_stacked)
778
-
779
- # Create RP flood maps
780
-
781
- # 1a: make a table of all water levels and associated frequencies
782
- zs = xr.concat(zs_maps, pd.Index(frequencies, name="frequency"))
783
- # Get the indices of columns with all NaN values
784
- nan_cells = np.where(np.all(np.isnan(zs), axis=0))[0]
785
- # fill nan values with minimum bed levels in each grid cell, np.interp cannot ignore nan values
786
- zs = xr.where(np.isnan(zs), np.tile(zb, (zs.shape[0], 1)), zs)
787
- # Get table of frequencies
788
- freq = np.tile(frequencies, (zs.shape[1], 1)).transpose()
789
-
790
- # 1b: sort water levels in descending order and include the frequencies in the sorting process
791
- # (i.e. each h-value should be linked to the same p-values as in step 1a)
792
- sort_index = zs.argsort(axis=0)
793
- sorted_prob = np.flipud(np.take_along_axis(freq, sort_index, axis=0))
794
- sorted_zs = np.flipud(np.take_along_axis(zs.values, sort_index, axis=0))
795
-
796
- # 1c: Compute exceedance probabilities of water depths
797
- # Method: accumulate probabilities from top to bottom
798
- prob_exceed = np.cumsum(sorted_prob, axis=0)
799
-
800
- # 1d: Compute return periods of water depths
801
- # Method: simply take the inverse of the exceedance probability (1/Pex)
802
- rp_zs = 1.0 / prob_exceed
803
-
804
- # For each return period (T) of interest do the following:
805
- # For each grid cell do the following:
806
- # Use the table from step [1d] as a “lookup-table” to derive the T-year water depth. Use a 1-d interpolation technique:
807
- # h(T) = interp1 (log(T*), h*, log(T))
808
- # in which t* and h* are the values from the table and T is the return period (T) of interest
809
- # The resulting T-year water depths for all grids combined form the T-year hazard map
810
- rp_da = xr.DataArray(rp_zs, dims=zs.dims)
811
-
812
- # no_data_value = -999 # in SFINCS
813
- # sorted_zs = xr.where(sorted_zs == no_data_value, np.nan, sorted_zs)
814
-
815
- valid_cells = np.where(mask == 1)[
816
- 0
817
- ] # only loop over cells where model is not masked
818
- h = matlib.repmat(
819
- np.copy(zb), len(floodmap_rp), 1
820
- ) # if not flooded (i.e. not in valid_cells) revert to bed_level, read from SFINCS results so it is the minimum bed level in a grid cell
821
-
822
- self.logger.info("Calculating flood risk maps, this may take some time")
823
- for jj in valid_cells: # looping over all non-masked cells.
824
- # linear interpolation for all return periods to evaluate
825
- h[:, jj] = np.interp(
826
- np.log10(floodmap_rp),
827
- np.log10(rp_da[::-1, jj]),
828
- sorted_zs[::-1, jj],
829
- left=0,
830
- )
831
-
832
- # Re-fill locations that had nan water level for all simulations with nans
833
- h[:, nan_cells] = np.full(h[:, nan_cells].shape, np.nan)
834
-
835
- # If a cell has the same water-level as the bed elevation it should be dry (turn to nan)
836
- diff = h - np.tile(zb, (h.shape[0], 1))
837
- dry = (
838
- diff < 10e-10
839
- ) # here we use a small number instead of zero for rounding errors
840
- h[dry] = np.nan
841
-
842
- for ii, rp in enumerate(floodmap_rp):
843
- # #create single nc
844
- zs_rp_single = xr.DataArray(
845
- data=h[ii, :], coords={"z": zs["z"]}, attrs={"units": "meters"}
846
- ).unstack()
847
- zs_rp_single = zs_rp_single.rio.write_crs(
848
- zsmax.raster.crs
849
- ) # , inplace=True)
850
- zs_rp_single = zs_rp_single.to_dataset(name="risk_map")
851
- fn_rp = result_path / f"RP_{rp:04d}_maps.nc"
852
- zs_rp_single.to_netcdf(fn_rp)
853
-
854
- # write geotiff
855
- # dem file for high resolution flood depth map
856
- demfile = self.database.static_path / "dem" / self.settings.dem.filename
857
-
858
- # writing the geotiff to the scenario results folder
859
- with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
860
- dem = dummymodel._model.data_catalog.get_rasterdataset(demfile)
861
- zsmax = zs_rp_single.to_array().squeeze().transpose()
862
- floodmap_fn = fn_rp.with_suffix(".tif")
863
-
864
- # convert dem from dem units to floodmap units
865
- dem_conversion = us.UnitfulLength(
866
- value=1.0, units=self.settings.dem.units
867
- ).convert(self.settings.config.floodmap_units)
868
-
869
- # convert zsmax from meters to floodmap units
870
- floodmap_conversion = us.UnitfulLength(
871
- value=1.0, units=us.UnitTypesLength.meters
872
- ).convert(self.settings.config.floodmap_units)
873
-
874
- utils.downscale_floodmap(
875
- zsmax=floodmap_conversion * zsmax,
876
- dep=dem_conversion * dem,
877
- hmin=0.01,
878
- floodmap_fn=str(floodmap_fn),
879
- )
880
-
881
- ######################################
882
- ### PRIVATE - use at your own risk ###
883
- ######################################
884
- def _run_single_event(self, scenario: Scenario, event: Event):
885
- self.preprocess(scenario, event)
886
- self.process(scenario, event)
887
- self.postprocess(scenario, event)
888
- shutil.rmtree(
889
- self._get_simulation_path(scenario, sub_event=event), ignore_errors=True
890
- )
891
-
892
- def _run_risk_scenario(self, scenario: Scenario):
893
- """Run the whole workflow for a risk scenario.
894
-
895
- This means preprocessing and running the SFINCS model for each event in the event set, and then postprocessing the results.
896
- """
897
- event_set: EventSet = self.database.events.get(scenario.event)
898
- total = len(event_set._events)
899
-
900
- for i, sub_event in enumerate(event_set._events):
901
- sim_path = self._get_simulation_path(scenario, sub_event=sub_event)
902
-
903
- # Preprocess
904
- self.preprocess(scenario, event=sub_event)
905
- self.logger.info(
906
- f"Running SFINCS for Eventset Scenario `{scenario.name}`, Event `{sub_event.name}` ({i + 1}/{total})"
907
- )
908
- self.execute(sim_path)
909
-
910
- # Postprocess
911
- self.calculate_rp_floodmaps(scenario)
912
-
913
- # Cleanup
914
- for i, sub_event in enumerate(event_set._events):
915
- shutil.rmtree(
916
- self._get_simulation_path(scenario, sub_event=sub_event),
917
- ignore_errors=True,
918
- )
919
-
920
- def _ensure_no_existing_forcings(self):
921
- """Check for existing forcings in the model and raise an error if any are found."""
922
- all_forcings = {
923
- "waterlevel": self.waterlevels,
924
- "rainfall": self.rainfall,
925
- "wind": self.wind,
926
- "discharge": self.discharge,
927
- }
928
- contains_forcings = ", ".join(
929
- [
930
- f"{name.capitalize()}"
931
- for name, forcing in all_forcings.items()
932
- if forcing is not None
933
- ]
934
- )
935
- if contains_forcings:
936
- raise ValueError(
937
- f"{contains_forcings} forcing(s) should not exists in the SFINCS template model. Remove it from the SFINCS model located at: {self.get_model_root()}. For more information on SFINCS and its input files, see the SFINCS documentation at: `https://sfincs.readthedocs.io/en/latest/input.html`"
938
- )
939
-
940
- ### FORCING ###
941
- def _add_forcing_wind(
942
- self,
943
- wind: IWind,
944
- ):
945
- """Add spatially constant wind forcing to sfincs model. Use timeseries or a constant magnitude and direction.
946
-
947
- Parameters
948
- ----------
949
- timeseries : Union[str, os.PathLike], optional
950
- path to file of timeseries file (.csv) which has three columns: time, magnitude and direction, by default None
951
- const_mag : float, optional
952
- magnitude of time-invariant wind forcing [m/s], by default None
953
- const_dir : float, optional
954
- direction of time-invariant wind forcing [deg], by default None
955
- """
956
- time_frame = self.get_model_time()
957
- if isinstance(wind, WindConstant):
958
- # HydroMT function: set wind forcing from constant magnitude and direction
959
- self._model.setup_wind_forcing(
960
- timeseries=None,
961
- magnitude=wind.speed.convert(us.UnitTypesVelocity.mps),
962
- direction=wind.direction.value,
963
- )
964
- elif isinstance(wind, WindSynthetic):
965
- df = wind.to_dataframe(time_frame=time_frame)
966
- df["mag"] *= us.UnitfulVelocity(
967
- value=1.0, units=self.units.default_velocity_units
968
- ).convert(us.UnitTypesVelocity.mps)
969
-
970
- tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
971
- df.to_csv(tmp_path)
972
-
973
- # HydroMT function: set wind forcing from timeseries
974
- self._model.setup_wind_forcing(
975
- timeseries=tmp_path, magnitude=None, direction=None
976
- )
977
- elif isinstance(wind, WindMeteo):
978
- ds = MeteoHandler().read(time_frame)
979
- # data already in metric units so no conversion needed
980
-
981
- # HydroMT function: set wind forcing from grid
982
- self._model.setup_wind_forcing_from_grid(wind=ds)
983
- elif isinstance(wind, WindTrack):
984
- # data already in metric units so no conversion needed
985
- self._add_forcing_spw(wind)
986
- elif isinstance(wind, WindNetCDF):
987
- ds = wind.read()
988
- # time slicing to time_frame not needed, hydromt-sfincs handles it
989
- conversion = us.UnitfulVelocity(value=1.0, units=wind.units).convert(
990
- us.UnitTypesVelocity.mps
991
- )
992
- ds *= conversion
993
- self._model.setup_wind_forcing_from_grid(wind=ds)
994
- elif isinstance(wind, WindCSV):
995
- df = wind.to_dataframe(time_frame=time_frame)
996
-
997
- conversion = us.UnitfulVelocity(
998
- value=1.0, units=wind.units["speed"]
999
- ).convert(us.UnitTypesVelocity.mps)
1000
- df *= conversion
1001
-
1002
- tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
1003
- df.to_csv(tmp_path)
1004
-
1005
- # HydroMT function: set wind forcing from timeseries
1006
- self._model.setup_wind_forcing(
1007
- timeseries=tmp_path,
1008
- magnitude=None,
1009
- direction=None,
1010
- )
1011
- else:
1012
- self.logger.warning(
1013
- f"Unsupported wind forcing type: {wind.__class__.__name__}"
1014
- )
1015
- return
1016
-
1017
- def _add_forcing_rain(self, rainfall: IRainfall):
1018
- """Add spatially constant rain forcing to sfincs model. Use timeseries or a constant magnitude.
1019
-
1020
- Parameters
1021
- ----------
1022
- timeseries : Union[str, os.PathLike], optional
1023
- path to file of timeseries file (.csv) which has two columns: time and precipitation, by default None
1024
- const_intensity : float, optional
1025
- time-invariant precipitation intensity [mm_hr], by default None
1026
- """
1027
- time_frame = self.get_model_time()
1028
- if isinstance(rainfall, RainfallConstant):
1029
- self._model.setup_precip_forcing(
1030
- timeseries=None,
1031
- magnitude=rainfall.intensity.convert(us.UnitTypesIntensity.mm_hr),
1032
- )
1033
- elif isinstance(rainfall, RainfallCSV):
1034
- df = rainfall.to_dataframe(time_frame=time_frame)
1035
- conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
1036
- us.UnitTypesIntensity.mm_hr
1037
- )
1038
- df *= conversion
1039
-
1040
- tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
1041
- df.to_csv(tmp_path)
1042
-
1043
- self._model.setup_precip_forcing(timeseries=tmp_path)
1044
- elif isinstance(rainfall, RainfallSynthetic):
1045
- df = rainfall.to_dataframe(time_frame=time_frame)
1046
-
1047
- if rainfall.timeseries.cumulative is not None: # scs
1048
- conversion = us.UnitfulLength(
1049
- value=1.0, units=rainfall.timeseries.cumulative.units
1050
- ).convert(us.UnitTypesLength.millimeters)
1051
- else:
1052
- conversion = us.UnitfulIntensity(
1053
- value=1.0, units=rainfall.timeseries.peak_value.units
1054
- ).convert(us.UnitTypesIntensity.mm_hr)
1055
-
1056
- df *= conversion
1057
- tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
1058
- df.to_csv(tmp_path)
1059
-
1060
- self._model.setup_precip_forcing(timeseries=tmp_path)
1061
- elif isinstance(rainfall, RainfallMeteo):
1062
- ds = MeteoHandler().read(time_frame)
1063
- # MeteoHandler always return metric so no conversion needed
1064
- self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
1065
- elif isinstance(rainfall, RainfallTrack):
1066
- # data already in metric units so no conversion needed
1067
- self._add_forcing_spw(rainfall)
1068
- elif isinstance(rainfall, RainfallNetCDF):
1069
- ds = rainfall.read()
1070
- # time slicing to time_frame not needed, hydromt-sfincs handles it
1071
- conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
1072
- us.UnitTypesIntensity.mm_hr
1073
- )
1074
- ds *= conversion
1075
- self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
1076
- else:
1077
- self.logger.warning(
1078
- f"Unsupported rainfall forcing type: {rainfall.__class__.__name__}"
1079
- )
1080
- return
1081
-
1082
- def _add_forcing_discharge(self, forcing: IDischarge):
1083
- """Add spatially constant discharge forcing to sfincs model. Use timeseries or a constant magnitude.
1084
-
1085
- Parameters
1086
- ----------
1087
- forcing : IDischarge
1088
- The discharge forcing to add to the model.
1089
- Can be a constant, synthetic or from a csv file.
1090
- Also contains the river information.
1091
- """
1092
- if isinstance(forcing, (DischargeConstant, DischargeCSV, DischargeSynthetic)):
1093
- self._set_single_river_forcing(discharge=forcing)
1094
- else:
1095
- self.logger.warning(
1096
- f"Unsupported discharge forcing type: {forcing.__class__.__name__}"
1097
- )
1098
-
1099
- def _add_forcing_waterlevels(self, forcing: IWaterlevel):
1100
- time_frame = self.get_model_time()
1101
- if isinstance(forcing, WaterlevelSynthetic):
1102
- df_ts = forcing.to_dataframe(time_frame=time_frame)
1103
-
1104
- conversion = us.UnitfulLength(
1105
- value=1.0, units=forcing.surge.timeseries.peak_value.units
1106
- ).convert(us.UnitTypesLength.meters)
1107
- datum_correction = self.settings.water_level.get_datum(
1108
- self.database.site.gui.plotting.synthetic_tide.datum
1109
- ).height.convert(us.UnitTypesLength.meters)
1110
-
1111
- df_ts = df_ts * conversion + datum_correction
1112
-
1113
- self._set_waterlevel_forcing(df_ts)
1114
- elif isinstance(forcing, WaterlevelGauged):
1115
- if self.settings.tide_gauge is None:
1116
- raise ValueError("No tide gauge defined for this site.")
1117
-
1118
- df_ts = self.settings.tide_gauge.get_waterlevels_in_time_frame(
1119
- time=time_frame,
1120
- )
1121
- conversion = us.UnitfulLength(
1122
- value=1.0, units=self.settings.tide_gauge.units
1123
- ).convert(us.UnitTypesLength.meters)
1124
-
1125
- datum_height = self.settings.water_level.get_datum(
1126
- self.settings.tide_gauge.reference
1127
- ).height.convert(us.UnitTypesLength.meters)
1128
-
1129
- df_ts = conversion * df_ts + datum_height
1130
-
1131
- self._set_waterlevel_forcing(df_ts)
1132
- elif isinstance(forcing, WaterlevelCSV):
1133
- df_ts = forcing.to_dataframe(time_frame=time_frame)
1134
-
1135
- if df_ts is None:
1136
- raise ValueError("Failed to get waterlevel data.")
1137
- conversion = us.UnitfulLength(value=1.0, units=forcing.units).convert(
1138
- us.UnitTypesLength.meters
1139
- )
1140
- df_ts *= conversion
1141
- self._set_waterlevel_forcing(df_ts)
1142
-
1143
- elif isinstance(forcing, WaterlevelModel):
1144
- from flood_adapt.adapter.sfincs_offshore import OffshoreSfincsHandler
1145
-
1146
- if self.settings.config.offshore_model is None:
1147
- raise ValueError("Offshore model configuration is missing.")
1148
- if self._scenario is None or self._event is None:
1149
- raise ValueError(
1150
- "Scenario and event must be provided to run the offshore model."
1151
- )
1152
-
1153
- df_ts = OffshoreSfincsHandler(
1154
- scenario=self._scenario, event=self._event
1155
- ).get_resulting_waterlevels()
1156
- if df_ts is None:
1157
- raise ValueError("Failed to get waterlevel data.")
1158
-
1159
- # Datum
1160
- datum_correction = self.settings.water_level.get_datum(
1161
- self.settings.config.offshore_model.reference
1162
- ).height.convert(us.UnitTypesLength.meters)
1163
- df_ts += datum_correction
1164
-
1165
- # Already in meters since it was produced by SFINCS so no conversion needed
1166
- self._set_waterlevel_forcing(df_ts)
1167
- self._turn_off_bnd_press_correction()
1168
- else:
1169
- self.logger.warning(
1170
- f"Unsupported waterlevel forcing type: {forcing.__class__.__name__}"
1171
- )
1172
-
1173
- # SPIDERWEB
1174
- def _add_forcing_spw(self, forcing: Union[RainfallTrack, WindTrack]):
1175
- """Add spiderweb forcing."""
1176
- if forcing.source != ForcingSource.TRACK:
1177
- raise ValueError("Forcing source should be TRACK.")
1178
-
1179
- if forcing.path is None:
1180
- raise ValueError("No path to track file provided.")
1181
-
1182
- if not forcing.path.exists():
1183
- # Check if the file is in the database
1184
- in_db = self._get_event_input_path(self._event) / forcing.path.name
1185
- if not in_db.exists():
1186
- raise FileNotFoundError(
1187
- f"Input file for track forcing not found: {forcing.path}"
1188
- )
1189
- forcing.path = in_db
1190
-
1191
- if forcing.path.suffix == ".cyc":
1192
- forcing.path = self._create_spw_file_from_track(
1193
- track_forcing=forcing,
1194
- hurricane_translation=self._event.hurricane_translation,
1195
- name=self._event.name,
1196
- output_dir=forcing.path.parent,
1197
- include_rainfall=bool(self._event.forcings.get(ForcingType.RAINFALL)),
1198
- recreate=False,
1199
- )
1200
-
1201
- if forcing.path.suffix != ".spw":
1202
- raise ValueError(
1203
- "Track files should be in one of [spw, ddb_cyc] file format and must have [.spw, .cyc] extension."
1204
- )
1205
-
1206
- sim_path = self.get_model_root()
1207
- self.logger.info(f"Adding spiderweb forcing to Sfincs model: {sim_path.name}")
1208
-
1209
- # prevent SameFileError
1210
- output_spw_path = sim_path / forcing.path.name
1211
- if forcing.path == output_spw_path:
1212
- raise ValueError(
1213
- "Add a different SPW file than the one already in the model."
1214
- )
1215
-
1216
- if output_spw_path.exists():
1217
- os.remove(output_spw_path)
1218
- shutil.copy2(forcing.path, output_spw_path)
1219
-
1220
- self._model.set_config("spwfile", output_spw_path.name)
1221
-
1222
- ### MEASURES ###
1223
- def _add_measure_floodwall(self, floodwall: FloodWall):
1224
- """Add floodwall to sfincs model.
1225
-
1226
- Parameters
1227
- ----------
1228
- floodwall : FloodWall
1229
- floodwall information
1230
- """
1231
- polygon_file = resolve_filepath(
1232
- object_dir=ObjectDir.measure,
1233
- obj_name=floodwall.name,
1234
- path=floodwall.polygon_file,
1235
- )
1236
-
1237
- # HydroMT function: get geodataframe from filename
1238
- gdf_floodwall = self._model.data_catalog.get_geodataframe(
1239
- polygon_file, geom=self._model.region, crs=self._model.crs
1240
- )
1241
-
1242
- # Add floodwall attributes to geodataframe
1243
- gdf_floodwall["name"] = floodwall.name
1244
- if (gdf_floodwall.geometry.type == "MultiLineString").any():
1245
- gdf_floodwall = gdf_floodwall.explode()
1246
-
1247
- try:
1248
- heights = [
1249
- float(
1250
- us.UnitfulLength(
1251
- value=float(height),
1252
- units=self.database.site.gui.units.default_length_units,
1253
- ).convert(us.UnitTypesLength("meters"))
1254
- )
1255
- for height in gdf_floodwall["z"]
1256
- ]
1257
- gdf_floodwall["z"] = heights
1258
- self.logger.info("Using floodwall height from shape file.")
1259
- except Exception:
1260
- self.logger.warning(
1261
- f"Could not use height data from file due to missing `z` column or missing values therein. Using uniform height of {floodwall.elevation} instead."
1262
- )
1263
- gdf_floodwall["z"] = floodwall.elevation.convert(
1264
- us.UnitTypesLength(us.UnitTypesLength.meters)
1265
- )
1266
-
1267
- # par1 is the overflow coefficient for weirs
1268
- gdf_floodwall["par1"] = 0.6
1269
-
1270
- # HydroMT function: create floodwall
1271
- self._model.setup_structures(structures=gdf_floodwall, stype="weir", merge=True)
1272
-
1273
- def _add_measure_greeninfra(self, green_infrastructure: GreenInfrastructure):
1274
- # HydroMT function: get geodataframe from filename
1275
- if green_infrastructure.selection_type == "polygon":
1276
- polygon_file = resolve_filepath(
1277
- ObjectDir.measure,
1278
- green_infrastructure.name,
1279
- green_infrastructure.polygon_file,
1280
- )
1281
- elif green_infrastructure.selection_type == "aggregation_area":
1282
- # TODO this logic already exists in the Database controller but cannot be used due to cyclic imports
1283
- # Loop through available aggregation area types
1284
- for aggr_dict in self.database.site.fiat.config.aggregation:
1285
- # check which one is used in measure
1286
- if not aggr_dict.name == green_infrastructure.aggregation_area_type:
1287
- continue
1288
- # load geodataframe
1289
- aggr_areas = gpd.read_file(
1290
- db_path(TopLevelDir.static) / aggr_dict.file,
1291
- engine="pyogrio",
1292
- ).to_crs(4326)
1293
- # keep only aggregation area chosen
1294
- polygon_file = aggr_areas.loc[
1295
- aggr_areas[aggr_dict.field_name]
1296
- == green_infrastructure.aggregation_area_name,
1297
- ["geometry"],
1298
- ].reset_index(drop=True)
1299
- else:
1300
- raise ValueError(
1301
- f"The selection type: {green_infrastructure.selection_type} is not valid"
1302
- )
1303
-
1304
- gdf_green_infra = self._model.data_catalog.get_geodataframe(
1305
- polygon_file,
1306
- geom=self._model.region,
1307
- crs=self._model.crs,
1308
- )
1309
-
1310
- # Make sure no multipolygons are there
1311
- gdf_green_infra = gdf_green_infra.explode()
1312
-
1313
- # HydroMT function: create storage volume
1314
- self._model.setup_storage_volume(
1315
- storage_locs=gdf_green_infra,
1316
- volume=green_infrastructure.volume.convert(us.UnitTypesVolume.m3),
1317
- merge=True,
1318
- )
1319
-
1320
- def _add_measure_pump(self, pump: Pump):
1321
- """Add pump to sfincs model.
1322
-
1323
- Parameters
1324
- ----------
1325
- pump : Pump
1326
- pump information
1327
- """
1328
- polygon_file = resolve_filepath(ObjectDir.measure, pump.name, pump.polygon_file)
1329
- # HydroMT function: get geodataframe from filename
1330
- gdf_pump = self._model.data_catalog.get_geodataframe(
1331
- polygon_file, geom=self._model.region, crs=self._model.crs
1332
- )
1333
-
1334
- # HydroMT function: create floodwall
1335
- self._model.setup_drainage_structures(
1336
- structures=gdf_pump,
1337
- stype="pump",
1338
- discharge=pump.discharge.convert(us.UnitTypesDischarge.cms),
1339
- merge=True,
1340
- )
1341
-
1342
- ### SFINCS SETTERS ###
1343
- def _set_single_river_forcing(self, discharge: IDischarge):
1344
- """Add discharge to overland sfincs model.
1345
-
1346
- Parameters
1347
- ----------
1348
- discharge : IDischarge
1349
- Discharge object with discharge timeseries data and river information.
1350
- """
1351
- if not isinstance(
1352
- discharge, (DischargeConstant, DischargeSynthetic, DischargeCSV)
1353
- ):
1354
- self.logger.warning(
1355
- f"Unsupported discharge forcing type: {discharge.__class__.__name__}"
1356
- )
1357
- return
1358
-
1359
- self.logger.info(f"Setting discharge forcing for river: {discharge.river.name}")
1360
-
1361
- time_frame = self.get_model_time()
1362
- model_rivers = self._read_river_locations()
1363
-
1364
- # Check that the river is defined in the model and that the coordinates match
1365
- river_loc = shapely.Point(
1366
- discharge.river.x_coordinate, discharge.river.y_coordinate
1367
- )
1368
- tolerance = 0.001 # in degrees, ~111 meters at the equator. (0.0001: 11 meters at the equator)
1369
- river_gdf = model_rivers[model_rivers.distance(river_loc) <= tolerance]
1370
- river_inds = river_gdf.index.to_list()
1371
- if len(river_inds) != 1:
1372
- raise ValueError(
1373
- f"River {discharge.river.name} is not defined in the sfincs model. Please ensure the river coordinates in the site.toml match the coordinates for rivers in the SFINCS model."
1374
- )
1375
-
1376
- # Create a geodataframe with the river coordinates, the timeseries data and rename the column to the river index defined in the model
1377
- if isinstance(discharge, DischargeCSV):
1378
- df = discharge.to_dataframe(time_frame)
1379
- conversion = us.UnitfulDischarge(value=1.0, units=discharge.units).convert(
1380
- us.UnitTypesDischarge.cms
1381
- )
1382
- elif isinstance(discharge, DischargeConstant):
1383
- df = discharge.to_dataframe(time_frame)
1384
- conversion = us.UnitfulDischarge(
1385
- value=1.0, units=discharge.discharge.units
1386
- ).convert(us.UnitTypesDischarge.cms)
1387
- elif isinstance(discharge, DischargeSynthetic):
1388
- df = discharge.to_dataframe(time_frame)
1389
- conversion = us.UnitfulDischarge(
1390
- value=1.0, units=discharge.timeseries.peak_value.units
1391
- ).convert(us.UnitTypesDischarge.cms)
1392
- else:
1393
- raise ValueError(
1394
- f"Unsupported discharge forcing type: {discharge.__class__}"
1395
- )
1396
-
1397
- df *= conversion
1398
-
1399
- df = df.rename(columns={df.columns[0]: river_inds[0]})
1400
-
1401
- # HydroMT function: set discharge forcing from time series and river coordinates
1402
- self._model.setup_discharge_forcing(
1403
- locations=river_gdf,
1404
- timeseries=df,
1405
- merge=True,
1406
- )
1407
-
1408
- def _turn_off_bnd_press_correction(self):
1409
- """Turn off the boundary pressure correction in the sfincs model."""
1410
- self.logger.info(
1411
- "Turning off boundary pressure correction in the offshore model"
1412
- )
1413
- self._model.set_config("pavbnd", -9999)
1414
-
1415
- def _set_waterlevel_forcing(self, df_ts: pd.DataFrame):
1416
- """
1417
- Add water level forcing to sfincs model.
1418
-
1419
- Values in the timeseries are expected to be relative to the main reference datum: `self.settings.water_level.reference`.
1420
- The overland model reference: `self.settings.config.overland_model.reference` is used to convert the water levels to the reference of the overland model.
1421
-
1422
- Parameters
1423
- ----------
1424
- df_ts : pd.DataFrame
1425
- Time series of water levels with the first column as the time index.
1426
-
1427
-
1428
- """
1429
- # Determine bnd points from reference overland model
1430
- gdf_locs = self._read_waterlevel_boundary_locations()
1431
-
1432
- if len(df_ts.columns) == 1:
1433
- # Go from 1 timeseries to timeseries for all boundary points
1434
- name = df_ts.columns[0]
1435
- for i in range(1, len(gdf_locs)):
1436
- df_ts[i + 1] = df_ts[name]
1437
- df_ts.columns = list(range(1, len(gdf_locs) + 1))
1438
-
1439
- # Datum
1440
- sfincs_overland_reference_height = self.settings.water_level.get_datum(
1441
- self.settings.config.overland_model.reference
1442
- ).height.convert(us.UnitTypesLength.meters)
1443
-
1444
- df_ts -= sfincs_overland_reference_height
1445
-
1446
- # HydroMT function: set waterlevel forcing from time series
1447
- self._model.set_forcing_1d(
1448
- name="bzs", df_ts=df_ts, gdf_locs=gdf_locs, merge=False
1449
- )
1450
-
1451
- # OFFSHORE
1452
- def _add_pressure_forcing_from_grid(self, ds: xr.DataArray):
1453
- """Add spatially varying barometric pressure to sfincs model.
1454
-
1455
- Parameters
1456
- ----------
1457
- ds : xr.DataArray
1458
- - Required variables: ['press_msl' (Pa)]
1459
- - Required coordinates: ['time', 'y', 'x']
1460
- - spatial_ref: CRS
1461
- """
1462
- self.logger.info("Adding pressure forcing to the offshore model")
1463
- self._model.setup_pressure_forcing_from_grid(press=ds)
1464
-
1465
- def _add_bzs_from_bca(self, event: Event, physical_projection: PhysicalProjection):
1466
- # ONLY offshore models
1467
- """Convert tidal constituents from bca file to waterlevel timeseries that can be read in by hydromt_sfincs."""
1468
- if self.settings.config.offshore_model is None:
1469
- raise ValueError("No offshore model found in sfincs config.")
1470
-
1471
- self.logger.info("Adding water level forcing to the offshore model")
1472
- sb = SfincsBoundary()
1473
- sb.read_flow_boundary_points(self.get_model_root() / "sfincs.bnd")
1474
- sb.read_astro_boundary_conditions(self.get_model_root() / "sfincs.bca")
1475
-
1476
- times = pd.date_range(
1477
- start=event.time.start_time,
1478
- end=event.time.end_time,
1479
- freq="10T",
1480
- )
1481
-
1482
- # Predict tidal signal and add SLR
1483
- if not sb.flow_boundary_points:
1484
- raise ValueError("No flow boundary points found.")
1485
-
1486
- if self.settings.config.offshore_model.vertical_offset:
1487
- correction = self.settings.config.offshore_model.vertical_offset.convert(
1488
- us.UnitTypesLength.meters
1489
- )
1490
- else:
1491
- correction = 0.0
1492
-
1493
- for bnd_ii in range(len(sb.flow_boundary_points)):
1494
- tide_ii = (
1495
- predict(sb.flow_boundary_points[bnd_ii].astro, times)
1496
- + correction
1497
- + physical_projection.sea_level_rise.convert(us.UnitTypesLength.meters)
1498
- )
1499
-
1500
- if bnd_ii == 0:
1501
- wl_df = pd.DataFrame(data={1: tide_ii}, index=times)
1502
- else:
1503
- wl_df[bnd_ii + 1] = tide_ii
1504
-
1505
- # Determine bnd points from reference overland model
1506
- gdf_locs = self._read_waterlevel_boundary_locations()
1507
-
1508
- # HydroMT function: set waterlevel forcing from time series
1509
- self._model.set_forcing_1d(
1510
- name="bzs", df_ts=wl_df, gdf_locs=gdf_locs, merge=False
1511
- )
1512
-
1513
- ### PRIVATE GETTERS ###
1514
- def _get_result_path(self, scenario: Scenario) -> Path:
1515
- """Return the path to store the results."""
1516
- return self.database.scenarios.output_path / scenario.name / "Flooding"
1517
-
1518
- def _get_simulation_path(
1519
- self, scenario: Scenario, sub_event: Optional[Event] = None
1520
- ) -> Path:
1521
- """
1522
- Return the path to the simulation results.
1523
-
1524
- Parameters
1525
- ----------
1526
- scenario : Scenario
1527
- The scenario for which to get the simulation path.
1528
- sub_event : Optional[Event], optional
1529
- The sub-event for which to get the simulation path, by default None.
1530
- Is only used when the event associated with the scenario is an EventSet.
1531
- """
1532
- base_path = (
1533
- self._get_result_path(scenario)
1534
- / "simulations"
1535
- / self.settings.config.overland_model.name
1536
- )
1537
- event = self.database.events.get(scenario.event)
1538
-
1539
- if isinstance(event, EventSet):
1540
- if sub_event is None:
1541
- raise ValueError("Event must be provided when scenario is an EventSet.")
1542
- return base_path.parent / sub_event.name / base_path.name
1543
- elif isinstance(event, Event):
1544
- return base_path
1545
- else:
1546
- raise ValueError(f"Unsupported mode: {event.mode}")
1547
-
1548
- def _get_simulation_path_offshore(
1549
- self, scenario: Scenario, sub_event: Optional[Event] = None
1550
- ) -> Path:
1551
- # Get the path to the offshore model (will not be used if offshore model is not created)
1552
- if self.settings.config.offshore_model is None:
1553
- raise ValueError("No offshore model found in sfincs config.")
1554
- base_path = (
1555
- self._get_result_path(scenario)
1556
- / "simulations"
1557
- / self.settings.config.offshore_model.name
1558
- )
1559
- event = self.database.events.get(scenario.event)
1560
- if isinstance(event, EventSet):
1561
- return base_path.parent / sub_event.name / base_path.name
1562
- elif isinstance(event, Event):
1563
- return base_path
1564
- else:
1565
- raise ValueError(f"Unsupported mode: {event.mode}")
1566
-
1567
- def _get_flood_map_paths(self, scenario: Scenario) -> list[Path]:
1568
- """Return the paths to the flood maps that running this scenario should produce."""
1569
- results_path = self._get_result_path(scenario)
1570
- event = self.database.events.get(scenario.event)
1571
-
1572
- if isinstance(event, EventSet):
1573
- map_fn = []
1574
- for rp in self.database.site.fiat.risk.return_periods:
1575
- map_fn.append(results_path / f"RP_{rp:04d}_maps.nc")
1576
- elif isinstance(event, Event):
1577
- map_fn = [results_path / "max_water_level_map.nc"]
1578
- else:
1579
- raise ValueError(f"Unsupported mode: {event.mode}")
1580
-
1581
- return map_fn
1582
-
1583
- def _get_event_input_path(self, event: Event) -> Path:
1584
- """Return the path to the event input directory."""
1585
- return self.database.events.input_path / event.name
1586
-
1587
- def _get_zsmax(self):
1588
- """Read zsmax file and return absolute maximum water level over entire simulation."""
1589
- self._model.read_results()
1590
- zsmax = self._model.results["zsmax"].max(dim="timemax")
1591
- zsmax.attrs["units"] = "m"
1592
- return zsmax
1593
-
1594
- def _get_zs_points(self):
1595
- """Read water level (zs) timeseries at observation points.
1596
-
1597
- Names are allocated from the site.toml.
1598
- See also add_obs_points() above.
1599
- """
1600
- self._model.read_results()
1601
- da = self._model.results["point_zs"]
1602
- df = pd.DataFrame(index=pd.DatetimeIndex(da.time), data=da.to_numpy())
1603
-
1604
- names = []
1605
- descriptions = []
1606
- # get station names from site.toml
1607
- if self.settings.obs_point is not None:
1608
- obs_points = self.settings.obs_point
1609
- for pt in obs_points:
1610
- names.append(pt.name)
1611
- descriptions.append(pt.description)
1612
-
1613
- pt_df = pd.DataFrame({"Name": names, "Description": descriptions})
1614
- gdf = gpd.GeoDataFrame(
1615
- pt_df,
1616
- geometry=gpd.points_from_xy(da.point_x.values, da.point_y.values),
1617
- crs=self._model.crs,
1618
- )
1619
- return df, gdf
1620
-
1621
- def _create_spw_file_from_track(
1622
- self,
1623
- track_forcing: Union[RainfallTrack, WindTrack],
1624
- hurricane_translation: TranslationModel,
1625
- name: str,
1626
- output_dir: Path,
1627
- include_rainfall: bool = False,
1628
- recreate: bool = False,
1629
- ):
1630
- """
1631
- Create a spiderweb file from a given TropicalCyclone track and save it to the event's input directory.
1632
-
1633
- Providing the output_dir argument allows to save the spiderweb file in a different directory.
1634
-
1635
- Parameters
1636
- ----------
1637
- output_dir : Path
1638
- The directory where the spiderweb file is saved (or copied to if it already exists and recreate is False)
1639
- recreate : bool, optional
1640
- If True, the spiderweb file is recreated even if it already exists, by default False
1641
-
1642
- Returns
1643
- -------
1644
- Path
1645
- the path to the created spiderweb file
1646
- """
1647
- if track_forcing.path is None:
1648
- raise ValueError("No path to track file provided.")
1649
-
1650
- # Check file format
1651
- match track_forcing.path.suffix:
1652
- case ".spw":
1653
- if recreate:
1654
- raise ValueError(
1655
- "Recreating spiderweb files from existing spiderweb files is not supported. Provide a track file instead."
1656
- )
1657
-
1658
- if track_forcing.path.exists():
1659
- return track_forcing.path
1660
-
1661
- elif (output_dir / track_forcing.path.name).exists():
1662
- return output_dir / track_forcing.path.name
1663
-
1664
- else:
1665
- raise FileNotFoundError(f"SPW file not found: {track_forcing.path}")
1666
- case ".cyc":
1667
- pass
1668
- case _:
1669
- raise ValueError(
1670
- "Track files should be in the DDB_CYC file format and must have .cyc extension, or in the SPW file format and must have .spw extension"
1671
- )
1672
-
1673
- # Check if the spiderweb file already exists
1674
- spw_file = output_dir / track_forcing.path.with_suffix(".spw").name
1675
- if spw_file.exists():
1676
- if recreate:
1677
- os.remove(spw_file)
1678
- else:
1679
- return spw_file
1680
-
1681
- # Initialize the tropical cyclone
1682
- tc = TropicalCyclone()
1683
- tc.read_track(filename=str(track_forcing.path), fmt="ddb_cyc")
1684
-
1685
- # Alter the track of the tc if necessary
1686
- tc = self._translate_tc_track(
1687
- tc=tc, hurricane_translation=hurricane_translation
1688
- )
1689
-
1690
- # Rainfall
1691
- start = "Including" if include_rainfall else "Excluding"
1692
- self.logger.info(f"{start} rainfall in the spiderweb file")
1693
- tc.include_rainfall = include_rainfall
1694
-
1695
- self.logger.info(
1696
- f"Creating spiderweb file for hurricane event `{name}`. This may take a while."
1697
- )
1698
-
1699
- # Create spiderweb file from the track
1700
- tc.to_spiderweb(spw_file)
1701
-
1702
- return spw_file
1703
-
1704
- def _translate_tc_track(
1705
- self, tc: TropicalCyclone, hurricane_translation: TranslationModel
1706
- ):
1707
- if math.isclose(
1708
- hurricane_translation.eastwest_translation.value, 0, abs_tol=1e-6
1709
- ) and math.isclose(
1710
- hurricane_translation.northsouth_translation.value, 0, abs_tol=1e-6
1711
- ):
1712
- return tc
1713
-
1714
- self.logger.info(f"Translating the track of the tropical cyclone `{tc.name}`")
1715
- # First convert geodataframe to the local coordinate system
1716
- crs = pyproj.CRS.from_string(self.settings.config.csname)
1717
- tc.track = tc.track.to_crs(crs)
1718
-
1719
- # Translate the track in the local coordinate system
1720
- tc.track["geometry"] = tc.track["geometry"].apply(
1721
- lambda geom: translate(
1722
- geom,
1723
- xoff=hurricane_translation.eastwest_translation.convert(
1724
- us.UnitTypesLength.meters
1725
- ),
1726
- yoff=hurricane_translation.northsouth_translation.convert(
1727
- us.UnitTypesLength.meters
1728
- ),
1729
- )
1730
- )
1731
-
1732
- # Convert the geodataframe to lat/lon
1733
- tc.track = tc.track.to_crs(epsg=4326)
1734
-
1735
- return tc
1736
-
1737
- # @gundula do we keep this func, its not used anywhere?
1738
- def _downscale_hmax(self, zsmax, demfile: Path):
1739
- # read DEM and convert units to metric units used by SFINCS
1740
- demfile_units = self.settings.dem.units
1741
- dem_conversion = us.UnitfulLength(value=1.0, units=demfile_units).convert(
1742
- us.UnitTypesLength("meters")
1743
- )
1744
- dem = dem_conversion * self._model.data_catalog.get_rasterdataset(demfile)
1745
- dem = dem.rio.reproject(self._model.crs)
1746
-
1747
- # determine conversion factor for output floodmap
1748
- floodmap_units = self.settings.config.floodmap_units
1749
- floodmap_conversion = us.UnitfulLength(
1750
- value=1.0, units=us.UnitTypesLength.meters
1751
- ).convert(floodmap_units)
1752
-
1753
- hmax = utils.downscale_floodmap(
1754
- zsmax=floodmap_conversion * zsmax,
1755
- dep=floodmap_conversion * dem,
1756
- hmin=0.01,
1757
- )
1758
- return hmax
1759
-
1760
- def _read_river_locations(self) -> gpd.GeoDataFrame:
1761
- path = self.get_model_root() / "sfincs.src"
1762
-
1763
- with open(path) as f:
1764
- lines = f.readlines()
1765
- coords = [(float(line.split()[0]), float(line.split()[1])) for line in lines]
1766
- points = [shapely.Point(coord) for coord in coords]
1767
-
1768
- return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)
1769
-
1770
- def _read_waterlevel_boundary_locations(self) -> gpd.GeoDataFrame:
1771
- with open(self.get_model_root() / "sfincs.bnd") as f:
1772
- lines = f.readlines()
1773
- coords = [(float(line.split()[0]), float(line.split()[1])) for line in lines]
1774
- points = [shapely.Point(coord) for coord in coords]
1775
-
1776
- return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)
1777
-
1778
- def _setup_sfincs_logger(self, model_root: Path) -> logging.Logger:
1779
- """Initialize the logger for the SFINCS model."""
1780
- # Create a logger for the SFINCS model manually
1781
- sfincs_logger = logging.getLogger("SfincsModel")
1782
- for handler in sfincs_logger.handlers[:]:
1783
- sfincs_logger.removeHandler(handler)
1784
-
1785
- # Add a file handler
1786
- file_handler = logging.FileHandler(
1787
- filename=model_root.resolve() / "sfincs_model.log",
1788
- mode="w",
1789
- )
1790
- sfincs_logger.setLevel(logging.DEBUG)
1791
- sfincs_logger.addHandler(file_handler)
1792
- return sfincs_logger
1793
-
1794
- def _cleanup_simulation_folder(
1795
- self,
1796
- path: Path,
1797
- extensions: list[str] = [".spw"],
1798
- ):
1799
- """Remove all files with the given extensions in the given path."""
1800
- if not path.exists():
1801
- return
1802
-
1803
- for ext in extensions:
1804
- for file in path.glob(f"*{ext}"):
1805
- file.unlink()
1806
-
1807
- def _load_scenario_objects(self, scenario: Scenario, event: Event) -> None:
1808
- self._scenario = scenario
1809
- self._projection = self.database.projections.get(scenario.projection)
1810
- self._strategy = self.database.strategies.get(scenario.strategy)
1811
- self._event = event
1812
-
1813
- _event = self.database.events.get(scenario.event)
1814
- if isinstance(_event, EventSet):
1815
- self._event_set = _event
1816
- else:
1817
- self._event_set = None
1818
-
1819
- def _add_tide_gauge_plot(
1820
- self, fig, event: Event, units: us.UnitTypesLength
1821
- ) -> None:
1822
- # check if event is historic
1823
- if not isinstance(event, HistoricalEvent):
1824
- return
1825
- if self.settings.tide_gauge is None:
1826
- return
1827
- df_gauge = self.settings.tide_gauge.get_waterlevels_in_time_frame(
1828
- time=TimeFrame(
1829
- start_time=event.time.start_time,
1830
- end_time=event.time.end_time,
1831
- ),
1832
- units=us.UnitTypesLength(units),
1833
- )
1834
-
1835
- if df_gauge is not None:
1836
- gauge_reference_height = self.settings.water_level.get_datum(
1837
- self.settings.tide_gauge.reference
1838
- ).height.convert(units)
1839
-
1840
- waterlevel = df_gauge.iloc[:, 0] + gauge_reference_height
1841
-
1842
- # If data is available, add to plot
1843
- fig.add_trace(
1844
- px.line(waterlevel, color_discrete_sequence=["#ea6404"]).data[0]
1845
- )
1846
- fig["data"][0]["name"] = "model"
1847
- fig["data"][1]["name"] = "measurement"
1848
- fig.update_layout(showlegend=True)
1
+ import logging
2
+ import math
3
+ import os
4
+ import shutil
5
+ import subprocess
6
+ import tempfile
7
+ from pathlib import Path
8
+ from typing import Optional, Union
9
+
10
+ import geopandas as gpd
11
+ import hydromt_sfincs.utils as utils
12
+ import numpy as np
13
+ import pandas as pd
14
+ import plotly.express as px
15
+ import pyproj
16
+ import shapely
17
+ import xarray as xr
18
+ from cht_cyclones.tropical_cyclone import TropicalCyclone
19
+ from cht_tide.read_bca import SfincsBoundary
20
+ from cht_tide.tide_predict import predict
21
+ from hydromt_sfincs import SfincsModel as HydromtSfincsModel
22
+ from hydromt_sfincs.quadtree import QuadtreeGrid
23
+ from numpy import matlib
24
+ from shapely.affinity import translate
25
+
26
+ from flood_adapt.adapter.interface.hazard_adapter import IHazardAdapter
27
+ from flood_adapt.config.config import Settings
28
+ from flood_adapt.config.site import Site
29
+ from flood_adapt.misc.log import FloodAdaptLogging
30
+ from flood_adapt.misc.path_builder import (
31
+ ObjectDir,
32
+ TopLevelDir,
33
+ db_path,
34
+ )
35
+ from flood_adapt.misc.utils import cd, resolve_filepath
36
+ from flood_adapt.objects.events.event_set import EventSet
37
+ from flood_adapt.objects.events.events import Event, Mode, Template
38
+ from flood_adapt.objects.events.hurricane import TranslationModel
39
+ from flood_adapt.objects.events.synthetic import SyntheticEvent
40
+ from flood_adapt.objects.forcing import unit_system as us
41
+ from flood_adapt.objects.forcing.discharge import (
42
+ DischargeConstant,
43
+ DischargeCSV,
44
+ DischargeSynthetic,
45
+ )
46
+ from flood_adapt.objects.forcing.forcing import (
47
+ ForcingSource,
48
+ ForcingType,
49
+ IDischarge,
50
+ IForcing,
51
+ IRainfall,
52
+ IWaterlevel,
53
+ IWind,
54
+ )
55
+ from flood_adapt.objects.forcing.meteo_handler import MeteoHandler
56
+ from flood_adapt.objects.forcing.rainfall import (
57
+ RainfallConstant,
58
+ RainfallCSV,
59
+ RainfallMeteo,
60
+ RainfallNetCDF,
61
+ RainfallSynthetic,
62
+ RainfallTrack,
63
+ )
64
+ from flood_adapt.objects.forcing.time_frame import TimeFrame
65
+ from flood_adapt.objects.forcing.waterlevels import (
66
+ WaterlevelCSV,
67
+ WaterlevelGauged,
68
+ WaterlevelModel,
69
+ WaterlevelSynthetic,
70
+ )
71
+ from flood_adapt.objects.forcing.wind import (
72
+ WindConstant,
73
+ WindCSV,
74
+ WindMeteo,
75
+ WindNetCDF,
76
+ WindSynthetic,
77
+ WindTrack,
78
+ )
79
+ from flood_adapt.objects.measures.measures import (
80
+ FloodWall,
81
+ GreenInfrastructure,
82
+ Measure,
83
+ Pump,
84
+ )
85
+ from flood_adapt.objects.projections.projections import (
86
+ PhysicalProjection,
87
+ Projection,
88
+ )
89
+ from flood_adapt.objects.scenarios.scenarios import Scenario
90
+
91
+
92
+ class SfincsAdapter(IHazardAdapter):
93
+ """Adapter for the SFINCS model.
94
+
95
+ This class is used to run the SFINCS model and process the results.
96
+
97
+ Attributes
98
+ ----------
99
+ settings : SfincsModel
100
+ The settings for the SFINCS model.
101
+ """
102
+
103
+ logger = FloodAdaptLogging.getLogger("SfincsAdapter")
104
+ _site: Site
105
+ _model: HydromtSfincsModel
106
+
107
+ ###############
108
+ ### PUBLIC ####
109
+ ###############
110
+
111
+ ### HAZARD ADAPTER METHODS ###
112
+ def __init__(self, model_root: Path):
113
+ """Load overland sfincs model based on a root directory.
114
+
115
+ Parameters
116
+ ----------
117
+ model_root : Path
118
+ Root directory of overland sfincs model.
119
+ """
120
+ self.settings = self.database.site.sfincs
121
+ self.units = self.database.site.gui.units
122
+ self.sfincs_logger = self._setup_sfincs_logger(model_root)
123
+ self._model = HydromtSfincsModel(
124
+ root=str(model_root.resolve()), mode="r", logger=self.sfincs_logger
125
+ )
126
+ self._model.read()
127
+
128
+ def read(self, path: Path):
129
+ """Read the sfincs model from the current model root."""
130
+ if Path(self._model.root).resolve() != Path(path).resolve():
131
+ self._model.set_root(root=str(path), mode="r")
132
+ self._model.read()
133
+
134
+ def write(self, path_out: Union[str, os.PathLike], overwrite: bool = True):
135
+ """Write the sfincs model configuration to a directory."""
136
+ root = self.get_model_root()
137
+ if not isinstance(path_out, Path):
138
+ path_out = Path(path_out).resolve()
139
+
140
+ if not path_out.exists():
141
+ path_out.mkdir(parents=True)
142
+
143
+ if root != path_out:
144
+ shutil.copytree(root, path_out, dirs_exist_ok=True)
145
+
146
+ write_mode = "w+" if overwrite else "w"
147
+ with cd(path_out):
148
+ self._model.set_root(root=str(path_out), mode=write_mode)
149
+ self._model.write()
150
+
151
+ def close_files(self):
152
+ """Close all open files and clean up file handles."""
153
+ for logger in [self.logger, self.sfincs_logger]:
154
+ if hasattr(logger, "handlers"):
155
+ for handler in logger.handlers:
156
+ if isinstance(handler, logging.FileHandler):
157
+ handler.close()
158
+ logger.removeHandler(handler)
159
+
160
+ def __enter__(self) -> "SfincsAdapter":
161
+ return self
162
+
163
+ def __exit__(self, exc_type, exc_value, traceback) -> bool:
164
+ self.close_files()
165
+ return False
166
+
167
+ def has_run(self, scenario: Scenario) -> bool:
168
+ """Check if the model has been run."""
169
+ event = self.database.events.get(scenario.event)
170
+ if event.mode == Mode.risk:
171
+ sim_paths = [
172
+ self._get_simulation_path(scenario, sub_event=sub_event)
173
+ for sub_event in event.sub_events
174
+ ]
175
+ # No need to check postprocessing for risk scenarios
176
+ return all(self.sfincs_completed(sim_path) for sim_path in sim_paths)
177
+ else:
178
+ return self.sfincs_completed(self._get_simulation_path(scenario))
179
+
180
+ def execute(self, path: Path, strict: bool = True) -> bool:
181
+ """
182
+ Run the sfincs executable in the specified path.
183
+
184
+ Parameters
185
+ ----------
186
+ path : str
187
+ Path to the simulation folder.
188
+ Default is None, in which case the model root is used.
189
+ strict : bool, optional
190
+ True: raise an error if the model fails to run.
191
+ False: log a warning.
192
+ Default is True.
193
+
194
+ Returns
195
+ -------
196
+ bool
197
+ True if the model ran successfully, False otherwise.
198
+
199
+ """
200
+ with cd(path):
201
+ self.logger.info(f"Running SFINCS in {path}")
202
+ process = subprocess.run(
203
+ str(Settings().sfincs_bin_path),
204
+ stdout=subprocess.PIPE,
205
+ stderr=subprocess.PIPE,
206
+ text=True,
207
+ )
208
+ self.sfincs_logger.info(process.stdout)
209
+ self.logger.debug(process.stdout)
210
+
211
+ self._cleanup_simulation_folder(path)
212
+
213
+ if process.returncode != 0:
214
+ if Settings().delete_crashed_runs:
215
+ # Remove all files in the simulation folder except for the log files
216
+ for subdir, dirs, files in os.walk(path, topdown=False):
217
+ for file in files:
218
+ if not file.endswith(".log"):
219
+ os.remove(os.path.join(subdir, file))
220
+
221
+ if not os.listdir(subdir):
222
+ os.rmdir(subdir)
223
+
224
+ if strict:
225
+ raise RuntimeError(f"SFINCS model failed to run in {path}.")
226
+ else:
227
+ self.logger.error(f"SFINCS model failed to run in {path}.")
228
+
229
+ return process.returncode == 0
230
+
231
+ def run(self, scenario: Scenario):
232
+ """Run the whole workflow (Preprocess, process and postprocess) for a given scenario."""
233
+ self._ensure_no_existing_forcings()
234
+ event = self.database.events.get(scenario.event)
235
+
236
+ if event.mode == Mode.risk:
237
+ self._run_risk_scenario(scenario=scenario)
238
+ else:
239
+ self._run_single_event(scenario=scenario, event=event)
240
+
241
+ def preprocess(self, scenario: Scenario, event: Event):
242
+ """
243
+ Preprocess the SFINCS model for a given scenario.
244
+
245
+ Parameters
246
+ ----------
247
+ scenario : Scenario
248
+ Scenario to preprocess.
249
+ event : Event, optional
250
+ Event to preprocess, by default None.
251
+ """
252
+ # I dont like this due to it being state based and might break if people use functions in the wrong order
253
+ # Currently only used to pass projection + event stuff to WaterlevelModel
254
+
255
+ sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
256
+ sim_path.mkdir(parents=True, exist_ok=True)
257
+ template_path = (
258
+ self.database.static.get_overland_sfincs_model().get_model_root()
259
+ )
260
+ shutil.copytree(template_path, sim_path, dirs_exist_ok=True)
261
+
262
+ with SfincsAdapter(model_root=sim_path) as model:
263
+ model._load_scenario_objects(scenario, event)
264
+ is_risk = "Probabilistic " if model._event_set is not None else ""
265
+ self.logger.info(
266
+ f"Preprocessing Scenario `{model._scenario.name}`: {is_risk}Event `{model._event.name}`, Strategy `{model._strategy.name}`, Projection `{model._projection.name}`"
267
+ )
268
+ # Write template model to output path and set it as the model root so focings can write to it
269
+ model.set_timing(model._event.time)
270
+ model.write(sim_path)
271
+
272
+ # Event
273
+ for forcing in model._event.get_forcings():
274
+ model.add_forcing(forcing)
275
+
276
+ if self.rainfall is not None:
277
+ model.rainfall *= model._event.rainfall_multiplier
278
+ else:
279
+ model.logger.warning(
280
+ "Failed to add event rainfall multiplier, no rainfall forcing found in the model."
281
+ )
282
+
283
+ # Measures
284
+ for measure in model._strategy.get_hazard_measures():
285
+ model.add_measure(measure)
286
+
287
+ # Projection
288
+ model.add_projection(model._projection)
289
+
290
+ # Output
291
+ model.add_obs_points()
292
+
293
+ # Save any changes made to disk as well
294
+ model.write(path_out=sim_path)
295
+
296
+ def process(self, scenario: Scenario, event: Event):
297
+ if event.mode != Mode.single_event:
298
+ raise ValueError(f"Unsupported event mode: {event.mode}.")
299
+
300
+ sim_path = self._get_simulation_path(scenario=scenario, sub_event=event)
301
+ self.logger.info(f"Running SFINCS for single event Scenario `{scenario.name}`")
302
+ self.execute(sim_path)
303
+
304
+ def postprocess(self, scenario: Scenario, event: Event):
305
+ if event.mode != Mode.single_event:
306
+ raise ValueError(f"Unsupported event mode: {event.mode}.")
307
+
308
+ self.logger.info(f"Postprocessing SFINCS for Scenario `{scenario.name}`")
309
+ if not self.sfincs_completed(
310
+ self._get_simulation_path(scenario, sub_event=event)
311
+ ):
312
+ raise RuntimeError("SFINCS was not run successfully!")
313
+
314
+ self.write_floodmap_geotiff(scenario)
315
+ self.plot_wl_obs(scenario)
316
+ self.write_water_level_map(scenario)
317
+
318
+ def set_timing(self, time: TimeFrame):
319
+ """Set model reference times."""
320
+ self.logger.info(f"Setting timing for the SFINCS model: `{time}`")
321
+ self._model.set_config("tref", time.start_time)
322
+ self._model.set_config("tstart", time.start_time)
323
+ self._model.set_config("tstop", time.end_time)
324
+
325
+ def add_forcing(self, forcing: IForcing):
326
+ """Get forcing data and add it."""
327
+ if forcing is None:
328
+ return
329
+
330
+ self.logger.info(
331
+ f"Adding {forcing.type.capitalize()}: {forcing.source.capitalize()}"
332
+ )
333
+ if isinstance(forcing, IRainfall):
334
+ self._add_forcing_rain(forcing)
335
+ elif isinstance(forcing, IWind):
336
+ self._add_forcing_wind(forcing)
337
+ elif isinstance(forcing, IDischarge):
338
+ self._add_forcing_discharge(forcing)
339
+ elif isinstance(forcing, IWaterlevel):
340
+ self._add_forcing_waterlevels(forcing)
341
+ else:
342
+ self.logger.warning(
343
+ f"Skipping unsupported forcing type {forcing.__class__.__name__}"
344
+ )
345
+
346
+ def add_measure(self, measure: Measure):
347
+ """Get measure data and add it."""
348
+ self.logger.info(
349
+ f"Adding {measure.__class__.__name__.capitalize()} `{measure.name}`"
350
+ )
351
+
352
+ if isinstance(measure, FloodWall):
353
+ self._add_measure_floodwall(measure)
354
+ elif isinstance(measure, GreenInfrastructure):
355
+ self._add_measure_greeninfra(measure)
356
+ elif isinstance(measure, Pump):
357
+ self._add_measure_pump(measure)
358
+ else:
359
+ self.logger.warning(
360
+ f"Skipping unsupported measure type {measure.__class__.__name__}"
361
+ )
362
+
363
+ def add_projection(self, projection: Projection):
364
+ """Get forcing data currently in the sfincs model and add the projection it."""
365
+ self.logger.info(f"Adding Projection `{projection.name}`")
366
+ phys_projection = projection.physical_projection
367
+
368
+ if phys_projection.sea_level_rise:
369
+ self.logger.info(
370
+ f"Adding projected sea level rise `{phys_projection.sea_level_rise}`"
371
+ )
372
+ if self.waterlevels is not None:
373
+ self.waterlevels += phys_projection.sea_level_rise.convert(
374
+ us.UnitTypesLength.meters
375
+ )
376
+ else:
377
+ self.logger.warning(
378
+ "Failed to add sea level rise, no water level forcing found in the model."
379
+ )
380
+
381
+ if phys_projection.rainfall_multiplier:
382
+ self.logger.info(
383
+ f"Adding projected rainfall multiplier `{phys_projection.rainfall_multiplier}`"
384
+ )
385
+ if self.rainfall is not None:
386
+ self.rainfall *= phys_projection.rainfall_multiplier
387
+ else:
388
+ self.logger.warning(
389
+ "Failed to add projected rainfall multiplier, no rainfall forcing found in the model."
390
+ )
391
+
392
+ ### GETTERS ###
393
+ def get_model_time(self) -> TimeFrame:
394
+ t0, t1 = self._model.get_model_time()
395
+ return TimeFrame(start_time=t0, end_time=t1)
396
+
397
+ def get_model_root(self) -> Path:
398
+ return Path(self._model.root)
399
+
400
+ def get_mask(self):
401
+ """Get mask with inactive cells from model."""
402
+ mask = self._model.grid["msk"]
403
+ return mask
404
+
405
+ def get_bedlevel(self):
406
+ """Get bed level from model."""
407
+ self._model.read_results()
408
+ zb = self._model.results["zb"]
409
+ return zb
410
+
411
+ def get_model_boundary(self) -> gpd.GeoDataFrame:
412
+ """Get bounding box from model."""
413
+ return self._model.region
414
+
415
+ def get_model_grid(self) -> QuadtreeGrid:
416
+ """Get grid from model.
417
+
418
+ Returns
419
+ -------
420
+ QuadtreeGrid
421
+ QuadtreeGrid with the model grid
422
+ """
423
+ return self._model.quadtree
424
+
425
+ # Forcing properties
426
+ @property
427
+ def waterlevels(self) -> xr.Dataset | xr.DataArray | None:
428
+ return self._model.forcing.get("bzs")
429
+
430
+ @waterlevels.setter
431
+ def waterlevels(self, waterlevels: xr.Dataset | xr.DataArray):
432
+ if self.waterlevels is None or self.waterlevels.size == 0:
433
+ raise ValueError("No water level forcing found in the model.")
434
+ self._model.forcing["bzs"] = waterlevels
435
+
436
+ @property
437
+ def discharge(self) -> xr.Dataset | xr.DataArray | None:
438
+ return self._model.forcing.get("dis")
439
+
440
+ @discharge.setter
441
+ def discharge(self, discharge: xr.Dataset | xr.DataArray):
442
+ if self.discharge is None or self.discharge.size == 0:
443
+ raise ValueError("No discharge forcing found in the model.")
444
+ self._model.forcing["dis"] = discharge
445
+
446
+ @property
447
+ def rainfall(self) -> xr.Dataset | xr.DataArray | None:
448
+ names = ["precip", "precip_2d"]
449
+ in_model = [name for name in names if name in self._model.forcing]
450
+ if len(in_model) == 0:
451
+ return None
452
+ elif len(in_model) == 1:
453
+ return self._model.forcing[in_model[0]]
454
+ else:
455
+ raise ValueError("Multiple rainfall forcings found in the model.")
456
+
457
+ @rainfall.setter
458
+ def rainfall(self, rainfall: xr.Dataset | xr.DataArray):
459
+ if self.rainfall is None or self.rainfall.size == 0:
460
+ raise ValueError("No rainfall forcing found in the model.")
461
+ elif "precip_2d" in self._model.forcing:
462
+ self._model.forcing["precip_2d"] = rainfall
463
+ elif "precip" in self._model.forcing:
464
+ self._model.forcing["precip"] = rainfall
465
+ else:
466
+ raise ValueError("Unsupported rainfall forcing in the model.")
467
+
468
+ @property
469
+ def wind(self) -> xr.Dataset | xr.DataArray | None:
470
+ wind_names = ["wnd", "wind_2d", "wind", "wind10_u", "wind10_v"]
471
+ wind_in_model = [name for name in wind_names if name in self._model.forcing]
472
+ if len(wind_in_model) == 0:
473
+ return None
474
+ elif len(wind_in_model) == 1:
475
+ return self._model.forcing[wind_in_model[0]]
476
+ elif len(wind_in_model) == 2:
477
+ if not ("wind10_u" in wind_in_model and "wind10_v" in wind_in_model):
478
+ raise ValueError(
479
+ "Multiple wind forcings found in the model. Both should be wind10_u and wind10_v or a singular wind forcing."
480
+ )
481
+ return xr.Dataset(
482
+ {
483
+ "wind10_u": self._model.forcing["wind10_u"],
484
+ "wind10_v": self._model.forcing["wind10_v"],
485
+ }
486
+ )
487
+ else:
488
+ raise ValueError("Multiple wind forcings found in the model.")
489
+
490
+ @wind.setter
491
+ def wind(self, wind: xr.Dataset | xr.DataArray):
492
+ if (not self.wind) or (self.wind.size == 0):
493
+ raise ValueError("No wind forcing found in the model.")
494
+
495
+ elif "wind_2d" in self._model.forcing:
496
+ self._model.forcing["wind_2d"] = wind
497
+ elif "wind" in self._model.forcing:
498
+ self._model.forcing["wind"] = wind
499
+ elif "wnd" in self._model.forcing:
500
+ self._model.forcing["wnd"] = wind
501
+ elif "wind10_u" in self._model.forcing and "wind10_v" in self._model.forcing:
502
+ self._model.forcing["wind10_u"] = wind["wind10_u"]
503
+ self._model.forcing["wind10_v"] = wind["wind10_v"]
504
+ else:
505
+ raise ValueError("Unsupported wind forcing in the model.")
506
+
507
+ ### OUTPUT ###
508
+ def run_completed(self, scenario: Scenario) -> bool:
509
+ """Check if the entire model run has been completed successfully by checking if all flood maps exist that are created in postprocess().
510
+
511
+ Returns
512
+ -------
513
+ bool : True if all flood maps exist, False otherwise.
514
+
515
+ """
516
+ any_floodmap = len(self._get_flood_map_paths(scenario)) > 0
517
+ all_exist = all(
518
+ floodmap.exists() for floodmap in self._get_flood_map_paths(scenario)
519
+ )
520
+ return any_floodmap and all_exist
521
+
522
+ def sfincs_completed(self, sim_path: Path) -> bool:
523
+ """Check if the sfincs executable has been run successfully by checking if the output files exist in the simulation folder.
524
+
525
+ Parameters
526
+ ----------
527
+ sim_path : Path
528
+ Path to the simulation folder to check.
529
+
530
+ Returns
531
+ -------
532
+ bool: True if the sfincs executable has been run successfully, False otherwise.
533
+
534
+ """
535
+ SFINCS_OUTPUT_FILES = ["sfincs_map.nc"]
536
+
537
+ if self.settings.obs_point is not None:
538
+ SFINCS_OUTPUT_FILES.append("sfincs_his.nc")
539
+
540
+ to_check = [Path(sim_path) / file for file in SFINCS_OUTPUT_FILES]
541
+ return all(output.exists() for output in to_check)
542
+
543
+ def write_floodmap_geotiff(
544
+ self, scenario: Scenario, sim_path: Optional[Path] = None
545
+ ):
546
+ """
547
+ Read simulation results from SFINCS and saves a geotiff with the maximum water levels.
548
+
549
+ Produced floodmap is in the units defined in the sfincs config settings.
550
+
551
+ Parameters
552
+ ----------
553
+ scenario : Scenario
554
+ Scenario for which to create the floodmap.
555
+ sim_path : Path, optional
556
+ Path to the simulation folder, by default None.
557
+ """
558
+ self.logger.info("Writing flood maps to geotiff")
559
+ results_path = self._get_result_path(scenario)
560
+ sim_path = sim_path or self._get_simulation_path(scenario)
561
+ demfile = self.database.static_path / "dem" / self.settings.dem.filename
562
+
563
+ with SfincsAdapter(model_root=sim_path) as model:
564
+ zsmax = model._get_zsmax()
565
+
566
+ dem = model._model.data_catalog.get_rasterdataset(demfile)
567
+
568
+ # convert dem from dem units to floodmap units
569
+ dem_conversion = us.UnitfulLength(
570
+ value=1.0, units=self.settings.dem.units
571
+ ).convert(self.settings.config.floodmap_units)
572
+
573
+ floodmap_fn = results_path / f"FloodMap_{scenario.name}.tif"
574
+
575
+ # convert zsmax from meters to floodmap units
576
+ floodmap_conversion = us.UnitfulLength(
577
+ value=1.0, units=us.UnitTypesLength.meters
578
+ ).convert(self.settings.config.floodmap_units)
579
+
580
+ utils.downscale_floodmap(
581
+ zsmax=floodmap_conversion * zsmax,
582
+ dep=dem_conversion * dem,
583
+ hmin=0.01,
584
+ floodmap_fn=str(floodmap_fn),
585
+ )
586
+
587
+ def write_water_level_map(
588
+ self, scenario: Scenario, sim_path: Optional[Path] = None
589
+ ):
590
+ """Read simulation results from SFINCS and saves a netcdf with the maximum water levels."""
591
+ self.logger.info("Writing water level map to netcdf")
592
+ results_path = self._get_result_path(scenario)
593
+ sim_path = sim_path or self._get_simulation_path(scenario)
594
+
595
+ with SfincsAdapter(model_root=sim_path) as model:
596
+ zsmax = model._get_zsmax()
597
+ zsmax.to_netcdf(results_path / "max_water_level_map.nc")
598
+
599
    def plot_wl_obs(
        self,
        scenario: Scenario,
    ):
        """Plot water levels at SFINCS observation points as html.

        Only for single event scenarios, or for a specific simulation path containing the written and processed sfincs model.

        Parameters
        ----------
        scenario : Scenario
            Scenario whose simulation results are plotted. One html file per
            observation point is written to the scenario results folder.
        """
        if not self.settings.obs_point:
            self.logger.warning("No observation points provided in config.")
            return

        self.logger.info("Plotting water levels at observation points")
        sim_path = self._get_simulation_path(scenario)

        # read SFINCS model
        with SfincsAdapter(model_root=sim_path) as model:
            df, gdf = model._get_zs_points()

        gui_units = us.UnitTypesLength(
            self.database.site.gui.units.default_length_units
        )
        # SFINCS results are in meters; convert to the units shown in the GUI.
        conversion_factor = us.UnitfulLength(
            value=1.0, units=us.UnitTypesLength("meters")
        ).convert(gui_units)

        # Height of the overland model's reference datum, in GUI units.
        overland_reference_height = self.settings.water_level.get_datum(
            self.settings.config.overland_model.reference
        ).height.convert(gui_units)

        # NOTE(review): columns of `df` are assumed to line up index-wise with
        # the rows of `gdf` and with `self.settings.obs_point` (same station
        # order) — confirm against `_get_zs_points`.
        for ii, col in enumerate(df.columns):
            # Plot actual thing
            fig = px.line(
                df[col] * conversion_factor
                + overland_reference_height  # convert to reference datum for plotting
            )

            # Horizontal line marking the main water level reference datum (y=0).
            fig.add_hline(
                y=0,
                line_dash="dash",
                line_color="#000000",
                annotation_text=self.settings.water_level.reference,
                annotation_position="bottom right",
            )

            # plot reference water levels
            for wl_ref in self.settings.water_level.datums:
                # Skip the model's own reference and datums excluded from plotting.
                if (
                    wl_ref.name == self.settings.config.overland_model.reference
                    or wl_ref.name in self.database.site.gui.plotting.excluded_datums
                ):
                    continue
                fig.add_hline(
                    y=wl_ref.height.convert(gui_units),
                    line_dash="dash",
                    line_color="#3ec97c",
                    annotation_text=wl_ref.name,
                    annotation_position="bottom right",
                )

            fig.update_layout(
                autosize=False,
                height=100 * 2,
                width=280 * 2,
                margin={"r": 0, "l": 0, "b": 0, "t": 20},
                font={"size": 10, "color": "black", "family": "Arial"},
                title={
                    "text": gdf.iloc[ii]["Description"],
                    "font": {"size": 12, "color": "black", "family": "Arial"},
                    "x": 0.5,
                    "xanchor": "center",
                },
                xaxis_title="Time",
                yaxis_title=f"Water level [{gui_units.value}] above {self.settings.water_level.reference}",
                yaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
                xaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
                showlegend=False,
            )

            # Overlay measured tide gauge data when this station is the gauge.
            event = self.database.events.get(scenario.event)
            if self.settings.obs_point[ii].name == self.settings.tide_gauge.name:
                self._add_tide_gauge_plot(fig, event, units=gui_units)

            # write html to results folder
            station_name = gdf.iloc[ii]["Name"]
            results_path = self._get_result_path(scenario)
            fig.write_html(results_path / f"{station_name}_timeseries.html")
686
+
687
+ def add_obs_points(self):
688
+ """Add observation points provided in the site toml to SFINCS model."""
689
+ if self.settings.obs_point is None:
690
+ return
691
+ self.logger.info("Adding observation points to the overland flood model")
692
+
693
+ obs_points = self.settings.obs_point
694
+ names = []
695
+ lat = []
696
+ lon = []
697
+ for pt in obs_points:
698
+ names.append(pt.name)
699
+ lat.append(pt.lat)
700
+ lon.append(pt.lon)
701
+
702
+ # create GeoDataFrame from obs_points in site file
703
+ df = pd.DataFrame({"name": names})
704
+ gdf = gpd.GeoDataFrame(
705
+ df, geometry=gpd.points_from_xy(lon, lat), crs="EPSG:4326"
706
+ )
707
+
708
+ # Add locations to SFINCS file
709
+ self._model.setup_observation_points(locations=gdf, merge=False)
710
+
711
+ def get_wl_df_from_offshore_his_results(self) -> pd.DataFrame:
712
+ """Create a pd.Dataframe with waterlevels from the offshore model at the bnd locations of the overland model.
713
+
714
+ Returns
715
+ -------
716
+ wl_df: pd.DataFrame
717
+ time series of water level.
718
+ """
719
+ self.logger.info("Reading water levels from offshore model")
720
+ ds_his = utils.read_sfincs_his_results(
721
+ Path(self._model.root) / "sfincs_his.nc",
722
+ crs=self._model.crs.to_epsg(),
723
+ )
724
+ wl_df = pd.DataFrame(
725
+ data=ds_his.point_zs.to_numpy(),
726
+ index=ds_his.time.to_numpy(),
727
+ columns=np.arange(1, ds_his.point_zs.to_numpy().shape[1] + 1, 1),
728
+ )
729
+ return wl_df
730
+
731
+ ## RISK EVENTS ##
732
    def calculate_rp_floodmaps(self, scenario: Scenario):
        """Calculate flood risk maps from a set of (currently) SFINCS water level outputs using linear interpolation.

        It would be nice to make it more widely applicable and move the loading of the SFINCS results to self.postprocess_sfincs().

        generates return period water level maps in netcdf format to be used by FIAT
        generates return period water depth maps in geotiff format as product for users

        Parameters
        ----------
        scenario : Scenario
            Risk scenario to process; its event must be an EventSet.

        Raises
        ------
        ValueError
            If the scenario's event is not an EventSet.

        TODO: make this robust and more efficient for bigger datasets.
        """
        event: EventSet = self.database.events.get(scenario.event, load_all=True)
        if not isinstance(event, EventSet):
            raise ValueError("This function is only available for risk scenarios.")

        result_path = self._get_result_path(scenario)
        sim_paths = [
            self._get_simulation_path(scenario, sub_event=sub_event)
            for sub_event in event._events
        ]

        phys_proj = self.database.projections.get(
            scenario.projection
        ).physical_projection

        floodmap_rp = self.database.site.fiat.risk.return_periods
        frequencies = [sub_event.frequency for sub_event in event.sub_events]

        # adjust storm frequency for hurricane events
        if not math.isclose(phys_proj.storm_frequency_increase, 0, abs_tol=1e-9):
            storminess_increase = phys_proj.storm_frequency_increase / 100.0
            # NOTE(review): the loop variable shadows the outer `event`
            # (the EventSet); after this loop `event` refers to the last
            # sub-event. Harmless today because `event` is not used again
            # below, but fragile — consider renaming the loop variable.
            for ii, event in enumerate(event._events):
                if event.template == Template.Hurricane:
                    frequencies[ii] = frequencies[ii] * (1 + storminess_increase)

        with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
            # read mask and bed level
            mask = dummymodel.get_mask().stack(z=("x", "y"))
            zb = dummymodel.get_bedlevel().stack(z=("x", "y")).to_numpy()

        zs_maps = []
        for simulation_path in sim_paths:
            # read zsmax data from overland sfincs model
            with SfincsAdapter(model_root=simulation_path) as sim:
                zsmax = sim._get_zsmax().load()
                zs_stacked = zsmax.stack(z=("x", "y"))
                zs_maps.append(zs_stacked)

        # Create RP flood maps

        # 1a: make a table of all water levels and associated frequencies
        zs = xr.concat(zs_maps, pd.Index(frequencies, name="frequency"))
        # Get the indices of columns with all NaN values
        nan_cells = np.where(np.all(np.isnan(zs), axis=0))[0]
        # fill nan values with minimum bed levels in each grid cell, np.interp cannot ignore nan values
        zs = xr.where(np.isnan(zs), np.tile(zb, (zs.shape[0], 1)), zs)
        # Get table of frequencies
        freq = np.tile(frequencies, (zs.shape[1], 1)).transpose()

        # 1b: sort water levels in descending order and include the frequencies in the sorting process
        # (i.e. each h-value should be linked to the same p-values as in step 1a)
        sort_index = zs.argsort(axis=0)
        sorted_prob = np.flipud(np.take_along_axis(freq, sort_index, axis=0))
        sorted_zs = np.flipud(np.take_along_axis(zs.values, sort_index, axis=0))

        # 1c: Compute exceedance probabilities of water depths
        # Method: accumulate probabilities from top to bottom
        prob_exceed = np.cumsum(sorted_prob, axis=0)

        # 1d: Compute return periods of water depths
        # Method: simply take the inverse of the exceedance probability (1/Pex)
        rp_zs = 1.0 / prob_exceed

        # For each return period (T) of interest do the following:
        # For each grid cell do the following:
        # Use the table from step [1d] as a "lookup-table" to derive the T-year water depth. Use a 1-d interpolation technique:
        # h(T) = interp1 (log(T*), h*, log(T))
        # in which t* and h* are the values from the table and T is the return period (T) of interest
        # The resulting T-year water depths for all grids combined form the T-year hazard map
        rp_da = xr.DataArray(rp_zs, dims=zs.dims)

        # no_data_value = -999 # in SFINCS
        # sorted_zs = xr.where(sorted_zs == no_data_value, np.nan, sorted_zs)

        valid_cells = np.where(mask == 1)[
            0
        ]  # only loop over cells where model is not masked
        h = matlib.repmat(
            np.copy(zb), len(floodmap_rp), 1
        )  # if not flooded (i.e. not in valid_cells) revert to bed_level, read from SFINCS results so it is the minimum bed level in a grid cell

        self.logger.info("Calculating flood risk maps, this may take some time")
        for jj in valid_cells:  # looping over all non-masked cells.
            # linear interpolation for all return periods to evaluate
            h[:, jj] = np.interp(
                np.log10(floodmap_rp),
                np.log10(rp_da[::-1, jj]),
                sorted_zs[::-1, jj],
                left=0,
            )

        # Re-fill locations that had nan water level for all simulations with nans
        h[:, nan_cells] = np.full(h[:, nan_cells].shape, np.nan)

        # If a cell has the same water-level as the bed elevation it should be dry (turn to nan)
        diff = h - np.tile(zb, (h.shape[0], 1))
        dry = (
            diff < 10e-10
        )  # here we use a small number instead of zero for rounding errors
        h[dry] = np.nan

        for ii, rp in enumerate(floodmap_rp):
            # #create single nc
            zs_rp_single = xr.DataArray(
                data=h[ii, :], coords={"z": zs["z"]}, attrs={"units": "meters"}
            ).unstack()
            zs_rp_single = zs_rp_single.rio.write_crs(
                zsmax.raster.crs
            )  # , inplace=True)
            zs_rp_single = zs_rp_single.to_dataset(name="risk_map")
            fn_rp = result_path / f"RP_{rp:04d}_maps.nc"
            zs_rp_single.to_netcdf(fn_rp)

            # write geotiff
            # dem file for high resolution flood depth map
            demfile = self.database.static_path / "dem" / self.settings.dem.filename

            # writing the geotiff to the scenario results folder
            with SfincsAdapter(model_root=sim_paths[0]) as dummymodel:
                dem = dummymodel._model.data_catalog.get_rasterdataset(demfile)
                zsmax = zs_rp_single.to_array().squeeze().transpose()
                floodmap_fn = fn_rp.with_suffix(".tif")

                # convert dem from dem units to floodmap units
                dem_conversion = us.UnitfulLength(
                    value=1.0, units=self.settings.dem.units
                ).convert(self.settings.config.floodmap_units)

                # convert zsmax from meters to floodmap units
                floodmap_conversion = us.UnitfulLength(
                    value=1.0, units=us.UnitTypesLength.meters
                ).convert(self.settings.config.floodmap_units)

                utils.downscale_floodmap(
                    zsmax=floodmap_conversion * zsmax,
                    dep=dem_conversion * dem,
                    hmin=0.01,
                    floodmap_fn=str(floodmap_fn),
                )
880
+
881
+ ######################################
882
+ ### PRIVATE - use at your own risk ###
883
+ ######################################
884
+ def _run_single_event(self, scenario: Scenario, event: Event):
885
+ self.preprocess(scenario, event)
886
+ self.process(scenario, event)
887
+ self.postprocess(scenario, event)
888
+
889
+ if not self.settings.config.save_simulation:
890
+ shutil.rmtree(
891
+ self._get_simulation_path(scenario, sub_event=event), ignore_errors=True
892
+ )
893
+
894
+ def _run_risk_scenario(self, scenario: Scenario):
895
+ """Run the whole workflow for a risk scenario.
896
+
897
+ This means preprocessing and running the SFINCS model for each event in the event set, and then postprocessing the results.
898
+ """
899
+ event_set: EventSet = self.database.events.get(scenario.event, load_all=True)
900
+ total = len(event_set._events)
901
+
902
+ for i, sub_event in enumerate(event_set._events):
903
+ sim_path = self._get_simulation_path(scenario, sub_event=sub_event)
904
+
905
+ # Preprocess
906
+ self.preprocess(scenario, event=sub_event)
907
+ self.logger.info(
908
+ f"Running SFINCS for Eventset Scenario `{scenario.name}`, Event `{sub_event.name}` ({i + 1}/{total})"
909
+ )
910
+ self.execute(sim_path)
911
+
912
+ # Postprocess
913
+ self.calculate_rp_floodmaps(scenario)
914
+
915
+ # Cleanup
916
+ if not self.settings.config.save_simulation:
917
+ for i, sub_event in enumerate(event_set._events):
918
+ shutil.rmtree(
919
+ self._get_simulation_path(scenario, sub_event=sub_event),
920
+ ignore_errors=True,
921
+ )
922
+
923
+ def _ensure_no_existing_forcings(self):
924
+ """Check for existing forcings in the model and raise an error if any are found."""
925
+ all_forcings = {
926
+ "waterlevel": self.waterlevels,
927
+ "rainfall": self.rainfall,
928
+ "wind": self.wind,
929
+ "discharge": self.discharge,
930
+ }
931
+ contains_forcings = ", ".join(
932
+ [
933
+ f"{name.capitalize()}"
934
+ for name, forcing in all_forcings.items()
935
+ if forcing is not None
936
+ ]
937
+ )
938
+ if contains_forcings:
939
+ raise ValueError(
940
+ f"{contains_forcings} forcing(s) should not exists in the SFINCS template model. Remove it from the SFINCS model located at: {self.get_model_root()}. For more information on SFINCS and its input files, see the SFINCS documentation at: `https://sfincs.readthedocs.io/en/latest/input.html`"
941
+ )
942
+
943
+ ### FORCING ###
944
    def _add_forcing_wind(
        self,
        wind: IWind,
    ):
        """Add wind forcing to the sfincs model.

        Dispatches on the concrete forcing type: constant magnitude/direction,
        synthetic or CSV timeseries, gridded meteo data, a NetCDF dataset, or a
        hurricane track (spiderweb). Unsupported types only log a warning.

        Parameters
        ----------
        wind : IWind
            Wind forcing to add. Speeds are converted to m/s before being
            passed to hydromt-sfincs.
        """
        time_frame = self.get_model_time()
        if isinstance(wind, WindConstant):
            # HydroMT function: set wind forcing from constant magnitude and direction
            self._model.setup_wind_forcing(
                timeseries=None,
                magnitude=wind.speed.convert(us.UnitTypesVelocity.mps),
                direction=wind.direction.value,
            )
        elif isinstance(wind, WindSynthetic):
            df = wind.to_dataframe(time_frame=time_frame)
            # convert the magnitude column to m/s
            df["mag"] *= us.UnitfulVelocity(
                value=1.0, units=self.units.default_velocity_units
            ).convert(us.UnitTypesVelocity.mps)

            # write to a temp csv since hydromt-sfincs reads timeseries from file
            tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
            df.to_csv(tmp_path)

            # HydroMT function: set wind forcing from timeseries
            self._model.setup_wind_forcing(
                timeseries=tmp_path, magnitude=None, direction=None
            )
        elif isinstance(wind, WindMeteo):
            ds = MeteoHandler().read(time_frame)
            # data already in metric units so no conversion needed

            # HydroMT function: set wind forcing from grid
            self._model.setup_wind_forcing_from_grid(wind=ds)
        elif isinstance(wind, WindTrack):
            # data already in metric units so no conversion needed
            self._add_forcing_spw(wind)
        elif isinstance(wind, WindNetCDF):
            ds = wind.read()
            # time slicing to time_frame not needed, hydromt-sfincs handles it
            conversion = us.UnitfulVelocity(value=1.0, units=wind.units).convert(
                us.UnitTypesVelocity.mps
            )
            ds *= conversion
            self._model.setup_wind_forcing_from_grid(wind=ds)
        elif isinstance(wind, WindCSV):
            df = wind.to_dataframe(time_frame=time_frame)

            conversion = us.UnitfulVelocity(
                value=1.0, units=wind.units["speed"]
            ).convert(us.UnitTypesVelocity.mps)
            df *= conversion

            # write to a temp csv since hydromt-sfincs reads timeseries from file
            tmp_path = Path(tempfile.gettempdir()) / "wind.csv"
            df.to_csv(tmp_path)

            # HydroMT function: set wind forcing from timeseries
            self._model.setup_wind_forcing(
                timeseries=tmp_path,
                magnitude=None,
                direction=None,
            )
        else:
            self.logger.warning(
                f"Unsupported wind forcing type: {wind.__class__.__name__}"
            )
            return
1019
+
1020
    def _add_forcing_rain(self, rainfall: IRainfall):
        """Add rainfall forcing to the sfincs model.

        Dispatches on the concrete forcing type: constant intensity, CSV or
        synthetic timeseries, gridded meteo data, a NetCDF dataset, or a
        hurricane track (spiderweb). Unsupported types only log a warning.

        Parameters
        ----------
        rainfall : IRainfall
            Rainfall forcing to add. Intensities are converted to mm/hr
            (cumulative synthetic rainfall to mm) before being passed to
            hydromt-sfincs.
        """
        time_frame = self.get_model_time()
        if isinstance(rainfall, RainfallConstant):
            self._model.setup_precip_forcing(
                timeseries=None,
                magnitude=rainfall.intensity.convert(us.UnitTypesIntensity.mm_hr),
            )
        elif isinstance(rainfall, RainfallCSV):
            df = rainfall.to_dataframe(time_frame=time_frame)
            conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
                us.UnitTypesIntensity.mm_hr
            )
            df *= conversion

            # write to a temp csv since hydromt-sfincs reads timeseries from file
            tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
            df.to_csv(tmp_path)

            self._model.setup_precip_forcing(timeseries=tmp_path)
        elif isinstance(rainfall, RainfallSynthetic):
            df = rainfall.to_dataframe(time_frame=time_frame)

            if rainfall.timeseries.cumulative is not None:  # scs
                # SCS-type timeseries carry a cumulative depth -> convert to mm
                conversion = us.UnitfulLength(
                    value=1.0, units=rainfall.timeseries.cumulative.units
                ).convert(us.UnitTypesLength.millimeters)
            else:
                conversion = us.UnitfulIntensity(
                    value=1.0, units=rainfall.timeseries.peak_value.units
                ).convert(us.UnitTypesIntensity.mm_hr)

            df *= conversion
            tmp_path = Path(tempfile.gettempdir()) / "precip.csv"
            df.to_csv(tmp_path)

            self._model.setup_precip_forcing(timeseries=tmp_path)
        elif isinstance(rainfall, RainfallMeteo):
            ds = MeteoHandler().read(time_frame)
            # MeteoHandler always return metric so no conversion needed
            self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
        elif isinstance(rainfall, RainfallTrack):
            # data already in metric units so no conversion needed
            self._add_forcing_spw(rainfall)
        elif isinstance(rainfall, RainfallNetCDF):
            ds = rainfall.read()
            # time slicing to time_frame not needed, hydromt-sfincs handles it
            conversion = us.UnitfulIntensity(value=1.0, units=rainfall.units).convert(
                us.UnitTypesIntensity.mm_hr
            )
            ds *= conversion
            self._model.setup_precip_forcing_from_grid(precip=ds, aggregate=False)
        else:
            self.logger.warning(
                f"Unsupported rainfall forcing type: {rainfall.__class__.__name__}"
            )
            return
1084
+
1085
+ def _add_forcing_discharge(self, forcing: IDischarge):
1086
+ """Add spatially constant discharge forcing to sfincs model. Use timeseries or a constant magnitude.
1087
+
1088
+ Parameters
1089
+ ----------
1090
+ forcing : IDischarge
1091
+ The discharge forcing to add to the model.
1092
+ Can be a constant, synthetic or from a csv file.
1093
+ Also contains the river information.
1094
+ """
1095
+ if isinstance(forcing, (DischargeConstant, DischargeCSV, DischargeSynthetic)):
1096
+ self._set_single_river_forcing(discharge=forcing)
1097
+ else:
1098
+ self.logger.warning(
1099
+ f"Unsupported discharge forcing type: {forcing.__class__.__name__}"
1100
+ )
1101
+
1102
    def _add_forcing_waterlevels(self, forcing: IWaterlevel):
        """Add water level forcing to the sfincs model.

        Dispatches on the concrete forcing type: synthetic tide+surge, tide
        gauge data, a CSV timeseries, or an offshore model run. Values are
        converted to meters relative to the main reference datum before being
        applied via `_set_waterlevel_forcing`. Unsupported types only log a
        warning.

        Parameters
        ----------
        forcing : IWaterlevel
            Water level forcing to add.

        Raises
        ------
        ValueError
            If required configuration (tide gauge, offshore model, scenario and
            event context) is missing, or the water level data cannot be read.
        """
        time_frame = self.get_model_time()
        if isinstance(forcing, WaterlevelSynthetic):
            df_ts = forcing.to_dataframe(time_frame=time_frame)

            conversion = us.UnitfulLength(
                value=1.0, units=forcing.surge.timeseries.peak_value.units
            ).convert(us.UnitTypesLength.meters)
            # synthetic tide is defined relative to a configured plotting datum
            datum_correction = self.settings.water_level.get_datum(
                self.database.site.gui.plotting.synthetic_tide.datum
            ).height.convert(us.UnitTypesLength.meters)

            df_ts = df_ts * conversion + datum_correction

            self._set_waterlevel_forcing(df_ts)
        elif isinstance(forcing, WaterlevelGauged):
            if self.settings.tide_gauge is None:
                raise ValueError("No tide gauge defined for this site.")

            df_ts = self.settings.tide_gauge.get_waterlevels_in_time_frame(
                time=time_frame,
            )
            conversion = us.UnitfulLength(
                value=1.0, units=self.settings.tide_gauge.units
            ).convert(us.UnitTypesLength.meters)

            # gauge data is relative to the gauge's own reference datum
            datum_height = self.settings.water_level.get_datum(
                self.settings.tide_gauge.reference
            ).height.convert(us.UnitTypesLength.meters)

            df_ts = conversion * df_ts + datum_height

            self._set_waterlevel_forcing(df_ts)
        elif isinstance(forcing, WaterlevelCSV):
            df_ts = forcing.to_dataframe(time_frame=time_frame)

            if df_ts is None:
                raise ValueError("Failed to get waterlevel data.")
            conversion = us.UnitfulLength(value=1.0, units=forcing.units).convert(
                us.UnitTypesLength.meters
            )
            df_ts *= conversion
            self._set_waterlevel_forcing(df_ts)

        elif isinstance(forcing, WaterlevelModel):
            # local import to avoid a circular dependency
            from flood_adapt.adapter.sfincs_offshore import OffshoreSfincsHandler

            if self.settings.config.offshore_model is None:
                raise ValueError("Offshore model configuration is missing.")
            if self._scenario is None or self._event is None:
                raise ValueError(
                    "Scenario and event must be provided to run the offshore model."
                )

            df_ts = OffshoreSfincsHandler(
                scenario=self._scenario, event=self._event
            ).get_resulting_waterlevels()
            if df_ts is None:
                raise ValueError("Failed to get waterlevel data.")

            # Datum
            datum_correction = self.settings.water_level.get_datum(
                self.settings.config.offshore_model.reference
            ).height.convert(us.UnitTypesLength.meters)
            df_ts += datum_correction

            # Already in meters since it was produced by SFINCS so no conversion needed
            self._set_waterlevel_forcing(df_ts)
            self._turn_off_bnd_press_correction()
        else:
            self.logger.warning(
                f"Unsupported waterlevel forcing type: {forcing.__class__.__name__}"
            )
1175
+
1176
+ # SPIDERWEB
1177
    def _add_forcing_spw(self, forcing: Union[RainfallTrack, WindTrack]):
        """Add spiderweb forcing.

        Resolves the track file (looking it up in the database if needed),
        converts a ddb_cyc (.cyc) track into a .spw file on the fly, copies the
        .spw file into the simulation folder and registers it in the SFINCS
        config.

        Parameters
        ----------
        forcing : Union[RainfallTrack, WindTrack]
            Track-based forcing whose `path` points to a .spw or .cyc file.

        Raises
        ------
        ValueError
            If the forcing source is not TRACK, no path is set, the file has a
            wrong extension, or the file is already the one in the model.
        FileNotFoundError
            If the track file is found neither at `forcing.path` nor in the
            event's input folder in the database.
        """
        if forcing.source != ForcingSource.TRACK:
            raise ValueError("Forcing source should be TRACK.")

        if forcing.path is None:
            raise ValueError("No path to track file provided.")

        if not forcing.path.exists():
            # Check if the file is in the database
            in_db = self._get_event_input_path(self._event) / forcing.path.name
            if not in_db.exists():
                raise FileNotFoundError(
                    f"Input file for track forcing not found: {forcing.path}"
                )
            forcing.path = in_db

        if forcing.path.suffix == ".cyc":
            # Convert a Delft Dashboard cyclone track file into a spiderweb file.
            forcing.path = self._create_spw_file_from_track(
                track_forcing=forcing,
                hurricane_translation=self._event.hurricane_translation,
                name=self._event.name,
                output_dir=forcing.path.parent,
                include_rainfall=bool(self._event.forcings.get(ForcingType.RAINFALL)),
                recreate=False,
            )

        if forcing.path.suffix != ".spw":
            raise ValueError(
                "Track files should be in one of [spw, ddb_cyc] file format and must have [.spw, .cyc] extension."
            )

        sim_path = self.get_model_root()
        self.logger.info(f"Adding spiderweb forcing to Sfincs model: {sim_path.name}")

        # prevent SameFileError
        output_spw_path = sim_path / forcing.path.name
        if forcing.path == output_spw_path:
            raise ValueError(
                "Add a different SPW file than the one already in the model."
            )

        # Overwrite any stale copy in the simulation folder.
        if output_spw_path.exists():
            os.remove(output_spw_path)
        shutil.copy2(forcing.path, output_spw_path)

        # Register the copied file (by name, relative to the model root).
        self._model.set_config("spwfile", output_spw_path.name)
1224
+
1225
+ ### MEASURES ###
1226
    def _add_measure_floodwall(self, floodwall: FloodWall):
        """Add floodwall to sfincs model.

        The floodwall is added as a SFINCS weir structure. Crest heights are
        taken from the `z` column of the shape file when present and valid,
        otherwise a uniform height from the measure definition is used.

        Parameters
        ----------
        floodwall : FloodWall
            floodwall information
        """
        polygon_file = resolve_filepath(
            object_dir=ObjectDir.measure,
            obj_name=floodwall.name,
            path=floodwall.polygon_file,
        )

        # HydroMT function: get geodataframe from filename
        gdf_floodwall = self._model.data_catalog.get_geodataframe(
            polygon_file, geom=self._model.region, crs=self._model.crs
        )

        # Add floodwall attributes to geodataframe
        gdf_floodwall["name"] = floodwall.name
        # split multi-part lines into single line strings
        if (gdf_floodwall.geometry.type == "MultiLineString").any():
            gdf_floodwall = gdf_floodwall.explode()

        try:
            # Per-feature heights from the shape file, converted from GUI units to meters.
            heights = [
                float(
                    us.UnitfulLength(
                        value=float(height),
                        units=self.database.site.gui.units.default_length_units,
                    ).convert(us.UnitTypesLength("meters"))
                )
                for height in gdf_floodwall["z"]
            ]
            gdf_floodwall["z"] = heights
            self.logger.info("Using floodwall height from shape file.")
        except Exception:
            # Fall back to the uniform elevation from the measure definition.
            self.logger.warning(
                f"Could not use height data from file due to missing `z` column or missing values therein. Using uniform height of {floodwall.elevation} instead."
            )
            gdf_floodwall["z"] = floodwall.elevation.convert(
                us.UnitTypesLength(us.UnitTypesLength.meters)
            )

        # par1 is the overflow coefficient for weirs
        gdf_floodwall["par1"] = 0.6

        # HydroMT function: create floodwall
        self._model.setup_structures(structures=gdf_floodwall, stype="weir", merge=True)
1275
+
1276
+ def _add_measure_greeninfra(self, green_infrastructure: GreenInfrastructure):
1277
+ # HydroMT function: get geodataframe from filename
1278
+ if green_infrastructure.selection_type == "polygon":
1279
+ polygon_file = resolve_filepath(
1280
+ ObjectDir.measure,
1281
+ green_infrastructure.name,
1282
+ green_infrastructure.polygon_file,
1283
+ )
1284
+ elif green_infrastructure.selection_type == "aggregation_area":
1285
+ # TODO this logic already exists in the Database controller but cannot be used due to cyclic imports
1286
+ # Loop through available aggregation area types
1287
+ for aggr_dict in self.database.site.fiat.config.aggregation:
1288
+ # check which one is used in measure
1289
+ if not aggr_dict.name == green_infrastructure.aggregation_area_type:
1290
+ continue
1291
+ # load geodataframe
1292
+ aggr_areas = gpd.read_file(
1293
+ db_path(TopLevelDir.static) / aggr_dict.file,
1294
+ engine="pyogrio",
1295
+ ).to_crs(4326)
1296
+ # keep only aggregation area chosen
1297
+ polygon_file = aggr_areas.loc[
1298
+ aggr_areas[aggr_dict.field_name]
1299
+ == green_infrastructure.aggregation_area_name,
1300
+ ["geometry"],
1301
+ ].reset_index(drop=True)
1302
+ else:
1303
+ raise ValueError(
1304
+ f"The selection type: {green_infrastructure.selection_type} is not valid"
1305
+ )
1306
+
1307
+ gdf_green_infra = self._model.data_catalog.get_geodataframe(
1308
+ polygon_file,
1309
+ geom=self._model.region,
1310
+ crs=self._model.crs,
1311
+ )
1312
+
1313
+ # Make sure no multipolygons are there
1314
+ gdf_green_infra = gdf_green_infra.explode()
1315
+
1316
+ # HydroMT function: create storage volume
1317
+ self._model.setup_storage_volume(
1318
+ storage_locs=gdf_green_infra,
1319
+ volume=green_infrastructure.volume.convert(us.UnitTypesVolume.m3),
1320
+ merge=True,
1321
+ )
1322
+
1323
+ def _add_measure_pump(self, pump: Pump):
1324
+ """Add pump to sfincs model.
1325
+
1326
+ Parameters
1327
+ ----------
1328
+ pump : Pump
1329
+ pump information
1330
+ """
1331
+ polygon_file = resolve_filepath(ObjectDir.measure, pump.name, pump.polygon_file)
1332
+ # HydroMT function: get geodataframe from filename
1333
+ gdf_pump = self._model.data_catalog.get_geodataframe(
1334
+ polygon_file, geom=self._model.region, crs=self._model.crs
1335
+ )
1336
+
1337
+ # HydroMT function: create floodwall
1338
+ self._model.setup_drainage_structures(
1339
+ structures=gdf_pump,
1340
+ stype="pump",
1341
+ discharge=pump.discharge.convert(us.UnitTypesDischarge.cms),
1342
+ merge=True,
1343
+ )
1344
+
1345
+ ### SFINCS SETTERS ###
1346
    def _set_single_river_forcing(self, discharge: IDischarge):
        """Add discharge to overland sfincs model.

        The river must already exist in the SFINCS model; its location is
        matched against the site configuration within a small tolerance.

        Parameters
        ----------
        discharge : IDischarge
            Discharge object with discharge timeseries data and river information.

        Raises
        ------
        ValueError
            If the river is not found (or not uniquely matched) in the SFINCS model.
        """
        if not isinstance(
            discharge, (DischargeConstant, DischargeSynthetic, DischargeCSV)
        ):
            self.logger.warning(
                f"Unsupported discharge forcing type: {discharge.__class__.__name__}"
            )
            return

        self.logger.info(f"Setting discharge forcing for river: {discharge.river.name}")

        time_frame = self.get_model_time()
        model_rivers = self._read_river_locations()

        # Check that the river is defined in the model and that the coordinates match
        river_loc = shapely.Point(
            discharge.river.x_coordinate, discharge.river.y_coordinate
        )
        tolerance = 0.001  # in degrees, ~111 meters at the equator. (0.0001: 11 meters at the equator)
        river_gdf = model_rivers[model_rivers.distance(river_loc) <= tolerance]
        river_inds = river_gdf.index.to_list()
        # exactly one model river must match the configured location
        if len(river_inds) != 1:
            raise ValueError(
                f"River {discharge.river.name} is not defined in the sfincs model. Please ensure the river coordinates in the site.toml match the coordinates for rivers in the SFINCS model."
            )

        # Create a geodataframe with the river coordinates, the timeseries data and rename the column to the river index defined in the model
        if isinstance(discharge, DischargeCSV):
            df = discharge.to_dataframe(time_frame)
            conversion = us.UnitfulDischarge(value=1.0, units=discharge.units).convert(
                us.UnitTypesDischarge.cms
            )
        elif isinstance(discharge, DischargeConstant):
            df = discharge.to_dataframe(time_frame)
            conversion = us.UnitfulDischarge(
                value=1.0, units=discharge.discharge.units
            ).convert(us.UnitTypesDischarge.cms)
        elif isinstance(discharge, DischargeSynthetic):
            df = discharge.to_dataframe(time_frame)
            conversion = us.UnitfulDischarge(
                value=1.0, units=discharge.timeseries.peak_value.units
            ).convert(us.UnitTypesDischarge.cms)
        else:
            # unreachable: guarded by the isinstance check at the top
            raise ValueError(
                f"Unsupported discharge forcing type: {discharge.__class__}"
            )

        # convert discharge to m3/s for SFINCS
        df *= conversion

        df = df.rename(columns={df.columns[0]: river_inds[0]})

        # HydroMT function: set discharge forcing from time series and river coordinates
        self._model.setup_discharge_forcing(
            locations=river_gdf,
            timeseries=df,
            merge=True,
        )
1410
+
1411
+ def _turn_off_bnd_press_correction(self):
1412
+ """Turn off the boundary pressure correction in the sfincs model."""
1413
+ self.logger.info(
1414
+ "Turning off boundary pressure correction in the offshore model"
1415
+ )
1416
+ self._model.set_config("pavbnd", -9999)
1417
+
1418
+ def _set_waterlevel_forcing(self, df_ts: pd.DataFrame):
1419
+ """
1420
+ Add water level forcing to sfincs model.
1421
+
1422
+ Values in the timeseries are expected to be relative to the main reference datum: `self.settings.water_level.reference`.
1423
+ The overland model reference: `self.settings.config.overland_model.reference` is used to convert the water levels to the reference of the overland model.
1424
+
1425
+ Parameters
1426
+ ----------
1427
+ df_ts : pd.DataFrame
1428
+ Time series of water levels with the first column as the time index.
1429
+
1430
+
1431
+ """
1432
+ # Determine bnd points from reference overland model
1433
+ gdf_locs = self._read_waterlevel_boundary_locations()
1434
+
1435
+ if len(df_ts.columns) == 1:
1436
+ # Go from 1 timeseries to timeseries for all boundary points
1437
+ name = df_ts.columns[0]
1438
+ for i in range(1, len(gdf_locs)):
1439
+ df_ts[i + 1] = df_ts[name]
1440
+ df_ts.columns = list(range(1, len(gdf_locs) + 1))
1441
+
1442
+ # Datum
1443
+ sfincs_overland_reference_height = self.settings.water_level.get_datum(
1444
+ self.settings.config.overland_model.reference
1445
+ ).height.convert(us.UnitTypesLength.meters)
1446
+
1447
+ df_ts -= sfincs_overland_reference_height
1448
+
1449
+ # HydroMT function: set waterlevel forcing from time series
1450
+ self._model.set_forcing_1d(
1451
+ name="bzs", df_ts=df_ts, gdf_locs=gdf_locs, merge=False
1452
+ )
1453
+
1454
+ # OFFSHORE
1455
+ def _add_pressure_forcing_from_grid(self, ds: xr.DataArray):
1456
+ """Add spatially varying barometric pressure to sfincs model.
1457
+
1458
+ Parameters
1459
+ ----------
1460
+ ds : xr.DataArray
1461
+ - Required variables: ['press_msl' (Pa)]
1462
+ - Required coordinates: ['time', 'y', 'x']
1463
+ - spatial_ref: CRS
1464
+ """
1465
+ self.logger.info("Adding pressure forcing to the offshore model")
1466
+ self._model.setup_pressure_forcing_from_grid(press=ds)
1467
+
1468
+ def _add_bzs_from_bca(self, event: Event, physical_projection: PhysicalProjection):
1469
+ # ONLY offshore models
1470
+ """Convert tidal constituents from bca file to waterlevel timeseries that can be read in by hydromt_sfincs."""
1471
+ if self.settings.config.offshore_model is None:
1472
+ raise ValueError("No offshore model found in sfincs config.")
1473
+
1474
+ self.logger.info("Adding water level forcing to the offshore model")
1475
+ sb = SfincsBoundary()
1476
+ sb.read_flow_boundary_points(self.get_model_root() / "sfincs.bnd")
1477
+ sb.read_astro_boundary_conditions(self.get_model_root() / "sfincs.bca")
1478
+
1479
+ times = pd.date_range(
1480
+ start=event.time.start_time,
1481
+ end=event.time.end_time,
1482
+ freq="10T",
1483
+ )
1484
+
1485
+ # Predict tidal signal and add SLR
1486
+ if not sb.flow_boundary_points:
1487
+ raise ValueError("No flow boundary points found.")
1488
+
1489
+ if self.settings.config.offshore_model.vertical_offset:
1490
+ correction = self.settings.config.offshore_model.vertical_offset.convert(
1491
+ us.UnitTypesLength.meters
1492
+ )
1493
+ else:
1494
+ correction = 0.0
1495
+
1496
+ for bnd_ii in range(len(sb.flow_boundary_points)):
1497
+ tide_ii = (
1498
+ predict(sb.flow_boundary_points[bnd_ii].astro, times)
1499
+ + correction
1500
+ + physical_projection.sea_level_rise.convert(us.UnitTypesLength.meters)
1501
+ )
1502
+
1503
+ if bnd_ii == 0:
1504
+ wl_df = pd.DataFrame(data={1: tide_ii}, index=times)
1505
+ else:
1506
+ wl_df[bnd_ii + 1] = tide_ii
1507
+
1508
+ # Determine bnd points from reference overland model
1509
+ gdf_locs = self._read_waterlevel_boundary_locations()
1510
+
1511
+ # HydroMT function: set waterlevel forcing from time series
1512
+ self._model.set_forcing_1d(
1513
+ name="bzs", df_ts=wl_df, gdf_locs=gdf_locs, merge=False
1514
+ )
1515
+
1516
+ ### PRIVATE GETTERS ###
1517
+ def _get_result_path(self, scenario: Scenario) -> Path:
1518
+ """Return the path to store the results."""
1519
+ return self.database.scenarios.output_path / scenario.name / "Flooding"
1520
+
1521
+ def _get_simulation_path(
1522
+ self, scenario: Scenario, sub_event: Optional[Event] = None
1523
+ ) -> Path:
1524
+ """
1525
+ Return the path to the simulation results.
1526
+
1527
+ Parameters
1528
+ ----------
1529
+ scenario : Scenario
1530
+ The scenario for which to get the simulation path.
1531
+ sub_event : Optional[Event], optional
1532
+ The sub-event for which to get the simulation path, by default None.
1533
+ Is only used when the event associated with the scenario is an EventSet.
1534
+ """
1535
+ base_path = (
1536
+ self._get_result_path(scenario)
1537
+ / "simulations"
1538
+ / self.settings.config.overland_model.name
1539
+ )
1540
+ event = self.database.events.get(scenario.event)
1541
+
1542
+ if isinstance(event, EventSet):
1543
+ if sub_event is None:
1544
+ raise ValueError("Event must be provided when scenario is an EventSet.")
1545
+ return base_path.parent / sub_event.name / base_path.name
1546
+ elif isinstance(event, Event):
1547
+ return base_path
1548
+ else:
1549
+ raise ValueError(f"Unsupported mode: {event.mode}")
1550
+
1551
+ def _get_simulation_path_offshore(
1552
+ self, scenario: Scenario, sub_event: Optional[Event] = None
1553
+ ) -> Path:
1554
+ # Get the path to the offshore model (will not be used if offshore model is not created)
1555
+ if self.settings.config.offshore_model is None:
1556
+ raise ValueError("No offshore model found in sfincs config.")
1557
+ base_path = (
1558
+ self._get_result_path(scenario)
1559
+ / "simulations"
1560
+ / self.settings.config.offshore_model.name
1561
+ )
1562
+ event = self.database.events.get(scenario.event)
1563
+ if isinstance(event, EventSet):
1564
+ return base_path.parent / sub_event.name / base_path.name
1565
+ elif isinstance(event, Event):
1566
+ return base_path
1567
+ else:
1568
+ raise ValueError(f"Unsupported mode: {event.mode}")
1569
+
1570
+ def _get_flood_map_paths(self, scenario: Scenario) -> list[Path]:
1571
+ """Return the paths to the flood maps that running this scenario should produce."""
1572
+ results_path = self._get_result_path(scenario)
1573
+ event = self.database.events.get(scenario.event)
1574
+
1575
+ if isinstance(event, EventSet):
1576
+ map_fn = []
1577
+ for rp in self.database.site.fiat.risk.return_periods:
1578
+ map_fn.append(results_path / f"RP_{rp:04d}_maps.nc")
1579
+ elif isinstance(event, Event):
1580
+ map_fn = [results_path / "max_water_level_map.nc"]
1581
+ else:
1582
+ raise ValueError(f"Unsupported mode: {event.mode}")
1583
+
1584
+ return map_fn
1585
+
1586
+ def _get_event_input_path(self, event: Event) -> Path:
1587
+ """Return the path to the event input directory."""
1588
+ return self.database.events.input_path / event.name
1589
+
1590
+ def _get_zsmax(self):
1591
+ """Read zsmax file and return absolute maximum water level over entire simulation."""
1592
+ self._model.read_results()
1593
+ zsmax = self._model.results["zsmax"].max(dim="timemax")
1594
+ zsmax.attrs["units"] = "m"
1595
+ return zsmax
1596
+
1597
+ def _get_zs_points(self):
1598
+ """Read water level (zs) timeseries at observation points.
1599
+
1600
+ Names are allocated from the site.toml.
1601
+ See also add_obs_points() above.
1602
+ """
1603
+ self._model.read_results()
1604
+ da = self._model.results["point_zs"]
1605
+ df = pd.DataFrame(index=pd.DatetimeIndex(da.time), data=da.to_numpy())
1606
+
1607
+ names = []
1608
+ descriptions = []
1609
+ # get station names from site.toml
1610
+ if self.settings.obs_point is not None:
1611
+ obs_points = self.settings.obs_point
1612
+ for pt in obs_points:
1613
+ names.append(pt.name)
1614
+ descriptions.append(pt.description)
1615
+
1616
+ pt_df = pd.DataFrame({"Name": names, "Description": descriptions})
1617
+ gdf = gpd.GeoDataFrame(
1618
+ pt_df,
1619
+ geometry=gpd.points_from_xy(da.point_x.values, da.point_y.values),
1620
+ crs=self._model.crs,
1621
+ )
1622
+ return df, gdf
1623
+
1624
    def _create_spw_file_from_track(
        self,
        track_forcing: Union[RainfallTrack, WindTrack],
        hurricane_translation: TranslationModel,
        name: str,
        output_dir: Path,
        include_rainfall: bool = False,
        recreate: bool = False,
    ):
        """
        Create a spiderweb file from a given TropicalCyclone track and save it to the event's input directory.

        Providing the output_dir argument allows to save the spiderweb file in a different directory.

        Parameters
        ----------
        track_forcing : Union[RainfallTrack, WindTrack]
            Forcing holding the path to the track file (.cyc) or to an already
            existing spiderweb file (.spw).
        hurricane_translation : TranslationModel
            East-west / north-south offsets applied to the track before conversion.
        name : str
            Event name, used for logging only.
        output_dir : Path
            The directory where the spiderweb file is saved (or copied to if it already exists and recreate is False)
        include_rainfall : bool, optional
            Whether to include rainfall in the spiderweb file, by default False
        recreate : bool, optional
            If True, the spiderweb file is recreated even if it already exists, by default False

        Returns
        -------
        Path
            the path to the created spiderweb file

        Raises
        ------
        ValueError
            If no track path is set, the file extension is unsupported, or
            recreate is requested for an existing .spw input.
        FileNotFoundError
            If a .spw input is given but cannot be found.
        """
        if track_forcing.path is None:
            raise ValueError("No path to track file provided.")

        # Check file format
        match track_forcing.path.suffix:
            case ".spw":
                # An existing spiderweb file is used as-is; it cannot be rebuilt
                if recreate:
                    raise ValueError(
                        "Recreating spiderweb files from existing spiderweb files is not supported. Provide a track file instead."
                    )

                if track_forcing.path.exists():
                    return track_forcing.path

                elif (output_dir / track_forcing.path.name).exists():
                    return output_dir / track_forcing.path.name

                else:
                    raise FileNotFoundError(f"SPW file not found: {track_forcing.path}")
            case ".cyc":
                pass
            case _:
                raise ValueError(
                    "Track files should be in the DDB_CYC file format and must have .cyc extension, or in the SPW file format and must have .spw extension"
                )

        # Check if the spiderweb file already exists; reuse it unless recreate=True
        spw_file = output_dir / track_forcing.path.with_suffix(".spw").name
        if spw_file.exists():
            if recreate:
                os.remove(spw_file)
            else:
                return spw_file

        # Initialize the tropical cyclone
        tc = TropicalCyclone()
        tc.read_track(filename=str(track_forcing.path), fmt="ddb_cyc")

        # Alter the track of the tc if necessary
        tc = self._translate_tc_track(
            tc=tc, hurricane_translation=hurricane_translation
        )

        # Rainfall
        start = "Including" if include_rainfall else "Excluding"
        self.logger.info(f"{start} rainfall in the spiderweb file")
        tc.include_rainfall = include_rainfall

        self.logger.info(
            f"Creating spiderweb file for hurricane event `{name}`. This may take a while."
        )

        # Create spiderweb file from the track (CPU-intensive step)
        tc.to_spiderweb(spw_file)

        return spw_file
1706
+
1707
+ def _translate_tc_track(
1708
+ self, tc: TropicalCyclone, hurricane_translation: TranslationModel
1709
+ ):
1710
+ if math.isclose(
1711
+ hurricane_translation.eastwest_translation.value, 0, abs_tol=1e-6
1712
+ ) and math.isclose(
1713
+ hurricane_translation.northsouth_translation.value, 0, abs_tol=1e-6
1714
+ ):
1715
+ return tc
1716
+
1717
+ self.logger.info(f"Translating the track of the tropical cyclone `{tc.name}`")
1718
+ # First convert geodataframe to the local coordinate system
1719
+ crs = pyproj.CRS.from_string(self.settings.config.csname)
1720
+ tc.track = tc.track.to_crs(crs)
1721
+
1722
+ # Translate the track in the local coordinate system
1723
+ tc.track["geometry"] = tc.track["geometry"].apply(
1724
+ lambda geom: translate(
1725
+ geom,
1726
+ xoff=hurricane_translation.eastwest_translation.convert(
1727
+ us.UnitTypesLength.meters
1728
+ ),
1729
+ yoff=hurricane_translation.northsouth_translation.convert(
1730
+ us.UnitTypesLength.meters
1731
+ ),
1732
+ )
1733
+ )
1734
+
1735
+ # Convert the geodataframe to lat/lon
1736
+ tc.track = tc.track.to_crs(epsg=4326)
1737
+
1738
+ return tc
1739
+
1740
+ # @gundula do we keep this func, its not used anywhere?
1741
+ def _downscale_hmax(self, zsmax, demfile: Path):
1742
+ # read DEM and convert units to metric units used by SFINCS
1743
+ demfile_units = self.settings.dem.units
1744
+ dem_conversion = us.UnitfulLength(value=1.0, units=demfile_units).convert(
1745
+ us.UnitTypesLength("meters")
1746
+ )
1747
+ dem = dem_conversion * self._model.data_catalog.get_rasterdataset(demfile)
1748
+ dem = dem.rio.reproject(self._model.crs)
1749
+
1750
+ # determine conversion factor for output floodmap
1751
+ floodmap_units = self.settings.config.floodmap_units
1752
+ floodmap_conversion = us.UnitfulLength(
1753
+ value=1.0, units=us.UnitTypesLength.meters
1754
+ ).convert(floodmap_units)
1755
+
1756
+ hmax = utils.downscale_floodmap(
1757
+ zsmax=floodmap_conversion * zsmax,
1758
+ dep=floodmap_conversion * dem,
1759
+ hmin=0.01,
1760
+ )
1761
+ return hmax
1762
+
1763
+ def _read_river_locations(self) -> gpd.GeoDataFrame:
1764
+ path = self.get_model_root() / "sfincs.src"
1765
+
1766
+ with open(path) as f:
1767
+ lines = f.readlines()
1768
+ coords = [(float(line.split()[0]), float(line.split()[1])) for line in lines]
1769
+ points = [shapely.Point(coord) for coord in coords]
1770
+
1771
+ return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)
1772
+
1773
+ def _read_waterlevel_boundary_locations(self) -> gpd.GeoDataFrame:
1774
+ with open(self.get_model_root() / "sfincs.bnd") as f:
1775
+ lines = f.readlines()
1776
+ coords = [(float(line.split()[0]), float(line.split()[1])) for line in lines]
1777
+ points = [shapely.Point(coord) for coord in coords]
1778
+
1779
+ return gpd.GeoDataFrame({"geometry": points}, crs=self._model.crs)
1780
+
1781
+ def _setup_sfincs_logger(self, model_root: Path) -> logging.Logger:
1782
+ """Initialize the logger for the SFINCS model."""
1783
+ # Create a logger for the SFINCS model manually
1784
+ sfincs_logger = logging.getLogger("SfincsModel")
1785
+ for handler in sfincs_logger.handlers[:]:
1786
+ sfincs_logger.removeHandler(handler)
1787
+
1788
+ # Add a file handler
1789
+ file_handler = logging.FileHandler(
1790
+ filename=model_root.resolve() / "sfincs_model.log",
1791
+ mode="w",
1792
+ )
1793
+ sfincs_logger.setLevel(logging.DEBUG)
1794
+ sfincs_logger.addHandler(file_handler)
1795
+ return sfincs_logger
1796
+
1797
+ def _cleanup_simulation_folder(
1798
+ self,
1799
+ path: Path,
1800
+ extensions: list[str] = [".spw"],
1801
+ ):
1802
+ """Remove all files with the given extensions in the given path."""
1803
+ if not path.exists():
1804
+ return
1805
+
1806
+ for ext in extensions:
1807
+ for file in path.glob(f"*{ext}"):
1808
+ file.unlink()
1809
+
1810
+ def _load_scenario_objects(self, scenario: Scenario, event: Event) -> None:
1811
+ self._scenario = scenario
1812
+ self._projection = self.database.projections.get(scenario.projection)
1813
+ self._strategy = self.database.strategies.get(scenario.strategy)
1814
+ self._event = event
1815
+
1816
+ _event = self.database.events.get(scenario.event)
1817
+ if isinstance(_event, EventSet):
1818
+ self._event_set = _event
1819
+ else:
1820
+ self._event_set = None
1821
+
1822
    def _add_tide_gauge_plot(
        self, fig, event: Event, units: us.UnitTypesLength
    ) -> None:
        """Overlay measured tide-gauge water levels on an existing water level plot.

        Parameters
        ----------
        fig
            Plotly figure that already contains the modelled water level as its
            first trace (renamed to "model" below).
        event : Event
            Event providing the time frame for the gauge data. Synthetic events
            have no real-world gauge data, so nothing is added for them.
        units : us.UnitTypesLength
            Length unit used for the plotted water levels.
        """
        # Synthetic events have no measured counterpart to compare against
        if isinstance(event, SyntheticEvent):
            return
        if self.settings.tide_gauge is None:
            return
        df_gauge = self.settings.tide_gauge.get_waterlevels_in_time_frame(
            time=TimeFrame(
                start_time=event.time.start_time,
                end_time=event.time.end_time,
            ),
            units=us.UnitTypesLength(units),
        )

        if df_gauge is None:
            self.logger.warning(
                "No water level data available for the tide gauge. Could not add it to the plot."
            )
            return

        # Shift gauge data from its own reference datum to the plot's datum
        gauge_reference_height = self.settings.water_level.get_datum(
            self.settings.tide_gauge.reference
        ).height.convert(units)

        waterlevel = df_gauge.iloc[:, 0] + gauge_reference_height

        # If data is available, add to plot
        fig.add_trace(px.line(waterlevel, color_discrete_sequence=["#ea6404"]).data[0])
        # Trace 0 is the pre-existing model line; trace 1 is the gauge line added above
        fig["data"][0]["name"] = "model"
        fig["data"][1]["name"] = "measurement"
        fig.update_layout(showlegend=True)