flood-adapt 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139) hide show
  1. flood_adapt/__init__.py +22 -0
  2. flood_adapt/adapter/__init__.py +9 -0
  3. flood_adapt/adapter/fiat_adapter.py +1502 -0
  4. flood_adapt/adapter/interface/__init__.py +0 -0
  5. flood_adapt/adapter/interface/hazard_adapter.py +70 -0
  6. flood_adapt/adapter/interface/impact_adapter.py +36 -0
  7. flood_adapt/adapter/interface/model_adapter.py +89 -0
  8. flood_adapt/adapter/interface/offshore.py +19 -0
  9. flood_adapt/adapter/sfincs_adapter.py +1857 -0
  10. flood_adapt/adapter/sfincs_offshore.py +193 -0
  11. flood_adapt/config/__init__.py +0 -0
  12. flood_adapt/config/config.py +245 -0
  13. flood_adapt/config/fiat.py +219 -0
  14. flood_adapt/config/gui.py +224 -0
  15. flood_adapt/config/sfincs.py +336 -0
  16. flood_adapt/config/site.py +124 -0
  17. flood_adapt/database_builder/__init__.py +0 -0
  18. flood_adapt/database_builder/database_builder.py +2175 -0
  19. flood_adapt/database_builder/templates/default_units/imperial.toml +9 -0
  20. flood_adapt/database_builder/templates/default_units/metric.toml +9 -0
  21. flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -0
  22. flood_adapt/database_builder/templates/icons/black_down_48x48.png +0 -0
  23. flood_adapt/database_builder/templates/icons/black_left_48x48.png +0 -0
  24. flood_adapt/database_builder/templates/icons/black_right_48x48.png +0 -0
  25. flood_adapt/database_builder/templates/icons/black_up_48x48.png +0 -0
  26. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_down.png +0 -0
  27. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_left.png +0 -0
  28. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_right.png +0 -0
  29. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_up.png +0 -0
  30. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_down.png +0 -0
  31. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_left.png +0 -0
  32. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_right.png +0 -0
  33. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_up.png +0 -0
  34. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_left.png +0 -0
  35. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_right.png +0 -0
  36. flood_adapt/database_builder/templates/icons/white_down_48x48.png +0 -0
  37. flood_adapt/database_builder/templates/icons/white_left_48x48.png +0 -0
  38. flood_adapt/database_builder/templates/icons/white_right_48x48.png +0 -0
  39. flood_adapt/database_builder/templates/icons/white_up_48x48.png +0 -0
  40. flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -0
  41. flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -0
  42. flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -0
  43. flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -0
  44. flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -0
  45. flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -0
  46. flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -0
  47. flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -0
  48. flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -0
  49. flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -0
  50. flood_adapt/database_builder/templates/infographics/images/ambulance.png +0 -0
  51. flood_adapt/database_builder/templates/infographics/images/car.png +0 -0
  52. flood_adapt/database_builder/templates/infographics/images/cart.png +0 -0
  53. flood_adapt/database_builder/templates/infographics/images/firetruck.png +0 -0
  54. flood_adapt/database_builder/templates/infographics/images/hospital.png +0 -0
  55. flood_adapt/database_builder/templates/infographics/images/house.png +0 -0
  56. flood_adapt/database_builder/templates/infographics/images/info.png +0 -0
  57. flood_adapt/database_builder/templates/infographics/images/money.png +0 -0
  58. flood_adapt/database_builder/templates/infographics/images/person.png +0 -0
  59. flood_adapt/database_builder/templates/infographics/images/school.png +0 -0
  60. flood_adapt/database_builder/templates/infographics/images/truck.png +0 -0
  61. flood_adapt/database_builder/templates/infographics/images/walking_person.png +0 -0
  62. flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -0
  63. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -0
  64. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -0
  65. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -0
  66. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -0
  67. flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -0
  68. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -0
  69. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -0
  70. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -0
  71. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -0
  72. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -0
  73. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -0
  74. flood_adapt/database_builder/templates/mapbox_layers/bin_colors.toml +5 -0
  75. flood_adapt/database_builder.py +16 -0
  76. flood_adapt/dbs_classes/__init__.py +21 -0
  77. flood_adapt/dbs_classes/database.py +716 -0
  78. flood_adapt/dbs_classes/dbs_benefit.py +97 -0
  79. flood_adapt/dbs_classes/dbs_event.py +91 -0
  80. flood_adapt/dbs_classes/dbs_measure.py +103 -0
  81. flood_adapt/dbs_classes/dbs_projection.py +52 -0
  82. flood_adapt/dbs_classes/dbs_scenario.py +150 -0
  83. flood_adapt/dbs_classes/dbs_static.py +261 -0
  84. flood_adapt/dbs_classes/dbs_strategy.py +147 -0
  85. flood_adapt/dbs_classes/dbs_template.py +302 -0
  86. flood_adapt/dbs_classes/interface/database.py +147 -0
  87. flood_adapt/dbs_classes/interface/element.py +137 -0
  88. flood_adapt/dbs_classes/interface/static.py +47 -0
  89. flood_adapt/flood_adapt.py +1371 -0
  90. flood_adapt/misc/__init__.py +0 -0
  91. flood_adapt/misc/database_user.py +16 -0
  92. flood_adapt/misc/log.py +183 -0
  93. flood_adapt/misc/path_builder.py +54 -0
  94. flood_adapt/misc/utils.py +185 -0
  95. flood_adapt/objects/__init__.py +59 -0
  96. flood_adapt/objects/benefits/__init__.py +0 -0
  97. flood_adapt/objects/benefits/benefits.py +61 -0
  98. flood_adapt/objects/events/__init__.py +0 -0
  99. flood_adapt/objects/events/event_factory.py +135 -0
  100. flood_adapt/objects/events/event_set.py +84 -0
  101. flood_adapt/objects/events/events.py +221 -0
  102. flood_adapt/objects/events/historical.py +55 -0
  103. flood_adapt/objects/events/hurricane.py +64 -0
  104. flood_adapt/objects/events/synthetic.py +48 -0
  105. flood_adapt/objects/forcing/__init__.py +0 -0
  106. flood_adapt/objects/forcing/csv.py +68 -0
  107. flood_adapt/objects/forcing/discharge.py +66 -0
  108. flood_adapt/objects/forcing/forcing.py +142 -0
  109. flood_adapt/objects/forcing/forcing_factory.py +182 -0
  110. flood_adapt/objects/forcing/meteo_handler.py +93 -0
  111. flood_adapt/objects/forcing/netcdf.py +40 -0
  112. flood_adapt/objects/forcing/plotting.py +428 -0
  113. flood_adapt/objects/forcing/rainfall.py +98 -0
  114. flood_adapt/objects/forcing/tide_gauge.py +191 -0
  115. flood_adapt/objects/forcing/time_frame.py +77 -0
  116. flood_adapt/objects/forcing/timeseries.py +552 -0
  117. flood_adapt/objects/forcing/unit_system.py +580 -0
  118. flood_adapt/objects/forcing/waterlevels.py +108 -0
  119. flood_adapt/objects/forcing/wind.py +124 -0
  120. flood_adapt/objects/measures/__init__.py +0 -0
  121. flood_adapt/objects/measures/measure_factory.py +92 -0
  122. flood_adapt/objects/measures/measures.py +506 -0
  123. flood_adapt/objects/object_model.py +68 -0
  124. flood_adapt/objects/projections/__init__.py +0 -0
  125. flood_adapt/objects/projections/projections.py +89 -0
  126. flood_adapt/objects/scenarios/__init__.py +0 -0
  127. flood_adapt/objects/scenarios/scenarios.py +22 -0
  128. flood_adapt/objects/strategies/__init__.py +0 -0
  129. flood_adapt/objects/strategies/strategies.py +68 -0
  130. flood_adapt/workflows/__init__.py +0 -0
  131. flood_adapt/workflows/benefit_runner.py +541 -0
  132. flood_adapt/workflows/floodmap.py +85 -0
  133. flood_adapt/workflows/impacts_integrator.py +82 -0
  134. flood_adapt/workflows/scenario_runner.py +69 -0
  135. flood_adapt-0.3.0.dist-info/LICENSE +21 -0
  136. flood_adapt-0.3.0.dist-info/METADATA +183 -0
  137. flood_adapt-0.3.0.dist-info/RECORD +139 -0
  138. flood_adapt-0.3.0.dist-info/WHEEL +5 -0
  139. flood_adapt-0.3.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,716 @@
1
+ import gc
2
+ import os
3
+ import shutil
4
+ import time
5
+ from datetime import datetime
6
+ from pathlib import Path
7
+ from typing import Any, Optional, Union
8
+
9
+ import geopandas as gpd
10
+ import numpy as np
11
+ import pandas as pd
12
+ import xarray as xr
13
+ from cht_cyclones.tropical_cyclone import TropicalCyclone
14
+ from geopandas import GeoDataFrame
15
+ from plotly.express import line
16
+ from plotly.express.colors import sample_colorscale
17
+
18
+ from flood_adapt.config.config import Settings
19
+ from flood_adapt.config.site import Site
20
+ from flood_adapt.dbs_classes.dbs_benefit import DbsBenefit
21
+ from flood_adapt.dbs_classes.dbs_event import DbsEvent
22
+ from flood_adapt.dbs_classes.dbs_measure import DbsMeasure
23
+ from flood_adapt.dbs_classes.dbs_projection import DbsProjection
24
+ from flood_adapt.dbs_classes.dbs_scenario import DbsScenario
25
+ from flood_adapt.dbs_classes.dbs_static import DbsStatic
26
+ from flood_adapt.dbs_classes.dbs_strategy import DbsStrategy
27
+ from flood_adapt.dbs_classes.interface.database import IDatabase
28
+ from flood_adapt.misc.log import FloodAdaptLogging
29
+ from flood_adapt.misc.path_builder import (
30
+ TopLevelDir,
31
+ db_path,
32
+ )
33
+ from flood_adapt.misc.utils import finished_file_exists
34
+ from flood_adapt.objects.benefits.benefits import Benefit
35
+ from flood_adapt.objects.events.events import Event
36
+ from flood_adapt.objects.forcing import unit_system as us
37
+ from flood_adapt.objects.scenarios.scenarios import Scenario
38
+ from flood_adapt.workflows.benefit_runner import BenefitRunner
39
+ from flood_adapt.workflows.scenario_runner import ScenarioRunner
40
+
41
+
42
class Database(IDatabase):
    """Implementation of IDatabase class that holds the site information and has methods to get static data info, and all the input information.

    Additionally it can manipulate (add, edit, copy and delete) any of the objects in the input.
    """

    # Class-level handle for the singleton instance (see __new__).
    _instance = None

    # Root folder that contains the database and the database folder name.
    database_path: Union[str, os.PathLike]
    database_name: str
    # Guards against re-initialization of the singleton (see __init__).
    _init_done: bool = False

    # Derived paths: <database_path>/<database_name> plus its input/static/output subtrees.
    base_path: Path
    input_path: Path
    static_path: Path
    output_path: Path

    # Parsed site configuration, loaded from static/config/site.toml.
    _site: Site

    # Controllers for each object type stored under the input folder.
    _events: DbsEvent
    _scenarios: DbsScenario
    _strategies: DbsStrategy
    _measures: DbsMeasure
    _projections: DbsProjection
    _benefits: DbsBenefit

    # Accessor for the static data of the database.
    _static: DbsStatic
70
+ def __new__(cls, *args, **kwargs):
71
+ if not cls._instance: # Singleton pattern
72
+ cls._instance = super(Database, cls).__new__(cls)
73
+ return cls._instance
74
+
75
    def __init__(
        self,
        database_path: Union[str, os.PathLike, None] = None,
        database_name: Optional[str] = None,
    ) -> None:
        """
        Initialize the DatabaseController object.

        Parameters
        ----------
        database_path : Union[str, os.PathLike]
            The path to the database root
        database_name : str
            The name of the database.

        Notes
        -----
        This class is a singleton: after the first successful initialization,
        calling it again without arguments returns the already-initialized
        instance, and calling it with the same path/name skips re-initialization.
        """
        # No arguments given: only valid once the singleton has been initialized.
        if database_path is None or database_name is None:
            if not self._init_done:
                raise ValueError(
                    """Database path and name must be provided for the first initialization.
                    To do this, run `flood_adapt.api.static.read_database(database_path, site_name)` first."""
                )
            else:
                return  # Skip re-initialization

        # Same path and name as the current initialization: nothing to do.
        if (
            self._init_done
            and self.database_path == database_path
            and self.database_name == database_name
        ):
            return  # Skip re-initialization

        # If the database is not initialized, or a new path or name is provided, (re-)initialize
        re_option = "re-" if self._init_done else ""
        self.logger = FloodAdaptLogging.getLogger("Database")
        self.logger.info(
            f"{re_option}initializing database to {database_name} at {database_path}".capitalize()
        )
        self.database_path = database_path
        self.database_name = database_name

        # Set the paths

        self.base_path = Path(database_path) / database_name
        self.input_path = db_path(TopLevelDir.input)
        self.static_path = db_path(TopLevelDir.static)
        self.output_path = db_path(TopLevelDir.output)

        # Load the site configuration.
        self._site = Site.load_file(self.static_path / "config" / "site.toml")

        # Initialize the different database objects
        self._static = DbsStatic(self)
        self._events = DbsEvent(self)
        self._scenarios = DbsScenario(self)
        self._strategies = DbsStrategy(self)
        self._measures = DbsMeasure(self)
        self._projections = DbsProjection(self)
        self._benefits = DbsBenefit(self)

        # Delete any unfinished/crashed scenario output
        self.cleanup()

        self._init_done = True
138
+
139
+ def shutdown(self):
140
+ """Explicitly shut down the singleton and clear all references."""
141
+ import gc
142
+
143
+ self._instance = None
144
+ self._init_done = False
145
+
146
+ self.__class__._instance = None
147
+ self.__dict__.clear()
148
+ gc.collect()
149
+
150
    # Property methods
    @property
    def site(self) -> Site:
        """Site configuration loaded from static/config/site.toml."""
        return self._site

    @property
    def static(self) -> DbsStatic:
        """Accessor for the static data of the database."""
        return self._static

    @property
    def events(self) -> DbsEvent:
        """Controller for the event objects in the database."""
        return self._events

    @property
    def scenarios(self) -> DbsScenario:
        """Controller for the scenario objects in the database."""
        return self._scenarios

    @property
    def strategies(self) -> DbsStrategy:
        """Controller for the strategy objects in the database."""
        return self._strategies

    @property
    def measures(self) -> DbsMeasure:
        """Controller for the measure objects in the database."""
        return self._measures

    @property
    def projections(self) -> DbsProjection:
        """Controller for the projection objects in the database."""
        return self._projections

    @property
    def benefits(self) -> DbsBenefit:
        """Controller for the benefit objects in the database."""
        return self._benefits
182
+
183
+ def interp_slr(self, slr_scenario: str, year: float) -> float:
184
+ """Interpolate SLR value and reference it to the SLR reference year from the site toml.
185
+
186
+ Parameters
187
+ ----------
188
+ slr_scenario : str
189
+ SLR scenario name from the coulmn names in static/slr/slr.csv
190
+ year : float
191
+ year to evaluate
192
+
193
+ Returns
194
+ -------
195
+ float
196
+ _description_
197
+
198
+ Raises
199
+ ------
200
+ ValueError
201
+ if the reference year is outside of the time range in the slr.csv file
202
+ ValueError
203
+ if the year to evaluate is outside of the time range in the slr.csv file
204
+ """
205
+ if self.site.sfincs.slr_scenarios is None:
206
+ raise ValueError("No SLR scenarios defined in the site configuration.")
207
+
208
+ input_file = self.static_path / self.site.sfincs.slr_scenarios.file
209
+ df = pd.read_csv(input_file)
210
+ if year > df["year"].max() or year < df["year"].min():
211
+ raise ValueError(
212
+ "The selected year is outside the range of the available SLR scenarios"
213
+ )
214
+ else:
215
+ slr = np.interp(year, df["year"], df[slr_scenario])
216
+ ref_year = self.site.sfincs.slr_scenarios.relative_to_year
217
+ if ref_year > df["year"].max() or ref_year < df["year"].min():
218
+ raise ValueError(
219
+ f"The reference year {ref_year} is outside the range of the available SLR scenarios"
220
+ )
221
+ else:
222
+ ref_slr = np.interp(ref_year, df["year"], df[slr_scenario])
223
+ new_slr = us.UnitfulLength(
224
+ value=slr - ref_slr,
225
+ units=df["units"][0],
226
+ )
227
+ gui_units = self.site.gui.units.default_length_units
228
+ return np.round(new_slr.convert(gui_units), decimals=2)
229
+
230
    # TODO: should probably be moved to frontend
    def plot_slr_scenarios(self) -> str:
        """Plot all SLR scenarios (relative to the reference year) to an HTML file.

        Returns
        -------
        str
            path to the written ``temp/slr.html`` file
        """
        if self.site.sfincs.slr_scenarios is None:
            raise ValueError("No SLR scenarios defined in the site configuration.")
        input_file = self.input_path.parent.joinpath(
            "static", self.site.sfincs.slr_scenarios.file
        )
        df = pd.read_csv(input_file)
        # Number of scenario columns (all columns except the year and units columns).
        ncolors = len(df.columns) - 2
        if "units" not in df.columns:
            raise ValueError(f"Expected column `units` in {input_file}.")

        units = df["units"].iloc[0]
        units = us.UnitTypesLength(units)

        # Accept either 'Year' or 'year' as the year column name.
        if "Year" not in df.columns:
            if "year" not in df.columns:
                raise ValueError(f"Expected column `year` in {input_file}.")
            else:
                df = df.rename(columns={"year": "Year"})

        ref_year = self.site.sfincs.slr_scenarios.relative_to_year
        if ref_year > df["Year"].max() or ref_year < df["Year"].min():
            raise ValueError(
                f"The reference year {ref_year} is outside the range of the available SLR scenarios"
            )
        else:
            # Shift each scenario so that SLR is zero at the reference year.
            scenarios = self._static.get_slr_scn_names()
            for scn in scenarios:
                ref_slr = np.interp(ref_year, df["Year"], df[scn])
                df[scn] -= ref_slr

        # Reshape to long format: one row per (Year, scenario) pair.
        df = df.drop(columns="units").melt(id_vars=["Year"]).reset_index(drop=True)
        # convert to units used in GUI
        slr_current_units = us.UnitfulLength(value=1.0, units=units)
        conversion_factor = slr_current_units.convert(
            self.site.gui.units.default_length_units
        )
        df.iloc[:, -1] = (conversion_factor * df.iloc[:, -1]).round(decimals=2)

        # rename column names that will be shown in html
        df = df.rename(
            columns={
                "variable": "Scenario",
                "value": f"Sea level rise [{self.site.gui.units.default_length_units.value}]",
            }
        )

        # One distinct color per scenario, sampled evenly from the colorscale.
        colors = sample_colorscale(
            "rainbow", [n / (ncolors - 1) for n in range(ncolors)]
        )
        fig = line(
            df,
            x="Year",
            y=f"Sea level rise [{self.site.gui.units.default_length_units.value}]",
            color="Scenario",
            color_discrete_sequence=colors,
        )

        # fig.update_traces(marker={"line": {"color": "#000000", "width": 2}})

        fig.update_layout(
            autosize=False,
            height=100 * 1.2,
            width=280 * 1.3,
            margin={"r": 0, "l": 0, "b": 0, "t": 0},
            font={"size": 10, "color": "black", "family": "Arial"},
            title_font={"size": 10, "color": "black", "family": "Arial"},
            legend_font={"size": 10, "color": "black", "family": "Arial"},
            legend_grouptitlefont={"size": 10, "color": "black", "family": "Arial"},
            legend={"entrywidthmode": "fraction", "entrywidth": 0.2},
            yaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
            xaxis_title=None,
            xaxis_range=[ref_year, df["Year"].max()],
            legend_title=None,
            # paper_bgcolor="#3A3A3A",
            # plot_bgcolor="#131313",
        )

        # write html to results folder
        output_loc = self.input_path.parent.joinpath("temp", "slr.html")
        output_loc.parent.mkdir(parents=True, exist_ok=True)
        fig.write_html(output_loc)
        return str(output_loc)
314
+
315
+ def write_to_csv(self, name: str, event: Event, df: pd.DataFrame):
316
+ df.to_csv(
317
+ self.events.input_path.joinpath(event.name, f"{name}.csv"),
318
+ header=False,
319
+ )
320
+
321
+ def write_cyc(self, event: Event, track: TropicalCyclone):
322
+ cyc_file = self.events.input_path / event.name / f"{event.track_name}.cyc"
323
+ # cht_cyclone function to write TropicalCyclone as .cyc file
324
+ track.write_track(filename=cyc_file, fmt="ddb_cyc")
325
+
326
+ def check_benefit_scenarios(self, benefit: Benefit) -> pd.DataFrame:
327
+ """Return a dataframe with the scenarios needed for this benefit assessment run.
328
+
329
+ Parameters
330
+ ----------
331
+ benefit : Benefit
332
+ """
333
+ runner = BenefitRunner(self, benefit=benefit)
334
+ return runner.check_scenarios()
335
+
336
+ def create_benefit_scenarios(self, benefit: Benefit) -> None:
337
+ """Create any scenarios that are needed for the (cost-)benefit assessment and are not there already.
338
+
339
+ Parameters
340
+ ----------
341
+ benefit : Benefit
342
+ """
343
+ runner = BenefitRunner(self, benefit=benefit)
344
+ runner.check_scenarios()
345
+
346
+ # Iterate through the scenarios needed and create them if not existing
347
+ for index, row in runner.scenarios.iterrows():
348
+ if row["scenario created"] == "No":
349
+ scenario_dict = {}
350
+ scenario_dict["event"] = row["event"]
351
+ scenario_dict["projection"] = row["projection"]
352
+ scenario_dict["strategy"] = row["strategy"]
353
+ scenario_dict["name"] = "_".join(
354
+ [row["projection"], row["event"], row["strategy"]]
355
+ )
356
+
357
+ scenario_obj = Scenario(**scenario_dict)
358
+ # Check if scenario already exists (because it was created before in the loop)
359
+ try:
360
+ self.scenarios.save(scenario_obj)
361
+ except ValueError as e:
362
+ if "name is already used" not in str(e):
363
+ # some other error was raised, so we re-raise it
364
+ raise e
365
+ # otherwise, if it already exists and we dont need to save it, we can just continue
366
+
367
+ # Update the scenarios check
368
+ runner.check_scenarios()
369
+
370
+ def run_benefit(self, benefit_name: Union[str, list[str]]) -> None:
371
+ """Run a (cost-)benefit analysis.
372
+
373
+ Parameters
374
+ ----------
375
+ benefit_name : Union[str, list[str]]
376
+ name(s) of the benefits to run.
377
+ """
378
+ if not isinstance(benefit_name, list):
379
+ benefit_name = [benefit_name]
380
+ for name in benefit_name:
381
+ benefit = self.benefits.get(name)
382
+ runner = BenefitRunner(self, benefit=benefit)
383
+ runner.run_cost_benefit()
384
+
385
+ def update(self) -> None:
386
+ self.projections_list = self._projections.list_objects()
387
+ self.events_list = self._events.list_objects()
388
+ self.measures_list = self._measures.list_objects()
389
+ self.strategies_list = self._strategies.list_objects()
390
+ self.scenarios_list = self._scenarios.list_objects()
391
+ self.benefits_list = self._benefits.list_objects()
392
+
393
+ def get_outputs(self) -> dict[str, Any]:
394
+ """Return a dictionary with info on the outputs that currently exist in the database.
395
+
396
+ Returns
397
+ -------
398
+ dict[str, Any]
399
+ Includes 'name', 'path', 'last_modification_date' and "finished" info
400
+ """
401
+ all_scenarios = pd.DataFrame(self._scenarios.list_objects())
402
+ if len(all_scenarios) > 0:
403
+ df = all_scenarios[all_scenarios["finished"]]
404
+ else:
405
+ df = all_scenarios
406
+ finished = df.drop(columns="finished").reset_index(drop=True)
407
+ return finished.to_dict()
408
+
409
+ def get_topobathy_path(self) -> str:
410
+ """Return the path of the topobathy tiles in order to create flood maps with water level maps.
411
+
412
+ Returns
413
+ -------
414
+ str
415
+ path to topobathy tiles
416
+ """
417
+ path = self.input_path.parent.joinpath("static", "dem", "tiles", "topobathy")
418
+ return str(path)
419
+
420
+ def get_index_path(self) -> str:
421
+ """Return the path of the index tiles which are used to connect each water level cell with the topobathy tiles.
422
+
423
+ Returns
424
+ -------
425
+ str
426
+ path to index tiles
427
+ """
428
+ path = self.input_path.parent.joinpath("static", "dem", "tiles", "indices")
429
+ return str(path)
430
+
431
    def get_depth_conversion(self) -> float:
        """Return the flood depth conversion that is needed in the gui to plot the flood map.

        Returns
        -------
        float
            conversion factor
        """
        # Get conversion factor needed to get from the sfincs units to the gui units
        # NOTE(review): the code computes the factor from 1 GUI length unit to
        # meters — confirm this matches the stated sfincs->gui direction.
        units = us.UnitfulLength(
            value=1, units=self.site.gui.units.default_length_units
        )
        unit_cor = units.convert(new_units=us.UnitTypesLength.meters)

        return unit_cor
446
+
447
+ def get_max_water_level(
448
+ self,
449
+ scenario_name: str,
450
+ return_period: Optional[int] = None,
451
+ ) -> np.ndarray:
452
+ """Return an array with the maximum water levels during an event.
453
+
454
+ Parameters
455
+ ----------
456
+ scenario_name : str
457
+ name of scenario
458
+ return_period : int, optional
459
+ return period in years, by default None
460
+
461
+ Returns
462
+ -------
463
+ np.array
464
+ 2D map of maximum water levels
465
+ """
466
+ # If single event read with hydromt-sfincs
467
+ if not return_period:
468
+ map_path = self.scenarios.output_path.joinpath(
469
+ scenario_name,
470
+ "Flooding",
471
+ "max_water_level_map.nc",
472
+ )
473
+ with xr.open_dataarray(map_path) as map:
474
+ zsmax = map.to_numpy()
475
+ else:
476
+ file_path = self.scenarios.output_path.joinpath(
477
+ scenario_name,
478
+ "Flooding",
479
+ f"RP_{return_period:04d}_maps.nc",
480
+ )
481
+ with xr.open_dataset(file_path) as ds:
482
+ zsmax = ds["risk_map"][:, :].to_numpy().T
483
+ return zsmax
484
+
485
+ def get_building_footprints(self, scenario_name: str) -> GeoDataFrame:
486
+ """Return a geodataframe of the impacts at the footprint level.
487
+
488
+ Parameters
489
+ ----------
490
+ scenario_name : str
491
+ name of scenario
492
+
493
+ Returns
494
+ -------
495
+ GeoDataFrame
496
+ impacts at footprint level
497
+ """
498
+ out_path = self.scenarios.output_path.joinpath(scenario_name, "Impacts")
499
+ footprints = out_path / f"Impacts_building_footprints_{scenario_name}.gpkg"
500
+ gdf = gpd.read_file(footprints, engine="pyogrio")
501
+ gdf = gdf.to_crs(4326)
502
+ return gdf
503
+
504
+ def get_roads(self, scenario_name: str) -> GeoDataFrame:
505
+ """Return a geodataframe of the impacts at roads.
506
+
507
+ Parameters
508
+ ----------
509
+ scenario_name : str
510
+ name of scenario
511
+
512
+ Returns
513
+ -------
514
+ GeoDataFrame
515
+ Impacts at roads
516
+ """
517
+ out_path = self.scenarios.output_path.joinpath(scenario_name, "Impacts")
518
+ roads = out_path / f"Impacts_roads_{scenario_name}.gpkg"
519
+ gdf = gpd.read_file(roads, engine="pyogrio")
520
+ gdf = gdf.to_crs(4326)
521
+ return gdf
522
+
523
+ def get_aggregation(self, scenario_name: str) -> dict[str, gpd.GeoDataFrame]:
524
+ """Return a dictionary with the aggregated impacts as geodataframes.
525
+
526
+ Parameters
527
+ ----------
528
+ scenario_name : str
529
+ name of the scenario
530
+
531
+ Returns
532
+ -------
533
+ dict[GeoDataFrame]
534
+ dictionary with aggregated damages per aggregation type
535
+ """
536
+ out_path = self.scenarios.output_path.joinpath(scenario_name, "Impacts")
537
+ gdfs = {}
538
+ for aggr_area in out_path.glob(f"Impacts_aggregated_{scenario_name}_*.gpkg"):
539
+ label = aggr_area.stem.split(f"{scenario_name}_")[-1]
540
+ gdfs[label] = gpd.read_file(aggr_area, engine="pyogrio")
541
+ gdfs[label] = gdfs[label].to_crs(4326)
542
+ return gdfs
543
+
544
+ def get_aggregation_benefits(
545
+ self, benefit_name: str
546
+ ) -> dict[str, gpd.GeoDataFrame]:
547
+ """Get a dictionary with the aggregated benefits as geodataframes.
548
+
549
+ Parameters
550
+ ----------
551
+ benefit_name : str
552
+ name of the benefit analysis
553
+
554
+ Returns
555
+ -------
556
+ dict[GeoDataFrame]
557
+ dictionary with aggregated benefits per aggregation type
558
+ """
559
+ out_path = self.benefits.output_path.joinpath(
560
+ benefit_name,
561
+ )
562
+ gdfs = {}
563
+ for aggr_area in out_path.glob("benefits_*.gpkg"):
564
+ label = aggr_area.stem.split("benefits_")[-1]
565
+ gdfs[label] = gpd.read_file(aggr_area, engine="pyogrio")
566
+ gdfs[label] = gdfs[label].to_crs(4326)
567
+ return gdfs
568
+
569
+ def get_object_list(self, object_type: str) -> dict[str, Any]:
570
+ """Get a dictionary with all the toml paths and last modification dates that exist in the database that correspond to object_type.
571
+
572
+ Parameters
573
+ ----------
574
+ object_type : str
575
+ Can be 'projections', 'events', 'measures', 'strategies' or 'scenarios'
576
+
577
+ Returns
578
+ -------
579
+ dict[str, Any]
580
+ Includes 'path' and 'last_modification_date' info
581
+ """
582
+ paths = [
583
+ path / f"{path.name}.toml"
584
+ for path in list((self.input_path / object_type).iterdir())
585
+ ]
586
+ last_modification_date = [
587
+ datetime.fromtimestamp(file.stat().st_mtime) for file in paths
588
+ ]
589
+
590
+ objects = {
591
+ "path": paths,
592
+ "last_modification_date": last_modification_date,
593
+ }
594
+
595
+ return objects
596
+
597
    def has_run_hazard(self, scenario_name: str) -> None:
        """Check if there is already a simulation that has the exact same hazard component.

        If yes that is copied to avoid running the hazard model twice.

        Parameters
        ----------
        scenario_name : str
            name of the scenario to check if needs to be rerun for hazard
        """
        scenario = self._scenarios.get(scenario_name)
        runner = ScenarioRunner(self, scenario=scenario)

        # Dont do anything if the hazard model has already been run in itself
        if runner.impacts.hazard.has_run:
            return

        # Candidate scenarios: those with a "Flooding" folder in their output.
        scns_simulated = [
            sim
            for sim in self.scenarios.list_objects()["objects"]
            if self.scenarios.output_path.joinpath(sim.name, "Flooding").is_dir()
        ]

        for scn in scns_simulated:
            if self.scenarios.equal_hazard_components(scn, scenario):
                existing = self.scenarios.output_path.joinpath(scn.name, "Flooding")
                path_new = self.scenarios.output_path.joinpath(
                    scenario.name, "Flooding"
                )
                # Re-point the runner at the candidate to re-evaluate has_run.
                runner._load_objects(scn)
                if runner.impacts.hazard.has_run:  # only copy results if the hazard model has actually finished and skip simulation folders
                    shutil.copytree(
                        existing,
                        path_new,
                        dirs_exist_ok=True,
                        ignore=shutil.ignore_patterns("simulations"),
                    )
                    self.logger.info(
                        f"Hazard simulation is used from the '{scn.name}' scenario"
                    )
637
+
638
+ def run_scenario(self, scenario_name: Union[str, list[str]]) -> None:
639
+ """Run a scenario hazard and impacts.
640
+
641
+ Parameters
642
+ ----------
643
+ scenario_name : Union[str, list[str]]
644
+ name(s) of the scenarios to run.
645
+
646
+ Raises
647
+ ------
648
+ RuntimeError
649
+ If an error occurs while running one of the scenarios
650
+ """
651
+ if not isinstance(scenario_name, list):
652
+ scenario_name = [scenario_name]
653
+
654
+ errors = []
655
+
656
+ for scn in scenario_name:
657
+ try:
658
+ self.has_run_hazard(scn)
659
+ scenario = self.scenarios.get(scn)
660
+ runner = ScenarioRunner(self, scenario=scenario)
661
+ runner.run(scenario)
662
+ except RuntimeError as e:
663
+ errors.append(scn)
664
+ self.logger.error(f"Error running scenario {scn}: {e}")
665
+ if errors:
666
+ raise RuntimeError(
667
+ "FloodAdapt failed to run for the following scenarios: "
668
+ + ", ".join(errors)
669
+ + ". Check the logs for more information."
670
+ )
671
+
672
+ def cleanup(self) -> None:
673
+ """
674
+ Remove corrupted scenario output.
675
+
676
+ This method removes any scenario output that:
677
+ - is corrupted due to unfinished runs
678
+ - does not have a corresponding input
679
+
680
+ """
681
+ if not Settings().delete_crashed_runs:
682
+ return
683
+
684
+ if not self.scenarios.output_path.is_dir():
685
+ return
686
+
687
+ input_scenarios = [
688
+ (self.scenarios.input_path / dir).resolve()
689
+ for dir in os.listdir(self.scenarios.input_path)
690
+ ]
691
+ output_scenarios = [
692
+ (self.scenarios.output_path / dir).resolve()
693
+ for dir in os.listdir(self.scenarios.output_path)
694
+ ]
695
+
696
+ def _call_garbage_collector(func, path, exc_info, retries=5, delay=0.1):
697
+ """Retry deletion up to 5 times if the file is locked."""
698
+ for attempt in range(retries):
699
+ gc.collect()
700
+ time.sleep(delay)
701
+ try:
702
+ func(path) # Retry deletion
703
+ return # Exit if successful
704
+ except Exception as e:
705
+ print(
706
+ f"Attempt {attempt + 1}/{retries} failed to delete {path}: {e}"
707
+ )
708
+
709
+ print(f"Giving up on deleting {path} after {retries} attempts.")
710
+
711
+ for dir in output_scenarios:
712
+ # Delete if: input was deleted or corrupted output due to unfinished run
713
+ if dir.name not in [
714
+ path.name for path in input_scenarios
715
+ ] or not finished_file_exists(dir):
716
+ shutil.rmtree(dir, onerror=_call_garbage_collector)