flood-adapt 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139) hide show
  1. flood_adapt/__init__.py +22 -0
  2. flood_adapt/adapter/__init__.py +9 -0
  3. flood_adapt/adapter/fiat_adapter.py +1502 -0
  4. flood_adapt/adapter/interface/__init__.py +0 -0
  5. flood_adapt/adapter/interface/hazard_adapter.py +70 -0
  6. flood_adapt/adapter/interface/impact_adapter.py +36 -0
  7. flood_adapt/adapter/interface/model_adapter.py +89 -0
  8. flood_adapt/adapter/interface/offshore.py +19 -0
  9. flood_adapt/adapter/sfincs_adapter.py +1857 -0
  10. flood_adapt/adapter/sfincs_offshore.py +193 -0
  11. flood_adapt/config/__init__.py +0 -0
  12. flood_adapt/config/config.py +245 -0
  13. flood_adapt/config/fiat.py +219 -0
  14. flood_adapt/config/gui.py +224 -0
  15. flood_adapt/config/sfincs.py +336 -0
  16. flood_adapt/config/site.py +124 -0
  17. flood_adapt/database_builder/__init__.py +0 -0
  18. flood_adapt/database_builder/database_builder.py +2175 -0
  19. flood_adapt/database_builder/templates/default_units/imperial.toml +9 -0
  20. flood_adapt/database_builder/templates/default_units/metric.toml +9 -0
  21. flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -0
  22. flood_adapt/database_builder/templates/icons/black_down_48x48.png +0 -0
  23. flood_adapt/database_builder/templates/icons/black_left_48x48.png +0 -0
  24. flood_adapt/database_builder/templates/icons/black_right_48x48.png +0 -0
  25. flood_adapt/database_builder/templates/icons/black_up_48x48.png +0 -0
  26. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_down.png +0 -0
  27. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_left.png +0 -0
  28. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_right.png +0 -0
  29. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_up.png +0 -0
  30. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_down.png +0 -0
  31. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_left.png +0 -0
  32. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_right.png +0 -0
  33. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_up.png +0 -0
  34. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_left.png +0 -0
  35. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_right.png +0 -0
  36. flood_adapt/database_builder/templates/icons/white_down_48x48.png +0 -0
  37. flood_adapt/database_builder/templates/icons/white_left_48x48.png +0 -0
  38. flood_adapt/database_builder/templates/icons/white_right_48x48.png +0 -0
  39. flood_adapt/database_builder/templates/icons/white_up_48x48.png +0 -0
  40. flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -0
  41. flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -0
  42. flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -0
  43. flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -0
  44. flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -0
  45. flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -0
  46. flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -0
  47. flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -0
  48. flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -0
  49. flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -0
  50. flood_adapt/database_builder/templates/infographics/images/ambulance.png +0 -0
  51. flood_adapt/database_builder/templates/infographics/images/car.png +0 -0
  52. flood_adapt/database_builder/templates/infographics/images/cart.png +0 -0
  53. flood_adapt/database_builder/templates/infographics/images/firetruck.png +0 -0
  54. flood_adapt/database_builder/templates/infographics/images/hospital.png +0 -0
  55. flood_adapt/database_builder/templates/infographics/images/house.png +0 -0
  56. flood_adapt/database_builder/templates/infographics/images/info.png +0 -0
  57. flood_adapt/database_builder/templates/infographics/images/money.png +0 -0
  58. flood_adapt/database_builder/templates/infographics/images/person.png +0 -0
  59. flood_adapt/database_builder/templates/infographics/images/school.png +0 -0
  60. flood_adapt/database_builder/templates/infographics/images/truck.png +0 -0
  61. flood_adapt/database_builder/templates/infographics/images/walking_person.png +0 -0
  62. flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -0
  63. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -0
  64. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -0
  65. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -0
  66. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -0
  67. flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -0
  68. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -0
  69. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -0
  70. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -0
  71. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -0
  72. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -0
  73. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -0
  74. flood_adapt/database_builder/templates/mapbox_layers/bin_colors.toml +5 -0
  75. flood_adapt/database_builder.py +16 -0
  76. flood_adapt/dbs_classes/__init__.py +21 -0
  77. flood_adapt/dbs_classes/database.py +716 -0
  78. flood_adapt/dbs_classes/dbs_benefit.py +97 -0
  79. flood_adapt/dbs_classes/dbs_event.py +91 -0
  80. flood_adapt/dbs_classes/dbs_measure.py +103 -0
  81. flood_adapt/dbs_classes/dbs_projection.py +52 -0
  82. flood_adapt/dbs_classes/dbs_scenario.py +150 -0
  83. flood_adapt/dbs_classes/dbs_static.py +261 -0
  84. flood_adapt/dbs_classes/dbs_strategy.py +147 -0
  85. flood_adapt/dbs_classes/dbs_template.py +302 -0
  86. flood_adapt/dbs_classes/interface/database.py +147 -0
  87. flood_adapt/dbs_classes/interface/element.py +137 -0
  88. flood_adapt/dbs_classes/interface/static.py +47 -0
  89. flood_adapt/flood_adapt.py +1371 -0
  90. flood_adapt/misc/__init__.py +0 -0
  91. flood_adapt/misc/database_user.py +16 -0
  92. flood_adapt/misc/log.py +183 -0
  93. flood_adapt/misc/path_builder.py +54 -0
  94. flood_adapt/misc/utils.py +185 -0
  95. flood_adapt/objects/__init__.py +59 -0
  96. flood_adapt/objects/benefits/__init__.py +0 -0
  97. flood_adapt/objects/benefits/benefits.py +61 -0
  98. flood_adapt/objects/events/__init__.py +0 -0
  99. flood_adapt/objects/events/event_factory.py +135 -0
  100. flood_adapt/objects/events/event_set.py +84 -0
  101. flood_adapt/objects/events/events.py +221 -0
  102. flood_adapt/objects/events/historical.py +55 -0
  103. flood_adapt/objects/events/hurricane.py +64 -0
  104. flood_adapt/objects/events/synthetic.py +48 -0
  105. flood_adapt/objects/forcing/__init__.py +0 -0
  106. flood_adapt/objects/forcing/csv.py +68 -0
  107. flood_adapt/objects/forcing/discharge.py +66 -0
  108. flood_adapt/objects/forcing/forcing.py +142 -0
  109. flood_adapt/objects/forcing/forcing_factory.py +182 -0
  110. flood_adapt/objects/forcing/meteo_handler.py +93 -0
  111. flood_adapt/objects/forcing/netcdf.py +40 -0
  112. flood_adapt/objects/forcing/plotting.py +428 -0
  113. flood_adapt/objects/forcing/rainfall.py +98 -0
  114. flood_adapt/objects/forcing/tide_gauge.py +191 -0
  115. flood_adapt/objects/forcing/time_frame.py +77 -0
  116. flood_adapt/objects/forcing/timeseries.py +552 -0
  117. flood_adapt/objects/forcing/unit_system.py +580 -0
  118. flood_adapt/objects/forcing/waterlevels.py +108 -0
  119. flood_adapt/objects/forcing/wind.py +124 -0
  120. flood_adapt/objects/measures/__init__.py +0 -0
  121. flood_adapt/objects/measures/measure_factory.py +92 -0
  122. flood_adapt/objects/measures/measures.py +506 -0
  123. flood_adapt/objects/object_model.py +68 -0
  124. flood_adapt/objects/projections/__init__.py +0 -0
  125. flood_adapt/objects/projections/projections.py +89 -0
  126. flood_adapt/objects/scenarios/__init__.py +0 -0
  127. flood_adapt/objects/scenarios/scenarios.py +22 -0
  128. flood_adapt/objects/strategies/__init__.py +0 -0
  129. flood_adapt/objects/strategies/strategies.py +68 -0
  130. flood_adapt/workflows/__init__.py +0 -0
  131. flood_adapt/workflows/benefit_runner.py +541 -0
  132. flood_adapt/workflows/floodmap.py +85 -0
  133. flood_adapt/workflows/impacts_integrator.py +82 -0
  134. flood_adapt/workflows/scenario_runner.py +69 -0
  135. flood_adapt-0.3.0.dist-info/LICENSE +21 -0
  136. flood_adapt-0.3.0.dist-info/METADATA +183 -0
  137. flood_adapt-0.3.0.dist-info/RECORD +139 -0
  138. flood_adapt-0.3.0.dist-info/WHEEL +5 -0
  139. flood_adapt-0.3.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,2175 @@
1
+ import datetime
2
+ import math
3
+ import os
4
+ import shutil
5
+ import warnings
6
+ from enum import Enum
7
+ from pathlib import Path
8
+ from typing import Optional, Union
9
+ from urllib.request import urlretrieve
10
+
11
+ import cht_observations.observation_stations as obs
12
+ import geopandas as gpd
13
+ import numpy as np
14
+ import pandas as pd
15
+ import rioxarray as rxr
16
+ import tomli
17
+ import tomli_w
18
+ import xarray as xr
19
+ from hydromt_fiat import FiatModel as HydromtFiatModel
20
+ from hydromt_fiat.data_apis.open_street_maps import get_buildings_from_osm
21
+ from hydromt_sfincs import SfincsModel as HydromtSfincsModel
22
+ from pydantic import BaseModel, Field
23
+ from shapely import MultiLineString, MultiPolygon, Polygon
24
+
25
+ from flood_adapt.adapter.fiat_adapter import _FIAT_COLUMNS
26
+ from flood_adapt.config.fiat import (
27
+ AggregationModel,
28
+ BenefitsModel,
29
+ BFEModel,
30
+ EquityModel,
31
+ FiatConfigModel,
32
+ FiatModel,
33
+ RiskModel,
34
+ SVIModel,
35
+ )
36
+ from flood_adapt.config.gui import (
37
+ GuiModel,
38
+ GuiUnitModel,
39
+ MapboxLayersModel,
40
+ PlottingModel,
41
+ SyntheticTideModel,
42
+ VisualizationLayersModel,
43
+ )
44
+ from flood_adapt.config.sfincs import (
45
+ Cstype,
46
+ CycloneTrackDatabaseModel,
47
+ DatumModel,
48
+ DemModel,
49
+ FloodmapType,
50
+ FloodModel,
51
+ ObsPointModel,
52
+ RiverModel,
53
+ SCSModel,
54
+ SfincsConfigModel,
55
+ SfincsModel,
56
+ SlrScenariosModel,
57
+ WaterlevelReferenceModel,
58
+ )
59
+ from flood_adapt.config.site import (
60
+ Site,
61
+ StandardObjectModel,
62
+ )
63
+ from flood_adapt.dbs_classes.database import Database
64
+ from flood_adapt.misc.log import FloodAdaptLogging
65
+ from flood_adapt.misc.utils import modified_environ
66
+ from flood_adapt.objects.events.event_set import EventSet
67
+ from flood_adapt.objects.forcing import unit_system as us
68
+ from flood_adapt.objects.forcing.tide_gauge import (
69
+ TideGauge,
70
+ TideGaugeSource,
71
+ )
72
+ from flood_adapt.objects.projections.projections import (
73
+ PhysicalProjection,
74
+ Projection,
75
+ SocioEconomicChange,
76
+ )
77
+ from flood_adapt.objects.strategies.strategies import Strategy
78
+
79
+
80
def path_check(str_path: str, config_path: Optional[Path] = None) -> str:
    """
    Resolve ``str_path`` to an absolute path and return it in POSIX notation.

    Parameters
    ----------
    str_path : str
        The path to be checked.
    config_path : Optional[Path], default None
        Reference file used to resolve relative paths: a relative
        ``str_path`` is interpreted relative to the parent directory of
        this file.

    Returns
    -------
    str
        The absolute path, in POSIX notation.

    Raises
    ------
    ValueError
        If ``str_path`` is not absolute and no ``config_path`` is provided.
    """
    path = Path(str_path)
    if not path.is_absolute():
        if config_path is not None:
            # Interpret the path relative to the directory that contains the
            # reference (config) file.
            path = Path(config_path).parent.joinpath(path).resolve()
        else:
            raise ValueError(f"Value '{path}' should be an absolute path.")
    return path.as_posix()
102
+
103
+
104
class SpatialJoinModel(BaseModel):
    """
    Represents a spatial join model.

    Attributes
    ----------
    name (Optional[str]): The name of the model (optional).
    file (str): The file associated with the model.
    field_name (str): The field name used for the spatial join.
    """

    # Optional label for the joined layer.
    name: Optional[str] = None
    # Path to the vector file to join; relative paths are made absolute by
    # ConfigModel.read via path_check.
    file: str
    # Attribute/column in `file` whose values are attached by the join.
    field_name: str
118
+
119
+
120
class UnitSystems(str, Enum):
    """The `UnitSystems` class is an enumeration that represents the accepted values for the `unit_system` field.

    It provides two options: `imperial` and `metric`.

    Attributes
    ----------
    imperial (str): Represents the imperial unit system.
    metric (str): Represents the metric unit system.
    """

    imperial = "imperial"
    metric = "metric"
133
+
134
+
135
class FootprintsOptions(str, Enum):
    """Sources from which building footprints can be obtained automatically.

    Attributes
    ----------
    OSM (str): Download building footprints from OpenStreetMap.
    """

    OSM = "OSM"
137
+
138
+
139
class Basins(str, Enum):
    """
    Enumeration class representing different basins.

    Each basin is represented by a string value.

    Attributes
    ----------
    NA (str): North Atlantic
    SA (str): South Atlantic
    EP (str): Eastern North Pacific (which includes the Central Pacific region)
    WP (str): Western North Pacific
    SP (str): South Pacific
    SI (str): South Indian
    NI (str): North Indian
    """

    # Two-letter basin codes — presumably the convention used by the cyclone
    # track database (see CycloneTrackDatabaseModel); TODO confirm.
    NA = "NA"
    SA = "SA"
    EP = "EP"
    WP = "WP"
    SP = "SP"
    SI = "SI"
    NI = "NI"
163
+
164
+
165
class GuiConfigModel(BaseModel):
    """
    Represents a GUI model for FloodAdapt.

    Attributes
    ----------
    max_flood_depth (float): The last visualization bin will be ">value".
    max_aggr_dmg (float): The last visualization bin will be ">value".
    max_footprint_dmg (float): The last visualization bin will be ">value".
    max_benefits (float): The last visualization bin will be ">value".
    """

    # Upper bound of the flood-depth map color bins.
    max_flood_depth: float
    # Upper bound of the aggregated-damage map color bins.
    max_aggr_dmg: float
    # Upper bound of the footprint-damage map color bins.
    max_footprint_dmg: float
    # Upper bound of the benefits map color bins.
    max_benefits: float
181
+
182
+
183
class SviConfigModel(SpatialJoinModel):
    """
    Represents a model for the Social Vulnerability Index (SVI).

    Attributes
    ----------
    threshold (float): The threshold value for the SVI model to specify vulnerability.
    """

    # NOTE(review): SviModel below declares the identical extension of
    # SpatialJoinModel — confirm whether the two classes can be merged.
    threshold: float
193
+
194
+
195
class Point(BaseModel):
    """A geographic point given as latitude/longitude (presumably WGS84 — TODO confirm)."""

    lat: float
    lon: float
198
+
199
+
200
class TideGaugeConfigModel(BaseModel):
    """
    Represents a tide gauge model.

    Attributes
    ----------
    source (TideGaugeSource): The source of the tide gauge data.
    description (str): A description of the tide gauge (default: "").
    ref (Optional[str]): The reference name. Should be defined in the water level references.
    id (Optional[int]): The station identifier at the source (default: None).
    lon (Optional[float]): The longitude of the tide gauge (default: None).
    lat (Optional[float]): The latitude of the tide gauge (default: None).
    file (Optional[str]): The file associated with the tide gauge data (default: None).
    max_distance (Optional[us.UnitfulLength]): The maximum distance allowed between
        the site and the station (default: None).
    """

    source: TideGaugeSource
    description: str = ""
    ref: Optional[str] = None
    # NOTE: `id` shadows the builtin, but renaming would break the config schema.
    id: Optional[int] = None
    lon: Optional[float] = None
    lat: Optional[float] = None
    file: Optional[str] = None
    max_distance: Optional[us.UnitfulLength] = None
220
+
221
+
222
class SviModel(SpatialJoinModel):
    """
    Represents a model for the Social Vulnerability Index (SVI).

    Attributes
    ----------
    threshold (float): The threshold value for the SVI model to specify vulnerability.
    """

    # NOTE(review): identical to SviConfigModel above — confirm whether one of
    # the two classes is redundant.
    threshold: float
232
+
233
+
234
class ConfigModel(BaseModel):
    """
    Represents the configuration model for FloodAdapt.

    Attributes
    ----------
    name : str
        The name of the site. May not be empty or contain whitespace or
        the characters <>:"/\\|?*.
    description : Optional[str], default None
        The description of the site.
    database_path : Optional[str], default None
        The path to the database where all the sites are located.
    unit_system : UnitSystems
        The unit system (imperial or metric).
    gui : GuiConfigModel
        Scaling values for the GUI visualization layers.
    infographics : Optional[bool], default True
        Indicates if infographics are enabled.
    fiat : str
        The FIAT model path.
    aggregation_areas : Optional[list[SpatialJoinModel]], default None
        Aggregation area layers to join to the exposure.
    building_footprints : Optional[SpatialJoinModel | FootprintsOptions], default FootprintsOptions.OSM
        The building footprints model, or the source to obtain footprints from.
    fiat_buildings_name : Optional[str], default "buildings"
        Name of the buildings geometry in the FIAT model.
    fiat_roads_name : Optional[str], default "roads"
        Name of the roads geometry in the FIAT model.
    bfe : Optional[SpatialJoinModel], default None
        The BFE (base flood elevation) model.
    svi : Optional[SviConfigModel], default None
        The SVI model.
    road_width : Optional[float], default 5
        The road width in meters.
    return_periods : list[int], default []
        Return periods used for risk calculations.
    references : WaterlevelReferenceModel, default MSL datum at 0 meters
        The water level references and datums.
    sfincs_overland : FloodModel
        The overland SFINCS model.
    sfincs_offshore : Optional[FloodModel], default None
        The offshore SFINCS model.
    dem : Optional[DemModel], default None
        The digital elevation model.
    excluded_datums : list[str], default []
        Names of datums to exclude — TODO confirm consumer.
    slr_scenarios : Optional[SlrScenariosModel], default None
        The sea level rise scenarios.
    scs : Optional[SCSModel], default None
        The SCS model.
    tide_gauge : Optional[TideGaugeConfigModel], default None
        The tide gauge model.
    cyclones : Optional[bool], default True
        Indicates if cyclones are enabled.
    cyclone_basin : Optional[Basins], default None
        The cyclone basin.
    obs_point : Optional[list[ObsPointModel]], default None
        The list of observation point models.
    probabilistic_set : Optional[str], default None
        The probabilistic set path.
    """

    # General
    name: str = Field(..., min_length=1, pattern='^[^<>:"/\\\\|?* ]*$')
    description: Optional[str] = None
    database_path: Optional[str] = None
    unit_system: UnitSystems
    gui: GuiConfigModel
    infographics: Optional[bool] = True

    # FIAT
    fiat: str
    aggregation_areas: Optional[list[SpatialJoinModel]] = None
    building_footprints: Optional[SpatialJoinModel | FootprintsOptions] = (
        FootprintsOptions.OSM
    )
    fiat_buildings_name: Optional[str] = "buildings"
    fiat_roads_name: Optional[str] = "roads"
    bfe: Optional[SpatialJoinModel] = None
    svi: Optional[SviConfigModel] = None
    road_width: Optional[float] = 5
    return_periods: list[int] = Field(default_factory=list)

    # SFINCS
    references: WaterlevelReferenceModel = WaterlevelReferenceModel(
        reference="MSL",
        datums=[
            DatumModel(
                name="MSL",
                height=us.UnitfulLength(value=0.0, units=us.UnitTypesLength.meters),
            ),
        ],
    )

    sfincs_overland: FloodModel
    sfincs_offshore: Optional[FloodModel] = None
    dem: Optional[DemModel] = None

    excluded_datums: list[str] = Field(default_factory=list)

    slr_scenarios: Optional[SlrScenariosModel] = None
    scs: Optional[SCSModel] = None
    tide_gauge: Optional[TideGaugeConfigModel] = None
    cyclones: Optional[bool] = True
    cyclone_basin: Optional[Basins] = None
    obs_point: Optional[list[ObsPointModel]] = None
    probabilistic_set: Optional[str] = None

    @staticmethod
    def read(toml_path: Path) -> "ConfigModel":
        """
        Read a configuration file and return the validated attributes.

        Parameters
        ----------
        toml_path : Path
            The path to the configuration TOML file.

        Returns
        -------
        ConfigModel
            The validated attributes from the configuration file.
        """
        with open(toml_path, mode="rb") as fp:
            toml = tomli.load(fp)
        config = ConfigModel.model_validate(toml)

        # check if database path is provided and use config_file path if not
        if config.database_path is None:
            dbs_path = Path(toml_path).parent / "Database"
            if not dbs_path.exists():
                dbs_path.mkdir(parents=True)
            config.database_path = dbs_path.as_posix()
        # check if paths are relative to the config file and make them absolute
        config.database_path = path_check(config.database_path, toml_path)
        config.fiat = path_check(config.fiat, toml_path)
        config.sfincs_overland.name = path_check(config.sfincs_overland.name, toml_path)
        if config.sfincs_offshore:
            config.sfincs_offshore.name = path_check(
                config.sfincs_offshore.name, toml_path
            )
        if isinstance(config.building_footprints, SpatialJoinModel):
            config.building_footprints.file = path_check(
                config.building_footprints.file, toml_path
            )
        if config.tide_gauge and config.tide_gauge.file:
            config.tide_gauge.file = path_check(config.tide_gauge.file, toml_path)
        if config.svi:
            config.svi.file = path_check(config.svi.file, toml_path)
        if config.bfe:
            config.bfe.file = path_check(config.bfe.file, toml_path)
        if config.slr_scenarios:
            config.slr_scenarios.file = path_check(config.slr_scenarios.file, toml_path)
        if config.probabilistic_set:
            config.probabilistic_set = path_check(config.probabilistic_set, toml_path)
        if config.aggregation_areas:
            for aggr in config.aggregation_areas:
                aggr.file = path_check(aggr.file, toml_path)

        return config
375
+
376
+
377
class DatabaseBuilder:
    """Builds a FloodAdapt database folder structure from a validated ConfigModel."""

    logger = FloodAdaptLogging.getLogger("DatabaseBuilder")

    # Whether roads were added to the FIAT model — presumably set during road
    # creation; TODO confirm where this flag is written.
    _has_roads: bool = False
    # Aggregation-area models cached by create_fiat_config for later reuse.
    _aggregation_areas: Optional[list] = None
382
+
383
    def __init__(self, config: ConfigModel, overwrite: bool = True):
        """
        Initialize the builder from a validated configuration.

        NOTE(review): the `overwrite` parameter is accepted here but never
        used — overwriting is controlled by the `overwrite` argument of
        `build()` instead. Confirm whether this parameter can be deprecated.

        Raises
        ------
        ValueError
            If `config.database_path` is not set.
        """
        self.config = config

        # Set database root
        if config.database_path:
            self.root = Path(config.database_path).joinpath(self.config.name)
        else:
            raise ValueError(
                "Database path is not provided. Please provide a path using the 'database_path' attribute."
            )

        # Read info that needs to be used to create other models
        self.unit_system = self.create_default_units()

        # Read info that needs to be updated with other model info
        self.water_level_references = self.config.references
399
+
400
    @property
    def static_path(self) -> Path:
        """Path to the 'static' folder inside the database root."""
        return self.root / "static"
403
+
404
+ def build(self, overwrite: bool = False) -> None:
405
+ # Check if database already exists
406
+ if self.root.exists() and not overwrite:
407
+ raise ValueError(
408
+ f"There is already a Database folder in '{self.root.as_posix()}'."
409
+ )
410
+ if self.root.exists() and overwrite:
411
+ shutil.rmtree(self.root)
412
+ warnings.warn(
413
+ f"There is already a Database folder in '{self.root.as_posix()}, which will be overwritten'."
414
+ )
415
+ # Create database folder
416
+ self.root.mkdir(parents=True)
417
+
418
+ with FloodAdaptLogging.to_file(
419
+ file_path=self.root.joinpath("database_builder.log")
420
+ ):
421
+ self.logger.info(
422
+ f"Creating a FloodAdapt database in '{self.root.as_posix()}'"
423
+ )
424
+
425
+ # Make folder structure and read models
426
+ self.setup()
427
+
428
+ # Prepare site configuration
429
+ site = self.create_site_config()
430
+ site.save(self.static_path / "config" / "site.toml")
431
+
432
+ # Add infometric and infographic configurations
433
+ self.create_infometrics()
434
+
435
+ # Save standard objects
436
+ self.create_standard_objects()
437
+
438
+ # Save log file
439
+ self.logger.info("FloodAdapt database creation finished!")
440
+
441
    def setup(self) -> None:
        """Create the database folder structure and load the user-provided template models."""
        # Create the models
        self.make_folder_structure()

        # Read user models and copy to templates
        self.read_template_fiat_model()
        self.read_template_sfincs_overland_model()
        self.read_template_sfincs_offshore_model()
449
+
450
+ def set_standard_objects(self):
451
+ # Define name and create object
452
+ self._no_measures_strategy_name = "no_measures"
453
+ self._current_projection_name = "current"
454
+ if self._probabilistic_set_name is not None:
455
+ event_list = [self._probabilistic_set_name]
456
+ else:
457
+ event_list = []
458
+ std_obj = StandardObjectModel(
459
+ events=event_list,
460
+ projections=[self._current_projection_name],
461
+ strategies=[self._no_measures_strategy_name],
462
+ )
463
+ return std_obj
464
+
465
+ def create_standard_objects(self):
466
+ with modified_environ(
467
+ DATABASE_ROOT=str(self.root.parent),
468
+ DATABASE_NAME=self.root.name,
469
+ ):
470
+ self.logger.info(
471
+ "Creating `no measures` strategy and `current` projection."
472
+ )
473
+ # Create database instance
474
+ db = Database(self.root.parent, self.config.name)
475
+ # Create no measures strategy
476
+ strategy = Strategy(
477
+ name=self._no_measures_strategy_name,
478
+ measures=[],
479
+ )
480
+ db.strategies.save(strategy)
481
+ # Create current projection
482
+ projection = Projection(
483
+ name=self._current_projection_name,
484
+ physical_projection=PhysicalProjection(),
485
+ socio_economic_change=SocioEconomicChange(),
486
+ )
487
+ db.projections.save(projection)
488
+ # Check prob set
489
+ if self._probabilistic_set_name is not None:
490
+ path_toml = (
491
+ db.input_path
492
+ / "events"
493
+ / self._probabilistic_set_name
494
+ / f"{self._probabilistic_set_name}.toml"
495
+ )
496
+ try:
497
+ EventSet.load_file(path_toml)
498
+ except Exception as e:
499
+ raise ValueError(
500
+ f"Provided probabilistic event set '{self._probabilistic_set_name}' is not valid. Error: {e}"
501
+ )
502
+
503
+ ### TEMPLATE READERS ###
504
    def read_template_fiat_model(self):
        """
        Validate, copy, and load the user-provided FIAT model into the database.

        The user model is first read in place to validate it, then copied to
        `static/templates/fiat` and re-read from there. Exposure geometries
        beyond those listed with a "file" key in the model config are dropped,
        and a "region" polygon is derived from the exposure if missing. The
        resulting model is stored on `self.fiat_model`.
        """
        user_provided = self._check_exists_and_absolute(self.config.fiat)

        # Read config model
        # NOTE(review): mode="r+" opens the user-provided model writable —
        # confirm that read-only mode would not suffice here.
        HydromtFiatModel(root=str(user_provided), mode="r+").read()

        # Success, so copy to db and read again
        location_in_db = self.static_path / "templates" / "fiat"
        if location_in_db.exists():
            shutil.rmtree(location_in_db)
        shutil.copytree(user_provided, location_in_db)
        in_db = HydromtFiatModel(root=str(location_in_db), mode="r+")
        in_db.read()
        # Add check to make sure the geoms are correct
        # TODO this should be handled in hydromt-FIAT
        no_geoms = len(
            [name for name in in_db.config["exposure"]["geom"].keys() if "file" in name]
        )
        # Truncate to only the geometries declared in the config.
        in_db.exposure.exposure_geoms = in_db.exposure.exposure_geoms[:no_geoms]
        in_db.exposure._geom_names = in_db.exposure._geom_names[:no_geoms]

        # Make sure that a region polygon is included
        if "region" not in in_db.geoms:
            gdf = in_db.exposure.get_full_gdf(in_db.exposure.exposure_db)
            # Combine all geometries into a single geometry
            merged_geometry = gdf.unary_union

            # If the result is not a polygon, you can create a convex hull
            if not isinstance(merged_geometry, Polygon):
                merged_geometry = merged_geometry.convex_hull
            # Create a new GeoDataFrame with the resulting polygon
            in_db.geoms["region"] = gpd.GeoDataFrame(
                geometry=[merged_geometry], crs=gdf.crs
            )

        self.fiat_model = in_db
540
+
541
    def read_template_sfincs_overland_model(self):
        """
        Validate, copy, and load the user-provided overland SFINCS model.

        The user model is read (read-only) to validate it and check its CRS,
        then copied to `static/templates/overland` and re-read from there.
        The resulting model is stored on `self.sfincs_overland_model`.

        Raises
        ------
        ValueError
            If the user model has no CRS defined.
        """
        user_provided = self._check_exists_and_absolute(
            self.config.sfincs_overland.name
        )
        user_model = HydromtSfincsModel(root=str(user_provided), mode="r")
        user_model.read()
        if user_model.crs is None:
            raise ValueError("CRS is not defined in the SFINCS model.")

        location_in_db = self.static_path / "templates" / "overland"
        if location_in_db.exists():
            shutil.rmtree(location_in_db)
        shutil.copytree(user_provided, location_in_db)
        in_db = HydromtSfincsModel(root=str(location_in_db), mode="r+")
        in_db.read()
        self.sfincs_overland_model = in_db
557
+
558
    def read_template_sfincs_offshore_model(self):
        """
        Validate, copy, and load the user-provided offshore SFINCS model, if any.

        When no offshore model is configured, `self.sfincs_offshore_model` is
        set to None. Otherwise the user model is validated, copied to
        `static/templates/offshore`, and re-read with the user model's EPSG
        code.

        Raises
        ------
        ValueError
            If the user model has no CRS defined.
        """
        if self.config.sfincs_offshore is None:
            self.sfincs_offshore_model = None
            return
        user_provided = self._check_exists_and_absolute(
            self.config.sfincs_offshore.name
        )
        # NOTE(review): mode="r+" opens the user-provided model writable,
        # unlike the overland reader which uses "r" — confirm intent.
        user_model = HydromtSfincsModel(root=str(user_provided), mode="r+")
        user_model.read()
        if user_model.crs is None:
            raise ValueError("CRS is not defined in the SFINCS model.")
        epsg = user_model.crs.to_epsg()

        location_in_db = self.static_path / "templates" / "offshore"
        if location_in_db.exists():
            shutil.rmtree(location_in_db)
        shutil.copytree(user_provided, location_in_db)
        in_db = HydromtSfincsModel(str(location_in_db), mode="r+")
        in_db.read(epsg=epsg)
        self.sfincs_offshore_model = in_db
578
+
579
+ ### FIAT ###
580
+ def create_fiat_model(self) -> FiatModel:
581
+ fiat = FiatModel(
582
+ risk=self.create_risk_model(),
583
+ config=self.create_fiat_config(),
584
+ benefits=self.create_benefit_config(),
585
+ )
586
+ return fiat
587
+
588
+ def create_risk_model(self) -> RiskModel:
589
+ # Check if return periods are provided
590
+ if not self.config.return_periods:
591
+ risk = RiskModel()
592
+ self.logger.warning(
593
+ f"Return periods for risk calculations not provided. Default values of {risk.return_periods} will be used."
594
+ )
595
+ else:
596
+ risk = RiskModel(return_periods=self.config.return_periods)
597
+ return risk
598
+
599
+ def create_benefit_config(self) -> Optional[BenefitsModel]:
600
+ if self._probabilistic_set_name is None:
601
+ self.logger.warning(
602
+ "No probabilistic set found in the config, benefits will not be available."
603
+ )
604
+ return None
605
+ return BenefitsModel(
606
+ current_year=datetime.datetime.now().year,
607
+ current_projection="current",
608
+ baseline_strategy="no_measures",
609
+ event_set=self._probabilistic_set_name,
610
+ )
611
+
612
    def create_fiat_config(self) -> FiatConfigModel:
        """
        Build the FiatConfigModel and finalize the template FIAT model on disk.

        Side effects: prunes exposure geometries to objects present in the
        exposure CSV, clips the hazard extent when a region is present,
        creates aggregation areas, roads, footprints and new-development
        layers, updates ground elevations, rewrites the FIAT output-geometry
        config, and writes the FIAT model.
        """
        # Make sure only csv objects have geometries
        for i, geoms in enumerate(self.fiat_model.exposure.exposure_geoms):
            keep = geoms[_FIAT_COLUMNS.object_id].isin(
                self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.object_id]
            )
            geoms = geoms[keep].reset_index(drop=True)
            self.fiat_model.exposure.exposure_geoms[i] = geoms

        footprints = self.create_footprints()
        if footprints is not None:
            # FiatConfigModel stores the footprints path as a POSIX string.
            footprints = footprints.as_posix()

        # Clip hazard and reset buildings # TODO use hydromt-FIAT instead
        if not self.fiat_model.region.empty:
            self._clip_hazard_extend()

        # Store result for possible future use in create_infographics
        self._aggregation_areas = self.create_aggregation_areas()

        roads_gpkg = self.create_roads()
        non_building_names = []
        if roads_gpkg is not None:
            non_building_names.append("road")

        # Update elevations
        self.update_fiat_elevation()

        config = FiatConfigModel(
            exposure_crs=self.fiat_model.exposure.crs,
            floodmap_type=self.read_floodmap_type(),
            bfe=self.create_bfe(),
            non_building_names=non_building_names,
            damage_unit=self.read_damage_unit(),
            building_footprints=footprints,
            roads_file_name=roads_gpkg,
            new_development_file_name=self.create_new_developments(),
            save_simulation=False,  # TODO
            infographics=self.config.infographics,
            aggregation=self._aggregation_areas,
            svi=self.create_svi(),
        )

        # Update output geoms names
        # Output names are numbered "name1", "name2", ... following the order
        # of the "file*" entries in the exposure geom config.
        output_geom = {}
        counter = 0
        for key in self.fiat_model.config["exposure"]["geom"].keys():
            if "file" in key:
                counter += 1
                output_geom[f"name{counter}"] = Path(
                    self.fiat_model.config["exposure"]["geom"][key]
                ).name
        self.fiat_model.config["output"]["geom"] = output_geom
        # Update FIAT model with the new config
        self.fiat_model.write()

        return config
669
+
670
+ def update_fiat_elevation(self):
671
+ """
672
+ Update the ground elevations of FIAT objects based on the SFINCS ground elevation map.
673
+
674
+ This method reads the DEM file and the exposure CSV file, and updates the ground elevations
675
+ of the FIAT objects (roads and buildings) based on the nearest elevation values from the DEM.
676
+ """
677
+ dem_file = self._dem_path
678
+ # TODO resolve issue with double geometries in hydromt-FIAT and use update_ground_elevation method instead
679
+ # self.fiat_model.update_ground_elevation(dem_file, grnd_elev_unit="meters")
680
+ self.logger.info(
681
+ "Updating FIAT objects ground elevations from SFINCS ground elevation map."
682
+ )
683
+ SFINCS_units = us.UnitfulLength(
684
+ value=1.0, units=us.UnitTypesLength.meters
685
+ ) # SFINCS is always in meters
686
+ FIAT_units = self.unit_system.default_length_units
687
+ conversion_factor = SFINCS_units.convert(FIAT_units)
688
+
689
+ if not math.isclose(conversion_factor, 1):
690
+ self.logger.info(
691
+ f"Ground elevation for FIAT objects is in '{FIAT_units}', while SFINCS ground elevation is in 'meters'. Values in the exposure csv will be converted by a factor of {conversion_factor}"
692
+ )
693
+
694
+ exposure = self.fiat_model.exposure.exposure_db
695
+ dem = rxr.open_rasterio(dem_file)
696
+ # TODO make sure only fiat_model object changes take place!
697
+ if self.config.fiat_roads_name in self.fiat_model.exposure.geom_names:
698
+ roads = self.fiat_model.exposure.exposure_geoms[
699
+ self._get_fiat_road_index()
700
+ ].to_crs(dem.spatial_ref.crs_wkt)
701
+ roads["centroid"] = roads.geometry.centroid # get centroids
702
+
703
+ x_points = xr.DataArray(roads["centroid"].x, dims="points")
704
+ y_points = xr.DataArray(roads["centroid"].y, dims="points")
705
+ roads["elev"] = (
706
+ dem.sel(x=x_points, y=y_points, band=1, method="nearest").to_numpy()
707
+ * conversion_factor
708
+ )
709
+
710
+ exposure.loc[
711
+ exposure[_FIAT_COLUMNS.primary_object_type] == "road",
712
+ _FIAT_COLUMNS.ground_floor_height,
713
+ ] = 0
714
+ exposure = exposure.merge(
715
+ roads[[_FIAT_COLUMNS.object_id, "elev"]],
716
+ on=_FIAT_COLUMNS.object_id,
717
+ how="left",
718
+ )
719
+ exposure.loc[
720
+ exposure[_FIAT_COLUMNS.primary_object_type] == "road",
721
+ _FIAT_COLUMNS.ground_elevation,
722
+ ] = exposure.loc[
723
+ exposure[_FIAT_COLUMNS.primary_object_type] == "road", "elev"
724
+ ]
725
+ del exposure["elev"]
726
+ self.fiat_model.exposure.exposure_db = exposure
727
+
728
+ buildings = self.fiat_model.exposure.exposure_geoms[
729
+ self._get_fiat_building_index()
730
+ ].to_crs(dem.spatial_ref.crs_wkt)
731
+ buildings["geometry"] = buildings.geometry.centroid
732
+ x_points = xr.DataArray(buildings["geometry"].x, dims="points")
733
+ y_points = xr.DataArray(buildings["geometry"].y, dims="points")
734
+ buildings["elev"] = (
735
+ dem.sel(x=x_points, y=y_points, band=1, method="nearest").to_numpy()
736
+ * conversion_factor
737
+ )
738
+ exposure = exposure.merge(
739
+ buildings[[_FIAT_COLUMNS.object_id, "elev"]],
740
+ on=_FIAT_COLUMNS.object_id,
741
+ how="left",
742
+ )
743
+ exposure.loc[
744
+ exposure[_FIAT_COLUMNS.primary_object_type] != "road",
745
+ _FIAT_COLUMNS.ground_elevation,
746
+ ] = exposure.loc[exposure[_FIAT_COLUMNS.primary_object_type] != "road", "elev"]
747
+ del exposure["elev"]
748
+
749
+ def read_damage_unit(self) -> str:
750
+ if self.fiat_model.exposure.damage_unit is not None:
751
+ return self.fiat_model.exposure.damage_unit
752
+ else:
753
+ self.logger.warning(
754
+ "Delft-FIAT model was missing damage units so '$' was assumed."
755
+ )
756
+ return "$"
757
+
758
+ def read_floodmap_type(self) -> FloodmapType:
759
+ # If there is at least on object that uses the area method, use water depths for FA calcs
760
+ if (
761
+ self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.extraction_method]
762
+ == "area"
763
+ ).any():
764
+ return FloodmapType.water_depth
765
+ else:
766
+ return FloodmapType.water_level
767
+
768
    def create_roads(self) -> Optional[str]:
        """Prepare FIAT road geometries for FloodAdapt.

        Converts road line geometries into polygons (buffered by half the
        configured road width, in a projected UTM CRS so the width is in
        meters) and stores them back on the FIAT model.

        Returns:
            The road geopackage file name, or None when the FIAT model has
            no road objects (roads will then be unavailable in FloodAdapt).
        """
        # Make sure that FIAT roads are polygons
        if self.config.fiat_roads_name not in self.fiat_model.exposure.geom_names:
            self.logger.warning(
                "Road objects are not available in the FIAT model and thus would not be available in FloodAdapt."
            )
            # TODO check how this naming of output geoms should become more explicit!
            return None

        roads = self.fiat_model.exposure.exposure_geoms[self._get_fiat_road_index()]

        # TODO do we need the lanes column?
        # Without segment lengths, road impact infometrics cannot be computed;
        # this only warns, it does not abort road creation.
        if (
            _FIAT_COLUMNS.segment_length
            not in self.fiat_model.exposure.exposure_db.columns
        ):
            self.logger.warning(
                f"'{_FIAT_COLUMNS.segment_length}' column not present in the FIAT exposure csv. Road impact infometrics cannot be produced."
            )

        # TODO should this should be performed through hydromt-FIAT?
        # Buffer line geometries into polygons; cap_style=2 gives flat ends.
        # Only the first geometry is checked — assumes homogeneous geometry types.
        if not isinstance(roads.geometry.iloc[0], Polygon):
            roads = roads.to_crs(roads.estimate_utm_crs())
            roads.geometry = roads.geometry.buffer(
                self.config.road_width / 2, cap_style=2
            )
            roads = roads.to_crs(self.fiat_model.exposure.crs)
            self.fiat_model.exposure.exposure_geoms[self._get_fiat_road_index()] = roads
            self.logger.info(
                f"FIAT road objects transformed from lines to polygons assuming a road width of {self.config.road_width} meters."
            )

        self._has_roads = True
        return f"{self.config.fiat_roads_name}.gpkg"
+ def create_new_developments(self) -> Optional[str]:
804
+ return "new_development_area.gpkg"
805
+
806
    def create_footprints(self) -> Optional[Path]:
        """Resolve building footprints for the FloodAdapt database.

        Sources, in order of precedence:
        1. A user-provided footprints file (SpatialJoinModel in the config).
        2. OSM download, when the config requests FootprintsOptions.OSM.
        3. The FIAT building geometries themselves, if already polygons.
        4. An existing 'BF_FID' spatial join registered on the FIAT model.
        Otherwise no footprints are used (buildings get a default shape).

        Returns:
            Path to the footprints file (relative to the static folder where
            applicable), or None when footprints are not needed/available.

        Raises:
            ValueError: OSM requested but the FIAT model has no region.
            KeyError: 'BF_FID' column exists without a registered join file.
            FileNotFoundError: the registered footprints file is missing.
        """
        if isinstance(self.config.building_footprints, SpatialJoinModel):
            # Use the provided building footprints
            building_footprints_file = self._check_exists_and_absolute(
                self.config.building_footprints.file
            )

            self.logger.info(
                f"Using building footprints from {Path(building_footprints_file).as_posix()}."
            )
            # Spatially join buildings and map
            # TODO use hydromt method instead
            path = self._join_building_footprints(
                self.config.building_footprints.file,
                self.config.building_footprints.field_name,
            )
            return path
        elif self.config.building_footprints == FootprintsOptions.OSM:
            self.logger.info(
                "Building footprint data will be downloaded from Open Street Maps."
            )
            region = self.fiat_model.region
            if region is None:
                raise ValueError(
                    "No region file found in the FIAT model. Building footprints cannot be created."
                )
            region = region.to_crs(4326)
            # A MultiLineString boundary is approximated by its bounding box.
            if isinstance(region.boundary.to_numpy()[0], MultiLineString):
                polygon = Polygon(
                    region.boundary.to_numpy()[0].envelope
                )  # TODO check if this is correct
            else:
                polygon = Polygon(region.boundary.to_numpy()[0])
            footprints = get_buildings_from_osm(polygon)
            # Assign sequential 1-based footprint IDs for the spatial join.
            footprints["BF_FID"] = np.arange(1, len(footprints) + 1)
            footprints = footprints[["BF_FID", "geometry"]]
            path = self._join_building_footprints(footprints, "BF_FID")
            return path
        # Then check if geometries are already footprints
        elif isinstance(
            self.fiat_model.exposure.exposure_geoms[
                self._get_fiat_building_index()
            ].geometry.iloc[0],
            (Polygon, MultiPolygon),
        ):
            self.logger.info(
                "Building footprints are already available in the FIAT model geometry files."
            )
            return None
        # check if it is spatially joined and/or exists already
        elif "BF_FID" in self.fiat_model.exposure.exposure_db.columns:
            add_attrs = self.fiat_model.spatial_joins["additional_attributes"]
            fiat_path = Path(self.fiat_model.root)

            if not (add_attrs and "BF_FID" in [attr["name"] for attr in add_attrs]):
                raise KeyError(
                    "While 'BF_FID' column exists, connection to a spatial footprints file is missing."
                )

            ind = [attr["name"] for attr in add_attrs].index("BF_FID")
            footprints = add_attrs[ind]
            footprints_path = fiat_path / footprints["file"]

            if not footprints_path.exists():
                raise FileNotFoundError(
                    f"While 'BF_FID' column exists, building footprints file {footprints_path} not found."
                )

            self.logger.info(
                f"Using the building footprints located at {footprints_path}."
            )
            # NOTE(review): assumes the FIAT model root lives under static_path;
            # relative_to raises ValueError otherwise — confirm against callers.
            return footprints_path.relative_to(self.static_path)

        # Other methods
        else:
            self.logger.warning(
                "No building footprints are available. Buildings will be plotted with a default shape in FloodAdapt."
            )
            return None
    def create_bfe(self) -> Optional[BFEModel]:
        """Create the base flood elevation (BFE) configuration.

        Spatially joins the BFE map onto the FIAT building geometries, keeps
        the maximum value per building, and writes both the map (bfe.gpkg) and
        the per-building table (bfe.csv) into the static folder.

        Returns:
            A BFEModel with paths relative to the static folder, or None when
            no BFE is configured (elevating relative to BFE is then disabled).
        """
        if self.config.bfe is None:
            self.logger.warning(
                "No base flood elevation provided. Elevating building relative to base flood elevation will not be possible in FloodAdapt."
            )
            return None

        # TODO can we use hydromt-FIAT?
        bfe_file = self._check_exists_and_absolute(self.config.bfe.file)

        self.logger.info(
            f"Using map from {Path(bfe_file).as_posix()} as base flood elevation."
        )

        # Spatially join buildings and map
        buildings_joined, bfe = self.spatial_join(
            self.fiat_model.exposure.exposure_geoms[self._get_fiat_building_index()],
            bfe_file,
            self.config.bfe.field_name,
        )

        # Make sure in case of multiple values that the max is kept
        # NOTE(review): DataFrameGroupBy.max() does not take a column name as
        # its first argument (that slot is numeric_only) — verify this call
        # actually restricts the max to the BFE field as intended.
        buildings_joined = (
            buildings_joined.groupby(_FIAT_COLUMNS.object_id)
            .max(self.config.bfe.field_name)
            .sort_values(by=[_FIAT_COLUMNS.object_id])
            .reset_index()
        )

        # Save the files
        fa_bfe_file = self.static_path / "bfe" / "bfe.gpkg"
        fa_bfe_file.parent.mkdir(parents=True, exist_ok=True)
        bfe.to_file(fa_bfe_file)
        csv_path = fa_bfe_file.parent / "bfe.csv"
        buildings_joined.to_csv(csv_path, index=False)

        # Save attributes
        return BFEModel(
            geom=fa_bfe_file.relative_to(self.static_path).as_posix(),
            table=csv_path.relative_to(self.static_path).as_posix(),
            field_name=self.config.bfe.field_name,
        )
    def create_aggregation_areas(self) -> list[AggregationModel]:
        """Collect aggregation areas for the FloodAdapt database.

        Three sources are combined, in order:
        1. Aggregation areas already registered on the FIAT model's spatial
           joins (validated against the exposure csv columns).
        2. Extra areas provided in the builder config (spatially joined onto
           the buildings; the FIAT model and exposure csv are updated).
        3. If neither exists, the FIAT region file is used as a single mock
           aggregation area so FloodAdapt always has at least one.

        Returns:
            The list of AggregationModel configurations (paths relative to
            the static folder).

        Raises:
            KeyError: a registered aggregation area has no matching exposure column.
            ValueError: a config-provided area name already exists in the model.
        """
        # TODO split this to 3 methods?
        aggregation_areas = []

        # first check if the FIAT model has existing aggregation areas
        if self.fiat_model.spatial_joins["aggregation_areas"]:
            # Use the aggregation areas from the FIAT model
            for aggr in self.fiat_model.spatial_joins["aggregation_areas"]:
                # Check if the exposure csv has the correct column
                col_name = _FIAT_COLUMNS.aggregation_label.format(name=aggr["name"])
                if col_name not in self.fiat_model.exposure.exposure_db.columns:
                    raise KeyError(
                        f"While aggregation area '{aggr['name']}' exists in the spatial joins of the FIAT model, the column '{col_name}' is missing in the exposure csv."
                    )
                # Check equity config
                if aggr["equity"] is not None:
                    equity_config = EquityModel(
                        census_data=str(
                            self.static_path.joinpath(
                                "templates", "fiat", aggr["equity"]["census_data"]
                            )
                            .relative_to(self.static_path)
                            .as_posix()
                        ),
                        percapitaincome_label=aggr["equity"]["percapitaincome_label"],
                        totalpopulation_label=aggr["equity"]["totalpopulation_label"],
                    )
                else:
                    equity_config = None
                # Make aggregation config
                # NOTE: the loop variable is rebound here from the raw dict to
                # the AggregationModel; code below this point sees the model.
                aggr = AggregationModel(
                    name=aggr["name"],
                    file=str(
                        self.static_path.joinpath("templates", "fiat", aggr["file"])
                        .relative_to(self.static_path)
                        .as_posix()
                    ),
                    field_name=aggr["field_name"],
                    equity=equity_config,
                )
                aggregation_areas.append(aggr)

                self.logger.info(
                    f"Aggregation areas: {aggr.name} from the FIAT model are going to be used."
                )

        # Then check if the user has provided extra aggregation areas in the config
        if self.config.aggregation_areas:
            # Loop through aggr areas given in config
            for aggr in self.config.aggregation_areas:
                # Get name of type of aggregation area
                if aggr.name is not None:
                    aggr_name = aggr.name
                else:
                    aggr_name = Path(aggr.file).stem
                # If aggregation area already in FIAT model raise Error
                if aggr_name in [aggr.name for aggr in aggregation_areas]:
                    raise ValueError(
                        f"Aggregation area '{aggr_name}' already exists in the FIAT model."
                    )
                # Do spatial join of FIAT objects and aggregation areas
                exposure_csv = self.fiat_model.exposure.exposure_db
                buildings_joined, aggr_areas = self.spatial_join(
                    objects=self.fiat_model.exposure.exposure_geoms[
                        self._get_fiat_building_index()
                    ],
                    layer=str(self._check_exists_and_absolute(aggr.file)),
                    field_name=aggr.field_name,
                    rename=_FIAT_COLUMNS.aggregation_label.format(name=aggr_name),
                )
                aggr_path = Path(self.fiat_model.root).joinpath(
                    "exposure", "aggregation_areas", f"{Path(aggr.file).stem}.gpkg"
                )
                aggr_path.parent.mkdir(parents=True, exist_ok=True)
                aggr_areas.to_file(aggr_path)
                exposure_csv = exposure_csv.merge(
                    buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
                )
                self.fiat_model.exposure.exposure_db = exposure_csv
                # Update spatial joins in FIAT model
                if self.fiat_model.spatial_joins["aggregation_areas"] is None:
                    self.fiat_model.spatial_joins["aggregation_areas"] = []
                self.fiat_model.spatial_joins["aggregation_areas"].append(
                    {
                        "name": aggr_name,
                        "file": aggr_path.relative_to(self.fiat_model.root).as_posix(),
                        "field_name": _FIAT_COLUMNS.aggregation_label.format(
                            name=aggr_name
                        ),
                        "equity": None,  # TODO allow adding equity as well?
                    }
                )
                # Update the aggregation areas list in the config
                # NOTE(review): this path is made relative to static_path while
                # the spatial join above uses fiat_model.root — confirm both
                # resolve correctly in the final database layout.
                aggregation_areas.append(
                    AggregationModel(
                        name=aggr_name,
                        file=aggr_path.relative_to(self.static_path).as_posix(),
                        field_name=_FIAT_COLUMNS.aggregation_label.format(
                            name=aggr_name
                        ),
                    )
                )

        # No config provided, no aggr areas in the model -> try to use the region file as a mock aggregation area
        if (
            not self.fiat_model.spatial_joins["aggregation_areas"]
            and not self.config.aggregation_areas
        ):
            exposure_csv = self.fiat_model.exposure.exposure_db
            region = self.fiat_model.geoms["region"]
            # One aggregation area per region part, labelled region_1, region_2, ...
            region = region.explode().reset_index()
            region["aggr_id"] = ["region_" + str(i) for i in np.arange(len(region)) + 1]
            aggregation_path = Path(self.fiat_model.root).joinpath(
                "aggregation_areas", "region.geojson"
            )
            if not aggregation_path.parent.exists():
                aggregation_path.parent.mkdir()

            region.to_file(aggregation_path)
            aggr = AggregationModel(
                name="region",
                file=str(aggregation_path.relative_to(self.static_path).as_posix()),
                field_name="aggr_id",
            )
            aggregation_areas.append(aggr)

            # Add column in FIAT
            buildings_joined, _ = self.spatial_join(
                objects=self.fiat_model.exposure.exposure_geoms[
                    self._get_fiat_building_index()
                ],
                layer=region,
                field_name="aggr_id",
                rename=_FIAT_COLUMNS.aggregation_label.format(name="region"),
            )
            exposure_csv = exposure_csv.merge(
                buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
            )
            self.fiat_model.exposure.exposure_db = exposure_csv
            self.logger.warning(
                "No aggregation areas were available in the FIAT model and none were provided in the config file. The region file will be used as a mock aggregation area."
            )
        return aggregation_areas
+ def create_svi(self) -> Optional[SVIModel]:
1074
+ if self.config.svi:
1075
+ svi_file = self._check_exists_and_absolute(self.config.svi.file)
1076
+ exposure_csv = self.fiat_model.exposure.exposure_db
1077
+ buildings_joined, svi = self.spatial_join(
1078
+ self.fiat_model.exposure.exposure_geoms[
1079
+ self._get_fiat_building_index()
1080
+ ],
1081
+ svi_file,
1082
+ self.config.svi.field_name,
1083
+ rename="SVI",
1084
+ filter=True,
1085
+ )
1086
+ # Add column to exposure
1087
+ if "SVI" in exposure_csv.columns:
1088
+ self.logger.info(
1089
+ f"'SVI' column in the FIAT exposure csv will be replaced by {svi_file.as_posix()}."
1090
+ )
1091
+ del exposure_csv["SVI"]
1092
+ else:
1093
+ self.logger.info(
1094
+ f"'SVI' column in the FIAT exposure csv will be filled by {svi_file.as_posix()}."
1095
+ )
1096
+ exposure_csv = exposure_csv.merge(
1097
+ buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
1098
+ )
1099
+ self.fiat_model.exposure.exposure_db = exposure_csv
1100
+
1101
+ # Save the spatial file for future use
1102
+ svi_path = self.static_path / "templates" / "fiat" / "svi" / "svi.gpkg"
1103
+ svi_path.parent.mkdir(parents=True, exist_ok=True)
1104
+ svi.to_file(svi_path)
1105
+ self.logger.info(
1106
+ f"An SVI map can be shown in FloodAdapt GUI using '{self.config.svi.field_name}' column from {svi_file.as_posix()}"
1107
+ )
1108
+
1109
+ return SVIModel(
1110
+ geom=Path(svi_path.relative_to(self.static_path)).as_posix(),
1111
+ field_name="SVI",
1112
+ )
1113
+ elif "SVI" in self.fiat_model.exposure.exposure_db.columns:
1114
+ self.logger.info(
1115
+ "'SVI' column present in the FIAT exposure csv. Vulnerability type infometrics can be produced."
1116
+ )
1117
+ add_attrs = self.fiat_model.spatial_joins["additional_attributes"]
1118
+ if "SVI" not in [attr["name"] for attr in add_attrs]:
1119
+ self.logger.warning(
1120
+ "No SVI map found to display in the FloodAdapt GUI!"
1121
+ )
1122
+
1123
+ ind = [attr["name"] for attr in add_attrs].index("SVI")
1124
+ svi = add_attrs[ind]
1125
+ svi_path = self.static_path / "templates" / "fiat" / svi["file"]
1126
+ self.logger.info(
1127
+ f"An SVI map can be shown in FloodAdapt GUI using '{svi['field_name']}' column from {svi['file']}"
1128
+ )
1129
+ # Save site attributes
1130
+ return SVIModel(
1131
+ geom=Path(svi_path.relative_to(self.static_path)).as_posix(),
1132
+ field_name=svi["field_name"],
1133
+ )
1134
+
1135
+ else:
1136
+ self.logger.warning(
1137
+ "'SVI' column not present in the FIAT exposure csv. Vulnerability type infometrics cannot be produced."
1138
+ )
1139
+ return None
1140
+
1141
+ ### SFINCS ###
1142
+ def create_sfincs_config(self) -> SfincsModel:
1143
+ # call these functions before others to make sure water level references are updated
1144
+ config = self.create_sfincs_model_config()
1145
+ tide_gauge = self.create_tide_gauge()
1146
+
1147
+ sfincs = SfincsModel(
1148
+ config=config,
1149
+ water_level=self.water_level_references,
1150
+ slr_scenarios=self.create_slr(),
1151
+ dem=self.create_dem_model(),
1152
+ scs=self.create_scs_model(),
1153
+ cyclone_track_database=self.create_cyclone_track_database(),
1154
+ tide_gauge=tide_gauge,
1155
+ river=self.create_rivers(),
1156
+ obs_point=self.create_observation_points(),
1157
+ )
1158
+
1159
+ return sfincs
1160
+
1161
+ def create_cyclone_track_database(self) -> Optional[CycloneTrackDatabaseModel]:
1162
+ if not self.config.cyclones or not self.config.sfincs_offshore:
1163
+ self.logger.warning("No cyclones will be available in the database.")
1164
+ return None
1165
+
1166
+ if self.config.cyclone_basin:
1167
+ basin = self.config.cyclone_basin
1168
+ else:
1169
+ basin = "ALL"
1170
+
1171
+ name = f"IBTrACS.{basin.value}.v04r01.nc"
1172
+ url = f"https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/netcdf/{name}"
1173
+ self.logger.info(f"Downloading cyclone track database from {url}")
1174
+ fn = Path(self.root) / "static" / "cyclone_track_database" / name
1175
+ fn.parent.mkdir(parents=True, exist_ok=True)
1176
+
1177
+ try:
1178
+ urlretrieve(url, fn)
1179
+ except Exception:
1180
+ raise RuntimeError(f"Could not retrieve cyclone track database from {url}")
1181
+
1182
+ return CycloneTrackDatabaseModel(file=name)
1183
+
1184
+ def create_scs_model(self) -> Optional[SCSModel]:
1185
+ if self.config.scs is None:
1186
+ return None
1187
+ scs_file = self._check_exists_and_absolute(self.config.scs.file)
1188
+ db_scs_file = self.static_path / "scs" / scs_file.name
1189
+ db_scs_file.parent.mkdir(parents=True, exist_ok=True)
1190
+ shutil.copy2(scs_file, db_scs_file)
1191
+
1192
+ return SCSModel(file=scs_file.name, type=self.config.scs.type)
1193
+
1194
    def create_dem_model(self) -> DemModel:
        """Set up the DEM (subgrid elevation) for the FloodAdapt database.

        Copies the subgrid depth geotiff into static/dem (from the config, or
        falling back to the SFINCS model's subgrid), and either reuses the
        SFINCS model's map tiles or generates new ones from the DEM. Also
        records the DEM location on self._dem_path for later elevation updates.

        Returns:
            A DemModel for the copied file; units are always meters (SFINCS
            convention).
        """
        if self.config.dem:
            subgrid_sfincs = Path(self.config.dem.filename)
        else:
            self.logger.warning(
                "No subgrid depth geotiff file provided in the config file. Using the one from the SFINCS model."
            )
            subgrid_sfincs = (
                Path(self.sfincs_overland_model.root) / "subgrid" / "dep_subgrid.tif"
            )

        dem_file = self._check_exists_and_absolute(subgrid_sfincs)
        fa_subgrid_path = self.static_path / "dem" / dem_file.name
        fa_subgrid_path.parent.mkdir(parents=True, exist_ok=True)

        # Check tiles
        tiles_sfincs = Path(self.sfincs_overland_model.root) / "tiles"
        fa_tiles_path = self.static_path / "dem" / "tiles"
        if tiles_sfincs.exists():
            # Reuse existing tiles; the 'index' folder is renamed to the
            # 'indices' name FloodAdapt expects.
            shutil.move(tiles_sfincs, fa_tiles_path)
            if (fa_tiles_path / "index").exists():
                os.rename(fa_tiles_path / "index", fa_tiles_path / "indices")
            self.logger.info(
                "Tiles were already available in the SFINCS model and will directly be used in FloodAdapt."
            )
        else:
            # Make tiles
            fa_tiles_path.mkdir(parents=True)
            self.sfincs_overland_model.setup_tiles(
                path=fa_tiles_path,
                datasets_dep=[{"elevtn": dem_file}],
                zoom_range=[0, 13],
                fmt="png",
            )
            self.logger.info(
                f"Tiles were created using the {subgrid_sfincs.as_posix()} as the elevation map."
            )

        shutil.copy2(dem_file, fa_subgrid_path)
        # Remember where the DEM lives for update_fiat_elevation().
        self._dem_path = fa_subgrid_path
        return DemModel(
            filename=fa_subgrid_path.name, units=us.UnitTypesLength.meters
        )  # always in meters
+ def create_sfincs_model_config(self) -> SfincsConfigModel:
1239
+ config = SfincsConfigModel(
1240
+ csname=self.sfincs_overland_model.crs.name,
1241
+ cstype=Cstype(
1242
+ self.sfincs_overland_model.crs.type_name.split(" ")[0].lower()
1243
+ ),
1244
+ offshore_model=self.create_offshore_model(),
1245
+ overland_model=self.create_overland_model(),
1246
+ floodmap_units=self.unit_system.default_length_units,
1247
+ save_simulation=False,
1248
+ )
1249
+
1250
+ return config
1251
+
1252
+ def create_slr(self) -> Optional[SlrScenariosModel]:
1253
+ if self.config.slr_scenarios is None:
1254
+ return None
1255
+
1256
+ self.config.slr_scenarios.file = str(
1257
+ self._check_exists_and_absolute(self.config.slr_scenarios.file)
1258
+ )
1259
+ slr_path = self.static_path / "slr_scenarios"
1260
+ slr_path.mkdir()
1261
+ new_file = slr_path / Path(self.config.slr_scenarios.file).name
1262
+ shutil.copyfile(self.config.slr_scenarios.file, new_file)
1263
+
1264
+ return SlrScenariosModel(
1265
+ file=new_file.relative_to(self.static_path).as_posix(),
1266
+ relative_to_year=self.config.slr_scenarios.relative_to_year,
1267
+ )
1268
+
1269
+ def create_observation_points(self) -> list[ObsPointModel]:
1270
+ if self.config.obs_point is None:
1271
+ return []
1272
+
1273
+ self.logger.info("Observation points were provided in the config file.")
1274
+ return self.config.obs_point
1275
+
1276
+ def create_rivers(self) -> list[RiverModel]:
1277
+ src_file = Path(self.sfincs_overland_model.root) / "sfincs.src"
1278
+ if not src_file.exists():
1279
+ self.logger.warning("No rivers found in the SFINCS model.")
1280
+ return []
1281
+
1282
+ df = pd.read_csv(src_file, delim_whitespace=True, header=None, names=["x", "y"])
1283
+ river_locs = gpd.GeoDataFrame(
1284
+ df,
1285
+ geometry=gpd.points_from_xy(df.x, df.y),
1286
+ crs=self.sfincs_overland_model.crs,
1287
+ )
1288
+ rivers = []
1289
+ for idx, row in river_locs.iterrows():
1290
+ if "dis" in self.sfincs_overland_model.forcing:
1291
+ discharge = (
1292
+ self.sfincs_overland_model.forcing["dis"]
1293
+ .sel(index=idx + 1)
1294
+ .to_numpy()
1295
+ .mean()
1296
+ )
1297
+ else:
1298
+ discharge = 0
1299
+ self.logger.warning(
1300
+ f"No river discharge conditions were found in the SFINCS model for river {idx}. A default value of 0 will be used."
1301
+ )
1302
+
1303
+ river = RiverModel(
1304
+ name=f"river_{idx}",
1305
+ x_coordinate=row.x,
1306
+ y_coordinate=row.y,
1307
+ mean_discharge=us.UnitfulDischarge(
1308
+ value=discharge, units=self.unit_system.default_discharge_units
1309
+ ),
1310
+ )
1311
+ rivers.append(river)
1312
+
1313
+ self.logger.info(
1314
+ f"{len(river_locs)} river(s) were identified from the SFINCS model and will be available in FloodAdapt for discharge input."
1315
+ )
1316
+
1317
+ return rivers
1318
+
1319
    def create_tide_gauge(self) -> Optional[TideGauge]:
        """Create the tide gauge configuration for the site.

        Supported sources:
        - 'file': a user-provided water level file copied into static/tide_gauges
          (reference defaults to MSL; units assumed to be the database default).
        - 'noaa_coops': NOAA station metadata is fetched (closest station, or
          the configured ID) and the site's water level references (local
          datum, MSL, MLLW, MHHW) are updated as a side effect.

        Returns:
            A TideGauge, or None when no tide gauge is configured or the
            source is not recognized.

        Raises:
            ValueError: source is 'file' but no file was provided.
        """
        if self.config.tide_gauge is None:
            self.logger.warning(
                "Tide gauge information not provided. Historical events will not have an option to use gauged data in FloodAdapt!"
            )
            self.logger.warning(
                "No water level references were found. It is assumed that MSL is equal to the datum used in the SFINCS overland model. You can provide these values with the tide_gauge.ref attribute in the site.toml."
            )
            return None

        if self.config.tide_gauge.source == TideGaugeSource.file:
            if self.config.tide_gauge.file is None:
                raise ValueError(
                    "Tide gauge file needs to be provided when 'file' is selected as the source."
                )
            if self.config.tide_gauge.ref is None:
                self.logger.warning(
                    "Tide gauge reference not provided. MSL is assumed as the reference of the water levels in the file."
                )
                self.config.tide_gauge.ref = "MSL"

            tide_gauge_file = self._check_exists_and_absolute(
                self.config.tide_gauge.file
            )
            db_file_path = Path(self.static_path / "tide_gauges") / tide_gauge_file.name

            db_file_path.parent.mkdir(parents=True, exist_ok=True)
            shutil.copyfile(self.config.tide_gauge.file, db_file_path)

            rel_db_path = Path(db_file_path.relative_to(self.static_path))
            self.logger.warning(
                f"Tide gauge from file {rel_db_path} assumed to be in {self.unit_system.default_length_units}!"
            )
            tide_gauge = TideGauge(
                reference=self.config.tide_gauge.ref,
                description="Observations from file stored in database",
                source=TideGaugeSource.file,
                file=rel_db_path,
                lon=self.config.tide_gauge.lon,
                lat=self.config.tide_gauge.lat,
                units=self.unit_system.default_length_units,
            )

            return tide_gauge

        elif self.config.tide_gauge.source == TideGaugeSource.noaa_coops:
            if self.config.tide_gauge.ref is not None:
                ref = self.config.tide_gauge.ref
            else:
                ref = "MLLW"  # If reference is not provided use MLLW

            self.water_level_references.reference = (
                ref  # update the water level reference
            )

            if self.config.tide_gauge.id is None:
                station_id = self._get_closest_station()
                self.logger.info(
                    "The closest NOAA tide gauge station to the site will be searched."
                )
            else:
                station_id = self.config.tide_gauge.id
                self.logger.info(
                    f"The NOAA tide gauge station with the provided ID {station_id} will be used."
                )
            station = self._get_station_metadata(station_id=station_id, ref=ref)
            # NOTE(review): when station metadata is unavailable (None), this
            # branch falls through and the method implicitly returns None with
            # no log message — confirm that is intended.
            if station is not None:
                # Add tide_gauge information in site toml
                tide_gauge = TideGauge(
                    name=station["name"],
                    description=f"observations from '{self.config.tide_gauge.source}' api",
                    source=self.config.tide_gauge.source,
                    reference=ref,
                    ID=int(station["id"]),
                    lon=station["lon"],
                    lat=station["lat"],
                    units=us.UnitTypesLength.meters,  # the api always asks for SI units right now
                )

                # Register the station's local datum in the site references,
                # converted to the database's default length units.
                local_datum = DatumModel(
                    name=station["datum_name"],
                    height=us.UnitfulLength(
                        value=station["datum"], units=station["units"]
                    ).transform(self.unit_system.default_length_units),
                )
                self.water_level_references.datums.append(local_datum)

                msl = DatumModel(
                    name="MSL",
                    height=us.UnitfulLength(
                        value=station["msl"], units=station["units"]
                    ).transform(self.unit_system.default_length_units),
                )
                # Check if MSL is already there and if yes replace it
                existing_msl = next(
                    (
                        datum
                        for datum in self.water_level_references.datums
                        if datum.name == "MSL"
                    ),
                    None,
                )
                if existing_msl:
                    self.water_level_references.datums.remove(existing_msl)
                self.water_level_references.datums.append(msl)

                # Tidal datums (mean lower-low / mean higher-high water).
                for name in ["MLLW", "MHHW"]:
                    height = us.UnitfulLength(
                        value=station[name.lower()], units=station["units"]
                    ).transform(self.unit_system.default_length_units)

                    wl_info = DatumModel(
                        name=name,
                        height=height,
                    )
                    self.water_level_references.datums.append(wl_info)
                return tide_gauge
        else:
            self.logger.warning(
                f"Tide gauge source not recognized: {self.config.tide_gauge.source}. Historical events will not have an option to use gauged data in FloodAdapt!"
            )
            return None
    def create_offshore_model(self) -> Optional[FloodModel]:
        """Couple the offshore SFINCS model to the overland model.

        Reads the overland model's boundary points (sfincs.bnd), reprojects
        them to WGS84 and writes them as observation points (sfincs.obs) of
        the offshore model, so offshore output can drive the overland
        boundary conditions.

        Returns:
            A FloodModel describing the offshore model, or None when no
            offshore model is configured.
        """
        if self.sfincs_offshore_model is None:
            return None
        # Connect boundary points of overland to output points of offshore
        fn = Path(self.sfincs_overland_model.root) / "sfincs.bnd"
        bnd = pd.read_csv(fn, sep=" ", lineterminator="\n", header=None)
        bnd = bnd.rename(columns={0: "x", 1: "y"})
        bnd_geo = gpd.GeoDataFrame(
            bnd,
            geometry=gpd.points_from_xy(bnd.x, bnd.y),
            crs=self.sfincs_overland_model.config["epsg"],
        )
        obs_geo = bnd_geo.to_crs(4326)
        obs_geo["x"] = obs_geo.geometry.x
        obs_geo["y"] = obs_geo.geometry.y
        del obs_geo["geometry"]
        # Name points bnd_pt01, bnd_pt02, ... (1-based, zero padded).
        obs_geo["name"] = [f"bnd_pt{num:02d}" for num in range(1, len(obs_geo) + 1)]
        fn_off = Path(self.sfincs_offshore_model.root) / "sfincs.obs"
        obs_geo.to_csv(
            fn_off,
            sep="\t",
            index=False,
            header=False,
        )
        self.logger.info(
            "Output points of the offshore SFINCS model were reconfigured to the boundary points of the overland SFINCS model."
        )

        return FloodModel(
            name="offshore",
            reference=self.config.sfincs_offshore.reference,
            vertical_offset=self.config.sfincs_offshore.vertical_offset,
        )
+ def create_overland_model(self) -> FloodModel:
1477
+ return FloodModel(
1478
+ name="overland",
1479
+ reference=self.config.sfincs_overland.reference,
1480
+ )
1481
+
1482
+ ### SITE ###
1483
    def create_site_config(self) -> Site:
        """Assemble the full FloodAdapt site configuration.

        Note the call order is significant: the SFINCS config must be built
        first (it places the DEM where FIAT expects it and updates water level
        references), then the GUI config (which reads those references), then
        the FIAT config.
        """
        # call this before fiat to ensure the dem is where its expected
        sfincs = self.create_sfincs_config()

        # call this after sfincs to get waterlevel references
        gui = self.create_gui_config()

        # set the probabilistic event set if provided
        self.add_probabilistic_set()

        # Create the FIAT configuration
        fiat = self.create_fiat_model()
        lon, lat = self.read_location()  # Get centroid of site

        # Set standard objects
        std_objs = self.set_standard_objects()

        # Description of site
        description = (
            self.config.description if self.config.description else self.config.name
        )

        config = Site(
            name=self.config.name,
            description=description,
            lat=lat,
            lon=lon,
            fiat=fiat,
            gui=gui,
            sfincs=sfincs,
            standard_objects=std_objs,
        )
        return config
+ def read_location(self) -> tuple[float, float]:
1518
+ # Get center of area of interest
1519
+ if not self.fiat_model.region.empty:
1520
+ center = self.fiat_model.region.dissolve().centroid.to_crs(4326)[0]
1521
+ else:
1522
+ center = (
1523
+ self.fiat_model.exposure.exposure_geoms[self._get_fiat_building_index()]
1524
+ .dissolve()
1525
+ .centroid.to_crs(4326)[0]
1526
+ )
1527
+ return center.x, center.y
1528
+
1529
+ def create_gui_config(self) -> GuiModel:
1530
+ gui = GuiModel(
1531
+ units=self.unit_system,
1532
+ plotting=self.create_hazard_plotting_config(),
1533
+ mapbox_layers=self.create_mapbox_layers_config(),
1534
+ visualization_layers=self.create_visualization_layers(),
1535
+ )
1536
+
1537
+ return gui
1538
+
1539
+ def create_default_units(self) -> GuiUnitModel:
1540
+ if self.config.unit_system == UnitSystems.imperial:
1541
+ return GuiUnitModel.imperial()
1542
+ elif self.config.unit_system == UnitSystems.metric:
1543
+ return GuiUnitModel.metric()
1544
+ else:
1545
+ raise ValueError(
1546
+ f"Unit system {self.config.unit_system} not recognized. Please choose 'imperial' or 'metric'."
1547
+ )
1548
+
1549
+ def create_visualization_layers(self) -> VisualizationLayersModel:
1550
+ visualization_layers = VisualizationLayersModel(
1551
+ default_bin_number=4,
1552
+ default_colors=["#FFFFFF", "#FEE9CE", "#E03720", "#860000"],
1553
+ layer_names=[],
1554
+ layer_long_names=[],
1555
+ layer_paths=[],
1556
+ field_names=[],
1557
+ bins=[],
1558
+ colors=[],
1559
+ )
1560
+
1561
+ return visualization_layers
1562
+
1563
    def create_mapbox_layers_config(self) -> MapboxLayersModel:
        """Build the mapbox layer configuration (flood, damage and benefit bins).

        Bin edges are scaled from the user-provided maxima in the GUI config;
        bin colors come from the bin_colors.toml template via ``_get_bin_colors``.
        """
        # Read default colors from template
        fd_max = self.config.gui.max_flood_depth
        ad_max = self.config.gui.max_aggr_dmg
        ftd_max = self.config.gui.max_footprint_dmg
        b_max = self.config.gui.max_benefits

        # SVI bins are only configured when an SVI layer was provided.
        svi_bins = None
        if self.config.svi is not None:
            svi_bins = [0.05, 0.2, 0.4, 0.6, 0.8]

        mapbox_layers = MapboxLayersModel(
            flood_map_depth_min=0.0,  # mask areas with flood depth lower than this (zero = all depths shown) # TODO How to define this?
            flood_map_zbmax=-9999,  # mask areas with elevation lower than this (very negative = show all calculated flood depths) # TODO How to define this?,
            flood_map_bins=[0.2 * fd_max, 0.6 * fd_max, fd_max],
            damage_decimals=0,
            footprints_dmg_type="absolute",
            # First edge is a tiny epsilon so zero-damage objects fall outside the bins.
            aggregation_dmg_bins=[
                0.00001,
                0.1 * ad_max,
                0.25 * ad_max,
                0.5 * ad_max,
                ad_max,
            ],
            footprints_dmg_bins=[
                0.00001,
                0.06 * ftd_max,
                0.2 * ftd_max,
                0.4 * ftd_max,
                ftd_max,
            ],
            benefits_bins=[0, 0.01, 0.02 * b_max, 0.2 * b_max, b_max],
            svi_bins=svi_bins,
            # Per-bin color lists read from the template toml.
            **self._get_bin_colors(),
        )

        return mapbox_layers
1600
+
1601
    def create_hazard_plotting_config(self) -> PlottingModel:
        """Build the hazard plotting configuration (synthetic tide settings).

        The tidal amplitude is computed as MHHW minus MSL when the MHHW datum
        is available; otherwise it defaults to 0 and a warning is logged. The
        tide reference datum is MSL when available, else the main water-level
        reference.
        """
        datum_names = [datum.name for datum in self.water_level_references.datums]
        if "MHHW" in datum_names:
            # Amplitude derived from the tide-gauge datums (MHHW - MSL).
            amplitude = (
                self.water_level_references.get_datum("MHHW").height
                - self.water_level_references.get_datum("MSL").height
            )
            self.logger.info(
                f"The default tidal amplitude in the GUI will be {amplitude.transform(self.unit_system.default_length_units)}, calculated as the difference between MHHW and MSL from the tide gauge data."
            )
        else:
            # No tide-gauge data: fall back to zero amplitude (user can change it later).
            amplitude = us.UnitfulLength(
                value=0.0, units=self.unit_system.default_length_units
            )
            self.logger.warning(
                "The default tidal amplitude in the GUI will be 0.0, since no tide-gauge water levels are available. You can change this in the site.toml with the 'gui.tide_harmonic_amplitude' attribute."
            )

        # Prefer MSL as the tide reference; fall back to the main reference datum.
        ref = "MSL"
        if ref not in datum_names:
            self.logger.warning(
                f"The Mean Sea Level (MSL) datum is not available in the site.toml. The synthetic tide will be created relative to the main reference: {self.water_level_references.reference}."
            )
            ref = self.water_level_references.reference

        plotting = PlottingModel(
            synthetic_tide=SyntheticTideModel(
                harmonic_amplitude=amplitude,
                datum=ref,
            ),
            excluded_datums=self.config.excluded_datums,
        )

        return plotting
1635
+
1636
+ def create_infometrics(self):
1637
+ """
1638
+ Copy the infometrics and infographics templates to the appropriate location and modifies the metrics_config.toml files.
1639
+
1640
+ This method copies the templates from the 'infometrics' and 'infographics' folders to the 'static/templates' folder in the root directory.
1641
+ It then modifies the 'metrics_config.toml' and 'metrics_config_risk.toml' files by updating the 'aggregateBy' attribute with the names
1642
+ of the aggregations defined in the 'fiat' section of the 'site_attrs' attribute.
1643
+ """
1644
+ # TODO there should be generalized infometric queries with NSI or OSM, and with SVI or without. Then Based on the user input these should be chosen automatically
1645
+ templates_path = Path(__file__).parent.resolve().joinpath("templates")
1646
+
1647
+ # Create template folder
1648
+ path_im = self.root.joinpath("static", "templates", "infometrics")
1649
+ path_im.mkdir()
1650
+
1651
+ # Copy mandatory metric configs
1652
+ path_im_temp = templates_path.joinpath("infometrics")
1653
+ for file in path_im_temp.glob("*.toml"):
1654
+ shutil.copy2(file, path_im)
1655
+
1656
+ self._create_optional_infometrics(templates_path, path_im)
1657
+
1658
+ files = list(path_im.glob("*metrics_config*.toml"))
1659
+ # Update aggregation areas in metrics config
1660
+ for file in files:
1661
+ file = path_im.joinpath(file)
1662
+ with open(file, "rb") as f:
1663
+ attrs = tomli.load(f)
1664
+
1665
+ # add aggration levels
1666
+ if self._aggregation_areas is None:
1667
+ self._aggregation_areas = self.create_aggregation_areas()
1668
+ attrs["aggregateBy"] = [aggr.name for aggr in self._aggregation_areas]
1669
+
1670
+ # take out road metrics if needed
1671
+ if not self._has_roads:
1672
+ attrs["queries"] = [
1673
+ query
1674
+ for query in attrs["queries"]
1675
+ if "road" not in query["name"].lower()
1676
+ ]
1677
+
1678
+ # Replace Damage Unit
1679
+ # TODO do this in a better manner
1680
+ for i, query in enumerate(attrs["queries"]):
1681
+ if "$" in query["long_name"]:
1682
+ query["long_name"] = query["long_name"].replace(
1683
+ "$", self.read_damage_unit()
1684
+ )
1685
+
1686
+ # replace the SVI threshold if needed
1687
+ if self.config.svi:
1688
+ for i, query in enumerate(attrs["queries"]):
1689
+ query["filter"] = query["filter"].replace(
1690
+ "SVI_threshold", str(self.config.svi.threshold)
1691
+ )
1692
+
1693
+ with open(file, "wb") as f:
1694
+ tomli_w.dump(attrs, f)
1695
+
1696
    def _create_optional_infometrics(self, templates_path: Path, path_im: Path):
        """Copy the optional infographic metric templates into the database.

        No-op when infographics are disabled in the config. The template set
        depends on the unit system (US_NSI for imperial, OSM for metric) and
        on whether an SVI layer and/or roads are configured.

        Args:
            templates_path (Path): Root of the bundled templates folder.
            path_im (Path): Destination infometrics folder in the database.
        """
        # If infographics are going to be created in FA, get template metric configurations
        if not self.config.infographics:
            return

        # Check what type of infographics should be used
        if self.config.unit_system == UnitSystems.imperial:
            metrics_folder_name = "US_NSI"
            self.logger.info(
                "Default NSI infometrics and infographics will be created."
            )
        elif self.config.unit_system == UnitSystems.metric:
            metrics_folder_name = "OSM"
            self.logger.info(
                "Default OSM infometrics and infographics will be created."
            )
        else:
            raise ValueError(
                f"Unit system {self.config.unit_system} is not recognized. Please choose 'imperial' or 'metric'."
            )

        if self.config.svi is not None:
            svi_folder_name = "with_SVI"
        else:
            svi_folder_name = "without_SVI"

        # Copy metrics config for infographics
        path_0 = templates_path.joinpath(
            "infometrics", metrics_folder_name, svi_folder_name
        )
        for file in path_0.glob("*.toml"):
            shutil.copy2(file, path_im)

        # Copy additional risk config
        file = templates_path.joinpath(
            "infometrics",
            metrics_folder_name,
            "metrics_additional_risk_configs.toml",
        )
        shutil.copy2(file, path_im)

        # Copy infographics config
        path_ig_temp = templates_path.joinpath("infographics", metrics_folder_name)
        path_ig = self.root.joinpath("static", "templates", "infographics")
        path_ig.mkdir()
        files_ig = ["styles.css", "config_charts.toml"]

        # SVI-dependent charts are only copied when SVI data is present.
        if self.config.svi is not None:
            files_ig.append("config_risk_charts.toml")
            files_ig.append("config_people.toml")

        if self._has_roads:
            files_ig.append("config_roads.toml")

        for file in files_ig:
            shutil.copy2(path_ig_temp.joinpath(file), path_ig.joinpath(file))

        # Copy images
        path_0 = templates_path.joinpath("infographics", "images")
        path_1 = self.root.joinpath("static", "templates", "infographics", "images")
        shutil.copytree(path_0, path_1)
1757
+
1758
+ def add_static_files(self):
1759
+ """
1760
+ Copy static files from the 'templates' folder to the 'static' folder.
1761
+
1762
+ This method iterates over a list of folders and copies the contents of each folder from the 'templates' directory
1763
+ to the corresponding folder in the 'static' directory.
1764
+ """
1765
+ templates_path = Path(__file__).parent.resolve().joinpath("templates")
1766
+ folders = ["icons", "green_infra_table"]
1767
+ for folder in folders:
1768
+ path_0 = templates_path.joinpath(folder)
1769
+ path_1 = self.static_path / folder
1770
+ shutil.copytree(path_0, path_1)
1771
+
1772
+ def add_probabilistic_set(self):
1773
+ # Copy prob set if given
1774
+ if self.config.probabilistic_set:
1775
+ self.logger.info(
1776
+ f"Probabilistic event set imported from {self.config.probabilistic_set}"
1777
+ )
1778
+ prob_event_name = Path(self.config.probabilistic_set).name
1779
+ path_db = self.root.joinpath("input", "events", prob_event_name)
1780
+ shutil.copytree(self.config.probabilistic_set, path_db)
1781
+ self._probabilistic_set_name = prob_event_name
1782
+ else:
1783
+ self.logger.warning(
1784
+ "Probabilistic event set not provided. Risk scenarios cannot be run in FloodAdapt."
1785
+ )
1786
+ self._probabilistic_set_name = None
1787
+
1788
+ ### HELPER FUNCTIONS ###
1789
+ def make_folder_structure(self) -> None:
1790
+ """
1791
+ Create the folder structure for the database.
1792
+
1793
+ This method creates the necessary folder structure for the FloodAdapt database, including
1794
+ the input and static folders. It also creates subfolders within the input and
1795
+ static folders based on a predefined list of names.
1796
+ """
1797
+ self.logger.info("Preparing the database folder structure.")
1798
+ inputs = [
1799
+ "events",
1800
+ "projections",
1801
+ "measures",
1802
+ "strategies",
1803
+ "scenarios",
1804
+ "benefits",
1805
+ ]
1806
+ for name in inputs:
1807
+ (self.root / "input" / name).mkdir(parents=True, exist_ok=True)
1808
+
1809
+ # Prepare static folder structure
1810
+ folders = ["templates", "config"]
1811
+ for name in folders:
1812
+ (self.static_path / name).mkdir(parents=True, exist_ok=True)
1813
+
1814
+ def _check_exists_and_absolute(self, path: str) -> Path:
1815
+ """Check if the path is absolute or relative and return a Path object. Raises an error if the path is not valid."""
1816
+ if not Path(path).exists():
1817
+ raise FileNotFoundError(f"Path {path} does not exist.")
1818
+
1819
+ if Path(path).is_absolute():
1820
+ return Path(path)
1821
+ else:
1822
+ raise ValueError(f"Path {path} is not absolute.")
1823
+
1824
    def _join_building_footprints(
        self, building_footprints: gpd.GeoDataFrame, field_name: str
    ) -> Path:
        """
        Join building footprints with existing building data and updates the exposure CSV.

        Args:
            building_footprints (GeoDataFrame): GeoDataFrame containing the building footprints to be joined.
            field_name (str): The field name to use for the spatial join.

        Returns
        -------
        This method performs the following steps:
        1. Reads the exposure CSV file.
        2. Performs a spatial join between the buildings and building footprints.
        3. Ensures that in case of multiple values, the first is kept.
        4. Creates a folder to store the building footprints.
        5. Saves the spatial file for future use.
        6. Merges the joined buildings with the exposure CSV and saves it.
        7. Updates the site attributes with the relative path to the saved building footprints.
        8. Logs the location where the building footprints are saved.
        """
        buildings = self.fiat_model.exposure.exposure_geoms[
            self._get_fiat_building_index()
        ]
        exposure_csv = self.fiat_model.exposure.exposure_db
        # A pre-existing BF_FID column would collide with the join result below.
        if "BF_FID" in exposure_csv.columns:
            self.logger.warning(
                "Column 'BF_FID' already exists in the exposure columns and will be replaced."
            )
            del exposure_csv["BF_FID"]
        # filter=True drops footprints that do not intersect any building.
        buildings_joined, building_footprints = self.spatial_join(
            buildings,
            building_footprints,
            field_name,
            rename="BF_FID",
            filter=True,
        )
        # Make sure in case of multiple values that the first is kept
        buildings_joined = (
            buildings_joined.groupby(_FIAT_COLUMNS.object_id)
            .first()
            .sort_values(by=[_FIAT_COLUMNS.object_id])
        )
        # Create folder
        bf_folder = Path(self.fiat_model.root) / "exposure" / "building_footprints"
        bf_folder.mkdir(parents=True, exist_ok=True)

        # Save the spatial file for future use
        geo_path = bf_folder / "building_footprints.gpkg"
        building_footprints.to_file(geo_path)

        # Save to exposure csv (left merge keeps all exposure rows, joined or not)
        exposure_csv = exposure_csv.merge(
            buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
        )

        # Set model building footprints
        self.fiat_model.building_footprint = building_footprints
        self.fiat_model.exposure.exposure_db = exposure_csv

        # Save site attributes
        # NOTE(review): assumes the FIAT model root lives under static_path,
        # otherwise relative_to raises — confirm with the calling workflow.
        buildings_path = geo_path.relative_to(self.static_path)
        self.logger.info(
            f"Building footprints saved at {(self.static_path / buildings_path).resolve().as_posix()}"
        )

        return buildings_path
1892
+
1893
    def _clip_hazard_extend(self, clip_footprints=True):
        """
        Clip the exposure data to the bounding box of the hazard data.

        This method clips the exposure data to the bounding box of the hazard data. It creates a GeoDataFrame
        from the hazard polygons, and then uses the `gpd.clip` function to clip the exposure geometries to the
        bounding box of the hazard polygons. If the exposure data contains roads, it is split into two separate
        GeoDataFrames: one for buildings and one for roads. The clipped exposure data is then saved back to the
        `exposure_db` attribute of the `FiatModel` object.

        Parameters
        ----------
        clip_footprints : bool, optional
            When True (default), also filter the building footprints to those
            referenced by the remaining buildings.

        Returns
        -------
        None
        """
        gdf = self.fiat_model.exposure.get_full_gdf(
            self.fiat_model.exposure.exposure_db
        )
        crs = gdf.crs
        # Re-project the SFINCS region to the exposure CRS before clipping.
        sfincs_extend = self.sfincs_overland_model.region
        sfincs_extend = sfincs_extend.to_crs(crs)

        # Clip the fiat region
        clipped_region = self.fiat_model.region.to_crs(crs).clip(sfincs_extend)
        self.fiat_model.geoms["region"] = clipped_region

        # Clip the exposure geometries
        # Filter buildings and roads
        road_inds = gdf[_FIAT_COLUMNS.primary_object_type].str.contains("road")
        # Ensure road_inds is a boolean Series
        if not road_inds.dtype == bool:
            road_inds = road_inds.astype(bool)
        # Clip buildings: keep only objects fully within the clipped region.
        gdf_buildings = gdf[~road_inds]
        gdf_buildings = self._clip_gdf(
            gdf_buildings, clipped_region, predicate="within"
        ).reset_index(drop=True)

        if road_inds.any():
            # Clip roads
            gdf_roads = gdf[road_inds]
            gdf_roads = self._clip_gdf(
                gdf_roads, clipped_region, predicate="within"
            ).reset_index(drop=True)

            idx_buildings = self.fiat_model.exposure.geom_names.index(
                self.config.fiat_buildings_name
            )
            idx_roads = self.fiat_model.exposure.geom_names.index(
                self.config.fiat_roads_name
            )
            self.fiat_model.exposure.exposure_geoms[idx_buildings] = gdf_buildings[
                [_FIAT_COLUMNS.object_id, "geometry"]
            ]
            self.fiat_model.exposure.exposure_geoms[idx_roads] = gdf_roads[
                [_FIAT_COLUMNS.object_id, "geometry"]
            ]
            gdf = pd.concat([gdf_buildings, gdf_roads])
        else:
            gdf = gdf_buildings
            # NOTE(review): index 0 is hard-coded here while the roads branch
            # looks it up by name — presumably buildings are the only geom when
            # there are no roads; confirm against the FIAT model setup.
            self.fiat_model.exposure.exposure_geoms[0] = gdf[
                [_FIAT_COLUMNS.object_id, "geometry"]
            ]

        # Save exposure dataframe (geometry column lives in exposure_geoms, not the csv)
        del gdf["geometry"]
        self.fiat_model.exposure.exposure_db = gdf.reset_index(drop=True)

        # Clip the building footprints
        fieldname = "BF_FID"
        if clip_footprints and not self.fiat_model.building_footprint.empty:
            # Get buildings after filtering and their footprint id
            self.fiat_model.building_footprint = self.fiat_model.building_footprint[
                self.fiat_model.building_footprint[fieldname].isin(
                    gdf_buildings[fieldname]
                )
            ].reset_index(drop=True)
1973
+
1974
+ @staticmethod
1975
+ def _clip_gdf(
1976
+ gdf1: gpd.GeoDataFrame, gdf2: gpd.GeoDataFrame, predicate: str = "within"
1977
+ ):
1978
+ gdf_new = gpd.sjoin(gdf1, gdf2, how="inner", predicate=predicate)
1979
+ gdf_new = gdf_new.drop(
1980
+ columns=[
1981
+ col
1982
+ for col in gdf_new.columns
1983
+ if col.endswith("_right") or (col in gdf2.columns and col != "geometry")
1984
+ ]
1985
+ )
1986
+
1987
+ return gdf_new
1988
+
1989
    @staticmethod
    def spatial_join(
        objects: gpd.GeoDataFrame,
        layer: Union[str, gpd.GeoDataFrame],
        field_name: str,
        rename: Optional[str] = None,
        filter: Optional[bool] = False,
    ) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]:
        """
        Perform a spatial join between two GeoDataFrames.

        Args:
            objects (gpd.GeoDataFrame): The GeoDataFrame representing the objects.
            layer (Union[str, gpd.GeoDataFrame]): The GeoDataFrame or file path of the layer to join with.
            field_name (str): The name of the field to use for the join.
            rename (Optional[str], optional): The new name to assign to the joined field. Defaults to None.
            filter (Optional[bool], optional): When True, drop layer features that
                did not intersect any object. Defaults to False.

        Returns
        -------
        tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]: A tuple containing the joined GeoDataFrame and the layer GeoDataFrame.

        """
        # Read in layer and keep only column of interest
        if not isinstance(layer, gpd.GeoDataFrame):
            layer = gpd.read_file(layer)
        layer = layer[[field_name, "geometry"]]
        layer = layer.to_crs(objects.crs)
        # Avoid a column-name collision with the objects GeoDataFrame.
        if field_name in objects.columns:
            layer = layer.rename(columns={field_name: "layer_field"})
            field_name = "layer_field"
        # Spatial join of the layers
        objects_joined = objects.sjoin(layer, how="left", predicate="intersects")

        # Keep only the first intersection for each object
        objects_joined = (
            objects_joined.groupby(_FIAT_COLUMNS.object_id).first().reset_index()
        )

        # if needed filter out unused objects in the layer
        if filter:
            # index_right holds the positional index of the matched layer feature.
            layer_inds = objects_joined["index_right"].dropna().unique()
            layer = layer.iloc[np.sort(layer_inds)].reset_index(drop=True)
        objects_joined = objects_joined[[_FIAT_COLUMNS.object_id, field_name]]
        # rename field if provided
        if rename:
            objects_joined = objects_joined.rename(columns={field_name: rename})
            layer = layer.rename(columns={field_name: rename})
        return objects_joined, layer
2037
+
2038
+ def _get_fiat_building_index(self) -> int:
2039
+ return self.fiat_model.exposure.geom_names.index(
2040
+ self.config.fiat_buildings_name
2041
+ )
2042
+
2043
+ def _get_fiat_road_index(self) -> int:
2044
+ return self.fiat_model.exposure.geom_names.index(self.config.fiat_roads_name)
2045
+
2046
    def _get_closest_station(self):
        """Find the tide-gauge station closest to the SFINCS overland domain.

        Returns the station id, or ``None`` when the closest station is
        farther away than the configured ``tide_gauge.max_distance``.
        """
        # Get available stations from source
        obs_data = obs.source(self.config.tide_gauge.source)
        obs_data.get_active_stations()
        obs_stations = obs_data.gdf()
        # Calculate distance from SFINCS region to all available stations in degrees
        obs_stations["distance"] = obs_stations.distance(
            self.sfincs_overland_model.region.to_crs(4326).geometry.item()
        )
        # Get the closest station and its distance in meters
        closest_station = obs_stations[
            obs_stations["distance"] == obs_stations["distance"].min()
        ]
        # Re-project to the overland model CRS to express the distance in meters
        # (assumes that CRS has metric units — TODO confirm).
        distance = round(
            closest_station.to_crs(self.sfincs_overland_model.region.crs)
            .distance(self.sfincs_overland_model.region.geometry.item())
            .item(),
            0,
        )

        distance = us.UnitfulLength(value=distance, units=us.UnitTypesLength.meters)
        self.logger.info(
            f"The closest tide gauge from {self.config.tide_gauge.source} is located {distance.transform(self.unit_system.default_length_units)} from the SFINCS domain"
        )
        # Check if user provided max distance
        # TODO make sure units are explicit for max_distance
        if self.config.tide_gauge.max_distance is not None:
            # Convert to the user's units before comparing against the limit.
            units_new = self.config.tide_gauge.max_distance.units
            distance_new = us.UnitfulLength(
                value=distance.convert(units_new), units=units_new
            )
            if distance_new.value > self.config.tide_gauge.max_distance.value:
                self.logger.warning(
                    f"This distance is larger than the 'max_distance' value of {self.config.tide_gauge.max_distance.value} {units_new} provided in the config file. The station cannot be used."
                )
                return None

        # get station id
        station_id = closest_station["id"].item()

        return station_id
2087
+
2088
    def _get_station_metadata(self, station_id: str, ref: str = "MLLW"):
        """
        Retrieve the metadata of a tide gauge station, expressed relative to ``ref``.

        Args:
            station_id (str): Identifier of the station at the configured source.
            ref (str, optional): The reference level for water level measurements. Defaults to "MLLW".

        Returns
        -------
        dict: A dictionary containing the metadata of the closest tide gauge station.
            The dictionary includes the following keys:
            - "id": The station ID.
            - "name": The station name.
            - "datum": The difference between the station's datum and the reference level.
            - "datum_name": The name of the datum used by the station.
            - "msl": The difference between the Mean Sea Level (MSL) and the reference level.
            - "mllw"/"mhhw": MLLW and MHHW relative to the reference level.
            - "reference": The reference level used for water level measurements.
            - "units": Units reported by the station for its datums.
            - "lon": The longitude of the station.
            - "lat": The latitude of the station.

        Raises
        ------
        ValueError
            If ``ref`` (or MSL/MLLW/MHHW) is not among the station's datums
            (via ``list.index``).
        """
        # Get available stations from source
        obs_data = obs.source(self.config.tide_gauge.source)
        # read station metadata
        station_metadata = obs_data.get_meta_data(station_id)
        # TODO check if all stations can be used? Tidal attr?
        # Get water levels by using the ref provided
        datum_name = station_metadata["datums"]["OrthometricDatum"]
        datums = station_metadata["datums"]["datums"]
        names = [datum["name"] for datum in datums]

        # Offset of the requested reference; all other datums are shifted by it.
        ref_value = datums[names.index(ref)]["value"]

        meta = {
            "id": station_id,
            "name": station_metadata["name"],
            "datum": round(datums[names.index(datum_name)]["value"] - ref_value, 3),
            "datum_name": datum_name,
            "msl": round(datums[names.index("MSL")]["value"] - ref_value, 3),
            "mllw": round(datums[names.index("MLLW")]["value"] - ref_value, 3),
            "mhhw": round(datums[names.index("MHHW")]["value"] - ref_value, 3),
            "reference": ref,
            "units": station_metadata["datums"]["units"],
            "lon": station_metadata["lng"],
            "lat": station_metadata["lat"],
        }

        self.logger.info(
            f"The tide gauge station '{station_metadata['name']}' from {self.config.tide_gauge.source} will be used to download nearshore historical water level time-series."
        )

        self.logger.info(
            f"The station metadata will be used to fill in the water_level attribute in the site.toml. The reference level will be {ref}."
        )

        return meta
2143
+
2144
+ def _get_bin_colors(self):
2145
+ """
2146
+ Retrieve the bin colors from the bin_colors.toml file.
2147
+
2148
+ Returns
2149
+ -------
2150
+ dict: A dictionary containing the bin colors.
2151
+ """
2152
+ templates_path = Path(__file__).parent.resolve().joinpath("templates")
2153
+ with open(
2154
+ templates_path.joinpath("mapbox_layers", "bin_colors.toml"), "rb"
2155
+ ) as f:
2156
+ bin_colors = tomli.load(f)
2157
+ return bin_colors
2158
+
2159
+
2160
if __name__ == "__main__":
    # Simple interactive CLI: keep prompting for config files; on failure,
    # report the error and let the user retry or quit.
    while True:
        config_path = Path(
            input(
                "Please provide the path to the database creation configuration toml: \n"
            )
        )
        try:
            config = ConfigModel.read(config_path)
            dbs = DatabaseBuilder(config)
            dbs.build()
        except Exception as e:
            print(e)
            # Renamed from 'quit' to avoid shadowing the builtin quit().
            answer = input("Do you want to quit? (y/n)")
            if answer == "y":
                exit()