flood-adapt 0.3.9__py3-none-any.whl → 0.3.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flood_adapt/__init__.py +26 -22
- flood_adapt/adapter/__init__.py +9 -9
- flood_adapt/adapter/fiat_adapter.py +1541 -1541
- flood_adapt/adapter/interface/hazard_adapter.py +70 -70
- flood_adapt/adapter/interface/impact_adapter.py +36 -36
- flood_adapt/adapter/interface/model_adapter.py +89 -89
- flood_adapt/adapter/interface/offshore.py +19 -19
- flood_adapt/adapter/sfincs_adapter.py +1848 -1848
- flood_adapt/adapter/sfincs_offshore.py +193 -193
- flood_adapt/config/config.py +248 -248
- flood_adapt/config/fiat.py +219 -219
- flood_adapt/config/gui.py +331 -331
- flood_adapt/config/sfincs.py +481 -336
- flood_adapt/config/site.py +129 -129
- flood_adapt/database_builder/database_builder.py +2210 -2210
- flood_adapt/database_builder/templates/default_units/imperial.toml +9 -9
- flood_adapt/database_builder/templates/default_units/metric.toml +9 -9
- flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -10
- flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -90
- flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -57
- flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -121
- flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -65
- flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -45
- flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -126
- flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -60
- flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -121
- flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -65
- flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -45
- flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -4
- flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -143
- flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -153
- flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -127
- flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -57
- flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -4
- flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -191
- flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -153
- flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -178
- flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -57
- flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -9
- flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -65
- flood_adapt/database_builder/templates/output_layers/bin_colors.toml +5 -5
- flood_adapt/database_builder.py +16 -16
- flood_adapt/dbs_classes/__init__.py +21 -21
- flood_adapt/dbs_classes/database.py +495 -684
- flood_adapt/dbs_classes/dbs_benefit.py +77 -76
- flood_adapt/dbs_classes/dbs_event.py +61 -59
- flood_adapt/dbs_classes/dbs_measure.py +112 -111
- flood_adapt/dbs_classes/dbs_projection.py +34 -34
- flood_adapt/dbs_classes/dbs_scenario.py +137 -137
- flood_adapt/dbs_classes/dbs_static.py +274 -273
- flood_adapt/dbs_classes/dbs_strategy.py +130 -129
- flood_adapt/dbs_classes/dbs_template.py +279 -278
- flood_adapt/dbs_classes/interface/database.py +107 -139
- flood_adapt/dbs_classes/interface/element.py +121 -121
- flood_adapt/dbs_classes/interface/static.py +47 -47
- flood_adapt/flood_adapt.py +1207 -1178
- flood_adapt/misc/database_user.py +16 -16
- flood_adapt/misc/exceptions.py +22 -0
- flood_adapt/misc/log.py +183 -183
- flood_adapt/misc/path_builder.py +54 -54
- flood_adapt/misc/utils.py +185 -185
- flood_adapt/objects/__init__.py +82 -82
- flood_adapt/objects/benefits/benefits.py +61 -61
- flood_adapt/objects/events/event_factory.py +135 -135
- flood_adapt/objects/events/event_set.py +88 -84
- flood_adapt/objects/events/events.py +234 -234
- flood_adapt/objects/events/historical.py +58 -58
- flood_adapt/objects/events/hurricane.py +68 -67
- flood_adapt/objects/events/synthetic.py +46 -50
- flood_adapt/objects/forcing/__init__.py +92 -92
- flood_adapt/objects/forcing/csv.py +68 -68
- flood_adapt/objects/forcing/discharge.py +66 -66
- flood_adapt/objects/forcing/forcing.py +150 -150
- flood_adapt/objects/forcing/forcing_factory.py +182 -182
- flood_adapt/objects/forcing/meteo_handler.py +93 -93
- flood_adapt/objects/forcing/netcdf.py +40 -40
- flood_adapt/objects/forcing/plotting.py +453 -429
- flood_adapt/objects/forcing/rainfall.py +98 -98
- flood_adapt/objects/forcing/tide_gauge.py +191 -191
- flood_adapt/objects/forcing/time_frame.py +90 -90
- flood_adapt/objects/forcing/timeseries.py +564 -564
- flood_adapt/objects/forcing/unit_system.py +580 -580
- flood_adapt/objects/forcing/waterlevels.py +108 -108
- flood_adapt/objects/forcing/wind.py +124 -124
- flood_adapt/objects/measures/measure_factory.py +92 -92
- flood_adapt/objects/measures/measures.py +529 -529
- flood_adapt/objects/object_model.py +74 -68
- flood_adapt/objects/projections/projections.py +103 -103
- flood_adapt/objects/scenarios/scenarios.py +22 -22
- flood_adapt/objects/strategies/strategies.py +89 -89
- flood_adapt/workflows/benefit_runner.py +579 -554
- flood_adapt/workflows/floodmap.py +85 -85
- flood_adapt/workflows/impacts_integrator.py +85 -85
- flood_adapt/workflows/scenario_runner.py +70 -70
- {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.10.dist-info}/LICENSE +674 -674
- {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.10.dist-info}/METADATA +866 -865
- flood_adapt-0.3.10.dist-info/RECORD +140 -0
- flood_adapt-0.3.9.dist-info/RECORD +0 -139
- {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.10.dist-info}/WHEEL +0 -0
- {flood_adapt-0.3.9.dist-info → flood_adapt-0.3.10.dist-info}/top_level.txt +0 -0
|
@@ -1,2210 +1,2210 @@
|
|
|
1
|
-
import datetime
|
|
2
|
-
import math
|
|
3
|
-
import os
|
|
4
|
-
import shutil
|
|
5
|
-
import warnings
|
|
6
|
-
from enum import Enum
|
|
7
|
-
from pathlib import Path
|
|
8
|
-
from typing import Optional, Union
|
|
9
|
-
from urllib.request import urlretrieve
|
|
10
|
-
|
|
11
|
-
import cht_observations.observation_stations as obs
|
|
12
|
-
import geopandas as gpd
|
|
13
|
-
import numpy as np
|
|
14
|
-
import pandas as pd
|
|
15
|
-
import rioxarray as rxr
|
|
16
|
-
import tomli
|
|
17
|
-
import tomli_w
|
|
18
|
-
import xarray as xr
|
|
19
|
-
from hydromt_fiat import FiatModel as HydromtFiatModel
|
|
20
|
-
from hydromt_fiat.data_apis.open_street_maps import get_buildings_from_osm
|
|
21
|
-
from hydromt_sfincs import SfincsModel as HydromtSfincsModel
|
|
22
|
-
from pydantic import BaseModel, Field
|
|
23
|
-
from shapely import MultiLineString, MultiPolygon, Polygon
|
|
24
|
-
|
|
25
|
-
from flood_adapt.adapter.fiat_adapter import _FIAT_COLUMNS
|
|
26
|
-
from flood_adapt.config.fiat import (
|
|
27
|
-
AggregationModel,
|
|
28
|
-
BenefitsModel,
|
|
29
|
-
BFEModel,
|
|
30
|
-
EquityModel,
|
|
31
|
-
FiatConfigModel,
|
|
32
|
-
FiatModel,
|
|
33
|
-
RiskModel,
|
|
34
|
-
SVIModel,
|
|
35
|
-
)
|
|
36
|
-
from flood_adapt.config.gui import (
|
|
37
|
-
AggregationDmgLayer,
|
|
38
|
-
BenefitsLayer,
|
|
39
|
-
FloodMapLayer,
|
|
40
|
-
FootprintsDmgLayer,
|
|
41
|
-
GuiModel,
|
|
42
|
-
GuiUnitModel,
|
|
43
|
-
OutputLayers,
|
|
44
|
-
PlottingModel,
|
|
45
|
-
SyntheticTideModel,
|
|
46
|
-
VisualizationLayers,
|
|
47
|
-
)
|
|
48
|
-
from flood_adapt.config.sfincs import (
|
|
49
|
-
Cstype,
|
|
50
|
-
CycloneTrackDatabaseModel,
|
|
51
|
-
DatumModel,
|
|
52
|
-
DemModel,
|
|
53
|
-
FloodmapType,
|
|
54
|
-
FloodModel,
|
|
55
|
-
ObsPointModel,
|
|
56
|
-
RiverModel,
|
|
57
|
-
SCSModel,
|
|
58
|
-
SfincsConfigModel,
|
|
59
|
-
SfincsModel,
|
|
60
|
-
SlrScenariosModel,
|
|
61
|
-
WaterlevelReferenceModel,
|
|
62
|
-
)
|
|
63
|
-
from flood_adapt.config.site import (
|
|
64
|
-
Site,
|
|
65
|
-
StandardObjectModel,
|
|
66
|
-
)
|
|
67
|
-
from flood_adapt.dbs_classes.database import Database
|
|
68
|
-
from flood_adapt.misc.log import FloodAdaptLogging
|
|
69
|
-
from flood_adapt.misc.utils import modified_environ
|
|
70
|
-
from flood_adapt.objects.events.event_set import EventSet
|
|
71
|
-
from flood_adapt.objects.forcing import unit_system as us
|
|
72
|
-
from flood_adapt.objects.forcing.tide_gauge import (
|
|
73
|
-
TideGauge,
|
|
74
|
-
TideGaugeSource,
|
|
75
|
-
)
|
|
76
|
-
from flood_adapt.objects.projections.projections import (
|
|
77
|
-
PhysicalProjection,
|
|
78
|
-
Projection,
|
|
79
|
-
SocioEconomicChange,
|
|
80
|
-
)
|
|
81
|
-
from flood_adapt.objects.strategies.strategies import Strategy
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
def path_check(str_path: str, config_path: Optional[Path] = None) -> str:
|
|
85
|
-
"""
|
|
86
|
-
Check if the given path is absolute and return the absolute path.
|
|
87
|
-
|
|
88
|
-
Args:
|
|
89
|
-
        str_path (str): The path to be checked.
|
|
90
|
-
|
|
91
|
-
Returns
|
|
92
|
-
-------
|
|
93
|
-
str: The absolute path.
|
|
94
|
-
|
|
95
|
-
Raises
|
|
96
|
-
------
|
|
97
|
-
ValueError: If the path is not absolute and no config_path is provided.
|
|
98
|
-
"""
|
|
99
|
-
path = Path(str_path)
|
|
100
|
-
if not path.is_absolute():
|
|
101
|
-
if config_path is not None:
|
|
102
|
-
path = Path(config_path).parent.joinpath(path).resolve()
|
|
103
|
-
else:
|
|
104
|
-
raise ValueError(f"Value '{path}' should be an absolute path.")
|
|
105
|
-
return path.as_posix()
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
class SpatialJoinModel(BaseModel):
|
|
109
|
-
"""
|
|
110
|
-
Represents a spatial join model.
|
|
111
|
-
|
|
112
|
-
Attributes
|
|
113
|
-
----------
|
|
114
|
-
name (Optional[str]): The name of the model (optional).
|
|
115
|
-
file (str): The file associated with the model.
|
|
116
|
-
field_name (str): The field name used for the spatial join.
|
|
117
|
-
"""
|
|
118
|
-
|
|
119
|
-
name: Optional[str] = None
|
|
120
|
-
file: str
|
|
121
|
-
field_name: str
|
|
122
|
-
|
|
123
|
-
|
|
124
|
-
class UnitSystems(str, Enum):
|
|
125
|
-
"""The `UnitSystems` class is an enumeration that represents the accepted values for the `metric_system` field.
|
|
126
|
-
|
|
127
|
-
It provides two options: `imperial` and `metric`.
|
|
128
|
-
|
|
129
|
-
Attributes
|
|
130
|
-
----------
|
|
131
|
-
imperial (str): Represents the imperial unit system.
|
|
132
|
-
metric (str): Represents the metric unit system.
|
|
133
|
-
"""
|
|
134
|
-
|
|
135
|
-
imperial = "imperial"
|
|
136
|
-
metric = "metric"
|
|
137
|
-
|
|
138
|
-
|
|
139
|
-
class FootprintsOptions(str, Enum):
|
|
140
|
-
OSM = "OSM"
|
|
141
|
-
|
|
142
|
-
|
|
143
|
-
class Basins(str, Enum):
|
|
144
|
-
"""
|
|
145
|
-
Enumeration class representing different basins.
|
|
146
|
-
|
|
147
|
-
Each basin is represented by a string value.
|
|
148
|
-
|
|
149
|
-
Attributes
|
|
150
|
-
----------
|
|
151
|
-
NA (str): North Atlantic
|
|
152
|
-
SA (str): South Atlantic
|
|
153
|
-
EP (str): Eastern North Pacific (which includes the Central Pacific region)
|
|
154
|
-
WP (str): Western North Pacific
|
|
155
|
-
SP (str): South Pacific
|
|
156
|
-
SI (str): South Indian
|
|
157
|
-
NI (str): North Indian
|
|
158
|
-
"""
|
|
159
|
-
|
|
160
|
-
NA = "NA"
|
|
161
|
-
SA = "SA"
|
|
162
|
-
EP = "EP"
|
|
163
|
-
WP = "WP"
|
|
164
|
-
SP = "SP"
|
|
165
|
-
SI = "SI"
|
|
166
|
-
NI = "NI"
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
class GuiConfigModel(BaseModel):
|
|
170
|
-
"""
|
|
171
|
-
Represents a GUI model for for FloodAdapt.
|
|
172
|
-
|
|
173
|
-
Attributes
|
|
174
|
-
----------
|
|
175
|
-
max_flood_depth (float): The last visualization bin will be ">value".
|
|
176
|
-
max_aggr_dmg (float): The last visualization bin will be ">value".
|
|
177
|
-
max_footprint_dmg (float): The last visualization bin will be ">value".
|
|
178
|
-
max_benefits (float): The last visualization bin will be ">value".
|
|
179
|
-
"""
|
|
180
|
-
|
|
181
|
-
max_flood_depth: float
|
|
182
|
-
max_aggr_dmg: float
|
|
183
|
-
max_footprint_dmg: float
|
|
184
|
-
max_benefits: float
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
class SviConfigModel(SpatialJoinModel):
|
|
188
|
-
"""
|
|
189
|
-
Represents a model for the Social Vulnerability Index (SVI).
|
|
190
|
-
|
|
191
|
-
Attributes
|
|
192
|
-
----------
|
|
193
|
-
threshold (float): The threshold value for the SVI model to specify vulnerability.
|
|
194
|
-
"""
|
|
195
|
-
|
|
196
|
-
threshold: float
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
class Point(BaseModel):
|
|
200
|
-
lat: float
|
|
201
|
-
lon: float
|
|
202
|
-
|
|
203
|
-
|
|
204
|
-
class TideGaugeConfigModel(BaseModel):
|
|
205
|
-
"""
|
|
206
|
-
Represents a tide gauge model.
|
|
207
|
-
|
|
208
|
-
Attributes
|
|
209
|
-
----------
|
|
210
|
-
source (str): The source of the tide gauge data.
|
|
211
|
-
file (Optional[str]): The file associated with the tide gauge data (default: None).
|
|
212
|
-
max_distance (Optional[float]): The maximum distance (default: None).
|
|
213
|
-
ref (str): The reference name. Should be defined in the water level references.
|
|
214
|
-
"""
|
|
215
|
-
|
|
216
|
-
source: TideGaugeSource
|
|
217
|
-
description: str = ""
|
|
218
|
-
ref: Optional[str] = None
|
|
219
|
-
id: Optional[int] = None
|
|
220
|
-
lon: Optional[float] = None
|
|
221
|
-
lat: Optional[float] = None
|
|
222
|
-
file: Optional[str] = None
|
|
223
|
-
max_distance: Optional[us.UnitfulLength] = None
|
|
224
|
-
|
|
225
|
-
|
|
226
|
-
class SviModel(SpatialJoinModel):
|
|
227
|
-
"""
|
|
228
|
-
Represents a model for the Social Vulnerability Index (SVI).
|
|
229
|
-
|
|
230
|
-
Attributes
|
|
231
|
-
----------
|
|
232
|
-
threshold (float): The threshold value for the SVI model to specify vulnerability.
|
|
233
|
-
"""
|
|
234
|
-
|
|
235
|
-
threshold: float
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
class ConfigModel(BaseModel):
|
|
239
|
-
"""
|
|
240
|
-
Represents the configuration model for FloodAdapt.
|
|
241
|
-
|
|
242
|
-
Attributes
|
|
243
|
-
----------
|
|
244
|
-
name : str
|
|
245
|
-
The name of the site.
|
|
246
|
-
description : Optional[str], default ""
|
|
247
|
-
The description of the site.
|
|
248
|
-
database_path : Optional[str], default None
|
|
249
|
-
The path to the database where all the sites are located.
|
|
250
|
-
sfincs : str
|
|
251
|
-
The SFINCS model path.
|
|
252
|
-
sfincs_offshore : Optional[str], default None
|
|
253
|
-
The offshore SFINCS model path.
|
|
254
|
-
fiat : str
|
|
255
|
-
The FIAT model path.
|
|
256
|
-
unit_system : UnitSystems
|
|
257
|
-
The unit system.
|
|
258
|
-
gui : GuiModel
|
|
259
|
-
The GUI model representing scaling values for the layers.
|
|
260
|
-
building_footprints : Optional[SpatialJoinModel], default None
|
|
261
|
-
The building footprints model.
|
|
262
|
-
slr_scenarios : Optional[SlrModelDef], default SlrModelDef()
|
|
263
|
-
The sea level rise model.
|
|
264
|
-
tide_gauge : Optional[TideGaugeConfigModel], default None
|
|
265
|
-
The tide gauge model.
|
|
266
|
-
bfe : Optional[SpatialJoinModel], default None
|
|
267
|
-
The BFE model.
|
|
268
|
-
svi : Optional[SviModel], default None
|
|
269
|
-
The SVI model.
|
|
270
|
-
road_width : Optional[float], default 2
|
|
271
|
-
The road width in meters.
|
|
272
|
-
cyclones : Optional[bool], default True
|
|
273
|
-
Indicates if cyclones are enabled.
|
|
274
|
-
cyclone_basin : Optional[Basins], default None
|
|
275
|
-
The cyclone basin.
|
|
276
|
-
obs_point : Optional[list[ObsPointModel]], default None
|
|
277
|
-
The list of observation point models.
|
|
278
|
-
probabilistic_set : Optional[str], default None
|
|
279
|
-
The probabilistic set path.
|
|
280
|
-
infographics : Optional[bool], default True
|
|
281
|
-
Indicates if infographics are enabled.
|
|
282
|
-
"""
|
|
283
|
-
|
|
284
|
-
# General
|
|
285
|
-
name: str = Field(..., min_length=1, pattern='^[^<>:"/\\\\|?* ]*$')
|
|
286
|
-
description: Optional[str] = None
|
|
287
|
-
database_path: Optional[str] = None
|
|
288
|
-
unit_system: UnitSystems
|
|
289
|
-
gui: GuiConfigModel
|
|
290
|
-
infographics: Optional[bool] = True
|
|
291
|
-
|
|
292
|
-
# FIAT
|
|
293
|
-
fiat: str
|
|
294
|
-
aggregation_areas: Optional[list[SpatialJoinModel]] = None
|
|
295
|
-
building_footprints: Optional[SpatialJoinModel | FootprintsOptions] = (
|
|
296
|
-
FootprintsOptions.OSM
|
|
297
|
-
)
|
|
298
|
-
fiat_buildings_name: Optional[str] = "buildings"
|
|
299
|
-
fiat_roads_name: Optional[str] = "roads"
|
|
300
|
-
bfe: Optional[SpatialJoinModel] = None
|
|
301
|
-
svi: Optional[SviConfigModel] = None
|
|
302
|
-
road_width: Optional[float] = 5
|
|
303
|
-
return_periods: list[int] = Field(default_factory=list)
|
|
304
|
-
|
|
305
|
-
# SFINCS
|
|
306
|
-
references: WaterlevelReferenceModel = WaterlevelReferenceModel(
|
|
307
|
-
reference="MSL",
|
|
308
|
-
datums=[
|
|
309
|
-
DatumModel(
|
|
310
|
-
name="MSL",
|
|
311
|
-
height=us.UnitfulLength(value=0.0, units=us.UnitTypesLength.meters),
|
|
312
|
-
),
|
|
313
|
-
],
|
|
314
|
-
)
|
|
315
|
-
|
|
316
|
-
sfincs_overland: FloodModel
|
|
317
|
-
sfincs_offshore: Optional[FloodModel] = None
|
|
318
|
-
dem: Optional[DemModel] = None
|
|
319
|
-
|
|
320
|
-
excluded_datums: list[str] = Field(default_factory=list)
|
|
321
|
-
|
|
322
|
-
slr_scenarios: Optional[SlrScenariosModel] = None
|
|
323
|
-
scs: Optional[SCSModel] = None
|
|
324
|
-
tide_gauge: Optional[TideGaugeConfigModel] = None
|
|
325
|
-
cyclones: Optional[bool] = True
|
|
326
|
-
cyclone_basin: Optional[Basins] = None
|
|
327
|
-
obs_point: Optional[list[ObsPointModel]] = None
|
|
328
|
-
probabilistic_set: Optional[str] = None
|
|
329
|
-
|
|
330
|
-
@staticmethod
|
|
331
|
-
def read(toml_path: Path) -> "ConfigModel":
|
|
332
|
-
"""
|
|
333
|
-
Read a configuration file and returns the validated attributes.
|
|
334
|
-
|
|
335
|
-
Args:
|
|
336
|
-
            toml_path (Path): The path to the configuration file.
|
|
337
|
-
|
|
338
|
-
Returns
|
|
339
|
-
-------
|
|
340
|
-
ConfigModel: The validated attributes from the configuration file.
|
|
341
|
-
"""
|
|
342
|
-
with open(toml_path, mode="rb") as fp:
|
|
343
|
-
toml = tomli.load(fp)
|
|
344
|
-
config = ConfigModel.model_validate(toml)
|
|
345
|
-
|
|
346
|
-
# check if database path is provided and use config_file path if not
|
|
347
|
-
if config.database_path is None:
|
|
348
|
-
dbs_path = Path(toml_path).parent / "Database"
|
|
349
|
-
if not dbs_path.exists():
|
|
350
|
-
dbs_path.mkdir(parents=True)
|
|
351
|
-
config.database_path = dbs_path.as_posix()
|
|
352
|
-
# check if paths are relative to the config file and make them absolute
|
|
353
|
-
config.database_path = path_check(config.database_path, toml_path)
|
|
354
|
-
config.fiat = path_check(config.fiat, toml_path)
|
|
355
|
-
config.sfincs_overland.name = path_check(config.sfincs_overland.name, toml_path)
|
|
356
|
-
if config.sfincs_offshore:
|
|
357
|
-
config.sfincs_offshore.name = path_check(
|
|
358
|
-
config.sfincs_offshore.name, toml_path
|
|
359
|
-
)
|
|
360
|
-
if isinstance(config.building_footprints, SpatialJoinModel):
|
|
361
|
-
config.building_footprints.file = path_check(
|
|
362
|
-
config.building_footprints.file, toml_path
|
|
363
|
-
)
|
|
364
|
-
if config.tide_gauge and config.tide_gauge.file:
|
|
365
|
-
config.tide_gauge.file = path_check(config.tide_gauge.file, toml_path)
|
|
366
|
-
if config.svi:
|
|
367
|
-
config.svi.file = path_check(config.svi.file, toml_path)
|
|
368
|
-
if config.bfe:
|
|
369
|
-
config.bfe.file = path_check(config.bfe.file, toml_path)
|
|
370
|
-
if config.slr_scenarios:
|
|
371
|
-
config.slr_scenarios.file = path_check(config.slr_scenarios.file, toml_path)
|
|
372
|
-
if config.probabilistic_set:
|
|
373
|
-
config.probabilistic_set = path_check(config.probabilistic_set, toml_path)
|
|
374
|
-
if config.aggregation_areas:
|
|
375
|
-
for aggr in config.aggregation_areas:
|
|
376
|
-
aggr.file = path_check(aggr.file, toml_path)
|
|
377
|
-
|
|
378
|
-
return config
|
|
379
|
-
|
|
380
|
-
|
|
381
|
-
class DatabaseBuilder:
|
|
382
|
-
logger = FloodAdaptLogging.getLogger("DatabaseBuilder")
|
|
383
|
-
|
|
384
|
-
_has_roads: bool = False
|
|
385
|
-
_aggregation_areas: Optional[list] = None
|
|
386
|
-
_probabilistic_set_name: Optional[str] = None
|
|
387
|
-
|
|
388
|
-
def __init__(self, config: ConfigModel, overwrite: bool = True):
|
|
389
|
-
self.config = config
|
|
390
|
-
|
|
391
|
-
# Set database root
|
|
392
|
-
if config.database_path:
|
|
393
|
-
self.root = Path(config.database_path).joinpath(self.config.name)
|
|
394
|
-
else:
|
|
395
|
-
raise ValueError(
|
|
396
|
-
"Database path is not provided. Please provide a path using the 'database_path' attribute."
|
|
397
|
-
)
|
|
398
|
-
|
|
399
|
-
# Read info that needs to be used to create other models
|
|
400
|
-
self.unit_system = self.create_default_units()
|
|
401
|
-
|
|
402
|
-
# Read info that needs to be updated with other model info
|
|
403
|
-
self.water_level_references = self.config.references
|
|
404
|
-
|
|
405
|
-
@property
|
|
406
|
-
def static_path(self) -> Path:
|
|
407
|
-
return self.root / "static"
|
|
408
|
-
|
|
409
|
-
def build(self, overwrite: bool = False) -> None:
|
|
410
|
-
# Check if database already exists
|
|
411
|
-
if self.root.exists() and not overwrite:
|
|
412
|
-
raise ValueError(
|
|
413
|
-
f"There is already a Database folder in '{self.root.as_posix()}'."
|
|
414
|
-
)
|
|
415
|
-
if self.root.exists() and overwrite:
|
|
416
|
-
shutil.rmtree(self.root)
|
|
417
|
-
warnings.warn(
|
|
418
|
-
f"There is already a Database folder in '{self.root.as_posix()}, which will be overwritten'."
|
|
419
|
-
)
|
|
420
|
-
# Create database folder
|
|
421
|
-
self.root.mkdir(parents=True)
|
|
422
|
-
|
|
423
|
-
with FloodAdaptLogging.to_file(
|
|
424
|
-
file_path=self.root.joinpath("database_builder.log")
|
|
425
|
-
):
|
|
426
|
-
self.logger.info(
|
|
427
|
-
f"Creating a FloodAdapt database in '{self.root.as_posix()}'"
|
|
428
|
-
)
|
|
429
|
-
|
|
430
|
-
# Make folder structure and read models
|
|
431
|
-
self.setup()
|
|
432
|
-
|
|
433
|
-
# Prepare site configuration
|
|
434
|
-
site = self.create_site_config()
|
|
435
|
-
site.save(self.static_path / "config" / "site.toml")
|
|
436
|
-
|
|
437
|
-
# Add infometric and infographic configurations
|
|
438
|
-
self.create_infometrics()
|
|
439
|
-
|
|
440
|
-
# Save standard objects
|
|
441
|
-
self.create_standard_objects()
|
|
442
|
-
|
|
443
|
-
# Save log file
|
|
444
|
-
self.logger.info("FloodAdapt database creation finished!")
|
|
445
|
-
|
|
446
|
-
def setup(self) -> None:
|
|
447
|
-
# Create the models
|
|
448
|
-
self.make_folder_structure()
|
|
449
|
-
|
|
450
|
-
# Read user models and copy to templates
|
|
451
|
-
self.read_template_fiat_model()
|
|
452
|
-
self.read_template_sfincs_overland_model()
|
|
453
|
-
self.read_template_sfincs_offshore_model()
|
|
454
|
-
|
|
455
|
-
def set_standard_objects(self):
|
|
456
|
-
# Define name and create object
|
|
457
|
-
self._no_measures_strategy_name = "no_measures"
|
|
458
|
-
self._current_projection_name = "current"
|
|
459
|
-
if self._probabilistic_set_name is not None:
|
|
460
|
-
event_list = [self._probabilistic_set_name]
|
|
461
|
-
else:
|
|
462
|
-
event_list = []
|
|
463
|
-
std_obj = StandardObjectModel(
|
|
464
|
-
events=event_list,
|
|
465
|
-
projections=[self._current_projection_name],
|
|
466
|
-
strategies=[self._no_measures_strategy_name],
|
|
467
|
-
)
|
|
468
|
-
return std_obj
|
|
469
|
-
|
|
470
|
-
def create_standard_objects(self):
|
|
471
|
-
with modified_environ(
|
|
472
|
-
DATABASE_ROOT=str(self.root.parent),
|
|
473
|
-
DATABASE_NAME=self.root.name,
|
|
474
|
-
):
|
|
475
|
-
self.logger.info(
|
|
476
|
-
"Creating `no measures` strategy and `current` projection."
|
|
477
|
-
)
|
|
478
|
-
# Create database instance
|
|
479
|
-
db = Database(self.root.parent, self.config.name)
|
|
480
|
-
# Create no measures strategy
|
|
481
|
-
strategy = Strategy(
|
|
482
|
-
name=self._no_measures_strategy_name,
|
|
483
|
-
measures=[],
|
|
484
|
-
)
|
|
485
|
-
db.strategies.save(strategy)
|
|
486
|
-
# Create current projection
|
|
487
|
-
projection = Projection(
|
|
488
|
-
name=self._current_projection_name,
|
|
489
|
-
physical_projection=PhysicalProjection(),
|
|
490
|
-
socio_economic_change=SocioEconomicChange(),
|
|
491
|
-
)
|
|
492
|
-
db.projections.save(projection)
|
|
493
|
-
# Check prob set
|
|
494
|
-
if self._probabilistic_set_name is not None:
|
|
495
|
-
path_toml = (
|
|
496
|
-
db.input_path
|
|
497
|
-
/ "events"
|
|
498
|
-
/ self._probabilistic_set_name
|
|
499
|
-
/ f"{self._probabilistic_set_name}.toml"
|
|
500
|
-
)
|
|
501
|
-
try:
|
|
502
|
-
EventSet.load_file(path_toml)
|
|
503
|
-
except Exception as e:
|
|
504
|
-
raise ValueError(
|
|
505
|
-
f"Provided probabilistic event set '{self._probabilistic_set_name}' is not valid. Error: {e}"
|
|
506
|
-
)
|
|
507
|
-
|
|
508
|
-
### TEMPLATE READERS ###
|
|
509
|
-
def read_template_fiat_model(self):
|
|
510
|
-
user_provided = self._check_exists_and_absolute(self.config.fiat)
|
|
511
|
-
|
|
512
|
-
# Read config model
|
|
513
|
-
HydromtFiatModel(root=str(user_provided), mode="r+").read()
|
|
514
|
-
|
|
515
|
-
# Success, so copy to db and read again
|
|
516
|
-
location_in_db = self.static_path / "templates" / "fiat"
|
|
517
|
-
if location_in_db.exists():
|
|
518
|
-
shutil.rmtree(location_in_db)
|
|
519
|
-
shutil.copytree(user_provided, location_in_db)
|
|
520
|
-
in_db = HydromtFiatModel(root=str(location_in_db), mode="r+")
|
|
521
|
-
in_db.read()
|
|
522
|
-
# Add check to make sure the geoms are correct
|
|
523
|
-
# TODO this should be handled in hydromt-FIAT
|
|
524
|
-
no_geoms = len(
|
|
525
|
-
[name for name in in_db.config["exposure"]["geom"].keys() if "file" in name]
|
|
526
|
-
)
|
|
527
|
-
in_db.exposure.exposure_geoms = in_db.exposure.exposure_geoms[:no_geoms]
|
|
528
|
-
in_db.exposure._geom_names = in_db.exposure._geom_names[:no_geoms]
|
|
529
|
-
|
|
530
|
-
# Make sure that a region polygon is included
|
|
531
|
-
if "region" not in in_db.geoms:
|
|
532
|
-
gdf = in_db.exposure.get_full_gdf(in_db.exposure.exposure_db)
|
|
533
|
-
# Combine all geometries into a single geometry
|
|
534
|
-
merged_geometry = gdf.unary_union
|
|
535
|
-
|
|
536
|
-
# If the result is not a polygon, you can create a convex hull
|
|
537
|
-
if not isinstance(merged_geometry, Polygon):
|
|
538
|
-
merged_geometry = merged_geometry.convex_hull
|
|
539
|
-
# Create a new GeoDataFrame with the resulting polygon
|
|
540
|
-
in_db.geoms["region"] = gpd.GeoDataFrame(
|
|
541
|
-
geometry=[merged_geometry], crs=gdf.crs
|
|
542
|
-
)
|
|
543
|
-
|
|
544
|
-
self.fiat_model = in_db
|
|
545
|
-
|
|
546
|
-
def read_template_sfincs_overland_model(self):
|
|
547
|
-
user_provided = self._check_exists_and_absolute(
|
|
548
|
-
self.config.sfincs_overland.name
|
|
549
|
-
)
|
|
550
|
-
user_model = HydromtSfincsModel(root=str(user_provided), mode="r")
|
|
551
|
-
user_model.read()
|
|
552
|
-
if user_model.crs is None:
|
|
553
|
-
raise ValueError("CRS is not defined in the SFINCS model.")
|
|
554
|
-
|
|
555
|
-
location_in_db = self.static_path / "templates" / "overland"
|
|
556
|
-
if location_in_db.exists():
|
|
557
|
-
shutil.rmtree(location_in_db)
|
|
558
|
-
shutil.copytree(user_provided, location_in_db)
|
|
559
|
-
in_db = HydromtSfincsModel(root=str(location_in_db), mode="r+")
|
|
560
|
-
in_db.read()
|
|
561
|
-
self.sfincs_overland_model = in_db
|
|
562
|
-
|
|
563
|
-
def read_template_sfincs_offshore_model(self):
|
|
564
|
-
if self.config.sfincs_offshore is None:
|
|
565
|
-
self.sfincs_offshore_model = None
|
|
566
|
-
return
|
|
567
|
-
user_provided = self._check_exists_and_absolute(
|
|
568
|
-
self.config.sfincs_offshore.name
|
|
569
|
-
)
|
|
570
|
-
user_model = HydromtSfincsModel(root=str(user_provided), mode="r+")
|
|
571
|
-
user_model.read()
|
|
572
|
-
if user_model.crs is None:
|
|
573
|
-
raise ValueError("CRS is not defined in the SFINCS model.")
|
|
574
|
-
epsg = user_model.crs.to_epsg()
|
|
575
|
-
|
|
576
|
-
location_in_db = self.static_path / "templates" / "offshore"
|
|
577
|
-
if location_in_db.exists():
|
|
578
|
-
shutil.rmtree(location_in_db)
|
|
579
|
-
shutil.copytree(user_provided, location_in_db)
|
|
580
|
-
in_db = HydromtSfincsModel(str(location_in_db), mode="r+")
|
|
581
|
-
in_db.read(epsg=epsg)
|
|
582
|
-
self.sfincs_offshore_model = in_db
|
|
583
|
-
|
|
584
|
-
### FIAT ###
|
|
585
|
-
def create_fiat_model(self) -> FiatModel:
|
|
586
|
-
fiat = FiatModel(
|
|
587
|
-
config=self.create_fiat_config(),
|
|
588
|
-
benefits=self.create_benefit_config(),
|
|
589
|
-
risk=self.create_risk_model(),
|
|
590
|
-
)
|
|
591
|
-
return fiat
|
|
592
|
-
|
|
593
|
-
def create_risk_model(self) -> Optional[RiskModel]:
|
|
594
|
-
# Check if return periods are provided
|
|
595
|
-
if not self.config.return_periods:
|
|
596
|
-
if self._probabilistic_set_name:
|
|
597
|
-
risk = RiskModel()
|
|
598
|
-
self.logger.warning(
|
|
599
|
-
f"No return periods provided, but a probabilistic set is available. Using default return periods {risk.return_periods}."
|
|
600
|
-
)
|
|
601
|
-
return risk
|
|
602
|
-
else:
|
|
603
|
-
self.logger.warning(
|
|
604
|
-
"No return periods provided and no probabilistic set available. Risk calculations will not be performed."
|
|
605
|
-
)
|
|
606
|
-
return None
|
|
607
|
-
else:
|
|
608
|
-
risk = RiskModel(return_periods=self.config.return_periods)
|
|
609
|
-
return risk
|
|
610
|
-
|
|
611
|
-
def create_benefit_config(self) -> Optional[BenefitsModel]:
|
|
612
|
-
if self._probabilistic_set_name is None:
|
|
613
|
-
self.logger.warning(
|
|
614
|
-
"No probabilistic set found in the config, benefits will not be available."
|
|
615
|
-
)
|
|
616
|
-
return None
|
|
617
|
-
return BenefitsModel(
|
|
618
|
-
current_year=datetime.datetime.now().year,
|
|
619
|
-
current_projection="current",
|
|
620
|
-
baseline_strategy="no_measures",
|
|
621
|
-
event_set=self._probabilistic_set_name,
|
|
622
|
-
)
|
|
623
|
-
|
|
624
|
-
def create_fiat_config(self) -> FiatConfigModel:
|
|
625
|
-
# Make sure only csv objects have geometries
|
|
626
|
-
for i, geoms in enumerate(self.fiat_model.exposure.exposure_geoms):
|
|
627
|
-
keep = geoms[_FIAT_COLUMNS.object_id].isin(
|
|
628
|
-
self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.object_id]
|
|
629
|
-
)
|
|
630
|
-
geoms = geoms[keep].reset_index(drop=True)
|
|
631
|
-
self.fiat_model.exposure.exposure_geoms[i] = geoms
|
|
632
|
-
|
|
633
|
-
footprints = self.create_footprints()
|
|
634
|
-
if footprints is not None:
|
|
635
|
-
footprints = footprints.as_posix()
|
|
636
|
-
|
|
637
|
-
# Clip hazard and reset buildings # TODO use hydromt-FIAT instead
|
|
638
|
-
if not self.fiat_model.region.empty:
|
|
639
|
-
self._clip_hazard_extend()
|
|
640
|
-
|
|
641
|
-
# Store result for possible future use in create_infographics
|
|
642
|
-
self._aggregation_areas = self.create_aggregation_areas()
|
|
643
|
-
|
|
644
|
-
roads_gpkg = self.create_roads()
|
|
645
|
-
non_building_names = []
|
|
646
|
-
if roads_gpkg is not None:
|
|
647
|
-
non_building_names.append("road")
|
|
648
|
-
|
|
649
|
-
# Update elevations
|
|
650
|
-
self.update_fiat_elevation()
|
|
651
|
-
|
|
652
|
-
self._svi = self.create_svi()
|
|
653
|
-
|
|
654
|
-
config = FiatConfigModel(
|
|
655
|
-
exposure_crs=self.fiat_model.exposure.crs,
|
|
656
|
-
floodmap_type=self.read_floodmap_type(),
|
|
657
|
-
bfe=self.create_bfe(),
|
|
658
|
-
non_building_names=non_building_names,
|
|
659
|
-
damage_unit=self.read_damage_unit(),
|
|
660
|
-
building_footprints=footprints,
|
|
661
|
-
roads_file_name=roads_gpkg,
|
|
662
|
-
new_development_file_name=self.create_new_developments(),
|
|
663
|
-
save_simulation=False, # TODO
|
|
664
|
-
infographics=self.config.infographics,
|
|
665
|
-
aggregation=self._aggregation_areas,
|
|
666
|
-
svi=self._svi,
|
|
667
|
-
)
|
|
668
|
-
|
|
669
|
-
# Update output geoms names
|
|
670
|
-
output_geom = {}
|
|
671
|
-
counter = 0
|
|
672
|
-
for key in self.fiat_model.config["exposure"]["geom"].keys():
|
|
673
|
-
if "file" in key:
|
|
674
|
-
counter += 1
|
|
675
|
-
output_geom[f"name{counter}"] = Path(
|
|
676
|
-
self.fiat_model.config["exposure"]["geom"][key]
|
|
677
|
-
).name
|
|
678
|
-
self.fiat_model.config["output"]["geom"] = output_geom
|
|
679
|
-
# Update FIAT model with the new config
|
|
680
|
-
self.fiat_model.write()
|
|
681
|
-
|
|
682
|
-
return config
|
|
683
|
-
|
|
684
|
-
def update_fiat_elevation(self):
|
|
685
|
-
"""
|
|
686
|
-
Update the ground elevations of FIAT objects based on the SFINCS ground elevation map.
|
|
687
|
-
|
|
688
|
-
This method reads the DEM file and the exposure CSV file, and updates the ground elevations
|
|
689
|
-
of the FIAT objects (roads and buildings) based on the nearest elevation values from the DEM.
|
|
690
|
-
"""
|
|
691
|
-
dem_file = self._dem_path
|
|
692
|
-
# TODO resolve issue with double geometries in hydromt-FIAT and use update_ground_elevation method instead
|
|
693
|
-
# self.fiat_model.update_ground_elevation(dem_file, grnd_elev_unit="meters")
|
|
694
|
-
self.logger.info(
|
|
695
|
-
"Updating FIAT objects ground elevations from SFINCS ground elevation map."
|
|
696
|
-
)
|
|
697
|
-
SFINCS_units = us.UnitfulLength(
|
|
698
|
-
value=1.0, units=us.UnitTypesLength.meters
|
|
699
|
-
) # SFINCS is always in meters
|
|
700
|
-
FIAT_units = self.unit_system.default_length_units
|
|
701
|
-
conversion_factor = SFINCS_units.convert(FIAT_units)
|
|
702
|
-
|
|
703
|
-
if not math.isclose(conversion_factor, 1):
|
|
704
|
-
self.logger.info(
|
|
705
|
-
f"Ground elevation for FIAT objects is in '{FIAT_units}', while SFINCS ground elevation is in 'meters'. Values in the exposure csv will be converted by a factor of {conversion_factor}"
|
|
706
|
-
)
|
|
707
|
-
|
|
708
|
-
exposure = self.fiat_model.exposure.exposure_db
|
|
709
|
-
dem = rxr.open_rasterio(dem_file)
|
|
710
|
-
# TODO make sure only fiat_model object changes take place!
|
|
711
|
-
if self.config.fiat_roads_name in self.fiat_model.exposure.geom_names:
|
|
712
|
-
roads = self.fiat_model.exposure.exposure_geoms[
|
|
713
|
-
self._get_fiat_road_index()
|
|
714
|
-
].to_crs(dem.spatial_ref.crs_wkt)
|
|
715
|
-
roads["centroid"] = roads.geometry.centroid # get centroids
|
|
716
|
-
|
|
717
|
-
x_points = xr.DataArray(roads["centroid"].x, dims="points")
|
|
718
|
-
y_points = xr.DataArray(roads["centroid"].y, dims="points")
|
|
719
|
-
roads["elev"] = (
|
|
720
|
-
dem.sel(x=x_points, y=y_points, band=1, method="nearest").to_numpy()
|
|
721
|
-
* conversion_factor
|
|
722
|
-
)
|
|
723
|
-
|
|
724
|
-
exposure.loc[
|
|
725
|
-
exposure[_FIAT_COLUMNS.primary_object_type] == "road",
|
|
726
|
-
_FIAT_COLUMNS.ground_floor_height,
|
|
727
|
-
] = 0
|
|
728
|
-
exposure = exposure.merge(
|
|
729
|
-
roads[[_FIAT_COLUMNS.object_id, "elev"]],
|
|
730
|
-
on=_FIAT_COLUMNS.object_id,
|
|
731
|
-
how="left",
|
|
732
|
-
)
|
|
733
|
-
exposure.loc[
|
|
734
|
-
exposure[_FIAT_COLUMNS.primary_object_type] == "road",
|
|
735
|
-
_FIAT_COLUMNS.ground_elevation,
|
|
736
|
-
] = exposure.loc[
|
|
737
|
-
exposure[_FIAT_COLUMNS.primary_object_type] == "road", "elev"
|
|
738
|
-
]
|
|
739
|
-
del exposure["elev"]
|
|
740
|
-
self.fiat_model.exposure.exposure_db = exposure
|
|
741
|
-
|
|
742
|
-
buildings = self.fiat_model.exposure.exposure_geoms[
|
|
743
|
-
self._get_fiat_building_index()
|
|
744
|
-
].to_crs(dem.spatial_ref.crs_wkt)
|
|
745
|
-
buildings["geometry"] = buildings.geometry.centroid
|
|
746
|
-
x_points = xr.DataArray(buildings["geometry"].x, dims="points")
|
|
747
|
-
y_points = xr.DataArray(buildings["geometry"].y, dims="points")
|
|
748
|
-
buildings["elev"] = (
|
|
749
|
-
dem.sel(x=x_points, y=y_points, band=1, method="nearest").to_numpy()
|
|
750
|
-
* conversion_factor
|
|
751
|
-
)
|
|
752
|
-
exposure = exposure.merge(
|
|
753
|
-
buildings[[_FIAT_COLUMNS.object_id, "elev"]],
|
|
754
|
-
on=_FIAT_COLUMNS.object_id,
|
|
755
|
-
how="left",
|
|
756
|
-
)
|
|
757
|
-
exposure.loc[
|
|
758
|
-
exposure[_FIAT_COLUMNS.primary_object_type] != "road",
|
|
759
|
-
_FIAT_COLUMNS.ground_elevation,
|
|
760
|
-
] = exposure.loc[exposure[_FIAT_COLUMNS.primary_object_type] != "road", "elev"]
|
|
761
|
-
del exposure["elev"]
|
|
762
|
-
|
|
763
|
-
def read_damage_unit(self) -> str:
|
|
764
|
-
if self.fiat_model.exposure.damage_unit is not None:
|
|
765
|
-
return self.fiat_model.exposure.damage_unit
|
|
766
|
-
else:
|
|
767
|
-
self.logger.warning(
|
|
768
|
-
"Delft-FIAT model was missing damage units so '$' was assumed."
|
|
769
|
-
)
|
|
770
|
-
return "$"
|
|
771
|
-
|
|
772
|
-
def read_floodmap_type(self) -> FloodmapType:
|
|
773
|
-
# If there is at least on object that uses the area method, use water depths for FA calcs
|
|
774
|
-
if (
|
|
775
|
-
self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.extraction_method]
|
|
776
|
-
== "area"
|
|
777
|
-
).any():
|
|
778
|
-
return FloodmapType.water_depth
|
|
779
|
-
else:
|
|
780
|
-
return FloodmapType.water_level
|
|
781
|
-
|
|
782
|
-
def create_roads(self) -> Optional[str]:
|
|
783
|
-
# Make sure that FIAT roads are polygons
|
|
784
|
-
if self.config.fiat_roads_name not in self.fiat_model.exposure.geom_names:
|
|
785
|
-
self.logger.warning(
|
|
786
|
-
"Road objects are not available in the FIAT model and thus would not be available in FloodAdapt."
|
|
787
|
-
)
|
|
788
|
-
# TODO check how this naming of output geoms should become more explicit!
|
|
789
|
-
return None
|
|
790
|
-
|
|
791
|
-
roads = self.fiat_model.exposure.exposure_geoms[self._get_fiat_road_index()]
|
|
792
|
-
|
|
793
|
-
# TODO do we need the lanes column?
|
|
794
|
-
if (
|
|
795
|
-
_FIAT_COLUMNS.segment_length
|
|
796
|
-
not in self.fiat_model.exposure.exposure_db.columns
|
|
797
|
-
):
|
|
798
|
-
self.logger.warning(
|
|
799
|
-
f"'{_FIAT_COLUMNS.segment_length}' column not present in the FIAT exposure csv. Road impact infometrics cannot be produced."
|
|
800
|
-
)
|
|
801
|
-
|
|
802
|
-
# TODO should this should be performed through hydromt-FIAT?
|
|
803
|
-
if not isinstance(roads.geometry.iloc[0], Polygon):
|
|
804
|
-
roads = roads.to_crs(roads.estimate_utm_crs())
|
|
805
|
-
roads.geometry = roads.geometry.buffer(
|
|
806
|
-
self.config.road_width / 2, cap_style=2
|
|
807
|
-
)
|
|
808
|
-
roads = roads.to_crs(self.fiat_model.exposure.crs)
|
|
809
|
-
self.fiat_model.exposure.exposure_geoms[self._get_fiat_road_index()] = roads
|
|
810
|
-
self.logger.info(
|
|
811
|
-
f"FIAT road objects transformed from lines to polygons assuming a road width of {self.config.road_width} meters."
|
|
812
|
-
)
|
|
813
|
-
|
|
814
|
-
self._has_roads = True
|
|
815
|
-
return f"{self.config.fiat_roads_name}.gpkg"
|
|
816
|
-
|
|
817
|
-
def create_new_developments(self) -> Optional[str]:
|
|
818
|
-
return "new_development_area.gpkg"
|
|
819
|
-
|
|
820
|
-
def create_footprints(self) -> Optional[Path]:
|
|
821
|
-
if isinstance(self.config.building_footprints, SpatialJoinModel):
|
|
822
|
-
# Use the provided building footprints
|
|
823
|
-
building_footprints_file = self._check_exists_and_absolute(
|
|
824
|
-
self.config.building_footprints.file
|
|
825
|
-
)
|
|
826
|
-
|
|
827
|
-
self.logger.info(
|
|
828
|
-
f"Using building footprints from {Path(building_footprints_file).as_posix()}."
|
|
829
|
-
)
|
|
830
|
-
# Spatially join buildings and map
|
|
831
|
-
# TODO use hydromt method instead
|
|
832
|
-
path = self._join_building_footprints(
|
|
833
|
-
self.config.building_footprints.file,
|
|
834
|
-
self.config.building_footprints.field_name,
|
|
835
|
-
)
|
|
836
|
-
return path
|
|
837
|
-
elif self.config.building_footprints == FootprintsOptions.OSM:
|
|
838
|
-
self.logger.info(
|
|
839
|
-
"Building footprint data will be downloaded from Open Street Maps."
|
|
840
|
-
)
|
|
841
|
-
region = self.fiat_model.region
|
|
842
|
-
if region is None:
|
|
843
|
-
raise ValueError(
|
|
844
|
-
"No region file found in the FIAT model. Building footprints cannot be created."
|
|
845
|
-
)
|
|
846
|
-
region = region.to_crs(4326)
|
|
847
|
-
if isinstance(region.boundary.to_numpy()[0], MultiLineString):
|
|
848
|
-
polygon = Polygon(
|
|
849
|
-
region.boundary.to_numpy()[0].envelope
|
|
850
|
-
) # TODO check if this is correct
|
|
851
|
-
else:
|
|
852
|
-
polygon = Polygon(region.boundary.to_numpy()[0])
|
|
853
|
-
footprints = get_buildings_from_osm(polygon)
|
|
854
|
-
footprints["BF_FID"] = np.arange(1, len(footprints) + 1)
|
|
855
|
-
footprints = footprints[["BF_FID", "geometry"]]
|
|
856
|
-
path = self._join_building_footprints(footprints, "BF_FID")
|
|
857
|
-
return path
|
|
858
|
-
# Then check if geometries are already footprints
|
|
859
|
-
elif isinstance(
|
|
860
|
-
self.fiat_model.exposure.exposure_geoms[
|
|
861
|
-
self._get_fiat_building_index()
|
|
862
|
-
].geometry.iloc[0],
|
|
863
|
-
(Polygon, MultiPolygon),
|
|
864
|
-
):
|
|
865
|
-
self.logger.info(
|
|
866
|
-
"Building footprints are already available in the FIAT model geometry files."
|
|
867
|
-
)
|
|
868
|
-
return None
|
|
869
|
-
# check if it is spatially joined and/or exists already
|
|
870
|
-
elif "BF_FID" in self.fiat_model.exposure.exposure_db.columns:
|
|
871
|
-
add_attrs = self.fiat_model.spatial_joins["additional_attributes"]
|
|
872
|
-
fiat_path = Path(self.fiat_model.root)
|
|
873
|
-
|
|
874
|
-
if not (add_attrs and "BF_FID" in [attr["name"] for attr in add_attrs]):
|
|
875
|
-
raise KeyError(
|
|
876
|
-
"While 'BF_FID' column exists, connection to a spatial footprints file is missing."
|
|
877
|
-
)
|
|
878
|
-
|
|
879
|
-
ind = [attr["name"] for attr in add_attrs].index("BF_FID")
|
|
880
|
-
footprints = add_attrs[ind]
|
|
881
|
-
footprints_path = fiat_path / footprints["file"]
|
|
882
|
-
|
|
883
|
-
if not footprints_path.exists():
|
|
884
|
-
raise FileNotFoundError(
|
|
885
|
-
f"While 'BF_FID' column exists, building footprints file {footprints_path} not found."
|
|
886
|
-
)
|
|
887
|
-
|
|
888
|
-
self.logger.info(
|
|
889
|
-
f"Using the building footprints located at {footprints_path}."
|
|
890
|
-
)
|
|
891
|
-
return footprints_path.relative_to(self.static_path)
|
|
892
|
-
|
|
893
|
-
# Other methods
|
|
894
|
-
else:
|
|
895
|
-
self.logger.warning(
|
|
896
|
-
"No building footprints are available. Buildings will be plotted with a default shape in FloodAdapt."
|
|
897
|
-
)
|
|
898
|
-
return None
|
|
899
|
-
|
|
900
|
-
def create_bfe(self) -> Optional[BFEModel]:
|
|
901
|
-
if self.config.bfe is None:
|
|
902
|
-
self.logger.warning(
|
|
903
|
-
"No base flood elevation provided. Elevating building relative to base flood elevation will not be possible in FloodAdapt."
|
|
904
|
-
)
|
|
905
|
-
return None
|
|
906
|
-
|
|
907
|
-
# TODO can we use hydromt-FIAT?
|
|
908
|
-
bfe_file = self._check_exists_and_absolute(self.config.bfe.file)
|
|
909
|
-
|
|
910
|
-
self.logger.info(
|
|
911
|
-
f"Using map from {Path(bfe_file).as_posix()} as base flood elevation."
|
|
912
|
-
)
|
|
913
|
-
|
|
914
|
-
# Spatially join buildings and map
|
|
915
|
-
buildings_joined, bfe = self.spatial_join(
|
|
916
|
-
self.fiat_model.exposure.exposure_geoms[self._get_fiat_building_index()],
|
|
917
|
-
bfe_file,
|
|
918
|
-
self.config.bfe.field_name,
|
|
919
|
-
)
|
|
920
|
-
|
|
921
|
-
# Make sure in case of multiple values that the max is kept
|
|
922
|
-
buildings_joined = (
|
|
923
|
-
buildings_joined.groupby(_FIAT_COLUMNS.object_id)
|
|
924
|
-
.max(self.config.bfe.field_name)
|
|
925
|
-
.sort_values(by=[_FIAT_COLUMNS.object_id])
|
|
926
|
-
.reset_index()
|
|
927
|
-
)
|
|
928
|
-
|
|
929
|
-
# Save the files
|
|
930
|
-
fa_bfe_file = self.static_path / "bfe" / "bfe.gpkg"
|
|
931
|
-
fa_bfe_file.parent.mkdir(parents=True, exist_ok=True)
|
|
932
|
-
bfe.to_file(fa_bfe_file)
|
|
933
|
-
csv_path = fa_bfe_file.parent / "bfe.csv"
|
|
934
|
-
buildings_joined.to_csv(csv_path, index=False)
|
|
935
|
-
|
|
936
|
-
# Save attributes
|
|
937
|
-
return BFEModel(
|
|
938
|
-
geom=fa_bfe_file.relative_to(self.static_path).as_posix(),
|
|
939
|
-
table=csv_path.relative_to(self.static_path).as_posix(),
|
|
940
|
-
field_name=self.config.bfe.field_name,
|
|
941
|
-
)
|
|
942
|
-
|
|
943
|
-
def create_aggregation_areas(self) -> list[AggregationModel]:
|
|
944
|
-
# TODO split this to 3 methods?
|
|
945
|
-
aggregation_areas = []
|
|
946
|
-
|
|
947
|
-
# first check if the FIAT model has existing aggregation areas
|
|
948
|
-
if self.fiat_model.spatial_joins["aggregation_areas"]:
|
|
949
|
-
# Use the aggregation areas from the FIAT model
|
|
950
|
-
for aggr in self.fiat_model.spatial_joins["aggregation_areas"]:
|
|
951
|
-
# Check if the exposure csv has the correct column
|
|
952
|
-
col_name = _FIAT_COLUMNS.aggregation_label.format(name=aggr["name"])
|
|
953
|
-
if col_name not in self.fiat_model.exposure.exposure_db.columns:
|
|
954
|
-
raise KeyError(
|
|
955
|
-
f"While aggregation area '{aggr['name']}' exists in the spatial joins of the FIAT model, the column '{col_name}' is missing in the exposure csv."
|
|
956
|
-
)
|
|
957
|
-
# Check equity config
|
|
958
|
-
if aggr["equity"] is not None:
|
|
959
|
-
equity_config = EquityModel(
|
|
960
|
-
census_data=str(
|
|
961
|
-
self.static_path.joinpath(
|
|
962
|
-
"templates", "fiat", aggr["equity"]["census_data"]
|
|
963
|
-
)
|
|
964
|
-
.relative_to(self.static_path)
|
|
965
|
-
.as_posix()
|
|
966
|
-
),
|
|
967
|
-
percapitaincome_label=aggr["equity"]["percapitaincome_label"],
|
|
968
|
-
totalpopulation_label=aggr["equity"]["totalpopulation_label"],
|
|
969
|
-
)
|
|
970
|
-
else:
|
|
971
|
-
equity_config = None
|
|
972
|
-
# Make aggregation config
|
|
973
|
-
aggr = AggregationModel(
|
|
974
|
-
name=aggr["name"],
|
|
975
|
-
file=str(
|
|
976
|
-
self.static_path.joinpath("templates", "fiat", aggr["file"])
|
|
977
|
-
.relative_to(self.static_path)
|
|
978
|
-
.as_posix()
|
|
979
|
-
),
|
|
980
|
-
field_name=aggr["field_name"],
|
|
981
|
-
equity=equity_config,
|
|
982
|
-
)
|
|
983
|
-
aggregation_areas.append(aggr)
|
|
984
|
-
|
|
985
|
-
self.logger.info(
|
|
986
|
-
f"Aggregation areas: {aggr.name} from the FIAT model are going to be used."
|
|
987
|
-
)
|
|
988
|
-
|
|
989
|
-
# Then check if the user has provided extra aggregation areas in the config
|
|
990
|
-
if self.config.aggregation_areas:
|
|
991
|
-
# Loop through aggr areas given in config
|
|
992
|
-
for aggr in self.config.aggregation_areas:
|
|
993
|
-
# Get name of type of aggregation area
|
|
994
|
-
if aggr.name is not None:
|
|
995
|
-
aggr_name = aggr.name
|
|
996
|
-
else:
|
|
997
|
-
aggr_name = Path(aggr.file).stem
|
|
998
|
-
# If aggregation area already in FIAT model raise Error
|
|
999
|
-
if aggr_name in [aggr.name for aggr in aggregation_areas]:
|
|
1000
|
-
raise ValueError(
|
|
1001
|
-
f"Aggregation area '{aggr_name}' already exists in the FIAT model."
|
|
1002
|
-
)
|
|
1003
|
-
# Do spatial join of FIAT objects and aggregation areas
|
|
1004
|
-
exposure_csv = self.fiat_model.exposure.exposure_db
|
|
1005
|
-
buildings_joined, aggr_areas = self.spatial_join(
|
|
1006
|
-
objects=self.fiat_model.exposure.exposure_geoms[
|
|
1007
|
-
self._get_fiat_building_index()
|
|
1008
|
-
],
|
|
1009
|
-
layer=str(self._check_exists_and_absolute(aggr.file)),
|
|
1010
|
-
field_name=aggr.field_name,
|
|
1011
|
-
rename=_FIAT_COLUMNS.aggregation_label.format(name=aggr_name),
|
|
1012
|
-
)
|
|
1013
|
-
aggr_path = Path(self.fiat_model.root).joinpath(
|
|
1014
|
-
"exposure", "aggregation_areas", f"{Path(aggr.file).stem}.gpkg"
|
|
1015
|
-
)
|
|
1016
|
-
aggr_path.parent.mkdir(parents=True, exist_ok=True)
|
|
1017
|
-
aggr_areas.to_file(aggr_path)
|
|
1018
|
-
exposure_csv = exposure_csv.merge(
|
|
1019
|
-
buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
|
|
1020
|
-
)
|
|
1021
|
-
self.fiat_model.exposure.exposure_db = exposure_csv
|
|
1022
|
-
# Update spatial joins in FIAT model
|
|
1023
|
-
if self.fiat_model.spatial_joins["aggregation_areas"] is None:
|
|
1024
|
-
self.fiat_model.spatial_joins["aggregation_areas"] = []
|
|
1025
|
-
self.fiat_model.spatial_joins["aggregation_areas"].append(
|
|
1026
|
-
{
|
|
1027
|
-
"name": aggr_name,
|
|
1028
|
-
"file": aggr_path.relative_to(self.fiat_model.root).as_posix(),
|
|
1029
|
-
"field_name": _FIAT_COLUMNS.aggregation_label.format(
|
|
1030
|
-
name=aggr_name
|
|
1031
|
-
),
|
|
1032
|
-
"equity": None, # TODO allow adding equity as well?
|
|
1033
|
-
}
|
|
1034
|
-
)
|
|
1035
|
-
# Update the aggregation areas list in the config
|
|
1036
|
-
aggregation_areas.append(
|
|
1037
|
-
AggregationModel(
|
|
1038
|
-
name=aggr_name,
|
|
1039
|
-
file=aggr_path.relative_to(self.static_path).as_posix(),
|
|
1040
|
-
field_name=_FIAT_COLUMNS.aggregation_label.format(
|
|
1041
|
-
name=aggr_name
|
|
1042
|
-
),
|
|
1043
|
-
)
|
|
1044
|
-
)
|
|
1045
|
-
|
|
1046
|
-
# No config provided, no aggr areas in the model -> try to use the region file as a mock aggregation area
|
|
1047
|
-
if (
|
|
1048
|
-
not self.fiat_model.spatial_joins["aggregation_areas"]
|
|
1049
|
-
and not self.config.aggregation_areas
|
|
1050
|
-
):
|
|
1051
|
-
exposure_csv = self.fiat_model.exposure.exposure_db
|
|
1052
|
-
region = self.fiat_model.geoms["region"]
|
|
1053
|
-
region = region.explode().reset_index()
|
|
1054
|
-
region["aggr_id"] = ["region_" + str(i) for i in np.arange(len(region)) + 1]
|
|
1055
|
-
aggregation_path = Path(self.fiat_model.root).joinpath(
|
|
1056
|
-
"aggregation_areas", "region.geojson"
|
|
1057
|
-
)
|
|
1058
|
-
if not aggregation_path.parent.exists():
|
|
1059
|
-
aggregation_path.parent.mkdir()
|
|
1060
|
-
|
|
1061
|
-
region.to_file(aggregation_path)
|
|
1062
|
-
aggr = AggregationModel(
|
|
1063
|
-
name="region",
|
|
1064
|
-
file=str(aggregation_path.relative_to(self.static_path).as_posix()),
|
|
1065
|
-
field_name="aggr_id",
|
|
1066
|
-
)
|
|
1067
|
-
aggregation_areas.append(aggr)
|
|
1068
|
-
|
|
1069
|
-
# Add column in FIAT
|
|
1070
|
-
buildings_joined, _ = self.spatial_join(
|
|
1071
|
-
objects=self.fiat_model.exposure.exposure_geoms[
|
|
1072
|
-
self._get_fiat_building_index()
|
|
1073
|
-
],
|
|
1074
|
-
layer=region,
|
|
1075
|
-
field_name="aggr_id",
|
|
1076
|
-
rename=_FIAT_COLUMNS.aggregation_label.format(name="region"),
|
|
1077
|
-
)
|
|
1078
|
-
exposure_csv = exposure_csv.merge(
|
|
1079
|
-
buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
|
|
1080
|
-
)
|
|
1081
|
-
self.fiat_model.exposure.exposure_db = exposure_csv
|
|
1082
|
-
self.logger.warning(
|
|
1083
|
-
"No aggregation areas were available in the FIAT model and none were provided in the config file. The region file will be used as a mock aggregation area."
|
|
1084
|
-
)
|
|
1085
|
-
return aggregation_areas
|
|
1086
|
-
|
|
1087
|
-
def create_svi(self) -> Optional[SVIModel]:
|
|
1088
|
-
if self.config.svi:
|
|
1089
|
-
svi_file = self._check_exists_and_absolute(self.config.svi.file)
|
|
1090
|
-
exposure_csv = self.fiat_model.exposure.exposure_db
|
|
1091
|
-
buildings_joined, svi = self.spatial_join(
|
|
1092
|
-
self.fiat_model.exposure.exposure_geoms[
|
|
1093
|
-
self._get_fiat_building_index()
|
|
1094
|
-
],
|
|
1095
|
-
svi_file,
|
|
1096
|
-
self.config.svi.field_name,
|
|
1097
|
-
rename="SVI",
|
|
1098
|
-
filter=True,
|
|
1099
|
-
)
|
|
1100
|
-
# Add column to exposure
|
|
1101
|
-
if "SVI" in exposure_csv.columns:
|
|
1102
|
-
self.logger.info(
|
|
1103
|
-
f"'SVI' column in the FIAT exposure csv will be replaced by {svi_file.as_posix()}."
|
|
1104
|
-
)
|
|
1105
|
-
del exposure_csv["SVI"]
|
|
1106
|
-
else:
|
|
1107
|
-
self.logger.info(
|
|
1108
|
-
f"'SVI' column in the FIAT exposure csv will be filled by {svi_file.as_posix()}."
|
|
1109
|
-
)
|
|
1110
|
-
exposure_csv = exposure_csv.merge(
|
|
1111
|
-
buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
|
|
1112
|
-
)
|
|
1113
|
-
self.fiat_model.exposure.exposure_db = exposure_csv
|
|
1114
|
-
|
|
1115
|
-
# Save the spatial file for future use
|
|
1116
|
-
svi_path = self.static_path / "templates" / "fiat" / "svi" / "svi.gpkg"
|
|
1117
|
-
svi_path.parent.mkdir(parents=True, exist_ok=True)
|
|
1118
|
-
svi.to_file(svi_path)
|
|
1119
|
-
self.logger.info(
|
|
1120
|
-
f"An SVI map can be shown in FloodAdapt GUI using '{self.config.svi.field_name}' column from {svi_file.as_posix()}"
|
|
1121
|
-
)
|
|
1122
|
-
|
|
1123
|
-
return SVIModel(
|
|
1124
|
-
geom=Path(svi_path.relative_to(self.static_path)).as_posix(),
|
|
1125
|
-
field_name="SVI",
|
|
1126
|
-
)
|
|
1127
|
-
elif "SVI" in self.fiat_model.exposure.exposure_db.columns:
|
|
1128
|
-
self.logger.info(
|
|
1129
|
-
"'SVI' column present in the FIAT exposure csv. Vulnerability type infometrics can be produced."
|
|
1130
|
-
)
|
|
1131
|
-
add_attrs = self.fiat_model.spatial_joins["additional_attributes"]
|
|
1132
|
-
if "SVI" not in [attr["name"] for attr in add_attrs]:
|
|
1133
|
-
self.logger.warning(
|
|
1134
|
-
"No SVI map found to display in the FloodAdapt GUI!"
|
|
1135
|
-
)
|
|
1136
|
-
|
|
1137
|
-
ind = [attr["name"] for attr in add_attrs].index("SVI")
|
|
1138
|
-
svi = add_attrs[ind]
|
|
1139
|
-
svi_path = self.static_path / "templates" / "fiat" / svi["file"]
|
|
1140
|
-
self.logger.info(
|
|
1141
|
-
f"An SVI map can be shown in FloodAdapt GUI using '{svi['field_name']}' column from {svi['file']}"
|
|
1142
|
-
)
|
|
1143
|
-
# Save site attributes
|
|
1144
|
-
return SVIModel(
|
|
1145
|
-
geom=Path(svi_path.relative_to(self.static_path)).as_posix(),
|
|
1146
|
-
field_name=svi["field_name"],
|
|
1147
|
-
)
|
|
1148
|
-
|
|
1149
|
-
else:
|
|
1150
|
-
self.logger.warning(
|
|
1151
|
-
"'SVI' column not present in the FIAT exposure csv. Vulnerability type infometrics cannot be produced."
|
|
1152
|
-
)
|
|
1153
|
-
return None
|
|
1154
|
-
|
|
1155
|
-
### SFINCS ###
|
|
1156
|
-
def create_sfincs_config(self) -> SfincsModel:
|
|
1157
|
-
# call these functions before others to make sure water level references are updated
|
|
1158
|
-
config = self.create_sfincs_model_config()
|
|
1159
|
-
tide_gauge = self.create_tide_gauge()
|
|
1160
|
-
|
|
1161
|
-
sfincs = SfincsModel(
|
|
1162
|
-
config=config,
|
|
1163
|
-
water_level=self.water_level_references,
|
|
1164
|
-
slr_scenarios=self.create_slr(),
|
|
1165
|
-
dem=self.create_dem_model(),
|
|
1166
|
-
scs=self.create_scs_model(),
|
|
1167
|
-
cyclone_track_database=self.create_cyclone_track_database(),
|
|
1168
|
-
tide_gauge=tide_gauge,
|
|
1169
|
-
river=self.create_rivers(),
|
|
1170
|
-
obs_point=self.create_observation_points(),
|
|
1171
|
-
)
|
|
1172
|
-
|
|
1173
|
-
return sfincs
|
|
1174
|
-
|
|
1175
|
-
def create_cyclone_track_database(self) -> Optional[CycloneTrackDatabaseModel]:
|
|
1176
|
-
if not self.config.cyclones or not self.config.sfincs_offshore:
|
|
1177
|
-
self.logger.warning("No cyclones will be available in the database.")
|
|
1178
|
-
return None
|
|
1179
|
-
|
|
1180
|
-
if self.config.cyclone_basin:
|
|
1181
|
-
basin = self.config.cyclone_basin
|
|
1182
|
-
else:
|
|
1183
|
-
basin = "ALL"
|
|
1184
|
-
|
|
1185
|
-
name = f"IBTrACS.{basin.value}.v04r01.nc"
|
|
1186
|
-
url = f"https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/netcdf/{name}"
|
|
1187
|
-
self.logger.info(f"Downloading cyclone track database from {url}")
|
|
1188
|
-
fn = Path(self.root) / "static" / "cyclone_track_database" / name
|
|
1189
|
-
fn.parent.mkdir(parents=True, exist_ok=True)
|
|
1190
|
-
|
|
1191
|
-
try:
|
|
1192
|
-
urlretrieve(url, fn)
|
|
1193
|
-
except Exception:
|
|
1194
|
-
raise RuntimeError(f"Could not retrieve cyclone track database from {url}")
|
|
1195
|
-
|
|
1196
|
-
return CycloneTrackDatabaseModel(file=name)
|
|
1197
|
-
|
|
1198
|
-
def create_scs_model(self) -> Optional[SCSModel]:
|
|
1199
|
-
if self.config.scs is None:
|
|
1200
|
-
return None
|
|
1201
|
-
scs_file = self._check_exists_and_absolute(self.config.scs.file)
|
|
1202
|
-
db_scs_file = self.static_path / "scs" / scs_file.name
|
|
1203
|
-
db_scs_file.parent.mkdir(parents=True, exist_ok=True)
|
|
1204
|
-
shutil.copy2(scs_file, db_scs_file)
|
|
1205
|
-
|
|
1206
|
-
return SCSModel(file=scs_file.name, type=self.config.scs.type)
|
|
1207
|
-
|
|
1208
|
-
def create_dem_model(self) -> DemModel:
|
|
1209
|
-
if self.config.dem:
|
|
1210
|
-
subgrid_sfincs = Path(self.config.dem.filename)
|
|
1211
|
-
else:
|
|
1212
|
-
self.logger.warning(
|
|
1213
|
-
"No subgrid depth geotiff file provided in the config file. Using the one from the SFINCS model."
|
|
1214
|
-
)
|
|
1215
|
-
subgrid_sfincs = (
|
|
1216
|
-
Path(self.sfincs_overland_model.root) / "subgrid" / "dep_subgrid.tif"
|
|
1217
|
-
)
|
|
1218
|
-
|
|
1219
|
-
dem_file = self._check_exists_and_absolute(subgrid_sfincs)
|
|
1220
|
-
fa_subgrid_path = self.static_path / "dem" / dem_file.name
|
|
1221
|
-
fa_subgrid_path.parent.mkdir(parents=True, exist_ok=True)
|
|
1222
|
-
|
|
1223
|
-
# Check tiles
|
|
1224
|
-
tiles_sfincs = Path(self.sfincs_overland_model.root) / "tiles"
|
|
1225
|
-
fa_tiles_path = self.static_path / "dem" / "tiles"
|
|
1226
|
-
if tiles_sfincs.exists():
|
|
1227
|
-
shutil.move(tiles_sfincs, fa_tiles_path)
|
|
1228
|
-
if (fa_tiles_path / "index").exists():
|
|
1229
|
-
os.rename(fa_tiles_path / "index", fa_tiles_path / "indices")
|
|
1230
|
-
self.logger.info(
|
|
1231
|
-
"Tiles were already available in the SFINCS model and will directly be used in FloodAdapt."
|
|
1232
|
-
)
|
|
1233
|
-
else:
|
|
1234
|
-
# Make tiles
|
|
1235
|
-
fa_tiles_path.mkdir(parents=True)
|
|
1236
|
-
self.sfincs_overland_model.setup_tiles(
|
|
1237
|
-
path=fa_tiles_path,
|
|
1238
|
-
datasets_dep=[{"elevtn": dem_file}],
|
|
1239
|
-
zoom_range=[0, 13],
|
|
1240
|
-
fmt="png",
|
|
1241
|
-
)
|
|
1242
|
-
self.logger.info(
|
|
1243
|
-
f"Tiles were created using the {subgrid_sfincs.as_posix()} as the elevation map."
|
|
1244
|
-
)
|
|
1245
|
-
|
|
1246
|
-
shutil.copy2(dem_file, fa_subgrid_path)
|
|
1247
|
-
self._dem_path = fa_subgrid_path
|
|
1248
|
-
return DemModel(
|
|
1249
|
-
filename=fa_subgrid_path.name, units=us.UnitTypesLength.meters
|
|
1250
|
-
) # always in meters
|
|
1251
|
-
|
|
1252
|
-
def create_sfincs_model_config(self) -> SfincsConfigModel:
|
|
1253
|
-
config = SfincsConfigModel(
|
|
1254
|
-
csname=self.sfincs_overland_model.crs.name,
|
|
1255
|
-
cstype=Cstype(
|
|
1256
|
-
self.sfincs_overland_model.crs.type_name.split(" ")[0].lower()
|
|
1257
|
-
),
|
|
1258
|
-
offshore_model=self.create_offshore_model(),
|
|
1259
|
-
overland_model=self.create_overland_model(),
|
|
1260
|
-
floodmap_units=self.unit_system.default_length_units,
|
|
1261
|
-
save_simulation=False,
|
|
1262
|
-
)
|
|
1263
|
-
|
|
1264
|
-
return config
|
|
1265
|
-
|
|
1266
|
-
def create_slr(self) -> Optional[SlrScenariosModel]:
|
|
1267
|
-
if self.config.slr_scenarios is None:
|
|
1268
|
-
return None
|
|
1269
|
-
|
|
1270
|
-
self.config.slr_scenarios.file = str(
|
|
1271
|
-
self._check_exists_and_absolute(self.config.slr_scenarios.file)
|
|
1272
|
-
)
|
|
1273
|
-
slr_path = self.static_path / "slr_scenarios"
|
|
1274
|
-
slr_path.mkdir()
|
|
1275
|
-
new_file = slr_path / Path(self.config.slr_scenarios.file).name
|
|
1276
|
-
shutil.copyfile(self.config.slr_scenarios.file, new_file)
|
|
1277
|
-
|
|
1278
|
-
return SlrScenariosModel(
|
|
1279
|
-
file=new_file.relative_to(self.static_path).as_posix(),
|
|
1280
|
-
relative_to_year=self.config.slr_scenarios.relative_to_year,
|
|
1281
|
-
)
|
|
1282
|
-
|
|
1283
|
-
def create_observation_points(self) -> Union[list[ObsPointModel], None]:
|
|
1284
|
-
if self.config.obs_point is None:
|
|
1285
|
-
return None
|
|
1286
|
-
|
|
1287
|
-
self.logger.info("Observation points were provided in the config file.")
|
|
1288
|
-
return self.config.obs_point
|
|
1289
|
-
|
|
1290
|
-
def create_rivers(self) -> list[RiverModel]:
|
|
1291
|
-
src_file = Path(self.sfincs_overland_model.root) / "sfincs.src"
|
|
1292
|
-
if not src_file.exists():
|
|
1293
|
-
self.logger.warning("No rivers found in the SFINCS model.")
|
|
1294
|
-
return []
|
|
1295
|
-
|
|
1296
|
-
df = pd.read_csv(src_file, delim_whitespace=True, header=None, names=["x", "y"])
|
|
1297
|
-
river_locs = gpd.GeoDataFrame(
|
|
1298
|
-
df,
|
|
1299
|
-
geometry=gpd.points_from_xy(df.x, df.y),
|
|
1300
|
-
crs=self.sfincs_overland_model.crs,
|
|
1301
|
-
)
|
|
1302
|
-
rivers = []
|
|
1303
|
-
for idx, row in river_locs.iterrows():
|
|
1304
|
-
if "dis" in self.sfincs_overland_model.forcing:
|
|
1305
|
-
discharge = (
|
|
1306
|
-
self.sfincs_overland_model.forcing["dis"]
|
|
1307
|
-
.sel(index=idx + 1)
|
|
1308
|
-
.to_numpy()
|
|
1309
|
-
.mean()
|
|
1310
|
-
)
|
|
1311
|
-
else:
|
|
1312
|
-
discharge = 0
|
|
1313
|
-
self.logger.warning(
|
|
1314
|
-
f"No river discharge conditions were found in the SFINCS model for river {idx}. A default value of 0 will be used."
|
|
1315
|
-
)
|
|
1316
|
-
|
|
1317
|
-
river = RiverModel(
|
|
1318
|
-
name=f"river_{idx}",
|
|
1319
|
-
x_coordinate=row.x,
|
|
1320
|
-
y_coordinate=row.y,
|
|
1321
|
-
mean_discharge=us.UnitfulDischarge(
|
|
1322
|
-
value=discharge, units=self.unit_system.default_discharge_units
|
|
1323
|
-
),
|
|
1324
|
-
)
|
|
1325
|
-
rivers.append(river)
|
|
1326
|
-
|
|
1327
|
-
self.logger.info(
|
|
1328
|
-
f"{len(river_locs)} river(s) were identified from the SFINCS model and will be available in FloodAdapt for discharge input."
|
|
1329
|
-
)
|
|
1330
|
-
|
|
1331
|
-
return rivers
|
|
1332
|
-
|
|
1333
|
-
def create_tide_gauge(self) -> Optional[TideGauge]:
    """Create the tide gauge configuration for the site.

    Depending on ``config.tide_gauge.source`` this either registers a local
    water-level file in the database ('file' source) or configures a NOAA
    CO-OPS station ('noaa_coops' source), in which case the station datums
    (local datum, MSL, MLLW, MHHW) are appended to
    ``self.water_level_references``.

    Returns
    -------
    Optional[TideGauge]
        The tide gauge model, or None when no gauge is configured or the
        source is not recognized.

    Raises
    ------
    ValueError
        If the 'file' source is selected but no file path is given.
    """
    if self.config.tide_gauge is None:
        self.logger.warning(
            "Tide gauge information not provided. Historical events will not have an option to use gauged data in FloodAdapt!"
        )
        self.logger.warning(
            "No water level references were found. It is assumed that MSL is equal to the datum used in the SFINCS overland model. You can provide these values with the tide_gauge.ref attribute in the site.toml."
        )
        return None

    if self.config.tide_gauge.source == TideGaugeSource.file:
        if self.config.tide_gauge.file is None:
            raise ValueError(
                "Tide gauge file needs to be provided when 'file' is selected as the source."
            )
        if self.config.tide_gauge.ref is None:
            self.logger.warning(
                "Tide gauge reference not provided. MSL is assumed as the reference of the water levels in the file."
            )
            self.config.tide_gauge.ref = "MSL"

        # Copy the user's water-level file into the database's static folder
        # so the database is self-contained.
        tide_gauge_file = self._check_exists_and_absolute(
            self.config.tide_gauge.file
        )
        db_file_path = Path(self.static_path / "tide_gauges") / tide_gauge_file.name

        db_file_path.parent.mkdir(parents=True, exist_ok=True)
        shutil.copyfile(self.config.tide_gauge.file, db_file_path)

        # The site config stores the file path relative to the static folder.
        rel_db_path = Path(db_file_path.relative_to(self.static_path))
        self.logger.warning(
            f"Tide gauge from file {rel_db_path} assumed to be in {self.unit_system.default_length_units}!"
        )
        tide_gauge = TideGauge(
            reference=self.config.tide_gauge.ref,
            description="Observations from file stored in database",
            source=TideGaugeSource.file,
            file=rel_db_path,
            lon=self.config.tide_gauge.lon,
            lat=self.config.tide_gauge.lat,
            units=self.unit_system.default_length_units,
        )

        return tide_gauge

    elif self.config.tide_gauge.source == TideGaugeSource.noaa_coops:
        if self.config.tide_gauge.ref is not None:
            ref = self.config.tide_gauge.ref
        else:
            ref = "MLLW"  # If reference is not provided use MLLW

        self.water_level_references.reference = (
            ref  # update the water level reference
        )

        # Use the configured station ID, or search for the station closest
        # to the SFINCS domain.
        if self.config.tide_gauge.id is None:
            station_id = self._get_closest_station()
            self.logger.info(
                "The closest NOAA tide gauge station to the site will be searched."
            )
        else:
            station_id = self.config.tide_gauge.id
            self.logger.info(
                f"The NOAA tide gauge station with the provided ID {station_id} will be used."
            )
        station = self._get_station_metadata(station_id=station_id, ref=ref)
        # NOTE(review): if no station metadata is found, execution falls
        # through and the method implicitly returns None without a warning.
        if station is not None:
            # Add tide_gauge information in site toml
            tide_gauge = TideGauge(
                name=station["name"],
                description=f"observations from '{self.config.tide_gauge.source}' api",
                source=self.config.tide_gauge.source,
                reference=ref,
                ID=int(station["id"]),
                lon=station["lon"],
                lat=station["lat"],
                units=us.UnitTypesLength.meters,  # the api always asks for SI units right now
            )

            # Station datums are converted to the site's default length units.
            local_datum = DatumModel(
                name=station["datum_name"],
                height=us.UnitfulLength(
                    value=station["datum"], units=station["units"]
                ).transform(self.unit_system.default_length_units),
            )
            self.water_level_references.datums.append(local_datum)

            msl = DatumModel(
                name="MSL",
                height=us.UnitfulLength(
                    value=station["msl"], units=station["units"]
                ).transform(self.unit_system.default_length_units),
            )
            # Check if MSL is already there and if yes replace it
            existing_msl = next(
                (
                    datum
                    for datum in self.water_level_references.datums
                    if datum.name == "MSL"
                ),
                None,
            )
            if existing_msl:
                self.water_level_references.datums.remove(existing_msl)
            self.water_level_references.datums.append(msl)

            # Add tidal datums from the station metadata as well.
            for name in ["MLLW", "MHHW"]:
                height = us.UnitfulLength(
                    value=station[name.lower()], units=station["units"]
                ).transform(self.unit_system.default_length_units)

                wl_info = DatumModel(
                    name=name,
                    height=height,
                )
                self.water_level_references.datums.append(wl_info)
            return tide_gauge
    else:
        self.logger.warning(
            f"Tide gauge source not recognized: {self.config.tide_gauge.source}. Historical events will not have an option to use gauged data in FloodAdapt!"
        )
        return None
|
|
1455
|
-
|
|
1456
|
-
def create_offshore_model(self) -> Optional[FloodModel]:
    """Configure the offshore SFINCS model and couple it to the overland model.

    Rewrites the offshore model's observation points ("sfincs.obs") so they
    coincide with the overland model's boundary points ("sfincs.bnd"),
    allowing offshore output to drive the overland boundary conditions.

    Returns
    -------
    Optional[FloodModel]
        The offshore flood model description, or None when no offshore
        model is available.
    """
    if self.sfincs_offshore_model is None:
        return None
    # Connect boundary points of overland to output points of offshore
    fn = Path(self.sfincs_overland_model.root) / "sfincs.bnd"
    bnd = pd.read_csv(fn, sep=" ", lineterminator="\n", header=None)
    bnd = bnd.rename(columns={0: "x", 1: "y"})
    bnd_geo = gpd.GeoDataFrame(
        bnd,
        geometry=gpd.points_from_xy(bnd.x, bnd.y),
        crs=self.sfincs_overland_model.config["epsg"],
    )
    # Observation points are written in geographic coordinates (EPSG:4326).
    obs_geo = bnd_geo.to_crs(4326)
    obs_geo["x"] = obs_geo.geometry.x
    obs_geo["y"] = obs_geo.geometry.y
    del obs_geo["geometry"]
    # Names are 1-based and zero-padded: bnd_pt01, bnd_pt02, ...
    obs_geo["name"] = [f"bnd_pt{num:02d}" for num in range(1, len(obs_geo) + 1)]
    fn_off = Path(self.sfincs_offshore_model.root) / "sfincs.obs"
    obs_geo.to_csv(
        fn_off,
        sep="\t",
        index=False,
        header=False,
    )
    self.logger.info(
        "Output points of the offshore SFINCS model were reconfigured to the boundary points of the overland SFINCS model."
    )

    return FloodModel(
        name="offshore",
        reference=self.config.sfincs_offshore.reference,
        vertical_offset=self.config.sfincs_offshore.vertical_offset,
    )
|
|
1489
|
-
|
|
1490
|
-
def create_overland_model(self) -> FloodModel:
    """Build the FloodModel entry describing the overland SFINCS model."""
    overland_reference = self.config.sfincs_overland.reference
    return FloodModel(name="overland", reference=overland_reference)
|
|
1495
|
-
|
|
1496
|
-
### SITE ###
|
|
1497
|
-
def create_site_config(self) -> Site:
    """Assemble the full FloodAdapt site configuration.

    The first steps are order-sensitive:
    1. SFINCS config — needs water level references, updates them with an
       optional tide gauge.
    2. Probabilistic event set import.
    3. FIAT model — needs the updated water level references; provides SVI
       and exposure geometries.
    4. GUI model — needs both of the above; provides output layers,
       visualization layers and plotting.
    """
    sfincs_config = self.create_sfincs_config()
    self.add_probabilistic_set()
    fiat_config = self.create_fiat_model()
    gui_config = self.create_gui_config()

    # The remaining pieces can be gathered in any order.
    lon, lat = self.read_location()
    standard_objects = self.set_standard_objects()
    if self.config.description:
        site_description = self.config.description
    else:
        site_description = self.config.name

    return Site(
        name=self.config.name,
        description=site_description,
        lat=lat,
        lon=lon,
        fiat=fiat_config,
        gui=gui_config,
        sfincs=sfincs_config,
        standard_objects=standard_objects,
    )
|
|
1535
|
-
|
|
1536
|
-
def read_location(self) -> tuple[float, float]:
    """Return (lon, lat) of the centroid of the area of interest in EPSG:4326."""
    region = self.fiat_model.region
    if region.empty:
        # No region available: fall back to the building exposure geometries.
        buildings = self.fiat_model.exposure.exposure_geoms[
            self._get_fiat_building_index()
        ]
        center = buildings.dissolve().centroid.to_crs(4326)[0]
    else:
        center = region.dissolve().centroid.to_crs(4326)[0]
    return center.x, center.y
|
|
1547
|
-
|
|
1548
|
-
def create_gui_config(self) -> GuiModel:
    """Compose the GUI configuration from units, plotting and layer settings."""
    return GuiModel(
        units=self.unit_system,
        plotting=self.create_hazard_plotting_config(),
        output_layers=self.create_output_layers_config(),
        visualization_layers=self.create_visualization_layers(),
    )
|
|
1557
|
-
|
|
1558
|
-
def create_default_units(self) -> GuiUnitModel:
    """Return the default GUI unit model matching the configured unit system.

    Raises
    ------
    ValueError
        If the configured unit system is neither imperial nor metric.
    """
    factories = {
        UnitSystems.imperial: GuiUnitModel.imperial,
        UnitSystems.metric: GuiUnitModel.metric,
    }
    factory = factories.get(self.config.unit_system)
    if factory is None:
        raise ValueError(
            f"Unit system {self.config.unit_system} not recognized. Please choose 'imperial' or 'metric'."
        )
    return factory()
|
|
1567
|
-
|
|
1568
|
-
def create_visualization_layers(self) -> VisualizationLayers:
    """Build the visualization layers; an SVI layer is added when SVI data exists."""
    layers = VisualizationLayers()
    if self._svi is None:
        return layers
    layers.add_layer(
        name="svi",
        long_name="Social Vulnerability Index (SVI)",
        path=str(self.static_path / self._svi.geom),
        database_path=self.root,
        field_name="SVI",
        bins=[0.05, 0.2, 0.4, 0.6, 0.8],
    )
    return layers
|
|
1580
|
-
|
|
1581
|
-
def create_output_layers_config(self) -> OutputLayers:
    """Create the map output layer configuration (flood map, damages, benefits).

    Bin edges are scaled from the user-configured maxima in the GUI section
    of the config. A benefits layer is only created when a probabilistic
    event set is available (i.e. risk scenarios can be run).
    """
    fd_max = self.config.gui.max_flood_depth
    ad_max = self.config.gui.max_aggr_dmg
    ftd_max = self.config.gui.max_footprint_dmg
    b_max = self.config.gui.max_benefits

    # White-to-dark-red ramp shared by both damage layers.
    damage_colors = [
        "#FFFFFF",
        "#FEE9CE",
        "#FDBB84",
        "#FC844E",
        "#E03720",
        "#860000",
    ]

    if self.config.probabilistic_set is None:
        benefits_layer = None
    else:
        benefits_layer = BenefitsLayer(
            bins=[0, 0.01, 0.02 * b_max, 0.2 * b_max, b_max],
            colors=[
                "#FF7D7D",
                "#FFFFFF",
                "#DCEDC8",
                "#AED581",
                "#7CB342",
                "#33691E",
            ],
            threshold=0.0,
        )

    floodmap_layer = FloodMapLayer(
        bins=[0.2 * fd_max, 0.6 * fd_max, fd_max],
        colors=["#D7ECFB", "#8ABDDD", "#1C73A4", "#081D58"],
        zbmax=-9999,
        depth_min=0.0,
    )
    aggregation_layer = AggregationDmgLayer(
        bins=[0.00001, 0.1 * ad_max, 0.25 * ad_max, 0.5 * ad_max, ad_max],
        colors=list(damage_colors),
    )
    footprints_layer = FootprintsDmgLayer(
        bins=[0.00001, 0.06 * ftd_max, 0.2 * ftd_max, 0.4 * ftd_max, ftd_max],
        colors=list(damage_colors),
    )

    return OutputLayers(
        floodmap=floodmap_layer,
        aggregation_dmg=aggregation_layer,
        footprints_dmg=footprints_layer,
        benefits=benefits_layer,
    )
|
|
1635
|
-
|
|
1636
|
-
def create_hazard_plotting_config(self) -> PlottingModel:
    """Build the plotting configuration for the synthetic tide in the GUI.

    The tidal amplitude defaults to MHHW minus MSL when an MHHW datum is
    available, otherwise 0. The tide is referenced to MSL when that datum
    exists, otherwise to the main water level reference.
    """
    datum_names = [datum.name for datum in self.water_level_references.datums]

    if "MHHW" not in datum_names:
        amplitude = us.UnitfulLength(
            value=0.0, units=self.unit_system.default_length_units
        )
        self.logger.warning(
            "The default tidal amplitude in the GUI will be 0.0, since no tide-gauge water levels are available. You can change this in the site.toml with the 'gui.tide_harmonic_amplitude' attribute."
        )
    else:
        mhhw_height = self.water_level_references.get_datum("MHHW").height
        msl_height = self.water_level_references.get_datum("MSL").height
        amplitude = mhhw_height - msl_height
        self.logger.info(
            f"The default tidal amplitude in the GUI will be {amplitude.transform(self.unit_system.default_length_units)}, calculated as the difference between MHHW and MSL from the tide gauge data."
        )

    tide_datum = "MSL"
    if tide_datum not in datum_names:
        self.logger.warning(
            f"The Mean Sea Level (MSL) datum is not available in the site.toml. The synthetic tide will be created relative to the main reference: {self.water_level_references.reference}."
        )
        tide_datum = self.water_level_references.reference

    return PlottingModel(
        synthetic_tide=SyntheticTideModel(
            harmonic_amplitude=amplitude,
            datum=tide_datum,
        ),
        excluded_datums=self.config.excluded_datums,
    )
|
|
1670
|
-
|
|
1671
|
-
def create_infometrics(self):
    """
    Copy the infometrics and infographics templates to the appropriate location and modifies the metrics_config.toml files.

    This method copies the templates from the 'infometrics' and 'infographics' folders to the 'static/templates' folder in the root directory.
    It then modifies the 'metrics_config.toml' and 'metrics_config_risk.toml' files by updating the 'aggregateBy' attribute with the names
    of the aggregations defined in the 'fiat' section of the 'site_attrs' attribute.
    """
    # TODO there should be generalized infometric queries with NSI or OSM, and with SVI or without. Then Based on the user input these should be chosen automatically
    templates_path = Path(__file__).parent.resolve().joinpath("templates")

    # Create template folder
    # NOTE(review): mkdir() without exist_ok — this raises if the folder
    # already exists; presumably the database is always built from scratch.
    path_im = self.root.joinpath("static", "templates", "infometrics")
    path_im.mkdir()

    # Copy mandatory metric configs
    path_im_temp = templates_path.joinpath("infometrics")
    for file in path_im_temp.glob("*.toml"):
        shutil.copy2(file, path_im)

    self._create_optional_infometrics(templates_path, path_im)

    files = list(path_im.glob("*metrics_config*.toml"))
    # Update aggregation areas in metrics config
    for file in files:
        file = path_im.joinpath(file)
        with open(file, "rb") as f:
            attrs = tomli.load(f)

        # add aggration levels
        if self._aggregation_areas is None:
            self._aggregation_areas = self.create_aggregation_areas()
        attrs["aggregateBy"] = [aggr.name for aggr in self._aggregation_areas]

        # take out road metrics if needed
        if not self._has_roads:
            attrs["queries"] = [
                query
                for query in attrs["queries"]
                if "road" not in query["name"].lower()
            ]

        # Replace Damage Unit
        # TODO do this in a better manner
        for i, query in enumerate(attrs["queries"]):
            if "$" in query["long_name"]:
                query["long_name"] = query["long_name"].replace(
                    "$", self.read_damage_unit()
                )

        # replace the SVI threshold if needed
        if self.config.svi:
            for i, query in enumerate(attrs["queries"]):
                query["filter"] = query["filter"].replace(
                    "SVI_threshold", str(self.config.svi.threshold)
                )

        # Write the modified config back in place.
        with open(file, "wb") as f:
            tomli_w.dump(attrs, f)
|
|
1730
|
-
|
|
1731
|
-
def _create_optional_infometrics(self, templates_path: Path, path_im: Path):
    """Copy the optional infometrics/infographics templates into the database.

    Selects the template set by unit system (imperial -> US_NSI,
    metric -> OSM) and by whether SVI data is configured, then copies the
    metric configs, infographic configs, stylesheet and images.

    Parameters
    ----------
    templates_path : Path
        Root folder of the packaged templates.
    path_im : Path
        Destination folder for the infometrics configs
        (static/templates/infometrics).

    Raises
    ------
    ValueError
        If the configured unit system is neither imperial nor metric.
    """
    # If infographics are going to be created in FA, get template metric configurations
    if not self.config.infographics:
        return

    # Check what type of infographics should be used
    if self.config.unit_system == UnitSystems.imperial:
        metrics_folder_name = "US_NSI"
        self.logger.info(
            "Default NSI infometrics and infographics will be created."
        )
    elif self.config.unit_system == UnitSystems.metric:
        metrics_folder_name = "OSM"
        self.logger.info(
            "Default OSM infometrics and infographics will be created."
        )
    else:
        raise ValueError(
            f"Unit system {self.config.unit_system} is not recognized. Please choose 'imperial' or 'metric'."
        )

    if self.config.svi is not None:
        svi_folder_name = "with_SVI"
    else:
        svi_folder_name = "without_SVI"

    # Copy metrics config for infographics
    path_0 = templates_path.joinpath(
        "infometrics", metrics_folder_name, svi_folder_name
    )
    for file in path_0.glob("*.toml"):
        shutil.copy2(file, path_im)

    # Copy additional risk config
    file = templates_path.joinpath(
        "infometrics",
        metrics_folder_name,
        "metrics_additional_risk_configs.toml",
    )
    shutil.copy2(file, path_im)

    # Copy infographics config
    path_ig_temp = templates_path.joinpath("infographics", metrics_folder_name)
    path_ig = self.root.joinpath("static", "templates", "infographics")
    # NOTE(review): mkdir() assumes 'static/templates' already exists (it is
    # created by create_infometrics / make_folder_structure).
    path_ig.mkdir()
    files_ig = ["styles.css", "config_charts.toml"]

    # SVI-dependent infographic configs are only copied when SVI is set up.
    if self.config.svi is not None:
        files_ig.append("config_risk_charts.toml")
        files_ig.append("config_people.toml")

    if self._has_roads:
        files_ig.append("config_roads.toml")

    for file in files_ig:
        shutil.copy2(path_ig_temp.joinpath(file), path_ig.joinpath(file))

    # Copy images
    path_0 = templates_path.joinpath("infographics", "images")
    path_1 = self.root.joinpath("static", "templates", "infographics", "images")
    shutil.copytree(path_0, path_1)
|
|
1792
|
-
|
|
1793
|
-
def add_static_files(self):
    """
    Copy static files from the 'templates' folder to the 'static' folder.

    Copies each of the packaged template folders ('icons' and
    'green_infra_table') into the corresponding subfolder of the database's
    static directory.
    """
    templates_path = Path(__file__).parent.resolve().joinpath("templates")
    for folder_name in ("icons", "green_infra_table"):
        source_dir = templates_path.joinpath(folder_name)
        target_dir = self.static_path / folder_name
        shutil.copytree(source_dir, target_dir)
|
|
1806
|
-
|
|
1807
|
-
def add_probabilistic_set(self):
    """Import the probabilistic event set into the database, if configured.

    Copies the configured event-set folder into input/events and records its
    name in ``self._probabilistic_set_name`` (None when not configured, in
    which case risk scenarios cannot be run).
    """
    prob_set = self.config.probabilistic_set
    if not prob_set:
        self.logger.warning(
            "Probabilistic event set not provided. Risk scenarios cannot be run in FloodAdapt."
        )
        self._probabilistic_set_name = None
        return

    self.logger.info(f"Probabilistic event set imported from {prob_set}")
    prob_event_name = Path(prob_set).name
    destination = self.root.joinpath("input", "events", prob_event_name)
    shutil.copytree(prob_set, destination)
    self._probabilistic_set_name = prob_event_name
|
|
1822
|
-
|
|
1823
|
-
### HELPER FUNCTIONS ###
|
|
1824
|
-
def make_folder_structure(self) -> None:
    """
    Create the folder structure for the database.

    Creates the input/ subfolders (events, projections, measures,
    strategies, scenarios, benefits) and the static/ subfolders
    (templates, config). Existing folders are left untouched.
    """
    self.logger.info("Preparing the database folder structure.")
    input_subfolders = (
        "events",
        "projections",
        "measures",
        "strategies",
        "scenarios",
        "benefits",
    )
    for subfolder in input_subfolders:
        (self.root / "input" / subfolder).mkdir(parents=True, exist_ok=True)

    # Prepare static folder structure
    for subfolder in ("templates", "config"):
        (self.static_path / subfolder).mkdir(parents=True, exist_ok=True)
|
|
1848
|
-
|
|
1849
|
-
def _check_exists_and_absolute(self, path: str) -> Path:
|
|
1850
|
-
"""Check if the path is absolute or relative and return a Path object. Raises an error if the path is not valid."""
|
|
1851
|
-
if not Path(path).exists():
|
|
1852
|
-
raise FileNotFoundError(f"Path {path} does not exist.")
|
|
1853
|
-
|
|
1854
|
-
if Path(path).is_absolute():
|
|
1855
|
-
return Path(path)
|
|
1856
|
-
else:
|
|
1857
|
-
raise ValueError(f"Path {path} is not absolute.")
|
|
1858
|
-
|
|
1859
|
-
def _join_building_footprints(
    self, building_footprints: gpd.GeoDataFrame, field_name: str
) -> Path:
    """
    Join building footprints with existing building data and updates the exposure CSV.

    Args:
        building_footprints (GeoDataFrame): GeoDataFrame containing the building footprints to be joined.
        field_name (str): The field name to use for the spatial join.

    Returns
    -------
    This method performs the following steps:
    1. Reads the exposure CSV file.
    2. Performs a spatial join between the buildings and building footprints.
    3. Ensures that in case of multiple values, the first is kept.
    4. Creates a folder to store the building footprints.
    5. Saves the spatial file for future use.
    6. Merges the joined buildings with the exposure CSV and saves it.
    7. Updates the site attributes with the relative path to the saved building footprints.
    8. Logs the location where the building footprints are saved.
    """
    buildings = self.fiat_model.exposure.exposure_geoms[
        self._get_fiat_building_index()
    ]
    exposure_csv = self.fiat_model.exposure.exposure_db
    # A pre-existing footprint link column is dropped so the new join wins.
    if "BF_FID" in exposure_csv.columns:
        self.logger.warning(
            "Column 'BF_FID' already exists in the exposure columns and will be replaced."
        )
        del exposure_csv["BF_FID"]
    # filter=True also trims the footprint layer to footprints actually used.
    buildings_joined, building_footprints = self.spatial_join(
        buildings,
        building_footprints,
        field_name,
        rename="BF_FID",
        filter=True,
    )
    # Make sure in case of multiple values that the first is kept
    buildings_joined = (
        buildings_joined.groupby(_FIAT_COLUMNS.object_id)
        .first()
        .sort_values(by=[_FIAT_COLUMNS.object_id])
    )
    # Create folder
    bf_folder = Path(self.fiat_model.root) / "exposure" / "building_footprints"
    bf_folder.mkdir(parents=True, exist_ok=True)

    # Save the spatial file for future use
    geo_path = bf_folder / "building_footprints.gpkg"
    building_footprints.to_file(geo_path)

    # Save to exposure csv
    exposure_csv = exposure_csv.merge(
        buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
    )

    # Set model building footprints
    self.fiat_model.building_footprint = building_footprints
    self.fiat_model.exposure.exposure_db = exposure_csv

    # Save site attributes
    # NOTE(review): this assumes the FIAT model root is inside static_path —
    # relative_to raises otherwise; confirm against the builder setup.
    buildings_path = geo_path.relative_to(self.static_path)
    self.logger.info(
        f"Building footprints saved at {(self.static_path / buildings_path).resolve().as_posix()}"
    )

    return buildings_path
|
|
1927
|
-
|
|
1928
|
-
def _clip_hazard_extend(self, clip_footprints=True):
    """
    Clip the exposure data to the bounding box of the hazard data.

    This method clips the exposure data to the bounding box of the hazard data. It creates a GeoDataFrame
    from the hazard polygons, and then uses the `gpd.clip` function to clip the exposure geometries to the
    bounding box of the hazard polygons. If the exposure data contains roads, it is split into two separate
    GeoDataFrames: one for buildings and one for roads. The clipped exposure data is then saved back to the
    `exposure_db` attribute of the `FiatModel` object.

    Parameters
    ----------
    clip_footprints : bool
        When True (default), the building footprint layer is also reduced
        to footprints referenced by the remaining buildings.

    Returns
    -------
    None
    """
    gdf = self.fiat_model.exposure.get_full_gdf(
        self.fiat_model.exposure.exposure_db
    )
    crs = gdf.crs
    sfincs_extend = self.sfincs_overland_model.region
    sfincs_extend = sfincs_extend.to_crs(crs)

    # Clip the fiat region
    clipped_region = self.fiat_model.region.to_crs(crs).clip(sfincs_extend)
    self.fiat_model.geoms["region"] = clipped_region

    # Clip the exposure geometries
    # Filter buildings and roads
    road_inds = gdf[_FIAT_COLUMNS.primary_object_type].str.contains("road")
    # Ensure road_inds is a boolean Series
    if not road_inds.dtype == bool:
        road_inds = road_inds.astype(bool)
    # Clip buildings
    gdf_buildings = gdf[~road_inds]
    gdf_buildings = self._clip_gdf(
        gdf_buildings, clipped_region, predicate="within"
    ).reset_index(drop=True)

    if road_inds.any():
        # Clip roads
        gdf_roads = gdf[road_inds]
        gdf_roads = self._clip_gdf(
            gdf_roads, clipped_region, predicate="within"
        ).reset_index(drop=True)

        # Write buildings and roads back into their respective geometry slots.
        idx_buildings = self.fiat_model.exposure.geom_names.index(
            self.config.fiat_buildings_name
        )
        idx_roads = self.fiat_model.exposure.geom_names.index(
            self.config.fiat_roads_name
        )
        self.fiat_model.exposure.exposure_geoms[idx_buildings] = gdf_buildings[
            [_FIAT_COLUMNS.object_id, "geometry"]
        ]
        self.fiat_model.exposure.exposure_geoms[idx_roads] = gdf_roads[
            [_FIAT_COLUMNS.object_id, "geometry"]
        ]
        gdf = pd.concat([gdf_buildings, gdf_roads])
    else:
        gdf = gdf_buildings
        # NOTE(review): index 0 is hard-coded here, while the roads branch
        # looks up idx_buildings by name — presumably the buildings layer is
        # always first when there are no roads; verify.
        self.fiat_model.exposure.exposure_geoms[0] = gdf[
            [_FIAT_COLUMNS.object_id, "geometry"]
        ]

    # Save exposure dataframe
    del gdf["geometry"]
    self.fiat_model.exposure.exposure_db = gdf.reset_index(drop=True)

    # Clip the building footprints
    fieldname = "BF_FID"
    if clip_footprints and not self.fiat_model.building_footprint.empty:
        # Get buildings after filtering and their footprint id
        self.fiat_model.building_footprint = self.fiat_model.building_footprint[
            self.fiat_model.building_footprint[fieldname].isin(
                gdf_buildings[fieldname]
            )
        ].reset_index(drop=True)
|
|
2008
|
-
|
|
2009
|
-
@staticmethod
def _clip_gdf(
    gdf1: gpd.GeoDataFrame, gdf2: gpd.GeoDataFrame, predicate: str = "within"
):
    """Spatially filter *gdf1* to features matching *gdf2* under *predicate*.

    Columns introduced by the join — those suffixed with '_right' or
    originating from *gdf2* (except 'geometry') — are dropped again, so the
    result keeps only *gdf1*'s columns.
    """
    joined = gpd.sjoin(gdf1, gdf2, how="inner", predicate=predicate)
    drop_cols = []
    for col in joined.columns:
        from_join = col.endswith("_right")
        from_gdf2 = col in gdf2.columns and col != "geometry"
        if from_join or from_gdf2:
            drop_cols.append(col)
    return joined.drop(columns=drop_cols)
|
|
2023
|
-
|
|
2024
|
-
@staticmethod
def spatial_join(
    objects: gpd.GeoDataFrame,
    layer: Union[str, gpd.GeoDataFrame],
    field_name: str,
    rename: Optional[str] = None,
    filter: Optional[bool] = False,
) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]:
    """
    Perform a spatial join between two GeoDataFrames.

    Args:
        objects (gpd.GeoDataFrame): The GeoDataFrame representing the objects.
        layer (Union[str, gpd.GeoDataFrame]): The GeoDataFrame or file path of the layer to join with.
        field_name (str): The name of the field to use for the join.
        rename (Optional[str], optional): The new name to assign to the joined field. Defaults to None.

    Returns
    -------
    tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]: A tuple containing the joined GeoDataFrame and the layer GeoDataFrame.

    """
    # Read in layer and keep only column of interest
    if not isinstance(layer, gpd.GeoDataFrame):
        layer = gpd.read_file(layer)
    layer = layer[[field_name, "geometry"]]
    layer = layer.to_crs(objects.crs)
    # Avoid a column-name clash between objects and the layer.
    if field_name in objects.columns:
        layer = layer.rename(columns={field_name: "layer_field"})
        field_name = "layer_field"
    # Spatial join of the layers
    objects_joined = objects.sjoin(layer, how="left", predicate="intersects")

    # Keep only the first intersection for each object
    objects_joined = (
        objects_joined.groupby(_FIAT_COLUMNS.object_id).first().reset_index()
    )

    # if needed filter out unused objects in the layer
    # "index_right" is added by sjoin and points into `layer`'s index.
    if filter:
        layer_inds = objects_joined["index_right"].dropna().unique()
        layer = layer.iloc[np.sort(layer_inds)].reset_index(drop=True)
    objects_joined = objects_joined[[_FIAT_COLUMNS.object_id, field_name]]
    # rename field if provided
    if rename:
        objects_joined = objects_joined.rename(columns={field_name: rename})
        layer = layer.rename(columns={field_name: rename})
    return objects_joined, layer
|
|
2072
|
-
|
|
2073
|
-
def _get_fiat_building_index(self) -> int:
    """Return the index of the buildings geometry in the FIAT exposure geom list."""
    geom_names = self.fiat_model.exposure.geom_names
    return geom_names.index(self.config.fiat_buildings_name)
|
|
2077
|
-
|
|
2078
|
-
def _get_fiat_road_index(self) -> int:
    """Return the index of the roads geometry in the FIAT exposure geom list."""
    geom_names = self.fiat_model.exposure.geom_names
    return geom_names.index(self.config.fiat_roads_name)
|
|
2080
|
-
|
|
2081
|
-
def _get_closest_station(self):
    """Find the tide-gauge station closest to the SFINCS overland domain.

    Queries the configured observation source for its active stations,
    selects the station with the smallest distance to the model region, and
    applies the optional ``max_distance`` limit from the tide-gauge config.

    Returns
    -------
    The id of the closest station, or None when the closest station lies
    further away than the configured ``max_distance``.
    """
    # Get available stations from source
    obs_data = obs.source(self.config.tide_gauge.source)
    obs_data.get_active_stations()
    obs_stations = obs_data.gdf()
    # Calculate distance from SFINCS region to all available stations in degrees
    # (region is re-projected to EPSG:4326 to match the station coordinates)
    obs_stations["distance"] = obs_stations.distance(
        self.sfincs_overland_model.region.to_crs(4326).geometry.item()
    )
    # Get the closest station and its distance in meters
    closest_station = obs_stations[
        obs_stations["distance"] == obs_stations["distance"].min()
    ]
    # Re-project to the model CRS so the distance is expressed in model units
    # (assumed meters — the overland SFINCS CRS is expected to be projected)
    distance = round(
        closest_station.to_crs(self.sfincs_overland_model.region.crs)
        .distance(self.sfincs_overland_model.region.geometry.item())
        .item(),
        0,
    )

    distance = us.UnitfulLength(value=distance, units=us.UnitTypesLength.meters)
    self.logger.info(
        f"The closest tide gauge from {self.config.tide_gauge.source} is located {distance.transform(self.unit_system.default_length_units)} from the SFINCS domain"
    )
    # Check if user provided max distance
    # TODO make sure units are explicit for max_distance
    if self.config.tide_gauge.max_distance is not None:
        # Convert the measured distance into the units of max_distance before comparing
        units_new = self.config.tide_gauge.max_distance.units
        distance_new = us.UnitfulLength(
            value=distance.convert(units_new), units=units_new
        )
        if distance_new.value > self.config.tide_gauge.max_distance.value:
            self.logger.warning(
                f"This distance is larger than the 'max_distance' value of {self.config.tide_gauge.max_distance.value} {units_new} provided in the config file. The station cannot be used."
            )
            return None

    # get station id
    station_id = closest_station["id"].item()

    return station_id
|
|
2122
|
-
|
|
2123
|
-
def _get_station_metadata(self, station_id: str, ref: str = "MLLW"):
    """
    Retrieve metadata for a tide gauge station, expressed relative to a reference datum.

    Args:
        station_id (str): The id of the station in the configured source.
        ref (str, optional): The reference level for water level measurements. Defaults to "MLLW".

    Returns
    -------
    dict: A dictionary containing the metadata of the tide gauge station.
        The dictionary includes the following keys:
        - "id": The station ID.
        - "name": The station name.
        - "datum": The difference between the station's orthometric datum and the reference level.
        - "datum_name": The name of the datum used by the station.
        - "msl": The difference between the Mean Sea Level (MSL) and the reference level.
        - "mllw": The difference between MLLW and the reference level.
        - "mhhw": The difference between MHHW and the reference level.
        - "reference": The reference level used for water level measurements.
        - "units": The length units reported by the station for its datums.
        - "lon": The longitude of the station.
        - "lat": The latitude of the station.
    """
    # Get available stations from source
    obs_data = obs.source(self.config.tide_gauge.source)
    # read station metadata
    station_metadata = obs_data.get_meta_data(station_id)
    # TODO check if all stations can be used? Tidal attr?
    # Get water levels by using the ref provided
    # NOTE(review): assumes the source metadata always lists the "MSL",
    # "MLLW" and "MHHW" datums plus the reference `ref` — a missing name
    # raises ValueError from list.index; confirm for non-NOAA sources.
    datum_name = station_metadata["datums"]["OrthometricDatum"]
    datums = station_metadata["datums"]["datums"]
    names = [datum["name"] for datum in datums]

    ref_value = datums[names.index(ref)]["value"]

    # All heights below are offsets from the chosen reference level,
    # rounded to millimeter-ish precision (3 decimals)
    meta = {
        "id": station_id,
        "name": station_metadata["name"],
        "datum": round(datums[names.index(datum_name)]["value"] - ref_value, 3),
        "datum_name": datum_name,
        "msl": round(datums[names.index("MSL")]["value"] - ref_value, 3),
        "mllw": round(datums[names.index("MLLW")]["value"] - ref_value, 3),
        "mhhw": round(datums[names.index("MHHW")]["value"] - ref_value, 3),
        "reference": ref,
        "units": station_metadata["datums"]["units"],
        "lon": station_metadata["lng"],
        "lat": station_metadata["lat"],
    }

    self.logger.info(
        f"The tide gauge station '{station_metadata['name']}' from {self.config.tide_gauge.source} will be used to download nearshore historical water level time-series."
    )

    self.logger.info(
        f"The station metadata will be used to fill in the water_level attribute in the site.toml. The reference level will be {ref}."
    )

    return meta
|
|
2178
|
-
|
|
2179
|
-
def _get_bin_colors(self):
    """
    Load the output-layer bin colors from the bundled ``bin_colors.toml``.

    Returns
    -------
    dict: A dictionary containing the bin colors.
    """
    # The template lives next to this module, under templates/output_layers
    colors_toml = (
        Path(__file__).parent.resolve()
        / "templates"
        / "output_layers"
        / "bin_colors.toml"
    )
    with open(colors_toml, "rb") as f:
        return tomli.load(f)
|
|
2193
|
-
|
|
2194
|
-
|
|
2195
|
-
if __name__ == "__main__":
    # Interactive entry point: repeatedly prompt for a configuration file and
    # attempt to build the database, so a failed attempt can be retried.
    while True:
        # strip() guards against trailing whitespace/newlines in pasted paths
        config_path = Path(
            input(
                "Please provide the path to the database creation configuration toml: \n"
            ).strip()
        )
        try:
            config = ConfigModel.read(config_path)
            dbs = DatabaseBuilder(config)
            dbs.build()
        except Exception as e:
            print(e)
            # renamed from 'quit' to avoid shadowing the builtin quit();
            # accept 'y'/'Y' (case-insensitive) instead of only lowercase 'y'
            answer = input("Do you want to quit? (y/n)")
            if answer.strip().lower() == "y":
                exit()
|
|
1
|
+
import datetime
|
|
2
|
+
import math
|
|
3
|
+
import os
|
|
4
|
+
import shutil
|
|
5
|
+
import warnings
|
|
6
|
+
from enum import Enum
|
|
7
|
+
from pathlib import Path
|
|
8
|
+
from typing import Optional, Union
|
|
9
|
+
from urllib.request import urlretrieve
|
|
10
|
+
|
|
11
|
+
import cht_observations.observation_stations as obs
|
|
12
|
+
import geopandas as gpd
|
|
13
|
+
import numpy as np
|
|
14
|
+
import pandas as pd
|
|
15
|
+
import rioxarray as rxr
|
|
16
|
+
import tomli
|
|
17
|
+
import tomli_w
|
|
18
|
+
import xarray as xr
|
|
19
|
+
from hydromt_fiat import FiatModel as HydromtFiatModel
|
|
20
|
+
from hydromt_fiat.data_apis.open_street_maps import get_buildings_from_osm
|
|
21
|
+
from hydromt_sfincs import SfincsModel as HydromtSfincsModel
|
|
22
|
+
from pydantic import BaseModel, Field
|
|
23
|
+
from shapely import MultiLineString, MultiPolygon, Polygon
|
|
24
|
+
|
|
25
|
+
from flood_adapt.adapter.fiat_adapter import _FIAT_COLUMNS
|
|
26
|
+
from flood_adapt.config.fiat import (
|
|
27
|
+
AggregationModel,
|
|
28
|
+
BenefitsModel,
|
|
29
|
+
BFEModel,
|
|
30
|
+
EquityModel,
|
|
31
|
+
FiatConfigModel,
|
|
32
|
+
FiatModel,
|
|
33
|
+
RiskModel,
|
|
34
|
+
SVIModel,
|
|
35
|
+
)
|
|
36
|
+
from flood_adapt.config.gui import (
|
|
37
|
+
AggregationDmgLayer,
|
|
38
|
+
BenefitsLayer,
|
|
39
|
+
FloodMapLayer,
|
|
40
|
+
FootprintsDmgLayer,
|
|
41
|
+
GuiModel,
|
|
42
|
+
GuiUnitModel,
|
|
43
|
+
OutputLayers,
|
|
44
|
+
PlottingModel,
|
|
45
|
+
SyntheticTideModel,
|
|
46
|
+
VisualizationLayers,
|
|
47
|
+
)
|
|
48
|
+
from flood_adapt.config.sfincs import (
|
|
49
|
+
Cstype,
|
|
50
|
+
CycloneTrackDatabaseModel,
|
|
51
|
+
DatumModel,
|
|
52
|
+
DemModel,
|
|
53
|
+
FloodmapType,
|
|
54
|
+
FloodModel,
|
|
55
|
+
ObsPointModel,
|
|
56
|
+
RiverModel,
|
|
57
|
+
SCSModel,
|
|
58
|
+
SfincsConfigModel,
|
|
59
|
+
SfincsModel,
|
|
60
|
+
SlrScenariosModel,
|
|
61
|
+
WaterlevelReferenceModel,
|
|
62
|
+
)
|
|
63
|
+
from flood_adapt.config.site import (
|
|
64
|
+
Site,
|
|
65
|
+
StandardObjectModel,
|
|
66
|
+
)
|
|
67
|
+
from flood_adapt.dbs_classes.database import Database
|
|
68
|
+
from flood_adapt.misc.log import FloodAdaptLogging
|
|
69
|
+
from flood_adapt.misc.utils import modified_environ
|
|
70
|
+
from flood_adapt.objects.events.event_set import EventSet
|
|
71
|
+
from flood_adapt.objects.forcing import unit_system as us
|
|
72
|
+
from flood_adapt.objects.forcing.tide_gauge import (
|
|
73
|
+
TideGauge,
|
|
74
|
+
TideGaugeSource,
|
|
75
|
+
)
|
|
76
|
+
from flood_adapt.objects.projections.projections import (
|
|
77
|
+
PhysicalProjection,
|
|
78
|
+
Projection,
|
|
79
|
+
SocioEconomicChange,
|
|
80
|
+
)
|
|
81
|
+
from flood_adapt.objects.strategies.strategies import Strategy
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def path_check(str_path: str, config_path: Optional[Path] = None) -> str:
    """
    Resolve a path string to an absolute POSIX-style path.

    Relative paths are resolved against the parent directory of
    ``config_path`` (the configuration file they were read from).

    Args:
        str_path (str): The path to be checked.
        config_path (Optional[Path]): Anchor file for relative paths. Defaults to None.

    Returns
    -------
    str: The absolute path.

    Raises
    ------
    ValueError: If the path is not absolute and no config_path is provided.
    """
    candidate = Path(str_path)
    if candidate.is_absolute():
        return candidate.as_posix()
    if config_path is None:
        raise ValueError(f"Value '{candidate}' should be an absolute path.")
    # Anchor the relative path at the config file's directory and normalize it
    return (Path(config_path).parent / candidate).resolve().as_posix()
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class SpatialJoinModel(BaseModel):
    """
    Represents a spatial join model.

    Attributes
    ----------
    name (Optional[str]): The name of the model (optional).
    file (str): Path to the vector file used for the spatial join.
    field_name (str): The field name used for the spatial join.
    """

    name: Optional[str] = None
    file: str
    field_name: str
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
class UnitSystems(str, Enum):
    """Accepted values for the ``unit_system`` configuration field.

    Attributes
    ----------
    imperial (str): Represents the imperial unit system.
    metric (str): Represents the metric unit system.
    """

    imperial = "imperial"
    metric = "metric"
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
class FootprintsOptions(str, Enum):
    """Built-in sources for building footprints.

    Attributes
    ----------
    OSM (str): Use building footprints derived from OpenStreetMap.
    """

    OSM = "OSM"
|
|
141
|
+
|
|
142
|
+
|
|
143
|
+
class Basins(str, Enum):
    """
    Enumeration of tropical-cyclone basins, each identified by its string code.

    Attributes
    ----------
    NA (str): North Atlantic
    SA (str): South Atlantic
    EP (str): Eastern North Pacific (which includes the Central Pacific region)
    WP (str): Western North Pacific
    SP (str): South Pacific
    SI (str): South Indian
    NI (str): North Indian
    """

    NA = "NA"
    SA = "SA"
    EP = "EP"
    WP = "WP"
    SP = "SP"
    SI = "SI"
    NI = "NI"
|
|
167
|
+
|
|
168
|
+
|
|
169
|
+
class GuiConfigModel(BaseModel):
    """
    Represents a GUI model for FloodAdapt.

    These maxima scale the visualization bins of the output layers; the last
    visualization bin of each layer will be ">value".

    Attributes
    ----------
    max_flood_depth (float): Upper bound for the flood depth layer bins.
    max_aggr_dmg (float): Upper bound for the aggregated damage layer bins.
    max_footprint_dmg (float): Upper bound for the footprint damage layer bins.
    max_benefits (float): Upper bound for the benefits layer bins.
    """

    max_flood_depth: float
    max_aggr_dmg: float
    max_footprint_dmg: float
    max_benefits: float
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
class SviConfigModel(SpatialJoinModel):
    """
    Represents a model for the Social Vulnerability Index (SVI).

    Inherits ``file`` and ``field_name`` from SpatialJoinModel (the SVI layer
    to spatially join onto the exposure).

    Attributes
    ----------
    threshold (float): The threshold value for the SVI model to specify vulnerability.
    """

    threshold: float
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
class Point(BaseModel):
    """A geographic point location.

    NOTE(review): coordinates are assumed to be WGS84 decimal degrees
    (lat/lon) — confirm against callers.
    """

    lat: float
    lon: float
|
|
202
|
+
|
|
203
|
+
|
|
204
|
+
class TideGaugeConfigModel(BaseModel):
    """
    Represents a tide gauge model.

    Attributes
    ----------
    source (TideGaugeSource): The source of the tide gauge data.
    description (str): Free-text description of the gauge (default: "").
    ref (Optional[str]): The reference name. Should be defined in the water level references.
    id (Optional[int]): Station id within the source (default: None).
    lon (Optional[float]): Longitude of the gauge (default: None).
    lat (Optional[float]): Latitude of the gauge (default: None).
    file (Optional[str]): The file associated with the tide gauge data (default: None).
    max_distance (Optional[us.UnitfulLength]): Maximum allowed distance between
        the model domain and the gauge for it to be used (default: None).
    """

    source: TideGaugeSource
    description: str = ""
    ref: Optional[str] = None
    id: Optional[int] = None
    lon: Optional[float] = None
    lat: Optional[float] = None
    file: Optional[str] = None
    max_distance: Optional[us.UnitfulLength] = None
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
class SviModel(SpatialJoinModel):
    """
    Represents a model for the Social Vulnerability Index (SVI).

    NOTE(review): this class is field-for-field identical to SviConfigModel;
    confirm whether one of the two can be retired.

    Attributes
    ----------
    threshold (float): The threshold value for the SVI model to specify vulnerability.
    """

    threshold: float
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
class ConfigModel(BaseModel):
    """
    Represents the configuration model for FloodAdapt database creation.

    Attributes
    ----------
    name : str
        The name of the site. May not contain whitespace or the characters <>:"/\\|?*.
    description : Optional[str], default None
        The description of the site.
    database_path : Optional[str], default None
        The path to the database where all the sites are located.
    unit_system : UnitSystems
        The unit system (imperial or metric).
    gui : GuiConfigModel
        Scaling values for the GUI output layers.
    infographics : Optional[bool], default True
        Indicates if infographics are enabled.
    fiat : str
        The FIAT model path.
    aggregation_areas : Optional[list[SpatialJoinModel]], default None
        Aggregation area layers to join to the exposure.
    building_footprints : Optional[SpatialJoinModel | FootprintsOptions], default OSM
        The building footprints source.
    fiat_buildings_name : Optional[str], default "buildings"
        Name of the buildings geometry in the FIAT model.
    fiat_roads_name : Optional[str], default "roads"
        Name of the roads geometry in the FIAT model.
    bfe : Optional[SpatialJoinModel], default None
        The base flood elevation (BFE) layer.
    svi : Optional[SviConfigModel], default None
        The social vulnerability index layer.
    road_width : Optional[float], default 5
        The road width in meters.
    return_periods : list[int], default []
        Return periods used for risk calculations.
    references : WaterlevelReferenceModel
        Water level references; defaults to a single zero-height MSL datum.
    sfincs_overland : FloodModel
        The overland SFINCS model.
    sfincs_offshore : Optional[FloodModel], default None
        The offshore SFINCS model.
    dem : Optional[DemModel], default None
        The digital elevation model configuration.
    excluded_datums : list[str], default []
        Names of datums to be excluded.
    slr_scenarios : Optional[SlrScenariosModel], default None
        The sea level rise scenarios.
    scs : Optional[SCSModel], default None
        The SCS model configuration.
    tide_gauge : Optional[TideGaugeConfigModel], default None
        The tide gauge model.
    cyclones : Optional[bool], default True
        Indicates if cyclones are enabled.
    cyclone_basin : Optional[Basins], default None
        The cyclone basin.
    obs_point : Optional[list[ObsPointModel]], default None
        The list of observation point models.
    probabilistic_set : Optional[str], default None
        The probabilistic event set path.
    """

    # General
    name: str = Field(..., min_length=1, pattern='^[^<>:"/\\\\|?* ]*$')
    description: Optional[str] = None
    database_path: Optional[str] = None
    unit_system: UnitSystems
    gui: GuiConfigModel
    infographics: Optional[bool] = True

    # FIAT
    fiat: str
    aggregation_areas: Optional[list[SpatialJoinModel]] = None
    building_footprints: Optional[SpatialJoinModel | FootprintsOptions] = (
        FootprintsOptions.OSM
    )
    fiat_buildings_name: Optional[str] = "buildings"
    fiat_roads_name: Optional[str] = "roads"
    bfe: Optional[SpatialJoinModel] = None
    svi: Optional[SviConfigModel] = None
    road_width: Optional[float] = 5
    return_periods: list[int] = Field(default_factory=list)

    # SFINCS
    references: WaterlevelReferenceModel = WaterlevelReferenceModel(
        reference="MSL",
        datums=[
            DatumModel(
                name="MSL",
                height=us.UnitfulLength(value=0.0, units=us.UnitTypesLength.meters),
            ),
        ],
    )

    sfincs_overland: FloodModel
    sfincs_offshore: Optional[FloodModel] = None
    dem: Optional[DemModel] = None

    excluded_datums: list[str] = Field(default_factory=list)

    slr_scenarios: Optional[SlrScenariosModel] = None
    scs: Optional[SCSModel] = None
    tide_gauge: Optional[TideGaugeConfigModel] = None
    cyclones: Optional[bool] = True
    cyclone_basin: Optional[Basins] = None
    obs_point: Optional[list[ObsPointModel]] = None
    probabilistic_set: Optional[str] = None

    @staticmethod
    def read(toml_path: Path) -> "ConfigModel":
        """
        Read a configuration file and return the validated attributes.

        All relative paths in the file are made absolute relative to the
        configuration file's directory. If no ``database_path`` is given, a
        'Database' folder next to the config file is created and used.

        Args:
            toml_path (Path): The path to the configuration file.

        Returns
        -------
        ConfigModel: The validated attributes from the configuration file.
        """
        with open(toml_path, mode="rb") as fp:
            toml = tomli.load(fp)
        config = ConfigModel.model_validate(toml)

        # check if database path is provided and use config_file path if not
        if config.database_path is None:
            dbs_path = Path(toml_path).parent / "Database"
            if not dbs_path.exists():
                dbs_path.mkdir(parents=True)
            config.database_path = dbs_path.as_posix()
        # check if paths are relative to the config file and make them absolute
        config.database_path = path_check(config.database_path, toml_path)
        config.fiat = path_check(config.fiat, toml_path)
        config.sfincs_overland.name = path_check(config.sfincs_overland.name, toml_path)
        if config.sfincs_offshore:
            config.sfincs_offshore.name = path_check(
                config.sfincs_offshore.name, toml_path
            )
        if isinstance(config.building_footprints, SpatialJoinModel):
            config.building_footprints.file = path_check(
                config.building_footprints.file, toml_path
            )
        if config.tide_gauge and config.tide_gauge.file:
            config.tide_gauge.file = path_check(config.tide_gauge.file, toml_path)
        if config.svi:
            config.svi.file = path_check(config.svi.file, toml_path)
        if config.bfe:
            config.bfe.file = path_check(config.bfe.file, toml_path)
        if config.slr_scenarios:
            config.slr_scenarios.file = path_check(config.slr_scenarios.file, toml_path)
        if config.probabilistic_set:
            config.probabilistic_set = path_check(config.probabilistic_set, toml_path)
        if config.aggregation_areas:
            for aggr in config.aggregation_areas:
                aggr.file = path_check(aggr.file, toml_path)

        return config
|
|
379
|
+
|
|
380
|
+
|
|
381
|
+
class DatabaseBuilder:
|
|
382
|
+
logger = FloodAdaptLogging.getLogger("DatabaseBuilder")
|
|
383
|
+
|
|
384
|
+
_has_roads: bool = False
|
|
385
|
+
_aggregation_areas: Optional[list] = None
|
|
386
|
+
_probabilistic_set_name: Optional[str] = None
|
|
387
|
+
|
|
388
|
+
def __init__(self, config: ConfigModel, overwrite: bool = True):
    """Initialize the builder from a validated configuration.

    Args:
        config (ConfigModel): The validated database-builder configuration.
        overwrite (bool): NOTE(review): currently unused here; overwriting an
            existing database is controlled by the `overwrite` argument of
            `build()` instead — confirm whether this parameter can be removed.

    Raises
    ------
    ValueError: If the configuration has no ``database_path``.
    """
    self.config = config

    # Set database root
    if config.database_path:
        self.root = Path(config.database_path).joinpath(self.config.name)
    else:
        raise ValueError(
            "Database path is not provided. Please provide a path using the 'database_path' attribute."
        )

    # Read info that needs to be used to create other models
    self.unit_system = self.create_default_units()

    # Read info that needs to be updated with other model info
    self.water_level_references = self.config.references
|
|
404
|
+
|
|
405
|
+
@property
def static_path(self) -> Path:
    """Path to the database's ``static`` folder."""
    return self.root.joinpath("static")
|
|
408
|
+
|
|
409
|
+
def build(self, overwrite: bool = False) -> None:
    """Create the full FloodAdapt database at ``self.root``.

    Args:
        overwrite (bool): When True an existing database folder is removed
            and rebuilt; when False an existing folder raises ValueError.
    """
    # Guard against clobbering an existing database unless explicitly allowed
    if self.root.exists():
        if not overwrite:
            raise ValueError(
                f"There is already a Database folder in '{self.root.as_posix()}'."
            )
        shutil.rmtree(self.root)
        warnings.warn(
            f"There is already a Database folder in '{self.root.as_posix()}, which will be overwritten'."
        )
    # Create database folder
    self.root.mkdir(parents=True)

    # Everything below is logged to a build log inside the new database
    with FloodAdaptLogging.to_file(
        file_path=self.root.joinpath("database_builder.log")
    ):
        self.logger.info(
            f"Creating a FloodAdapt database in '{self.root.as_posix()}'"
        )

        # Make folder structure and read models
        self.setup()

        # Prepare site configuration
        site = self.create_site_config()
        site.save(self.static_path / "config" / "site.toml")

        # Add infometric and infographic configurations
        self.create_infometrics()

        # Save standard objects
        self.create_standard_objects()

        # Save log file
        self.logger.info("FloodAdapt database creation finished!")
|
|
445
|
+
|
|
446
|
+
def setup(self) -> None:
    """Create the database folder structure and load the user-provided template models."""
    # Create the models
    self.make_folder_structure()

    # Read user models and copy to templates
    self.read_template_fiat_model()
    self.read_template_sfincs_overland_model()
    self.read_template_sfincs_offshore_model()
|
|
454
|
+
|
|
455
|
+
def set_standard_objects(self):
    """Build the standard-objects model referencing the default strategy and projection.

    Returns
    -------
    StandardObjectModel: Standard events/projections/strategies for the site.
    """
    # Names reused by create_standard_objects() when saving the actual objects
    self._no_measures_strategy_name = "no_measures"
    self._current_projection_name = "current"
    # Only reference the probabilistic event set when one is configured
    events = (
        [self._probabilistic_set_name]
        if self._probabilistic_set_name is not None
        else []
    )
    return StandardObjectModel(
        events=events,
        projections=[self._current_projection_name],
        strategies=[self._no_measures_strategy_name],
    )
|
|
469
|
+
|
|
470
|
+
def create_standard_objects(self):
    """Save the default 'no_measures' strategy and 'current' projection into the database.

    Also validates the configured probabilistic event set (if any) by loading it.

    Raises
    ------
    ValueError: If the configured probabilistic event set fails to load.
    """
    # Expose the database location through environment variables while the
    # Database instance is created (modified_environ restores them afterwards)
    with modified_environ(
        DATABASE_ROOT=str(self.root.parent),
        DATABASE_NAME=self.root.name,
    ):
        self.logger.info(
            "Creating `no measures` strategy and `current` projection."
        )
        # Create database instance
        db = Database(self.root.parent, self.config.name)
        # Create no measures strategy
        strategy = Strategy(
            name=self._no_measures_strategy_name,
            measures=[],
        )
        db.strategies.save(strategy)
        # Create current projection
        projection = Projection(
            name=self._current_projection_name,
            physical_projection=PhysicalProjection(),
            socio_economic_change=SocioEconomicChange(),
        )
        db.projections.save(projection)
        # Check prob set
        if self._probabilistic_set_name is not None:
            path_toml = (
                db.input_path
                / "events"
                / self._probabilistic_set_name
                / f"{self._probabilistic_set_name}.toml"
            )
            try:
                EventSet.load_file(path_toml)
            except Exception as e:
                raise ValueError(
                    f"Provided probabilistic event set '{self._probabilistic_set_name}' is not valid. Error: {e}"
                )
|
|
507
|
+
|
|
508
|
+
### TEMPLATE READERS ###
|
|
509
|
+
def read_template_fiat_model(self):
    """Validate the user's FIAT model, copy it into the database templates, and load it.

    The in-database copy is trimmed so that only the exposure geometries
    declared in its config are kept, and a 'region' polygon is derived from
    the exposure when the model does not provide one.
    """
    user_provided = self._check_exists_and_absolute(self.config.fiat)

    # Read config model
    # (a failed read raises here, before anything is copied into the database)
    HydromtFiatModel(root=str(user_provided), mode="r+").read()

    # Success, so copy to db and read again
    location_in_db = self.static_path / "templates" / "fiat"
    if location_in_db.exists():
        shutil.rmtree(location_in_db)
    shutil.copytree(user_provided, location_in_db)
    in_db = HydromtFiatModel(root=str(location_in_db), mode="r+")
    in_db.read()
    # Add check to make sure the geoms are correct
    # TODO this should be handled in hydromt-FIAT
    no_geoms = len(
        [name for name in in_db.config["exposure"]["geom"].keys() if "file" in name]
    )
    in_db.exposure.exposure_geoms = in_db.exposure.exposure_geoms[:no_geoms]
    in_db.exposure._geom_names = in_db.exposure._geom_names[:no_geoms]

    # Make sure that a region polygon is included
    if "region" not in in_db.geoms:
        gdf = in_db.exposure.get_full_gdf(in_db.exposure.exposure_db)
        # Combine all geometries into a single geometry
        # NOTE(review): GeoDataFrame.unary_union is deprecated in recent
        # geopandas in favor of union_all() — confirm the pinned version.
        merged_geometry = gdf.unary_union
        # If the result is not a polygon, you can create a convex hull
        if not isinstance(merged_geometry, Polygon):
            merged_geometry = merged_geometry.convex_hull
        # Create a new GeoDataFrame with the resulting polygon
        in_db.geoms["region"] = gpd.GeoDataFrame(
            geometry=[merged_geometry], crs=gdf.crs
        )

    self.fiat_model = in_db
|
|
545
|
+
|
|
546
|
+
def read_template_sfincs_overland_model(self):
    """Validate the user's overland SFINCS model and copy it into the database templates.

    Raises
    ------
    ValueError: If the user's SFINCS model has no CRS defined.
    """
    src = self._check_exists_and_absolute(self.config.sfincs_overland.name)
    # Open the user's model read-only to validate it before copying
    model = HydromtSfincsModel(root=str(src), mode="r")
    model.read()
    if model.crs is None:
        raise ValueError("CRS is not defined in the SFINCS model.")

    dst = self.static_path / "templates" / "overland"
    if dst.exists():
        shutil.rmtree(dst)
    shutil.copytree(src, dst)
    # Re-open the in-database copy in read/write mode for later updates
    template = HydromtSfincsModel(root=str(dst), mode="r+")
    template.read()
    self.sfincs_overland_model = template
|
|
562
|
+
|
|
563
|
+
def read_template_sfincs_offshore_model(self):
    """Validate the user's offshore SFINCS model (if any) and copy it into the database templates.

    Raises
    ------
    ValueError: If the user's SFINCS model has no CRS defined.
    """
    if self.config.sfincs_offshore is None:
        self.sfincs_offshore_model = None
        return
    user_provided = self._check_exists_and_absolute(
        self.config.sfincs_offshore.name
    )
    # Open the user's model read-only (was "r+"): validation must not be able
    # to modify the user-provided model; this matches the overland reader.
    user_model = HydromtSfincsModel(root=str(user_provided), mode="r")
    user_model.read()
    if user_model.crs is None:
        raise ValueError("CRS is not defined in the SFINCS model.")
    epsg = user_model.crs.to_epsg()

    location_in_db = self.static_path / "templates" / "offshore"
    if location_in_db.exists():
        shutil.rmtree(location_in_db)
    shutil.copytree(user_provided, location_in_db)
    # Re-open the in-database copy in read/write mode for later updates
    in_db = HydromtSfincsModel(str(location_in_db), mode="r+")
    in_db.read(epsg=epsg)
    self.sfincs_offshore_model = in_db
|
|
583
|
+
|
|
584
|
+
### FIAT ###
|
|
585
|
+
def create_fiat_model(self) -> FiatModel:
    """Assemble the site-level FIAT model section (config, benefits, risk)."""
    return FiatModel(
        config=self.create_fiat_config(),
        benefits=self.create_benefit_config(),
        risk=self.create_risk_model(),
    )
|
|
592
|
+
|
|
593
|
+
def create_risk_model(self) -> Optional[RiskModel]:
    """Derive the risk configuration from the configured return periods.

    Returns
    -------
    Optional[RiskModel]: None when no return periods are given and no
    probabilistic event set is available (risk calculations disabled).
    """
    # Explicitly configured return periods take precedence
    if self.config.return_periods:
        return RiskModel(return_periods=self.config.return_periods)
    # Fall back to defaults when a probabilistic set exists
    if self._probabilistic_set_name:
        risk = RiskModel()
        self.logger.warning(
            f"No return periods provided, but a probabilistic set is available. Using default return periods {risk.return_periods}."
        )
        return risk
    self.logger.warning(
        "No return periods provided and no probabilistic set available. Risk calculations will not be performed."
    )
    return None
|
|
610
|
+
|
|
611
|
+
def create_benefit_config(self) -> Optional[BenefitsModel]:
    """Create the benefits configuration; requires a probabilistic event set.

    Returns
    -------
    Optional[BenefitsModel]: None when no probabilistic event set is configured.
    """
    event_set = self._probabilistic_set_name
    if event_set is None:
        self.logger.warning(
            "No probabilistic set found in the config, benefits will not be available."
        )
        return None
    # Benefits are computed against the current year/projection and the
    # no-measures baseline strategy created by this builder
    return BenefitsModel(
        current_year=datetime.datetime.now().year,
        current_projection="current",
        baseline_strategy="no_measures",
        event_set=event_set,
    )
|
|
623
|
+
|
|
624
|
+
def create_fiat_config(self) -> FiatConfigModel:
    """Build the FIAT site configuration and finalize the in-database FIAT model.

    Side effects: trims exposure geometries to the exposure csv, clips the
    hazard extent, creates footprints/aggregation/roads/SVI layers, updates
    ground elevations, rewrites the FIAT model's output geom names, and
    writes the FIAT model to disk.

    Returns
    -------
    FiatConfigModel: The assembled FIAT configuration for the site.
    """
    # Make sure only csv objects have geometries
    for i, geoms in enumerate(self.fiat_model.exposure.exposure_geoms):
        keep = geoms[_FIAT_COLUMNS.object_id].isin(
            self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.object_id]
        )
        geoms = geoms[keep].reset_index(drop=True)
        self.fiat_model.exposure.exposure_geoms[i] = geoms

    footprints = self.create_footprints()
    if footprints is not None:
        # FiatConfigModel expects a path string, not a Path
        footprints = footprints.as_posix()

    # Clip hazard and reset buildings # TODO use hydromt-FIAT instead
    if not self.fiat_model.region.empty:
        self._clip_hazard_extend()

    # Store result for possible future use in create_infographics
    self._aggregation_areas = self.create_aggregation_areas()

    roads_gpkg = self.create_roads()
    non_building_names = []
    if roads_gpkg is not None:
        non_building_names.append("road")

    # Update elevations
    self.update_fiat_elevation()

    self._svi = self.create_svi()

    config = FiatConfigModel(
        exposure_crs=self.fiat_model.exposure.crs,
        floodmap_type=self.read_floodmap_type(),
        bfe=self.create_bfe(),
        non_building_names=non_building_names,
        damage_unit=self.read_damage_unit(),
        building_footprints=footprints,
        roads_file_name=roads_gpkg,
        new_development_file_name=self.create_new_developments(),
        save_simulation=False,  # TODO
        infographics=self.config.infographics,
        aggregation=self._aggregation_areas,
        svi=self._svi,
    )

    # Update output geoms names
    # (mirror every exposure geom "file*" entry as output "name<i>")
    output_geom = {}
    counter = 0
    for key in self.fiat_model.config["exposure"]["geom"].keys():
        if "file" in key:
            counter += 1
            output_geom[f"name{counter}"] = Path(
                self.fiat_model.config["exposure"]["geom"][key]
            ).name
    self.fiat_model.config["output"]["geom"] = output_geom
    # Update FIAT model with the new config
    self.fiat_model.write()

    return config
|
|
683
|
+
|
|
684
|
+
def update_fiat_elevation(self):
    """
    Update the ground elevations of FIAT objects based on the SFINCS ground elevation map.

    This method reads the DEM file and the exposure CSV file, and updates the ground elevations
    of the FIAT objects (roads and buildings) based on the nearest elevation values from the DEM.

    Side effects: mutates ``self.fiat_model.exposure.exposure_db`` in place
    (reassigned after each merge, since ``DataFrame.merge`` returns a new frame).
    """
    dem_file = self._dem_path
    # TODO resolve issue with double geometries in hydromt-FIAT and use update_ground_elevation method instead
    # self.fiat_model.update_ground_elevation(dem_file, grnd_elev_unit="meters")
    self.logger.info(
        "Updating FIAT objects ground elevations from SFINCS ground elevation map."
    )
    SFINCS_units = us.UnitfulLength(
        value=1.0, units=us.UnitTypesLength.meters
    )  # SFINCS is always in meters
    FIAT_units = self.unit_system.default_length_units
    conversion_factor = SFINCS_units.convert(FIAT_units)

    if not math.isclose(conversion_factor, 1):
        self.logger.info(
            f"Ground elevation for FIAT objects is in '{FIAT_units}', while SFINCS ground elevation is in 'meters'. Values in the exposure csv will be converted by a factor of {conversion_factor}"
        )

    exposure = self.fiat_model.exposure.exposure_db
    dem = rxr.open_rasterio(dem_file)
    # TODO make sure only fiat_model object changes take place!
    if self.config.fiat_roads_name in self.fiat_model.exposure.geom_names:
        roads = self.fiat_model.exposure.exposure_geoms[
            self._get_fiat_road_index()
        ].to_crs(dem.spatial_ref.crs_wkt)
        roads["centroid"] = roads.geometry.centroid  # get centroids

        # Sample the DEM at the road centroids (nearest cell) and convert units.
        x_points = xr.DataArray(roads["centroid"].x, dims="points")
        y_points = xr.DataArray(roads["centroid"].y, dims="points")
        roads["elev"] = (
            dem.sel(x=x_points, y=y_points, band=1, method="nearest").to_numpy()
            * conversion_factor
        )

        # Roads sit on the ground: zero out their ground floor height.
        exposure.loc[
            exposure[_FIAT_COLUMNS.primary_object_type] == "road",
            _FIAT_COLUMNS.ground_floor_height,
        ] = 0
        exposure = exposure.merge(
            roads[[_FIAT_COLUMNS.object_id, "elev"]],
            on=_FIAT_COLUMNS.object_id,
            how="left",
        )
        exposure.loc[
            exposure[_FIAT_COLUMNS.primary_object_type] == "road",
            _FIAT_COLUMNS.ground_elevation,
        ] = exposure.loc[
            exposure[_FIAT_COLUMNS.primary_object_type] == "road", "elev"
        ]
        del exposure["elev"]
        self.fiat_model.exposure.exposure_db = exposure

    # Buildings: sample the DEM at building centroids.
    buildings = self.fiat_model.exposure.exposure_geoms[
        self._get_fiat_building_index()
    ].to_crs(dem.spatial_ref.crs_wkt)
    buildings["geometry"] = buildings.geometry.centroid
    x_points = xr.DataArray(buildings["geometry"].x, dims="points")
    y_points = xr.DataArray(buildings["geometry"].y, dims="points")
    buildings["elev"] = (
        dem.sel(x=x_points, y=y_points, band=1, method="nearest").to_numpy()
        * conversion_factor
    )
    exposure = exposure.merge(
        buildings[[_FIAT_COLUMNS.object_id, "elev"]],
        on=_FIAT_COLUMNS.object_id,
        how="left",
    )
    exposure.loc[
        exposure[_FIAT_COLUMNS.primary_object_type] != "road",
        _FIAT_COLUMNS.ground_elevation,
    ] = exposure.loc[exposure[_FIAT_COLUMNS.primary_object_type] != "road", "elev"]
    del exposure["elev"]
    # FIX: persist the building updates. merge() returned a NEW DataFrame above,
    # so without this reassignment the building ground elevations were lost.
    self.fiat_model.exposure.exposure_db = exposure
|
|
762
|
+
|
|
763
|
+
def read_damage_unit(self) -> str:
    """Return the damage unit declared by the Delft-FIAT model, defaulting to '$'."""
    damage_unit = self.fiat_model.exposure.damage_unit
    if damage_unit is None:
        # The model did not declare a unit; fall back to dollars and warn.
        self.logger.warning(
            "Delft-FIAT model was missing damage units so '$' was assumed."
        )
        return "$"
    return damage_unit
|
|
771
|
+
|
|
772
|
+
def read_floodmap_type(self) -> FloodmapType:
    """Determine which floodmap type FloodAdapt calculations should use.

    If at least one exposure object uses the 'area' extraction method, water
    depths are required; otherwise water levels are used.
    """
    extraction = self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.extraction_method]
    any_area = (extraction == "area").any()
    return FloodmapType.water_depth if any_area else FloodmapType.water_level
|
|
781
|
+
|
|
782
|
+
def create_roads(self) -> Optional[str]:
    """Prepare FIAT road geometries for FloodAdapt.

    Ensures road objects are polygons (buffering line geometries by half the
    configured road width) and returns the name of the roads geopackage, or
    None when the FIAT model has no road objects.
    """
    # Make sure that FIAT roads are polygons
    if self.config.fiat_roads_name not in self.fiat_model.exposure.geom_names:
        self.logger.warning(
            "Road objects are not available in the FIAT model and thus would not be available in FloodAdapt."
        )
        # TODO check how this naming of output geoms should become more explicit!
        return None

    road_index = self._get_fiat_road_index()
    roads = self.fiat_model.exposure.exposure_geoms[road_index]

    # TODO do we need the lanes column?
    if (
        _FIAT_COLUMNS.segment_length
        not in self.fiat_model.exposure.exposure_db.columns
    ):
        self.logger.warning(
            f"'{_FIAT_COLUMNS.segment_length}' column not present in the FIAT exposure csv. Road impact infometrics cannot be produced."
        )

    # TODO should this should be performed through hydromt-FIAT?
    if not isinstance(roads.geometry.iloc[0], Polygon):
        # Buffer line geometries in a metric (UTM) CRS, then project back.
        roads = roads.to_crs(roads.estimate_utm_crs())
        half_width = self.config.road_width / 2
        roads.geometry = roads.geometry.buffer(half_width, cap_style=2)
        roads = roads.to_crs(self.fiat_model.exposure.crs)
        self.fiat_model.exposure.exposure_geoms[road_index] = roads
        self.logger.info(
            f"FIAT road objects transformed from lines to polygons assuming a road width of {self.config.road_width} meters."
        )

    self._has_roads = True
    return f"{self.config.fiat_roads_name}.gpkg"
|
|
816
|
+
|
|
817
|
+
def create_new_developments(self) -> Optional[str]:
    """Return the fixed file name used for new-development-area geometries."""
    return "new_development_area.gpkg"
|
|
819
|
+
|
|
820
|
+
def create_footprints(self) -> Optional[Path]:
    """Resolve building footprints for the FIAT model.

    Resolution order:
      1. A user-provided footprints file (``SpatialJoinModel``) — spatially joined.
      2. OSM download when the config selects ``FootprintsOptions.OSM``.
      3. Exposure geometries that are already polygons — nothing to do.
      4. An existing 'BF_FID' spatial join recorded in the FIAT model.
      5. Otherwise: no footprints (warn and return None).

    Returns a path to the footprints file or None when footprints are
    unnecessary/unavailable.
    """
    if isinstance(self.config.building_footprints, SpatialJoinModel):
        # Use the provided building footprints
        building_footprints_file = self._check_exists_and_absolute(
            self.config.building_footprints.file
        )

        self.logger.info(
            f"Using building footprints from {Path(building_footprints_file).as_posix()}."
        )
        # Spatially join buildings and map
        # TODO use hydromt method instead
        path = self._join_building_footprints(
            self.config.building_footprints.file,
            self.config.building_footprints.field_name,
        )
        return path
    elif self.config.building_footprints == FootprintsOptions.OSM:
        self.logger.info(
            "Building footprint data will be downloaded from Open Street Maps."
        )
        region = self.fiat_model.region
        if region is None:
            raise ValueError(
                "No region file found in the FIAT model. Building footprints cannot be created."
            )
        # OSM queries need WGS84 coordinates.
        region = region.to_crs(4326)
        if isinstance(region.boundary.to_numpy()[0], MultiLineString):
            # Multi-part boundary: fall back to its bounding envelope.
            polygon = Polygon(
                region.boundary.to_numpy()[0].envelope
            )  # TODO check if this is correct
        else:
            polygon = Polygon(region.boundary.to_numpy()[0])
        footprints = get_buildings_from_osm(polygon)
        # Assign sequential 1-based footprint IDs and keep only ID + geometry.
        footprints["BF_FID"] = np.arange(1, len(footprints) + 1)
        footprints = footprints[["BF_FID", "geometry"]]
        path = self._join_building_footprints(footprints, "BF_FID")
        return path
    # Then check if geometries are already footprints
    elif isinstance(
        self.fiat_model.exposure.exposure_geoms[
            self._get_fiat_building_index()
        ].geometry.iloc[0],
        (Polygon, MultiPolygon),
    ):
        self.logger.info(
            "Building footprints are already available in the FIAT model geometry files."
        )
        return None
    # check if it is spatially joined and/or exists already
    elif "BF_FID" in self.fiat_model.exposure.exposure_db.columns:
        add_attrs = self.fiat_model.spatial_joins["additional_attributes"]
        fiat_path = Path(self.fiat_model.root)

        if not (add_attrs and "BF_FID" in [attr["name"] for attr in add_attrs]):
            raise KeyError(
                "While 'BF_FID' column exists, connection to a spatial footprints file is missing."
            )

        ind = [attr["name"] for attr in add_attrs].index("BF_FID")
        footprints = add_attrs[ind]
        footprints_path = fiat_path / footprints["file"]

        if not footprints_path.exists():
            raise FileNotFoundError(
                f"While 'BF_FID' column exists, building footprints file {footprints_path} not found."
            )

        self.logger.info(
            f"Using the building footprints located at {footprints_path}."
        )
        # NOTE(review): assumes the FIAT model root lies under static_path;
        # relative_to() raises ValueError otherwise — confirm with callers.
        return footprints_path.relative_to(self.static_path)

    # Other methods
    else:
        self.logger.warning(
            "No building footprints are available. Buildings will be plotted with a default shape in FloodAdapt."
        )
        return None
|
|
899
|
+
|
|
900
|
+
def create_bfe(self) -> Optional[BFEModel]:
    """Create the base-flood-elevation (BFE) configuration.

    Spatially joins the building geometries with the configured BFE map,
    keeps the maximum value per object, writes the map and a per-building
    CSV into ``static/bfe/``, and returns the resulting BFEModel. Returns
    None (with a warning) when no BFE map is configured.
    """
    if self.config.bfe is None:
        self.logger.warning(
            "No base flood elevation provided. Elevating building relative to base flood elevation will not be possible in FloodAdapt."
        )
        return None

    # TODO can we use hydromt-FIAT?
    bfe_file = self._check_exists_and_absolute(self.config.bfe.file)

    self.logger.info(
        f"Using map from {Path(bfe_file).as_posix()} as base flood elevation."
    )

    # Spatially join buildings and map
    buildings_joined, bfe = self.spatial_join(
        self.fiat_model.exposure.exposure_geoms[self._get_fiat_building_index()],
        bfe_file,
        self.config.bfe.field_name,
    )

    # Make sure in case of multiple values that the max is kept
    # NOTE(review): GroupBy.max()'s first positional argument is
    # numeric_only, not a column name — passing the field name here looks
    # unintended (a truthy string acts like numeric_only=True); confirm.
    buildings_joined = (
        buildings_joined.groupby(_FIAT_COLUMNS.object_id)
        .max(self.config.bfe.field_name)
        .sort_values(by=[_FIAT_COLUMNS.object_id])
        .reset_index()
    )

    # Save the files
    fa_bfe_file = self.static_path / "bfe" / "bfe.gpkg"
    fa_bfe_file.parent.mkdir(parents=True, exist_ok=True)
    bfe.to_file(fa_bfe_file)
    csv_path = fa_bfe_file.parent / "bfe.csv"
    buildings_joined.to_csv(csv_path, index=False)

    # Save attributes
    return BFEModel(
        geom=fa_bfe_file.relative_to(self.static_path).as_posix(),
        table=csv_path.relative_to(self.static_path).as_posix(),
        field_name=self.config.bfe.field_name,
    )
|
|
942
|
+
|
|
943
|
+
def create_aggregation_areas(self) -> list[AggregationModel]:
    """Collect aggregation areas for the FloodAdapt site configuration.

    Three sources, in order:
      1. Aggregation areas already recorded in the FIAT model's spatial joins.
      2. Extra areas provided in the builder config (spatially joined into the
         exposure and registered on the FIAT model).
      3. Fallback: the model region file, used as a single mock aggregation area.

    Side effects: may mutate ``self.fiat_model.exposure.exposure_db`` and
    ``self.fiat_model.spatial_joins`` and write geometry files to disk.
    """
    # TODO split this to 3 methods?
    aggregation_areas = []

    # first check if the FIAT model has existing aggregation areas
    if self.fiat_model.spatial_joins["aggregation_areas"]:
        # Use the aggregation areas from the FIAT model
        for aggr in self.fiat_model.spatial_joins["aggregation_areas"]:
            # Check if the exposure csv has the correct column
            col_name = _FIAT_COLUMNS.aggregation_label.format(name=aggr["name"])
            if col_name not in self.fiat_model.exposure.exposure_db.columns:
                raise KeyError(
                    f"While aggregation area '{aggr['name']}' exists in the spatial joins of the FIAT model, the column '{col_name}' is missing in the exposure csv."
                )
            # Check equity config
            if aggr["equity"] is not None:
                equity_config = EquityModel(
                    census_data=str(
                        self.static_path.joinpath(
                            "templates", "fiat", aggr["equity"]["census_data"]
                        )
                        .relative_to(self.static_path)
                        .as_posix()
                    ),
                    percapitaincome_label=aggr["equity"]["percapitaincome_label"],
                    totalpopulation_label=aggr["equity"]["totalpopulation_label"],
                )
            else:
                equity_config = None
            # Make aggregation config
            aggr = AggregationModel(
                name=aggr["name"],
                file=str(
                    self.static_path.joinpath("templates", "fiat", aggr["file"])
                    .relative_to(self.static_path)
                    .as_posix()
                ),
                field_name=aggr["field_name"],
                equity=equity_config,
            )
            aggregation_areas.append(aggr)

        # NOTE(review): this log sits outside the loop, so it only names the
        # last aggregation area even when several were added — confirm intent.
        self.logger.info(
            f"Aggregation areas: {aggr.name} from the FIAT model are going to be used."
        )

    # Then check if the user has provided extra aggregation areas in the config
    if self.config.aggregation_areas:
        # Loop through aggr areas given in config
        for aggr in self.config.aggregation_areas:
            # Get name of type of aggregation area
            if aggr.name is not None:
                aggr_name = aggr.name
            else:
                aggr_name = Path(aggr.file).stem
            # If aggregation area already in FIAT model raise Error
            if aggr_name in [aggr.name for aggr in aggregation_areas]:
                raise ValueError(
                    f"Aggregation area '{aggr_name}' already exists in the FIAT model."
                )
            # Do spatial join of FIAT objects and aggregation areas
            exposure_csv = self.fiat_model.exposure.exposure_db
            buildings_joined, aggr_areas = self.spatial_join(
                objects=self.fiat_model.exposure.exposure_geoms[
                    self._get_fiat_building_index()
                ],
                layer=str(self._check_exists_and_absolute(aggr.file)),
                field_name=aggr.field_name,
                rename=_FIAT_COLUMNS.aggregation_label.format(name=aggr_name),
            )
            aggr_path = Path(self.fiat_model.root).joinpath(
                "exposure", "aggregation_areas", f"{Path(aggr.file).stem}.gpkg"
            )
            aggr_path.parent.mkdir(parents=True, exist_ok=True)
            aggr_areas.to_file(aggr_path)
            exposure_csv = exposure_csv.merge(
                buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
            )
            self.fiat_model.exposure.exposure_db = exposure_csv
            # Update spatial joins in FIAT model
            if self.fiat_model.spatial_joins["aggregation_areas"] is None:
                self.fiat_model.spatial_joins["aggregation_areas"] = []
            self.fiat_model.spatial_joins["aggregation_areas"].append(
                {
                    "name": aggr_name,
                    "file": aggr_path.relative_to(self.fiat_model.root).as_posix(),
                    "field_name": _FIAT_COLUMNS.aggregation_label.format(
                        name=aggr_name
                    ),
                    "equity": None,  # TODO allow adding equity as well?
                }
            )
            # Update the aggregation areas list in the config
            # NOTE(review): relative_to(static_path) assumes the FIAT model
            # root lies under static_path — ValueError otherwise; confirm.
            aggregation_areas.append(
                AggregationModel(
                    name=aggr_name,
                    file=aggr_path.relative_to(self.static_path).as_posix(),
                    field_name=_FIAT_COLUMNS.aggregation_label.format(
                        name=aggr_name
                    ),
                )
            )

    # No config provided, no aggr areas in the model -> try to use the region file as a mock aggregation area
    if (
        not self.fiat_model.spatial_joins["aggregation_areas"]
        and not self.config.aggregation_areas
    ):
        exposure_csv = self.fiat_model.exposure.exposure_db
        region = self.fiat_model.geoms["region"]
        # One aggregation id per region part: region_1, region_2, ...
        region = region.explode().reset_index()
        region["aggr_id"] = ["region_" + str(i) for i in np.arange(len(region)) + 1]
        aggregation_path = Path(self.fiat_model.root).joinpath(
            "aggregation_areas", "region.geojson"
        )
        if not aggregation_path.parent.exists():
            aggregation_path.parent.mkdir()

        region.to_file(aggregation_path)
        aggr = AggregationModel(
            name="region",
            file=str(aggregation_path.relative_to(self.static_path).as_posix()),
            field_name="aggr_id",
        )
        aggregation_areas.append(aggr)

        # Add column in FIAT
        buildings_joined, _ = self.spatial_join(
            objects=self.fiat_model.exposure.exposure_geoms[
                self._get_fiat_building_index()
            ],
            layer=region,
            field_name="aggr_id",
            rename=_FIAT_COLUMNS.aggregation_label.format(name="region"),
        )
        exposure_csv = exposure_csv.merge(
            buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
        )
        self.fiat_model.exposure.exposure_db = exposure_csv
        self.logger.warning(
            "No aggregation areas were available in the FIAT model and none were provided in the config file. The region file will be used as a mock aggregation area."
        )
    return aggregation_areas
|
|
1086
|
+
|
|
1087
|
+
def create_svi(self) -> Optional[SVIModel]:
    """Create the social-vulnerability-index (SVI) configuration.

    Three cases:
      1. An SVI file is given in the config: spatially join it onto the
         buildings, store/replace the 'SVI' exposure column, save the map.
      2. The exposure csv already has an 'SVI' column: reuse the recorded
         additional-attribute map if one exists.
      3. Neither: return None with a warning.
    """
    if self.config.svi:
        svi_file = self._check_exists_and_absolute(self.config.svi.file)
        exposure_csv = self.fiat_model.exposure.exposure_db
        buildings_joined, svi = self.spatial_join(
            self.fiat_model.exposure.exposure_geoms[
                self._get_fiat_building_index()
            ],
            svi_file,
            self.config.svi.field_name,
            rename="SVI",
            filter=True,
        )
        # Add column to exposure
        if "SVI" in exposure_csv.columns:
            self.logger.info(
                f"'SVI' column in the FIAT exposure csv will be replaced by {svi_file.as_posix()}."
            )
            del exposure_csv["SVI"]
        else:
            self.logger.info(
                f"'SVI' column in the FIAT exposure csv will be filled by {svi_file.as_posix()}."
            )
        exposure_csv = exposure_csv.merge(
            buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
        )
        self.fiat_model.exposure.exposure_db = exposure_csv

        # Save the spatial file for future use
        svi_path = self.static_path / "templates" / "fiat" / "svi" / "svi.gpkg"
        svi_path.parent.mkdir(parents=True, exist_ok=True)
        svi.to_file(svi_path)
        self.logger.info(
            f"An SVI map can be shown in FloodAdapt GUI using '{self.config.svi.field_name}' column from {svi_file.as_posix()}"
        )

        return SVIModel(
            geom=Path(svi_path.relative_to(self.static_path)).as_posix(),
            field_name="SVI",
        )
    elif "SVI" in self.fiat_model.exposure.exposure_db.columns:
        self.logger.info(
            "'SVI' column present in the FIAT exposure csv. Vulnerability type infometrics can be produced."
        )
        add_attrs = self.fiat_model.spatial_joins["additional_attributes"]
        # FIX: guard against add_attrs being None and return early when no
        # matching attribute exists — previously the code warned and then
        # crashed on list.index("SVI") with a ValueError.
        if not add_attrs or "SVI" not in [attr["name"] for attr in add_attrs]:
            self.logger.warning(
                "No SVI map found to display in the FloodAdapt GUI!"
            )
            return None

        ind = [attr["name"] for attr in add_attrs].index("SVI")
        svi = add_attrs[ind]
        svi_path = self.static_path / "templates" / "fiat" / svi["file"]
        self.logger.info(
            f"An SVI map can be shown in FloodAdapt GUI using '{svi['field_name']}' column from {svi['file']}"
        )
        # Save site attributes
        return SVIModel(
            geom=Path(svi_path.relative_to(self.static_path)).as_posix(),
            field_name=svi["field_name"],
        )

    else:
        self.logger.warning(
            "'SVI' column not present in the FIAT exposure csv. Vulnerability type infometrics cannot be produced."
        )
        return None
|
|
1154
|
+
|
|
1155
|
+
### SFINCS ###
|
|
1156
|
+
def create_sfincs_config(self) -> SfincsModel:
    """Assemble the full SFINCS configuration for the FloodAdapt site."""
    # call these functions before others to make sure water level references are updated
    model_config = self.create_sfincs_model_config()
    gauge = self.create_tide_gauge()

    return SfincsModel(
        config=model_config,
        water_level=self.water_level_references,
        slr_scenarios=self.create_slr(),
        dem=self.create_dem_model(),
        scs=self.create_scs_model(),
        cyclone_track_database=self.create_cyclone_track_database(),
        tide_gauge=gauge,
        river=self.create_rivers(),
        obs_point=self.create_observation_points(),
    )
|
|
1174
|
+
|
|
1175
|
+
def create_cyclone_track_database(self) -> Optional[CycloneTrackDatabaseModel]:
    """Download the IBTrACS cyclone track database for the configured basin.

    Returns None (with a warning) when cyclones are disabled or no offshore
    model is configured. Raises RuntimeError when the download fails.
    """
    if not self.config.cyclones or not self.config.sfincs_offshore:
        self.logger.warning("No cyclones will be available in the database.")
        return None

    if self.config.cyclone_basin:
        basin = self.config.cyclone_basin
    else:
        basin = "ALL"

    # FIX: the fallback above is a plain string without a .value attribute,
    # so `basin.value` crashed with AttributeError when no basin was set.
    basin_name = getattr(basin, "value", basin)

    name = f"IBTrACS.{basin_name}.v04r01.nc"
    url = f"https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/netcdf/{name}"
    self.logger.info(f"Downloading cyclone track database from {url}")
    fn = Path(self.root) / "static" / "cyclone_track_database" / name
    fn.parent.mkdir(parents=True, exist_ok=True)

    try:
        urlretrieve(url, fn)
    except Exception as err:
        # Chain the original error so the download failure cause stays visible.
        raise RuntimeError(
            f"Could not retrieve cyclone track database from {url}"
        ) from err

    return CycloneTrackDatabaseModel(file=name)
|
|
1197
|
+
|
|
1198
|
+
def create_scs_model(self) -> Optional[SCSModel]:
    """Copy the configured SCS rainfall-curve file into the database.

    Returns None when no SCS configuration is provided.
    """
    if self.config.scs is None:
        return None

    source = self._check_exists_and_absolute(self.config.scs.file)
    destination = self.static_path / "scs" / source.name
    destination.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(source, destination)

    return SCSModel(file=source.name, type=self.config.scs.type)
|
|
1207
|
+
|
|
1208
|
+
def create_dem_model(self) -> DemModel:
    """Set up the DEM (subgrid depth geotiff) and map tiles for FloodAdapt.

    Copies the subgrid DEM into ``static/dem/`` and either reuses tiles
    found in the SFINCS model or generates new ones from the DEM.
    Stores the copied DEM path on ``self._dem_path`` for later use.
    """
    if self.config.dem:
        subgrid_sfincs = Path(self.config.dem.filename)
    else:
        self.logger.warning(
            "No subgrid depth geotiff file provided in the config file. Using the one from the SFINCS model."
        )
        subgrid_sfincs = (
            Path(self.sfincs_overland_model.root) / "subgrid" / "dep_subgrid.tif"
        )

    dem_file = self._check_exists_and_absolute(subgrid_sfincs)
    fa_subgrid_path = self.static_path / "dem" / dem_file.name
    fa_subgrid_path.parent.mkdir(parents=True, exist_ok=True)

    # Check tiles
    tiles_sfincs = Path(self.sfincs_overland_model.root) / "tiles"
    fa_tiles_path = self.static_path / "dem" / "tiles"
    if tiles_sfincs.exists():
        # NOTE: this MOVES the tiles folder out of the SFINCS model directory
        # (the source model is modified, not just read).
        shutil.move(tiles_sfincs, fa_tiles_path)
        # FloodAdapt expects the index folder to be named "indices".
        if (fa_tiles_path / "index").exists():
            os.rename(fa_tiles_path / "index", fa_tiles_path / "indices")
        self.logger.info(
            "Tiles were already available in the SFINCS model and will directly be used in FloodAdapt."
        )
    else:
        # Make tiles
        fa_tiles_path.mkdir(parents=True)
        self.sfincs_overland_model.setup_tiles(
            path=fa_tiles_path,
            datasets_dep=[{"elevtn": dem_file}],
            zoom_range=[0, 13],
            fmt="png",
        )
        self.logger.info(
            f"Tiles were created using the {subgrid_sfincs.as_posix()} as the elevation map."
        )

    shutil.copy2(dem_file, fa_subgrid_path)
    # Remember where the DEM was copied; update_fiat_elevation reads this.
    self._dem_path = fa_subgrid_path
    return DemModel(
        filename=fa_subgrid_path.name, units=us.UnitTypesLength.meters
    )  # always in meters
|
|
1251
|
+
|
|
1252
|
+
def create_sfincs_model_config(self) -> SfincsConfigModel:
    """Build the SFINCS config entry (CRS info, flood models, units)."""
    crs = self.sfincs_overland_model.crs
    # Cstype derives from the first word of the CRS type name (lowercased).
    cstype = Cstype(crs.type_name.split(" ")[0].lower())

    return SfincsConfigModel(
        csname=crs.name,
        cstype=cstype,
        offshore_model=self.create_offshore_model(),
        overland_model=self.create_overland_model(),
        floodmap_units=self.unit_system.default_length_units,
        save_simulation=False,
    )
|
|
1265
|
+
|
|
1266
|
+
def create_slr(self) -> Optional[SlrScenariosModel]:
    """Copy the sea-level-rise scenarios file into the database.

    Returns None when no SLR scenarios are configured.
    """
    if self.config.slr_scenarios is None:
        return None

    self.config.slr_scenarios.file = str(
        self._check_exists_and_absolute(self.config.slr_scenarios.file)
    )
    slr_path = self.static_path / "slr_scenarios"
    # FIX: exist_ok so a pre-existing folder (e.g. on a rerun) does not raise
    # FileExistsError — consistent with the other create_* methods.
    slr_path.mkdir(parents=True, exist_ok=True)
    new_file = slr_path / Path(self.config.slr_scenarios.file).name
    shutil.copyfile(self.config.slr_scenarios.file, new_file)

    return SlrScenariosModel(
        file=new_file.relative_to(self.static_path).as_posix(),
        relative_to_year=self.config.slr_scenarios.relative_to_year,
    )
|
|
1282
|
+
|
|
1283
|
+
def create_observation_points(self) -> Union[list[ObsPointModel], None]:
    """Return observation points from the config, or None when none were given."""
    points = self.config.obs_point
    if points is None:
        return None
    self.logger.info("Observation points were provided in the config file.")
    return points
|
|
1289
|
+
|
|
1290
|
+
def create_rivers(self) -> list[RiverModel]:
    """Derive river source points and mean discharges from the SFINCS model.

    Reads ``sfincs.src`` for the river locations; mean discharge comes from
    the model's 'dis' forcing when present, else defaults to 0 with a warning.
    Returns an empty list when the model has no source file.
    """
    src_file = Path(self.sfincs_overland_model.root) / "sfincs.src"
    if not src_file.exists():
        self.logger.warning("No rivers found in the SFINCS model.")
        return []

    # FIX: delim_whitespace= is deprecated since pandas 2.2 (removed in 3.0);
    # sep=r"\s+" is the documented equivalent.
    df = pd.read_csv(src_file, sep=r"\s+", header=None, names=["x", "y"])
    river_locs = gpd.GeoDataFrame(
        df,
        geometry=gpd.points_from_xy(df.x, df.y),
        crs=self.sfincs_overland_model.crs,
    )
    rivers = []
    for idx, row in river_locs.iterrows():
        if "dis" in self.sfincs_overland_model.forcing:
            # Forcing is selected at idx + 1 (the source index appears 1-based).
            discharge = (
                self.sfincs_overland_model.forcing["dis"]
                .sel(index=idx + 1)
                .to_numpy()
                .mean()
            )
        else:
            discharge = 0
            self.logger.warning(
                f"No river discharge conditions were found in the SFINCS model for river {idx}. A default value of 0 will be used."
            )

        river = RiverModel(
            name=f"river_{idx}",
            x_coordinate=row.x,
            y_coordinate=row.y,
            mean_discharge=us.UnitfulDischarge(
                value=discharge, units=self.unit_system.default_discharge_units
            ),
        )
        rivers.append(river)

    self.logger.info(
        f"{len(river_locs)} river(s) were identified from the SFINCS model and will be available in FloodAdapt for discharge input."
    )

    return rivers
|
|
1332
|
+
|
|
1333
|
+
def create_tide_gauge(self) -> Optional[TideGauge]:
    """Create the tide gauge configuration for the site.

    Supports two sources: a user-supplied water-level file (copied into the
    database) or the NOAA CO-OPS API (station metadata fetched; datums are
    appended to ``self.water_level_references``). Returns None when no tide
    gauge is configured, the source is unrecognized, or (implicitly) when no
    NOAA station metadata could be retrieved.
    """
    if self.config.tide_gauge is None:
        self.logger.warning(
            "Tide gauge information not provided. Historical events will not have an option to use gauged data in FloodAdapt!"
        )
        self.logger.warning(
            "No water level references were found. It is assumed that MSL is equal to the datum used in the SFINCS overland model. You can provide these values with the tide_gauge.ref attribute in the site.toml."
        )
        return None

    if self.config.tide_gauge.source == TideGaugeSource.file:
        if self.config.tide_gauge.file is None:
            raise ValueError(
                "Tide gauge file needs to be provided when 'file' is selected as the source."
            )
        if self.config.tide_gauge.ref is None:
            self.logger.warning(
                "Tide gauge reference not provided. MSL is assumed as the reference of the water levels in the file."
            )
            self.config.tide_gauge.ref = "MSL"

        tide_gauge_file = self._check_exists_and_absolute(
            self.config.tide_gauge.file
        )
        db_file_path = Path(self.static_path / "tide_gauges") / tide_gauge_file.name

        db_file_path.parent.mkdir(parents=True, exist_ok=True)
        shutil.copyfile(self.config.tide_gauge.file, db_file_path)

        rel_db_path = Path(db_file_path.relative_to(self.static_path))
        self.logger.warning(
            f"Tide gauge from file {rel_db_path} assumed to be in {self.unit_system.default_length_units}!"
        )
        tide_gauge = TideGauge(
            reference=self.config.tide_gauge.ref,
            description="Observations from file stored in database",
            source=TideGaugeSource.file,
            file=rel_db_path,
            lon=self.config.tide_gauge.lon,
            lat=self.config.tide_gauge.lat,
            units=self.unit_system.default_length_units,
        )

        return tide_gauge

    elif self.config.tide_gauge.source == TideGaugeSource.noaa_coops:
        if self.config.tide_gauge.ref is not None:
            ref = self.config.tide_gauge.ref
        else:
            ref = "MLLW"  # If reference is not provided use MLLW

        # Side effect: the site-wide water level reference is switched to
        # the gauge reference.
        self.water_level_references.reference = (
            ref  # update the water level reference
        )

        if self.config.tide_gauge.id is None:
            station_id = self._get_closest_station()
            self.logger.info(
                "The closest NOAA tide gauge station to the site will be searched."
            )
        else:
            station_id = self.config.tide_gauge.id
            self.logger.info(
                f"The NOAA tide gauge station with the provided ID {station_id} will be used."
            )
        station = self._get_station_metadata(station_id=station_id, ref=ref)
        if station is not None:
            # Add tide_gauge information in site toml
            tide_gauge = TideGauge(
                name=station["name"],
                description=f"observations from '{self.config.tide_gauge.source}' api",
                source=self.config.tide_gauge.source,
                reference=ref,
                ID=int(station["id"]),
                lon=station["lon"],
                lat=station["lat"],
                units=us.UnitTypesLength.meters,  # the api always asks for SI units right now
            )

            # Register the station's local datum in the site references.
            local_datum = DatumModel(
                name=station["datum_name"],
                height=us.UnitfulLength(
                    value=station["datum"], units=station["units"]
                ).transform(self.unit_system.default_length_units),
            )
            self.water_level_references.datums.append(local_datum)

            msl = DatumModel(
                name="MSL",
                height=us.UnitfulLength(
                    value=station["msl"], units=station["units"]
                ).transform(self.unit_system.default_length_units),
            )
            # Check if MSL is already there and if yes replace it
            existing_msl = next(
                (
                    datum
                    for datum in self.water_level_references.datums
                    if datum.name == "MSL"
                ),
                None,
            )
            if existing_msl:
                self.water_level_references.datums.remove(existing_msl)
            self.water_level_references.datums.append(msl)

            # Add the tidal datums reported by the station.
            for name in ["MLLW", "MHHW"]:
                height = us.UnitfulLength(
                    value=station[name.lower()], units=station["units"]
                ).transform(self.unit_system.default_length_units)

                wl_info = DatumModel(
                    name=name,
                    height=height,
                )
                self.water_level_references.datums.append(wl_info)
            return tide_gauge
        # NOTE(review): when station is None this falls through and returns
        # None implicitly, without a log message — confirm this is intended.
    else:
        self.logger.warning(
            f"Tide gauge source not recognized: {self.config.tide_gauge.source}. Historical events will not have an option to use gauged data in FloodAdapt!"
        )
        return None
|
|
1455
|
+
|
|
1456
|
+
    def create_offshore_model(self) -> Optional[FloodModel]:
        """Configure the offshore SFINCS model and return its FloodModel description.

        Rewrites the offshore model's observation points (``sfincs.obs``) so that
        they coincide with the boundary points (``sfincs.bnd``) of the overland
        model, allowing offshore output to drive the overland boundary.

        Returns
        -------
        Optional[FloodModel]
            The offshore flood model description, or ``None`` when no offshore
            SFINCS model is configured.
        """
        if self.sfincs_offshore_model is None:
            return None
        # Connect boundary points of overland to output points of offshore
        fn = Path(self.sfincs_overland_model.root) / "sfincs.bnd"
        # sfincs.bnd is a headerless space-separated x/y table
        bnd = pd.read_csv(fn, sep=" ", lineterminator="\n", header=None)
        bnd = bnd.rename(columns={0: "x", 1: "y"})
        bnd_geo = gpd.GeoDataFrame(
            bnd,
            geometry=gpd.points_from_xy(bnd.x, bnd.y),
            crs=self.sfincs_overland_model.config["epsg"],
        )
        # Offshore observation points are written in geographic coordinates (EPSG:4326)
        obs_geo = bnd_geo.to_crs(4326)
        obs_geo["x"] = obs_geo.geometry.x
        obs_geo["y"] = obs_geo.geometry.y
        del obs_geo["geometry"]
        # Sequential 1-based names: bnd_pt01, bnd_pt02, ...
        obs_geo["name"] = [f"bnd_pt{num:02d}" for num in range(1, len(obs_geo) + 1)]
        fn_off = Path(self.sfincs_offshore_model.root) / "sfincs.obs"
        obs_geo.to_csv(
            fn_off,
            sep="\t",
            index=False,
            header=False,
        )
        self.logger.info(
            "Output points of the offshore SFINCS model were reconfigured to the boundary points of the overland SFINCS model."
        )

        return FloodModel(
            name="offshore",
            reference=self.config.sfincs_offshore.reference,
            vertical_offset=self.config.sfincs_offshore.vertical_offset,
        )
|
1490
|
+
def create_overland_model(self) -> FloodModel:
|
|
1491
|
+
return FloodModel(
|
|
1492
|
+
name="overland",
|
|
1493
|
+
reference=self.config.sfincs_overland.reference,
|
|
1494
|
+
)
|
|
1495
|
+
|
|
1496
|
+
    ### SITE ###
    def create_site_config(self) -> Site:
        """Create the site configuration for the FloodAdapt model.

        The order of these functions is important!
        1. Create the SFINCS model.
            needs: water level references
            provides: updated water level references with optional tide gauge
        2. Create the FIAT model.
            needs: water level references and optional probabilistic event set
            provides: svi and exposure geometries
        3. Create the GUI model. (requires water level references and FIAT model to be updated)
            needs: water level references and FIAT model to be updated
            provides: gui model with output layers, visualization layers and plotting.

        Returns
        -------
        Site
            The fully assembled site configuration.
        """
        # Ordered phase: each step feeds state consumed by the next (see docstring).
        sfincs = self.create_sfincs_config()
        self.add_probabilistic_set()
        fiat = self.create_fiat_model()
        gui = self.create_gui_config()

        # Order doesnt matter from here
        lon, lat = self.read_location()
        std_objs = self.set_standard_objects()
        # Fall back to the site name when no description was supplied.
        description = (
            self.config.description if self.config.description else self.config.name
        )

        config = Site(
            name=self.config.name,
            description=description,
            lat=lat,
            lon=lon,
            fiat=fiat,
            gui=gui,
            sfincs=sfincs,
            standard_objects=std_objs,
        )
        return config
|
|
1536
|
+
    def read_location(self) -> tuple[float, float]:
        """Return the (lon, lat) center of the area of interest in EPSG:4326.

        Uses the centroid of the dissolved FIAT region when available, otherwise
        falls back to the centroid of the dissolved building exposure geometries.
        """
        # Get center of area of interest
        if not self.fiat_model.region.empty:
            center = self.fiat_model.region.dissolve().centroid.to_crs(4326)[0]
        else:
            center = (
                self.fiat_model.exposure.exposure_geoms[self._get_fiat_building_index()]
                .dissolve()
                .centroid.to_crs(4326)[0]
            )
        # After to_crs(4326): x is longitude, y is latitude.
        return center.x, center.y
|
|
1548
|
+
def create_gui_config(self) -> GuiModel:
|
|
1549
|
+
gui = GuiModel(
|
|
1550
|
+
units=self.unit_system,
|
|
1551
|
+
plotting=self.create_hazard_plotting_config(),
|
|
1552
|
+
output_layers=self.create_output_layers_config(),
|
|
1553
|
+
visualization_layers=self.create_visualization_layers(),
|
|
1554
|
+
)
|
|
1555
|
+
|
|
1556
|
+
return gui
|
|
1557
|
+
|
|
1558
|
+
def create_default_units(self) -> GuiUnitModel:
|
|
1559
|
+
if self.config.unit_system == UnitSystems.imperial:
|
|
1560
|
+
return GuiUnitModel.imperial()
|
|
1561
|
+
elif self.config.unit_system == UnitSystems.metric:
|
|
1562
|
+
return GuiUnitModel.metric()
|
|
1563
|
+
else:
|
|
1564
|
+
raise ValueError(
|
|
1565
|
+
f"Unit system {self.config.unit_system} not recognized. Please choose 'imperial' or 'metric'."
|
|
1566
|
+
)
|
|
1567
|
+
|
|
1568
|
+
    def create_visualization_layers(self) -> VisualizationLayers:
        """Build the GUI visualization layers.

        Adds an SVI layer only when SVI data was prepared earlier
        (``self._svi`` set during FIAT model creation).
        """
        visualization_layers = VisualizationLayers()
        if self._svi is not None:
            visualization_layers.add_layer(
                name="svi",
                long_name="Social Vulnerability Index (SVI)",
                path=str(self.static_path / self._svi.geom),
                database_path=self.root,
                field_name="SVI",
                # Fixed SVI class breaks on the 0-1 index scale.
                bins=[0.05, 0.2, 0.4, 0.6, 0.8],
            )
        return visualization_layers
|
|
1581
|
+
    def create_output_layers_config(self) -> OutputLayers:
        """Build the map output-layer configuration (flood map, damages, benefits).

        Bin edges are derived as fractions of the user-configured maxima in
        ``self.config.gui``. The benefits layer is only created when a
        probabilistic event set is available (risk/benefit runs possible).
        """
        # Read default colors from template
        fd_max = self.config.gui.max_flood_depth
        ad_max = self.config.gui.max_aggr_dmg
        ftd_max = self.config.gui.max_footprint_dmg
        b_max = self.config.gui.max_benefits

        benefits_layer = None
        if self.config.probabilistic_set is not None:
            benefits_layer = BenefitsLayer(
                bins=[0, 0.01, 0.02 * b_max, 0.2 * b_max, b_max],
                colors=[
                    "#FF7D7D",
                    "#FFFFFF",
                    "#DCEDC8",
                    "#AED581",
                    "#7CB342",
                    "#33691E",
                ],
                threshold=0.0,
            )

        output_layers = OutputLayers(
            floodmap=FloodMapLayer(
                bins=[0.2 * fd_max, 0.6 * fd_max, fd_max],
                colors=["#D7ECFB", "#8ABDDD", "#1C73A4", "#081D58"],
                # -9999 sentinel: no bed-level cutoff applied.
                zbmax=-9999,
                depth_min=0.0,
            ),
            aggregation_dmg=AggregationDmgLayer(
                # Tiny first edge separates "no damage" from "any damage".
                bins=[0.00001, 0.1 * ad_max, 0.25 * ad_max, 0.5 * ad_max, ad_max],
                colors=[
                    "#FFFFFF",
                    "#FEE9CE",
                    "#FDBB84",
                    "#FC844E",
                    "#E03720",
                    "#860000",
                ],
            ),
            footprints_dmg=FootprintsDmgLayer(
                bins=[0.00001, 0.06 * ftd_max, 0.2 * ftd_max, 0.4 * ftd_max, ftd_max],
                colors=[
                    "#FFFFFF",
                    "#FEE9CE",
                    "#FDBB84",
                    "#FC844E",
                    "#E03720",
                    "#860000",
                ],
            ),
            benefits=benefits_layer,
        )
        return output_layers
|
|
1636
|
+
    def create_hazard_plotting_config(self) -> PlottingModel:
        """Build the hazard plotting configuration (synthetic tide settings).

        The tidal amplitude defaults to (MHHW - MSL) when both datums are
        available from the tide gauge; otherwise it falls back to 0 with a
        warning. The tide datum defaults to MSL, falling back to the main
        water-level reference if MSL is not defined.
        """
        datum_names = [datum.name for datum in self.water_level_references.datums]
        if "MHHW" in datum_names:
            amplitude = (
                self.water_level_references.get_datum("MHHW").height
                - self.water_level_references.get_datum("MSL").height
            )
            self.logger.info(
                f"The default tidal amplitude in the GUI will be {amplitude.transform(self.unit_system.default_length_units)}, calculated as the difference between MHHW and MSL from the tide gauge data."
            )
        else:
            amplitude = us.UnitfulLength(
                value=0.0, units=self.unit_system.default_length_units
            )
            self.logger.warning(
                "The default tidal amplitude in the GUI will be 0.0, since no tide-gauge water levels are available. You can change this in the site.toml with the 'gui.tide_harmonic_amplitude' attribute."
            )

        ref = "MSL"
        if ref not in datum_names:
            self.logger.warning(
                f"The Mean Sea Level (MSL) datum is not available in the site.toml. The synthetic tide will be created relative to the main reference: {self.water_level_references.reference}."
            )
            ref = self.water_level_references.reference

        plotting = PlottingModel(
            synthetic_tide=SyntheticTideModel(
                harmonic_amplitude=amplitude,
                datum=ref,
            ),
            excluded_datums=self.config.excluded_datums,
        )

        return plotting
|
|
1671
|
+
def create_infometrics(self):
|
|
1672
|
+
"""
|
|
1673
|
+
Copy the infometrics and infographics templates to the appropriate location and modifies the metrics_config.toml files.
|
|
1674
|
+
|
|
1675
|
+
This method copies the templates from the 'infometrics' and 'infographics' folders to the 'static/templates' folder in the root directory.
|
|
1676
|
+
It then modifies the 'metrics_config.toml' and 'metrics_config_risk.toml' files by updating the 'aggregateBy' attribute with the names
|
|
1677
|
+
of the aggregations defined in the 'fiat' section of the 'site_attrs' attribute.
|
|
1678
|
+
"""
|
|
1679
|
+
# TODO there should be generalized infometric queries with NSI or OSM, and with SVI or without. Then Based on the user input these should be chosen automatically
|
|
1680
|
+
templates_path = Path(__file__).parent.resolve().joinpath("templates")
|
|
1681
|
+
|
|
1682
|
+
# Create template folder
|
|
1683
|
+
path_im = self.root.joinpath("static", "templates", "infometrics")
|
|
1684
|
+
path_im.mkdir()
|
|
1685
|
+
|
|
1686
|
+
# Copy mandatory metric configs
|
|
1687
|
+
path_im_temp = templates_path.joinpath("infometrics")
|
|
1688
|
+
for file in path_im_temp.glob("*.toml"):
|
|
1689
|
+
shutil.copy2(file, path_im)
|
|
1690
|
+
|
|
1691
|
+
self._create_optional_infometrics(templates_path, path_im)
|
|
1692
|
+
|
|
1693
|
+
files = list(path_im.glob("*metrics_config*.toml"))
|
|
1694
|
+
# Update aggregation areas in metrics config
|
|
1695
|
+
for file in files:
|
|
1696
|
+
file = path_im.joinpath(file)
|
|
1697
|
+
with open(file, "rb") as f:
|
|
1698
|
+
attrs = tomli.load(f)
|
|
1699
|
+
|
|
1700
|
+
# add aggration levels
|
|
1701
|
+
if self._aggregation_areas is None:
|
|
1702
|
+
self._aggregation_areas = self.create_aggregation_areas()
|
|
1703
|
+
attrs["aggregateBy"] = [aggr.name for aggr in self._aggregation_areas]
|
|
1704
|
+
|
|
1705
|
+
# take out road metrics if needed
|
|
1706
|
+
if not self._has_roads:
|
|
1707
|
+
attrs["queries"] = [
|
|
1708
|
+
query
|
|
1709
|
+
for query in attrs["queries"]
|
|
1710
|
+
if "road" not in query["name"].lower()
|
|
1711
|
+
]
|
|
1712
|
+
|
|
1713
|
+
# Replace Damage Unit
|
|
1714
|
+
# TODO do this in a better manner
|
|
1715
|
+
for i, query in enumerate(attrs["queries"]):
|
|
1716
|
+
if "$" in query["long_name"]:
|
|
1717
|
+
query["long_name"] = query["long_name"].replace(
|
|
1718
|
+
"$", self.read_damage_unit()
|
|
1719
|
+
)
|
|
1720
|
+
|
|
1721
|
+
# replace the SVI threshold if needed
|
|
1722
|
+
if self.config.svi:
|
|
1723
|
+
for i, query in enumerate(attrs["queries"]):
|
|
1724
|
+
query["filter"] = query["filter"].replace(
|
|
1725
|
+
"SVI_threshold", str(self.config.svi.threshold)
|
|
1726
|
+
)
|
|
1727
|
+
|
|
1728
|
+
with open(file, "wb") as f:
|
|
1729
|
+
tomli_w.dump(attrs, f)
|
|
1730
|
+
|
|
1731
|
+
    def _create_optional_infometrics(self, templates_path: Path, path_im: Path):
        """Copy the optional infographic metric configs into the database.

        Template flavour is selected by unit system (imperial -> US_NSI,
        metric -> OSM) and by whether SVI is configured (with/without SVI).

        Parameters
        ----------
        templates_path : Path
            Root of the bundled templates directory.
        path_im : Path
            Destination infometrics folder inside the database.

        Raises
        ------
        ValueError
            If the configured unit system is neither imperial nor metric.
        """
        # If infographics are going to be created in FA, get template metric configurations
        if not self.config.infographics:
            return

        # Check what type of infographics should be used
        if self.config.unit_system == UnitSystems.imperial:
            metrics_folder_name = "US_NSI"
            self.logger.info(
                "Default NSI infometrics and infographics will be created."
            )
        elif self.config.unit_system == UnitSystems.metric:
            metrics_folder_name = "OSM"
            self.logger.info(
                "Default OSM infometrics and infographics will be created."
            )
        else:
            raise ValueError(
                f"Unit system {self.config.unit_system} is not recognized. Please choose 'imperial' or 'metric'."
            )

        if self.config.svi is not None:
            svi_folder_name = "with_SVI"
        else:
            svi_folder_name = "without_SVI"

        # Copy metrics config for infographics
        path_0 = templates_path.joinpath(
            "infometrics", metrics_folder_name, svi_folder_name
        )
        for file in path_0.glob("*.toml"):
            shutil.copy2(file, path_im)

        # Copy additional risk config
        file = templates_path.joinpath(
            "infometrics",
            metrics_folder_name,
            "metrics_additional_risk_configs.toml",
        )
        shutil.copy2(file, path_im)

        # Copy infographics config
        path_ig_temp = templates_path.joinpath("infographics", metrics_folder_name)
        path_ig = self.root.joinpath("static", "templates", "infographics")
        path_ig.mkdir()
        files_ig = ["styles.css", "config_charts.toml"]

        # People/risk charts require SVI data; road charts require road exposure.
        if self.config.svi is not None:
            files_ig.append("config_risk_charts.toml")
            files_ig.append("config_people.toml")

        if self._has_roads:
            files_ig.append("config_roads.toml")

        for file in files_ig:
            shutil.copy2(path_ig_temp.joinpath(file), path_ig.joinpath(file))

        # Copy images
        path_0 = templates_path.joinpath("infographics", "images")
        path_1 = self.root.joinpath("static", "templates", "infographics", "images")
        shutil.copytree(path_0, path_1)
|
|
1793
|
+
    def add_static_files(self):
        """
        Copy static files from the 'templates' folder to the 'static' folder.

        This method iterates over a list of folders and copies the contents of each folder from the 'templates' directory
        to the corresponding folder in the 'static' directory. Currently covers the GUI icons and the
        green-infrastructure lookup table.
        """
        templates_path = Path(__file__).parent.resolve().joinpath("templates")
        folders = ["icons", "green_infra_table"]
        for folder in folders:
            path_0 = templates_path.joinpath(folder)
            path_1 = self.static_path / folder
            # copytree requires the destination not to exist yet.
            shutil.copytree(path_0, path_1)
|
|
1807
|
+
    def add_probabilistic_set(self):
        """Import the probabilistic event set into the database, if configured.

        Copies the configured event-set folder into ``input/events`` and records
        its name in ``self._probabilistic_set_name`` (``None`` when not provided,
        in which case risk scenarios cannot be run).
        """
        # Copy prob set if given
        if self.config.probabilistic_set:
            self.logger.info(
                f"Probabilistic event set imported from {self.config.probabilistic_set}"
            )
            prob_event_name = Path(self.config.probabilistic_set).name
            path_db = self.root.joinpath("input", "events", prob_event_name)
            shutil.copytree(self.config.probabilistic_set, path_db)
            self._probabilistic_set_name = prob_event_name
        else:
            self.logger.warning(
                "Probabilistic event set not provided. Risk scenarios cannot be run in FloodAdapt."
            )
            self._probabilistic_set_name = None
|
|
1823
|
+
### HELPER FUNCTIONS ###
|
|
1824
|
+
def make_folder_structure(self) -> None:
|
|
1825
|
+
"""
|
|
1826
|
+
Create the folder structure for the database.
|
|
1827
|
+
|
|
1828
|
+
This method creates the necessary folder structure for the FloodAdapt database, including
|
|
1829
|
+
the input and static folders. It also creates subfolders within the input and
|
|
1830
|
+
static folders based on a predefined list of names.
|
|
1831
|
+
"""
|
|
1832
|
+
self.logger.info("Preparing the database folder structure.")
|
|
1833
|
+
inputs = [
|
|
1834
|
+
"events",
|
|
1835
|
+
"projections",
|
|
1836
|
+
"measures",
|
|
1837
|
+
"strategies",
|
|
1838
|
+
"scenarios",
|
|
1839
|
+
"benefits",
|
|
1840
|
+
]
|
|
1841
|
+
for name in inputs:
|
|
1842
|
+
(self.root / "input" / name).mkdir(parents=True, exist_ok=True)
|
|
1843
|
+
|
|
1844
|
+
# Prepare static folder structure
|
|
1845
|
+
folders = ["templates", "config"]
|
|
1846
|
+
for name in folders:
|
|
1847
|
+
(self.static_path / name).mkdir(parents=True, exist_ok=True)
|
|
1848
|
+
|
|
1849
|
+
def _check_exists_and_absolute(self, path: str) -> Path:
|
|
1850
|
+
"""Check if the path is absolute or relative and return a Path object. Raises an error if the path is not valid."""
|
|
1851
|
+
if not Path(path).exists():
|
|
1852
|
+
raise FileNotFoundError(f"Path {path} does not exist.")
|
|
1853
|
+
|
|
1854
|
+
if Path(path).is_absolute():
|
|
1855
|
+
return Path(path)
|
|
1856
|
+
else:
|
|
1857
|
+
raise ValueError(f"Path {path} is not absolute.")
|
|
1858
|
+
|
|
1859
|
+
    def _join_building_footprints(
        self, building_footprints: gpd.GeoDataFrame, field_name: str
    ) -> Path:
        """
        Join building footprints with existing building data and updates the exposure CSV.

        Args:
            building_footprints (GeoDataFrame): GeoDataFrame containing the building footprints to be joined.
            field_name (str): The field name to use for the spatial join.

        Returns
        -------
        Path: path to the saved footprints file, relative to the static folder.

        This method performs the following steps:
        1. Reads the exposure CSV file.
        2. Performs a spatial join between the buildings and building footprints.
        3. Ensures that in case of multiple values, the first is kept.
        4. Creates a folder to store the building footprints.
        5. Saves the spatial file for future use.
        6. Merges the joined buildings with the exposure CSV and saves it.
        7. Updates the site attributes with the relative path to the saved building footprints.
        8. Logs the location where the building footprints are saved.
        """
        buildings = self.fiat_model.exposure.exposure_geoms[
            self._get_fiat_building_index()
        ]
        exposure_csv = self.fiat_model.exposure.exposure_db
        # Re-running this step must not duplicate the footprint-id column.
        if "BF_FID" in exposure_csv.columns:
            self.logger.warning(
                "Column 'BF_FID' already exists in the exposure columns and will be replaced."
            )
            del exposure_csv["BF_FID"]
        buildings_joined, building_footprints = self.spatial_join(
            buildings,
            building_footprints,
            field_name,
            rename="BF_FID",
            filter=True,
        )
        # Make sure in case of multiple values that the first is kept
        buildings_joined = (
            buildings_joined.groupby(_FIAT_COLUMNS.object_id)
            .first()
            .sort_values(by=[_FIAT_COLUMNS.object_id])
        )
        # Create folder
        bf_folder = Path(self.fiat_model.root) / "exposure" / "building_footprints"
        bf_folder.mkdir(parents=True, exist_ok=True)

        # Save the spatial file for future use
        geo_path = bf_folder / "building_footprints.gpkg"
        building_footprints.to_file(geo_path)

        # Save to exposure csv
        exposure_csv = exposure_csv.merge(
            buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
        )

        # Set model building footprints
        self.fiat_model.building_footprint = building_footprints
        self.fiat_model.exposure.exposure_db = exposure_csv

        # Save site attributes
        # NOTE(review): assumes geo_path lives under self.static_path — verify
        # relative_to() cannot raise here for non-standard FIAT roots.
        buildings_path = geo_path.relative_to(self.static_path)
        self.logger.info(
            f"Building footprints saved at {(self.static_path / buildings_path).resolve().as_posix()}"
        )

        return buildings_path
|
|
1928
|
+
    def _clip_hazard_extend(self, clip_footprints: bool = True):
        """
        Clip the exposure data to the bounding box of the hazard data.

        This method clips the exposure data to the bounding box of the hazard data. It creates a GeoDataFrame
        from the hazard polygons, and then uses the `gpd.clip` function to clip the exposure geometries to the
        bounding box of the hazard polygons. If the exposure data contains roads, it is split into two separate
        GeoDataFrames: one for buildings and one for roads. The clipped exposure data is then saved back to the
        `exposure_db` attribute of the `FiatModel` object.

        Parameters
        ----------
        clip_footprints : bool, optional
            When True (default), also drop building footprints whose buildings
            were clipped away.

        Returns
        -------
        None
        """
        gdf = self.fiat_model.exposure.get_full_gdf(
            self.fiat_model.exposure.exposure_db
        )
        crs = gdf.crs
        sfincs_extend = self.sfincs_overland_model.region
        sfincs_extend = sfincs_extend.to_crs(crs)

        # Clip the fiat region
        clipped_region = self.fiat_model.region.to_crs(crs).clip(sfincs_extend)
        self.fiat_model.geoms["region"] = clipped_region

        # Clip the exposure geometries
        # Filter buildings and roads
        road_inds = gdf[_FIAT_COLUMNS.primary_object_type].str.contains("road")
        # Ensure road_inds is a boolean Series
        if not road_inds.dtype == bool:
            road_inds = road_inds.astype(bool)
        # Clip buildings
        gdf_buildings = gdf[~road_inds]
        gdf_buildings = self._clip_gdf(
            gdf_buildings, clipped_region, predicate="within"
        ).reset_index(drop=True)

        if road_inds.any():
            # Clip roads
            gdf_roads = gdf[road_inds]
            gdf_roads = self._clip_gdf(
                gdf_roads, clipped_region, predicate="within"
            ).reset_index(drop=True)

            idx_buildings = self.fiat_model.exposure.geom_names.index(
                self.config.fiat_buildings_name
            )
            idx_roads = self.fiat_model.exposure.geom_names.index(
                self.config.fiat_roads_name
            )
            self.fiat_model.exposure.exposure_geoms[idx_buildings] = gdf_buildings[
                [_FIAT_COLUMNS.object_id, "geometry"]
            ]
            self.fiat_model.exposure.exposure_geoms[idx_roads] = gdf_roads[
                [_FIAT_COLUMNS.object_id, "geometry"]
            ]
            gdf = pd.concat([gdf_buildings, gdf_roads])
        else:
            gdf = gdf_buildings
            # NOTE(review): index 0 is assumed to be the buildings geometry set
            # here, whereas the roads branch looks the index up by name — confirm.
            self.fiat_model.exposure.exposure_geoms[0] = gdf[
                [_FIAT_COLUMNS.object_id, "geometry"]
            ]

        # Save exposure dataframe
        del gdf["geometry"]
        self.fiat_model.exposure.exposure_db = gdf.reset_index(drop=True)

        # Clip the building footprints
        fieldname = "BF_FID"
        if clip_footprints and not self.fiat_model.building_footprint.empty:
            # Get buildings after filtering and their footprint id
            self.fiat_model.building_footprint = self.fiat_model.building_footprint[
                self.fiat_model.building_footprint[fieldname].isin(
                    gdf_buildings[fieldname]
                )
            ].reset_index(drop=True)
|
|
2009
|
+
@staticmethod
|
|
2010
|
+
def _clip_gdf(
|
|
2011
|
+
gdf1: gpd.GeoDataFrame, gdf2: gpd.GeoDataFrame, predicate: str = "within"
|
|
2012
|
+
):
|
|
2013
|
+
gdf_new = gpd.sjoin(gdf1, gdf2, how="inner", predicate=predicate)
|
|
2014
|
+
gdf_new = gdf_new.drop(
|
|
2015
|
+
columns=[
|
|
2016
|
+
col
|
|
2017
|
+
for col in gdf_new.columns
|
|
2018
|
+
if col.endswith("_right") or (col in gdf2.columns and col != "geometry")
|
|
2019
|
+
]
|
|
2020
|
+
)
|
|
2021
|
+
|
|
2022
|
+
return gdf_new
|
|
2023
|
+
|
|
2024
|
+
    @staticmethod
    def spatial_join(
        objects: gpd.GeoDataFrame,
        layer: Union[str, gpd.GeoDataFrame],
        field_name: str,
        rename: Optional[str] = None,
        filter: Optional[bool] = False,
    ) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]:
        """
        Perform a spatial join between two GeoDataFrames.

        Args:
            objects (gpd.GeoDataFrame): The GeoDataFrame representing the objects.
            layer (Union[str, gpd.GeoDataFrame]): The GeoDataFrame or file path of the layer to join with.
            field_name (str): The name of the field to use for the join.
            rename (Optional[str], optional): The new name to assign to the joined field. Defaults to None.
            filter (Optional[bool], optional): When True, drop layer features that
                no object intersects. Defaults to False.

        Returns
        -------
        tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]: A tuple containing the joined GeoDataFrame and the layer GeoDataFrame.

        """
        # Read in layer and keep only column of interest
        if not isinstance(layer, gpd.GeoDataFrame):
            layer = gpd.read_file(layer)
        layer = layer[[field_name, "geometry"]]
        layer = layer.to_crs(objects.crs)
        # Avoid sjoin suffixing when the objects already carry a same-named column.
        if field_name in objects.columns:
            layer = layer.rename(columns={field_name: "layer_field"})
            field_name = "layer_field"
        # Spatial join of the layers
        objects_joined = objects.sjoin(layer, how="left", predicate="intersects")

        # Keep only the first intersection for each object
        objects_joined = (
            objects_joined.groupby(_FIAT_COLUMNS.object_id).first().reset_index()
        )

        # if needed filter out unused objects in the layer
        if filter:
            layer_inds = objects_joined["index_right"].dropna().unique()
            layer = layer.iloc[np.sort(layer_inds)].reset_index(drop=True)
        objects_joined = objects_joined[[_FIAT_COLUMNS.object_id, field_name]]
        # rename field if provided
        if rename:
            objects_joined = objects_joined.rename(columns={field_name: rename})
            layer = layer.rename(columns={field_name: rename})
        return objects_joined, layer
|
|
2073
|
+
def _get_fiat_building_index(self) -> int:
|
|
2074
|
+
return self.fiat_model.exposure.geom_names.index(
|
|
2075
|
+
self.config.fiat_buildings_name
|
|
2076
|
+
)
|
|
2077
|
+
|
|
2078
|
+
def _get_fiat_road_index(self) -> int:
|
|
2079
|
+
return self.fiat_model.exposure.geom_names.index(self.config.fiat_roads_name)
|
|
2080
|
+
|
|
2081
|
+
    def _get_closest_station(self):
        """Find the tide-gauge station closest to the SFINCS domain.

        Returns
        -------
        The station id of the closest active station, or ``None`` when the
        closest station is farther away than the configured
        ``tide_gauge.max_distance``.
        """
        # Get available stations from source
        obs_data = obs.source(self.config.tide_gauge.source)
        obs_data.get_active_stations()
        obs_stations = obs_data.gdf()
        # Calculate distance from SFINCS region to all available stations in degrees
        obs_stations["distance"] = obs_stations.distance(
            self.sfincs_overland_model.region.to_crs(4326).geometry.item()
        )
        # Get the closest station and its distance in meters
        closest_station = obs_stations[
            obs_stations["distance"] == obs_stations["distance"].min()
        ]
        # Recompute the distance in the model CRS to get a metric value.
        distance = round(
            closest_station.to_crs(self.sfincs_overland_model.region.crs)
            .distance(self.sfincs_overland_model.region.geometry.item())
            .item(),
            0,
        )

        distance = us.UnitfulLength(value=distance, units=us.UnitTypesLength.meters)
        self.logger.info(
            f"The closest tide gauge from {self.config.tide_gauge.source} is located {distance.transform(self.unit_system.default_length_units)} from the SFINCS domain"
        )
        # Check if user provided max distance
        # TODO make sure units are explicit for max_distance
        if self.config.tide_gauge.max_distance is not None:
            units_new = self.config.tide_gauge.max_distance.units
            distance_new = us.UnitfulLength(
                value=distance.convert(units_new), units=units_new
            )
            if distance_new.value > self.config.tide_gauge.max_distance.value:
                self.logger.warning(
                    f"This distance is larger than the 'max_distance' value of {self.config.tide_gauge.max_distance.value} {units_new} provided in the config file. The station cannot be used."
                )
                return None

        # get station id
        station_id = closest_station["id"].item()

        return station_id
|
|
2123
|
+
    def _get_station_metadata(self, station_id: str, ref: str = "MLLW"):
        """
        Retrieve the metadata of a tide gauge station, expressed relative to *ref*.

        Args:
            station_id (str): Identifier of the station at the configured source.
            ref (str, optional): The reference level for water level measurements. Defaults to "MLLW".

        Returns
        -------
        dict: A dictionary containing the metadata of the tide gauge station.
            The dictionary includes the following keys:
            - "id": The station ID.
            - "name": The station name.
            - "datum": The difference between the station's datum and the reference level.
            - "datum_name": The name of the datum used by the station.
            - "msl": The difference between the Mean Sea Level (MSL) and the reference level.
            - "mllw": The difference between MLLW and the reference level.
            - "mhhw": The difference between MHHW and the reference level.
            - "reference": The reference level used for water level measurements.
            - "units": The length units reported by the station's datums.
            - "lon": The longitude of the station.
            - "lat": The latitude of the station.
        """
        # Get available stations from source
        obs_data = obs.source(self.config.tide_gauge.source)
        # read station metadata
        station_metadata = obs_data.get_meta_data(station_id)
        # TODO check if all stations can be used? Tidal attr?
        # Get water levels by using the ref provided
        datum_name = station_metadata["datums"]["OrthometricDatum"]
        datums = station_metadata["datums"]["datums"]
        names = [datum["name"] for datum in datums]

        # All reported levels are shifted so that *ref* is the zero level.
        ref_value = datums[names.index(ref)]["value"]

        meta = {
            "id": station_id,
            "name": station_metadata["name"],
            "datum": round(datums[names.index(datum_name)]["value"] - ref_value, 3),
            "datum_name": datum_name,
            "msl": round(datums[names.index("MSL")]["value"] - ref_value, 3),
            "mllw": round(datums[names.index("MLLW")]["value"] - ref_value, 3),
            "mhhw": round(datums[names.index("MHHW")]["value"] - ref_value, 3),
            "reference": ref,
            "units": station_metadata["datums"]["units"],
            "lon": station_metadata["lng"],
            "lat": station_metadata["lat"],
        }

        self.logger.info(
            f"The tide gauge station '{station_metadata['name']}' from {self.config.tide_gauge.source} will be used to download nearshore historical water level time-series."
        )

        self.logger.info(
            f"The station metadata will be used to fill in the water_level attribute in the site.toml. The reference level will be {ref}."
        )

        return meta
|
|
2179
|
+
def _get_bin_colors(self):
|
|
2180
|
+
"""
|
|
2181
|
+
Retrieve the bin colors from the bin_colors.toml file.
|
|
2182
|
+
|
|
2183
|
+
Returns
|
|
2184
|
+
-------
|
|
2185
|
+
dict: A dictionary containing the bin colors.
|
|
2186
|
+
"""
|
|
2187
|
+
templates_path = Path(__file__).parent.resolve().joinpath("templates")
|
|
2188
|
+
with open(
|
|
2189
|
+
templates_path.joinpath("output_layers", "bin_colors.toml"), "rb"
|
|
2190
|
+
) as f:
|
|
2191
|
+
bin_colors = tomli.load(f)
|
|
2192
|
+
return bin_colors
|
|
2193
|
+
|
|
2194
|
+
|
|
2195
|
+
# Interactive entry point: repeatedly prompt for a config toml and build the
# database, letting the user retry (or quit) after a failure.
if __name__ == "__main__":
    while True:
        config_path = Path(
            input(
                "Please provide the path to the database creation configuration toml: \n"
            )
        )
        try:
            config = ConfigModel.read(config_path)
            dbs = DatabaseBuilder(config)
            dbs.build()
        except Exception as e:
            # Surface the error and offer to quit instead of crashing the prompt loop.
            print(e)
            quit = input("Do you want to quit? (y/n)")
            if quit == "y":
                exit()