flood-adapt 0.3.11__py3-none-any.whl → 0.3.13__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flood_adapt/__init__.py +3 -2
- flood_adapt/adapter/fiat_adapter.py +44 -9
- flood_adapt/adapter/sfincs_adapter.py +173 -76
- flood_adapt/config/gui.py +1 -0
- flood_adapt/database_builder/__init__.py +23 -0
- flood_adapt/database_builder/database_builder.py +615 -362
- flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +2 -2
- flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +1 -1
- flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +1 -1
- flood_adapt/dbs_classes/database.py +62 -3
- flood_adapt/dbs_classes/dbs_benefit.py +1 -0
- flood_adapt/dbs_classes/dbs_event.py +1 -0
- flood_adapt/dbs_classes/dbs_measure.py +1 -0
- flood_adapt/dbs_classes/dbs_projection.py +1 -0
- flood_adapt/dbs_classes/dbs_scenario.py +1 -0
- flood_adapt/dbs_classes/dbs_strategy.py +1 -0
- flood_adapt/dbs_classes/dbs_template.py +2 -1
- flood_adapt/misc/log.py +20 -12
- flood_adapt/objects/events/historical.py +3 -3
- flood_adapt/objects/events/hurricane.py +1 -1
- flood_adapt/objects/forcing/plotting.py +7 -34
- {flood_adapt-0.3.11.dist-info → flood_adapt-0.3.13.dist-info}/METADATA +1 -1
- {flood_adapt-0.3.11.dist-info → flood_adapt-0.3.13.dist-info}/RECORD +26 -27
- flood_adapt/database_builder.py +0 -16
- {flood_adapt-0.3.11.dist-info → flood_adapt-0.3.13.dist-info}/LICENSE +0 -0
- {flood_adapt-0.3.11.dist-info → flood_adapt-0.3.13.dist-info}/WHEEL +0 -0
- {flood_adapt-0.3.11.dist-info → flood_adapt-0.3.13.dist-info}/top_level.txt +0 -0
flood_adapt/database_builder/database_builder.py
@@ -1,9 +1,13 @@
 import datetime
+import logging
 import math
 import os
+import re
 import shutil
+import time
 import warnings
 from enum import Enum
+from functools import wraps
 from pathlib import Path
 from typing import Optional, Union
 from urllib.request import urlretrieve
@@ -80,21 +84,49 @@ from flood_adapt.objects.projections.projections import (
 )
 from flood_adapt.objects.strategies.strategies import Strategy

+logger = FloodAdaptLogging.getLogger("DatabaseBuilder")
+
+
+def debug_timer(func):
+    @wraps(func)
+    def wrapper(*args, **kwargs):
+        logger = FloodAdaptLogging.getLogger("DatabaseBuilder")  # No forced log level
+        if logger.isEnabledFor(logging.DEBUG):
+            logger.debug(f"Started '{func.__name__}'")
+            start_time = time.perf_counter()
+
+            result = func(*args, **kwargs)
+
+            end_time = time.perf_counter()
+            elapsed = end_time - start_time
+            logger.debug(f"Finished '{func.__name__}' in {elapsed:.4f} seconds")
+        else:
+            result = func(*args, **kwargs)
+
+        return result
+
+    return wrapper
+

 def path_check(str_path: str, config_path: Optional[Path] = None) -> str:
-    """
-    Check if the given path is absolute and return the absolute path.
+    """Check if the given path is absolute and return the absolute path.

-
-
+    Parameters
+    ----------
+    str_path : str
+        The path to be checked.
+    config_path : Optional[Path], default None
+        The base path to resolve relative paths.

     Returns
     -------
-
+    str
+        The absolute path as a string.

     Raises
     ------
-
+    ValueError
+        If the path is not absolute and no config_path is provided.
     """
     path = Path(str_path)
     if not path.is_absolute():
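The debug_timer decorator added above only measures and logs elapsed time when the "DatabaseBuilder" logger is enabled for DEBUG; otherwise it simply calls the wrapped function. A minimal, self-contained sketch of the same pattern using the standard logging module (FloodAdaptLogging is replaced here, and example_step is a hypothetical function):

    import logging
    import time
    from functools import wraps

    logging.basicConfig(level=logging.DEBUG)

    def debug_timer(func):
        # Time the call only when DEBUG logging is active, as in the decorator above.
        @wraps(func)
        def wrapper(*args, **kwargs):
            logger = logging.getLogger("DatabaseBuilder")
            if logger.isEnabledFor(logging.DEBUG):
                start = time.perf_counter()
                result = func(*args, **kwargs)
                logger.debug(f"Finished '{func.__name__}' in {time.perf_counter() - start:.4f} seconds")
            else:
                result = func(*args, **kwargs)
            return result
        return wrapper

    @debug_timer
    def example_step():
        time.sleep(0.05)  # stand-in for real work

    example_step()  # emits a DEBUG record with the elapsed time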
@@ -106,14 +138,16 @@ def path_check(str_path: str, config_path: Optional[Path] = None) -> str:


 class SpatialJoinModel(BaseModel):
-    """
-    Represents a spatial join model.
+    """Represents a spatial join model.

     Attributes
     ----------
-    name
-
-
+    name : Optional[str], default None
+        The name of the model.
+    file : str
+        The file associated with the model.
+    field_name : str
+        The field name used for the spatial join.
     """

     name: Optional[str] = None
@@ -122,14 +156,14 @@ class SpatialJoinModel(BaseModel):


 class UnitSystems(str, Enum):
-    """
-
-    It provides two options: `imperial` and `metric`.
+    """Enumeration for accepted values for the unit_system field.

     Attributes
     ----------
-
-
+    imperial : str
+        Represents the imperial unit system.
+    metric : str
+        Represents the metric unit system.
     """

     imperial = "imperial"
@@ -137,24 +171,36 @@ class UnitSystems(str, Enum):


 class FootprintsOptions(str, Enum):
+    """Enumeration for accepted values for the building_footprints field.
+
+    Attributes
+    ----------
+    OSM : str
+        Use OpenStreetMap for building footprints.
+    """
+
     OSM = "OSM"


 class Basins(str, Enum):
-    """
-    Enumeration class representing different basins.
-
-    Each basin is represented by a string value.
+    """Enumeration class representing different basins.

     Attributes
     ----------
-
-
-
-
-
-
-
+    NA : str
+        North Atlantic
+    SA : str
+        South Atlantic
+    EP : str
+        Eastern North Pacific (which includes the Central Pacific region)
+    WP : str
+        Western North Pacific
+    SP : str
+        South Pacific
+    SI : str
+        South Indian
+    NI : str
+        North Indian
     """

     NA = "NA"
@@ -167,15 +213,18 @@ class Basins(str, Enum):


 class GuiConfigModel(BaseModel):
-    """
-    Represents a GUI model for for FloodAdapt.
+    """Represents a GUI model for FloodAdapt.

     Attributes
     ----------
-
-
-
-
+    max_flood_depth : float
+        The last visualization bin will be ">value".
+    max_aggr_dmg : float
+        The last visualization bin will be ">value".
+    max_footprint_dmg : float
+        The last visualization bin will be ">value".
+    max_benefits : float
+        The last visualization bin will be ">value".
     """

     max_flood_depth: float
@@ -185,32 +234,38 @@ class GuiConfigModel(BaseModel):


 class SviConfigModel(SpatialJoinModel):
-    """
-    Represents a model for the Social Vulnerability Index (SVI).
+    """Represents a model for the Social Vulnerability Index (SVI).

     Attributes
     ----------
-
+    threshold : float
+        The threshold value for the SVI model to specify vulnerability.
     """

     threshold: float


-class Point(BaseModel):
-    lat: float
-    lon: float
-
-
 class TideGaugeConfigModel(BaseModel):
-    """
-    Represents a tide gauge model.
+    """Represents a tide gauge model.

     Attributes
     ----------
-
-
-
-
+    source : TideGaugeSource
+        The source of the tide gauge data.
+    description : str, default ""
+        Description of the tide gauge.
+    ref : Optional[str], default None
+        The reference name. Should be defined in the water level references.
+    id : Optional[int], default None
+        The station ID.
+    lon : Optional[float], default None
+        Longitude of the tide gauge.
+    lat : Optional[float], default None
+        Latitude of the tide gauge.
+    file : Optional[str], default None
+        The file associated with the tide gauge data.
+    max_distance : Optional[us.UnitfulLength], default None
+        The maximum distance.
     """

     source: TideGaugeSource
@@ -223,52 +278,59 @@ class TideGaugeConfigModel(BaseModel):
     max_distance: Optional[us.UnitfulLength] = None


-class SviModel(SpatialJoinModel):
-    """
-    Represents a model for the Social Vulnerability Index (SVI).
-
-    Attributes
-    ----------
-    threshold (float): The threshold value for the SVI model to specify vulnerability.
-    """
-
-    threshold: float
-
-
 class ConfigModel(BaseModel):
-    """
-    Represents the configuration model for FloodAdapt.
+    """Represents the configuration model for FloodAdapt.

     Attributes
     ----------
     name : str
         The name of the site.
-    description : Optional[str], default
+    description : Optional[str], default None
         The description of the site.
     database_path : Optional[str], default None
         The path to the database where all the sites are located.
-    sfincs : str
-        The SFINCS model path.
-    sfincs_offshore : Optional[str], default None
-        The offshore SFINCS model path.
-    fiat : str
-        The FIAT model path.
     unit_system : UnitSystems
         The unit system.
-    gui :
+    gui : GuiConfigModel
         The GUI model representing scaling values for the layers.
-
-
-
-        The
-
-        The
+    infographics : Optional[bool], default True
+        Indicates if infographics are enabled.
+    fiat : str
+        The FIAT model path.
+    aggregation_areas : Optional[list[SpatialJoinModel]], default None
+        The list of aggregation area models.
+    building_footprints : Optional[SpatialJoinModel | FootprintsOptions], default FootprintsOptions.OSM
+        The building footprints model or OSM option.
+    fiat_buildings_name : str | list[str], default "buildings"
+        The name(s) of the buildings geometry in the FIAT model.
+    fiat_roads_name : Optional[str], default "roads"
+        The name of the roads geometry in the FIAT model.
     bfe : Optional[SpatialJoinModel], default None
         The BFE model.
-    svi : Optional[
+    svi : Optional[SviConfigModel], default None
         The SVI model.
-    road_width : Optional[float], default
+    road_width : Optional[float], default 5
         The road width in meters.
+    return_periods : list[int], default []
+        The list of return periods for risk calculations.
+    floodmap_type : Optional[FloodmapType], default None
+        The type of floodmap to use.
+    references : WaterlevelReferenceModel, default WaterlevelReferenceModel(...)
+        The water level reference model.
+    sfincs_overland : FloodModel
+        The overland SFINCS model.
+    sfincs_offshore : Optional[FloodModel], default None
+        The offshore SFINCS model.
+    dem : Optional[DemModel], default None
+        The DEM model.
+    excluded_datums : list[str], default []
+        List of datums to exclude from plotting.
+    slr_scenarios : Optional[SlrScenariosModel], default None
+        The sea level rise scenarios model.
+    scs : Optional[SCSModel], default None
+        The SCS model.
+    tide_gauge : Optional[TideGaugeConfigModel], default None
+        The tide gauge model.
     cyclones : Optional[bool], default True
         Indicates if cyclones are enabled.
     cyclone_basin : Optional[Basins], default None
@@ -277,8 +339,6 @@ class ConfigModel(BaseModel):
         The list of observation point models.
     probabilistic_set : Optional[str], default None
         The probabilistic set path.
-    infographics : Optional[bool], default True
-        Indicates if infographics are enabled.
     """

     # General
@@ -295,24 +355,18 @@ class ConfigModel(BaseModel):
     building_footprints: Optional[SpatialJoinModel | FootprintsOptions] = (
         FootprintsOptions.OSM
     )
-    fiat_buildings_name:
+    fiat_buildings_name: str | list[str] = "buildings"
     fiat_roads_name: Optional[str] = "roads"
     bfe: Optional[SpatialJoinModel] = None
     svi: Optional[SviConfigModel] = None
-    road_width:
+    road_width: us.UnitfulLength = us.UnitfulLength(
+        value=5.0, units=us.UnitTypesLength.meters
+    )
     return_periods: list[int] = Field(default_factory=list)
+    floodmap_type: Optional[FloodmapType] = None

     # SFINCS
-    references: WaterlevelReferenceModel =
-        reference="MSL",
-        datums=[
-            DatumModel(
-                name="MSL",
-                height=us.UnitfulLength(value=0.0, units=us.UnitTypesLength.meters),
-            ),
-        ],
-    )
-
+    references: Optional[WaterlevelReferenceModel] = None
     sfincs_overland: FloodModel
     sfincs_offshore: Optional[FloodModel] = None
     dem: Optional[DemModel] = None
@@ -328,17 +382,18 @@ class ConfigModel(BaseModel):
     probabilistic_set: Optional[str] = None

     @staticmethod
-    def read(toml_path: Path) -> "ConfigModel":
+    def read(toml_path: Union[str, Path]) -> "ConfigModel":
         """
         Read a configuration file and returns the validated attributes.

         Args:
-
+            toml_path (str | Path): The path to the configuration file.

         Returns
         -------
         ConfigModel: The validated attributes from the configuration file.
         """
+        toml_path = Path(toml_path)
         with open(toml_path, mode="rb") as fp:
             toml = tomli.load(fp)
         config = ConfigModel.model_validate(toml)
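ConfigModel.read now accepts either a str or a Path and coerces it with Path(toml_path) before opening the file. A minimal sketch of the calling pattern (the config file name is hypothetical, and importing directly from the database_builder module is assumed):

    from pathlib import Path

    from flood_adapt.database_builder.database_builder import ConfigModel, DatabaseBuilder

    # Both spellings are accepted after this change.
    config = ConfigModel.read("site_config.toml")
    config = ConfigModel.read(Path("site_config.toml"))

    builder = DatabaseBuilder(config)
    builder.build(overwrite=True)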
@@ -379,13 +434,11 @@ class ConfigModel(BaseModel):


 class DatabaseBuilder:
-    logger = FloodAdaptLogging.getLogger("DatabaseBuilder")
-
     _has_roads: bool = False
     _aggregation_areas: Optional[list] = None
     _probabilistic_set_name: Optional[str] = None

-    def __init__(self, config: ConfigModel
+    def __init__(self, config: ConfigModel):
         self.config = config

         # Set database root
@@ -399,13 +452,11 @@ class DatabaseBuilder:
         # Read info that needs to be used to create other models
         self.unit_system = self.create_default_units()

-        # Read info that needs to be updated with other model info
-        self.water_level_references = self.config.references
-
     @property
     def static_path(self) -> Path:
         return self.root / "static"

+    @debug_timer
     def build(self, overwrite: bool = False) -> None:
         # Check if database already exists
         if self.root.exists() and not overwrite:
@@ -423,9 +474,7 @@ class DatabaseBuilder:
         with FloodAdaptLogging.to_file(
             file_path=self.root.joinpath("database_builder.log")
         ):
-
-                f"Creating a FloodAdapt database in '{self.root.as_posix()}'"
-            )
+            logger.info(f"Creating a FloodAdapt database in '{self.root.as_posix()}'")

             # Make folder structure and read models
             self.setup()
@@ -441,8 +490,9 @@ class DatabaseBuilder:
             self.create_standard_objects()

             # Save log file
-
+            logger.info("FloodAdapt database creation finished!")

+    @debug_timer
     def setup(self) -> None:
         # Create the models
         self.make_folder_structure()
@@ -452,6 +502,10 @@ class DatabaseBuilder:
         self.read_template_sfincs_overland_model()
         self.read_template_sfincs_offshore_model()

+        # Copy standard static files
+        self.add_static_files()
+
+    @debug_timer
     def set_standard_objects(self):
         # Define name and create object
         self._no_measures_strategy_name = "no_measures"
@@ -467,14 +521,13 @@ class DatabaseBuilder:
         )
         return std_obj

+    @debug_timer
     def create_standard_objects(self):
         with modified_environ(
             DATABASE_ROOT=str(self.root.parent),
             DATABASE_NAME=self.root.name,
         ):
-
-                "Creating `no measures` strategy and `current` projection."
-            )
+            logger.info("Creating `no measures` strategy and `current` projection.")
             # Create database instance
             db = Database(self.root.parent, self.config.name)
             # Create no measures strategy
@@ -506,6 +559,7 @@ class DatabaseBuilder:
         )

     ### TEMPLATE READERS ###
+    @debug_timer
     def read_template_fiat_model(self):
         user_provided = self._check_exists_and_absolute(self.config.fiat)

@@ -543,6 +597,7 @@ class DatabaseBuilder:

         self.fiat_model = in_db

+    @debug_timer
     def read_template_sfincs_overland_model(self):
         user_provided = self._check_exists_and_absolute(
             self.config.sfincs_overland.name
@@ -560,6 +615,7 @@ class DatabaseBuilder:
         in_db.read()
         self.sfincs_overland_model = in_db

+    @debug_timer
     def read_template_sfincs_offshore_model(self):
         if self.config.sfincs_offshore is None:
             self.sfincs_offshore_model = None
@@ -582,6 +638,7 @@ class DatabaseBuilder:
         self.sfincs_offshore_model = in_db

     ### FIAT ###
+    @debug_timer
     def create_fiat_model(self) -> FiatModel:
         fiat = FiatModel(
             config=self.create_fiat_config(),
@@ -590,17 +647,18 @@ class DatabaseBuilder:
         )
         return fiat

+    @debug_timer
     def create_risk_model(self) -> Optional[RiskModel]:
         # Check if return periods are provided
         if not self.config.return_periods:
             if self._probabilistic_set_name:
                 risk = RiskModel()
-
+                logger.warning(
                     f"No return periods provided, but a probabilistic set is available. Using default return periods {risk.return_periods}."
                 )
                 return risk
             else:
-
+                logger.warning(
                     "No return periods provided and no probabilistic set available. Risk calculations will not be performed."
                 )
                 return None
@@ -608,9 +666,10 @@ class DatabaseBuilder:
            risk = RiskModel(return_periods=self.config.return_periods)
            return risk

+    @debug_timer
     def create_benefit_config(self) -> Optional[BenefitsModel]:
         if self._probabilistic_set_name is None:
-
+            logger.warning(
                 "No probabilistic set found in the config, benefits will not be available."
             )
             return None
@@ -621,14 +680,10 @@ class DatabaseBuilder:
             event_set=self._probabilistic_set_name,
         )

+    @debug_timer
     def create_fiat_config(self) -> FiatConfigModel:
         # Make sure only csv objects have geometries
-
-            keep = geoms[_FIAT_COLUMNS.object_id].isin(
-                self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.object_id]
-            )
-            geoms = geoms[keep].reset_index(drop=True)
-            self.fiat_model.exposure.exposure_geoms[i] = geoms
+        self._delete_extra_geometries()

         footprints = self.create_footprints()
         if footprints is not None:
@@ -642,9 +697,16 @@ class DatabaseBuilder:
         self._aggregation_areas = self.create_aggregation_areas()

         roads_gpkg = self.create_roads()
-
-
-
+
+        # Get classes of non-building objects
+        non_buildings = ~self.fiat_model.exposure.exposure_db[
+            _FIAT_COLUMNS.object_id
+        ].isin(self._get_fiat_building_geoms()[_FIAT_COLUMNS.object_id])
+        non_building_names = list(
+            self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.primary_object_type][
+                non_buildings
+            ].unique()
+        )

         # Update elevations
         self.update_fiat_elevation()
@@ -676,11 +738,18 @@ class DatabaseBuilder:
                 self.fiat_model.config["exposure"]["geom"][key]
             ).name
         self.fiat_model.config["output"]["geom"] = output_geom
+        # Make sure objects are ordered based on object id
+        self.fiat_model.exposure.exposure_db = (
+            self.fiat_model.exposure.exposure_db.sort_values(
+                by=[_FIAT_COLUMNS.object_id], ignore_index=True
+            )
+        )
         # Update FIAT model with the new config
         self.fiat_model.write()

         return config

+    @debug_timer
     def update_fiat_elevation(self):
         """
         Update the ground elevations of FIAT objects based on the SFINCS ground elevation map.
@@ -691,7 +760,7 @@ class DatabaseBuilder:
         dem_file = self._dem_path
         # TODO resolve issue with double geometries in hydromt-FIAT and use update_ground_elevation method instead
         # self.fiat_model.update_ground_elevation(dem_file, grnd_elev_unit="meters")
-
+        logger.info(
             "Updating FIAT objects ground elevations from SFINCS ground elevation map."
         )
         SFINCS_units = us.UnitfulLength(
@@ -701,88 +770,59 @@ class DatabaseBuilder:
         conversion_factor = SFINCS_units.convert(FIAT_units)

         if not math.isclose(conversion_factor, 1):
-
+            logger.info(
                 f"Ground elevation for FIAT objects is in '{FIAT_units}', while SFINCS ground elevation is in 'meters'. Values in the exposure csv will be converted by a factor of {conversion_factor}"
             )

         exposure = self.fiat_model.exposure.exposure_db
         dem = rxr.open_rasterio(dem_file)
-
-
-
-
-
-
-
-            x_points = xr.DataArray(roads["centroid"].x, dims="points")
-            y_points = xr.DataArray(roads["centroid"].y, dims="points")
-            roads["elev"] = (
-                dem.sel(x=x_points, y=y_points, band=1, method="nearest").to_numpy()
-                * conversion_factor
-            )
-
-            exposure.loc[
-                exposure[_FIAT_COLUMNS.primary_object_type] == "road",
-                _FIAT_COLUMNS.ground_floor_height,
-            ] = 0
-            exposure = exposure.merge(
-                roads[[_FIAT_COLUMNS.object_id, "elev"]],
-                on=_FIAT_COLUMNS.object_id,
-                how="left",
-            )
-            exposure.loc[
-                exposure[_FIAT_COLUMNS.primary_object_type] == "road",
-                _FIAT_COLUMNS.ground_elevation,
-            ] = exposure.loc[
-                exposure[_FIAT_COLUMNS.primary_object_type] == "road", "elev"
-            ]
-            del exposure["elev"]
-            self.fiat_model.exposure.exposure_db = exposure
-
-        buildings = self.fiat_model.exposure.exposure_geoms[
-            self._get_fiat_building_index()
-        ].to_crs(dem.spatial_ref.crs_wkt)
-        buildings["geometry"] = buildings.geometry.centroid
-        x_points = xr.DataArray(buildings["geometry"].x, dims="points")
-        y_points = xr.DataArray(buildings["geometry"].y, dims="points")
-        buildings["elev"] = (
+
+        gdf = self._get_fiat_gdf_full()
+        gdf["centroid"] = gdf.geometry.centroid
+        x_points = xr.DataArray(gdf["centroid"].x, dims="points")
+        y_points = xr.DataArray(gdf["centroid"].y, dims="points")
+        gdf["elev"] = (
             dem.sel(x=x_points, y=y_points, band=1, method="nearest").to_numpy()
             * conversion_factor
         )
+
         exposure = exposure.merge(
-
+            gdf[[_FIAT_COLUMNS.object_id, "elev"]],
             on=_FIAT_COLUMNS.object_id,
             how="left",
         )
-        exposure.
-            exposure[_FIAT_COLUMNS.primary_object_type] != "road",
-            _FIAT_COLUMNS.ground_elevation,
-        ] = exposure.loc[exposure[_FIAT_COLUMNS.primary_object_type] != "road", "elev"]
+        exposure[_FIAT_COLUMNS.ground_elevation] = exposure["elev"]
         del exposure["elev"]

+        self.fiat_model.exposure.exposure_db = exposure
+
     def read_damage_unit(self) -> str:
-        if self.fiat_model.exposure.damage_unit is
-
-        else:
-            self.logger.warning(
+        if self.fiat_model.exposure.damage_unit is None:
+            logger.warning(
                 "Delft-FIAT model was missing damage units so '$' was assumed."
             )
-
+            self.fiat_model.exposure.damage_unit = "$"
+        return self.fiat_model.exposure.damage_unit

+    @debug_timer
     def read_floodmap_type(self) -> FloodmapType:
-
-
-            self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.extraction_method]
-            == "area"
-        ).any():
-            return FloodmapType.water_depth
+        if self.config.floodmap_type is not None:
+            return self.config.floodmap_type
         else:
-
+            # If there is at least on object that uses the area method, use water depths for FA calcs
+            if (
+                self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.extraction_method]
+                == "area"
+            ).any():
+                return FloodmapType.water_depth
+            else:
+                return FloodmapType.water_level

+    @debug_timer
     def create_roads(self) -> Optional[str]:
         # Make sure that FIAT roads are polygons
         if self.config.fiat_roads_name not in self.fiat_model.exposure.geom_names:
-
+            logger.warning(
                 "Road objects are not available in the FIAT model and thus would not be available in FloodAdapt."
             )
            # TODO check how this naming of output geoms should become more explicit!
@@ -795,28 +835,29 @@ class DatabaseBuilder:
            _FIAT_COLUMNS.segment_length
            not in self.fiat_model.exposure.exposure_db.columns
        ):
-
+            logger.warning(
                 f"'{_FIAT_COLUMNS.segment_length}' column not present in the FIAT exposure csv. Road impact infometrics cannot be produced."
             )

         # TODO should this should be performed through hydromt-FIAT?
         if not isinstance(roads.geometry.iloc[0], Polygon):
             roads = roads.to_crs(roads.estimate_utm_crs())
-
-
-            )
+            road_width = self.config.road_width.convert(us.UnitTypesLength.meters)
+            roads.geometry = roads.geometry.buffer(road_width / 2, cap_style=2)
             roads = roads.to_crs(self.fiat_model.exposure.crs)
             self.fiat_model.exposure.exposure_geoms[self._get_fiat_road_index()] = roads
-
+            logger.info(
                 f"FIAT road objects transformed from lines to polygons assuming a road width of {self.config.road_width} meters."
             )

         self._has_roads = True
         return f"{self.config.fiat_roads_name}.gpkg"

+    @debug_timer
     def create_new_developments(self) -> Optional[str]:
         return "new_development_area.gpkg"

+    @debug_timer
     def create_footprints(self) -> Optional[Path]:
         if isinstance(self.config.building_footprints, SpatialJoinModel):
             # Use the provided building footprints
@@ -824,7 +865,7 @@ class DatabaseBuilder:
                 self.config.building_footprints.file
             )

-
+            logger.info(
                 f"Using building footprints from {Path(building_footprints_file).as_posix()}."
             )
             # Spatially join buildings and map
@@ -835,7 +876,7 @@ class DatabaseBuilder:
             )
             return path
         elif self.config.building_footprints == FootprintsOptions.OSM:
-
+            logger.info(
                 "Building footprint data will be downloaded from Open Street Maps."
             )
             region = self.fiat_model.region
@@ -857,12 +898,10 @@ class DatabaseBuilder:
             return path
         # Then check if geometries are already footprints
         elif isinstance(
-            self.
-                self._get_fiat_building_index()
-            ].geometry.iloc[0],
+            self._get_fiat_building_geoms().geometry.iloc[0],
             (Polygon, MultiPolygon),
         ):
-
+            logger.info(
                 "Building footprints are already available in the FIAT model geometry files."
             )
             return None
@@ -885,21 +924,20 @@ class DatabaseBuilder:
                 f"While 'BF_FID' column exists, building footprints file {footprints_path} not found."
             )

-
-                f"Using the building footprints located at {footprints_path}."
-            )
+            logger.info(f"Using the building footprints located at {footprints_path}.")
             return footprints_path.relative_to(self.static_path)

         # Other methods
         else:
-
+            logger.warning(
                 "No building footprints are available. Buildings will be plotted with a default shape in FloodAdapt."
             )
             return None

+    @debug_timer
     def create_bfe(self) -> Optional[BFEModel]:
         if self.config.bfe is None:
-
+            logger.warning(
                 "No base flood elevation provided. Elevating building relative to base flood elevation will not be possible in FloodAdapt."
             )
             return None
@@ -907,13 +945,13 @@ class DatabaseBuilder:
         # TODO can we use hydromt-FIAT?
         bfe_file = self._check_exists_and_absolute(self.config.bfe.file)

-
+        logger.info(
             f"Using map from {Path(bfe_file).as_posix()} as base flood elevation."
         )

         # Spatially join buildings and map
         buildings_joined, bfe = self.spatial_join(
-            self.
+            self._get_fiat_building_geoms(),
             bfe_file,
             self.config.bfe.field_name,
         )
@@ -940,6 +978,7 @@ class DatabaseBuilder:
             field_name=self.config.bfe.field_name,
         )

+    @debug_timer
     def create_aggregation_areas(self) -> list[AggregationModel]:
         # TODO split this to 3 methods?
         aggregation_areas = []
@@ -982,7 +1021,7 @@ class DatabaseBuilder:
                 )
                 aggregation_areas.append(aggr)

-
+                logger.info(
                     f"Aggregation areas: {aggr.name} from the FIAT model are going to be used."
                 )

@@ -997,18 +1036,19 @@ class DatabaseBuilder:
                 aggr_name = Path(aggr.file).stem
                 # If aggregation area already in FIAT model raise Error
                 if aggr_name in [aggr.name for aggr in aggregation_areas]:
-
-                        f"Aggregation area '{aggr_name}' already exists in the FIAT model."
+                    logger.warning(
+                        f"Aggregation area '{aggr_name}' already exists in the FIAT model. The input aggregation area will be ignored."
                     )
+                    continue
                 # Do spatial join of FIAT objects and aggregation areas
                 exposure_csv = self.fiat_model.exposure.exposure_db
-
-
-
-                    ],
+                gdf = self._get_fiat_gdf_full()
+                gdf_joined, aggr_areas = self.spatial_join(
+                    objects=gdf[[_FIAT_COLUMNS.object_id, "geometry"]],
                     layer=str(self._check_exists_and_absolute(aggr.file)),
                     field_name=aggr.field_name,
                     rename=_FIAT_COLUMNS.aggregation_label.format(name=aggr_name),
+                    filter=True,
                 )
                 aggr_path = Path(self.fiat_model.root).joinpath(
                     "exposure", "aggregation_areas", f"{Path(aggr.file).stem}.gpkg"
@@ -1016,7 +1056,7 @@ class DatabaseBuilder:
                 aggr_path.parent.mkdir(parents=True, exist_ok=True)
                 aggr_areas.to_file(aggr_path)
                 exposure_csv = exposure_csv.merge(
-
+                    gdf_joined, on=_FIAT_COLUMNS.object_id, how="left"
                 )
                 self.fiat_model.exposure.exposure_db = exposure_csv
                 # Update spatial joins in FIAT model
@@ -1042,6 +1082,9 @@ class DatabaseBuilder:
                         ),
                     )
                 )
+                logger.info(
+                    f"Aggregation areas: {aggr_name} provided in the config are going to be used."
+                )

         # No config provided, no aggr areas in the model -> try to use the region file as a mock aggregation area
         if (
@@ -1067,31 +1110,29 @@ class DatabaseBuilder:
             aggregation_areas.append(aggr)

             # Add column in FIAT
-
-
-
-                ],
+            gdf = self._get_fiat_gdf_full()
+            gdf_joined, aggr_areas = self.spatial_join(
+                objects=gdf[[_FIAT_COLUMNS.object_id, "geometry"]],
                 layer=region,
                 field_name="aggr_id",
                 rename=_FIAT_COLUMNS.aggregation_label.format(name="region"),
             )
             exposure_csv = exposure_csv.merge(
-
+                gdf_joined, on=_FIAT_COLUMNS.object_id, how="left"
             )
             self.fiat_model.exposure.exposure_db = exposure_csv
-
+            logger.warning(
                 "No aggregation areas were available in the FIAT model and none were provided in the config file. The region file will be used as a mock aggregation area."
             )
         return aggregation_areas

+    @debug_timer
     def create_svi(self) -> Optional[SVIModel]:
         if self.config.svi:
             svi_file = self._check_exists_and_absolute(self.config.svi.file)
             exposure_csv = self.fiat_model.exposure.exposure_db
             buildings_joined, svi = self.spatial_join(
-                self.
-                    self._get_fiat_building_index()
-                ],
+                self._get_fiat_building_geoms(),
                 svi_file,
                 self.config.svi.field_name,
                 rename="SVI",
@@ -1099,12 +1140,12 @@ class DatabaseBuilder:
             )
             # Add column to exposure
             if "SVI" in exposure_csv.columns:
-
+                logger.info(
                     f"'SVI' column in the FIAT exposure csv will be replaced by {svi_file.as_posix()}."
                 )
                 del exposure_csv["SVI"]
             else:
-
+                logger.info(
                     f"'SVI' column in the FIAT exposure csv will be filled by {svi_file.as_posix()}."
                 )
             exposure_csv = exposure_csv.merge(
@@ -1116,7 +1157,7 @@ class DatabaseBuilder:
             svi_path = self.static_path / "templates" / "fiat" / "svi" / "svi.gpkg"
             svi_path.parent.mkdir(parents=True, exist_ok=True)
             svi.to_file(svi_path)
-
+            logger.info(
                 f"An SVI map can be shown in FloodAdapt GUI using '{self.config.svi.field_name}' column from {svi_file.as_posix()}"
             )

@@ -1125,19 +1166,17 @@ class DatabaseBuilder:
                 field_name="SVI",
             )
         elif "SVI" in self.fiat_model.exposure.exposure_db.columns:
-
+            logger.info(
                 "'SVI' column present in the FIAT exposure csv. Vulnerability type infometrics can be produced."
             )
             add_attrs = self.fiat_model.spatial_joins["additional_attributes"]
             if "SVI" not in [attr["name"] for attr in add_attrs]:
-
-                    "No SVI map found to display in the FloodAdapt GUI!"
-                )
+                logger.warning("No SVI map found to display in the FloodAdapt GUI!")

             ind = [attr["name"] for attr in add_attrs].index("SVI")
             svi = add_attrs[ind]
             svi_path = self.static_path / "templates" / "fiat" / svi["file"]
-
+            logger.info(
                 f"An SVI map can be shown in FloodAdapt GUI using '{svi['field_name']}' column from {svi['file']}"
             )
             # Save site attributes
@@ -1147,16 +1186,18 @@ class DatabaseBuilder:
             )

         else:
-
+            logger.warning(
                 "'SVI' column not present in the FIAT exposure csv. Vulnerability type infometrics cannot be produced."
             )
             return None

    ### SFINCS ###
+    @debug_timer
     def create_sfincs_config(self) -> SfincsModel:
         # call these functions before others to make sure water level references are updated
         config = self.create_sfincs_model_config()
-
+        self.water_level_references = self.create_water_level_references()
+        self.tide_gauge = self.create_tide_gauge()

         sfincs = SfincsModel(
             config=config,
@@ -1165,16 +1206,46 @@ class DatabaseBuilder:
             dem=self.create_dem_model(),
             scs=self.create_scs_model(),
             cyclone_track_database=self.create_cyclone_track_database(),
-            tide_gauge=tide_gauge,
+            tide_gauge=self.tide_gauge,
             river=self.create_rivers(),
             obs_point=self.create_observation_points(),
         )

         return sfincs

+    @debug_timer
+    def create_water_level_references(self) -> WaterlevelReferenceModel:
+        sfincs_ref = self.config.sfincs_overland.reference
+        if self.config.references is None:
+            logger.warning(
+                f"No water level references provided in the config file. Using reference provided for overland SFINCS model '{sfincs_ref}' as the main reference."
+            )
+            refs = WaterlevelReferenceModel(
+                reference=sfincs_ref,
+                datums=[
+                    DatumModel(
+                        name=sfincs_ref,
+                        height=us.UnitfulLength(
+                            value=0.0, units=self.unit_system.default_length_units
+                        ),
+                    )
+                ],
+            )
+        else:
+            # Check if sfincs_ref is in the references
+            if sfincs_ref not in [ref.name for ref in self.config.references.datums]:
+                raise ValueError(
+                    f"Reference '{sfincs_ref}' not found in the provided references."
+                )
+            else:
+                refs = self.config.references
+
+        return refs
+
+    @debug_timer
     def create_cyclone_track_database(self) -> Optional[CycloneTrackDatabaseModel]:
         if not self.config.cyclones or not self.config.sfincs_offshore:
-
+            logger.warning("No cyclones will be available in the database.")
             return None

         if self.config.cyclone_basin:
@@ -1184,17 +1255,20 @@ class DatabaseBuilder:

         name = f"IBTrACS.{basin.value}.v04r01.nc"
         url = f"https://www.ncei.noaa.gov/data/international-best-track-archive-for-climate-stewardship-ibtracs/v04r01/access/netcdf/{name}"
-
+        logger.info(f"Downloading cyclone track database from {url}")
         fn = Path(self.root) / "static" / "cyclone_track_database" / name
         fn.parent.mkdir(parents=True, exist_ok=True)

         try:
             urlretrieve(url, fn)
         except Exception:
-
+            logger.warning(f"Could not retrieve cyclone track database from {url}")
+            logger.warning("No cyclones will be available in the database.")
+            return None

         return CycloneTrackDatabaseModel(file=name)

+    @debug_timer
     def create_scs_model(self) -> Optional[SCSModel]:
         if self.config.scs is None:
             return None
@@ -1205,11 +1279,12 @@ class DatabaseBuilder:

         return SCSModel(file=scs_file.name, type=self.config.scs.type)

+    @debug_timer
     def create_dem_model(self) -> DemModel:
         if self.config.dem:
             subgrid_sfincs = Path(self.config.dem.filename)
         else:
-
+            logger.warning(
                 "No subgrid depth geotiff file provided in the config file. Using the one from the SFINCS model."
             )
             subgrid_sfincs = (
@@ -1227,7 +1302,7 @@ class DatabaseBuilder:
            shutil.move(tiles_sfincs, fa_tiles_path)
            if (fa_tiles_path / "index").exists():
                os.rename(fa_tiles_path / "index", fa_tiles_path / "indices")
-
+            logger.info(
                 "Tiles were already available in the SFINCS model and will directly be used in FloodAdapt."
             )
         else:
@@ -1239,7 +1314,7 @@ class DatabaseBuilder:
                 zoom_range=[0, 13],
                 fmt="png",
             )
-
+            logger.info(
                 f"Tiles were created using the {subgrid_sfincs.as_posix()} as the elevation map."
             )

@@ -1249,6 +1324,7 @@ class DatabaseBuilder:
            filename=fa_subgrid_path.name, units=us.UnitTypesLength.meters
        )  # always in meters

+    @debug_timer
     def create_sfincs_model_config(self) -> SfincsConfigModel:
         config = SfincsConfigModel(
             csname=self.sfincs_overland_model.crs.name,
@@ -1263,6 +1339,7 @@ class DatabaseBuilder:

         return config

+    @debug_timer
     def create_slr(self) -> Optional[SlrScenariosModel]:
         if self.config.slr_scenarios is None:
             return None
@@ -1280,17 +1357,52 @@ class DatabaseBuilder:
             relative_to_year=self.config.slr_scenarios.relative_to_year,
         )

+    @debug_timer
     def create_observation_points(self) -> Union[list[ObsPointModel], None]:
         if self.config.obs_point is None:
-
+            obs_points = []
+        else:
+            logger.info("Observation points were provided in the config file.")
+            obs_points = self.config.obs_point
+        if self.tide_gauge is not None:
+            # Check if the tide gauge point is within the SFINCS region
+            region = self.sfincs_overland_model.region
+            point = gpd.GeoSeries(
+                [gpd.points_from_xy([self.tide_gauge.lon], [self.tide_gauge.lat])[0]],
+                crs=4326,
+            )
+            region_4326 = region.to_crs(4326)
+            if not point.within(region_4326.unary_union).item():
+                logger.warning(
+                    "The tide gauge location is outside the SFINCS region and will not be added as an observation point."
+                )
+            else:
+                logger.info(
+                    "A tide gauge has been setup in the database. It will be used as an observation point as well."
+                )
+                obs_points.append(
+                    ObsPointModel(
+                        name=self.tide_gauge.name,
+                        description="Tide gauge observation point",
+                        ID=self.tide_gauge.ID,
+                        lon=self.tide_gauge.lon,
+                        lat=self.tide_gauge.lat,
+                    )
+                )

-
-
+        if not obs_points:
+            logger.warning(
+                "No observation points were provided in the config file or created from the tide gauge. No observation points will be available in FloodAdapt."
+            )
+            return None
+        else:
+            return obs_points

+    @debug_timer
     def create_rivers(self) -> list[RiverModel]:
         src_file = Path(self.sfincs_overland_model.root) / "sfincs.src"
         if not src_file.exists():
-
+            logger.warning("No rivers found in the SFINCS model.")
             return []

         df = pd.read_csv(src_file, delim_whitespace=True, header=None, names=["x", "y"])
@@ -1310,7 +1422,7 @@ class DatabaseBuilder:
                )
            else:
                discharge = 0
-
+                logger.warning(
                     f"No river discharge conditions were found in the SFINCS model for river {idx}. A default value of 0 will be used."
                 )

@@ -1324,20 +1436,18 @@ class DatabaseBuilder:
            )
            rivers.append(river)

-
+        logger.info(
             f"{len(river_locs)} river(s) were identified from the SFINCS model and will be available in FloodAdapt for discharge input."
         )

         return rivers

+    @debug_timer
     def create_tide_gauge(self) -> Optional[TideGauge]:
         if self.config.tide_gauge is None:
-
+            logger.warning(
                 "Tide gauge information not provided. Historical events will not have an option to use gauged data in FloodAdapt!"
             )
-            self.logger.warning(
-                "No water level references were found. It is assumed that MSL is equal to the datum used in the SFINCS overland model. You can provide these values with the tide_gauge.ref attribute in the site.toml."
-            )
             return None

         if self.config.tide_gauge.source == TideGaugeSource.file:
@@ -1346,10 +1456,17 @@ class DatabaseBuilder:
                     "Tide gauge file needs to be provided when 'file' is selected as the source."
                 )
            if self.config.tide_gauge.ref is None:
-
-                    "Tide gauge reference not provided.
+                logger.warning(
+                    f"Tide gauge reference not provided. '{self.water_level_references.reference}' is assumed as the reference of the water levels in the file."
                 )
-                self.config.tide_gauge.ref =
+                self.config.tide_gauge.ref = self.water_level_references.reference
+            else:
+                if self.config.tide_gauge.ref not in [
+                    datum.name for datum in self.water_level_references.datums
+                ]:
+                    raise ValueError(
+                        f"Provided tide gauge reference '{self.config.tide_gauge.ref}' not found in the water level references!"
+                    )

            tide_gauge_file = self._check_exists_and_absolute(
                self.config.tide_gauge.file
@@ -1360,7 +1477,7 @@ class DatabaseBuilder:
            shutil.copyfile(self.config.tide_gauge.file, db_file_path)

            rel_db_path = Path(db_file_path.relative_to(self.static_path))
-
+            logger.warning(
                 f"Tide gauge from file {rel_db_path} assumed to be in {self.unit_system.default_length_units}!"
             )
            tide_gauge = TideGauge(
@@ -1381,22 +1498,92 @@ class DatabaseBuilder:
            else:
                ref = "MLLW"  # If reference is not provided use MLLW

-            self.water_level_references.reference = (
-                ref  # update the water level reference
-            )
-
            if self.config.tide_gauge.id is None:
                station_id = self._get_closest_station()
-
+                logger.info(
                     "The closest NOAA tide gauge station to the site will be searched."
                 )
            else:
                station_id = self.config.tide_gauge.id
-
+                logger.info(
                     f"The NOAA tide gauge station with the provided ID {station_id} will be used."
                 )
            station = self._get_station_metadata(station_id=station_id, ref=ref)
            if station is not None:
+                # First create water level references based on station
+                # Get datums
+                datums = []
+                # Get local datum
+                datums.append(
+                    DatumModel(
+                        name=station["datum_name"],
+                        height=us.UnitfulLength(
+                            value=station["datum"], units=station["units"]
+                        ).transform(self.unit_system.default_length_units),
+                    )
+                )
+                # Get MSL
+                datums.append(
+                    DatumModel(
+                        name="MSL",
+                        height=us.UnitfulLength(
+                            value=station["msl"], units=station["units"]
+                        ).transform(self.unit_system.default_length_units),
+                    )
+                )
+                # Get extras
+                for name in ["MLLW", "MHHW"]:
+                    height = us.UnitfulLength(
+                        value=station[name.lower()], units=station["units"]
+                    ).transform(self.unit_system.default_length_units)
+
+                    wl_info = DatumModel(
+                        name=name,
+                        height=height,
+                    )
+                    datums.append(wl_info)
+
+                station_refs = WaterlevelReferenceModel(reference=ref, datums=datums)
+
+                # Check if we can translate the rest of the datums
+                if self.water_level_references.reference != station_refs.reference:
+                    for dat in self.water_level_references.datums:
+                        if dat.name not in [
+                            datum.name for datum in station_refs.datums
+                        ]:
+                            # If datum is not in the datums, try to convert it
+                            h1 = dat.height
+                            ref1 = self.water_level_references.reference
+                            h2 = h1 + station_refs.get_datum(ref1).height
+                            # Replace the datum in self.water_level_references.datums
+                            dat.height = h2
+                            logger.warning(
+                                f"Datum '{dat.name}' converted to reference '{ref1}' with new height {h2}."
+                            )
+
+                # Check if datums already exist in the water level references and replace
+                for datum in datums:
+                    existing_datum = next(
+                        (
+                            dat
+                            for dat in self.water_level_references.datums
+                            if dat.name == datum.name
+                        ),
+                        None,
+                    )
+                    if existing_datum:
+                        self.water_level_references.datums.remove(existing_datum)
+                        logger.warning(
+                            f"Datum '{datum.name}' already exists in config reference. Replacing it based on NOAA station data."
+                        )
+                    self.water_level_references.datums.append(datum)
+
+                # Update reference datum
+                self.water_level_references.reference = (
+                    ref  # update the water level reference
+                )
+                logger.warning(f"Main water level reference set to '{ref}'.")
+
                # Add tide_gauge information in site toml
                tide_gauge = TideGauge(
                    name=station["name"],
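The datum translation above re-expresses a config datum relative to the NOAA station reference by adding the height of the config reference as measured against the station reference (h2 = h1 + station_refs.get_datum(ref1).height). A small numeric sketch with made-up heights, in meters:

    ref1 = "MSL"               # reference used in the config
    station_reference = "MLLW" # reference reported by the NOAA station

    msl_above_mllw = 0.8       # station datum: MSL measured relative to MLLW
    navd88_above_msl = 0.1     # config datum: NAVD88 measured relative to MSL

    # h2 = h1 + station_refs.get_datum(ref1).height
    navd88_above_mllw = navd88_above_msl + msl_above_mllw
    print(navd88_above_mllw)   # 0.9 -> NAVD88 re-expressed relative to MLLW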
@@ -1409,50 +1596,14 @@ class DatabaseBuilder:
                    units=us.UnitTypesLength.meters,  # the api always asks for SI units right now
                )

-                local_datum = DatumModel(
-                    name=station["datum_name"],
-                    height=us.UnitfulLength(
-                        value=station["datum"], units=station["units"]
-                    ).transform(self.unit_system.default_length_units),
-                )
-                self.water_level_references.datums.append(local_datum)
-
-                msl = DatumModel(
-                    name="MSL",
-                    height=us.UnitfulLength(
-                        value=station["msl"], units=station["units"]
-                    ).transform(self.unit_system.default_length_units),
-                )
-                # Check if MSL is already there and if yes replace it
-                existing_msl = next(
-                    (
-                        datum
-                        for datum in self.water_level_references.datums
-                        if datum.name == "MSL"
-                    ),
-                    None,
-                )
-                if existing_msl:
-                    self.water_level_references.datums.remove(existing_msl)
-                self.water_level_references.datums.append(msl)
-
-                for name in ["MLLW", "MHHW"]:
-                    height = us.UnitfulLength(
-                        value=station[name.lower()], units=station["units"]
-                    ).transform(self.unit_system.default_length_units)
-
-                    wl_info = DatumModel(
-                        name=name,
-                        height=height,
-                    )
-                    self.water_level_references.datums.append(wl_info)
                return tide_gauge
        else:
-
+            logger.warning(
                 f"Tide gauge source not recognized: {self.config.tide_gauge.source}. Historical events will not have an option to use gauged data in FloodAdapt!"
             )
            return None

+    @debug_timer
     def create_offshore_model(self) -> Optional[FloodModel]:
         if self.sfincs_offshore_model is None:
             return None
@@ -1477,7 +1628,7 @@ class DatabaseBuilder:
|
|
|
1477
1628
|
index=False,
|
|
1478
1629
|
header=False,
|
|
1479
1630
|
)
|
|
1480
|
-
|
|
1631
|
+
logger.info(
|
|
1481
1632
|
"Output points of the offshore SFINCS model were reconfigured to the boundary points of the overland SFINCS model."
|
|
1482
1633
|
)
|
|
1483
1634
|
|
|
@@ -1487,6 +1638,7 @@ class DatabaseBuilder:
|
|
|
1487
1638
|
vertical_offset=self.config.sfincs_offshore.vertical_offset,
|
|
1488
1639
|
)
|
|
1489
1640
|
|
|
1641
|
+
@debug_timer
|
|
1490
1642
|
def create_overland_model(self) -> FloodModel:
|
|
1491
1643
|
return FloodModel(
|
|
1492
1644
|
name="overland",
|
|
@@ -1494,6 +1646,7 @@ class DatabaseBuilder:
|
|
|
1494
1646
|
)
|
|
1495
1647
|
|
|
1496
1648
|
### SITE ###
|
|
1649
|
+
@debug_timer
|
|
1497
1650
|
def create_site_config(self) -> Site:
|
|
1498
1651
|
"""Create the site configuration for the FloodAdapt model.
|
|
1499
1652
|
|
|
@@ -1533,18 +1686,16 @@ class DatabaseBuilder:
|
|
|
1533
1686
|
)
|
|
1534
1687
|
return config
|
|
1535
1688
|
|
|
1689
|
+
@debug_timer
|
|
1536
1690
|
def read_location(self) -> tuple[float, float]:
|
|
1537
1691
|
# Get center of area of interest
|
|
1538
1692
|
if not self.fiat_model.region.empty:
|
|
1539
1693
|
center = self.fiat_model.region.dissolve().centroid.to_crs(4326)[0]
|
|
1540
1694
|
else:
|
|
1541
|
-
center = (
|
|
1542
|
-
self.fiat_model.exposure.exposure_geoms[self._get_fiat_building_index()]
|
|
1543
|
-
.dissolve()
|
|
1544
|
-
.centroid.to_crs(4326)[0]
|
|
1545
|
-
)
|
|
1695
|
+
center = self._get_fiat_building_geoms().dissolve().centroid.to_crs(4326)[0]
|
|
1546
1696
|
return center.x, center.y
|
|
1547
1697
|
|
|
1698
|
+
@debug_timer
|
|
1548
1699
|
def create_gui_config(self) -> GuiModel:
|
|
1549
1700
|
gui = GuiModel(
|
|
1550
1701
|
units=self.unit_system,
|
|
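read_location above now derives the site centre either from the FIAT region or from the concatenated building geometries; in both cases the geometry is dissolved, its centroid taken, and the point re-projected to WGS84. A small geopandas sketch of that pattern (the coordinates and CRS are made up):

    import geopandas as gpd
    from shapely.geometry import box

    # Two tiles of a model region in a projected CRS (UTM 17N, purely illustrative).
    region = gpd.GeoDataFrame(
        geometry=[box(500_000, 3_620_000, 501_000, 3_621_000),
                  box(501_000, 3_620_000, 502_000, 3_621_000)],
        crs="EPSG:32617",
    )

    # Dissolve to a single polygon, take its centroid, express it in lon/lat.
    center = region.dissolve().centroid.to_crs(4326)[0]
    print(center.x, center.y)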
@@ -1555,6 +1706,7 @@ class DatabaseBuilder:
|
|
|
1555
1706
|
|
|
1556
1707
|
return gui
|
|
1557
1708
|
|
|
1709
|
+
@debug_timer
|
|
1558
1710
|
def create_default_units(self) -> GuiUnitModel:
|
|
1559
1711
|
if self.config.unit_system == UnitSystems.imperial:
|
|
1560
1712
|
return GuiUnitModel.imperial()
|
|
@@ -1565,6 +1717,7 @@ class DatabaseBuilder:
|
|
|
1565
1717
|
f"Unit system {self.config.unit_system} not recognized. Please choose 'imperial' or 'metric'."
|
|
1566
1718
|
)
|
|
1567
1719
|
|
|
1720
|
+
@debug_timer
|
|
1568
1721
|
def create_visualization_layers(self) -> VisualizationLayers:
|
|
1569
1722
|
visualization_layers = VisualizationLayers()
|
|
1570
1723
|
if self._svi is not None:
|
|
@@ -1578,6 +1731,7 @@ class DatabaseBuilder:
|
|
|
1578
1731
|
)
|
|
1579
1732
|
return visualization_layers
|
|
1580
1733
|
|
|
1734
|
+
@debug_timer
|
|
1581
1735
|
def create_output_layers_config(self) -> OutputLayers:
|
|
1582
1736
|
# Read default colors from template
|
|
1583
1737
|
fd_max = self.config.gui.max_flood_depth
|
|
@@ -1633,6 +1787,7 @@ class DatabaseBuilder:
|
|
|
1633
1787
|
)
|
|
1634
1788
|
return output_layers
|
|
1635
1789
|
|
|
1790
|
+
@debug_timer
|
|
1636
1791
|
def create_hazard_plotting_config(self) -> PlottingModel:
|
|
1637
1792
|
datum_names = [datum.name for datum in self.water_level_references.datums]
|
|
1638
1793
|
if "MHHW" in datum_names:
|
|
@@ -1640,20 +1795,20 @@ class DatabaseBuilder:
|
|
|
1640
1795
|
self.water_level_references.get_datum("MHHW").height
|
|
1641
1796
|
- self.water_level_references.get_datum("MSL").height
|
|
1642
1797
|
)
|
|
1643
|
-
|
|
1798
|
+
logger.info(
|
|
1644
1799
|
f"The default tidal amplitude in the GUI will be {amplitude.transform(self.unit_system.default_length_units)}, calculated as the difference between MHHW and MSL from the tide gauge data."
|
|
1645
1800
|
)
|
|
1646
1801
|
else:
|
|
1647
1802
|
amplitude = us.UnitfulLength(
|
|
1648
1803
|
value=0.0, units=self.unit_system.default_length_units
|
|
1649
1804
|
)
|
|
1650
|
-
|
|
1805
|
+
logger.warning(
|
|
1651
1806
|
"The default tidal amplitude in the GUI will be 0.0, since no tide-gauge water levels are available. You can change this in the site.toml with the 'gui.tide_harmonic_amplitude' attribute."
|
|
1652
1807
|
)
|
|
1653
1808
|
|
|
1654
1809
|
ref = "MSL"
|
|
1655
1810
|
if ref not in datum_names:
|
|
1656
|
-
|
|
1811
|
+
logger.warning(
|
|
1657
1812
|
f"The Mean Sea Level (MSL) datum is not available in the site.toml. The synthetic tide will be created relative to the main reference: {self.water_level_references.reference}."
|
|
1658
1813
|
)
|
|
1659
1814
|
ref = self.water_level_references.reference
|
|
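The plotting configuration above takes the default GUI tidal amplitude as MHHW minus MSL when both datums are known, and 0.0 otherwise. A plain-float sketch of that fallback (the real code works with us.UnitfulLength objects; the heights here are invented):

    # Heights of tidal datums above the station's main reference.
    datums = {"MSL": 1.40, "MHHW": 1.93}

    # MHHW minus MSL when both are present, otherwise fall back to zero amplitude.
    amplitude = datums["MHHW"] - datums["MSL"] if {"MSL", "MHHW"} <= datums.keys() else 0.0
    print(round(amplitude, 2))  # 0.53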
@@ -1668,6 +1823,7 @@ class DatabaseBuilder:
|
|
|
1668
1823
|
|
|
1669
1824
|
return plotting
|
|
1670
1825
|
|
|
1826
|
+
@debug_timer
|
|
1671
1827
|
def create_infometrics(self):
|
|
1672
1828
|
"""
|
|
1673
1829
|
Copy the infometrics and infographics templates to the appropriate location and modify the metrics_config.toml files.
|
|
@@ -1728,6 +1884,7 @@ class DatabaseBuilder:
|
|
|
1728
1884
|
with open(file, "wb") as f:
|
|
1729
1885
|
tomli_w.dump(attrs, f)
|
|
1730
1886
|
|
|
1887
|
+
@debug_timer
|
|
1731
1888
|
def _create_optional_infometrics(self, templates_path: Path, path_im: Path):
|
|
1732
1889
|
# If infographics are going to be created in FA, get template metric configurations
|
|
1733
1890
|
if not self.config.infographics:
|
|
@@ -1736,14 +1893,10 @@ class DatabaseBuilder:
|
|
|
1736
1893
|
# Check what type of infographics should be used
|
|
1737
1894
|
if self.config.unit_system == UnitSystems.imperial:
|
|
1738
1895
|
metrics_folder_name = "US_NSI"
|
|
1739
|
-
|
|
1740
|
-
"Default NSI infometrics and infographics will be created."
|
|
1741
|
-
)
|
|
1896
|
+
logger.info("Default NSI infometrics and infographics will be created.")
|
|
1742
1897
|
elif self.config.unit_system == UnitSystems.metric:
|
|
1743
1898
|
metrics_folder_name = "OSM"
|
|
1744
|
-
|
|
1745
|
-
"Default OSM infometrics and infographics will be created."
|
|
1746
|
-
)
|
|
1899
|
+
logger.info("Default OSM infometrics and infographics will be created.")
|
|
1747
1900
|
else:
|
|
1748
1901
|
raise ValueError(
|
|
1749
1902
|
f"Unit system {self.config.unit_system} is not recognized. Please choose 'imperial' or 'metric'."
|
|
@@ -1790,6 +1943,7 @@ class DatabaseBuilder:
|
|
|
1790
1943
|
path_1 = self.root.joinpath("static", "templates", "infographics", "images")
|
|
1791
1944
|
shutil.copytree(path_0, path_1)
|
|
1792
1945
|
|
|
1946
|
+
@debug_timer
|
|
1793
1947
|
def add_static_files(self):
|
|
1794
1948
|
"""
|
|
1795
1949
|
Copy static files from the 'templates' folder to the 'static' folder.
|
|
@@ -1804,10 +1958,38 @@ class DatabaseBuilder:
|
|
|
1804
1958
|
path_1 = self.static_path / folder
|
|
1805
1959
|
shutil.copytree(path_0, path_1)
|
|
1806
1960
|
|
|
1961
|
+
# Check table values
|
|
1962
|
+
green_infra_path = (
|
|
1963
|
+
self.static_path / "green_infra_table" / "green_infra_lookup_table.csv"
|
|
1964
|
+
)
|
|
1965
|
+
df = pd.read_csv(green_infra_path)
|
|
1966
|
+
# Convert "Infiltration depth (feet)" to the database unit system and rename column
|
|
1967
|
+
# Find the column that has "Infiltration depth" in its name
|
|
1968
|
+
infiltration_col = next(
|
|
1969
|
+
(col for col in df.columns if "Infiltration depth" in col), None
|
|
1970
|
+
)
|
|
1971
|
+
# Try to infer the units from the column name, e.g., "Infiltration depth (feet)"
|
|
1972
|
+
match = re.search(r"\((.*?)\)", infiltration_col)
|
|
1973
|
+
current_units = match.group(1).strip()
|
|
1974
|
+
|
|
1975
|
+
# Determine target units and column name
|
|
1976
|
+
if self.unit_system.default_length_units != current_units:
|
|
1977
|
+
target_units = self.unit_system.default_length_units
|
|
1978
|
+
new_col = f"Infiltration depth ({target_units.value})"
|
|
1979
|
+
conversion_factor = us.UnitfulLength(
|
|
1980
|
+
value=1.0, units=current_units
|
|
1981
|
+
).convert(target_units)
|
|
1982
|
+
|
|
1983
|
+
df[new_col] = (df[infiltration_col] * conversion_factor).round(2)
|
|
1984
|
+
df = df.drop(columns=[infiltration_col])
|
|
1985
|
+
# Save the updated table
|
|
1986
|
+
df.to_csv(green_infra_path, index=False)
|
|
1987
|
+
|
|
1988
|
+
@debug_timer
|
|
1807
1989
|
def add_probabilistic_set(self):
|
|
1808
1990
|
# Copy prob set if given
|
|
1809
1991
|
if self.config.probabilistic_set:
|
|
1810
|
-
|
|
1992
|
+
logger.info(
|
|
1811
1993
|
f"Probabilistic event set imported from {self.config.probabilistic_set}"
|
|
1812
1994
|
)
|
|
1813
1995
|
prob_event_name = Path(self.config.probabilistic_set).name
|
|
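Earlier in this hunk the builder rewrites green_infra_lookup_table.csv so the infiltration-depth column matches the database unit system, reading the source units out of the column header. A self-contained pandas sketch of that conversion (the table contents and the feet-to-metres factor are illustrative; the real code uses us.UnitfulLength.convert):

    import re
    import pandas as pd

    df = pd.DataFrame({
        "Type": ["bioswale", "rain garden"],
        "Infiltration depth (feet)": [1.5, 2.0],
    })

    # Locate the infiltration column and read its units from the parentheses.
    col = next(c for c in df.columns if "Infiltration depth" in c)
    units = re.search(r"\((.*?)\)", col).group(1).strip()

    # Convert to metres and swap the column when the units differ.
    if units == "feet":
        df["Infiltration depth (meters)"] = (df[col] * 0.3048).round(2)
        df = df.drop(columns=[col])
    print(df)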
@@ -1815,7 +1997,7 @@ class DatabaseBuilder:
|
|
|
1815
1997
|
shutil.copytree(self.config.probabilistic_set, path_db)
|
|
1816
1998
|
self._probabilistic_set_name = prob_event_name
|
|
1817
1999
|
else:
|
|
1818
|
-
|
|
2000
|
+
logger.warning(
|
|
1819
2001
|
"Probabilistic event set not provided. Risk scenarios cannot be run in FloodAdapt."
|
|
1820
2002
|
)
|
|
1821
2003
|
self._probabilistic_set_name = None
|
|
@@ -1829,7 +2011,7 @@ class DatabaseBuilder:
|
|
|
1829
2011
|
the input and static folders. It also creates subfolders within the input and
|
|
1830
2012
|
static folders based on a predefined list of names.
|
|
1831
2013
|
"""
|
|
1832
|
-
|
|
2014
|
+
logger.info("Preparing the database folder structure.")
|
|
1833
2015
|
inputs = [
|
|
1834
2016
|
"events",
|
|
1835
2017
|
"projections",
|
|
@@ -1856,6 +2038,23 @@ class DatabaseBuilder:
|
|
|
1856
2038
|
else:
|
|
1857
2039
|
raise ValueError(f"Path {path} is not absolute.")
|
|
1858
2040
|
|
|
2041
|
+
def _get_fiat_building_geoms(self) -> gpd.GeoDataFrame:
|
|
2042
|
+
"""
|
|
2043
|
+
Get the building geometries from the FIAT model.
|
|
2044
|
+
|
|
2045
|
+
Returns
|
|
2046
|
+
-------
|
|
2047
|
+
gpd.GeoDataFrame
|
|
2048
|
+
A GeoDataFrame containing the building geometries.
|
|
2049
|
+
"""
|
|
2050
|
+
building_indices = self._get_fiat_building_index()
|
|
2051
|
+
buildings = pd.concat(
|
|
2052
|
+
[self.fiat_model.exposure.exposure_geoms[i] for i in building_indices],
|
|
2053
|
+
ignore_index=True,
|
|
2054
|
+
)
|
|
2055
|
+
return buildings
|
|
2056
|
+
|
|
2057
|
+
@debug_timer
|
|
1859
2058
|
def _join_building_footprints(
|
|
1860
2059
|
self, building_footprints: gpd.GeoDataFrame, field_name: str
|
|
1861
2060
|
) -> Path:
|
|
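The new _get_fiat_building_geoms helper added in the hunk above stacks the exposure geometry layers selected by _get_fiat_building_index into a single frame. A geopandas sketch of the same concat pattern with two toy layers (the layer contents are made up):

    import geopandas as gpd
    import pandas as pd
    from shapely.geometry import Point

    layer_a = gpd.GeoDataFrame({"object_id": [1, 2]},
                               geometry=[Point(0, 0), Point(1, 1)], crs="EPSG:4326")
    layer_b = gpd.GeoDataFrame({"object_id": [3]},
                               geometry=[Point(2, 2)], crs="EPSG:4326")

    # Concatenate the selected layers and renumber the index, as the helper does.
    buildings = pd.concat([layer_a, layer_b], ignore_index=True)
    print(len(buildings))  # 3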
@@ -1878,12 +2077,10 @@ class DatabaseBuilder:
|
|
|
1878
2077
|
7. Updates the site attributes with the relative path to the saved building footprints.
|
|
1879
2078
|
8. Logs the location where the building footprints are saved.
|
|
1880
2079
|
"""
|
|
1881
|
-
buildings = self.
|
|
1882
|
-
self._get_fiat_building_index()
|
|
1883
|
-
]
|
|
2080
|
+
buildings = self._get_fiat_building_geoms()
|
|
1884
2081
|
exposure_csv = self.fiat_model.exposure.exposure_db
|
|
1885
2082
|
if "BF_FID" in exposure_csv.columns:
|
|
1886
|
-
|
|
2083
|
+
logger.warning(
|
|
1887
2084
|
"Column 'BF_FID' already exists in the exposure columns and will be replaced."
|
|
1888
2085
|
)
|
|
1889
2086
|
del exposure_csv["BF_FID"]
|
|
@@ -1919,12 +2116,13 @@ class DatabaseBuilder:
|
|
|
1919
2116
|
|
|
1920
2117
|
# Save site attributes
|
|
1921
2118
|
buildings_path = geo_path.relative_to(self.static_path)
|
|
1922
|
-
|
|
2119
|
+
logger.info(
|
|
1923
2120
|
f"Building footprints saved at {(self.static_path / buildings_path).resolve().as_posix()}"
|
|
1924
2121
|
)
|
|
1925
2122
|
|
|
1926
2123
|
return buildings_path
|
|
1927
2124
|
|
|
2125
|
+
@debug_timer
|
|
1928
2126
|
def _clip_hazard_extend(self, clip_footprints=True):
|
|
1929
2127
|
"""
|
|
1930
2128
|
Clip the exposure data to the bounding box of the hazard data.
|
|
@@ -1937,15 +2135,15 @@ class DatabaseBuilder:
|
|
|
1937
2135
|
|
|
1938
2136
|
Parameters
|
|
1939
2137
|
----------
|
|
1940
|
-
|
|
2138
|
+
clip_footprints : bool, default True
|
|
2139
|
+
Whether to clip the building footprints to the hazard area.
|
|
1941
2140
|
|
|
1942
2141
|
Returns
|
|
1943
2142
|
-------
|
|
1944
2143
|
None
|
|
1945
2144
|
"""
|
|
1946
|
-
gdf = self.
|
|
1947
|
-
|
|
1948
|
-
)
|
|
2145
|
+
gdf = self._get_fiat_gdf_full()
|
|
2146
|
+
|
|
1949
2147
|
crs = gdf.crs
|
|
1950
2148
|
sfincs_extend = self.sfincs_overland_model.region
|
|
1951
2149
|
sfincs_extend = sfincs_extend.to_crs(crs)
|
|
@@ -1955,55 +2153,21 @@ class DatabaseBuilder:
|
|
|
1955
2153
|
self.fiat_model.geoms["region"] = clipped_region
|
|
1956
2154
|
|
|
1957
2155
|
# Clip the exposure geometries
|
|
1958
|
-
|
|
1959
|
-
road_inds = gdf[_FIAT_COLUMNS.primary_object_type].str.contains("road")
|
|
1960
|
-
# Ensure road_inds is a boolean Series
|
|
1961
|
-
if not road_inds.dtype == bool:
|
|
1962
|
-
road_inds = road_inds.astype(bool)
|
|
1963
|
-
# Clip buildings
|
|
1964
|
-
gdf_buildings = gdf[~road_inds]
|
|
1965
|
-
gdf_buildings = self._clip_gdf(
|
|
1966
|
-
gdf_buildings, clipped_region, predicate="within"
|
|
1967
|
-
).reset_index(drop=True)
|
|
1968
|
-
|
|
1969
|
-
if road_inds.any():
|
|
1970
|
-
# Clip roads
|
|
1971
|
-
gdf_roads = gdf[road_inds]
|
|
1972
|
-
gdf_roads = self._clip_gdf(
|
|
1973
|
-
gdf_roads, clipped_region, predicate="within"
|
|
1974
|
-
).reset_index(drop=True)
|
|
1975
|
-
|
|
1976
|
-
idx_buildings = self.fiat_model.exposure.geom_names.index(
|
|
1977
|
-
self.config.fiat_buildings_name
|
|
1978
|
-
)
|
|
1979
|
-
idx_roads = self.fiat_model.exposure.geom_names.index(
|
|
1980
|
-
self.config.fiat_roads_name
|
|
1981
|
-
)
|
|
1982
|
-
self.fiat_model.exposure.exposure_geoms[idx_buildings] = gdf_buildings[
|
|
1983
|
-
[_FIAT_COLUMNS.object_id, "geometry"]
|
|
1984
|
-
]
|
|
1985
|
-
self.fiat_model.exposure.exposure_geoms[idx_roads] = gdf_roads[
|
|
1986
|
-
[_FIAT_COLUMNS.object_id, "geometry"]
|
|
1987
|
-
]
|
|
1988
|
-
gdf = pd.concat([gdf_buildings, gdf_roads])
|
|
1989
|
-
else:
|
|
1990
|
-
gdf = gdf_buildings
|
|
1991
|
-
self.fiat_model.exposure.exposure_geoms[0] = gdf[
|
|
1992
|
-
[_FIAT_COLUMNS.object_id, "geometry"]
|
|
1993
|
-
]
|
|
2156
|
+
gdf = self._clip_gdf(gdf, sfincs_extend, predicate="within")
|
|
1994
2157
|
|
|
1995
2158
|
# Save exposure dataframe
|
|
1996
2159
|
del gdf["geometry"]
|
|
1997
2160
|
self.fiat_model.exposure.exposure_db = gdf.reset_index(drop=True)
|
|
1998
2161
|
|
|
2162
|
+
# Make sure only objects in the exposure csv keep geometries
|
|
2163
|
+
self._delete_extra_geometries()
|
|
2164
|
+
|
|
1999
2165
|
# Clip the building footprints
|
|
2000
2166
|
fieldname = "BF_FID"
|
|
2001
2167
|
if clip_footprints and not self.fiat_model.building_footprint.empty:
|
|
2002
2168
|
# Get buildings after filtering and their footprint id
|
|
2003
2169
|
self.fiat_model.building_footprint = self.fiat_model.building_footprint[
|
|
2004
|
-
self.fiat_model.building_footprint[fieldname].isin(
|
|
2005
|
-
gdf_buildings[fieldname]
|
|
2006
|
-
)
|
|
2170
|
+
self.fiat_model.building_footprint[fieldname].isin(gdf[fieldname])
|
|
2007
2171
|
].reset_index(drop=True)
|
|
2008
2172
|
|
|
2009
2173
|
@staticmethod
|
|
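_clip_hazard_extend above was simplified to clip the full exposure GeoDataFrame against the SFINCS region in one call and then drop orphaned geometries. The _clip_gdf helper itself is not shown in this diff; one plausible way to realise a predicate-based clip with geopandas looks like this (the data are illustrative):

    import geopandas as gpd
    from shapely.geometry import Point, box

    objects = gpd.GeoDataFrame({"object_id": [1, 2]},
                               geometry=[Point(0.5, 0.5), Point(5, 5)], crs="EPSG:4326")
    region = gpd.GeoDataFrame(geometry=[box(0, 0, 1, 1)], crs="EPSG:4326")

    # Keep only objects that fall fully within the region polygon.
    clipped = gpd.sjoin(objects, region, predicate="within", how="inner")
    clipped = clipped.drop(columns="index_right").reset_index(drop=True)
    print(clipped["object_id"].tolist())  # [1]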
@@ -2022,6 +2186,7 @@ class DatabaseBuilder:
|
|
|
2022
2186
|
return gdf_new
|
|
2023
2187
|
|
|
2024
2188
|
@staticmethod
|
|
2189
|
+
@debug_timer
|
|
2025
2190
|
def spatial_join(
|
|
2026
2191
|
objects: gpd.GeoDataFrame,
|
|
2027
2192
|
layer: Union[str, gpd.GeoDataFrame],
|
|
@@ -2070,14 +2235,25 @@ class DatabaseBuilder:
|
|
|
2070
2235
|
layer = layer.rename(columns={field_name: rename})
|
|
2071
2236
|
return objects_joined, layer
|
|
2072
2237
|
|
|
2073
|
-
def _get_fiat_building_index(self) -> int:
|
|
2074
|
-
|
|
2075
|
-
|
|
2238
|
+
def _get_fiat_building_index(self) -> list[int]:
|
|
2239
|
+
names = self.config.fiat_buildings_name
|
|
2240
|
+
if isinstance(names, str):
|
|
2241
|
+
names = [names]
|
|
2242
|
+
indices = [
|
|
2243
|
+
self.fiat_model.exposure.geom_names.index(name)
|
|
2244
|
+
for name in names
|
|
2245
|
+
if name in self.fiat_model.exposure.geom_names
|
|
2246
|
+
]
|
|
2247
|
+
if indices:
|
|
2248
|
+
return indices
|
|
2249
|
+
raise ValueError(
|
|
2250
|
+
f"None of the specified building geometry names {names} found in FIAT model exposure geom_names."
|
|
2076
2251
|
)
|
|
2077
2252
|
|
|
2078
2253
|
def _get_fiat_road_index(self) -> int:
|
|
2079
2254
|
return self.fiat_model.exposure.geom_names.index(self.config.fiat_roads_name)
|
|
2080
2255
|
|
|
2256
|
+
@debug_timer
|
|
2081
2257
|
def _get_closest_station(self):
|
|
2082
2258
|
# Get available stations from source
|
|
2083
2259
|
obs_data = obs.source(self.config.tide_gauge.source)
|
|
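The rewritten _get_fiat_building_index in the hunk above accepts either a single layer name or a list of names and returns every matching position, raising only when nothing matches. A standalone sketch of that lookup (the helper name and sample layer lists are invented):

    def get_building_indices(names, geom_names):
        # Accept a single name or a list of names.
        if isinstance(names, str):
            names = [names]
        indices = [geom_names.index(n) for n in names if n in geom_names]
        if not indices:
            raise ValueError(f"None of {names} found in {geom_names}")
        return indices

    print(get_building_indices("buildings", ["buildings", "roads"]))             # [0]
    print(get_building_indices(["buildings", "sheds"], ["roads", "buildings"]))  # [1]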
@@ -2099,7 +2275,7 @@ class DatabaseBuilder:
|
|
|
2099
2275
|
)
|
|
2100
2276
|
|
|
2101
2277
|
distance = us.UnitfulLength(value=distance, units=us.UnitTypesLength.meters)
|
|
2102
|
-
|
|
2278
|
+
logger.info(
|
|
2103
2279
|
f"The closest tide gauge from {self.config.tide_gauge.source} is located {distance.transform(self.unit_system.default_length_units)} from the SFINCS domain"
|
|
2104
2280
|
)
|
|
2105
2281
|
# Check if user provided max distance
|
|
@@ -2110,7 +2286,7 @@ class DatabaseBuilder:
|
|
|
2110
2286
|
value=distance.convert(units_new), units=units_new
|
|
2111
2287
|
)
|
|
2112
2288
|
if distance_new.value > self.config.tide_gauge.max_distance.value:
|
|
2113
|
-
|
|
2289
|
+
logger.warning(
|
|
2114
2290
|
f"This distance is larger than the 'max_distance' value of {self.config.tide_gauge.max_distance.value} {units_new} provided in the config file. The station cannot be used."
|
|
2115
2291
|
)
|
|
2116
2292
|
return None
|
|
@@ -2120,11 +2296,13 @@ class DatabaseBuilder:
|
|
|
2120
2296
|
|
|
2121
2297
|
return station_id
|
|
2122
2298
|
|
|
2299
|
+
@debug_timer
|
|
2123
2300
|
def _get_station_metadata(self, station_id: str, ref: str = "MLLW"):
|
|
2124
2301
|
"""
|
|
2125
2302
|
Find the closest tide gauge station to the SFINCS domain and retrieve its metadata.
|
|
2126
2303
|
|
|
2127
2304
|
Args:
|
|
2305
|
+
station_id (str): The ID of the tide gauge station.
|
|
2128
2306
|
ref (str, optional): The reference level for water level measurements. Defaults to "MLLW".
|
|
2129
2307
|
|
|
2130
2308
|
Returns
|
|
@@ -2166,12 +2344,12 @@ class DatabaseBuilder:
|
|
|
2166
2344
|
"lat": station_metadata["lat"],
|
|
2167
2345
|
}
|
|
2168
2346
|
|
|
2169
|
-
|
|
2347
|
+
logger.info(
|
|
2170
2348
|
f"The tide gauge station '{station_metadata['name']}' from {self.config.tide_gauge.source} will be used to download nearshore historical water level time-series."
|
|
2171
2349
|
)
|
|
2172
2350
|
|
|
2173
|
-
|
|
2174
|
-
f"The station metadata will be used to fill in the water_level attribute in the site.toml. The reference level will be {ref}."
|
|
2351
|
+
logger.info(
|
|
2352
|
+
f"The station metadata will be used to fill in the water_level attribute in the site.toml. The reference level will be '{ref}'."
|
|
2175
2353
|
)
|
|
2176
2354
|
|
|
2177
2355
|
return meta
|
|
@@ -2191,14 +2369,85 @@ class DatabaseBuilder:
|
|
|
2191
2369
|
bin_colors = tomli.load(f)
|
|
2192
2370
|
return bin_colors
|
|
2193
2371
|
|
|
2372
|
+
def _delete_extra_geometries(self) -> None:
|
|
2373
|
+
"""
|
|
2374
|
+
Remove extra geometries from the exposure_geoms list that do not have a corresponding object_id in the exposure_db DataFrame.
|
|
2194
2375
|
|
|
2195
|
-
|
|
2376
|
+
Returns
|
|
2377
|
+
-------
|
|
2378
|
+
None
|
|
2379
|
+
"""
|
|
2380
|
+
# Make sure only csv objects have geometries
|
|
2381
|
+
for i, geoms in enumerate(self.fiat_model.exposure.exposure_geoms):
|
|
2382
|
+
keep = geoms[_FIAT_COLUMNS.object_id].isin(
|
|
2383
|
+
self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.object_id]
|
|
2384
|
+
)
|
|
2385
|
+
geoms = geoms[keep].reset_index(drop=True)
|
|
2386
|
+
self.fiat_model.exposure.exposure_geoms[i] = geoms
|
|
2387
|
+
|
|
2388
|
+
def _get_fiat_gdf_full(self) -> gpd.GeoDataFrame:
|
|
2389
|
+
"""
|
|
2390
|
+
Get the full GeoDataFrame of the Fiat model.
|
|
2391
|
+
|
|
2392
|
+
Returns
|
|
2393
|
+
-------
|
|
2394
|
+
gpd.GeoDataFrame: The full GeoDataFrame of the Fiat model.
|
|
2395
|
+
"""
|
|
2396
|
+
gdf = self.fiat_model.exposure.get_full_gdf(
|
|
2397
|
+
self.fiat_model.exposure.exposure_db
|
|
2398
|
+
)
|
|
2399
|
+
# Keep only unique "object_id" rows, keeping the first occurrence
|
|
2400
|
+
gdf = gdf.drop_duplicates(
|
|
2401
|
+
subset=_FIAT_COLUMNS.object_id, keep="first"
|
|
2402
|
+
).reset_index(drop=True)
|
|
2403
|
+
|
|
2404
|
+
return gdf
|
|
2405
|
+
|
|
2406
|
+
|
|
2407
|
+
def create_database(config: Union[str, Path, ConfigModel], overwrite=False) -> None:
|
|
2408
|
+
"""Create a new database from a configuration file or ConfigModel.
|
|
2409
|
+
|
|
2410
|
+
Parameters
|
|
2411
|
+
----------
|
|
2412
|
+
config : str, Path, or ConfigModel
|
|
2413
|
+
The path to the configuration file (as a string or Path) or a ConfigModel instance.
|
|
2414
|
+
overwrite : bool, default False
|
|
2415
|
+
Whether to overwrite the existing database if it exists.
|
|
2416
|
+
"""
|
|
2417
|
+
if isinstance(config, (str, Path)):
|
|
2418
|
+
config = ConfigModel.read(config)
|
|
2419
|
+
|
|
2420
|
+
DatabaseBuilder(config=config).build(overwrite)
|
|
2421
|
+
|
|
2422
|
+
|
|
2423
|
+
def main():
|
|
2196
2424
|
while True:
|
|
2197
2425
|
config_path = Path(
|
|
2198
2426
|
input(
|
|
2199
2427
|
"Please provide the path to the database creation configuration toml: \n"
|
|
2200
2428
|
)
|
|
2201
2429
|
)
|
|
2430
|
+
print(
|
|
2431
|
+
"Please select the log verbosity level for the database creation process.\n"
|
|
2432
|
+
"From most verbose to least verbose: `DEBUG`, `INFO`, `WARNING`.'n"
|
|
2433
|
+
)
|
|
2434
|
+
log_level = input("Enter log level: ")
|
|
2435
|
+
match log_level:
|
|
2436
|
+
case "DEBUG":
|
|
2437
|
+
level = logging.DEBUG
|
|
2438
|
+
case "INFO":
|
|
2439
|
+
level = logging.INFO
|
|
2440
|
+
case "WARNING":
|
|
2441
|
+
level = logging.WARNING
|
|
2442
|
+
case _:
|
|
2443
|
+
print(
|
|
2444
|
+
f"Log level `{log_level}` not recognized. Defaulting to INFO. Please choose from: `DEBUG`, `INFO`, `WARNING`."
|
|
2445
|
+
)
|
|
2446
|
+
log_level = "INFO"
|
|
2447
|
+
level = logging.INFO
|
|
2448
|
+
|
|
2449
|
+
FloodAdaptLogging(level=level)
|
|
2450
|
+
|
|
2202
2451
|
try:
|
|
2203
2452
|
config = ConfigModel.read(config_path)
|
|
2204
2453
|
dbs = DatabaseBuilder(config)
|
|
@@ -2208,3 +2457,7 @@ if __name__ == "__main__":
|
|
|
2208
2457
|
quit = input("Do you want to quit? (y/n)")
|
|
2209
2458
|
if quit == "y":
|
|
2210
2459
|
exit()
|
|
2460
|
+
|
|
2461
|
+
|
|
2462
|
+
if __name__ == "__main__":
|
|
2463
|
+
main()
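The module now also exposes a create_database entry point (added in the last hunk) alongside the interactive main(). A minimal usage sketch; the configuration path below is only an example:

    from flood_adapt.database_builder.database_builder import create_database

    # Read the builder configuration toml and build the database,
    # replacing an existing one at the configured location.
    create_database("c:/databases/charleston/config.toml", overwrite=True)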
|