flood-adapt 1.1.2-py3-none-any.whl → 1.1.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
flood_adapt/__init__.py CHANGED
@@ -1,5 +1,5 @@
  # has to be here at the start to avoid circular imports
- __version__ = "1.1.2"
+ __version__ = "1.1.4"

  from flood_adapt import adapter, database_builder, dbs_classes, objects
  from flood_adapt.config.config import Settings
flood_adapt/adapter/sfincs_adapter.py CHANGED
@@ -677,23 +677,20 @@ class SfincsAdapter(IHazardAdapter):

  def add_obs_points(self):
  """Add observation points provided in the site toml to SFINCS model."""
- if self.settings.obs_point is None:
+ obs_points = self.settings.obs_point
+ if not obs_points:
  return
- logger.info("Adding observation points to the overland flood model")

- obs_points = self.settings.obs_point
- names = []
- lat = []
- lon = []
- for pt in obs_points:
- names.append(pt.name)
- lat.append(pt.lat)
- lon.append(pt.lon)
-
- # create GeoDataFrame from obs_points in site file
+ names = [pt.name for pt in obs_points]
+ lat = [pt.lat for pt in obs_points]
+ lon = [pt.lon for pt in obs_points]
+
+ logger.info("Adding observation points to the overland flood model")
  df = pd.DataFrame({"name": names})
  gdf = gpd.GeoDataFrame(
- df, geometry=gpd.points_from_xy(lon, lat), crs="EPSG:4326"
+ df,
+ geometry=gpd.points_from_xy(lon, lat),
+ crs="EPSG:4326",
  )

  # Add locations to SFINCS file
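For reference, a minimal standalone sketch of the GeoDataFrame construction that the refactored add_obs_points() now performs; the observation points below are made up.

import geopandas as gpd
import pandas as pd

# Hypothetical observation points; in FloodAdapt these come from self.settings.obs_point.
obs_points = [
    {"name": "harbor", "lat": 32.78, "lon": -79.92},
    {"name": "creek", "lat": 32.80, "lon": -79.95},
]

names = [pt["name"] for pt in obs_points]
lat = [pt["lat"] for pt in obs_points]
lon = [pt["lon"] for pt in obs_points]

df = pd.DataFrame({"name": names})
gdf = gpd.GeoDataFrame(
    df,
    geometry=gpd.points_from_xy(lon, lat),  # x=longitude, y=latitude
    crs="EPSG:4326",
)
print(gdf)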
flood_adapt/database_builder/__init__.py CHANGED
@@ -1,15 +1,31 @@
+ from flood_adapt.config import (
+ FloodModel,
+ SlrScenariosModel,
+ )
  from flood_adapt.database_builder.database_builder import (
  Basins,
  ConfigModel,
  FootprintsOptions,
  GuiConfigModel,
+ ObsPointModel,
  SpatialJoinModel,
  SviConfigModel,
  TideGaugeConfigModel,
+ TideGaugeSource,
  UnitSystems,
  create_database,
  )

+ from .metrics_utils import (
+ BuildingsInfographicModel,
+ EventInfographicModel,
+ HomesInfographicModel,
+ ImpactCategoriesModel,
+ MetricModel,
+ RiskInfographicModel,
+ RoadsInfographicModel,
+ )
+
  __all__ = [
  "Basins",
  "ConfigModel",
@@ -18,6 +34,17 @@ __all__ = [
  "SpatialJoinModel",
  "SviConfigModel",
  "TideGaugeConfigModel",
+ "TideGaugeSource",
  "UnitSystems",
  "create_database",
+ "BuildingsInfographicModel",
+ "EventInfographicModel",
+ "HomesInfographicModel",
+ "MetricModel",
+ "RiskInfographicModel",
+ "RoadsInfographicModel",
+ "ImpactCategoriesModel",
+ "FloodModel",
+ "SlrScenariosModel",
+ "ObsPointModel",
  ]
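For orientation, a hedged sketch of how these re-exports are typically consumed; the config file name is hypothetical, and create_database() accepts either a path or a ConfigModel instance (see its signature later in this diff).

from flood_adapt.database_builder import create_database

# Build a FloodAdapt database from a database-builder config TOML (path is made up).
create_database("database_builder_config.toml", overwrite=False)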
flood_adapt/database_builder/database_builder.py CHANGED
@@ -17,12 +17,14 @@ import numpy as np
  import pandas as pd
  import rioxarray as rxr
  import tomli
+ import tomli_w
  import xarray as xr
  from hydromt_fiat import FiatModel as HydromtFiatModel
  from hydromt_fiat.data_apis.open_street_maps import get_buildings_from_osm
  from hydromt_sfincs import SfincsModel as HydromtSfincsModel
  from pydantic import BaseModel, Field
  from shapely import MultiLineString, MultiPolygon, Polygon
+ from shapely.ops import nearest_points

  from flood_adapt.adapter.fiat_adapter import _FIAT_COLUMNS
  from flood_adapt.config.fiat import (
@@ -32,10 +34,15 @@ from flood_adapt.config.fiat import (
  from flood_adapt.config.gui import (
  AggregationDmgLayer,
  BenefitsLayer,
+ FieldName,
+ FilterCondition,
+ FilterGroup,
  FloodMapLayer,
  FootprintsDmgLayer,
  GuiModel,
  GuiUnitModel,
+ LogicalOperator,
+ MetricLayer,
  OutputLayers,
  PlottingModel,
  SyntheticTideModel,
@@ -70,6 +77,15 @@ from flood_adapt.config.site import (
  Site,
  StandardObjectModel,
  )
+ from flood_adapt.database_builder.metrics_utils import (
+ BuildingsInfographicModel,
+ EventInfographicModel,
+ HomesInfographicModel,
+ MetricModel,
+ Metrics,
+ RiskInfographicModel,
+ RoadsInfographicModel,
+ )
  from flood_adapt.dbs_classes.database import Database
  from flood_adapt.misc.debug_timer import debug_timer
  from flood_adapt.misc.log import FloodAdaptLogging
@@ -87,16 +103,6 @@ from flood_adapt.objects.projections.projections import (
  )
  from flood_adapt.objects.strategies.strategies import Strategy

- from .metrics_utils import (
- BuildingsInfographicModel,
- EventInfographicModel,
- HomesInfographicModel,
- MetricModel,
- Metrics,
- RiskInfographicModel,
- RoadsInfographicModel,
- )
-
  logger = FloodAdaptLogging.getLogger("DatabaseBuilder")


@@ -129,17 +135,43 @@ def path_check(str_path: str, config_path: Optional[Path] = None) -> str:
  return path.as_posix()


+ def make_relative(str_path: str | Path, toml_path: Path) -> str:
+ """Make a path relative to the config file path.
+
+ Parameters
+ ----------
+ str_path : str | Path
+ The path to be made relative.
+ toml_path : Path
+ The path to the config file.
+
+ Returns
+ -------
+ str
+ The relative path as a string.
+ """
+ path = Path(str_path)
+ if not path.is_absolute():
+ return path.as_posix()
+ try:
+ relative_path = path.relative_to(toml_path.parent)
+ return relative_path.as_posix()
+ except ValueError:
+ return path.as_posix()
+
+
  class SpatialJoinModel(BaseModel):
- """Represents a spatial join model.
+ """
+ Model for representing a spatial join between geometries and tabular data.

- Attributes
+ Parameters
  ----------
- name : Optional[str], default None
- The name of the model.
+ name : Optional[str]
+ Name of the spatial join (optional).
  file : str
- The file associated with the model.
+ Path to the file containing the spatial data to join.
  field_name : str
- The field name used for the spatial join.
+ Name of the field used for joining.
  """

  name: Optional[str] = None
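As an illustration of make_relative() defined above (paths are hypothetical): only absolute paths that live under the config file's parent directory are rewritten; everything else comes back unchanged as a POSIX string.

from pathlib import Path

toml_path = Path("/projects/charleston/config.toml")

make_relative("/projects/charleston/data/dem.tif", toml_path)  # -> "data/dem.tif"
make_relative("data/dem.tif", toml_path)                       # already relative -> "data/dem.tif"
make_relative("/other/location/dem.tif", toml_path)            # not under the parent -> "/other/location/dem.tif"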
@@ -148,14 +180,13 @@ class SpatialJoinModel(BaseModel):


  class UnitSystems(str, Enum):
- """Enumeration for accepted values for the unit_system field.
+ """
+ Enumeration for supported unit systems.

- Attributes
- ----------
- imperial : str
- Represents the imperial unit system.
- metric : str
- Represents the metric unit system.
+ Values
+ ------
+ imperial : Imperial units (feet, miles, etc.)
+ metric : Metric units (meters, kilometers, etc.)
  """

  imperial = "imperial"
@@ -163,36 +194,30 @@ class UnitSystems(str, Enum):


  class FootprintsOptions(str, Enum):
- """Enumeration for accepted values for the building_footprints field.
+ """
+ Enumeration for building footprints data sources.

- Attributes
- ----------
- OSM : str
- Use OpenStreetMap for building footprints.
+ Values
+ ------
+ OSM : Use OpenStreetMap for building footprints.
  """

  OSM = "OSM"


  class Basins(str, Enum):
- """Enumeration class representing different basins.
+ """
+ Enumeration of global cyclone basins.

- Attributes
- ----------
- NA : str
- North Atlantic
- SA : str
- South Atlantic
- EP : str
- Eastern North Pacific (which includes the Central Pacific region)
- WP : str
- Western North Pacific
- SP : str
- South Pacific
- SI : str
- South Indian
- NI : str
- North Indian
+ Values
+ ------
+ NA : North Atlantic
+ SA : South Atlantic
+ EP : Eastern North Pacific (includes Central Pacific)
+ WP : Western North Pacific
+ SP : South Pacific
+ SI : South Indian
+ NI : North Indian
  """

  NA = "NA"
@@ -205,59 +230,67 @@ class Basins(str, Enum):


  class GuiConfigModel(BaseModel):
- """Represents a GUI model for FloodAdapt.
+ """
+ Configuration for FloodAdapt GUI visualization scaling.

- Attributes
+ Parameters
  ----------
  max_flood_depth : float
- The last visualization bin will be ">value".
+ Maximum flood depth for visualization bins (last bin is ">value").
  max_aggr_dmg : float
- The last visualization bin will be ">value".
+ Maximum aggregated damage for visualization bins.
  max_footprint_dmg : float
- The last visualization bin will be ">value".
+ Maximum footprint damage for visualization bins.
  max_benefits : float
- The last visualization bin will be ">value".
+ Maximum benefits for visualization bins.
+ additional_aggregated_layers : Optional[list[MetricLayer]]
+ Additional metric layers for aggregation (optional).
  """

  max_flood_depth: float
  max_aggr_dmg: float
  max_footprint_dmg: float
  max_benefits: float
+ additional_aggregated_layers: Optional[list[MetricLayer]] = None


  class SviConfigModel(SpatialJoinModel):
- """Represents a model for the Social Vulnerability Index (SVI).
+ """
+ Model for Social Vulnerability Index (SVI) spatial join.

- Attributes
+ Inherits from SpatialJoinModel.
+
+ Parameters
  ----------
  threshold : float
- The threshold value for the SVI model to specify vulnerability.
+ Threshold value to specify vulnerability.
  """

  threshold: float


  class TideGaugeConfigModel(BaseModel):
- """Represents a tide gauge model.
+ """
+ Model for tide gauge configuration.

- Attributes
+ Parameters
  ----------
  source : TideGaugeSource
- The source of the tide gauge data.
- description : str, default ""
+ Source of tide gauge data.
+ description : str
  Description of the tide gauge.
- ref : Optional[str], default None
- The reference name. Should be defined in the water level references.
- id : Optional[int], default None
- The station ID.
- lon : Optional[float], default None
- Longitude of the tide gauge.
- lat : Optional[float], default None
- Latitude of the tide gauge.
- file : Optional[str], default None
- The file associated with the tide gauge data.
- max_distance : Optional[us.UnitfulLength], default None
- The maximum distance.
+ ref : Optional[str]
+ Reference name (should match water level references).
+ id : Optional[int]
+ Station ID.
+ lon : Optional[float]
+ Longitude.
+ lat : Optional[float]
+ Latitude.
+ file : Optional[str]
+ Path to tide gauge data file.
+ max_distance : Optional[us.UnitfulLength]
+ Maximum distance for gauge association.
  """

  source: TideGaugeSource
@@ -272,74 +305,76 @@ class TideGaugeConfigModel(BaseModel):

  class ConfigModel(BaseModel):
  """
- Represents the configuration model for FloodAdapt.
+ Main configuration model for FloodAdapt database builder.

- Attributes
+ Parameters
  ----------
  name : str
- The name of the site.
- description : Optional[str], default None
- The description of the site.
- database_path : Optional[str], default None
- The path to the database where all the sites are located.
+ Name of the site (must be valid for folder names).
+ description : Optional[str]
+ Description of the site.
+ database_path : Optional[str]
+ Path to the database root directory.
  unit_system : UnitSystems
- The unit system.
+ Unit system for all calculations (imperial or metric).
  gui : GuiConfigModel
- The GUI model representing scaling values for the layers.
- infographics : bool, default True
- Indicates if infographics are enabled.
- event_infographics : Optional[EventInfographicModel], default None
- Event infographic configuration.
- risk_infographics : Optional[RiskInfographicModel], default None
- Risk infographic configuration.
- event_additional_infometrics : Optional[list[MetricModel]], default None
+ GUI visualization scaling configuration.
+ infographics : bool
+ Enable/disable infographics.
+ event_infographics : Optional[EventInfographicModel]
+ Configuration for event infographics.
+ risk_infographics : Optional[RiskInfographicModel]
+ Configuration for risk infographics.
+ event_additional_infometrics : Optional[list[MetricModel]]
  Additional event infometrics.
- risk_additional_infometrics : Optional[list[MetricModel]], default None
+ risk_additional_infometrics : Optional[list[MetricModel]]
  Additional risk infometrics.
  fiat : str
- The FIAT model path.
- aggregation_areas : Optional[list[SpatialJoinModel]], default None
- The list of aggregation area models.
- building_footprints : Optional[SpatialJoinModel | FootprintsOptions], default FootprintsOptions.OSM
- The building footprints model or OSM option.
- fiat_buildings_name : str | list[str], default "buildings"
- The name(s) of the buildings geometry in the FIAT model.
- fiat_roads_name : Optional[str], default "roads"
- The name of the roads geometry in the FIAT model.
- bfe : Optional[SpatialJoinModel], default None
- The BFE model.
- svi : Optional[SviConfigModel], default None
- The SVI model.
- road_width : us.UnitfulLength, default 5 meters
- The road width.
- return_periods : Optional[list[int]], default None
- The list of return periods for risk calculations.
- floodmap_type : Optional[FloodmapType], default None
- The type of floodmap to use.
- references : Optional[WaterlevelReferenceModel], default None
- The water level reference model.
+ Path to the FIAT model directory.
+ aggregation_areas : Optional[list[SpatialJoinModel]]
+ List of aggregation area spatial join models.
+ building_footprints : Optional[SpatialJoinModel | FootprintsOptions]
+ Building footprints source or spatial join model.
+ fiat_buildings_name : str | list[str]
+ Name(s) of buildings geometry in FIAT model.
+ fiat_roads_name : Optional[str]
+ Name of roads geometry in FIAT model.
+ bfe : Optional[SpatialJoinModel]
+ Base Flood Elevation spatial join model.
+ svi : Optional[SviConfigModel]
+ Social Vulnerability Index spatial join model.
+ road_width : us.UnitfulLength
+ Road width (default 5 meters).
+ return_periods : Optional[list[int]]
+ List of return periods for risk calculations.
+ floodmap_type : Optional[FloodmapType]
+ Type of floodmap to use.
+ references : Optional[WaterlevelReferenceModel]
+ Water level reference model.
  sfincs_overland : FloodModel
- The overland SFINCS model.
- sfincs_offshore : Optional[FloodModel], default None
- The offshore SFINCS model.
- dem : Optional[DemModel], default None
- The DEM model.
- excluded_datums : list[str], default []
+ Overland SFINCS model configuration.
+ sfincs_offshore : Optional[FloodModel]
+ Offshore SFINCS model configuration.
+ dem : Optional[DemModel]
+ Digital Elevation Model configuration.
+ river_names : Optional[list[str]]
+ List of river names (optional).
+ excluded_datums : list[str]
  List of datums to exclude from plotting.
- slr_scenarios : Optional[SlrScenariosModel], default None
- The sea level rise scenarios model.
- scs : Optional[SCSModel], default None
- The SCS model.
- tide_gauge : Optional[TideGaugeConfigModel], default None
- The tide gauge model.
- cyclones : Optional[bool], default True
- Indicates if cyclones are enabled.
- cyclone_basin : Optional[Basins], default None
- The cyclone basin.
- obs_point : Optional[list[ObsPointModel]], default None
- The list of observation point models.
- probabilistic_set : Optional[str], default None
- The probabilistic set path.
+ slr_scenarios : Optional[SlrScenariosModel]
+ Sea level rise scenarios configuration.
+ scs : Optional[SCSModel]
+ SCS model configuration.
+ tide_gauge : Optional[TideGaugeConfigModel]
+ Tide gauge configuration.
+ cyclones : Optional[bool]
+ Enable/disable cyclones.
+ cyclone_basin : Optional[Basins]
+ Cyclone basin selection.
+ obs_point : Optional[list[ObsPointModel]]
+ List of observation point models.
+ probabilistic_set : Optional[str]
+ Path to probabilistic event set.
  """

  # General
@@ -375,7 +410,7 @@ class ConfigModel(BaseModel):
  sfincs_overland: FloodModel
  sfincs_offshore: Optional[FloodModel] = None
  dem: Optional[DemModel] = None
-
+ river_names: Optional[list[str]] = None
  excluded_datums: list[str] = Field(default_factory=list)

  slr_scenarios: Optional[SlrScenariosModel] = None
@@ -413,6 +448,8 @@
  config.database_path = path_check(config.database_path, toml_path)
  config.fiat = path_check(config.fiat, toml_path)
  config.sfincs_overland.name = path_check(config.sfincs_overland.name, toml_path)
+ if config.dem:
+ config.dem.filename = path_check(config.dem.filename, toml_path)
  if config.sfincs_offshore:
  config.sfincs_offshore.name = path_check(
  config.sfincs_offshore.name, toml_path
@@ -437,6 +474,67 @@

  return config

+ def write(self, toml_path: Path) -> None:
+ """
+ Write the configuration model to a TOML file.
+
+ Parameters
+ ----------
+ toml_path : Path
+ The path to the TOML file where the configuration will be saved.
+ """
+ config_dict = self.model_dump(exclude_none=True)
+
+ # Make paths relative to the config file
+ config_dict["database_path"] = make_relative(
+ config_dict["database_path"], toml_path
+ )
+ config_dict["fiat"] = make_relative(config_dict["fiat"], toml_path)
+ config_dict["sfincs_overland"]["name"] = make_relative(
+ config_dict["sfincs_overland"]["name"], toml_path
+ )
+ if self.dem:
+ config_dict["dem"]["filename"] = make_relative(
+ config_dict["dem"]["filename"], toml_path
+ )
+ if config_dict.get("sfincs_offshore"):
+ config_dict["sfincs_offshore"]["name"] = make_relative(
+ config_dict["sfincs_offshore"]["name"], toml_path
+ )
+ if isinstance(self.building_footprints, SpatialJoinModel):
+ config_dict["building_footprints"]["file"] = make_relative(
+ config_dict["building_footprints"]["file"], toml_path
+ )
+ if self.tide_gauge and self.tide_gauge.file:
+ config_dict["tide_gauge"]["file"] = make_relative(
+ config_dict["tide_gauge"]["file"], toml_path
+ )
+ if self.svi:
+ config_dict["svi"]["file"] = make_relative(
+ config_dict["svi"]["file"], toml_path
+ )
+ if self.bfe:
+ config_dict["bfe"]["file"] = make_relative(
+ config_dict["bfe"]["file"], toml_path
+ )
+ if self.slr_scenarios:
+ config_dict["slr_scenarios"]["file"] = make_relative(
+ config_dict["slr_scenarios"]["file"], toml_path
+ )
+
+ if config_dict.get("probabilistic_set"):
+ config_dict["probabilistic_set"] = make_relative(
+ config_dict["probabilistic_set"], toml_path
+ )
+
+ if config_dict.get("aggregation_areas"):
+ for ag in config_dict["aggregation_areas"]:
+ ag["file"] = make_relative(ag["file"], toml_path)
+
+ toml_path.parent.mkdir(parents=True, exist_ok=True)
+ with open(toml_path, mode="wb") as fp:
+ tomli_w.dump(config_dict, fp)
+


  class DatabaseBuilder:
@@ -464,15 +562,16 @@ class DatabaseBuilder:
  @debug_timer
  def build(self, overwrite: bool = False) -> None:
  # Check if database already exists
- if self.root.exists() and not overwrite:
- raise ValueError(
- f"There is already a Database folder in '{self.root.as_posix()}'."
- )
- if self.root.exists() and overwrite:
- shutil.rmtree(self.root)
- warnings.warn(
- f"There is already a Database folder in '{self.root.as_posix()}, which will be overwritten'."
- )
+ if self.root.exists():
+ if overwrite:
+ shutil.rmtree(self.root)
+ warnings.warn(
+ f"There is already a Database folder in '{self.root.as_posix()}, which will be overwritten'."
+ )
+ else:
+ raise ValueError(
+ f"There is already a Database folder in '{self.root.as_posix()}'."
+ )
  # Create database folder
  self.root.mkdir(parents=True)

@@ -815,7 +914,7 @@
  exposure[_FIAT_COLUMNS.ground_elevation] = exposure["elev"]
  del exposure["elev"]

- self.fiat_model.exposure.exposure_db = exposure
+ self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(exposure)

  def read_damage_unit(self) -> str:
  if self.fiat_model.exposure.damage_unit is None:
@@ -1079,7 +1178,9 @@
  exposure_csv = exposure_csv.merge(
  gdf_joined, on=_FIAT_COLUMNS.object_id, how="left"
  )
- self.fiat_model.exposure.exposure_db = exposure_csv
+ self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(
+ exposure_csv
+ )
  # Update spatial joins in FIAT model
  if self.fiat_model.spatial_joins["aggregation_areas"] is None:
  self.fiat_model.spatial_joins["aggregation_areas"] = []
@@ -1141,7 +1242,9 @@
  exposure_csv = exposure_csv.merge(
  gdf_joined, on=_FIAT_COLUMNS.object_id, how="left"
  )
- self.fiat_model.exposure.exposure_db = exposure_csv
+ self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(
+ exposure_csv
+ )
  logger.warning(
  "No aggregation areas were available in the FIAT model and none were provided in the config file. The region file will be used as a mock aggregation area."
  )
@@ -1172,7 +1275,9 @@
  exposure_csv = exposure_csv.merge(
  buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
  )
- self.fiat_model.exposure.exposure_db = exposure_csv
+ self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(
+ exposure_csv
+ )

  # Save the spatial file for future use
  svi_path = self.static_path / "templates" / "fiat" / "svi" / "svi.gpkg"
@@ -1191,8 +1296,15 @@
  "'SVI' column present in the FIAT exposure csv. Vulnerability type infometrics can be produced."
  )
  add_attrs = self.fiat_model.spatial_joins["additional_attributes"]
+ if add_attrs is None:
+ logger.warning(
+ "'SVI' column present in the FIAT exposure csv, but no spatial join found with the SVI map."
+ )
+ return None
+
  if "SVI" not in [attr["name"] for attr in add_attrs]:
  logger.warning("No SVI map found to display in the FloodAdapt GUI!")
+ return None

  ind = [attr["name"] for attr in add_attrs].index("SVI")
  svi = add_attrs[ind]
@@ -1347,7 +1459,12 @@
  if delete_sfincs_folder:
  gc.collect()
  if subgrid_sfincs_folder.exists() and subgrid_sfincs_folder.is_dir():
- shutil.rmtree(subgrid_sfincs_folder)
+ try:
+ shutil.rmtree(subgrid_sfincs_folder)
+ except Exception:
+ logger.warning(
+ f"Could not delete temporary SFINCS subgrid folder at {subgrid_sfincs_folder.as_posix()}."
+ )

  return DemModel(
  filename=fa_subgrid_path.name, units=us.UnitTypesLength.meters
@@ -1393,22 +1510,23 @@
  else:
  logger.info("Observation points were provided in the config file.")
  obs_points = self.config.obs_point
+
+ model_region = self.sfincs_overland_model.region.union_all()
+
  if self.tide_gauge is not None:
- # Check if the tide gauge point is within the SFINCS region
- region = self.sfincs_overland_model.region
- point = gpd.GeoSeries(
- [gpd.points_from_xy([self.tide_gauge.lon], [self.tide_gauge.lat])[0]],
- crs=4326,
- )
- region_4326 = region.to_crs(4326)
- if not point.within(region_4326.unary_union).item():
- logger.warning(
- "The tide gauge location is outside the SFINCS region and will not be added as an observation point."
- )
- else:
- logger.info(
- "A tide gauge has been setup in the database. It will be used as an observation point as well."
+ # Check if tide gauge is within model domain
+ coord = (
+ gpd.GeoSeries(
+ gpd.points_from_xy(
+ x=[self.tide_gauge.lon], y=[self.tide_gauge.lat]
+ ),
+ crs="EPSG:4326",
  )
+ .to_crs(self.sfincs_overland_model.crs)
+ .iloc[0]
+ )
+ # Add tide gauge as obs point if within model region
+ if coord.within(model_region):
  obs_points.append(
  ObsPointModel(
  name=self.tide_gauge.name,
@@ -1418,14 +1536,43 @@
  lat=self.tide_gauge.lat,
  )
  )
+ else:
+ boundary = model_region.boundary
+ snapped = nearest_points(coord, boundary)[1]
+ distance = us.UnitfulLength(
+ value=coord.distance(snapped), units=us.UnitTypesLength.meters
+ )
+ logger.warning(
+ f"Tide gauge lies outside the model domain by {distance}. It will not be used as an observation point in FloodAdapt."
+ )

  if not obs_points:
  logger.warning(
  "No observation points were provided in the config file or created from the tide gauge. No observation points will be available in FloodAdapt."
  )
  return None
- else:
- return obs_points
+
+ # Check if all obs points are within model domain
+ lon = [p.lon for p in obs_points]
+ lat = [p.lat for p in obs_points]
+ names = [p.name for p in obs_points]
+ coords = gpd.GeoDataFrame(
+ {"name": names},
+ geometry=gpd.points_from_xy(lon, lat),
+ crs="EPSG:4326",
+ )
+ coords = coords.to_crs(self.sfincs_overland_model.crs)
+ valid_coords = coords.within(model_region)
+ if not valid_coords.all():
+ invalid = coords.loc[~valid_coords, "name"].tolist()
+ lat = coords.loc[~valid_coords].geometry.y.tolist()
+ lon = coords.loc[~valid_coords].geometry.x.tolist()
+ bounds = model_region.bounds
+ raise ValueError(
+ f"Observation points outside model domain: {invalid}, {lat=}, {lon=}, {bounds=}"
+ )
+
+ return obs_points

  @debug_timer
  def create_rivers(self) -> list[RiverModel]:
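For context, a standalone sketch of the within-domain test used above; the region polygon, coordinates, and CRS are all made up.

import geopandas as gpd
from shapely.geometry import box

model_region = box(0, 0, 100_000, 100_000)  # stand-in for the dissolved SFINCS region outline

coords = gpd.GeoDataFrame(
    {"name": ["gauge", "obs_far_away"]},
    geometry=gpd.points_from_xy([-79.92, -75.00], [32.78, 35.00]),
    crs="EPSG:4326",
).to_crs("EPSG:32617")  # reproject to the model CRS before testing containment

valid = coords.within(model_region)
print(coords.loc[~valid, "name"].tolist())  # names of points outside the (made-up) region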
@@ -1440,6 +1587,17 @@ class DatabaseBuilder:
  geometry=gpd.points_from_xy(df.x, df.y),
  crs=self.sfincs_overland_model.crs,
  )
+
+ if self.config.river_names:
+ if len(self.config.river_names) != len(river_locs):
+ msg = "The number of river names provided does not match the number of rivers found in the SFINCS model."
+ logger.error(msg)
+ raise ValueError(msg)
+ else:
+ river_locs["name"] = self.config.river_names
+ else:
+ river_locs["name"] = [f"river_{idx}" for idx in range(len(river_locs))]
+
  rivers = []
  for idx, row in river_locs.iterrows():
  if "dis" in self.sfincs_overland_model.forcing:
@@ -1456,7 +1614,7 @@
  )

  river = RiverModel(
- name=f"river_{idx}",
+ name=row["name"],
  x_coordinate=row.x,
  y_coordinate=row.y,
  mean_discharge=us.UnitfulDischarge(
@@ -1782,6 +1940,69 @@
  ],
  threshold=0.0,
  )
+ red_colors_6 = [
+ "#FFFFFF",
+ "#FEE9CE",
+ "#FDBB84",
+ "#FC844E",
+ "#E03720",
+ "#860000",
+ ]
+
+ aggregated_metrics = []
+
+ building_count = MetricLayer(
+ type="building_count",
+ bins=[1, 10, 50, 200, 500], # TODO provide max values
+ colors=red_colors_6,
+ filters=FilterGroup(
+ conditions=[
+ FilterCondition(
+ field_name=FieldName.NAME,
+ values=["Count"],
+ ),
+ FilterCondition(
+ field_name=FieldName.LONG_NAME,
+ values=["(#)"],
+ ),
+ ],
+ operator=LogicalOperator.OR,
+ ),
+ )
+ aggregated_metrics.append(building_count)
+
+ if self.config.gui.additional_aggregated_layers:
+ aggregated_metrics.extend(self.config.gui.additional_aggregated_layers)
+
+ if self._has_roads:
+ bins = [0.0001, 1, 5, 10, 20] # in kms
+ if self.unit_system == GuiUnitModel.imperial():
+ # convert to miles
+ bins = [
+ us.UnitfulLength(value=b * 1000, units=us.UnitTypesLength.meters)
+ .transform(us.UnitTypesLength.miles)
+ .value
+ for b in bins
+ ]
+ roads_length = MetricLayer(
+ type="road_length",
+ bins=bins, # TODO provide max values
+ colors=red_colors_6,
+ filters=FilterGroup(
+ conditions=[
+ FilterCondition(
+ field_name=FieldName.NAME,
+ values=["RoadsLength"],
+ ),
+ FilterCondition(
+ field_name=FieldName.LONG_NAME,
+ values=["roads"],
+ ),
+ ],
+ operator=LogicalOperator.AND,
+ ),
+ )
+ aggregated_metrics.append(roads_length)

  output_layers = OutputLayers(
  floodmap=FloodMapLayer(
@@ -1792,25 +2013,12 @@
  ),
  aggregation_dmg=AggregationDmgLayer(
  bins=[0.00001, 0.1 * ad_max, 0.25 * ad_max, 0.5 * ad_max, ad_max],
- colors=[
- "#FFFFFF",
- "#FEE9CE",
- "#FDBB84",
- "#FC844E",
- "#E03720",
- "#860000",
- ],
+ colors=red_colors_6,
  ),
+ aggregated_metrics=aggregated_metrics,
  footprints_dmg=FootprintsDmgLayer(
  bins=[0.00001, 0.06 * ftd_max, 0.2 * ftd_max, 0.4 * ftd_max, ftd_max],
- colors=[
- "#FFFFFF",
- "#FEE9CE",
- "#FDBB84",
- "#FC844E",
- "#E03720",
- "#860000",
- ],
+ colors=red_colors_6,
  ),
  benefits=benefits_layer,
  )
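As a worked instance of the kilometre-to-mile bin conversion above (a sketch; the import path assumes flood_adapt's unit_system module, and the stated values are approximate):

from flood_adapt.objects.forcing import unit_system as us

bins_km = [0.0001, 1, 5, 10, 20]
bins_miles = [
    us.UnitfulLength(value=b * 1000, units=us.UnitTypesLength.meters)
    .transform(us.UnitTypesLength.miles)
    .value
    for b in bins_km
]
# 5 km is roughly 3.11 miles, and 20 km roughly 12.43 miles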
@@ -1893,12 +2101,14 @@
  # Write the metrics config files
  self._write_infometrics(metrics, path_im, path_ig)

- def _create_mandatory_metrics(self, metrics):
+ self.metrics = metrics
+
+ def _create_mandatory_metrics(self, metrics: Metrics):
  metrics.create_mandatory_metrics_event()
  if self._probabilistic_set_name is not None:
  metrics.create_mandatory_metrics_risk()

- def _create_event_infographics(self, metrics):
+ def _create_event_infographics(self, metrics: Metrics):
  exposure_type = self._get_exposure_type()
  # If not specific infographic config is given, create a standard one
  if not self.config.event_infographics:
@@ -1930,7 +2140,7 @@
  )
  metrics.create_infographics_metrics_event(config=self.config.event_infographics)

- def _create_risk_infographics(self, metrics):
+ def _create_risk_infographics(self, metrics: Metrics):
  exposure_type = self._get_exposure_type()
  # If not specific infographic config is given, create a standard one
  if not self.config.risk_infographics:
@@ -1947,17 +2157,17 @@
  )
  metrics.create_infographics_metrics_risk(config=self.config.risk_infographics)

- def _add_additional_event_metrics(self, metrics):
+ def _add_additional_event_metrics(self, metrics: Metrics):
  if self.config.event_additional_infometrics:
  for metric in self.config.event_additional_infometrics:
  metrics.add_event_metric(metric)

- def _add_additional_risk_metrics(self, metrics):
+ def _add_additional_risk_metrics(self, metrics: Metrics):
  if self.config.risk_additional_infometrics:
  for metric in self.config.risk_additional_infometrics:
  metrics.add_risk_metric(metric)

- def _write_infometrics(self, metrics, path_im, path_ig):
+ def _write_infometrics(self, metrics: Metrics, path_im: Path, path_ig: Path):
  if self._aggregation_areas is None:
  self._aggregation_areas = self.create_aggregation_areas()
  aggr_levels = [aggr.name for aggr in self._aggregation_areas]
@@ -2136,7 +2346,7 @@

  # Set model building footprints
  self.fiat_model.building_footprint = building_footprints
- self.fiat_model.exposure.exposure_db = exposure_csv
+ self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(exposure_csv)

  # Save site attributes
  buildings_path = geo_path.relative_to(self.static_path)
@@ -2403,6 +2613,11 @@
  """
  # Make sure only csv objects have geometries
  for i, geoms in enumerate(self.fiat_model.exposure.exposure_geoms):
+ if _FIAT_COLUMNS.object_id not in geoms.columns:
+ logger.warning(
+ f"Geometry '{self.fiat_model.exposure.geom_names[i]}' does not have an '{_FIAT_COLUMNS.object_id}' column and will be ignored."
+ )
+ continue
  keep = geoms[_FIAT_COLUMNS.object_id].isin(
  self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.object_id]
  )
@@ -2443,6 +2658,35 @@

  return gdf

+ @staticmethod
+ def _clean_suffix_columns(df: pd.DataFrame) -> pd.DataFrame:
+ """Detect and resolves duplicate columns with _x/_y suffixes that appear after a pandas merge.
+
+ (e.g., 'Aggregation Label: Census Blockgroup_x' and 'Aggregation Label: Census Blockgroup_y').
+
+ Keeps the first non-null column of each pair and removes redundant ones.
+ """
+ cols = df.columns.tolist()
+ suffix_pairs = {}
+
+ for col in cols:
+ if col.endswith("_x"):
+ base = col[:-2]
+ if f"{base}_y" in df.columns:
+ suffix_pairs[base] = (f"{base}_x", f"{base}_y")
+
+ for base, (col_x, col_y) in suffix_pairs.items():
+ # If both columns exist, prefer the one with more non-null values
+ x_notna = df[col_x].notna().sum()
+ y_notna = df[col_y].notna().sum()
+ keep_col = col_x if x_notna >= y_notna else col_y
+ df[base] = df[keep_col]
+
+ # Drop the old suffixed versions
+ df = df.drop(columns=[col_x, col_y])
+
+ return df
+

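A toy illustration of the _x/_y cleanup performed by the _clean_suffix_columns helper defined above; column names and values are made up, and the suffixed pair mirrors the example in its docstring.

import pandas as pd

df = pd.DataFrame({
    "object_id": [1, 2],
    "Aggregation Label: Census Blockgroup_x": ["A", None],
    "Aggregation Label: Census Blockgroup_y": ["A", "B"],
})
cleaned = DatabaseBuilder._clean_suffix_columns(df)
print(list(cleaned.columns))  # ['object_id', 'Aggregation Label: Census Blockgroup']
print(cleaned["Aggregation Label: Census Blockgroup"].tolist())  # ['A', 'B'] (the _y column had more non-null values)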
  def create_database(config: Union[str, Path, ConfigModel], overwrite=False) -> None:
  """Create a new database from a configuration file or ConfigModel.
flood_adapt/database_builder/metrics_utils.py CHANGED
@@ -3,7 +3,7 @@ from pathlib import Path
  from typing import Any, Dict, List, Literal, Optional, Union

  import tomli_w
- from pydantic import BaseModel, Field, field_validator
+ from pydantic import BaseModel, Field, field_validator, model_validator

  from flood_adapt.adapter.fiat_adapter import _IMPACT_COLUMNS

@@ -131,52 +131,42 @@ class MetricModel(BaseModel):
  name : str
  The short name of the metric.
  long_name : Optional[str], default=None
- The long descriptive name of the metric. Defaults to `name` if not provided.
+ The long descriptive name of the metric. If not provided, defaults to `name`.
  show_in_metrics_table : Optional[bool], default=True
  Indicates whether the metric should be displayed in the metrics table.
+ show_in_map : Optional[bool], default=True
+ Indicates whether the metric should be displayed on the map.
  description : Optional[str], default=None
- A detailed description of the metric. Defaults to `name` if not provided.
+ A detailed description of the metric. If not provided, defaults to `name`.
  select : str
  The SQL select statement or expression for the metric.
  filter : Optional[str], default=""
  An optional SQL filter to apply to the metric. Defaults to no filter.

- Methods
- -------
- set_defaults(value, info)
- Sets default values for `long_name` and `description` fields using the `name` field if they are not provided.
+ Validation
+ ----------
+ If `long_name` or `description` are None, they will be set to the value of `name`.
  """

  name: str
  long_name: Optional[str] = None
  show_in_metrics_table: Optional[bool] = True
+ show_in_map: Optional[bool] = True
  description: Optional[str] = None
  select: str
  filter: Optional[str] = "" # This defaults to no filter

- @field_validator("long_name", "description", mode="after")
- @classmethod
- def set_defaults(cls, value, info):
- """
- Set default values for long_name and description fields.
+ @model_validator(mode="after")
+ def fill_defaults(self):
+ # If long_name is missing, copy name
+ if self.long_name is None:
+ self.long_name = self.name

- Parameters
- ----------
- value : Any
- The current field value.
- info : Any
- Field validation info containing all field values.
+ # If description is missing, copy long_name (which is now guaranteed)
+ if self.description is None:
+ self.description = self.long_name

- Returns
- -------
- str
- The field value or the default value from 'name' field.
- """
- # info.data contains all field values
- if value is None:
- # Use 'name' field as default
- return info.data.get("name")
- return value
+ return self


  class ImpactCategoriesModel(BaseModel):
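A hedged sketch of the new default-filling behaviour; the metric definition is made up, and only name and select are required fields.

m = MetricModel(name="FloodedHomes", select="COUNT(*)")
assert m.long_name == "FloodedHomes"    # copied from name by fill_defaults()
assert m.description == "FloodedHomes"  # copied from long_name
assert m.show_in_map is True            # new field, defaults to True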
flood_adapt/dbs_classes/database.py CHANGED
@@ -233,7 +233,7 @@ class Database(IDatabase):
  if _type == FloodmapType.water_level:
  paths = [base_dir / "max_water_level_map.nc"]
  elif _type == FloodmapType.water_depth:
- paths = [base_dir / f"FloodMap_{self.name}.tif"]
+ paths = [base_dir / f"FloodMap_{scenario_name}.tif"]
  elif mode == Mode.risk:
  if _type == FloodmapType.water_level:
  paths = list(base_dir.glob("RP_*_maps.nc"))
flood_adapt/objects/forcing/tide_gauge.py CHANGED
@@ -4,6 +4,7 @@ from typing import ClassVar, Optional

  import cht_observations.observation_stations as cht_station
  import pandas as pd
+ import requests
  from noaa_coops.station import COOPSAPIError
  from pydantic import BaseModel, model_validator

@@ -178,7 +179,7 @@ class TideGauge(BaseModel):
  series = series.reindex(index, method="nearest")
  df = pd.DataFrame(data=series, index=index)

- except COOPSAPIError as e:
+ except (COOPSAPIError, requests.JSONDecodeError) as e:
  logger.error(
  f"Could not download tide gauge data for station {self.ID}. {e}"
  )
flood_adapt/objects/forcing/unit_system.py CHANGED
@@ -110,10 +110,12 @@ class ValueUnitPair(ABC, BaseModel, Generic[TUnit]):
  raise ValueError(f"Unsupported or unknown unit: {str_unit}")

  def __str__(self) -> str:
- return f"{self.value} {self.units.value}"
+ return f"{self.value:.2f} {self.units.value}"

  def __repr__(self) -> str:
- return f"{type(self).__name__}(value={self.value}, units={self.units})"
+ return (
+ f"{type(self).__name__}(value={self.value:.2f}, units={self.units.value})"
+ )

  def __sub__(self: TClass, other: TClass) -> TClass:
  if not isinstance(other, type(self)):
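Illustrative only (assuming UnitfulLength is one of the ValueUnitPair subclasses in this module): the reworked dunder methods now render values with two decimals.

d = UnitfulLength(value=5.0, units=UnitTypesLength.meters)
str(d)   # "5.00 meters"   (previously "5.0 meters")
repr(d)  # "UnitfulLength(value=5.00, units=meters)"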
flood_adapt-1.1.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: flood-adapt
- Version: 1.1.2
+ Version: 1.1.4
  Summary: A software package support system which can be used to assess the benefits and costs of flood resilience measures
  Author-email: Gundula Winter <Gundula.Winter@deltares.nl>, Panos Athanasiou <Panos.Athanasiou@deltares.nl>, Frederique de Groen <Frederique.deGroen@deltares.nl>, Tim de Wilde <Tim.deWilde@deltares.nl>, Julian Hofer <Julian.Hofer@deltares.nl>, Daley Adrichem <Daley.Adrichem@deltares.nl>, Luuk Blom <Luuk.Blom@deltares.nl>
  License: ====================================================
@@ -712,14 +712,12 @@ Requires-Dist: cht-cyclones<2.0,>=1.0.3
  Requires-Dist: cht-meteo<1.0,>=0.3.1
  Requires-Dist: cht-observations<1.0,>=0.2.1
  Requires-Dist: cht-tide<1.0,>=0.1.1
- Requires-Dist: dask==2024.11.2
- Requires-Dist: numba_celltree==0.2.2
  Requires-Dist: fiat-toolbox<0.2.0,>=0.1.22
  Requires-Dist: fiona<2.0,>=1.0
  Requires-Dist: geojson<4.0,>=3.0
  Requires-Dist: geopandas<2.0,>=1.0
  Requires-Dist: hydromt-fiat<1.0,>=0.5.9
- Requires-Dist: hydromt-sfincs<2.0,>=1.2.0
+ Requires-Dist: hydromt-sfincs<2.0,>=1.2.2
  Requires-Dist: numpy<2.0,>=1.0
  Requires-Dist: numpy-financial<2.0,>=1.0
  Requires-Dist: pandas<3.0,>=2.0
flood_adapt-1.1.4.dist-info/RECORD CHANGED
@@ -1,8 +1,8 @@
- flood_adapt/__init__.py,sha256=3YQ8FGjALB_ckt6bSvYNYb4kp-L5GMSoNxipSQle_ZY,779
+ flood_adapt/__init__.py,sha256=HuJSWvtJutuwuiXwgvArRmpnG72guATUP3IFvzowaJY,779
  flood_adapt/flood_adapt.py,sha256=HVFS4OFhcB0TqHtMw3kbEei0IfJxsciauHfG3XZ38-0,40747
  flood_adapt/adapter/__init__.py,sha256=vnF8NCkEVX-N-gtGS-J_A1H1YYAjihWjJZFyYGwcp8Q,180
  flood_adapt/adapter/fiat_adapter.py,sha256=seDjPoumkhUOd7qer3ni1_Ut3dwyq0-_yhJNaTEFc2E,60284
- flood_adapt/adapter/sfincs_adapter.py,sha256=0M2miklSvKIsedFU5JCRclyHvGAombLEyklZPgwByIY,79138
+ flood_adapt/adapter/sfincs_adapter.py,sha256=_S-fABsi4flOkbrbIy_ROTuZrJyIMZtJu2wNh-TYSYs,79040
  flood_adapt/adapter/sfincs_offshore.py,sha256=DkqGwx0Fx4dojY1YH8tW3MUS4Omgd5DC6QINEsTP0Uk,7659
  flood_adapt/adapter/interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  flood_adapt/adapter/interface/hazard_adapter.py,sha256=S2NIUAMSRgxC_E-tZRJ2qIP06U1zEVdn-MnvMTrn86s,2828
@@ -17,9 +17,9 @@ flood_adapt/config/hazard.py,sha256=Ev6mj78cZ_vQuJ11KYjhJOzmfRB6fz267OJeKI0bYaM,
  flood_adapt/config/impacts.py,sha256=O7vE7jB3GSXnkqAvv7TqJiJ_j1uJ3mck_KQ-ScsB3bo,3192
  flood_adapt/config/sfincs.py,sha256=y8C3PzFwwgMB_sb8rBzgteaQ8fCxep6DnZxuk0q__bc,4825
  flood_adapt/config/site.py,sha256=VR90jCHWcxgoQJptNyXy7LseGjXUDRtdOjNGCddFVzI,4328
- flood_adapt/database_builder/__init__.py,sha256=YsI5bGcAKYmsmb5W-spp91hzsKSTRtkXBLNRxLOWml4,474
- flood_adapt/database_builder/database_builder.py,sha256=WHCsRGZq2owNyuLMpk2wEnGR2QrELAVDqJfE8xG12JE,100855
- flood_adapt/database_builder/metrics_utils.py,sha256=VCLhEIViwlmGxh9LBoEGMsRcSEvktiKvh8IolUjHPfI,66459
+ flood_adapt/database_builder/__init__.py,sha256=h4ietZ6sAZa7j2kvSzp5-58BueGrfJsXvq8PFu1RLyI,1112
+ flood_adapt/database_builder/database_builder.py,sha256=cv_uwkKqgc8RLQ5lAmTqRN1fm5gBOYbIVR3v2MgD_gY,109935
+ flood_adapt/database_builder/metrics_utils.py,sha256=aU7YfXLmBjFT0fQQQl3o0yIzdFJ6XJGlld0GnkJytGc,66258
  flood_adapt/database_builder/templates/default_units/imperial.toml,sha256=zIjPlxIa2kWLUjSYisd8UolXGo5iKdFoDDz_JkKBXTM,295
  flood_adapt/database_builder/templates/default_units/metric.toml,sha256=tc0XMKs7xGL9noB9lAb0gyQfjYxzokgHa3NqpccxWl0,302
  flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv,sha256=ooQzNGQwAMSAphy5W2ZyR5boQlcwvPv9ToJx1MlZhVE,466
@@ -56,7 +56,7 @@ flood_adapt/database_builder/templates/infographics/images/truck.png,sha256=0Ihv
  flood_adapt/database_builder/templates/infographics/images/walking_person.png,sha256=vaxO4oGejK5Q4KyGXuew2YgpGPabpnwLyxTFH1WMbmo,11080
  flood_adapt/database_builder/templates/output_layers/bin_colors.toml,sha256=yN3_h2IimOyjtfhZ-ZoWyNa-2cAeFRNlbvaNTLhEMfA,417
  flood_adapt/dbs_classes/__init__.py,sha256=J-a6BEkjDhuUzzRKuAn_AtTg_D9wNIsmY3BnVTiC2JA,731
- flood_adapt/dbs_classes/database.py,sha256=6zYAhVJVL3bNjxEzObVn4jOnuSQJYmsEuWN85fOeDT4,23425
+ flood_adapt/dbs_classes/database.py,sha256=2OxzIrZOLmgv3wpq6cIKer1gchM6MuU2S9r2cHUZ4i8,23429
  flood_adapt/dbs_classes/dbs_benefit.py,sha256=ayEYz8ga49HLdYuUsDWZOuZnpRnBpTuyhvfe2IyWAKI,1825
  flood_adapt/dbs_classes/dbs_event.py,sha256=ak3kHan6L1EfC8agDLKiCe8gaY5leOmj_qUBsI61q9A,1869
  flood_adapt/dbs_classes/dbs_measure.py,sha256=vVs-LtnHJN7eSGIFUglJdpbtfq_QI_Ftkv4lh5mfnNM,4085
@@ -95,10 +95,10 @@ flood_adapt/objects/forcing/meteo_handler.py,sha256=rTxY5WNobK_Ifzj2eVcoSPGgb3Tz
  flood_adapt/objects/forcing/netcdf.py,sha256=ZBzFtN5joVs36lVjvYErVaHEylUQ6eKIhR0uk_MD-zM,1388
  flood_adapt/objects/forcing/plotting.py,sha256=Y7f_9bY8d9jbd7BqEAeRmof-aaJhlznM3_wGBOI7g-s,14828
  flood_adapt/objects/forcing/rainfall.py,sha256=e6P3IMzItvnsmXbcMXl1oV-d9LDuh3jTIc_vt6Kz5zo,3282
- flood_adapt/objects/forcing/tide_gauge.py,sha256=jGIh6jQlhecGkPfBaZ8NKbr7FlpmLZAwmlqgp8lEWu0,7143
+ flood_adapt/objects/forcing/tide_gauge.py,sha256=XhplyNHtCn0hRM1oeD5v-fMYAOLAJIKidmxKxVxCUlw,7188
  flood_adapt/objects/forcing/time_frame.py,sha256=1X3G0Ax18BHRvAomf-CW_ISRk_3qgAakwgZCIBxIkL4,2855
  flood_adapt/objects/forcing/timeseries.py,sha256=bD27JWzC3owq5ah3zPzJ7xoUzSH_t4J03s_SycYW0mQ,19740
- flood_adapt/objects/forcing/unit_system.py,sha256=EHz4ixI8nmjfDeyU2AszXTf6ebaqChbGg0PuJHMJdh8,16502
+ flood_adapt/objects/forcing/unit_system.py,sha256=7FFOmaxq6EOvXx64QDxlpNU4uMExqridFcdFwyTJ4Lo,16542
  flood_adapt/objects/forcing/waterlevels.py,sha256=8lCmUdeyABurJwftae4_Iut9hCn24xVqCEPEa73OOcA,3437
  flood_adapt/objects/forcing/wind.py,sha256=xs_xZdUoZUDP1y1xITlNVJwiyDt6wQsFbPFhVRDjSqg,3925
  flood_adapt/objects/measures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -114,8 +114,8 @@ flood_adapt/objects/strategies/strategies.py,sha256=Jw-WJDCamL9p_7VEir3AdmYPMVAi
  flood_adapt/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  flood_adapt/workflows/benefit_runner.py,sha256=eA21TuHdeZ6QYO8ehXri6BHlkyHsVsZphIdIca5g0KA,21824
  flood_adapt/workflows/scenario_runner.py,sha256=9_Y6GmMYhYoTRkBUIlju0eBy6DosGf4Zl2tgu1QEubI,4119
- flood_adapt-1.1.2.dist-info/LICENSE,sha256=Ui5E03pQ0EVKxvKA54lTPA1xrtgA2HMGLQai95eOzoE,36321
- flood_adapt-1.1.2.dist-info/METADATA,sha256=nnnU7EvxfCCRU_9kCJqoDR3zpjAGUojTDO5dXZ9C12I,48876
- flood_adapt-1.1.2.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
- flood_adapt-1.1.2.dist-info/top_level.txt,sha256=JvzMi6cTcQPEThCfpgMEeVny3ghI1urSH0CCgVIqSzw,12
- flood_adapt-1.1.2.dist-info/RECORD,,
+ flood_adapt-1.1.4.dist-info/LICENSE,sha256=Ui5E03pQ0EVKxvKA54lTPA1xrtgA2HMGLQai95eOzoE,36321
+ flood_adapt-1.1.4.dist-info/METADATA,sha256=70kUiml9cQzOes7B0XAgpqzYCU1kAYuhSAiXLdmqa8U,48806
+ flood_adapt-1.1.4.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+ flood_adapt-1.1.4.dist-info/top_level.txt,sha256=JvzMi6cTcQPEThCfpgMEeVny3ghI1urSH0CCgVIqSzw,12
+ flood_adapt-1.1.4.dist-info/RECORD,,