flood-adapt 1.1.3__py3-none-any.whl → 1.1.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
flood_adapt/__init__.py CHANGED
@@ -1,5 +1,5 @@
  # has to be here at the start to avoid circular imports
- __version__ = "1.1.3"
+ __version__ = "1.1.4"

  from flood_adapt import adapter, database_builder, dbs_classes, objects
  from flood_adapt.config.config import Settings

flood_adapt/database_builder/__init__.py CHANGED
@@ -1,11 +1,17 @@
+ from flood_adapt.config import (
+     FloodModel,
+     SlrScenariosModel,
+ )
  from flood_adapt.database_builder.database_builder import (
      Basins,
      ConfigModel,
      FootprintsOptions,
      GuiConfigModel,
+     ObsPointModel,
      SpatialJoinModel,
      SviConfigModel,
      TideGaugeConfigModel,
+     TideGaugeSource,
      UnitSystems,
      create_database,
  )
@@ -28,6 +34,7 @@ __all__ = [
      "SpatialJoinModel",
      "SviConfigModel",
      "TideGaugeConfigModel",
+     "TideGaugeSource",
      "UnitSystems",
      "create_database",
      "BuildingsInfographicModel",
@@ -37,4 +44,7 @@ __all__ = [
      "RiskInfographicModel",
      "RoadsInfographicModel",
      "ImpactCategoriesModel",
+     "FloodModel",
+     "SlrScenariosModel",
+     "ObsPointModel",
  ]
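
With the expanded __all__, the new configuration models can be imported directly from the package namespace. A minimal sketch of downstream usage (assuming the hunk above belongs to flood_adapt/database_builder/__init__.py, as the RECORD changes at the end of this diff indicate):

# Hypothetical downstream import of the newly re-exported names
from flood_adapt.database_builder import (
    FloodModel,
    ObsPointModel,
    SlrScenariosModel,
    TideGaugeSource,
)
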
flood_adapt/database_builder/database_builder.py CHANGED
@@ -17,12 +17,14 @@ import numpy as np
  import pandas as pd
  import rioxarray as rxr
  import tomli
+ import tomli_w
  import xarray as xr
  from hydromt_fiat import FiatModel as HydromtFiatModel
  from hydromt_fiat.data_apis.open_street_maps import get_buildings_from_osm
  from hydromt_sfincs import SfincsModel as HydromtSfincsModel
  from pydantic import BaseModel, Field
  from shapely import MultiLineString, MultiPolygon, Polygon
+ from shapely.ops import nearest_points

  from flood_adapt.adapter.fiat_adapter import _FIAT_COLUMNS
  from flood_adapt.config.fiat import (
@@ -133,6 +135,31 @@ def path_check(str_path: str, config_path: Optional[Path] = None) -> str:
      return path.as_posix()


+ def make_relative(str_path: str | Path, toml_path: Path) -> str:
+     """Make a path relative to the config file path.
+ 
+     Parameters
+     ----------
+     str_path : str | Path
+         The path to be made relative.
+     toml_path : Path
+         The path to the config file.
+ 
+     Returns
+     -------
+     str
+         The relative path as a string.
+     """
+     path = Path(str_path)
+     if not path.is_absolute():
+         return path.as_posix()
+     try:
+         relative_path = path.relative_to(toml_path.parent)
+         return relative_path.as_posix()
+     except ValueError:
+         return path.as_posix()
+ 
+ 
  class SpatialJoinModel(BaseModel):
      """
      Model for representing a spatial join between geometries and tabular data.
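
The new make_relative helper rewrites absolute paths that sit under the config folder as relative POSIX strings and returns everything else unchanged. A minimal sketch of the pathlib behaviour it relies on (the paths below are hypothetical):

from pathlib import Path

toml_path = Path("/projects/site/config.toml")
print(Path("data/dem.tif").is_absolute())                                  # False -> returned as-is
print(Path("/projects/site/data/dem.tif").relative_to(toml_path.parent))   # data/dem.tif
try:
    Path("/other/place/dem.tif").relative_to(toml_path.parent)
except ValueError:
    print("not under the config folder -> absolute path is kept")
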
@@ -421,6 +448,8 @@ class ConfigModel(BaseModel):
          config.database_path = path_check(config.database_path, toml_path)
          config.fiat = path_check(config.fiat, toml_path)
          config.sfincs_overland.name = path_check(config.sfincs_overland.name, toml_path)
+         if config.dem:
+             config.dem.filename = path_check(config.dem.filename, toml_path)
          if config.sfincs_offshore:
              config.sfincs_offshore.name = path_check(
                  config.sfincs_offshore.name, toml_path
@@ -445,6 +474,67 @@ class ConfigModel(BaseModel):

          return config

+     def write(self, toml_path: Path) -> None:
+         """
+         Write the configuration model to a TOML file.
+ 
+         Parameters
+         ----------
+         toml_path : Path
+             The path to the TOML file where the configuration will be saved.
+         """
+         config_dict = self.model_dump(exclude_none=True)
+ 
+         # Make paths relative to the config file
+         config_dict["database_path"] = make_relative(
+             config_dict["database_path"], toml_path
+         )
+         config_dict["fiat"] = make_relative(config_dict["fiat"], toml_path)
+         config_dict["sfincs_overland"]["name"] = make_relative(
+             config_dict["sfincs_overland"]["name"], toml_path
+         )
+         if self.dem:
+             config_dict["dem"]["filename"] = make_relative(
+                 config_dict["dem"]["filename"], toml_path
+             )
+         if config_dict.get("sfincs_offshore"):
+             config_dict["sfincs_offshore"]["name"] = make_relative(
+                 config_dict["sfincs_offshore"]["name"], toml_path
+             )
+         if isinstance(self.building_footprints, SpatialJoinModel):
+             config_dict["building_footprints"]["file"] = make_relative(
+                 config_dict["building_footprints"]["file"], toml_path
+             )
+         if self.tide_gauge and self.tide_gauge.file:
+             config_dict["tide_gauge"]["file"] = make_relative(
+                 config_dict["tide_gauge"]["file"], toml_path
+             )
+         if self.svi:
+             config_dict["svi"]["file"] = make_relative(
+                 config_dict["svi"]["file"], toml_path
+             )
+         if self.bfe:
+             config_dict["bfe"]["file"] = make_relative(
+                 config_dict["bfe"]["file"], toml_path
+             )
+         if self.slr_scenarios:
+             config_dict["slr_scenarios"]["file"] = make_relative(
+                 config_dict["slr_scenarios"]["file"], toml_path
+             )
+ 
+         if config_dict.get("probabilistic_set"):
+             config_dict["probabilistic_set"] = make_relative(
+                 config_dict["probabilistic_set"], toml_path
+             )
+ 
+         if config_dict.get("aggregation_areas"):
+             for ag in config_dict["aggregation_areas"]:
+                 ag["file"] = make_relative(ag["file"], toml_path)
+ 
+         toml_path.parent.mkdir(parents=True, exist_ok=True)
+         with open(toml_path, mode="wb") as fp:
+             tomli_w.dump(config_dict, fp)
+ 

  class DatabaseBuilder:
      _has_roads: bool = False
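
ConfigModel.write dumps the pydantic model to a plain dict, rewrites its path fields with make_relative, and serializes the result with tomli_w. A minimal sketch of that final serialization step (the dict keys shown are an illustrative subset, not the full ConfigModel schema):

import tomli_w

config_dict = {"database_path": "Database", "fiat": "fiat_model"}  # hypothetical subset of model_dump() output
with open("config.toml", "wb") as fp:
    tomli_w.dump(config_dict, fp)
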
@@ -472,15 +562,16 @@ class DatabaseBuilder:
      @debug_timer
      def build(self, overwrite: bool = False) -> None:
          # Check if database already exists
-         if self.root.exists() and not overwrite:
-             raise ValueError(
-                 f"There is already a Database folder in '{self.root.as_posix()}'."
-             )
-         if self.root.exists() and overwrite:
-             shutil.rmtree(self.root)
-             warnings.warn(
-                 f"There is already a Database folder in '{self.root.as_posix()}, which will be overwritten'."
-             )
+         if self.root.exists():
+             if overwrite:
+                 shutil.rmtree(self.root)
+                 warnings.warn(
+                     f"There is already a Database folder in '{self.root.as_posix()}, which will be overwritten'."
+                 )
+             else:
+                 raise ValueError(
+                     f"There is already a Database folder in '{self.root.as_posix()}'."
+                 )
          # Create database folder
          self.root.mkdir(parents=True)

@@ -823,7 +914,7 @@ class DatabaseBuilder:
          exposure[_FIAT_COLUMNS.ground_elevation] = exposure["elev"]
          del exposure["elev"]

-         self.fiat_model.exposure.exposure_db = exposure
+         self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(exposure)

      def read_damage_unit(self) -> str:
          if self.fiat_model.exposure.damage_unit is None:
@@ -1087,7 +1178,9 @@ class DatabaseBuilder:
          exposure_csv = exposure_csv.merge(
              gdf_joined, on=_FIAT_COLUMNS.object_id, how="left"
          )
-         self.fiat_model.exposure.exposure_db = exposure_csv
+         self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(
+             exposure_csv
+         )
          # Update spatial joins in FIAT model
          if self.fiat_model.spatial_joins["aggregation_areas"] is None:
              self.fiat_model.spatial_joins["aggregation_areas"] = []
@@ -1149,7 +1242,9 @@ class DatabaseBuilder:
          exposure_csv = exposure_csv.merge(
              gdf_joined, on=_FIAT_COLUMNS.object_id, how="left"
          )
-         self.fiat_model.exposure.exposure_db = exposure_csv
+         self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(
+             exposure_csv
+         )
          logger.warning(
              "No aggregation areas were available in the FIAT model and none were provided in the config file. The region file will be used as a mock aggregation area."
          )
@@ -1180,7 +1275,9 @@ class DatabaseBuilder:
          exposure_csv = exposure_csv.merge(
              buildings_joined, on=_FIAT_COLUMNS.object_id, how="left"
          )
-         self.fiat_model.exposure.exposure_db = exposure_csv
+         self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(
+             exposure_csv
+         )

          # Save the spatial file for future use
          svi_path = self.static_path / "templates" / "fiat" / "svi" / "svi.gpkg"
@@ -1199,8 +1296,15 @@ class DatabaseBuilder:
              "'SVI' column present in the FIAT exposure csv. Vulnerability type infometrics can be produced."
          )
          add_attrs = self.fiat_model.spatial_joins["additional_attributes"]
+         if add_attrs is None:
+             logger.warning(
+                 "'SVI' column present in the FIAT exposure csv, but no spatial join found with the SVI map."
+             )
+             return None
+ 
          if "SVI" not in [attr["name"] for attr in add_attrs]:
              logger.warning("No SVI map found to display in the FloodAdapt GUI!")
+             return None

          ind = [attr["name"] for attr in add_attrs].index("SVI")
          svi = add_attrs[ind]
@@ -1407,19 +1511,40 @@ class DatabaseBuilder:
          logger.info("Observation points were provided in the config file.")
          obs_points = self.config.obs_point

+         model_region = self.sfincs_overland_model.region.union_all()
+ 
          if self.tide_gauge is not None:
-             logger.info(
-                 "A tide gauge has been setup in the database. It will be used as an observation point as well."
-             )
-             obs_points.append(
-                 ObsPointModel(
-                     name=self.tide_gauge.name,
-                     description="Tide gauge observation point",
-                     ID=self.tide_gauge.ID,
-                     lon=self.tide_gauge.lon,
-                     lat=self.tide_gauge.lat,
+             # Check if tide gauge is within model domain
+             coord = (
+                 gpd.GeoSeries(
+                     gpd.points_from_xy(
+                         x=[self.tide_gauge.lon], y=[self.tide_gauge.lat]
+                     ),
+                     crs="EPSG:4326",
+                 )
+                 .to_crs(self.sfincs_overland_model.crs)
+                 .iloc[0]
+             )
+             # Add tide gauge as obs point if within model region
+             if coord.within(model_region):
+                 obs_points.append(
+                     ObsPointModel(
+                         name=self.tide_gauge.name,
+                         description="Tide gauge observation point",
+                         ID=self.tide_gauge.ID,
+                         lon=self.tide_gauge.lon,
+                         lat=self.tide_gauge.lat,
+                     )
+                 )
+             else:
+                 boundary = model_region.boundary
+                 snapped = nearest_points(coord, boundary)[1]
+                 distance = us.UnitfulLength(
+                     value=coord.distance(snapped), units=us.UnitTypesLength.meters
+                 )
+                 logger.warning(
+                     f"Tide gauge lies outside the model domain by {distance}. It will not be used as an observation point in FloodAdapt."
                  )
-             )


          if not obs_points:
@@ -1427,20 +1552,25 @@ class DatabaseBuilder:
              )
              return None

+         # Check if all obs points are within model domain
          lon = [p.lon for p in obs_points]
          lat = [p.lat for p in obs_points]
          names = [p.name for p in obs_points]
          coords = gpd.GeoDataFrame(
-             {"names": names},
+             {"name": names},
              geometry=gpd.points_from_xy(lon, lat),
              crs="EPSG:4326",
          )
          coords = coords.to_crs(self.sfincs_overland_model.crs)
-         model_region = self.sfincs_overland_model.region.union_all()
          valid_coords = coords.within(model_region)
          if not valid_coords.all():
              invalid = coords.loc[~valid_coords, "name"].tolist()
-             raise ValueError(f"Observation points outside model domain: {invalid}")
+             lat = coords.loc[~valid_coords].geometry.y.tolist()
+             lon = coords.loc[~valid_coords].geometry.x.tolist()
+             bounds = model_region.bounds
+             raise ValueError(
+                 f"Observation points outside model domain: {invalid}, {lat=}, {lon=}, {bounds=}"
+             )

          return obs_points

@@ -1973,12 +2103,12 @@ class DatabaseBuilder:

          self.metrics = metrics

-     def _create_mandatory_metrics(self, metrics):
+     def _create_mandatory_metrics(self, metrics: Metrics):
          metrics.create_mandatory_metrics_event()
          if self._probabilistic_set_name is not None:
              metrics.create_mandatory_metrics_risk()

-     def _create_event_infographics(self, metrics):
+     def _create_event_infographics(self, metrics: Metrics):
          exposure_type = self._get_exposure_type()
          # If not specific infographic config is given, create a standard one
          if not self.config.event_infographics:
@@ -2010,7 +2140,7 @@ class DatabaseBuilder:
              )
          metrics.create_infographics_metrics_event(config=self.config.event_infographics)

-     def _create_risk_infographics(self, metrics):
+     def _create_risk_infographics(self, metrics: Metrics):
          exposure_type = self._get_exposure_type()
          # If not specific infographic config is given, create a standard one
          if not self.config.risk_infographics:
@@ -2027,17 +2157,17 @@ class DatabaseBuilder:
              )
          metrics.create_infographics_metrics_risk(config=self.config.risk_infographics)

-     def _add_additional_event_metrics(self, metrics):
+     def _add_additional_event_metrics(self, metrics: Metrics):
          if self.config.event_additional_infometrics:
              for metric in self.config.event_additional_infometrics:
                  metrics.add_event_metric(metric)

-     def _add_additional_risk_metrics(self, metrics):
+     def _add_additional_risk_metrics(self, metrics: Metrics):
          if self.config.risk_additional_infometrics:
              for metric in self.config.risk_additional_infometrics:
                  metrics.add_risk_metric(metric)

-     def _write_infometrics(self, metrics, path_im, path_ig):
+     def _write_infometrics(self, metrics: Metrics, path_im: Path, path_ig: Path):
          if self._aggregation_areas is None:
              self._aggregation_areas = self.create_aggregation_areas()
          aggr_levels = [aggr.name for aggr in self._aggregation_areas]
@@ -2216,7 +2346,7 @@ class DatabaseBuilder:

          # Set model building footprints
          self.fiat_model.building_footprint = building_footprints
-         self.fiat_model.exposure.exposure_db = exposure_csv
+         self.fiat_model.exposure.exposure_db = self._clean_suffix_columns(exposure_csv)

          # Save site attributes
          buildings_path = geo_path.relative_to(self.static_path)
@@ -2483,6 +2613,11 @@ class DatabaseBuilder:
          """
          # Make sure only csv objects have geometries
          for i, geoms in enumerate(self.fiat_model.exposure.exposure_geoms):
+             if _FIAT_COLUMNS.object_id not in geoms.columns:
+                 logger.warning(
+                     f"Geometry '{self.fiat_model.exposure.geom_names[i]}' does not have an '{_FIAT_COLUMNS.object_id}' column and will be ignored."
+                 )
+                 continue
              keep = geoms[_FIAT_COLUMNS.object_id].isin(
                  self.fiat_model.exposure.exposure_db[_FIAT_COLUMNS.object_id]
              )
@@ -2523,6 +2658,35 @@ class DatabaseBuilder:

          return gdf

+     @staticmethod
+     def _clean_suffix_columns(df: pd.DataFrame) -> pd.DataFrame:
+         """Detect and resolves duplicate columns with _x/_y suffixes that appear after a pandas merge.
+ 
+         (e.g., 'Aggregation Label: Census Blockgroup_x' and 'Aggregation Label: Census Blockgroup_y').
+ 
+         Keeps the first non-null column of each pair and removes redundant ones.
+         """
+         cols = df.columns.tolist()
+         suffix_pairs = {}
+ 
+         for col in cols:
+             if col.endswith("_x"):
+                 base = col[:-2]
+                 if f"{base}_y" in df.columns:
+                     suffix_pairs[base] = (f"{base}_x", f"{base}_y")
+ 
+         for base, (col_x, col_y) in suffix_pairs.items():
+             # If both columns exist, prefer the one with more non-null values
+             x_notna = df[col_x].notna().sum()
+             y_notna = df[col_y].notna().sum()
+             keep_col = col_x if x_notna >= y_notna else col_y
+             df[base] = df[keep_col]
+ 
+             # Drop the old suffixed versions
+             df = df.drop(columns=[col_x, col_y])
+ 
+         return df
+ 

  def create_database(config: Union[str, Path, ConfigModel], overwrite=False) -> None:
      """Create a new database from a configuration file or ConfigModel.
flood_adapt/objects/forcing/unit_system.py CHANGED
@@ -110,10 +110,12 @@ class ValueUnitPair(ABC, BaseModel, Generic[TUnit]):
          raise ValueError(f"Unsupported or unknown unit: {str_unit}")

      def __str__(self) -> str:
-         return f"{self.value} {self.units.value}"
+         return f"{self.value:.2f} {self.units.value}"

      def __repr__(self) -> str:
-         return f"{type(self).__name__}(value={self.value}, units={self.units})"
+         return (
+             f"{type(self).__name__}(value={self.value:.2f}, units={self.units.value})"
+         )

      def __sub__(self: TClass, other: TClass) -> TClass:
          if not isinstance(other, type(self)):
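
With the format specifier added above, string representations of value-unit pairs are rounded to two decimals (this is what the tide gauge distance warning in database_builder.py prints). A small sketch, assuming unit_system is importable under the path shown in the RECORD and exposes UnitfulLength / UnitTypesLength as used elsewhere in this diff, and that the meters enum value renders as "meters":

from flood_adapt.objects.forcing import unit_system as us  # assumed import path

length = us.UnitfulLength(value=123.456, units=us.UnitTypesLength.meters)
print(str(length))   # e.g. "123.46 meters"
print(repr(length))  # e.g. "UnitfulLength(value=123.46, units=meters)"
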
flood_adapt-1.1.4.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: flood-adapt
- Version: 1.1.3
+ Version: 1.1.4
  Summary: A software package support system which can be used to assess the benefits and costs of flood resilience measures
  Author-email: Gundula Winter <Gundula.Winter@deltares.nl>, Panos Athanasiou <Panos.Athanasiou@deltares.nl>, Frederique de Groen <Frederique.deGroen@deltares.nl>, Tim de Wilde <Tim.deWilde@deltares.nl>, Julian Hofer <Julian.Hofer@deltares.nl>, Daley Adrichem <Daley.Adrichem@deltares.nl>, Luuk Blom <Luuk.Blom@deltares.nl>
  License: ====================================================
@@ -712,14 +712,12 @@ Requires-Dist: cht-cyclones<2.0,>=1.0.3
  Requires-Dist: cht-meteo<1.0,>=0.3.1
  Requires-Dist: cht-observations<1.0,>=0.2.1
  Requires-Dist: cht-tide<1.0,>=0.1.1
- Requires-Dist: dask==2024.11.2
- Requires-Dist: numba_celltree==0.2.2
  Requires-Dist: fiat-toolbox<0.2.0,>=0.1.22
  Requires-Dist: fiona<2.0,>=1.0
  Requires-Dist: geojson<4.0,>=3.0
  Requires-Dist: geopandas<2.0,>=1.0
  Requires-Dist: hydromt-fiat<1.0,>=0.5.9
- Requires-Dist: hydromt-sfincs<2.0,>=1.2.0
+ Requires-Dist: hydromt-sfincs<2.0,>=1.2.2
  Requires-Dist: numpy<2.0,>=1.0
  Requires-Dist: numpy-financial<2.0,>=1.0
  Requires-Dist: pandas<3.0,>=2.0

flood_adapt-1.1.4.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- flood_adapt/__init__.py,sha256=ZFaVlYxQCAmSCEcKRDh-vvJUduWCIiLhuc2dBFHMbaw,779
+ flood_adapt/__init__.py,sha256=HuJSWvtJutuwuiXwgvArRmpnG72guATUP3IFvzowaJY,779
  flood_adapt/flood_adapt.py,sha256=HVFS4OFhcB0TqHtMw3kbEei0IfJxsciauHfG3XZ38-0,40747
  flood_adapt/adapter/__init__.py,sha256=vnF8NCkEVX-N-gtGS-J_A1H1YYAjihWjJZFyYGwcp8Q,180
  flood_adapt/adapter/fiat_adapter.py,sha256=seDjPoumkhUOd7qer3ni1_Ut3dwyq0-_yhJNaTEFc2E,60284
@@ -17,8 +17,8 @@ flood_adapt/config/hazard.py,sha256=Ev6mj78cZ_vQuJ11KYjhJOzmfRB6fz267OJeKI0bYaM,
  flood_adapt/config/impacts.py,sha256=O7vE7jB3GSXnkqAvv7TqJiJ_j1uJ3mck_KQ-ScsB3bo,3192
  flood_adapt/config/sfincs.py,sha256=y8C3PzFwwgMB_sb8rBzgteaQ8fCxep6DnZxuk0q__bc,4825
  flood_adapt/config/site.py,sha256=VR90jCHWcxgoQJptNyXy7LseGjXUDRtdOjNGCddFVzI,4328
- flood_adapt/database_builder/__init__.py,sha256=nofsq8RqgoCKHXGq6P2ZN01H_GYbEpOzyQu0N_73suQ,901
- flood_adapt/database_builder/database_builder.py,sha256=f9k-xbS5RC7fNs_qQZaSBvQO2es03OEfvCkWBO5qSGc,103498
+ flood_adapt/database_builder/__init__.py,sha256=h4ietZ6sAZa7j2kvSzp5-58BueGrfJsXvq8PFu1RLyI,1112
+ flood_adapt/database_builder/database_builder.py,sha256=cv_uwkKqgc8RLQ5lAmTqRN1fm5gBOYbIVR3v2MgD_gY,109935
  flood_adapt/database_builder/metrics_utils.py,sha256=aU7YfXLmBjFT0fQQQl3o0yIzdFJ6XJGlld0GnkJytGc,66258
  flood_adapt/database_builder/templates/default_units/imperial.toml,sha256=zIjPlxIa2kWLUjSYisd8UolXGo5iKdFoDDz_JkKBXTM,295
  flood_adapt/database_builder/templates/default_units/metric.toml,sha256=tc0XMKs7xGL9noB9lAb0gyQfjYxzokgHa3NqpccxWl0,302
@@ -98,7 +98,7 @@ flood_adapt/objects/forcing/rainfall.py,sha256=e6P3IMzItvnsmXbcMXl1oV-d9LDuh3jTI
  flood_adapt/objects/forcing/tide_gauge.py,sha256=XhplyNHtCn0hRM1oeD5v-fMYAOLAJIKidmxKxVxCUlw,7188
  flood_adapt/objects/forcing/time_frame.py,sha256=1X3G0Ax18BHRvAomf-CW_ISRk_3qgAakwgZCIBxIkL4,2855
  flood_adapt/objects/forcing/timeseries.py,sha256=bD27JWzC3owq5ah3zPzJ7xoUzSH_t4J03s_SycYW0mQ,19740
- flood_adapt/objects/forcing/unit_system.py,sha256=EHz4ixI8nmjfDeyU2AszXTf6ebaqChbGg0PuJHMJdh8,16502
+ flood_adapt/objects/forcing/unit_system.py,sha256=7FFOmaxq6EOvXx64QDxlpNU4uMExqridFcdFwyTJ4Lo,16542
  flood_adapt/objects/forcing/waterlevels.py,sha256=8lCmUdeyABurJwftae4_Iut9hCn24xVqCEPEa73OOcA,3437
  flood_adapt/objects/forcing/wind.py,sha256=xs_xZdUoZUDP1y1xITlNVJwiyDt6wQsFbPFhVRDjSqg,3925
  flood_adapt/objects/measures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -114,8 +114,8 @@ flood_adapt/objects/strategies/strategies.py,sha256=Jw-WJDCamL9p_7VEir3AdmYPMVAi
  flood_adapt/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  flood_adapt/workflows/benefit_runner.py,sha256=eA21TuHdeZ6QYO8ehXri6BHlkyHsVsZphIdIca5g0KA,21824
  flood_adapt/workflows/scenario_runner.py,sha256=9_Y6GmMYhYoTRkBUIlju0eBy6DosGf4Zl2tgu1QEubI,4119
- flood_adapt-1.1.3.dist-info/LICENSE,sha256=Ui5E03pQ0EVKxvKA54lTPA1xrtgA2HMGLQai95eOzoE,36321
- flood_adapt-1.1.3.dist-info/METADATA,sha256=7QOIs6mTL4QYNxEd9YCX7uNCWaAOfj_wtRg4buQi9NI,48876
- flood_adapt-1.1.3.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
- flood_adapt-1.1.3.dist-info/top_level.txt,sha256=JvzMi6cTcQPEThCfpgMEeVny3ghI1urSH0CCgVIqSzw,12
- flood_adapt-1.1.3.dist-info/RECORD,,
+ flood_adapt-1.1.4.dist-info/LICENSE,sha256=Ui5E03pQ0EVKxvKA54lTPA1xrtgA2HMGLQai95eOzoE,36321
+ flood_adapt-1.1.4.dist-info/METADATA,sha256=70kUiml9cQzOes7B0XAgpqzYCU1kAYuhSAiXLdmqa8U,48806
+ flood_adapt-1.1.4.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
+ flood_adapt-1.1.4.dist-info/top_level.txt,sha256=JvzMi6cTcQPEThCfpgMEeVny3ghI1urSH0CCgVIqSzw,12
+ flood_adapt-1.1.4.dist-info/RECORD,,