flood-adapt 1.0.6__py3-none-any.whl → 1.1.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. flood_adapt/__init__.py +1 -2
  2. flood_adapt/adapter/fiat_adapter.py +12 -7
  3. flood_adapt/adapter/sfincs_adapter.py +20 -23
  4. flood_adapt/config/fiat.py +1 -1
  5. flood_adapt/config/gui.py +185 -8
  6. flood_adapt/config/hazard.py +1 -1
  7. flood_adapt/database_builder/database_builder.py +155 -129
  8. flood_adapt/database_builder/metrics_utils.py +1834 -0
  9. flood_adapt/dbs_classes/database.py +4 -4
  10. flood_adapt/dbs_classes/dbs_static.py +2 -2
  11. flood_adapt/flood_adapt.py +65 -14
  12. flood_adapt/misc/utils.py +29 -10
  13. flood_adapt/objects/forcing/plotting.py +4 -4
  14. flood_adapt/objects/measures/measures.py +3 -1
  15. flood_adapt/workflows/benefit_runner.py +3 -2
  16. {flood_adapt-1.0.6.dist-info → flood_adapt-1.1.1.dist-info}/METADATA +13 -124
  17. {flood_adapt-1.0.6.dist-info → flood_adapt-1.1.1.dist-info}/RECORD +21 -41
  18. flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +0 -90
  19. flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +0 -57
  20. flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +0 -121
  21. flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +0 -65
  22. flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +0 -126
  23. flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +0 -60
  24. flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +0 -121
  25. flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +0 -65
  26. flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +0 -45
  27. flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +0 -4
  28. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +0 -143
  29. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +0 -153
  30. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +0 -127
  31. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +0 -57
  32. flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +0 -4
  33. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +0 -191
  34. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +0 -153
  35. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +0 -178
  36. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +0 -57
  37. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +0 -9
  38. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +0 -65
  39. /flood_adapt/database_builder/templates/infographics/{OSM/styles.css → styles.css} +0 -0
  40. {flood_adapt-1.0.6.dist-info → flood_adapt-1.1.1.dist-info}/LICENSE +0 -0
  41. {flood_adapt-1.0.6.dist-info → flood_adapt-1.1.1.dist-info}/WHEEL +0 -0
  42. {flood_adapt-1.0.6.dist-info → flood_adapt-1.1.1.dist-info}/top_level.txt +0 -0
@@ -192,7 +192,7 @@ class Database(IDatabase):
192
192
  if self.site.sfincs.slr_scenarios is None:
193
193
  raise ConfigError("No SLR scenarios defined in the site configuration.")
194
194
  slr = self.site.sfincs.slr_scenarios
195
- slr.file = str(self.static_path / slr.file)
195
+ slr.file = (self.static_path / slr.file).as_posix()
196
196
  return slr
197
197
 
198
198
  def get_outputs(self) -> dict[str, Any]:
@@ -233,7 +233,7 @@ class Database(IDatabase):
233
233
  if _type == FloodmapType.water_level:
234
234
  paths = [base_dir / "max_water_level_map.nc"]
235
235
  elif _type == FloodmapType.water_depth:
236
- paths = [base_dir / f"FloodMap_{scenario_name}.tif"]
236
+ paths = [base_dir / f"FloodMap_{self.name}.tif"]
237
237
  elif mode == Mode.risk:
238
238
  if _type == FloodmapType.water_level:
239
239
  paths = list(base_dir.glob("RP_*_maps.nc"))
@@ -285,7 +285,7 @@ class Database(IDatabase):
285
285
  path to topobathy tiles
286
286
  """
287
287
  path = self.input_path.parent.joinpath("static", "dem", "tiles", "topobathy")
288
- return str(path)
288
+ return path.as_posix()
289
289
 
290
290
  def get_index_path(self) -> str:
291
291
  """Return the path of the index tiles which are used to connect each water level cell with the topobathy tiles.
@@ -296,7 +296,7 @@ class Database(IDatabase):
296
296
  path to index tiles
297
297
  """
298
298
  path = self.input_path.parent.joinpath("static", "dem", "tiles", "indices")
299
- return str(path)
299
+ return path.as_posix()
300
300
 
301
301
  def get_depth_conversion(self) -> float:
302
302
  """Return the flood depth conversion that is need in the gui to plot the flood map.
@@ -290,9 +290,9 @@ class DbsStatic(IDbsStatic):
290
290
  raise ConfigError(
291
291
  "No cyclone track database defined in the site configuration."
292
292
  )
293
- database_file = str(
293
+ database_file = (
294
294
  self._database.static_path
295
295
  / "cyclone_track_database"
296
296
  / self._database.site.sfincs.cyclone_track_database.file
297
- )
297
+ ).as_posix()
298
298
  return CycloneTrackDatabase("ibtracs", file_name=database_file)
@@ -1,5 +1,5 @@
1
1
  from pathlib import Path
2
- from typing import Any, List, Optional, Union
2
+ from typing import Any, List, Literal, Optional, Union
3
3
 
4
4
  import geopandas as gpd
5
5
  import numpy as np
@@ -891,7 +891,7 @@ class FloodAdapt:
891
891
 
892
892
  output_path = self.database.get_flooding_path(scenario.name)
893
893
  obs_points["html"] = [
894
- str(output_path.joinpath(f"{station}_timeseries.html"))
894
+ (output_path / f"{station}_timeseries.html").as_posix()
895
895
  for station in obs_points.name
896
896
  ]
897
897
 
@@ -935,43 +935,94 @@ class FloodAdapt:
935
935
 
936
936
  return infographic_path
937
937
 
938
- def get_infometrics(self, name: str) -> pd.DataFrame:
939
- """Return the metrics for the given scenario.
938
+ def get_infometrics(
939
+ self, name: str, aggr_name: Optional[str] = None
940
+ ) -> pd.DataFrame:
941
+ """Return the infometrics DataFrame for the given scenario and optional aggregation.
940
942
 
941
943
  Parameters
942
944
  ----------
943
945
  name : str
944
946
  The name of the scenario.
947
+ aggr_name : Optional[str], default None
948
+ The name of the aggregation, if any.
945
949
 
946
950
  Returns
947
951
  -------
948
- metrics: pd.DataFrame
949
- The metrics for the scenario.
952
+ df : pd.DataFrame
953
+ The infometrics DataFrame for the scenario (and aggregation if specified).
950
954
 
951
955
  Raises
952
956
  ------
953
957
  FileNotFoundError
954
- If the metrics file does not exist.
958
+ If the metrics file does not exist for the given scenario (and aggregation).
955
959
  """
960
+ if aggr_name is not None:
961
+ fn = f"Infometrics_{name}_{aggr_name}.csv"
962
+ else:
963
+ fn = f"Infometrics_{name}.csv"
956
964
  # Create the infographic path
957
- metrics_path = self.database.scenarios.output_path.joinpath(
958
- name,
959
- f"Infometrics_{name}.csv",
960
- )
965
+ metrics_path = self.database.scenarios.output_path.joinpath(name, fn)
961
966
 
962
967
  # Check if the file exists
963
968
  if not metrics_path.exists():
964
969
  raise FileNotFoundError(
965
- f"The metrics file for scenario {name}({str(metrics_path)}) does not exist."
970
+ f"The metrics file for scenario {name}({metrics_path.as_posix()}) does not exist."
966
971
  )
967
-
968
972
  # Read the metrics file
969
- return MetricsFileReader(str(metrics_path)).read_metrics_from_file(
973
+ df = MetricsFileReader(metrics_path.as_posix()).read_metrics_from_file(
970
974
  include_long_names=True,
971
975
  include_description=True,
972
976
  include_metrics_table_selection=True,
977
+ include_metrics_map_selection=True,
978
+ )
979
+ if aggr_name is not None:
980
+ df = df.T
981
+ return df
982
+
983
+ def get_aggr_metric_layers(
984
+ self,
985
+ name: str,
986
+ aggr_type: str,
987
+ type: Literal["single_event", "risk"] = "single_event",
988
+ rp: Optional[int] = None,
989
+ equity: bool = False,
990
+ ) -> list[dict]:
991
+ # Read infometrics from csv file
992
+ metrics_df = self.get_infometrics(name, aggr_name=aggr_type)
993
+
994
+ # Filter based on "Show in Metrics Map" column
995
+ if "Show In Metrics Map" in metrics_df.index:
996
+ mask = metrics_df.loc["Show In Metrics Map"].to_numpy().astype(bool)
997
+ metrics_df = metrics_df.loc[:, mask]
998
+
999
+ # Keep only relevant attributes of the infometrics
1000
+ keep_rows = [
1001
+ "Description",
1002
+ "Long Name",
1003
+ "Show In Metrics Table",
1004
+ "Show In Metrics Map",
1005
+ ]
1006
+ metrics_df = metrics_df.loc[
1007
+ [row for row in keep_rows if row in metrics_df.index]
1008
+ ]
1009
+
1010
+ # Transform to list of dicts
1011
+ metrics = []
1012
+ for col in metrics_df.columns:
1013
+ metric_dict = {"name": col}
1014
+ # Add the first 4 rows as key-value pairs
1015
+ for i, idx in enumerate(metrics_df.index):
1016
+ metric_dict[idx] = metrics_df.loc[idx, col]
1017
+ metrics.append(metric_dict)
1018
+
1019
+ # Get the filtered metrics layers from the GUI configuration
1020
+ filtered_metrics = self.database.site.gui.output_layers.get_aggr_metrics_layers(
1021
+ metrics, type, rp, equity
973
1022
  )
974
1023
 
1024
+ return filtered_metrics
1025
+
975
1026
  # Static
976
1027
  def load_static_data(self):
977
1028
  """Read the static data into the cache.
flood_adapt/misc/utils.py CHANGED
@@ -4,13 +4,17 @@ from contextlib import contextmanager
4
4
  from pathlib import Path
5
5
  from typing import Union
6
6
 
7
+ import geopandas as gpd
7
8
  from pydantic import BeforeValidator
8
9
 
10
+ from flood_adapt.misc.log import FloodAdaptLogging
9
11
  from flood_adapt.misc.path_builder import (
10
12
  ObjectDir,
11
13
  db_path,
12
14
  )
13
15
 
16
+ logger = FloodAdaptLogging.getLogger(__name__)
17
+
14
18
 
15
19
  @contextmanager
16
20
  def modified_environ(*remove, **update):
@@ -136,22 +140,37 @@ def save_file_to_database(
136
140
  if src_file == dst_file:
137
141
  return dst_file
138
142
  elif dst_file.exists():
139
- if dst_file.suffix == ".shp":
140
- for file in list(dst_file.parent.glob(f"{dst_file.stem}.*")):
141
- os.remove(file)
142
- else:
143
- os.remove(dst_file)
143
+ match dst_file.suffix:
144
+ case ".shp":
145
+ for file in list(dst_file.parent.glob(f"{dst_file.stem}.*")):
146
+ os.remove(file)
147
+ case _:
148
+ os.remove(dst_file)
144
149
 
145
150
  dst_file.parent.mkdir(parents=True, exist_ok=True)
146
- if src_file.suffix == ".shp":
147
- for file in list(src_file.parent.glob(f"{src_file.stem}.*")):
148
- shutil.copy2(file, dst_file.parent.joinpath(file.name))
149
- else:
150
- shutil.copy2(src_file, dst_file)
151
+ match src_file.suffix:
152
+ case ".shp" | ".geojson" | ".gpkg":
153
+ write_gdf_with_global_crs(src_file, dst_file)
154
+ case _:
155
+ shutil.copy2(src_file, dst_file)
151
156
 
152
157
  return dst_file
153
158
 
154
159
 
160
+ def write_gdf_with_global_crs(src: gpd.GeoDataFrame | Path, dst_path: Path) -> None:
161
+ if isinstance(src, gpd.GeoDataFrame):
162
+ gdf = src
163
+ else:
164
+ gdf = gpd.read_file(src)
165
+
166
+ if gdf.crs is None:
167
+ logger.warning(f"CRS is not defined for {src}. Assuming EPSG:4326.")
168
+ gdf = gdf.set_crs(epsg=4326)
169
+ else:
170
+ gdf = gdf.to_crs(epsg=4326)
171
+ gdf.to_file(dst_path)
172
+
173
+
155
174
  def copy_file_to_output_dir(file_path: Path, output_dir: Path) -> Path:
156
175
  output_dir = output_dir.resolve()
157
176
  if file_path == output_dir / file_path.name:
@@ -161,7 +161,7 @@ def plot_discharge(
161
161
  if output_loc.exists():
162
162
  output_loc.unlink()
163
163
  fig.write_html(output_loc)
164
- return str(output_loc), None
164
+ return output_loc.as_posix(), None
165
165
 
166
166
 
167
167
  def plot_waterlevel(
@@ -278,7 +278,7 @@ def plot_waterlevel(
278
278
  if output_loc.exists():
279
279
  output_loc.unlink()
280
280
  fig.write_html(output_loc)
281
- return str(output_loc), None
281
+ return output_loc.as_posix(), None
282
282
 
283
283
 
284
284
  def plot_rainfall(
@@ -351,7 +351,7 @@ def plot_rainfall(
351
351
  if output_loc.exists():
352
352
  output_loc.unlink()
353
353
  fig.write_html(output_loc)
354
- return str(output_loc), None
354
+ return output_loc.as_posix(), None
355
355
 
356
356
 
357
357
  def plot_wind(
@@ -423,4 +423,4 @@ def plot_wind(
423
423
  if output_loc.exists():
424
424
  output_loc.unlink()
425
425
  fig.write_html(output_loc)
426
- return str(output_loc), None
426
+ return output_loc.as_posix(), None
@@ -198,7 +198,9 @@ class Measure(Object):
198
198
  measure = cls.model_validate(toml)
199
199
 
200
200
  if measure.polygon_file:
201
- measure.polygon_file = str(Path(file_path).parent / measure.polygon_file)
201
+ measure.polygon_file = (
202
+ Path(file_path).parent / measure.polygon_file
203
+ ).as_posix()
202
204
 
203
205
  return measure
204
206
 
@@ -1,4 +1,5 @@
1
1
  import shutil
2
+ from pathlib import Path
2
3
  from typing import Any
3
4
 
4
5
  import geopandas as gpd
@@ -32,7 +33,7 @@ class BenefitRunner:
32
33
  self.benefit = benefit
33
34
 
34
35
  # Get output path based on database path
35
- self.results_path = self.database.benefits.output_path.joinpath(
36
+ self.results_path: Path = self.database.benefits.output_path.joinpath(
36
37
  self.benefit.name
37
38
  )
38
39
  self.site_info = self.database.site
@@ -71,7 +72,7 @@ class BenefitRunner:
71
72
  results_html = self.results_path.joinpath("benefits.html")
72
73
  with open(results_toml, mode="rb") as fp:
73
74
  results = tomli.load(fp)
74
- results["html"] = str(results_html)
75
+ results["html"] = results_html.as_posix()
75
76
  self._results = results
76
77
  return results
77
78
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.2
2
2
  Name: flood-adapt
3
- Version: 1.0.6
3
+ Version: 1.1.1
4
4
  Summary: A software package support system which can be used to assess the benefits and costs of flood resilience measures
5
5
  Author-email: Gundula Winter <Gundula.Winter@deltares.nl>, Panos Athanasiou <Panos.Athanasiou@deltares.nl>, Frederique de Groen <Frederique.deGroen@deltares.nl>, Tim de Wilde <Tim.deWilde@deltares.nl>, Julian Hofer <Julian.Hofer@deltares.nl>, Daley Adrichem <Daley.Adrichem@deltares.nl>, Luuk Blom <Luuk.Blom@deltares.nl>
6
6
  License: ====================================================
@@ -705,19 +705,21 @@ Project-URL: Source, https://github.com/Deltares-research/FloodAdapt
705
705
  Classifier: Intended Audience :: Science/Research
706
706
  Classifier: License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication
707
707
  Classifier: Topic :: Scientific/Engineering :: Hydrology
708
- Requires-Python: <3.13,>=3.8
708
+ Requires-Python: <3.13,>=3.10
709
709
  Description-Content-Type: text/markdown
710
710
  License-File: LICENSE
711
- Requires-Dist: cht-cyclones==1.0.3
712
- Requires-Dist: cht-meteo==0.3.1
713
- Requires-Dist: cht-observations==0.2.1
714
- Requires-Dist: cht-tide==0.1.1
715
- Requires-Dist: fiat-toolbox==0.1.20
711
+ Requires-Dist: cht-cyclones<2.0,>=1.0.3
712
+ Requires-Dist: cht-meteo<1.0,>=0.3.1
713
+ Requires-Dist: cht-observations<1.0,>=0.2.1
714
+ Requires-Dist: cht-tide<1.0,>=0.1.1
715
+ Requires-Dist: dask==2024.11.2
716
+ Requires-Dist: numba_celltree==0.2.2
717
+ Requires-Dist: fiat-toolbox<0.2.0,>=0.1.22
716
718
  Requires-Dist: fiona<2.0,>=1.0
717
719
  Requires-Dist: geojson<4.0,>=3.0
718
720
  Requires-Dist: geopandas<2.0,>=1.0
719
- Requires-Dist: hydromt-fiat<1.0,>=0.5.5
720
- Requires-Dist: hydromt-sfincs[quadtree]<2.0,>=1.2.2
721
+ Requires-Dist: hydromt-fiat<1.0,>=0.5.9
722
+ Requires-Dist: hydromt-sfincs<2.0,>=1.2.0
721
723
  Requires-Dist: numpy<2.0,>=1.0
722
724
  Requires-Dist: numpy-financial<2.0,>=1.0
723
725
  Requires-Dist: pandas<3.0,>=2.0
@@ -753,8 +755,6 @@ Requires-Dist: python-dotenv<2.0,>=1.0; extra == "docs"
753
755
  Requires-Dist: folium<1.0,>=0.19.0; extra == "docs"
754
756
  Requires-Dist: mapclassify<3.0,>=2.8.0; extra == "docs"
755
757
  Requires-Dist: contextily; extra == "docs"
756
- Provides-Extra: all
757
- Requires-Dist: flood-adapt[build,dev,docs]; extra == "all"
758
758
 
759
759
  # FloodAdapt
760
760
  FloodAdapt is a decision-support tool that seeks to advance and accelerate flooding-related adaptation planning. It brings rapid, physics-based compound flood and detailed impact modelling into an easy-to-use system, allowing non-expert end-users to evaluate a wide variety of compound events, future conditions, and adaptation options in minutes. FloodAdapt serves as a connector between scientific advances and practitioner needs, improving and increasing the uptake and impact of adaptation research and development.
@@ -772,117 +772,6 @@ Recent developments of the decision-support system include (1) simplifying and p
772
772
 
773
773
  FloodAdapt is currently in an intensive development stage. Independent usage of the repository will be challenging prior to end-of-year 2024. FloodAdapt documentation will be expanded on throughout 2024.
774
774
 
775
- # Installation
775
+ # Getting Started
776
776
 
777
- ## Setting up Pixi
778
- To build the environment, manage dependencies and run tasks, FloodAdapt uses [Pixi](https://pixi.sh/latest/), please download and install it by following the instructions on the pixi website.
779
-
780
- Before continuing the installation process, make sure you have access to all required private repositories by ensuring you are in the Teams `FloodAdaptUsers` in the [Deltares](https://github.com/orgs/Deltares/teams/floodadaptusers) and [Deltares-research](https://github.com/orgs/Deltares-research/teams/floodadaptusers) organizations.
781
-
782
- ## Windows
783
- Then run these commands to install FloodAdapt:
784
- ```bash
785
- git clone https://github.com/Deltares-research/FloodAdapt.git
786
- cd FloodAdapt
787
- pixi install
788
- ```
789
- ## Linux
790
- Linux is not supported at the moment, but will be supported in the near future.
791
-
792
- ## Configure database
793
-
794
- #### TODO add section for the DatabaseBuilder.
795
-
796
- FloodAdapt uses a database to store, handle and organize input files, output files and static data. This database needs to be configured the first time you want to use FloodAdapt. Which is done via `flood_adapt/misc/config.py` which contains the `Settings` class to set and validate environment variables, specific to your system.
797
-
798
- To initialize FloodAdapt and configure the database, add the following lines to the top of your script / initialize function to validate and set the environment variables:
799
- ```python
800
- from pathlib import Path
801
- from flood_adapt import Settings
802
-
803
- # Usually ends in `Database` and can contain multiple sites
804
- root = Path("path/to/your/database/root")
805
-
806
- # Specifies which site to use
807
- name = "database_name"
808
-
809
- # Define the paths to the model kernel binaries
810
- sfincs_bin = Path("path/to/your/sfincs/bin.exe")
811
- fiat_bin = Path("path/to/your/fiat/bin.exe")
812
-
813
- # Validate and set environment variables
814
- Settings(
815
- DATABASE_ROOT=root,
816
- DATABASE_NAME=name,
817
- SFINCS_BIN_PATH=sfincs_bin,
818
- FIAT_BIN_PATH=fiat_path,
819
- VALIDATE_BINARIES=True,
820
- )
821
- ```
822
-
823
- ## Developing FloodAdapt
824
-
825
- To contribute to FloodAdapt, you will need to install additional dependencies. To do so, clone the repository and install the development environment:
826
-
827
- ```bash
828
- # Install dev environment
829
- git clone https://github.com/Deltares/FloodAdapt
830
- cd FloodAdapt
831
-
832
- # This will install the required environment and run the tests to verify
833
- pixi run tests
834
- ```
835
-
836
- Alternatively, you can open an interactive shell and have pixi take care of activating and updating your environment.
837
- ```bash
838
- # `activate` the dev environment
839
- pixi shell -e dev
840
-
841
- # Develop
842
- pytest tests/test_x/test_y/test_z.py
843
- python scripts/my_script.py
844
- ...
845
- ```
846
-
847
- ### Adding editable installations to your environment
848
-
849
- To make developing easier and not have to reinstall packages after every change, editable installs exist.
850
- Pixi supports editable installs, but not in the most intuitive way, as they need to be defined as editable in the project specification.
851
-
852
- Example command to add the package `example_package` as an editable install to the default environment:
853
- - go to the non-pixi sections in `pyproject.toml` and comment out the `example_package`. (`[dependencies]` or `[optional-dependencies]`)
854
- - in the pixi section `[tool.pixi.pypi-dependencies]`: add the following line `example_package = {path = "./path/to/example_package", editable = true }`. Note that this path is relative to the root of this project.
855
- - run `pixi update`
856
-
857
- ## Useful pixi commands
858
- ```bash
859
- # Display all pixi commands and options
860
- pixi -h
861
-
862
- # Install a non default pixi environment defined in pyproject.toml
863
- pixi install -e [ENV_NAME]
864
-
865
- # Update environment(s) to the latest allowed by dependency specifications in pyproject.toml
866
- pixi update
867
-
868
- # List all available tasks
869
- pixi task list
870
-
871
- # Run a task in the default environment for that task
872
- pixi run [TASK]
873
-
874
- # Start a shell in the pixi environment
875
- pixi shell -e [ENV_NAME]
876
-
877
- # Add a package to the dependencies
878
- pixi add [PACKAGE]
879
-
880
- # Run a task in a specific environment
881
- pixi run -e [ENV_NAME] [TASK]
882
- ```
883
-
884
- ### Generating the documentation
885
-
886
- We use `quartodoc` to generate our API documentation automatically. If you have the `docs` optional dependency group installed.
887
- you can do this by running `quartodoc build` from the `docs` directory, and it will create the documentation for you.
888
- After this is done, if you wish, you can build and view the documentation locally by running `quarto preview` from the `docs` directory
777
+ Please review our [`developer guide`](DEVELOPER_GUIDE.md) for information on how to install and use FloodAdapt locally.
@@ -1,8 +1,8 @@
1
- flood_adapt/__init__.py,sha256=th2A3Ub_atS7QHg1y2SxGCiECW7BKlWrXZX7fuHd4M8,781
2
- flood_adapt/flood_adapt.py,sha256=g-OpS0spvi-puwQFhvsfu_vvduozTPXk9DU2ay9ypgs,38698
1
+ flood_adapt/__init__.py,sha256=qFXo1YJVbLyqFpONc_TfAdUhZw9fgTacsn1XbFq1LxM,779
2
+ flood_adapt/flood_adapt.py,sha256=HVFS4OFhcB0TqHtMw3kbEei0IfJxsciauHfG3XZ38-0,40747
3
3
  flood_adapt/adapter/__init__.py,sha256=vnF8NCkEVX-N-gtGS-J_A1H1YYAjihWjJZFyYGwcp8Q,180
4
- flood_adapt/adapter/fiat_adapter.py,sha256=s6M-_bwr4NUWLZvlc619BUmnA_z-DNMggVcnlG-jrqM,60075
5
- flood_adapt/adapter/sfincs_adapter.py,sha256=QIfD_7UnciN4MdminT5BRDEKCyubJBGf8RsSOd2hs9s,78864
4
+ flood_adapt/adapter/fiat_adapter.py,sha256=seDjPoumkhUOd7qer3ni1_Ut3dwyq0-_yhJNaTEFc2E,60284
5
+ flood_adapt/adapter/sfincs_adapter.py,sha256=SSZ5hlnHpXLmDQqbyBMfybdAQIOpYOqIsSzoKk4XLZg,78825
6
6
  flood_adapt/adapter/sfincs_offshore.py,sha256=DkqGwx0Fx4dojY1YH8tW3MUS4Omgd5DC6QINEsTP0Uk,7659
7
7
  flood_adapt/adapter/interface/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
8
8
  flood_adapt/adapter/interface/hazard_adapter.py,sha256=S2NIUAMSRgxC_E-tZRJ2qIP06U1zEVdn-MnvMTrn86s,2828
@@ -11,14 +11,15 @@ flood_adapt/adapter/interface/model_adapter.py,sha256=8OMjxw-3gv6eX60bv_44AaU_lE
11
11
  flood_adapt/adapter/interface/offshore.py,sha256=G3ZpCWw6SjpyhOJIh0Acc5jnoUS8SO_zWLmfMED7zho,481
12
12
  flood_adapt/config/__init__.py,sha256=wfcPGORAGBXWUi8G7E5tLWvBS9A2ajJ9atHfxatExj4,1933
13
13
  flood_adapt/config/config.py,sha256=-aMmeBLE03O0Az-S9ms6Rz5hX8_5Kn2vr7IjjMI9w2o,9374
14
- flood_adapt/config/fiat.py,sha256=ZKrzNmzh8iFfm31Z1HV6YuU7QXWr1nFP1RHMhgeZ-HA,3427
15
- flood_adapt/config/gui.py,sha256=NRsSehmMk_8sCh9pxeCWAcwNcTo0qWTEZtkB1wZNN_g,10527
16
- flood_adapt/config/hazard.py,sha256=vhFvubkFdl4oTnhL7oeNHA7I_h3WCl9QzAzBByt5qrU,12210
14
+ flood_adapt/config/fiat.py,sha256=svx3zRp8ZcHifTl0hLbl2JCeYuqckxQFJjDd-C0oqok,3417
15
+ flood_adapt/config/gui.py,sha256=9VFfzQOjRs4E6sY0bweBUpPGgHfKc7Az7yBlNetotcg,16784
16
+ flood_adapt/config/hazard.py,sha256=Ev6mj78cZ_vQuJ11KYjhJOzmfRB6fz267OJeKI0bYaM,12216
17
17
  flood_adapt/config/impacts.py,sha256=O7vE7jB3GSXnkqAvv7TqJiJ_j1uJ3mck_KQ-ScsB3bo,3192
18
18
  flood_adapt/config/sfincs.py,sha256=y8C3PzFwwgMB_sb8rBzgteaQ8fCxep6DnZxuk0q__bc,4825
19
19
  flood_adapt/config/site.py,sha256=VR90jCHWcxgoQJptNyXy7LseGjXUDRtdOjNGCddFVzI,4328
20
20
  flood_adapt/database_builder/__init__.py,sha256=YsI5bGcAKYmsmb5W-spp91hzsKSTRtkXBLNRxLOWml4,474
21
- flood_adapt/database_builder/database_builder.py,sha256=yJH1OIrPIJiT18Ab97fSZu-30uVT5u0oQYidx-gSGsc,99653
21
+ flood_adapt/database_builder/database_builder.py,sha256=WHCsRGZq2owNyuLMpk2wEnGR2QrELAVDqJfE8xG12JE,100855
22
+ flood_adapt/database_builder/metrics_utils.py,sha256=VCLhEIViwlmGxh9LBoEGMsRcSEvktiKvh8IolUjHPfI,66459
22
23
  flood_adapt/database_builder/templates/default_units/imperial.toml,sha256=zIjPlxIa2kWLUjSYisd8UolXGo5iKdFoDDz_JkKBXTM,295
23
24
  flood_adapt/database_builder/templates/default_units/metric.toml,sha256=tc0XMKs7xGL9noB9lAb0gyQfjYxzokgHa3NqpccxWl0,302
24
25
  flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv,sha256=ooQzNGQwAMSAphy5W2ZyR5boQlcwvPv9ToJx1MlZhVE,466
@@ -40,16 +41,7 @@ flood_adapt/database_builder/templates/icons/white_down_48x48.png,sha256=ZxXz4WK
40
41
  flood_adapt/database_builder/templates/icons/white_left_48x48.png,sha256=mLjrGhsMo79EyaeEK9Ykmym-knv2LHk92JTMWaAfaw0,738
41
42
  flood_adapt/database_builder/templates/icons/white_right_48x48.png,sha256=dxYJT-zTp8KXLcjRkd4toZjkPrFCjDwBbSmqPspw9Qk,753
42
43
  flood_adapt/database_builder/templates/icons/white_up_48x48.png,sha256=eahYlRiknCFlIfA26BJQl__o74cqwuKTRRZ35SgCXcY,733
43
- flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml,sha256=bHvgceR1Gb05jIv7Rk2NFNWowtFn4ztVTgF4z_OOYOk,2608
44
- flood_adapt/database_builder/templates/infographics/OSM/config_people.toml,sha256=k-0q5reU-0LA0QMlT5Mat8mldH9t37fMSt2pUZ0P0do,1754
45
- flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml,sha256=kwBVwsSY-D3ADuJdLxFQlSLuhxguxdNXk-dn-Pt_ud8,3804
46
- flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml,sha256=VFlCpP2KzGlfBPoblNdL6YlJEmccUvIlUgLaOtwxwrc,1874
47
- flood_adapt/database_builder/templates/infographics/OSM/styles.css,sha256=yV8U2Z1DwuyfaAL13iQHjEJcPiDHXCUzGt2REXlSKeg,762
48
- flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml,sha256=7mRXeh7-pvfsHp-gFdubQXR33nqaSyzKuJVc1WILdJU,3762
49
- flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml,sha256=9VJ0rzIj4YZIYWATQZYmbO6KLBB-C9i7skBOvE0M3oA,1892
50
- flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml,sha256=kwBVwsSY-D3ADuJdLxFQlSLuhxguxdNXk-dn-Pt_ud8,3804
51
- flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml,sha256=T1QqW95D0MTeWMLDV85WdodXrIf0T7uIRZA9FU0l_is,1873
52
- flood_adapt/database_builder/templates/infographics/US_NSI/styles.css,sha256=yV8U2Z1DwuyfaAL13iQHjEJcPiDHXCUzGt2REXlSKeg,762
44
+ flood_adapt/database_builder/templates/infographics/styles.css,sha256=yV8U2Z1DwuyfaAL13iQHjEJcPiDHXCUzGt2REXlSKeg,762
53
45
  flood_adapt/database_builder/templates/infographics/images/ambulance.png,sha256=dw5vcU_kRePrBA3Zl4_aQFz1Rc93x26y8PelY2MJ_Lo,7757
54
46
  flood_adapt/database_builder/templates/infographics/images/car.png,sha256=PFED6xR31G2P15ZGM14CyJeca7zfUlwQe2348bpPCxM,6600
55
47
  flood_adapt/database_builder/templates/infographics/images/cart.png,sha256=hLknRSLm7YCSw5kmDP_JysAN6cLiNCVKkqhfsaoT_8c,27453
@@ -62,27 +54,15 @@ flood_adapt/database_builder/templates/infographics/images/person.png,sha256=ipz
62
54
  flood_adapt/database_builder/templates/infographics/images/school.png,sha256=7N6EkuNxHKyFuBVSRDQVN3wZkX7O-eXrcvjJeyOb8w8,7554
63
55
  flood_adapt/database_builder/templates/infographics/images/truck.png,sha256=0IhvYYvmfAPvFiA5ipwTz3VnvlFe5vyfNrejPinrlOo,9492
64
56
  flood_adapt/database_builder/templates/infographics/images/walking_person.png,sha256=vaxO4oGejK5Q4KyGXuew2YgpGPabpnwLyxTFH1WMbmo,11080
65
- flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml,sha256=3iR6iKvpJuM_Ol2BQpdEU_2Y9niETYDRv8jql_wze3Q,211
66
- flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml,sha256=7bczTZqoMgXot_OTQmabUbUFGFBmKkvaWSB0tsHvodk,1759
67
- flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml,sha256=_UTrZOqdM8ewvkHaKWfhgi7-xbTtajQTBubN7NO0Bo8,80
68
- flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml,sha256=hqdh1XcgNiGWBenCbNom-5G_xvVxM3JVqi2iT9M3tBI,5457
69
- flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml,sha256=lKBrte-wP6fpeEGRkn8g1lE6CjP1EuR7wNI_wGuuYxo,6436
70
- flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml,sha256=060PJozh88C-j-TxqMsoRb7TFL1yJ7f0iLShvPPwBYc,4626
71
- flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml,sha256=mKOoD1o7K3vqak_0vBWm83fsuu0SMKZNm8vv2b_5PW0,2096
72
- flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml,sha256=-J8wqk43scvvRQ804tqv3mOrbI8CJ3RsaRLVhzgrVEs,79
73
- flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml,sha256=CjTyfC3wwG6krpHM8LMPhIM0c7u3Ljet8FHglNSjkPw,7436
74
- flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml,sha256=5hZnHWUX_UnA8TAHh_MAnFV8ra-m-8DWyAFkwyT8iHQ,6301
75
- flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml,sha256=aPCMJ2BlpXjSHZA9x80JhhvCkbDDqZ-P7gwOYnxq34A,6631
76
- flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml,sha256=VLO7NRWTPzYRj3jYDb_mImPTppSm8Lh1ajCdNPuyc4U,2045
77
57
  flood_adapt/database_builder/templates/output_layers/bin_colors.toml,sha256=yN3_h2IimOyjtfhZ-ZoWyNa-2cAeFRNlbvaNTLhEMfA,417
78
58
  flood_adapt/dbs_classes/__init__.py,sha256=J-a6BEkjDhuUzzRKuAn_AtTg_D9wNIsmY3BnVTiC2JA,731
79
- flood_adapt/dbs_classes/database.py,sha256=oR0aC2HRsGGe0pwFrPtPAzXxfv_lqtTdjY9UrS9wwtU,23409
59
+ flood_adapt/dbs_classes/database.py,sha256=6zYAhVJVL3bNjxEzObVn4jOnuSQJYmsEuWN85fOeDT4,23425
80
60
  flood_adapt/dbs_classes/dbs_benefit.py,sha256=ayEYz8ga49HLdYuUsDWZOuZnpRnBpTuyhvfe2IyWAKI,1825
81
61
  flood_adapt/dbs_classes/dbs_event.py,sha256=ak3kHan6L1EfC8agDLKiCe8gaY5leOmj_qUBsI61q9A,1869
82
62
  flood_adapt/dbs_classes/dbs_measure.py,sha256=vVs-LtnHJN7eSGIFUglJdpbtfq_QI_Ftkv4lh5mfnNM,4085
83
63
  flood_adapt/dbs_classes/dbs_projection.py,sha256=lyiU_ctP2ixK28RKnBN6mVJbOuaDsWCj1y6-MHlyi_k,1078
84
64
  flood_adapt/dbs_classes/dbs_scenario.py,sha256=LHWx3Dr1XR47bPyPRkR70h3VcT0f0MVgB-R8V_G_O04,3993
85
- flood_adapt/dbs_classes/dbs_static.py,sha256=KPKjz7xyzjn6yqVvfEHlzW7Wnqi_DmSkFhC9l4hEAr4,10557
65
+ flood_adapt/dbs_classes/dbs_static.py,sha256=Yzs-bsfAq2jkZ_-0_ojuzNf81Wifaxw8a1APNNS0mqM,10565
86
66
  flood_adapt/dbs_classes/dbs_strategy.py,sha256=qiEObHZeYL93GmdjSiGQls1ZmxdMZPkRkwzHgmoYwyE,4856
87
67
  flood_adapt/dbs_classes/dbs_template.py,sha256=b2x2sWNYTnaWU8Plgp51PFPrZGEv2kRRn9JBAgYhLbI,11578
88
68
  flood_adapt/dbs_classes/interface/database.py,sha256=kPHsmreB-vHaFD_FRXbP06tVs7pGxt4Rucim6aEqiKg,2550
@@ -94,7 +74,7 @@ flood_adapt/misc/debug_timer.py,sha256=TUXsJSX62P66AQrf7rqpmd95EZECJVWGT7pHpuxNG
94
74
  flood_adapt/misc/exceptions.py,sha256=66lD9OlAfGadhbMN2t2C03KofzU_zOoVHnuEeDtSc0w,1827
95
75
  flood_adapt/misc/log.py,sha256=aK5uJch8p3a4Js4f79tO5AM9yZvNlGPjHYPsY5EuWbc,6898
96
76
  flood_adapt/misc/path_builder.py,sha256=sLhvk3tq-QzI3fFjdzckpBYYZeuGyHBbuI0R98Tqud8,1396
97
- flood_adapt/misc/utils.py,sha256=-wOWgbDqswwGVYJARKfq-3L7DIs4OIHfrze5g3-qapU,5588
77
+ flood_adapt/misc/utils.py,sha256=geIPsi448gvS8U-W3szPkQj2G2J3ASoYiOOnO20B4ko,6134
98
78
  flood_adapt/objects/__init__.py,sha256=PlLA7iblvM4Efy7KUOZn0S1ThWcRA8L6T3CSrmr8Kvg,2134
99
79
  flood_adapt/objects/object_model.py,sha256=mNBAdsOhqGGc2DSOU2W_TfqBKdEAKFPksSJaNcLpznI,2422
100
80
  flood_adapt/objects/benefits/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -113,7 +93,7 @@ flood_adapt/objects/forcing/forcing.py,sha256=MonjhiHP6ge_5UFDMcLmj0pt5jK76Eznb4
113
93
  flood_adapt/objects/forcing/forcing_factory.py,sha256=lpugKlMt0MN8BxyykottMOuQd_fp1MHrkuuqbHWWdRY,5965
114
94
  flood_adapt/objects/forcing/meteo_handler.py,sha256=rTxY5WNobK_Ifzj2eVcoSPGgb3TzuZljSv_dLn5FLo8,3016
115
95
  flood_adapt/objects/forcing/netcdf.py,sha256=ZBzFtN5joVs36lVjvYErVaHEylUQ6eKIhR0uk_MD-zM,1388
116
- flood_adapt/objects/forcing/plotting.py,sha256=z612jv_9aRyK6cUm181vXFhihWz5XX_c6UDQg8rU02o,14804
96
+ flood_adapt/objects/forcing/plotting.py,sha256=Y7f_9bY8d9jbd7BqEAeRmof-aaJhlznM3_wGBOI7g-s,14828
117
97
  flood_adapt/objects/forcing/rainfall.py,sha256=e6P3IMzItvnsmXbcMXl1oV-d9LDuh3jTIc_vt6Kz5zo,3282
118
98
  flood_adapt/objects/forcing/tide_gauge.py,sha256=jGIh6jQlhecGkPfBaZ8NKbr7FlpmLZAwmlqgp8lEWu0,7143
119
99
  flood_adapt/objects/forcing/time_frame.py,sha256=1X3G0Ax18BHRvAomf-CW_ISRk_3qgAakwgZCIBxIkL4,2855
@@ -123,7 +103,7 @@ flood_adapt/objects/forcing/waterlevels.py,sha256=8lCmUdeyABurJwftae4_Iut9hCn24x
123
103
  flood_adapt/objects/forcing/wind.py,sha256=xs_xZdUoZUDP1y1xITlNVJwiyDt6wQsFbPFhVRDjSqg,3925
124
104
  flood_adapt/objects/measures/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
125
105
  flood_adapt/objects/measures/measure_factory.py,sha256=tPT4fSzT5buiLf3gmxtap1vrfH2HusQOWTluucFFwJ0,2728
126
- flood_adapt/objects/measures/measures.py,sha256=WCVwMJNvvHNgYjALwrIMKPUChe4qPF1FyI4eG6jUQnc,19717
106
+ flood_adapt/objects/measures/measures.py,sha256=fJ7f6zcViQJIMlyKQlnL8hOwWcNZmsuheh1LeRjDB7U,19757
127
107
  flood_adapt/objects/output/floodmap.py,sha256=_JmPFIjORSwyAKICgUtEgTn0wswWdGY-sEFy3KqNo_k,281
128
108
  flood_adapt/objects/projections/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
129
109
  flood_adapt/objects/projections/projections.py,sha256=GCbT2nAGEkiBicVrfuYsw1UYIj9BLj9v0pGnhBI7DZk,4052
@@ -132,10 +112,10 @@ flood_adapt/objects/scenarios/scenarios.py,sha256=2_VSO9rl5bdy2PT-Po5ngnEp9a0gmH
132
112
  flood_adapt/objects/strategies/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
133
113
  flood_adapt/objects/strategies/strategies.py,sha256=Jw-WJDCamL9p_7VEir3AdmYPMVAiCVRU9n_whG6WcgE,2981
134
114
  flood_adapt/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
135
- flood_adapt/workflows/benefit_runner.py,sha256=VtYt0sHFymNyErpzOtuN55cKJGVm5hT2a_Qzprg6T88,21786
115
+ flood_adapt/workflows/benefit_runner.py,sha256=eA21TuHdeZ6QYO8ehXri6BHlkyHsVsZphIdIca5g0KA,21824
136
116
  flood_adapt/workflows/scenario_runner.py,sha256=9_Y6GmMYhYoTRkBUIlju0eBy6DosGf4Zl2tgu1QEubI,4119
137
- flood_adapt-1.0.6.dist-info/LICENSE,sha256=Ui5E03pQ0EVKxvKA54lTPA1xrtgA2HMGLQai95eOzoE,36321
138
- flood_adapt-1.0.6.dist-info/METADATA,sha256=FvbH5BpNMV_cXv23oWzPC2APQvSzPmGBwQKZjyBIFGY,53209
139
- flood_adapt-1.0.6.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
140
- flood_adapt-1.0.6.dist-info/top_level.txt,sha256=JvzMi6cTcQPEThCfpgMEeVny3ghI1urSH0CCgVIqSzw,12
141
- flood_adapt-1.0.6.dist-info/RECORD,,
117
+ flood_adapt-1.1.1.dist-info/LICENSE,sha256=Ui5E03pQ0EVKxvKA54lTPA1xrtgA2HMGLQai95eOzoE,36321
118
+ flood_adapt-1.1.1.dist-info/METADATA,sha256=ZCiBsiXoCBBXNEMSsEhJbI5wZiVyW6aVH6EVZBaJY8k,48876
119
+ flood_adapt-1.1.1.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
120
+ flood_adapt-1.1.1.dist-info/top_level.txt,sha256=JvzMi6cTcQPEThCfpgMEeVny3ghI1urSH0CCgVIqSzw,12
121
+ flood_adapt-1.1.1.dist-info/RECORD,,