flood-adapt 0.3.8__py3-none-any.whl → 0.3.10__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. flood_adapt/__init__.py +26 -22
  2. flood_adapt/adapter/__init__.py +9 -9
  3. flood_adapt/adapter/fiat_adapter.py +1541 -1536
  4. flood_adapt/adapter/interface/hazard_adapter.py +70 -70
  5. flood_adapt/adapter/interface/impact_adapter.py +36 -36
  6. flood_adapt/adapter/interface/model_adapter.py +89 -89
  7. flood_adapt/adapter/interface/offshore.py +19 -19
  8. flood_adapt/adapter/sfincs_adapter.py +1848 -1846
  9. flood_adapt/adapter/sfincs_offshore.py +193 -193
  10. flood_adapt/config/config.py +248 -290
  11. flood_adapt/config/fiat.py +219 -219
  12. flood_adapt/config/gui.py +331 -331
  13. flood_adapt/config/sfincs.py +481 -336
  14. flood_adapt/config/site.py +129 -129
  15. flood_adapt/database_builder/database_builder.py +2210 -2210
  16. flood_adapt/database_builder/templates/default_units/imperial.toml +9 -9
  17. flood_adapt/database_builder/templates/default_units/metric.toml +9 -9
  18. flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -10
  19. flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -90
  20. flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -57
  21. flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -121
  22. flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -65
  23. flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -45
  24. flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -126
  25. flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -60
  26. flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -121
  27. flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -65
  28. flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -45
  29. flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -4
  30. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -143
  31. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -153
  32. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -127
  33. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -57
  34. flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -4
  35. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -191
  36. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -153
  37. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -178
  38. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -57
  39. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -9
  40. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -65
  41. flood_adapt/database_builder/templates/output_layers/bin_colors.toml +5 -5
  42. flood_adapt/database_builder.py +16 -16
  43. flood_adapt/dbs_classes/__init__.py +21 -21
  44. flood_adapt/dbs_classes/database.py +495 -688
  45. flood_adapt/dbs_classes/dbs_benefit.py +77 -76
  46. flood_adapt/dbs_classes/dbs_event.py +61 -59
  47. flood_adapt/dbs_classes/dbs_measure.py +112 -111
  48. flood_adapt/dbs_classes/dbs_projection.py +34 -34
  49. flood_adapt/dbs_classes/dbs_scenario.py +137 -137
  50. flood_adapt/dbs_classes/dbs_static.py +274 -273
  51. flood_adapt/dbs_classes/dbs_strategy.py +130 -129
  52. flood_adapt/dbs_classes/dbs_template.py +279 -278
  53. flood_adapt/dbs_classes/interface/database.py +107 -139
  54. flood_adapt/dbs_classes/interface/element.py +121 -121
  55. flood_adapt/dbs_classes/interface/static.py +47 -47
  56. flood_adapt/flood_adapt.py +1207 -1178
  57. flood_adapt/misc/database_user.py +16 -16
  58. flood_adapt/misc/exceptions.py +22 -0
  59. flood_adapt/misc/log.py +183 -183
  60. flood_adapt/misc/path_builder.py +54 -54
  61. flood_adapt/misc/utils.py +185 -185
  62. flood_adapt/objects/__init__.py +82 -82
  63. flood_adapt/objects/benefits/benefits.py +61 -61
  64. flood_adapt/objects/events/event_factory.py +135 -135
  65. flood_adapt/objects/events/event_set.py +88 -84
  66. flood_adapt/objects/events/events.py +234 -234
  67. flood_adapt/objects/events/historical.py +58 -58
  68. flood_adapt/objects/events/hurricane.py +68 -67
  69. flood_adapt/objects/events/synthetic.py +46 -50
  70. flood_adapt/objects/forcing/__init__.py +92 -92
  71. flood_adapt/objects/forcing/csv.py +68 -68
  72. flood_adapt/objects/forcing/discharge.py +66 -66
  73. flood_adapt/objects/forcing/forcing.py +150 -150
  74. flood_adapt/objects/forcing/forcing_factory.py +182 -182
  75. flood_adapt/objects/forcing/meteo_handler.py +93 -93
  76. flood_adapt/objects/forcing/netcdf.py +40 -40
  77. flood_adapt/objects/forcing/plotting.py +453 -429
  78. flood_adapt/objects/forcing/rainfall.py +98 -98
  79. flood_adapt/objects/forcing/tide_gauge.py +191 -191
  80. flood_adapt/objects/forcing/time_frame.py +90 -90
  81. flood_adapt/objects/forcing/timeseries.py +564 -564
  82. flood_adapt/objects/forcing/unit_system.py +580 -580
  83. flood_adapt/objects/forcing/waterlevels.py +108 -108
  84. flood_adapt/objects/forcing/wind.py +124 -124
  85. flood_adapt/objects/measures/measure_factory.py +92 -92
  86. flood_adapt/objects/measures/measures.py +529 -529
  87. flood_adapt/objects/object_model.py +74 -68
  88. flood_adapt/objects/projections/projections.py +103 -89
  89. flood_adapt/objects/scenarios/scenarios.py +22 -22
  90. flood_adapt/objects/strategies/strategies.py +89 -89
  91. flood_adapt/workflows/benefit_runner.py +579 -544
  92. flood_adapt/workflows/floodmap.py +85 -85
  93. flood_adapt/workflows/impacts_integrator.py +85 -82
  94. flood_adapt/workflows/scenario_runner.py +70 -70
  95. {flood_adapt-0.3.8.dist-info → flood_adapt-0.3.10.dist-info}/LICENSE +674 -674
  96. {flood_adapt-0.3.8.dist-info → flood_adapt-0.3.10.dist-info}/METADATA +866 -860
  97. flood_adapt-0.3.10.dist-info/RECORD +140 -0
  98. flood_adapt-0.3.8.dist-info/RECORD +0 -139
  99. {flood_adapt-0.3.8.dist-info → flood_adapt-0.3.10.dist-info}/WHEEL +0 -0
  100. {flood_adapt-0.3.8.dist-info → flood_adapt-0.3.10.dist-info}/top_level.txt +0 -0
@@ -1,98 +1,98 @@
1
- import os
2
- from pathlib import Path
3
- from typing import Annotated
4
-
5
- import pandas as pd
6
- import xarray as xr
7
-
8
- from flood_adapt.misc.utils import (
9
- copy_file_to_output_dir,
10
- validate_file_extension,
11
- )
12
- from flood_adapt.objects.forcing import unit_system as us
13
- from flood_adapt.objects.forcing.forcing import (
14
- ForcingSource,
15
- IRainfall,
16
- )
17
- from flood_adapt.objects.forcing.netcdf import validate_netcdf_forcing
18
- from flood_adapt.objects.forcing.time_frame import TimeFrame
19
- from flood_adapt.objects.forcing.timeseries import (
20
- CSVTimeseries,
21
- SyntheticTimeseries,
22
- TimeseriesFactory,
23
- )
24
-
25
-
26
- class RainfallConstant(IRainfall):
27
- source: ForcingSource = ForcingSource.CONSTANT
28
-
29
- intensity: us.UnitfulIntensity
30
-
31
- def to_dataframe(self, time_frame: TimeFrame) -> pd.DataFrame:
32
- time = pd.date_range(
33
- start=time_frame.start_time,
34
- end=time_frame.end_time,
35
- freq=time_frame.time_step,
36
- name="time",
37
- )
38
- values = [self.intensity.value for _ in range(len(time))]
39
- return pd.DataFrame(data=values, index=time)
40
-
41
-
42
- class RainfallSynthetic(IRainfall):
43
- source: ForcingSource = ForcingSource.SYNTHETIC
44
- timeseries: SyntheticTimeseries
45
-
46
- def to_dataframe(self, time_frame: TimeFrame) -> pd.DataFrame:
47
- return TimeseriesFactory.from_object(self.timeseries).to_dataframe(
48
- time_frame=time_frame
49
- )
50
-
51
-
52
- class RainfallMeteo(IRainfall):
53
- source: ForcingSource = ForcingSource.METEO
54
- precip_units: us.UnitTypesIntensity = us.UnitTypesIntensity.mm_hr
55
- wind_units: us.UnitTypesVelocity = us.UnitTypesVelocity.mps
56
-
57
-
58
- class RainfallTrack(IRainfall):
59
- source: ForcingSource = ForcingSource.TRACK
60
-
61
- path: Annotated[Path, validate_file_extension([".cyc", ".spw"])]
62
-
63
- def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
64
- if self.path:
65
- self.path = copy_file_to_output_dir(self.path, Path(output_dir))
66
-
67
-
68
- class RainfallCSV(IRainfall):
69
- source: ForcingSource = ForcingSource.CSV
70
-
71
- path: Annotated[Path, validate_file_extension([".csv"])]
72
-
73
- units: us.UnitTypesIntensity = us.UnitTypesIntensity.mm_hr
74
-
75
- def to_dataframe(self, time_frame: TimeFrame) -> pd.DataFrame:
76
- return CSVTimeseries.load_file(
77
- path=self.path, units=us.UnitfulIntensity(value=0, units=self.units)
78
- ).to_dataframe(time_frame=time_frame)
79
-
80
- def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
81
- self.path = copy_file_to_output_dir(self.path, Path(output_dir))
82
-
83
-
84
- class RainfallNetCDF(IRainfall):
85
- source: ForcingSource = ForcingSource.NETCDF
86
- units: us.UnitTypesIntensity = us.UnitTypesIntensity.mm_hr
87
-
88
- path: Annotated[Path, validate_file_extension([".nc"])]
89
-
90
- def read(self) -> xr.Dataset:
91
- required_vars = ("precip",)
92
- required_coords = ("time", "lat", "lon")
93
- with xr.open_dataset(self.path) as ds:
94
- validated_ds = validate_netcdf_forcing(ds, required_vars, required_coords)
95
- return validated_ds
96
-
97
- def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
98
- self.path = copy_file_to_output_dir(self.path, Path(output_dir))
1
+ import os
2
+ from pathlib import Path
3
+ from typing import Annotated
4
+
5
+ import pandas as pd
6
+ import xarray as xr
7
+
8
+ from flood_adapt.misc.utils import (
9
+ copy_file_to_output_dir,
10
+ validate_file_extension,
11
+ )
12
+ from flood_adapt.objects.forcing import unit_system as us
13
+ from flood_adapt.objects.forcing.forcing import (
14
+ ForcingSource,
15
+ IRainfall,
16
+ )
17
+ from flood_adapt.objects.forcing.netcdf import validate_netcdf_forcing
18
+ from flood_adapt.objects.forcing.time_frame import TimeFrame
19
+ from flood_adapt.objects.forcing.timeseries import (
20
+ CSVTimeseries,
21
+ SyntheticTimeseries,
22
+ TimeseriesFactory,
23
+ )
24
+
25
+
26
class RainfallConstant(IRainfall):
    """Rainfall forcing with a single, time-invariant intensity.

    Attributes
    ----------
    source : ForcingSource
        Always ``ForcingSource.CONSTANT``.
    intensity : us.UnitfulIntensity
        The constant rainfall intensity applied at every timestep.
    """

    source: ForcingSource = ForcingSource.CONSTANT

    intensity: us.UnitfulIntensity

    def to_dataframe(self, time_frame: TimeFrame) -> pd.DataFrame:
        """Return a dataframe with the constant intensity repeated over *time_frame*."""
        index = pd.date_range(
            start=time_frame.start_time,
            end=time_frame.end_time,
            freq=time_frame.time_step,
            name="time",
        )
        # The same constant value is emitted for every timestep in the frame.
        return pd.DataFrame(data=[self.intensity.value] * len(index), index=index)
40
+
41
+
42
class RainfallSynthetic(IRainfall):
    """Rainfall forcing described by a synthetic timeseries definition.

    Attributes
    ----------
    source : ForcingSource
        Always ``ForcingSource.SYNTHETIC``.
    timeseries : SyntheticTimeseries
        Parametric description of the rainfall signal.
    """

    source: ForcingSource = ForcingSource.SYNTHETIC
    timeseries: SyntheticTimeseries

    def to_dataframe(self, time_frame: TimeFrame) -> pd.DataFrame:
        """Materialize the synthetic timeseries over *time_frame* as a dataframe."""
        series = TimeseriesFactory.from_object(self.timeseries)
        return series.to_dataframe(time_frame=time_frame)
50
+
51
+
52
class RainfallMeteo(IRainfall):
    """Rainfall forcing taken from downloaded meteo (gridded weather) data.

    Attributes
    ----------
    source : ForcingSource
        Always ``ForcingSource.METEO``.
    precip_units : us.UnitTypesIntensity
        Units of the precipitation variable in the meteo dataset. Default mm/hr.
    wind_units : us.UnitTypesVelocity
        Units of the wind variable in the meteo dataset. Default m/s.
    """

    source: ForcingSource = ForcingSource.METEO
    precip_units: us.UnitTypesIntensity = us.UnitTypesIntensity.mm_hr
    wind_units: us.UnitTypesVelocity = us.UnitTypesVelocity.mps
56
+
57
+
58
class RainfallTrack(IRainfall):
    """Rainfall forcing derived from a cyclone track file (``.cyc`` or ``.spw``).

    Attributes
    ----------
    source : ForcingSource
        Always ``ForcingSource.TRACK``.
    path : Path
        Location of the track file; extension is validated on assignment.
    """

    source: ForcingSource = ForcingSource.TRACK

    path: Annotated[Path, validate_file_extension([".cyc", ".spw"])]

    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
        """Copy the track file into *output_dir* and point ``self.path`` at the copy."""
        if not self.path:
            return
        self.path = copy_file_to_output_dir(self.path, Path(output_dir))
66
+
67
+
68
class RainfallCSV(IRainfall):
    """Rainfall forcing read from a user-supplied CSV timeseries file.

    Attributes
    ----------
    source : ForcingSource
        Always ``ForcingSource.CSV``.
    path : Path
        Location of the CSV file; extension is validated on assignment.
    units : us.UnitTypesIntensity
        Units of the intensity values stored in the CSV. Default mm/hr.
    """

    source: ForcingSource = ForcingSource.CSV

    path: Annotated[Path, validate_file_extension([".csv"])]

    units: us.UnitTypesIntensity = us.UnitTypesIntensity.mm_hr

    def to_dataframe(self, time_frame: TimeFrame) -> pd.DataFrame:
        """Load the CSV and return its values over *time_frame*."""
        # A zero-valued UnitfulIntensity is used purely as a unit carrier.
        unit_carrier = us.UnitfulIntensity(value=0, units=self.units)
        timeseries = CSVTimeseries.load_file(path=self.path, units=unit_carrier)
        return timeseries.to_dataframe(time_frame=time_frame)

    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
        """Copy the CSV file into *output_dir* and point ``self.path`` at the copy."""
        self.path = copy_file_to_output_dir(self.path, Path(output_dir))
82
+
83
+
84
class RainfallNetCDF(IRainfall):
    """Rainfall forcing read from a gridded NetCDF file.

    Attributes
    ----------
    source : ForcingSource
        Always ``ForcingSource.NETCDF``.
    units : us.UnitTypesIntensity
        Units of the ``precip`` variable in the file. Default mm/hr.
    path : Path
        Location of the NetCDF file; extension is validated on assignment.
    """

    source: ForcingSource = ForcingSource.NETCDF
    units: us.UnitTypesIntensity = us.UnitTypesIntensity.mm_hr

    path: Annotated[Path, validate_file_extension([".nc"])]

    def read(self) -> xr.Dataset:
        """Open the NetCDF file and return it after validating required contents.

        The file must provide a ``precip`` variable on ``time``/``lat``/``lon``
        coordinates; validation is delegated to ``validate_netcdf_forcing``.
        """
        with xr.open_dataset(self.path) as ds:
            return validate_netcdf_forcing(ds, ("precip",), ("time", "lat", "lon"))

    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
        """Copy the NetCDF file into *output_dir* and point ``self.path`` at the copy."""
        self.path = copy_file_to_output_dir(self.path, Path(output_dir))
@@ -1,191 +1,191 @@
1
- from enum import Enum
2
- from pathlib import Path
3
- from typing import ClassVar, Optional
4
-
5
- import cht_observations.observation_stations as cht_station
6
- import pandas as pd
7
- from noaa_coops.station import COOPSAPIError
8
- from pydantic import BaseModel, model_validator
9
-
10
- from flood_adapt.misc.log import FloodAdaptLogging
11
- from flood_adapt.objects.forcing import unit_system as us
12
- from flood_adapt.objects.forcing.time_frame import TimeFrame
13
- from flood_adapt.objects.forcing.timeseries import CSVTimeseries
14
-
15
-
16
- class TideGaugeSource(str, Enum):
17
- """The accepted input for the variable source in tide_gauge."""
18
-
19
- file = "file"
20
- noaa_coops = "noaa_coops"
21
-
22
-
23
- class TideGauge(BaseModel):
24
- """The accepted input for the variable tide_gauge in Site.
25
-
26
- The obs_station is used for the download of tide gauge data, to be added to the hazard model as water level boundary condition.
27
-
28
- Attributes
29
- ----------
30
- name : Optional[int, str]
31
- Name of the tide gauge. Default is None.
32
- description : Optional[str]
33
- Description of the tide gauge. Default is "".
34
- source : TideGaugeSource
35
- Source of the tide gauge data.
36
- reference : str
37
- Reference of the tide gauge data. Should be defined in site.sfincs.water_level
38
- ID : Optional[int]
39
- ID of the tide gauge data. Default is None.
40
- file : Optional[Path]
41
- Only for file based tide gauges. Should be a path relative to the static folder. Default is None.
42
- lat : Optional[float]
43
- Latitude of the tide gauge data. Default is None.
44
- lon : Optional[float]
45
- Longitude of the tide gauge data. Default is None.
46
- units : us.UnitTypesLength
47
- Units of the water levels in the downloaded file. Default is us.UnitTypesLength.meters.
48
-
49
- """
50
-
51
- name: Optional[int | str] = None
52
- description: Optional[str] = ""
53
- source: TideGaugeSource
54
- reference: str
55
- ID: Optional[int] = None # Attribute used to download from correct gauge
56
- file: Optional[Path] = None # for locally stored data
57
- lat: Optional[float] = None
58
- lon: Optional[float] = None
59
- units: us.UnitTypesLength = (
60
- us.UnitTypesLength.meters
61
- ) # units of the water levels in the downloaded file
62
-
63
- _cached_data: ClassVar[dict[str, pd.DataFrame]] = {}
64
- logger: ClassVar = FloodAdaptLogging.getLogger("TideGauge")
65
-
66
- @model_validator(mode="after")
67
- def validate_selection_type(self) -> "TideGauge":
68
- if self.source == TideGaugeSource.file and self.file is None:
69
- raise ValueError(
70
- "If `source` is 'file' a file path relative to the static folder should be provided with the attribute 'file'."
71
- )
72
- elif self.source == TideGaugeSource.noaa_coops and self.ID is None:
73
- raise ValueError(
74
- "If `source` is 'noaa_coops' the id of the station should be provided with the attribute 'ID'."
75
- )
76
-
77
- return self
78
-
79
- def get_waterlevels_in_time_frame(
80
- self,
81
- time: TimeFrame,
82
- out_path: Optional[Path] = None,
83
- units: us.UnitTypesLength = us.UnitTypesLength.meters,
84
- ) -> pd.DataFrame:
85
- """Download waterlevel data from NOAA station using station_id, start and stop time.
86
-
87
- Parameters
88
- ----------
89
- time : TimeFrame
90
- Time model with start and end time.
91
- tide_gauge : TideGauge
92
- Tide gauge model.
93
- out_path : Optional[Path], optional
94
- Path to save the data, by default None.
95
- units : us.UnitTypesLength, optional
96
- Unit of the waterlevel, by default us.UnitTypesLength.meters.
97
-
98
- Returns
99
- -------
100
- pd.DataFrame
101
- Dataframe with time as index and the waterlevel for each observation station as columns.
102
- """
103
- self.logger.info(f"Retrieving waterlevels for tide gauge {self.ID} for {time}")
104
- if self.file:
105
- gauge_data = self._read_imported_waterlevels(time=time, path=self.file)
106
- else:
107
- gauge_data = self._download_tide_gauge_data(time=time)
108
-
109
- if gauge_data is None:
110
- self.logger.warning(
111
- f"Could not retrieve waterlevels for tide gauge {self.ID}"
112
- )
113
- return pd.DataFrame()
114
-
115
- gauge_data.columns = [f"waterlevel_{self.ID}"]
116
- gauge_data = gauge_data * us.UnitfulLength(value=1.0, units=self.units).convert(
117
- units
118
- )
119
-
120
- if out_path is not None:
121
- Path(out_path).parent.mkdir(parents=True, exist_ok=True)
122
- gauge_data.to_csv(Path(out_path))
123
- return gauge_data
124
-
125
- def _read_imported_waterlevels(self, time: TimeFrame, path: Path) -> pd.DataFrame:
126
- """Read waterlevels from an imported csv file.
127
-
128
- Parameters
129
- ----------
130
- path : Path
131
- Path to the csv file containing the waterlevel data. The csv file should have a column with the waterlevel data and a column with the time data.
132
-
133
- Returns
134
- -------
135
- pd.DataFrame
136
- Dataframe with time as index and the waterlevel for each observation station as columns.
137
- The data is sliced to the time range specified in the time model.
138
- """
139
- return CSVTimeseries.load_file(
140
- path=path, units=us.UnitfulLength(value=0, units=self.units)
141
- ).to_dataframe(time_frame=time)
142
-
143
- def _download_tide_gauge_data(self, time: TimeFrame) -> pd.DataFrame | None:
144
- """Download waterlevel data from NOAA station using station_id, start and stop time.
145
-
146
- Parameters
147
- ----------
148
- obs_point : ObsPointModel
149
- Observation point model.
150
- source : str
151
- Source of the data.
152
-
153
- Returns
154
- -------
155
- pd.DataFrame
156
- Dataframe with time as index and the waterlevel of the observation station as the column.
157
- None
158
- If the data could not be downloaded.
159
- """
160
- cache_key = f"{self.ID}_{time.start_time}_{time.end_time}"
161
- if cache_key in self.__class__._cached_data:
162
- self.logger.info("Tide gauge data retrieved from cache")
163
- return self.__class__._cached_data[cache_key]
164
-
165
- try:
166
- source_obj = cht_station.source(self.source.value)
167
- series = source_obj.get_data(
168
- id=self.ID,
169
- tstart=time.start_time,
170
- tstop=time.end_time,
171
- datum=self.reference,
172
- )
173
- index = pd.date_range(
174
- start=time.start_time,
175
- end=time.end_time,
176
- freq=time.time_step,
177
- name="time",
178
- )
179
- series = series.reindex(index, method="nearest")
180
- df = pd.DataFrame(data=series, index=index)
181
-
182
- except COOPSAPIError as e:
183
- self.logger.error(
184
- f"Could not download tide gauge data for station {self.ID}. {e}"
185
- )
186
- return None
187
-
188
- # Cache the result
189
- self.__class__._cached_data[cache_key] = df
190
-
191
- return df
1
+ from enum import Enum
2
+ from pathlib import Path
3
+ from typing import ClassVar, Optional
4
+
5
+ import cht_observations.observation_stations as cht_station
6
+ import pandas as pd
7
+ from noaa_coops.station import COOPSAPIError
8
+ from pydantic import BaseModel, model_validator
9
+
10
+ from flood_adapt.misc.log import FloodAdaptLogging
11
+ from flood_adapt.objects.forcing import unit_system as us
12
+ from flood_adapt.objects.forcing.time_frame import TimeFrame
13
+ from flood_adapt.objects.forcing.timeseries import CSVTimeseries
14
+
15
+
16
class TideGaugeSource(str, Enum):
    """Accepted values for the ``source`` attribute of a tide gauge.

    ``file`` reads water levels from a locally stored CSV; ``noaa_coops``
    downloads them from the NOAA CO-OPS service.
    """

    file = "file"
    noaa_coops = "noaa_coops"
21
+
22
+
23
class TideGauge(BaseModel):
    """The accepted input for the variable tide_gauge in Site.

    The obs_station is used for the download of tide gauge data, to be added to the hazard model as water level boundary condition.

    Attributes
    ----------
    name : Optional[int | str]
        Name of the tide gauge. Default is None.
    description : Optional[str]
        Description of the tide gauge. Default is "".
    source : TideGaugeSource
        Source of the tide gauge data.
    reference : str
        Reference of the tide gauge data. Should be defined in site.sfincs.water_level
    ID : Optional[int]
        ID of the tide gauge data. Default is None.
    file : Optional[Path]
        Only for file based tide gauges. Should be a path relative to the static folder. Default is None.
    lat : Optional[float]
        Latitude of the tide gauge data. Default is None.
    lon : Optional[float]
        Longitude of the tide gauge data. Default is None.
    units : us.UnitTypesLength
        Units of the water levels in the downloaded file. Default is us.UnitTypesLength.meters.

    """

    name: Optional[int | str] = None
    description: Optional[str] = ""
    source: TideGaugeSource
    reference: str
    ID: Optional[int] = None  # Attribute used to download from correct gauge
    file: Optional[Path] = None  # for locally stored data
    lat: Optional[float] = None
    lon: Optional[float] = None
    units: us.UnitTypesLength = (
        us.UnitTypesLength.meters
    )  # units of the water levels in the downloaded file

    # Class-level cache of downloaded data, keyed by station id + time range.
    _cached_data: ClassVar[dict[str, pd.DataFrame]] = {}
    logger: ClassVar = FloodAdaptLogging.getLogger("TideGauge")

    @model_validator(mode="after")
    def validate_selection_type(self) -> "TideGauge":
        """Ensure the attributes required by the chosen `source` are present."""
        if self.source == TideGaugeSource.file and self.file is None:
            raise ValueError(
                "If `source` is 'file' a file path relative to the static folder should be provided with the attribute 'file'."
            )
        elif self.source == TideGaugeSource.noaa_coops and self.ID is None:
            raise ValueError(
                "If `source` is 'noaa_coops' the id of the station should be provided with the attribute 'ID'."
            )

        return self

    def get_waterlevels_in_time_frame(
        self,
        time: TimeFrame,
        out_path: Optional[Path] = None,
        units: us.UnitTypesLength = us.UnitTypesLength.meters,
    ) -> pd.DataFrame:
        """Return water levels for this gauge within a time frame.

        Data is read from `self.file` when available, otherwise downloaded
        from the configured station.

        Parameters
        ----------
        time : TimeFrame
            Time model with start and end time.
        out_path : Optional[Path], optional
            Path to save the data as CSV, by default None.
        units : us.UnitTypesLength, optional
            Unit of the returned waterlevels, by default us.UnitTypesLength.meters.

        Returns
        -------
        pd.DataFrame
            Dataframe with time as index and the waterlevel for each observation station as columns.
            Empty dataframe if the data could not be retrieved.
        """
        self.logger.info(f"Retrieving waterlevels for tide gauge {self.ID} for {time}")
        if self.file:
            gauge_data = self._read_imported_waterlevels(time=time, path=self.file)
        else:
            gauge_data = self._download_tide_gauge_data(time=time)

        if gauge_data is None:
            self.logger.warning(
                f"Could not retrieve waterlevels for tide gauge {self.ID}"
            )
            return pd.DataFrame()

        gauge_data.columns = [f"waterlevel_{self.ID}"]
        # Convert from the stored/downloaded units to the requested units.
        gauge_data = gauge_data * us.UnitfulLength(value=1.0, units=self.units).convert(
            units
        )

        if out_path is not None:
            Path(out_path).parent.mkdir(parents=True, exist_ok=True)
            gauge_data.to_csv(Path(out_path))
        return gauge_data

    def _read_imported_waterlevels(self, time: TimeFrame, path: Path) -> pd.DataFrame:
        """Read waterlevels from an imported csv file.

        Parameters
        ----------
        time : TimeFrame
            Time model with start and end time used to slice the data.
        path : Path
            Path to the csv file containing the waterlevel data. The csv file should have a column with the waterlevel data and a column with the time data.

        Returns
        -------
        pd.DataFrame
            Dataframe with time as index and the waterlevel for each observation station as columns.
            The data is sliced to the time range specified in the time model.
        """
        # The zero-valued UnitfulLength only carries the unit information.
        return CSVTimeseries.load_file(
            path=path, units=us.UnitfulLength(value=0, units=self.units)
        ).to_dataframe(time_frame=time)

    def _download_tide_gauge_data(self, time: TimeFrame) -> pd.DataFrame | None:
        """Download waterlevel data for this station over a time range.

        Results are cached per (station, time range); repeated calls return a
        copy of the cached dataframe instead of re-downloading.

        Parameters
        ----------
        time : TimeFrame
            Time model with start and end time.

        Returns
        -------
        pd.DataFrame
            Dataframe with time as index and the waterlevel of the observation station as the column.
        None
            If the data could not be downloaded.
        """
        cache_key = f"{self.ID}_{time.start_time}_{time.end_time}"
        if cache_key in self.__class__._cached_data:
            self.logger.info("Tide gauge data retrieved from cache")
            # Return a copy: callers mutate the result (e.g. rename columns),
            # which would otherwise corrupt the cached entry.
            return self.__class__._cached_data[cache_key].copy()

        try:
            source_obj = cht_station.source(self.source.value)
            series = source_obj.get_data(
                id=self.ID,
                tstart=time.start_time,
                tstop=time.end_time,
                datum=self.reference,
            )
            # Align the downloaded series to the requested regular time grid.
            index = pd.date_range(
                start=time.start_time,
                end=time.end_time,
                freq=time.time_step,
                name="time",
            )
            series = series.reindex(index, method="nearest")
            df = pd.DataFrame(data=series, index=index)

        except COOPSAPIError as e:
            self.logger.error(
                f"Could not download tide gauge data for station {self.ID}. {e}"
            )
            return None

        # Cache the result; hand out a copy for the same reason as above.
        self.__class__._cached_data[cache_key] = df

        return df.copy()