flood-adapt 0.3.8-py3-none-any.whl → 0.3.10-py3-none-any.whl

This diff shows the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (100)
  1. flood_adapt/__init__.py +26 -22
  2. flood_adapt/adapter/__init__.py +9 -9
  3. flood_adapt/adapter/fiat_adapter.py +1541 -1536
  4. flood_adapt/adapter/interface/hazard_adapter.py +70 -70
  5. flood_adapt/adapter/interface/impact_adapter.py +36 -36
  6. flood_adapt/adapter/interface/model_adapter.py +89 -89
  7. flood_adapt/adapter/interface/offshore.py +19 -19
  8. flood_adapt/adapter/sfincs_adapter.py +1848 -1846
  9. flood_adapt/adapter/sfincs_offshore.py +193 -193
  10. flood_adapt/config/config.py +248 -290
  11. flood_adapt/config/fiat.py +219 -219
  12. flood_adapt/config/gui.py +331 -331
  13. flood_adapt/config/sfincs.py +481 -336
  14. flood_adapt/config/site.py +129 -129
  15. flood_adapt/database_builder/database_builder.py +2210 -2210
  16. flood_adapt/database_builder/templates/default_units/imperial.toml +9 -9
  17. flood_adapt/database_builder/templates/default_units/metric.toml +9 -9
  18. flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -10
  19. flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -90
  20. flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -57
  21. flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -121
  22. flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -65
  23. flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -45
  24. flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -126
  25. flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -60
  26. flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -121
  27. flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -65
  28. flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -45
  29. flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -4
  30. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -143
  31. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -153
  32. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -127
  33. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -57
  34. flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -4
  35. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -191
  36. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -153
  37. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -178
  38. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -57
  39. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -9
  40. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -65
  41. flood_adapt/database_builder/templates/output_layers/bin_colors.toml +5 -5
  42. flood_adapt/database_builder.py +16 -16
  43. flood_adapt/dbs_classes/__init__.py +21 -21
  44. flood_adapt/dbs_classes/database.py +495 -688
  45. flood_adapt/dbs_classes/dbs_benefit.py +77 -76
  46. flood_adapt/dbs_classes/dbs_event.py +61 -59
  47. flood_adapt/dbs_classes/dbs_measure.py +112 -111
  48. flood_adapt/dbs_classes/dbs_projection.py +34 -34
  49. flood_adapt/dbs_classes/dbs_scenario.py +137 -137
  50. flood_adapt/dbs_classes/dbs_static.py +274 -273
  51. flood_adapt/dbs_classes/dbs_strategy.py +130 -129
  52. flood_adapt/dbs_classes/dbs_template.py +279 -278
  53. flood_adapt/dbs_classes/interface/database.py +107 -139
  54. flood_adapt/dbs_classes/interface/element.py +121 -121
  55. flood_adapt/dbs_classes/interface/static.py +47 -47
  56. flood_adapt/flood_adapt.py +1207 -1178
  57. flood_adapt/misc/database_user.py +16 -16
  58. flood_adapt/misc/exceptions.py +22 -0
  59. flood_adapt/misc/log.py +183 -183
  60. flood_adapt/misc/path_builder.py +54 -54
  61. flood_adapt/misc/utils.py +185 -185
  62. flood_adapt/objects/__init__.py +82 -82
  63. flood_adapt/objects/benefits/benefits.py +61 -61
  64. flood_adapt/objects/events/event_factory.py +135 -135
  65. flood_adapt/objects/events/event_set.py +88 -84
  66. flood_adapt/objects/events/events.py +234 -234
  67. flood_adapt/objects/events/historical.py +58 -58
  68. flood_adapt/objects/events/hurricane.py +68 -67
  69. flood_adapt/objects/events/synthetic.py +46 -50
  70. flood_adapt/objects/forcing/__init__.py +92 -92
  71. flood_adapt/objects/forcing/csv.py +68 -68
  72. flood_adapt/objects/forcing/discharge.py +66 -66
  73. flood_adapt/objects/forcing/forcing.py +150 -150
  74. flood_adapt/objects/forcing/forcing_factory.py +182 -182
  75. flood_adapt/objects/forcing/meteo_handler.py +93 -93
  76. flood_adapt/objects/forcing/netcdf.py +40 -40
  77. flood_adapt/objects/forcing/plotting.py +453 -429
  78. flood_adapt/objects/forcing/rainfall.py +98 -98
  79. flood_adapt/objects/forcing/tide_gauge.py +191 -191
  80. flood_adapt/objects/forcing/time_frame.py +90 -90
  81. flood_adapt/objects/forcing/timeseries.py +564 -564
  82. flood_adapt/objects/forcing/unit_system.py +580 -580
  83. flood_adapt/objects/forcing/waterlevels.py +108 -108
  84. flood_adapt/objects/forcing/wind.py +124 -124
  85. flood_adapt/objects/measures/measure_factory.py +92 -92
  86. flood_adapt/objects/measures/measures.py +529 -529
  87. flood_adapt/objects/object_model.py +74 -68
  88. flood_adapt/objects/projections/projections.py +103 -89
  89. flood_adapt/objects/scenarios/scenarios.py +22 -22
  90. flood_adapt/objects/strategies/strategies.py +89 -89
  91. flood_adapt/workflows/benefit_runner.py +579 -544
  92. flood_adapt/workflows/floodmap.py +85 -85
  93. flood_adapt/workflows/impacts_integrator.py +85 -82
  94. flood_adapt/workflows/scenario_runner.py +70 -70
  95. {flood_adapt-0.3.8.dist-info → flood_adapt-0.3.10.dist-info}/LICENSE +674 -674
  96. {flood_adapt-0.3.8.dist-info → flood_adapt-0.3.10.dist-info}/METADATA +866 -860
  97. flood_adapt-0.3.10.dist-info/RECORD +140 -0
  98. flood_adapt-0.3.8.dist-info/RECORD +0 -139
  99. {flood_adapt-0.3.8.dist-info → flood_adapt-0.3.10.dist-info}/WHEEL +0 -0
  100. {flood_adapt-0.3.8.dist-info → flood_adapt-0.3.10.dist-info}/top_level.txt +0 -0
flood_adapt/objects/forcing/forcing_factory.py
@@ -1,182 +1,182 @@
-from pathlib import Path
-from typing import Any, List, Type
-
-import tomli
-
-from flood_adapt.objects.forcing.discharge import (
-    DischargeConstant,
-    DischargeCSV,
-    DischargeSynthetic,
-)
-from flood_adapt.objects.forcing.forcing import (
-    ForcingSource,
-    ForcingType,
-    IForcing,
-    IForcingFactory,
-)
-from flood_adapt.objects.forcing.rainfall import (
-    RainfallConstant,
-    RainfallCSV,
-    RainfallMeteo,
-    RainfallNetCDF,
-    RainfallSynthetic,
-    RainfallTrack,
-)
-from flood_adapt.objects.forcing.waterlevels import (
-    WaterlevelCSV,
-    WaterlevelGauged,
-    WaterlevelModel,
-    WaterlevelSynthetic,
-)
-from flood_adapt.objects.forcing.wind import (
-    WindConstant,
-    WindCSV,
-    WindMeteo,
-    WindNetCDF,
-    WindSynthetic,
-    WindTrack,
-)
-
-__all__ = [
-    "ForcingFactory",
-    "ForcingSource",
-    "ForcingType",
-    "IForcing",
-    "IForcingFactory",
-    "WindConstant",
-    "WindCSV",
-    "WindMeteo",
-    "WindSynthetic",
-    "WindTrack",
-    "WaterlevelCSV",
-    "WaterlevelGauged",
-    "WaterlevelModel",
-    "WaterlevelSynthetic",
-    "RainfallConstant",
-    "RainfallCSV",
-    "RainfallMeteo",
-    "RainfallSynthetic",
-    "RainfallTrack",
-    "DischargeConstant",
-    "DischargeCSV",
-    "DischargeSynthetic",
-]
-
-
-class ForcingFactory(IForcingFactory):
-    """Factory class for creating forcings."""
-
-    WATERLEVELS: dict[ForcingSource, Type[IForcing]] = {
-        ForcingSource.MODEL: WaterlevelModel,
-        ForcingSource.CSV: WaterlevelCSV,
-        ForcingSource.SYNTHETIC: WaterlevelSynthetic,
-        ForcingSource.GAUGED: WaterlevelGauged,
-    }
-
-    WIND: dict[ForcingSource, Type[IForcing]] = {
-        ForcingSource.METEO: WindMeteo,
-        ForcingSource.TRACK: WindTrack,
-        ForcingSource.CSV: WindCSV,
-        ForcingSource.SYNTHETIC: WindSynthetic,
-        ForcingSource.CONSTANT: WindConstant,
-        ForcingSource.NETCDF: WindNetCDF,
-    }
-
-    RAINFALL: dict[ForcingSource, Type[IForcing]] = {
-        ForcingSource.METEO: RainfallMeteo,
-        ForcingSource.TRACK: RainfallTrack,
-        ForcingSource.CSV: RainfallCSV,
-        ForcingSource.SYNTHETIC: RainfallSynthetic,
-        ForcingSource.CONSTANT: RainfallConstant,
-        ForcingSource.NETCDF: RainfallNetCDF,
-    }
-
-    DISCHARGE: dict[ForcingSource, Type[IForcing]] = {
-        ForcingSource.CSV: DischargeCSV,
-        ForcingSource.SYNTHETIC: DischargeSynthetic,
-        ForcingSource.CONSTANT: DischargeConstant,
-    }
-
-    FORCINGTYPES: dict[ForcingType, dict[ForcingSource, Type[IForcing]]] = {
-        ForcingType.WATERLEVEL: WATERLEVELS,
-        ForcingType.RAINFALL: RAINFALL,
-        ForcingType.WIND: WIND,
-        ForcingType.DISCHARGE: DISCHARGE,
-    }
-
-    @classmethod
-    def read_forcing(
-        cls,
-        filepath: Path,
-    ) -> tuple[Type[IForcing], ForcingType, ForcingSource]:
-        """Extract forcing type and source from a TOML file."""
-        with open(filepath, mode="rb") as fp:
-            toml_data = tomli.load(fp)
-        type = toml_data.get("type")
-        source = toml_data.get("source")
-
-        if type is None or source is None:
-            raise ValueError(
-                f"Forcing type {type} or source {source} not found in {filepath}"
-            )
-        forcing_cls = cls.get_forcing_class(ForcingType(type), ForcingSource(source))
-        return forcing_cls, ForcingType(type), ForcingSource(source)
-
-    @classmethod
-    def get_forcing_class(
-        cls, type: ForcingType, source: ForcingSource
-    ) -> Type[IForcing]:
-        """Get the forcing class corresponding to the type and source."""
-        if (sources := cls.FORCINGTYPES.get(type)) is None:
-            raise ValueError(f"Invalid forcing type: {type}")
-
-        if (forcing_cls := sources.get(source)) is None:
-            raise ValueError(
-                f"Invalid forcing source: {source} for forcing type: {type}"
-            )
-        return forcing_cls
-
-    @classmethod
-    def load_file(cls, toml_file: Path) -> IForcing:
-        """Create a forcing object from a TOML file."""
-        with open(toml_file, mode="rb") as fp:
-            toml_data = tomli.load(fp)
-        return cls.load_dict(toml_data)
-
-    @classmethod
-    def load_dict(cls, attrs: dict[str, Any] | IForcing) -> IForcing:
-        """Create a forcing object from a dictionary of attributes."""
-        if isinstance(attrs, IForcing):
-            return attrs
-        type = attrs.get("type")
-        source = attrs.get("source")
-        if type is None or source is None:
-            raise ValueError(
-                f"Forcing type {type} or source {source} not found in attributes."
-            )
-        return cls.get_forcing_class(
-            ForcingType(type), ForcingSource(source)
-        ).model_validate(attrs)
-
-    @classmethod
-    def list_forcings(cls) -> List[Type[IForcing]]:
-        """List all available forcing classes."""
-        forcing_classes = set()
-        for source_map in cls.FORCINGTYPES.values():
-            for forcing in source_map.values():
-                if forcing is not None:
-                    forcing_classes.add(forcing)
-        return list(forcing_classes)
-
-    @classmethod
-    def list_forcing_types_and_sources(cls) -> List[tuple[ForcingType, ForcingSource]]:
-        """List all available forcing classes using a tuple of ForcingType and ForcingSource."""
-        # TODO remove this when the backend supports all forcings
-        ONLY_BACKEND_FORCINGS = {(ForcingType.WIND, ForcingSource.SYNTHETIC)}
-        combinations = set()
-        for type, source_map in cls.FORCINGTYPES.items():
-            for source in source_map.keys():
-                if (type, source) in ONLY_BACKEND_FORCINGS:
-                    continue
-                combinations.add((type, source))
-        return list(combinations)
+from pathlib import Path
+from typing import Any, List, Type
+
+import tomli
+
+from flood_adapt.objects.forcing.discharge import (
+    DischargeConstant,
+    DischargeCSV,
+    DischargeSynthetic,
+)
+from flood_adapt.objects.forcing.forcing import (
+    ForcingSource,
+    ForcingType,
+    IForcing,
+    IForcingFactory,
+)
+from flood_adapt.objects.forcing.rainfall import (
+    RainfallConstant,
+    RainfallCSV,
+    RainfallMeteo,
+    RainfallNetCDF,
+    RainfallSynthetic,
+    RainfallTrack,
+)
+from flood_adapt.objects.forcing.waterlevels import (
+    WaterlevelCSV,
+    WaterlevelGauged,
+    WaterlevelModel,
+    WaterlevelSynthetic,
+)
+from flood_adapt.objects.forcing.wind import (
+    WindConstant,
+    WindCSV,
+    WindMeteo,
+    WindNetCDF,
+    WindSynthetic,
+    WindTrack,
+)
+
+__all__ = [
+    "ForcingFactory",
+    "ForcingSource",
+    "ForcingType",
+    "IForcing",
+    "IForcingFactory",
+    "WindConstant",
+    "WindCSV",
+    "WindMeteo",
+    "WindSynthetic",
+    "WindTrack",
+    "WaterlevelCSV",
+    "WaterlevelGauged",
+    "WaterlevelModel",
+    "WaterlevelSynthetic",
+    "RainfallConstant",
+    "RainfallCSV",
+    "RainfallMeteo",
+    "RainfallSynthetic",
+    "RainfallTrack",
+    "DischargeConstant",
+    "DischargeCSV",
+    "DischargeSynthetic",
+]
+
+
+class ForcingFactory(IForcingFactory):
+    """Factory class for creating forcings."""
+
+    WATERLEVELS: dict[ForcingSource, Type[IForcing]] = {
+        ForcingSource.MODEL: WaterlevelModel,
+        ForcingSource.CSV: WaterlevelCSV,
+        ForcingSource.SYNTHETIC: WaterlevelSynthetic,
+        ForcingSource.GAUGED: WaterlevelGauged,
+    }
+
+    WIND: dict[ForcingSource, Type[IForcing]] = {
+        ForcingSource.METEO: WindMeteo,
+        ForcingSource.TRACK: WindTrack,
+        ForcingSource.CSV: WindCSV,
+        ForcingSource.SYNTHETIC: WindSynthetic,
+        ForcingSource.CONSTANT: WindConstant,
+        ForcingSource.NETCDF: WindNetCDF,
+    }
+
+    RAINFALL: dict[ForcingSource, Type[IForcing]] = {
+        ForcingSource.METEO: RainfallMeteo,
+        ForcingSource.TRACK: RainfallTrack,
+        ForcingSource.CSV: RainfallCSV,
+        ForcingSource.SYNTHETIC: RainfallSynthetic,
+        ForcingSource.CONSTANT: RainfallConstant,
+        ForcingSource.NETCDF: RainfallNetCDF,
+    }
+
+    DISCHARGE: dict[ForcingSource, Type[IForcing]] = {
+        ForcingSource.CSV: DischargeCSV,
+        ForcingSource.SYNTHETIC: DischargeSynthetic,
+        ForcingSource.CONSTANT: DischargeConstant,
+    }
+
+    FORCINGTYPES: dict[ForcingType, dict[ForcingSource, Type[IForcing]]] = {
+        ForcingType.WATERLEVEL: WATERLEVELS,
+        ForcingType.RAINFALL: RAINFALL,
+        ForcingType.WIND: WIND,
+        ForcingType.DISCHARGE: DISCHARGE,
+    }
+
+    @classmethod
+    def read_forcing(
+        cls,
+        filepath: Path,
+    ) -> tuple[Type[IForcing], ForcingType, ForcingSource]:
+        """Extract forcing type and source from a TOML file."""
+        with open(filepath, mode="rb") as fp:
+            toml_data = tomli.load(fp)
+        type = toml_data.get("type")
+        source = toml_data.get("source")
+
+        if type is None or source is None:
+            raise ValueError(
+                f"Forcing type {type} or source {source} not found in {filepath}"
+            )
+        forcing_cls = cls.get_forcing_class(ForcingType(type), ForcingSource(source))
+        return forcing_cls, ForcingType(type), ForcingSource(source)
+
+    @classmethod
+    def get_forcing_class(
+        cls, type: ForcingType, source: ForcingSource
+    ) -> Type[IForcing]:
+        """Get the forcing class corresponding to the type and source."""
+        if (sources := cls.FORCINGTYPES.get(type)) is None:
+            raise ValueError(f"Invalid forcing type: {type}")
+
+        if (forcing_cls := sources.get(source)) is None:
+            raise ValueError(
+                f"Invalid forcing source: {source} for forcing type: {type}"
+            )
+        return forcing_cls
+
+    @classmethod
+    def load_file(cls, toml_file: Path) -> IForcing:
+        """Create a forcing object from a TOML file."""
+        with open(toml_file, mode="rb") as fp:
+            toml_data = tomli.load(fp)
+        return cls.load_dict(toml_data)
+
+    @classmethod
+    def load_dict(cls, attrs: dict[str, Any] | IForcing) -> IForcing:
+        """Create a forcing object from a dictionary of attributes."""
+        if isinstance(attrs, IForcing):
+            return attrs
+        type = attrs.get("type")
+        source = attrs.get("source")
+        if type is None or source is None:
+            raise ValueError(
+                f"Forcing type {type} or source {source} not found in attributes."
+            )
+        return cls.get_forcing_class(
+            ForcingType(type), ForcingSource(source)
+        ).model_validate(attrs)
+
+    @classmethod
+    def list_forcings(cls) -> List[Type[IForcing]]:
+        """List all available forcing classes."""
+        forcing_classes = set()
+        for source_map in cls.FORCINGTYPES.values():
+            for forcing in source_map.values():
+                if forcing is not None:
+                    forcing_classes.add(forcing)
+        return list(forcing_classes)
+
+    @classmethod
+    def list_forcing_types_and_sources(cls) -> List[tuple[ForcingType, ForcingSource]]:
+        """List all available forcing classes using a tuple of ForcingType and ForcingSource."""
+        # TODO remove this when the backend supports all forcings
+        ONLY_BACKEND_FORCINGS = {(ForcingType.WIND, ForcingSource.SYNTHETIC)}
+        combinations = set()
+        for type, source_map in cls.FORCINGTYPES.items():
+            for source in source_map.keys():
+                if (type, source) in ONLY_BACKEND_FORCINGS:
+                    continue
+                combinations.add((type, source))
+        return list(combinations)
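
Usage note (not part of the published diff): a minimal sketch of how the factory above is driven, using only names visible in this hunk. The model fields of the concrete forcing classes are not shown in this diff, so no field values are filled in.

    from flood_adapt.objects.forcing.forcing import ForcingSource, ForcingType
    from flood_adapt.objects.forcing.forcing_factory import ForcingFactory

    # Resolve the concrete class registered for a (type, source) pair.
    wind_cls = ForcingFactory.get_forcing_class(
        ForcingType.WIND, ForcingSource.CONSTANT
    )  # -> WindConstant

    # Enumerate every combination the factory exposes; (WIND, SYNTHETIC)
    # is filtered out via ONLY_BACKEND_FORCINGS.
    for ftype, fsource in ForcingFactory.list_forcing_types_and_sources():
        print(ftype, fsource)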
flood_adapt/objects/forcing/meteo_handler.py
@@ -1,93 +1,93 @@
-from datetime import datetime
-from pathlib import Path
-from typing import Optional
-
-import cht_meteo
-import numpy as np
-import xarray as xr
-from cht_meteo.dataset import MeteoDataset
-
-from flood_adapt.config.config import Settings
-from flood_adapt.config.site import Site
-from flood_adapt.objects.forcing.time_frame import TimeFrame
-
-
-class MeteoHandler:
-    def __init__(self, dir: Optional[Path] = None, site: Optional[Site] = None) -> None:
-        self.dir: Path = dir or Settings().database_path / "static" / "meteo"
-        self.dir.mkdir(parents=True, exist_ok=True)
-        self.site: Site = site or Site.load_file(
-            Settings().database_path / "static" / "config" / "site.toml"
-        )
-        # Create GFS dataset
-        self.dataset = cht_meteo.dataset(
-            name="gfs_anl_0p50",
-            source="gfs_analysis_0p50",
-            path=self.dir,
-            lon_range=(self.site.lon - 10, self.site.lon + 10),
-            lat_range=(self.site.lat - 10, self.site.lat + 10),
-        )
-        # quick fix for sites near the 0 degree longitude -> shift the meteo download area either east or west of the 0 degree longitude
-        # TODO implement a good solution to this in cht_meteo
-        self.dataset.lon_range = self._shift_grid_to_positive_lon(self.dataset)
-
-    def download(self, time: TimeFrame):
-        # Download and collect data
-        time_range = self.get_time_range(time)
-
-        self.dataset.download(time_range=time_range)
-
-    def read(self, time: TimeFrame) -> xr.Dataset:
-        self.download(time)
-        time_range = self.get_time_range(time)
-        ds = self.dataset.collect(time_range=time_range)
-
-        if ds is None:
-            raise FileNotFoundError(
-                f"No meteo files found in meteo directory {self.dir}"
-            )
-
-        ds.raster.set_crs(4326)
-
-        # Rename the variables to match what hydromt-sfincs expects
-        ds = ds.rename(
-            {
-                "barometric_pressure": "press_msl",
-                "precipitation": "precip",
-                "wind_u": "wind10_u",
-                "wind_v": "wind10_v",
-            }
-        )
-
-        # Convert the longitude to -180 to 180 to match hydromt-sfincs
-        if ds["lon"].min() > 180:
-            ds["lon"] = ds["lon"] - 360
-
-        return ds
-
-    @staticmethod
-    def get_time_range(time: TimeFrame) -> tuple:
-        t0 = time.start_time
-        t1 = time.end_time
-        if not isinstance(t0, datetime):
-            t0 = datetime.strptime(t0, "%Y%m%d %H%M%S")
-        if not isinstance(t1, datetime):
-            t1 = datetime.strptime(t1, "%Y%m%d %H%M%S")
-        time_range = (t0, t1)
-        return time_range
-
-    @staticmethod
-    def _shift_grid_to_positive_lon(grid: MeteoDataset):
-        """Shift the grid to positive longitudes if the grid crosses the 0 degree longitude."""
-        if np.prod(grid.lon_range) < 0:
-            if np.abs(grid.lon_range[0]) > np.abs(grid.lon_range[1]):
-                grid.lon_range = [
-                    grid.lon_range[0] - grid.lon_range[1] - 1,
-                    -1,
-                ]
-            else:
-                grid.lon_range = [
-                    1,
-                    grid.lon_range[1] - grid.lon_range[0] + 1,
-                ]
-        return grid.lon_range
+from datetime import datetime
+from pathlib import Path
+from typing import Optional
+
+import cht_meteo
+import numpy as np
+import xarray as xr
+from cht_meteo.dataset import MeteoDataset
+
+from flood_adapt.config.config import Settings
+from flood_adapt.config.site import Site
+from flood_adapt.objects.forcing.time_frame import TimeFrame
+
+
+class MeteoHandler:
+    def __init__(self, dir: Optional[Path] = None, site: Optional[Site] = None) -> None:
+        self.dir: Path = dir or Settings().database_path / "static" / "meteo"
+        self.dir.mkdir(parents=True, exist_ok=True)
+        self.site: Site = site or Site.load_file(
+            Settings().database_path / "static" / "config" / "site.toml"
+        )
+        # Create GFS dataset
+        self.dataset = cht_meteo.dataset(
+            name="gfs_anl_0p50",
+            source="gfs_analysis_0p50",
+            path=self.dir,
+            lon_range=(self.site.lon - 10, self.site.lon + 10),
+            lat_range=(self.site.lat - 10, self.site.lat + 10),
+        )
+        # quick fix for sites near the 0 degree longitude -> shift the meteo download area either east or west of the 0 degree longitude
+        # TODO implement a good solution to this in cht_meteo
+        self.dataset.lon_range = self._shift_grid_to_positive_lon(self.dataset)
+
+    def download(self, time: TimeFrame):
+        # Download and collect data
+        time_range = self.get_time_range(time)
+
+        self.dataset.download(time_range=time_range)
+
+    def read(self, time: TimeFrame) -> xr.Dataset:
+        self.download(time)
+        time_range = self.get_time_range(time)
+        ds = self.dataset.collect(time_range=time_range)
+
+        if ds is None:
+            raise FileNotFoundError(
+                f"No meteo files found in meteo directory {self.dir}"
+            )
+
+        ds.raster.set_crs(4326)
+
+        # Rename the variables to match what hydromt-sfincs expects
+        ds = ds.rename(
+            {
+                "barometric_pressure": "press_msl",
+                "precipitation": "precip",
+                "wind_u": "wind10_u",
+                "wind_v": "wind10_v",
+            }
+        )
+
+        # Convert the longitude to -180 to 180 to match hydromt-sfincs
+        if ds["lon"].min() > 180:
+            ds["lon"] = ds["lon"] - 360
+
+        return ds
+
+    @staticmethod
+    def get_time_range(time: TimeFrame) -> tuple:
+        t0 = time.start_time
+        t1 = time.end_time
+        if not isinstance(t0, datetime):
+            t0 = datetime.strptime(t0, "%Y%m%d %H%M%S")
+        if not isinstance(t1, datetime):
+            t1 = datetime.strptime(t1, "%Y%m%d %H%M%S")
+        time_range = (t0, t1)
+        return time_range
+
+    @staticmethod
+    def _shift_grid_to_positive_lon(grid: MeteoDataset):
+        """Shift the grid to positive longitudes if the grid crosses the 0 degree longitude."""
+        if np.prod(grid.lon_range) < 0:
+            if np.abs(grid.lon_range[0]) > np.abs(grid.lon_range[1]):
+                grid.lon_range = [
+                    grid.lon_range[0] - grid.lon_range[1] - 1,
+                    -1,
+                ]
+            else:
+                grid.lon_range = [
+                    1,
+                    grid.lon_range[1] - grid.lon_range[0] + 1,
+                ]
+        return grid.lon_range
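
Usage note (not part of the published diff): a sketch of the download-and-read flow, assuming a configured FloodAdapt database (without arguments, MeteoHandler falls back to Settings().database_path for both the cache directory and site.toml) and assuming TimeFrame accepts start_time/end_time datetimes, since its signature is not shown in this diff.

    from datetime import datetime

    from flood_adapt.objects.forcing.meteo_handler import MeteoHandler
    from flood_adapt.objects.forcing.time_frame import TimeFrame

    handler = MeteoHandler()  # uses <database>/static/meteo and site.toml lon/lat
    frame = TimeFrame(start_time=datetime(2023, 9, 1), end_time=datetime(2023, 9, 3))
    ds = handler.read(frame)  # downloads GFS analysis files, then collects them
    print(ds.data_vars)  # press_msl, precip, wind10_u, wind10_v after renaming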
flood_adapt/objects/forcing/netcdf.py
@@ -1,40 +1,40 @@
-import numpy as np
-import pandas as pd
-import xarray as xr
-
-
-@staticmethod
-def validate_netcdf_forcing(
-    ds: xr.Dataset, required_vars: tuple[str, ...], required_coords: tuple[str, ...]
-) -> xr.Dataset:
-    """Validate a forcing dataset by checking for required variables and coordinates."""
-    # Check variables
-    _required_vars = set(required_vars)
-    if not _required_vars.issubset(ds.data_vars):
-        missing_vars = _required_vars - set(ds.data_vars)
-        raise ValueError(
-            f"Missing required variables for netcdf forcing: {missing_vars}"
-        )
-
-    # Check coordinates
-    _required_coords = set(required_coords)
-    if not _required_coords.issubset(ds.coords):
-        missing_coords = _required_coords - set(ds.coords)
-        raise ValueError(
-            f"Missing required coordinates for netcdf forcing: {missing_coords}"
-        )
-
-    # Check time step
-    ts = pd.to_timedelta(np.diff(ds.time).mean())
-    if ts < pd.to_timedelta("1H"):
-        raise ValueError(
-            f"SFINCS NetCDF forcing time step cannot be less than 1 hour: {ts}"
-        )
-
-    for var in ds.data_vars:
-        # Check order of dimensions
-        if ds[var].dims != required_coords:
-            raise ValueError(
-                f"Order of dimensions for variable {var} must be {required_coords}"
-            )
-    return ds
+import numpy as np
+import pandas as pd
+import xarray as xr
+
+
+@staticmethod
+def validate_netcdf_forcing(
+    ds: xr.Dataset, required_vars: tuple[str, ...], required_coords: tuple[str, ...]
+) -> xr.Dataset:
+    """Validate a forcing dataset by checking for required variables and coordinates."""
+    # Check variables
+    _required_vars = set(required_vars)
+    if not _required_vars.issubset(ds.data_vars):
+        missing_vars = _required_vars - set(ds.data_vars)
+        raise ValueError(
+            f"Missing required variables for netcdf forcing: {missing_vars}"
+        )
+
+    # Check coordinates
+    _required_coords = set(required_coords)
+    if not _required_coords.issubset(ds.coords):
+        missing_coords = _required_coords - set(ds.coords)
+        raise ValueError(
+            f"Missing required coordinates for netcdf forcing: {missing_coords}"
+        )
+
+    # Check time step
+    ts = pd.to_timedelta(np.diff(ds.time).mean())
+    if ts < pd.to_timedelta("1H"):
+        raise ValueError(
+            f"SFINCS NetCDF forcing time step cannot be less than 1 hour: {ts}"
+        )
+
+    for var in ds.data_vars:
+        # Check order of dimensions
+        if ds[var].dims != required_coords:
+            raise ValueError(
+                f"Order of dimensions for variable {var} must be {required_coords}"
+            )
+    return ds
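
Usage note (not part of the published diff): validate_netcdf_forcing requires every variable's dimensions to be ordered exactly as required_coords and the mean time step to be at least one hour. The hypothetical dataset below would pass those checks; the function is shown with @staticmethod, so in the package it presumably sits on a class, but it is called as a plain function here for illustration.

    import numpy as np
    import pandas as pd
    import xarray as xr

    times = pd.date_range("2023-09-01", periods=24, freq="1H")  # hourly steps
    lat = np.linspace(30.0, 32.0, 5)
    lon = np.linspace(-80.0, -78.0, 5)
    rain = np.zeros((times.size, lat.size, lon.size))

    ds = xr.Dataset(
        {"precip": (("time", "lat", "lon"), rain)},
        coords={"time": times, "lat": lat, "lon": lon},
    )
    validate_netcdf_forcing(
        ds, required_vars=("precip",), required_coords=("time", "lat", "lon")
    )  # returns ds unchanged; raises ValueError on any violation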