flood_adapt-0.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flood_adapt/__init__.py +22 -0
- flood_adapt/adapter/__init__.py +9 -0
- flood_adapt/adapter/fiat_adapter.py +1502 -0
- flood_adapt/adapter/interface/__init__.py +0 -0
- flood_adapt/adapter/interface/hazard_adapter.py +70 -0
- flood_adapt/adapter/interface/impact_adapter.py +36 -0
- flood_adapt/adapter/interface/model_adapter.py +89 -0
- flood_adapt/adapter/interface/offshore.py +19 -0
- flood_adapt/adapter/sfincs_adapter.py +1857 -0
- flood_adapt/adapter/sfincs_offshore.py +193 -0
- flood_adapt/config/__init__.py +0 -0
- flood_adapt/config/config.py +245 -0
- flood_adapt/config/fiat.py +219 -0
- flood_adapt/config/gui.py +224 -0
- flood_adapt/config/sfincs.py +336 -0
- flood_adapt/config/site.py +124 -0
- flood_adapt/database_builder/__init__.py +0 -0
- flood_adapt/database_builder/database_builder.py +2175 -0
- flood_adapt/database_builder/templates/default_units/imperial.toml +9 -0
- flood_adapt/database_builder/templates/default_units/metric.toml +9 -0
- flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -0
- flood_adapt/database_builder/templates/icons/black_down_48x48.png +0 -0
- flood_adapt/database_builder/templates/icons/black_left_48x48.png +0 -0
- flood_adapt/database_builder/templates/icons/black_right_48x48.png +0 -0
- flood_adapt/database_builder/templates/icons/black_up_48x48.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_down.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_left.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_right.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_up.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_down.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_left.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_right.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_up.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_left.png +0 -0
- flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_right.png +0 -0
- flood_adapt/database_builder/templates/icons/white_down_48x48.png +0 -0
- flood_adapt/database_builder/templates/icons/white_left_48x48.png +0 -0
- flood_adapt/database_builder/templates/icons/white_right_48x48.png +0 -0
- flood_adapt/database_builder/templates/icons/white_up_48x48.png +0 -0
- flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -0
- flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -0
- flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -0
- flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -0
- flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -0
- flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -0
- flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -0
- flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -0
- flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -0
- flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -0
- flood_adapt/database_builder/templates/infographics/images/ambulance.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/car.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/cart.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/firetruck.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/hospital.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/house.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/info.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/money.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/person.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/school.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/truck.png +0 -0
- flood_adapt/database_builder/templates/infographics/images/walking_person.png +0 -0
- flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -0
- flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -0
- flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -0
- flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -0
- flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -0
- flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -0
- flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -0
- flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -0
- flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -0
- flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -0
- flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -0
- flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -0
- flood_adapt/database_builder/templates/mapbox_layers/bin_colors.toml +5 -0
- flood_adapt/database_builder.py +16 -0
- flood_adapt/dbs_classes/__init__.py +21 -0
- flood_adapt/dbs_classes/database.py +716 -0
- flood_adapt/dbs_classes/dbs_benefit.py +97 -0
- flood_adapt/dbs_classes/dbs_event.py +91 -0
- flood_adapt/dbs_classes/dbs_measure.py +103 -0
- flood_adapt/dbs_classes/dbs_projection.py +52 -0
- flood_adapt/dbs_classes/dbs_scenario.py +150 -0
- flood_adapt/dbs_classes/dbs_static.py +261 -0
- flood_adapt/dbs_classes/dbs_strategy.py +147 -0
- flood_adapt/dbs_classes/dbs_template.py +302 -0
- flood_adapt/dbs_classes/interface/database.py +147 -0
- flood_adapt/dbs_classes/interface/element.py +137 -0
- flood_adapt/dbs_classes/interface/static.py +47 -0
- flood_adapt/flood_adapt.py +1371 -0
- flood_adapt/misc/__init__.py +0 -0
- flood_adapt/misc/database_user.py +16 -0
- flood_adapt/misc/log.py +183 -0
- flood_adapt/misc/path_builder.py +54 -0
- flood_adapt/misc/utils.py +185 -0
- flood_adapt/objects/__init__.py +59 -0
- flood_adapt/objects/benefits/__init__.py +0 -0
- flood_adapt/objects/benefits/benefits.py +61 -0
- flood_adapt/objects/events/__init__.py +0 -0
- flood_adapt/objects/events/event_factory.py +135 -0
- flood_adapt/objects/events/event_set.py +84 -0
- flood_adapt/objects/events/events.py +221 -0
- flood_adapt/objects/events/historical.py +55 -0
- flood_adapt/objects/events/hurricane.py +64 -0
- flood_adapt/objects/events/synthetic.py +48 -0
- flood_adapt/objects/forcing/__init__.py +0 -0
- flood_adapt/objects/forcing/csv.py +68 -0
- flood_adapt/objects/forcing/discharge.py +66 -0
- flood_adapt/objects/forcing/forcing.py +142 -0
- flood_adapt/objects/forcing/forcing_factory.py +182 -0
- flood_adapt/objects/forcing/meteo_handler.py +93 -0
- flood_adapt/objects/forcing/netcdf.py +40 -0
- flood_adapt/objects/forcing/plotting.py +428 -0
- flood_adapt/objects/forcing/rainfall.py +98 -0
- flood_adapt/objects/forcing/tide_gauge.py +191 -0
- flood_adapt/objects/forcing/time_frame.py +77 -0
- flood_adapt/objects/forcing/timeseries.py +552 -0
- flood_adapt/objects/forcing/unit_system.py +580 -0
- flood_adapt/objects/forcing/waterlevels.py +108 -0
- flood_adapt/objects/forcing/wind.py +124 -0
- flood_adapt/objects/measures/__init__.py +0 -0
- flood_adapt/objects/measures/measure_factory.py +92 -0
- flood_adapt/objects/measures/measures.py +506 -0
- flood_adapt/objects/object_model.py +68 -0
- flood_adapt/objects/projections/__init__.py +0 -0
- flood_adapt/objects/projections/projections.py +89 -0
- flood_adapt/objects/scenarios/__init__.py +0 -0
- flood_adapt/objects/scenarios/scenarios.py +22 -0
- flood_adapt/objects/strategies/__init__.py +0 -0
- flood_adapt/objects/strategies/strategies.py +68 -0
- flood_adapt/workflows/__init__.py +0 -0
- flood_adapt/workflows/benefit_runner.py +541 -0
- flood_adapt/workflows/floodmap.py +85 -0
- flood_adapt/workflows/impacts_integrator.py +82 -0
- flood_adapt/workflows/scenario_runner.py +69 -0
- flood_adapt-0.3.0.dist-info/LICENSE +21 -0
- flood_adapt-0.3.0.dist-info/METADATA +183 -0
- flood_adapt-0.3.0.dist-info/RECORD +139 -0
- flood_adapt-0.3.0.dist-info/WHEEL +5 -0
- flood_adapt-0.3.0.dist-info/top_level.txt +1 -0

flood_adapt/objects/forcing/forcing.py (+142 lines)
@@ -0,0 +1,142 @@
+import logging
+import os
+from abc import ABC, abstractmethod
+from enum import Enum
+from pathlib import Path
+from typing import Any, ClassVar, List, Type
+
+import tomli
+from pydantic import BaseModel, field_serializer
+
+from flood_adapt.config.sfincs import RiverModel
+from flood_adapt.misc.log import FloodAdaptLogging
+
+
+### ENUMS ###
+class ForcingType(str, Enum):
+    """Enum class for the different types of forcing parameters."""
+
+    WIND = "WIND"
+    RAINFALL = "RAINFALL"
+    DISCHARGE = "DISCHARGE"
+    WATERLEVEL = "WATERLEVEL"
+
+
+class ForcingSource(str, Enum):
+    """Enum class for the different sources of forcing parameters."""
+
+    MODEL = "MODEL"  # 'our' hindcast/ sfincs offshore model
+    TRACK = "TRACK"  # 'our' hindcast/ sfincs offshore model + (shifted) hurricane
+    CSV = "CSV"  # user provided csv file
+    NETCDF = "NETCDF"  # user provided netcdf file
+
+    SYNTHETIC = "SYNTHETIC"  # synthetic data
+    CONSTANT = "CONSTANT"  # synthetic data
+
+    GAUGED = "GAUGED"  # data downloaded from a gauge
+    METEO = "METEO"  # external hindcast data
+
+    NONE = "NONE"  # no forcing data
+
+
+class IForcing(BaseModel, ABC):
+    """BaseModel describing the expected variables and data types for forcing parameters of hazard model."""
+
+    class Config:
+        arbitrary_types_allowed = True
+
+    type: ForcingType
+    source: ForcingSource
+    logger: ClassVar[logging.Logger] = FloodAdaptLogging.getLogger("Forcing")
+
+    @classmethod
+    def load_file(cls, path: Path):
+        with open(path, mode="rb") as fp:
+            toml_data = tomli.load(fp)
+        return cls.load_dict(toml_data)
+
+    @classmethod
+    def load_dict(cls, attrs):
+        return cls.model_validate(attrs)
+
+    def model_dump(self, **kwargs: Any) -> dict[str, Any]:
+        """Override the default model_dump to include class variables `type` and `source`."""
+        data = super().model_dump(**kwargs)
+        data.update({"type": self.type, "source": self.source})
+        return data
+
+    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
+        """Save additional data of the forcing."""
+        return
+
+    @field_serializer("path", check_fields=False)
+    @classmethod
+    def serialize_path(cls, value: Path) -> str:
+        """Serialize filepath-like fields by saving only the filename. It is assumed that the file will be saved in the same directory."""
+        return value.name
+
+
+class IDischarge(IForcing):
+    type: ForcingType = ForcingType.DISCHARGE
+    river: RiverModel
+
+
+class IRainfall(IForcing):
+    type: ForcingType = ForcingType.RAINFALL
+
+
+class IWind(IForcing):
+    type: ForcingType = ForcingType.WIND
+
+
+class IWaterlevel(IForcing):
+    type: ForcingType = ForcingType.WATERLEVEL
+
+
+class IForcingFactory:
+    @classmethod
+    @abstractmethod
+    def load_file(cls, toml_file: Path) -> IForcing:
+        """Create a forcing object from a TOML file."""
+        ...
+
+    @classmethod
+    @abstractmethod
+    def load_dict(cls, attrs: dict[str, Any] | IForcing) -> IForcing:
+        """Create a forcing object from a dictionary of attributes."""
+        ...
+
+    @classmethod
+    @abstractmethod
+    def read_forcing(
+        cls,
+        filepath: Path,
+    ) -> tuple[Type[IForcing], ForcingType, ForcingSource]:
+        """Extract forcing class, type and source from a TOML file."""
+        ...
+
+    @classmethod
+    @abstractmethod
+    def get_forcing_class(
+        cls, type: ForcingType, source: ForcingSource
+    ) -> Type[IForcing]:
+        """Get the forcing class corresponding to the type and source."""
+        ...
+
+    @classmethod
+    @abstractmethod
+    def list_forcing_types(cls) -> List[ForcingType]:
+        """List all available forcing types."""
+        ...
+
+    @classmethod
+    @abstractmethod
+    def list_forcing_classes(cls) -> List[Type[IForcing]]:
+        """List all available forcing classes."""
+        ...
+
+    @classmethod
+    @abstractmethod
+    def list_forcing_types_and_sources(cls) -> List[tuple[ForcingType, ForcingSource]]:
+        """List all available combinations of forcing types and sources."""
+        ...
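As a quick orientation for readers of this hunk: `IForcing` is a pydantic model, so concrete forcings are declared by subclassing one of the typed bases above and adding their own fields. A minimal sketch of that pattern follows; `DummyRainfall` and its `intensity` field are hypothetical stand-ins, not part of the package, and the import assumes the module path shown in the file listing.

```python
# Minimal sketch, assuming the definitions above are importable from
# flood_adapt.objects.forcing.forcing. DummyRainfall and "intensity"
# are hypothetical illustrations.
from flood_adapt.objects.forcing.forcing import ForcingSource, ForcingType, IRainfall


class DummyRainfall(IRainfall):
    source: ForcingSource = ForcingSource.CONSTANT
    intensity: float = 0.0  # hypothetical field


rain = DummyRainfall.load_dict({"intensity": 5.0})
assert rain.type == ForcingType.RAINFALL
assert rain.source == ForcingSource.CONSTANT

# The overridden model_dump always injects `type` and `source`,
# so a dump can be fed straight back into load_dict.
dumped = rain.model_dump()
assert DummyRainfall.load_dict(dumped).intensity == 5.0
```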
flood_adapt/objects/forcing/forcing_factory.py (+182 lines)
@@ -0,0 +1,182 @@
+from pathlib import Path
+from typing import Any, List, Type
+
+import tomli
+
+from flood_adapt.objects.forcing.discharge import (
+    DischargeConstant,
+    DischargeCSV,
+    DischargeSynthetic,
+)
+from flood_adapt.objects.forcing.forcing import (
+    ForcingSource,
+    ForcingType,
+    IForcing,
+    IForcingFactory,
+)
+from flood_adapt.objects.forcing.rainfall import (
+    RainfallConstant,
+    RainfallCSV,
+    RainfallMeteo,
+    RainfallNetCDF,
+    RainfallSynthetic,
+    RainfallTrack,
+)
+from flood_adapt.objects.forcing.waterlevels import (
+    WaterlevelCSV,
+    WaterlevelGauged,
+    WaterlevelModel,
+    WaterlevelSynthetic,
+)
+from flood_adapt.objects.forcing.wind import (
+    WindConstant,
+    WindCSV,
+    WindMeteo,
+    WindNetCDF,
+    WindSynthetic,
+    WindTrack,
+)
+
+__all__ = [
+    "ForcingFactory",
+    "ForcingSource",
+    "ForcingType",
+    "IForcing",
+    "IForcingFactory",
+    "WindConstant",
+    "WindCSV",
+    "WindMeteo",
+    "WindSynthetic",
+    "WindTrack",
+    "WaterlevelCSV",
+    "WaterlevelGauged",
+    "WaterlevelModel",
+    "WaterlevelSynthetic",
+    "RainfallConstant",
+    "RainfallCSV",
+    "RainfallMeteo",
+    "RainfallSynthetic",
+    "RainfallTrack",
+    "DischargeConstant",
+    "DischargeCSV",
+    "DischargeSynthetic",
+]
+
+
+class ForcingFactory(IForcingFactory):
+    """Factory class for creating forcings."""
+
+    WATERLEVELS: dict[ForcingSource, Type[IForcing]] = {
+        ForcingSource.MODEL: WaterlevelModel,
+        ForcingSource.CSV: WaterlevelCSV,
+        ForcingSource.SYNTHETIC: WaterlevelSynthetic,
+        ForcingSource.GAUGED: WaterlevelGauged,
+    }
+
+    WIND: dict[ForcingSource, Type[IForcing]] = {
+        ForcingSource.METEO: WindMeteo,
+        ForcingSource.TRACK: WindTrack,
+        ForcingSource.CSV: WindCSV,
+        ForcingSource.SYNTHETIC: WindSynthetic,
+        ForcingSource.CONSTANT: WindConstant,
+        ForcingSource.NETCDF: WindNetCDF,
+    }
+
+    RAINFALL: dict[ForcingSource, Type[IForcing]] = {
+        ForcingSource.METEO: RainfallMeteo,
+        ForcingSource.TRACK: RainfallTrack,
+        ForcingSource.CSV: RainfallCSV,
+        ForcingSource.SYNTHETIC: RainfallSynthetic,
+        ForcingSource.CONSTANT: RainfallConstant,
+        ForcingSource.NETCDF: RainfallNetCDF,
+    }
+
+    DISCHARGE: dict[ForcingSource, Type[IForcing]] = {
+        ForcingSource.CSV: DischargeCSV,
+        ForcingSource.SYNTHETIC: DischargeSynthetic,
+        ForcingSource.CONSTANT: DischargeConstant,
+    }
+
+    FORCINGTYPES: dict[ForcingType, dict[ForcingSource, Type[IForcing]]] = {
+        ForcingType.WATERLEVEL: WATERLEVELS,
+        ForcingType.RAINFALL: RAINFALL,
+        ForcingType.WIND: WIND,
+        ForcingType.DISCHARGE: DISCHARGE,
+    }
+
+    @classmethod
+    def read_forcing(
+        cls,
+        filepath: Path,
+    ) -> tuple[Type[IForcing], ForcingType, ForcingSource]:
+        """Extract forcing type and source from a TOML file."""
+        with open(filepath, mode="rb") as fp:
+            toml_data = tomli.load(fp)
+        type = toml_data.get("type")
+        source = toml_data.get("source")
+
+        if type is None or source is None:
+            raise ValueError(
+                f"Forcing type {type} or source {source} not found in {filepath}"
+            )
+        forcing_cls = cls.get_forcing_class(ForcingType(type), ForcingSource(source))
+        return forcing_cls, ForcingType(type), ForcingSource(source)
+
+    @classmethod
+    def get_forcing_class(
+        cls, type: ForcingType, source: ForcingSource
+    ) -> Type[IForcing]:
+        """Get the forcing class corresponding to the type and source."""
+        if (sources := cls.FORCINGTYPES.get(type)) is None:
+            raise ValueError(f"Invalid forcing type: {type}")
+
+        if (forcing_cls := sources.get(source)) is None:
+            raise ValueError(
+                f"Invalid forcing source: {source} for forcing type: {type}"
+            )
+        return forcing_cls
+
+    @classmethod
+    def load_file(cls, toml_file: Path) -> IForcing:
+        """Create a forcing object from a TOML file."""
+        with open(toml_file, mode="rb") as fp:
+            toml_data = tomli.load(fp)
+        return cls.load_dict(toml_data)
+
+    @classmethod
+    def load_dict(cls, attrs: dict[str, Any] | IForcing) -> IForcing:
+        """Create a forcing object from a dictionary of attributes."""
+        if isinstance(attrs, IForcing):
+            return attrs
+        type = attrs.get("type")
+        source = attrs.get("source")
+        if type is None or source is None:
+            raise ValueError(
+                f"Forcing type {type} or source {source} not found in attributes."
+            )
+        return cls.get_forcing_class(
+            ForcingType(type), ForcingSource(source)
+        ).model_validate(attrs)
+
+    @classmethod
+    def list_forcings(cls) -> List[Type[IForcing]]:
+        """List all available forcing classes."""
+        forcing_classes = set()
+        for source_map in cls.FORCINGTYPES.values():
+            for forcing in source_map.values():
+                if forcing is not None:
+                    forcing_classes.add(forcing)
+        return list(forcing_classes)
+
+    @classmethod
+    def list_forcing_types_and_sources(cls) -> List[tuple[ForcingType, ForcingSource]]:
+        """List all available forcing classes using a tuple of ForcingType and ForcingSource."""
+        # TODO remove this when the backend supports all forcings
+        ONLY_BACKEND_FORCINGS = {(ForcingType.WIND, ForcingSource.SYNTHETIC)}
+        combinations = set()
+        for type, source_map in cls.FORCINGTYPES.items():
+            for source in source_map.keys():
+                if (type, source) in ONLY_BACKEND_FORCINGS:
+                    continue
+                combinations.add((type, source))
+        return list(combinations)
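A short usage sketch of the factory's dispatch behaviour: a `(type, source)` pair maps to a class, unknown combinations raise `ValueError`, and `load_dict` dispatches on the `"type"` and `"source"` keys of a plain dictionary. The concrete field names of the returned classes live in the wind/rainfall/waterlevel/discharge modules added elsewhere in this release and are not shown here.

```python
# Sketch of the lookup behaviour of ForcingFactory.
from flood_adapt.objects.forcing.forcing_factory import (
    ForcingFactory,
    ForcingSource,
    ForcingType,
)

# (type, source) -> class lookup
cls_ = ForcingFactory.get_forcing_class(ForcingType.WATERLEVEL, ForcingSource.GAUGED)
print(cls_.__name__)  # WaterlevelGauged

# Unsupported combinations raise ValueError rather than returning None.
try:
    ForcingFactory.get_forcing_class(ForcingType.DISCHARGE, ForcingSource.MODEL)
except ValueError as err:
    print(err)

# Every supported (type, source) pair, excluding backend-only combinations.
for type_, source in sorted(ForcingFactory.list_forcing_types_and_sources()):
    print(type_.value, source.value)
```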
flood_adapt/objects/forcing/meteo_handler.py (+93 lines)
@@ -0,0 +1,93 @@
+from datetime import datetime
+from pathlib import Path
+from typing import Optional
+
+import cht_meteo
+import numpy as np
+import xarray as xr
+from cht_meteo.dataset import MeteoDataset
+
+from flood_adapt.config.config import Settings
+from flood_adapt.config.site import Site
+from flood_adapt.objects.forcing.time_frame import TimeFrame
+
+
+class MeteoHandler:
+    def __init__(self, dir: Optional[Path] = None, site: Optional[Site] = None) -> None:
+        self.dir: Path = dir or Settings().database_path / "static" / "meteo"
+        self.dir.mkdir(parents=True, exist_ok=True)
+        self.site: Site = site or Site.load_file(
+            Settings().database_path / "static" / "config" / "site.toml"
+        )
+        # Create GFS dataset
+        self.dataset = cht_meteo.dataset(
+            name="gfs_anl_0p50",
+            source="gfs_analysis_0p50",
+            path=self.dir,
+            lon_range=(self.site.lon - 10, self.site.lon + 10),
+            lat_range=(self.site.lat - 10, self.site.lat + 10),
+        )
+        # quick fix for sites near the 0 degree longitude -> shift the meteo download area either east or west of the 0 degree longitude
+        # TODO implement a good solution to this in cht_meteo
+        self.dataset.lon_range = self._shift_grid_to_positive_lon(self.dataset)
+
+    def download(self, time: TimeFrame):
+        # Download and collect data
+        time_range = self.get_time_range(time)
+
+        self.dataset.download(time_range=time_range)
+
+    def read(self, time: TimeFrame) -> xr.Dataset:
+        self.download(time)
+        time_range = self.get_time_range(time)
+        ds = self.dataset.collect(time_range=time_range)
+
+        if ds is None:
+            raise FileNotFoundError(
+                f"No meteo files found in meteo directory {self.dir}"
+            )
+
+        ds.raster.set_crs(4326)
+
+        # Rename the variables to match what hydromt-sfincs expects
+        ds = ds.rename(
+            {
+                "barometric_pressure": "press_msl",
+                "precipitation": "precip",
+                "wind_u": "wind10_u",
+                "wind_v": "wind10_v",
+            }
+        )
+
+        # Convert the longitude to -180 to 180 to match hydromt-sfincs
+        if ds["lon"].min() > 180:
+            ds["lon"] = ds["lon"] - 360
+
+        return ds
+
+    @staticmethod
+    def get_time_range(time: TimeFrame) -> tuple:
+        t0 = time.start_time
+        t1 = time.end_time
+        if not isinstance(t0, datetime):
+            t0 = datetime.strptime(t0, "%Y%m%d %H%M%S")
+        if not isinstance(t1, datetime):
+            t1 = datetime.strptime(t1, "%Y%m%d %H%M%S")
+        time_range = (t0, t1)
+        return time_range
+
+    @staticmethod
+    def _shift_grid_to_positive_lon(grid: MeteoDataset):
+        """Shift the grid to positive longitudes if the grid crosses the 0 degree longitude."""
+        if np.prod(grid.lon_range) < 0:
+            if np.abs(grid.lon_range[0]) > np.abs(grid.lon_range[1]):
+                grid.lon_range = [
+                    grid.lon_range[0] - grid.lon_range[1] - 1,
+                    -1,
+                ]
+            else:
+                grid.lon_range = [
+                    1,
+                    grid.lon_range[1] - grid.lon_range[0] + 1,
+                ]
+        return grid.lon_range
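To see what the "quick fix" for sites near 0 degrees longitude does in isolation, here is a small sketch that exercises `_shift_grid_to_positive_lon` with a stand-in object that only carries a `lon_range` attribute; the real argument is a `cht_meteo` `MeteoDataset`, and the numbers below are illustrative.

```python
# Isolated sketch of the 0-degree-longitude shift; the stand-in object is
# hypothetical and only mimics the lon_range attribute used by the helper.
from types import SimpleNamespace

from flood_adapt.objects.forcing.meteo_handler import MeteoHandler

# A site at lon = 3 gives a download window of (-7, 13), which straddles 0 degrees.
grid = SimpleNamespace(lon_range=(-7, 13))
print(MeteoHandler._shift_grid_to_positive_lon(grid))  # [1, 21] -> same 20-degree width

# A window lying mostly west of 0 degrees is shifted entirely to negative longitudes.
grid = SimpleNamespace(lon_range=(-13, 7))
print(MeteoHandler._shift_grid_to_positive_lon(grid))  # [-21, -1]
```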
flood_adapt/objects/forcing/netcdf.py (+40 lines)
@@ -0,0 +1,40 @@
+import numpy as np
+import pandas as pd
+import xarray as xr
+
+
+@staticmethod
+def validate_netcdf_forcing(
+    ds: xr.Dataset, required_vars: tuple[str, ...], required_coords: tuple[str, ...]
+) -> xr.Dataset:
+    """Validate a forcing dataset by checking for required variables and coordinates."""
+    # Check variables
+    _required_vars = set(required_vars)
+    if not _required_vars.issubset(ds.data_vars):
+        missing_vars = _required_vars - set(ds.data_vars)
+        raise ValueError(
+            f"Missing required variables for netcdf forcing: {missing_vars}"
+        )
+
+    # Check coordinates
+    _required_coords = set(required_coords)
+    if not _required_coords.issubset(ds.coords):
+        missing_coords = _required_coords - set(ds.coords)
+        raise ValueError(
+            f"Missing required coordinates for netcdf forcing: {missing_coords}"
+        )
+
+    # Check time step
+    ts = pd.to_timedelta(np.diff(ds.time).mean())
+    if ts < pd.to_timedelta("1H"):
+        raise ValueError(
+            f"SFINCS NetCDF forcing time step cannot be less than 1 hour: {ts}"
+        )
+
+    for var in ds.data_vars:
+        # Check order of dimensions
+        if ds[var].dims != required_coords:
+            raise ValueError(
+                f"Order of dimensions for variable {var} must be {required_coords}"
+            )
+    return ds
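Note that `validate_netcdf_forcing` carries a module-level `@staticmethod` decorator; it is presumably attached to forcing classes elsewhere, but since Python 3.10 a bare `staticmethod` object is itself callable, so a direct call also works. The sketch below builds a dataset that passes all four checks (required variables, required coordinates, a time step of at least one hour, and dimension order matching `required_coords`); the variable and coordinate names are illustrative, since the actual required names are supplied by the NetCDF forcing classes that call this helper.

```python
# Sketch of a dataset that passes validation; "precip", "time", "lat", "lon"
# are illustrative names chosen for this example.
import numpy as np
import pandas as pd
import xarray as xr

from flood_adapt.objects.forcing.netcdf import validate_netcdf_forcing

time = pd.date_range("2025-01-01", periods=6, freq="1h")  # steps of exactly 1 hour
lat = np.linspace(30.0, 31.0, 3)
lon = np.linspace(-80.0, -79.0, 4)

ds = xr.Dataset(
    {
        # Dimensions must appear in exactly the order given as required_coords.
        "precip": (("time", "lat", "lon"), np.zeros((6, 3, 4))),
    },
    coords={"time": time, "lat": lat, "lon": lon},
)

validate_netcdf_forcing(
    ds, required_vars=("precip",), required_coords=("time", "lat", "lon")
)
```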