flood-adapt 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (139)
  1. flood_adapt/__init__.py +22 -0
  2. flood_adapt/adapter/__init__.py +9 -0
  3. flood_adapt/adapter/fiat_adapter.py +1502 -0
  4. flood_adapt/adapter/interface/__init__.py +0 -0
  5. flood_adapt/adapter/interface/hazard_adapter.py +70 -0
  6. flood_adapt/adapter/interface/impact_adapter.py +36 -0
  7. flood_adapt/adapter/interface/model_adapter.py +89 -0
  8. flood_adapt/adapter/interface/offshore.py +19 -0
  9. flood_adapt/adapter/sfincs_adapter.py +1857 -0
  10. flood_adapt/adapter/sfincs_offshore.py +193 -0
  11. flood_adapt/config/__init__.py +0 -0
  12. flood_adapt/config/config.py +245 -0
  13. flood_adapt/config/fiat.py +219 -0
  14. flood_adapt/config/gui.py +224 -0
  15. flood_adapt/config/sfincs.py +336 -0
  16. flood_adapt/config/site.py +124 -0
  17. flood_adapt/database_builder/__init__.py +0 -0
  18. flood_adapt/database_builder/database_builder.py +2175 -0
  19. flood_adapt/database_builder/templates/default_units/imperial.toml +9 -0
  20. flood_adapt/database_builder/templates/default_units/metric.toml +9 -0
  21. flood_adapt/database_builder/templates/green_infra_table/green_infra_lookup_table.csv +10 -0
  22. flood_adapt/database_builder/templates/icons/black_down_48x48.png +0 -0
  23. flood_adapt/database_builder/templates/icons/black_left_48x48.png +0 -0
  24. flood_adapt/database_builder/templates/icons/black_right_48x48.png +0 -0
  25. flood_adapt/database_builder/templates/icons/black_up_48x48.png +0 -0
  26. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_down.png +0 -0
  27. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_left.png +0 -0
  28. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_right.png +0 -0
  29. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-16_white_up.png +0 -0
  30. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_down.png +0 -0
  31. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_left.png +0 -0
  32. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_right.png +0 -0
  33. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_black_up.png +0 -0
  34. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_left.png +0 -0
  35. flood_adapt/database_builder/templates/icons/icons8-triangle-arrow-24_white_right.png +0 -0
  36. flood_adapt/database_builder/templates/icons/white_down_48x48.png +0 -0
  37. flood_adapt/database_builder/templates/icons/white_left_48x48.png +0 -0
  38. flood_adapt/database_builder/templates/icons/white_right_48x48.png +0 -0
  39. flood_adapt/database_builder/templates/icons/white_up_48x48.png +0 -0
  40. flood_adapt/database_builder/templates/infographics/OSM/config_charts.toml +90 -0
  41. flood_adapt/database_builder/templates/infographics/OSM/config_people.toml +57 -0
  42. flood_adapt/database_builder/templates/infographics/OSM/config_risk_charts.toml +121 -0
  43. flood_adapt/database_builder/templates/infographics/OSM/config_roads.toml +65 -0
  44. flood_adapt/database_builder/templates/infographics/OSM/styles.css +45 -0
  45. flood_adapt/database_builder/templates/infographics/US_NSI/config_charts.toml +126 -0
  46. flood_adapt/database_builder/templates/infographics/US_NSI/config_people.toml +60 -0
  47. flood_adapt/database_builder/templates/infographics/US_NSI/config_risk_charts.toml +121 -0
  48. flood_adapt/database_builder/templates/infographics/US_NSI/config_roads.toml +65 -0
  49. flood_adapt/database_builder/templates/infographics/US_NSI/styles.css +45 -0
  50. flood_adapt/database_builder/templates/infographics/images/ambulance.png +0 -0
  51. flood_adapt/database_builder/templates/infographics/images/car.png +0 -0
  52. flood_adapt/database_builder/templates/infographics/images/cart.png +0 -0
  53. flood_adapt/database_builder/templates/infographics/images/firetruck.png +0 -0
  54. flood_adapt/database_builder/templates/infographics/images/hospital.png +0 -0
  55. flood_adapt/database_builder/templates/infographics/images/house.png +0 -0
  56. flood_adapt/database_builder/templates/infographics/images/info.png +0 -0
  57. flood_adapt/database_builder/templates/infographics/images/money.png +0 -0
  58. flood_adapt/database_builder/templates/infographics/images/person.png +0 -0
  59. flood_adapt/database_builder/templates/infographics/images/school.png +0 -0
  60. flood_adapt/database_builder/templates/infographics/images/truck.png +0 -0
  61. flood_adapt/database_builder/templates/infographics/images/walking_person.png +0 -0
  62. flood_adapt/database_builder/templates/infometrics/OSM/metrics_additional_risk_configs.toml +4 -0
  63. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config.toml +143 -0
  64. flood_adapt/database_builder/templates/infometrics/OSM/with_SVI/infographic_metrics_config_risk.toml +153 -0
  65. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config.toml +127 -0
  66. flood_adapt/database_builder/templates/infometrics/OSM/without_SVI/infographic_metrics_config_risk.toml +57 -0
  67. flood_adapt/database_builder/templates/infometrics/US_NSI/metrics_additional_risk_configs.toml +4 -0
  68. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config.toml +191 -0
  69. flood_adapt/database_builder/templates/infometrics/US_NSI/with_SVI/infographic_metrics_config_risk.toml +153 -0
  70. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config.toml +178 -0
  71. flood_adapt/database_builder/templates/infometrics/US_NSI/without_SVI/infographic_metrics_config_risk.toml +57 -0
  72. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config.toml +9 -0
  73. flood_adapt/database_builder/templates/infometrics/mandatory_metrics_config_risk.toml +65 -0
  74. flood_adapt/database_builder/templates/mapbox_layers/bin_colors.toml +5 -0
  75. flood_adapt/database_builder.py +16 -0
  76. flood_adapt/dbs_classes/__init__.py +21 -0
  77. flood_adapt/dbs_classes/database.py +716 -0
  78. flood_adapt/dbs_classes/dbs_benefit.py +97 -0
  79. flood_adapt/dbs_classes/dbs_event.py +91 -0
  80. flood_adapt/dbs_classes/dbs_measure.py +103 -0
  81. flood_adapt/dbs_classes/dbs_projection.py +52 -0
  82. flood_adapt/dbs_classes/dbs_scenario.py +150 -0
  83. flood_adapt/dbs_classes/dbs_static.py +261 -0
  84. flood_adapt/dbs_classes/dbs_strategy.py +147 -0
  85. flood_adapt/dbs_classes/dbs_template.py +302 -0
  86. flood_adapt/dbs_classes/interface/database.py +147 -0
  87. flood_adapt/dbs_classes/interface/element.py +137 -0
  88. flood_adapt/dbs_classes/interface/static.py +47 -0
  89. flood_adapt/flood_adapt.py +1371 -0
  90. flood_adapt/misc/__init__.py +0 -0
  91. flood_adapt/misc/database_user.py +16 -0
  92. flood_adapt/misc/log.py +183 -0
  93. flood_adapt/misc/path_builder.py +54 -0
  94. flood_adapt/misc/utils.py +185 -0
  95. flood_adapt/objects/__init__.py +59 -0
  96. flood_adapt/objects/benefits/__init__.py +0 -0
  97. flood_adapt/objects/benefits/benefits.py +61 -0
  98. flood_adapt/objects/events/__init__.py +0 -0
  99. flood_adapt/objects/events/event_factory.py +135 -0
  100. flood_adapt/objects/events/event_set.py +84 -0
  101. flood_adapt/objects/events/events.py +221 -0
  102. flood_adapt/objects/events/historical.py +55 -0
  103. flood_adapt/objects/events/hurricane.py +64 -0
  104. flood_adapt/objects/events/synthetic.py +48 -0
  105. flood_adapt/objects/forcing/__init__.py +0 -0
  106. flood_adapt/objects/forcing/csv.py +68 -0
  107. flood_adapt/objects/forcing/discharge.py +66 -0
  108. flood_adapt/objects/forcing/forcing.py +142 -0
  109. flood_adapt/objects/forcing/forcing_factory.py +182 -0
  110. flood_adapt/objects/forcing/meteo_handler.py +93 -0
  111. flood_adapt/objects/forcing/netcdf.py +40 -0
  112. flood_adapt/objects/forcing/plotting.py +428 -0
  113. flood_adapt/objects/forcing/rainfall.py +98 -0
  114. flood_adapt/objects/forcing/tide_gauge.py +191 -0
  115. flood_adapt/objects/forcing/time_frame.py +77 -0
  116. flood_adapt/objects/forcing/timeseries.py +552 -0
  117. flood_adapt/objects/forcing/unit_system.py +580 -0
  118. flood_adapt/objects/forcing/waterlevels.py +108 -0
  119. flood_adapt/objects/forcing/wind.py +124 -0
  120. flood_adapt/objects/measures/__init__.py +0 -0
  121. flood_adapt/objects/measures/measure_factory.py +92 -0
  122. flood_adapt/objects/measures/measures.py +506 -0
  123. flood_adapt/objects/object_model.py +68 -0
  124. flood_adapt/objects/projections/__init__.py +0 -0
  125. flood_adapt/objects/projections/projections.py +89 -0
  126. flood_adapt/objects/scenarios/__init__.py +0 -0
  127. flood_adapt/objects/scenarios/scenarios.py +22 -0
  128. flood_adapt/objects/strategies/__init__.py +0 -0
  129. flood_adapt/objects/strategies/strategies.py +68 -0
  130. flood_adapt/workflows/__init__.py +0 -0
  131. flood_adapt/workflows/benefit_runner.py +541 -0
  132. flood_adapt/workflows/floodmap.py +85 -0
  133. flood_adapt/workflows/impacts_integrator.py +82 -0
  134. flood_adapt/workflows/scenario_runner.py +69 -0
  135. flood_adapt-0.3.0.dist-info/LICENSE +21 -0
  136. flood_adapt-0.3.0.dist-info/METADATA +183 -0
  137. flood_adapt-0.3.0.dist-info/RECORD +139 -0
  138. flood_adapt-0.3.0.dist-info/WHEEL +5 -0
  139. flood_adapt-0.3.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,84 @@
1
+ import os
2
+ from pathlib import Path
3
+ from typing import List, Optional
4
+
5
+ import tomli
6
+ from pydantic import BaseModel
7
+
8
+ from flood_adapt.objects.events.events import Event, Mode
9
+ from flood_adapt.objects.object_model import Object
10
+
11
+
12
class SubEventModel(BaseModel):
    """The accepted input for a sub event in FloodAdapt.

    Attributes
    ----------
    name : str
        The name of the sub event.
    frequency : float
        The frequency of the sub event.
    """

    # Name of the sub event; EventSet.load_sub_events resolves the sub event
    # toml as `<name>/<name>.toml` next to the event set file.
    name: str
    # Frequency of the sub event — presumably an annual occurrence
    # probability used in the risk assessment; confirm against the risk
    # workflow before relying on units.
    frequency: float
25
+
26
+
27
class EventSet(Object):
    """BaseModel describing the expected variables and data types for parameters of EventSet.

    An EventSet is a collection of events that can be used to create a scenario and perform a probabilistic risk assessment.

    Attributes
    ----------
    name : str
        The name of the event.
    description : str, default=""
        The description of the event.
    mode : Mode, default=Mode.risk
        The mode of the event.
    sub_events : List[SubEventModel]
        The sub events of the event set.
    """

    # Cache of the fully-loaded sub event objects. Stays None until
    # `load_sub_events` is called (pydantic treats the leading underscore as
    # a private attribute, so it is not part of the serialized model).
    _events: Optional[List[Event]] = None

    mode: Mode = Mode.risk
    sub_events: List[SubEventModel]

    def load_sub_events(
        self,
        sub_events: Optional[List[Event]] = None,
        file_path: Optional[Path] = None,
    ) -> None:
        """Load sub events from a list or from a file path.

        Parameters
        ----------
        sub_events : Optional[List[Event]]
            Already-constructed events; stored as-is when given.
        file_path : Optional[Path]
            Path to the event set toml file. Each sub event listed in
            `self.sub_events` is then read from
            `<parent dir>/<sub event name>/<sub event name>.toml`.

        Raises
        ------
        ValueError
            If neither `sub_events` nor `file_path` is provided.
        """
        if sub_events is not None:
            self._events = sub_events
        elif file_path is not None:
            # Imported locally — presumably to avoid a circular import with
            # the factory module; confirm before moving to module level.
            from flood_adapt.objects.events.event_factory import EventFactory

            sub_events = []
            for sub_event in self.sub_events:
                sub_toml = (
                    Path(file_path).parent / sub_event.name / f"{sub_event.name}.toml"
                )
                sub_events.append(EventFactory.load_file(sub_toml))

            self._events = sub_events
        else:
            raise ValueError("Either `sub_events` or `file_path` must be provided.")

    @classmethod
    def load_file(cls, file_path: Path | str | os.PathLike) -> "EventSet":
        """Load object from file and eagerly load its sub events from disk."""
        with open(file_path, mode="rb") as fp:
            event_set = tomli.load(fp)
        # NOTE(review): constructs EventSet explicitly rather than cls(...),
        # so a subclass calling load_file would still get an EventSet —
        # confirm this is intended.
        event_set = EventSet(**event_set)
        event_set.load_sub_events(file_path=file_path)
        return event_set

    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
        """Save each loaded sub event to `<output_dir>/<name>/<name>.toml`.

        NOTE(review): assumes `load_sub_events` was called first; `_events`
        is None otherwise and iterating it raises TypeError.
        """
        for sub_event in self._events:
            sub_dir = Path(output_dir) / sub_event.name
            sub_dir.mkdir(parents=True, exist_ok=True)
            sub_event.save(sub_dir / f"{sub_event.name}.toml")
@@ -0,0 +1,221 @@
1
+ import os
2
+ from enum import Enum
3
+ from pathlib import Path
4
+ from typing import Any, ClassVar, List, Optional, Protocol, runtime_checkable
5
+
6
+ import tomli
7
+ from pydantic import (
8
+ Field,
9
+ field_serializer,
10
+ field_validator,
11
+ model_validator,
12
+ )
13
+
14
+ from flood_adapt.config.config import Settings
15
+ from flood_adapt.objects.forcing.forcing import (
16
+ ForcingSource,
17
+ ForcingType,
18
+ IForcing,
19
+ )
20
+ from flood_adapt.objects.forcing.forcing_factory import ForcingFactory
21
+ from flood_adapt.objects.forcing.time_frame import TimeFrame
22
+ from flood_adapt.objects.object_model import Object
23
+
24
+
25
class Mode(str, Enum):
    """Class describing the accepted input for the variable mode in Event."""

    # A single deterministic event.
    single_event = "single_event"
    # A collection of events combined into a probabilistic risk assessment
    # (the default mode of an EventSet).
    risk = "risk"
30
+
31
+
32
class Template(str, Enum):
    """Class describing the accepted input for the variable template in Event.

    Each member also exposes a human-readable ``description`` suitable for
    display in a GUI.
    """

    Synthetic = "Synthetic"
    Hurricane = "Hurricane"
    Historical = "Historical"

    @property
    def description(self) -> str:
        """Return the user-facing description text for this template."""
        if self is Template.Historical:
            return "Select a time period for a historic event. This method can use offshore wind and pressure fields for the selected time period to simulate nearshore water levels or download gauged waterlevels to perform a realistic simulation. These water levels are used together with rainfall and river discharge input to simulate flooding in the site area."
        if self is Template.Hurricane:
            return "Select a historical hurricane track from the hurricane database, and shift the track if desired."
        if self is Template.Synthetic:
            return "Customize a synthetic event by specifying the waterlevels, wind, rainfall and river discharges without being based on a historical event."
        # Unreachable for the members defined above; guards future additions.
        raise ValueError(f"Invalid event template: {self}")
50
+
51
+
52
@runtime_checkable
class PathBasedForcing(Protocol):
    """Protocol for forcing classes that have a path attribute.

    Performing an isinstance check on this class will return True if the class has a path attribute (even if it is None).
    """

    # Path to the file backing the forcing data (e.g. a CSV file, as in
    # DischargeCSV). Runtime protocol checks only verify the attribute
    # exists, not its type.
    path: Path
60
+
61
+
62
class Event(Object):
    """The accepted input for an event in FloodAdapt.

    Attributes
    ----------
    name : str
        The name of the event.
    description : str, default=""
        The description of the event.
    time : TimeFrame
        The time frame of the event.
    template : Template
        The template of the event.
    mode : Mode
        The mode of the event.
    rainfall_multiplier : float
        The rainfall multiplier of the event.
    forcings : dict[ForcingType, list[IForcing]]
        The forcings of the event.
    """

    # Mapping of forcing type -> allowed sources. Each concrete subclass
    # (Synthetic/Hurricane/Historical) must define this; it is enforced by
    # `validate_forcings` below.
    ALLOWED_FORCINGS: ClassVar[dict[ForcingType, List[ForcingSource]]]

    time: TimeFrame
    template: Template
    mode: Mode = Mode.single_event

    forcings: dict[ForcingType, list[IForcing]] = Field(default_factory=dict)
    # Scales rainfall; must be non-negative (ge=0).
    rainfall_multiplier: float = Field(default=1.0, ge=0)

    @classmethod
    def get_allowed_forcings(cls) -> dict[str, List[str]]:
        """Return ALLOWED_FORCINGS with enum members replaced by their string values."""
        return {k.value: [s.value for s in v] for k, v in cls.ALLOWED_FORCINGS.items()}

    def get_forcings(self) -> list[IForcing]:
        """Return a list of all forcings in the event."""
        return [forcing for forcings in self.forcings.values() for forcing in forcings]

    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
        """Save any additional files associated with the event."""
        for forcing in self.get_forcings():
            forcing.save_additional(output_dir)

    @classmethod
    def load_file(cls, file_path: Path | str | os.PathLike) -> "Event":
        """Load object from file.

        Parameters
        ----------
        file_path : Path | str | os.PathLike
            Path to the file to load.

        Raises
        ------
        FileNotFoundError
            If a path-based forcing references a file that exists neither at
            its stored location nor next to `file_path`.
        """
        with open(file_path, mode="rb") as fp:
            toml = tomli.load(fp)

        event = cls.model_validate(toml)

        # Update all forcings with paths to absolute paths
        for forcing in event.get_forcings():
            if isinstance(forcing, PathBasedForcing):
                if forcing.path.exists():
                    # Stored path is already valid; nothing to resolve.
                    continue
                elif forcing.path == Path(forcing.path.name):
                    # Stored path is a bare filename: convert the relative
                    # path to an absolute one next to the event toml.
                    in_dir = Path(file_path).parent / forcing.path.name
                    if not in_dir.exists():
                        raise FileNotFoundError(
                            f"Failed to load Event. File {forcing.path} does not exist in {in_dir.parent}."
                        )
                    forcing.path = in_dir
                else:
                    # Multi-part or absolute path that does not exist: fail.
                    raise FileNotFoundError(
                        f"Failed to load Event. File {forcing.path} does not exist."
                    )

        return event

    @staticmethod
    def _parse_forcing_from_dict(
        forcing_attrs: dict[str, Any] | IForcing,
        ftype: Optional[ForcingType] = None,
        fsource: Optional[ForcingSource] = None,
    ) -> IForcing:
        """Coerce `forcing_attrs` into an IForcing instance.

        Parameters
        ----------
        forcing_attrs : dict[str, Any] | IForcing
            Either a ready-made forcing object (returned unchanged) or a
            dict of attributes handed to the forcing factory.
        ftype : Optional[ForcingType]
            Fallback for a missing `type` key in the dict.
        fsource : Optional[ForcingSource]
            Fallback for a missing `source` key in the dict.

        Raises
        ------
        ValueError
            If `forcing_attrs` is neither an IForcing nor a dict.
        """
        if isinstance(forcing_attrs, IForcing):
            # forcing_attrs is already a forcing object
            return forcing_attrs
        elif isinstance(forcing_attrs, dict):
            # forcing_attrs is a dict with valid forcing attributes
            if "type" not in forcing_attrs and ftype:
                forcing_attrs["type"] = ftype
            if "source" not in forcing_attrs and fsource:
                forcing_attrs["source"] = fsource

            return ForcingFactory.load_dict(forcing_attrs)
        else:
            raise ValueError(
                f"Invalid forcing attributes: {forcing_attrs}. "
                "Forcings must be one of:\n"
                "1. Instance of IForcing\n"
                "2. dict with the keys `type` (ForcingType), `source` (ForcingSource) specifying the class, and with valid forcing attributes for that class."
            )

    @field_validator("forcings", mode="before")
    @classmethod
    def create_forcings(
        cls, value: dict[str, list[dict[str, Any]]]
    ) -> dict[ForcingType, list[IForcing]]:
        """Pydantic pre-validator: build concrete IForcing objects from raw input dicts."""
        forcings = {}
        for ftype, forcing_list in value.items():
            # Keys may arrive as plain strings (e.g. from toml); normalize.
            ftype = ForcingType(ftype)
            forcings[ftype] = [
                cls._parse_forcing_from_dict(forcing, ftype) for forcing in forcing_list
            ]
        return forcings

    @model_validator(mode="after")
    def validate_forcings(self):
        """Pydantic post-validator: check every forcing against ALLOWED_FORCINGS.

        Skipped entirely when `Settings().validate_allowed_forcings` is False.
        """
        def validate_concrete_forcing(concrete_forcing):
            # NOTE: `type` shadows the builtin within this nested scope.
            type = concrete_forcing.type
            source = concrete_forcing.source

            # Check type
            if type not in self.__class__.ALLOWED_FORCINGS:
                allowed_types = ", ".join(
                    t.value for t in self.__class__.ALLOWED_FORCINGS.keys()
                )
                raise ValueError(
                    f"Forcing type {type.value} is not allowed. Allowed types are: {allowed_types}"
                )

            # Check source
            if source not in self.__class__.ALLOWED_FORCINGS[type]:
                allowed_sources = ", ".join(
                    s.value for s in self.__class__.ALLOWED_FORCINGS[type]
                )
                raise ValueError(
                    f"Forcing source {source.value} is not allowed for forcing type {type.value}. "
                    f"Allowed sources are: {allowed_sources}"
                )

        if Settings().validate_allowed_forcings:
            # Validate forcings
            for _, concrete_forcings in self.forcings.items():
                for concrete_forcing in concrete_forcings:
                    validate_concrete_forcing(concrete_forcing)

        return self

    @field_serializer("forcings")
    @classmethod
    def serialize_forcings(
        cls, value: dict[ForcingType, List[IForcing]]
    ) -> dict[str, List[dict[str, Any]]]:
        """Serialize forcings to plain dicts keyed by the forcing type's string value."""
        dct = {}
        for ftype, forcing_list in value.items():
            dct[ftype.value] = [
                forcing.model_dump(exclude_none=True) for forcing in forcing_list
            ]
        return dct
@@ -0,0 +1,55 @@
1
+ from typing import ClassVar, List
2
+
3
+ from flood_adapt.objects.events.events import Event, Template
4
+ from flood_adapt.objects.forcing.forcing import (
5
+ ForcingSource,
6
+ ForcingType,
7
+ )
8
+
9
+
10
class HistoricalEvent(Event):
    """BaseModel describing the expected variables and data types for parameters of HistoricalEvent that extend the parent class Event.

    Attributes
    ----------
    name : str
        The name of the event.
    description : str, default=""
        The description of the event.
    time : TimeFrame
        The time frame of the event.
    template : Template, default=Template.Historical
        The template of the event.
    mode : Mode, default=Mode.single_event
        The mode of the event.
    rainfall_multiplier : float, default=1.0
        The rainfall multiplier of the event.
    forcings : dict[ForcingType, list[IForcing]]
        The forcings of the event.
    """

    # Sources accepted per forcing type for historical events; enforced by
    # Event.validate_forcings. Notably WATERLEVEL additionally allows
    # MODEL and GAUGED sources.
    ALLOWED_FORCINGS: ClassVar[dict[ForcingType, List[ForcingSource]]] = {
        ForcingType.RAINFALL: [
            ForcingSource.CSV,
            ForcingSource.METEO,
            ForcingSource.SYNTHETIC,
            ForcingSource.CONSTANT,
        ],
        ForcingType.WIND: [
            ForcingSource.CSV,
            ForcingSource.METEO,
            ForcingSource.CONSTANT,
        ],
        ForcingType.WATERLEVEL: [
            ForcingSource.MODEL,
            ForcingSource.CSV,
            ForcingSource.SYNTHETIC,
            ForcingSource.GAUGED,
        ],
        ForcingType.DISCHARGE: [
            ForcingSource.CSV,
            ForcingSource.SYNTHETIC,
            ForcingSource.CONSTANT,
        ],
    }
    template: Template = Template.Historical
@@ -0,0 +1,64 @@
1
+ from typing import ClassVar, List
2
+
3
+ from pydantic import BaseModel
4
+
5
+ from flood_adapt.objects.events.events import Event, Template
6
+ from flood_adapt.objects.forcing import unit_system as us
7
+ from flood_adapt.objects.forcing.forcing import (
8
+ ForcingSource,
9
+ ForcingType,
10
+ )
11
+
12
+
13
class TranslationModel(BaseModel):
    """BaseModel describing the expected variables and data types for translation parameters of hurricane model."""

    # East-west shift applied to the hurricane track; defaults to no shift.
    # Sign convention (positive = east?) is not visible here — confirm
    # against the track translation code.
    eastwest_translation: us.UnitfulLength = us.UnitfulLength(
        value=0.0, units=us.UnitTypesLength.meters
    )
    # North-south shift applied to the hurricane track; defaults to no shift.
    northsouth_translation: us.UnitfulLength = us.UnitfulLength(
        value=0.0, units=us.UnitTypesLength.meters
    )
22
+
23
+
24
class HurricaneEvent(Event):
    """BaseModel describing the expected variables and data types for parameters of HurricaneEvent that extend the parent class Event.

    Attributes
    ----------
    name : str
        The name of the event.
    description : str, default=""
        The description of the event.
    time : TimeFrame
        The time frame of the event.
    template : Template, default=Template.Hurricane
        The template of the event.
    mode : Mode, default=Mode.single_event
        The mode of the event.
    rainfall_multiplier : float, default=1.0
        The rainfall multiplier of the event.
    forcings : dict[ForcingType, list[IForcing]]
        The forcings of the event.
    track_name : str
        The name of the hurricane track.
    """

    # Sources accepted per forcing type for hurricane events; enforced by
    # Event.validate_forcings. Wind is TRACK-only and water level MODEL-only.
    ALLOWED_FORCINGS: ClassVar[dict[ForcingType, List[ForcingSource]]] = {
        ForcingType.RAINFALL: [
            ForcingSource.CONSTANT,
            ForcingSource.CSV,
            ForcingSource.SYNTHETIC,
            ForcingSource.TRACK,
        ],
        ForcingType.WIND: [ForcingSource.TRACK],
        ForcingType.WATERLEVEL: [ForcingSource.MODEL],
        ForcingType.DISCHARGE: [
            ForcingSource.CSV,
            ForcingSource.SYNTHETIC,
            ForcingSource.CONSTANT,
        ],
    }
    template: Template = Template.Hurricane
    # Optional east-west / north-south shift of the track (defaults to none).
    hurricane_translation: TranslationModel = TranslationModel()
    # Identifier of the hurricane track; required (no default).
    track_name: str
@@ -0,0 +1,48 @@
1
+ from typing import ClassVar, List
2
+
3
+ from flood_adapt.objects.events.events import (
4
+ Event,
5
+ ForcingSource,
6
+ ForcingType,
7
+ Template,
8
+ )
9
+
10
+
11
class SyntheticEvent(Event):
    """BaseModel describing the expected variables and data types for parameters of Synthetic that extend the parent class Event.

    Attributes
    ----------
    time : TimeFrame
        The time frame of the event.
    template : Template, default=Template.Synthetic
        The template of the event.
    mode : Mode, default=Mode.single_event
        The mode of the event.
    rainfall_multiplier : float, default=1.0
        The rainfall multiplier of the event.
    forcings : dict[ForcingType, list[IForcing]]
        The forcings of the event.
    """

    # Sources accepted per forcing type for synthetic events; enforced by
    # Event.validate_forcings. No METEO/MODEL/GAUGED/TRACK sources here —
    # synthetic events are not based on historical data.
    ALLOWED_FORCINGS: ClassVar[dict[ForcingType, List[ForcingSource]]] = {
        ForcingType.RAINFALL: [
            ForcingSource.CONSTANT,
            ForcingSource.CSV,
            ForcingSource.SYNTHETIC,
        ],
        ForcingType.WIND: [
            ForcingSource.CSV,
            ForcingSource.CONSTANT,
        ],
        ForcingType.WATERLEVEL: [
            ForcingSource.CSV,
            ForcingSource.SYNTHETIC,
        ],
        ForcingType.DISCHARGE: [
            ForcingSource.CSV,
            ForcingSource.SYNTHETIC,
            ForcingSource.CONSTANT,
        ],
    }
    template: Template = Template.Synthetic
File without changes
@@ -0,0 +1,68 @@
1
+ import csv
2
+ from pathlib import Path
3
+
4
+ import pandas as pd
5
+
6
+
7
def read_csv(csvpath: Path) -> pd.DataFrame:
    """Read a timeseries file and return a pd.DataFrame.

    Parameters
    ----------
    csvpath : Path
        Path to the CSV file. The first column is the time index; the
        remaining column(s) are float data and are renamed `data_0..data_n`.

    Returns
    -------
    pd.DataFrame
        Dataframe with time as index and (a) data column(s). Rows whose
        timestamp cannot be parsed are dropped; the index frequency is
        inferred when at least 3 timestamps remain.

    Raises
    ------
    ValueError
        If the file is empty, the header/column layout cannot be
        determined, or there is no data column.
    """
    num_columns = None
    has_header = None
    with open(csvpath, "r") as f:
        try:
            # read the first 1024 bytes to determine if there is a header
            has_header = csv.Sniffer().has_header(f.read(1024))
        except csv.Error:
            # Sniffer raises on e.g. empty input; assume no header then.
            has_header = False
        f.seek(0)
        reader = csv.reader(f, delimiter=",")
        try:
            first_row = next(reader)
            num_columns = len(first_row) - 1  # subtract 1 for the index column
        except StopIteration:
            raise ValueError(f"The CSV file is empty: {csvpath}.")

    if has_header is None:
        raise ValueError(
            f"Could not determine if the CSV file has a header: {csvpath}."
        )
    if num_columns is None:
        raise ValueError(
            f"Could not determine the number of columns in the CSV file: {csvpath}."
        )
    elif num_columns < 1:
        raise ValueError(f"CSV file must have at least one data column: {csvpath}.")
    columns = [f"data_{i}" for i in range(num_columns)]
    dtype = {name: float for name in columns}

    # NOTE: `infer_datetime_format` is intentionally not passed; it has been
    # a deprecated no-op since pandas 2.0 (strict parsing is the default).
    df = pd.read_csv(
        csvpath,
        index_col=0,
        names=columns,
        header=0 if has_header else None,
        parse_dates=True,
        dtype=dtype,
    )

    # Any index that cannot be converted to datetime will be NaT
    df.index = pd.to_datetime(df.index, errors="coerce")
    df.index.names = ["time"]

    # Drop rows where the index is NaT BEFORE inferring the frequency:
    # the boolean mask creates a fresh index, so a freq assigned earlier
    # would be discarded (and NaT values would break freq inference).
    df = df[~df.index.isna()]

    # infer_freq needs at least 3 timestamps.
    if len(df.index) > 2:
        df.index.freq = pd.infer_freq(df.index)

    return df
@@ -0,0 +1,66 @@
1
+ import os
2
+ from pathlib import Path
3
+ from typing import Annotated
4
+
5
+ import pandas as pd
6
+
7
+ from flood_adapt.misc.utils import (
8
+ copy_file_to_output_dir,
9
+ validate_file_extension,
10
+ )
11
+ from flood_adapt.objects.forcing import unit_system as us
12
+ from flood_adapt.objects.forcing.forcing import (
13
+ ForcingSource,
14
+ IDischarge,
15
+ )
16
+ from flood_adapt.objects.forcing.time_frame import TimeFrame
17
+ from flood_adapt.objects.forcing.timeseries import (
18
+ CSVTimeseries,
19
+ SyntheticTimeseries,
20
+ TimeseriesFactory,
21
+ )
22
+
23
+
24
class DischargeConstant(IDischarge):
    """River discharge forcing with a single constant value over the event."""

    source: ForcingSource = ForcingSource.CONSTANT

    discharge: us.UnitfulDischarge

    def to_dataframe(self, time_frame: TimeFrame) -> pd.DataFrame:
        """Sample the constant discharge at every step of `time_frame`.

        Returns a dataframe indexed by time with one column named after
        the river.
        """
        index = pd.date_range(
            time_frame.start_time,
            time_frame.end_time,
            freq=time_frame.time_step,
            name="time",
        )
        values = [self.discharge.value] * len(index)
        return pd.DataFrame(values, index=index, columns=[self.river.name])
38
+
39
+
40
class DischargeSynthetic(IDischarge):
    # River discharge forcing defined by a synthetic (parameterized) timeseries.
    source: ForcingSource = ForcingSource.SYNTHETIC

    timeseries: SyntheticTimeseries

    def to_dataframe(self, time_frame: TimeFrame) -> pd.DataFrame:
        """Render the synthetic timeseries over `time_frame`.

        The single data column is renamed to the river's name so it can be
        matched to the model's river boundary.
        """
        df = TimeseriesFactory.from_object(self.timeseries).to_dataframe(
            time_frame=time_frame
        )
        df.columns = [self.river.name]
        return df
51
+
52
+
53
class DischargeCSV(IDischarge):
    # River discharge forcing read from a user-supplied CSV timeseries file.
    source: ForcingSource = ForcingSource.CSV

    # Path to the timeseries file; the annotation validates a ".csv" extension.
    path: Annotated[Path, validate_file_extension([".csv"])]

    # Units the values in the CSV are expressed in (default: cubic meters/s).
    units: us.UnitTypesDischarge = us.UnitTypesDischarge.cms

    def to_dataframe(self, time_frame: TimeFrame) -> pd.DataFrame:
        """Load the CSV and return its data for the given time frame.

        The zero-valued UnitfulDischarge only carries the units; the actual
        values come from the file.
        """
        return CSVTimeseries.load_file(
            path=self.path, units=us.UnitfulDischarge(value=0, units=self.units)
        ).to_dataframe(time_frame=time_frame)

    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
        """Copy the CSV into `output_dir` and point `self.path` at the new location."""
        self.path = copy_file_to_output_dir(self.path, Path(output_dir))