flood-adapt 0.3.10-py3-none-any.whl → 0.3.12-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- flood_adapt/__init__.py +3 -1
- flood_adapt/adapter/fiat_adapter.py +35 -9
- flood_adapt/adapter/sfincs_adapter.py +192 -93
- flood_adapt/adapter/sfincs_offshore.py +1 -7
- flood_adapt/config/gui.py +1 -0
- flood_adapt/database_builder/database_builder.py +316 -223
- flood_adapt/dbs_classes/database.py +100 -3
- flood_adapt/dbs_classes/dbs_benefit.py +1 -0
- flood_adapt/dbs_classes/dbs_event.py +1 -0
- flood_adapt/dbs_classes/dbs_measure.py +1 -0
- flood_adapt/dbs_classes/dbs_projection.py +1 -0
- flood_adapt/dbs_classes/dbs_scenario.py +1 -0
- flood_adapt/dbs_classes/dbs_strategy.py +1 -0
- flood_adapt/dbs_classes/dbs_template.py +2 -1
- flood_adapt/flood_adapt.py +23 -1
- flood_adapt/misc/log.py +20 -12
- flood_adapt/objects/events/events.py +5 -3
- flood_adapt/objects/events/historical.py +3 -3
- flood_adapt/objects/events/hurricane.py +1 -1
- flood_adapt/objects/forcing/plotting.py +7 -34
- flood_adapt/objects/measures/measures.py +88 -66
- {flood_adapt-0.3.10.dist-info → flood_adapt-0.3.12.dist-info}/METADATA +2 -1
- {flood_adapt-0.3.10.dist-info → flood_adapt-0.3.12.dist-info}/RECORD +26 -26
- {flood_adapt-0.3.10.dist-info → flood_adapt-0.3.12.dist-info}/LICENSE +0 -0
- {flood_adapt-0.3.10.dist-info → flood_adapt-0.3.12.dist-info}/WHEEL +0 -0
- {flood_adapt-0.3.10.dist-info → flood_adapt-0.3.12.dist-info}/top_level.txt +0 -0
flood_adapt/dbs_classes/database.py
CHANGED

```diff
@@ -28,6 +28,7 @@ from flood_adapt.misc.path_builder import (
     db_path,
 )
 from flood_adapt.misc.utils import finished_file_exists
+from flood_adapt.objects.events.events import Mode
 from flood_adapt.objects.forcing import unit_system as us
 from flood_adapt.workflows.scenario_runner import ScenarioRunner
 
```
```diff
@@ -129,12 +130,11 @@ class Database(IDatabase):
             self, standard_objects=self.site.standard_objects.projections
         )
         self._benefits = DbsBenefit(self)
+        self._init_done = True
 
-        # Delete any unfinished/crashed scenario output
+        # Delete any unfinished/crashed scenario output after initialization
         self.cleanup()
 
-        self._init_done = True
-
     def shutdown(self):
         """Explicitly shut down the singleton and clear all references."""
         import gc
```
```diff
@@ -285,6 +285,44 @@ class Database(IDatabase):
         zsmax = ds["risk_map"][:, :].to_numpy().T
         return zsmax
 
+    def get_flood_map_geotiff(
+        self,
+        scenario_name: str,
+        return_period: Optional[int] = None,
+    ) -> Optional[Path]:
+        """Return the path to the geotiff file with the flood map for the given scenario.
+
+        Parameters
+        ----------
+        scenario_name : str
+            name of scenario
+        return_period : int, optional
+            return period in years, by default None. Only for risk scenarios.
+
+        Returns
+        -------
+        Optional[Path]
+            path to the flood map geotiff file, or None if it does not exist
+        """
+        if not return_period:
+            file_path = self.scenarios.output_path.joinpath(
+                scenario_name,
+                "Flooding",
+                f"FloodMap_{scenario_name}.tif",
+            )
+        else:
+            file_path = self.scenarios.output_path.joinpath(
+                scenario_name,
+                "Flooding",
+                f"RP_{return_period:04d}_maps.tif",
+            )
+        if not file_path.is_file():
+            self.logger.warning(
+                f"Flood map for scenario '{scenario_name}' at {file_path} does not exist."
+            )
+            return None
+        return file_path
+
     def get_building_footprints(self, scenario_name: str) -> GeoDataFrame:
         """Return a geodataframe of the impacts at the footprint level.
 
```
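The new lookup encodes a fixed naming convention under each scenario's `Flooding` output folder. A minimal standalone sketch of that rule, with a hypothetical base path:

```python
from pathlib import Path
from typing import Optional


def flood_map_path(
    output_path: Path, scenario: str, return_period: Optional[int] = None
) -> Path:
    # Event scenarios produce a single FloodMap_<scenario>.tif; risk scenarios
    # produce one RP_<rp, zero-padded to four digits>_maps.tif per return period.
    if return_period is None:
        name = f"FloodMap_{scenario}.tif"
    else:
        name = f"RP_{return_period:04d}_maps.tif"
    return output_path / scenario / "Flooding" / name


print(flood_map_path(Path("output/scenarios"), "king_tide"))
# output/scenarios/king_tide/Flooding/FloodMap_king_tide.tif
print(flood_map_path(Path("output/scenarios"), "risk_run", 100))
# output/scenarios/risk_run/Flooding/RP_0100_maps.tif
```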
```diff
@@ -471,6 +509,9 @@ class Database(IDatabase):
             (self.scenarios.output_path / dir).resolve()
             for dir in os.listdir(self.scenarios.output_path)
         ]
+        self.logger.info(
+            f"Cleaning up scenario outputs: {len(output_scenarios)} scenarios found."
+        )
 
         def _call_garbage_collector(func, path, exc_info, retries=5, delay=0.1):
             """Retry deletion up to 5 times if the file is locked."""
```
```diff
@@ -493,3 +534,59 @@ class Database(IDatabase):
                 path.name for path in input_scenarios
             ] or not finished_file_exists(dir):
                 shutil.rmtree(dir, onerror=_call_garbage_collector)
+            # If the scenario is finished, delete the simulation folders
+            elif finished_file_exists(dir):
+                self._delete_simulations(dir.name)
+
+    def _delete_simulations(self, scenario_name: str) -> None:
+        """Delete all simulation folders for a given scenario.
+
+        Parameters
+        ----------
+        scenario_name : str
+            Name of the scenario to delete simulations for.
+        """
+        scn = self.scenarios.get(scenario_name)
+        event = self.events.get(scn.event, load_all=True)
+        sub_events = event._events if event.mode == Mode.risk else None
+
+        if not self.site.sfincs.config.save_simulation:
+            # Delete SFINCS overland
+            overland = self.static.get_overland_sfincs_model()
+            if sub_events:
+                for sub_event in sub_events:
+                    overland._delete_simulation_folder(scn, sub_event=sub_event)
+
+            else:
+                overland._delete_simulation_folder(scn)
+
+            # Delete SFINCS offshore
+            if self.site.sfincs.config.offshore_model:
+                offshore = self.static.get_offshore_sfincs_model()
+                if sub_events:
+                    for sub_event in sub_events:
+                        sim_path = offshore._get_simulation_path_offshore(
+                            scn, sub_event=sub_event
+                        )
+                        if sim_path.exists():
+                            shutil.rmtree(sim_path, ignore_errors=True)
+                            self.logger.info(f"Deleted simulation folder: {sim_path}")
+                        if sim_path.parent.exists() and not any(
+                            sim_path.parent.iterdir()
+                        ):
+                            # Remove the parent directory `simulations` if it is empty
+                            sim_path.parent.rmdir()
+                else:
+                    sim_path = offshore._get_simulation_path_offshore(scn)
+                    if sim_path.exists():
+                        shutil.rmtree(sim_path, ignore_errors=True)
+                        self.logger.info(f"Deleted simulation folder: {sim_path}")
+
+                    if sim_path.parent.exists() and not any(sim_path.parent.iterdir()):
+                        # Remove the parent directory `simulations` if it is empty
+                        sim_path.parent.rmdir()
+
+        if not self.site.fiat.config.save_simulation:
+            # Delete FIAT
+            fiat = self.static.get_fiat_model()
+            fiat._delete_simulation_folder(scn)
```
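Taken together with the `cleanup` hunk above, the decision per output folder is: unknown or unfinished runs are removed wholesale, while finished runs keep their results but lose their simulation folders. A simplified standalone sketch of that flow; `finished_file_exists` is stubbed here as an assumption (the real helper lives in `flood_adapt.misc.utils`), and the folder layout is illustrative:

```python
import shutil
from pathlib import Path


def finished_file_exists(path: Path) -> bool:
    # Stand-in for flood_adapt.misc.utils.finished_file_exists: assumes a
    # marker file that the runner writes when a scenario completes.
    return (path / "finished.txt").exists()


def cleanup_outputs(output_dir: Path, known_scenarios: set[str]) -> None:
    for scenario_dir in output_dir.iterdir():
        if scenario_dir.name not in known_scenarios or not finished_file_exists(
            scenario_dir
        ):
            # Crashed or orphaned run: delete the whole output folder.
            shutil.rmtree(scenario_dir, ignore_errors=True)
        else:
            # Finished run: keep the results, drop only the heavy simulation
            # folders (what Database._delete_simulations does via the adapters).
            shutil.rmtree(scenario_dir / "Flooding" / "simulations", ignore_errors=True)
```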
flood_adapt/dbs_classes/dbs_projection.py
CHANGED

```diff
@@ -6,6 +6,7 @@ class DbsProjection(DbsTemplate[Projection]):
     dir_name = "projections"
     display_name = "Projection"
     _object_class = Projection
+    _higher_lvl_object = "Scenario"
 
     def check_higher_level_usage(self, name: str) -> list[str]:
         """Check if a projection is used in a scenario.
```
flood_adapt/dbs_classes/dbs_scenario.py
CHANGED

```diff
@@ -11,6 +11,7 @@ class DbsScenario(DbsTemplate[Scenario]):
    dir_name = "scenarios"
    display_name = "Scenario"
    _object_class = Scenario
+    _higher_lvl_object = "Benefit"
 
    def summarize_objects(self) -> dict[str, list[Any]]:
        """Return a dictionary with info on the events that currently exist in the database.
```
flood_adapt/dbs_classes/dbs_template.py
CHANGED

```diff
@@ -18,6 +18,7 @@ class DbsTemplate(AbstractDatabaseElement[T_OBJECTMODEL]):
     display_name: str
     dir_name: str
     _object_class: type[T_OBJECTMODEL]
+    _higher_lvl_object: str
 
     def __init__(
         self, database: IDatabase, standard_objects: Optional[list[str]] = None
@@ -161,7 +162,7 @@ class DbsTemplate(AbstractDatabaseElement[T_OBJECTMODEL]):
         # Check if object is used in a higher level object. If it is, raise an error
         if used_in := self.check_higher_level_usage(name):
             raise DatabaseError(
-                f"{self.display_name}: '{name}' cannot be deleted/modified since it is already used in: {', '.join(used_in)}"
+                f"{self.display_name}: '{name}' cannot be deleted/modified since it is already used in the {self._higher_lvl_object.capitalize()}(s): {', '.join(used_in)}"
             )
 
         # Once all checks are passed, delete the object
```
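The effect of the new `_higher_lvl_object` attribute is cosmetic but useful: the error now names the kind of object that blocks deletion. Reproducing the message with illustrative values:

```python
display_name, name = "Projection", "slr_2050"            # illustrative
_higher_lvl_object = "Scenario"
used_in = ["current_no_measures", "slr_elevated_homes"]  # hypothetical scenario names

print(
    f"{display_name}: '{name}' cannot be deleted/modified since it is "
    f"already used in the {_higher_lvl_object.capitalize()}(s): {', '.join(used_in)}"
)
# Projection: 'slr_2050' cannot be deleted/modified since it is already used
# in the Scenario(s): current_no_measures, slr_elevated_homes
```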
flood_adapt/flood_adapt.py
CHANGED
```diff
@@ -770,7 +770,9 @@ class FloodAdapt:
         """
         return self.database.get_depth_conversion()
 
-    def get_max_water_level_map(
+    def get_max_water_level_map(
+        self, name: str, rp: Optional[int] = None
+    ) -> np.ndarray:
         """
         Return the maximum water level for the given scenario.
 
```
```diff
@@ -788,6 +790,26 @@ class FloodAdapt:
         """
         return self.database.get_max_water_level(name, rp)
 
+    def get_flood_map_geotiff(
+        self, name: str, rp: Optional[int] = None
+    ) -> Optional[Path]:
+        """
+        Return the path to the geotiff file with the flood map for the given scenario.
+
+        Parameters
+        ----------
+        name : str
+            The name of the scenario.
+        rp : int, optional
+            The return period of the water level, by default None. Only for event set scenarios.
+
+        Returns
+        -------
+        flood_map_geotiff : Optional[Path]
+            The path to the geotiff file with the flood map for the scenario if it exists, otherwise None.
+        """
+        return self.database.get_flood_map_geotiff(name, rp)
+
     def get_building_footprint_impacts(self, name: str) -> gpd.GeoDataFrame:
         """
         Return a geodataframe of the impacts at the footprint level.
```
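A usage sketch of the new public accessor; the constructor argument and the scenario names are assumptions, not taken from the diff:

```python
from pathlib import Path

from flood_adapt import FloodAdapt  # assuming the top-level export

fa = FloodAdapt(database_path=Path("path/to/database"))  # hypothetical location

# Single-event scenario: resolves Flooding/FloodMap_<name>.tif
event_map = fa.get_flood_map_geotiff("king_tide")

# Event-set (risk) scenario: resolves Flooding/RP_0100_maps.tif
risk_map = fa.get_flood_map_geotiff("risk_run", rp=100)

if risk_map is None:
    # Missing maps are logged as a warning and returned as None rather than raising.
    print("No flood map on disk for this scenario/return period.")
```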
flood_adapt/misc/log.py
CHANGED
```diff
@@ -15,26 +15,24 @@ class FloodAdaptLogging:
     def __init__(
         self,
         file_path: Optional[Path] = None,
-
-        loglevel_root: int = logging.INFO,
-        loglevel_files: int = logging.DEBUG,
+        level: int = logging.INFO,
         formatter: logging.Formatter = _DEFAULT_FORMATTER,
         ignore_warnings: Optional[list[type[Warning]]] = None,
     ) -> None:
         """Initialize the logging system for the FloodAdapt."""
         self._formatter = formatter
 
-        self._root_logger.setLevel(loglevel_root)
+        self._root_logger.setLevel(level)
         if self._root_logger.hasHandlers():
             self._root_logger.handlers.clear()
 
         # Add file handler if provided
         if file_path is not None:
-            self.add_file_handler(file_path, loglevel_files, formatter)
+            self.add_file_handler(file_path, level, formatter)
 
         # Add console handler
         console_handler = logging.StreamHandler()
-        console_handler.setLevel(loglevel_root)
+        console_handler.setLevel(level)
         console_handler.setFormatter(formatter)
         self._root_logger.addHandler(console_handler)
 
@@ -46,7 +44,7 @@ class FloodAdaptLogging:
     def add_file_handler(
         cls,
         file_path: Path,
-
+        level: int = logging.DEBUG,
         formatter: Optional[logging.Formatter] = None,
     ) -> None:
         """Add a file handler to the logger that directs outputs to a the file."""
@@ -60,7 +58,7 @@ class FloodAdaptLogging:
         file_path.parent.mkdir(parents=True, exist_ok=True)
 
         file_handler = logging.FileHandler(filename=file_path, mode="a")
-        file_handler.setLevel(
+        file_handler.setLevel(level)
 
         formatter = formatter or cls._DEFAULT_FORMATTER
         file_handler.setFormatter(formatter)
@@ -79,7 +77,7 @@ class FloodAdaptLogging:
 
     @classmethod
     def getLogger(
-        cls, name: Optional[str] = None, level: int =
+        cls, name: Optional[str] = None, level: Optional[int] = None
     ) -> logging.Logger:
         """Get a logger with the specified name. If no name is provided, return the root logger.
 
@@ -101,9 +99,19 @@ class FloodAdaptLogging:
             logger = cls._root_logger
         else:
             logger = logging.getLogger(f"FloodAdapt.{name}")
-        logger.setLevel(level)
+
+        if level:
+            logger.setLevel(level)
+
         return logger
 
+    @classmethod
+    def set_global_level(cls, level: int) -> None:
+        """Set the logging level for FloodAdapt."""
+        loggers = [logging.getLogger(name) for name in logging.root.manager.loggerDict]
+        for logger in loggers:
+            logger.setLevel(level)
+
     @classmethod
     def shutdown(cls):
         root_logger = cls.getLogger()
@@ -120,7 +128,7 @@ class FloodAdaptLogging:
         cls,
         *,
         file_path: Path,
-
+        level: int = logging.DEBUG,
         formatter: logging.Formatter = _DEFAULT_FORMATTER,
     ):
         """Open a file at filepath to write logs to. Does not affect other loggers.
@@ -131,7 +139,7 @@ class FloodAdaptLogging:
             raise ValueError(
                 "file_path must be provided as a key value pair: 'file_path=<file_path>'."
             )
-        cls.add_file_handler(file_path,
+        cls.add_file_handler(file_path, level, formatter)
         try:
             yield
         finally:
```
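The net effect of these hunks: the separate `loglevel_root`/`loglevel_files` knobs collapse into one `level` argument applied to the root logger, the console handler, and any file handler; `getLogger` only overrides a level when one is passed; and `set_global_level` force-sets every registered logger. A short usage sketch (log file name hypothetical):

```python
import logging
from pathlib import Path

from flood_adapt.misc.log import FloodAdaptLogging

# One level now drives root, console, and file output together.
FloodAdaptLogging(file_path=Path("floodadapt.log"), level=logging.INFO)

# getLogger leaves the level alone unless explicitly asked to change it:
logger = FloodAdaptLogging.getLogger("my_module", level=logging.DEBUG)
logger.debug("visible because this logger was bumped to DEBUG")

# Blanket override across every logger Python has registered so far:
FloodAdaptLogging.set_global_level(logging.WARNING)
```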
flood_adapt/objects/events/events.py
CHANGED

```diff
@@ -53,11 +53,13 @@ class Template(str, Enum):
     def description(self) -> str:
         match self:
             case Template.Historical:
-                return "Select
+                return "Select and optionally modify a real event by specifying a past time period."
             case Template.Hurricane:
-                return
+                return (
+                    "Select a historical hurricane track from the hurricane database."
+                )
             case Template.Synthetic:
-                return "
+                return "Build a custom event by specifying wind, water levels, rainfall, and discharge."
             case _:
                 raise ValueError(f"Invalid event template: {self}")
 
```
flood_adapt/objects/events/historical.py
CHANGED

```diff
@@ -34,17 +34,17 @@ class HistoricalEvent(Event):
     ALLOWED_FORCINGS: ClassVar[dict[ForcingType, List[ForcingSource]]] = {
         ForcingType.RAINFALL: [
             ForcingSource.CSV,
-            ForcingSource.METEO,
+            # ForcingSource.METEO, # Temporarily excluded due to bug in hydromt-sfincs. fixed in v1.3.0
             ForcingSource.SYNTHETIC,
             ForcingSource.CONSTANT,
         ],
         ForcingType.WIND: [
             ForcingSource.CSV,
-            ForcingSource.METEO,
+            # ForcingSource.METEO, # Temporarily excluded due to bug in hydromt-sfincs. fixed in v1.3.0
             ForcingSource.CONSTANT,
         ],
         ForcingType.WATERLEVEL: [
-            ForcingSource.MODEL,
+            # ForcingSource.MODEL, # Temporarily excluded due to the METEO bug in hydromt-sfincs. fixed in v1.3.0
             ForcingSource.CSV,
             ForcingSource.SYNTHETIC,
             ForcingSource.GAUGED,
```
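Because METEO/MODEL are only commented out of the class-level whitelist rather than removed, callers can feature-test availability instead of hard-coding it. A sketch, with the `ForcingSource`/`ForcingType` import path assumed:

```python
from flood_adapt.objects.events.historical import HistoricalEvent
from flood_adapt.objects.forcing.forcing import ForcingSource, ForcingType  # assumed path

allowed = HistoricalEvent.ALLOWED_FORCINGS[ForcingType.RAINFALL]
if ForcingSource.METEO in allowed:
    source = ForcingSource.METEO
else:
    # METEO is temporarily off the whitelist until the hydromt-sfincs fix lands.
    source = ForcingSource.CSV
print(f"Using rainfall source: {source}")
```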
flood_adapt/objects/events/hurricane.py
CHANGED

```diff
@@ -53,7 +53,7 @@ class HurricaneEvent(Event):
             ForcingSource.CSV,
             ForcingSource.SYNTHETIC,
             ForcingSource.TRACK,
-            ForcingSource.METEO,
+            # ForcingSource.METEO, # Temporarily excluded due to bug in hydromt-sfincs. fixed in v1.3.0
         ],
         ForcingType.WIND: [ForcingSource.TRACK],
         ForcingType.WATERLEVEL: [ForcingSource.MODEL],
```
flood_adapt/objects/forcing/plotting.py
CHANGED

```diff
@@ -5,7 +5,6 @@ from typing import List, Optional
 import pandas as pd
 import plotly.express as px
 import plotly.graph_objects as go
-from plotly.subplots import make_subplots
 
 from flood_adapt.config.site import Site
 from flood_adapt.misc.log import FloodAdaptLogging
@@ -395,36 +394,7 @@ def plot_wind(
     x_title = "Time"
 
     # Plot actual thing
-
-
-    fig = make_subplots(specs=[[{"secondary_y": True}]])
-
-    # Add traces
-    fig.add_trace(
-        go.Scatter(
-            x=data.index,
-            y=data.iloc[:, 0],
-            name="Wind speed",
-            mode="lines",
-        ),
-        secondary_y=False,
-    )
-    fig.add_trace(
-        go.Scatter(
-            x=data.index, y=data.iloc[:, 1], name="Wind direction", mode="markers"
-        ),
-        secondary_y=True,
-    )
-
-    # Set y-axes titles
-    fig.update_yaxes(
-        title_text=f"Wind speed [{site.gui.units.default_velocity_units.value}]",
-        secondary_y=False,
-    )
-    fig.update_yaxes(
-        title_text=f"Wind direction {site.gui.units.default_direction_units.value}",
-        secondary_y=True,
-    )
+    fig = px.line(x=data.index, y=data.iloc[:, 0])
 
     fig.update_layout(
         autosize=False,
@@ -434,11 +404,14 @@ def plot_wind(
         font={"size": 10, "color": "black", "family": "Arial"},
         title_font={"size": 10, "color": "black", "family": "Arial"},
         legend=None,
-
-        xaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
+        showlegend=False,
         xaxis={"range": [event.time.start_time, event.time.end_time]},
         xaxis_title={"text": x_title},
-
+        xaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
+        yaxis_title_font={"size": 10, "color": "black", "family": "Arial"},
+        yaxis_title={
+            "text": f"Wind speed [{site.gui.units.default_velocity_units.value}]"
+        },
     )
 
     # Only save to the the event folder if that has been created already.
```
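The rewritten `plot_wind` drops the secondary-axis direction trace and plots only the speed column with `plotly.express`. A self-contained sketch of the new figure shape, with dummy data and hard-coded units standing in for `site.gui.units`:

```python
import pandas as pd
import plotly.express as px

# Dummy wind time series; only column 0 (speed) is plotted now.
data = pd.DataFrame(
    {"speed": [3.2, 4.1, 5.0, 4.4], "direction": [180, 190, 200, 210]},
    index=pd.date_range("2025-01-01", periods=4, freq="h"),
)

fig = px.line(x=data.index, y=data.iloc[:, 0])
fig.update_layout(
    showlegend=False,
    xaxis_title={"text": "Time"},
    yaxis_title={"text": "Wind speed [m/s]"},  # the package reads units from site.gui.units
)
fig.show()
```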
flood_adapt/objects/measures/measures.py
CHANGED

```diff
@@ -1,11 +1,12 @@
 import os
 from enum import Enum
 from pathlib import Path
-from typing import Any, Optional
+from typing import Any, Optional, Type, TypeVar
 
 import geopandas as gpd
 import pyproj
-from pydantic import Field, field_validator, model_validator
+import tomli
+from pydantic import Field, field_serializer, field_validator, model_validator
 
 from flood_adapt.config.site import Site
 from flood_adapt.misc.utils import resolve_filepath, save_file_to_database
@@ -107,6 +108,9 @@ class SelectionType(str, Enum):
     all = "all"
 
 
+T = TypeVar("T", bound="Measure")
+
+
 class Measure(Object):
     """The expected variables and data types of attributes common to all measures.
 
@@ -120,9 +124,91 @@ class Measure(Object):
         Description of the measure.
     type: MeasureType
         Type of measure. Should be one of the MeasureType enum values.
+    selection_type: SelectionType
+        Type of selection. Should be one of the SelectionType enum values.
+    polygon_file: str, Optional
+        Path to a polygon file, either absolute or relative to the measure's toml path in the database.
+    aggregation_area_name: str, Optional
+        Name of the aggregation area. Required if `selection_type` is 'aggregation_area'.
+    aggregation_area_type: str, Optional
+        Type of aggregation area. Required if `selection_type` is 'aggregation_area'.
     """
 
     type: MeasureType
+    selection_type: SelectionType
+
+    polygon_file: Optional[str] = Field(
+        default=None,
+        min_length=1,
+        description="Path to a polygon file, either absolute or relative to the measure path.",
+    )
+
+    aggregation_area_type: Optional[str] = None
+    aggregation_area_name: Optional[str] = None
+
+    @model_validator(mode="after")
+    def validate_selection_type(self) -> "Measure":
+        match self.selection_type:
+            case SelectionType.all:
+                pass
+            case SelectionType.polygon | SelectionType.polyline:
+                if not self.polygon_file:
+                    raise ValueError(
+                        "If `selection_type` is 'polygon' or 'polyline', then `polygon_file` needs to be set."
+                    )
+            case SelectionType.aggregation_area:
+                if not self.aggregation_area_name:
+                    raise ValueError(
+                        "If `selection_type` is 'aggregation_area', then `aggregation_area_name` needs to be set."
+                    )
+                if not self.aggregation_area_type:
+                    raise ValueError(
+                        "If `selection_type` is 'aggregation_area', then `aggregation_area_type` needs to be set."
+                    )
+            case _:
+                raise ValueError(
+                    f"Invalid selection type: {self.selection_type}. "
+                    "Must be one of 'aggregation_area', 'polygon', 'polyline', or 'all'."
+                )
+        return self
+
+    @field_serializer("polygon_file")
+    def serialize_polygon_file(self, value: Optional[str]) -> Optional[str]:
+        """Serialize the polygon_file attribute to a string of only the file name."""
+        if value is None:
+            return None
+        return Path(value).name
+
+    @classmethod
+    def load_file(cls: Type[T], file_path: Path | str | os.PathLike) -> T:
+        """Load the measure from a file.
+
+        Parameters
+        ----------
+        filepath : Path | str | os.PathLike
+            Path to the file to load the measure from.
+
+        Returns
+        -------
+        Measure
+            The loaded measure object.
+        """
+        with open(file_path, mode="rb") as fp:
+            toml = tomli.load(fp)
+        measure = cls.model_validate(toml)
+
+        if measure.polygon_file:
+            measure.polygon_file = str(Path(file_path).parent / measure.polygon_file)
+
+        return measure
+
+    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
+        if self.polygon_file:
+            Path(output_dir).mkdir(parents=True, exist_ok=True)
+            src_path = resolve_filepath("measures", self.name, self.polygon_file)
+            path = save_file_to_database(src_path, Path(output_dir))
+            # Update the shapefile path in the object so it is saved in the toml file as well
+            self.polygon_file = path.name
 
 
 class HazardMeasure(Measure):
```
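With selection handling hoisted into the `Measure` base class, every subclass inherits the same TOML round-trip: `load_file` re-anchors a bare `polygon_file` name next to the toml, and `save_additional`/`serialize_polygon_file` strip it back down to a file name on save. A sketch of the loading side; all field values are illustrative, `floodwall` is assumed to be a valid `MeasureType` value, and the base class is assumed to be directly instantiable:

```python
from pathlib import Path

from flood_adapt.objects.measures.measures import Measure

# Write a minimal, illustrative measure file to disk.
mdir = Path("measures/seawall_v1")
mdir.mkdir(parents=True, exist_ok=True)
(mdir / "seawall_v1.toml").write_text(
    'name = "seawall_v1"\n'
    'description = "demo measure"\n'
    'type = "floodwall"\n'           # assumed MeasureType value
    'selection_type = "polyline"\n'
    'polygon_file = "seawall.geojson"\n'
)

measure = Measure.load_file(mdir / "seawall_v1.toml")
# load_file re-anchors the bare file name to a path next to the toml:
print(measure.polygon_file)  # measures/seawall_v1/seawall.geojson
```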
```diff
@@ -143,39 +229,12 @@ class HazardMeasure(Measure):
 
     """
 
-    selection_type: SelectionType
-    polygon_file: Optional[str] = Field(
-        default=None,
-        min_length=1,
-        description="Path to a polygon file, either absolute or relative to the measure path.",
-    )
-
     @field_validator("type")
     def validate_type(cls, value):
         if not MeasureType.is_hazard(value):
             raise ValueError(f"Invalid hazard type: {value}")
         return value
 
-    @model_validator(mode="after")
-    def validate_selection_type(self) -> "HazardMeasure":
-        if (
-            self.selection_type
-            not in [SelectionType.aggregation_area, SelectionType.all]
-            and self.polygon_file is None
-        ):
-            raise ValueError(
-                "If `selection_type` is not 'aggregation_area' or 'all', then `polygon_file` needs to be set."
-            )
-        return self
-
-    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
-        if self.polygon_file:
-            Path(output_dir).mkdir(parents=True, exist_ok=True)
-            src_path = resolve_filepath("measures", self.name, self.polygon_file)
-            path = save_file_to_database(src_path, Path(output_dir))
-            # Update the shapefile path in the object so it is saved in the toml file as well
-            self.polygon_file = path.name
-
 
 class ImpactMeasure(Measure):
     """The expected variables and data types of attributes common to all impact measures.
@@ -200,15 +259,6 @@ class ImpactMeasure(Measure):
         Name of the aggregation area.
     """
 
-    type: MeasureType
-    selection_type: SelectionType
-    aggregation_area_type: Optional[str] = None
-    aggregation_area_name: Optional[str] = None
-    polygon_file: Optional[str] = Field(
-        default=None,
-        min_length=1,
-        description="Path to a polygon file, relative to the database path.",
-    )
     property_type: str  # TODO make enum
 
     @field_validator("type")
@@ -217,34 +267,6 @@ class ImpactMeasure(Measure):
             raise ValueError(f"Invalid impact type: {value}")
         return value
 
-    @model_validator(mode="after")
-    def validate_aggregation_area_name(self):
-        if (
-            self.selection_type == SelectionType.aggregation_area
-            and self.aggregation_area_name is None
-        ):
-            raise ValueError(
-                "If `selection_type` is 'aggregation_area', then `aggregation_area_name` needs to be set."
-            )
-        return self
-
-    @model_validator(mode="after")
-    def validate_polygon_file(self):
-        if self.selection_type == SelectionType.polygon and self.polygon_file is None:
-            raise ValueError(
-                "If `selection_type` is 'polygon', then `polygon_file` needs to be set."
-            )
-
-        return self
-
-    def save_additional(self, output_dir: Path | str | os.PathLike) -> None:
-        """Save the additional files to the database."""
-        if self.polygon_file:
-            src_path = resolve_filepath("measures", self.name, self.polygon_file)
-            path = save_file_to_database(src_path, Path(output_dir))
-            # Update the shapefile path in the object so it is saved in the toml file as well
-            self.polygon_file = path.name
-
 
 class Elevate(ImpactMeasure):
     """The expected variables and data types of the "elevate" impact measure.
```