wolfhece 2.2.38__py3-none-any.whl → 2.2.39__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. wolfhece/Coordinates_operations.py +5 -0
  2. wolfhece/GraphNotebook.py +72 -1
  3. wolfhece/GraphProfile.py +1 -1
  4. wolfhece/MulticriteriAnalysis.py +1579 -0
  5. wolfhece/PandasGrid.py +62 -1
  6. wolfhece/PyCrosssections.py +194 -43
  7. wolfhece/PyDraw.py +891 -73
  8. wolfhece/PyGui.py +913 -72
  9. wolfhece/PyGuiHydrology.py +528 -74
  10. wolfhece/PyPalette.py +26 -4
  11. wolfhece/PyParams.py +33 -0
  12. wolfhece/PyPictures.py +2 -2
  13. wolfhece/PyVertex.py +25 -0
  14. wolfhece/PyVertexvectors.py +94 -28
  15. wolfhece/PyWMS.py +52 -36
  16. wolfhece/acceptability/acceptability.py +15 -8
  17. wolfhece/acceptability/acceptability_gui.py +507 -360
  18. wolfhece/acceptability/func.py +80 -183
  19. wolfhece/apps/version.py +1 -1
  20. wolfhece/compare_series.py +480 -0
  21. wolfhece/drawing_obj.py +12 -1
  22. wolfhece/hydrology/Catchment.py +228 -162
  23. wolfhece/hydrology/Internal_variables.py +43 -2
  24. wolfhece/hydrology/Models_characteristics.py +69 -67
  25. wolfhece/hydrology/Optimisation.py +893 -182
  26. wolfhece/hydrology/PyWatershed.py +267 -165
  27. wolfhece/hydrology/SubBasin.py +185 -140
  28. wolfhece/hydrology/cst_exchanges.py +76 -1
  29. wolfhece/hydrology/forcedexchanges.py +413 -49
  30. wolfhece/hydrology/read.py +65 -5
  31. wolfhece/hydrometry/kiwis.py +14 -7
  32. wolfhece/insyde_be/INBE_func.py +746 -0
  33. wolfhece/insyde_be/INBE_gui.py +1776 -0
  34. wolfhece/insyde_be/__init__.py +3 -0
  35. wolfhece/interpolating_raster.py +366 -0
  36. wolfhece/irm_alaro.py +1457 -0
  37. wolfhece/irm_qdf.py +889 -57
  38. wolfhece/lifewatch.py +6 -3
  39. wolfhece/picc.py +124 -8
  40. wolfhece/pyLandUseFlanders.py +146 -0
  41. wolfhece/pydownloader.py +2 -1
  42. wolfhece/pywalous.py +225 -31
  43. wolfhece/toolshydrology_dll.py +149 -0
  44. wolfhece/wolf_array.py +63 -25
  45. {wolfhece-2.2.38.dist-info → wolfhece-2.2.39.dist-info}/METADATA +3 -1
  46. {wolfhece-2.2.38.dist-info → wolfhece-2.2.39.dist-info}/RECORD +49 -40
  47. {wolfhece-2.2.38.dist-info → wolfhece-2.2.39.dist-info}/WHEEL +0 -0
  48. {wolfhece-2.2.38.dist-info → wolfhece-2.2.39.dist-info}/entry_points.txt +0 -0
  49. {wolfhece-2.2.38.dist-info → wolfhece-2.2.39.dist-info}/top_level.txt +0 -0
wolfhece/irm_alaro.py ADDED
@@ -0,0 +1,1457 @@
+ from importlib.resources import files
+ from pathlib import Path
+ from enum import Enum
+
+ import numpy as np
+ import logging
+
+ import ftplib
+
+ from eccodes import codes_grib_new_from_file, codes_get, codes_get_values, codes_release, codes_keys_iterator_new, codes_keys_iterator_next, codes_keys_iterator_get_name
+
+ from datetime import datetime as dt, timedelta as td
+ from datetime import timezone as timezone
+
+ from shapely.geometry import Polygon, Point
+ import pandas as pd
+ import geopandas as gpd
+ import matplotlib.pyplot as plt
+ import matplotlib.animation as animation
+
+ import wx
+
+ from .pydownloader import DATADIR, toys_dataset
+ from .Coordinates_operations import transform_coordinates
+ from .wolf_array import header_wolf, WolfArray
+ from .PyVertexvectors import Zones, zone, vector, wolfvertex as wv
+ from .PyTranslate import _
+
+ OPENDATA_FORECASTS = 'forecasts'
+ OPENDATA_FTP_SERVER = 'opendata.meteo.be'
+ OPENDATA_ALARO_40L = 'alaro_40l'
+ FILE_PREFIX = 'alaro40l'
+
+ # Date format: YYYYMMDDHH with HH in [00, 06, 12, 18]
+
36
+ def _convert_col2date_str(col:str) -> str:
+     """ Create a string representation of the date from the column name. """
+
+     if col is None:
+         return "No data"
+
+     parts = col.split('_')
+     run_date = dt.strptime(parts[0], '%Y%m%d%H').strftime('%Y-%m-%d %H:%M')
+     real_date = dt.strptime(parts[1], '%Y%m%d').strftime('%Y-%m-%d')
+     hour = parts[2]
+
+     date_str = _(f'Forecast date : {real_date}, Hour : {hour} - Run : {run_date}')
+
+     return date_str
+
+ def _extract_dates_from_columnstr(col:str) -> list[dt]:
+     """ Extract the run date and forecast date from the column name. """
+
+     if col is None:
+         return [None, None]
+
+     parts = col.split('_')
+     run_date = dt.strptime(parts[0], '%Y%m%d%H')
+     real_date = dt.strptime(parts[1], '%Y%m%d')
+     hour = int(parts[2])
+
+     # set timezone as UTC
+     run_date = run_date.replace(tzinfo=timezone.utc)
+     real_date = real_date.replace(tzinfo=timezone.utc)
+
+     forecast_date = real_date + td(hours=hour)
+
+     return [run_date, forecast_date]
+
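Both helpers assume value columns named `<run>_<date>_<hour>`. A minimal illustrative check (the column value is hypothetical, and these are module-private helpers, imported here only for illustration):

    from datetime import timedelta
    from wolfhece.irm_alaro import _convert_col2date_str, _extract_dates_from_columnstr

    col = '2024060106_20240602_13'      # run 2024-06-01 06h UTC, valid 2024-06-02 13h UTC
    print(_convert_col2date_str(col))
    # Forecast date : 2024-06-02, Hour : 13 - Run : 2024-06-01 06:00
    run, valid = _extract_dates_from_columnstr(col)
    assert valid - run == timedelta(hours=31)   # 13 h into the day after a 06 h run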
70
+ def ArrayBelgium() -> WolfArray:
+     """
+     Create a WolfArray for the Belgium domain.
+     """
+
+     # Define the spatial extent for Belgium
+     h = header_wolf()
+     h.set_origin(17_000., 20_000.)
+     h.set_resolution(4_000., 4_000.)
+     h.shape = (int((296_000.-17_000.)//4_000.), int((245_000.-20_000.)//4_000.))
+
+     # Create the WolfArray
+     return WolfArray(srcheader=h)
+
+
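For orientation: with 4 km cells over the [17 000, 296 000] × [20 000, 245 000] Belgian Lambert 72 extent, the integer divisions above give a 69 × 56 grid. A quick check of that arithmetic:

    nx = int((296_000. - 17_000.) // 4_000.)   # 279 000 / 4 000 -> 69
    ny = int((245_000. - 20_000.) // 4_000.)   # 225 000 / 4 000 -> 56
    assert (nx, ny) == (69, 56)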
85
+ class GribFiles(Enum):
+     """
+     Enum for Grib files used in IRM Alaro data.
+     """
+
+     # FIXME : check the units and descriptions
+
+     FILE_10U = '10U.grb' # 10m U-component of wind [m/s]
+     FILE_10V = '10V.grb' # 10m V-component of wind [m/s]
+     FILE_MaxT2 = 'MaxT2.grb' # Maximum 2m temperature [K]
+     FILE_MinT2 = 'MinT2.grb' # Minimum 2m temperature [K]
+     FILE_2D = '2D.grb' # 2m dew point temperature [K]
+     FILE_2T = '2T.grb' # 2m temperature [K]
+     FILE_RH2M = 'RH2M.grb' # 2m relative humidity [0-1]
+     FILE_CR = 'CR.grb' # Convective rain [mm] or [kg/m²]
+     FILE_CS = 'CS.grb' # Convective snow [mm] or [kg/m²]
+     FILE_Z = 'Z.grb' # Geopotential height [m]
+     FILE_IFCCC = 'IFCCC.grb' # Instant fluctuation convective cloud cover [0-1]
+     FILE_IFHCC = 'IFHCC.grb' # Instant fluctuation high cloud cover [0-1]
+     FILE_IFLCC = 'IFLCC.grb' # Instant fluctuation low cloud cover [0-1]
+     FILE_IFMCC = 'IFMCC.grb' # Instant fluctuation medium cloud cover [0-1]
+     FILE_IFTCC = 'IFTCC.grb' # Instant fluctuation total cloud cover [0-1]
+     FILE_LSR = 'LSR.grb' # Large scale rain [mm] or [kg/m²]
+     FILE_LSS = 'LSS.grb' # Large scale snow [mm] or [kg/m²]
+     FILE_MSLP = 'MSLP.grb' # Mean sea level pressure [Pa]
+
+     FILE_R = 'R.grb' # Relative humidity [0-1] ??
+     FILE_Q = 'Q.grb' # Relative humidity isobaric [0-1] ??
+
+     FILE_MerGust = 'MerGust.grb' # SBL meridian gust [m/s]
+     FILE_ZonGust = 'ZonGust.grb' # SBL zonal gust [m/s]
+     FILE_SurfSWrad = 'SurfSWrad.grb' # Surface shortwave radiation [W/m²]
+     FILE_SurfLWrad = 'SurfLWrad.grb' # Surface longwave radiation [W/m²]
+     FILE_SurfCape = 'SurfCape.grb' # Surface CAPE [J/kg]
+     FILE_ST = 'ST.grb' # Surface temperature [K]
+     FILE_ORO = 'ORO.grb' # Surface orography [m]
+     FILE_T = 'T.grb' # Temperature [K]
+     FILE_TotPrecip = 'TotPrecip.grb' # Total precipitation [m_water_equivalent]
+     FILE_U = 'U.grb' # U-component of wind [m/s]
+     FILE_V = 'V.grb' # V-component of wind [m/s]
+     FILE_W = 'W.grb' # Vertical velocity [Pa/s]
+     FILE_WBPT = 'WBPT.grb' # Wet bulb potential temperature [K]
+     FILE_fzht = 'fzht.grb' # Freezing level (0°C isotherm) [m]
+
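Remote files combine the module-level prefix, a run date and one of these enum values, exactly as `_get_filename` below builds them. An illustrative sketch (the run date is hypothetical):

    from wolfhece.irm_alaro import GribFiles, FILE_PREFIX

    run_date = '2024060106'
    for f in (GribFiles.FILE_TotPrecip, GribFiles.FILE_2T):
        print(f"{FILE_PREFIX}_{run_date}_{f.value}")
    # alaro40l_2024060106_TotPrecip.grb
    # alaro40l_2024060106_2T.grb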
129
+ class IRM_Alaro():
+     """
+     Class for handling IRM Alaro forecasts.
+     """
+
+     def __init__(self, ftp_server: str = OPENDATA_FTP_SERVER, ftp_path: str = OPENDATA_FORECASTS):
+         """
+         Initialize the IRM_Alaro class with FTP server and path.
+         """
+
+         self.ftp_server = ftp_server
+         self.ftp_path = ftp_path
+         self._gdf = None
+         self._gdf_cache = None
+         self._gdf_diff = None
+         self._zones = None
+         self._array = ArrayBelgium()
+         self._available_run_dates = []
+
+         self._colormap = plt.get_cmap('Blues')
+
+         self._cities = gpd.read_file(toys_dataset('Communes_Belgique', 'PDS__COMMUNES.shp'))
+
152
+     def _ftp_init(self):
+         """ Initialize the FTP connection. """
+         self.ftp = ftplib.FTP(self.ftp_server)
+         self.ftp.login()
+         self.ftp.cwd(self.ftp_path + '/' + OPENDATA_ALARO_40L)
+
+     def _ftp_close(self):
+         """
+         Close the FTP connection.
+         """
+         try:
+             self.ftp.close()
+         except Exception as e:
+             logging.error(f"Error closing FTP connection: {e}")
+
+     def list_run_dates_cached(self) -> list:
+         """
+         Return the list of run dates available in the local cache directory.
+         """
+         all_grib_files = self.data_directory.rglob('*.grb')
+         dates = [f.stem.split('_')[1] for f in all_grib_files if f.is_file() and len(f.stem.split('_')) == 3]
+         return sorted(list(set(dates)))
+
175
+     def list_run_dates(self) -> list:
+         """
+         List the available forecast run dates on the FTP server.
+         """
+
+         today = dt.now()
+         possible_run_dates = []
+
+         # hours
+         hours = ['00', '06', '12', '18']
+
+         date_format = '%Y%m%d'
+         # Generate run-date strings for the last 3 days
+         for i in range(3):
+             forecast_time = today - td(days=i)
+             forecast_str = forecast_time.strftime(date_format)
+             possible_run_dates.append([forecast_str + hour for hour in hours])
+         # flatten the list of lists
+         possible_run_dates = [item for sublist in possible_run_dates for item in sublist]
+
+         # Reset so repeated calls do not accumulate duplicates
+         self._available_run_dates = []
+
+         try:
+             self._ftp_init()
+
+             for poss in possible_run_dates:
+                 try:
+                     self.ftp.cwd(poss)
+                     files = self.ftp.nlst()
+                     if files:
+                         self._available_run_dates.append(poss)
+                     self.ftp.cwd('..')  # Go back to the parent directory
+                 except ftplib.error_perm:
+                     continue
+
+             self._ftp_close()
+
+             self._available_run_dates.sort()
+             return self._available_run_dates
+
+         except ftplib.error_perm as e:
+             logging.error(f"Error listing files: {e}")
+             return []
+
218
+     @property
+     def run_dates(self) -> list:
+         """ Return the available forecast run dates. """
+         if len(self._available_run_dates) == 0:
+             self.list_run_dates()
+
+         return self._available_run_dates
+
+     @property
+     def run_dates_str(self) -> list[str]:
+         """ Return the available forecast run dates as formatted strings. """
+         return [dt.strptime(run, '%Y%m%d%H').strftime('%Y-%m-%d %H:%M') for run in self.run_dates]
+
+     def list_files_for_forecast(self, run_date: str) -> list:
+         """
+         List files for a specific forecast.
+
+         :param run_date: The forecast run date.
+         """
+         try:
+             self._ftp_init()
+             self.ftp.cwd(f"{run_date}")
+             files = self.ftp.nlst()
+             self._ftp_close()
+
+             # check if all files are present
+             missing_files = [file for file in GribFiles if self._get_filename(file, run_date) not in files]
+             if missing_files:
+                 logging.warning(f"Missing files for forecast {run_date}: {missing_files}")
+
+             return files
+         except ftplib.error_perm as e:
+             logging.error(f"Error listing files for forecast {run_date}: {e}")
+             return []
+
+     def _get_filename(self, file: GribFiles, run_date: str) -> str:
+         """
+         Generate the filename for a given Grib file and forecast.
+
+         :param file: The Grib file enum.
+         :param run_date: The forecast run date.
+         """
+         return f"{FILE_PREFIX}_{run_date}_{file.value}"
+
+     @property
+     def data_directory(self) -> Path:
+         """ Return the data directory path. """
+         return Path(DATADIR) / 'forecasts'
+
267
+     def download_data(self, filename: GribFiles | str, run_date: str) -> Path:
+         """
+         Download data from the FTP server.
+
+         :param filename: The Grib file to download or a specific filename.
+         :param run_date: The forecast run date.
+         """
+         if isinstance(filename, str):
+             fn = filename
+         else:
+             fn = self._get_filename(filename, run_date)
+         local_path = self.data_directory / f'{fn}'
+         local_path.parent.mkdir(parents=True, exist_ok=True)
+
+         if run_date not in self.run_dates:
+             if not local_path.exists():
+                 logging.info(f"Run date {run_date} not available on the FTP server nor locally.")
+                 return local_path
+             else:
+                 logging.info(f"Run date {run_date} not available on the FTP server, but the file exists locally.")
+                 return local_path
+
+         self._ftp_init()
+         self.ftp.cwd(f"{run_date}")
+
+         # Get size of the file
+         try:
+             remote_size = self.ftp.size(fn)
+             if local_path.exists():
+                 local_size = local_path.stat().st_size
+                 if local_size == remote_size:
+                     logging.info(f"File {fn} already exists and is complete. Skipping download.")
+                 else:
+                     logging.info(f"File {fn} exists but is incomplete. Re-downloading.")
+
+                     with open(local_path, 'wb') as f:
+                         self.ftp.retrbinary(f'RETR {fn}', f.write)
+
+             else:
+                 logging.info(f"File {fn} does not exist locally. Downloading.")
+
+                 with open(local_path, 'wb') as f:
+                     self.ftp.retrbinary(f'RETR {fn}', f.write)
+
+         except ftplib.error_perm as e:
+             logging.error(f"Could not get size for file {fn}: {e}")
+
+             # Proceed to download the file anyway
+             with open(local_path, 'wb') as f:
+                 self.ftp.retrbinary(f'RETR {fn}', f.write)
+
+         self._ftp_close()
+
+         return local_path
+
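A minimal usage sketch (the run dates shown are hypothetical; `run_dates` queries the FTP server for the last three days):

    from wolfhece.irm_alaro import IRM_Alaro, GribFiles

    alaro = IRM_Alaro()
    runs = alaro.run_dates                  # e.g. ['2024060100', '2024060106', ...]
    if runs:
        # Downloads to <DATADIR>/forecasts/alaro40l_<run>_TotPrecip.grb,
        # skipping the transfer if a complete local copy already exists.
        path = alaro.download_data(GribFiles.FILE_TotPrecip, runs[-1])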
323
+     def download_TotalPrecipitations_available_files(self) -> list[Path]:
+         """
+         Download the cumulated rain and 2m temperature files available on the FTP server.
+         """
+
+         to_download = [GribFiles.FILE_TotPrecip, GribFiles.FILE_2T]
+
+         rundates = self.run_dates
+
+         downloaded_files = []
+
+         for rundate in rundates:
+             for file in self.list_files_for_forecast(rundate):
+                 for selection in to_download:
+                     if selection.value in file:
+                         local_path = self.download_data(file, rundate)
+                         downloaded_files.append(local_path)
+
+         return downloaded_files
+
+     def download_all_available_files(self) -> list[Path]:
+         """
+         Download all files available on the FTP server.
+         """
+
+         rundates = self.run_dates
+
+         downloaded_files = []
+
+         for rundate in rundates:
+             for file in self.list_files_for_forecast(rundate):
+                 local_path = self.download_data(file, rundate)
+                 downloaded_files.append(local_path)
+
+         return downloaded_files
+
359
+     def _get_center_coordinates(self, filename: GribFiles,
+                                 run_date: str,
+                                 EPSG:str = 'EPSG:31370') -> tuple[np.ndarray, np.ndarray]:
+         """
+         Load GRIB data and compute coordinates.
+
+         :param filename: The GRIB file to process.
+         :param run_date: The forecast run date.
+         :param EPSG: The target EPSG code for the output coordinates.
+         :return: The center coordinates (x, y) for the given GRIB file and forecast.
+         """
+
+         file_path = self.download_data(filename, run_date)
+
+         if not file_path.exists():
+             logging.error(f"File {file_path} does not exist.")
+             return np.array([])
+
+         with open(file_path, 'rb') as f:
+             gid = codes_grib_new_from_file(f)
+
+             # Grid type
+             grid_type = codes_get(gid, "gridType")
+             logging.info(f"Grid type: {grid_type}")
+
+             # Grid dimensions
+             Ni = codes_get(gid, "Ni")  # number of points in longitude
+             Nj = codes_get(gid, "Nj")  # number of points in latitude
+             logging.info(f"Dimensions : {Ni} x {Nj}")
+
+             # Coordinates of the first and last grid points
+             lat1 = codes_get(gid, "latitudeOfFirstGridPointInDegrees")
+             lon1 = codes_get(gid, "longitudeOfFirstGridPointInDegrees")
+             lat2 = codes_get(gid, "latitudeOfLastGridPointInDegrees")
+             lon2 = codes_get(gid, "longitudeOfLastGridPointInDegrees")
+             logging.info(f"Grid from ({lat1}, {lon1}) to ({lat2}, {lon2})")
+
+             # Increments
+             dlat = codes_get(gid, "jDirectionIncrementInDegrees")
+             dlon = codes_get(gid, "iDirectionIncrementInDegrees")
+             logging.info(f"Increments: {dlon}° in longitude, {dlat}° in latitude")
+
+             # All metadata read -- release the GRIB handle
+             codes_release(gid)
+
+         # Rebuild the coordinates
+         lats = np.linspace(lat1, lat2, Nj)
+         lons = np.linspace(lon1, lon2, Ni)
+
+         # ---------------
+         # WARNING : data are enumerated longitude first, then latitude
+         # We need to use meshgrid to get the correct order
+         lon_grid, lat_grid = np.meshgrid(lons, lats)
+         # ---------------
+
+         # Convert to numpy arrays
+         lats = lat_grid.flatten()
+         lons = lon_grid.flatten()
+
+         # Convert to the target CRS (default: Belgian Lambert 72)
+         xy = transform_coordinates(np.vstack([lons, lats]).T, "EPSG:4326", EPSG, chunk_size=50_000)
+
+         x, y = xy[:, 0], xy[:, 1]
+         # reshape the coordinates
+         x_grid = np.reshape(x, (Nj, Ni))
+         y_grid = np.reshape(y, (Nj, Ni))
+
+         return (x_grid, y_grid)
+
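The longitude-first enumeration warned about above is why `meshgrid(lons, lats)` is used before flattening; a self-contained sketch of that ordering:

    import numpy as np

    lats = np.array([50.0, 50.5])            # Nj = 2
    lons = np.array([4.0, 4.5, 5.0])         # Ni = 3
    lon_g, lat_g = np.meshgrid(lons, lats)   # both shaped (Nj, Ni)
    # Flattening walks longitudes fastest, matching the GRIB value order:
    assert lon_g.flatten().tolist() == [4.0, 4.5, 5.0, 4.0, 4.5, 5.0]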
426
+     def _get_corners_coordinates(self, filename: GribFiles,
+                                  run_date: str,
+                                  EPSG:str = 'EPSG:31370') -> tuple[tuple[np.ndarray, np.ndarray]]:
+         """
+         Load GRIB data and compute coordinates.
+
+         :param filename: The GRIB file to process.
+         :param run_date: The forecast run date.
+         :param EPSG: The target EPSG code for the output coordinates.
+         :return: The coordinates (x, y) for (center, lower-left, lower-right, upper-right, upper-left).
+         """
+
+         CHUNK_SIZE = 200_000
+
+         file_path = self.download_data(filename, run_date)
+
+         if not file_path.exists():
+             logging.error(f"File {file_path} does not exist.")
+             return np.array([])
+
+         with open(file_path, 'rb') as f:
+             gid = codes_grib_new_from_file(f)
+
+             # Grid type
+             grid_type = codes_get(gid, "gridType")
+             logging.info(f"Grid type: {grid_type}")
+
+             # Grid dimensions
+             Ni = codes_get(gid, "Ni")  # number of points in longitude
+             Nj = codes_get(gid, "Nj")  # number of points in latitude
+             logging.info(f"Dimensions : {Ni} x {Nj}")
+
+             # Coordinates of the first and last grid points
+             lat1 = codes_get(gid, "latitudeOfFirstGridPointInDegrees")
+             lon1 = codes_get(gid, "longitudeOfFirstGridPointInDegrees")
+             lat2 = codes_get(gid, "latitudeOfLastGridPointInDegrees")
+             lon2 = codes_get(gid, "longitudeOfLastGridPointInDegrees")
+             logging.info(f"Grid from ({lat1}, {lon1}) to ({lat2}, {lon2})")
+
+             # Increments
+             dlat = codes_get(gid, "jDirectionIncrementInDegrees")
+             dlon = codes_get(gid, "iDirectionIncrementInDegrees")
+             logging.info(f"Increments: {dlon}° in longitude, {dlat}° in latitude")
+
+             # All metadata read -- release the GRIB handle
+             codes_release(gid)
+
+         # Rebuild the coordinates (Nj latitudes, Ni longitudes, as in _get_center_coordinates)
+         lats = np.linspace(lat1, lat2, Nj)
+         lons = np.linspace(lon1, lon2, Ni)
+
+         # ---------------
+         # WARNING : data are enumerated longitude first, then latitude
+         # We need to use meshgrid to get the correct order
+         lon_grid, lat_grid = np.meshgrid(lons, lats)
+         # ---------------
+
+         lat_corners_ll = lat_grid.copy()
+         lat_corners_ul = lat_grid.copy()
+
+         lon_corners_ll = lon_grid.copy()
+         lon_corners_lr = lon_grid.copy()
+
+         # Estimate corners by averaging the nearest neighbors
+         lat_corners_ll[1:, :] = (lat_grid[:-1, :] + lat_grid[1:, :]) / 2
+         lat_corners_ul[:-1, :] = (lat_grid[:-1, :] + lat_grid[1:, :]) / 2
+
+         lat_corners_ll[0, :] = lat_grid[0, :] - dlat / 2
+         lat_corners_ul[-1, :] = lat_grid[-1, :] + dlat / 2
+
+         lat_corners_lr = lat_corners_ll.copy()
+         lat_corners_ur = lat_corners_ul.copy()
+
+         lon_corners_ll[:, 1:] = (lon_grid[:, :-1] + lon_grid[:, 1:]) / 2
+         lon_corners_lr[:, :-1] = (lon_grid[:, :-1] + lon_grid[:, 1:]) / 2
+
+         lon_corners_ll[:, 0] = lon_grid[:, 0] - dlon / 2
+         lon_corners_lr[:, -1] = lon_grid[:, -1] + dlon / 2
+
+         lon_corners_ul = lon_corners_ll.copy()
+         lon_corners_ur = lon_corners_lr.copy()
+
+         # Convert to numpy arrays
+         lats = lat_grid.flatten()
+         lons = lon_grid.flatten()
+         lat_corners_ll = lat_corners_ll.flatten()
+         lon_corners_ll = lon_corners_ll.flatten()
+         lat_corners_ul = lat_corners_ul.flatten()
+         lon_corners_ul = lon_corners_ul.flatten()
+         lat_corners_lr = lat_corners_lr.flatten()
+         lon_corners_lr = lon_corners_lr.flatten()
+         lat_corners_ur = lat_corners_ur.flatten()
+         lon_corners_ur = lon_corners_ur.flatten()
+
+         # Convert to the target CRS (default: Belgian Lambert 72)
+         xy_center = transform_coordinates(np.vstack([lons, lats]).T, "EPSG:4326", EPSG, chunk_size=CHUNK_SIZE)
+
+         # concatenate
+         all_corners = np.concatenate((np.vstack([lon_corners_ll, lat_corners_ll]).T,
+                                       np.vstack([lon_corners_ul, lat_corners_ul]).T,
+                                       np.vstack([lon_corners_lr, lat_corners_lr]).T,
+                                       np.vstack([lon_corners_ur, lat_corners_ur]).T))
+         all_corners = transform_coordinates(all_corners, "EPSG:4326", EPSG, chunk_size=CHUNK_SIZE)
+
+         # split
+         xy_corners_ll = all_corners[0:Ni*Nj, :]
+         xy_corners_ul = all_corners[Ni*Nj:2*Ni*Nj, :]
+         xy_corners_lr = all_corners[2*Ni*Nj:3*Ni*Nj, :]
+         xy_corners_ur = all_corners[3*Ni*Nj:4*Ni*Nj, :]
+
+         x, y = xy_center[:, 0], xy_center[:, 1]
+         # reshape the coordinates
+         x_grid = np.reshape(x, (Nj, Ni))
+         y_grid = np.reshape(y, (Nj, Ni))
+
+         x_ll, y_ll = xy_corners_ll[:, 0], xy_corners_ll[:, 1]
+         x_ul, y_ul = xy_corners_ul[:, 0], xy_corners_ul[:, 1]
+         x_lr, y_lr = xy_corners_lr[:, 0], xy_corners_lr[:, 1]
+         x_ur, y_ur = xy_corners_ur[:, 0], xy_corners_ur[:, 1]
+
+         x_ll = np.reshape(x_ll, (Nj, Ni))
+         y_ll = np.reshape(y_ll, (Nj, Ni))
+         x_ul = np.reshape(x_ul, (Nj, Ni))
+         y_ul = np.reshape(y_ul, (Nj, Ni))
+         x_lr = np.reshape(x_lr, (Nj, Ni))
+         y_lr = np.reshape(y_lr, (Nj, Ni))
+         x_ur = np.reshape(x_ur, (Nj, Ni))
+         y_ur = np.reshape(y_ur, (Nj, Ni))
+
+         return (x_grid, y_grid), (x_ll, y_ll), (x_lr, y_lr), (x_ur, y_ur), (x_ul, y_ul)
+
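The corner estimate above reduces to a 1-D rule: interior corners are midpoints between neighbouring cell centres, and boundary corners extrapolate by half an increment. A minimal sketch:

    import numpy as np

    centres = np.array([0.0, 1.0, 2.0])
    d = 1.0                                        # grid increment
    left = centres.copy()
    left[1:] = (centres[:-1] + centres[1:]) / 2    # interior midpoints
    left[0] = centres[0] - d / 2                   # half-step extrapolation at the edge
    assert left.tolist() == [-0.5, 0.5, 1.5]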
555
+     def _prepare_gdf_from_grib(self, filename: GribFiles,
+                                run_date: str,
+                                EPSG:str = 'EPSG:31370') -> gpd.GeoDataFrame:
+         """
+         Prepare a GeoDataFrame from a grib file.
+
+         :param filename: The GRIB file to process.
+         :param run_date: The forecast run date.
+         :param EPSG: The target EPSG code for the output coordinates.
+         :return: The GeoDataFrame with polygons for each grid cell.
+         """
+
+         file_path = self.download_data(filename, run_date)
+
+         if not file_path.exists():
+             logging.error(f"File {file_path} does not exist.")
+             return np.array([])
+
+         (x_grid, y_grid), (x_ll, y_ll), (x_lr, y_lr), (x_ur, y_ur), (x_ul, y_ul) = self._get_corners_coordinates(filename, run_date, EPSG)
+
+         # Create polygons around the grid points
+         # Corners are between grid points, so we need to create polygons
+
+         nrows, ncols = x_grid.shape
+         polygons = [Polygon([(x_ll[i, j], y_ll[i, j]),
+                              (x_lr[i, j], y_lr[i, j]),
+                              (x_ur[i, j], y_ur[i, j]),
+                              (x_ul[i, j], y_ul[i, j])])
+                     for i in range(nrows) for j in range(ncols)]
+
+         data = {}
+         data['centroid_x'] = x_grid.flatten()
+         data['centroid_y'] = y_grid.flatten()
+         self._gdf = gpd.GeoDataFrame(data, geometry=polygons, crs=EPSG)
+
+         self._gdf_cache = self._gdf.copy()
+
+         return self._gdf
+
594
+     def _prepare_Zones_from_grib(self, filename: GribFiles,
+                                  run_date: str,
+                                  EPSG:str = 'EPSG:31370') -> Zones:
+         """
+         Prepare a Zones object from a grib file.
+
+         :param filename: The GRIB file to process.
+         :param run_date: The forecast run date.
+         :param EPSG: The target EPSG code for the output coordinates.
+         :return: The Zones with polygons for each grid cell.
+         """
+         file_path = self.download_data(filename, run_date)
+
+         if not file_path.exists():
+             logging.error(f"File {file_path} does not exist.")
+             return np.array([])
+
+         (x_grid, y_grid), (x_ll, y_ll), (x_lr, y_lr), (x_ur, y_ur), (x_ul, y_ul) = self._get_corners_coordinates(filename, run_date, EPSG)
+
+         # Create polygons around the grid points
+         # Corners are between grid points, so we need to create polygons
+
+         self._zones = Zones(idx = 'Alaro forecasts')
+
+         nrows, ncols = x_grid.shape
+         for i in range(nrows):
+             for j in range(ncols):
+                 loczone = zone(name=f"Alaro_{i}_{j}")
+                 self._zones.add_zone(loczone, forceparent = True)
+                 locvec = vector(name=f"Alaro_{i}_{j}_polygon")
+                 loczone.add_vector(locvec, forceparent=True)
+                 locvec.add_vertex([wv(x_ll[i, j], y_ll[i, j]),
+                                    wv(x_lr[i, j], y_lr[i, j]),
+                                    wv(x_ur[i, j], y_ur[i, j]),
+                                    wv(x_ul[i, j], y_ul[i, j])])
+                 locvec.force_to_close()
+
+         return self._zones
+
639
+     def _load_grib_data(self, filename: GribFiles,
+                         run_dates: str | list[str],
+                         ) -> dict:
+         """
+         Load GRIB data from a file for each run date.
+
+         :param filename: The GRIB file to process.
+         :param run_dates: The forecast run dates.
+         :return: A dict mapping "<run>_<date>_<hour>" column names to value arrays.
+         """
+
+         if isinstance(run_dates, str):
+             run_dates = [run_dates]
+
+         new_data = {}
+
+         for run_date in run_dates:
+             file_path = self.download_data(filename, run_date)
+
+             if not file_path.exists():
+                 logging.error(f"File {file_path} does not exist.")
+                 return new_data
+
+             with open(file_path, 'rb') as f:
+                 while True:
+                     gid = codes_grib_new_from_file(f)
+
+                     if gid is None:
+                         break
+
+                     # Grid dimensions
+                     Ni = codes_get(gid, "Ni")  # number of points in longitude
+                     Nj = codes_get(gid, "Nj")  # number of points in latitude
+
+                     validityDate = codes_get(gid, "validityDate")
+                     validityTime = codes_get(gid, "validityTime") // 100
+
+                     # Field values (e.g. temperature, pressure)
+                     values = codes_get_values(gid)
+
+                     # Reshape the values if needed
+                     # data = np.reshape(values, (Nj, Ni))
+
+                     new_data[f"{run_date}_{validityDate}_{validityTime:02d}"] = values
+
+                     codes_release(gid)
+
+         return new_data
+
688
+     def reset_gdf(self):
+         """ Reset the GeoDataFrame. """
+
+         self._gdf = self._gdf_cache.copy() if self._gdf_cache is not None else None
+         self._gdf_diff = None
+
694
+     def load_grib_data_to_gdf(self, filename: GribFiles,
+                               run_dates: str | list[str],
+                               ) -> gpd.GeoDataFrame:
+         """
+         Load GRIB data from a file and add it to the GeoDataFrame.
+
+         :param filename: The GRIB file to process.
+         :param run_dates: The forecast run dates.
+         :return: The GeoDataFrame with added data.
+         """
+
+         if isinstance(run_dates, str):
+             run_dates = [run_dates]
+
+         new_data = self._load_grib_data(filename, run_dates)
+
+         if new_data is None:
+             logging.error(f"No data found for {run_dates}.")
+             return self._gdf
+
+         if self._gdf is None:
+             self._prepare_gdf_from_grib(filename, run_dates[0])
+
+         if len(new_data) == 0:
+             logging.error(f"No data found for {run_dates}.")
+             return self._gdf
+
+         new_gdf = gpd.GeoDataFrame(new_data)
+
+         # Concatenate gdf and new_gdf
+         self._gdf = pd.concat([self._gdf, new_gdf], axis=1)
+
+         return self._gdf
+
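After loading, `_gdf` holds one row per grid cell with a `geometry` polygon, `centroid_x`/`centroid_y`, and one value column per `<run>_<date>_<hour>` forecast. A hedged sketch (run date and printed columns hypothetical):

    alaro = IRM_Alaro()
    gdf = alaro.load_grib_data_to_gdf(GribFiles.FILE_TotPrecip, '2024060106')
    print(alaro.get_forecast_columns()[:2])
    # e.g. ['2024060106_20240601_07', '2024060106_20240601_08']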
728
+     def load_grib_data_to_Zones(self, filename: GribFiles,
+                                 run_dates: str | list[str],
+                                 ) -> Zones:
+         """
+         Load GRIB data from a file and add it to the Zones.
+
+         :param filename: The GRIB file to process.
+         :param run_dates: The forecast run dates.
+         :return: The Zones with added data.
+         """
+
+         if isinstance(run_dates, str):
+             run_dates = [run_dates]
+
+         new_data = self._load_grib_data(filename, run_dates)
+
+         if self._zones is None:
+             self._prepare_Zones_from_grib(filename, run_dates[0])
+
+         if len(new_data) == 0:
+             logging.error(f"No data found for {run_dates}.")
+             return self._zones
+
+         # Put values in zones
+         for key, values in new_data.items():
+             self._zones.add_values(key, values)
+
+         return self._zones
+
754
+     def forecasts_to_arrays(self, forecasts: str | list[str] = None) -> list[WolfArray]:
+         """ Convert forecast columns to WolfArray objects.
+
+         :param forecasts: List of forecast columns to convert to WolfArray. If None, all forecast columns are used.
+         """
+
+         if forecasts is None:
+             forecasts = self.get_forecast_columns()
+
+         elif isinstance(forecasts, str):
+             forecasts = [forecasts]
+
+         # Create a numpy array from the centroids and the values of each forecast column
+         xyz = np.zeros((self._gdf.shape[0], 3))
+         xyz[:,0] = self._gdf["centroid_x"].values
+         xyz[:,1] = self._gdf["centroid_y"].values
+
+         arrays = []
+         for forecast in forecasts:
+             if forecast not in self._gdf.columns:
+                 logging.error(f"Forecast {forecast} not found in GeoDataFrame columns.")
+                 continue
+
+             xyz[:,2] = self._gdf[forecast].values
+
+             new_array = WolfArray(mold=self._array, idx=forecast)
+             new_array.fillin_from_xyz(xyz)
+             arrays.append(new_array)
+
+         return arrays
+
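Building on the loading sketch above, each selected column is rasterised onto the 4 km Belgian grid created by `ArrayBelgium()`:

    arrays = alaro.forecasts_to_arrays()   # one WolfArray per forecast column
    for arr in arrays:
        print(arr.idx)                     # the "<run>_<date>_<hour>" column name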
785
+     def get_forecast_columns(self, diff:bool = False) -> list[str]:
+         """ Get the list of forecast columns in the GeoDataFrame.
+
+         Exclude 'geometry', 'centroid_x', 'centroid_y' columns.
+
+         :param diff: If True, get columns from the diff GeoDataFrame.
+         """
+
+         if diff:
+             if self._gdf_diff is None:
+                 self._compute_diff()
+             columns = self._gdf_diff.columns
+             # pop 'geometry', 'centroid_x', 'centroid_y'
+             columns = columns[columns != 'centroid_x']
+             columns = columns[columns != 'centroid_y']
+             columns = columns[columns != 'geometry']
+             columns = [col for col in columns if col is not None]
+
+         else:
+             if self._gdf is None:
+                 logging.error("No data loaded. Please load GRIB data first.")
+                 return []
+
+             columns = self._gdf.columns
+             # pop 'geometry', 'centroid_x', 'centroid_y'
+             columns = columns[columns != 'centroid_x']
+             columns = columns[columns != 'centroid_y']
+             columns = columns[columns != 'geometry']
+             columns = [col for col in columns if col is not None]
+
+         return list(columns)
+
817
+     def _compute_diff(self):
+         """ Compute local rain by difference in total cumulated rainfall """
+
+         if self._gdf is None:
+             logging.error("No data loaded. Please load GRIB data first.")
+             return None
+
+         self._gdf_diff = self._gdf.copy()
+
+         columns = self.get_forecast_columns(diff=True)
+         # all dates and hours
+         dates_hours = list(set([col.split('_')[1] + '_' + col.split('_')[2] for col in columns]))
+         rundates = list(set([col.split('_')[0] for col in columns]))
+         rundates.sort()
+         dates_hours.sort()
+
+         columns_lists = []
+         for datehour in dates_hours:
+             columns_lists.append([f"{rundate}_{datehour}" if f"{rundate}_{datehour}" in columns else None for rundate in rundates])
+
+         # Diff all columns
+         ref = columns_lists[0]
+         for cols in columns_lists[1:]:
+             for loccol, locref in zip(cols, ref):
+                 if loccol is not None:
+                     self._gdf_diff[loccol] = 0.0
+                 if loccol is not None and locref is not None:
+                     self._gdf_diff[locref] = self._gdf[loccol] - self._gdf[locref]
+
+                     if self._gdf_diff[locref].min() < 0.:
+                         logging.debug(f"Negative values found in column {loccol} : {self._gdf_diff[locref].min()}")
+             ref = cols
+
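To illustrate the column arithmetic: TotPrecip stores the total cumulated since the run start, so the difference between consecutive forecast hours is the rain of that interval, stored under the column of the interval start (the last hour keeps 0.0). A minimal pandas sketch of the same idea:

    import pandas as pd

    cum = pd.Series([0.000, 0.002, 0.005])   # cumulated TotPrecip [m] at hours H, H+1, H+2
    rain = cum.diff().shift(-1)              # 0.002 m and 0.003 m, assigned to H and H+1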
850
+     def _load_grib_metadata(self, filename: GribFiles, run_date: str) -> None:
+         """
+         Load GRIB metadata from a file (exploratory helper; reads the first message only).
+
+         :param filename: The GRIB file to process.
+         :param run_date: The forecast run date.
+         """
+         file_path = self.download_data(filename, run_date)
+
+         if not file_path.exists():
+             logging.error(f"File {file_path} does not exist.")
+             return
+
+         i = 0
+         with open(file_path, 'rb') as f:
+             while True:
+                 gid = codes_grib_new_from_file(f)
+
+                 if gid is None:
+                     break
+
+                 keys_iter = codes_keys_iterator_new(gid)
+                 while codes_keys_iterator_next(keys_iter):
+                     key = codes_keys_iterator_get_name(keys_iter)
+                     if "validity" in key:
+                         logging.debug(key)
+
+                 logging.debug(i)
+                 i += 1
+
+                 # Grid dimensions
+                 Ni = codes_get(gid, "Ni")  # number of points in longitude
+                 Nj = codes_get(gid, "Nj")  # number of points in latitude
+                 time = codes_get(gid, "validityDate") * 10000 + codes_get(gid, "validityTime")
+                 date = codes_get(gid, "year"), codes_get(gid, "month"), codes_get(gid, "day"), codes_get(gid, "hour")
+                 name = codes_get(gid, "parameterName")
+                 units = codes_get(gid, "parameterUnits")
+                 step_units = codes_get(gid, "stepUnits")
+                 units2 = codes_get(gid, "units")
+                 start_step = codes_get(gid, "startStep")
+                 end_step = codes_get(gid, "endStep")
+
+                 dataDate = codes_get(gid, "dataDate")
+                 dataTime = codes_get(gid, "dataTime")
+                 # forecastTime = codes_get(gid, "forecastTime")
+                 stepRange = codes_get(gid, "stepRange")
+
+                 codes_release(gid)
+                 return
+
901
+     def _create_animation(self, filename: GribFiles,
+                           run_date: str,
+                           bounds: tuple[list[float], list[float]] | str = 'Belgium',
+                           vmin: float = None,
+                           vmax: float = None,
+                           factor: float = 1.0
+                           ) -> animation.FuncAnimation:
+         """
+         Create a video from the GeoDataFrame data.
+
+         :param filename: The GRIB file to process.
+         :param run_date: The forecast run date to visualize.
+         :param bounds: Bounds for the plot. Can be 'Belgium' or a pair [xlim, ylim].
+         :param vmin: Minimum value for color scaling.
+         :param vmax: Maximum value for color scaling.
+         :param factor: Factor to multiply the data values for scaling.
+         :return: The animation object.
+         """
+
+         self.load_grib_data_to_gdf(filename, run_date)
+
+         from matplotlib.collections import PatchCollection
+         from matplotlib.patches import Polygon as MplPolygon
+         from matplotlib.cm import ScalarMappable
+         from mpl_toolkits.axes_grid1 import make_axes_locatable
+
+         fig, ax = plt.subplots(figsize=(10, 10))
+
+         columns = self.get_forecast_columns()
+
+         # Filter columns containing the "run_date" string
+         columns = [col for col in columns if col.startswith(run_date)]
+
+         if vmin is None:
+             vmin = self._gdf[columns].min().min()
+         if vmax is None:
+             vmax = self._gdf[columns].max().max()
+
+         vmin *= factor
+         vmax *= factor
+
+         norm = plt.Normalize(vmin=vmin, vmax=vmax)
+         colors = self._colormap(norm(self._gdf[columns[0]].values * factor))
+
+         # Create an axis for the colorbar without modifying ax
+         divider = make_axes_locatable(ax)
+         cax = divider.append_axes("right", size="5%", pad=0.1)
+
+         # Add the colorbar
+         sm = ScalarMappable(norm=norm, cmap=self._colormap)
+         cbar = fig.colorbar(sm, cax=cax)
+
+         patches = [MplPolygon(geom.exterior.coords, closed=True) for geom in self._gdf.geometry]
+         collection = PatchCollection(patches, facecolor=colors, edgecolor='black')
+
+         ax.add_collection(collection)
+         self._cities.plot(ax=ax, facecolor='none', edgecolor='black', linewidth=0.5)
+
+         if bounds == 'Belgium':
+             ax.set_xlim(self._cities.total_bounds[[0, 2]])
+             ax.set_ylim(self._cities.total_bounds[[1, 3]])
+         elif isinstance(bounds, (list, tuple)) and len(bounds) == 2:
+             ax.set_xlim(bounds[0][0], bounds[0][1])
+             ax.set_ylim(bounds[1][0], bounds[1][1])
+         else:
+             ax.set_xlim(self._gdf.total_bounds[[0, 2]])
+             ax.set_ylim(self._gdf.total_bounds[[1, 3]])
+
+         def update(column):
+             new_colors = self._colormap(norm(self._gdf[column].values * factor))
+             collection.set_color(new_colors)
+
+             ax.set_title(f"Total Precip. for {_convert_col2date_str(column)} - [mm] - time UTC")
+             if wx.GetApp() is not None:
+                 wx.GetApp().Yield()  # Allow GUI to update
+
+         ani = animation.FuncAnimation(fig, update, frames=columns, repeat=False)
+
+         return ani
+
981
+     def _create_comparison_animation(self, filename: GribFiles,
+                                      run_dates: list[str],
+                                      size = 10,
+                                      bounds: tuple[list[float], list[float]] | str = 'Belgium',
+                                      vmin:float = None,
+                                      vmax:float = None,
+                                      factor: float = 1.0) -> animation.FuncAnimation:
+         """
+         Create a video from the GeoDataFrame data, one subplot per run date.
+
+         :param filename: The GRIB file to process.
+         :param run_dates: List of forecast run dates to compare.
+         :param size: Size of each subplot.
+         :param bounds: Bounds for the plot. Can be 'Belgium' or a pair [xlim, ylim].
+         :param vmin: Minimum value for color scaling.
+         :param vmax: Maximum value for color scaling.
+         :param factor: Factor to multiply the data values for scaling.
+         :return: The animation object.
+         """
+
+         assert len(run_dates) > 1, "At least two forecasts must be provided."
+
+         self.load_grib_data_to_gdf(filename, run_dates)
+
+         from matplotlib.collections import PatchCollection
+         from matplotlib.patches import Polygon as MplPolygon
+         from matplotlib.cm import ScalarMappable
+         from mpl_toolkits.axes_grid1 import make_axes_locatable
+
+         fig, axes = plt.subplots(ncols=len(run_dates), figsize=(len(run_dates) * size, size))
+
+         fig.suptitle(_('Rain intensity [mm/h]'))
+
+         columns = self.get_forecast_columns(diff=False)
+         # all dates and hours
+         dates_hours = list(set([col.split('_')[1] + '_' + col.split('_')[2] for col in columns]))
+         dates_hours.sort()
+
+         columns_lists = []
+         for datehour in dates_hours:
+             columns_lists.append([f"{forecast}_{datehour}" if f"{forecast}_{datehour}" in columns else None for forecast in run_dates])
+
+         if vmin is None:
+             vmin = self._gdf[columns].min().min()
+         if vmax is None:
+             vmax = self._gdf[columns].max().max()
+
+         vmin *= factor
+         vmax *= factor
+
+         norm = plt.Normalize(vmin=vmin, vmax=vmax)
+         colors = self._colormap(norm(self._gdf[columns[0]].values * factor))
+
+         # Create an axis for the colorbar without modifying the last axes
+         divider = make_axes_locatable(axes[-1])
+         cax = divider.append_axes("right", size="5%", pad=0.1)
+
+         # Add the colorbar
+         sm = ScalarMappable(norm=norm, cmap=self._colormap)
+         cbar = fig.colorbar(sm, cax=cax)
+
+         patches = []
+         collection = []
+         for idx, forecast in enumerate(run_dates):
+             patches.append([MplPolygon(geom.exterior.coords, closed=True) for geom in self._gdf.geometry])
+             collection.append(PatchCollection(patches[idx], facecolor=colors, edgecolor='black'))
+
+             ax = axes[idx]
+             ax.add_collection(collection[idx])
+             # Plot cities but just the edges
+             self._cities.plot(ax=ax, facecolor='none', edgecolor='black', linewidth=0.5)
+
+             if bounds == 'Belgium':
+                 ax.set_xlim(self._cities.total_bounds[[0, 2]])
+                 ax.set_ylim(self._cities.total_bounds[[1, 3]])
+             elif isinstance(bounds, (list, tuple)) and len(bounds) == 2:
+                 ax.set_xlim(bounds[0][0], bounds[0][1])
+                 ax.set_ylim(bounds[1][0], bounds[1][1])
+             else:
+                 ax.set_xlim(self._gdf.total_bounds[[0, 2]])
+                 ax.set_ylim(self._gdf.total_bounds[[1, 3]])
+             ax.set_aspect('equal')
+
+         fig.tight_layout()
+
+         def update(column):
+             for idx, ax in enumerate(axes):
+                 if column[idx] is None:
+                     # All white
+                     new_colors = np.ones((self._gdf.shape[0], 4))
+                 else:
+                     new_colors = self._colormap(norm(self._gdf[column[idx]].values * factor))
+                 collection[idx].set_color(new_colors)
+                 ax.set_title(f"{_convert_col2date_str(column[idx])} - time UTC")
+
+             if wx.GetApp() is not None:
+                 wx.GetApp().Yield()  # Allow GUI to update
+
+         ani = animation.FuncAnimation(fig, update, frames=columns_lists, repeat=False)
+
+         return ani
+
1085
+     def _create_comparison_animation_diff(self, filename: GribFiles,
+                                           run_dates: list[str],
+                                           size = 10,
+                                           bounds: tuple[list[float], list[float]] | str = 'Belgium',
+                                           vmin:float = None,
+                                           vmax:float = None,
+                                           factor: float = 1.0) -> animation.FuncAnimation:
+         """
+         Create a video from the diff GeoDataFrame (hourly increments), one subplot per run date.
+
+         :param filename: The GRIB file to process.
+         :param run_dates: List of forecast run dates to compare.
+         :param size: Size of each subplot.
+         :param bounds: Bounds for the plot. Can be 'Belgium' or a pair [xlim, ylim].
+         :param vmin: Minimum value for color scaling.
+         :param vmax: Maximum value for color scaling.
+         :param factor: Factor to multiply the data values for scaling.
+         :return: The animation object.
+         """
+
+         assert len(run_dates) > 1, "At least two forecasts must be provided."
+
+         self.load_grib_data_to_gdf(filename, run_dates)
+         self._compute_diff()
+
+         from matplotlib.collections import PatchCollection
+         from matplotlib.patches import Polygon as MplPolygon
+         from matplotlib.cm import ScalarMappable
+         from mpl_toolkits.axes_grid1 import make_axes_locatable
+
+         fig, axes = plt.subplots(ncols=len(run_dates), figsize=(len(run_dates) * size, size))
+
+         fig.suptitle(_('Rain intensity [mm/h]'))
+
+         columns = self.get_forecast_columns(diff=True)
+         # all dates and hours
+         dates_hours = list(set([col.split('_')[1] + '_' + col.split('_')[2] for col in columns]))
+         dates_hours.sort()
+
+         columns_lists = []
+         for datehour in dates_hours:
+             columns_lists.append([f"{forecast}_{datehour}" if f"{forecast}_{datehour}" in columns else None for forecast in run_dates])
+
+         if vmin is None:
+             vmin = self._gdf_diff[columns].min().min()
+         if vmax is None:
+             vmax = self._gdf_diff[columns].max().max()
+
+         vmin *= factor
+         vmax *= factor
+
+         norm = plt.Normalize(vmin=vmin, vmax=vmax)
+         colors = self._colormap(norm(self._gdf_diff[columns[0]].values * factor))
+
+         # Create an axis for the colorbar without modifying the last axes
+         divider = make_axes_locatable(axes[-1])
+         cax = divider.append_axes("right", size="5%", pad=0.1)
+
+         # Add the colorbar
+         sm = ScalarMappable(norm=norm, cmap=self._colormap)
+         cbar = fig.colorbar(sm, cax=cax)
+
+         patches = []
+         collection = []
+         for idx, forecast in enumerate(run_dates):
+             patches.append([MplPolygon(geom.exterior.coords, closed=True) for geom in self._gdf_diff.geometry])
+             collection.append(PatchCollection(patches[idx], facecolor=colors, edgecolor='black'))
+
+             ax = axes[idx]
+             ax.add_collection(collection[idx])
+             # Plot cities but just the edges
+             self._cities.plot(ax=ax, facecolor='none', edgecolor='black', linewidth=0.5)
+
+             if bounds == 'Belgium':
+                 ax.set_xlim(self._cities.total_bounds[[0, 2]])
+                 ax.set_ylim(self._cities.total_bounds[[1, 3]])
+             elif isinstance(bounds, (list, tuple)) and len(bounds) == 2:
+                 ax.set_xlim(bounds[0][0], bounds[0][1])
+                 ax.set_ylim(bounds[1][0], bounds[1][1])
+             else:
+                 ax.set_xlim(self._gdf_diff.total_bounds[[0, 2]])
+                 ax.set_ylim(self._gdf_diff.total_bounds[[1, 3]])
+             ax.set_aspect('equal')
+
+         fig.tight_layout()
+
+         def update(column):
+             for idx, ax in enumerate(axes):
+                 if column[idx] is None:
+                     # All white
+                     new_colors = np.ones((self._gdf_diff.shape[0], 4))
+                 else:
+                     new_colors = self._colormap(norm(self._gdf_diff[column[idx]].values * factor))
+                 collection[idx].set_color(new_colors)
+                 ax.set_title(_convert_col2date_str(column[idx]))
+
+             if wx.GetApp() is not None:
+                 wx.GetApp().Yield()  # Allow GUI to update
+
+         ani = animation.FuncAnimation(fig, update, frames=columns_lists, repeat=False)
+
+         return ani
+
1188
+     def __del__(self):
+         """
+         Destructor to ensure the FTP connection is closed.
+         """
+         try:
+             self._ftp_close()
+         except Exception as e:
+             logging.error(f"Error closing FTP connection: {e}")
+
1197
+     def video_cumulated_rain(self, run_date:str, output_file:Path, fps:int = 2):
+         """
+         Create a MP4 video of the cumulated rain forecast for one run.
+
+         :param run_date: The forecast run date string.
+         :param output_file: The output MP4 file path.
+         :param fps: Frames per second for the video.
+         """
+
+         output_file = Path(output_file)
+
+         if output_file.suffix != '.mp4':
+             output_file = output_file.with_suffix('.mp4')
+
+         ani = self._create_animation(GribFiles.FILE_TotPrecip, run_date, factor=1000.)
+
+         output_file.parent.mkdir(parents=True, exist_ok=True)
+         ani.save(output_file, writer='ffmpeg', fps=fps)
+
+         return output_file
+
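A minimal usage sketch (the run date is whichever the server currently publishes; saving with `writer='ffmpeg'` requires a working ffmpeg installation):

    from pathlib import Path

    alaro = IRM_Alaro()
    if alaro.run_dates:
        out = alaro.video_cumulated_rain(alaro.run_dates[-1], Path('videos/alaro.mp4'), fps=2)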
1218
+     def videos_cumulated_rain_allforecasts(self, output_dir:Path, fps:int = 2, run_dates: str | list[str] = None) -> list[Path] | None:
+         """
+         Create MP4 videos of the cumulated rain forecasts, one per run date.
+
+         :param output_dir: The output directory for the MP4 files.
+         :param fps: Frames per second for the video.
+         :param run_dates: The run date(s) to process. If None, all available run dates are used.
+         """
+         output_dir = Path(output_dir)
+
+         if run_dates is not None:
+             if isinstance(run_dates, str):
+                 files = [run_dates]
+             else:
+                 files = run_dates
+         else:
+             files = self.run_dates
+
+         if files:
+             videos_out = []
+             output_dir.mkdir(parents=True, exist_ok=True)
+             for forecast in files:
+                 output_file = output_dir / f"Alaro_cumulated_rain_{forecast}.mp4"
+                 self.video_cumulated_rain(forecast, output_file, fps=fps)
+                 videos_out.append(output_file)
+
+             return videos_out
+         return None
+
1245
+     def video_gradient_cumulated_rain_compare(self, output_file:Path, fps:int = 2, run_dates: str | list[str] = None) -> Path | None:
+         """
+         Create a MP4 video comparison of cumulated rain forecasts.
+
+         :param output_file: The output MP4 file path.
+         :param fps: Frames per second for the video.
+         :param run_dates: The run date(s) to compare. If None, all available run dates are used.
+         """
+
+         output_file = Path(output_file)
+
+         if output_file.suffix != '.mp4':
+             output_file = output_file.with_suffix('.mp4')
+
+         if run_dates is None:
+             files = self.list_run_dates()
+         elif isinstance(run_dates, str):
+             files = [run_dates]
+         else:
+             files = run_dates
+
+         if files:
+             ani = self._create_comparison_animation_diff(GribFiles.FILE_TotPrecip, files, factor=1000.)
+
+             output_file.parent.mkdir(parents=True, exist_ok=True)
+             ani.save(output_file, writer='ffmpeg', fps=fps)
+
+             return output_file
+         return None
+
1274
+     def convert_gdf2dataframe(self, X:float, Y:float, use_diff:bool = False) -> pd.DataFrame:
+         """
+         Convert the GeoDataFrame to a Pandas DataFrame for a given point (X, Y).
+
+         :param X: The X coordinate.
+         :param Y: The Y coordinate.
+         :param use_diff: If True, use the diff GeoDataFrame (hourly increments).
+         :return: The Pandas DataFrame with the data for the nearest grid cell.
+         """
+
+         if self._gdf is None:
+             logging.error("No data loaded. Please load GRIB data first.")
+             return pd.DataFrame()
+
+         if use_diff and self._gdf_diff is None:
+             self._compute_diff()
+
+         gdf = self._gdf_diff if use_diff else self._gdf
+
+         point = Point(X, Y)
+         # Find the nearest polygon
+         distances = gdf.geometry.distance(point)
+         idx_min = distances.idxmin()
+
+         # The data are currently stored column-wise; we return a DataFrame with 3 columns:
+         # 'forecast_date' (the date of the forecast), 'value' (the forecast value)
+         # and 'run_date' (the date of the run).
+
+         columns = self.get_forecast_columns()
+         df = pd.DataFrame({
+             'forecast_date': [_extract_dates_from_columnstr(col)[1] for col in columns],
+             'value': [gdf.loc[idx_min][col] for col in columns],
+             'run_date': [_extract_dates_from_columnstr(col)[0] for col in columns]
+         })
+
+         return df
+
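A hedged sketch of point extraction (the Lambert 72 coordinates below are an arbitrary example near Liège); `_plot4XY` below pivots this same frame with one column per run:

    df = alaro.convert_gdf2dataframe(X=234_000., Y=148_000., use_diff=False)
    df_pivot = df.pivot(index='forecast_date', columns='run_date', values='value')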
1312
+     def _plot4XY(self, X:float, Y:float, factor:float = 1., size:tuple[int, int]=(10, 5), use_diff:bool = False, figax:tuple[plt.Figure, plt.Axes] = None) -> plt.Figure:
+         """
+         Plot the data for a given point (X, Y).
+
+         :param X: The X coordinate.
+         :param Y: The Y coordinate.
+         :param factor: The factor to multiply the data values for scaling.
+         :param size: The size of the plot.
+         :param use_diff: If True, plot the hourly increments instead of the cumulated totals.
+         :param figax: Optional (Figure, Axes) pair to draw on.
+         :return: The Matplotlib Figure object.
+         """
+
+         df = self.convert_gdf2dataframe(X, Y, use_diff=use_diff)
+
+         if df.empty:
+             logging.error("No data available for the given point.")
+             return None
+
+         if figax is not None:
+             fig, ax = figax
+         else:
+             fig, ax = plt.subplots(figsize=size)
+
+         # Pivot the dataframe to have 'forecast_date' as index and 'run_date' as columns
+         df_pivot = df.pivot(index='forecast_date', columns='run_date', values='value')
+
+         # Plot each run_date as a separate line
+         for run_date in df_pivot.columns:
+             ax.plot(df_pivot.index, df_pivot[run_date] * factor, marker='o', label=run_date)
+
+         ax.set_title(f"Total precipitation at point ({X}, {Y})")
+         ax.set_xlabel("Forecast Date (time zone: UTC)")
+         ax.set_ylabel("Total precipitation [mm]")
+         ax.legend(title="Run Date")
+         ax.grid(True)
+
+         # Set xticks every rounded 6 hours
+         import matplotlib.dates as mdates
+         ax.set_xlim([df_pivot.index.min(), df_pivot.index.max()])
+         ax.xaxis.set_major_locator(mdates.HourLocator(interval=6))
+         ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H+00'))
+
+         fig.autofmt_xdate()
+
+         return fig
+
1357
+     def _bar4XY(self, X:float, Y:float, factor:float = 1., size:tuple[int, int]=(10, 5), use_diff:bool = False, figax:tuple[plt.Figure, plt.Axes] = None) -> plt.Figure:
+         """
+         Plot the data for a given point (X, Y) as bars.
+
+         :param X: The X coordinate.
+         :param Y: The Y coordinate.
+         :param factor: The factor to multiply the data values for scaling.
+         :param size: The size of the plot.
+         :param use_diff: If True, plot the hourly increments instead of the cumulated totals.
+         :param figax: Optional (Figure, Axes) pair to draw on.
+         :return: The Matplotlib Figure object.
+         """
+
+         df = self.convert_gdf2dataframe(X, Y, use_diff=use_diff)
+
+         if df.empty:
+             logging.error("No data available for the given point.")
+             return None
+
+         if figax is not None:
+             fig, ax = figax
+         else:
+             fig, ax = plt.subplots(figsize=size)
+
+         # Pivot the dataframe to have 'forecast_date' as index and 'run_date' as columns
+         df_pivot = df.pivot(index='forecast_date', columns='run_date', values='value')
+
+         # Plot each run_date as a separate bar series
+         for run_date in df_pivot.columns:
+             # Filter NaN values
+             used_range = df_pivot[run_date].notna()
+             ax.bar(df_pivot.index[used_range], df_pivot[run_date][used_range] * factor, width=td(seconds=3600), label=run_date, align='edge', alpha=0.7)
+
+         ax.set_title(f"Rain intensity at point ({X}, {Y})")
+         ax.set_xlabel("Forecast Date (time zone: UTC)")
+         ax.set_ylabel("Rain intensity [mm/h]")
+
+         # Set xticks every rounded 6 hours
+         import matplotlib.dates as mdates
+         ax.set_xlim([df_pivot.index.min(), df_pivot.index.max()])
+         ax.xaxis.set_major_locator(mdates.HourLocator(interval=6))
+         ax.xaxis.set_major_formatter(mdates.DateFormatter('%Y-%m-%d %H+00'))
+
+         ax.legend(title="Run Date")
+         ax.grid(True)
+
+         fig.autofmt_xdate()
+
+         return fig
+
1405
+     def plot_TotPrec4XY(self, X:float, Y:float, size:tuple[int, int]=(10, 5)) -> plt.Figure:
+         """
+         Plot the total precipitation data for a given point (X, Y).
+
+         :param X: The X coordinate.
+         :param Y: The Y coordinate.
+         :param size: The size of the plot.
+         :return: The Matplotlib Figure object.
+         """
+
+         if self._gdf is None:
+             self.load_grib_data_to_gdf(GribFiles.FILE_TotPrecip, self.run_dates)
+
+         return self._plot4XY(X, Y, factor=1000., size=size)
+
+     def plot_RainIntensity4XY(self, X:float, Y:float, size:tuple[int, int]=(10, 5)) -> plt.Figure:
+         """
+         Plot the rain intensity data for a given point (X, Y).
+
+         :param X: The X coordinate.
+         :param Y: The Y coordinate.
+         :param size: The size of the plot.
+         :return: The Matplotlib Figure object.
+         """
+
+         if self._gdf is None:
+             self.load_grib_data_to_gdf(GribFiles.FILE_TotPrecip, self.run_dates)
+
+         fig = self._bar4XY(X, Y, factor=1000., size=size, use_diff=True)
+
+         return fig
+
+     def plot_Rain_and_TotPrecip4XY(self, X:float, Y:float, size:tuple[int, int]=(10, 10)) -> plt.Figure:
+         """
+         Plot the rain intensity and total precipitation data for a given point (X, Y).
+
+         :param X: The X coordinate.
+         :param Y: The Y coordinate.
+         :param size: The size of the plot.
+         :return: The Matplotlib Figure object.
+         """
+
+         if self._gdf is None:
+             self.load_grib_data_to_gdf(GribFiles.FILE_TotPrecip, self.run_dates)
+
+         fig, (ax1, ax2) = plt.subplots(nrows=2, figsize=size)
+
+         self._plot4XY(X, Y, factor=1000., size=size, use_diff=False, figax=(fig, ax1))
+         self._bar4XY(X, Y, factor=1000., size=size, use_diff=True, figax=(fig, ax2))
+
+         fig.tight_layout()
+
+         return fig
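
Putting it together, a minimal end-to-end sketch, assuming the FTP server is reachable and at least one run is published (coordinates arbitrary):

    from wolfhece.irm_alaro import IRM_Alaro

    alaro = IRM_Alaro()
    fig = alaro.plot_Rain_and_TotPrecip4XY(X=234_000., Y=148_000.)
    if fig is not None:
        fig.savefig('alaro_point_forecast.png')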