wolfhece 2.2.37__py3-none-any.whl → 2.2.39__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53) hide show
  1. wolfhece/Coordinates_operations.py +5 -0
  2. wolfhece/GraphNotebook.py +72 -1
  3. wolfhece/GraphProfile.py +1 -1
  4. wolfhece/MulticriteriAnalysis.py +1579 -0
  5. wolfhece/PandasGrid.py +62 -1
  6. wolfhece/PyCrosssections.py +194 -43
  7. wolfhece/PyDraw.py +891 -73
  8. wolfhece/PyGui.py +913 -72
  9. wolfhece/PyGuiHydrology.py +528 -74
  10. wolfhece/PyPalette.py +26 -4
  11. wolfhece/PyParams.py +33 -0
  12. wolfhece/PyPictures.py +2 -2
  13. wolfhece/PyVertex.py +32 -0
  14. wolfhece/PyVertexvectors.py +147 -75
  15. wolfhece/PyWMS.py +52 -36
  16. wolfhece/acceptability/acceptability.py +15 -8
  17. wolfhece/acceptability/acceptability_gui.py +507 -360
  18. wolfhece/acceptability/func.py +80 -183
  19. wolfhece/apps/version.py +1 -1
  20. wolfhece/compare_series.py +480 -0
  21. wolfhece/drawing_obj.py +12 -1
  22. wolfhece/hydrology/Catchment.py +228 -162
  23. wolfhece/hydrology/Internal_variables.py +43 -2
  24. wolfhece/hydrology/Models_characteristics.py +69 -67
  25. wolfhece/hydrology/Optimisation.py +893 -182
  26. wolfhece/hydrology/PyWatershed.py +267 -165
  27. wolfhece/hydrology/SubBasin.py +185 -140
  28. wolfhece/hydrology/climate_data.py +334 -0
  29. wolfhece/hydrology/constant.py +11 -0
  30. wolfhece/hydrology/cst_exchanges.py +76 -1
  31. wolfhece/hydrology/forcedexchanges.py +413 -49
  32. wolfhece/hydrology/hyetograms.py +2095 -0
  33. wolfhece/hydrology/read.py +65 -5
  34. wolfhece/hydrometry/kiwis.py +42 -26
  35. wolfhece/hydrometry/kiwis_gui.py +7 -2
  36. wolfhece/insyde_be/INBE_func.py +746 -0
  37. wolfhece/insyde_be/INBE_gui.py +1776 -0
  38. wolfhece/insyde_be/__init__.py +3 -0
  39. wolfhece/interpolating_raster.py +366 -0
  40. wolfhece/irm_alaro.py +1457 -0
  41. wolfhece/irm_qdf.py +889 -57
  42. wolfhece/lifewatch.py +6 -3
  43. wolfhece/picc.py +124 -8
  44. wolfhece/pyLandUseFlanders.py +146 -0
  45. wolfhece/pydownloader.py +2 -1
  46. wolfhece/pywalous.py +225 -31
  47. wolfhece/toolshydrology_dll.py +149 -0
  48. wolfhece/wolf_array.py +63 -25
  49. {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/METADATA +3 -1
  50. {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/RECORD +53 -42
  51. {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/WHEEL +0 -0
  52. {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/entry_points.txt +0 -0
  53. {wolfhece-2.2.37.dist-info → wolfhece-2.2.39.dist-info}/top_level.txt +0 -0
wolfhece/irm_qdf.py CHANGED
@@ -16,8 +16,10 @@ from time import sleep
16
16
  from typing import Literal, Union
17
17
  import logging
18
18
 
19
+ import matplotlib as mpl
19
20
  from tqdm import tqdm
20
21
  import pandas as pd
22
+ import geopandas as gpd
21
23
  import matplotlib.pyplot as plt
22
24
  from scipy.optimize import minimize,curve_fit
23
25
  from scipy.stats import gumbel_r,genextreme
@@ -28,6 +30,9 @@ import pdfplumber
28
30
 
29
31
  from .ins import Localities
30
32
  from .PyTranslate import _
33
+ from .pydownloader import toys_dataset, DATADIR
34
+ from .PyVertexvectors import Zones, vector, Point, Polygon, wolfvertex as wv, getIfromRGB
35
+ from .drawing_obj import Element_To_Draw
31
36
 
32
37
  Montana_a1 = 'a1'
33
38
  Montana_a2 = 'a2'
@@ -75,9 +80,11 @@ dur30d = '30 d'
75
80
 
76
81
  durationstext=[dur10min,dur20min,dur30min,dur1h,dur2h,dur3h,dur6h,dur12h,dur1d,
77
82
  dur2d,dur3d,dur4d,dur5d,dur7d,dur10d,dur15d,dur20d,dur25d,dur30d]
78
- durations=np.array([10,20,30,60,120,180,360,720],np.float64)
79
- durationsd=np.array([1,2,3,4,5,7,10,15,20,25,30],np.float64)*24.*60.
83
+ durations = np.array([10,20,30,60,120,180,360,720],np.float64)
84
+ durationsd = np.array([1,2,3,4,5,7,10,15,20,25,30],np.float64)*24.*60.
85
+
80
86
  durations = np.concatenate([durations,durationsd])
87
+ durations_seconds = durations * 60. # Convert durations to seconds
81
88
 
82
89
  class MontanaIRM():
83
90
  """ Classe pour la gestion des relations de Montana pour les précipitations """
@@ -195,7 +202,7 @@ class MontanaIRM():
195
202
  """
196
203
  x,y = self.get_hyeto(durmax,T,r)
197
204
 
198
- fig,ax = plt.subplots(1,1,figsize=[15,10])
205
+ fig,ax = plt.subplots(1,1,figsize=[15,10], tight_layout=True)
199
206
  ax.plot(x,y,label=_("Hyetogram"))
200
207
 
201
208
  ax.set_xlabel(_('Time [min]'))
@@ -211,7 +218,7 @@ class MontanaIRM():
211
218
  :param r: Decentration coefficient
212
219
  """
213
220
 
214
- fig,ax = plt.subplots(1,1,figsize=[15,10])
221
+ fig,ax = plt.subplots(1,1,figsize=[15,10], tight_layout=True)
215
222
 
216
223
  for curT in RT:
217
224
  x,y = self.get_hyeto(durmax,curT,r)
@@ -250,6 +257,8 @@ class Qdf_IRM():
250
257
  - confintup : la valeur supérieure de l'intervalle de confiance (+2*stddev)
251
258
  - montanacoeff : les coeffciients de Montana
252
259
 
260
+ L'index est le temps (dur10min, dur30min, dur1h, ... -- durationstext) et les colonnes sont les périodes de retour (RT2, RT5, RT10, ... -- RT).
261
+
253
262
  Il est par exemple possible d'accéder aux coefficients de Montana via l'une de ces lignes ou une combinaison :
254
263
 
255
264
  display(myqdf.montanacoeff)
@@ -264,7 +273,8 @@ class Qdf_IRM():
264
273
  code:int= 0, name= '',
265
274
  force_import= False,
266
275
  ins:Literal['2018', '2019', '2025', 2018, 2019, 2025] = 2018,
267
- localities:Localities = None) -> None:
276
+ localities:Localities = None,
277
+ dataframe:pd.DataFrame = None) -> None:
268
278
 
269
279
  if localities is None:
270
280
 
@@ -274,12 +284,16 @@ class Qdf_IRM():
274
284
  else:
275
285
  self.myloc = localities
276
286
 
277
- self.store = store_path
278
-
287
+ self.store = Path(store_path)
279
288
 
280
289
  # This one will hold Qdf data of one locality. If it is None it means no
281
290
  # data has been loaded.
282
291
  self.qdf = None
292
+ self.standarddev = None
293
+ self.confintlow = None
294
+ self.confintup = None
295
+ self.montanacoeff = None
296
+ self.montana = None
283
297
 
284
298
  if force_import:
285
299
  # Import all QDF's from IRM
@@ -287,15 +301,60 @@ class Qdf_IRM():
287
301
  self._code = None
288
302
  self._name = None
289
303
 
290
- if code !=0:
291
- if self.ins_read_excel(code=str(code)):
304
+ self._qdf_image_table = None
305
+ self._qdf_image_plot = None
306
+
307
+ if dataframe is not None:
308
+ """ If a dataframe is provided, we assume it contains the QDF data
309
+ and we set it directly.
310
+ """
311
+ self._code = int(code)
312
+ self._name = self.myloc.get_namefromINS(code)
313
+
314
+ # Find columns containing '_Q'
315
+ qdf_columns = ['Duration'] + [col for col in dataframe.columns if '_Q' in col]
316
+ self.qdf = dataframe[qdf_columns].copy()
317
+
318
+ #replace duration in seconds with duration texts
319
+ self.qdf['Duration'] = self.qdf['Duration'].apply(lambda x: durationstext[list(durations_seconds).index(x)] if x in durations_seconds else x)
320
+ # replace columns names
321
+ self.qdf.columns = [col.replace('_Q', '') for col in self.qdf.columns]
322
+ # Set duration as index
323
+ self.qdf.set_index('Duration', inplace=True)
324
+ # Remove the name of the index
325
+ self.qdf.index.name = None
326
+
327
+ # Convert columns name to string
328
+ self.qdf.columns = [str(col) for col in self.qdf.columns]
329
+
330
+ std_columns = ['Duration'] + [col for col in dataframe.columns if '_Std' in col]
331
+ self.standarddev = dataframe[std_columns].copy()
332
+ self.standarddev['Duration'] = self.standarddev['Duration'].apply(lambda x: durationstext[list(durations_seconds).index(x)] if x in durations_seconds else x)
333
+ self.standarddev.set_index('Duration', inplace=True)
334
+
335
+ confintlow_columns = ['Duration'] + [col for col in dataframe.columns if '_Low' in col]
336
+ self.confintlow = dataframe[confintlow_columns].copy()
337
+ self.confintlow['Duration'] = self.confintlow['Duration'].apply(lambda x: durationstext[list(durations_seconds).index(x)] if x in durations_seconds else x)
338
+ self.confintlow.set_index('Duration', inplace=True)
339
+
340
+ confintup_columns = ['Duration'] + [col for col in dataframe.columns if '_Up' in col]
341
+ self.confintup = dataframe[confintup_columns].copy()
342
+ self.confintup['Duration'] = self.confintup['Duration'].apply(lambda x: durationstext[list(durations_seconds).index(x)] if x in durations_seconds else x)
343
+ self.confintup.set_index('Duration', inplace=True)
344
+
345
+ self._read_csv_or_excel_Montana_only(code = self._code)
346
+
347
+ self.fit_all()
348
+
349
+ elif code !=0:
350
+ if self._read_csv_or_excel(code=str(code)):
292
351
  self.fit_all()
293
352
  self._code = code
294
353
  self._name = self.myloc.get_namefromINS(code)
295
354
  else:
296
355
  logging.debug(f"INS code {code} not found in the store")
297
356
  elif name!='':
298
- if self.ins_read_excel(name=name):
357
+ if self._read_csv_or_excel(name=name):
299
358
  self.fit_all()
300
359
  self._name = name
301
360
  self._code = self.myloc.get_INSfromname(name)
@@ -330,7 +389,7 @@ class Qdf_IRM():
330
389
 
331
390
  for curcode in self.myloc.get_allcodes():
332
391
 
333
- self.ins_read_excel(code=curcode)
392
+ self._read_csv_or_excel(code=curcode)
334
393
  if self.montanacoeff is not None:
335
394
  self.montanacoeff['INS'] = [curcode]*12
336
395
  self.montanacoeff['Name'] = [self.myloc.get_namefromINS(int(curcode))]*12
@@ -344,7 +403,7 @@ class Qdf_IRM():
344
403
 
345
404
 
346
405
  @classmethod
347
- def importfromwebsite(cls, store_path= 'irm', verbose:bool= False, waitingtime:float= .01, ins:Literal['2018', '2019', '2025', 2018, 2019, 2025] = 2018, ins_code: int = None):
406
+ def importfromwebsite(cls, store_path:Path = 'irm', verbose:bool= False, waitingtime:float= .01, ins:Literal['2018', '2019', '2025', 2018, 2019, 2025] = 2018, ins_code: int = None):
348
407
  """ Import Excel files for one or all municipalities from the IRM website
349
408
 
350
409
  :param store_path: Where to store the downloaded data. Directory will be created if it doesn't exists.
@@ -363,11 +422,13 @@ class Qdf_IRM():
363
422
 
364
423
  myloc = Localities(ins)
365
424
 
425
+ store_path = Path(store_path)
426
+
366
427
  if ins_code is not None:
367
428
  codes_to_load = [ins_code]
368
429
  else:
369
- if not path.exists(store_path):
370
- mkdir(store_path)
430
+ if not store_path.exists():
431
+ store_path.mkdir(parents=True, exist_ok=True)
371
432
  codes_to_load = myloc.inscode2name
372
433
 
373
434
  for key,myins in enumerate(codes_to_load):
@@ -376,14 +437,14 @@ class Qdf_IRM():
376
437
  #Obtention du fichiers depuis le site web de l'IRM
377
438
  response=requests.get(url)
378
439
 
379
- if str(response.content).find("Page not found")==-1 :
440
+ if str(response.content).find("not found")==-1:
380
441
 
381
442
  # Make sure we create the store path only if we have
382
443
  # something to put inside.
383
- if ins_code is not None and not path.exists(store_path):
384
- mkdir(store_path)
444
+ if ins_code is not None and not store_path.exists():
445
+ store_path.mkdir(parents=True, exist_ok=True)
385
446
 
386
- file=open(path.join(store_path,str(myins)+".xlsx"), 'wb')
447
+ file=open(store_path / (str(myins)+".xlsx"), 'wb')
387
448
  file.write(response.content)
388
449
  file.close()
389
450
  if verbose:
@@ -397,8 +458,13 @@ class Qdf_IRM():
397
458
 
398
459
  sleep(waitingtime)
399
460
 
400
- def ins_read_excel(self,code='',name=''):
401
- """Lecture des caractéristiques d'une commune depuis le fichier Excel associé au code INS"""
461
+ def _read_csv_or_excel(self, code='', name=''):
462
+ """ Lecture des caractéristiques d'une commune
463
+ depuis le fichier CSV ou Excel associé au code INS
464
+
465
+ :param code: le code INS de la commune
466
+ :param name: le nom de la commune
467
+ """
402
468
  import warnings
403
469
 
404
470
  if code !='':
@@ -412,7 +478,7 @@ class Qdf_IRM():
412
478
  self._code = loccode
413
479
  self._name = name
414
480
 
415
- store = Path(self.store)
481
+ store = self.store
416
482
 
417
483
  pathname_xls = store / (loccode+".xlsx")
418
484
  pathname_csv = store / 'csv' / loccode
@@ -425,29 +491,71 @@ class Qdf_IRM():
425
491
  self.montanacoeff = pd.read_csv(pathname_csv / 'montanacoeff.csv', index_col=0)
426
492
  self.montana = MontanaIRM(self.montanacoeff)
427
493
  return True
494
+ else:
495
+ # with warnings.catch_warnings(record=True):
496
+ # warnings.simplefilter("always")
497
+ if path.exists(pathname_xls):
498
+ self.qdf=pd.read_excel(pathname_xls,"Return level",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
499
+ self.standarddev=pd.read_excel(pathname_xls,"Standard deviation",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
500
+ self.confintlow=pd.read_excel(pathname_xls,"Conf. interval, lower bound",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
501
+ self.confintup=pd.read_excel(pathname_xls,"Conf. interval, upper bound",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
502
+ self.montanacoeff=pd.read_excel(pathname_xls,"Montana coefficients",index_col=0,skiprows=range(11),nrows=12,usecols="A:G",engine='openpyxl', engine_kwargs={'read_only': True})
503
+ self.montana = MontanaIRM(self.montanacoeff)
504
+ return True
505
+ else:
506
+ self.qdf=None
507
+ self.standarddev=None
508
+ self.confintlow=None
509
+ self.confintup=None
510
+ self.montanacoeff=None
511
+ self.montana=None
512
+ return False
513
+
514
+ def _read_csv_or_excel_Montana_only(self, code='', name=''):
515
+ """ Lecture des caractéristiques d'une commune depuis
516
+ le fichier CSV Excel associé au code INS
517
+
518
+ :param code: le code INS de la commune
519
+ :param name: le nom de la commune
520
+ """
521
+
522
+ import warnings
523
+
524
+ if code !='':
525
+ loccode=str(code)
526
+ name = self.myloc.get_namefromINS(int(loccode))
527
+ elif name!='':
528
+ if not name.lower() in self.myloc.insname2code.keys():
529
+ return _('Bad name ! - Retry')
530
+ loccode=str(self.myloc.insname2code[name.lower()])
531
+
532
+ self._code = loccode
533
+ self._name = name
534
+
535
+ store = self.store
536
+
537
+ pathname_xls = store / (loccode+".xlsx")
538
+ pathname_csv = store / 'csv' / loccode
539
+
540
+ if pathname_csv.exists():
541
+ self.montanacoeff = pd.read_csv(pathname_csv / 'montanacoeff.csv', index_col=0)
542
+ self.montana = MontanaIRM(self.montanacoeff)
543
+ return True
428
544
  else:
429
545
  with warnings.catch_warnings(record=True):
430
546
  warnings.simplefilter("always")
431
547
  if path.exists(pathname_xls):
432
- self.qdf=pd.read_excel(pathname_xls,"Return level",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
433
- self.standarddev=pd.read_excel(pathname_xls,"Standard deviation",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
434
- self.confintlow=pd.read_excel(pathname_xls,"Conf. interval, lower bound",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
435
- self.confintup=pd.read_excel(pathname_xls,"Conf. interval, upper bound",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
436
548
  self.montanacoeff=pd.read_excel(pathname_xls,"Montana coefficients",index_col=0,skiprows=range(11),nrows=12,usecols="A:G",engine='openpyxl', engine_kwargs={'read_only': True})
437
549
  self.montana = MontanaIRM(self.montanacoeff)
438
550
  return True
439
551
  else:
440
- self.qdf=None
441
- self.standarddev=None
442
- self.confintlow=None
443
- self.confintup=None
444
552
  self.montanacoeff=None
445
553
  self.montana=None
446
554
  return False
447
555
 
448
556
  @classmethod
449
557
  def convert_xls2csv(cls, store_path= 'irm', ins:Literal['2018', '2019', '2025', 2018, 2019, 2025] = 2018):
450
- """ Convert all Excel files to JSON files
558
+ """ Convert all Excel files to CSV files
451
559
 
452
560
  :param store_path: Where to store the downloaded data. Directory will be created if it doesn't exists.
453
561
  :param ins: The year of the INS codes to use.
@@ -460,24 +568,30 @@ class Qdf_IRM():
460
568
  for key,myins in enumerate(myloc.get_allcodes()):
461
569
  pathname = store_path / (str(myins)+".xlsx")
462
570
  if pathname.exists():
463
-
464
- qdf=pd.read_excel(pathname,"Return level",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
465
- standarddev=pd.read_excel(pathname,"Standard deviation",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
466
- confintlow=pd.read_excel(pathname,"Conf. interval, lower bound",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
467
- confintup=pd.read_excel(pathname,"Conf. interval, upper bound",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
468
- montanacoeff=pd.read_excel(pathname,"Montana coefficients",index_col=0,skiprows=range(11),nrows=12,usecols="A:G",engine='openpyxl', engine_kwargs={'read_only': True})
469
-
470
- store_csv = store_path / 'csv' / str(myins)
471
- store_csv.mkdir(exist_ok=True)
472
-
473
- qdf.to_csv(store_csv / 'qdf.csv')
474
- standarddev.to_csv(store_csv / 'standarddev.csv')
475
- confintlow.to_csv(store_csv / 'confintlow.csv')
476
- confintup.to_csv(store_csv / 'confintup.csv')
477
- montanacoeff.to_csv(store_csv / 'montanacoeff.csv')
571
+ try:
572
+ logging.info(f"Converting {pathname} to CSV files")
573
+ qdf=pd.read_excel(pathname,"Return level",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
574
+ standarddev=pd.read_excel(pathname,"Standard deviation",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
575
+ confintlow=pd.read_excel(pathname,"Conf. interval, lower bound",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
576
+ confintup=pd.read_excel(pathname,"Conf. interval, upper bound",index_col=0,skiprows=range(7),nrows=19,usecols="A:M",engine='openpyxl', engine_kwargs={'read_only': True})
577
+ montanacoeff=pd.read_excel(pathname,"Montana coefficients",index_col=0,skiprows=range(11),nrows=12,usecols="A:G",engine='openpyxl', engine_kwargs={'read_only': True})
578
+
579
+ store_csv = store_path / 'csv' / str(myins)
580
+ store_csv.mkdir(exist_ok=True, parents=True)
581
+
582
+ qdf.to_csv(store_csv / 'qdf.csv')
583
+ standarddev.to_csv(store_csv / 'standarddev.csv')
584
+ confintlow.to_csv(store_csv / 'confintlow.csv')
585
+ confintup.to_csv(store_csv / 'confintup.csv')
586
+ montanacoeff.to_csv(store_csv / 'montanacoeff.csv')
587
+ except Exception as e:
588
+ logging.error(f"Error processing {pathname}: {e}")
589
+ else:
590
+ logging.warning(f"File {pathname} does not exist, skipping conversion.")
591
+ logging.info(_("If it is a problem, try to reimport the data from the IRM website."))
478
592
 
479
593
 
480
- def plot_idf(self,T=None,which='All',color=[27./255.,136./255.,245./255.]):
594
+ def plot_idf(self, T=None, which:Literal['All', 'Montana', 'QDFTable'] = 'All', color=[27./255.,136./255.,245./255.]):
481
595
  """
482
596
  Plot IDF relations on a new figure
483
597
 
@@ -487,7 +601,16 @@ class Qdf_IRM():
487
601
  - 'QDFTable'
488
602
  - 'All'
489
603
  """
490
- fig,ax = plt.subplots(1,1,figsize=(15,10))
604
+
605
+ if self.montana is None and which != 'QDFTable':
606
+ logging.error(_("Montana coefficients are not available for this locality."))
607
+ return None, None
608
+
609
+ if self.qdf is None and which != 'Montana':
610
+ logging.error(_("QDF data is not available for this locality."))
611
+ return None, None
612
+
613
+ fig,ax = plt.subplots(1,1,figsize=(15,10), tight_layout=True)
491
614
  ax.set_xscale('log')
492
615
  ax.set_yscale('log')
493
616
 
@@ -502,6 +625,8 @@ class Qdf_IRM():
502
625
  iMontana = [self.montana.get_meanrain(curdur,RT[k]) for curdur in durations]
503
626
  ax.plot(durations,iMontana,label=RT[k] + ' Montana',color=mycolor)
504
627
  else:
628
+ assert T in RT, _('Bad return period ! - Retry')
629
+
505
630
  if which=='All' or which=='QDFTable':
506
631
  ax.scatter(durations,self.qdf[T],label=T+ _(' QDF Table'),color=color)
507
632
 
@@ -518,7 +643,7 @@ class Qdf_IRM():
518
643
 
519
644
  return fig,ax
520
645
 
521
- def plot_qdf(self,T=None,which='All',color=[27./255.,136./255.,245./255.]):
646
+ def plot_qdf(self, T=None, which:Literal['All', 'Montana', 'QDFTable'] = 'All', color=[27./255.,136./255.,245./255.]):
522
647
  """
523
648
  Plot QDF relations on a new figure
524
649
  :param T : the return period (based on RT constants)
@@ -527,7 +652,15 @@ class Qdf_IRM():
527
652
  - 'QDFTable'
528
653
  - 'All'
529
654
  """
530
- fig,ax = plt.subplots(1,1,figsize=(15,10))
655
+
656
+ if self.qdf is None and which != 'Montana':
657
+ logging.error(_("QDF data is not available for this locality."))
658
+ return None, None
659
+ if self.montana is None and which != 'QDFTable':
660
+ logging.error(_("Montana coefficients are not available for this locality."))
661
+ return None, None
662
+
663
+ fig,ax = plt.subplots(1,1,figsize=(12,8), tight_layout=True)
531
664
  ax.set_xscale('log')
532
665
 
533
666
  if T is None:
@@ -541,6 +674,8 @@ class Qdf_IRM():
541
674
  QMontana = [self.montana.get_Q(curdur,RT[k]) for curdur in durations]
542
675
  ax.plot(durations,QMontana,label=RT[k] + ' Montana',color=mycolor)
543
676
  else:
677
+ assert T in RT, _('Bad return period ! - Retry')
678
+
544
679
  if which=='All' or which=='QDFTable':
545
680
  ax.scatter(durations,self.qdf[T],label=T+ _(' QDF Table'),color=color)
546
681
 
@@ -548,6 +683,7 @@ class Qdf_IRM():
548
683
  QMontana = [self.montana.get_Q(curdur,T) for curdur in durations]
549
684
  ax.plot(durations,QMontana,label=T + ' Montana',color=color)
550
685
 
686
+ ax.grid(True, which='both', linestyle='--', linewidth=0.5)
551
687
  ax.legend().set_draggable(True)
552
688
  ax.set_xlabel(_('Duration [min]'))
553
689
  ax.set_ylabel(_('Quantity [mm]'))
@@ -557,16 +693,22 @@ class Qdf_IRM():
557
693
 
558
694
  return fig,ax
559
695
 
560
- def plot_cdf(self,dur=None):
696
+ def plot_cdf(self, dur=None):
561
697
  """ Plot the cdf of the QDF data for a given duration """
562
698
 
563
- fig,ax = plt.subplots(1,1,figsize=(10,10))
699
+ if self.qdf is None:
700
+ logging.error(_("QDF data is not available for this locality."))
701
+ return None, None
702
+
703
+ fig,ax = plt.subplots(1,1,figsize=(10,10), tight_layout=True)
564
704
  if dur is None:
565
705
  for k in range(len(durations)):
566
706
  pond = .3+.7*float(k/len(durations))
567
707
  mycolor = (27./255.,136./255.,245./255.,pond)
568
708
  ax.scatter(self.qdf.loc[durationstext[k]],freqndep,marker='o',label=durationstext[k],color=mycolor)
569
709
  else:
710
+ assert dur in durationstext, _('Bad duration - Retry !')
711
+
570
712
  ax.scatter(self.qdf.loc[dur],freqndep,marker='o',label=dur,color=(0,0,1))
571
713
 
572
714
  ax.legend().set_draggable(True)
@@ -592,7 +734,7 @@ class Qdf_IRM():
592
734
  def save_fits_json(self):
593
735
  """ Save the fits in a csv file """
594
736
 
595
- with open(path.join(self.store, str(self._code) + '_fits.json'), 'w') as f:
737
+ with open(self.store / (str(self._code) + '_fits.json'), 'w') as f:
596
738
  df = pd.DataFrame(self.popt_all)
597
739
  df.to_json(f)
598
740
 
@@ -605,9 +747,9 @@ class Qdf_IRM():
605
747
 
606
748
  import json
607
749
 
608
- filename = path.join(self.store, str(self._code) + '_fits.json')
750
+ filename = self.store / (str(self._code) + '_fits.json')
609
751
 
610
- if path.exists(filename):
752
+ if filename.exists():
611
753
  with open(filename, 'r') as f:
612
754
  self.popt_all = json.load(f)
613
755
 
@@ -719,19 +861,140 @@ class Qdf_IRM():
719
861
  """ Return the QDF data as a string """
720
862
  return self.qdf.__str__()
721
863
 
864
+ def make_image_qdf_plot(self, T= None, which:Literal['All', 'Montana', 'QDFTable'] = 'All', color=[27./255.,136./255.,245./255.]):
865
+ """ Create an image of the QDF plot.
866
+
867
+ We use the `matplotlib` library to create a PNG image of the QDF data.
868
+ The image will be saved in the store path with the name `<code>_qdf_plot.png`.
869
+
870
+ :param durmax: the maximum duration of the hyetograms
871
+ :param r: Decentration coefficient
872
+ :return: a PNG image
873
+ """
874
+ import matplotlib
875
+
876
+ self._qdf_image_plot = self.store / f"{self.code_name}_qdf_plot.png"
877
+ if self._qdf_image_plot.exists():
878
+ return self._qdf_image_plot
879
+
880
+ old_backend = matplotlib.get_backend()
881
+ matplotlib.use('Agg') # Use a non-interactive backend for saving images
882
+ fig, ax = self.plot_qdf(T=T, which=which, color=color)
883
+ fig.savefig(self._qdf_image_plot, dpi=300)
884
+ plt.close(fig)
885
+ matplotlib.use(old_backend) # Restore the original backend
886
+
887
+ return self._qdf_image_plot
888
+
889
+ def make_image_qdf_table(self):
890
+ """ Create an image of the QDF data.
891
+
892
+ We use the `dataframe_image` library to create a PNG image of the QDF data.
893
+ Added style to the DataFrame to make it more readable.
894
+
895
+ :return: a PNG image
896
+ """
897
+
898
+ try:
899
+ import dataframe_image as dfimg
900
+ except ImportError:
901
+ logging.error(_("The 'dataframe_image' library is not installed. Please install it to create QDF table images."))
902
+ return None
903
+
904
+ if self.qdf is None:
905
+ logging.error(_("QDF data is not available for this locality."))
906
+ return None
907
+
908
+ qdf = self.qdf.copy()
909
+
910
+ # Create a styled DataFrame
911
+ # Add a caption to the DataFrame
912
+ qdf.attrs['caption'] = f"QDF data for {self._name} (INS code: {self._code})<br>\
913
+ <div style='font-size:8px;'>source : https://www.meteo.be/fr/climat/climat-de-la-belgique/climat-dans-votre-commune<br> \
914
+ Data extracted from IRM (Institut Royal Météorologique de Belgique) and processed by Wolf - ULiège"
915
+
916
+
917
+ qdf.columns = pd.MultiIndex.from_tuples([(f"{_('Return period')}", str(col)) for col in qdf.columns])
918
+
919
+
920
+ # Style the DataFrame
921
+ # One line per duration, one column per return period
922
+ # We will use light colors for the background and borders
923
+ # to highlight every other line and center the text
924
+ styled_df = qdf.style.format(precision=1) \
925
+ .set_caption(qdf.attrs['caption']) \
926
+ .set_properties(**{
927
+ 'text-align': 'center',
928
+ 'font-size': '12px',
929
+ 'border': '1px solid black',
930
+ # 'background-color': '#f0f0f0',
931
+ }).set_table_styles([
932
+ {
933
+ 'selector': 'thead th.row_heading.level0',
934
+ # 'props': [('text-align', 'center'), ('background-color', '#d9edf7'), ('color', '#31708f'),],
935
+ 'props': [('color', 'transparent')],
936
+ },
937
+ {
938
+ 'selector': 'thead th',
939
+ 'props': [('text-align', 'center'), ('background-color', '#d9edf7'), ('color', '#31708f')],
940
+ },
941
+ ])
942
+ # Define the path for the image
943
+ self._qdf_image_table = self.store / f"{self.code_name}_qdf.png"
944
+ # Save the styled DataFrame as an image
945
+ dfimg.export(styled_df, self._qdf_image_table, dpi=300)
946
+
947
+ def make_images(self):
948
+ """ Create all images for the QDF data. """
949
+
950
+ self.make_image_qdf_table()
951
+ self.make_image_qdf_plot()
952
+
953
+ return self._qdf_image_table, self._qdf_image_plot
954
+
955
+ @property
956
+ def path_image_plot(self):
957
+ """ Get the path for the QDF plot image. """
958
+ if self._qdf_image_plot is None:
959
+ self.make_image_qdf_plot()
960
+ return self._qdf_image_plot
961
+
962
+ @property
963
+ def path_image_table(self):
964
+ """ Get the path for the QDF table image. """
965
+ if self._qdf_image_table is None:
966
+ self.make_image_qdf_table()
967
+ return self._qdf_image_table
968
+
722
969
  class QDF_Belgium():
970
+ """ Class to manage all QDF data for Belgium """
723
971
 
724
- def __init__(self, store_path= 'irm', ins:Literal['2018', '2019', '2025', 2018, 2019, 2025] = 2018) -> None:
972
+ def __init__(self, store_path= 'irm',
973
+ ins:Literal['2018', '2019', '2025', 2018, 2019, 2025] = 2018,
974
+ force_import: bool = False) -> None:
725
975
 
726
976
  self.localities = Localities(ins)
727
977
  self.store_path = Path(store_path)
728
978
 
729
- self.all = {}
979
+ if force_import or len(list(self.store_path.glob('*.xlsx'))) == 0:
980
+ Qdf_IRM.importfromwebsite(store_path=str(self.store_path), verbose=True, ins=ins)
981
+ if len(list(self.store_path.rglob('*.csv'))) == 0:
982
+ Qdf_IRM.convert_xls2csv(store_path=str(self.store_path), ins=ins)
983
+
984
+ self.all:dict[int, Qdf_IRM] = {}
730
985
  for loc_ins in tqdm(self.localities.get_allcodes()):
731
- loc = Qdf_IRM(store_path=str(self.store_path), code=loc_ins, localities=self.localities)
986
+ loc = Qdf_IRM(store_path=str(self.store_path),
987
+ code=loc_ins,
988
+ localities=self.localities)
732
989
  if loc.qdf is not None:
733
990
  self.all[loc_ins] = loc
734
991
 
992
+ def make_images(self):
993
+ """ Create all images for all QDF data. """
994
+
995
+ for loc in self.all.values():
996
+ loc.make_images()
997
+
735
998
  def __getitem__(self, key) -> Qdf_IRM:
736
999
 
737
1000
  if isinstance(key, int):
@@ -753,6 +1016,11 @@ class QDF_Belgium():
753
1016
  logging.error(f"Name {key} not found in the data")
754
1017
  return None
755
1018
 
1019
+ def __iter__(self):
1020
+ """ Iterate over all localities """
1021
+ for qdf_municip in self.all.values():
1022
+ yield qdf_municip
1023
+
756
1024
 
757
1025
  TRANSLATION_HEADER = {'année': 'year', 'janv.': 'January', 'févr.': 'February', 'mars': 'March',
758
1026
  'avr.': 'April', 'mai': 'May', 'juin': 'June',
@@ -877,3 +1145,567 @@ class Climate_IRM():
877
1145
  with pd.ExcelWriter(dest_file) as writer: # doctest: +SKIP
878
1146
  df.to_excel(writer, sheet_name='Rain')
879
1147
  df_sun.to_excel(writer, sheet_name='Sun')
1148
+
1149
+
1150
# File names used by QDF_Hydrology inside its store_path directory.
PLUVIO_INI = "pluvio.ini"  # parameter file listing the extreme-rain file name, its shape and the return periods
MATCH_NUM_ZONE_SHAPEFILE_INS_INDEX = "Match_num_zone_shapefile_INS_index.txt"  # shapefile row index -> INS code mapping
# NOTE(review): the constant name says "precip communes" but the value is "Extreme_rain_ins.txt" — confirm intended.
EXTREME_PRECIP_COMMUNES = "Extreme_rain_ins.txt"  # extreme rainfall quantiles, one line per (INS, duration)
GEOMETRY_MUNICIPALITIES = "PDS__COMMUNES.shp"  # municipalities shapefile (2018 boundaries)
1154
+
1155
class QDF_Hydrology():
    """ Prepare data from the IRM website for WOLF hydrology calculations.

    The class reads/creates the following files in `store_path`:

    - the ini file (default ``PLUVIO_INI``): extreme-rain file name, its shape
      (lines/columns) and the list of return periods;
    - ``MATCH_NUM_ZONE_SHAPEFILE_INS_INDEX``: mapping between shapefile rows
      and INS codes;
    - the extreme rainfall file (default ``EXTREME_PRECIP_COMMUNES``) with one
      line per (INS, duration) couple.
    """

    def __init__(self, store_path= DATADIR / 'irm_qdf',
                 ini_file:str = PLUVIO_INI,
                 ins:Literal['2018', 2018] = 2018,
                 geometry:str = GEOMETRY_MUNICIPALITIES) -> None:
        """ Initialize the store, download the geometry if needed and read the data.

        :param store_path: directory holding/receiving all data files
        :param ini_file: name of the parameter file inside store_path
        :param ins: INS version to use (IRM updated the QDF data in 2016, so 2018 is forced)
        :param geometry: name of the municipalities shapefile
        """

        self.store_path = Path(store_path)
        self._data:dict[int, Qdf_IRM] = {}  # INS code -> QDF data

        self._ins = ins  # INS version to use. IRM has updated the QDF data in 2016, so we force the use of the 2018 version.

        self._extreme_file = EXTREME_PRECIP_COMMUNES
        self._nb_lines_extreme_file = 0
        self._nb_cols_extreme_file = 0

        self.localities = Localities(ins)

        self._ini_file = ini_file

        self._geometry = geometry
        self._geometry_df = None  # GeoDataFrame of the municipalities, set below

        if not self.store_path.exists():
            self.store_path.mkdir(parents=True, exist_ok=True)

        if not self.store_path.joinpath(self._geometry).exists():
            # get the municipalities shapefile from HECE Gitlab public repository
            self.download_municipalities_2018()

        try:
            if self.store_path.joinpath(self._geometry).exists():
                self._geometry_df = gpd.read_file(self.store_path / self._geometry)
            else:
                logging.error(f"Geometry file {self._geometry} not found in {self.store_path}.")
                logging.error("Please download the 2018 version of municipalities shapefile from HECE or what you want.")
                return
        except Exception as e:
            logging.error(f"Error reading geometry file {self._geometry}: {e}")
            self._geometry_df = None

        if not self.store_path.joinpath(ini_file).exists():
            self.create_default_data()

        self.read_data()

    def read_data(self):
        """ Read the ini file, then load the extreme precipitation file into per-INS Qdf_IRM objects. """

        try:
            # FIX: honor the ini_file given to the constructor (was hard-coded to PLUVIO_INI).
            with open(self.store_path / self._ini_file, 'r') as f:
                lines = f.readlines()
                self._extreme_file = lines[0].strip()
                self._nb_lines_extreme_file = int(lines[1].strip())
                self._nb_cols_extreme_file = int(lines[2].strip())

                _nb_return_periods = int(lines[3].strip())
                _nb_values_per_return_period = int(lines[4].strip())  # currently always 4 (Q, Std, Low, Up)

                rt =[]
                for i in range(_nb_return_periods):
                    try:
                        rt.append(int(lines[5 + i].strip()))
                    except ValueError:
                        logging.error(f"Invalid return period value in ini file: {lines[5 + i].strip()}")
                        rt.append(0)

            if self.store_path.joinpath(self._extreme_file).exists():
                df = pd.read_csv(self.store_path / self._extreme_file, sep='\t', header=None)
                # Set column names based on the number of return periods and values per return period
                columns = ['INS', 'Duration']
                for rt_value in rt:
                    for value_type in ['Q', 'Std', 'Low', 'Up']:
                        columns.append(f"{rt_value}_{value_type}")
                df.columns = columns

                # One Qdf_IRM object per INS code, fed with its slice of the dataframe.
                all_ins = df['INS'].astype(int).unique()
                self._data = {ins: Qdf_IRM(store_path=self.store_path, code=ins, localities=self.localities, dataframe=df[df['INS'] == ins]) for ins in all_ins}

            else:
                logging.error(f"Extreme precipitation file {self._extreme_file} not found in {self.store_path}.")
                self._data = {}
        except Exception as e:
            # FIX: no bare except — log the actual error instead of hiding it.
            logging.error(f"Error during reading {self._ini_file} in {self.store_path}: {e}")
            logging.error("Check your data or delete files in the directory to force a new download.")
            self._data = {}
            self._extreme_file = None
            self._nb_lines_extreme_file = 0
            self._nb_cols_extreme_file = 0

    def __getitem__(self, key) -> Qdf_IRM:
        """ Get the QDF data for a given INS code (int) or locality name (str).

        :raises KeyError: unknown locality or no data for the INS code
        :raises TypeError: unsupported key type
        """

        if isinstance(key, int):
            ins_code = key
        elif isinstance(key, str):
            ins_code = self.localities.get_INSfromname(key)
            if ins_code is None:
                try:
                    # Try to convert the string to an integer (INS code)
                    ins_code = int(key)
                except ValueError:
                    # If it fails, raise an error
                    raise KeyError(f"Locality {key} not found.")
        else:
            raise TypeError("Key must be an integer (INS code) or a string (locality name).")

        if ins_code in self._data:
            return self._data[ins_code]
        else:
            raise KeyError(f"Data for INS code {ins_code} not found.")

    def __iter__(self):
        """ Iterate over all QDF data. """
        for ins_code in self._data:
            yield self._data[ins_code]

    def download_municipalities_2018(self, force:bool = False):
        """ Download and unzip the municipalities shapefile from HECE.

        :param force: If `True`, will download the file even if it already exists.
        """
        munic = Path(toys_dataset('Communes_Belgique', 'PDS__COMMUNES.zip'))

        # Unzip the file if it is not already unzipped.
        if munic.exists():
            import zipfile
            with zipfile.ZipFile(munic, 'r') as zip_ref:
                zip_ref.extractall(self.store_path)

        self._geometry = 'PDS__COMMUNES.shp'

    def create_match_num_zone_shapefile(self):
        """ Create the Match_num_zone_shapefile_INS_index.txt file.

        This file contains the mapping between the INS codes and the shapefile
        indices (one INS code per shapefile row, in row order).
        """

        match_file = self.store_path / MATCH_NUM_ZONE_SHAPEFILE_INS_INDEX

        colname = 'INS'
        if colname not in self._geometry_df.columns:
            colname = 'NSI' # Use NSI if INS is not available
        if colname not in self._geometry_df.columns:
            logging.error(f"Column {colname} not found in the geometry DataFrame.")
            return

        if not match_file.exists():
            with open(match_file, 'w') as f:
                for idx, row in self._geometry_df.iterrows():
                    f.write(f"{row[colname]}\n")

    def create_default_data(self):
        """ Create data from scratch for WOLF hydrology calculations. """
        self.create_match_num_zone_shapefile()
        self.create_extreme_precipitation_file()
        self.create_ini_file()

    def create_extreme_precipitation_file(self):
        """ Create the extreme precipitation file for all localities.

        Each line of the file contains the following data:
        - INS code
        - Duration in seconds
        - Quantity, standard deviation and confidence bounds for each return period
        """
        self.extreme_file = self.store_path / EXTREME_PRECIP_COMMUNES

        if not self.extreme_file.exists():

            all_qdf = QDF_Belgium(store_path=self.store_path, ins=self._ins, force_import=False)

            self._nb_lines_extreme_file = 0
            with open(self.extreme_file, 'w') as f:
                for loc in all_qdf:
                    loc:Qdf_IRM
                    ins = loc.code
                    qdf = loc.qdf
                    low = loc.confintlow
                    up = loc.confintup
                    std = loc.standarddev

                    for (dur_text, dur_s) in zip(durationstext, durations_seconds):
                        # INS, duration, then (Q, Std, Low, Up) per return period — tab separated
                        data = [ins]
                        data.append(int(dur_s))
                        for rt in RT:
                            data.append(qdf.loc[dur_text, rt])
                            data.append(std.loc[dur_text, rt])
                            data.append(low.loc[dur_text, rt])
                            data.append(up.loc[dur_text, rt])
                        f.write("\t".join(map(str, data)) + "\n")

                        self._nb_lines_extreme_file += 1
                        self._nb_cols_extreme_file = len(data)

    def create_ini_file(self):
        """ Create the parameter file describing the extreme precipitation file. """

        # FIX: honor self._ini_file (was hard-coded to PLUVIO_INI).
        with open(self.store_path / self._ini_file, 'w') as f:
            f.write(f"{self._extreme_file}\n") # name of the file containing the extreme precipitation data

            # If the shape is still unknown (file created during a previous run), measure it now.
            if self._nb_lines_extreme_file == 0 or self._nb_cols_extreme_file == 0:
                with open(self.store_path / self._extreme_file, 'r') as ef:
                    lines = ef.readlines()
                    self._nb_lines_extreme_file = len(lines)
                    if lines:
                        self._nb_cols_extreme_file = len(lines[0].strip().split('\t'))
                    else:
                        self._nb_cols_extreme_file = 0

            f.write(f"{self._nb_lines_extreme_file}\n") # number of lines in the extreme precipitation file
            f.write(f"{self._nb_cols_extreme_file}\n") # number of columns in the extreme precipitation file
            f.write(f"{len(RT)}\n") # Number of return periods
            f.write("4\n") # Number of values par return period (Q, std, low, up)
            for rt in RT:
                f.write(f"{rt}\n")

    def get_all_ins(self) -> list[int]:
        """ Get a list of all INS codes. """
        return list(self._data.keys())
1388
+
1389
+ class QDF_Hydrology_Draw(Element_To_Draw):
1390
+
1391
+ """ Class to draw the QDF hydrology data on a map.
1392
+
1393
+ This class is used to draw the QDF hydrology data on a map using the
1394
+ WOLF hydrology calculations.
1395
+ """
1396
+
1397
    def __init__(self, store_path= DATADIR / 'irm_qdf', ins:Literal['2018', 2018] = 2018, idx:str = '', plotted:bool = True, mapviewer = None) -> None:
        """ Load the QDF hydrology data and prepare the drawable collections.

        :param store_path: directory with the IRM QDF data files
        :param ins: INS revision year
        :param idx: identifier of this drawable element
        :param plotted: whether the element is initially plotted
        :param mapviewer: parent map viewer
        """

        super().__init__(idx=idx, plotted=plotted, mapviewer=mapviewer)

        self._qdf_hydrology = QDF_Hydrology(store_path=store_path, ins=ins)

        # local import — presumably to avoid a circular import at module load; confirm
        from .PyPictures import PictureCollection

        self._scale_factor = 1.0 # Default scale factor for images

        # One Zones object for the municipality outlines, two PictureCollections
        # for the table images and the plot images respectively.
        self._geometry_zones = Zones('', idx= idx+'_zones', plotted=plotted, mapviewer=mapviewer, parent = mapviewer)
        self._geometry_tables = PictureCollection('', idx= idx, plotted=plotted, mapviewer=mapviewer, parent = mapviewer)
        self._geometry_plots = PictureCollection('', idx= idx, plotted=plotted, mapviewer=mapviewer, parent = mapviewer)

        self._geometry_zones.import_shapefile(self.store_path / self._qdf_hydrology._geometry, colname='NSI')
        self._geometry_zones.prep_listogl()

        self._geometry_tables.import_shapefile(self.store_path / self._qdf_hydrology._geometry, colname='NSI')
        self._geometry_plots.import_shapefile(self.store_path / self._qdf_hydrology._geometry, colname='NSI')
        self._prepare_image_location()

        # centroid point -> municipality name; used by the picking helpers
        self._centroids = {curzone[0].centroid: curzone.myname for curzone in self._geometry_tables.myzones}

        self._show_table = False    # table images hidden by default
        self._show_plot = False     # plot images hidden by default
        self._reload_images = True  # force an image refresh on the next plot()
        self._current_images = None # images currently displayed (see plot())
1424
+
1425
+ def _get_vector_tables(self, ins:str | int) -> vector:
1426
+ """ Get the vector for a given INS code. """
1427
+ return self._geometry_tables[(str(ins), str(ins))]
1428
+
1429
+ def _get_vector_plots(self, ins:str | int) -> vector:
1430
+ """ Get the vector for a given INS code. """
1431
+ return self._geometry_plots[(str(ins), str(ins))]
1432
+
1433
    @property
    def store_path(self):
        """ Get the store path for the QDF hydrology data (delegates to the wrapped QDF_Hydrology). """
        return self._qdf_hydrology.store_path
1437
+
1438
    def _prepare_image_location(self):
        """ Set the default size and anchor rectangle for every plot/table image.

        For each municipality, the image anchor is its centroid and the vector
        vertices form the rectangle into which the image will be drawn.
        Images start hidden; visibility is toggled elsewhere.
        """

        # plots
        DEFAULT_SIZE = 2000. * self._scale_factor # Default size for the images
        RAP_W_H = 3600. / 2400.                   # plot images aspect ratio (width / height)
        WIDTH_PLOTS = DEFAULT_SIZE * RAP_W_H
        HEIGHT_PLOTS = DEFAULT_SIZE

        for curzone in self._geometry_plots.myzones:
            vec = curzone[0]
            vec.myprop.image_attached_pointx = vec.centroid.x
            vec.myprop.image_attached_pointy = vec.centroid.y
            vec.myprop.imagevisible = False

            # rectangle centered on the centroid, shifted down by half an image height
            x, y = vec.centroid.x, vec.centroid.y
            y -= HEIGHT_PLOTS / 2.

            vec.myvertices = [wv(x - WIDTH_PLOTS, y - HEIGHT_PLOTS),
                              wv(x + WIDTH_PLOTS, y - HEIGHT_PLOTS),
                              wv(x + WIDTH_PLOTS, y + HEIGHT_PLOTS),
                              wv(x - WIDTH_PLOTS, y + HEIGHT_PLOTS),
                              wv(x - WIDTH_PLOTS, y - HEIGHT_PLOTS)]
            vec.myprop.color = getIfromRGB([255, 255, 255, 0]) # Transparent color
            vec.find_minmax()

        self._geometry_plots.prep_listogl()


        # tables
        RAP_W_H = 1730. / 2000.                   # table images aspect ratio (width / height)
        WIDTH_TABLES = DEFAULT_SIZE * RAP_W_H
        HEIGHT_TABLES = DEFAULT_SIZE
        for curzone in self._geometry_tables.myzones:
            vec = curzone[0]
            vec.myprop.image_attached_pointx = vec.centroid.x
            vec.myprop.image_attached_pointy = vec.centroid.y
            vec.myprop.imagevisible = False

            # tables are placed above the plots so both can be shown together
            x, y = vec.centroid.x, vec.centroid.y
            y += 2. * HEIGHT_TABLES - (HEIGHT_PLOTS / 2.) * 3./4.

            vec.myvertices = [wv(x - WIDTH_TABLES, y - HEIGHT_TABLES),
                              wv(x + WIDTH_TABLES, y - HEIGHT_TABLES),
                              wv(x + WIDTH_TABLES, y + HEIGHT_TABLES),
                              wv(x - WIDTH_TABLES, y + HEIGHT_TABLES),
                              wv(x - WIDTH_TABLES, y - HEIGHT_TABLES)]
            vec.myprop.color = getIfromRGB([255, 255, 255, 0])
            vec.find_minmax()

        self._geometry_tables.prep_listogl()
1489
+
1490
+ def set_images_as_legend(self, plot_or_table:Literal['plot', 'table'] = 'plot', which:list = None):
1491
+ """ Set all images in the collection as legend images. """
1492
+
1493
+ DEFAULT_SIZE = 2000. * self._scale_factor # Default size for the images
1494
+
1495
+ if which is None:
1496
+ which = self._qdf_hydrology.get_all_ins()
1497
+
1498
+ if plot_or_table == 'plot':
1499
+
1500
+ RAP_W_H = 3600. / 2400.
1501
+ WIDTH = DEFAULT_SIZE * RAP_W_H
1502
+ HEIGHT = DEFAULT_SIZE
1503
+
1504
+ for loc_ins in which:
1505
+ loc_qdf = self._qdf_hydrology[loc_ins]
1506
+ if loc_qdf.path_image_plot is not None:
1507
+ vec = self._get_vector_plots(loc_qdf.code)
1508
+ vec.myprop.image_path = loc_qdf.path_image_plot
1509
+ centroid = vec.centroid
1510
+ vec.myprop.image_attached_pointx, vec.myprop.image_attached_pointy = centroid.x, centroid.y
1511
+ vec.myprop.imagevisible = True
1512
+
1513
+ vec.myvertices = [wv(centroid.x - WIDTH, centroid.y - HEIGHT),
1514
+ wv(centroid.x + WIDTH, centroid.y - HEIGHT),
1515
+ wv(centroid.x + WIDTH, centroid.y + HEIGHT),
1516
+ wv(centroid.x - WIDTH, centroid.y + HEIGHT),
1517
+ wv(centroid.x - WIDTH, centroid.y - HEIGHT)]
1518
+
1519
+ vec.myprop.color = getIfromRGB([255, 255, 255, 0])
1520
+ vec.find_minmax()
1521
+
1522
+ self._geometry_plots.reset_listogl()
1523
+ self._geometry_plots.prep_listogl()
1524
+
1525
+ elif plot_or_table == 'table':
1526
+
1527
+ RAP_W_H = 1730. / 2000.
1528
+ WIDTH = DEFAULT_SIZE * RAP_W_H
1529
+ HEIGHT = DEFAULT_SIZE
1530
+
1531
+ for loc_ins in which:
1532
+ loc_qdf = self._qdf_hydrology[loc_ins]
1533
+ if loc_qdf.path_image_table is not None:
1534
+ vec = self._get_vector_tables(loc_qdf.code)
1535
+ vec.myprop.image_path = loc_qdf.path_image_table
1536
+ centroid = vec.centroid
1537
+ vec.myprop.image_attached_pointx, vec.myprop.image_attached_pointy = centroid.x, centroid.y
1538
+ vec.myprop.imagevisible = True
1539
+
1540
+ vec.myvertices = [wv(centroid.x - WIDTH, centroid.y - HEIGHT),
1541
+ wv(centroid.x + WIDTH, centroid.y - HEIGHT),
1542
+ wv(centroid.x + WIDTH, centroid.y + HEIGHT),
1543
+ wv(centroid.x - WIDTH, centroid.y + HEIGHT),
1544
+ wv(centroid.x - WIDTH, centroid.y - HEIGHT)]
1545
+
1546
+ vec.myprop.color = getIfromRGB([255, 255, 255, 0])
1547
+ vec.find_minmax()
1548
+
1549
+ self._geometry_tables.reset_listogl()
1550
+ self._geometry_tables.prep_listogl()
1551
+
1552
+ def hide_all_images(self):
1553
+ """ Hide all images in the collection. """
1554
+
1555
+ for curzone in self._geometry_tables.myzones:
1556
+ curzone[0].myprop.imagevisible = False
1557
+ self._geometry_tables.reset_listogl()
1558
+
1559
+ for curzone in self._geometry_plots.myzones:
1560
+ curzone[0].myprop.imagevisible = False
1561
+ self._geometry_plots.reset_listogl()
1562
+
1563
    def check_plot(self):
        # NOTE(review): redundant override — simply delegates to the parent
        # Element_To_Draw.check_plot(); kept for interface clarity.
        return super().check_plot()
1565
+
1566
+ def find_nearest_centroid(self, x: float, y: float, bounds: tuple[float, float, float, float]):
1567
+ """ Pick the municipality at the given coordinates.
1568
+
1569
+ :param x: The x coordinate.
1570
+ :param y: The y coordinate.
1571
+ :return: The name of the municipality or an empty string if not found.
1572
+ """
1573
+
1574
+ centroids = self.find_centroid_in_bounds(bounds)
1575
+ if not centroids:
1576
+ return ''
1577
+
1578
+ # Find the centroid closest to the given point
1579
+ closest_centroid = min(centroids, key=lambda c: c[0].distance(Point(x, y)))
1580
+ return closest_centroid[1]
1581
+
1582
+ def pick_municipality(self, x: float, y: float, bounds: tuple[float, float, float, float]):
1583
+ """ Activate plot for the nearest municipality to the given coordinates. """
1584
+
1585
+ which = [self.find_nearest_centroid(x, y, bounds)]
1586
+
1587
+ for loc_ins in which:
1588
+ loc_qdf = self._qdf_hydrology[loc_ins]
1589
+ if loc_qdf.path_image_plot is not None and loc_qdf.path_image_table is not None:
1590
+ vec_plt = self._get_vector_plots(loc_qdf.code)
1591
+ vec_tables = self._get_vector_tables(loc_qdf.code)
1592
+
1593
+ if vec_plt.myprop.imagevisible or vec_tables.myprop.imagevisible:
1594
+ vec_plt.myprop.imagevisible = False
1595
+ vec_tables.myprop.imagevisible = False
1596
+ else:
1597
+ vec_plt.myprop.image_path = loc_qdf.path_image_plot
1598
+ vec_plt.myprop.imagevisible = True
1599
+ vec_tables.myprop.image_path = loc_qdf.path_image_table
1600
+ vec_tables.myprop.imagevisible = True
1601
+
1602
+ # Reset the OpenGL lists to reflect the changes
1603
+ self._geometry_plots.reset_listogl()
1604
+ self._geometry_plots.prep_listogl()
1605
+
1606
+ self._geometry_tables.reset_listogl()
1607
+ self._geometry_tables.prep_listogl()
1608
+
1609
+
1610
+ def find_centroids_in_polygon(self, polygon: Polygon) -> list[tuple[vector, str]]:
1611
+ """ Find all centroids in a given polygon.
1612
+
1613
+ :param polygon: A shapely Polygon object defining the area to search.
1614
+ """
1615
+
1616
+ centroids = []
1617
+ for centroid, name in self._centroids.items():
1618
+ if centroid.within(polygon):
1619
+ centroids.append((centroid, name))
1620
+
1621
+ # Sort centroids by distance to the center of the polygon
1622
+ center_x, center_y = polygon.centroid.x, polygon.centroid.y
1623
+ dist_to_center = lambda c: ((c.x - center_x) ** 2 + (c.y - center_y) ** 2) ** 0.5
1624
+ centroids.sort(key=lambda c: dist_to_center(c[0]))
1625
+
1626
+ return centroids
1627
+
1628
+ def find_centroid_in_bounds(self, bounds: tuple[float, float, float, float]) -> list[tuple[vector, str]]:
1629
+ """ Find all centroids within the given bounds.
1630
+
1631
+ :param bounds: A tuple of (minx, miny, maxx, maxy) defining the bounding box.
1632
+ """
1633
+
1634
+ minx, miny, maxx, maxy = bounds
1635
+ centroids = []
1636
+ for centroid, name in self._centroids.items():
1637
+ if minx <= centroid.x <= maxx and miny <= centroid.y <= maxy:
1638
+ centroids.append((centroid, name))
1639
+
1640
+ dist_to_center = lambda c: ((c.x - (minx + maxx) / 2) ** 2 + (c.y - (miny + maxy) / 2) ** 2) ** 0.5
1641
+ centroids.sort(key=lambda c: dist_to_center(c[0]))
1642
+
1643
+ return centroids
1644
+
1645
    @property
    def show_plot(self) -> bool:
        """ Check if the plot is shown. """
        return self._show_plot

    @show_plot.setter
    def show_plot(self, value: bool):
        """ Set whether to show the plot or not.

        Enabling also schedules an image refresh on the next plot() call;
        disabling hides all currently attached images.
        """
        self._show_plot = value
        if not value:
            self.hide_all_images()
        # True -> images will be (re)selected in plot(); False -> no reload needed
        self._reload_images = value
1657
+
1658
    @property
    def show_table(self) -> bool:
        """ Check if the table is shown. """
        return self._show_table

    @show_table.setter
    def show_table(self, value: bool):
        """ Set whether to show the table or not.

        Enabling also schedules an image refresh on the next plot() call;
        disabling hides all currently attached images.
        """
        self._show_table = value
        if not value:
            self.hide_all_images()
        # True -> images will be (re)selected in plot(); False -> no reload needed
        self._reload_images = value
1670
+
1671
+ def scale_images(self, factor:float = 1.0):
1672
+ """ Scale the images in the collection by a given factor.
1673
+
1674
+ :param factor: The scaling factor to apply to the images.
1675
+ """
1676
+ assert isinstance(factor, (int, float)), "Scaling factor must be a number."
1677
+
1678
+ self._geometry_tables.scale_all_pictures(factor)
1679
+ self._geometry_plots.scale_all_pictures(factor)
1680
+
1681
+ def plot(self, sx=None, sy=None, xmin=None, ymin=None, xmax=None, ymax=None, size=None):
1682
+ """ Plot the QDF hydrology data on the map. """
1683
+
1684
+ NB_MAX = 10 # Maximum number of images to display
1685
+
1686
+ if self.show_plot and self._reload_images:
1687
+ _new_images = self.find_centroid_in_bounds((xmin, ymin, xmax, ymax))
1688
+ self._reload_images = False
1689
+
1690
+ if len(_new_images) > NB_MAX:
1691
+ logging.warning(_(f"Too many images to display. Showing only the first {NB_MAX}."))
1692
+ _new_images = _new_images[:NB_MAX]
1693
+
1694
+ if self._current_images is None or len(_new_images) != len(self._current_images):
1695
+ self.set_images_as_legend(plot_or_table='plot', which=[img[1] for img in _new_images])
1696
+
1697
+ elif self.show_table and self._reload_images:
1698
+
1699
+ _new_images = self.find_centroid_in_bounds((xmin, ymin, xmax, ymax))
1700
+ self._reload_images = False
1701
+
1702
+ if len(_new_images) > NB_MAX:
1703
+ logging.warning(_(f"Too many images to display. Showing only the first {NB_MAX}."))
1704
+ _new_images = _new_images[:NB_MAX]
1705
+
1706
+ if self._current_images is None or len(_new_images) != len(self._current_images):
1707
+ self.set_images_as_legend(plot_or_table='table', which=[img[1] for img in _new_images])
1708
+
1709
+ self._geometry_tables.plot(sx=sx, sy=sy, xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax, size=size)
1710
+ self._geometry_plots.plot(sx=sx, sy=sy, xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax, size=size)
1711
+ self._geometry_zones.plot(sx=sx, sy=sy, xmin=xmin, ymin=ymin, xmax=xmax, ymax=ymax, size=size)