wolfhece 2.0.17__py3-none-any.whl → 2.0.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. wolfhece/CpGrid.py +10 -13
  2. wolfhece/PyCrosssections.py +18 -13
  3. wolfhece/PyHydrographs.py +2 -2
  4. wolfhece/PyParams.py +15 -27
  5. wolfhece/PyPictures.py +48 -45
  6. wolfhece/PyVertexvectors.py +14 -19
  7. wolfhece/apps/curvedigitizer.py +4 -1
  8. wolfhece/apps/wolfcompare2Darrays.py +11 -7
  9. wolfhece/clientserver/clientserver.py +62 -0
  10. wolfhece/friction_law.py +39 -34
  11. wolfhece/ftp/downloader.py +8 -7
  12. wolfhece/gpuview.py +14 -13
  13. wolfhece/hydrology/Catchment.py +2 -2
  14. wolfhece/hydrology/PyWatershed.py +2 -2
  15. wolfhece/hydrology/SubBasin.py +13 -11
  16. wolfhece/hydrometry/kiwis_gui.py +9 -9
  17. wolfhece/irm_qdf.py +12 -10
  18. wolfhece/mar/Interface_MAR_WOLF_objet.py +260 -161
  19. wolfhece/opengl/py3d.py +4 -4
  20. wolfhece/pyshields.py +4 -4
  21. wolfhece/pythonfortran/example_makendarray.py +46 -41
  22. wolfhece/pythonfortran/example_numpy_memory.py +87 -83
  23. wolfhece/pythonfortran/tools.py +1 -1
  24. wolfhece/scenario/imposebc_void.py +2 -3
  25. wolfhece/scenario/update_void.py +6 -6
  26. wolfhece/wolf_array.py +17 -16
  27. wolfhece/wolfresults_2D.py +2 -4
  28. {wolfhece-2.0.17.dist-info → wolfhece-2.0.18.dist-info}/METADATA +7 -1
  29. {wolfhece-2.0.17.dist-info → wolfhece-2.0.18.dist-info}/RECORD +33 -38
  30. wolfhece/apps/wolfBernoulli.py +0 -18
  31. wolfhece/bernoulli/ModelJockgrim.py +0 -226
  32. wolfhece/bernoulli/NetworkOpenGL.py +0 -6461
  33. wolfhece/bernoulli/ReadNeupotzData.py +0 -223
  34. wolfhece/bernoulli/opti_results_interactive_plot.py +0 -212
  35. wolfhece/debug.py +0 -8
  36. /wolfhece/{bernoulli → clientserver}/__init__.py +0 -0
  37. {wolfhece-2.0.17.dist-info → wolfhece-2.0.18.dist-info}/WHEEL +0 -0
  38. {wolfhece-2.0.17.dist-info → wolfhece-2.0.18.dist-info}/entry_points.txt +0 -0
  39. {wolfhece-2.0.17.dist-info → wolfhece-2.0.18.dist-info}/top_level.txt +0 -0
wolfhece/mar/Interface_MAR_WOLF_objet.py CHANGED
@@ -32,15 +32,16 @@ class MAR_input_data:
  date_fin=datetime.datetime(2020,7,11,5),
  directory='~/BUP_srv7/',
  directory_hist_sim='~/BUP_srv7/',
+ model_name='MIROC6',
  var='MBRR',
  var_unb='E',
  UnborNot=0,
  syu=1981,eyu=2010,
- mod_ydays=1):
+ mod_ydays=1,
+ generate_quantiles=1):


  """
-
  xsummits : abscisses Lambert 72 du rectangle d'extraction'
  ysummits : idem pour ordonnées

@@ -58,8 +59,9 @@ class MAR_input_data:

  UnborNot : 1 si débiaisage, 0 si données brutes

- syu et eyu : année de début et de fin de la période historique utilisée pour comparer modèle et observations
+ syu et eyu : année de début et de fin de la période future utilisée pour comparer modèle et observations

+ mod_ydays: 1 si modèel avec années bissextiles, 0 sinon 1
  """

  self.directory_hist_sim=directory_hist_sim
@@ -79,19 +81,30 @@ class MAR_input_data:

  print(self.directory,date_debut.year)
  print(self.fn)
+
  self.ds=xr.open_dataset(self.fn[0])
+
  self.lons=np.transpose(np.array(self.ds.LON))
  self.lats=np.transpose(np.array(self.ds.LAT))
+
  self.Lb72=pyproj.Proj(projparams='epsg:31370')
  self.x_Lb72, self.y_Lb72 = self.Lb72(self.lons,self.lats)
+
  self.mask=self.mask_rectangles()
- self.plot_mask()
+
+ # self.plot_mask()
  self.vec_data=self.select_MARdata()
  # self.historical_matrix=
+
  self.directory_unbiasing="/srv7_tmp1/jbrajkovic/These/IRM/"
- self.syu=syu;self.eyu=eyu

+ self.fn_quant_ev='/srv7_tmp1/jbrajkovic/These/Unbiasing/evapotranspiration_quantiles_1981_2010.nc'
+ self.fn_quant_pr='/srv7_tmp1/jbrajkovic/These/Unbiasing/precipitation_quantiles_1981_2010.nc'
+
+ self.syu=syu;self.eyu=eyu

+ self.generate_quantiles=generate_quantiles
+ self.model_name=model_name


  def mask_rectangles(self):
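For reference, the lon/lat to Belgian Lambert 72 (EPSG:31370) conversion cached by the constructor above can be reproduced in isolation with pyproj; a minimal sketch follows, where the sample coordinates are purely illustrative and not taken from the package:

# Sketch only: lon/lat -> Belgian Lambert 72 (EPSG:31370), as done in __init__ above.
import numpy as np
import pyproj

lons = np.array([[4.35, 4.40], [4.35, 4.40]])      # illustrative longitudes (deg E)
lats = np.array([[50.84, 50.84], [50.88, 50.88]])  # illustrative latitudes (deg N)

lb72 = pyproj.Proj(projparams='epsg:31370')        # same projection call as the class
x_lb72, y_lb72 = lb72(lons, lats)                  # forward projection, metres

print(x_lb72.shape, y_lb72.shape)                  # shapes match the input grids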
@@ -196,7 +209,8 @@ class MAR_input_data:
  cmap=cm.Greens
  MSK=np.zeros_like(mask1)
  ct.quick_map_plot(self.lons, self.lats, mask1, bounds, cmap, MSK)
- plt.savefig('mask.png')
+ # plt.show()
+ # plt.savefig('mask.png')


  "Séléction des données entre les deux dates pour le masque rectangulaire"
@@ -436,6 +450,7 @@ class MAR_input_data:
  values2[:,j]=values2[:,j]*covers[:,1]
  values3[:,j]=values3[:,j]*covers[:,2]
  values=np.append(values,(values1+values2+values3),axis=1)
+ print(self.var,values.shape)
  else:
  fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
  if 'IRM_grid' in fn[0]:
@@ -456,9 +471,15 @@ class MAR_input_data:

  else:
  #print(mask)
+
  values=np.transpose(np.array(ds[var]))[:,:,indice_debut:][mask]
  print(self.var,values.shape)
  for y in range(year+1,self.date_fin.year+1):
+
+ fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
+ if 'IRM_grid' in fn[0]:
+ fn = glob.glob(self.directory+"*MAR_grid*"+str(year)+"**nc*")
+ ds=xr.open_dataset(fn[0])
  print(y)
  if y<self.date_fin.year:
  values=np.append(values,
@@ -483,11 +504,14 @@ class MAR_input_data:
  "**********************************************************"
  "Lecture des données sur la période historiqe de simulation"
  "**********************************************************"
+ if self.generate_quantiles==1:
+ historical_matrix_unbias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
+ date_debut=datetime.datetime(1981,1,1,0),
+ date_fin=datetime.datetime(2010,12,31,23),
+ directory=self.directory_unbiasing, var=self.var_unb).vec_data
+
+

- historical_matrix_unbias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
- date_debut=datetime.datetime(1981,1,1,0),
- date_fin=datetime.datetime(2010,12,31,23),
- directory=self.directory_unbiasing, var=self.var_unb).vec_data
  date_debutu=datetime.datetime(self.syu,1,1,0)
  date_finu=datetime.datetime(self.eyu,12,31,23)

@@ -495,15 +519,18 @@ class MAR_input_data:

  if self.var_unb=='PRECIP_QUANTITY':

- historical_matrix_bias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
- date_debut=datetime.datetime(1981,1,1,0),
- date_fin=datetime.datetime(2010,12,31,23),
- directory=self.directory, var='MBRR',mod_ydays=self.mod_ydays).vec_data+\
- MAR_input_data(xsummits=self.xsummits,ysummits= self.ysummits,
- date_debut=datetime.datetime(1981,1,1,0),
- date_fin=datetime.datetime(2010,12,31,23),
- directory=self.directory, var='MBSF',
- mod_ydays=self.mod_ydays).vec_data
+ if self.generate_quantiles==1:
+ print('on va le faire')
+ historical_matrix_bias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
+ date_debut=datetime.datetime(1981,1,1,0),
+ date_fin=datetime.datetime(2010,12,31,23),
+ directory=self.directory, var='MBRR',mod_ydays=self.mod_ydays).vec_data+\
+ MAR_input_data(xsummits=self.xsummits,ysummits= self.ysummits,
+ date_debut=datetime.datetime(1981,1,1,0),
+ date_fin=datetime.datetime(2010,12,31,23),
+ directory=self.directory, var='MBSF',
+ mod_ydays=self.mod_ydays).vec_data
+
  biased_data=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
  date_debut=self.date_debut,
  date_fin=self.date_fin,
@@ -532,12 +559,12 @@ class MAR_input_data:


  elif self.var_unb=='E':
-
- historical_matrix_bias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
- date_debut=datetime.datetime(1981,1,1,0),
- date_fin=datetime.datetime(2010,12,31,23),
- directory=self.directory, var='MBEP',
- mod_ydays=self.mod_ydays).vec_data
+ if self.generate_quantiles==1:
+ historical_matrix_bias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
+ date_debut=datetime.datetime(1981,1,1,0),
+ date_fin=datetime.datetime(2010,12,31,23),
+ directory=self.directory, var='MBEP',
+ mod_ydays=self.mod_ydays).vec_data
  biased_data=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
  date_debut=self.date_debut,
  date_fin=self.date_fin,
@@ -553,29 +580,42 @@ class MAR_input_data:
  "****************************************************"
  "Calcul des quantiles historiques simulés et observés"
  "****************************************************"
-
- quant_mat=np.zeros([historical_matrix_unbias.shape[0],101])
- quant_mat_bias=np.zeros([historical_matrix_bias.shape[0],101])
- quant_coeffs=np.zeros([historical_matrix_unbias.shape[0],101])
- historical_matrix_unbias[historical_matrix_unbias<th_drizzle]=0

- if self.find_timestep()[1]=='hours':
- tsd=24
- historical_matrix_bias_d=np.zeros([historical_matrix_bias.shape[0],
- int(historical_matrix_bias.shape[1]/tsd)])
- for i in range(historical_matrix_bias_d.shape[0]):
- for d in range(historical_matrix_bias_d.shape[1]):
- historical_matrix_bias_d[i,d]=np.sum(historical_matrix_bias[i,d*tsd:(d+1)*tsd])
- historical_matrix_bias_d[historical_matrix_bias_d<th_drizzle]=0
- # print(historical_matrix_unbias.shape,historical_matrix_bias_d.shape)
+ quant_mat=np.zeros([biased_data.shape[0],101])
+ quant_mat_bias=np.zeros([biased_data.shape[0],101])
+ quant_coeffs=np.zeros([biased_data.shape[0],101])
+
+
+
+
+
+ if self.generate_quantiles==1:

- for i in range(historical_matrix_unbias.shape[0]):
+ historical_matrix_unbias[historical_matrix_unbias<th_drizzle]=0
+ if self.find_timestep()[1]=='hours':
+ tsd=24
+ historical_matrix_bias_d=np.zeros([historical_matrix_bias.shape[0],
+ int(historical_matrix_bias.shape[1]/tsd)])
+ for i in range(historical_matrix_bias_d.shape[0]):
+ for d in range(historical_matrix_bias_d.shape[1]):
+ historical_matrix_bias_d[i,d]=np.sum(historical_matrix_bias[i,d*tsd:(d+1)*tsd])
+ historical_matrix_bias_d[historical_matrix_bias_d<th_drizzle]=0
+ # print(historical_matrix_unbias.shape,historical_matrix_bias_d.shape)

- quant_mat_bias[i,:]=np.quantile(historical_matrix_bias_d[i,:]\
- [historical_matrix_bias_d[i,:]>th_drizzle],np.arange(0,1.01,0.01))
- quant_mat[i,:]=np.quantile(historical_matrix_unbias[i,:][historical_matrix_unbias[i,:]>th_drizzle],np.arange(0,1.01,0.01))
+ for i in range(historical_matrix_unbias.shape[0]):
+
+ quant_mat_bias[i,:]=np.quantile(historical_matrix_bias_d[i,:]\
+ [historical_matrix_bias_d[i,:]>th_drizzle],np.arange(0,1.01,0.01))
+ quant_mat[i,:]=np.quantile(historical_matrix_unbias[i,:][historical_matrix_unbias[i,:]>th_drizzle],np.arange(0,1.01,0.01))
+ for j in range(quant_mat.shape[1]):quant_coeffs[i,j]=quant_mat[i,j]/quant_mat_bias[i,j]
+
+ else:
+ if self.var_unb=='E':fn_quant=self.fn_quant_ev
+ else:fn_quant=self.fn_quant_pr
+ quant_mat_bias=ct.marray(xr.open_dataset(fn_quant),self.model_name)[self.mask]
+ quant_mat=ct.marray(xr.open_dataset(fn_quant),'IRM')[self.mask]
+ for i in range(quant_mat.shape[0]):
  for j in range(quant_mat.shape[1]):quant_coeffs[i,j]=quant_mat[i,j]/quant_mat_bias[i,j]
-
  # biased_data_var=np.array(self.vec_data)

  "******************************************"
@@ -583,13 +623,12 @@ class MAR_input_data:
  "******************************************"

  "Future quantiles to assess value location"
-
-
+
  if self.find_timestep()[1]=='hours':
  tsd=24
- FutUnb_d=np.zeros([historical_matrix_bias.shape[0],
+ FutUnb_d=np.zeros([biased_data.shape[0],
  int(FutUnb.shape[1]/tsd)])
- for i in range(historical_matrix_bias_d.shape[0]):
+ for i in range(biased_data.shape[0]):
  for d in range(FutUnb_d.shape[1]):
  FutUnb_d[i,d]=np.sum(FutUnb[i,d*tsd:(d+1)*tsd])

@@ -638,46 +677,74 @@ class MAR_input_data:
  Unbiased_data=np.zeros_like(biased_data)

  print ("redistributing on the daily time-step")
-
- for i in range(self.vec_data.shape[0]):
- # print(i)
- d=0
- while d<ydays:
- # if i==67:print(d,Unbiased_data_d[i,d])
- # if d%100==0:print(d)
- if Unbiased_data_d[i,d]<=0.1:d+=1
-
- else:
-
- d1=d
- ndays=0
-
- while d1<ydays and Unbiased_data_d[i,d1]>.1 :
- d1+=1;ndays+=1
-
-
- precip_sum_d=np.sum(Unbiased_data_d[i,d:d+ndays])
- biased_sum=np.sum(biased_data\
- [i,d*tsd:(d+ndays)*tsd])
- biased_hourly=(biased_data)\
- [i,d*tsd:(d+ndays)*tsd]
-
- weights=biased_hourly/biased_sum
-
- Unbiased_data[i,d*tsd:(d+ndays)*tsd]=\
- precip_sum_d*weights
-
- # print(d,PRECIP_IRM[i,j,d])
-
- d+=ndays
+ if self.var_unb=='PRECIP_QUANTITY':
+
+ "Si ce sont les pluies qui sont débiasées"
+ "On débiaise sur tout l'événement et non pas jour après jour"
+
+ for i in range(self.vec_data.shape[0]):
+ # print(i)
+ d=0
+ while d<ydays:
+ # if i==67:print(d,Unbiased_data_d[i,d])
+ # if d%100==0:print(d)
+ if Unbiased_data_d[i,d]<=0.1:d+=1
+
+ else:
+
+ d1=d
+ ndays=0
+
+ while d1<ydays and Unbiased_data_d[i,d1]>.1 :
+ d1+=1;ndays+=1
+
+
+ precip_sum_d=np.sum(Unbiased_data_d[i,d:d+ndays])
+ biased_sum=np.sum(biased_data\
+ [i,d*tsd:(d+ndays)*tsd])
+ biased_hourly=(biased_data)\
+ [i,d*tsd:(d+ndays)*tsd]
+
+ weights=biased_hourly/biased_sum
+
+ Unbiased_data[i,d*tsd:(d+ndays)*tsd]=\
+ precip_sum_d*weights
+
+ # print(d,PRECIP_IRM[i,j,d])
+
+ d+=ndays
+ else:
+ "Débiai<-sage jour après jour pour l'évapotranspiration notamment"
+ for i in range(self.vec_data.shape[0]):
+ # print(i)
+ d=0
+ while d<ydays:
+ # if i==67:print(d,Unbiased_data_d[i,d])
+ # if d%100==0:print(d)
+ if Unbiased_data_d[i,d]<=0.1:d+=1
+
+ else:
+
+
+ precip_sum_d=Unbiased_data_d[i,d]
+ biased_sum=np.sum(biased_data\
+ [i,d*tsd:(d+1)*tsd])
+ biased_hourly=(biased_data)\
+ [i,d*tsd:(d+1)*tsd]
+
+ weights=biased_hourly/biased_sum
+
+ Unbiased_data[i,d*tsd:(d+1)*tsd]=\
+ precip_sum_d*weights
+
+ # print(d,PRECIP_IRM[i,j,d])
+
+ d+=1
+

  if self.var=='MBRO3' or self.var=='MBRR' or self.var=='MBSF':
  # biased_data_var=np.array(self.vec_data)
- biased_data_var=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
- date_debut=self.date_debut,
- date_fin=self.date_fin,
- directory=self.directory, var=self.var,
- mod_ydays=self.mod_ydays).vec_data
+ biased_data_var=self.vec_data

  print("biased data var shape ",biased_data_var.shape)
  print('unbiased data shape' ,Unbiased_data.shape)
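The redistribution above spreads each bias-corrected daily total back onto the hourly time step, using the biased hourly series as weights: over a whole wet spell for precipitation, and day by day for evapotranspiration. A self-contained sketch of the day-by-day case on synthetic data follows (all names and values are illustrative):

# Sketch only: redistribute a corrected daily total over 24 hours with biased-data weights.
import numpy as np

tsd = 24                                                 # hourly time steps per day
biased_hourly = np.random.default_rng(1).random(tsd)     # biased hourly values for one day
unbiased_daily_total = 3.2                               # corrected daily total for that day

weights = biased_hourly / biased_hourly.sum()            # hourly shape of the biased signal
unbiased_hourly = unbiased_daily_total * weights         # hourly series, sums to the daily total

assert np.isclose(unbiased_hourly.sum(), unbiased_daily_total)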
@@ -886,22 +953,33 @@ class MAR_input_data:
  "Test de l'objet"

  if __name__ == "__main__":
- # dir_ds="/srv1_tmp1/fettweis/EU-MAR-7.5km/histo/EUb-ERA/" #dossier avec sortie MAR au format Netcdf
- # dir_ds="/srv1_tmp1/fettweis/EUh-MPI-5km/output-5km-ssp370/"
- # dir_ds='/climato_tmp1/fettweis/MAR/out/EUb/output-hourly/'
- dir_ds='/phypc11_tmp3/MARv3.14/MARv3.14-EUk-NorESM2-MM-5km-ssp585/'
- dir_hist='/phypc11_tmp3/MARv3.14/MARv3.14-EUk-NorESM2-MM-5km-ssp585/'
+
+ dir_ds='/phypc11_tmp3/MARv3.14/MARv3.14-EUi-MIROC6-5km-ssp585/'
+ dir_hist='/phypc11_tmp3/MARv3.14/MARv3.14-EUi-MIROC6-5km-ssp585/'

- dir_stock='/phypc11_tmp3/MARv3.14/'
+ dir_stock='/srv1_tmp6/fettweis/MARv3.14/'
  dir_ins=['MARv3.14-EUh-MPI-ESM1-2-HR-5km-',
  'MARv3.14-EUi-MIROC6-5km-',
  'MARv3.14-EUm-EC-Earth3-Veg-5km-',
- 'MARv3.14-EUk-NorESM2-MM-5km-'
+ 'MARv3.14-EUk-NorESM2-MM-5km-',
+ 'MARv3.14-EUq-CMCC-CM2-SR5-5km-',
+ 'MARv3.14-EUl-IPSL-CM6A-LR-5km-'
+
  ]

- mod_racs=['MPI','MIR','EC3','NOR']
+ mod_names=['MPI-ESM1',
+ 'MIROC6',
+ 'EC3',
+ 'NorESM2',
+ 'CMCC-CM2-SR5',
+ 'IPSL'
+ ]
+
+ mod_racs=['MPI','MIR','EC3','NOR','CMC','IPSL']
+
  scens=['ssp126','ssp245','ssp370','ssp585']
- # dir_ds="/srv7_tmp1/jbrajkovic/These/ERA5/"
+
+
  dirout="/srv7_tmp1/jbrajkovic/These/forWOLF/evapo"#-MPI_1981-2010/" #dossier outputs
  filenameshp="grid.shp" #nom du shapefile en sortie

@@ -909,63 +987,85 @@ if __name__ == "__main__":
  "dates entre lesquelles sélectionner les données (Heures,jour,mois,annee"
  "code à retravailler si simulations futures avec pas de temps inférieur à l'heure"

- # date_debut1=datetime.datetime(2016,1,1,5)
- # date_fin1=datetime.datetime(2100,12,31,23)
-
- # "Définition d'un rectangle"
-
- # xs=np.array([200000,200000,
- # 272000,272000.])
- # ys=np.array([63000,152000,
- # 152000,63000])
-
- # for mod in range(4):
- # for scen in range(4):
-
- # dirin=dir_stock+dir_ins[mod]+scens[scen]+'/'
- # print(dirin)
- # objet_MAR=MAR_input_data(xsummits=xs,ysummits=ys,
- # date_debut=date_debut1,
- # date_fin=date_fin1,
- # directory=dirin,
- # directory_hist_sim=dir_hist,
- # var='MBEP',
- # var_unb='E',
- # UnborNot=1,
- # syu=date_debut1.year,
- # eyu=date_fin1.year)
+ date_debut1=datetime.datetime(1981,1,1,0)
+ date_fin1=datetime.datetime(2010,12,31,23)
+
+ "Définition d'un rectangle"
+
+ xs=np.array([200000,200000,
+ 272000,272000.])
+ ys=np.array([63000,152000,
+ 152000,63000])
+
+ dat_types=[1,1,1,0,0,1]
+
+ sc=3
+
+ for mod in range(6):
+ # for sc in range(4):
+ dirin=dir_stock+dir_ins[mod]+scens[sc]+'/'
+ print(dirin)
+ objet_MAR=MAR_input_data(xsummits=xs,ysummits=ys,
+ date_debut=date_debut1,
+ date_fin=date_fin1,
+ directory=dirin,
+ directory_hist_sim=dir_hist,
+ var='MBEP',
+ var_unb='E',
+ UnborNot=1,
+ syu=date_debut1.year,
+ eyu=date_fin1.year,
+ mod_ydays=dat_types[mod],
+ model_name=mod_names[mod],
+ generate_quantiles=0)


- # print('ok')
- # dirout1=dirout+'-'+mod_racs[mod]+'_'+scens[scen]+'_'+str(date_debut1.year)+'-'+\
- # str(date_fin1.year)+'/'
+ print('ok')
+ if date_fin1.year>2015:
+ dirout1=dirout+'-'+mod_racs[mod]+'_'+scens[sc]+'_'+str(date_debut1.year)+'-'+\
+ str(date_fin1.year)+'/'
+ else:
+
+ dirout1=dirout+'-'+mod_racs[mod]+'_'+str(date_debut1.year)+'-'+\
+ str(date_fin1.year)+'/'

- # objet_MAR.MAR_shapefile(filenameshp,dirout1)
- # objet_MAR.MAR_BinaryOutputs(dirout1)
-
-
-
+ objet_MAR.MAR_shapefile(filenameshp,dirout1)
+ objet_MAR.MAR_BinaryOutputs(dirout1)
+
+
+
+ "*************************************"
+ "**Tests pour améliorer le programme***"
+ "*************************************"

  xs=np.array([200000,200000,
  210000,210000.])
  ys=np.array([63000,73000,
  73000,63000])
  dirin=dir_hist
+
  date_debut1=datetime.datetime(2016,1,1,0)
- date_fin1=datetime.datetime(2017,12,31,23)
+ date_fin1=datetime.datetime(2019,12,31,23)

  objet_MAR=MAR_input_data(xsummits=xs,ysummits=ys,
- date_debut=date_debut1,
- date_fin=date_fin1,
- directory=dirin,
- directory_hist_sim=dir_hist,
- var='MBRO3',
- var_unb='PRECIP_QUANTITY',
- UnborNot=1,
- syu=1982,
- eyu=1983,
- mod_ydays=0)
-
+ date_debut=date_debut1,
+ date_fin=date_fin1,
+ directory=dirin,
+ directory_hist_sim=dir_hist,
+ var='MBRO3',
+ model_name='MIROC6',
+ var_unb='PRECIP_QUANTITY',
+ UnborNot=1,
+ syu=2016,
+ eyu=2017,
+ mod_ydays=1,
+ generate_quantiles=0)
+
+ dirout1='/srv7_tmp1/jbrajkovic/These/forWOLF/test/'
+ filenameshp='test.shp'
+ objet_MAR.MAR_shapefile(filenameshp,dirout1)
+ objet_MAR.MAR_BinaryOutputs(dirout1)
+
  # "Tests outputs"
  cmap=ct.IPCC_cmap()
  objet_MAR.plot_mask()
@@ -980,25 +1080,24 @@ if __name__ == "__main__":
  maxi=np.max(maxs)


- bounds=np.arange(-maxi,maxi+20,20)
- norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
-
-
- MSK=objet_MAR.mask_rectangles()
- fig=plt.figure(figsize=(6,6))
- ax=plt.subplot()
- m=ct.map_belgium_zoom(ax, objet_MAR.lons, objet_MAR.lats)
- lons_w=objet_MAR.lons[MSK==True];lats_w=objet_MAR.lats[MSK]
- MBRO3=np.array(objet_MAR.lons)
- for k in range(0,np.size(MBRO3_mask)):
- for i in range(0,MBRO3.shape[0]):
- for j in range(0,MBRO3.shape[1]):
- if lons_w[k]==objet_MAR.lons[i,j] and lats_w[k]==objet_MAR.lats[i,j]:
- MBRO3[i,j]=MBRO3_mask[k]
- vmax=np.max(MBRO3[pd.isna(MBRO3)==False])
- MBRO3[MSK==False]=float("nan")
- x,y=m(objet_MAR.lons,objet_MAR.lats)
- mapa=m.pcolormesh(x,y,MBRO3,norm=norm,cmap=cmap)
- cbar=m.colorbar()
- plt.savefig('fig.png',bbox_inches='tight')
-
+ # bounds=np.arange(-maxi,maxi+20,20)
+ # norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
+
+
+ # MSK=objet_MAR.mask_rectangles()
+ # fig=plt.figure(figsize=(6,6))
+ # ax=plt.subplot()
+ # m=ct.map_belgium_zoom(ax, objet_MAR.lons, objet_MAR.lats)
+ # lons_w=objet_MAR.lons[MSK==True];lats_w=objet_MAR.lats[MSK]
+ # MBRO3=np.array(objet_MAR.lons)
+ # for k in range(0,np.size(MBRO3_mask)):
+ # for i in range(0,MBRO3.shape[0]):
+ # for j in range(0,MBRO3.shape[1]):
+ # if lons_w[k]==objet_MAR.lons[i,j] and lats_w[k]==objet_MAR.lats[i,j]:
+ # MBRO3[i,j]=MBRO3_mask[k]
+ # vmax=np.max(MBRO3[pd.isna(MBRO3)==False])
+ # MBRO3[MSK==False]=float("nan")
+ # x,y=m(objet_MAR.lons,objet_MAR.lats)
+ # mapa=m.pcolormesh(x,y,MBRO3,norm=norm,cmap=cmap)
+ # cbar=m.colorbar()
+ # plt.savefig('fig.png',bbox_inches='tight')
wolfhece/opengl/py3d.py CHANGED
@@ -425,7 +425,7 @@ class WolfArray_plot3D():
  reference for texture 2D : https://registry.khronos.org/OpenGL-Refpages/gl4/html/glTexImage2D.xhtml

  OPENGL
- ------
+
  The first element corresponds to the lower left corner of the texture image.
  Subsequent elements progress left-to-right through the remaining texels in the lowest row of the texture image,
  and then in successively higher rows of the texture image.
@@ -442,7 +442,7 @@ class WolfArray_plot3D():
  const void * data);

  NUMPY
- -----
+
  shape[0] is the number of rows and shape[1] is the number of columns.

  The "data" buffer is row-major order or column-major order, depending on the value of the order parameter.
@@ -456,7 +456,7 @@ class WolfArray_plot3D():
  - OpenGL Texture width = shape[0]
  - OpenGL Texture height = shape[1]

- ** ++ IMPORTANT **
+ ++ IMPORTANT

  We assume that if data is row-major order, the indexing [i,j] is (y, x) and if data is column-major order, the indexing is (x, y)

@@ -469,7 +469,7 @@ class WolfArray_plot3D():
  - Transposition is done by changing the indexing convention.
  - "texture" calls in shaders is the same for both row-major and column-major order.

- ** -- IMPORTANT **
+ -- IMPORTANT
  """

  def __init__(self,
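The docstring edited above only loses its underlines, but it documents how a NumPy array's memory order maps onto the flat buffer handed to glTexImage2D. The row-major versus column-major distinction it relies on can be illustrated with plain NumPy, independently of the class:

# Sketch only: row-major ('C') vs column-major ('F') flattening of the same 2x3 array.
import numpy as np

a = np.arange(6).reshape(2, 3)      # [[0, 1, 2], [3, 4, 5]]

print(a.ravel(order='C'))           # [0 1 2 3 4 5]  -> rows are contiguous
print(a.ravel(order='F'))           # [0 3 1 4 2 5]  -> columns are contiguous
# OpenGL interprets the same buffer as width x height texels, so the chosen
# order decides whether indexing [i, j] reads as (y, x) or (x, y).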
wolfhece/pyshields.py CHANGED
@@ -303,7 +303,7 @@ def _get_Rouse(d:float, q:float, h:float, K:float, rhom:float=2650., rho:float=R
  elif frac==100:
  return rouse-1.2

- def get_transport_mode(d:float, q:float, h:float, K:float, rhom:float=2650., rho:float=RHO_PUREWATER) -> BED_LOAD | SUSPENDED_LOAD_50 | SUSPENDED_LOAD_100 | WASH_LOAD:
+ def get_transport_mode(d:float, q:float, h:float, K:float, rhom:float=2650., rho:float=RHO_PUREWATER): # -> BED_LOAD | SUSPENDED_LOAD_50 | SUSPENDED_LOAD_100 | WASH_LOAD:
  """
  Transport mode

@@ -350,7 +350,7 @@ def get_d_cr_susp(q:float, h:float, K:float, rhom:float=2650., rho:float=RHO_PUR
  except:
  return 0.

- def shieldsdia_sadim(s_psicr=None, dstar_psicr=None, rhom=2650., rho=RHO_PUREWATER, figax=None) -> [plt.Figure,plt.Axes]:
+ def shieldsdia_sadim(s_psicr=None, dstar_psicr=None, rhom=2650., rho=RHO_PUREWATER, figax=None) -> tuple[plt.Figure,plt.Axes]:
  """ Plot Shields diagram with sadim"""

  smax = 1000
@@ -406,7 +406,7 @@ def shieldsdia_sadim(s_psicr=None, dstar_psicr=None, rhom=2650., rho=RHO_PUREWAT

  return fig,ax

- def shieldsdia_dstar(s_psicr=None, dstar_psicr=None, rhom=2650., rho=RHO_PUREWATER, figax=None) -> [plt.Figure,plt.Axes]:
+ def shieldsdia_dstar(s_psicr=None, dstar_psicr=None, rhom=2650., rho=RHO_PUREWATER, figax=None) -> tuple[plt.Figure,plt.Axes]:
  """ Plot Shields diagram with dstar"""

  smax = 1000
@@ -465,7 +465,7 @@ def shieldsdia_dstar(s_psicr=None, dstar_psicr=None, rhom=2650., rho=RHO_PUREWAT

  return fig,ax

- def shieldsdia_dim(figax=None) -> [plt.Figure,plt.Axes]:
+ def shieldsdia_dim(figax=None) -> tuple[plt.Figure,plt.Axes]:
  """ Plot Shields diagram with dimensional values"""

  smax=1e6
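The pyshields.py changes above replace list-style return annotations with tuple[plt.Figure, plt.Axes] (a `[A, B]` annotation is a list literal, not a type, and the union of module constants in get_transport_mode is presumably why that annotation was commented out rather than kept). A minimal sketch of the tuple-annotated pattern follows; make_empty_diagram is a hypothetical example, not part of wolfhece:

# Hypothetical example (not wolfhece API): returning a (Figure, Axes) pair
# with the tuple[...] annotation used in the diff above.
import matplotlib.pyplot as plt

def make_empty_diagram(figax=None) -> tuple[plt.Figure, plt.Axes]:
    """Reuse an existing (fig, ax) pair or create a new one."""
    if figax is None:
        fig, ax = plt.subplots()
    else:
        fig, ax = figax
    return fig, ax

fig, ax = make_empty_diagram()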