wolfhece 2.0.4__py3-none-any.whl → 2.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42)
  1. wolfhece/GraphNotebook.py +0 -1
  2. wolfhece/GraphProfile.py +5 -14
  3. wolfhece/Lidar2002.py +0 -1
  4. wolfhece/PyCrosssections.py +21 -26
  5. wolfhece/PyDraw.py +219 -58
  6. wolfhece/PyGui.py +6 -3
  7. wolfhece/PyPalette.py +2 -2
  8. wolfhece/PyParams.py +48 -48
  9. wolfhece/PyVertex.py +1 -1
  10. wolfhece/PyVertexvectors.py +40 -4
  11. wolfhece/Results2DGPU.py +7 -6
  12. wolfhece/apps/WolfPython.png +0 -0
  13. wolfhece/bernoulli/NetworkOpenGL.py +1 -1
  14. wolfhece/cli.py +7 -0
  15. wolfhece/flow_SPWMI.py +1 -1
  16. wolfhece/friction_law.py +6 -6
  17. wolfhece/gpuview.py +1 -1
  18. wolfhece/hydrology/PyWatershed.py +9 -10
  19. wolfhece/lagrangian/emitter.py +1 -1
  20. wolfhece/lagrangian/example_domain.py +1 -1
  21. wolfhece/lagrangian/velocity_field.py +4 -4
  22. wolfhece/libs/WolfDll.dll +0 -0
  23. wolfhece/libs/WolfDll_CD.dll +0 -0
  24. wolfhece/libs/WolfOGL.c +28187 -28187
  25. wolfhece/mar/Interface_MAR_WOLF_objet.py +1004 -0
  26. wolfhece/mar/commontools.py +1289 -59
  27. wolfhece/mesh2d/bc_manager.py +89 -13
  28. wolfhece/mesh2d/cst_2D_boundary_conditions.py +12 -0
  29. wolfhece/mesh2d/wolf2dprev.py +1 -2
  30. wolfhece/pydike.py +1 -1
  31. wolfhece/pyshields.py +43 -43
  32. wolfhece/pywalous.py +2 -2
  33. wolfhece/scenario/config_manager.py +3 -1
  34. wolfhece/ui/wolf_multiselection_collapsiblepane.py +10 -10
  35. wolfhece/wolf_array.py +1298 -418
  36. wolfhece/wolf_texture.py +1 -1
  37. wolfhece/wolfresults_2D.py +124 -19
  38. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/METADATA +5 -1
  39. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/RECORD +42 -39
  40. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/WHEEL +0 -0
  41. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/entry_points.txt +0 -0
  42. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/top_level.txt +0 -0
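
The largest change in this release is the new module wolfhece/mar/Interface_MAR_WOLF_objet.py (item 25, +1004 lines), whose diff follows. It extracts MAR regional-climate-model variables from yearly NetCDF files over a rectangle given in Lambert 72 coordinates, optionally applies a quantile-mapping bias correction against IRM data, and writes the result as a shapefile plus one rain file per MAR pixel. A minimal usage sketch, adapted from the module's own __main__ block; the paths, dates and rectangle are placeholders, and the dotted import path is assumed from the wheel layout (the module imports commontools directly, so it may need to be run from inside wolfhece/mar):

    import datetime
    import numpy as np
    from wolfhece.mar.Interface_MAR_WOLF_objet import MAR_input_data

    # Extraction rectangle in Lambert 72 coordinates (placeholder values).
    xs = np.array([200000., 200000., 210000., 210000.])
    ys = np.array([ 63000.,  73000.,  73000.,  63000.])

    mar = MAR_input_data(xsummits=xs, ysummits=ys,
                         date_debut=datetime.datetime(2016, 1, 1, 0),
                         date_fin=datetime.datetime(2017, 12, 31, 23),
                         directory='/path/to/MAR/netcdf/',   # yearly MAR NetCDF files
                         var='MBRR',                         # rainfall, raw data (UnborNot=0: no bias correction)
                         UnborNot=0)

    mar.MAR_shapefile('grid.shp', '/path/to/output/')    # one polygon per selected MAR pixel, under GRID/
    mar.MAR_BinaryOutputs('/path/to/output/')            # one binary .rain file per pixel, under DATA/
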
@@ -0,0 +1,1004 @@
1
+
2
+ #!/usr/bin/env python3
3
+ # -*- coding: utf-8 -*-
4
+ """
5
+ Created on Tue Jan 3 16:31:47 2023
6
+
7
+ @author: jbrajkovic
8
+ """
9
+
10
+ import numpy as np
11
+ import matplotlib.pyplot as plt
12
+ import commontools as ct
13
+ import xarray as xr
14
+ import matplotlib as mpl
15
+ import matplotlib.cm as cm
16
+ import glob as glob
17
+ import pyproj
18
+ import geopandas as gpd
19
+ from shapely.geometry import Polygon
20
+ from fiona.crs import from_epsg
21
+ import datetime
22
+ import os
23
+ import pandas as pd
24
+ from zipfile import ZipFile
25
+ import struct
26
+
27
+
28
+ class MAR_input_data:
29
+
30
+ def __init__(self,xsummits=np.zeros(0),ysummits=np.zeros(0),
31
+ date_debut=datetime.datetime(2020,7,11,5),
32
+ date_fin=datetime.datetime(2020,7,11,5),
33
+ directory='~/BUP_srv7/',
34
+ directory_hist_sim='~/BUP_srv7/',
35
+ var='MBRR',
36
+ var_unb='E',
37
+ UnborNot=0,
38
+ syu=1981,eyu=2010,
39
+ mod_ydays=1):
40
+
41
+
42
+ """
43
+
44
+ xsummits : Lambert 72 x-coordinates of the extraction rectangle
45
+ ysummits : same, for the y-coordinates
46
+
47
+ date_debut : start date of the extracted time series
48
+ date_fin : same, for the end date
49
+
50
+ directory : directory of the yearly NetCDF files
51
+ directory_hist_sim : directory of the yearly NetCDF files of the historical simulation period
52
+ (used for bias correction)
53
+
54
+ var : name of the MAR variable to extract; for total evapotranspiration (all components),
55
+ use 'MBEP'
56
+
57
+ var_unb : name of the variable used for bias correction in the IRM NetCDF files
58
+
59
+ UnborNot : 1 for bias-corrected data, 0 for raw data
60
+
61
+ syu, eyu : start and end years of the historical period used to compare model and observations
62
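+ mod_ydays : 1 if the model calendar includes leap years (366 days), 0 for a fixed 365-day calendar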
+
63
+ """
64
+
65
+ self.directory_hist_sim=directory_hist_sim
66
+ self.UnborNot=UnborNot
67
+ self.var_unb=var_unb
68
+ self.var=var
69
+ self.xsummits=xsummits
70
+ self.ysummits=ysummits
71
+ self.date_debut=date_debut
72
+ self.date_fin=date_fin
73
+ self.directory=directory
74
+ self.mod_ydays=mod_ydays
75
+ self.fn= glob.glob(self.directory+"*"+str(date_debut.year)+"**nc*")
76
+ if 'IRM_grid' in self.fn[0]:
77
+ print('Hajde Hajduce')
78
+ self.fn= glob.glob(self.directory+"*MAR_grid*"+str(date_debut.year)+"**nc*")
79
+
80
+ print(self.directory,date_debut.year)
81
+ print(self.fn)
82
+ self.ds=xr.open_dataset(self.fn[0])
83
+ self.lons=np.transpose(np.array(self.ds.LON))
84
+ self.lats=np.transpose(np.array(self.ds.LAT))
85
+ self.Lb72=pyproj.Proj(projparams='epsg:31370')
86
+ self.x_Lb72, self.y_Lb72 = self.Lb72(self.lons,self.lats)
87
+ self.mask=self.mask_rectangles()
88
+ self.plot_mask()
89
+ self.vec_data=self.select_MARdata()
90
+ # self.historical_matrix=
91
+ self.directory_unbiasing="/srv7_tmp1/jbrajkovic/These/IRM/"
92
+ self.syu=syu;self.eyu=eyu
93
+
94
+
95
+
96
+
97
+ def mask_rectangles(self):
98
+ """
99
+ Creates the rectangular mask
100
+ so that MAR values are extracted only for the
101
+ specified zone
102
+
103
+
104
+ """
105
+
106
+
107
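+ # Sort the four summits by increasing x, build the slopes of the quadrilateral edges
+ # (a-b, a-c, b-d, c-d), and flag every MAR grid point lying between the lower and upper edges;
+ # if the rectangle is axis-aligned, a simple bounding-box test is used instead.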
+ i=0
108
+ xmin=np.min(self.xsummits);xmax=np.max(self.xsummits)
109
+ ymin=np.min(self.ysummits);ymax=np.max(self.ysummits)
110
+ x=self.x_Lb72;y=self.y_Lb72
111
+ mask=np.zeros([x.shape[0],x.shape[1]])
112
+
113
+
114
+ while i<3:
115
+ # print(i)
116
+ j=i+1
117
+ while j<4:
118
+ # print(i,xsummits)
119
+ # print(j)
120
+ if self.xsummits[j]<self.xsummits[i]:
121
+ tempx=self.xsummits[i]
122
+ tempy=self.ysummits[i]
123
+ self.xsummits[i]=self.xsummits[j]
124
+ self.ysummits[i]=self.ysummits[j]
125
+ self.xsummits[j]=tempx
126
+ self.ysummits[j]=tempy
127
+ j=i+1
128
+ j=j+1
129
+
130
+ i=i+1
131
+ #print(self.xsummits);print(self.ysummits)
132
+
133
+ if (self.xsummits[0]-self.xsummits[1])>0.01:
134
+ pab=((self.ysummits[1]-self.ysummits[0])/(self.xsummits[1]-self.xsummits[0]))
135
+ pac=((self.ysummits[2]-self.ysummits[0])/(self.xsummits[2]-self.xsummits[0]))
136
+ pbd=((self.ysummits[3]-self.ysummits[1])/(self.xsummits[3]-self.xsummits[1]))
137
+ pcd=((self.ysummits[3]-self.ysummits[2])/(self.xsummits[3]-self.xsummits[2]))
138
+
139
+ for i in range(0,x.shape[0]):
140
+ for j in range(0,y.shape[1]):
141
+
142
+ #cas 1 en dehors de la grande zone
143
+ xp=x[i,j];yp=y[i,j]
144
+ if xp>xmax or xp<xmin or yp>ymax or yp<ymin:
145
+ # print(i,j)
146
+ continue
147
+ if self.ysummits[1]>self.ysummits[0]:
148
+ # print(i,j)
149
+ if xp>self.xsummits[0] and xp<self.xsummits[1]:
150
+ yhaut=self.ysummits[0]+pab*(xp-self.xsummits[0])
151
+ ybas=self.ysummits[0]+pac*(xp-self.xsummits[0])
152
+ if yp<=yhaut and yp>=ybas:mask[i,j]=1
153
+ else:continue
154
+ elif xp>self.xsummits[1] and xp<self.xsummits[2]:
155
+ # print(i,j)
156
+ yhaut=self.ysummits[1]+pbd*(xp-self.xsummits[1])
157
+ ybas=self.ysummits[0]+pac*(xp-self.xsummits[0])
158
+ if yp<=yhaut and yp>=ybas:mask[i,j]=1
159
+ else:continue
160
+ else:
161
+ ybas=self.ysummits[2]+pcd*(xp-self.xsummits[2])
162
+ yhaut=self.ysummits[1]+pbd*(xp-self.xsummits[1])
163
+ if yp<=yhaut and yp>=ybas:mask[i,j]=1
164
+ else:continue
165
+ else:
166
+ # if i==20:print(i,j)
167
+ if xp>self.xsummits[0] and xp<self.xsummits[1]:
168
+ # print('Hajmo')
169
+ ybas=self.ysummits[0]+pab*(xp-self.xsummits[0])
170
+ yhaut=self.ysummits[0]+pac*(xp-self.xsummits[0])
171
+ if yp<=yhaut and yp>=ybas:mask[i,j]=1
172
+ else:continue
173
+ elif xp>self.xsummits[1] and xp<self.xsummits[2]:
174
+ # print(i,j)
175
+ ybas=self.ysummits[1]+pbd*(xp-self.xsummits[1])
176
+ yhaut=self.ysummits[0]+pac*(xp-self.xsummits[0])
177
+ if yp<=yhaut and yp>=ybas:mask[i,j]=1
178
+ else:continue
179
+ elif xp>self.xsummits[2] and xp<self.xsummits[3] :
180
+ # print('Hajde')
181
+ yhaut=self.ysummits[2]+pcd*(xp-self.xsummits[2])
182
+ ybas=self.ysummits[1]+pbd*(xp-self.xsummits[1])
183
+ if yp<=yhaut and yp>=ybas:mask[i,j]=1;#print(i,j)
184
+ else:continue
185
+ else:
186
+ mask=((x>=xmin)&(x<=xmax))&((y>=ymin)&(y<=ymax))
187
+ mask=mask==1
188
+ print(mask[mask==True].shape)
189
+ return(mask)
190
+
191
+ def plot_mask(self):
192
+ mask1=np.zeros_like(self.mask)
193
+ mask1[self.mask]=1
194
+ mask1[self.mask==False]=0
195
+ bounds=np.arange(0,1.5,.5)
196
+ cmap=cm.Greens
197
+ MSK=np.zeros_like(mask1)
198
+ ct.quick_map_plot(self.lons, self.lats, mask1, bounds, cmap, MSK)
199
+ plt.savefig('mask.png')
200
+
201
+
202
+ "Séléction des données entre les deux dates pour le masque rectangulaire"
203
+
204
+ def select_MARdata(self):
205
+ '''
206
+ Input : var: name of the MAR hydrological variable (string)
207
+ date_debut: initial date (vector [hour, day, month, year])
208
+ date_fin: same for the final date
209
+ directory: directory with the MAR simulations (depending on the GCM/scenario)
210
+ mask: spatial mask (matrix of 0s and 1s over the zone of interest)
211
+ Description : selects the MAR hydrological variable for the masked pixels.
212
+ Returns a 2D matrix with all the MAR values for every time step;
213
+ example: 5 time steps and 100 pixels, output = matrix of dimensions (100, 5)
214
+ '''
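+ # For sub-pixel variables (var_index > 3) the three MAR surface fractions (FRV/100) are used as
+ # weights; for 'MBEP' the MBET and MBSL components are added to obtain total evapotranspiration.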
215
+
216
+ varnames=['PRECIP_QUANTITY','E','MBRR','MBSF','MBRO1','MBRO2','MBRO3','MBRO4',
217
+ 'MBRO5','MBRO6','MBCC','MBEP','MBET','MBSL','MBSC','MBM','MBSN']
218
+
219
+
220
+ var=self.var
221
+ mask=self.mask
222
+ for i in range(0,np.size(varnames)):
223
+ if var==varnames[i]:var_index=i
224
+
225
+ if var_index>3:
226
+ "To take into account the occupied fraction by subpixels"
227
+ var_subpixel_cover="FRV"
228
+ covers=xr.open_dataset(glob.glob(self.directory+"*"+str(self.date_debut.year)+"**nc*")[0])
229
+ covers=np.transpose(np.array(covers[var_subpixel_cover]))/100.
230
+ covers=covers[mask]
231
+
232
+
233
+ if self.date_debut.year==self.date_fin.year:
234
+ year=self.date_debut.year;day=self.date_debut.day;month=self.date_debut.month
235
+ fn = glob.glob(self.directory+"*"+str(year)+"**nc*")
236
+ if 'IRM_grid' in fn[0]:
237
+ fn = glob.glob(self.directory+"*MAR_grid*"+str(year)+"**nc*")
238
+ print(fn[0])
239
+ ds=xr.open_dataset(fn[0])
240
+ JJ=ct.date2JJ(day, month, year)
241
+ MAR_time_step=np.transpose(np.array(ds['MBRR'])).shape[2]
242
+
243
+ if ct.isbis(year)==1:ndays=366
244
+ else:ndays=365
245
+ MAR_time_step=float(ndays)/float(MAR_time_step)
246
+ MAR_time_step_hours=(MAR_time_step*24)
247
+
248
+ if MAR_time_step==1.:
249
+ indice_debut=JJ-1
250
+ indice_fin=ct.date2JJ(self.date_fin.day,month,year)-1
251
+ else:
252
+ indice_debut=JJ*(int(24/MAR_time_step_hours))-1+(int(self.date_debut.hour\
253
+ /MAR_time_step_hours))
254
+ indice_fin=ct.date2JJ(self.date_fin.day,month,year)*\
255
+ (int(24/MAR_time_step_hours))+(int(self.date_fin.hour\
256
+ /MAR_time_step_hours))-1
257
+
258
+
259
+ if var_index>3:
260
+ if var=='MBEP':
261
+
262
+ "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
263
+ "**************Attention***************"
264
+ "Definition evapotranspiration dans MAR"
265
+ "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
266
+
267
+ values1=(np.transpose(np.array(ds[var]))[:,:,0,indice_debut:indice_fin+1])[mask] +\
268
+ (np.transpose(np.array(ds['MBET']))[:,:,0,indice_debut:indice_fin+1])[mask]+\
269
+ (np.transpose(np.array(ds['MBSL']))[:,:,0,indice_debut:indice_fin+1])[mask]
270
+ values2=(np.transpose(np.array(ds[var]))[:,:,1,indice_debut:indice_fin+1])[mask]+\
271
+ (np.transpose(np.array(ds['MBET']))[:,:,1,indice_debut:indice_fin+1])[mask]+\
272
+ (np.transpose(np.array(ds['MBSL']))[:,:,1,indice_debut:indice_fin+1])[mask]
273
+
274
+ values3=(np.transpose(np.array(ds[var]))[:,:,2,indice_debut:indice_fin+1])[mask] +\
275
+ (np.transpose(np.array(ds['MBET']))[:,:,2,indice_debut:indice_fin+1])[mask]+\
276
+ (np.transpose(np.array(ds['MBSL']))[:,:,2,indice_debut:indice_fin+1])[mask]
277
+
278
+ for j in range(np.shape(values1)[2]):
279
+
280
+ values1[:,:,j]=values1[:,:,j]*covers[:,:,0]
281
+ values2[:,:,j]=values2[:,:,j]*covers[:,:,1]
282
+ values3[:,:,j]=values3[:,:,j]*covers[:,:,2]
283
+ values=values1+values2+values3
284
+ else:
285
+ values1=np.transpose(np.array(ds[var]))[:,:,0,indice_debut:indice_fin+1][mask]
286
+ values2=np.transpose(np.array(ds[var]))[:,:,1,indice_debut:indice_fin+1][mask]
287
+ values3=np.transpose(np.array(ds[var]))[:,:,2,indice_debut:indice_fin+1] [mask]
288
+ for j in range(np.shape(values1)[2]):
289
+
290
+ values1[:,j]=values1[:,j]*covers[:,0]
291
+ values2[:,j]=values2[:,j]*covers[:,1]
292
+ values3[:,j]=values3[:,j]*covers[:,2]
293
+ values=values1+values2+values3
294
+
295
+
296
+ else:
297
+ values=np.transpose(np.array(ds[var]))[:,:,indice_debut:indice_fin+1][mask]
298
+
299
+ else:
300
+ year=self.date_debut.year;day=self.date_debut.day;month=self.date_debut.month;hour=self.date_debut.hour
301
+ print(year,month,day,hour)
302
+ print(self.date_fin)
303
+ fn = glob.glob(self.directory+"*"+str(year)+"**nc*")
304
+ if 'IRM_grid' in fn[0]:
305
+ fn = glob.glob(self.directory+"*MAR_grid*"+str(year)+"**nc*")
306
+ ds=xr.open_dataset(fn[0])
307
+ JJ=ct.date2JJ(day, month, year,type_mod=self.mod_ydays)
308
+
309
+ MAR_time_step=np.transpose(np.array(ds[self.var])).shape[-1]
310
+ if self.mod_ydays==1:
311
+ if ct.isbis(year)==1:ndays=366
312
+ else:ndays=365
313
+ else:
314
+ ndays=365
315
+ MAR_time_step=ndays/float(MAR_time_step)
316
+ MAR_time_step_hours=(MAR_time_step*24)
317
+
318
+ if MAR_time_step==1.:
319
+ indice_debut=JJ-1
320
+ indice_fin=ct.date2JJ(self.date_fin.day,self.date_fin.month,self.date_fin.year,type_mod=self.mod_ydays)
321
+ else:
322
+ indice_debut=(JJ-1)*(int(24/MAR_time_step_hours))+(int(hour\
323
+ /MAR_time_step_hours))
324
+ indice_fin=(ct.date2JJ(self.date_fin.day,self.date_fin.month,self.date_fin.year,type_mod=self.mod_ydays)-1)*\
325
+ (int(24/MAR_time_step_hours))+(int(self.date_fin.hour\
326
+ /MAR_time_step_hours))+1
327
+
328
+ print("indices début et fin",MAR_time_step_hours,indice_debut,indice_fin)
329
+
330
+ if var_index>3:
331
+ if var=='MBEP':
332
+
333
+ "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
334
+ "**************Attention***************"
335
+ "Definition evapotranspiration dans MAR"
336
+ "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
337
+
338
+ values1=(np.transpose(np.array(ds[var]))[:,:,0,indice_debut:])[mask] +\
339
+ (np.transpose(np.array(ds['MBET']))[:,:,0,indice_debut:])[mask]+\
340
+ (np.transpose(np.array(ds['MBSL']))[:,:,0,indice_debut:])[mask]
341
+ values2=(np.transpose(np.array(ds[var]))[:,:,1,indice_debut:])[mask]+\
342
+ (np.transpose(np.array(ds['MBET']))[:,:,1,indice_debut:])[mask]+\
343
+ (np.transpose(np.array(ds['MBSL']))[:,:,1,indice_debut:])[mask]
344
+ values3=(np.transpose(np.array(ds[var]))[:,:,2,indice_debut:])[mask] +\
345
+ (np.transpose(np.array(ds['MBET']))[:,:,2,indice_debut:])[mask]+\
346
+ (np.transpose(np.array(ds['MBSL']))[:,:,2,indice_debut:])[mask]
347
+
348
+ for j in range(np.shape(values1)[-1]):
349
+
350
+ values1[:,j]=values1[:,j]*covers[:,0]
351
+ values2[:,j]=values2[:,j]*covers[:,1]
352
+ values3[:,j]=values3[:,j]*covers[:,2]
353
+ values=(values1+values2+values3)
354
+
355
+
356
+ for y in range(year+1,self.date_fin.year+1):
357
+ print(y)
358
+ if y<self.date_fin.year:
359
+ fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
360
+ if 'IRM_grid' in fn[0]:
361
+ fn = glob.glob(self.directory+"*MAR_grid*"+str(year)+"**nc*")
362
+ ds=xr.open_dataset(fn[0])
363
+ values1=(np.transpose(np.array(ds[var]))[:,:,0,:])[mask]+\
364
+ (np.transpose(np.array(ds['MBET']))[:,:,0,:])[mask]+\
365
+ (np.transpose(np.array(ds['MBSL']))[:,:,0,:])[mask]
366
+
367
+ values2=(np.transpose(np.array(ds[var]))[:,:,1,:])[mask]+\
368
+ (np.transpose(np.array(ds['MBET']))[:,:,1,:])[mask]+\
369
+ (np.transpose(np.array(ds['MBSL']))[:,:,1,:])[mask]
370
+
371
+ values3=(np.transpose(np.array(ds[var]))[:,:,2,:])[mask]+\
372
+ (np.transpose(np.array(ds['MBET']))[:,:,2,:])[mask]+\
373
+ (np.transpose(np.array(ds['MBSL']))[:,:,2,:])[mask]
374
+
375
+ for j in range(0,np.shape(values1)[-1]):
376
+
377
+ # print(j,np.shape(values1))
378
+ values1[:,j]=values1[:,j]*covers[:,0]
379
+ values2[:,j]=values2[:,j]*covers[:,1]
380
+ values3[:,j]=values3[:,j]*covers[:,2]
381
+ values=np.append(values,(values1+values2+values3),axis=1)
382
+ else:
383
+ fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
384
+ if 'IRM_grid' in fn[0]:
385
+ fn = glob.glob(self.directory+"*MAR_grid*"+str(year)+"**nc*")
386
+ ds=xr.open_dataset(fn[0])
387
+ values1=(np.transpose(np.array(ds[var]))[:,:,0,:indice_fin])[mask] +\
388
+ (np.transpose(np.array(ds['MBET']))[:,:,0,:indice_fin])[mask]+\
389
+ (np.transpose(np.array(ds['MBSL']))[:,:,0,:indice_fin])[mask]
390
+ values2=(np.transpose(np.array(ds[var]))[:,:,1,:indice_fin])[mask]+\
391
+ (np.transpose(np.array(ds['MBET']))[:,:,1,:indice_fin])[mask]+\
392
+ (np.transpose(np.array(ds['MBSL']))[:,:,1,:indice_fin])[mask]
393
+ values3=(np.transpose(np.array(ds[var]))[:,:,2,:indice_fin])[mask]+\
394
+ (np.transpose(np.array(ds['MBET']))[:,:,2,:indice_fin])[mask]+\
395
+ (np.transpose(np.array(ds['MBSL']))[:,:,2,:indice_fin])[mask]
396
+
397
+ for j in range(0,np.shape(values1)[-1]):
398
+ # print(j,np.shape(values1))
399
+ values1[:,j]=values1[:,j]*covers[:,0]
400
+ values2[:,j]=values2[:,j]*covers[:,1]
401
+ values3[:,j]=values3[:,j]*covers[:,2]
402
+ values=np.append(values,(values1+values2+values3),axis=1)
403
+ else:
404
+ "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
405
+ "**************Attention***************"
406
+ "Definition evapotranspiration dans MAR"
407
+ "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
408
+
409
+ values1=np.transpose(np.array(ds[var]))[:,:,0,indice_debut:][mask]
410
+ values2=np.transpose(np.array(ds[var]))[:,:,1,indice_debut:][mask]
411
+ values3=np.transpose(np.array(ds[var]))[:,:,2,indice_debut:][mask]
412
+
413
+ for j in range(np.shape(values1)[-1]):
414
+
415
+ values1[:,j]=values1[:,j]*covers[:,0]
416
+ values2[:,j]=values2[:,j]*covers[:,1]
417
+ values3[:,j]=values3[:,j]*covers[:,2]
418
+ values=(values1+values2+values3)
419
+
420
+ print(self.var,values.shape)
421
+ for y in range(year+1,self.date_fin.year+1):
422
+ print(y)
423
+ if y<self.date_fin.year:
424
+ fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
425
+ if 'IRM_grid' in fn[0]:
426
+ fn = glob.glob(self.directory+"*MAR_grid*"+str(year)+"**nc*")
427
+ ds=xr.open_dataset(fn[0])
428
+ values1=np.transpose(np.array(ds[var]))[:,:,0,:][mask]
429
+ values2=np.transpose(np.array(ds[var]))[:,:,1,:][mask]
430
+ values3=np.transpose(np.array(ds[var]))[:,:,2,:][mask]
431
+ for j in range(np.shape(values1)[-1]):
432
+
433
+ # print(j,np.shape(values1))
434
+ # print(values1.shape,covers.shape)
435
+ values1[:,j]=values1[:,j]*covers[:,0]
436
+ values2[:,j]=values2[:,j]*covers[:,1]
437
+ values3[:,j]=values3[:,j]*covers[:,2]
438
+ values=np.append(values,(values1+values2+values3),axis=1)
439
+ else:
440
+ fn = glob.glob(self.directory+"*"+str(y)+"**nc*")
441
+ if 'IRM_grid' in fn[0]:
442
+ fn = glob.glob(self.directory+"*MAR_grid*"+str(year)+"**nc*")
443
+ ds=xr.open_dataset(fn[0])
444
+ values1=np.transpose(np.array(ds[var]))[:,:,0,:indice_fin][mask]
445
+ values2=np.transpose(np.array(ds[var]))[:,:,1,:indice_fin][mask]
446
+ values3=np.transpose(np.array(ds[var]))[:,:,2,:indice_fin][mask]
447
+ print(np.shape(values1)[-1])
448
+ for j in range(np.shape(values1)[-1]):
449
+ # print(j,np.shape(values1))
450
+ values1[:,j]=values1[:,j]*covers[:,0]
451
+ values2[:,j]=values2[:,j]*covers[:,1]
452
+ values3[:,j]=values3[:,j]*covers[:,2]
453
+ values=np.append(values,(values1+values2+values3),axis=1)
454
+ print(self.var,values.shape)
455
+
456
+
457
+ else:
458
+ #print(mask)
459
+ values=np.transpose(np.array(ds[var]))[:,:,indice_debut:][mask]
460
+ print(self.var,values.shape)
461
+ for y in range(year+1,self.date_fin.year+1):
462
+ print(y)
463
+ if y<self.date_fin.year:
464
+ values=np.append(values,
465
+ np.transpose(np.array(ds[var]))[:,:,:][mask],
466
+ axis=1)
467
+ else:
468
+ values=np.append(values,
469
+ np.transpose(np.array(ds[var]))[:,:,:indice_fin][mask],
470
+ axis=1)
471
+ print(self.var,values.shape)
472
+
473
+
474
+ return(values)
475
+
476
+ "Definition of the mar time-step"
477
+ "A modifier par la suite si le pas temporel du MAR est inférieur à l'heure"
478
+
479
+ def MAR_unbiasing(self):
480
+ th_drizzle=.1
481
+ print("letsgo")
482
+
483
+ "**********************************************************"
484
+ "Lecture des données sur la période historiqe de simulation"
485
+ "**********************************************************"
486
+
487
+ historical_matrix_unbias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
488
+ date_debut=datetime.datetime(1981,1,1,0),
489
+ date_fin=datetime.datetime(2010,12,31,23),
490
+ directory=self.directory_unbiasing, var=self.var_unb).vec_data
491
+ date_debutu=datetime.datetime(self.syu,1,1,0)
492
+ date_finu=datetime.datetime(self.eyu,12,31,23)
493
+
494
+
495
+
496
+ if self.var_unb=='PRECIP_QUANTITY':
497
+
498
+ historical_matrix_bias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
499
+ date_debut=datetime.datetime(1981,1,1,0),
500
+ date_fin=datetime.datetime(2010,12,31,23),
501
+ directory=self.directory, var='MBRR',mod_ydays=self.mod_ydays).vec_data+\
502
+ MAR_input_data(xsummits=self.xsummits,ysummits= self.ysummits,
503
+ date_debut=datetime.datetime(1981,1,1,0),
504
+ date_fin=datetime.datetime(2010,12,31,23),
505
+ directory=self.directory, var='MBSF',
506
+ mod_ydays=self.mod_ydays).vec_data
507
+ biased_data=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
508
+ date_debut=self.date_debut,
509
+ date_fin=self.date_fin,
510
+ directory=self.directory, var='MBRR',mod_ydays=self.mod_ydays).vec_data+\
511
+ MAR_input_data(xsummits=self.xsummits,ysummits= self.ysummits,
512
+ date_debut=self.date_debut,
513
+ date_fin=self.date_fin,
514
+ directory= self.directory,var= 'MBSF',mod_ydays=self.mod_ydays).vec_data
515
+
516
+
517
+
518
+ print(self.date_debut,self.date_fin)
519
+ print('biased data shape',biased_data.shape)
520
+
521
+
522
+ FutUnb=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
523
+ date_debut=date_debutu,
524
+ date_fin=date_finu,
525
+ directory=self.directory, var='MBRR',
526
+ mod_ydays=self.mod_ydays).vec_data+\
527
+ MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
528
+ date_debut=date_debutu,
529
+ date_fin=date_finu,
530
+ directory=self.directory, var='MBSF',
531
+ mod_ydays=self.mod_ydays).vec_data
532
+
533
+
534
+ elif self.var_unb=='E':
535
+
536
+ historical_matrix_bias=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
537
+ date_debut=datetime.datetime(1981,1,1,0),
538
+ date_fin=datetime.datetime(2010,12,31,23),
539
+ directory=self.directory, var='MBEP',
540
+ mod_ydays=self.mod_ydays).vec_data
541
+ biased_data=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
542
+ date_debut=self.date_debut,
543
+ date_fin=self.date_fin,
544
+ directory=self.directory, var='MBEP',
545
+ mod_ydays=self.mod_ydays).vec_data
546
+
547
+ FutUnb=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
548
+ date_debut=date_debutu,
549
+ date_fin=date_finu,
550
+ directory=self.directory, var='MBEP',
551
+ mod_ydays=self.mod_ydays).vec_data
552
+
553
+ "****************************************************"
554
+ "Calcul des quantiles historiques simulés et observés"
555
+ "****************************************************"
556
+
557
+ quant_mat=np.zeros([historical_matrix_unbias.shape[0],101])
558
+ quant_mat_bias=np.zeros([historical_matrix_bias.shape[0],101])
559
+ quant_coeffs=np.zeros([historical_matrix_unbias.shape[0],101])
560
+ historical_matrix_unbias[historical_matrix_unbias<th_drizzle]=0
561
+
562
+ if self.find_timestep()[1]=='hours':
563
+ tsd=24
564
+ historical_matrix_bias_d=np.zeros([historical_matrix_bias.shape[0],
565
+ int(historical_matrix_bias.shape[1]/tsd)])
566
+ for i in range(historical_matrix_bias_d.shape[0]):
567
+ for d in range(historical_matrix_bias_d.shape[1]):
568
+ historical_matrix_bias_d[i,d]=np.sum(historical_matrix_bias[i,d*tsd:(d+1)*tsd])
569
+ historical_matrix_bias_d[historical_matrix_bias_d<th_drizzle]=0
570
+ # print(historical_matrix_unbias.shape,historical_matrix_bias_d.shape)
571
+
572
+ for i in range(historical_matrix_unbias.shape[0]):
573
+
574
+ quant_mat_bias[i,:]=np.quantile(historical_matrix_bias_d[i,:]\
575
+ [historical_matrix_bias_d[i,:]>th_drizzle],np.arange(0,1.01,0.01))
576
+ quant_mat[i,:]=np.quantile(historical_matrix_unbias[i,:][historical_matrix_unbias[i,:]>th_drizzle],np.arange(0,1.01,0.01))
577
+ for j in range(quant_mat.shape[1]):quant_coeffs[i,j]=quant_mat[i,j]/quant_mat_bias[i,j]
578
+
579
+ # biased_data_var=np.array(self.vec_data)
580
+
581
+ "******************************************"
582
+ "****Débiaisage des données daily**********"
583
+ "******************************************"
584
+
585
+ "Future quantiles to assess value location"
586
+
587
+
588
+ if self.find_timestep()[1]=='hours':
589
+ tsd=24
590
+ FutUnb_d=np.zeros([historical_matrix_bias.shape[0],
591
+ int(FutUnb.shape[1]/tsd)])
592
+ for i in range(historical_matrix_bias_d.shape[0]):
593
+ for d in range(FutUnb_d.shape[1]):
594
+ FutUnb_d[i,d]=np.sum(FutUnb[i,d*tsd:(d+1)*tsd])
595
+
596
+
597
+ quant_mat_fut=np.zeros_like(quant_mat)
598
+ for i in range(FutUnb.shape[0]):
599
+ quant_mat_fut[i,:]=np.quantile(FutUnb_d[i,:]\
600
+ [FutUnb_d[i,:]>th_drizzle],np.arange(0,1.01,0.01))
601
+
602
+
603
+
604
+ print(self.find_timestep()[1])
605
+ if self.find_timestep()[1]=='hours':
606
+ biased_data_d=np.zeros([biased_data.shape[0],
607
+ int(biased_data.shape[1]/24)+1])
608
+ for i in range(biased_data.shape[0]):
609
+ for d in range(biased_data_d.shape[1]):
610
+ biased_data_d[i,d]=np.sum(biased_data[i,d*tsd:(d+1)*tsd])
611
+
612
+ for i in range(self.vec_data.shape[0]):
613
+ for j in range(biased_data_d.shape[1]):
614
+ if biased_data_d[i,j]>th_drizzle:
615
+ for k in range(quant_mat.shape[1]):
616
+ if k==quant_mat.shape[1]-1:
617
+ if biased_data_d[i,j]>=quant_mat_fut[i,k]:
618
+ biased_data_d[i,j]=biased_data[i,j]*quant_coeffs[i,k]
619
+ elif k<quant_mat.shape[1]-1:
620
+ if biased_data_d[i,j]>=quant_mat_fut[i,k] and biased_data_d[i,j]<=quant_mat_fut[i,k+1]:
621
+ biased_data_d[i,j]=(quant_coeffs[i,k]*(biased_data_d[i,j]-quant_mat_fut[i,k])/\
622
+ (quant_mat_fut[i,k+1]-quant_mat_fut[i,k])+quant_coeffs[i,k+1]*(quant_mat_fut[i,k+1]-biased_data_d[i,j])/\
623
+ (quant_mat_fut[i,k+1]-quant_mat_fut[i,k]))*biased_data_d[i,j]
624
+ break
625
+
626
+ else:
627
+ biased_data_d[i,j]=0
628
+
629
+ if pd.isna(biased_data_d[i,j]):
630
+ biased_data_d[i,j]=0.
631
+ Unbiased_data_d=np.array(biased_data_d)
632
+
633
+ "*****************************************"
634
+ "**Redistribution au pas de temps horaire**"
635
+ "*****************************************"
636
+
637
+ ydays=biased_data_d.shape[1]
638
+ Unbiased_data=np.zeros_like(biased_data)
639
+
640
+ print ("redistributing on the daily time-step")
641
+
642
+ for i in range(self.vec_data.shape[0]):
643
+ # print(i)
644
+ d=0
645
+ while d<ydays:
646
+ # if i==67:print(d,Unbiased_data_d[i,d])
647
+ # if d%100==0:print(d)
648
+ if Unbiased_data_d[i,d]<=0.1:d+=1
649
+
650
+ else:
651
+
652
+ d1=d
653
+ ndays=0
654
+
655
+ while d1<ydays and Unbiased_data_d[i,d1]>.1 :
656
+ d1+=1;ndays+=1
657
+
658
+
659
+ precip_sum_d=np.sum(Unbiased_data_d[i,d:d+ndays])
660
+ biased_sum=np.sum(biased_data\
661
+ [i,d*tsd:(d+ndays)*tsd])
662
+ biased_hourly=(biased_data)\
663
+ [i,d*tsd:(d+ndays)*tsd]
664
+
665
+ weights=biased_hourly/biased_sum
666
+
667
+ Unbiased_data[i,d*tsd:(d+ndays)*tsd]=\
668
+ precip_sum_d*weights
669
+
670
+ # print(d,PRECIP_IRM[i,j,d])
671
+
672
+ d+=ndays
673
+
674
+ if self.var=='MBRO3' or self.var=='MBRR' or self.var=='MBSF':
675
+ # biased_data_var=np.array(self.vec_data)
676
+ biased_data_var=MAR_input_data(xsummits=self.xsummits, ysummits=self.ysummits,
677
+ date_debut=self.date_debut,
678
+ date_fin=self.date_fin,
679
+ directory=self.directory, var=self.var,
680
+ mod_ydays=self.mod_ydays).vec_data
681
+
682
+ print("biased data var shape ",biased_data_var.shape)
683
+ print('unbiased data shape' ,Unbiased_data.shape)
684
+ propor2var=(biased_data_var/biased_data)
685
+ Unbiased_data=Unbiased_data*propor2var
686
+
687
+ "**** 2 méthodes******"
688
+ Unbiased_data[pd.isna(Unbiased_data)]=0.
689
+ return(Unbiased_data)
690
+
691
+
692
+ def find_timestep(self):
693
+ """
694
+ Routine that finds the MAR time step, in hours
695
+ """
696
+ year=self.date_debut.year
697
+ fn = glob.glob(self.directory+"*"+str(year)+"**nc*")
698
+ ds=xr.open_dataset(fn[0])
699
+ vec_out=['','']
700
+ MAR_time_step=np.transpose(np.array(ds['MBRR'])).shape[2]
701
+ if self.mod_ydays==1:
702
+ if ct.isbis(year)==1:ndays=366
703
+ else:ndays=365
704
+ else:
705
+ ndays=365
706
+
707
+ MAR_time_step=ndays/MAR_time_step
708
+ MAR_time_step_hours=24*MAR_time_step
709
+ if MAR_time_step_hours<1:vec_out[1]='minutes';vec_out[0]=str(int(MAR_time_step_hours*60))
710
+ else:vec_out[1]='hours';vec_out[0]=str(int(MAR_time_step_hours))
711
+ # print(vec_out)
712
+ return(vec_out)
713
+
714
+ def make_time(self):
715
+ """
716
+ Formats a matrix with the date of each time step as (hour, day, month, year);
717
+ to be reworked if time steps shorter than one hour are used
718
+ """
719
+ time_step=self.find_timestep()
720
+ if time_step[1]=='hours':
721
+ time_step=int(time_step[0])
722
+ date=np.array([self.date_debut])
723
+ end_month=[31,28,31,30,31,30,31,31,30,31,30,31]
724
+ i=0
725
+ datec=np.array(self.date_debut)
726
+ # print(datec,date_fin)
727
+ while ((self.date_fin[0] != datec[0]) or (self.date_fin[1] != datec[1]) \
728
+ or (self.date_fin[2] != datec[2]) or (self.date_fin[3] != datec[3])):
729
+ #print(datec)
730
+ if i!=0:datec=date[i,:]
731
+ #print(i)
732
+ new_hour=datec[0]+time_step
733
+ #print(new_hour)
734
+ if new_hour>=24.:new_day=datec[1]+1;new_hour=new_hour-24
735
+ else:new_day=datec[1]
736
+ if datec[2]==2.:
737
+ if self.mod_ydays==1:
738
+ if ct.isbis(datec[3]):end_month[1]=29
739
+ else:end_month[1]=28
740
+ if new_day>end_month[int(datec[2])-1]:
741
+ new_month=datec[2]+1
742
+ new_day=1
743
+ if new_month>12:
744
+ new_year=datec[3]+1
745
+ new_month=1
746
+ else:new_month=datec[2];new_year=datec[3]
747
+ new_vec=np.array([[new_hour,new_day,new_month,new_year]])
748
+ date=np.append(date,new_vec,axis=0)
749
+ datec=np.array([new_hour,new_day,new_month,new_year])
750
+ i=i+1
751
+ date=np.append(date,np.array([self.date_fin]),axis=0)
752
+
753
+ return(date)
754
+
755
+ "Calcul des sommets des pixels MAR"
756
+
757
+ def MAR_summits(self):
758
+ """
759
+ Uses the longitudes and latitudes of the MAR pixel centres
760
+ to compute the Lambert 72 coordinates of the pixel corners.
761
+ Outputs: two matrices containing, for each pixel, the coordinates of its 4 corners
762
+
763
+ """
764
+ summits_lon=np.zeros([self.lons.shape[0],self.lons.shape[1],4])
765
+ summits_lat=np.zeros([self.lons.shape[0],self.lons.shape[1],4])
766
+ summits_x=np.zeros([self.lons.shape[0],self.lons.shape[1],4])
767
+ summits_y=np.zeros([self.lons.shape[0],self.lons.shape[1],4])
768
+ for i in range(1,self.lons.shape[0]-1):
769
+ for j in range(1,self.lons.shape[1]-1):
770
+ summits_lon[i,j,0]=(self.lons[i,j]+self.lons[i-1,j]+self.lons[i-1,j-1]+self.lons[i,j-1])/4
771
+ summits_lon[i,j,1]=(self.lons[i,j]+self.lons[i-1,j]+self.lons[i-1,j+1]+self.lons[i,j+1])/4
772
+ summits_lon[i,j,2]=(self.lons[i,j]+self.lons[i,j+1]+self.lons[i+1,j]+self.lons[i+1,j+1])/4
773
+ summits_lon[i,j,3]=(self.lons[i,j]+self.lons[i,j-1]+self.lons[i+1,j-1]+self.lons[i+1,j])/4
774
+ summits_lat[i,j,0]=(self.lats[i,j]+self.lats[i-1,j]+self.lats[i-1,j-1]+self.lats[i,j-1])/4
775
+ summits_lat[i,j,1]=(self.lats[i,j]+self.lats[i-1,j]+self.lats[i-1,j+1]+self.lats[i,j+1])/4
776
+ summits_lat[i,j,2]=(self.lats[i,j]+self.lats[i,j+1]+self.lats[i+1,j]+self.lats[i+1,j+1])/4
777
+ summits_lat[i,j,3]=(self.lats[i,j]+self.lats[i,j-1]+self.lats[i+1,j-1]+self.lats[i+1,j])/4
778
+ summits_x,summits_y=self.Lb72(summits_lon,summits_lat)
779
+ return(summits_x,summits_y)
780
+
781
+
782
+ "Sortie shapefile"
783
+
784
+ def MAR_shapefile(self,name,dirout1):
785
+ """
786
+ This routine writes the MAR pixels as a shapefile with the given name
787
+ in the GRID/ subfolder
788
+
789
+ """
790
+ MASK=self.mask_rectangles()
791
+ sommets_x,sommets_y=self.MAR_summits()
792
+ xs=np.array([sommets_x[:,:,0][MASK]])
793
+ ys=np.array([sommets_y[:,:,0][MASK]])
794
+ for i in range(1,4):
795
+ xs=np.append(xs,np.array([sommets_x[:,:,i][MASK]]),axis=0)
796
+ ys=np.append(ys,np.array([sommets_y[:,:,i][MASK]]),axis=0)
797
+ xs=np.transpose(xs);ys=np.transpose(ys)
798
+ newdata = gpd.GeoDataFrame()
799
+ newdata['geometry'] = None
800
+ for i in range(0,xs.shape[0]):
801
+ coordinates=[(xs[i,0],ys[i,0]),(xs[i,1],ys[i,1]),
802
+ (xs[i,2],ys[i,2]),(xs[i,3],ys[i,3])]
803
+ poly = Polygon(coordinates)
804
+ newdata.loc[i, 'geometry'] = poly
805
+ newdata.loc[i, 'polyID'] = str(i+1)
806
+ newdata.crs=from_epsg(31370)
807
+ #(newdata.crs).to_byte(byteorder='little'))
808
+ if os.path.exists(dirout1)==False:os.mkdir(dirout1)
809
+ if os.path.exists(dirout1+'GRID/')==False:os.mkdir(dirout1+'GRID/')
810
+ outfp=dirout1+'GRID/'+name
811
+ newdata.to_file(outfp)
812
+
813
+ "sortie fichiers textes"
814
+
815
+
816
+ def MAR_TextOutputs(self,dirout1):
817
+ """
818
+ Text-format output:
819
+ one file per polygon,
820
+ file name = polygon ID + '.rain'
821
+ """
822
+ time_step=self.find_timestep()
823
+ if not self.UnborNot:vec_data=self.vec_data
824
+ else:vec_data=self.MAR_unbiasing()
825
+
826
+ date_debut=self.date_debut
827
+
828
+ if os.path.exists(dirout1+'DATA/')==False:os.mkdir(dirout1+'DATA/')
829
+ date_debut=self.date_debut
830
+ if time_step[1]=='hours': MAR_timestep=datetime.timedelta(hours=int(time_step[0]))
831
+ elif time_step[1]=='minutes': MAR_timestep=datetime.timedelta(minutes=int(time_step[0]))
832
+
833
+ for i in range(0,vec_data.shape[0]):
834
+ filename=str(i+1)+'.rain'
835
+ f=open(dirout1+"DATA/"+filename,'w')
836
+ date_move=date_debut
837
+ for j in range(0,vec_data.shape[1]):
838
+
839
+ if j!=0:date_move=date_move+MAR_timestep
840
+ lines=[str(date_move.day),str(date_move.month),str(date_move.year),
841
+ str(date_move.hour),str(date_move.minute),str(date_move.second),
842
+ "{:.3f}".format(vec_data[i,j])]
843
+ line=""
844
+ for k in range(0,np.size(lines)):
845
+ line=line+lines[k]+"\t"
846
+
847
+ f.write(line)
848
+ f.write('\n')
849
+ f.close()
850
+ def MAR_BinaryOutputs(self,dirout1):
851
+ """
852
+ Binary-format output:
853
+ one file per polygon,
854
+ file name = polygon ID + '.rain'
855
+ """
856
+ time_step=self.find_timestep()
857
+ if not self.UnborNot:vec_data=self.vec_data
858
+ else:vec_data=self.MAR_unbiasing()
859
+ date_debut=self.date_debut
860
+ if os.path.exists(dirout1)==False:os.mkdir(dirout1)
861
+ if os.path.exists(dirout1+'DATA/')==False:os.mkdir(dirout1+'DATA/')
862
+ date_debut=self.date_debut
863
+ if time_step[1]=='hours': MAR_timestep=datetime.timedelta(hours=int(time_step[0]))
864
+ elif time_step[1]=='minutes': MAR_timestep=datetime.timedelta(minutes=int(time_step[0]))
865
+
866
+ for i in range(0,vec_data.shape[0]):
867
+ filename=str(i+1)+'.rain'
868
+ f=open(dirout1+"DATA/"+filename,'wb')
869
+
870
+ date_move=date_debut
871
+
872
+ for j in range(0,vec_data.shape[1]):
873
+
874
+ if j!=0:date_move=date_move+MAR_timestep
875
+ dayb=date_move.day.to_bytes(1,byteorder='little',signed=False)
876
+ monthb=date_move.month.to_bytes(1,byteorder='little',signed=False)
877
+ yearb=date_move.year.to_bytes(2,byteorder='little',signed=False)
878
+ hourb=date_move.hour.to_bytes(1,byteorder='little',signed=False)
879
+ minuteb=date_move.minute.to_bytes(1,byteorder='little',signed=False)
880
+ secondb=date_move.second.to_bytes(1,byteorder='little',signed=False)
881
+ valb=bytearray(struct.pack("f", round(vec_data[i,j],3)))# .to_bytes(1,byteorder='little',signed=False)
882
+ f.write(dayb);f.write(monthb);f.write(yearb);f.write(hourb)
883
+ f.write(minuteb);f.write(secondb);f.write(valb)
884
+ # print(struct.unpack('f',valb),date_move.day)
885
+
886
+ "Test de l'objet"
887
+
888
+ if __name__ == "__main__":
889
+ # dir_ds="/srv1_tmp1/fettweis/EU-MAR-7.5km/histo/EUb-ERA/" #dossier avec sortie MAR au format Netcdf
890
+ # dir_ds="/srv1_tmp1/fettweis/EUh-MPI-5km/output-5km-ssp370/"
891
+ # dir_ds='/climato_tmp1/fettweis/MAR/out/EUb/output-hourly/'
892
+ dir_ds='/phypc11_tmp3/MARv3.14/MARv3.14-EUk-NorESM2-MM-5km-ssp585/'
893
+ dir_hist='/phypc11_tmp3/MARv3.14/MARv3.14-EUk-NorESM2-MM-5km-ssp585/'
894
+
895
+ dir_stock='/phypc11_tmp3/MARv3.14/'
896
+ dir_ins=['MARv3.14-EUh-MPI-ESM1-2-HR-5km-',
897
+ 'MARv3.14-EUi-MIROC6-5km-',
898
+ 'MARv3.14-EUm-EC-Earth3-Veg-5km-',
899
+ 'MARv3.14-EUk-NorESM2-MM-5km-'
900
+ ]
901
+
902
+ mod_racs=['MPI','MIR','EC3','NOR']
903
+ scens=['ssp126','ssp245','ssp370','ssp585']
904
+ # dir_ds="/srv7_tmp1/jbrajkovic/These/ERA5/"
905
+ dirout="/srv7_tmp1/jbrajkovic/These/forWOLF/evapo"#-MPI_1981-2010/" #dossier outputs
906
+ filenameshp="grid.shp" #nom du shapefile en sortie
907
+
908
+
909
+ "dates entre lesquelles sélectionner les données (Heures,jour,mois,annee"
910
+ "code à retravailler si simulations futures avec pas de temps inférieur à l'heure"
911
+
912
+ # date_debut1=datetime.datetime(2016,1,1,5)
913
+ # date_fin1=datetime.datetime(2100,12,31,23)
914
+
915
+ # "Définition d'un rectangle"
916
+
917
+ # xs=np.array([200000,200000,
918
+ # 272000,272000.])
919
+ # ys=np.array([63000,152000,
920
+ # 152000,63000])
921
+
922
+ # for mod in range(4):
923
+ # for scen in range(4):
924
+
925
+ # dirin=dir_stock+dir_ins[mod]+scens[scen]+'/'
926
+ # print(dirin)
927
+ # objet_MAR=MAR_input_data(xsummits=xs,ysummits=ys,
928
+ # date_debut=date_debut1,
929
+ # date_fin=date_fin1,
930
+ # directory=dirin,
931
+ # directory_hist_sim=dir_hist,
932
+ # var='MBEP',
933
+ # var_unb='E',
934
+ # UnborNot=1,
935
+ # syu=date_debut1.year,
936
+ # eyu=date_fin1.year)
937
+
938
+
939
+ # print('ok')
940
+ # dirout1=dirout+'-'+mod_racs[mod]+'_'+scens[scen]+'_'+str(date_debut1.year)+'-'+\
941
+ # str(date_fin1.year)+'/'
942
+
943
+ # objet_MAR.MAR_shapefile(filenameshp,dirout1)
944
+ # objet_MAR.MAR_BinaryOutputs(dirout1)
945
+
946
+
947
+
948
+
949
+ xs=np.array([200000,200000,
950
+ 210000,210000.])
951
+ ys=np.array([63000,73000,
952
+ 73000,63000])
953
+ dirin=dir_hist
954
+ date_debut1=datetime.datetime(2016,1,1,0)
955
+ date_fin1=datetime.datetime(2017,12,31,23)
956
+
957
+ objet_MAR=MAR_input_data(xsummits=xs,ysummits=ys,
958
+ date_debut=date_debut1,
959
+ date_fin=date_fin1,
960
+ directory=dirin,
961
+ directory_hist_sim=dir_hist,
962
+ var='MBRO3',
963
+ var_unb='PRECIP_QUANTITY',
964
+ UnborNot=1,
965
+ syu=1982,
966
+ eyu=1983,
967
+ mod_ydays=0)
968
+
969
+ # "Tests outputs"
970
+ cmap=ct.IPCC_cmap()
971
+ objet_MAR.plot_mask()
972
+
973
+ matrice1=objet_MAR.vec_data
974
+ matrice=objet_MAR.MAR_unbiasing()
975
+ matrice=matrice-matrice1
976
+
977
+ MBRO3_mask=np.sum(matrice[:,:],axis=1)
978
+
979
+ maxs=np.array([abs(np.min(MBRO3_mask)),np.max(MBRO3_mask)])
980
+ maxi=np.max(maxs)
981
+
982
+
983
+ bounds=np.arange(-maxi,maxi+20,20)
984
+ norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
985
+
986
+
987
+ MSK=objet_MAR.mask_rectangles()
988
+ fig=plt.figure(figsize=(6,6))
989
+ ax=plt.subplot()
990
+ m=ct.map_belgium_zoom(ax, objet_MAR.lons, objet_MAR.lats)
991
+ lons_w=objet_MAR.lons[MSK==True];lats_w=objet_MAR.lats[MSK]
992
+ MBRO3=np.array(objet_MAR.lons)
993
+ for k in range(0,np.size(MBRO3_mask)):
994
+ for i in range(0,MBRO3.shape[0]):
995
+ for j in range(0,MBRO3.shape[1]):
996
+ if lons_w[k]==objet_MAR.lons[i,j] and lats_w[k]==objet_MAR.lats[i,j]:
997
+ MBRO3[i,j]=MBRO3_mask[k]
998
+ vmax=np.max(MBRO3[pd.isna(MBRO3)==False])
999
+ MBRO3[MSK==False]=float("nan")
1000
+ x,y=m(objet_MAR.lons,objet_MAR.lats)
1001
+ mapa=m.pcolormesh(x,y,MBRO3,norm=norm,cmap=cmap)
1002
+ cbar=m.colorbar()
1003
+ plt.savefig('fig.png',bbox_inches='tight')
1004
+
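
For reference, a minimal sketch of a reader for the per-pixel .rain files written by MAR_BinaryOutputs above. The 11-byte record layout is inferred from the to_bytes/struct.pack calls in that method (day, month, year, hour, minute, second, then a float32 value); the integer fields are little-endian and the float, written with native byte order, is assumed little-endian here:

    import struct

    def read_rain_records(path):
        """Yield (day, month, year, hour, minute, second, value) tuples from a .rain file."""
        with open(path, 'rb') as f:
            while True:
                rec = f.read(11)                     # 1+1+2+1+1+1 date bytes + 4-byte float
                if len(rec) < 11:
                    break
                day, month, year, hour, minute, second = struct.unpack('<BBHBBB', rec[:7])
                (value,) = struct.unpack('<f', rec[7:])
                yield day, month, year, hour, minute, second, value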