wolfhece 2.0.4__py3-none-any.whl → 2.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (42) hide show
  1. wolfhece/GraphNotebook.py +0 -1
  2. wolfhece/GraphProfile.py +5 -14
  3. wolfhece/Lidar2002.py +0 -1
  4. wolfhece/PyCrosssections.py +21 -26
  5. wolfhece/PyDraw.py +219 -58
  6. wolfhece/PyGui.py +6 -3
  7. wolfhece/PyPalette.py +2 -2
  8. wolfhece/PyParams.py +48 -48
  9. wolfhece/PyVertex.py +1 -1
  10. wolfhece/PyVertexvectors.py +40 -4
  11. wolfhece/Results2DGPU.py +7 -6
  12. wolfhece/apps/WolfPython.png +0 -0
  13. wolfhece/bernoulli/NetworkOpenGL.py +1 -1
  14. wolfhece/cli.py +7 -0
  15. wolfhece/flow_SPWMI.py +1 -1
  16. wolfhece/friction_law.py +6 -6
  17. wolfhece/gpuview.py +1 -1
  18. wolfhece/hydrology/PyWatershed.py +9 -10
  19. wolfhece/lagrangian/emitter.py +1 -1
  20. wolfhece/lagrangian/example_domain.py +1 -1
  21. wolfhece/lagrangian/velocity_field.py +4 -4
  22. wolfhece/libs/WolfDll.dll +0 -0
  23. wolfhece/libs/WolfDll_CD.dll +0 -0
  24. wolfhece/libs/WolfOGL.c +28187 -28187
  25. wolfhece/mar/Interface_MAR_WOLF_objet.py +1004 -0
  26. wolfhece/mar/commontools.py +1289 -59
  27. wolfhece/mesh2d/bc_manager.py +89 -13
  28. wolfhece/mesh2d/cst_2D_boundary_conditions.py +12 -0
  29. wolfhece/mesh2d/wolf2dprev.py +1 -2
  30. wolfhece/pydike.py +1 -1
  31. wolfhece/pyshields.py +43 -43
  32. wolfhece/pywalous.py +2 -2
  33. wolfhece/scenario/config_manager.py +3 -1
  34. wolfhece/ui/wolf_multiselection_collapsiblepane.py +10 -10
  35. wolfhece/wolf_array.py +1298 -418
  36. wolfhece/wolf_texture.py +1 -1
  37. wolfhece/wolfresults_2D.py +124 -19
  38. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/METADATA +5 -1
  39. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/RECORD +42 -39
  40. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/WHEEL +0 -0
  41. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/entry_points.txt +0 -0
  42. {wolfhece-2.0.4.dist-info → wolfhece-2.0.6.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,6 @@
1
- #!/usr/bin/env python3
1
+ #!/usr/bin/env python3
2
2
  # -*- coding: utf-8 -*-
3
+
3
4
  """
4
5
  Created on Mon Dec 5 09:03:34 2022
5
6
 
@@ -10,13 +11,23 @@ import numpy as np
10
11
  import matplotlib.pyplot as plt
11
12
  import commontools as ct
12
13
  from mpl_toolkits.basemap import Basemap
13
- import netCDF4 as nc
14
14
  import xarray as xr
15
15
  import matplotlib as mpl
16
16
  import pandas as pd
17
-
18
-
19
-
17
+ import glob as glob
18
+ import matplotlib.cm as cm
19
+ #import fiona
20
+ #import geopandas as gpd
21
+ import rasterio
22
+ from PIL import Image
23
+ import pyproj
24
+ from matplotlib.colors import ListedColormap, LinearSegmentedColormap
25
+ import matplotlib.colors
26
+ import h5py
27
+ from pyproj import Proj, transform
28
+ import matplotlib.patheffects as pe
29
+ import os
30
+ import csv
20
31
  def openfile(fileloc,col):
21
32
  f=open(fileloc,mode='r')
22
33
  V=[]
@@ -90,23 +101,145 @@ def seasonalmeans(fileloc,col,start_year,end_year,mod,season):
90
101
  # T[ind]=np.mean(var[MASK][0:end_year[mod]-1])
91
102
  return(T)
92
103
 
93
- def slidingmeans(TS,interval):
104
+
105
+ def seasonalsums(fileloc,col,start_year,end_year,mod,season):
106
+ years=openfile(fileloc,0)
107
+ var=openfile(fileloc,col)
108
+ T=[]
109
+ # T=np.zeros(end_year-start_year+1)
110
+
111
+
112
+ for y in range(start_year,end_year+1):
113
+ beg_summer=np.array([173,173,170]);end_summer=np.array([264,264,259])
114
+ beg_falls=end_summer+1;end_falls=np.array([355,355,359])
115
+ beg_winter=end_falls+1;end_winter=np.array([80,80,69])
116
+ beg_spring=end_winter+1;end_spring=beg_summer-1
117
+ end_year=np.array([365,365,360])
118
+
119
+ if(isbis(y)==1 and mod==0):
120
+ beg_summer=beg_summer+1;end_summer=end_summer+1
121
+ beg_falls=beg_falls+1;end_falls=end_falls+1
122
+ beg_winter=beg_winter+1;end_winter=end_winter+1
123
+ beg_spring=beg_spring+1;end_spring=beg_summer-1
124
+ end_year=end_year+1
125
+ # ind=y-start_year
126
+ if season=="Summer":
127
+ MASK=years==y
128
+ T=np.append(T,np.sum(var[MASK][beg_summer[mod]-1:end_summer[mod]-1]))
129
+ # T[ind]=np.mean(var[MASK][beg_summer[mod]-1:end_summer[mod]-1])
130
+ elif season=="Falls":
131
+ MASK=years==y
132
+ T=np.append(T,np.sum(var[MASK][beg_falls[mod]-1:end_falls[mod]-1]))
133
+ # T[ind]=np.mean(var[MASK][beg_falls[mod]-1:end_falls[mod]-1])
134
+ elif season=="Winter":
135
+ MASK1=years==y;MASK2=years==y+1
136
+ V1=var[MASK1][beg_winter[mod]-1:end_year[mod]-1];V2=var[MASK2][0:end_winter[mod]-1]
137
+ V=np.append(V1,V2)
138
+ T=np.append(T,np.sum(V))
139
+ # T[ind]=np.mean(V)
140
+ elif season=="Spring":
141
+ MASK=years==y
142
+ T=np.append(T,np.sum(var[MASK][beg_spring[mod]-1:end_spring[mod]-1]))
143
+ # T[ind]=np.mean(var[MASK][beg_spring[mod]-1:end_spring[mod]-1])
144
+ elif season=="year":
145
+ MASK=years==y
146
+ T=np.append(T,np.sum(var[MASK][0:end_year[mod]-1]))
147
+ # T[ind]=np.mean(var[MASK][0:end_year[mod]-1])
148
+ return(T)
149
+
150
+ def text_into_matrix(model_name,scenario,mx,my,sy,ey):
151
+ """
152
+ This function reads precipitation
153
+ text files to put all of it in a
154
+ 3D matrix of yearly precipitation
155
+ """
156
+ directory='/srv7_tmp1/jbrajkovic/These/TS_ppp2/'
157
+
158
+ mat_ret=np.zeros([mx,my,1])
159
+ if scenario=='ssp585':
160
+ y1=1980;y2=2100
161
+ else:
162
+ y1=2015;y2=2100
163
+ isuiv=1
164
+ isuiv2=0
165
+ for i in range(1,mx*my+1):
166
+ fn=directory+'Pr'+str(i)+model_name+'_'+scenario+'_'+str(y1)+'-'+str(y2)+'.txt'
167
+ if os.path.exists(fn)==False:
168
+ isuiv+=1
169
+ # print('File doesn\'t exist',fn)
170
+ continue
171
+ else:
172
+ isuiv2+=1
173
+
174
+ if isuiv<=my:
175
+ ii=0;jj=isuiv
176
+ else:
177
+ jj=int(isuiv%my)-1
178
+ ii=int((isuiv-jj)/my)
179
+
180
+ if jj==0:j=my-1;ii=ii-1
181
+ else:jj=jj-1
182
+ if isuiv2==1:
183
+ with open (fn, 'r') as f:
184
+ yys = [float(row[0]) for row in csv.reader(f,delimiter='\t')]
185
+ with open (fn, 'r') as f:
186
+ pr= [float(row[2]) for row in csv.reader(f,delimiter='\t')]
187
+ else:
188
+ with open (fn, 'r') as f:
189
+ pr= [float(row[0]) for row in csv.reader(f,delimiter='\t')]
190
+
191
+ if isuiv2==1:
192
+ mat_ret=np.zeros([mx,my,ey-sy+1])*float('nan')
193
+ for yy in range(sy,ey+1):
194
+ msk=np.array(yys)==float(yy)
195
+ # print(yys)
196
+ mat_ret[ii,jj,yy-sy]=np.sum(np.array(pr)[msk])
197
+ if isuiv2%200==0:print(isuiv2, mat_ret[ii,jj,0])
198
+ isuiv+=1
199
+ return(mat_ret)
200
+
201
+
202
+
203
+
204
+
205
+
206
+
207
+ def slidingmeans(TS,interval,std_or_mean=1):
94
208
  int2=int((interval-1)/2)
95
209
  s=np.size(TS)
96
210
  newTS=np.zeros(s)
97
211
  for i in range(0,s):
98
212
  if i<int2:
99
- newTS[i]=np.mean(TS[0:i+int2])
213
+ if std_or_mean: newTS[i]=np.mean(TS[0:i+int2])
214
+ else:newTS[i]=np.std(TS[0:i+int2])
100
215
  elif i>(s-int2-1):
101
- newTS[i]=np.mean(TS[i-int2:s-1])
216
+ if std_or_mean: newTS[i]=np.mean(TS[i-int2:s-1])
217
+ else:newTS[i]=np.std(TS[i-int2:s-1])
102
218
  else:
103
- newTS[i]=np.mean(TS[i-int2:i+int2])
219
+ if std_or_mean:newTS[i]=np.mean(TS[i-int2:i+int2])
220
+ else:newTS[i]=np.std(TS[i-int2:i+int2])
104
221
  return(newTS)
105
222
 
106
223
 
107
224
  def RGPD(vec,shape,scale,pu,teta,th):
225
+ # print(th)
108
226
  r=th+(scale/shape)*((vec*pu*teta)**shape-1)
109
227
  return (r)
228
+
229
+ def GPD_frequency(valu,shape,scale,pu,teta,th,events_per_year):
230
+ ret_p=((round((1+shape*((valu-th)/scale)),2)**(round((1/shape),2)))/(teta*pu*events_per_year))
231
+ # if(pd.isna(ret_p)):print(round((1+shape*((valu-th)/scale)),2),shape,(round((1/shape),2)),)
232
+ return(ret_p)
233
+
234
+ def RGPDI_values(vec,shape,scale,th):
235
+
236
+ vals=((((1-vec)**(-shape))-1)*scale)/shape+th
237
+ return (vals)
238
+
239
+ def RGPD_values(vec,shape,scale):
240
+ vals=1-(1+shape*vec/scale)**(-1/shape)
241
+ return (vals)
242
+
110
243
 
111
244
  def CIGPD(vec,shape,scale,pu,teta,th,varsc,varsh,cov):
112
245
  T1=(((vec*pu*teta)**shape-1)/shape)**2*varsc
@@ -116,25 +249,6 @@ def CIGPD(vec,shape,scale,pu,teta,th,varsc,varsh,cov):
116
249
  CI=np.sqrt(T1+T2+T3)*1.645
117
250
  return(CI)
118
251
 
119
- def map_belgium(ax,lons,lats):
120
-
121
- m = Basemap(width=55000,height=50000,
122
- rsphere=(649328.00,665262.0),\
123
- area_thresh=1000.,projection='lcc',\
124
- lat_1=49.83,lat_2=51.17,lat_0=np.mean(lats),lon_0=np.mean(lons),resolution='h')
125
- m.drawcountries()
126
- m.drawcoastlines()
127
- return(m)
128
-
129
- def map_belgium_zoom(ax,lons,lats):
130
-
131
- m = Basemap(width=35000,height=28000,
132
- rsphere=(649328.00,665262.0),\
133
- area_thresh=1000.,projection='lcc',\
134
- lat_1=49.83,lat_2=51.17,lat_0=np.mean(lats),lon_0=np.mean(lons)-0.35,resolution='h')
135
- m.drawcountries()
136
- m.drawcoastlines()
137
- return(m)
138
252
 
139
253
 
140
254
  def JJ2date(day,year):
@@ -142,7 +256,7 @@ def JJ2date(day,year):
142
256
  end_monthcum=np.zeros(12);end_monthcum[0]=end_month[0]
143
257
  monthlab=np.arange(1,13,1)
144
258
  jj=0;m=0
145
- if (ct.isbis(year)==1):end_month[1]=29
259
+ if ct.isbis(year):end_month[1]=29
146
260
  else:end_month=[31,28,31,30,31,30,31,31,30,31,30,31]
147
261
 
148
262
  for i in range(1,12):
@@ -157,41 +271,1157 @@ def JJ2date(day,year):
157
271
  if (day<=end_monthcum[i] and day>0):
158
272
  m=monthlab[i]
159
273
  jj=day
160
-
274
+ # jj+=1
161
275
  date=np.array([jj,m,year]);date.astype(int)
162
276
  return(date)
163
277
 
164
- def date2JJ(day,month,year):
165
- day=int(day);month=int(month);year=int(year)
166
- end_month=[31,28,31,30,31,30,31,31,30,31,30,31]
167
- if (ct.isbis(year)==1):end_month[1]=29
168
-
169
- JJ=0
170
- for i in range(0,month-1):
171
- JJ=JJ+end_month[i]
172
- JJ=JJ+day
173
- return(JJ)
278
+ def date2JJ(day,month,year,fn1='__',type_mod=2):
279
+ end_month=[31,28,31,30,31,30,31,31,30,31,30,31]
280
+ end_monthcum=np.zeros(12);
281
+ if type_mod==1:
282
+ if (ct.isbis(year)==1):end_month[1]=29
174
283
 
284
+
285
+ for i in range(1,12):
286
+ end_monthcum[i]=end_monthcum[i-1]+end_month[i]
175
287
 
288
+ jj=int(end_monthcum[int(month-1)])+day
289
+ # print(day,month,year,jj)
290
+ return(jj)
176
291
 
177
- def Diverging_bounds(M,step):
178
- M1=np.array(M)
179
- mask=pd.isna(M)
180
- M1[mask]=0.
181
- if (np.min(M1)*-1>np.max(M1)) or (np.max(M1)<=0.) :
182
- vmax=np.min(M1)*-1+(step-(np.min(M1)*-1)%step)
183
- else :
184
- vmax=np.max(M1)+(step-(np.max(M1))%step)
185
- bounds = np.arange(-1*vmax,vmax+step,step)
292
+ def makebounds(mat,step):
293
+ mat1=np.array(mat)
294
+ mask1=pd.isna(mat1)==False
295
+ maxi=np.max(mat1[mask1])
296
+ # print(maxi)
297
+ print(maxi,step)
298
+ bounds=np.arange(0,maxi+step,step)
186
299
  return(bounds)
187
300
 
188
- def makebounds(M,step):
189
- M1=np.array(M)
190
- mask=pd.isna(M)
191
- M1[mask]=0.
192
- vmax=np.max(M1)
193
- vmin=int(np.min(M1[M1>0.]))
194
- print(vmin,vmax,step)
195
- bounds = np.arange(float(vmin),vmax+step,step)
196
- return(bounds)
197
-
301
+ def map_belgium(ax,lons,lats):
302
+ lat_0=50.6;lon_0=4.73
303
+ m = Basemap(width=55000,height=50000,
304
+ rsphere=(649328.00,665262.0),\
305
+ area_thresh=1000.,projection='lcc',\
306
+ lat_1=49.83,lat_2=51.17,lat_0=lat_0,lon_0=lon_0,resolution='i')
307
+ m.drawcountries()
308
+ m.drawcoastlines()
309
+ return(m)
310
+
311
+ def map_belgium_J21(ax,lons,lats):
312
+ lat_0=50.15;lon_0=5.83
313
+ m = Basemap(width=15000,height=18000,
314
+ rsphere=(649328.00,665262.0),\
315
+ area_thresh=1000.,projection='lcc',\
316
+ lat_1=49.83,lat_2=51.17,lat_0=lat_0,lon_0=lon_0,resolution='i')
317
+ m.drawcountries(linewidth=3)
318
+ m.drawcoastlines(linewidth=4)
319
+ return(m)
320
+
321
+
322
+ def map_Vesdre(ax,lons,lats):
323
+ lat_0=50.55;lon_0=5.93
324
+ m = Basemap(width=6000,height=3800,
325
+ rsphere=(649328.00,665262.0),\
326
+ area_thresh=1000.,projection='lcc',\
327
+ lat_1=49.83,lat_2=51.17,lat_0=lat_0,lon_0=lon_0,resolution='i')
328
+ m.drawcountries()
329
+ m.drawcoastlines()
330
+ return(m)
331
+
332
+
333
+
334
+
335
+
336
+ def map_belgium_zoom(ax,lons,lats):
337
+ lat_0=50.6;lon_0=4.73
338
+
339
+ m = Basemap(width=34000,height=30000,
340
+ rsphere=(649328.00,665262.0),\
341
+ area_thresh=1000.,projection='lcc',\
342
+ lat_1=49.83,lat_2=51.17,lat_0=lat_0,
343
+ lon_0=lon_0,resolution='h')
344
+ m.drawcountries(linewidth=1)
345
+ m.drawcoastlines()
346
+ # m.drawrivers()
347
+ # m.drawmapboundary(fill_color='dodgerblue')
348
+ # m.fillcontinents(color='gainsboro',lake_color='aqua')
349
+ # m.bluemarble()
350
+ return(m)
351
+
352
+ def map_Europe(ax,lons,lats):
353
+
354
+ print(np.mean(lats))
355
+ m = Basemap(width=6000,height=150000,
356
+ rsphere=(649328.00,665262.0),\
357
+ area_thresh=1000.,projection='lcc',\
358
+ lat_1=35,lat_2=65,lat_0=52.,lon_0=10.,resolution='i')
359
+ m.drawcountries()
360
+ m.drawcoastlines()
361
+ # m.drawrivers()
362
+ # m.drawmapboundary(fill_color='dodgerblue')
363
+ # m.fillcontinents(color='gainsboro',lake_color='aqua')
364
+ # m.bluemarble()
365
+ return(m)
366
+
367
+ def mean_netcdf_alldomain(start_year,end_year,direct,var):
368
+ for y in range(start_year,end_year+1):
369
+ # print(y)
370
+ fn=glob.glob(direct+'*'+str(y)+'**nc*')[0]
371
+
372
+ if y==start_year:
373
+ matrice=np.average(np.transpose((np.array(xr.open_dataset(fn)[var]))),axis=2)
374
+ else:
375
+ mat2add=np.average(np.transpose((np.array(xr.open_dataset(fn)[var]))),axis=2)
376
+ matrice=np.append(matrice,mat2add,axis=2)
377
+ meant=np.mean(matrice)
378
+ # for i in range(dimx):
379
+ # for j in range(dimy):
380
+ # for k in range(dimt):
381
+ # matdaily[i,j,k]=np.mean(matrice[i,j,:,k])
382
+ # else:
383
+ # mat2add=np.zeros([dimx,dimy,dimt])
384
+ # for i in range(dimx):
385
+ # for j in range(dimy):
386
+ # for k in range(dimt):
387
+ # mat2add[i,j,k]=np.mean(matrice[i,j,:,k])
388
+ # matdaily=np.append(matdaily,mat2add,axis=2)
389
+
390
+
391
+ return(meant)
392
+
393
+ def quick_map_plot(lons,lats,mat,bounds,cmap,MSK=np.zeros(0),nticks=4):
394
+ # filemask="/srv7_tmp1/jbrajkovic/These/EU-7.5km.nc"
395
+ # ds_mask=xr.open_dataset(filemask)
396
+ # mask=np.transpose(np.array(ds_mask.MASK[:,:]))
397
+ # MSK=mask==1
398
+ if MSK.shape[0]==0:
399
+ MSK=np.zeros_like(mat)==0
400
+
401
+ lon_center=4.96
402
+ # lon_center=np.mean(lats)
403
+ lat_center=50.56
404
+ fig=plt.figure(figsize=(6,5))
405
+ ax=fig.add_subplot()
406
+ m = Basemap(width=50000,height=40000,
407
+ rsphere=(649328.00,665262.0),\
408
+ area_thresh=1000.,projection='lcc',\
409
+ lat_1=49.83,lat_2=51.17,lat_0=lat_center,lon_0=lon_center,resolution='i')
410
+ m.drawcountries(linewidth=1.0)
411
+ m.drawcoastlines(linewidth=2.0)
412
+ # m.drawrivers(linewidth=1.5,color='aqua')
413
+ vmax=np.max(mat[pd.isna(mat)==False])
414
+ step=vmax/10.
415
+ # bounds = bounds=np.arange(0,105,5)
416
+ # cmap=cm.jet
417
+ norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
418
+ x,y=m(lons,lats)
419
+ # print(x,y)
420
+ mapa=m.pcolormesh(x,y,mat,norm=norm,cmap=cmap)
421
+ # mapa=m.contourf(x,y,mat,norm=norm,cmap=cmap)
422
+ # m.contour(x,y,MSK,levels=0,linewidth=3.0)
423
+
424
+ # ds_mask=xr.open_dataset(filemask)
425
+ # mask=np.transpose(np.array(ds_mask.MASK[:,:]))
426
+ # MSK=mask==1
427
+ Pr_Vesdre=mat[MSK]
428
+ MPRV=np.mean(Pr_Vesdre[pd.isna(Pr_Vesdre)==False])
429
+ text="CM:\n"+"{:.1f}".format(MPRV)
430
+ # text1="{:.1f}".format(np.mean())
431
+ # plt.annotate(text, xy=(0.9, 0.5), xycoords='axes fraction',
432
+ # xytext=(0.95, 0.60), textcoords='axes fraction',
433
+ # color='black',
434
+ # arrowprops=dict(arrowstyle='Simple', color='black'),
435
+ # fontsize=24,weight='bold')
436
+ # plt.annotate(text1, xy=(0.9, 0.1), xycoords='axes fraction',
437
+ # color='black',
438
+ # fontsize=24,weight='bold')
439
+ # m.contourf(x,y,mat,norm=norm,cmap=cmap)
440
+ # m.colorbar(norm=norm,cmap=cmap,location='left',pad=0.6)
441
+ # cities=np.array(['Bruxelles','Charleroi','Liège','Antwerpen','Ghent','LUX.','FRANCE','GERMANY','NETHER-\nLANDS'])
442
+ # xv=np.array([4.354,4.439,5.58,4.402,3.732,5.75,5.371,6.52,4.82])
443
+ # yv=np.array([50.851,50.428,50.634,51.211,51.043,49.785,49.137,50.482,51.821])
444
+ # pos=['top','top','top','top','bottom','bottom','bottom','bottom','bottom']
445
+ # ps1=['left','left','left','left','right','left','left','left','left']
446
+ # decalage=[+500,+500,+500,+500,-500,+500,+500,+500,+500]
447
+ # xv,yv=m(xv,yv)
448
+ # # m.drawmapscale(5.5,49.2,5.5, 49)
449
+ # for i in range(np.size(cities)):
450
+ # if i<=4:
451
+ # plt.text(xv[i], yv[i]-decalage[i], cities[i],fontsize=10,
452
+ # ha=ps1[i],va=pos[i],color='k')
453
+ # else:
454
+ # plt.text(xv[i], yv[i]-decalage[i], cities[i],fontsize=10,
455
+ # ha=ps1[i],va=pos[i],color='k',weight='bold')
456
+ # if i<=4:plt.scatter(xv[i], yv[i],marker='+',color='black',s=8)
457
+ # for item in [fig, ax]:
458
+ # item.patch.set_visible(False)
459
+ ax.axis("off")
460
+ cbar_ax = fig.add_axes([-0.01, 0.25, 0.05, 0.35])
461
+ cbar=fig.colorbar(mapa,norm=norm, cmap=cmap,pad = 0.6,cax=cbar_ax,orientation="vertical",
462
+ ticks=np.arange(bounds[0],bounds[np.size(bounds)-1]+(bounds[1]-bounds[0]),(bounds[1]-bounds[0])*nticks),drawedges=True)
463
+ cbar.ax.tick_params(labelsize=10)
464
+ # cbar.set_label('Height (m)',fontsize=14,labelpad=10)
465
+ # cbar.solids.set_edgecolor("face")
466
+
467
+ def quick_map_plot2(lons,lats,mat,bounds,cmap,ax):
468
+ lat_0=50.6;lon_0=4.73
469
+ lon_center=lon_0
470
+ # lon_center=np.mean(lats)
471
+ lat_center=lat_0
472
+ m = Basemap(width=34000,height=30000,
473
+ rsphere=(649328.00,665262.0),\
474
+ area_thresh=1000.,projection='lcc',\
475
+ lat_1=49.83,lat_2=51.17,lat_0=lat_center,
476
+ lon_0=lon_center,resolution='h')
477
+ m.drawcountries(linewidth=1.0)
478
+ m.drawcoastlines(linewidth=2.0)
479
+ # m.drawrivers(linewidth=1.5,color='aqua')
480
+ vmax=np.max(mat[pd.isna(mat)==False])
481
+ step=vmax/10.
482
+ # bounds = bounds=np.arange(0,105,5)
483
+ # cmap=cm.jet
484
+ norm = mpl.colors.BoundaryNorm(bounds, cmap.N)
485
+ x,y=m(lons,lats)
486
+ mapa=m.pcolormesh(x,y,mat,norm=norm,cmap=cmap )
487
+ return(mapa)
488
+
489
+ # def get_coordinates(path_to_file):
490
+
491
+ # with fiona.open(path_to_file) as shapefile:
492
+ # # Iterate over the records
493
+ # for record in shapefile:
494
+ # # Print the record
495
+ # print(record)
496
+ # # Read the shapefile
497
+ # gdf = gpd.read_file(path_to_file)
498
+
499
+ # # Print the first few rows of the GeoDataFrame
500
+ # print(gdf.head())
501
+
502
+ def mask_belgium(lon,lat,path_in,path_out,center_or_all=2):
503
+
504
+ """This routine takes as arguments:
505
+ -The longitudes and latitudes of the netcdf (gridded)
506
+ -a .tif file of the mask we want to create (tif must be in epsg:31370 lambbert 72)
507
+ and creates a mask at the resolution of the input netcdf which is saved as netcdf in path_out
508
+
509
+ L'option center_or_all precise si l'on souhaite qu'un des 4 coins des pixels chosisis soient à l'intérieur
510
+ de la zone ou si on regarde uniquement le centre.
511
+ Si center_or_all vaut 1, on ragarde uniquement le centre et donc le masque sera plus petit
512
+
513
+ conseil : raster d'une résolution 100 mètres en input'
514
+ """
515
+
516
+ "Projecting lon lat to Lambert"
517
+ discheck=300000
518
+ lb=pyproj.Proj(projparams='epsg:31370')
519
+ xlb,ylb=lb(lon,lat)
520
+ if center_or_all!=0:
521
+ xlb_ur=np.zeros([xlb.shape[0],xlb.shape[1]])
522
+ ylb_ur=np.zeros([xlb.shape[0],xlb.shape[1]])
523
+
524
+ xlb_ul=np.zeros([xlb.shape[0],xlb.shape[1]])
525
+ ylb_ul=np.zeros([xlb.shape[0],xlb.shape[1]])
526
+
527
+
528
+ xlb_bl=np.zeros([xlb.shape[0],xlb.shape[1]])
529
+ ylb_bl=np.zeros([xlb.shape[0],xlb.shape[1]])
530
+
531
+ xlb_br=np.zeros([xlb.shape[0],xlb.shape[1]])
532
+ ylb_br=np.zeros([xlb.shape[0],xlb.shape[1]])
533
+
534
+ "Calcul des sommets des pixels"
535
+
536
+ for i in range(1,xlb.shape[0]-1):
537
+ for j in range(1,xlb.shape[1]-1):
538
+ xlb_ur[i,j]=np.mean(lon[i-1:i+1,j:j+2]);ylb_ur[i,j]=np.mean(lat[i-1:i+1,j:j+2])
539
+ xlb_ul[i,j]=np.mean(lon[i-1:i+1,j-1:j+1]);ylb_ul[i,j]=np.mean(lat[i-1:i,j-1:j+1])
540
+ xlb_bl[i,j]=np.mean(lon[i:i+2,j-1:j+1]);ylb_bl[i,j]=np.mean(lat[i:i+2,j-1:j+1])
541
+ xlb_br[i,j]=np.mean(lon[i:i+2,j:j+2]);ylb_br[i,j]=np.mean(lat[i:i+2,j:j+2])
542
+
543
+ xlb_ur,ylb_ur=lb(xlb_ur,ylb_ur)
544
+ xlb_ul,ylb_ul=lb(xlb_ul,ylb_ul)
545
+ xlb_bl,ylb_bl=lb(xlb_bl,ylb_bl)
546
+ xlb_br,ylb_br=lb(xlb_br,ylb_br)
547
+
548
+
549
+
550
+
551
+ # print (ylb_ur[1:]-ylb[1:])
552
+ "Opening the raster file"
553
+
554
+ im = Image.open(path_in)
555
+ imarray = np.array(im)
556
+
557
+ file_name = path_in
558
+ with rasterio.open(file_name) as src:
559
+ band1 = src.read(1)
560
+ print('Band1 has shape', band1.shape)
561
+ height = band1.shape[0]
562
+ width = band1.shape[1]
563
+ cols, rows = np.meshgrid(np.arange(width), np.arange(height))
564
+ xs, ys = rasterio.transform.xy(src.transform, rows, cols)
565
+ lons= np.array(xs)
566
+ lats = np.array(ys)
567
+ print('lons shape', lons.shape)
568
+ # print(ys,ylb)
569
+ lats=lats[imarray!=0]
570
+ lons=lons[imarray!=0]
571
+ print(lats)
572
+ MSK=np.zeros(xlb.shape)
573
+
574
+ print(np.max(xs),np.max(ys))
575
+
576
+ "Finding the pixels which are in the zone"
577
+
578
+ "perimetre de recherche"
579
+ maxi_lat=np.max(lats)
580
+ mini_lat=np.min(lats)
581
+ mini_lon=np.min(lons)
582
+ maxi_lon=np.max(lons)
583
+ print(maxi_lat,mini_lat)
584
+ disrech=10000000000
585
+ disu=disrech
586
+ disb=disrech
587
+ disl=disrech
588
+ disr=disrech
589
+ for i in range(xlb.shape[0]):
590
+ # print(i)
591
+ for j in range(xlb.shape[1]):
592
+ if abs(ylb[i,j]-maxi_lat)<disu and ylb[i,j]-maxi_lat>=0 :
593
+ iu=i
594
+ disu=abs(ylb[i,j]-maxi_lat)
595
+
596
+ if abs( ylb[i,j]-mini_lat)<disb and ylb[i,j]-mini_lat<=0:
597
+ ib=i
598
+ disb=abs( ylb[i,j]-mini_lat)
599
+
600
+ if abs(xlb[i,j]-mini_lon)<disl and xlb[i,j]-mini_lon <=0 :
601
+ # print(xlb[i,j]-mini_lon)
602
+ jl=j
603
+ disl=abs(xlb[i,j]-mini_lon)
604
+ if abs(xlb[i,j]-maxi_lon)<disr and xlb[i,j]-maxi_lon >=0 :
605
+ jr=j
606
+ disr=abs(xlb[i,j]-maxi_lon)
607
+
608
+ print(disu,disr,disb,disl)
609
+ print(iu,ib,jl,jr)
610
+ # if iu==ib or jl==jr:
611
+ # iu=1;ib=xlb.shape[0]-1
612
+ # jl=1;jr=xlb.shape[1]-1
613
+ print(iu,ib,jl,jr)
614
+ print('aire latitudinale de recherche : '+"{:.0f}".format(ylb[int(ib),int(jl)])+' '+"{:.0f}".format(ylb[int(iu),int(jl)]))
615
+ for i in range(iu-2,ib+2):
616
+ print(i,np.mean(ylb[i,:]))
617
+ for j in range(jl-2,jr+2):
618
+ # print(xlb[i,j],ylb[i,j])
619
+ # msk_stp=lons[((abs(lons-xlb_ur[i,j])<100)&(abs(lats-ylb_ur[i,j])<100))]
620
+ if center_or_all==1:
621
+ msk_stp=lons[((abs(lons-xlb[i,j])<discheck)&(abs(lats-ylb[i,j])<discheck))|
622
+ ((abs(lons-xlb_ur[i,j])<discheck)&(abs(lats-ylb_ur[i,j])<discheck))|
623
+ ((abs(lons-xlb_ul[i,j])<discheck)&(abs(lats-ylb_ul[i,j])<discheck))|
624
+ ((abs(lons-xlb_bl[i,j])<discheck)&(abs(lats-ylb_bl[i,j])<discheck))|
625
+ ((abs(lons-xlb_br[i,j])<discheck)&(abs(lats-ylb_br[i,j])<discheck))]
626
+
627
+ else:
628
+ msk_stp=lons[((abs(lons-xlb[i,j])<discheck)&(abs(lats-ylb[i,j])<discheck))]
629
+
630
+ # print(i,j)
631
+ # if i%10==0 and j==jl:
632
+ # print(i,j)
633
+ if np.size(msk_stp)!=0:#print('ok');
634
+ print(i,j)
635
+ MSK[i,j]=1
636
+ # print(msk_stp,i,j)
637
+ # if xlb[i,j]-lons[k]<100. and ylb[i,j]-lats[k]<100:
638
+ # MSK[i,j]=1.
639
+
640
+ # MSK
641
+ # time=[1]
642
+
643
+ "writing the output netcdf with the mask"
644
+
645
+ coords=dict(
646
+ LON=(["y","x"],np.transpose(xlb)),
647
+ LAT=(["y","x"],np.transpose(ylb)),
648
+ )
649
+ Mar_ds=xr.DataArray(
650
+ data=np.transpose(np.zeros([xlb.shape[0],xlb.shape[1]])),
651
+ dims=["y","x"],
652
+ coords=coords,
653
+ )
654
+ Mar_rain=xr.DataArray(
655
+ data=np.transpose(MSK),
656
+ dims=["y","x"],
657
+ coords=coords,
658
+ attrs=dict(
659
+ description='MSK',
660
+ units=''))
661
+
662
+ Mar_ds['MSK']=Mar_rain
663
+ format1='NETCDF4'
664
+ Mar_ds.to_netcdf(path_out,mode='w',format=format1)
665
+ MSK=[MSK==1]
666
+ return (ylb)
667
+ # return(coords)
668
+
669
+ def mask_belgiumV2(lon,lat,path_in,path_out,center_or_all=2,discheck=300000,buffer=2):
670
+ """This routine takes as arguments:
671
+ -The longitudes and latitudes of the netcdf (gridded)
672
+ -a .tif file of the mask we want to create (tif must be in epsg:31370 lambbert 72)
673
+ and creates a mask at the resolution of the input netcdf which is saved as netcdf in path_out
674
+
675
+ L'option center_or_all precise si l'on souhaite qu'un des 4 coins des pixels chosisis soient à l'intérieur
676
+ de la zone ou si on regarde uniquement le centre.
677
+ Si center_or_all vaut 1, on ragarde uniquement le centre et donc le masque sera plus petit
678
+
679
+ conseil : raster d'une résolution 100 mètres en input'
680
+ """
681
+
682
+ "Projecting lon lat to Lambert"
683
+ # discheck=300000
684
+ print('Check distance = '+str(discheck) + 'meters')
685
+ lb=pyproj.Proj(projparams='epsg:31370')
686
+
687
+ if center_or_all!=0:
688
+ xlb_ur=np.zeros_like(lon)
689
+ ylb_ur=np.zeros_like(lon)
690
+
691
+ xlb_ul=np.zeros_like(lon)
692
+ ylb_ul=np.zeros_like(lon)
693
+
694
+
695
+ xlb_bl=np.zeros_like(lon)
696
+ ylb_bl=np.zeros_like(lon)
697
+
698
+ xlb_br=np.zeros_like(lon)
699
+ ylb_br=np.zeros_like(lon)
700
+
701
+ "Calcul des sommets des pixels"
702
+
703
+ for i in range(1,lon.shape[0]-1):
704
+ for j in range(1,lon.shape[1]-1):
705
+ xlb_ur[i,j]=np.mean(lon[i-1:i+1,j:j+2]);ylb_ur[i,j]=np.mean(lat[i-1:i+1,j:j+2])
706
+ xlb_ul[i,j]=np.mean(lon[i-1:i+1,j-1:j+1]);ylb_ul[i,j]=np.mean(lat[i-1:i,j-1:j+1])
707
+ xlb_bl[i,j]=np.mean(lon[i:i+2,j-1:j+1]);ylb_bl[i,j]=np.mean(lat[i:i+2,j-1:j+1])
708
+ xlb_br[i,j]=np.mean(lon[i:i+2,j:j+2]);ylb_br[i,j]=np.mean(lat[i:i+2,j:j+2])
709
+
710
+
711
+
712
+
713
+ # print (ylb_ur[1:]-ylb[1:])
714
+ "Opening the raster file"
715
+
716
+ im = Image.open(path_in)
717
+ imarray = np.array(im)
718
+
719
+ file_name = path_in
720
+ with rasterio.open(file_name) as src:
721
+ band1 = src.read(1)
722
+ print('Band1 has shape', band1.shape)
723
+ height = band1.shape[0]
724
+ width = band1.shape[1]
725
+ cols, rows = np.meshgrid(np.arange(width), np.arange(height))
726
+ xs, ys = rasterio.transform.xy(src.transform, rows, cols)
727
+ lons= np.array(xs)
728
+ lats = np.array(ys)
729
+ print('lons shape', lons.shape)
730
+
731
+ inProj=Proj(init='epsg:31370')
732
+ outProj = Proj(init='epsg:4326')
733
+
734
+ lons,lats=transform(inProj,outProj,lons,lats)
735
+
736
+ # print(ys,ylb)
737
+ # plt.imshow(imarray);plt.colorbar()
738
+ # plt.show()
739
+ lats=lats[imarray>0]
740
+ lons=lons[imarray>0]
741
+
742
+ print(lats)
743
+ MSK=np.zeros(lon.shape)
744
+
745
+ print(np.max(xs),np.max(ys))
746
+
747
+ "Finding the pixels which are in the zone"
748
+
749
+ "perimetre de recherche"
750
+ maxi_lat=np.max(lats)
751
+ mini_lat=np.min(lats)
752
+ mini_lon=np.min(lons)
753
+ maxi_lon=np.max(lons)
754
+ me_lat=np.mean(lats)
755
+ print(maxi_lat,mini_lat)
756
+ disrech=10000000000
757
+ disu=disrech
758
+ disb=disrech
759
+ disl=disrech
760
+ disr=disrech
761
+
762
+ print('lon max ',maxi_lon,mini_lon,me_lat,maxi_lat,mini_lat)
763
+ for i in range(lon.shape[0]):
764
+ # print(i)
765
+ for j in range(lon.shape[1]):
766
+ if ct.dis2pix(lat[i,j], lon[i,j], maxi_lat, lon[i,j])<disu and lat[i,j]>=maxi_lat:
767
+ iu=i
768
+ disu=ct.dis2pix(lat[i,j], lon[i,j], maxi_lat, lon[i,j])
769
+
770
+ if ct.dis2pix(lat[i,j], lon[i,j], mini_lat, lon[i,j])<disb and lat[i,j]<=mini_lat:
771
+ ib=i
772
+ disb=ct.dis2pix(lat[i,j], lon[i,j], mini_lat, lon[i,j])
773
+
774
+ if ct.dis2pix(lat[i,j], lon[i,j], me_lat, mini_lon)<disl and lon[i,j]<=mini_lon:
775
+ jl=j
776
+ disl=ct.dis2pix(lat[i,j], lon[i,j], me_lat, mini_lon)
777
+
778
+ if ct.dis2pix(lat[i,j], lon[i,j], me_lat, maxi_lon)<disr and lon[i,j]>=maxi_lon:
779
+
780
+ jr=j
781
+ disr=ct.dis2pix(lat[i,j], lon[i,j], me_lat, maxi_lon)
782
+
783
+ print(disu,disr,disb,disl)
784
+ print(iu,ib,jl,jr)
785
+ print('aire latitudinale de recherche : '+"{:.0f}".format(lat[int(ib),int(jl)])+' '+"{:.0f}".format(lat[int(iu),int(jl)]))
786
+ ide=iu-2;ifi=ib+2
787
+ if ide<0:ide=0;
788
+ if ifi>lon.shape[0]:ifi=lon.shape[0]
789
+
790
+ jde=jl-2;jfi=jr+2
791
+ if jde<0:jde=0
792
+ if jr>lon.shape[0]:jfi=lon.shape[1]
793
+ center_pixel_lon=np.mean(lons)
794
+ center_pixel_lat=np.mean(lats)
795
+ min_dist1=100000
796
+ for i in range(ide,ifi):
797
+ print(i,np.mean(lat[i,:]))
798
+ for j in range(jde,jfi):
799
+ # print(xlb[i,j],ylb[i,j])
800
+ # msk_stp=lons[((abs(lons-xlb_ur[i,j])<100)&(abs(lats-ylb_ur[i,j])<100))]
801
+ if center_or_all !=2:
802
+ if center_or_all==1:
803
+
804
+ msk_stp=lons[((ct.dis2pix(lats, lons, lat[i,j], lon[i,j])<discheck)|
805
+ ((ct.dis2pix(lats, lons, ylb_ur[i,j], xlb_ur[i,j]))<discheck)|
806
+ ((ct.dis2pix(lats, lons, ylb_ul[i,j], xlb_ul[i,j]))<discheck)|
807
+ (ct.dis2pix(lats, lons, ylb_bl[i,j], xlb_bl[i,j])<discheck)|
808
+ ((ct.dis2pix(lats, lons, ylb_br[i,j], xlb_br[i,j]))<discheck))]
809
+
810
+
811
+ else:
812
+ msk_stp=lons[(ct.dis2pix(lats, lons, lat[i,j], lon[i,j])<discheck)]
813
+
814
+ if np.size(msk_stp)!=0:#print('ok');
815
+ print(i,j)
816
+ MSK[i,j]=1
817
+
818
+ else:
819
+ dists=np.matrix([ct.dis2pix(center_pixel_lat, center_pixel_lon, lat[i,j], lon[i,j]),
820
+ ct.dis2pix(center_pixel_lat, center_pixel_lon, ylb_ur[i,j], xlb_ur[i,j]),
821
+ ct.dis2pix(center_pixel_lat, center_pixel_lon, ylb_ul[i,j], xlb_ul[i,j]),
822
+ ct.dis2pix(center_pixel_lat, center_pixel_lon, ylb_bl[i,j], xlb_bl[i,j]),
823
+ ct.dis2pix(center_pixel_lat, center_pixel_lon, ylb_br[i,j], xlb_br[i,j])])
824
+ min_dist=np.min(dists)
825
+
826
+ if min_dist<min_dist1:
827
+ min_dist1=min_dist
828
+ iic=i;jjc=j
829
+ if center_or_all==2:
830
+ MSK[iic-buffer:iic+buffer+1,jjc-buffer:jjc+buffer+1]=1
831
+
832
+
833
+ "writing the output netcdf with the mask"
834
+
835
+ coords=dict(
836
+ LON=(["y","x"],np.transpose(lon)),
837
+ LAT=(["y","x"],np.transpose(lat)),
838
+ )
839
+ Mar_ds=xr.DataArray(
840
+ data=np.transpose(np.zeros([lon.shape[0],lat.shape[1]])),
841
+ dims=["y","x"],
842
+ coords=coords,
843
+ )
844
+ Mar_rain=xr.DataArray(
845
+ data=np.transpose(MSK),
846
+ dims=["y","x"],
847
+ coords=coords,
848
+ attrs=dict(
849
+ description='MSK',
850
+ units=''))
851
+
852
+ Mar_ds['MSK']=Mar_rain
853
+ format1='NETCDF4'
854
+ Mar_ds.to_netcdf(path_out,mode='w',format=format1)
855
+ MSK=[MSK==1]
856
+ return (MSK)
857
+
858
+ def dis2pix(lat1,lon1,lat2,lon2):
859
+ lat1,lon1,lat2,lon2=np.deg2rad(lat1),np.deg2rad(lon1),np.deg2rad(lat2),np.deg2rad(lon2)
860
+ dis=np.arccos(np.sin(lat1)*np.sin(lat2)+np.cos(lat1)*np.cos(lat2)*np.cos(abs(lon1-lon2)))*6371000
861
+ return(dis)
862
+
863
+ def anomaly_cmap():
864
+ cdict = {'blue': [[0.0, 0.0, 0.0]],
865
+ 'red': [[0.0, 0.0, 0.0]],
866
+ 'green': [[0.0, 0.0, 0.0]]}
867
+ newcmp = LinearSegmentedColormap('testCmap', segmentdata=cdict, N=256)
868
+ return(newcmp)
869
+
870
def grid_mean(folder,year,var,season,sum_or_mean=0,nts=24,lev=0,nf=0,fn1='__'):
    """Seasonal or annual aggregate (sum or mean) of a MAR NetCDF variable.

    Parameters
    ----------
    folder : str
        Path of the NetCDF file to read (despite the name: the glob lookup by
        *year* is commented out and *folder* is used directly as the file name).
    year : int
        Year label; only used by the commented-out glob lookups.
    var : str
        Variable name; 'MBRR' triggers the special precipitation path where
        rain (MBRR) and snowfall (MBSF) are added together.
    season : str
        One of 'SP','SU','F','W','DJF','MAM','JJA','SON', or 'Y' for the
        whole year.
    sum_or_mean : int
        Non-zero -> np.sum over time; 0 -> np.average over time.
    nts : int
        Time steps per day (default 24 = hourly output).
    lev : int
        Vertical level index, used when the variable is 4-D.
    nf : int
        File index for the commented-out glob lookup (currently unused).
    fn1 : str
        Path of the previous-year file, needed for winter seasons
        ('W'/'DJF') that straddle two calendar years.

    Returns
    -------
    2-D numpy array of summed/averaged values (spatial axes first, after
    np.transpose of the raw (time, [level,] y, x) layout).

    NOTE(review): if *season* is neither 'Y' nor in the list, bs/es stay
    unbound and the function raises — presumably only listed codes are used.
    """
    # fn=glob.glob(folder+'*'+str(year)+'*')[nf]
    fn=folder
    # print(fn)
    seasons_names=['SP','SU','F','W','DJF','MAM','JJA','SON']
    if season!='Y':

        # Day-of-year bounds of each season (same order as seasons_names).
        beg_seas=[81,173,265,356,335,60,152,244]
        end_seas=[172,264,355,80,59,151,243,334]
        for i in range(8):
            if seasons_names[i]==season:
                bs=beg_seas[i]
                es=end_seas[i]
                break
    else:
        bs=1;es=365
    # print(bs,es)

    if var=='MBRR':
        # Precipitation: rain (MBRR) and snowfall (MBSF) are summed together.
        if sum_or_mean:
            if season!='W' and season!='DJF':
                # Season contained in a single calendar year: slice the
                # time axis between the season's first and last day.
                Ds=np.sum(np.transpose(np.array(xr.open_dataset(fn)['MBRR'])+\
                    np.array(xr.open_dataset(fn)['MBSF']))\
                    [:,:,(bs-1)*nts+1:es*nts+nts],axis=2)
            else:
                # Winter straddles two years: start of the current file plus
                # the end of the previous-year file (fn1 argument).
                # fn1=glob.glob(folder+'*'+str(year-1)+'*')[nf]
                Ds=np.sum(np.transpose(np.array(xr.open_dataset(fn)['MBRR'])+\
                    np.array(xr.open_dataset(fn)['MBSF']))\
                    [:,:,0:es*nts+nts],axis=2)\
                    +np.sum(np.transpose(np.array(xr.open_dataset(fn1)['MBRR'])+\
                    np.array(xr.open_dataset(fn1)['MBSF']))\
                    [:,:,(bs-1)*nts+1:],axis=2)
        else:
            if season!='W' and season !='DJF':
                Ds=np.average(np.transpose(np.array(xr.open_dataset(fn)['MBRR'])+\
                    np.array(xr.open_dataset(fn)['MBSF']))\
                    [:,:,(bs-1)*nts+1: es*nts+nts],axis=2)
            else:
                # NOTE(review): here fn1 is re-derived via glob, unlike the
                # sum branch above which uses the fn1 argument — confirm
                # which behaviour is intended.
                fn1=glob.glob(folder+'*'+str(year-1)+'*')[0]
                Ds=(np.transpose(np.array(xr.open_dataset(fn)['MBRR'])+\
                    np.array(xr.open_dataset(fn)['MBSF']))\
                    [:,:,0:es*nts+nts])
                Ds=np.append(Ds,np.transpose(np.array(xr.open_dataset(fn1)['MBRR'])+\
                    np.array(xr.open_dataset(fn1)['MBSF']))\
                    [:,:,(bs-1)*nts+1:],axis=2)
                Ds=np.average(Ds,axis=2)
    else:
        mat=np.array(xr.open_dataset(fn)[var])
        # fn1=glob.glob(folder+'*'+str(year-1)+'*')[nf]
        if np.size(mat.shape)==4:
            # 4-D variable: pick level *lev* while slicing the time axis.
            axis=3
            if season=='W' or season=='DJF':
                mat=np.append(np.transpose(mat)\
                    [:,:,lev,0:es*nts+nts],
                    np.transpose(np.array(xr.open_dataset(fn1)[var]))[:,:,lev,(bs-1)*nts+1:],axis=2)
            elif season=='Y' :
                mat=np.transpose(mat)\
                    [:,:,lev,:]
            else:
                mat=np.transpose(mat)\
                    [:,:,lev,(bs-1)*nts+1: es*nts+nts]
        else:
            # 3-D variable (time, y, x).
            axis=2
            if season=='W' or season=='DJF':
                mat=np.append(np.transpose(mat)\
                    [:,:,0:es*nts+nts],
                    np.transpose(np.array(xr.open_dataset(fn1)[var]))[:,:,(bs-1)*nts+1:],axis=2)
            elif season=='Y' :
                mat=np.transpose(mat)\
                    [:,:,:]

            else:
                mat=np.transpose(mat)\
                    [:,:,(bs-1)*nts+1: es*nts+nts]


        if sum_or_mean:

            Ds=np.sum(mat,axis=2)

        else:
            # NOTE(review): leftover debug print; 'axis' is computed above but
            # the reductions always use axis=2, which is correct once the
            # level axis has been indexed out of a 4-D variable.
            print(axis)
            Ds=np.average(mat,axis=2)


    return(Ds)
956
+
957
def find_pix_be(lon_p,lat_p,lons,lats):
    """Return [row, col] of the grid pixel closest to (lon_p, lat_p).

    All coordinates are projected to Belgian Lambert 72 (EPSG:31370) and
    compared with Euclidean distance in metres. Ties keep the last pixel
    in scan order (row-major).
    """
    lb72 = pyproj.Proj(projparams='epsg:31370')
    x0, y0 = lb72(lon_p, lat_p)
    xg, yg = lb72(lons, lats)
    dist = ((xg - x0) ** 2 + (yg - y0) ** 2) ** 0.5
    best = 10 ** 12
    for row in range(dist.shape[0]):
        for col in range(dist.shape[1]):
            if dist[row, col] <= best:
                best_r, best_c, best = row, col, dist[row, col]
    return [best_r, best_c]
969
+
970
def find_MARs_closest_pixel(lonsm,latsm,lonsi,latsi,neighbours=1):
    """For every MAR grid cell, locate its nearest pixel(s) in a second grid.

    Both grids are projected to Belgian Lambert 72 (EPSG:31370) and compared
    with Euclidean distance in metres.

    Parameters
    ----------
    lonsm, latsm : 2-D arrays
        MAR grid longitudes / latitudes (degrees).
    lonsi, latsi : 2-D arrays
        Target grid longitudes / latitudes (degrees).
    neighbours : int
        Number of nearest target pixels to keep per MAR cell.

    Returns
    -------
    output : 4-D array
        output[i, j, n] = [k, l, w] — indices of the n-th nearest target
        pixel for MAR cell (i, j) plus an inverse-distance weight.

    NOTE(review): two points look suspicious and should be confirmed:
    - `dists` is re-initialised to the 10E12 sentinel inside the n-loop, so
      for n > 0 the `> dists[n-1]` test compares against the sentinel, not
      the previously found distance — the "n-th nearest" logic probably only
      behaves as intended for neighbours == 1.
    - the stored weight is 1/dists[n] (per metre) for n == 0 but
      1/(dists[n]/1000) (per kilometre) for n > 0 — inconsistent units.
    """
    Lb72=pyproj.Proj(projparams='epsg:31370')
    xi,yi=Lb72(lonsi,latsi)
    xm,ym=Lb72(lonsm,latsm)
    # Full distance matrix: (MAR i, MAR j, target k, target l).
    dis2pixels=np.zeros([xm.shape[0],ym.shape[1],xi.shape[0],yi.shape[1]])
    output=np.zeros([xm.shape[0],ym.shape[1],neighbours,3])
    for i in range(xm.shape[0]):
        for j in range(xm.shape[1]):
            for n in range(neighbours):
                dists=np.zeros(neighbours)
                dists[:]=10E12
                for k in range(xi.shape[0]):
                    for l in range(xi.shape[1]):
                        dis2pixels[i,j,k,l]=((xm[i,j]-xi[k,l])**2+(ym[i,j]-yi[k,l])**2)**0.5
                        if n==0:
                            if dis2pixels[i,j,k,l]<dists[0]:
                                dists[n]=dis2pixels[i,j,k,l]
                                output[i,j,n,0]=k
                                output[i,j,n,1]=l
                                output[i,j,n,2]=1/dists[n]
                        else:
                            if dis2pixels[i,j,k,l]<dists[n] and\
                                dis2pixels[i,j,k,l]>dists[n-1]:
                                dists[n]=dis2pixels[i,j,k,l]
                                output[i,j,n,0]=k
                                output[i,j,n,1]=l
                                output[i,j,n,2]=1/(dists[n]/1000)
    return(output)
998
+
999
def IPCC_cmap():
    """Brown-white-green diverging colormap (IPCC-style anomaly palette)."""
    anchors = [
        (84 / 256, 48 / 256, 5 / 256),     # dark brown (dry / negative)
        (245 / 256, 245 / 256, 245 / 256), # near-white (neutral)
        (0 / 256, 60 / 256, 48 / 256),     # dark green (wet / positive)
    ]
    return mpl.colors.LinearSegmentedColormap.from_list("", anchors)
1004
+
1005
def draw_cities(m,fs_c=14,fs_C=16):
    """Annotate a Basemap *m* with selected Belgian cities and neighbour countries.

    Parameters
    ----------
    m : Basemap
        Used to project lon/lat to map coordinates.
    fs_c : int
        Font size for city labels (table entries 0-4).
    fs_C : int
        Font size for country labels (table entries 5-8).

    Only entries whose plot_or_not flag is 1 are drawn (currently Liège
    and LUX.).
    """
    fs=10  # NOTE(review): unused leftover.
    cities=np.array(['Bruxelles','Charleroi','Liège',
                     'Antwerpen','Ghent',
                     'LUX.','FRANCE','GERMANY','NETHER-\nLANDS'])
    # 1 = draw this entry, 0 = skip it.
    plot_or_not=[0,0,1,0,0,1,0,0,0]
    # Label longitudes / latitudes (degrees).
    xv=np.array([4.354,4.439,5.58,4.402,3.732,5.81,5.371,6.52,4.82])
    yv=np.array([50.851,50.428,50.634,51.211,51.043,49.785,49.137,50.482,51.821])
    # Vertical / horizontal text alignment per label.
    pos=['top','top','bottom','top','bottom','bottom','bottom','bottom','bottom']
    ps1=['left','left','right','left','right','left','left','left','left']
    # Per-label vertical offsets, immediately zeroed (kept for manual tuning).
    decalage=np.array([+500,+500,+0,+500,-500,+500,+500,+500,+500])
    decalage[:]=0
    xv,yv=m(xv,yv)  # project lon/lat to map coordinates
    # m.drawmapscale(5.5,49.2,5.5, 49)
    for i in range(np.size(cities)):
        if plot_or_not[i]:
            if i<=4:
                # Cities: black dot + bold label with a white outline.
                plt.gca()
                plt.scatter(xv[i], yv[i],color='black',s=25)
                plt.text(xv[i], yv[i]-decalage[i], cities[i],fontsize=fs_c,
                         ha=ps1[i],va=pos[i],color='k',weight='bold',
                         path_effects=[pe.withStroke(linewidth=4, foreground="white")])

            else:
                # Countries: plain bold label, larger font.
                plt.text(xv[i], yv[i]-decalage[i], cities[i],fontsize=fs_C,
                         ha=ps1[i],va=pos[i],color='k',weight='bold')
1031
+
1032
+
1033
+
1034
def draw_stations(m,n_id=1,fs=8):
    """Label the Belgian synoptic weather stations on a Basemap *m*.

    Parameters
    ----------
    m : Basemap
        Used to project lon/lat to map coordinates.
    n_id : int
        1 -> print station names; 0 -> print station index codes.
    fs : int
        Label font size.
    """


    # Station longitudes / latitudes (degrees; same order as the name list).
    stations_lons=[6.07960,5.912,6.228,
                   5.594,5.405,5.255,
                   4.591,4.653,4.471,
                   5.453,4.667,3.780,
                   4.359,5.421,4.486,
                   3.799,3.664,3.115,
                   4.539,5.470,5.072,
                   4.470,4.381,3.208,
                   2.856,2.668 ]

    stations_lats=[50.511,50.478,50.459,
                   49.647,50.037,50.207,
                   50.094,50.226,50.478,
                   50.653,50.593,50.581,
                   50.799,50.903,50.895,
                   50.993,50.943,50.892,
                   51.064,51.170,51.270,
                   51.219,51.322
                   ,51.322,51.200,51.126]

    stations_names=['Mont-Rigi','Spa','Elsenborn',
                    'Buzenol','Saint-Hubert','Humain',
                    'Dourbes','Florennes','Gosselies',
                    'Bierset','Ernage','Chièvres',
                    'Uccle','Diepenbeek','Zaventem',
                    'Melle','Semmerzake','Beitem',
                    'Sint-Katelijne','Kleine-Brogel','Retie',
                    'Deurne','Stabroek','Zeebrugge',
                    'Middelkerke','Koksijde']

    # Station identifier codes ('_' where unknown).
    stations_index=['06494','06490','06496',
                    '06484','06476','06472',
                    '06455','06456','06449',
                    '06478','06459','06432',
                    '06447','06477','06451',
                    '06434','06428','06414',
                    '06439','06479','06464',
                    '06450','06438','06418',
                    '_','06400']
    # Choose which label set to draw.
    if n_id:te=stations_names
    else:te=stations_index
    xv,yv=m(stations_lons,stations_lats)  # project lon/lat to map coordinates
    # m.drawmapscale(5.5,49.2,5.5, 49)
    for i in range(np.size(stations_lons)):
        plt.text(xv[i], yv[i], te[i],fontsize=fs,
                 color='k',weight='bold')
1083
+
1084
+
1085
def box_plot(data, edge_color, fill_color, ax):
    """Draw a filled boxplot of *data* on *ax*.

    Every boxplot artist (boxes, whiskers, fliers, means, medians, caps)
    is recoloured with *edge_color*; box interiors are filled with
    *fill_color*. Returns the artist dictionary from ax.boxplot().
    """
    bp = ax.boxplot(data, patch_artist=True)

    artist_groups = ('boxes', 'whiskers', 'fliers', 'means', 'medians', 'caps')
    for group in artist_groups:
        plt.setp(bp[group], color=edge_color)

    for box in bp['boxes']:
        box.set(facecolor=fill_color)

    return bp
1095
+
1096
+
1097
def endmonth(year):
    """Number of days in each month of *year* (leap-year aware via ct.isbis)."""
    february = 29 if ct.isbis(year) else 28
    return [31, february, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
1103
+
1104
def radar_coord():
    """Build the lon/lat coordinate grids of the RADCLIM radar composite.

    Reads the projection metadata (upper-left corner, grid size, pixel
    scale) from a reference RADCLIM HDF5 file, reconstructs the
    pixel-centre coordinates in the Belgian Lambert conic projection,
    then reprojects them to WGS84.

    Returns
    -------
    (lon, lat) : 2-D arrays of pixel-centre longitudes / latitudes.
    """
    # NOTE(review): hard-coded reference file — only valid on the original server.
    radar='/srv5_tmp3/RADCLIM/2021/20210714230000.radclim.accum1h.hdf'
    # 'with' ensures the HDF5 file is closed (the original left it open).
    with h5py.File(radar, "r") as f:
        where = f['dataset1']['where']
        ul_x = where.attrs['UL_x']
        ul_y = where.attrs['UL_y']
        xsize = where.attrs['xsize']
        ysize = where.attrs['ysize']
        xscale = where.attrs['xscale']
        yscale = where.attrs['yscale']

    # Lower-right corner of the grid (this line was duplicated in the original).
    lr_x = ul_x + (xsize * xscale)
    lr_y = ul_y - (ysize * yscale)

    # Pixel-centre coordinates: half-pixel shift from the cell edges.
    x = np.arange(ul_x, lr_x, xscale) + xscale / 2
    y = np.arange(lr_y, ul_y, yscale) - yscale / 2

    xx, yy = np.meshgrid(x, y)

    # Flip so row 0 corresponds to the northernmost line, matching the raster.
    yy = np.flip(yy)

    inProj=Proj(r'+proj=lcc +lat_1=49.83333333333334 +lat_2=51.16666666666666 +lat_0=50.797815 +lon_0=4.359215833333333 +x_0=649328 +y_0=665262 +ellps=GRS80 +towgs84=0,0,0,0,0,0,0 +units=m +no_defs ')
    # NOTE(review): Proj(init=...) and transform() are deprecated pyproj APIs;
    # kept as-is to avoid changing behaviour (Transformer changes axis order).
    outProj = Proj(init='epsg:4326')

    lon, lat = transform(inProj, outProj, xx, yy)
    return (lon, lat)
1132
+
1133
def marray(ds, var):
    """Extract variable *var* from dataset *ds* as a transposed numpy array."""
    values = np.array(ds[var])
    return values.T
1135
+
1136
def marrayV2(ds, var):
    """Extract 3-D variable *var* from *ds*, moving the leading (time) axis last.

    Input axes (t, y, x) are returned reordered as (y, x, t).
    """
    return np.transpose(np.array(ds[var]), axes=(1, 2, 0))
# BUGFIX: removed stray module-level debug statement print('ok') that
# executed on every import of this module.
1139
+
1140
def RGEV(retp, nyears, loc, sca, sha):
    """GEV return level for return period *retp*.

    *nyears* is kept for API compatibility but unused.
    """
    exceed_p = 1 / retp
    reduced = -np.log(1 - exceed_p)
    return loc - (sca / sha) * (1 - reduced ** (-sha))
1144
+
1145
def GEV_frequency(value, loc, sca, sha):
    """Return period associated with *value* under a GEV(loc, sca, sha) law."""
    z = 1 - (((loc - value) * sha) / sca)
    exceed_p = 1 - np.exp(-z ** (-1 / sha))

    return 1 / exceed_p
1150
+
1151
def GEVCI(retp, loc, sc, sh,
          varloc, varsc, varsh,
          covlocsc, covlocsh, covscsh):
    """90% confidence half-width of the GEV return level for period *retp*.

    Delta method: propagate the (co)variances of the fitted location,
    scale and shape parameters through the return-level formula, then
    scale by 1.645 (one-sided 95% / two-sided 90% normal quantile).
    """
    p = 1 / retp
    y = -np.log(1 - p)
    # Partial derivatives of the return level w.r.t. loc, scale and shape.
    d_loc = 1
    d_sc = -(1 - y ** (-sh))
    d_sh = sc * (((1 - sh) * np.log(1 - p)) / (sh * (1 - sh))
                 + (1 - y ** (-sh)) / sh ** 2)
    variance = (d_loc ** 2 * varloc + d_sc ** 2 * varsc + d_sh ** 2 * varsh
                + 2 * d_loc * d_sc * covlocsc
                + 2 * d_loc * d_sh * covlocsh
                + 2 * d_sc * d_sh * covscsh)
    return variance ** 0.5 * 1.645
1162
+
1163
+
1164
+
1165
+
1166
+
1167
def gumCI(retp, loc, sc,
          varloc, varsc,
          covlocsc):
    """90% confidence half-width of the Gumbel return level for period *retp*.

    Delta method on the fitted location/scale (co)variances, scaled by
    the 1.645 normal quantile.
    """
    p = 1 / retp
    d_loc = 1
    d_sc = -np.log(-np.log(1 - p))
    variance = d_loc ** 2 * varloc + d_sc ** 2 * varsc + 2 * d_loc * d_sc * covlocsc
    return variance ** 0.5 * 1.645
1176
+
1177
+
1178
def RGum(retp, nyears, loc, sca):
    """Gumbel return level for return period *retp*.

    *nyears* is kept for API compatibility but unused.
    """
    p = 1 / retp  # *nyears) — kept from original note
    return loc - sca * np.log(-np.log(1 - p))
1182
+
1183
def Gum_frequency(value, loc, sca):
    """Return period associated with *value* under a Gumbel(loc, sca) law."""
    exceed_p = 1 - np.exp(-np.exp((loc - value) / sca))
    return 1 / exceed_p
1187
+
1188
def extreme_matrix(fn,ret_per=20,value=50,mx=80,my=50,abs_or_retour=1,ydays=365,start_year=2011,end_year=2040,nts=24,gpd_gev_gum=0):
    """Build an (mx, my) map of extreme-value statistics from a fitted-parameter text file.

    Each data line of *fn* holds the fitted distribution parameters of one
    grid pixel; column 0 is a 1-based flattened pixel index decoded into
    (i, j) with j varying fastest over *my* columns.

    Parameters
    ----------
    fn : str
        Parameter file (whitespace-separated, one header line).
    ret_per : return period in years (return-level mode).
    value : scalar or (mx, my) array of values (frequency mode).
    mx, my : grid dimensions.
    abs_or_retour : 1 -> return level for *ret_per*;
                    0 -> return period of *value*;
                    2 -> confidence interval (GPD only).
    ydays : days per year, converts the return period to events.
    start_year, end_year : fitting period (used for the exceedance rate).
    nts : time steps per day (unused here).
    gpd_gev_gum : 0 -> GPD columns, 1 -> GEV columns, 2 -> Gumbel columns.

    Returns
    -------
    (mx, my) numpy array, NaN where no estimate was computed.

    NOTE(review): the file handle is never closed, and in the GEV branch
    'if sha==0:continue;indice_suivi+=1' the increment after 'continue' is
    dead code (and 'continue' skips the loop-end counter increment).
    """
    # print('ok')

    f=open(fn,'r')
    indice_suivi=0  # line counter; value 0 skips the header line

    mat=np.zeros([mx,my])*float('nan')
    mv=np.array(ret_per*ydays)  # return period expressed in days/events
    for line in f:
        if indice_suivi>0:
            lines=line.strip()
            columns=lines.split()
            # print(indice_suivi)
            # print(columns[0])
            # Decode the 1-based flat pixel index into (i, j).
            if int(columns[0])<=my:
                i=0;j=int(columns[0])
            else:
                j=int(int(columns[0])%my)-1
                i=int((int(columns[0])-j)/my)
            # if int(columns[0])<1000:print(int(columns[0]),i,j)
            # print(i,j)
            if j==0:j=my-1;i=i-1
            else:j=j-1
            if gpd_gev_gum==0:
                # GPD fit: scale, shape, event/cluster counts, threshold, (co)variances.
                sca=float(columns[1])
                sha=float(columns[2])
                ne=float(columns[7])
                ncl=float(columns[8])
                th=float(columns[9])
                varsh=float(columns[4])
                varsc=float(columns[3])
                cov=float(columns[5])
                # pu: exceedance rate per day; teta: extremal index (clusters/events).
                pu=ne/(ydays*(end_year-start_year+1));teta=ncl/ne
                if abs_or_retour:

                    if sha==0:
                        continue  # no valid fit for this pixel
                    mat[i,j]=ct.RGPD(mv,sha,sca,pu,teta,th)
                elif abs_or_retour==0:
                    if sha==0:
                        continue
                    if pd.isna(value[i,j])==False:
                        mat[i,j]=ct.GPD_frequency(value[i,j], sha ,sca, pu, teta, th, ydays)
                elif abs_or_retour==2:
                    mat[i,j]=ct.CIGPD(mv, sha, sca, pu, teta, th, varsc, varsh, cov)
            elif gpd_gev_gum==1:
                # GEV fit columns.
                loc=float(columns[10])
                sca=float(columns[11])
                sha=float(columns[12])
                if sha==0:continue;indice_suivi+=1
                nye=end_year-start_year+1
                if abs_or_retour: mat[i,j]=ct.RGEV(ret_per,nye,loc,sca,sha)
                else:mat[i,j]=ct.GEV_frequency(value[i,j],loc,sca, sha)

            elif gpd_gev_gum==2:
                # Gumbel fit columns.
                nye=end_year-start_year+1
                loc=float(columns[13])
                sca=float(columns[14])
                if abs_or_retour: mat[i,j]=ct.RGum(ret_per,nye,loc,sca)
                else:mat[i,j]=ct.Gum_frequency(value[i,j],loc,sca)
        indice_suivi+=1
    return(mat)
1250
+
1251
def extreme_matrix_V2(fn,ret_per=20,value=50,mx=80,my=50,
                      abs_or_retour=1,ydays=365,
                      start_year=2011,end_year=2040,
                      nts=24,
                      gpd_gev_gum=0,unst_st=0,var_unst='MKam',y_unst=2021):
    """Build an (mx, my) map of extreme-value statistics from a fitted-parameter file.

    Same purpose as extreme_matrix(), but reads the file through
    ct.df_from_file() and additionally supports non-stationary fits.

    Parameters
    ----------
    fn : str
        Parameter file readable by ct.df_from_file (one row per pixel;
        'indice' = 1-based flattened pixel index, j varying fastest).
    ret_per : return period in years (return-level mode).
    value : scalar or (mx, my) array (frequency mode).
    mx, my : grid dimensions.
    abs_or_retour : 1 -> return level; 0 -> return period of *value*;
                    2 -> confidence interval (GPD only).
    ydays : days per year.
    start_year, end_year : fitting period.
    nts : time steps per day (unused here).
    gpd_gev_gum : 0 -> GPD, 1 -> GEV, 2 -> Gumbel columns.
    unst_st : 0 -> stationary fit; 2 -> just map the column *var_unst*;
              any other value -> non-stationary fit evaluated at year
              *y_unst* (parameters shifted by the trend column 'slam').
    var_unst, y_unst : see unst_st.

    Returns
    -------
    (mx, my) numpy array, NaN where no estimate was computed.
    """
    data=ct.df_from_file(fn)
    mat=np.zeros([mx,my])*float('nan')
    mv=np.array(ret_per*ydays)  # return period expressed in days/events

    for p in range(data.shape[0]):
        # Decode the 1-based flat pixel index into (i, j).
        ind_pix=data['indice'][p]
        if ind_pix<=my:
            i=0;j=ind_pix
        else:
            j=int(ind_pix%my)-1
            i=int((ind_pix-j)/my)
        if j==0:j=my-1;i=i-1
        else:j=j-1
        if unst_st==0:
            # ---- stationary fits ----
            if gpd_gev_gum==0:
                sca=data['sc'][p]
                sha=data['sh'][p]
                ne=data['ne'][p]
                ncl=data['nc'][p]
                th=data['th'][p]
                varsh=data['varsh'][p]
                varsc=data['varsc'][p]
                cov=data['cov'][p]
                # pu: exceedance rate per day; teta: extremal index.
                pu=ne/(ydays*(end_year-start_year+1));teta=ncl/ne
                if abs_or_retour:
                    if sha==0:
                        continue  # no valid fit for this pixel
                    mat[i,j]=ct.RGPD(mv,sha,sca,pu,teta,th)
                elif abs_or_retour==0:
                    if sha==0:
                        continue
                    if pd.isna(value[i,j])==False:
                        mat[i,j]=ct.GPD_frequency(value[i,j], sha ,sca, pu, teta, th, ydays)
                elif abs_or_retour==2:
                    mat[i,j]=ct.CIGPD(mv, sha, sca, pu, teta, th, varsc, varsh, cov)
            elif gpd_gev_gum==1:
                loc=data['GEVloc'][p]
                sca=data['GEVscale'][p]
                sha=data['GEVshape'][p]
                if sha==0:continue
                nye=end_year-start_year+1
                if abs_or_retour: mat[i,j]=ct.RGEV(ret_per,nye,loc,sca,sha)
                else:mat[i,j]=ct.GEV_frequency(value[i,j],loc,sca, sha)

            elif gpd_gev_gum==2:
                nye=end_year-start_year+1
                loc=data['GUMshape'][p]
                sca=data['GUMscale'][p]
                if abs_or_retour: mat[i,j]=ct.RGum(ret_per,nye,loc,sca)
                else:mat[i,j]=ct.Gum_frequency(value[i,j],loc,sca)
        elif unst_st==2:
            # Direct mapping of an arbitrary column (e.g. a trend statistic).
            mat[i,j]=data[var_unst][p]
        else:
            # ---- non-stationary fits: parameters shifted to year y_unst ----
            if gpd_gev_gum==0:
                sca=data['sc'][p]
                sha=data['sh'][p]
                ne=data['ne'][p]
                ncl=data['nc'][p]
                th=data['th'][p]
                varsh=data['varsh'][p]
                varsc=data['varsc'][p]
                cov=data['cov'][p]
                slam=data['slam'][p]
                # BUGFIX: pu and teta were never computed in this branch in the
                # original (NameError when ct.RGPD/ct.GPD_frequency were called);
                # mirror the stationary branch above.
                pu=ne/(ydays*(end_year-start_year+1));teta=ncl/ne
                # Linear trend applied to threshold and scale.
                th=th+(y_unst-start_year)*slam
                sca=sca+(y_unst-start_year)*slam
                if abs_or_retour:
                    if sha==0:
                        continue
                    mat[i,j]=ct.RGPD(mv,sha,sca,pu,teta,th)
                elif abs_or_retour==0:
                    if sha==0:
                        continue
                    if pd.isna(value[i,j])==False:
                        mat[i,j]=ct.GPD_frequency(value[i,j], sha ,sca, pu, teta, th, ydays)
                elif abs_or_retour==2:
                    mat[i,j]=ct.CIGPD(mv, sha, sca, pu, teta, th, varsc, varsh, cov)
            elif gpd_gev_gum==1:
                loc=data['GEVloc'][p]
                sca=data['GEVscale'][p]
                sha=data['GEVshape'][p]
                slam=data['slam'][p]
                # Linear trend applied to location and scale.
                loc=loc+(y_unst-start_year)*slam
                sca=sca+(y_unst-start_year)*slam
                if sha==0:continue
                nye=end_year-start_year+1
                if abs_or_retour: mat[i,j]=ct.RGEV(ret_per,nye,loc,sca,sha)
                else:mat[i,j]=ct.GEV_frequency(value[i,j],loc,sca, sha)

            elif gpd_gev_gum==2:
                nye=end_year-start_year+1
                loc=data['GUMshape'][p]
                sca=data['GUMscale'][p]
                if abs_or_retour: mat[i,j]=ct.RGum(ret_per,nye,loc,sca)
                else:mat[i,j]=ct.Gum_frequency(value[i,j],loc,sca)

    return(mat)
1365
+
1366
def find_clusters(TS1):
    """Decluster a time series: keep the maxima of runs above the 99th percentile.

    Values below the series' 99th percentile are masked out; exceedances
    separated by fewer than 7 consecutive masked steps belong to the same
    cluster, and only each cluster's maximum (with its index) is kept.

    Returns
    -------
    (maxima, indexes) : 1-D numpy arrays of cluster peak values and positions.
    """
    peaks = np.zeros(0)
    peak_pos = np.zeros(0)
    run_gap = 7  # cluster separation: consecutive sub-threshold steps
    threshold = np.quantile(TS1, 0.99)
    masked = np.array(TS1)
    masked[TS1 < threshold] = float('nan')
    n_obs = np.size(masked)
    pos = 0
    while pos < n_obs:
        if not pd.isna(masked[pos]):
            # Start of a cluster: scan ahead until run_gap masked steps in a row.
            best = masked[pos]
            best_pos = pos
            ahead = pos + 1
            gap = 0
            while gap < run_gap and ahead < n_obs:
                gap += 1
                if not pd.isna(masked[ahead]):
                    gap = 0  # still inside the cluster
                    if masked[ahead] > best:
                        best = masked[ahead]
                        best_pos = ahead
                ahead += 1
            peaks = np.append(peaks, best)
            peak_pos = np.append(peak_pos, best_pos)
            pos = ahead
        pos += 1
    return (peaks, peak_pos)
1400
+
1401
+
1402
def df_from_file(fn):
    """Read a whitespace-delimited numeric table into a DataFrame.

    The first non-empty line holds the column names; every following line
    holds one row of float values (same number of tokens as the header —
    including the first column, which is parsed as a float like the rest).

    Parameters
    ----------
    fn : str
        Path of the text file.

    Returns
    -------
    pandas.DataFrame with float values and the header tokens as columns.
    """
    vnames = None
    rows = []
    # 'with' guarantees the file handle is closed (the original leaked it).
    with open(fn, mode='r') as f:
        for line in f:
            tokens = line.strip().split()
            if not tokens:
                continue  # skip blank lines
            if vnames is None:
                vnames = tokens
            else:
                # Accumulate in a Python list: O(n) total instead of the
                # original per-row np.append, which is O(n^2).
                rows.append([float(tok) for tok in tokens])

    data = pd.DataFrame(np.array(rows))
    data.columns = vnames
    return data