metradar 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. metradar/__init__.py +7 -0
  2. metradar/cnrad_level2.py +1326 -0
  3. metradar/comm_func.py +135 -0
  4. metradar/construct_aws_refvpr_mainprog.py +515 -0
  5. metradar/construct_aws_refvpr_mainprog_cams.py +310 -0
  6. metradar/construct_aws_refvpr_mainprog_datan3d.py +386 -0
  7. metradar/construct_aws_refvpr_mainprog_swan.py +306 -0
  8. metradar/decode_fmt_pyart.py +200 -0
  9. metradar/decode_pup_rose.py +1993 -0
  10. metradar/draw_mosaic_new.py +421 -0
  11. metradar/draw_radar_aws_jilin_new.py +206 -0
  12. metradar/draw_radar_comp_func.py +1379 -0
  13. metradar/exceptions.py +50 -0
  14. metradar/geo_transforms_pyart.py +627 -0
  15. metradar/get_cross_section_from_pyart.py +354 -0
  16. metradar/get_tlogp_from_sharppy.py +93 -0
  17. metradar/grid.py +281 -0
  18. metradar/grid_data.py +64 -0
  19. metradar/main_pydda.py +653 -0
  20. metradar/make_gif.py +24 -0
  21. metradar/make_mosaic_mp_archive.py +538 -0
  22. metradar/mosaic_merge.py +64 -0
  23. metradar/mosaic_quickdraw.py +338 -0
  24. metradar/nowcast_by_pysteps.py +219 -0
  25. metradar/oa_couhua.py +166 -0
  26. metradar/oa_dig_func.py +955 -0
  27. metradar/parse_pal.py +148 -0
  28. metradar/pgmb_io.py +169 -0
  29. metradar/prepare_for_radar_draw.py +197 -0
  30. metradar/read_new_mosaic.py +33 -0
  31. metradar/read_new_mosaic_func.py +231 -0
  32. metradar/retrieve_cmadaas.py +3126 -0
  33. metradar/retrieve_micaps_server.py +2061 -0
  34. metradar/rose_structer.py +807 -0
  35. metradar/trans_nc_pgmb.py +62 -0
  36. metradar/trans_new_mosaic_nc.py +309 -0
  37. metradar/trans_polor2grid_func.py +203 -0
  38. metradar-0.1.0.dist-info/METADATA +12 -0
  39. metradar-0.1.0.dist-info/RECORD +41 -0
  40. metradar-0.1.0.dist-info/WHEEL +5 -0
  41. metradar-0.1.0.dist-info/top_level.txt +1 -0
metradar/oa_dig_func.py
@@ -0,0 +1,955 @@
1
+ '''
2
+ Objective analysis program for automatic weather station (AWS) data
3
+ 朱文剑
4
+ 20220221
5
+ '''
6
+
7
+ # %%
8
+ import retrieve_micaps_server as mserver
9
+ import pandas as pd
10
+ import matplotlib.pyplot as plt
11
+ import xarray as xr
12
+ import numpy as np
13
+ from collections import OrderedDict
14
+
15
+ import os
16
+ import gc
17
+ from datetime import datetime,timedelta
18
+ from oa_couhua import interp_sg_oa
19
+ import matplotlib
20
+ matplotlib.use('Agg')
21
+
22
+ def remove_nan_observations(x, y, z):
23
+ r"""Remove all x, y, and z where z is nan.
24
+
25
+ Will not destroy original values.
26
+
27
+ Parameters
28
+ ----------
29
+ x: array_like
30
+ x coordinate
31
+ y: array_like
32
+ y coordinate
33
+ z: array_like
34
+ observation value
35
+
36
+ Returns
37
+ -------
38
+ x, y, z
39
+ List of coordinate observation pairs without
40
+ nan valued observations.
41
+
42
+ """
43
+ x_ = x[~np.isnan(z)]
44
+ y_ = y[~np.isnan(z)]
45
+ z_ = z[~np.isnan(z)]
46
+
47
+ return x_, y_, z_
48
+
49
+ def remove_repeat_coordinates(x, y, z):
50
+ r"""Remove all x, y, and z where (x,y) is repeated and keep the first occurrence only.
51
+
52
+ Will not destroy original values.
53
+
54
+ Parameters
55
+ ----------
56
+ x: array_like
57
+ x coordinate
58
+ y: array_like
59
+ y coordinate
60
+ z: array_like
61
+ observation value
62
+
63
+ Returns
64
+ -------
65
+ x, y, z
66
+ List of coordinate observation pairs without
67
+ repeated coordinates.
68
+
69
+ """
70
+ coords = []
71
+ variable = []
72
+
73
+ for (x_, y_, t_) in zip(x, y, z):
74
+ if (x_, y_) not in coords:
75
+ coords.append((x_, y_))
76
+ variable.append(t_)
77
+
78
+ coords = np.array(coords)
79
+
80
+ x_ = coords[:, 0]
81
+ y_ = coords[:, 1]
82
+
83
+ z_ = np.array(variable)
84
+
85
+ return x_, y_, z_
86
+
87
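+ # Collect the vertex arrays of every contour path from a matplotlib ContourSet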
+ def get_contour_verts(cn):
88
+ contours = []
89
+ # for each contour line
90
+ for cc in cn.collections:
91
+ paths = []
92
+ # for each separate section of the contour line
93
+ for pp in cc.get_paths():
94
+ paths.append(pp.vertices)
95
+ contours.append(paths)
96
+
97
+ return contours
98
+
99
+ # Read colour-scale data from a colour-scale file
100
+ def get_colordf_from_file(colorfile='NCV_bright.rgb',reverse=False):
101
+ filepath = 'colors_ncl'
102
+
103
+ # filename = 'NCV_bright.rgb'
104
+ if not os.path.exists(filepath + os.sep + colorfile):
105
+ print(filepath + os.sep + colorfile + ' does not exist!')
106
+ return None
107
+
108
+ try:
109
+ fin = open(filepath + os.sep + colorfile,'rt')
110
+ line=' '
111
+ line = fin.readline()
112
+ line = fin.readline()
113
+ r=[]
114
+ g=[]
115
+ b=[]
116
+ while True:
117
+ line = fin.readline()
118
+ if line == '':
119
+ break
120
+ tmp =line.split(' ')
121
+
122
+ while '' in tmp:
123
+ tmp.remove('')
124
+ # print(tmp)
125
+ r.append(int(tmp[0]))
126
+ g.append(int(tmp[1]))
127
+ b.append(int(tmp[2]))
128
+ if reverse:
129
+ r = np.flipud(r)
130
+ g = np.flipud(g)
131
+ b = np.flipud(b)
132
+ df = pd.DataFrame({'r':r,'g':g,'b':b})
133
+ fin.close()
134
+ return df
135
+ except Exception:
136
+ return None
137
+
138
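+ # Default RGB colour table for contour lines (used by trans_gr2 when no colour file is loaded)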
+ colors_gr2=[
139
+ [180, 215, 158],
140
+ [180, 180, 255],
141
+ [120, 120, 255],
142
+ [74, 199, 255],
143
+ [ 0, 180, 180],
144
+ [ 0, 255, 255],
145
+ [ 0, 255, 0],
146
+ [255, 255, 0],
147
+ [255, 128, 0],
148
+ [255, 0, 0],
149
+ [160, 0, 0],
150
+ [255, 0, 255],
151
+ [128, 0, 128],
152
+ [255, 255, 255]
153
+ ]
154
+
155
+ # Objective analysis class
156
+ # Modelled on diagnostic-analysis libraries such as MetPy
157
+ class Object_Analyst:
158
+ def __init__(self) -> None:
159
+ self.debug_level = 0
160
+ self.minlon = 100
161
+ self.maxlon = 120
162
+ self.minlat = 30
163
+ self.maxlat = 45
164
+ self.reso=0.025
+ # Default time type (assumed UTC); call set_time_type('BJT') for Beijing-time output
+ self.time_type = 'UTC'
165
+
166
+ # Set whether times are Beijing time (BJT) or UTC
167
+ def set_time_type(self,ttype):
168
+ self.time_type = ttype
169
+
170
+ def set_reso(self,reso):
171
+ self.reso = reso
172
+
173
+ def set_debug(self,level):
174
+ self.debug_level=level
175
+
176
+ # Set the longitude/latitude bounds of the analysis domain
177
+ def set_boundary(self,minlon,maxlon,minlat,maxlat):
178
+ self.minlon = minlon
179
+ self.maxlon = maxlon
180
+ self.minlat = minlat
181
+ self.maxlat = maxlat
182
+ pass
183
+
184
+ # Set the current analysis time
185
+ def set_time(self,tstr='20220511141000'):
186
+ self.time_str=tstr
187
+
188
+ # Set the X coordinate
189
+ def set_xcord(self,data_array):
190
+ self.xcord = data_array
191
+
192
+
193
+ # Set the Y coordinate
194
+ def set_ycord(self,data_array):
195
+ self.ycord = data_array
196
+
197
+ # Set the variable to be objectively analysed
198
+ def set_vardata(self,data_array):
199
+ self.vardata = data_array
200
+
201
+ # Check whether a point lies inside the boundary
202
+ def is_in_boundary(self,point_lon,point_lat):
203
+ if point_lon < self.minlon \
204
+ or point_lon > self.maxlon \
205
+ or point_lat < self.minlat \
206
+ or point_lat > self.maxlat:
207
+ return False
208
+ else:
209
+ return True
210
+
211
+ # Objective analysis of 5-minute data downloaded from CMADaaS (天擎), passed in as a DataFrame read from a CSV file
212
+ def do_oa_csv_df(self,newdata=None,vartype='温度'):
213
+
214
+ if newdata is None:
215
+ print('newdata is None!')
216
+ return None
217
+
218
+
219
+ if self.debug_level > 0:
220
+ print(newdata.columns)
221
+
222
+ validindex=[]
223
+ for nn in range(newdata.shape[0]):
224
+ if not self.is_in_boundary(newdata['Lon'][nn],newdata['Lat'][nn]):
225
+ continue
226
+ else:
227
+ validindex.append(nn)
228
+
229
+
230
+ if len(validindex) <=10:
231
+ print('valid data less than 10, skip!')
232
+ return None
233
+
234
+ # print(validindex)
235
+
236
+ defaultvalue=999999
237
+ # Note: missing values must be set to NaN
238
+ latlist = newdata['Lat'][validindex].values
239
+ lonlist = newdata['Lon'][validindex].values
240
+ templist = newdata['TEM'][validindex].values
241
+ rhlist = newdata['RHU'][validindex].values
242
+ dtlist = newdata['DPT'][validindex].values
243
+ sprslist = newdata['PRS_Sea'][validindex].values
244
+
245
+ self.set_boundary(np.min(lonlist),np.max(lonlist),np.min(latlist),np.max(latlist))
246
+ if vartype.find('温度')>=0:
247
+ flag = templist < defaultvalue
248
+ lonlist = lonlist[flag]
249
+ latlist = latlist[flag]
250
+ templist = templist[flag]
251
+
252
+ params={}
253
+ params['in_lon'] = lonlist
254
+ params['in_lat'] = latlist
255
+ params['in_data'] = templist
256
+ params['out_varname'] = 't2m'
257
+ params['out_long_name'] = 'surface temperature objective analysis'
258
+ params['out_short_name'] = 'oa_t'
259
+ params['out_units'] = 'degC'
260
+ params['method'] = 'couhua'
261
+ params['tipname'] = '温度'
262
+ outdata = self.do_oa_base(params)
263
+
264
+ elif vartype.find('露点')>=0:
265
+ flag = dtlist < defaultvalue
266
+ lonlist = lonlist[flag]
267
+ latlist = latlist[flag]
268
+ dtlist = dtlist[flag]
269
+
270
+ params={}
271
+ params['in_lon'] = lonlist
272
+ params['in_lat'] = latlist
273
+ params['in_data'] = dtlist
274
+ params['out_varname'] = 'td2m'
275
+ params['out_long_name'] = 'surface dewpoint temperature objective analysis'
276
+ params['out_short_name'] = 'oa_td'
277
+ params['out_units'] = 'degC'
278
+ params['method'] = 'couhua'
279
+ params['tipname'] = '露点温度'
280
+ outdata = self.do_oa_base(params)
281
+
282
+ elif vartype.find('湿度')>=0:
283
+ flag = rhlist < defaultvalue
284
+ lonlist = lonlist[flag]
285
+ latlist = latlist[flag]
286
+ rhlist = rhlist[flag]
287
+
288
+ params={}
289
+ params['in_lon'] = lonlist
290
+ params['in_lat'] = latlist
291
+ params['in_data'] = rhlist
292
+ params['out_varname'] = 'rh2m'
293
+ params['out_long_name'] = 'surface relative humidity objective analysis'
294
+ params['out_short_name'] = 'oa_rh'
295
+ params['out_units'] = '%'
296
+ params['method'] = 'couhua'
297
+ params['tipname'] = '相对湿度'
298
+ outdata = self.do_oa_base(params)
299
+
300
+ elif vartype.find('气压')>=0:
301
+ flag = sprslist < defaultvalue
302
+ lonlist = lonlist[flag]
303
+ latlist = latlist[flag]
304
+ sprslist = sprslist[flag]
305
+
306
+ params={}
307
+ params['in_lon'] = lonlist
308
+ params['in_lat'] = latlist
309
+ params['in_data'] = sprslist
310
+ params['out_varname'] = 'sprs2m'
311
+ params['out_long_name'] = 'sea level pressure objective analysis'
312
+ params['out_short_name'] = 'oa_sprs'
313
+ params['out_units'] = 'hPa'
314
+ params['method'] = 'couhua'
315
+ params['tipname'] = '海平面气压'
316
+ outdata = self.do_oa_base(params)
317
+
318
+ return outdata
319
+
320
+ # Objective analysis of 5-minute data downloaded from CMADaaS (天擎), stored as a CSV file
321
+ def do_oa_csv(self,filename,vartype='温度'):
322
+
323
+ if not os.path.exists(filename):
324
+ print(filename + ' does not exist!')
325
+ return None
326
+ newdata = pd.read_csv(filename,encoding='gbk')
327
+
328
+ if self.debug_level > 0:
329
+ print(newdata.columns)
330
+
331
+ validindex=[]
332
+ for nn in range(newdata.shape[0]):
333
+ if not self.is_in_boundary(newdata['Lon'][nn],newdata['Lat'][nn]):
334
+ continue
335
+ else:
336
+ validindex.append(nn)
337
+
338
+
339
+ if len(validindex) <=10:
340
+ print('valid data less than 10, skip!')
341
+ return None
342
+
343
+ # print(validindex)
344
+
345
+ defaultvalue=999999
346
+ tipname = ''
347
+ # Note: missing values must be set to NaN
348
+ if vartype.find('温度')>=0:
349
+ varname = 'TEM'
350
+ tipname = '温度'
351
+ elif vartype.find('湿度')>=0:
352
+ varname = 'RHU'
353
+ tipname = '相对湿度'
354
+ elif vartype.find('气压')>=0:
355
+ varname = 'PRS_Sea'
356
+ tipname = '海平面气压'
357
+ elif vartype.find('露点')>=0:
358
+ varname = 'DPT'
359
+ tipname = '露点温度'
360
+ elif vartype.find('变温')>=0:
361
+ for var in newdata.columns:
362
+ if var.find('TEM_delta')>=0:
363
+ varname = var
364
+ tipname = '%s小时变温'%var.split('_')[-1][0]
365
+ break
366
+ elif vartype.find('变压')>=0:
367
+ for var in newdata.columns:
368
+ if var.find('PRS_Sea_delta')>=0:
369
+ varname = var
370
+ tipname = '%s小时变压'%var.split('_')[-1][0]
371
+ break
372
+
373
+
374
+ latlist = newdata['Lat'][validindex].values
375
+ lonlist = newdata['Lon'][validindex].values
376
+ # templist = newdata['TEM'][validindex].values
377
+ # rhlist = newdata['RHU'][validindex].values
378
+ # dtlist = newdata['DPT'][validindex].values
379
+ # sprslist = newdata['PRS_Sea'][validindex].values
380
+ datalist = newdata[varname][validindex].values
381
+
382
+ self.set_boundary(np.min(lonlist),np.max(lonlist),np.min(latlist),np.max(latlist))
383
+ flag = datalist < defaultvalue
384
+ lonlist = lonlist[flag]
385
+ latlist = latlist[flag]
386
+ datalist = datalist[flag]
387
+ params={}
388
+ params['in_lon'] = lonlist
389
+ params['in_lat'] = latlist
390
+ params['in_data'] = datalist
391
+ params['tipname'] = tipname
392
+ if vartype.find('温度')>=0:
393
+ params['out_varname'] = 't2m'
394
+ params['out_long_name'] = 'surface temperature objective analysis'
395
+ params['out_short_name'] = 'oa_t'
396
+ params['out_units'] = 'degC'
397
+ params['method'] = 'couhua'
398
+ outdata = self.do_oa_base(params)
399
+ elif vartype.find('露点')>=0:
400
+ params['out_varname'] = 'td2m'
401
+ params['out_long_name'] = 'surface dewpoint temperature objective analysis'
402
+ params['out_short_name'] = 'oa_td'
403
+ params['out_units'] = 'degC'
404
+ params['method'] = 'couhua'
405
+ outdata = self.do_oa_base(params)
406
+ elif vartype.find('湿度')>=0:
407
+ params['out_varname'] = 'rh2m'
408
+ params['out_long_name'] = 'surface relative humidity objective analysis'
409
+ params['out_short_name'] = 'oa_rh'
410
+ params['out_units'] = '%'
411
+ params['method'] = 'couhua'
412
+ outdata = self.do_oa_base(params)
413
+ elif vartype.find('气压')>=0:
414
+ params['out_varname'] = 'sprs2m'
415
+ params['out_long_name'] = 'sea level pressure objective analysis'
416
+ params['out_short_name'] = 'oa_sprs'
417
+ params['out_units'] = 'hPa'
418
+ params['method'] = 'couhua'
419
+ outdata = self.do_oa_base(params)
420
+ elif vartype.find('变温')>=0:
421
+ params['out_varname'] = 't2m_delta_' + varname.split('_')[-1]
422
+ params['out_long_name'] = 'surface temperature delta objective analysis'
423
+ params['out_short_name'] = 'oa_t_delta' + varname.split('_')[-1]
424
+ params['out_units'] = 'degC'
425
+ params['method'] = 'couhua'
426
+ outdata = self.do_oa_base(params)
427
+ elif vartype.find('变压')>=0:
428
+ params['out_varname'] = 'sprs2m_delta_' + varname.split('_')[-1]
429
+ params['out_long_name'] = 'sea level pressure delta objective analysis'
430
+ params['out_short_name'] = 'oa_sprs_delta' + varname.split('_')[-1]
431
+ params['out_units'] = 'hPa'
432
+ params['method'] = 'couhua'
433
+ outdata = self.do_oa_base(params)
434
+ return outdata
435
+
436
+ # Base routine that performs the objective analysis
437
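+ # Steps: drop NaN and duplicate stations, filter 4-sigma outliers, interpolate to a regular lon/lat grid, and return the field as an xarray Dataset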
+ def do_oa_base(self,params):
438
+
439
+ if not 'method' in params.keys():
440
+ print('key "method" should be set')
441
+ return False
442
+
443
+ if not 'in_lon' in params.keys():
444
+ print('key "in_lon" should be set')
445
+ return False
446
+
447
+ if not 'in_lat' in params.keys():
448
+ print('key "in_lat" should be set')
449
+ return False
450
+
451
+ if not 'in_data' in params.keys():
452
+ print('key "in_data" should be set')
453
+ return False
454
+
455
+ if not 'out_varname' in params.keys():
456
+ print('key "out_varname" should be set')
457
+ return False
458
+
459
+ if not 'out_long_name' in params.keys():
460
+ print('key "out_long_name" should be set')
461
+ return False
462
+
463
+ if not 'out_short_name' in params.keys():
464
+ print('key "out_short_name" should be set')
465
+ return False
466
+
467
+ if not 'out_units' in params.keys():
468
+ print('key "out_units" should be set')
469
+ return False
470
+
471
+ lonlist = params['in_lon']
472
+ latlist = params['in_lat']
473
+ data = params['in_data']
474
+ x_masked, y_masked, data = remove_nan_observations(lonlist, latlist, data)
475
+ x_masked, y_masked, data = remove_repeat_coordinates(x_masked, y_masked, data)
476
+
477
+ # 4sigma
478
+ def four_sigma(df):
479
+ mean=df.data.mean()
480
+ std=df.data.std()
481
+ upper_limit=mean+4*std
482
+ lower_limit=mean-4*std
483
+ df['anomaly']=df.data.apply(lambda x: 1 if (x>upper_limit ) or (x<lower_limit) else 0)
484
+ return df
485
+
486
+ # Outlier check: flag values outside mean ± 4 standard deviations
487
+ tmpdata = pd.DataFrame(data,columns=['data'])
488
+ df1 = four_sigma(tmpdata)
489
+ # df1[df1['anomaly']==1]
490
+
491
+ x_masked = x_masked[df1['anomaly']==0]
492
+ y_masked = y_masked[df1['anomaly']==0]
493
+ data = data[df1['anomaly']==0]
494
+
495
+ if self.debug_level > 0:
496
+ print('finished reading data!')
497
+
498
+ # self.set_reso(0.05)
499
+ # Interpolate to the analysis grid
500
+ # hreso = 0.05
501
+ if params['method'] != "couhua":
502
+ pass
503
+ # gx, gy, gd = interpolate_to_grid(x_masked, y_masked, data,
504
+ # interp_type='cressman',hres=self.reso,
505
+ # minimum_neighbors=1,
506
+ # )
507
+ else:
508
+ # Use the objective analysis method from oa_couhua (interp_sg_oa)
509
+ # gx, gy, gd = interp_sg_oa(lonlist, latlist, data,hreso)
510
+ gx, gy, gd = interp_sg_oa(x_masked, y_masked, data,self.reso,[self.minlon,self.maxlon,self.minlat,self.maxlat],sm=20)
511
+ pass
512
+
513
+ # Swap the x and y axes of the objective analysis result
514
+ gd = gd.T
515
+ gx = gx.T
516
+ gy = gy.T
517
+
518
+ # Build an xarray Dataset and return it
519
+
520
+ # define coordinates
521
+ # time_coord = ('time', redic['time'][0])
522
+ lon_coord = ('lon', gx[:,0], {
523
+ 'long_name':'longitude', 'units':'degrees_east', '_CoordinateAxisType':'Lon'})
524
+ lat_coord = ('lat', gy[0,:], {
525
+ 'long_name':'latitude', 'units':'degrees_north', '_CoordinateAxisType':'Lat'})
526
+
527
+
528
+ # create xarray
529
+ varattrs={'long_name': params['out_long_name'], 'short_name': params['out_short_name'], 'units': params['out_units']}
530
+
531
+
532
+ result = xr.Dataset({params['out_varname']:(['lon', 'lat'], gd, varattrs),
533
+ },
534
+ coords={ 'lon':lon_coord, 'lat':lat_coord })
535
+
536
+ # add attributes
537
+ result.attrs['Conventions'] = "CF-1.6"
538
+ result.attrs['time'] = self.time_str# np.datetime64(datetime.strptime(self.time_str,'%Y%m%d%H%M%S',))
539
+ result.attrs['sta_maxvalue'] = np.max(data)
540
+ result.attrs['sta_minvalue'] = np.min(data)
541
+ if params['out_varname'] == 'sprs2m':
542
+ result.attrs['sta_maxvalue'] = (np.max(data) + 2.5)//2.5*2.5
543
+ result.attrs['sta_minvalue'] = (np.min(data) + 2.5)//2.5*2.5
544
+ result.attrs['varname'] = params['out_varname']
545
+ result.attrs['tipname'] = params['tipname']
546
+ return result
547
+
548
+ # Objective analysis of 5-minute MDFS data
549
+ def do_oa_mdfs(self,filename,vartype='温度'):
550
+ if not os.path.exists(filename):
551
+ print(filename + ' does not exist!')
552
+ return None
553
+
554
+ redic = mserver.get_stadata_from_mdfs(filename)
555
+ if self.debug_level > 0:
556
+ print(redic.columns)
557
+
558
+ newdata = redic[['ID','lon','lat','温度','露点温度','相对湿度','平均风速_2分钟','平均风向_2分钟']]
559
+ validindex=[]
560
+ for nn in range(newdata.shape[0]):
561
+ if not self.is_in_boundary(newdata['lon'][nn],newdata['lat'][nn]):
562
+ continue
563
+ else:
564
+ validindex.append(nn)
565
+ # print(validindex)
566
+
567
+ # Note: missing values must be set to NaN
568
+ latlist = newdata['lat'][validindex].values
569
+ lonlist = newdata['lon'][validindex].values
570
+ templist = newdata['温度'][validindex].values
571
+ rhlist = newdata['相对湿度'][validindex].values
572
+ dtlist = newdata['露点温度'][validindex].values
573
+ windspd = newdata['平均风速_2分钟'][validindex].values
574
+ winddir = newdata['平均风向_2分钟'][validindex].values
575
+
576
+ if vartype.find('温度')>=0:
577
+ params={}
578
+ params['in_lon'] = lonlist
579
+ params['in_lat'] = latlist
580
+ params['in_data'] = templist
581
+ params['out_varname'] = 't2m'
582
+ params['out_long_name'] = 'surface temperature objective analysis'
583
+ params['out_short_name'] = 'oa_t'
584
+ params['out_units'] = 'degC'
585
+ params['method'] = 'couhua'
586
+ outdata = self.do_oa_base(params)
587
+ elif vartype.find('露点')>=0:
588
+ params={}
589
+ params['in_lon'] = lonlist
590
+ params['in_lat'] = latlist
591
+ params['in_data'] = dtlist
592
+ params['out_varname'] = 'td2m'
593
+ params['out_long_name'] = 'surface dewpoint temperature objective analysis'
594
+ params['out_short_name'] = 'oa_td'
595
+ params['out_units'] = 'degC'
596
+ params['method'] = 'couhua'
597
+ outdata = self.do_oa_base(params)
598
+ elif vartype.find('湿度')>=0:
599
+ params={}
600
+ params['in_lon'] = lonlist
601
+ params['in_lat'] = latlist
602
+ params['in_data'] = rhlist
603
+ params['out_varname'] = 'rh2m'
604
+ params['out_long_name'] = 'surface relative humidity objective analysis'
605
+ params['out_short_name'] = 'oa_rh'
606
+ params['out_units'] = '%'
607
+ params['method'] = 'couhua'
608
+ outdata = self.do_oa_base(params)
609
+
610
+ # # wind
611
+ # wind_speed = (windspd * units('m/s'))
612
+ # wind_dir = winddir * units.degree
613
+ # good_indices = np.where((~np.isnan(wind_dir)) & (~np.isnan(wind_speed)))
614
+
615
+ # x_masked = lonlist[good_indices]
616
+ # y_masked = latlist[good_indices]
617
+ # wind_speed = wind_speed[good_indices]
618
+ # wind_dir = wind_dir[good_indices]
619
+ # u, v = wind_components(wind_speed, wind_dir)
620
+
621
+ # # uwind
622
+ # params['in_lon'] = x_masked
623
+ # params['in_lat'] = y_masked
624
+ # params['in_data'] = u.magnitude
625
+ # params['out_varname'] = 'u10m'
626
+ # params['out_long_name'] = '10m uwind objective analyse'
627
+ # params['out_short_name'] = 'oa_u10m'
628
+ # params['out_units'] = 'm/s'
629
+ # params['method'] = 'couhua'
630
+ # u10m = self.do_oa_base(params)
631
+
632
+ # # vwind
633
+ # params['in_lon'] = x_masked
634
+ # params['in_lat'] = y_masked
635
+ # params['in_data'] = v.magnitude
636
+ # params['out_varname'] = 'v10m'
637
+ # params['out_long_name'] = '10m vwind objective analyse'
638
+ # params['out_short_name'] = 'oa_v10m'
639
+ # params['out_units'] = 'm/s'
640
+ # params['method'] = 'couhua'
641
+ # v10m = self.do_oa_base(params)
642
+
643
+ # digdata = self.calc_vor_div(wind_speed,wind_dir,lonlist,latlist)
644
+
645
+ #
646
+
647
+ return outdata
648
+
649
+ # def calc_vor_div(self,wind_speed,wind_dir,lonlist,latlist):
650
+
651
+ # # Wind field interpolation
652
+
653
+ # good_indices = np.where((~np.isnan(wind_dir)) & (~np.isnan(wind_speed)))
654
+
655
+ # x_masked = lonlist[good_indices]
656
+ # y_masked = latlist[good_indices]
657
+ # wind_speed = wind_speed[good_indices]
658
+ # wind_dir = wind_dir[good_indices]
659
+ # u, v = wind_components(wind_speed, wind_dir)
660
+ # windgridx, windgridy, uwind = interpolate_to_grid(x_masked, y_masked, np.array(u),interp_type='cressman', hres=self.reso)
661
+
662
+ # _, _, vwind = interpolate_to_grid(x_masked, y_masked, np.array(v), interp_type='cressman',hres=self.reso)
663
+ # if self.debug_level > 0:
664
+ # print('interpolate done!')
665
+
666
+ # # %% Compute vorticity and divergence
667
+ # dx, dy = lat_lon_grid_deltas(longitude=windgridx, latitude=windgridy)
668
+ # vtx=vorticity(u=uwind*units.meter/units.seconds ,v=vwind*units.meter/units.seconds,dx=dx,dy=dy)
669
+ # div=divergence(u=uwind*units.meter/units.seconds ,v=vwind*units.meter/units.seconds,dx=dx,dy=dy)
670
+
671
+ # # Multiply by 1e5 for easier display
672
+ # vtx = vtx * 1e5
673
+ # div = div * 1e5
674
+ # # print(vtx.shape)
675
+ # # print(div.shape)
676
+ # if self.debug_level > 0:
677
+ # print('涡度和散度计算完毕!')
678
+
679
+ # # Build an xarray Dataset and return it
680
+
681
+ # # define coordinates
682
+ # # time_coord = ('time', redic['time'][0])
683
+ # lon_coord = ('lon', windgridx[0,:], {
684
+ # 'long_name':'longitude', 'units':'degrees_east', '_CoordinateAxisType':'Lon'})
685
+ # lat_coord = ('lat', windgridy[:,0], {
686
+ # 'long_name':'latitude', 'units':'degrees_north', '_CoordinateAxisType':'Lat'})
687
+
688
+
689
+ # # create xarray
690
+ # varattrs_vtx10m={'long_name': 'surface vortex ', 'short_name': 'oa_vtx_10m', 'units': '1e-5*1/s'}
691
+ # varattrs_div10m={'long_name': 'surface divergence ', 'short_name': 'oa_div_10m', 'units': '1e-5*1/s'}
692
+
693
+ # result = xr.Dataset({
694
+ # 'vtx_10m':(['lat', 'lon'], vtx.magnitude, varattrs_vtx10m),
695
+ # 'div_10m':(['lat', 'lon'], div.magnitude, varattrs_div10m),
696
+ # },
697
+ # coords={ 'lat':lat_coord, 'lon':lon_coord})
698
+
699
+ # # add attributes
700
+ # result.attrs['Conventions'] = "CF-1.6"
701
+
702
+ # # Smooth the computed results
703
+ # vtx_smooth=result.vtx_10m.rolling(lon=5, lat=5, min_periods=1, center=True).mean()
704
+ # result.vtx_10m.data = vtx_smooth.values
705
+
706
+ # div_smooth=result.div_10m.rolling(lon=5, lat=5, min_periods=1, center=True).mean()
707
+ # result.div_10m.data = div_smooth.values
708
+
709
+
710
+ # pass
711
+ # return result
712
+
713
+
714
+ # # Display the objective analysis result
715
+ # def display_var(self,):
716
+ # # Display
717
+ # to_proj = ccrs.LambertConformal(central_longitude=120, central_latitude=40)
718
+ # fig = plt.figure(figsize=(8, 7))
719
+
720
+ # view = fig.add_axes([0.05, 0.05, 0.9, 0.9]) # , projection=to_proj
721
+ # view.set_title('oa')
722
+
723
+ # shddata=div.magnitude
724
+ # # levels = list(range(10, 40, 1))
725
+ # levels = MaxNLocator(nbins=15).tick_values(-20,20) #shddata.min(), shddata.max()
726
+ # cmap = plt.get_cmap('rainbow')
727
+
728
+ # norm = BoundaryNorm(levels, ncolors=cmap.N, clip=True)
729
+
730
+ # mmb = view.pcolormesh(gx, gy, shddata, cmap=cmap, norm=norm)
731
+
732
+ # # add wind barbs
733
+ # sizesdic={'emptybarb':0}
734
+ # barb_incrementsdict={'half':2,'full':4,'flag':20}
735
+ # for xx in np.arange(0,windgridx.shape[0],3):
736
+ # for yy in np.arange(0,windgridx.shape[1],3):
737
+ # if abs(uwind[xx,yy]) > 1 and abs(vwind[xx,yy]) > 1:
738
+ # view.barbs(windgridx[xx,yy], windgridy[xx,yy], uwind[xx,yy], vwind[xx,yy],sizes=sizesdic,
739
+ # alpha=.4, length=5,barb_increments=barb_incrementsdict)
740
+
741
+ # fig.colorbar(mmb, shrink=.6, pad=0.02, boundaries=levels)
742
+
743
+ # plt.savefig('pic/test.png', dpi=300, bbox_inches='tight')
744
+ # # plt.show()
745
+
746
+
747
+ # Convert the contours to GR2 placefile format
748
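+ # The placefile consists of an optional TimeRange header followed by Color/Line/Text records for each contour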
+ def trans_gr2(self,data,outpath,outname,badd_time_flag=False,m_pret=None,aws_duration=6,bhistory=False,encoding='gbk'):
749
+ '''
750
+ m_pret: time of the previous run, used when building historical data
751
+ badd_time_flag: whether to add time information
752
+ aws_duration: how long the data remains displayed, in minutes
753
+ bhistory: whether this is historical batch-processing mode
754
+ '''
755
+ varname = list(data.variables)[0]
756
+
757
+ # if maxvalue is None or minvalue is None:
758
+ # LEVS = np.linspace(np.nanmin(data[varname].values.flatten()),np.nanmax(data[varname].values.flatten()),len(colors_gr2))
759
+ # else:
760
+ # LEVS = np.linspace(minvalue,maxvalue,len(colors_gr2))
761
+
762
+ tipname = data.attrs['tipname']
763
+ if data.attrs['varname'] == 'sprs2m':
764
+ ccolors = get_colordf_from_file()
765
+ LEVS = np.arange(data.sta_minvalue,data.sta_maxvalue,2.5)
766
+ LEVS = LEVS.round(2)
767
+ if len(LEVS) > ccolors.shape[0]-1:
768
+ LEVS = LEVS[0:ccolors.shape[0]-1]
769
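+ # Resample the colour table so that each contour level is assigned a colour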
+ bb = np.linspace(0,ccolors.shape[0]-1,len(LEVS)).astype(int)
770
+ colors_gr2_new =[]
771
+ for i in bb:
772
+ colors_gr2_new.append(ccolors.iloc[i,:].values)
773
+ elif data.attrs['varname'].find('t2m_delta')>=0:
774
+ ccolors = get_colordf_from_file()
775
+ LEVS = np.arange(data.sta_minvalue,data.sta_maxvalue,0.5)
776
+ LEVS = LEVS.round(2)
777
+ if len(LEVS) > ccolors.shape[0]-1:
778
+ LEVS = LEVS[0:ccolors.shape[0]-1]
779
+ bb = np.linspace(0,ccolors.shape[0]-1,len(LEVS)).astype(int)
780
+ colors_gr2_new =[]
781
+ for i in bb:
782
+ colors_gr2_new.append(ccolors.iloc[i,:].values)
783
+ elif data.attrs['varname'].find('sprs2m_delta')>=0:
784
+ ccolors = get_colordf_from_file()
785
+ LEVS = np.arange(data.sta_minvalue,data.sta_maxvalue,0.1)
786
+ LEVS = LEVS.round(2)
787
+ if len(LEVS) > ccolors.shape[0]-1:
788
+ LEVS = LEVS[0:ccolors.shape[0]-1]
789
+ bb = np.linspace(0,ccolors.shape[0]-1,len(LEVS)).astype(int)
790
+ colors_gr2_new =[]
791
+ for i in bb:
792
+ colors_gr2_new.append(ccolors.iloc[i,:].values)
793
+ else:
794
+ colors_gr2_new = colors_gr2
795
+ LEVS = np.linspace(data.sta_minvalue,data.sta_maxvalue,len(colors_gr2_new))
796
+ #
797
+ if np.mean(abs(LEVS)) > 1:
798
+ if data.attrs['varname'] != 'sprs2m' and data.attrs['varname'].find('t2m_delta')<0 and data.attrs['varname'].find('sprs2m_delta')<0:
799
+ LEVS = LEVS.astype(int)
800
+ if LEVS[-1] - LEVS[0]<len(LEVS):
801
+ LEVS = np.linspace(LEVS[0],LEVS[0]+len(colors_gr2_new)-1,len(colors_gr2_new))
802
+ LEVS = LEVS.astype(int)
803
+
804
+ else:
805
+ LEVS = np.round(LEVS,3)
806
+
807
+ # Remove duplicate levels while preserving their order
808
+ LEVS = list(OrderedDict.fromkeys(LEVS))
809
+
810
+ ax = plt.subplot()
811
+
812
+ pass
813
+
814
+ con_re = ax.contour(data.lat.values, data.lon.values, data[varname].values, LEVS,colors='k')
815
+
816
+ conts = get_contour_verts(con_re)
817
+
818
+ curyear = int(self.time_str[0:4])
819
+ curmonth = int(self.time_str[4:6])
820
+ curday = int(self.time_str[6:8])
821
+ curhour = int(self.time_str[8:10])
822
+ curmin = int(self.time_str[10:12])
823
+
824
+ obstime = datetime(curyear,curmonth,curday,curhour,curmin,0)
825
+
826
+ if self.time_type == "BJT":
+ curtime = obstime + timedelta(hours=8)
+ else:
+ curtime = obstime
828
+
829
+ curtstr = '%04d'%curtime.year + '年' + '%02d'%curtime.month + '月' + '%02d'%curtime.day + '日' + '%02d'%curtime.hour + '时' + \
830
+ '%02d'%curtime.minute + '分'
831
+
832
+ with open(outpath + os.sep + outname, 'wt', encoding=encoding) as of:
833
+
834
+ # if bhistory:
835
+ # of.write('Refreshseconds: 30\n')
836
+ # of.write('Threshold: 999\n')
837
+ # of.write('Title: %s-等值线分析-%s(%s)\n'%(tipname,curtstr,self.time_type))
838
+ # of.write('Font: 1, 30, 1, "Arial"\n')
839
+
840
+ if badd_time_flag:
841
+
842
+ # Using m_pret directly here guarantees that records do not overlap
843
+ if not m_pret is None:
844
+ pre_time = m_pret + timedelta(minutes=-1*int(aws_duration/2))
845
+ endtime = m_pret + timedelta(minutes=1*int(aws_duration/2))
846
+ else:
847
+ pre_time = obstime + timedelta(minutes = - (obstime.minute % 6) -1*int(aws_duration/2))
848
+ endtime = obstime + timedelta(minutes=20)
849
+
850
+
851
+ timerangestr = 'TimeRange: ' + pre_time.strftime('%Y-%m-%dT%H:%M:%S') + ' ' + endtime.strftime('%Y-%m-%dT%H:%M:%S')
852
+
853
+ # In real-time mode, this time adjustment must come after the TimeRange line
854
+
855
+ if self.time_type == "BJT":
856
+ pre_time = pre_time + timedelta(hours=8)
857
+ endtime = endtime + timedelta(hours=8)
858
+
859
+ if bhistory or not m_pret is None:
860
+ timerangestr = 'TimeRange: ' + pre_time.strftime('%Y-%m-%dT%H:%M:%S') + ' ' + endtime.strftime('%Y-%m-%dT%H:%M:%S')
861
+
862
+ of.write(timerangestr + '\n')
863
+
864
+ # Write the contour lines
865
+ for lv in np.arange(0,len(LEVS)):
866
+ if lv >= len(conts):
867
+ continue
868
+ if LEVS[lv] == int(LEVS[lv]):
869
+ labs = str(int(LEVS[lv]))
870
+ else:
871
+ labs = '%.1f'%(LEVS[lv])
872
+
873
+ if len(conts[lv]) > 0:
874
+ for jj in np.arange(0,len(conts[lv])):
875
+ if conts[lv][jj].shape[0] < 5:
876
+ continue
877
+ else:
878
+ of.write('Color: %d %d %d\n'%(colors_gr2_new[lv][0], colors_gr2_new[lv][1], colors_gr2_new[lv][2]))
879
+
880
+ of.write('Line: 4,0,"%s等于%s等值线"\n'%(tipname,labs))
881
+
882
+ for kk in np.arange(0,conts[lv][jj].shape[0]): #
883
+ lat = conts[lv][jj][kk][0]
884
+ lon = conts[lv][jj][kk][1]
885
+
886
+ of.write('%.4f,%.4f\n'%(lat,lon))
887
+ of.write('End:\n')
888
+ for mm in np.arange(0,conts[lv][jj].shape[0],int(conts[lv][jj].shape[0]/2)):
889
+ of.write('Text: %.3f,%.3f,1,%s\n'%(conts[lv][jj][mm][0], conts[lv][jj][mm][1], labs))
890
+ of.write('\n')
891
+ plt.close('all')
892
+ plt.close()
893
+ gc.collect()
894
+ print(outpath + os.sep + outname + ' is done!')
895
+ pass
896
+
897
+
898
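+ # Example: analyse 2 m temperature from a test CSV file and export the contours as a GR2 placefile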
+ if __name__ == '__main__':
899
+
900
+ filepath = 'testdata/'
901
+ # filename = 'PLOT_5MIN_20220511141000.000'
902
+ filename = 'surface_aws_20230409_0818.csv'
903
+ startlon = 106
904
+ endlon = 135
905
+ startlat = 18
906
+ endlat = 28
907
+
908
+ oa_class = Object_Analyst()
909
+ oa_class.set_debug(1)
910
+ oa_class.set_reso(0.025)
911
+ oa_class.set_boundary(startlon,endlon,startlat,endlat)
912
+
913
+ # oa_class.set_time('20220511141000')
914
+ tstr = filename[12:20]+filename[21:25]
915
+ oa_class.set_time(tstr)
916
+ # result = oa_class.do_oa_mdfs(filepath + os.sep + filename,vartype='温度')
917
+ result = oa_class.do_oa_csv(filepath + os.sep + filename,vartype='温度')
918
+
919
+ oa_class.trans_gr2(result,'output/placefiles',list(result.variables)[0] + '.pls')
920
+ print('done! ')
921
+
922
+
923
+ # if not result is None:
924
+ # # cflag = (abs(result.t2m)<20) & (abs(result.t2m)>3)
925
+ # result.t2m.plot.contourf(levels=15, add_colorbar=True)
926
+ # plt.title('t2m')
927
+ # plt.show()
928
+ # kk=0
929
+
930
+ # if not result is None:
931
+ # cflag = (abs(result.div_10m)<20) & (abs(result.div_10m)>3)
932
+ # result.div_10m.where(cflag).plot.contour(levels=15, add_colorbar=True)
933
+ # plt.title('divergence')
934
+ # plt.show()
935
+ # kk=0
936
+
937
+
938
+ # oa_class = Object_Analyst()
939
+ # oa_class.set_reso(0.01)
940
+ # params={}
941
+ # params['in_lon'] = aws_lons
942
+ # params['in_lat'] = aws_lats
943
+ # params['in_data'] = aws_tem
944
+ # params['out_varname'] = 't2m'
945
+ # params['out_long_name'] = 'surface temperature objective analyse'
946
+ # params['out_short_name'] = 'oa_t'
947
+ # params['out_units'] = 'degC'
948
+
949
+ # t2m = oa_class.do_oa_base(params)
950
+
951
+
952
+
953
+
954
+
955
+ # %%