pypromice 1.5.1__py3-none-any.whl → 1.5.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pypromice might be problematic; see the package registry's advisory page for more details.

@@ -7,6 +7,7 @@ import pandas as pd
7
7
  import xarray as xr
8
8
  import re, logging
9
9
  from pypromice.process.value_clipping import clip_values
10
+ from pypromice.process import wind
10
11
  logger = logging.getLogger(__name__)
11
12
 
12
13
 
@@ -23,12 +24,12 @@ def toL1(L0, vars_df, T_0=273.15, tilt_threshold=-100):
23
24
  Air temperature for sonic ranger adjustment
24
25
  tilt_threshold : int
25
26
  Tilt-o-meter threshold for valid measurements
26
-
27
+
27
28
  Returns
28
29
  -------
29
30
  ds : xarray.Dataset
30
31
  Level 1 dataset
31
- '''
32
+ '''
32
33
  assert(type(L0) == xr.Dataset)
33
34
  ds = L0
34
35
  ds.attrs['level'] = 'L1'
@@ -48,7 +49,7 @@ def toL1(L0, vars_df, T_0=273.15, tilt_threshold=-100):
48
49
  # If we do not want to shift hourly average values back -1 hr, then comment the following line.
49
50
  ds = addTimeShift(ds, vars_df)
50
51
 
51
- if hasattr(ds, 'dsr_eng_coef'):
52
+ if hasattr(ds, 'dsr_eng_coef'):
52
53
  ds['dsr'] = (ds['dsr'] * 10) / ds.attrs['dsr_eng_coef'] # Convert radiation from engineering to physical units
53
54
  if hasattr(ds, 'usr_eng_coef'): # TODO add metadata to indicate whether radiometer values are corrected with calibration values or not
54
55
  ds['usr'] = (ds['usr'] * 10) / ds.attrs['usr_eng_coef']
@@ -58,10 +59,10 @@ def toL1(L0, vars_df, T_0=273.15, tilt_threshold=-100):
58
59
  ds['ulr'] = ((ds['ulr'] * 10) / ds.attrs['ulr_eng_coef']) + 5.67E-8*(ds['t_rad'] + T_0)**4
59
60
 
60
61
  ds['z_boom_u'] = _reformatArray(ds['z_boom_u']) # Reformat boom height
61
-
62
+
62
63
  ds['t_u_interp'] = interpTemp(ds['t_u'], vars_df)
63
- ds['z_boom_u'] = ds['z_boom_u'] * ((ds['t_u_interp'] + T_0)/T_0)**0.5 # Adjust sonic ranger readings for sensitivity to air temperature
64
-
64
+ ds['z_boom_u'] = ds['z_boom_u'] * ((ds['t_u_interp'] + T_0)/T_0)**0.5 # Adjust sonic ranger readings for sensitivity to air temperature
65
+
65
66
  if ds['gps_lat'].dtype.kind == 'O': # Decode and reformat GPS information
66
67
  if 'NH' in ds['gps_lat'].dropna(dim='time').values[1]:
67
68
  ds = decodeGPS(ds, ['gps_lat','gps_lon','gps_time'])
@@ -73,22 +74,22 @@ def toL1(L0, vars_df, T_0=273.15, tilt_threshold=-100):
73
74
  else:
74
75
  try:
75
76
  ds = decodeGPS(ds, ['gps_lat','gps_lon','gps_time']) # TODO this is a work around specifically for L0 RAW processing for THU_U. Find a way to make this slicker
76
-
77
+
77
78
  except:
78
79
  print('Invalid GPS type {ds["gps_lat"].dtype} for decoding')
79
-
80
+
80
81
  for l in ['gps_lat', 'gps_lon', 'gps_alt','gps_time']:
81
- ds[l] = _reformatArray(ds[l])
82
+ ds[l] = _reformatArray(ds[l])
82
83
 
83
84
  if hasattr(ds, 'latitude') and hasattr(ds, 'longitude'):
84
85
  ds['gps_lat'] = reformatGPS(ds['gps_lat'], ds.attrs['latitude'])
85
86
  ds['gps_lon'] = reformatGPS(ds['gps_lon'], ds.attrs['longitude'])
86
87
 
87
88
  if hasattr(ds, 'logger_type'): # Convert tilt voltage to degrees
88
- if ds.attrs['logger_type'].upper() == 'CR1000':
89
- ds['tilt_x'] = getTiltDegrees(ds['tilt_x'], tilt_threshold)
90
- ds['tilt_y'] = getTiltDegrees(ds['tilt_y'], tilt_threshold)
91
-
89
+ if ds.attrs['logger_type'].upper() == 'CR1000':
90
+ ds['tilt_x'] = getTiltDegrees(ds['tilt_x'], tilt_threshold)
91
+ ds['tilt_y'] = getTiltDegrees(ds['tilt_y'], tilt_threshold)
92
+
92
93
  if hasattr(ds, 'tilt_y_factor'): # Apply tilt factor (e.g. -1 will invert tilt angle)
93
94
  ds['tilt_y'] = ds['tilt_y']*ds.attrs['tilt_y_factor']
94
95
 
@@ -97,39 +98,66 @@ def toL1(L0, vars_df, T_0=273.15, tilt_threshold=-100):
97
98
  # since we interpolate above in _getTiltDegrees. PJW
98
99
  ds['tilt_x'] = smoothTilt(ds['tilt_x'], 7) # Smooth tilt
99
100
  ds['tilt_y'] = smoothTilt(ds['tilt_y'], 7)
100
-
101
- if hasattr(ds, 'bedrock'): # Fix tilt to zero if station is on bedrock
102
- if ds.attrs['bedrock']==True or ds.attrs['bedrock'].lower() in 'true':
103
- ds.attrs['bedrock'] = True # ensures all AWS objects have a 'bedrock' attribute
104
- ds['tilt_x'] = (('time'), np.arange(ds['time'].size)*0)
105
- ds['tilt_y'] = (('time'), np.arange(ds['time'].size)*0)
106
- else:
107
- ds.attrs['bedrock'] = False # ensures all AWS objects have a 'bedrock' attribute
108
- else:
109
- ds.attrs['bedrock'] = False # ensures all AWS objects have a 'bedrock' attribute
110
-
101
+
102
+ # Apply wind factor if provided
103
+ # This is in the case of an anemometer rotations improperly translated to wind speed by the logger program
104
+ if hasattr(ds, 'wind_u_coef'):
105
+ logger.info(f'Wind speed correction applied to wspd_u based on factor of {ds.attrs["wind_u_coef"]}')
106
+ ds['wspd_u'] = wind.correct_wind_speed(ds['wspd_u'],
107
+ ds.attrs['wind_u_coef'])
108
+ if hasattr(ds, 'wind_l_coef'):
109
+ logger.info(f'Wind speed correction applied to wspd_u based on factor of {ds.attrs["wind_l_coef"]}')
110
+ ds['wspd_l'] = wind.correct_wind_speed(ds['wspd_l'],
111
+ ds.attrs['wind_l_coef'])
112
+ if hasattr(ds, 'wind_i_coef'):
113
+ logger.info(f'Wind speed correction applied to wspd_u based on factor of {ds.attrs["wind_i_coef"]}')
114
+ ds['wspd_i'] = wind.correct_wind_speed(ds['wspd_i'],
115
+ ds.attrs['wind_i_coef'])
116
+
117
+ # Handle cases where the bedrock attribute is incorrectly set
118
+ if not 'bedrock' in ds.attrs:
119
+ logger.warning('bedrock attribute is not set')
120
+ ds.attrs['bedrock'] = False
121
+ elif not isinstance(ds.attrs['bedrock'], bool):
122
+ logger.warning(f'bedrock attribute is not boolean: {ds.attrs["bedrock"]}')
123
+ ds.attrs['bedrock'] = str(ds.attrs['bedrock']).lower() == 'true'
124
+
125
+ is_bedrock = ds.attrs['bedrock']
126
+
127
+ if is_bedrock:
128
+ # some bedrock stations (e.g. KAN_B) do not have tilt in L0 files
129
+ # we need to create them manually
130
+ for var in ['tilt_x','tilt_y']:
131
+ if var not in ds.data_vars:
132
+ ds[var] = (('time'), np.full(ds['time'].size, np.nan))
133
+
134
+ # WEG_B has a non-null z_pt even though it is a berock station
135
+ if ~ds['z_pt'].isnull().all(): # Calculate pressure transducer fluid density
136
+ ds['z_pt'] = (('time'), np.full(ds['time'].size, np.nan))
137
+ logger.info('Warning: Non-null data for z_pt at a bedrock site')
138
+
111
139
  if ds.attrs['number_of_booms']==1: # 1-boom processing
112
- if ~ds['z_pt'].isnull().all(): # Calculate pressure transducer fluid density
140
+ if ~ds['z_pt'].isnull().all(): # Calculate pressure transducer fluid density
113
141
  if hasattr(ds, 'pt_z_offset'): # Apply SR50 stake offset
114
- ds['z_pt'] = ds['z_pt'] + int(ds.attrs['pt_z_offset'])
115
- ds['z_pt_cor'],ds['z_pt']=getPressDepth(ds['z_pt'], ds['p_u'],
116
- ds.attrs['pt_antifreeze'],
117
- ds.attrs['pt_z_factor'],
118
- ds.attrs['pt_z_coef'],
119
- ds.attrs['pt_z_p_coef'])
120
- ds['z_stake'] = _reformatArray(ds['z_stake']) # Reformat boom height
142
+ ds['z_pt'] = ds['z_pt'] + int(ds.attrs['pt_z_offset'])
143
+ ds['z_pt_cor'],ds['z_pt']=getPressDepth(ds['z_pt'], ds['p_u'],
144
+ ds.attrs['pt_antifreeze'],
145
+ ds.attrs['pt_z_factor'],
146
+ ds.attrs['pt_z_coef'],
147
+ ds.attrs['pt_z_p_coef'])
148
+ ds['z_stake'] = _reformatArray(ds['z_stake']) # Reformat boom height
121
149
  ds['z_stake'] = ds['z_stake'] * ((ds['t_u'] + T_0)/T_0)**0.5 # Adjust sonic ranger readings for sensitivity to air temperature
122
-
150
+
123
151
  elif ds.attrs['number_of_booms']==2: # 2-boom processing
124
- ds['z_boom_l'] = _reformatArray(ds['z_boom_l']) # Reformat boom height
152
+ ds['z_boom_l'] = _reformatArray(ds['z_boom_l']) # Reformat boom height
125
153
  ds['t_l_interp'] = interpTemp(ds['t_l'], vars_df)
126
- ds['z_boom_l'] = ds['z_boom_l'] * ((ds['t_l_interp']+ T_0)/T_0)**0.5 # Adjust sonic ranger readings for sensitivity to air temperature
154
+ ds['z_boom_l'] = ds['z_boom_l'] * ((ds['t_l_interp']+ T_0)/T_0)**0.5 # Adjust sonic ranger readings for sensitivity to air temperature
127
155
 
128
156
  ds = clip_values(ds, vars_df)
129
157
  for key in ['hygroclip_t_offset', 'dsr_eng_coef', 'usr_eng_coef',
130
- 'dlr_eng_coef', 'ulr_eng_coef', 'pt_z_coef', 'pt_z_p_coef',
131
- 'pt_z_factor', 'pt_antifreeze', 'boom_azimuth', 'nodata',
132
- 'conf', 'file']:
158
+ 'dlr_eng_coef', 'ulr_eng_coef', 'wind_u_coef','wind_l_coef',
159
+ 'wind_i_coef', 'pt_z_coef', 'pt_z_p_coef', 'pt_z_factor',
160
+ 'pt_antifreeze', 'boom_azimuth', 'nodata', 'conf', 'file']:
133
161
  ds.attrs.pop(key, None)
134
162
 
135
163
  return ds
@@ -220,10 +248,10 @@ def addTimeShift(ds, vars_df):
220
248
  # ds_out = xr.Dataset(dict(zip(df_out.columns, vals)), attrs=ds.attrs)
221
249
  return ds_out
222
250
 
223
- def getPressDepth(z_pt, p, pt_antifreeze, pt_z_factor, pt_z_coef, pt_z_p_coef):
224
- '''Adjust pressure depth and calculate pressure transducer depth based on
251
+ def getPressDepth(z_pt, p, pt_antifreeze, pt_z_factor, pt_z_coef, pt_z_p_coef):
252
+ '''Adjust pressure depth and calculate pressure transducer depth based on
225
253
  pressure transducer fluid density
226
-
254
+
227
255
  Parameters
228
256
  ----------
229
257
  z_pt : xr.Dataarray
@@ -231,7 +259,7 @@ def getPressDepth(z_pt, p, pt_antifreeze, pt_z_factor, pt_z_coef, pt_z_p_coef):
231
259
  p : xr.Dataarray
232
260
  Air pressure
233
261
  pt_antifreeze : float
234
- Pressure transducer anti-freeze percentage for fluid density
262
+ Pressure transducer anti-freeze percentage for fluid density
235
263
  correction
236
264
  pt_z_factor : float
237
265
  Pressure transducer factor
@@ -239,7 +267,7 @@ def getPressDepth(z_pt, p, pt_antifreeze, pt_z_factor, pt_z_coef, pt_z_p_coef):
239
267
  Pressure transducer coefficient
240
268
  pt_z_p_coef : float
241
269
  Pressure transducer coefficient
242
-
270
+
243
271
  Returns
244
272
  -------
245
273
  z_pt_cor : xr.Dataarray
@@ -247,8 +275,8 @@ def getPressDepth(z_pt, p, pt_antifreeze, pt_z_factor, pt_z_coef, pt_z_p_coef):
247
275
  z_pt : xr.Dataarray
248
276
  Pressure transducer depth
249
277
  '''
250
- # Calculate pressure transducer fluid density
251
- if pt_antifreeze == 50: #TODO: Implement function w/ reference (analytical or from LUT)
278
+ # Calculate pressure transducer fluid density
279
+ if pt_antifreeze == 50: #TODO: Implement function w/ reference (analytical or from LUT)
252
280
  rho_af = 1092 #TODO: Track uncertainty
253
281
  elif pt_antifreeze == 100:
254
282
  rho_af = 1145
@@ -257,19 +285,19 @@ def getPressDepth(z_pt, p, pt_antifreeze, pt_z_factor, pt_z_coef, pt_z_p_coef):
257
285
  logger.info('ERROR: Incorrect metadata: "pt_antifreeze" = ' +
258
286
  f'{pt_antifreeze}. Antifreeze mix only supported at 50% or 100%')
259
287
  # assert(False)
260
-
288
+
261
289
  # Correct pressure depth
262
290
  z_pt_cor = z_pt * pt_z_coef * pt_z_factor * 998.0 / rho_af + 100 * (pt_z_p_coef - p) / (rho_af * 9.81)
263
291
 
264
292
  # Calculate pressure transducer depth
265
293
  z_pt = z_pt * pt_z_coef * pt_z_factor * 998.0 / rho_af
266
-
294
+
267
295
  return z_pt_cor, z_pt
268
296
 
269
297
 
270
298
  def interpTemp(temp, var_configurations, max_interp=pd.Timedelta(12,'h')):
271
299
  '''Clip and interpolate temperature dataset for use in corrections
272
-
300
+
273
301
  Parameters
274
302
  ----------
275
303
  temp : `xarray.DataArray`
@@ -278,7 +306,7 @@ def interpTemp(temp, var_configurations, max_interp=pd.Timedelta(12,'h')):
278
306
  Dataframe to retrieve attribute hi-lo values from for temperature clipping
279
307
  max_interp : `pandas.Timedelta`
280
308
  Maximum time steps to interpolate across. The default is 12 hours.
281
-
309
+
282
310
  Returns
283
311
  -------
284
312
  temp_interp : `xarray.DataArray`
@@ -286,18 +314,18 @@ def interpTemp(temp, var_configurations, max_interp=pd.Timedelta(12,'h')):
286
314
  '''
287
315
  # Determine if upper or lower temperature array
288
316
  var = temp.name.lower()
289
-
317
+
290
318
  # Find range threshold and use it to clip measurements
291
319
  cols = ["lo", "hi", "OOL"]
292
320
  assert set(cols) <= set(var_configurations.columns)
293
321
  variable_limits = var_configurations[cols].dropna(how="all")
294
322
  temp = temp.where(temp >= variable_limits.loc[var,'lo'])
295
323
  temp = temp.where(temp <= variable_limits.loc[var, 'hi'])
296
-
324
+
297
325
  # Drop duplicates and interpolate across NaN values
298
326
  # temp_interp = temp.drop_duplicates(dim='time', keep='first')
299
327
  temp_interp = temp.interpolate_na(dim='time', max_gap=max_interp)
300
-
328
+
301
329
  return temp_interp
302
330
 
303
331
 
@@ -309,7 +337,7 @@ def smoothTilt(tilt, win_size):
309
337
  In Python, this should be
310
338
  dstxy = dstxy.rolling(time=7, win_type='boxcar', center=True).mean()
311
339
  But the EDGE_MIRROR makes it a bit more complicated
312
-
340
+
313
341
  Parameters
314
342
  ----------
315
343
  tilt : xarray.DataArray
@@ -338,9 +366,9 @@ def smoothTilt(tilt, win_size):
338
366
  return tdf_rolling
339
367
 
340
368
  def getTiltDegrees(tilt, threshold):
341
- '''Filter tilt with given threshold, and convert from voltage to degrees.
342
- Voltage-to-degrees converseion is based on the equation in 3.2.9 at
343
- https://essd.copernicus.org/articles/13/3819/2021/#section3
369
+ '''Filter tilt with given threshold, and convert from voltage to degrees.
370
+ Voltage-to-degrees converseion is based on the equation in 3.2.9 at
371
+ https://essd.copernicus.org/articles/13/3819/2021/#section3
344
372
 
345
373
  Parameters
346
374
  ----------
@@ -348,7 +376,7 @@ def getTiltDegrees(tilt, threshold):
348
376
  Array (either 'tilt_x' or 'tilt_y'), tilt values (voltage)
349
377
  threshold : int
350
378
  Values below this threshold (-100) will not be retained.
351
-
379
+
352
380
  Returns
353
381
  -------
354
382
  dst.interpolate_na() : xarray.DataArray
@@ -358,7 +386,7 @@ def getTiltDegrees(tilt, threshold):
358
386
  notOKtilt = (tilt < threshold)
359
387
  OKtilt = (tilt >= threshold)
360
388
  tilt = tilt / 10
361
-
389
+
362
390
  # IDL version:
363
391
  # tiltX = tiltX/10.
364
392
  # tiltnonzero = where(tiltX ne 0 and tiltX gt -40 and tiltX lt 40)
@@ -366,26 +394,26 @@ def getTiltDegrees(tilt, threshold):
366
394
  # tiltY = tiltY/10.
367
395
  # tiltnonzero = where(tiltY ne 0 and tiltY gt -40 and tiltY lt 40)
368
396
  # if n_elements(tiltnonzero) ne 1 then tiltY[tiltnonzero] = tiltY[tiltnonzero]/abs(tiltY[tiltnonzero])*(-0.49*(abs(tiltY[tiltnonzero]))^4 + 3.6*(abs(tiltY[tiltnonzero]))^3 - 10.4*(abs(tiltY[tiltnonzero]))^2 +21.1*(abs(tiltY[tiltnonzero])))
369
-
397
+
370
398
  dst = tilt
371
399
  nz = (dst != 0) & (np.abs(dst) < 40)
372
-
400
+
373
401
  dst = dst.where(~nz, other = dst / np.abs(dst)
374
402
  * (-0.49
375
403
  * (np.abs(dst))**4 + 3.6
376
404
  * (np.abs(dst))**3 - 10.4
377
405
  * (np.abs(dst))**2 + 21.1
378
406
  * (np.abs(dst))))
379
-
407
+
380
408
  # if n_elements(OKtiltX) gt 1 then tiltX[notOKtiltX] = interpol(tiltX[OKtiltX],OKtiltX,notOKtiltX) ; Interpolate over gaps for radiation correction; set to -999 again below.
381
409
  dst = dst.where(~notOKtilt)
382
410
  return dst.interpolate_na(dim='time', use_coordinate=False) #TODO: Filling w/o considering time gaps to re-create IDL/GDL outputs. Should fill with coordinate not False. Also consider 'max_gap' option?
383
411
 
384
-
412
+
385
413
  def decodeGPS(ds, gps_names):
386
- '''Decode GPS information based on names of GPS attributes. This should be
414
+ '''Decode GPS information based on names of GPS attributes. This should be
387
415
  applied if gps information does not consist of float values
388
-
416
+
389
417
  Parameters
390
418
  ----------
391
419
  ds : xr.Dataset
@@ -393,63 +421,63 @@ def decodeGPS(ds, gps_names):
393
421
  gps_names : list
394
422
  Variable names for GPS information, such as "gps_lat", "gps_lon" and
395
423
  "gps_alt"
396
-
424
+
397
425
  Returns
398
426
  -------
399
427
  ds : xr.Dataset
400
428
  Data set with decoded GPS information
401
429
  '''
402
430
  for v in gps_names:
403
- a = ds[v].attrs
431
+ a = ds[v].attrs
404
432
  str2nums = [re.findall(r"[-+]?\d*\.\d+|\d+", _) if isinstance(_, str) else [np.nan] for _ in ds[v].values]
405
433
  ds[v][:] = pd.DataFrame(str2nums).astype(float).T.values[0]
406
434
  ds[v] = ds[v].astype(float)
407
- ds[v].attrs = a
435
+ ds[v].attrs = a
408
436
  return ds
409
437
 
410
438
  def reformatGPS(pos_arr, attrs):
411
439
  '''Correct latitude and longitude from native format to decimal degrees.
412
-
440
+
413
441
  v2 stations should send "NH6429.01544","WH04932.86061" (NUK_L 2022)
414
442
  v3 stations should send coordinates as "6628.93936","04617.59187" (DY2) or 6430,4916 (NUK_Uv3)
415
443
  decodeGPS should have decoded these strings to floats in ddmm.mmmm format
416
444
  v1 stations however only saved decimal minutes (mm.mmmmm) as float<=60. '
417
- In this case, we use the integer part of the latitude given in the config
445
+ In this case, we use the integer part of the latitude given in the config
418
446
  file and append the gps value after it.
419
-
447
+
420
448
  Parameters
421
449
  ----------
422
450
  pos_arr : xr.Dataarray
423
451
  Array of latitude or longitude measured by the GPS
424
452
  attrs : dict
425
- The global attribute 'latitude' or 'longitude' associated with the
426
- file being processed. It is the standard latitude/longitude given in the
453
+ The global attribute 'latitude' or 'longitude' associated with the
454
+ file being processed. It is the standard latitude/longitude given in the
427
455
  config file for that station.
428
-
456
+
429
457
  Returns
430
458
  -------
431
459
  pos_arr : xr.Dataarray
432
460
  Formatted GPS position array in decimal degree
433
- '''
434
- if np.any((pos_arr <= 90) & (pos_arr > 0)):
435
- # then pos_arr is in decimal minutes, so we add to it the integer
461
+ '''
462
+ if np.any((pos_arr <= 90) & (pos_arr > 0)):
463
+ # then pos_arr is in decimal minutes, so we add to it the integer
436
464
  # part of the latitude given in the config file x100
437
465
  # so that it reads ddmm.mmmmmm like for v2 and v3 files
438
466
  # Note that np.sign and np.attrs handles negative longitudes.
439
467
  pos_arr = np.sign(attrs) * (pos_arr + 100*np.floor(np.abs(attrs)))
440
- a = pos_arr.attrs
468
+ a = pos_arr.attrs
441
469
  pos_arr = np.floor(pos_arr / 100) + (pos_arr / 100 - np.floor(pos_arr / 100)) * 100 / 60
442
- pos_arr.attrs = a
443
- return pos_arr
470
+ pos_arr.attrs = a
471
+ return pos_arr
444
472
 
445
473
  def _reformatArray(ds_arr):
446
474
  '''Reformat DataArray values and attributes
447
-
475
+
448
476
  Parameters
449
477
  ----------
450
478
  ds_arr : xr.Dataarray
451
479
  Data array
452
-
480
+
453
481
  Returns
454
482
  -------
455
483
  ds_arr : xr.Dataarray
@@ -458,18 +486,18 @@ def _reformatArray(ds_arr):
458
486
  a = ds_arr.attrs # Store
459
487
  ds_arr.values = pd.to_numeric(ds_arr, errors='coerce')
460
488
  ds_arr.attrs = a # Reformat
461
- return ds_arr
489
+ return ds_arr
462
490
 
463
491
  def _removeVars(ds, v_names):
464
492
  '''Remove redundant variables if present in dataset
465
-
493
+
466
494
  Parameters
467
495
  ----------
468
496
  ds : xr.Dataset
469
497
  Data set
470
498
  v_names : list
471
499
  List of column names to drop
472
-
500
+
473
501
  Returns
474
502
  -------
475
503
  ds : xr.Dataset
@@ -481,7 +509,7 @@ def _removeVars(ds, v_names):
481
509
 
482
510
  def _popCols(ds, booms, data_type, vars_df, cols):
483
511
  '''Populate data array columns with given variable names from look-up table
484
-
512
+
485
513
  Parameters
486
514
  ----------
487
515
  ds : xr.Dataset
@@ -494,7 +522,7 @@ def _popCols(ds, booms, data_type, vars_df, cols):
494
522
  Variables lookup table
495
523
  cols : list
496
524
  Names of columns to populate
497
-
525
+
498
526
  Returns
499
527
  -------
500
528
  ds : xr.Dataset
@@ -505,10 +533,10 @@ def _popCols(ds, booms, data_type, vars_df, cols):
505
533
 
506
534
  elif booms==2:
507
535
  names = vars_df.loc[(vars_df[cols[0]]!='one-boom')]
508
-
536
+
509
537
  for v in list(names.index):
510
538
  if v not in list(ds.variables):
511
- ds[v] = (('time'), np.arange(ds['time'].size)*np.nan)
539
+ ds[v] = (('time'), np.arange(ds['time'].size)*np.nan)
512
540
  return ds
513
541
 
514
542
  # def _popCols(ds, booms, data_type, vars_df, cols):
@@ -517,20 +545,20 @@ def _popCols(ds, booms, data_type, vars_df, cols):
517
545
  # names = vars_df.loc[(vars_df[cols[0]]!='two-boom')]
518
546
  # else:
519
547
  # names = vars_df.loc[(vars_df[cols[0]] != 'two-boom') & vars_df[cols[1]] != 'tx']
520
-
548
+
521
549
  # elif booms==2:
522
550
  # if data_type !='TX':
523
551
  # names = vars_df.loc[(vars_df[cols[0]]!='two-boom')]
524
552
  # else:
525
553
  # names = vars_df.loc[(vars_df[cols[0]] != 'two-boom') & vars_df[cols[1]] != 'tx']
526
-
554
+
527
555
  # for v in list(names.index):
528
556
  # if v not in list(ds.variables):
529
- # ds[v] = (('time'), np.arange(ds['time'].size)*np.nan)
557
+ # ds[v] = (('time'), np.arange(ds['time'].size)*np.nan)
530
558
  # return ds
531
559
 
532
560
  #------------------------------------------------------------------------------
533
561
 
534
- if __name__ == "__main__":
535
- # unittest.main()
536
- pass
562
+ if __name__ == "__main__":
563
+ # unittest.main()
564
+ pass
@@ -13,6 +13,7 @@ from pypromice.qc.github_data_issues import flagNAN, adjustTime, adjustData
13
13
  from pypromice.qc.percentiles.outlier_detector import ThresholdBasedOutlierDetector
14
14
  from pypromice.qc.persistence import persistence_qc
15
15
  from pypromice.process.value_clipping import clip_values
16
+ from pypromice.process import wind
16
17
 
17
18
  __all__ = [
18
19
  "toL2",
@@ -29,8 +30,6 @@ def toL2(
29
30
  T_0=273.15,
30
31
  ews=1013.246,
31
32
  ei0=6.1071,
32
- eps_overcast=1.0,
33
- eps_clear=9.36508e-6,
34
33
  emissivity=0.97,
35
34
  ) -> xr.Dataset:
36
35
  '''Process one Level 1 (L1) product to Level 2.
@@ -72,7 +71,7 @@ def toL2(
72
71
  ds : xarray.Dataset
73
72
  Level 2 dataset
74
73
  '''
75
- ds = L1.copy(deep=True) # Reassign dataset
74
+ ds = L1.copy() # Reassign dataset
76
75
  ds.attrs['level'] = 'L2'
77
76
  try:
78
77
  ds = adjustTime(ds, adj_dir=data_adjustments_dir.as_posix()) # Adjust time after a user-defined csv files
@@ -114,17 +113,83 @@ def toL2(
114
113
  ds['rh_i_wrt_ice_or_water'] = adjustHumidity(ds['rh_i'], ds['t_i'],
115
114
  T_0, T_100, ews, ei0)
116
115
 
117
- # Determiune cloud cover for on-ice stations
118
- cc = calcCloudCoverage(ds['t_u'], T_0, eps_overcast, eps_clear, # Calculate cloud coverage
119
- ds['dlr'], ds.attrs['station_id'])
120
- ds['cc'] = (('time'), cc.data)
121
-
122
116
  # Determine surface temperature
123
- ds['t_surf'] = calcSurfaceTemperature(T_0, ds['ulr'], ds['dlr'], # Calculate surface temperature
117
+ ds['t_surf'] = calcSurfaceTemperature(T_0, ds['ulr'], ds['dlr'],
124
118
  emissivity)
125
- if not ds.attrs['bedrock']:
126
- ds['t_surf'] = xr.where(ds['t_surf'] > 0, 0, ds['t_surf'])
119
+ is_bedrock = ds.attrs['bedrock']
120
+ if not is_bedrock:
121
+ ds['t_surf'] = ds['t_surf'].clip(max=0)
122
+
123
+ # smoothing tilt and rot
124
+ ds['tilt_x'] = smoothTilt(ds['tilt_x'])
125
+ ds['tilt_y'] = smoothTilt(ds['tilt_y'])
126
+ ds['rot'] = smoothRot(ds['rot'])
127
+
128
+ # Determiune cloud cover for on-ice stations
129
+ if not is_bedrock:
130
+ ds['cc'] = calcCloudCoverage(ds['t_u'], ds['dlr'], ds.attrs['station_id'], T_0)
131
+ else:
132
+ ds['cc'] = ds['t_u'].copy() * np.nan
133
+
134
+ # Filtering and correcting shortwave radiation
135
+ ds, _ = process_sw_radiation(ds)
127
136
 
137
+ # Correct precipitation
138
+ if hasattr(ds, 'correct_precip'):
139
+ precip_flag = ds.attrs['correct_precip']
140
+ else:
141
+ precip_flag=True
142
+ if ~ds['precip_u'].isnull().all() and precip_flag:
143
+ ds['precip_u_cor'], ds['precip_u_rate'] = correctPrecip(ds['precip_u'],
144
+ ds['wspd_u'])
145
+ if ds.attrs['number_of_booms']==2:
146
+ if ~ds['precip_l'].isnull().all() and precip_flag: # Correct precipitation
147
+ ds['precip_l_cor'], ds['precip_l_rate']= correctPrecip(ds['precip_l'],
148
+ ds['wspd_l'])
149
+
150
+ # Calculate directional wind speed for upper boom
151
+ ds['wdir_u'] = wind.filter_wind_direction(ds['wdir_u'],
152
+ ds['wspd_u'])
153
+ ds['wspd_x_u'], ds['wspd_y_u'] = wind.calculate_directional_wind_speed(ds['wspd_u'],
154
+ ds['wdir_u'])
155
+
156
+ # Calculate directional wind speed for lower boom
157
+ if ds.attrs['number_of_booms'] == 2:
158
+ ds['wdir_l'] = wind.filter_wind_direction(ds['wdir_l'],
159
+ ds['wspd_l'])
160
+ ds['wspd_x_l'], ds['wspd_y_l'] = wind.calculate_directional_wind_speed(ds['wspd_l'],
161
+
162
+ ds['wdir_l'])
163
+ # Calculate directional wind speed for instantaneous measurements
164
+ if hasattr(ds, 'wdir_i'):
165
+ if ~ds['wdir_i'].isnull().all() and ~ds['wspd_i'].isnull().all():
166
+ ds['wdir_i'] = wind.filter_wind_direction(ds['wdir_i'],
167
+ ds['wspd_i'])
168
+ ds['wspd_x_i'], ds['wspd_y_i'] = wind.calculate_directional_wind_speed(ds['wspd_i'],
169
+ ds['wdir_i'])
170
+ # Get directional wind speed
171
+
172
+ ds = clip_values(ds, vars_df)
173
+
174
+ return ds
175
+
176
+ def process_sw_radiation(ds):
177
+ """
178
+ Processes shortwave radiation data from a dataset by applying tilt and sun
179
+ angle corrections.
180
+
181
+ Parameters:
182
+ ds (xarray.Dataset): Dataset containing variables such as time, tilt_x,
183
+ tilt_y, dsr (downwelling SW radiation), usr (upwelling SW radiation),
184
+ cloud cover (cc), gps_lat, gps_lon, and optional attributes
185
+ latitude and longitude.
186
+
187
+ Returns:
188
+ ds (xarray.Dataset): Updated dataset with corrected downwelling ('dsr_cor')
189
+ and upwelling ('usr_cor') SW radiation, and derived surface albedo ('albedo').
190
+ tuple: A tuple containing masks and calculated TOA radiation:
191
+ (OKalbedos, sunonlowerdome, bad, isr_toa, TOA_crit_nopass)
192
+ """
128
193
  # Determine station position relative to sun
129
194
  doy = ds['time'].to_dataframe().index.dayofyear.values # Gather variables to calculate sun pos
130
195
  hour = ds['time'].to_dataframe().index.hour.values
@@ -137,11 +202,6 @@ def toL2(
137
202
  lat = ds['gps_lat'].mean()
138
203
  lon = ds['gps_lon'].mean()
139
204
 
140
- # smoothing tilt and rot
141
- ds['tilt_x'] = smoothTilt(ds['tilt_x'])
142
- ds['tilt_y'] = smoothTilt(ds['tilt_y'])
143
- ds['rot'] = smoothRot(ds['rot'])
144
-
145
205
  deg2rad, rad2deg = _getRotation() # Get degree-radian conversions
146
206
  phi_sensor_rad, theta_sensor_rad = calcTilt(ds['tilt_x'], ds['tilt_y'], # Calculate station tilt
147
207
  deg2rad)
@@ -152,108 +212,64 @@ def toL2(
152
212
  HourAngle_rad, deg2rad,
153
213
  rad2deg)
154
214
 
215
+ # Setting to zero when sun below the horizon.
216
+ bad = ZenithAngle_deg > 95
217
+ ds['dsr'][bad & ds['dsr'].notnull()] = 0
218
+ ds['usr'][bad & ds['usr'].notnull()] = 0
155
219
 
156
- # Correct Downwelling shortwave radiation
157
- DifFrac = 0.2 + 0.8 * cc
158
- CorFac_all = calcCorrectionFactor(Declination_rad, phi_sensor_rad, # Calculate correction
159
- theta_sensor_rad, HourAngle_rad,
160
- ZenithAngle_rad, ZenithAngle_deg,
161
- lat, DifFrac, deg2rad)
162
- CorFac_all = xr.where(ds['cc'].notnull(), CorFac_all, 1)
163
- ds['dsr_cor'] = ds['dsr'].copy(deep=True) * CorFac_all # Apply correction
220
+ # Setting to zero when values are negative
221
+ ds['dsr'] = ds['dsr'].clip(min=0)
222
+ ds['usr'] = ds['usr'].clip(min=0)
164
223
 
165
- AngleDif_deg = calcAngleDiff(ZenithAngle_rad, HourAngle_rad, # Calculate angle between sun and sensor
224
+ # Calculate angle between sun and sensor
225
+ AngleDif_deg = calcAngleDiff(ZenithAngle_rad, HourAngle_rad,
166
226
  phi_sensor_rad, theta_sensor_rad)
167
-
168
- ds['albedo'], OKalbedos = calcAlbedo(ds['usr'], ds['dsr_cor'], # Determine albedo
169
- AngleDif_deg, ZenithAngle_deg)
170
-
171
- # Correct upwelling and downwelling shortwave radiation
172
- sunonlowerdome =(AngleDif_deg >= 90) & (ZenithAngle_deg <= 90) # Determine when sun is in FOV of lower sensor, assuming sensor measures only diffuse radiation
173
- ds['dsr_cor'] = ds['dsr_cor'].where(~sunonlowerdome,
174
- other=ds['dsr'] / DifFrac) # Apply to downwelling
175
- ds['usr_cor'] = ds['usr'].copy(deep=True)
176
- ds['usr_cor'] = ds['usr_cor'].where(~sunonlowerdome,
177
- other=ds['albedo'] * ds['dsr'] / DifFrac) # Apply to upwelling
178
- bad = (ZenithAngle_deg > 95) | (ds['dsr_cor'] <= 0) | (ds['usr_cor'] <= 0) # Set to zero for solar zenith angles larger than 95 deg or either values are (less than) zero
179
- ds['dsr_cor'][bad] = 0
180
- ds['usr_cor'][bad] = 0
181
- ds['dsr_cor'] = ds['usr_cor'].copy(deep=True) / ds['albedo'] # Correct DWR using more reliable USWR when sun not in sight of upper sensor
182
- ds['albedo'] = ds['albedo'].where(OKalbedos) #TODO remove?
183
-
184
- # Remove data where TOA shortwave radiation invalid
227
+ tilt_correction_possible = AngleDif_deg.notnull() & ds['cc'].notnull()
228
+
229
+ # Filtering usr and dsr for sun on lower dome
230
+ # in theory, this is not a problem in cloudy conditions, but the cloud cover
231
+ # index is too uncertain at this point to be used
232
+ sunonlowerdome = (AngleDif_deg >= 90) & (ZenithAngle_deg <= 90)
233
+ mask = ~sunonlowerdome | AngleDif_deg.isnull() # relaxing the filter for cases where sensor tilt is unknown
234
+ ds['dsr'] = ds['dsr'].where(mask)
235
+ ds['usr'] = ds['usr'].where(mask)
236
+
237
+ # Filter dsr values that are greater than top of the atmosphere irradiance
238
+ # Case where no tilt is available. If it is, then the same filter is used
239
+ # after tilt correction.
185
240
  isr_toa = calcTOA(ZenithAngle_deg, ZenithAngle_rad) # Calculate TOA shortwave radiation
186
- TOA_crit_nopass = (ds['dsr_cor'] > (0.9 * isr_toa + 10)) # Determine filter
187
- ds['dsr_cor'][TOA_crit_nopass] = np.nan # Apply filter and interpolate
188
- ds['usr_cor'][TOA_crit_nopass] = np.nan
189
-
190
- ds['dsr_cor'] = ds.dsr_cor.where(ds.dsr.notnull())
191
- ds['usr_cor'] = ds.usr_cor.where(ds.usr.notnull())
192
- # # Check sun position
193
- # sundown = ZenithAngle_deg >= 90
194
- # _checkSunPos(ds, OKalbedos, sundown, sunonlowerdome, TOA_crit_nopass)
195
-
196
- if hasattr(ds, 'correct_precip'): # Correct precipitation
197
- precip_flag=ds.attrs['correct_precip']
198
- else:
199
- precip_flag=True
200
- if ~ds['precip_u'].isnull().all() and precip_flag:
201
- ds['precip_u_cor'], ds['precip_u_rate'] = correctPrecip(ds['precip_u'],
202
- ds['wspd_u'])
203
- if ds.attrs['number_of_booms']==2:
204
- if ~ds['precip_l'].isnull().all() and precip_flag: # Correct precipitation
205
- ds['precip_l_cor'], ds['precip_l_rate']= correctPrecip(ds['precip_l'],
206
- ds['wspd_l'])
207
-
208
- get_directional_wind_speed(ds) # Get directional wind speed
209
-
210
- ds = clip_values(ds, vars_df)
211
- return ds
212
-
213
- def get_directional_wind_speed(ds: xr.Dataset) -> xr.Dataset:
214
- """
215
- Calculate directional wind speed from wind speed and direction and mutates the dataset
216
- """
217
-
218
- ds['wdir_u'] = ds['wdir_u'].where(ds['wspd_u'] != 0)
219
- ds['wspd_x_u'], ds['wspd_y_u'] = calcDirWindSpeeds(ds['wspd_u'], ds['wdir_u'])
241
+ TOA_crit_nopass = ~tilt_correction_possible & (ds['dsr'] > (1.2 * isr_toa + 150))
242
+ ds['dsr'][TOA_crit_nopass] = np.nan
220
243
 
221
- if ds.attrs['number_of_booms']==2:
222
- ds['wdir_l'] = ds['wdir_l'].where(ds['wspd_l'] != 0)
223
- ds['wspd_x_l'], ds['wspd_y_l'] = calcDirWindSpeeds(ds['wspd_l'], ds['wdir_l'])
244
+ # the upward flux should not be higher than the TOA downard flux
245
+ TOA_crit_nopass_usr = (ds['usr'] > 0.8*(1.2 * isr_toa + 150))
246
+ ds['usr'][TOA_crit_nopass_usr] = np.nan
224
247
 
225
- if hasattr(ds, 'wdir_i'):
226
- if ~ds['wdir_i'].isnull().all() and ~ds['wspd_i'].isnull().all():
227
- ds['wdir_i'] = ds['wdir_i'].where(ds['wspd_i'] != 0)
228
- ds['wspd_x_i'], ds['wspd_y_i'] = calcDirWindSpeeds(ds['wspd_i'], ds['wdir_i'])
229
- return ds
248
+ # Diffuse to direct irradiance fraction
249
+ DifFrac = 0.2 + 0.8 * ds['cc']
250
+ CorFac_all = calcCorrectionFactor(Declination_rad, phi_sensor_rad, # Calculate correction
251
+ theta_sensor_rad, HourAngle_rad,
252
+ ZenithAngle_rad, ZenithAngle_deg,
253
+ lat, DifFrac, deg2rad)
254
+ CorFac_all = CorFac_all.where(tilt_correction_possible)
230
255
 
256
+ # Correct Downwelling shortwave radiation
257
+ ds['dsr_cor'] = ds['dsr'].copy() * CorFac_all
258
+ ds['usr_cor'] = ds['usr'].copy().where(ds['dsr_cor'].notnull())
231
259
 
232
- def calcDirWindSpeeds(wspd, wdir, deg2rad=np.pi/180):
233
- '''Calculate directional wind speed from wind speed and direction
260
+ # Remove data where TOA shortwave radiation invalid
261
+ # this can only be done after correcting for tilt
262
+ TOA_crit_nopass_cor = ds['dsr_cor'] > (1.2 * isr_toa + 150)
263
+ ds['dsr_cor'][TOA_crit_nopass_cor] = np.nan
264
+ ds['usr_cor'][TOA_crit_nopass_cor] = np.nan
234
265
 
235
- Parameters
236
- ----------
237
- wspd : xr.Dataarray
238
- Wind speed data array
239
- wdir : xr.Dataarray
240
- Wind direction data array
241
- deg2rad : float
242
- Degree to radians coefficient. The default is np.pi/180
266
+ ds, OKalbedos = calcAlbedo(ds, AngleDif_deg, ZenithAngle_deg)
243
267
 
244
- Returns
245
- -------
246
- wspd_x : xr.Dataarray
247
- Wind speed in X direction
248
- wspd_y : xr.Datarray
249
- Wind speed in Y direction
250
- '''
251
- wspd_x = wspd * np.sin(wdir * deg2rad)
252
- wspd_y = wspd * np.cos(wdir * deg2rad)
253
- return wspd_x, wspd_y
268
+ return ds, (OKalbedos, sunonlowerdome, bad, isr_toa, TOA_crit_nopass_cor, TOA_crit_nopass, TOA_crit_nopass_usr)
254
269
 
255
270
 
256
- def calcCloudCoverage(T, T_0, eps_overcast, eps_clear, dlr, station_id):
271
+ def calcCloudCoverage(T, dlr, station_id,T_0, eps_overcast=1.0,
272
+ eps_clear=9.36508e-6):
257
273
  '''Calculate cloud cover from T and T_0
258
274
 
259
275
  Parameters
@@ -291,6 +307,7 @@ def calcCloudCoverage(T, T_0, eps_overcast, eps_clear, dlr, station_id):
291
307
  cc = (dlr - LR_clear) / (LR_overcast - LR_clear)
292
308
  cc[cc > 1] = 1
293
309
  cc[cc < 0] = 0
310
+
294
311
  return cc
295
312
 
296
313
 
@@ -414,12 +431,10 @@ def calcTilt(tilt_x, tilt_y, deg2rad):
414
431
 
415
432
  # Total tilt of the sensor, i.e. 0 when horizontal
416
433
  theta_sensor_rad = np.arccos(Z / (X**2 + Y**2 + Z**2)**0.5)
417
- # phi_sensor_deg = phi_sensor_rad * rad2deg #TODO take these out if not needed
418
- # theta_sensor_deg = theta_sensor_rad * rad2deg
419
434
  return phi_sensor_rad, theta_sensor_rad
420
435
 
421
436
 
422
- def adjustHumidity(rh, T, T_0, T_100, ews, ei0): #TODO figure out if T replicate is needed
437
+ def adjustHumidity(rh, T, T_0, T_100, ews, ei0):
423
438
  '''Adjust relative humidity so that values are given with respect to
424
439
  saturation over ice in subfreezing conditions, and with respect to
425
440
  saturation over water (as given by the instrument) above the melting
@@ -655,42 +670,41 @@ def calcAngleDiff(ZenithAngle_rad, HourAngle_rad, phi_sensor_rad,
655
670
  + np.cos(ZenithAngle_rad)
656
671
  * np.cos(theta_sensor_rad))
657
672
 
658
- def calcAlbedo(usr, dsr_cor, AngleDif_deg, ZenithAngle_deg):
659
- '''Calculate surface albedo based on upwelling and downwelling shorwave
660
- flux, the angle between the sun and sensor, and the sun zenith
673
+
674
+ def calcAlbedo(ds, AngleDif_deg, ZenithAngle_deg):
675
+ '''
676
+ Calculate surface albedo based on upwelling and downwelling shortwave
677
+ flux, the angle between the sun and sensor, and the sun zenith angle.
661
678
 
662
679
  Parameters
663
680
  ----------
664
- usr : xarray.DataArray
665
- Upwelling shortwave radiation
666
- dsr_cor : xarray.DataArray
667
- Downwelling shortwave radiation corrected
668
- AngleDif_def : float
669
- Angle between sun and sensor in degrees
670
- ZenithAngle_deg: float
671
- Zenith angle in degrees
681
+ ds : xarray.Dataset
682
+ Dataset containing 'usr' (upwelling shortwave), 'dsr_cor' (corrected downwelling shortwave),
683
+ and optionally 'dsr' (uncorrected downwelling shortwave) and 'cc' (cloud cover).
684
+ AngleDif_deg : xarray.DataArray
685
+ Angle between the sun and the sensor in degrees.
686
+ ZenithAngle_deg : xarray.DataArray
687
+ Sun zenith angle in degrees.
672
688
 
673
689
  Returns
674
690
  -------
675
- albedo : xarray.DataArray
676
- Derived albedo
691
+ ds : xarray.Dataset
692
+ Input dataset with a new 'albedo' variable added.
677
693
  OKalbedos : xarray.DataArray
678
- Valid albedo measurements
694
+ Boolean mask indicating valid albedo values.
679
695
  '''
680
- albedo = usr / dsr_cor
696
+ tilt_correction_possible = AngleDif_deg.notnull() & ds['cc'].notnull()
681
697
 
682
- # NaN bad data
683
- OKalbedos = (AngleDif_deg < 70) & (ZenithAngle_deg < 70) & (albedo < 1) & (albedo > 0)
684
- albedo[~OKalbedos] = np.nan
685
-
686
- # Interpolate all. Note "use_coordinate=False" is used here to force
687
- # comparison against the GDL code when that is run with *only* a TX file.
688
- # Should eventually set to default (True) and interpolate based on time,
689
- # not index.
690
- albedo = albedo.interpolate_na(dim='time', use_coordinate=False)
691
- albedo = albedo.ffill(dim='time').bfill(dim='time') #TODO remove this line and one above?
692
- return albedo, OKalbedos
698
+ ds['albedo'] = xr.where(tilt_correction_possible,
699
+ ds['usr'] / ds['dsr_cor'],
700
+ ds['usr'] / ds['dsr'])
693
701
 
702
+ OOL = (ds['albedo'] >= 1) | (ds['albedo'] <= 0)
703
+ good_zenith_angle = ZenithAngle_deg < 70
704
+ good_relative_zenith_angle = (AngleDif_deg < 70) | (AngleDif_deg.isnull())
705
+ OKalbedos = good_relative_zenith_angle & good_zenith_angle & ~OOL
706
+ ds['albedo'] = ds['albedo'].where(OKalbedos)
707
+ return ds, OKalbedos
694
708
 
695
709
  def calcTOA(ZenithAngle_deg, ZenithAngle_rad):
696
710
  '''Calculate incoming shortwave radiation at the top of the atmosphere,
@@ -780,37 +794,8 @@ def calcCorrectionFactor(Declination_rad, phi_sensor_rad, theta_sensor_rad,
780
794
  # Calculating ds['dsr'] over a horizontal surface corrected for station/sensor tilt
781
795
  CorFac_all = CorFac / (1 - DifFrac + CorFac * DifFrac)
782
796
 
783
- return CorFac_all
784
-
785
-
786
- def _checkSunPos(ds, OKalbedos, sundown, sunonlowerdome, TOA_crit_nopass):
787
- '''Check sun position
797
+ return CorFac_all.where(theta_sensor_rad.notnull())
788
798
 
789
- Parameters
790
- ----------
791
- ds : xarray.Dataset
792
- Data set
793
- OKalbedos : xarray.DataArray
794
- Valid measurements flag
795
- sundown : xarray.DataArray
796
- Sun below horizon flag
797
- sunonlowerdome : xarray.DataArray
798
- Sun in view of lower sensor flag
799
- TOA_crit_nopass : xarray.DataArray
800
- Top-of-Atmosphere flag
801
- '''
802
- valid = (~(ds['dsr_cor'].isnull())).sum()
803
- print('Sun in view of upper sensor / workable albedos:', OKalbedos.sum().values,
804
- (100*OKalbedos.sum()/valid).round().values, "%")
805
- print('Sun below horizon:', sundown.sum(),
806
- (100*sundown.sum()/valid).round().values, "%")
807
- print('Sun in view of lower sensor:', sunonlowerdome.sum().values,
808
- (100*sunonlowerdome.sum()/valid).round().values, "%")
809
- print('Spikes removed using TOA criteria:', TOA_crit_nopass.sum().values,
810
- (100*TOA_crit_nopass.sum()/valid).round().values, "%")
811
- print('Mean net SR change by corrections:',
812
- (ds['dsr_cor']-ds['usr_cor']-ds['dsr']+ds['usr']).sum().values/valid.values,
813
- "W/m2")
814
799
 
815
800
  def _getTempK(T_0): #TODO same as L2toL3._getTempK()
816
801
  '''Return steam point temperature in Kelvins
@@ -41,6 +41,8 @@ def toL3(L2,
41
41
 
42
42
  T_100 = T_0+100 # Get steam point temperature as K
43
43
 
44
+ is_bedrock = (str(ds.attrs['bedrock']).lower() == 'true')
45
+
44
46
  # Turbulent heat flux calculation
45
47
  if ('t_u' in ds.keys()) and \
46
48
  ('p_u' in ds.keys()) and \
@@ -59,7 +61,7 @@ def toL3(L2,
59
61
  z_WS_u = ds['z_boom_u'].copy() + 0.4 # Get height of Anemometer
60
62
  z_T_u = ds['z_boom_u'].copy() - 0.1 # Get height of thermometer
61
63
 
62
- if not ds.attrs['bedrock']:
64
+ if not is_bedrock:
63
65
  SHF_h_u, LHF_h_u= calculate_tubulent_heat_fluxes(T_0, T_h_u, Tsurf_h, WS_h_u, # Calculate latent and sensible heat fluxes
64
66
  z_WS_u, z_T_u, q_h_u, p_h_u)
65
67
 
@@ -90,7 +92,8 @@ def toL3(L2,
90
92
  z_WS_l = ds['z_boom_l'].copy() + 0.4 # Get height of W
91
93
  z_T_l = ds['z_boom_l'].copy() - 0.1 # Get height of thermometer
92
94
  WS_h_l = ds['wspd_l'].copy()
93
- if not ds.attrs['bedrock']:
95
+
96
+ if not is_bedrock:
94
97
  SHF_h_l, LHF_h_l= calculate_tubulent_heat_fluxes(T_0, T_h_l, Tsurf_h, WS_h_l, # Calculate latent and sensible heat fluxes
95
98
  z_WS_l, z_T_l, q_h_l, p_h_l)
96
99
 
@@ -8,7 +8,7 @@ Created on Mon Jun 10 10:58:39 2024
8
8
  import logging
9
9
  import numpy as np
10
10
  import xarray as xr
11
- from pypromice.process.L1toL2 import calcDirWindSpeeds
11
+ from pypromice.process.wind import calculate_directional_wind_speed
12
12
  logger = logging.getLogger(__name__)
13
13
 
14
14
  def resample_dataset(ds_h, t):
@@ -81,7 +81,7 @@ def resample_dataset(ds_h, t):
81
81
  df_d[var] = _calcWindDir(df_d['wspd_x_'+boom], df_d['wspd_y_'+boom])
82
82
  else:
83
83
  logger.info(var+' in dataframe but not wspd_x_'+boom+' nor wspd_y_'+boom+', recalculating them')
84
- ds_h['wspd_x_'+boom], ds_h['wspd_y_'+boom] = calcDirWindSpeeds(ds_h['wspd_'+boom], ds_h['wdir_'+boom])
84
+ ds_h['wspd_x_'+boom], ds_h['wspd_y_'+boom] = calculate_directional_wind_speed(ds_h['wspd_'+boom], ds_h['wdir_'+boom])
85
85
  df_d[['wspd_x_'+boom, 'wspd_y_'+boom]] = ds_h[['wspd_x_'+boom, 'wspd_y_'+boom]].to_dataframe().resample(t).mean()
86
86
  df_d[var] = _calcWindDir(df_d['wspd_x_'+boom], df_d['wspd_y_'+boom])
87
87
 
@@ -0,0 +1,66 @@
1
+ __all__=['correct_wind_speed', 'filter_wind_direction', 'calculate_directional_wind_speed']
2
+
3
+ import numpy as np
4
+ import xarray as xr
5
+
6
+ DEG2RAD=np.pi/180
7
+
8
+ def correct_wind_speed(wspd: xr.DataArray, coefficient) -> xr.DataArray:
9
+ """Correct wind speed with a linear correction coefficient. For example, the conversion from a standard
10
+ Young anemometer to an Arctic Young anemometer is 1.7.
11
+
12
+ Parameters
13
+ ----------
14
+ wspd : xr.DataArray
15
+ Wind speed
16
+ coefficient : float
17
+ Correction coefficient
18
+
19
+ Returns
20
+ -------
21
+ xr.DataArray
22
+ Corrected wind speed
23
+ """
24
+ return wspd * coefficient
25
+
26
+ def filter_wind_direction(wdir: xr.DataArray, wspd: xr.DataArray) -> xr.DataArray:
27
+ """Filter wind direction by wind speed, where wind direction values are removed if
28
+ wind speed is zero.
29
+
30
+ Parameters
31
+ ----------
32
+ wdir : xr.DataArray
33
+ Wind direction
34
+ wspd : xr.DataArray
35
+ Wind speed
36
+
37
+ Returns
38
+ -------
39
+ xr.DataArray
40
+ Filtered wind direction
41
+ """
42
+ return wdir.where(wspd != 0)
43
+
44
+
45
+ def calculate_directional_wind_speed(wspd: xr.DataArray, wdir: xr.DataArray):
46
+ """Calculate directional wind speed from wind speed and direction
47
+
48
+ Parameters
49
+ ----------
50
+ wspd : xr.DataArray
51
+ Wind speed data array
52
+ wdir : xr.DataArray
53
+ Wind direction data array
54
+ deg2rad : float
55
+ Degree to radians coefficient. The default is np.pi/180
56
+
57
+ Returns
58
+ -------
59
+ wspd_x : xr.DataArray
60
+ Wind speed in X direction
61
+ wspd_y : xr.DatArray
62
+ Wind speed in Y direction
63
+ """
64
+ wspd_x = wspd * np.sin(wdir * DEG2RAD)
65
+ wspd_y = wspd * np.cos(wdir * DEG2RAD)
66
+ return wspd_x, wspd_y
pypromice/tx/tx.py CHANGED
@@ -454,9 +454,13 @@ class L0tx(EmailMessage, PayloadFormat):
454
454
  bool
455
455
  Valid format flag
456
456
  '''
457
+
457
458
  if self.getFirstByte().isdigit() or (self.payload[:2] == '\n' and self.imei == 300234064121930): #TODO needed?
458
459
  return None, None, None, None, -9999, False
459
460
 
461
+ elif self.email_data['subject'].startswith("Data from station"):
462
+ return None, None, None, None, -9999, False
463
+
460
464
  elif 'watson' in self.email_data['subject'].lower() or 'gios' in self.email_data['subject'].lower():
461
465
  return None, None, None, None, -9999, False
462
466
 
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.2
1
+ Metadata-Version: 2.4
2
2
  Name: pypromice
3
- Version: 1.5.1
3
+ Version: 1.5.3
4
4
  Summary: PROMICE/GC-Net data processing toolbox
5
5
  Home-page: https://github.com/GEUS-Glaciology-and-Climate/pypromice
6
6
  Author: GEUS Glaciology and Climate
@@ -34,6 +34,7 @@ Dynamic: description
34
34
  Dynamic: description-content-type
35
35
  Dynamic: home-page
36
36
  Dynamic: keywords
37
+ Dynamic: license-file
37
38
  Dynamic: project-url
38
39
  Dynamic: requires-dist
39
40
  Dynamic: requires-python
@@ -11,9 +11,9 @@ pypromice/postprocess/get_bufr.py,sha256=98BZ7tTbmvvrVAzhxJ8LgHib3w7Q3S_x757pCub
11
11
  pypromice/postprocess/make_metadata_csv.py,sha256=lUw8fZC6dzG10IePZ4d7eZIrTbquHYCDuppYP4r_dF0,9776
12
12
  pypromice/postprocess/positions_seed.csv,sha256=0kVCQ8UfEALdeXNYCddmwxpseRqLRudbFStqp_bZRBw,224
13
13
  pypromice/postprocess/real_time_utilities.py,sha256=2GZUua5R8ocp-TnIjEA4ig8lC4TlI3SkiimzLxoHXFE,9476
14
- pypromice/process/L0toL1.py,sha256=aQmVEXhtRisHNrJuPGIg7NA4hjjmV9jUBu1DClRIGzs,23175
15
- pypromice/process/L1toL2.py,sha256=PM6lToL-WXbZk5p2m90XCxY9S_RNNV1QUB3klZlQDzc,30832
16
- pypromice/process/L2toL3.py,sha256=XqnUGKO17ADJkTOn0p8TESeCAKQGtPXpFadNfF52g6E,61378
14
+ pypromice/process/L0toL1.py,sha256=U7TLoB3UfV7BcR4iReVA1FheR1WD_OBXk0faBV3QEW0,24022
15
+ pypromice/process/L1toL2.py,sha256=0xy54wmTetZKpQ4NkqTb57tt4ERuONMbvwFMncaOrjU,30174
16
+ pypromice/process/L2toL3.py,sha256=P7abSejGmq22L9Urv78K0wXnevG05GDGvnhabTMQql8,61424
17
17
  pypromice/process/__init__.py,sha256=xvd0I-9nIyVw4M4qjgkQ5vXYpNuKcVSkIVIROQsZDo0,147
18
18
  pypromice/process/aws.py,sha256=5N4rLptO23Ic9mgyR5K8NhtNmGPa9xb9Cu7SgzAgoi8,7561
19
19
  pypromice/process/get_l2.py,sha256=ALXJCMJ8qgg0_dEKx-dV5TQ9IAJnLLLGPUxlr5QVfpk,3076
@@ -21,9 +21,10 @@ pypromice/process/get_l2tol3.py,sha256=4Qu2d5rT25H2dObyCc70ivtJg3vw6WA-hzI-kRD6y
21
21
  pypromice/process/join_l2.py,sha256=ifjuhFR9scVvZt3xuy-ELp-iRchxV1dEK9qJ4UNh5bE,4567
22
22
  pypromice/process/join_l3.py,sha256=F90uvcy85iSl6GWxyj6C3K4j2eAoQvhQB8XqOXpShvo,20221
23
23
  pypromice/process/load.py,sha256=iaFvJeaDanAA60caVj4BWupZpqgQNj3CiNU4csz9FdU,4585
24
- pypromice/process/resample.py,sha256=VhcQ0UD5LkqFPHlwEFmwYIz3kLkUfvxTenxYadmCsSE,7301
24
+ pypromice/process/resample.py,sha256=yydbsSQeUpTSyrsY3tZN0uZzp0Ddbe5HKrW-onB3Yos,7329
25
25
  pypromice/process/utilities.py,sha256=1pqSaF3bIbvRNtOjb25mbegHfuW9MY4KpCBDVXWyML8,1773
26
26
  pypromice/process/value_clipping.py,sha256=_pTAxogsqfkjwBQslGyyE3jZzhYY6_Wp-g5ZPHcudMc,1506
27
+ pypromice/process/wind.py,sha256=-dpaBOtqGyPDIU4O1HbbWRzlLNRC2a50OdnZhIaWdeI,1701
27
28
  pypromice/process/write.py,sha256=ZpA9NAgtnzJIbnjEiJ0sOtsE0J1haHASbjHqzMvBDJE,15818
28
29
  pypromice/qc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
29
30
  pypromice/qc/github_data_issues.py,sha256=gbbF62oMMWbXiLnsrs60vXbwfAqSUP113plhidVeqCU,13353
@@ -41,13 +42,13 @@ pypromice/tx/get_l0tx.py,sha256=b34-96KGshTyTN2tBFaAIBl7oZZzbRB_JR7sXtDNfXA,6957
41
42
  pypromice/tx/get_msg.py,sha256=OGS60OHjy4Wf8JExTfOdK-9xhjFdjhuChxoTSPe_MjI,3417
42
43
  pypromice/tx/payload_formats.csv,sha256=A46-XcYdpe9-gzmADylP2UVizLi_UphF-BPT5u3Lyn8,7903
43
44
  pypromice/tx/payload_types.csv,sha256=C1-xCmHytAqqAzgzPwBLWqabzWu6s6tKAd8AjVd935s,457
44
- pypromice/tx/tx.py,sha256=mghUjwGqUKe_4JWuAEgWMyH4ME2QRufeMPPHoL72R08,34267
45
+ pypromice/tx/tx.py,sha256=asbgXVI5vurKM-WVACTfpKRt-70wtzVvSbvjvYufajI,34416
45
46
  pypromice/utilities/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
46
47
  pypromice/utilities/dependency_graph.py,sha256=bqoXasC8pg5ipjBd6rqDhfHwIq11t2_cFlNT72ncw4w,3135
47
48
  pypromice/utilities/git.py,sha256=7EUGjDs_VZucrckakXKyZEclDAZ_mKIxhTWzhopCIxM,1785
48
- pypromice-1.5.1.dist-info/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
49
- pypromice-1.5.1.dist-info/METADATA,sha256=Pk2LL4EmF_PBWe5VSaNo5uWyVINQO2FA68DKf6Xzyl0,4970
50
- pypromice-1.5.1.dist-info/WHEEL,sha256=52BFRY2Up02UkjOa29eZOS2VxUrpPORXg1pkohGGUS8,91
51
- pypromice-1.5.1.dist-info/entry_points.txt,sha256=ufX1npmY3nqMPtSVRKVxn3MhG9IyFHD5FjPZQcELVXo,618
52
- pypromice-1.5.1.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
53
- pypromice-1.5.1.dist-info/RECORD,,
49
+ pypromice-1.5.3.dist-info/licenses/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
50
+ pypromice-1.5.3.dist-info/METADATA,sha256=-WPlDUHP4MXavNOtyK8ANhUD486iYbaPArw-V4Vt4fQ,4992
51
+ pypromice-1.5.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
52
+ pypromice-1.5.3.dist-info/entry_points.txt,sha256=ufX1npmY3nqMPtSVRKVxn3MhG9IyFHD5FjPZQcELVXo,618
53
+ pypromice-1.5.3.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
54
+ pypromice-1.5.3.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (76.0.0)
2
+ Generator: setuptools (80.9.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5