pypromice 1.3.5__py3-none-any.whl → 1.3.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

pypromice/get/get.py CHANGED
@@ -181,26 +181,26 @@ class TestGet(unittest.TestCase):
         l = lookup_table(['doi:10.22008/FK2/IW73UU'])
         self.assertTrue('10.22008/FK2' in list(l.values())[0])
 
-    def testAWSname(self):
-        '''Test AWS names retrieval'''
-        n = aws_names()
-        self.assertIsInstance(n, list)
-        self.assertTrue('nuk_k_hour.csv' in n)
+    # def testAWSname(self):
+    #     '''Test AWS names retrieval'''
+    #     n = aws_names()
+    #     self.assertIsInstance(n, list)
+    #     self.assertTrue('nuk_k_hour.csv' in n)
 
-    def testAWScsv(self):
-        '''Test AWS data retrieval'''
-        kan_b = aws_data('kan_b_hour.csv')
-        self.assertIsInstance(kan_b, pd.DataFrame)
-
-    def testWatsonHour(self):
-        '''Test Watson River discharge hourly data retrieval'''
-        wh = watson_discharge()
-        self.assertTrue(wh['Q']['2021-10-27 23:00:00']==5.48)
-
-    def testWatsonDaily(self):
-        '''Test Watson River discharge daily data retrieval'''
-        wd = watson_discharge(t='day')
-        self.assertTrue(wd['Q']['2009-09-04 00:00:00']==4.72)
+    # def testAWScsv(self):
+    #     '''Test AWS data retrieval'''
+    #     kan_b = aws_data('kan_b_hour.csv')
+    #     self.assertIsInstance(kan_b, pd.DataFrame)
+
+    # def testWatsonHour(self):
+    #     '''Test Watson River discharge hourly data retrieval'''
+    #     wh = watson_discharge()
+    #     self.assertTrue(wh['Q']['2021-10-27 23:00:00']==5.48)
+
+    # def testWatsonDaily(self):
+    #     '''Test Watson River discharge daily data retrieval'''
+    #     wd = watson_discharge(t='day')
+    #     self.assertTrue(wd['Q']['2009-09-04 00:00:00']==4.72)
 
     def testGetCLI(self):
         '''Test get_promice_data'''
pypromice/process/L1toL2.py CHANGED
@@ -66,25 +66,34 @@ def toL2(
     except Exception:
         logger.exception('Flagging and fixing failed:')
 
-    if ds.attrs['format'] == 'TX':
-        ds = persistence_qc(ds)                   # Flag and remove persistence outliers
-        # TODO: The configuration should be provided explicitly
-        outlier_detector = ThresholdBasedOutlierDetector.default()
-        ds = outlier_detector.filter_data(ds)     # Flag and remove percentile outliers
+    ds = persistence_qc(ds)                       # Flag and remove persistence outliers
+    # if ds.attrs['format'] == 'TX':
+    #     # TODO: The configuration should be provided explicitly
+    #     outlier_detector = ThresholdBasedOutlierDetector.default()
+    #     ds = outlier_detector.filter_data(ds)   # Flag and remove percentile outliers
+
+    # filtering gps_lat, gps_lon and gps_alt based on the difference to a baseline elevation
+    # right now the baseline elevation is the gap-filled monthly median elevation
+    baseline_elevation = (ds.gps_alt.to_series().resample('M').median()
+                          .reindex(ds.time.to_series().index, method='nearest')
+                          .ffill().bfill())
+    mask = (np.abs(ds.gps_alt - baseline_elevation) < 100) & ds.gps_alt.notnull()
+    ds[['gps_alt','gps_lon', 'gps_lat']] = ds[['gps_alt','gps_lon', 'gps_lat']].where(mask)
+
+    # removing dlr and ulr that are missing t_rad
+    # this is done now because t_rad can be filtered either manually or with persistence
+    ds['dlr'] = ds.dlr.where(ds.t_rad.notnull())
+    ds['ulr'] = ds.ulr.where(ds.t_rad.notnull())
 
     T_100 = _getTempK(T_0)
     ds['rh_u_cor'] = correctHumidity(ds['rh_u'], ds['t_u'],
                                      T_0, T_100, ews, ei0)
 
     # Determine cloud cover for on-ice stations
-    if not ds.attrs['bedrock']:
-        cc = calcCloudCoverage(ds['t_u'], T_0, eps_overcast, eps_clear,  # Calculate cloud coverage
-                               ds['dlr'], ds.attrs['station_id'])
-        ds['cc'] = (('time'), cc.data)
-    else:
-        # Default cloud cover for bedrock station for which tilt should be 0 anyway.
-        cc = 0.8
-
+    cc = calcCloudCoverage(ds['t_u'], T_0, eps_overcast, eps_clear,      # Calculate cloud coverage
+                           ds['dlr'], ds.attrs['station_id'])
+    ds['cc'] = (('time'), cc.data)
+
     # Determine surface temperature
     ds['t_surf'] = calcSurfaceTemperature(T_0, ds['ulr'], ds['dlr'],     # Calculate surface temperature
                                           emissivity)
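
The new GPS filter above keeps a fix only if it lies within 100 m of a baseline elevation, taken as the gap-filled monthly median of `gps_alt`. A minimal standalone sketch of that logic on a synthetic pandas Series (all station values here are invented for illustration):

```python
import numpy as np
import pandas as pd

time = pd.date_range("2023-01-01", periods=24 * 90, freq="H")
rng = np.random.default_rng(0)
gps_alt = pd.Series(1000 + rng.normal(0, 2, time.size), index=time)
gps_alt.iloc[500] = 250.0    # bad GPS fix, far below the station
gps_alt.iloc[510] = 4200.0   # another spike

# baseline = gap-filled monthly median elevation, as in the toL2 change
baseline = (gps_alt.resample("M").median()
            .reindex(time, method="nearest")
            .ffill().bfill())

mask = (gps_alt - baseline).abs() < 100
print(gps_alt.where(mask).isna().sum())  # 2: only the two spikes are dropped
```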
@@ -102,6 +111,11 @@ def toL2(
     else:
         lat = ds['gps_lat'].mean()
         lon = ds['gps_lon'].mean()
+
+    # smoothing tilt and rot
+    ds['tilt_x'] = smoothTilt(ds['tilt_x'])
+    ds['tilt_y'] = smoothTilt(ds['tilt_y'])
+    ds['rot'] = smoothRot(ds['rot'])
 
     deg2rad, rad2deg = _getRotation()                                    # Get degree-radian conversions
     phi_sensor_rad, theta_sensor_rad = calcTilt(ds['tilt_x'], ds['tilt_y'],  # Calculate station tilt
@@ -112,13 +126,15 @@ def toL2(
     ZenithAngle_rad, ZenithAngle_deg = calcZenith(lat, Declination_rad,  # Calculate zenith
                                                   HourAngle_rad, deg2rad,
                                                   rad2deg)
-
+
+
     # Correct Downwelling shortwave radiation
     DifFrac = 0.2 + 0.8 * cc
     CorFac_all = calcCorrectionFactor(Declination_rad, phi_sensor_rad,   # Calculate correction
                                       theta_sensor_rad, HourAngle_rad,
                                       ZenithAngle_rad, ZenithAngle_deg,
                                       lat, DifFrac, deg2rad)
+    CorFac_all = xr.where(ds['cc'].notnull(), CorFac_all, 1)
     ds['dsr_cor'] = ds['dsr'].copy(deep=True) * CorFac_all               # Apply correction
 
     AngleDif_deg = calcAngleDiff(ZenithAngle_rad, HourAngle_rad,         # Calculate angle between sun and sensor
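
The `xr.where` guard added to the correction factor means that wherever cloud cover could not be computed, the shortwave correction falls back to 1 (no correction) instead of propagating NaN into `dsr_cor`. A tiny illustration with made-up values:

```python
import numpy as np
import xarray as xr

cc = xr.DataArray([0.2, np.nan, 0.9], dims="time")      # cloud cover with a gap
corfac = xr.DataArray([1.10, 1.25, 0.95], dims="time")  # correction factors
corfac = xr.where(cc.notnull(), corfac, 1)              # gap -> no correction
print(corfac.values)  # [1.1  1.   0.95]
```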
@@ -145,9 +161,9 @@ def toL2(
     TOA_crit_nopass = (ds['dsr_cor'] > (0.9 * isr_toa + 10))             # Determine filter
     ds['dsr_cor'][TOA_crit_nopass] = np.nan                              # Apply filter and interpolate
     ds['usr_cor'][TOA_crit_nopass] = np.nan
-    ds['dsr_cor'] = ds['dsr_cor'].interpolate_na(dim='time', use_coordinate=False)
-    ds['usr_cor'] = ds['usr_cor'].interpolate_na(dim='time', use_coordinate=False)
-
+
+    ds['dsr_cor'] = ds.dsr_cor.where(ds.dsr.notnull())
+    ds['usr_cor'] = ds.usr_cor.where(ds.usr.notnull())
     # # Check sun position
     # sundown = ZenithAngle_deg >= 90
     # _checkSunPos(ds, OKalbedos, sundown, sunonlowerdome, TOA_crit_nopass)
@@ -241,6 +257,65 @@ def calcSurfaceTemperature(T_0, ulr, dlr, emissivity):
     return t_surf
 
 
+def smoothTilt(da: xr.DataArray, threshold=0.2):
+    '''Smooth the station tilt
+
+    Parameters
+    ----------
+    da : xarray.DataArray
+        either X or Y tilt inclinometer measurements
+    threshold : float
+        threshold used in a standard-deviation-based filter
+
+    Returns
+    -------
+    xarray.DataArray
+        either X or Y smoothed tilt inclinometer measurements
+    '''
+    # we calculate the moving standard deviation over a 3-day sliding window
+    # hourly resampling is necessary to make sure the same threshold can be used
+    # for 10 min and hourly data
+    moving_std_gap_filled = da.to_series().resample('H').median().rolling(
+        3*24, center=True, min_periods=2
+    ).std().reindex(da.time, method='bfill').values
+    # we select the good timestamps and gapfill assuming that
+    # - when tilt goes missing, the last available value is used
+    # - when tilt is not available for the very first time steps, the first
+    #   good value is used for backfill
+    return da.where(
+        moving_std_gap_filled < threshold
+    ).ffill(dim='time').bfill(dim='time')
+
+
+def smoothRot(da: xr.DataArray, threshold=4):
+    '''Smooth the station rotation
+
+    Parameters
+    ----------
+    da : xarray.DataArray
+        rotation measurements from inclinometer
+    threshold : float
+        threshold used in a standard-deviation-based filter
+
+    Returns
+    -------
+    xarray.DataArray
+        smoothed rotation measurements from inclinometer
+    '''
+    moving_std_gap_filled = da.to_series().resample('H').median().rolling(
+        3*24, center=True, min_periods=2
+    ).std().reindex(da.time, method='bfill').values
+    # same as for tilt with, in addition:
+    # - a resampling to daily values
+    # - a two-week median smoothing
+    # - a resampling from these daily values to the original temporal resolution
+    return ('time', (da.where(moving_std_gap_filled < threshold).ffill(dim='time')
+                     .to_series().resample('D').median()
+                     .rolling(7*2, center=True, min_periods=2).median()
+                     .reindex(da.time, method='bfill').values
+                     ))
+
+
 def calcTilt(tilt_x, tilt_y, deg2rad):
     '''Calculate station tilt
 
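Both helpers rest on the same core: a 3-day rolling standard deviation over hourly-resampled data flags unstable periods, which are then masked and gap-filled. A toy check of that rolling-std filter on a synthetic tilt series (invented data, same 0.2 threshold as `smoothTilt`):

```python
import numpy as np
import pandas as pd

time = pd.date_range("2023-06-01", periods=24 * 10, freq="H")
tilt = pd.Series(0.5, index=time)  # steady inclinometer reading
tilt.iloc[100:120] += np.random.default_rng(1).normal(0, 1.0, 20)  # noisy episode

moving_std = tilt.rolling(3 * 24, center=True, min_periods=2).std()
cleaned = tilt.where(moving_std < 0.2).ffill().bfill()  # mask, then gap-fill
print(int((moving_std >= 0.2).sum()), "hourly samples flagged as unstable")
```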
@@ -323,7 +398,6 @@ def correctHumidity(rh, T, T_0, T_100, ews, ei0): #TODO f
 
     # Set to Goff & Gratch values when freezing, otherwise just rh
     rh_cor = rh.where(~freezing, other = rh*(e_s_wtr / e_s_ice))
-    rh_cor = rh_cor.where(T.notnull())
     return rh_cor
 
 
pypromice/process/L2toL3.py CHANGED
@@ -161,7 +161,7 @@ def calcHeatFlux(T_0, T_h, Tsurf_h, rho_atm, WS_h, z_WS, z_T, nu, q_h, p_h,
     es_0 : int
         Saturation vapour pressure at the melting point (hPa). Default is 6.1071.
     eps : int
-        Default is 0.622.
+        Ratio of molar masses of vapor and dry air (0.622).
     gamma : int
         Flux profile correction (Paulson & Dyer). Default is 16..
     L_sub : int
@@ -313,7 +313,7 @@ def calcHumid(T_0, T_100, T_h, es_0, es_100, eps, p_h, RH_cor_h):
     T_h : xarray.DataArray
         Air temperature
     eps : int
-        DESCRIPTION
+        Ratio of molar masses of vapor and dry air (0.622)
     es_0 : float
         Saturation vapour pressure at the melting point (hPa)
     es_100 : float
pypromice/process/aws.py CHANGED
@@ -224,7 +224,7 @@ class AWS(object):
 
         except pd.errors.ParserError as e:
             # ParserError: Too many columns specified: expected 40 and found 38
-            logger.info(f'-----> No msg_lat or msg_lon for {k}')
+            # logger.info(f'-----> No msg_lat or msg_lon for {k}')
             for item in ['msg_lat', 'msg_lon']:
                 target['columns'].remove(item)  # Also removes from self.config
             ds_list.append(self.readL0file(target))
@@ -723,7 +723,7 @@ def resampleL3(ds_h, t):
     Parameters
     ----------
     ds_h : xarray.Dataset
-        L3 AWS daily dataset
+        L3 AWS dataset either at 10 min (for raw data) or hourly (for tx data)
     t : str
         Resample factor, same variable definition as in
         pandas.DataFrame.resample()
@@ -731,9 +731,10 @@ def resampleL3(ds_h, t):
     Returns
     -------
     ds_d : xarray.Dataset
-        L3 AWS hourly dataset
+        L3 AWS dataset resampled to the frequency defined by t
     '''
     df_d = ds_h.to_dataframe().resample(t).mean()
+
     # recalculating wind direction from averaged directional wind speeds
     for var in ['wdir_u','wdir_l','wdir_i']:
         if var in df_d.columns:
@@ -742,12 +743,68 @@ def resampleL3(ds_h, t):
                                        df_d['wspd_y_'+var.split('_')[1]])
         else:
             logger.info(var,'in dataframe but not','wspd_x_'+var.split('_')[1],'wspd_x_'+var.split('_')[1])
+
+    # recalculating relative humidity from average vapour pressure and average
+    # saturation vapour pressure
+    for var in ['rh_u','rh_l']:
+        lvl = var.split('_')[1]
+        if var in df_d.columns:
+            if ('t_'+lvl in ds_h.keys()):
+                es_wtr, es_cor = calculateSaturationVaporPressure(ds_h['t_'+lvl])
+                p_vap = ds_h[var] / 100 * es_wtr
+
+                df_d[var] = (p_vap.to_series().resample(t).mean() \
+                             / es_wtr.to_series().resample(t).mean())*100
+                df_d[var+'_cor'] = (p_vap.to_series().resample(t).mean() \
+                                    / es_cor.to_series().resample(t).mean())*100
+
     vals = [xr.DataArray(data=df_d[c], dims=['time'],
                          coords={'time':df_d.index}, attrs=ds_h[c].attrs) for c in df_d.columns]
     ds_d = xr.Dataset(dict(zip(df_d.columns,vals)), attrs=ds_h.attrs)
     return ds_d
 
 
+def calculateSaturationVaporPressure(t, T_0=273.15, T_100=373.15, es_0=6.1071,
+                                     es_100=1013.246, eps=0.622):
+    '''Calculate saturation vapour pressure
+
+    Parameters
+    ----------
+    t : xarray.DataArray
+        Air temperature
+    T_0 : float
+        Melting point temperature in Kelvin. Default is 273.15.
+    T_100 : float
+        Steam point temperature in Kelvin. Default is 373.15.
+    es_0 : float
+        Saturation vapour pressure at the melting point (hPa)
+    es_100 : float
+        Saturation vapour pressure at steam point temperature (hPa)
+
+    Returns
+    -------
+    xarray.DataArray
+        Saturation vapour pressure with regard to water above 0 C (hPa)
+    xarray.DataArray
+        Saturation vapour pressure where subfreezing timestamps are with regard to ice (hPa)
+    '''
+    # Saturation vapour pressure above 0 C (hPa)
+    es_wtr = 10**(-7.90298 * (T_100 / (t + T_0) - 1) + 5.02808 * np.log10(T_100 / (t + T_0))
+                  - 1.3816E-7 * (10**(11.344 * (1 - (t + T_0) / T_100)) - 1)
+                  + 8.1328E-3 * (10**(-3.49149 * (T_100 / (t + T_0) - 1)) - 1) + np.log10(es_100))
+
+    # Saturation vapour pressure below 0 C (hPa)
+    es_ice = 10**(-9.09718 * (T_0 / (t + T_0) - 1) - 3.56654
+                  * np.log10(T_0 / (t + T_0)) + 0.876793
+                  * (1 - (t + T_0) / T_0)
+                  + np.log10(es_0))
+
+    # Saturation vapour pressure (hPa)
+    es_cor = xr.where(t < 0, es_ice, es_wtr)
+
+    return es_wtr, es_cor
+
+
 def _calcWindDir(wspd_x, wspd_y):
     '''Calculate wind direction in degrees
 
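The reason `resampleL3` now recomputes RH this way: relative humidity is nonlinear in temperature, so averaging RH directly is not the same as averaging vapour pressure and saturation vapour pressure separately and re-forming the ratio. A toy demonstration with two invented hourly samples, using a simple Magnus approximation in place of the Goff-Gratch form that `calculateSaturationVaporPressure` implements:

```python
import numpy as np

t = np.array([-10.0, 0.0])   # hourly air temperatures (C)
rh = np.array([90.0, 50.0])  # hourly relative humidities (%)

# Magnus approximation of saturation vapour pressure (hPa), illustration only
es = 6.112 * np.exp(17.62 * t / (243.12 + t))
e = rh / 100 * es            # actual vapour pressure (hPa)

print(rh.mean())                   # 70.0  -- naive average of RH
print(e.mean() / es.mean() * 100)  # ~62.8 -- average pressures, then ratio
```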
pypromice/process/variables.csv CHANGED
@@ -89,4 +89,4 @@ wspd_i,wind_speed,Wind speed (instantaneous),m s-1,0,100,wdir_i wspd_x_i wspd_y_
 wdir_i,wind_from_direction,Wind from direction (instantaneous),degrees,1,360,wspd_x_i wspd_y_i,all,all,4,physicalMeasurement,time lat lon alt,True,For meteorological observations
 wspd_x_i,wind_speed_from_x_direction,Wind speed from x direction (instantaneous),m s-1,-100,100,wdir_i wspd_i,all,all,4,modelResult,time lat lon alt,True,For meteorological observations
 wspd_y_i,wind_speed_from_y_direction,Wind speed from y direction (instantaneous),m s-1,-100,100,wdir_i wspd_i,all,all,4,modelResult,time lat lon alt,True,For meteorological observations
-msg_i,message,Message string (instantaneous),-,,,,all,all,,qualityInformation,time lat lon alt,True,For meteorological observations
+msg_i,message,Message string (instantaneous),-,,,,all,,,qualityInformation,time lat lon alt,True,L0 only
pypromice/qc/percentiles/thresholds.csv CHANGED
@@ -122,8 +122,8 @@ JAR,p_[ul],881.00,929.00,
 JAR,p_i,-119.00,-71.00,
 JAR,wspd_[uli],-11.62,29.82,
 JAR,t_[uli],-14.60,13.30,summer
-FRE,p_[ul],638.31,799.82,
-FRE,p_i,-361.69,-200.18,
+FRE,p_[ul],800,1000,
+FRE,p_i,-200,0,
 FRE,wspd_[uli],-12.00,22.62,
 FRE,t_[uli],-38.20,10.02,winter
 FRE,t_[uli],-36.55,18.07,spring
pypromice/qc/persistence.py CHANGED
@@ -14,11 +14,15 @@ __all__ = [
 
 logger = logging.getLogger(__name__)
 
+# period is given in hours: 2 persistent 10 min values will be flagged if period < 0.333
 DEFAULT_VARIABLE_THRESHOLDS = {
     "t": {"max_diff": 0.0001, "period": 2},
     "p": {"max_diff": 0.0001, "period": 2},
-    # Relative humidity can be very stable around 100%.
-    #"rh": {"max_diff": 0.0001, "period": 2},
+    'gps_lat_lon': {"max_diff": 0.000001, "period": 6},  # gets special handling to remove simultaneously constant gps_lat and gps_lon
+    'gps_alt': {"max_diff": 0.0001, "period": 6},
+    't_rad': {"max_diff": 0.0001, "period": 2},
+    "rh": {"max_diff": 0.0001, "period": 2},  # gets special handling to allow constant 100%
+    "wspd": {"max_diff": 0.0001, "period": 6},
 }
 
 
@@ -58,27 +62,46 @@ def persistence_qc(
     if variable_thresholds is None:
         variable_thresholds = DEFAULT_VARIABLE_THRESHOLDS
 
-    logger.debug(f"Running persistence_qc using {variable_thresholds}")
+    logger.info(f"Running persistence_qc using {variable_thresholds}")
 
     for k in variable_thresholds.keys():
-        var_all = [
-            k + "_u",
-            k + "_l",
-            k + "_i",
-        ]  # apply to upper, lower boom, and instant
+        if k in ['t','p','rh','wspd','wdir', 'z_boom']:
+            var_all = [
+                k + "_u",
+                k + "_l",
+                k + "_i",
+            ]  # apply to upper, lower boom, and instant
+        else:
+            var_all = [k]
         max_diff = variable_thresholds[k]["max_diff"]  # loading persistent limit
         period = variable_thresholds[k]["period"]  # loading diff period
 
         for v in var_all:
            if v in df:
                 mask = find_persistent_regions(df[v], period, max_diff)
+                if 'rh' in v:
+                    mask = mask & (df[v]<99)
                 n_masked = mask.sum()
                 n_samples = len(mask)
-                logger.debug(
+                logger.info(
                     f"Applying persistent QC in {v}. Filtering {n_masked}/{n_samples} samples"
                 )
                 # setting outliers to NaN
                 df.loc[mask, v] = np.nan
+            elif v == 'gps_lat_lon':
+                mask = (
+                    find_persistent_regions(df['gps_lon'], period, max_diff)
+                    & find_persistent_regions(df['gps_lat'], period, max_diff)
+                )
+
+                n_masked = mask.sum()
+                n_samples = len(mask)
+                logger.info(
+                    f"Applying persistent QC in {v}. Filtering {n_masked}/{n_samples} samples"
+                )
+                # setting outliers to NaN
+                df.loc[mask, 'gps_lon'] = np.nan
+                df.loc[mask, 'gps_lat'] = np.nan
 
     # Back to xarray, and re-assign the original attrs
     ds_out = df.to_xarray()
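
The `'rh' in v` guard above exists because humidity sensors can legitimately sit at 100% for long stretches (fog, riming), so persistence alone must not flag them. A sketch of that behaviour with an invented series and a simplified stand-in for `find_persistent_regions` (the real function also enforces the `period` duration):

```python
import pandas as pd

rh = pd.Series([100.0] * 6 + [43.2] * 6)  # saturated spell, then a stuck sensor
persistent = rh.diff().abs().fillna(0) < 0.0001  # simplified persistence mask
mask = persistent & (rh < 99)                    # the rh special handling
print(rh.where(~mask).tolist())  # the 100s survive; repeated 43.2s become NaN
```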
@@ -110,33 +133,34 @@ def count_consecutive_persistent_values(
 ) -> pd.Series:
     diff = data.ffill().diff().abs()  # forward filling all NaNs!
     mask: pd.Series = diff < max_diff
-    return count_consecutive_true(mask)
+    return duration_consecutive_true(mask)
 
 
-def count_consecutive_true(
-    series: Union[pd.Series, pd.DataFrame]
-) -> Union[pd.Series, pd.DataFrame]:
+def duration_consecutive_true(
+    series: pd.Series,
+) -> pd.Series:
     """
-    Convert boolean series to integer series where the values represent the number of consecutive true values.
+    From a boolean series, calculates the duration, in hours, of the periods with consecutive true values.
 
     Examples
     --------
-    >>> count_consecutive_true(pd.Series([False, True, False, False, True, True, True, False, True]))
+    >>> duration_consecutive_true(pd.Series([False, True, False, False, True, True, True, False, True]))
     pd.Series([0, 1, 0, 0, 1, 2, 3, 0, 1])
 
     Parameters
     ----------
-    series
+    series : pd.Series
         Boolean pandas Series or DataFrame
 
     Returns
     -------
-    consecutive_true_count
+    pd.Series
         Integer pandas Series or DataFrame with values representing the number of consecutive true values.
 
     """
     # assert series.dtype == bool
-    cumsum = series.cumsum()
+    cumsum = ((series.index - series.index[0]).total_seconds()/3600).to_series(index=series.index)
     is_first = series.astype("int").diff() == 1
     offset = (is_first * cumsum).replace(0, np.nan).fillna(method="ffill").fillna(0)
-    return ((cumsum - offset + 1) * series).astype("int")
+
+    return (cumsum - offset) * series
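
The switch from counting samples to measuring elapsed time matters because the thresholds are now expressed in hours while station data comes at both 10-minute and hourly resolution. A quick self-contained check that the patched logic scales with the time step (here `.ffill()` stands in for the deprecated `fillna(method="ffill")`):

```python
import numpy as np
import pandas as pd

def duration_consecutive_true(series: pd.Series) -> pd.Series:
    # hours elapsed since the start of each run of True values
    cumsum = ((series.index - series.index[0]).total_seconds() / 3600).to_series(index=series.index)
    is_first = series.astype("int").diff() == 1
    offset = (is_first * cumsum).replace(0, np.nan).ffill().fillna(0)
    return (cumsum - offset) * series

flags = [False, True, True, True, False]
hourly = pd.Series(flags, index=pd.date_range("2023-01-01", periods=5, freq="H"))
two_hourly = pd.Series(flags, index=pd.date_range("2023-01-01", periods=5, freq="2H"))
print(duration_consecutive_true(hourly).max())      # 2.0 hours
print(duration_consecutive_true(two_hourly).max())  # 4.0 hours
```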
pypromice-1.3.6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: pypromice
-Version: 1.3.5
+Version: 1.3.6
 Summary: PROMICE/GC-Net data processing toolbox
 Home-page: https://github.com/GEUS-Glaciology-and-Climate/pypromice
 Author: GEUS Glaciology and Climate
@@ -23,11 +23,11 @@ Requires-Dist: pandas >=1.5.0
 Requires-Dist: xarray >=2022.6.0
 Requires-Dist: toml
 Requires-Dist: scipy >=1.9.0
-Requires-Dist: scikit-learn >=1.1.0
 Requires-Dist: Bottleneck
 Requires-Dist: netcdf4
 Requires-Dist: pyDataverse
 Requires-Dist: eccodes
+Requires-Dist: scikit-learn >=1.1.0
 
 # pypromice
 [![PyPI version](https://badge.fury.io/py/pypromice.svg)](https://badge.fury.io/py/pypromice) [![Anaconda-Server Badge](https://anaconda.org/conda-forge/pypromice/badges/version.svg)](https://anaconda.org/conda-forge/pypromice) [![Anaconda-Server Badge](https://anaconda.org/conda-forge/pypromice/badges/platforms.svg)](https://anaconda.org/conda-forge/pypromice) [![](<https://img.shields.io/badge/Dataverse DOI-10.22008/FK2/3TSBF0-orange>)](https://www.doi.org/10.22008/FK2/3TSBF0) [![DOI](https://joss.theoj.org/papers/10.21105/joss.05298/status.svg)](https://doi.org/10.21105/joss.05298) [![Documentation Status](https://readthedocs.org/projects/pypromice/badge/?version=latest)](https://pypromice.readthedocs.io/en/latest/?badge=latest)
@@ -58,7 +58,14 @@ $ conda install pypromice -c conda-forge
 $ pip install pypromice
 ```
 
-For the most up-to-date version, pypromice can be installed directly from the repo:
+The [eccodes](https://confluence.ecmwf.int/display/ECC/ecCodes+installation) package needed for pypromice's post-processing functionality has to be installed separately when using the pip distribution:
+
+```
+$ conda install eccodes -c conda-forge
+$ pip install pypromice
+```
+
+And for the most up-to-date version of pypromice, the package can be cloned and installed directly from the repo:
 
 ```
 $ pip install --upgrade git+http://github.com/GEUS-Glaciology-and-Climate/pypromice.git
@@ -76,19 +83,3 @@ $ cd pypromice/
 $ pip install .
 ```
 
-### Additional dependencies
-
-Additional packages are required if you wish to use pypromice's post-processing functionality.
-
-[eccodes](https://confluence.ecmwf.int/display/ECC/ecCodes+installation) is the official package for BUFR encoding and decoding. Try firstly to install with conda-forge like so:
-
-```
-$ conda install -c conda-forge eccodes
-```
-
-If the environment cannot resolve the eccodes installation then follow the steps documented [here](https://gist.github.com/MHBalsmeier/a01ad4e07ecf467c90fad2ac7719844a) to download eccodes and then install eccodes' python bindings using pip.
-
-```
-$ pip3 install eccodes-python
-```
-
pypromice-1.3.6.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
 pypromice/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pypromice/get/__init__.py,sha256=n2L6P9EeUsdjsHaeU7BEanBjlkCBX9csGseT8z-laew,32
-pypromice/get/get.py,sha256=tPrHyllrukGotOR8PjBYJb5tcuL4S_t6WP7wvhbWkg8,7688
+pypromice/get/get.py,sha256=8tdIbvdeXCpRWU7KmcKGIP9ZPdqIry3MjtJp9krumvo,7705
 pypromice/get/get_promice_data.py,sha256=bluNCaP50iRlWBzdEOXLrSPepOQdGB7SeQLkTWiqK4c,1806
 pypromice/postprocess/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pypromice/postprocess/bufr_to_csv.py,sha256=RQZSJ3rC5v-R76rLQf60HNhgH7JQL3WhzXGgySHUyRg,317
@@ -10,23 +10,23 @@ pypromice/postprocess/positions_seed.csv,sha256=0kVCQ8UfEALdeXNYCddmwxpseRqLRudb
 pypromice/postprocess/real_time_utilities.py,sha256=hmJJ0t_dq6nU-VjFOgnWC_hAksECy6gOUw3jblgeekQ,8710
 pypromice/postprocess/station_configurations.toml,sha256=o_NOXTjf8KBNPSa2vQBtSZZPCymIjixTNniVvwUtltk,17025
 pypromice/process/L0toL1.py,sha256=eQw_cJjEEOj4zOuONp1SxTSDXPG5S0SIVlac7tuSlsc,22818
-pypromice/process/L1toL2.py,sha256=qeC83wraichbX9rYVDYncD8j4QHUxJMm38Hj7D8MSxU,25577
-pypromice/process/L2toL3.py,sha256=LnpxGgk1auTHLhef4JUx6iWEhraKoXCRkVsyWI2oYR4,18238
+pypromice/process/L1toL2.py,sha256=4d8Etzv1zx0FI8LGlnJx0h1GwmJoNPMwng_8pOBmqw0,28532
+pypromice/process/L2toL3.py,sha256=Tr74-Agaf0OZLdYmDj7VwdidLQTxxSMUjUu7n3cNeRo,18311
 pypromice/process/__init__.py,sha256=xvd0I-9nIyVw4M4qjgkQ5vXYpNuKcVSkIVIROQsZDo0,147
-pypromice/process/aws.py,sha256=fIQqOP3WrCBs17a4kMxXzVD-8OD3Mlagy3OxJ-ys_mw,29844
+pypromice/process/aws.py,sha256=EfzwJ49IHNer5Ix9QW0aFcxdlSc5B3QfSwjYbSRKht8,32195
 pypromice/process/get_l3.py,sha256=jwb5nO22d9Pk5I3SrijkXbTwgqnmqVdxQn0_ACMabH0,1656
 pypromice/process/join_l3.py,sha256=aAu_7vCSVhigcU2siMOdkz7DL1iA9smImWKNQOYgS3M,4979
 pypromice/process/metadata.csv,sha256=Zp8Z40m23ohQfzUdLT4z43uHGFvx39rODXZHpUuYn6M,7055
 pypromice/process/value_clipping.py,sha256=FkBiDT_HK_BDFiVjB7NdWH-_nab7vONG9LOd2PpEBI8,1573
-pypromice/process/variables.csv,sha256=l6yqVNWlUntnTrXm6u1nrK8ZcrwpMmb_60Hhj-xfli0,13340
+pypromice/process/variables.csv,sha256=zz7KsL4QGDDNLbfxKw_BKT0H91oF3VpCnG8Hkw9gKDQ,13313
 pypromice/qc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pypromice/qc/github_data_issues.py,sha256=DAu2F2HuPSYO_JICKI8-N8zGlbkVOsW6vr1qtMPfe5k,13537
-pypromice/qc/persistence.py,sha256=TuXxbedsekR1LSBdc0V9xypvlj8mkSE3xqI-U9xqcnY,4647
+pypromice/qc/persistence.py,sha256=SLKxBQRVMqoeGcLyWO-GgAvHuOwAS8Klkj3c_Hp60kY,5792
 pypromice/qc/persistence_test.py,sha256=DbltqVnnKmxV5T-UiF3AGVMXSVsbzqjYRqhVV1pAL9c,5771
 pypromice/qc/percentiles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pypromice/qc/percentiles/compute_thresholds.py,sha256=CjopJRMr8g36SqMS5QB9BGxFr4ohnNvsduZ-Q0xY7S0,6086
 pypromice/qc/percentiles/outlier_detector.py,sha256=5_458aMm9MAubfYv7oIz1Lp9ME6Sn1xiSiAQmIe-riY,3370
-pypromice/qc/percentiles/thresholds.csv,sha256=jlakQFC67qNorRZuP5Fx2s_AVUD2nIhxKhwvObPRgww,9530
+pypromice/qc/percentiles/thresholds.csv,sha256=KTQcYsg2VCZmR-Rf1Zzx1Jn-ZmR5yPPWWXYZ0Z03PDQ,9516
 pypromice/test/test_config1.toml,sha256=Pi26xdfGoQruaG0My2odchByoowzsOz0e6jd0AAfj-Y,2130
 pypromice/test/test_config2.toml,sha256=kt1PK_prvv068LZSEs_JNMyZguaV1AaJWncsQ532yM4,1914
 pypromice/test/test_email,sha256=M6ilVuEAkCU-hDXmH7SbmImpMMEhpfve2NBcqg1lBBY,3810
@@ -45,9 +45,9 @@ pypromice/tx/get_watsontx.py,sha256=vFSuDs_vvkATe_6WCF8OLVsx7Wa-MxLATZRfP9qUZqI,
 pypromice/tx/payload_formats.csv,sha256=4pClTt5qjdB4XViDY2QzuLeJY5EBkEBia0E7Y-aMLVo,7726
 pypromice/tx/payload_types.csv,sha256=C1-xCmHytAqqAzgzPwBLWqabzWu6s6tKAd8AjVd935s,457
 pypromice/tx/tx.py,sha256=s1ADVUwX0hoykFMSo_BDgopsIUWTJh2l7KJ05nMNmQg,33692
-pypromice-1.3.5.dist-info/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
-pypromice-1.3.5.dist-info/METADATA,sha256=q9mdglZdODMSj6WwoSEyD3Fx2ATEtNPeTlRfwAqcTZw,5096
-pypromice-1.3.5.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
-pypromice-1.3.5.dist-info/entry_points.txt,sha256=8CGN4oJByZuvVFt9rL8HG41F11XL2L6aSe-EPjHTkkE,352
-pypromice-1.3.5.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
-pypromice-1.3.5.dist-info/RECORD,,
+pypromice-1.3.6.dist-info/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
+pypromice-1.3.6.dist-info/METADATA,sha256=xmU13jhChjzhAMvmvxioBu1KAOAkZsXOIL842b7KUhk,4755
+pypromice-1.3.6.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+pypromice-1.3.6.dist-info/entry_points.txt,sha256=8CGN4oJByZuvVFt9rL8HG41F11XL2L6aSe-EPjHTkkE,352
+pypromice-1.3.6.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
+pypromice-1.3.6.dist-info/RECORD,,