pypromice 1.6.0__py3-none-any.whl → 1.7.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

Potentially problematic release.

@@ -159,7 +159,7 @@ def adjustData(ds, adj_dir, var_list=[], skip_var=[]):
     adj_info.loc[adj_info.t0.isnull()|(adj_info.t0==''), "t0"] = None
 
     # if "*" is in the variable name then we interpret it as regex
-    selec = adj_info['variable'].str.contains('\*') & (adj_info['variable'] != "*")
+    selec = adj_info['variable'].str.contains(r'\*') & (adj_info['variable'] != "*")
     for ind in adj_info.loc[selec, :].index:
         line_template = adj_info.loc[ind, :].copy()
         regex = adj_info.loc[ind, 'variable']
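
Note on this one-character change: a plain string containing '\*' is an invalid escape sequence (a SyntaxWarning since Python 3.12), so the raw-string prefix is the idiomatic spelling; the regex behaviour is unchanged. A minimal sketch of the selection logic (variable names invented):

    import pandas as pd

    variables = pd.Series(["t_i_*", "*", "p_u"])
    # True only for names containing a literal '*' that are not the bare wildcard
    selec = variables.str.contains(r'\*') & (variables != "*")
    print(selec.tolist())  # [True, False, False]
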
@@ -209,23 +209,11 @@ def adjustData(ds, adj_dir, var_list=[], skip_var=[]):
 
     if func == "add":
         ds_out[var].loc[index_slice] = ds_out[var].loc[index_slice].values + val
-        # flagging adjusted values
-        # if var + "_adj_flag" not in ds_out.columns:
-        # ds_out[var + "_adj_flag"] = 0
-        # msk = ds_out[var].loc[index_slice])].notnull()
-        # ind = ds_out[var].loc[index_slice])].loc[msk].time
-        # ds_out.loc[ind, var + "_adj_flag"] = 1
 
     if func == "multiply":
         ds_out[var].loc[index_slice] = ds_out[var].loc[index_slice].values * val
         if "DW" in var:
             ds_out[var].loc[index_slice] = ds_out[var].loc[index_slice] % 360
-        # flagging adjusted values
-        # if var + "_adj_flag" not in ds_out.columns:
-        # ds_out[var + "_adj_flag"] = 0
-        # msk = ds_out[var].loc[index_slice].notnull()
-        # ind = ds_out[var].loc[index_slice].loc[msk].time
-        # ds_out.loc[ind, var + "_adj_flag"] = 1
 
     if func == "min_filter":
         tmp = ds_out[var].loc[index_slice].values
@@ -277,6 +265,27 @@ def adjustData(ds, adj_dir, var_list=[], skip_var=[]):
         ds_out[var2].loc[index_slice] = val_var
         ds_out[var].loc[index_slice] = val_var2
 
+    if "delete_when_same_as_" in func:
+        var2 = func.replace('delete_when_same_as_','')
+        tmp = ds_out[var].loc[index_slice]
+        msk = np.abs(tmp - ds_out[var2].loc[index_slice]) < val
+        tmp = tmp.where(~msk)
+        # remove isolated singletons and pairs surrounded by NaNs
+        m1 = tmp.notnull() & tmp.shift(time=1).isnull() & tmp.shift(time=-1).isnull()
+
+        m2_first = (tmp.notnull()
+                    & tmp.shift(time=1).isnull()     # left is NaN
+                    & tmp.shift(time=-1).notnull()   # right is value
+                    & tmp.shift(time=-2).isnull())   # right+1 is NaN
+
+        m2_second = (tmp.notnull()
+                     & tmp.shift(time=-1).isnull()   # right is NaN
+                     & tmp.shift(time=1).notnull()   # left is value
+                     & tmp.shift(time=2).isnull())   # left-1 is NaN
+
+        tmp = tmp.where(~(m1 | m2_first | m2_second))
+        ds_out[var].loc[index_slice] = tmp.values
+
     if func == "rotate":
         ds_out[var].loc[index_slice] = (ds_out[var].loc[index_slice].values + val) % 360
 
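
The new `delete_when_same_as_<var2>` adjustment blanks values of `var` that lie within `val` of `var2`, then additionally drops surviving values that remain only as isolated singletons or pairs bracketed by NaNs. A toy illustration of the singleton mask `m1` (data invented; `shift(time=1)` holds the left neighbour, `shift(time=-1)` the right):

    import numpy as np
    import xarray as xr

    tmp = xr.DataArray([np.nan, 1.0, np.nan, 2.0, 3.0, np.nan], dims="time")
    m1 = tmp.notnull() & tmp.shift(time=1).isnull() & tmp.shift(time=-1).isnull()
    print(m1.values)  # [False  True False False False False]

The pair masks work the same way: 2.0 and 3.0 above match `m2_first` and `m2_second` respectively, so that pair is dropped as well.
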
@@ -0,0 +1,142 @@
+import datetime
+import numpy as np
+import pandas as pd
+
+DEFAULT_COMPLETENESS_THRESHOLDS = {
+    "default": 0.8,
+    "albedo": 0.2,
+    "p_u": 0.5,
+    "p_l": 0.5,
+    "z_boom_u": 0.1,
+    "z_boom_l": 0.1,
+    "z_boom_cor_u": 0.1,
+    "z_boom_cor_l": 0.1,
+    "z_stake": 0.1,
+    "z_stake_cor": 0.1,
+    "z_surf_combined": 0.1,
+    "t_i_1": 0.1,
+    "t_i_2": 0.1,
+    "t_i_3": 0.1,
+    "t_i_4": 0.1,
+    "t_i_5": 0.1,
+    "t_i_6": 0.1,
+    "t_i_7": 0.1,
+    "t_i_8": 0.1,
+    "t_i_9": 0.1,
+    "t_i_10": 0.1,
+    "t_i_11": 0.1,
+    "gps_lat": 0.1,
+    "gps_lon": 0.1,
+    "gps_alt": 0.1,
+    "batt_v": 0.1,
+}
+
+ALLOWED_TIME_STAMP_DURATIONS = (
+    datetime.timedelta(minutes=10),
+    datetime.timedelta(minutes=30),
+    datetime.timedelta(hours=1),
+    datetime.timedelta(hours=6),
+    datetime.timedelta(days=1),
+)
+
+
+def classify_timestamp_durations(
+    index: pd.DatetimeIndex,
+) -> pd.TimedeltaIndex:
+    """
+    Classifies the durations between consecutive timestamps in a given DatetimeIndex.
+
+    The function computes the time differences between consecutive timestamps and
+    checks whether these differences belong to a predefined set of allowed
+    durations. It performs backward filling to handle missing values.
+
+    Parameters
+    ----------
+    index : pd.DatetimeIndex
+        A pandas DatetimeIndex containing the timestamps to classify.
+
+    Returns
+    -------
+    pd.TimedeltaIndex
+        A TimedeltaIndex containing the classified durations for the corresponding
+        timestamps in the input index.
+    """
+    return pd.TimedeltaIndex(
+        index.to_series()
+        .diff()
+        .where(lambda d: d.isin(ALLOWED_TIME_STAMP_DURATIONS))
+        .bfill()
+    )
+
+
+def get_completeness_mask(
+    data_frame: pd.DataFrame,
+    resample_offset: str,
+    completeness_thresholds: dict[str, float] = DEFAULT_COMPLETENESS_THRESHOLDS,
+    *,
+    atol: float = 1e-9,
+) -> pd.DataFrame:
+    """
+    Returns a completeness mask for the given DataFrame based on the specified
+    resampling offset, completeness thresholds, and tolerance for over-completeness.
+
+    This function evaluates the completeness of timestamped data, ensuring that
+    records match the expected durations defined by the `resample_offset`. It
+    computes whether each resampled group of data satisfies the completeness
+    constraints defined by `completeness_thresholds` and `atol`.
+
+    Parameters
+    ----------
+    data_frame : pd.DataFrame
+        Input data containing a DatetimeIndex and associated values. The index must
+        be a DatetimeIndex as the function relies on timestamp durations for
+        computations.
+    resample_offset : str
+        Offset string defining the resampling frequency. Examples include 'MS' (month
+        start) or other pandas-compatible offset strings.
+    completeness_thresholds : dict[str, float], optional
+        Dictionary containing the variable-specific minimum completeness ratio
+        required to consider a time period as valid. Must contain a key 'default'
+        used for variables not explicitly listed.
+        Defaults to the dictionary `DEFAULT_COMPLETENESS_THRESHOLDS`.
+    atol : float, optional
+        Absolute tolerance for over-completeness. Specifies an allowable margin by
+        which completeness can exceed 1. Defaults to 1e-9.
+
+    Returns
+    -------
+    pd.DataFrame
+        A DataFrame containing Boolean values, where True indicates that the data
+        for the corresponding time period satisfies the completeness constraints,
+        while False indicates the data is either under-complete or over-complete.
+    """
+    if resample_offset in ['MS', 'ME']:
+        offset_timedelta = datetime.timedelta(days=30)
+        # Increase tolerance for overcomplete values in monthly resampling
+        # to handle months with 31 days.
+        atol = 1/30 + atol
+    else:
+        offset_timedelta = pd.to_timedelta(resample_offset)
+
+    index = data_frame.index
+    assert isinstance(index, pd.DatetimeIndex)
+
+    timestamp_durations = classify_timestamp_durations(index)
+    timestamp_coverage = timestamp_durations / np.array(offset_timedelta)
+    data_frame_is_valid = data_frame.notna()
+
+    completeness = (
+        data_frame_is_valid
+        .mul(timestamp_coverage, axis=0)
+        .resample(resample_offset).sum()
+    )
+
+    thresholds = pd.Series(
+        {col: completeness_thresholds.get(col, completeness_thresholds["default"])
+         for col in data_frame.columns}
+    )
+
+    is_under_complete = completeness.lt(thresholds, axis=1)
+    is_over_complete = completeness.gt(1 + atol)
+    completeness_mask = ~(is_under_complete | is_over_complete)
+    return completeness_mask
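
The new `pypromice/core/resampling.py` module computes, per variable and per resampling window, the fraction of the window covered by valid samples, and flags windows that fall outside [threshold, 1 + atol]. A runnable sketch (invented hourly data with a half-empty second day):

    import numpy as np
    import pandas as pd
    from pypromice.core.resampling import get_completeness_mask

    index = pd.date_range("2024-01-01", periods=36, freq="h")  # 24 h + 12 h
    df = pd.DataFrame({"t_u": np.arange(36.0)}, index=index)

    mask = get_completeness_mask(df, resample_offset="1D")
    print(mask["t_u"].tolist())  # [True, False]: day 2 covers only 12/24 < 0.8 default
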
@@ -1,4 +1,4 @@
-__all__ = ["adjust", "adjust_and_include_uncorrected_values"]
+__all__ = ["adjust", "include_uncorrected_values"]
 
 import numpy as np
 import xarray as xr
@@ -25,25 +25,51 @@ def adjust(z_boom: xr.DataArray,
     return z_boom * ((air_temperature + T_0)/T_0)**0.5
 
 
-def adjust_and_include_uncorrected_values(z_boom: xr.DataArray,
-                                          air_temperature: xr.DataArray
+def include_uncorrected_values(
+        z_boom: xr.DataArray,
+        z_boom_cor: xr.DataArray,
+        air_temperature_other_level: xr.DataArray = None,
+        t_rad: xr.DataArray = None,
+        T_0: float = 273.15,
 ) -> xr.DataArray:
-    """Adjust sonic ranger readings for sensitivity to air temperature,
-    and retain uncorrected values where air temperature measurements
-    are not available.
+    """
+    Adjust sonic ranger readings for sensitivity to air temperature and
+    retain uncorrected values where temperature measurements are unavailable.
 
     Parameters
     ----------
     z_boom : xr.DataArray
-        Station boom height from sonic ranger
-    air_temperature : xr.DataArray
-        Air temperature
+        Uncorrected station boom height from sonic ranger
+    z_boom_cor : xr.DataArray
+        Boom height corrected with air_temperature_1
+    air_temperature_other_level : xr.DataArray, optional
+        Secondary air temperature
+    t_rad : xr.DataArray, optional
+        Radiative temperature
+    T_0 : float, optional
+        Reference temperature in Kelvin (default 273.15)
 
     Returns
     -------
     xr.DataArray
-        Adjusted station boom height
+        Corrected boom height with fallback where needed
     """
-    return xr.where(air_temperature.notnull(),
-                    z_boom * ((air_temperature + T_0)/T_0)**0.5,
-                    z_boom)
+    if air_temperature_other_level is None:
+        air_temperature_other_level = xr.full_like(z_boom, np.nan)
+    if t_rad is None:
+        t_rad = xr.full_like(z_boom, np.nan)
+    else:
+        t_rad = t_rad.clip(max=0)
+
+    # first gap filling using values corrected with air_temperature_other_level
+    z_boom_cor_w_ta2 = z_boom * ((air_temperature_other_level + T_0) / T_0) ** 0.5
+    z_boom_cor = z_boom_cor.fillna(z_boom_cor_w_ta2)
+
+    # second gap filling using values corrected with t_rad
+    z_boom_cor_w_t_rad = z_boom * ((t_rad + T_0) / T_0) ** 0.5
+    z_boom_cor = z_boom_cor.fillna(z_boom_cor_w_t_rad)
+
+    # third gap filling using uncorrected values
+    z_boom_cor = z_boom_cor.fillna(z_boom)
+
+    return z_boom_cor
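
The rewritten function no longer applies the temperature correction itself: it takes the already-corrected series and fills its gaps in order of preference, first with a correction from a second temperature sensor, then from the radiative temperature (clipped at 0), and finally with the raw reading. A toy example (values invented):

    import numpy as np
    import xarray as xr
    from pypromice.core.variables.station_boom_height import include_uncorrected_values

    z_boom     = xr.DataArray([2.0, 2.0, 2.0, 2.0], dims="time")
    z_boom_cor = xr.DataArray([2.1, np.nan, np.nan, np.nan], dims="time")
    t_other    = xr.DataArray([np.nan, -5.0, np.nan, np.nan], dims="time")
    t_rad      = xr.DataArray([np.nan, np.nan, -10.0, np.nan], dims="time")

    out = include_uncorrected_values(z_boom, z_boom_cor, t_other, t_rad)
    # out: [2.1 (kept), ~1.98 (via t_other), ~1.96 (via t_rad), 2.0 (uncorrected)]
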
@@ -172,16 +172,28 @@ def process_surface_height(ds, data_adjustments_dir, station_config={}):
     ds['z_surf_1'] = ('time', ds['z_boom_u'].data * np.nan)
     ds['z_surf_2'] = ('time', ds['z_boom_u'].data * np.nan)
 
-    z_boom_best_u = station_boom_height.adjust_and_include_uncorrected_values(ds["z_boom_u"], ds["t_u"])
+    z_boom_best_u = station_boom_height.include_uncorrected_values(
+        ds["z_boom_u"],
+        ds["z_boom_cor_u"],
+        ds["t_u"],
+        ds["t_l"] if "t_l" in ds.data_vars else None,
+        ds["t_rad"] if "t_rad" in ds.data_vars else None)
+
+
+
+    if 'z_stake' in ds.data_vars and ds.z_stake.notnull().any():
+        # Calculate stake boom height correction with uncorrected values where needed
+        z_stake_best = station_boom_height.include_uncorrected_values(
+            ds["z_stake"],
+            ds["z_stake_cor"],
+            ds["t_u"],
+            ds["t_l"] if "t_l" in ds.data_vars else None,
+            ds["t_rad"] if "t_rad" in ds.data_vars else None)
 
     if ds.attrs['site_type'] == 'ablation':
         # Calculate surface heights for ablation sites
         ds['z_surf_1'] = 2.6 - z_boom_best_u
         if ds.z_stake.notnull().any():
-
-            # Calculate stake boom height correction with uncorrected values where needed
-            z_stake_best = station_boom_height.adjust_and_include_uncorrected_values(ds["z_stake"], ds["t_u"])
-
             first_valid_index = ds.time.where((z_stake_best + z_boom_best_u).notnull(), drop=True).data[0]
             ds['z_surf_2'] = ds.z_surf_1.sel(time=first_valid_index) + z_stake_best.sel(time=first_valid_index) - z_stake_best
 
@@ -195,14 +207,18 @@ def process_surface_height(ds, data_adjustments_dir, station_config={}):
         ds['z_surf_1'] = z_boom_best_u.sel(time=first_valid_index) - z_boom_best_u
 
         if 'z_stake' in ds.data_vars and ds.z_stake.notnull().any():
-            z_stake_best = station_boom_height.adjust_and_include_uncorrected_values(ds["z_stake"], ds["t_u"])
            first_valid_index = ds.time.where(z_stake_best.notnull(), drop=True).data[0]
            ds['z_surf_2'] = z_stake_best.sel(time=first_valid_index) - z_stake_best
 
     if 'z_boom_l' in ds.data_vars:
 
         # Calculate lower boom height correction with uncorrected values where needed
-        z_boom_best_l = station_boom_height.adjust_and_include_uncorrected_values(ds["z_boom_l"], ds["t_l"])
+        z_boom_best_l = station_boom_height.include_uncorrected_values(
+            ds["z_boom_l"],
+            ds["z_boom_cor_l"],
+            ds["t_l"],
+            ds["t_u"] if "t_u" in ds.data_vars else None,
+            ds["t_rad"] if "t_rad" in ds.data_vars else None)
 
         # need a combine first because KAN_U switches from having a z_stake_best
         # to having a z_boom_best_l
@@ -92,7 +92,7 @@ def get_l2tol3(config_folder: Path|str, inpath, outpath, variables, metadata, da
     if outpath is not None:
         prepare_and_write(l3, outpath, v, m, '60min')
         prepare_and_write(l3, outpath, v, m, '1D')
-        prepare_and_write(l3, outpath, v, m, 'ME')
+        prepare_and_write(l3, outpath, v, m, 'MS')
     return l3
 
 def main():
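
For context (assuming pandas >= 2.2, where both aliases exist): 'ME' labels monthly aggregates with the last day of the month, 'MS' with the first, so the practical effect of this change is the timestamp labelling of the monthly files:

    import pandas as pd

    s = pd.Series(1.0, index=pd.date_range("2024-01-01", "2024-02-29", freq="D"))
    print(s.resample("ME").sum().index[0])  # 2024-01-31 00:00:00
    print(s.resample("MS").sum().index[0])  # 2024-01-01 00:00:00
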
@@ -1,18 +1,15 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-
-"""
-Created on Mon Jun 10 10:58:39 2024
-
-@author: pho
-"""
 import logging
 import numpy as np
 import pandas as pd
 import xarray as xr
+
+from pypromice.core.resampling import get_completeness_mask, DEFAULT_COMPLETENESS_THRESHOLDS
 from pypromice.core.variables.wind import calculate_directional_wind_speed
 logger = logging.getLogger(__name__)
 
-def resample_dataset(ds_h, t):
+def resample_dataset(ds_h, t, completeness_thresholds=DEFAULT_COMPLETENESS_THRESHOLDS):
     '''Resample L2 AWS data, e.g. hourly to daily average. This uses pandas
     DataFrame resampling at the moment as a work-around to the xarray Dataset
     resampling. As stated, xarray resampling is a lengthy process that takes
@@ -27,6 +24,11 @@ def resample_dataset(ds_h, t):
     t : str
         Resample factor ("60min", "1D" or "MS"), same variable definition as in
         pandas.DataFrame.resample()
+    completeness_thresholds : dict
+        A dict with, for each variable, the lower limit of completeness of an
+        hourly/daily/monthly aggregate (nr of samples in aggregate / expected
+        nr of samples). Aggregates below that limit are replaced by NaNs.
+        Must include a "default" value used for variables not listed explicitly.
 
     Returns
     -------
@@ -62,6 +64,15 @@ def resample_dataset(ds_h, t):
         if var in df_h.columns:
             df_resampled[var] = df_h[var].resample(t).sum()
 
+    # Apply completeness filter based on the data frame time index
+    completeness_mask = get_completeness_mask(
+        data_frame=df_h,
+        resample_offset=t,
+        completeness_thresholds=completeness_thresholds,
+    )
+
+    df_resampled[~completeness_mask] = np.nan
+
     # taking the 10 min data and using it as instantaneous values:
     is_10_minutes_timestamp = (ds_h.time.diff(dim='time') / np.timedelta64(1, 's') == 600)
     if (t == '60min') and is_10_minutes_timestamp.any():
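
The effect of the new filter: an aggregate whose window contains too few valid samples is blanked even though pandas would happily compute it. A runnable sketch mirroring the masking step above (invented data; `t_u` falls under the 0.8 default threshold):

    import numpy as np
    import pandas as pd
    from pypromice.core.resampling import get_completeness_mask

    index = pd.date_range("2024-01-01", periods=48, freq="h")
    values = np.where(np.arange(48) < 36, 1.0, np.nan)  # day 2 only half valid
    df_h = pd.DataFrame({"t_u": values}, index=index)

    df_resampled = df_h.resample("1D").mean()
    completeness_mask = get_completeness_mask(df_h, resample_offset="1D")
    df_resampled[~completeness_mask] = np.nan  # day 2 mean (1.0) becomes NaN
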
@@ -27,8 +27,8 @@ dlhf_u,LHF
 dlhf_l,
 dshf_u,SHF
 dshf_l,
-z_boom_u,HW2
-z_boom_l,HW1
+z_boom_cor_u,HW2
+z_boom_cor_l,HW1
 precip_u,
 precip_u_cor,
 precip_l,
@@ -34,13 +34,13 @@ dlhf_l,surface_downward_latent_heat_flux,Latent heat flux (lower boom),W m-2,mod
 dshf_u,surface_downward_sensible_heat_flux,Sensible heat flux (upper boom),W m-2,modelResult,time,FALSE,L3 or later,,,,all,0,0,1,4
 dshf_l,surface_downward_sensible_heat_flux,Sensible heat flux (lower boom),W m-2,modelResult,time,FALSE,L3 or later,,,,two-boom,0,0,1,4
 z_boom_u,distance_to_surface_from_boom,Upper boom height,m,physicalMeasurement,time,TRUE,,0.3,10,z_boom_cor_u,all,1,1,1,4
-z_boom_cor_u,distance_to_surface_from_boom_corrected,Upper boom height - corrected,m,modelResult,time,TRUE,,0.3,10,"",all,1,1,1,4
+z_boom_cor_u,distance_to_surface_from_boom_corrected,Upper boom height - corrected,m,modelResult,time,TRUE,,0.3,10,z_boom_u,all,1,1,1,4
 z_boom_q_u,distance_to_surface_from_boom_quality,Upper boom height (quality),-,qualityInformation,time,TRUE,L0 or L2,,,,all,1,1,0,4
 z_boom_l,distance_to_surface_from_boom,Lower boom height,m,physicalMeasurement,time,TRUE,,0.3,5,z_boom_cor_l,two-boom,1,1,1,4
-z_boom_cor_l,distance_to_surface_from_boom_corrected,Lower boom height - corrected,m,modelResult,time,TRUE,,0.3,5,"",two-boom,1,1,1,4
+z_boom_cor_l,distance_to_surface_from_boom_corrected,Lower boom height - corrected,m,modelResult,time,TRUE,,0.3,5,z_boom_l,two-boom,1,1,1,4
 z_boom_q_l,distance_to_surface_from_boom_quality,Lower boom height (quality),-,qualityInformation,time,TRUE,L0 or L2,,,,two-boom,1,1,0,4
 z_stake,distance_to_surface_from_stake_assembly,Stake height,m,physicalMeasurement,time,TRUE,,0.3,8,z_stake_cor,one-boom,1,1,1,4
-z_stake_cor,distance_to_surface_from_stake_assembly_corrected,Stake height - corrected,m,physicalMeasurement,time,TRUE,,0.3,8,,one-boom,1,1,1,4
+z_stake_cor,distance_to_surface_from_stake_assembly_corrected,Stake height - corrected,m,physicalMeasurement,time,TRUE,,0.3,8,z_stake,one-boom,1,1,1,4
 z_stake_q,distance_to_surface_from_stake_assembly_quality,Stake height (quality),-,qualityInformation,time,TRUE,L0 or L2,,,,one-boom,1,1,0,4
 z_pt,depth_of_pressure_transducer_in_ice,Depth of pressure transducer in ice,m,physicalMeasurement,time,FALSE,,0,30,z_pt_cor,one-boom,1,1,1,4
 z_pt_cor,depth_of_pressure_transducer_in_ice_corrected,Depth of pressure transducer in ice - corrected,m,modelResult,time,FALSE,L2 or later,0,30,,one-boom,0,1,1,4
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pypromice
-Version: 1.6.0
+Version: 1.7.0
 Summary: PROMICE/GC-Net data processing toolbox
 Home-page: https://github.com/GEUS-Glaciology-and-Climate/pypromice
 Author: GEUS Glaciology and Climate
@@ -1,8 +1,9 @@
 pypromice/__init__.py,sha256=X2LaniNJv4iVYqRtmf2jyGMQLaj59bIgkhlWhT6LCgQ,74
 pypromice/core/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 pypromice/core/dependency_graph.py,sha256=bqoXasC8pg5ipjBd6rqDhfHwIq11t2_cFlNT72ncw4w,3135
+pypromice/core/resampling.py,sha256=KyYk6HWFqJo60CPiX-gBd0uwLx5iPrYJBMQ3Sqb9yFg,4645
 pypromice/core/qc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-pypromice/core/qc/github_data_issues.py,sha256=gbbF62oMMWbXiLnsrs60vXbwfAqSUP113plhidVeqCU,13353
+pypromice/core/qc/github_data_issues.py,sha256=mTnJArv89p8vSzJdWFRkA4kBAYnFcR-I-Xz4QzlrifA,13857
 pypromice/core/qc/persistence.py,sha256=Y9CmAAPHNqEjdsZW4LEAVm7cnMEBK-zwEAD_UBDASVw,6466
 pypromice/core/qc/value_clipping.py,sha256=KGLLN54-QeD4TQ-Dd-NeooYG3kdEC6SPhz4LT2ZsRi4,1533
 pypromice/core/qc/percentiles/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -16,7 +17,7 @@ pypromice/core/variables/humidity.py,sha256=TfQfGbQ0gyj_A9zRChGei47oZYMRAR13eIzj
 pypromice/core/variables/precipitation.py,sha256=iH7m1sJDwbstZuAutO2HCRVo3RqaSuDG33YERBtvQag,3467
 pypromice/core/variables/pressure_transducer_depth.py,sha256=SadI_8oRKmMI2dnsYfYDJD7AnE6-i1bv0MOmO5xms8E,2418
 pypromice/core/variables/radiation.py,sha256=11iGl2cq5WGJxnPwTAy3H4Y5bU-QXTaMaEife1BPNA0,14318
-pypromice/core/variables/station_boom_height.py,sha256=5bomhyXLSlBhcL9EaMInkcriNeatuvnUqu98hX-0n0s,1317
+pypromice/core/variables/station_boom_height.py,sha256=5uGalC-uVEdJFMPQ-cuayP5kpFu8mvou10w38X7kd8Y,2281
 pypromice/core/variables/station_pose.py,sha256=3aVb8hywsFBCDorgHPW_nNdf_pyyil6ziJsP3UBhje4,13112
 pypromice/core/variables/wind.py,sha256=-dpaBOtqGyPDIU4O1HbbWRzlLNRC2a50OdnZhIaWdeI,1701
 pypromice/io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -37,28 +38,28 @@ pypromice/io/ingest/l0_repository.py,sha256=jmlQMSETj9aBA1iOC9sPl4-ia9UfsRHTWiR0
 pypromice/io/ingest/toa5.py,sha256=GiDCUMJpvbTRs0PzU1wz6uPkYkih9XjDVPcGbl__RtU,2857
 pypromice/pipeline/L0toL1.py,sha256=g_pH9sVPJMfYKdFgJpT7WW5rqJntJPKsxpw9N_nKs-o,13816
 pypromice/pipeline/L1toL2.py,sha256=lNSC3QOQf-ZZpFlCW6PFeRIkWtGWlDMBXWzNBU2yKAc,10158
-pypromice/pipeline/L2toL3.py,sha256=zDJggJMojKtK1VPkWoQBEgUNBbm02b2-xR-ajW2E3tg,60846
+pypromice/pipeline/L2toL3.py,sha256=yEvjIaheTdT136HEMJXmqAuReYgnx0pF6VhG23VFj00,61650
 pypromice/pipeline/__init__.py,sha256=yqYenngaSNFtpnAsfc953142P84ocq3ykvlsCIbsw3g,151
 pypromice/pipeline/aws.py,sha256=5Xv7XVf5PvcSAnEu_jPpjDWV4U0p_UvdpOzCtmH0ICU,5092
 pypromice/pipeline/get_l2.py,sha256=VV4iV3P34HFSOi3jr0IQrNXjBMgCx5GpF0uwNbT84Ck,3072
-pypromice/pipeline/get_l2tol3.py,sha256=RLyyV2iCqCXInc91sMgxltTmAD8hAt6NU3-meIR9kac,4497
+pypromice/pipeline/get_l2tol3.py,sha256=KIsScgatZbccz7ypZHBGiibbresJ9HWq9Sv4Ys2LcHs,4497
 pypromice/pipeline/join_l2.py,sha256=AQL9wVqRFbdB6bevuOArg9knZCv-jDVwypMETjuDqBM,4561
 pypromice/pipeline/join_l3.py,sha256=Wd5slBCo1XgzExOjPu9IRLWnk7NJMTwDcckhithvw0E,20316
-pypromice/pipeline/resample.py,sha256=SMq9Rh-Uy6DbtQdDv062t3n-AChRHx_J3AR914sMkLc,8099
+pypromice/pipeline/resample.py,sha256=tq6GvnmbQJsFvdJxZF2DrwTTmeNRVig_LHZQboKWreU,8818
 pypromice/pipeline/utilities.py,sha256=m-BaHWMKBGzTHa09w-49yqWRulXex5TTScg7IZu8fSY,1248
 pypromice/resources/__init__.py,sha256=MpKmvV11R4tcqmyvJpXQt-_It3oRI0WEIQNbMST--4w,907
 pypromice/resources/file_attributes.csv,sha256=ISKR-Ax12CT9tQD38ByNyvWPLLpMszpWXwviPyhuUaI,7018
-pypromice/resources/variable_aliases_GC-Net.csv,sha256=ts10e0AI1Fz_TtEj8qdRFIZBTkE8pt_VtJGp5hSBUBI,993
-pypromice/resources/variables.csv,sha256=gUmA37r_k9JEP5Tvog3gUC6lROwz-w9DfX0FW3RA-FQ,14166
+pypromice/resources/variable_aliases_GC-Net.csv,sha256=mIAnBy2XvnGGy4_HODHlbZCvk0jq3v9gAncMwyDmyqI,1001
+pypromice/resources/variables.csv,sha256=hcd0WcY9vaG6MhqnCUNjbFxXNg7p5d_ZIyaebLKy0s8,14185
 pypromice/tx/__init__.py,sha256=-62bhHWJGfzFh5JwHcLqRj2jcGzmqzYOLWByhO706YY,30
 pypromice/tx/get_l0tx.py,sha256=b34-96KGshTyTN2tBFaAIBl7oZZzbRB_JR7sXtDNfXA,6957
 pypromice/tx/get_msg.py,sha256=OGS60OHjy4Wf8JExTfOdK-9xhjFdjhuChxoTSPe_MjI,3417
 pypromice/tx/payload_formats.csv,sha256=A46-XcYdpe9-gzmADylP2UVizLi_UphF-BPT5u3Lyn8,7903
 pypromice/tx/payload_types.csv,sha256=C1-xCmHytAqqAzgzPwBLWqabzWu6s6tKAd8AjVd935s,457
 pypromice/tx/tx.py,sha256=asbgXVI5vurKM-WVACTfpKRt-70wtzVvSbvjvYufajI,34416
-pypromice-1.6.0.dist-info/licenses/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
-pypromice-1.6.0.dist-info/METADATA,sha256=9Y4jNegT_VKzN0dQdqFSvOWcLSONdsva-tNfvG5qiB0,4958
-pypromice-1.6.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-pypromice-1.6.0.dist-info/entry_points.txt,sha256=aU2SG5A0wI2n4HE9fotG-y5yeIqJ0G1-8UrEgr1GUQk,535
-pypromice-1.6.0.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
-pypromice-1.6.0.dist-info/RECORD,,
+pypromice-1.7.0.dist-info/licenses/LICENSE.txt,sha256=gXf5dRMhNSbfLPYYTY_5hsZ1r7UU1OaKQEAQUhuIBkM,18092
+pypromice-1.7.0.dist-info/METADATA,sha256=sU7zLwdi_aCLLUUUagNsQgIFVEwae5acab3osEHJDGM,4958
+pypromice-1.7.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+pypromice-1.7.0.dist-info/entry_points.txt,sha256=aU2SG5A0wI2n4HE9fotG-y5yeIqJ0G1-8UrEgr1GUQk,535
+pypromice-1.7.0.dist-info/top_level.txt,sha256=cBdfwgSbWDQq3a07nKRjrfmLC7jdaYXs98GG58HpTks,10
+pypromice-1.7.0.dist-info/RECORD,,