cloudnetpy 1.78.0__py3-none-any.whl → 1.78.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -83,7 +83,7 @@ def _get_smoothed_v(
     obs: ClassData,
     sigma: tuple[float, float] = (5, 5),
 ) -> ma.MaskedArray:
-    smoothed_v = gaussian_filter(obs.v, sigma)
+    smoothed_v = gaussian_filter(obs.v.filled(0), sigma)
     return ma.masked_where(obs.v.mask, smoothed_v)
 
 
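Note (editor's aside, not part of the released diff): the `.filled(0)` change matters because scipy.ndimage.gaussian_filter ignores the mask of a numpy MaskedArray and smooths the raw underlying buffer, so NaNs or large fill values bleed into neighbouring valid gates. Filling masked gates with zero before smoothing and re-applying the mask afterwards avoids that. A minimal standalone sketch with made-up data:

import numpy as np
import numpy.ma as ma
from scipy.ndimage import gaussian_filter

v = ma.masked_invalid(np.array([[1.0, np.nan, 2.0],
                                [1.5, 1.0, np.nan]]))
leaky = gaussian_filter(v, sigma=1)           # NaN fill values spread into valid gates
safe = gaussian_filter(v.filled(0), sigma=1)  # masked gates contribute zero instead
smoothed = ma.masked_where(v.mask, safe)      # re-apply the original mask afterwards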
@@ -24,8 +24,6 @@ class Radar(DataSource):
         folding_velocity (float): Radar's folding velocity (m/s).
         location (str): Location of the radar, copied from the global attribute
             `location` of the input file.
-        sequence_indices (list): Indices denoting the different altitude
-            regimes of the radar.
         source_type (str): Type of the radar, copied from the global attribute
             `source` of the *radar_file*. Can be free form string but must
             include either 'rpg' or 'mira' denoting one of the two supported
@@ -39,21 +37,17 @@ class Radar(DataSource):
     def __init__(self, full_path: str):
         super().__init__(full_path, radar=True)
         self.radar_frequency = float(self.getvar("radar_frequency"))
-        self.folding_velocity = self._get_folding_velocity()
-        self.sequence_indices = self._get_sequence_indices()
         self.location = getattr(self.dataset, "location", "")
         self.source_type = getattr(self.dataset, "source", "")
         self.height: np.ndarray
         self.altitude: float
         self._init_data()
-        self._init_sigma_v()
-        self._get_folding_velocity_full()
 
     def rebin_to_grid(self, time_new: np.ndarray) -> list:
-        """Rebins radar data in time using mean.
+        """Rebins radar data in time.
 
         Args:
-            time_new: Target time array as fraction hour. Updates *time* attribute.
+            time_new: Target time array as fraction hour.
 
         """
         bad_time_indices = []
@@ -64,21 +58,25 @@ class Radar(DataSource):
                     bad_time_indices = array.rebin_data(self.time, time_new)
                     array.lin2db()
                 case "v":
-                    array.rebin_velocity(
-                        self.time,
+                    array.data = self._rebin_velocity(
+                        array.data,
                         time_new,
-                        self.folding_velocity,
-                        self.sequence_indices,
                     )
                 case "v_sigma":
-                    array.calc_linear_std(self.time, time_new)
+                    array.data, _ = utils.rebin_2d(
+                        self.time,
+                        array.data,
+                        time_new,
+                        "std",
+                        mask_zeros=True,
+                    )
                 case "width":
                     array.rebin_data(self.time, time_new)
                 case "rainfall_rate":
-                    array.rebin_data(self.time, time_new, mask_zeros=False)
+                    array.rebin_data(self.time, time_new)
                 case _:
                     continue
-        return bad_time_indices
+        return list(bad_time_indices)
 
     def remove_incomplete_pixels(self) -> None:
         """Mask radar pixels where one or more required quantities are missing.
@@ -337,63 +335,88 @@ class Radar(DataSource):
 
     def _init_data(self) -> None:
         self.append_data(self.getvar("Zh"), "Z", units="dBZ")
-        for key in ("v", "ldr", "width", "sldr", "rainfall_rate"):
-            try:
-                self._variables_to_cloudnet_arrays((key,))
-            except KeyError:
-                continue
-
-    def _init_sigma_v(self) -> None:
-        """Initializes std of the velocity field. The std will be calculated
-        later when re-binning the data.
-        """
         self.append_data(self.getvar("v"), "v_sigma")
+        for key in ("v", "ldr", "width", "sldr", "rainfall_rate", "nyquist_velocity"):
+            if key in self.dataset.variables:
+                data = self.dataset.variables[key]
+                self.append_data(data, key)
 
-    def _get_sequence_indices(self) -> list:
-        """Mira has only one sequence and one folding velocity. RPG has
-        several sequences with different folding velocities.
-        """
-        if self.height is None:
-            msg = "Height not found in the input file"
-            raise RuntimeError(msg)
-        all_indices = np.arange(len(self.height))
-        if not utils.isscalar(self.folding_velocity):
-            starting_indices = self.getvar("chirp_start_indices")
-            return np.split(all_indices, starting_indices[1:])
-        return [all_indices]
-
-    def _get_folding_velocity(self) -> np.ndarray | float:
+    def _rebin_velocity(
+        self,
+        data: np.ndarray,
+        time_new: np.ndarray,
+    ) -> np.ndarray:
+        """Rebins Doppler velocity in polar coordinates."""
+        folding_velocity = self._get_expanded_folding_velocity()
+        # with the new shape (maximum value in every bin)
+        max_folding_binned, _ = utils.rebin_2d(
+            self.time,
+            folding_velocity,
+            time_new,
+            "max",
+        )
+        # store this in the file
+        self.append_data(max_folding_binned, "nyquist_velocity")
+        # with original shape (repeat maximum value for each point in every bin)
+        max_folding_full, _ = utils.rebin_2d(
+            self.time,
+            folding_velocity,
+            time_new,
+            "max",
+            keepdim=True,
+        )
+        data_scaled = data * (np.pi / max_folding_full)
+        vel_x = ma.cos(data_scaled)
+        vel_y = ma.sin(data_scaled)
+        vel_x_mean, _ = utils.rebin_2d(self.time, vel_x, time_new)
+        vel_y_mean, _ = utils.rebin_2d(self.time, vel_y, time_new)
+        vel_scaled = ma.arctan2(vel_y_mean, vel_x_mean)
+        return vel_scaled / (np.pi / max_folding_binned)
+
+    def _get_expanded_folding_velocity(self) -> np.ndarray:
         if "nyquist_velocity" in self.dataset.variables:
-            return self.getvar("nyquist_velocity")
-        if "prf" in self.dataset.variables:
+            fvel = self.getvar("nyquist_velocity")
+        elif "prf" in self.dataset.variables:
             prf = self.getvar("prf")
-            return _prf_to_folding_velocity(prf, self.radar_frequency)
-        msg = "Unable to determine folding velocity"
-        raise RuntimeError(msg)
-
-    def _get_folding_velocity_full(self) -> None:
-        folding_velocity: list | np.ndarray = []
-        if utils.isscalar(self.folding_velocity):
-            folding_velocity = np.repeat(
-                self.folding_velocity,
-                len(self.sequence_indices[0]),
-            )
+            fvel = _prf_to_folding_velocity(prf, self.radar_frequency)
         else:
-            folding_velocity = list(folding_velocity)
-            self.folding_velocity = np.array(self.folding_velocity)
-            for indices, velocity in zip(
-                self.sequence_indices,
-                self.folding_velocity,
-                strict=True,
-            ):
-                folding_velocity.append(np.repeat(velocity, len(indices)))
-        folding_velocity = np.hstack(folding_velocity)
-        self.append_data(folding_velocity, "nyquist_velocity")
-
-
-def _prf_to_folding_velocity(prf: np.ndarray, radar_frequency: float) -> float:
+            msg = "Unable to determine folding velocity"
+            raise RuntimeError(msg)
+
+        n_time = self.getvar("time").size
+        n_height = self.height.size
+
+        if fvel.shape == (n_time, n_height):
+            # Folding velocity is already expanded in radar file
+            # Not yet in current files
+            return fvel
+        if utils.isscalar(fvel):
+            # e.g. MIRA
+            return np.broadcast_to(fvel, (n_time, n_height))
+
+        # RPG radars have chirp segments
+        starts = self.getvar("chirp_start_indices")
+        n_seg = starts.size if starts.ndim == 1 else starts.shape[1]
+
+        starts = np.broadcast_to(starts, (n_time, n_seg))
+        fvel = np.broadcast_to(fvel, (n_time, n_seg))
+
+        # Indices should start from zero (first range gate)
+        # In pre-processed RV Meteor files the first index is 1,
+        # so normalize starts so that indices begin from zero:
+        first_values = starts[:, [0]]
+        if not np.all(np.isin(first_values, [0, 1])):
+            msg = "First value of chirp_start_indices must be 0 or 1"
+            raise ValueError(msg)
+        starts = starts - first_values
+
+        chirp_size = np.diff(starts, append=n_height)
+        return np.repeat(fvel.ravel(), chirp_size.ravel()).reshape((n_time, n_height))
+
+
+def _prf_to_folding_velocity(prf: np.ndarray, radar_frequency: float) -> np.ndarray:
     ghz_to_hz = 1e9
     if len(prf) != 1:
         msg = "Unable to determine folding velocity"
         raise RuntimeError(msg)
-    return float(prf[0] * constants.c / (4 * radar_frequency * ghz_to_hz))
+    return prf[0] * constants.c / (4 * radar_frequency * ghz_to_hz)
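Note (editor's aside, not part of the released diff): the new `_rebin_velocity` averages folded Doppler velocities as phase angles rather than as plain numbers: each velocity is scaled by pi / v_nyquist, the resulting unit vectors (cos, sin) are averaged per bin, and arctan2 maps the mean back. This keeps samples that straddle the folding velocity from cancelling out. A standalone sketch of the idea with made-up numbers:

import numpy as np

v_nyq = 10.0
v = np.array([9.9, -9.9])            # two samples on opposite sides of the fold

plain_mean = v.mean()                # 0.0, which is physically wrong

phase = v * np.pi / v_nyq            # map velocities to angles
circ_mean = np.arctan2(np.sin(phase).mean(), np.cos(phase).mean())
folded_mean = circ_mean * v_nyq / np.pi   # ~ +/-10.0, i.e. stays at the fold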
@@ -1,6 +1,5 @@
 """CloudnetArray class."""
 
-import math
 from collections.abc import Sequence
 
 import netCDF4
@@ -58,29 +57,22 @@ class CloudnetArray:
         """Masks data from given indices."""
         self.data[ind] = ma.masked
 
-    def rebin_data(
-        self, time: np.ndarray, time_new: np.ndarray, *, mask_zeros: bool = True
-    ) -> list:
+    def rebin_data(self, time: np.ndarray, time_new: np.ndarray) -> np.ndarray:
         """Rebins `data` in time.
 
         Args:
             time: 1D time array.
             time_new: 1D new time array.
-            mask_zeros: Whether to mask 0 values in the returned array. Default is True.
 
         Returns:
             Time indices without data.
 
         """
         if self.data.ndim == 1:
-            self.data = utils.rebin_1d(time, self.data, time_new, mask_zeros=mask_zeros)
-            bad_indices = list(np.where(self.data == ma.masked)[0])
+            self.data = utils.rebin_1d(time, self.data, time_new)
+            bad_indices = np.nonzero(self.data.mask)[0]
         else:
-            if not isinstance(self.data, ma.MaskedArray):
-                self.data = ma.masked_array(self.data)
-            self.data, bad_indices = utils.rebin_2d(
-                time, self.data, time_new, mask_zeros=mask_zeros
-            )
+            self.data, bad_indices = utils.rebin_2d(time, self.data, time_new)
         return bad_indices
 
     def fetch_attributes(self) -> list:
@@ -108,6 +100,21 @@ class CloudnetArray:
         if data:
             setattr(self, key, data)
 
+    def filter_isolated_pixels(self) -> None:
+        """Filters hot pixels from radar data."""
+        self._filter(utils.filter_isolated_pixels)
+
+    def filter_vertical_stripes(self) -> None:
+        """Filters vertical artifacts from radar data."""
+        self._filter(utils.filter_x_pixels)
+
+    def _filter(self, fun) -> None:
+        if not isinstance(self.data, ma.MaskedArray):
+            self.data = ma.masked_array(self.data)
+        is_data = (~self.data.mask).astype(int)
+        is_data_filtered = fun(is_data)
+        self.data[is_data_filtered == 0] = ma.masked
+
     def _init_data(self) -> np.ndarray:
         if isinstance(self.variable, netCDF4.Variable):
             return self.variable[:]
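Note (editor's aside, not part of the released diff): `_filter` turns the mask into a binary occupancy image, runs a pixel filter on it, and masks whatever the filter removed. A standalone sketch of the same pattern; the `drop_isolated` helper below is my stand-in built on scipy.ndimage.label, not cloudnetpy's own utils.filter_isolated_pixels:

import numpy as np
import numpy.ma as ma
from scipy.ndimage import label

def drop_isolated(is_data: np.ndarray) -> np.ndarray:
    labels, _ = label(is_data)               # connected components of data pixels
    sizes = np.bincount(labels.ravel())
    keep = sizes[labels] > 1                 # drop single-pixel components
    return (is_data.astype(bool) & keep).astype(int)

data = ma.masked_invalid(np.array([[1.0, np.nan, np.nan],
                                   [np.nan, np.nan, 5.0],
                                   [np.nan, np.nan, 6.0]]))
is_data = (~data.mask).astype(int)
data[drop_isolated(is_data) == 0] = ma.masked    # mirrors CloudnetArray._filter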
@@ -139,73 +146,3 @@ class CloudnetArray:
 
     def __getitem__(self, ind: tuple) -> np.ndarray:
         return self.data[ind]
-
-    def filter_isolated_pixels(self) -> None:
-        """Filters hot pixels from radar data."""
-        self._filter(utils.filter_isolated_pixels)
-
-    def filter_vertical_stripes(self) -> None:
-        """Filters vertical artifacts from radar data."""
-        self._filter(utils.filter_x_pixels)
-
-    def _filter(self, fun) -> None:
-        if not isinstance(self.data, ma.MaskedArray):
-            self.data = ma.masked_array(self.data)
-        is_data = (~self.data.mask).astype(int)
-        is_data_filtered = fun(is_data)
-        self.data[is_data_filtered == 0] = ma.masked
-
-    def calc_linear_std(self, time: np.ndarray, time_new: np.ndarray) -> None:
-        """Calculates std of radar velocity.
-
-        Args:
-            time: 1D time array.
-            time_new: 1D new time array.
-
-        Notes:
-            The result is masked if the bin contains masked values.
-        """
-        data_as_float = self.data.astype(float)
-        data_as_float = ma.masked_array(data_as_float)
-        self.data, _ = utils.rebin_2d(time, data_as_float, time_new, "std")
-
-    def rebin_velocity(
-        self,
-        time: np.ndarray,
-        time_new: np.ndarray,
-        folding_velocity: float | np.ndarray,
-        sequence_indices: list,
-    ) -> None:
-        """Rebins Doppler velocity in polar coordinates.
-
-        Args:
-            time: 1D time array.
-            time_new: 1D new time array.
-            folding_velocity: Folding velocity (m/s). Can be a float when
-                it's the same for all altitudes, or np.ndarray when it
-                matches difference altitude regions (defined in `sequence_indices`).
-            sequence_indices: List containing indices of different folding regions,
-                e.g. [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10]].
-
-        """
-
-        def _get_scaled_vfold() -> np.ndarray:
-            vfold_scaled = math.pi / folding_velocity
-            if isinstance(vfold_scaled, float):
-                vfold_scaled = np.array([float(vfold_scaled)])
-            return vfold_scaled
-
-        def _scale_by_vfold(data_in: np.ndarray, fun) -> np.ndarray:
-            data_out = ma.copy(data_in)
-            for i, ind in enumerate(sequence_indices):
-                data_out[:, ind] = fun(data_in[:, ind], folding_velocity_scaled[i])
-            return data_out
-
-        folding_velocity_scaled = _get_scaled_vfold()
-        data_scaled = _scale_by_vfold(self.data, np.multiply)
-        vel_x = ma.cos(data_scaled)
-        vel_y = ma.sin(data_scaled)
-        vel_x_mean, _ = utils.rebin_2d(time, vel_x, time_new)
-        vel_y_mean, _ = utils.rebin_2d(time, vel_y, time_new)
-        mean_vel_scaled = np.arctan2(vel_y_mean, vel_x_mean)
-        self.data = _scale_by_vfold(mean_vel_scaled, np.divide)
cloudnetpy/datasource.py CHANGED
@@ -182,53 +182,6 @@ class DataSource:
             return np.array(range_instrument + self.altitude)
         return None
 
-    def _variables_to_cloudnet_arrays(self, keys: tuple) -> None:
-        """Transforms netCDF4-variables into CloudnetArrays.
-
-        Args:
-            keys: netCDF4-variables to be converted. The results
-                are saved in *self.data* dictionary with *fields*
-                strings as keys.
-
-        Notes:
-            The attributes of the variables are not copied. Just the data.
-
-        """
-        for key in keys:
-            self.append_data(self.dataset.variables[key], key)
-
-    def _unknown_variable_to_cloudnet_array(
-        self,
-        possible_names: tuple,
-        key: str,
-        units: str | None = None,
-        *,
-        ignore_mask: bool = False,
-    ) -> None:
-        """Transforms single netCDF4 variable into CloudnetArray.
-
-        Args:
-            possible_names: Tuple of strings containing the possible
-                names of the variable in the input NetCDF file.
-            key: Key for self.data dictionary and name-attribute
-                for the saved CloudnetArray object.
-            units: Units attribute for the CloudnetArray object.
-            ignore_mask: If true, always writes an ordinary numpy array.
-
-        Raises:
-            RuntimeError: No variable found.
-
-        """
-        for name in possible_names:
-            if name in self.dataset.variables:
-                array: netCDF4.Variable | np.ndarray = self.dataset.variables[name]
-                if ignore_mask is True:
-                    array = np.array(array)
-                self.append_data(array, key, units=units)
-                return
-        msg = f"Missing variable {possible_names[0]} in the input file."
-        raise RuntimeError(msg)
-
     def __enter__(self):
         return self
 
@@ -97,7 +97,7 @@ def read_data_characters(nc_file: str, name: str, model: str) -> tuple:
     try:
         mask = y.mask
         if mask.any():
-            x, y, data = change2one_dim_axes(x, y, data)
+            x, y, data = change2one_dim_axes(ma.array(x), y, data)
     except AttributeError:
         return data, x, y
     return data, x, y
@@ -65,7 +65,7 @@ def test_fit_z_sensitivity(obs_file, model_file) -> None:
     model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
     adv_pro = AdvanceProductMethods(model, str(model_file), obs)
    h = np.array([[5000, 9000, 13000], [10000, 15000, 20000], [8000, 12000, 16000]])
-    compare = np.array([[0, 0.15, 0.5], [0.1, 1, 0], [0.15, 0, 1]])
+    compare = ma.masked_invalid([[np.nan, 0.15, 0.5], [0.1, 1, np.nan], [0.15, 0, 1]])
     x = adv_pro.fit_z_sensitivity(h)
     testing.assert_array_almost_equal(x, compare)
 
cloudnetpy/utils.py CHANGED
@@ -3,7 +3,6 @@
 import base64
 import datetime
 import hashlib
-import logging
 import os
 import re
 import textwrap
@@ -11,7 +10,7 @@ import uuid
 import warnings
 from collections.abc import Iterator
 from datetime import timezone
-from typing import Literal, TypeVar
+from typing import Any, Literal, TypeVar
 
 import netCDF4
 import numpy as np
@@ -142,54 +141,42 @@ def binvec(x: np.ndarray | list) -> np.ndarray:
 
 def rebin_2d(
     x_in: np.ndarray,
-    array: ma.MaskedArray,
+    array: np.ndarray,
     x_new: np.ndarray,
-    statistic: Literal["mean", "std"] = "mean",
+    statistic: Literal["mean", "std", "max"] = "mean",
     n_min: int = 1,
     *,
-    mask_zeros: bool = True,
-) -> tuple[ma.MaskedArray, list]:
-    """Rebins 2-D data in one dimension.
+    keepdim: bool = False,
+    mask_zeros: bool = False,
+) -> tuple[ma.MaskedArray, np.ndarray]:
+    edges = binvec(x_new)
+    binn = np.digitize(x_in, edges) - 1
+    n_bins = len(x_new)
+    counts = np.bincount(binn[binn >= 0], minlength=n_bins)
 
-    Args:
-        x_in: 1-D array with shape (n,).
-        array: 2-D input data with shape (n, m).
-        x_new: 1-D target vector (center points) with shape (N,).
-        statistic: Statistic to be calculated. Possible statistics are 'mean', 'std'.
-            Default is 'mean'.
-        n_min: Minimum number of points to have good statistics in a bin. Default is 1.
-        mask_zeros: Whether to mask 0 values in the returned array. Default is True.
+    stat_fn: Any = {
+        "mean": ma.mean,
+        "std": ma.std,
+        "max": ma.max,
+    }[statistic]
 
-    Returns:
-        tuple: Rebinned data with shape (N, m) and indices of bins without enough data.
-    """
-    edges = binvec(x_new)
-    result = np.zeros((len(x_new), array.shape[1]))
-    array_screened = ma.masked_invalid(array, copy=True)  # data may contain nan-values
-    for ind, values in enumerate(array_screened.T):
-        mask = ~values.mask
-        if ma.any(values[mask]):
-            result[:, ind], _, _ = stats.binned_statistic(
-                x_in[mask],
-                values[mask],
-                statistic=statistic,
-                bins=edges,
-            )
-    result[~np.isfinite(result)] = 0
-    if mask_zeros is True:
-        masked_result = ma.masked_equal(result, 0)
-    else:
-        masked_result = ma.array(result)
+    shape = array.shape if keepdim else (n_bins, array.shape[1])
+    result: ma.MaskedArray = ma.masked_array(np.ones(shape, dtype="float32"), mask=True)
+
+    for bin_ind in range(n_bins):
+        if counts[bin_ind] < n_min:
+            continue
+        mask = binn == bin_ind
+        block = array[mask, :]
+        x_ind = mask if keepdim else bin_ind
+        result[x_ind, :] = stat_fn(block, axis=0)
 
-    # Fill bins with not enough profiles
-    x_hist, _ = np.histogram(x_in, bins=edges)
-    empty_mask = x_hist < n_min
-    masked_result[empty_mask, :] = ma.masked
-    empty_indices = list(np.nonzero(empty_mask)[0])
-    if len(empty_indices) > 0:
-        logging.debug("No data in %s bins", len(empty_indices))
+    empty_bins = np.where(counts < n_min)[0]
 
-    return masked_result, empty_indices
+    if mask_zeros:
+        result[result == 0] = ma.masked
+
+    return result, empty_bins
 
 
 def rebin_1d(
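Note (editor's aside, not part of the released diff): the rewritten rebin_2d replaces scipy.stats.binned_statistic with explicit binning: each input time is assigned to a target bin with np.digitize, a masked statistic (mean/std/max) is computed per bin, bins with fewer than n_min samples stay masked, and their indices are returned. A standalone sketch of the same binning logic; `_edges` below is my stand-in for cloudnetpy's binvec helper:

import numpy as np
import numpy.ma as ma

def _edges(centers: np.ndarray) -> np.ndarray:
    mid = (centers[1:] + centers[:-1]) / 2   # mid-points between bin centers
    return np.concatenate(([2 * centers[0] - mid[0]], mid, [2 * centers[-1] - mid[-1]]))

x_in = np.array([0.1, 0.2, 0.6, 1.1, 1.2])   # original sample times
x_new = np.array([0.25, 0.75, 1.25])         # target bin centers
data = ma.masked_invalid(np.arange(20, dtype=float).reshape(5, 4))  # (time, height)

binn = np.digitize(x_in, _edges(x_new)) - 1  # target bin of every sample
counts = np.bincount(binn[binn >= 0], minlength=len(x_new))
result = ma.masked_all((len(x_new), data.shape[1]))
for i in range(len(x_new)):
    if counts[i] >= 1:                       # n_min = 1
        result[i, :] = ma.mean(data[binn == i, :], axis=0)
empty_bins = np.where(counts < 1)[0]         # indices of bins without data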
@@ -197,8 +184,6 @@ def rebin_1d(
     array: np.ndarray | ma.MaskedArray,
     x_new: np.ndarray,
     statistic: str = "mean",
-    *,
-    mask_zeros: bool = True,
 ) -> ma.MaskedArray:
     """Rebins 1D array.
 
@@ -208,14 +193,13 @@ def rebin_1d(
        x_new: 1-D target vector (center points) with shape (N,).
        statistic: Statistic to be calculated. Possible statistics are 'mean', 'std'.
            Default is 'mean'.
-        mask_zeros: Whether to mask 0 values in the returned array. Default is True.
 
    Returns:
        Re-binned data with shape (N,).
 
    """
    edges = binvec(x_new)
-    result = np.zeros(len(x_new))
+    result = ma.zeros(len(x_new))
    array_screened = ma.masked_invalid(array, copy=True)  # data may contain nan-values
    mask = ~array_screened.mask
    if ma.any(array_screened[mask]):
@@ -225,10 +209,7 @@
            statistic=statistic,
            bins=edges,
        )
-    result[~np.isfinite(result)] = 0
-    if mask_zeros:
-        return ma.masked_equal(result, 0)
-    return ma.array(result)
+    return ma.masked_invalid(result, copy=True)
 
 
 def filter_isolated_pixels(array: np.ndarray) -> np.ndarray:
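Note (editor's aside, not part of the released diff): dropping the mask_zeros behaviour changes what gets masked. Previously non-finite bins were set to 0 and then everything equal to 0 was masked, which also hid genuine zero values (for example a 0 m/s velocity); now only the non-finite (empty) bins are masked. A tiny standalone illustration with made-up numbers:

import numpy as np
import numpy.ma as ma

result = np.array([0.0, 1.5, np.nan])             # 0.0 is real data, NaN is an empty bin
old = ma.masked_equal(np.nan_to_num(result), 0)   # old behaviour: masks both 0.0 and the NaN bin
new = ma.masked_invalid(result)                   # new behaviour: masks only the empty bin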
cloudnetpy/version.py CHANGED
@@ -1,4 +1,4 @@
 MAJOR = 1
 MINOR = 78
-PATCH = 0
+PATCH = 1
 __version__ = f"{MAJOR}.{MINOR}.{PATCH}"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cloudnetpy
-Version: 1.78.0
+Version: 1.78.1
 Summary: Python package for Cloudnet processing
 Author: Simo Tukiainen
 License: MIT License
@@ -1,15 +1,15 @@
 cloudnetpy/__init__.py,sha256=X_FqY-4yg5GUj5Edo14SToLEos6JIsC3fN-v1FUgQoA,43
 cloudnetpy/cli.py,sha256=lHkeAErmAijI-Ugpd4DHRHfbZP4SXOake0LIY5Ovv_Q,20782
-cloudnetpy/cloudnetarray.py,sha256=XFyXZwR4QWPyo7WLmvsu7DEELZQp1vi5FZ8F7tX_tM0,7307
+cloudnetpy/cloudnetarray.py,sha256=uOYgpQ8hHh5fuHyip1HjnhsEda9_7dg7orYnbCRkTtI,4796
 cloudnetpy/concat_lib.py,sha256=XQ5Sk8kfXqI0Q5HoomKWWhdZ1-m2thYDKGL7SKapITE,12851
 cloudnetpy/constants.py,sha256=YnoSzZm35NDooJfhlulSJBc7g0eSchT3yGytRaTaJEI,845
-cloudnetpy/datasource.py,sha256=FcWS77jz56gIzwnbafDLdj-HjAyu0P_VtY7gkeVZThU,7952
+cloudnetpy/datasource.py,sha256=Vx_I8S14nFAWKI0VbsW_-sllbVCRjTYxB7XH9b9PedQ,6268
 cloudnetpy/exceptions.py,sha256=hYbUtBwjCIfxnPe_5mELDEw87AWITBrwuo7WYIEKmJ8,1579
 cloudnetpy/metadata.py,sha256=lO7BCbVAzFoH3Nq-VuezYX0f7MnbG1Zp11g5GSiuQwM,6189
 cloudnetpy/output.py,sha256=gupxt4f_-eUrFsWMto8tnknoV-p9QauC9L6CJAqBILU,15988
 cloudnetpy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cloudnetpy/utils.py,sha256=O57jqUX61V_Jodrs_04Js9cmTUbK0cymW9NcHzKVrx0,32926
-cloudnetpy/version.py,sha256=Umza9M-fEEsgi9FrAC0XLoVQcbh-gNPHmP_sNLxDOD4,72
+cloudnetpy/utils.py,sha256=HdcSNIgdxoZlP_jHl66ItheHLw_cDYZb7u6mZ5dfMNE,31952
+cloudnetpy/version.py,sha256=3xCJ6Zv7LkLahqubKZWS5YDwui01I7yk-Z-3oUGO9Us,72
 cloudnetpy/categorize/__init__.py,sha256=s-SJaysvVpVVo5kidiruWQO6p3gv2TXwY1wEHYO5D6I,44
 cloudnetpy/categorize/atmos_utils.py,sha256=RcmbKxm2COkE7WEya0mK3yX5rzUbrewRVh3ekm01RtM,10598
 cloudnetpy/categorize/attenuation.py,sha256=Y_-fzmQTltWTqIZTulJhovC7a6ifpMcaAazDJcnMIOc,990
@@ -20,13 +20,13 @@ cloudnetpy/categorize/disdrometer.py,sha256=sRSt2B932lrrkvycKoSaKEIaDVfq9Z7uU-4i
 cloudnetpy/categorize/droplet.py,sha256=t49KEsH5ZM68JQ4NvAf9kGgQ-evic1T4de2-jgJ2f4M,8683
 cloudnetpy/categorize/falling.py,sha256=lok0HMi1ewf9pS70mq62nRKL6wJzMyWbYmv1cdwrwnA,4404
 cloudnetpy/categorize/freezing.py,sha256=eSFD37R7vBrg7mgfSanrwhBjnFyWNBpjw2AtvRmSh48,3753
-cloudnetpy/categorize/insects.py,sha256=9J5agmktit8Or66GGNue-bThiaG9rB2SuPNZBXI7FCE,5243
+cloudnetpy/categorize/insects.py,sha256=MrxlWK-5JaMZxCBWFKR_6Kj5TAVXm-s9SVxsvcyNYJo,5253
 cloudnetpy/categorize/itu.py,sha256=ffXK27guyRS4d66VWQ2h4UEGjUIhGjPKbFmj7kh698c,10304
 cloudnetpy/categorize/lidar.py,sha256=YQrM_LOz8NQrrD9l9HyujV1GSGwkQ8LMqXN13bEJRW4,2605
 cloudnetpy/categorize/melting.py,sha256=ZnLeL_qWmiCdjXVOm9iBYHdo29Brqxu_DEErZPqUloQ,6217
 cloudnetpy/categorize/model.py,sha256=QFRCY0TvM2fzGRyP8BNkqbvu13XcQjt7TsN5fhjI_Uc,6654
 cloudnetpy/categorize/mwr.py,sha256=F7cquERWL6mBkgboqeaCIPf9gOlKI-NWUQIBdQXGT_I,1635
-cloudnetpy/categorize/radar.py,sha256=PmriTnrHbgZrau1RTNKpPI_-h5Uu0kGIMrMOaoMuROY,14821
+cloudnetpy/categorize/radar.py,sha256=z2bFF_wAKbzzXB3Pq1z33Y1RMv6NL-8CdZesU89qzpw,15502
 cloudnetpy/categorize/attenuations/__init__.py,sha256=CWFHVWeTIe2hrZtgkJaX2HGftbuffsFc39Mzv5B0Lw0,1037
 cloudnetpy/categorize/attenuations/gas_attenuation.py,sha256=emr-RCxQT0i2N8k6eBNhRsmsCBPHJzQsWJfjC4fVSTo,975
 cloudnetpy/categorize/attenuations/liquid_attenuation.py,sha256=0p0G79BPkw1itCXHMwbvkNHtJGBocJzow3gNHAirChI,3036
@@ -68,7 +68,7 @@ cloudnetpy/model_evaluation/model_metadata.py,sha256=CxpY6RPm7GOTBBmPhcNVVpm9ate
 cloudnetpy/model_evaluation/utils.py,sha256=Z9VqYVdtY9yTr2JeVfBn4nccIVWCN5Fd-BCyB_qYI-A,154
 cloudnetpy/model_evaluation/plotting/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudnetpy/model_evaluation/plotting/plot_meta.py,sha256=K18Ugohh24uVAIxjZgJsmK80YwsMstm6B7ptVafONAw,3557
-cloudnetpy/model_evaluation/plotting/plot_tools.py,sha256=gV042W_AHidwPsRe2L57xdWbt3W-utcHMt_9FmfYK3M,5033
+cloudnetpy/model_evaluation/plotting/plot_tools.py,sha256=umI06tPIEs48cQ8GY8s3vGHwPcN--tUir4s1yxNQf64,5043
 cloudnetpy/model_evaluation/plotting/plotting.py,sha256=mGgSnQoRTh04v5RSJHsYPaqUEIR82eZqAuiszrh9rjY,31235
 cloudnetpy/model_evaluation/products/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudnetpy/model_evaluation/products/advance_methods.py,sha256=rng3ZLR1Arv1AGUzq0Ehu-65628PC5LZVKpHSUpCIW8,8526
@@ -93,7 +93,7 @@ cloudnetpy/model_evaluation/tests/e2e/process_lwc/main.py,sha256=IFcPj-Vce9Yn0Cf
 cloudnetpy/model_evaluation/tests/e2e/process_lwc/tests.py,sha256=ANBA0LVao3Xrm-prRnwUmxM6BdQzqM7GZNKB3uz5BXQ,1725
 cloudnetpy/model_evaluation/tests/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudnetpy/model_evaluation/tests/unit/conftest.py,sha256=WL_FgrDeoUYGp4PKjb37HLu79D9uu33PGQL40_ctqS0,7446
-cloudnetpy/model_evaluation/tests/unit/test_advance_methods.py,sha256=IkoAVtsWVFrPpFqQOLAPHKb9qgV-KjGGVEtWMudeiSo,10079
+cloudnetpy/model_evaluation/tests/unit/test_advance_methods.py,sha256=UEF94sBN8pPOyoz5ARag3mc5A8KFjyY_-IJFqdKBGug,10098
 cloudnetpy/model_evaluation/tests/unit/test_grid_methods.py,sha256=AEUXN5HBhKliPsSNGDCUtWOtIx6Y8iqkywb_-RfXYU0,26277
 cloudnetpy/model_evaluation/tests/unit/test_model_products.py,sha256=FRbYLshSHH2E527uJPwvUIyZKTsPFSZrwDsPsNrFSSU,3475
 cloudnetpy/model_evaluation/tests/unit/test_observation_products.py,sha256=DN3yVqq8vFYca_9POjcrJ8XViMrJks_jM-aQznfN8QQ,4936
@@ -117,10 +117,10 @@ cloudnetpy/products/lwc.py,sha256=sl6Al2tuH3KkCBrPbWTmuz3jlD5UQJ4D6qBsn1tt2CQ,18
 cloudnetpy/products/mie_lu_tables.nc,sha256=It4fYpqJXlqOgL8jeZ-PxGzP08PMrELIDVe55y9ob58,16637951
 cloudnetpy/products/mwr_tools.py,sha256=8HPZpQMTojKZP1JS1S83IE0sxmbDE9bxlaWoqmGnUZE,6199
 cloudnetpy/products/product_tools.py,sha256=uu4l6reuGbPcW3TgttbaSrqIKbyYGhBVTdnC7opKvmg,11101
-cloudnetpy-1.78.0.dist-info/licenses/LICENSE,sha256=wcZF72bdaoG9XugpyE95Juo7lBQOwLuTKBOhhtANZMM,1094
+cloudnetpy-1.78.1.dist-info/licenses/LICENSE,sha256=wcZF72bdaoG9XugpyE95Juo7lBQOwLuTKBOhhtANZMM,1094
 docs/source/conf.py,sha256=IKiFWw6xhUd8NrCg0q7l596Ck1d61XWeVjIFHVSG9Og,1490
-cloudnetpy-1.78.0.dist-info/METADATA,sha256=4CkmH9P2aO02UADla12HiMWCqnPts65FVD6PrZYBre4,5796
-cloudnetpy-1.78.0.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
-cloudnetpy-1.78.0.dist-info/entry_points.txt,sha256=HhY7LwCFk4qFgDlXx_Fy983ZTd831WlhtdPIzV-Y3dY,51
-cloudnetpy-1.78.0.dist-info/top_level.txt,sha256=ibSPWRr6ojS1i11rtBFz2_gkIe68mggj7aeswYfaOo0,16
-cloudnetpy-1.78.0.dist-info/RECORD,,
+cloudnetpy-1.78.1.dist-info/METADATA,sha256=AznJgGBfGvD7cbWAieBBM5xVu3Wn9GQebLBXaAVFJUE,5796
+cloudnetpy-1.78.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+cloudnetpy-1.78.1.dist-info/entry_points.txt,sha256=HhY7LwCFk4qFgDlXx_Fy983ZTd831WlhtdPIzV-Y3dY,51
+cloudnetpy-1.78.1.dist-info/top_level.txt,sha256=ibSPWRr6ojS1i11rtBFz2_gkIe68mggj7aeswYfaOo0,16
+cloudnetpy-1.78.1.dist-info/RECORD,,
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.8.0)
+Generator: setuptools (80.9.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 