cloudnetpy 1.77.2__py3-none-any.whl → 1.78.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
cloudnetpy/categorize/insects.py CHANGED
@@ -83,7 +83,7 @@ def _get_smoothed_v(
     obs: ClassData,
     sigma: tuple[float, float] = (5, 5),
 ) -> ma.MaskedArray:
-    smoothed_v = gaussian_filter(obs.v, sigma)
+    smoothed_v = gaussian_filter(obs.v.filled(0), sigma)
     return ma.masked_where(obs.v.mask, smoothed_v)
 
 
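The change above matters because `scipy.ndimage.gaussian_filter` ignores the mask of a masked array and smooths whatever values (here, NaNs) sit underneath it. A minimal standalone sketch of the pattern, with toy data that is not from the package:

```python
# Sketch: fill masked gates before smoothing, then restore the original mask.
import numpy as np
from numpy import ma
from scipy.ndimage import gaussian_filter

v = ma.masked_invalid(np.array([[1.0, np.nan, 3.0], [4.0, 5.0, np.nan]]))

# Without .filled(0), the underlying NaNs would propagate through the filter.
smoothed = gaussian_filter(v.filled(0), sigma=1)
smoothed = ma.masked_where(ma.getmaskarray(v), smoothed)
```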
cloudnetpy/categorize/radar.py CHANGED
@@ -24,8 +24,6 @@ class Radar(DataSource):
         folding_velocity (float): Radar's folding velocity (m/s).
         location (str): Location of the radar, copied from the global attribute
             `location` of the input file.
-        sequence_indices (list): Indices denoting the different altitude
-            regimes of the radar.
         source_type (str): Type of the radar, copied from the global attribute
             `source` of the *radar_file*. Can be free form string but must
             include either 'rpg' or 'mira' denoting one of the two supported
@@ -39,21 +37,17 @@ class Radar(DataSource):
     def __init__(self, full_path: str):
         super().__init__(full_path, radar=True)
         self.radar_frequency = float(self.getvar("radar_frequency"))
-        self.folding_velocity = self._get_folding_velocity()
-        self.sequence_indices = self._get_sequence_indices()
         self.location = getattr(self.dataset, "location", "")
         self.source_type = getattr(self.dataset, "source", "")
         self.height: np.ndarray
         self.altitude: float
         self._init_data()
-        self._init_sigma_v()
-        self._get_folding_velocity_full()
 
     def rebin_to_grid(self, time_new: np.ndarray) -> list:
-        """Rebins radar data in time using mean.
+        """Rebins radar data in time.
 
         Args:
-            time_new: Target time array as fraction hour. Updates *time* attribute.
+            time_new: Target time array as fraction hour.
 
         """
         bad_time_indices = []
@@ -64,21 +58,25 @@ class Radar(DataSource):
                     bad_time_indices = array.rebin_data(self.time, time_new)
                     array.lin2db()
                 case "v":
-                    array.rebin_velocity(
-                        self.time,
+                    array.data = self._rebin_velocity(
+                        array.data,
                         time_new,
-                        self.folding_velocity,
-                        self.sequence_indices,
                     )
                 case "v_sigma":
-                    array.calc_linear_std(self.time, time_new)
+                    array.data, _ = utils.rebin_2d(
+                        self.time,
+                        array.data,
+                        time_new,
+                        "std",
+                        mask_zeros=True,
+                    )
                 case "width":
                     array.rebin_data(self.time, time_new)
                 case "rainfall_rate":
-                    array.rebin_data(self.time, time_new, mask_zeros=False)
+                    array.rebin_data(self.time, time_new)
                 case _:
                     continue
-        return bad_time_indices
+        return list(bad_time_indices)
 
     def remove_incomplete_pixels(self) -> None:
         """Mask radar pixels where one or more required quantities are missing.
@@ -337,63 +335,88 @@ class Radar(DataSource):
 
     def _init_data(self) -> None:
         self.append_data(self.getvar("Zh"), "Z", units="dBZ")
-        for key in ("v", "ldr", "width", "sldr", "rainfall_rate"):
-            try:
-                self._variables_to_cloudnet_arrays((key,))
-            except KeyError:
-                continue
-
-    def _init_sigma_v(self) -> None:
-        """Initializes std of the velocity field. The std will be calculated
-        later when re-binning the data.
-        """
         self.append_data(self.getvar("v"), "v_sigma")
+        for key in ("v", "ldr", "width", "sldr", "rainfall_rate", "nyquist_velocity"):
+            if key in self.dataset.variables:
+                data = self.dataset.variables[key]
+                self.append_data(data, key)
 
-    def _get_sequence_indices(self) -> list:
-        """Mira has only one sequence and one folding velocity. RPG has
-        several sequences with different folding velocities.
-        """
-        if self.height is None:
-            msg = "Height not found in the input file"
-            raise RuntimeError(msg)
-        all_indices = np.arange(len(self.height))
-        if not utils.isscalar(self.folding_velocity):
-            starting_indices = self.getvar("chirp_start_indices")
-            return np.split(all_indices, starting_indices[1:])
-        return [all_indices]
-
-    def _get_folding_velocity(self) -> np.ndarray | float:
+    def _rebin_velocity(
+        self,
+        data: np.ndarray,
+        time_new: np.ndarray,
+    ) -> np.ndarray:
+        """Rebins Doppler velocity in polar coordinates."""
+        folding_velocity = self._get_expanded_folding_velocity()
+        # With the new shape (maximum value in every bin)
+        max_folding_binned, _ = utils.rebin_2d(
+            self.time,
+            folding_velocity,
+            time_new,
+            "max",
+        )
+        # Store this in the file
+        self.append_data(max_folding_binned, "nyquist_velocity")
+        # With original shape (repeat maximum value for each point in every bin)
+        max_folding_full, _ = utils.rebin_2d(
+            self.time,
+            folding_velocity,
+            time_new,
+            "max",
+            keepdim=True,
+        )
+        data_scaled = data * (np.pi / max_folding_full)
+        vel_x = ma.cos(data_scaled)
+        vel_y = ma.sin(data_scaled)
+        vel_x_mean, _ = utils.rebin_2d(self.time, vel_x, time_new)
+        vel_y_mean, _ = utils.rebin_2d(self.time, vel_y, time_new)
+        vel_scaled = ma.arctan2(vel_y_mean, vel_x_mean)
+        return vel_scaled / (np.pi / max_folding_binned)
+
+    def _get_expanded_folding_velocity(self) -> np.ndarray:
         if "nyquist_velocity" in self.dataset.variables:
-            return self.getvar("nyquist_velocity")
-        if "prf" in self.dataset.variables:
+            fvel = self.getvar("nyquist_velocity")
+        elif "prf" in self.dataset.variables:
             prf = self.getvar("prf")
-            return _prf_to_folding_velocity(prf, self.radar_frequency)
-        msg = "Unable to determine folding velocity"
-        raise RuntimeError(msg)
-
-    def _get_folding_velocity_full(self) -> None:
-        folding_velocity: list | np.ndarray = []
-        if utils.isscalar(self.folding_velocity):
-            folding_velocity = np.repeat(
-                self.folding_velocity,
-                len(self.sequence_indices[0]),
-            )
+            fvel = _prf_to_folding_velocity(prf, self.radar_frequency)
         else:
-            folding_velocity = list(folding_velocity)
-            self.folding_velocity = np.array(self.folding_velocity)
-            for indices, velocity in zip(
-                self.sequence_indices,
-                self.folding_velocity,
-                strict=True,
-            ):
-                folding_velocity.append(np.repeat(velocity, len(indices)))
-            folding_velocity = np.hstack(folding_velocity)
-        self.append_data(folding_velocity, "nyquist_velocity")
-
-
-def _prf_to_folding_velocity(prf: np.ndarray, radar_frequency: float) -> float:
+            msg = "Unable to determine folding velocity"
+            raise RuntimeError(msg)
+
+        n_time = self.getvar("time").size
+        n_height = self.height.size
+
+        if fvel.shape == (n_time, n_height):
+            # Folding velocity is already expanded in the radar file
+            # (not yet the case in current files)
+            return fvel
+        if utils.isscalar(fvel):
+            # e.g. MIRA
+            return np.broadcast_to(fvel, (n_time, n_height))
+
+        # RPG radars have chirp segments
+        starts = self.getvar("chirp_start_indices")
+        n_seg = starts.size if starts.ndim == 1 else starts.shape[1]
+
+        starts = np.broadcast_to(starts, (n_time, n_seg))
+        fvel = np.broadcast_to(fvel, (n_time, n_seg))
+
+        # Indices should start from zero (first range gate), but in
+        # pre-processed RV Meteor files the first index is 1, so normalize:
+        first_values = starts[:, [0]]
+        if not np.all(np.isin(first_values, [0, 1])):
+            msg = "First value of chirp_start_indices must be 0 or 1"
+            raise ValueError(msg)
+        starts = starts - first_values
+
+        chirp_size = np.diff(starts, append=n_height)
+        return np.repeat(fvel.ravel(), chirp_size.ravel()).reshape((n_time, n_height))
+
+
+def _prf_to_folding_velocity(prf: np.ndarray, radar_frequency: float) -> np.ndarray:
     ghz_to_hz = 1e9
     if len(prf) != 1:
         msg = "Unable to determine folding velocity"
         raise RuntimeError(msg)
-    return float(prf[0] * constants.c / (4 * radar_frequency * ghz_to_hz))
+    return prf[0] * constants.c / (4 * radar_frequency * ghz_to_hz)
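The new `_rebin_velocity` averages Doppler velocities as angles so that values folded at the Nyquist velocity wrap around instead of cancelling. A standalone sketch of the idea with made-up numbers and a single constant `v_fold` (unlike the per-gate array used above):

```python
# Circular (polar-coordinate) averaging of folded Doppler velocities.
import numpy as np

v_fold = 10.0              # folding (Nyquist) velocity, m/s
v = np.array([9.8, -9.9])  # one true velocity, aliased across +/- v_fold

print(v.mean())  # -0.05: a plain mean cancels the signal

angles = v * (np.pi / v_fold)  # map [-v_fold, v_fold] onto [-pi, pi]
mean_angle = np.arctan2(np.sin(angles).mean(), np.cos(angles).mean())
print(mean_angle / (np.pi / v_fold))  # ~9.95: the wrap is handled correctly
```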
cloudnetpy/cloudnetarray.py CHANGED
@@ -1,6 +1,5 @@
 """CloudnetArray class."""
 
-import math
 from collections.abc import Sequence
 
 import netCDF4
@@ -58,29 +57,22 @@ class CloudnetArray:
         """Masks data from given indices."""
         self.data[ind] = ma.masked
 
-    def rebin_data(
-        self, time: np.ndarray, time_new: np.ndarray, *, mask_zeros: bool = True
-    ) -> list:
+    def rebin_data(self, time: np.ndarray, time_new: np.ndarray) -> np.ndarray:
         """Rebins `data` in time.
 
         Args:
             time: 1D time array.
             time_new: 1D new time array.
-            mask_zeros: Whether to mask 0 values in the returned array. Default is True.
 
         Returns:
             Time indices without data.
 
         """
         if self.data.ndim == 1:
-            self.data = utils.rebin_1d(time, self.data, time_new, mask_zeros=mask_zeros)
-            bad_indices = list(np.where(self.data == ma.masked)[0])
+            self.data = utils.rebin_1d(time, self.data, time_new)
+            bad_indices = np.nonzero(self.data.mask)[0]
         else:
-            if not isinstance(self.data, ma.MaskedArray):
-                self.data = ma.masked_array(self.data)
-            self.data, bad_indices = utils.rebin_2d(
-                time, self.data, time_new, mask_zeros=mask_zeros
-            )
+            self.data, bad_indices = utils.rebin_2d(time, self.data, time_new)
         return bad_indices
 
     def fetch_attributes(self) -> list:
@@ -108,6 +100,21 @@ class CloudnetArray:
         if data:
             setattr(self, key, data)
 
+    def filter_isolated_pixels(self) -> None:
+        """Filters hot pixels from radar data."""
+        self._filter(utils.filter_isolated_pixels)
+
+    def filter_vertical_stripes(self) -> None:
+        """Filters vertical artifacts from radar data."""
+        self._filter(utils.filter_x_pixels)
+
+    def _filter(self, fun) -> None:
+        if not isinstance(self.data, ma.MaskedArray):
+            self.data = ma.masked_array(self.data)
+        is_data = (~self.data.mask).astype(int)
+        is_data_filtered = fun(is_data)
+        self.data[is_data_filtered == 0] = ma.masked
+
     def _init_data(self) -> np.ndarray:
         if isinstance(self.variable, netCDF4.Variable):
             return self.variable[:]
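`_filter` operates on the data-presence mask rather than the values: it builds a 0/1 field of unmasked pixels, runs a 2-D filter over it, and masks whatever the filter drops. A sketch of that pattern using a stand-in filter (`scipy.ndimage.binary_opening`; the package itself passes `utils.filter_isolated_pixels` or `utils.filter_x_pixels`):

```python
# Sketch of the mask-based filtering pattern used by _filter.
import numpy as np
from numpy import ma
from scipy.ndimage import binary_opening

data = ma.masked_all((5, 5))
data[2, 2] = 1.0  # a single unmasked "hot" pixel

is_data = (~ma.getmaskarray(data)).astype(int)
is_data_filtered = binary_opening(is_data)  # stand-in: erases the lone pixel
data[is_data_filtered == 0] = ma.masked     # the hot pixel is masked again
```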
@@ -139,73 +146,3 @@ class CloudnetArray:
 
     def __getitem__(self, ind: tuple) -> np.ndarray:
         return self.data[ind]
-
-    def filter_isolated_pixels(self) -> None:
-        """Filters hot pixels from radar data."""
-        self._filter(utils.filter_isolated_pixels)
-
-    def filter_vertical_stripes(self) -> None:
-        """Filters vertical artifacts from radar data."""
-        self._filter(utils.filter_x_pixels)
-
-    def _filter(self, fun) -> None:
-        if not isinstance(self.data, ma.MaskedArray):
-            self.data = ma.masked_array(self.data)
-        is_data = (~self.data.mask).astype(int)
-        is_data_filtered = fun(is_data)
-        self.data[is_data_filtered == 0] = ma.masked
-
-    def calc_linear_std(self, time: np.ndarray, time_new: np.ndarray) -> None:
-        """Calculates std of radar velocity.
-
-        Args:
-            time: 1D time array.
-            time_new: 1D new time array.
-
-        Notes:
-            The result is masked if the bin contains masked values.
-        """
-        data_as_float = self.data.astype(float)
-        data_as_float = ma.masked_array(data_as_float)
-        self.data, _ = utils.rebin_2d(time, data_as_float, time_new, "std")
-
-    def rebin_velocity(
-        self,
-        time: np.ndarray,
-        time_new: np.ndarray,
-        folding_velocity: float | np.ndarray,
-        sequence_indices: list,
-    ) -> None:
-        """Rebins Doppler velocity in polar coordinates.
-
-        Args:
-            time: 1D time array.
-            time_new: 1D new time array.
-            folding_velocity: Folding velocity (m/s). Can be a float when
-                it's the same for all altitudes, or np.ndarray when it
-                matches difference altitude regions (defined in `sequence_indices`).
-            sequence_indices: List containing indices of different folding regions,
-                e.g. [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10]].
-
-        """
-
-        def _get_scaled_vfold() -> np.ndarray:
-            vfold_scaled = math.pi / folding_velocity
-            if isinstance(vfold_scaled, float):
-                vfold_scaled = np.array([float(vfold_scaled)])
-            return vfold_scaled
-
-        def _scale_by_vfold(data_in: np.ndarray, fun) -> np.ndarray:
-            data_out = ma.copy(data_in)
-            for i, ind in enumerate(sequence_indices):
-                data_out[:, ind] = fun(data_in[:, ind], folding_velocity_scaled[i])
-            return data_out
-
-        folding_velocity_scaled = _get_scaled_vfold()
-        data_scaled = _scale_by_vfold(self.data, np.multiply)
-        vel_x = ma.cos(data_scaled)
-        vel_y = ma.sin(data_scaled)
-        vel_x_mean, _ = utils.rebin_2d(time, vel_x, time_new)
-        vel_y_mean, _ = utils.rebin_2d(time, vel_y, time_new)
-        mean_vel_scaled = np.arctan2(vel_y_mean, vel_x_mean)
-        self.data = _scale_by_vfold(mean_vel_scaled, np.divide)
cloudnetpy/concat_lib.py CHANGED
@@ -9,9 +9,9 @@ from typing import Literal
 
 import netCDF4
 import numpy as np
+from numpy import ma
 
 from cloudnetpy import utils
-from cloudnetpy.exceptions import InconsistentDataError
 
 
 def truncate_netcdf_file(
@@ -89,7 +89,7 @@ def concatenate_files(
     variables: list | None = None,
     new_attributes: dict | None = None,
    ignore: list | None = None,
-    allow_difference: list | None = None,
+    interp_dimension: str = "range",
 ) -> list:
     """Concatenate netCDF files in one dimension.
 
@@ -101,22 +101,21 @@ def concatenate_files(
             Default is None when all variables with 'concat_dimension' will be saved.
         new_attributes: Optional new global attributes as {'attribute_name': value}.
         ignore: List of variables to be ignored.
-        allow_difference: Names of scalar variables that can differ from one file to
-            another (value from the first file is saved).
+        interp_dimension: Dimension name for interpolation if the dimensions
+            are not the same.
 
     Returns:
         List of filenames that were successfully concatenated.
 
     Notes:
-        Arrays without 'concat_dimension', scalars, and global attributes will be taken
-        from the first file. Groups, possibly present in a NETCDF4 formatted file,
-        are ignored.
+        Arrays without 'concat_dimension' and scalars are expanded to the
+        concat_dimension. Global attributes are taken from the first file.
+        Groups, possibly present in a NETCDF4 formatted file, are ignored.
 
     """
-    with _Concat(filenames, output_file, concat_dimension) as concat:
-        concat.get_common_variables()
+    with _Concat(filenames, output_file, concat_dimension, interp_dimension) as concat:
         concat.create_global_attributes(new_attributes)
-        return concat.concat_data(variables, ignore, allow_difference)
+        return concat.concat_data(variables, ignore)
 
 
 class _Concat:
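A hypothetical call against the new signature (file names made up): variables lacking the concat dimension are expanded along it, and files whose interpolation dimension differs from the first file's are regridded by nearest neighbour.

```python
# Assumed usage; radar_*.nc are placeholder file names.
from cloudnetpy import concat_lib

used_files = concat_lib.concatenate_files(
    ["radar_00.nc", "radar_01.nc"],
    "radar_day.nc",
    concat_dimension="time",
    interp_dimension="range",
)
```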
@@ -127,19 +126,14 @@ class _Concat:
         filenames: Iterable[PathLike | str],
         output_file: str,
         concat_dimension: str = "time",
+        interp_dim: str = "range",
     ):
         self.filenames = sorted(map(Path, filenames), key=lambda f: f.name)
         self.concat_dimension = concat_dimension
+        self.interp_dim = interp_dim
         self.first_filename = self.filenames[0]
         self.first_file = netCDF4.Dataset(self.first_filename)
         self.concatenated_file = self._init_output_file(output_file)
-        self.common_variables = set()
-
-    def get_common_variables(self) -> None:
-        """Finds variables which should have the same values in all files."""
-        for key, value in self.first_file.variables.items():
-            if self.concat_dimension not in value.dimensions:
-                self.common_variables.add(key)
 
     def create_global_attributes(self, new_attributes: dict | None) -> None:
         """Copies global attributes from one of the source files."""
@@ -150,17 +144,16 @@ class _Concat:
 
     def concat_data(
         self,
-        variables: list | None,
-        ignore: list | None,
-        allow_vary: list | None,
+        keep: list | None = None,
+        ignore: list | None = None,
     ) -> list:
         """Concatenates data arrays."""
-        self._write_initial_data(variables, ignore)
+        self._write_initial_data(keep, ignore)
         output = [self.first_filename]
         if len(self.filenames) > 1:
             for filename in self.filenames[1:]:
                 try:
-                    self._append_data(filename, allow_vary)
+                    self._append_data(filename)
                 except RuntimeError as e:
                     if "NetCDF: HDF error" in str(e):
                         msg = f"Caught a NetCDF HDF error. Skipping file '{filename}'."
@@ -170,24 +163,28 @@ class _Concat:
                 output.append(filename)
         return output
 
-    def _write_initial_data(self, variables: list | None, ignore: list | None) -> None:
-        for key in self.first_file.variables:
+    def _write_initial_data(self, keep: list | None, ignore: list | None) -> None:
+        len_concat_dim = self.first_file[self.concat_dimension].size
+        auto_scale = False
+
+        for key, var in self.first_file.variables.items():
             if (
-                variables is not None
-                and key not in variables
-                and key not in self.common_variables
+                # This filtering only affects variables having the concat_dimension
+                keep is not None
+                and key not in keep
                 and key != self.concat_dimension
+                and self.concat_dimension in var.dimensions
             ):
                 continue
             if ignore and key in ignore:
                 continue
 
-            auto_scale = False
-            self.first_file[key].set_auto_scale(auto_scale)
-            array = self.first_file[key][:]
-            dimensions = self.first_file[key].dimensions
-            fill_value = getattr(self.first_file[key], "_FillValue", None)
-            var = self.concatenated_file.createVariable(
+            var.set_auto_scale(auto_scale)
+            array, dimensions = self._expand_array(var, len_concat_dim)
+
+            fill_value = var.get_fill_value()
+
+            var_new = self.concatenated_file.createVariable(
                 key,
                 array.dtype,
                 dimensions,
@@ -196,37 +193,49 @@ class _Concat:
                 shuffle=False,
                 fill_value=fill_value,
             )
-            auto_scale = False
-            var.set_auto_scale(auto_scale)
-            var[:] = array
-            _copy_attributes(self.first_file[key], var)
-
-    def _append_data(self, filename: str | PathLike, allow_vary: list | None) -> None:
+            var_new.set_auto_scale(auto_scale)
+            var_new[:] = array
+            _copy_attributes(var, var_new)
+
+    def _expand_array(
+        self, var: netCDF4.Variable, n_data: int
+    ) -> tuple[ma.MaskedArray, tuple[str, ...]]:
+        dimensions = var.dimensions
+        arr = var[:]
+        if self.concat_dimension not in dimensions and var.name != self.interp_dim:
+            dimensions = (self.concat_dimension, *dimensions)
+            arr = np.repeat(arr[np.newaxis, ...], n_data, axis=0)
+
+        return arr, dimensions
+
+    def _append_data(self, filename: str | PathLike) -> None:
         with netCDF4.Dataset(filename) as file:
             auto_scale = False
             file.set_auto_scale(auto_scale)
             ind0 = len(self.concatenated_file.variables[self.concat_dimension])
             ind1 = ind0 + len(file.variables[self.concat_dimension])
+            n_points = ind1 - ind0
+
             for key in self.concatenated_file.variables:
-                if key not in file.variables:
-                    continue
-                array = file[key][:]
-                if key in self.common_variables:
-                    if allow_vary is not None and key in allow_vary:
-                        continue
-                    if not np.array_equal(self.first_file[key][:], array):
-                        msg = (
-                            f"Inconsistent values in variable '{key}' between "
-                            f"files '{self.first_filename}' and '{filename}'"
-                        )
-                        raise InconsistentDataError(msg)
+                if key not in file.variables or key == self.interp_dim:
                     continue
-                if array.ndim == 0:
-                    continue
-                if array.ndim == 1:
-                    self.concatenated_file.variables[key][ind0:ind1] = array
-                else:
-                    self.concatenated_file.variables[key][ind0:ind1, :] = array
+
+                array, dimensions = self._expand_array(file[key], n_points)
+
+                # Nearest neighbour interpolation in the interp_dim dimension
+                # if the dimensions are not the same between the files
+                if self.interp_dim in dimensions and (
+                    self.first_file[self.interp_dim].size != file[self.interp_dim].size
+                ):
+                    x = file.variables[self.interp_dim][:]
+                    x_target = self.first_file.variables[self.interp_dim][:]
+                    idx = np.abs(x[:, None] - x_target[None, :]).argmin(axis=0)
+                    array = array[:, idx]
+                    out_of_bounds = (x_target < x.min()) | (x_target > x.max())
+                    fill_value = self.first_file.variables[key].get_fill_value()
+                    array[:, out_of_bounds] = fill_value
+
+                self.concatenated_file.variables[key][ind0:ind1, ...] = array
 
     def _init_output_file(self, output_file: str) -> netCDF4.Dataset:
         data_model: Literal["NETCDF4", "NETCDF4_CLASSIC"] = (
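The nearest-neighbour regridding in `_append_data` builds, for every gate of the first file's grid, the index of the closest source gate, then blanks target gates outside the source coverage. A toy run of the same index arithmetic:

```python
# Toy illustration of the index mapping used above (values made up).
import numpy as np

x = np.array([0.0, 100.0, 200.0])                      # source "range" grid
x_target = np.array([0.0, 50.0, 150.0, 250.0, 350.0])  # first file's grid

idx = np.abs(x[:, None] - x_target[None, :]).argmin(axis=0)
print(idx)  # [0 0 1 2 2] -> nearest source gate per target gate

out_of_bounds = (x_target < x.min()) | (x_target > x.max())
print(out_of_bounds)  # [False False False  True  True] -> set to fill value
```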
cloudnetpy/datasource.py CHANGED
@@ -182,53 +182,6 @@ class DataSource:
             return np.array(range_instrument + self.altitude)
         return None
 
-    def _variables_to_cloudnet_arrays(self, keys: tuple) -> None:
-        """Transforms netCDF4-variables into CloudnetArrays.
-
-        Args:
-            keys: netCDF4-variables to be converted. The results
-                are saved in *self.data* dictionary with *fields*
-                strings as keys.
-
-        Notes:
-            The attributes of the variables are not copied. Just the data.
-
-        """
-        for key in keys:
-            self.append_data(self.dataset.variables[key], key)
-
-    def _unknown_variable_to_cloudnet_array(
-        self,
-        possible_names: tuple,
-        key: str,
-        units: str | None = None,
-        *,
-        ignore_mask: bool = False,
-    ) -> None:
-        """Transforms single netCDF4 variable into CloudnetArray.
-
-        Args:
-            possible_names: Tuple of strings containing the possible
-                names of the variable in the input NetCDF file.
-            key: Key for self.data dictionary and name-attribute
-                for the saved CloudnetArray object.
-            units: Units attribute for the CloudnetArray object.
-            ignore_mask: If true, always writes an ordinary numpy array.
-
-        Raises:
-            RuntimeError: No variable found.
-
-        """
-        for name in possible_names:
-            if name in self.dataset.variables:
-                array: netCDF4.Variable | np.ndarray = self.dataset.variables[name]
-                if ignore_mask is True:
-                    array = np.array(array)
-                self.append_data(array, key, units=units)
-                return
-        msg = f"Missing variable {possible_names[0]} in the input file."
-        raise RuntimeError(msg)
-
     def __enter__(self):
         return self
 
cloudnetpy/instruments/bowtie.py CHANGED
@@ -80,8 +80,8 @@ class Bowtie(NcRadar):
         self.data["relative_humidity"].data /= 100
 
     def fix_chirp_start_indices(self) -> None:
-        ind = self.data["chirp_start_indices"].data
-        self.data["chirp_start_indices"].data = np.array([int(i) for i in ind])
+        array = self.data["chirp_start_indices"].data
+        self.data["chirp_start_indices"].data = np.array(array, dtype=np.int32)
         self.data["chirp_start_indices"].data_type = "int32"
 
     def check_date(self, date: str):
cloudnetpy/instruments/copernicus.py CHANGED
@@ -69,7 +69,6 @@ def copernicus2nc(
     valid_filenames = utils.get_files_with_variables(
         valid_filenames, ["time", "ZED_HC"]
     )
-    valid_filenames = utils.get_files_with_common_range(valid_filenames)
     variables = list(keymap.keys())
     concat_lib.concatenate_files(
         valid_filenames,
cloudnetpy/instruments/galileo.py CHANGED
@@ -68,7 +68,6 @@ def galileo2nc(
     valid_filenames = utils.get_files_with_variables(
         valid_filenames, ["time", "ZED_HC"]
     )
-    valid_filenames = utils.get_files_with_common_range(valid_filenames)
     variables = list(keymap.keys())
     concat_lib.concatenate_files(
         valid_filenames,
cloudnetpy/instruments/mira.py CHANGED
@@ -193,7 +193,6 @@ def _parse_input_files(input_files: str | list[str], temp_dir: str) -> tuple:
        )
        raise FileNotFoundError(msg)
 
-    valid_files = utils.get_files_with_common_range(valid_files)
    filetypes = list({f.split(".")[-1].lower() for f in valid_files})
 
    if len(filetypes) > 1:
@@ -208,15 +207,6 @@ def _parse_input_files(input_files: str | list[str], temp_dir: str) -> tuple:
            input_filename,
            variables=variables,
            ignore=_get_ignored_variables(filetypes[0]),
-            # It's somewhat risky to use varying nfft values as the velocity
-            # resolution may differ, but this enables concatenation when switching
-            # between different nfft configurations. Spectral data is ignored
-            # anyway for now.
-            allow_difference=[
-                "nave",
-                "ovl",
-                "nfft",
-            ],
        )
    else:
        input_filename = input_files
cloudnetpy/model_evaluation/plotting/plot_tools.py CHANGED
@@ -97,7 +97,7 @@ def read_data_characters(nc_file: str, name: str, model: str) -> tuple:
    try:
        mask = y.mask
        if mask.any():
-            x, y, data = change2one_dim_axes(x, y, data)
+            x, y, data = change2one_dim_axes(ma.array(x), y, data)
    except AttributeError:
        return data, x, y
    return data, x, y
cloudnetpy/model_evaluation/tests/unit/test_advance_methods.py CHANGED
@@ -65,7 +65,7 @@ def test_fit_z_sensitivity(obs_file, model_file) -> None:
    model = ModelManager(str(model_file), MODEL, OUTPUT_FILE, PRODUCT)
    adv_pro = AdvanceProductMethods(model, str(model_file), obs)
    h = np.array([[5000, 9000, 13000], [10000, 15000, 20000], [8000, 12000, 16000]])
-    compare = np.array([[0, 0.15, 0.5], [0.1, 1, 0], [0.15, 0, 1]])
+    compare = ma.masked_invalid([[np.nan, 0.15, 0.5], [0.1, 1, np.nan], [0.15, 0, 1]])
    x = adv_pro.fit_z_sensitivity(h)
    testing.assert_array_almost_equal(x, compare)
 
cloudnetpy/output.py CHANGED
@@ -57,7 +57,11 @@ def _get_netcdf_dimensions(obj) -> dict:
    }
    # RPG cloud radar
    if "chirp_start_indices" in obj.data:
-        dimensions["chirp_sequence"] = len(obj.data["chirp_start_indices"][:])
+        if obj.data["chirp_start_indices"][:].ndim == 1:
+            dimensions["chirp_start_indices"] = len(obj.data["chirp_start_indices"][:])
+        else:
+            dimensions["chirp"] = obj.data["chirp_start_indices"][:].shape[1]
+
    # disdrometer
    if hasattr(obj, "n_diameter") and hasattr(obj, "n_velocity"):
        dimensions["diameter"] = obj.n_diameter
cloudnetpy/utils.py CHANGED
@@ -3,7 +3,6 @@
 import base64
 import datetime
 import hashlib
-import logging
 import os
 import re
 import textwrap
@@ -11,7 +10,7 @@ import uuid
 import warnings
 from collections.abc import Iterator
 from datetime import timezone
-from typing import Literal, TypeVar
+from typing import Any, Literal, TypeVar
 
 import netCDF4
 import numpy as np
@@ -142,54 +141,42 @@ def binvec(x: np.ndarray | list) -> np.ndarray:
 
 def rebin_2d(
     x_in: np.ndarray,
-    array: ma.MaskedArray,
+    array: np.ndarray,
     x_new: np.ndarray,
-    statistic: Literal["mean", "std"] = "mean",
+    statistic: Literal["mean", "std", "max"] = "mean",
     n_min: int = 1,
     *,
-    mask_zeros: bool = True,
-) -> tuple[ma.MaskedArray, list]:
-    """Rebins 2-D data in one dimension.
+    keepdim: bool = False,
+    mask_zeros: bool = False,
+) -> tuple[ma.MaskedArray, np.ndarray]:
+    edges = binvec(x_new)
+    binn = np.digitize(x_in, edges) - 1
+    n_bins = len(x_new)
+    counts = np.bincount(binn[binn >= 0], minlength=n_bins)
 
-    Args:
-        x_in: 1-D array with shape (n,).
-        array: 2-D input data with shape (n, m).
-        x_new: 1-D target vector (center points) with shape (N,).
-        statistic: Statistic to be calculated. Possible statistics are 'mean', 'std'.
-            Default is 'mean'.
-        n_min: Minimum number of points to have good statistics in a bin. Default is 1.
-        mask_zeros: Whether to mask 0 values in the returned array. Default is True.
+    stat_fn: Any = {
+        "mean": ma.mean,
+        "std": ma.std,
+        "max": ma.max,
+    }[statistic]
 
-    Returns:
-        tuple: Rebinned data with shape (N, m) and indices of bins without enough data.
-    """
-    edges = binvec(x_new)
-    result = np.zeros((len(x_new), array.shape[1]))
-    array_screened = ma.masked_invalid(array, copy=True)  # data may contain nan-values
-    for ind, values in enumerate(array_screened.T):
-        mask = ~values.mask
-        if ma.any(values[mask]):
-            result[:, ind], _, _ = stats.binned_statistic(
-                x_in[mask],
-                values[mask],
-                statistic=statistic,
-                bins=edges,
-            )
-    result[~np.isfinite(result)] = 0
-    if mask_zeros is True:
-        masked_result = ma.masked_equal(result, 0)
-    else:
-        masked_result = ma.array(result)
+    shape = array.shape if keepdim else (n_bins, array.shape[1])
+    result: ma.MaskedArray = ma.masked_array(np.ones(shape, dtype="float32"), mask=True)
+
+    for bin_ind in range(n_bins):
+        if counts[bin_ind] < n_min:
+            continue
+        mask = binn == bin_ind
+        block = array[mask, :]
+        x_ind = mask if keepdim else bin_ind
+        result[x_ind, :] = stat_fn(block, axis=0)
+
+    empty_bins = np.where(counts < n_min)[0]
 
-    # Fill bins with not enough profiles
-    x_hist, _ = np.histogram(x_in, bins=edges)
-    empty_mask = x_hist < n_min
-    masked_result[empty_mask, :] = ma.masked
-    empty_indices = list(np.nonzero(empty_mask)[0])
-    if len(empty_indices) > 0:
-        logging.debug("No data in %s bins", len(empty_indices))
+    if mask_zeros:
+        result[result == 0] = ma.masked
 
-    return masked_result, empty_indices
+    return result, empty_bins
 
 
 def rebin_1d(
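The rewritten `rebin_2d` replaces `scipy.stats.binned_statistic` with an explicit `np.digitize`/`np.bincount` pass, which is what enables the new `"max"` statistic and the `keepdim` mode. A toy walk-through of the bin bookkeeping (values made up; `edges` written out as `binvec` would roughly produce them from the centers):

```python
# How samples are assigned to bins in the new rebin_2d.
import numpy as np

x_in = np.array([0.1, 0.4, 0.6, 1.4])  # original sample positions
x_new = np.array([0.5, 1.5, 2.5])      # target bin centers

edges = np.array([0.0, 1.0, 2.0, 3.0])  # bin edges around the centers
binn = np.digitize(x_in, edges) - 1     # bin index per sample: [0 0 0 1]
counts = np.bincount(binn[binn >= 0], minlength=len(x_new))
print(counts)  # [3 1 0] -> the empty last bin stays masked (n_min=1)
```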
@@ -197,8 +184,6 @@ def rebin_1d(
     array: np.ndarray | ma.MaskedArray,
     x_new: np.ndarray,
     statistic: str = "mean",
-    *,
-    mask_zeros: bool = True,
 ) -> ma.MaskedArray:
     """Rebins 1D array.
 
@@ -208,14 +193,13 @@ def rebin_1d(
         x_new: 1-D target vector (center points) with shape (N,).
         statistic: Statistic to be calculated. Possible statistics are 'mean', 'std'.
             Default is 'mean'.
-        mask_zeros: Whether to mask 0 values in the returned array. Default is True.
 
     Returns:
         Re-binned data with shape (N,).
 
     """
     edges = binvec(x_new)
-    result = np.zeros(len(x_new))
+    result = ma.zeros(len(x_new))
     array_screened = ma.masked_invalid(array, copy=True)  # data may contain nan-values
     mask = ~array_screened.mask
     if ma.any(array_screened[mask]):
@@ -225,10 +209,7 @@ def rebin_1d(
             statistic=statistic,
             bins=edges,
         )
-    result[~np.isfinite(result)] = 0
-    if mask_zeros:
-        return ma.masked_equal(result, 0)
-    return ma.array(result)
+    return ma.masked_invalid(result, copy=True)
 
 
 def filter_isolated_pixels(array: np.ndarray) -> np.ndarray:
@@ -960,21 +941,6 @@ def get_file_type(filename: str) -> str:
     raise ValueError(msg)
 
 
-def get_files_with_common_range(filenames: list) -> list:
-    """Returns files with the same (most common) number of range gates."""
-    n_range = []
-    for file in filenames:
-        with netCDF4.Dataset(file) as nc:
-            n_range.append(len(nc.variables["range"]))
-    most_common = np.bincount(n_range).argmax()
-    n_removed = len(filenames) - n_range.count(int(most_common))
-    if n_removed > 0:
-        logging.warning(
-            "Removing %s files due to inconsistent height vector", n_removed
-        )
-    return [file for i, file in enumerate(filenames) if n_range[i] == most_common]
-
-
 def get_files_with_variables(filenames: list, variables: list[str]) -> list:
     """Returns files where all variables exist."""
     valid_files = []
cloudnetpy/version.py CHANGED
@@ -1,4 +1,4 @@
 MAJOR = 1
-MINOR = 77
-PATCH = 2
+MINOR = 78
+PATCH = 1
 __version__ = f"{MAJOR}.{MINOR}.{PATCH}"
cloudnetpy-1.78.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cloudnetpy
-Version: 1.77.2
+Version: 1.78.1
 Summary: Python package for Cloudnet processing
 Author: Simo Tukiainen
 License: MIT License
cloudnetpy-1.78.1.dist-info/RECORD CHANGED
@@ -1,15 +1,15 @@
 cloudnetpy/__init__.py,sha256=X_FqY-4yg5GUj5Edo14SToLEos6JIsC3fN-v1FUgQoA,43
 cloudnetpy/cli.py,sha256=lHkeAErmAijI-Ugpd4DHRHfbZP4SXOake0LIY5Ovv_Q,20782
-cloudnetpy/cloudnetarray.py,sha256=XFyXZwR4QWPyo7WLmvsu7DEELZQp1vi5FZ8F7tX_tM0,7307
-cloudnetpy/concat_lib.py,sha256=jcLppqAmVHVkykcXBcpwUr8MS_k8v2Xl2xBLmVRE_DI,12624
+cloudnetpy/cloudnetarray.py,sha256=uOYgpQ8hHh5fuHyip1HjnhsEda9_7dg7orYnbCRkTtI,4796
+cloudnetpy/concat_lib.py,sha256=XQ5Sk8kfXqI0Q5HoomKWWhdZ1-m2thYDKGL7SKapITE,12851
 cloudnetpy/constants.py,sha256=YnoSzZm35NDooJfhlulSJBc7g0eSchT3yGytRaTaJEI,845
-cloudnetpy/datasource.py,sha256=FcWS77jz56gIzwnbafDLdj-HjAyu0P_VtY7gkeVZThU,7952
+cloudnetpy/datasource.py,sha256=Vx_I8S14nFAWKI0VbsW_-sllbVCRjTYxB7XH9b9PedQ,6268
 cloudnetpy/exceptions.py,sha256=hYbUtBwjCIfxnPe_5mELDEw87AWITBrwuo7WYIEKmJ8,1579
 cloudnetpy/metadata.py,sha256=lO7BCbVAzFoH3Nq-VuezYX0f7MnbG1Zp11g5GSiuQwM,6189
-cloudnetpy/output.py,sha256=l0LoOhcGCBrg2EJ4NT1xZ7-UKWdV7X7yQ0fJmhkwJVc,15829
+cloudnetpy/output.py,sha256=gupxt4f_-eUrFsWMto8tnknoV-p9QauC9L6CJAqBILU,15988
 cloudnetpy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cloudnetpy/utils.py,sha256=FFV4MRoiJfcbbknMD5DM-VesY-7yCb8jzeeadyyijLg,33532
-cloudnetpy/version.py,sha256=MBEzrrjK2C6qfoMHCRMYDIMl98CZIE1aPKuEZsjCPeo,72
+cloudnetpy/utils.py,sha256=HdcSNIgdxoZlP_jHl66ItheHLw_cDYZb7u6mZ5dfMNE,31952
+cloudnetpy/version.py,sha256=3xCJ6Zv7LkLahqubKZWS5YDwui01I7yk-Z-3oUGO9Us,72
 cloudnetpy/categorize/__init__.py,sha256=s-SJaysvVpVVo5kidiruWQO6p3gv2TXwY1wEHYO5D6I,44
 cloudnetpy/categorize/atmos_utils.py,sha256=RcmbKxm2COkE7WEya0mK3yX5rzUbrewRVh3ekm01RtM,10598
 cloudnetpy/categorize/attenuation.py,sha256=Y_-fzmQTltWTqIZTulJhovC7a6ifpMcaAazDJcnMIOc,990
@@ -20,13 +20,13 @@ cloudnetpy/categorize/disdrometer.py,sha256=sRSt2B932lrrkvycKoSaKEIaDVfq9Z7uU-4i
 cloudnetpy/categorize/droplet.py,sha256=t49KEsH5ZM68JQ4NvAf9kGgQ-evic1T4de2-jgJ2f4M,8683
 cloudnetpy/categorize/falling.py,sha256=lok0HMi1ewf9pS70mq62nRKL6wJzMyWbYmv1cdwrwnA,4404
 cloudnetpy/categorize/freezing.py,sha256=eSFD37R7vBrg7mgfSanrwhBjnFyWNBpjw2AtvRmSh48,3753
-cloudnetpy/categorize/insects.py,sha256=9J5agmktit8Or66GGNue-bThiaG9rB2SuPNZBXI7FCE,5243
+cloudnetpy/categorize/insects.py,sha256=MrxlWK-5JaMZxCBWFKR_6Kj5TAVXm-s9SVxsvcyNYJo,5253
 cloudnetpy/categorize/itu.py,sha256=ffXK27guyRS4d66VWQ2h4UEGjUIhGjPKbFmj7kh698c,10304
 cloudnetpy/categorize/lidar.py,sha256=YQrM_LOz8NQrrD9l9HyujV1GSGwkQ8LMqXN13bEJRW4,2605
 cloudnetpy/categorize/melting.py,sha256=ZnLeL_qWmiCdjXVOm9iBYHdo29Brqxu_DEErZPqUloQ,6217
 cloudnetpy/categorize/model.py,sha256=QFRCY0TvM2fzGRyP8BNkqbvu13XcQjt7TsN5fhjI_Uc,6654
 cloudnetpy/categorize/mwr.py,sha256=F7cquERWL6mBkgboqeaCIPf9gOlKI-NWUQIBdQXGT_I,1635
-cloudnetpy/categorize/radar.py,sha256=PmriTnrHbgZrau1RTNKpPI_-h5Uu0kGIMrMOaoMuROY,14821
+cloudnetpy/categorize/radar.py,sha256=z2bFF_wAKbzzXB3Pq1z33Y1RMv6NL-8CdZesU89qzpw,15502
 cloudnetpy/categorize/attenuations/__init__.py,sha256=CWFHVWeTIe2hrZtgkJaX2HGftbuffsFc39Mzv5B0Lw0,1037
 cloudnetpy/categorize/attenuations/gas_attenuation.py,sha256=emr-RCxQT0i2N8k6eBNhRsmsCBPHJzQsWJfjC4fVSTo,975
 cloudnetpy/categorize/attenuations/liquid_attenuation.py,sha256=0p0G79BPkw1itCXHMwbvkNHtJGBocJzow3gNHAirChI,3036
@@ -34,18 +34,18 @@ cloudnetpy/categorize/attenuations/melting_attenuation.py,sha256=9c9xoZHtGUbjFYJ
 cloudnetpy/categorize/attenuations/rain_attenuation.py,sha256=qazJzRyXf9vbjJhh4yiFmABI4L57j5W_6YZ-6qjRiBI,2839
 cloudnetpy/instruments/__init__.py,sha256=PEgrrQNoiOuN_ctYilmt4LV2QCLg1likPjJdWtuGlLs,528
 cloudnetpy/instruments/basta.py,sha256=Lb_EhQTI93S5Bd9osDbCE_tC8gZreRsHz7D2_dFOjmE,3793
-cloudnetpy/instruments/bowtie.py,sha256=jrEkOsZ4ScwmTN35rAwIbVHkHxQatFCHiuzBjVA4pJ0,3279
+cloudnetpy/instruments/bowtie.py,sha256=GlWCxemeXrIvWnnQRvZa1ttl4J0NmLUdc7xngIH5WBI,3281
 cloudnetpy/instruments/ceilo.py,sha256=qM3AkQKHUblhRCD42HsB6lr82giBH-0g_VzoWHZDgeA,9535
 cloudnetpy/instruments/ceilometer.py,sha256=ati9-fUQ54K9tvynIPB-nlBYwtvBVaQtUCjVCLNB67w,12059
 cloudnetpy/instruments/cl61d.py,sha256=g6DNBFju3wYhLFl32DKmC8pUup7y-EupXoUU0fuoGGA,1990
 cloudnetpy/instruments/cloudnet_instrument.py,sha256=SGPsRYYoGPoRoDY7hHJcKUVX0A23X0Telc00Fu01PnY,4495
-cloudnetpy/instruments/copernicus.py,sha256=99idcn6-iKOSvSslNjwFRng3gwlTLFjKPiT1tnVytpQ,6613
+cloudnetpy/instruments/copernicus.py,sha256=hCphEKyFCc3f1uLRdjL2435kuh64M5q-V1bI68bzGbA,6528
 cloudnetpy/instruments/fd12p.py,sha256=aGYpkczdSl7FSmK1bByMnpUBD5GAl7RTKkopt0cpWas,6822
-cloudnetpy/instruments/galileo.py,sha256=BjWE15_S3tTCOmAM5k--oicI3wghKaO0hv9EUBxtbl8,4830
+cloudnetpy/instruments/galileo.py,sha256=vcY7mYcGD8YtMw8ioy9CNGu5yarQlwE-vfWIRSbTQG0,4745
 cloudnetpy/instruments/hatpro.py,sha256=G1fHsY9LTos4vHP5kFubjE5Wg2uTVFZpYDSD8VAo-zw,9590
 cloudnetpy/instruments/instruments.py,sha256=z8Osjww3iQRxKvzXdISl-5vV6gShtji8Db5k-ZzDQ-0,4843
 cloudnetpy/instruments/lufft.py,sha256=nIoEKuuFGKq2dLqkX7zW-HpAifefG472tZhKfXE1yoA,4212
-cloudnetpy/instruments/mira.py,sha256=IH88dnV5fdAQ-A04S23ROgNmT4GBAtzXQxCr_9fWj-Q,11634
+cloudnetpy/instruments/mira.py,sha256=mH53Wpq3JnmHqDa2Bo0t6REEf8OQ4MjkHYm8AEM2_S4,11140
 cloudnetpy/instruments/mrr.py,sha256=eeAzCp3CiHGauywjwvMUAFwZ4vBOZMcd3IlF8KsrLQo,5711
 cloudnetpy/instruments/nc_lidar.py,sha256=5gQG9PApnNPrHmS9_zanl8HEYIQuGRpbnzC3wfTcOyQ,1705
 cloudnetpy/instruments/nc_radar.py,sha256=HlaZeH5939R86ukF8K-P4Kfzb5-CpLB15LU2u94C5eI,7330
@@ -68,7 +68,7 @@ cloudnetpy/model_evaluation/model_metadata.py,sha256=CxpY6RPm7GOTBBmPhcNVVpm9ate
 cloudnetpy/model_evaluation/utils.py,sha256=Z9VqYVdtY9yTr2JeVfBn4nccIVWCN5Fd-BCyB_qYI-A,154
 cloudnetpy/model_evaluation/plotting/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudnetpy/model_evaluation/plotting/plot_meta.py,sha256=K18Ugohh24uVAIxjZgJsmK80YwsMstm6B7ptVafONAw,3557
-cloudnetpy/model_evaluation/plotting/plot_tools.py,sha256=gV042W_AHidwPsRe2L57xdWbt3W-utcHMt_9FmfYK3M,5033
+cloudnetpy/model_evaluation/plotting/plot_tools.py,sha256=umI06tPIEs48cQ8GY8s3vGHwPcN--tUir4s1yxNQf64,5043
 cloudnetpy/model_evaluation/plotting/plotting.py,sha256=mGgSnQoRTh04v5RSJHsYPaqUEIR82eZqAuiszrh9rjY,31235
 cloudnetpy/model_evaluation/products/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudnetpy/model_evaluation/products/advance_methods.py,sha256=rng3ZLR1Arv1AGUzq0Ehu-65628PC5LZVKpHSUpCIW8,8526
@@ -93,7 +93,7 @@ cloudnetpy/model_evaluation/tests/e2e/process_lwc/main.py,sha256=IFcPj-Vce9Yn0Cf
 cloudnetpy/model_evaluation/tests/e2e/process_lwc/tests.py,sha256=ANBA0LVao3Xrm-prRnwUmxM6BdQzqM7GZNKB3uz5BXQ,1725
 cloudnetpy/model_evaluation/tests/unit/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 cloudnetpy/model_evaluation/tests/unit/conftest.py,sha256=WL_FgrDeoUYGp4PKjb37HLu79D9uu33PGQL40_ctqS0,7446
-cloudnetpy/model_evaluation/tests/unit/test_advance_methods.py,sha256=IkoAVtsWVFrPpFqQOLAPHKb9qgV-KjGGVEtWMudeiSo,10079
+cloudnetpy/model_evaluation/tests/unit/test_advance_methods.py,sha256=UEF94sBN8pPOyoz5ARag3mc5A8KFjyY_-IJFqdKBGug,10098
 cloudnetpy/model_evaluation/tests/unit/test_grid_methods.py,sha256=AEUXN5HBhKliPsSNGDCUtWOtIx6Y8iqkywb_-RfXYU0,26277
 cloudnetpy/model_evaluation/tests/unit/test_model_products.py,sha256=FRbYLshSHH2E527uJPwvUIyZKTsPFSZrwDsPsNrFSSU,3475
 cloudnetpy/model_evaluation/tests/unit/test_observation_products.py,sha256=DN3yVqq8vFYca_9POjcrJ8XViMrJks_jM-aQznfN8QQ,4936
@@ -117,10 +117,10 @@ cloudnetpy/products/lwc.py,sha256=sl6Al2tuH3KkCBrPbWTmuz3jlD5UQJ4D6qBsn1tt2CQ,18
 cloudnetpy/products/mie_lu_tables.nc,sha256=It4fYpqJXlqOgL8jeZ-PxGzP08PMrELIDVe55y9ob58,16637951
 cloudnetpy/products/mwr_tools.py,sha256=8HPZpQMTojKZP1JS1S83IE0sxmbDE9bxlaWoqmGnUZE,6199
 cloudnetpy/products/product_tools.py,sha256=uu4l6reuGbPcW3TgttbaSrqIKbyYGhBVTdnC7opKvmg,11101
-cloudnetpy-1.77.2.dist-info/licenses/LICENSE,sha256=wcZF72bdaoG9XugpyE95Juo7lBQOwLuTKBOhhtANZMM,1094
+cloudnetpy-1.78.1.dist-info/licenses/LICENSE,sha256=wcZF72bdaoG9XugpyE95Juo7lBQOwLuTKBOhhtANZMM,1094
 docs/source/conf.py,sha256=IKiFWw6xhUd8NrCg0q7l596Ck1d61XWeVjIFHVSG9Og,1490
-cloudnetpy-1.77.2.dist-info/METADATA,sha256=soDh6zzIkRMSYIeYp6NiOTksTWZX0zttG8MnWJuFjNs,5796
-cloudnetpy-1.77.2.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
-cloudnetpy-1.77.2.dist-info/entry_points.txt,sha256=HhY7LwCFk4qFgDlXx_Fy983ZTd831WlhtdPIzV-Y3dY,51
-cloudnetpy-1.77.2.dist-info/top_level.txt,sha256=ibSPWRr6ojS1i11rtBFz2_gkIe68mggj7aeswYfaOo0,16
-cloudnetpy-1.77.2.dist-info/RECORD,,
+cloudnetpy-1.78.1.dist-info/METADATA,sha256=AznJgGBfGvD7cbWAieBBM5xVu3Wn9GQebLBXaAVFJUE,5796
+cloudnetpy-1.78.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+cloudnetpy-1.78.1.dist-info/entry_points.txt,sha256=HhY7LwCFk4qFgDlXx_Fy983ZTd831WlhtdPIzV-Y3dY,51
+cloudnetpy-1.78.1.dist-info/top_level.txt,sha256=ibSPWRr6ojS1i11rtBFz2_gkIe68mggj7aeswYfaOo0,16
+cloudnetpy-1.78.1.dist-info/RECORD,,
cloudnetpy-1.78.1.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.8.0)
+Generator: setuptools (80.9.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 