cloudnetpy 1.77.1__py3-none-any.whl → 1.78.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
cloudnetpy/concat_lib.py CHANGED
@@ -9,9 +9,9 @@ from typing import Literal
 
 import netCDF4
 import numpy as np
+from numpy import ma
 
 from cloudnetpy import utils
-from cloudnetpy.exceptions import InconsistentDataError
 
 
 def truncate_netcdf_file(
@@ -89,7 +89,7 @@ def concatenate_files(
     variables: list | None = None,
     new_attributes: dict | None = None,
     ignore: list | None = None,
-    allow_difference: list | None = None,
+    interp_dimension: str = "range",
 ) -> list:
     """Concatenate netCDF files in one dimension.
 
@@ -101,22 +101,21 @@
             Default is None when all variables with 'concat_dimension' will be saved.
         new_attributes: Optional new global attributes as {'attribute_name': value}.
         ignore: List of variables to be ignored.
-        allow_difference: Names of scalar variables that can differ from one file to
-            another (value from the first file is saved).
+        interp_dimension: Dimension name for interpolation if the dimensions
+            are not the same.
 
     Returns:
         List of filenames that were successfully concatenated.
 
     Notes:
-        Arrays without 'concat_dimension', scalars, and global attributes will be taken
-        from the first file. Groups, possibly present in a NETCDF4 formatted file,
-        are ignored.
+        Arrays without 'concat_dimension' and scalars are expanded to the
+        concat_dimension. Global attributes are taken from the first file.
+        Groups, possibly present in a NETCDF4 formatted file, are ignored.
 
     """
-    with _Concat(filenames, output_file, concat_dimension) as concat:
-        concat.get_common_variables()
+    with _Concat(filenames, output_file, concat_dimension, interp_dimension) as concat:
         concat.create_global_attributes(new_attributes)
-        return concat.concat_data(variables, ignore, allow_difference)
+        return concat.concat_data(variables, ignore)
 
 
 class _Concat:
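For context, `allow_difference` is gone and `interp_dimension` takes its place in the public API. A minimal usage sketch of the new signature (file names are hypothetical):

    from cloudnetpy import concat_lib

    # Files whose "range" grid differs from the first file are now regridded
    # onto it by nearest-neighbour lookup instead of being rejected.
    concatenated = concat_lib.concatenate_files(
        ["radar_00.nc", "radar_12.nc"],  # hypothetical daily input files
        "radar_day.nc",
        concat_dimension="time",
        interp_dimension="range",
    )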
@@ -127,19 +126,14 @@ class _Concat:
         filenames: Iterable[PathLike | str],
         output_file: str,
         concat_dimension: str = "time",
+        interp_dim: str = "range",
     ):
         self.filenames = sorted(map(Path, filenames), key=lambda f: f.name)
         self.concat_dimension = concat_dimension
+        self.interp_dim = interp_dim
         self.first_filename = self.filenames[0]
         self.first_file = netCDF4.Dataset(self.first_filename)
         self.concatenated_file = self._init_output_file(output_file)
-        self.common_variables = set()
-
-    def get_common_variables(self) -> None:
-        """Finds variables which should have the same values in all files."""
-        for key, value in self.first_file.variables.items():
-            if self.concat_dimension not in value.dimensions:
-                self.common_variables.add(key)
 
     def create_global_attributes(self, new_attributes: dict | None) -> None:
         """Copies global attributes from one of the source files."""
@@ -150,17 +144,16 @@
 
     def concat_data(
         self,
-        variables: list | None,
-        ignore: list | None,
-        allow_vary: list | None,
+        keep: list | None = None,
+        ignore: list | None = None,
     ) -> list:
         """Concatenates data arrays."""
-        self._write_initial_data(variables, ignore)
+        self._write_initial_data(keep, ignore)
         output = [self.first_filename]
         if len(self.filenames) > 1:
             for filename in self.filenames[1:]:
                 try:
-                    self._append_data(filename, allow_vary)
+                    self._append_data(filename)
                 except RuntimeError as e:
                     if "NetCDF: HDF error" in str(e):
                         msg = f"Caught a NetCDF HDF error. Skipping file '{filename}'."
@@ -170,24 +163,28 @@
                 output.append(filename)
         return output
 
-    def _write_initial_data(self, variables: list | None, ignore: list | None) -> None:
-        for key in self.first_file.variables:
+    def _write_initial_data(self, keep: list | None, ignore: list | None) -> None:
+        len_concat_dim = self.first_file[self.concat_dimension].size
+        auto_scale = False
+
+        for key, var in self.first_file.variables.items():
             if (
-                variables is not None
-                and key not in variables
-                and key not in self.common_variables
+                # This filtering only affects variables having the concat_dimension
+                keep is not None
+                and key not in keep
                 and key != self.concat_dimension
+                and self.concat_dimension in var.dimensions
             ):
                 continue
             if ignore and key in ignore:
                 continue
 
-            auto_scale = False
-            self.first_file[key].set_auto_scale(auto_scale)
-            array = self.first_file[key][:]
-            dimensions = self.first_file[key].dimensions
-            fill_value = getattr(self.first_file[key], "_FillValue", None)
-            var = self.concatenated_file.createVariable(
+            var.set_auto_scale(auto_scale)
+            array, dimensions = self._expand_array(var, len_concat_dim)
+
+            fill_value = var.get_fill_value()
+
+            var_new = self.concatenated_file.createVariable(
                 key,
                 array.dtype,
                 dimensions,
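The `_expand_array` helper introduced in the next hunk is what makes the filtering above work: variables lacking the concat dimension are no longer tracked as invariant "common variables" but are tiled along a new leading axis. A standalone sketch of that expansion with illustrative values:

    import numpy as np

    arr = np.array([1.5, 2.5, 3.5])   # e.g. a per-chirp array without a time axis
    n_data = 4                        # current length of the concat dimension
    expanded = np.repeat(arr[np.newaxis, ...], n_data, axis=0)
    print(expanded.shape)             # (4, 3): now indexable per time step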
@@ -196,37 +193,49 @@
                 shuffle=False,
                 fill_value=fill_value,
             )
-            auto_scale = False
-            var.set_auto_scale(auto_scale)
-            var[:] = array
-            _copy_attributes(self.first_file[key], var)
-
-    def _append_data(self, filename: str | PathLike, allow_vary: list | None) -> None:
+            var_new.set_auto_scale(auto_scale)
+            var_new[:] = array
+            _copy_attributes(var, var_new)
+
+    def _expand_array(
+        self, var: netCDF4.Variable, n_data: int
+    ) -> tuple[ma.MaskedArray, tuple[str, ...]]:
+        dimensions = var.dimensions
+        arr = var[:]
+        if self.concat_dimension not in dimensions and var.name != self.interp_dim:
+            dimensions = (self.concat_dimension, *dimensions)
+            arr = np.repeat(arr[np.newaxis, ...], n_data, axis=0)
+
+        return arr, dimensions
+
+    def _append_data(self, filename: str | PathLike) -> None:
         with netCDF4.Dataset(filename) as file:
             auto_scale = False
             file.set_auto_scale(auto_scale)
             ind0 = len(self.concatenated_file.variables[self.concat_dimension])
             ind1 = ind0 + len(file.variables[self.concat_dimension])
+            n_points = ind1 - ind0
+
             for key in self.concatenated_file.variables:
-                if key not in file.variables:
-                    continue
-                array = file[key][:]
-                if key in self.common_variables:
-                    if allow_vary is not None and key in allow_vary:
-                        continue
-                    if not np.array_equal(self.first_file[key][:], array):
-                        msg = (
-                            f"Inconsistent values in variable '{key}' between "
-                            f"files '{self.first_filename}' and '{filename}'"
-                        )
-                        raise InconsistentDataError(msg)
+                if key not in file.variables or key == self.interp_dim:
                     continue
-                if array.ndim == 0:
-                    continue
-                if array.ndim == 1:
-                    self.concatenated_file.variables[key][ind0:ind1] = array
-                else:
-                    self.concatenated_file.variables[key][ind0:ind1, :] = array
+
+                array, dimensions = self._expand_array(file[key], n_points)
+
+                # Nearest neighbour interpolation in the interp_dim dimension
+                # if the dimensions are not the same between the files
+                if self.interp_dim in dimensions and (
+                    self.first_file[self.interp_dim].size != file[self.interp_dim].size
+                ):
+                    x = file.variables[self.interp_dim][:]
+                    x_target = self.first_file.variables[self.interp_dim][:]
+                    idx = np.abs(x[:, None] - x_target[None, :]).argmin(axis=0)
+                    array = array[:, idx]
+                    out_of_bounds = (x_target < x.min()) | (x_target > x.max())
+                    fill_value = self.first_file.variables[key].get_fill_value()
+                    array[:, out_of_bounds] = fill_value
+
+                self.concatenated_file.variables[key][ind0:ind1, ...] = array
 
     def _init_output_file(self, output_file: str) -> netCDF4.Dataset:
         data_model: Literal["NETCDF4", "NETCDF4_CLASSIC"] = (
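The nearest-neighbour regridding added to `_append_data` above, shown in isolation (grids and data are synthetic; in the real code `x_target` comes from the first file and the fill value from the target variable):

    import numpy as np

    x = np.array([0.0, 100.0, 200.0])               # range grid of the appended file
    x_target = np.array([0.0, 50.0, 100.0, 250.0])  # range grid of the first file
    array = np.arange(6.0).reshape(2, 3)            # (time, range) data on grid x

    idx = np.abs(x[:, None] - x_target[None, :]).argmin(axis=0)
    array = array[:, idx]                           # nearest source gate per target gate
    out_of_bounds = (x_target < x.min()) | (x_target > x.max())
    array[:, out_of_bounds] = -999.0                # hypothetical fill value
    print(array)                                    # last column is filled: 250 m is outside x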
cloudnetpy/instruments/bowtie.py CHANGED
@@ -1,5 +1,7 @@
 from os import PathLike
 
+import numpy as np
+
 from cloudnetpy import output
 from cloudnetpy.constants import G_TO_KG, MM_H_TO_M_S
 from cloudnetpy.exceptions import ValidTimeStampError
@@ -59,6 +61,7 @@ def bowtie2nc(
         bowtie.add_site_geolocation()
         bowtie.add_height()
         bowtie.convert_units()
+        bowtie.fix_chirp_start_indices()
         bowtie.test_if_all_masked()
         attributes = output.add_time_attribute(ATTRIBUTES, bowtie.date)
         output.update_attributes(bowtie.data, attributes)
@@ -76,6 +79,11 @@ class Bowtie(NcRadar):
         self.data["rainfall_rate"].data *= MM_H_TO_M_S
         self.data["relative_humidity"].data /= 100
 
+    def fix_chirp_start_indices(self) -> None:
+        array = self.data["chirp_start_indices"].data
+        self.data["chirp_start_indices"].data = np.array(array, dtype=np.int32)
+        self.data["chirp_start_indices"].data_type = "int32"
+
     def check_date(self, date: str):
         if "-".join(self.date) != date:
             raise ValidTimeStampError
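`fix_chirp_start_indices` above only forces an integer dtype on values that may arrive as floats; the cast in isolation:

    import numpy as np

    array = np.array([0.0, 128.0, 256.0])    # chirp start gates read in as floats
    fixed = np.array(array, dtype=np.int32)  # same cast as in fix_chirp_start_indices
    print(fixed.dtype, fixed)                # int32 [  0 128 256]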
cloudnetpy/instruments/copernicus.py CHANGED
@@ -69,7 +69,6 @@ def copernicus2nc(
         valid_filenames = utils.get_files_with_variables(
             valid_filenames, ["time", "ZED_HC"]
         )
-        valid_filenames = utils.get_files_with_common_range(valid_filenames)
         variables = list(keymap.keys())
         concat_lib.concatenate_files(
             valid_filenames,
cloudnetpy/instruments/galileo.py CHANGED
@@ -68,7 +68,6 @@ def galileo2nc(
         valid_filenames = utils.get_files_with_variables(
             valid_filenames, ["time", "ZED_HC"]
         )
-        valid_filenames = utils.get_files_with_common_range(valid_filenames)
         variables = list(keymap.keys())
         concat_lib.concatenate_files(
             valid_filenames,
cloudnetpy/instruments/mira.py CHANGED
@@ -193,7 +193,6 @@ def _parse_input_files(input_files: str | list[str], temp_dir: str) -> tuple:
         )
         raise FileNotFoundError(msg)
 
-    valid_files = utils.get_files_with_common_range(valid_files)
     filetypes = list({f.split(".")[-1].lower() for f in valid_files})
 
     if len(filetypes) > 1:
@@ -208,15 +207,6 @@ def _parse_input_files(input_files: str | list[str], temp_dir: str) -> tuple:
             input_filename,
             variables=variables,
             ignore=_get_ignored_variables(filetypes[0]),
-            # It's somewhat risky to use varying nfft values as the velocity
-            # resolution may differ, but this enables concatenation when switching
-            # between different nfft configurations. Spectral data is ignored
-            # anyway for now.
-            allow_difference=[
-                "nave",
-                "ovl",
-                "nfft",
-            ],
         )
     else:
         input_filename = input_files
cloudnetpy/output.py CHANGED
@@ -57,7 +57,11 @@ def _get_netcdf_dimensions(obj) -> dict:
     }
     # RPG cloud radar
    if "chirp_start_indices" in obj.data:
-        dimensions["chirp_sequence"] = len(obj.data["chirp_start_indices"][:])
+        if obj.data["chirp_start_indices"][:].ndim == 1:
+            dimensions["chirp_start_indices"] = len(obj.data["chirp_start_indices"][:])
+        else:
+            dimensions["chirp"] = obj.data["chirp_start_indices"][:].shape[1]
+
     # disdrometer
     if hasattr(obj, "n_diameter") and hasattr(obj, "n_velocity"):
         dimensions["diameter"] = obj.n_diameter
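A sketch of the new branching in `_get_netcdf_dimensions` with a hypothetical stand-in for the data object: a 1-D array keeps the old per-sequence length, while a 2-D (time, chirp) array yields a "chirp" dimension from its second axis:

    import numpy as np

    chirp_start_indices = np.array([[0, 128], [0, 128]])  # 2-D case: (time, chirp)

    dimensions = {}
    if chirp_start_indices.ndim == 1:
        dimensions["chirp_start_indices"] = len(chirp_start_indices)
    else:
        dimensions["chirp"] = chirp_start_indices.shape[1]
    print(dimensions)  # {'chirp': 2}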
cloudnetpy/utils.py CHANGED
@@ -826,6 +826,8 @@ def get_epoch(units: str) -> Epoch:
     except IndexError:
         return fallback
     date = date.replace(",", "")
+    if "T" in date:
+        date = date[: date.index("T")]
     try:
         date_components = [int(x) for x in date.split("-")]
     except ValueError:
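The `get_epoch` change strips an ISO 8601 time part before the date is split, so units such as "seconds since 2020-01-01T00:00:00" now parse; the new lines in isolation:

    date = "2020-01-01T00:00:00"  # date part of a hypothetical units string
    if "T" in date:
        date = date[: date.index("T")]
    print([int(x) for x in date.split("-")])  # [2020, 1, 1]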
@@ -958,21 +960,6 @@ def get_file_type(filename: str) -> str:
     raise ValueError(msg)
 
 
-def get_files_with_common_range(filenames: list) -> list:
-    """Returns files with the same (most common) number of range gates."""
-    n_range = []
-    for file in filenames:
-        with netCDF4.Dataset(file) as nc:
-            n_range.append(len(nc.variables["range"]))
-    most_common = np.bincount(n_range).argmax()
-    n_removed = len(filenames) - n_range.count(int(most_common))
-    if n_removed > 0:
-        logging.warning(
-            "Removing %s files due to inconsistent height vector", n_removed
-        )
-    return [file for i, file in enumerate(filenames) if n_range[i] == most_common]
-
-
 def get_files_with_variables(filenames: list, variables: list[str]) -> list:
     """Returns files where all variables exist."""
     valid_files = []
cloudnetpy/version.py CHANGED
@@ -1,4 +1,4 @@
 MAJOR = 1
-MINOR = 77
-PATCH = 1
+MINOR = 78
+PATCH = 0
 __version__ = f"{MAJOR}.{MINOR}.{PATCH}"
cloudnetpy-1.77.1.dist-info/METADATA → cloudnetpy-1.78.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: cloudnetpy
-Version: 1.77.1
+Version: 1.78.0
 Summary: Python package for Cloudnet processing
 Author: Simo Tukiainen
 License: MIT License
cloudnetpy-1.77.1.dist-info/RECORD → cloudnetpy-1.78.0.dist-info/RECORD RENAMED
@@ -1,15 +1,15 @@
 cloudnetpy/__init__.py,sha256=X_FqY-4yg5GUj5Edo14SToLEos6JIsC3fN-v1FUgQoA,43
 cloudnetpy/cli.py,sha256=lHkeAErmAijI-Ugpd4DHRHfbZP4SXOake0LIY5Ovv_Q,20782
 cloudnetpy/cloudnetarray.py,sha256=XFyXZwR4QWPyo7WLmvsu7DEELZQp1vi5FZ8F7tX_tM0,7307
-cloudnetpy/concat_lib.py,sha256=jcLppqAmVHVkykcXBcpwUr8MS_k8v2Xl2xBLmVRE_DI,12624
+cloudnetpy/concat_lib.py,sha256=XQ5Sk8kfXqI0Q5HoomKWWhdZ1-m2thYDKGL7SKapITE,12851
 cloudnetpy/constants.py,sha256=YnoSzZm35NDooJfhlulSJBc7g0eSchT3yGytRaTaJEI,845
 cloudnetpy/datasource.py,sha256=FcWS77jz56gIzwnbafDLdj-HjAyu0P_VtY7gkeVZThU,7952
 cloudnetpy/exceptions.py,sha256=hYbUtBwjCIfxnPe_5mELDEw87AWITBrwuo7WYIEKmJ8,1579
 cloudnetpy/metadata.py,sha256=lO7BCbVAzFoH3Nq-VuezYX0f7MnbG1Zp11g5GSiuQwM,6189
-cloudnetpy/output.py,sha256=l0LoOhcGCBrg2EJ4NT1xZ7-UKWdV7X7yQ0fJmhkwJVc,15829
+cloudnetpy/output.py,sha256=gupxt4f_-eUrFsWMto8tnknoV-p9QauC9L6CJAqBILU,15988
 cloudnetpy/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-cloudnetpy/utils.py,sha256=SSZWk82c4nkAiTcLdOKGVvxt5ovETdAMn_TLxVeYpBY,33473
-cloudnetpy/version.py,sha256=Uo3e3P2jzk9JhxwD31lPkG8Gt8Ya8hk34uy1zDz-P3o,72
+cloudnetpy/utils.py,sha256=O57jqUX61V_Jodrs_04Js9cmTUbK0cymW9NcHzKVrx0,32926
+cloudnetpy/version.py,sha256=Umza9M-fEEsgi9FrAC0XLoVQcbh-gNPHmP_sNLxDOD4,72
 cloudnetpy/categorize/__init__.py,sha256=s-SJaysvVpVVo5kidiruWQO6p3gv2TXwY1wEHYO5D6I,44
 cloudnetpy/categorize/atmos_utils.py,sha256=RcmbKxm2COkE7WEya0mK3yX5rzUbrewRVh3ekm01RtM,10598
 cloudnetpy/categorize/attenuation.py,sha256=Y_-fzmQTltWTqIZTulJhovC7a6ifpMcaAazDJcnMIOc,990
@@ -34,18 +34,18 @@ cloudnetpy/categorize/attenuations/melting_attenuation.py,sha256=9c9xoZHtGUbjFYJ
 cloudnetpy/categorize/attenuations/rain_attenuation.py,sha256=qazJzRyXf9vbjJhh4yiFmABI4L57j5W_6YZ-6qjRiBI,2839
 cloudnetpy/instruments/__init__.py,sha256=PEgrrQNoiOuN_ctYilmt4LV2QCLg1likPjJdWtuGlLs,528
 cloudnetpy/instruments/basta.py,sha256=Lb_EhQTI93S5Bd9osDbCE_tC8gZreRsHz7D2_dFOjmE,3793
-cloudnetpy/instruments/bowtie.py,sha256=Hp4mzjGqvYw5bhgAy_LvScYrf3Xm3ULbtPjhG9GnAJ8,2977
+cloudnetpy/instruments/bowtie.py,sha256=GlWCxemeXrIvWnnQRvZa1ttl4J0NmLUdc7xngIH5WBI,3281
 cloudnetpy/instruments/ceilo.py,sha256=qM3AkQKHUblhRCD42HsB6lr82giBH-0g_VzoWHZDgeA,9535
 cloudnetpy/instruments/ceilometer.py,sha256=ati9-fUQ54K9tvynIPB-nlBYwtvBVaQtUCjVCLNB67w,12059
 cloudnetpy/instruments/cl61d.py,sha256=g6DNBFju3wYhLFl32DKmC8pUup7y-EupXoUU0fuoGGA,1990
 cloudnetpy/instruments/cloudnet_instrument.py,sha256=SGPsRYYoGPoRoDY7hHJcKUVX0A23X0Telc00Fu01PnY,4495
-cloudnetpy/instruments/copernicus.py,sha256=99idcn6-iKOSvSslNjwFRng3gwlTLFjKPiT1tnVytpQ,6613
+cloudnetpy/instruments/copernicus.py,sha256=hCphEKyFCc3f1uLRdjL2435kuh64M5q-V1bI68bzGbA,6528
 cloudnetpy/instruments/fd12p.py,sha256=aGYpkczdSl7FSmK1bByMnpUBD5GAl7RTKkopt0cpWas,6822
-cloudnetpy/instruments/galileo.py,sha256=BjWE15_S3tTCOmAM5k--oicI3wghKaO0hv9EUBxtbl8,4830
+cloudnetpy/instruments/galileo.py,sha256=vcY7mYcGD8YtMw8ioy9CNGu5yarQlwE-vfWIRSbTQG0,4745
 cloudnetpy/instruments/hatpro.py,sha256=G1fHsY9LTos4vHP5kFubjE5Wg2uTVFZpYDSD8VAo-zw,9590
 cloudnetpy/instruments/instruments.py,sha256=z8Osjww3iQRxKvzXdISl-5vV6gShtji8Db5k-ZzDQ-0,4843
 cloudnetpy/instruments/lufft.py,sha256=nIoEKuuFGKq2dLqkX7zW-HpAifefG472tZhKfXE1yoA,4212
-cloudnetpy/instruments/mira.py,sha256=IH88dnV5fdAQ-A04S23ROgNmT4GBAtzXQxCr_9fWj-Q,11634
+cloudnetpy/instruments/mira.py,sha256=mH53Wpq3JnmHqDa2Bo0t6REEf8OQ4MjkHYm8AEM2_S4,11140
 cloudnetpy/instruments/mrr.py,sha256=eeAzCp3CiHGauywjwvMUAFwZ4vBOZMcd3IlF8KsrLQo,5711
 cloudnetpy/instruments/nc_lidar.py,sha256=5gQG9PApnNPrHmS9_zanl8HEYIQuGRpbnzC3wfTcOyQ,1705
 cloudnetpy/instruments/nc_radar.py,sha256=HlaZeH5939R86ukF8K-P4Kfzb5-CpLB15LU2u94C5eI,7330
@@ -117,10 +117,10 @@ cloudnetpy/products/lwc.py,sha256=sl6Al2tuH3KkCBrPbWTmuz3jlD5UQJ4D6qBsn1tt2CQ,18
 cloudnetpy/products/mie_lu_tables.nc,sha256=It4fYpqJXlqOgL8jeZ-PxGzP08PMrELIDVe55y9ob58,16637951
 cloudnetpy/products/mwr_tools.py,sha256=8HPZpQMTojKZP1JS1S83IE0sxmbDE9bxlaWoqmGnUZE,6199
 cloudnetpy/products/product_tools.py,sha256=uu4l6reuGbPcW3TgttbaSrqIKbyYGhBVTdnC7opKvmg,11101
-cloudnetpy-1.77.1.dist-info/licenses/LICENSE,sha256=wcZF72bdaoG9XugpyE95Juo7lBQOwLuTKBOhhtANZMM,1094
+cloudnetpy-1.78.0.dist-info/licenses/LICENSE,sha256=wcZF72bdaoG9XugpyE95Juo7lBQOwLuTKBOhhtANZMM,1094
 docs/source/conf.py,sha256=IKiFWw6xhUd8NrCg0q7l596Ck1d61XWeVjIFHVSG9Og,1490
-cloudnetpy-1.77.1.dist-info/METADATA,sha256=DB4u2OZnOarp7s73THnhEZxmmNq-czSCPTvP3EW7KPc,5796
-cloudnetpy-1.77.1.dist-info/WHEEL,sha256=Nw36Djuh_5VDukK0H78QzOX-_FQEo6V37m3nkm96gtU,91
-cloudnetpy-1.77.1.dist-info/entry_points.txt,sha256=HhY7LwCFk4qFgDlXx_Fy983ZTd831WlhtdPIzV-Y3dY,51
-cloudnetpy-1.77.1.dist-info/top_level.txt,sha256=ibSPWRr6ojS1i11rtBFz2_gkIe68mggj7aeswYfaOo0,16
-cloudnetpy-1.77.1.dist-info/RECORD,,
+cloudnetpy-1.78.0.dist-info/METADATA,sha256=4CkmH9P2aO02UADla12HiMWCqnPts65FVD6PrZYBre4,5796
+cloudnetpy-1.78.0.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+cloudnetpy-1.78.0.dist-info/entry_points.txt,sha256=HhY7LwCFk4qFgDlXx_Fy983ZTd831WlhtdPIzV-Y3dY,51
+cloudnetpy-1.78.0.dist-info/top_level.txt,sha256=ibSPWRr6ojS1i11rtBFz2_gkIe68mggj7aeswYfaOo0,16
+cloudnetpy-1.78.0.dist-info/RECORD,,
cloudnetpy-1.77.1.dist-info/WHEEL → cloudnetpy-1.78.0.dist-info/WHEEL RENAMED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.7.1)
+Generator: setuptools (80.8.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 