cloudnetpy 1.80.8__py3-none-any.whl → 1.81.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. cloudnetpy/categorize/__init__.py +1 -1
  2. cloudnetpy/categorize/atmos_utils.py +31 -27
  3. cloudnetpy/categorize/attenuations/__init__.py +4 -4
  4. cloudnetpy/categorize/attenuations/liquid_attenuation.py +7 -5
  5. cloudnetpy/categorize/attenuations/melting_attenuation.py +3 -3
  6. cloudnetpy/categorize/attenuations/rain_attenuation.py +4 -4
  7. cloudnetpy/categorize/categorize.py +25 -11
  8. cloudnetpy/categorize/classify.py +9 -8
  9. cloudnetpy/categorize/containers.py +13 -10
  10. cloudnetpy/categorize/disdrometer.py +5 -3
  11. cloudnetpy/categorize/droplet.py +12 -9
  12. cloudnetpy/categorize/falling.py +9 -8
  13. cloudnetpy/categorize/freezing.py +10 -7
  14. cloudnetpy/categorize/insects.py +18 -17
  15. cloudnetpy/categorize/lidar.py +7 -3
  16. cloudnetpy/categorize/melting.py +16 -15
  17. cloudnetpy/categorize/model.py +17 -10
  18. cloudnetpy/categorize/mwr.py +5 -3
  19. cloudnetpy/categorize/radar.py +15 -13
  20. cloudnetpy/cli.py +10 -8
  21. cloudnetpy/cloudnetarray.py +8 -7
  22. cloudnetpy/concat_lib.py +29 -20
  23. cloudnetpy/datasource.py +26 -21
  24. cloudnetpy/exceptions.py +12 -10
  25. cloudnetpy/instruments/basta.py +19 -9
  26. cloudnetpy/instruments/bowtie.py +18 -11
  27. cloudnetpy/instruments/ceilo.py +22 -10
  28. cloudnetpy/instruments/ceilometer.py +33 -34
  29. cloudnetpy/instruments/cl61d.py +5 -3
  30. cloudnetpy/instruments/cloudnet_instrument.py +7 -7
  31. cloudnetpy/instruments/copernicus.py +16 -7
  32. cloudnetpy/instruments/disdrometer/common.py +5 -4
  33. cloudnetpy/instruments/disdrometer/parsivel.py +14 -9
  34. cloudnetpy/instruments/disdrometer/thies.py +11 -7
  35. cloudnetpy/instruments/fd12p.py +7 -6
  36. cloudnetpy/instruments/galileo.py +16 -7
  37. cloudnetpy/instruments/hatpro.py +33 -24
  38. cloudnetpy/instruments/lufft.py +6 -4
  39. cloudnetpy/instruments/mira.py +33 -19
  40. cloudnetpy/instruments/mrr.py +12 -12
  41. cloudnetpy/instruments/nc_lidar.py +1 -1
  42. cloudnetpy/instruments/nc_radar.py +8 -8
  43. cloudnetpy/instruments/pollyxt.py +19 -12
  44. cloudnetpy/instruments/radiometrics.py +17 -10
  45. cloudnetpy/instruments/rain_e_h3.py +9 -5
  46. cloudnetpy/instruments/rpg.py +32 -21
  47. cloudnetpy/instruments/rpg_reader.py +15 -12
  48. cloudnetpy/instruments/vaisala.py +32 -24
  49. cloudnetpy/instruments/weather_station.py +28 -21
  50. cloudnetpy/model_evaluation/file_handler.py +27 -29
  51. cloudnetpy/model_evaluation/plotting/plot_tools.py +7 -5
  52. cloudnetpy/model_evaluation/plotting/plotting.py +41 -32
  53. cloudnetpy/model_evaluation/products/advance_methods.py +38 -34
  54. cloudnetpy/model_evaluation/products/grid_methods.py +10 -9
  55. cloudnetpy/model_evaluation/products/model_products.py +15 -9
  56. cloudnetpy/model_evaluation/products/observation_products.py +12 -10
  57. cloudnetpy/model_evaluation/products/product_resampling.py +11 -7
  58. cloudnetpy/model_evaluation/products/tools.py +18 -14
  59. cloudnetpy/model_evaluation/statistics/statistical_methods.py +6 -5
  60. cloudnetpy/model_evaluation/tests/unit/test_plotting.py +18 -25
  61. cloudnetpy/model_evaluation/utils.py +3 -3
  62. cloudnetpy/output.py +15 -32
  63. cloudnetpy/plotting/plotting.py +22 -12
  64. cloudnetpy/products/classification.py +15 -9
  65. cloudnetpy/products/der.py +24 -19
  66. cloudnetpy/products/drizzle.py +21 -13
  67. cloudnetpy/products/drizzle_error.py +8 -7
  68. cloudnetpy/products/drizzle_tools.py +27 -23
  69. cloudnetpy/products/epsilon.py +6 -5
  70. cloudnetpy/products/ier.py +11 -5
  71. cloudnetpy/products/iwc.py +18 -9
  72. cloudnetpy/products/lwc.py +41 -31
  73. cloudnetpy/products/mwr_tools.py +30 -19
  74. cloudnetpy/products/product_tools.py +23 -19
  75. cloudnetpy/utils.py +84 -98
  76. cloudnetpy/version.py +2 -2
  77. {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/METADATA +3 -2
  78. cloudnetpy-1.81.1.dist-info/RECORD +126 -0
  79. cloudnetpy-1.80.8.dist-info/RECORD +0 -126
  80. {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/WHEEL +0 -0
  81. {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/entry_points.txt +0 -0
  82. {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/licenses/LICENSE +0 -0
  83. {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/top_level.txt +0 -0
cloudnetpy/instruments/rain_e_h3.py
@@ -1,6 +1,7 @@
 import csv
 import datetime
 from os import PathLike
+from uuid import UUID

 import numpy as np

@@ -8,15 +9,16 @@ from cloudnetpy import output
 from cloudnetpy.exceptions import ValidTimeStampError
 from cloudnetpy.instruments import instruments
 from cloudnetpy.instruments.cloudnet_instrument import CSVFile
+from cloudnetpy.utils import get_uuid


 def rain_e_h32nc(
     input_file: str | PathLike,
-    output_file: str,
+    output_file: str | PathLike,
     site_meta: dict,
-    uuid: str | None = None,
+    uuid: str | UUID | None = None,
     date: str | datetime.date | None = None,
-):
+) -> UUID:
     """Converts rain_e_h3 rain-gauge into Cloudnet Level 1b netCDF file.

     Args:
@@ -36,6 +38,7 @@ def rain_e_h32nc(
     rain = RainEH3(site_meta)
     if isinstance(date, str):
         date = datetime.date.fromisoformat(date)
+    uuid = get_uuid(uuid)
     rain.parse_input_file(input_file, date)
     rain.add_data()
     rain.add_date()
@@ -46,11 +49,12 @@ def rain_e_h32nc(
     rain.remove_duplicate_timestamps()
     attributes = output.add_time_attribute({}, rain.date)
     output.update_attributes(rain.data, attributes)
-    return output.save_level1b(rain, output_file, uuid)
+    output.save_level1b(rain, output_file, uuid)
+    return uuid


 class RainEH3(CSVFile):
-    def __init__(self, site_meta: dict):
+    def __init__(self, site_meta: dict) -> None:
         super().__init__(site_meta)
         self.instrument = instruments.RAIN_E_H3
         self._data = {
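Note on the rain_e_h32nc changes above: the converter now accepts PathLike paths, normalises the uuid argument with cloudnetpy.utils.get_uuid, and returns a uuid.UUID instead of a string. A minimal usage sketch of the new signature; the file names and site_meta values below are placeholders, not taken from the package:

    from pathlib import Path
    from cloudnetpy.instruments.rain_e_h3 import rain_e_h32nc

    # Placeholder site metadata; real deployments supply their own values.
    site_meta = {"name": "Example site", "latitude": 0.0, "longitude": 0.0, "altitude": 0.0}

    file_uuid = rain_e_h32nc(
        Path("rain_e_h3_20240101.csv"),  # hypothetical input file
        Path("rain_e_h3_20240101.nc"),   # output_file may now be a PathLike
        site_meta,
        date="2024-01-01",  # ISO strings are still parsed with date.fromisoformat
    )
    print(file_uuid)  # uuid.UUID rather than str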
cloudnetpy/instruments/rpg.py
@@ -1,10 +1,14 @@
 """This module contains RPG Cloud Radar related functions."""

+import datetime
 import logging
 import math
-from collections.abc import Sequence
+from collections.abc import Callable, Sequence
+from os import PathLike
+from uuid import UUID

 import numpy as np
+import numpy.typing as npt
 from numpy import ma
 from rpgpy import RPGFileError

@@ -20,12 +24,12 @@ from cloudnetpy.metadata import MetaData


 def rpg2nc(
-    path_to_l1_files: str,
-    output_file: str,
+    path_to_l1_files: str | PathLike,
+    output_file: str | PathLike,
     site_meta: dict,
-    uuid: str | None = None,
-    date: str | None = None,
-) -> tuple[str, list]:
+    uuid: str | UUID | None = None,
+    date: str | datetime.date | None = None,
+) -> tuple[UUID, list[str]]:
     """Converts RPG-FMCW-94 cloud radar data into Cloudnet Level 1b netCDF file.

     This function reads one day of RPG Level 1 cloud radar binary files,
@@ -58,11 +62,14 @@ def rpg2nc(
         >>> rpg2nc('/path/to/files/', 'test.nc', site_meta)

     """
+    if isinstance(date, str):
+        date = datetime.date.fromisoformat(date)
+    uuid = utils.get_uuid(uuid)
     l1_files = utils.get_sorted_filenames(path_to_l1_files, ".LV1")
     fmcw94_objects, valid_files = _get_fmcw94_objects(l1_files, date)
     one_day_of_data = create_one_day_data_record(fmcw94_objects)
     if not valid_files:
-        return "", []
+        return uuid, []
     print_info(one_day_of_data)
     fmcw = Fmcw(one_day_of_data, site_meta)
     fmcw.convert_time_to_fraction_hour()
@@ -78,7 +85,7 @@ def rpg2nc(
     fmcw.add_height()
     attributes = output.add_time_attribute(RPG_ATTRIBUTES, fmcw.date)
     output.update_attributes(fmcw.data, attributes)
-    uuid = output.save_level1b(fmcw, output_file, uuid)
+    output.save_level1b(fmcw, output_file, uuid)
     return uuid, valid_files


@@ -113,7 +120,7 @@ def _stack_rpg_data(rpg_objects: RpgObjects) -> tuple[dict, dict]:

     """

-    def _stack(source, target, fun) -> None:
+    def _stack(source: dict, target: dict, fun: Callable) -> None:
        for name, value in source.items():
            if not name.startswith("_"):
                target[name] = fun((target[name], value)) if name in target else value
@@ -168,7 +175,9 @@ def _mask_invalid_data(data_in: dict) -> dict:
     return data


-def _get_fmcw94_objects(files: list, expected_date: str | None) -> tuple[list, list]:
+def _get_fmcw94_objects(
+    files: list[str], expected_date: datetime.date | None
+) -> tuple[list[Fmcw94Bin], list[str]]:
     """Creates a list of Rpg() objects from the file names."""
     objects = []
     valid_files = []
@@ -211,10 +220,10 @@ def _remove_files_with_bad_height(objects: list, files: list) -> tuple[list, lis
     return objects, files


-def _validate_date(obj, expected_date: str) -> None:
+def _validate_date(obj: Fmcw94Bin, expected_date: datetime.date) -> None:
     for t in obj.data["time"][:]:
-        date_str = "-".join(utils.seconds2date(t)[:3])
-        if date_str != expected_date:
+        date = utils.seconds2date(t).date()
+        if date != expected_date:
            msg = "Ignoring a file (time stamps not what expected)"
            raise ValueError(msg)

@@ -222,7 +231,7 @@ def _validate_date(obj, expected_date: str) -> None:
 class Rpg(CloudnetInstrument):
     """Base class for RPG FMCW-94 cloud radar and HATPRO mwr."""

-    def __init__(self, raw_data: dict, site_meta: dict):
+    def __init__(self, raw_data: dict, site_meta: dict) -> None:
         super().__init__()
         self.raw_data = raw_data
         self.site_meta = site_meta
@@ -242,11 +251,11 @@ class Rpg(CloudnetInstrument):
            data_type=data_type,
        )

-    def _get_date(self) -> list:
+    def _get_date(self) -> datetime.date:
        time_first = self.raw_data["time"][0]
        time_last = self.raw_data["time"][-1]
-        date_first = utils.seconds2date(time_first)[:3]
-        date_last = utils.seconds2date(time_last)[:3]
+        date_first = utils.seconds2date(time_first).date()
+        date_last = utils.seconds2date(time_last).date()
        if date_first != date_last:
            logging.warning("Measurements from different days")
        return date_first
@@ -261,7 +270,7 @@ class Rpg(CloudnetInstrument):
 class Fmcw(Rpg):
     """Class for RPG cloud radars."""

-    def __init__(self, raw_data: dict, site_properties: dict):
+    def __init__(self, raw_data: dict, site_properties: dict) -> None:
         super().__init__(raw_data, site_properties)
         self.instrument = self._get_instrument(raw_data)

@@ -313,7 +322,7 @@ class Fmcw(Rpg):
        self.data["wind_speed"].data *= KM_H_TO_M_S

    @staticmethod
-    def _get_instrument(data: dict):
+    def _get_instrument(data: dict) -> Instrument:
        frequency = data["radar_frequency"]
        if math.isclose(frequency, 35, abs_tol=0.1):
            return instruments.FMCW35
@@ -326,12 +335,14 @@ class Fmcw(Rpg):
 class Hatpro(Rpg):
     """Class for RPG HATPRO mwr."""

-    def __init__(self, raw_data: dict, site_properties: dict, instrument: Instrument):
+    def __init__(
+        self, raw_data: dict, site_properties: dict, instrument: Instrument
+    ) -> None:
         super().__init__(raw_data, site_properties)
         self.instrument = instrument


-def _filter_zenith_angle(zenith: ma.MaskedArray) -> np.ndarray:
+def _filter_zenith_angle(zenith: ma.MaskedArray) -> npt.NDArray:
     """Returns indices of profiles with stable zenith angle close to 0 deg."""
     zenith = ma.array(zenith)
     if zenith.mask.all():
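Note on the rpg2nc changes above: date may now be a datetime.date, and the function returns a (UUID, list of valid files) tuple even when no valid .LV1 files are found (previously an empty string was returned in that case). A sketch of a call against the new signature; the paths and metadata below are placeholders:

    import datetime
    from cloudnetpy.instruments.rpg import rpg2nc

    site_meta = {"name": "Example site", "altitude": 174}  # placeholder metadata

    file_uuid, valid_files = rpg2nc(
        "/path/to/files/",               # directory with one day of .LV1 files
        "rpg-fmcw-94.nc",
        site_meta,
        date=datetime.date(2024, 1, 1),  # a datetime.date is now accepted directly
    )
    # file_uuid is a uuid.UUID even if valid_files is empty.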
cloudnetpy/instruments/rpg_reader.py
@@ -1,7 +1,10 @@
 import logging
+from os import PathLike
+from pathlib import Path
 from typing import BinaryIO, Literal

 import numpy as np
+import numpy.typing as npt
 from numpy import ma
 from numpy.lib import recfunctions as rfn
 from rpgpy import read_rpg
@@ -13,7 +16,7 @@ from cloudnetpy.exceptions import ValidTimeStampError
 class Fmcw94Bin:
     """RPG Cloud Radar Level 1 data reader."""

-    def __init__(self, filename):
+    def __init__(self, filename: str | PathLike) -> None:
         self.filename = filename
         self.header, self.data = read_rpg(filename)

@@ -101,7 +104,7 @@ class Fmcw94Bin:
        self.replace_keys(self.data, data_keymap)

    @staticmethod
-    def replace_keys(d: dict, keymap: dict):
+    def replace_keys(d: dict, keymap: dict) -> None:
        for key in d.copy():
            if key in keymap:
                new_key = keymap[key]
@@ -121,9 +124,9 @@ def _read_from_file(


 def _decode_angles(
-    x: np.ndarray,
+    x: npt.NDArray,
     version: Literal[1, 2],
-) -> tuple[np.ndarray, np.ndarray]:
+) -> tuple[npt.NDArray, npt.NDArray]:
     """Decode elevation and azimuth angles.

     >>> _decode_angles(np.array([1267438.5]), version=1)
@@ -181,7 +184,7 @@ class HatproBin:
     QUALITY_MEDIUM = 2
     QUALITY_LOW = 3

-    def __init__(self, filename):
+    def __init__(self, filename: Path) -> None:
         self.filename = filename
         with open(self.filename, "rb") as file:
             self._read_header(file)
@@ -225,7 +228,7 @@ class HatproBinLwp(HatproBin):

     variable = "lwp"

-    def _read_header(self, file) -> None:
+    def _read_header(self, file: BinaryIO) -> None:
         self.header = _read_from_file(
             file,
             [
@@ -245,7 +248,7 @@ class HatproBinLwp(HatproBin):
            msg = f"Unknown HATPRO version. {self.header['file_code']}"
            raise ValueError(msg)

-    def _read_data(self, file) -> None:
+    def _read_data(self, file: BinaryIO) -> None:
        self.data = _read_from_file(
            file,
            [
@@ -264,7 +267,7 @@ class HatproBinIwv(HatproBin):

     variable = "iwv"

-    def _read_header(self, file) -> None:
+    def _read_header(self, file: BinaryIO) -> None:
         self.header = _read_from_file(
             file,
             [
@@ -284,7 +287,7 @@ class HatproBinIwv(HatproBin):
            msg = f"Unknown HATPRO version. {self.header['file_code']}"
            raise ValueError(msg)

-    def _read_data(self, file) -> None:
+    def _read_data(self, file: BinaryIO) -> None:
        self.data = _read_from_file(
            file,
            [
@@ -300,10 +303,10 @@ class HatproBinIwv(HatproBin):
 class HatproBinCombined:
     """Combine HATPRO objects that share values of the given dimensions."""

-    header: dict[str, np.ndarray]
-    data: dict[str, np.ndarray]
+    header: dict[str, npt.NDArray]
+    data: dict[str, npt.NDArray]

-    def __init__(self, files: list[HatproBin]):
+    def __init__(self, files: list[HatproBin]) -> None:
         self.header = {}
         if len(files) == 1:
             arr = files[0].data
cloudnetpy/instruments/vaisala.py
@@ -2,6 +2,7 @@

 import datetime
 from collections.abc import Callable
+from os import PathLike

 import ceilopyter.version
 import numpy as np
@@ -19,20 +20,15 @@ class VaisalaCeilo(Ceilometer):
     def __init__(
         self,
         reader: Callable,
-        full_path: str,
+        full_path: str | PathLike,
         site_meta: dict,
-        expected_date: str | None = None,
-    ):
+        expected_date: datetime.date | None = None,
+    ) -> None:
         super().__init__(self.noise_param)
         self.reader = reader
         self.full_path = full_path
         self.site_meta = site_meta
         self.expected_date = expected_date
-        self.sane_date = (
-            datetime.date.fromisoformat(self.expected_date)
-            if self.expected_date
-            else None
-        )
         self.software = {"ceilopyter": ceilopyter.version.__version__}

     def read_ceilometer_file(self, calibration_factor: float | None = None) -> None:
@@ -51,23 +47,21 @@ class VaisalaCeilo(Ceilometer):
        self.convert_to_fraction_hour()
        self._store_ceilometer_info()

-    def sort_time(self):
+    def sort_time(self) -> None:
        """Sorts timestamps and removes duplicates."""
        time = self.data["time"]
        _time, ind = np.unique(time, return_index=True)
        self._screen_time_indices(ind)

-    def screen_date(self):
+    def screen_date(self) -> None:
        time = self.data["time"]
-        if self.sane_date is None:
-            self.sane_date = time[0].date()
-            self.expected_date = self.sane_date.isoformat()
-        is_valid = np.array([t.date() == self.sane_date for t in time])
+        self.date = time[0].date() if self.expected_date is None else self.expected_date
+        is_valid = np.array([t.date() == self.date for t in time])
        self._screen_time_indices(is_valid)

    def _screen_time_indices(
        self, valid_indices: npt.NDArray[np.intp] | npt.NDArray[np.bool]
-    ):
+    ) -> None:
        time = self.data["time"]
        n_time = len(time)
        if len(valid_indices) == 0 or (
@@ -79,14 +73,13 @@ class VaisalaCeilo(Ceilometer):
            if hasattr(array, "shape") and array.shape[:1] == (n_time,):
                self.data[key] = self.data[key][valid_indices]

-    def convert_to_fraction_hour(self):
+    def convert_to_fraction_hour(self) -> None:
        time = self.data["time"]
        midnight = time[0].replace(hour=0, minute=0, second=0, microsecond=0)
        hour = datetime.timedelta(hours=1)
        self.data["time"] = (time - midnight) / hour
-        self.date = self.expected_date.split("-")  # type: ignore[union-attr]

-    def _store_ceilometer_info(self):
+    def _store_ceilometer_info(self) -> None:
        raise NotImplementedError


@@ -95,10 +88,15 @@ class ClCeilo(VaisalaCeilo):

     noise_param = NoiseParam(noise_min=3.1e-8, noise_smooth_min=1.1e-8)

-    def __init__(self, full_path, site_meta, expected_date=None):
+    def __init__(
+        self,
+        full_path: str | PathLike,
+        site_meta: dict,
+        expected_date: datetime.date | None = None,
+    ) -> None:
         super().__init__(read_cl_file, full_path, site_meta, expected_date)

-    def _store_ceilometer_info(self):
+    def _store_ceilometer_info(self) -> None:
         n_gates = self.data["beta_raw"].shape[1]
         if n_gates < 1540:
             self.instrument = instruments.CL31
@@ -111,11 +109,16 @@ class Ct25k(VaisalaCeilo):

     noise_param = NoiseParam(noise_min=0.7e-7, noise_smooth_min=1.2e-8)

-    def __init__(self, full_path, site_meta, expected_date=None):
+    def __init__(
+        self,
+        full_path: str | PathLike,
+        site_meta: dict,
+        expected_date: datetime.date | None = None,
+    ) -> None:
         super().__init__(read_ct_file, full_path, site_meta, expected_date)
         self._store_ceilometer_info()

-    def _store_ceilometer_info(self):
+    def _store_ceilometer_info(self) -> None:
         self.instrument = instruments.CT25K


@@ -124,8 +127,13 @@ class Cs135(VaisalaCeilo):

     noise_param = NoiseParam()

-    def __init__(self, full_path, site_meta, expected_date=None):
+    def __init__(
+        self,
+        full_path: str | PathLike,
+        site_meta: dict,
+        expected_date: datetime.date | None = None,
+    ) -> None:
         super().__init__(read_cs_file, full_path, site_meta, expected_date)

-    def _store_ceilometer_info(self):
+    def _store_ceilometer_info(self) -> None:
         self.instrument = instruments.CS135
cloudnetpy/instruments/weather_station.py
@@ -5,6 +5,7 @@ import re
 from collections import defaultdict
 from collections.abc import Iterable, Sequence
 from os import PathLike
+from uuid import UUID

 import numpy as np
 from numpy import ma
@@ -17,16 +18,16 @@ from cloudnetpy.exceptions import ValidTimeStampError
 from cloudnetpy.instruments import instruments
 from cloudnetpy.instruments.cloudnet_instrument import CSVFile
 from cloudnetpy.instruments.toa5 import read_toa5
-from cloudnetpy.utils import datetime2decimal_hours
+from cloudnetpy.utils import datetime2decimal_hours, get_uuid


 def ws2nc(
     weather_station_file: str | PathLike | Sequence[str | PathLike],
-    output_file: str,
+    output_file: str | PathLike,
     site_meta: dict,
-    uuid: str | None = None,
+    uuid: str | UUID | None = None,
     date: str | datetime.date | None = None,
-) -> str:
+) -> UUID:
     """Converts weather station data into Cloudnet Level 1b netCDF file.

     Args:
@@ -47,6 +48,7 @@ def ws2nc(
        weather_station_file = [weather_station_file]
    if isinstance(date, str):
        date = datetime.date.fromisoformat(date)
+    uuid = get_uuid(uuid)
    ws: WS
    if site_meta["name"] == "Palaiseau":
        ws = PalaiseauWS(weather_station_file, site_meta)
@@ -85,21 +87,26 @@ def ws2nc(
     ws.calculate_rainfall_amount()
     attributes = output.add_time_attribute({}, ws.date)
     output.update_attributes(ws.data, attributes)
-    return output.save_level1b(ws, output_file, uuid)
+    output.save_level1b(ws, output_file, uuid)
+    return uuid


 class WS(CSVFile):
-    def __init__(self, site_meta: dict):
+    def __init__(self, site_meta: dict) -> None:
        super().__init__(site_meta)
        self.instrument = instruments.GENERIC_WEATHER_STATION

-    date: list[str]
+    date: datetime.date

    def calculate_rainfall_amount(self) -> None:
        if "rainfall_amount" in self.data or "rainfall_rate" not in self.data:
            return
-        resolution = np.median(np.diff(self.data["time"].data)) * SEC_IN_HOUR
-        rainfall_amount = ma.cumsum(self.data["rainfall_rate"].data * resolution)
+        time = self.data["time"].data
+        if len(time) == 1:
+            rainfall_amount = np.array([0])
+        else:
+            resolution = np.median(np.diff(time)) * SEC_IN_HOUR
+            rainfall_amount = ma.cumsum(self.data["rainfall_rate"].data * resolution)
        self.data["rainfall_amount"] = CloudnetArray(rainfall_amount, "rainfall_amount")

    def screen_timestamps(self, date: datetime.date) -> None:
@@ -143,7 +150,7 @@ class WS(CSVFile):


 class PalaiseauWS(WS):
-    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict):
+    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
         super().__init__(site_meta)
         self.filenames = filenames
         self._data = self._read_data()
@@ -232,7 +239,7 @@ class BucharestWS(PalaiseauWS):


 class GranadaWS(WS):
-    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict):
+    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
         if len(filenames) != 1:
             raise ValueError
         super().__init__(site_meta)
@@ -282,7 +289,7 @@ class GranadaWS(WS):


 class KenttarovaWS(WS):
-    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict):
+    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
         super().__init__(site_meta)
         self.filenames = filenames
         self._data = self._read_data()
@@ -338,7 +345,7 @@ class HyytialaWS(WS):
    - BbRT/mm = Bucket content in real-time (Pluvio200) [mm].
    """

-    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict):
+    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
        super().__init__(site_meta)
        self.filename = filenames[0]
        self._data = self._read_data()
@@ -399,7 +406,7 @@ class HyytialaWS(WS):


 class GalatiWS(WS):
-    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict):
+    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
         super().__init__(site_meta)
         self.filename = filenames[0]
         self._data = self._read_data()
@@ -422,7 +429,7 @@ class GalatiWS(WS):
                parsed_value = math.nan
            raw_data[key].append(parsed_value)

-        def read_value(keys: Iterable[str]):
+        def read_value(keys: Iterable[str]) -> list:
            for key in keys:
                if key in raw_data:
                    return raw_data[key]
@@ -443,7 +450,7 @@ class GalatiWS(WS):

    def add_data(self) -> None:
        # Skip wind measurements where range was limited to 0-180 degrees
-        if datetime.date(*map(int, self.date)) < datetime.date(2024, 10, 29):
+        if self.date < datetime.date(2024, 10, 29):
            del self._data["wind_speed"]
            del self._data["wind_direction"]
        return super().add_data()
@@ -454,7 +461,7 @@


 class JuelichWS(WS):
-    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict):
+    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
         super().__init__(site_meta)
         self.filename = filenames[0]
         self._data = self._read_data()
@@ -500,7 +507,7 @@
 class LampedusaWS(WS):
     """Read Lampedusa weather station data in ICOS format."""

-    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict):
+    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
         super().__init__(site_meta)
         self.filename = filenames[0]
         self._data = self._read_data()
@@ -553,7 +560,7 @@


 class LimassolWS(WS):
-    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict):
+    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
         super().__init__(site_meta)
         self.filenames = filenames
         self._data = defaultdict(list)
@@ -610,7 +617,7 @@ class LimassolWS(WS):
        )  # mm/(10 min) -> m/s


-def _parse_sirta(filename: str | PathLike):
+def _parse_sirta(filename: str | PathLike) -> dict:
    """Parse SIRTA-style weather station file."""
    with open(filename, "rb") as f:
        raw_content = f.read()
@@ -653,7 +660,7 @@ def _parse_sirta(filename: str | PathLike):


 class LAquilaWS(WS):
-    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict):
+    def __init__(self, filenames: Sequence[str | PathLike], site_meta: dict) -> None:
         super().__init__(site_meta)
         self.filenames = filenames
         self._data = self._read_data()
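Note on the ws2nc changes above: output_file may be a PathLike, the uuid argument is normalised with get_uuid, and the function returns a uuid.UUID. A sketch with placeholder file names and illustrative Palaiseau coordinates (only the site name is taken from the diff):

    from cloudnetpy.instruments.weather_station import ws2nc

    site_meta = {
        "name": "Palaiseau",   # selects PalaiseauWS in ws2nc
        "latitude": 48.7,      # illustrative coordinates, not from the diff
        "longitude": 2.2,
        "altitude": 156,
    }

    file_uuid = ws2nc(
        ["ws_20241028.txt", "ws_20241029.txt"],  # a single path or a sequence is accepted
        "weather-station.nc",
        site_meta,
        date="2024-10-29",
    )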
cloudnetpy/model_evaluation/file_handler.py
@@ -1,7 +1,10 @@
 import os
 from datetime import datetime
+from os import PathLike
+from uuid import UUID

 import netCDF4
+import numpy.typing as npt

 from cloudnetpy import output, utils

@@ -62,11 +65,11 @@ def update_attributes(model_downsample_variables: dict, attributes: dict) -> Non

 def save_downsampled_file(
     id_mark: str,
-    file_name: str,
+    file_name: str | PathLike,
     objects: tuple,
-    files: tuple,
-    uuid: str | None,
-) -> str:
+    files: tuple[list[str | PathLike], str | PathLike],
+    uuid: UUID,
+) -> None:
     """Saves a standard downsampled day product file.

     Args:
@@ -82,36 +85,31 @@ def save_downsampled_file(
     """
     obj = objects[0]
     dimensions = {"time": len(obj.time), "level": len(obj.data["level"][:])}
-    root_group = output.init_file(file_name, dimensions, obj.data, uuid)
-    _augment_global_attributes(root_group)
-    uuid = root_group.file_uuid
-    root_group.cloudnet_file_type = "l3-" + id_mark.split("_")[0]
-    root_group.title = (
-        f"Downsampled {id_mark.capitalize().replace('_', ' of ')} "
-        f"from {obj.dataset.location}"
-    )
-    _add_source(root_group, objects, files)
-    output.copy_global(obj.dataset, root_group, ("location", "day", "month", "year"))
-    if not hasattr(obj.dataset, "day"):
-        root_group.year, root_group.month, root_group.day = obj.date
-    output.merge_history(root_group, id_mark, obj)
-    root_group.close()
-    if not isinstance(uuid, str):
-        msg = "UUID is not a string."
-        raise TypeError(msg)
-    return uuid
-
-
-def add_var2ncfile(obj: ModelManager, file_name: str) -> None:
-    nc_file = netCDF4.Dataset(file_name, "r+", format="NETCDF4_CLASSIC")
-    _write_vars2nc(nc_file, obj.data)
-    nc_file.close()
+    with output.init_file(file_name, dimensions, obj.data, uuid) as root_group:
+        _augment_global_attributes(root_group)
+        root_group.cloudnet_file_type = "l3-" + id_mark.split("_")[0]
+        root_group.title = (
+            f"Downsampled {id_mark.capitalize().replace('_', ' of ')} "
+            f"from {obj.dataset.location}"
+        )
+        _add_source(root_group, objects, files)
+        output.copy_global(
+            obj.dataset, root_group, ("location", "day", "month", "year")
+        )
+        if not hasattr(obj.dataset, "day"):
+            root_group.year, root_group.month, root_group.day = obj.date
+        output.merge_history(root_group, id_mark, obj)
+
+
+def add_var2ncfile(obj: ModelManager, file_name: str | PathLike) -> None:
+    with netCDF4.Dataset(file_name, "r+", format="NETCDF4_CLASSIC") as nc_file:
+        _write_vars2nc(nc_file, obj.data)


 def _write_vars2nc(rootgrp: netCDF4.Dataset, cloudnet_variables: dict) -> None:
     """Iterates over Cloudnet-ME instances and write to given rootgrp."""

-    def _get_dimensions(array) -> tuple:
+    def _get_dimensions(array: npt.NDArray) -> tuple:
         """Finds correct dimensions for a variable."""
         if utils.isscalar(array):
             return ()
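Note on the file_handler.py changes above: explicit root_group.close() / nc_file.close() calls are replaced with context managers, so the netCDF file is closed even if writing attributes raises. A minimal sketch of the same pattern with plain netCDF4 (file name and attribute are placeholders):

    import netCDF4

    # The dataset is flushed and closed when the block exits, also on error.
    with netCDF4.Dataset("example.nc", "w", format="NETCDF4_CLASSIC") as nc:
        nc.title = "Example title"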