cloudnetpy 1.49.9__py3-none-any.whl → 1.87.3__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package. It is provided for informational purposes only and reflects the changes between the versions as published in their public registry.
Files changed (116)
  1. cloudnetpy/categorize/__init__.py +1 -2
  2. cloudnetpy/categorize/atmos_utils.py +297 -67
  3. cloudnetpy/categorize/attenuation.py +31 -0
  4. cloudnetpy/categorize/attenuations/__init__.py +37 -0
  5. cloudnetpy/categorize/attenuations/gas_attenuation.py +30 -0
  6. cloudnetpy/categorize/attenuations/liquid_attenuation.py +84 -0
  7. cloudnetpy/categorize/attenuations/melting_attenuation.py +78 -0
  8. cloudnetpy/categorize/attenuations/rain_attenuation.py +84 -0
  9. cloudnetpy/categorize/categorize.py +332 -156
  10. cloudnetpy/categorize/classify.py +127 -125
  11. cloudnetpy/categorize/containers.py +107 -76
  12. cloudnetpy/categorize/disdrometer.py +40 -0
  13. cloudnetpy/categorize/droplet.py +23 -21
  14. cloudnetpy/categorize/falling.py +53 -24
  15. cloudnetpy/categorize/freezing.py +25 -12
  16. cloudnetpy/categorize/insects.py +35 -23
  17. cloudnetpy/categorize/itu.py +243 -0
  18. cloudnetpy/categorize/lidar.py +36 -41
  19. cloudnetpy/categorize/melting.py +34 -26
  20. cloudnetpy/categorize/model.py +84 -37
  21. cloudnetpy/categorize/mwr.py +18 -14
  22. cloudnetpy/categorize/radar.py +215 -102
  23. cloudnetpy/cli.py +578 -0
  24. cloudnetpy/cloudnetarray.py +43 -89
  25. cloudnetpy/concat_lib.py +218 -78
  26. cloudnetpy/constants.py +28 -10
  27. cloudnetpy/datasource.py +61 -86
  28. cloudnetpy/exceptions.py +49 -20
  29. cloudnetpy/instruments/__init__.py +5 -0
  30. cloudnetpy/instruments/basta.py +29 -12
  31. cloudnetpy/instruments/bowtie.py +135 -0
  32. cloudnetpy/instruments/ceilo.py +138 -115
  33. cloudnetpy/instruments/ceilometer.py +164 -80
  34. cloudnetpy/instruments/cl61d.py +21 -5
  35. cloudnetpy/instruments/cloudnet_instrument.py +74 -36
  36. cloudnetpy/instruments/copernicus.py +108 -30
  37. cloudnetpy/instruments/da10.py +54 -0
  38. cloudnetpy/instruments/disdrometer/common.py +126 -223
  39. cloudnetpy/instruments/disdrometer/parsivel.py +453 -94
  40. cloudnetpy/instruments/disdrometer/thies.py +254 -87
  41. cloudnetpy/instruments/fd12p.py +201 -0
  42. cloudnetpy/instruments/galileo.py +65 -23
  43. cloudnetpy/instruments/hatpro.py +123 -49
  44. cloudnetpy/instruments/instruments.py +113 -1
  45. cloudnetpy/instruments/lufft.py +39 -17
  46. cloudnetpy/instruments/mira.py +268 -61
  47. cloudnetpy/instruments/mrr.py +187 -0
  48. cloudnetpy/instruments/nc_lidar.py +19 -8
  49. cloudnetpy/instruments/nc_radar.py +109 -55
  50. cloudnetpy/instruments/pollyxt.py +135 -51
  51. cloudnetpy/instruments/radiometrics.py +313 -59
  52. cloudnetpy/instruments/rain_e_h3.py +171 -0
  53. cloudnetpy/instruments/rpg.py +321 -189
  54. cloudnetpy/instruments/rpg_reader.py +74 -40
  55. cloudnetpy/instruments/toa5.py +49 -0
  56. cloudnetpy/instruments/vaisala.py +95 -343
  57. cloudnetpy/instruments/weather_station.py +774 -105
  58. cloudnetpy/metadata.py +90 -19
  59. cloudnetpy/model_evaluation/file_handler.py +55 -52
  60. cloudnetpy/model_evaluation/metadata.py +46 -20
  61. cloudnetpy/model_evaluation/model_metadata.py +1 -1
  62. cloudnetpy/model_evaluation/plotting/plot_tools.py +32 -37
  63. cloudnetpy/model_evaluation/plotting/plotting.py +327 -117
  64. cloudnetpy/model_evaluation/products/advance_methods.py +92 -83
  65. cloudnetpy/model_evaluation/products/grid_methods.py +88 -63
  66. cloudnetpy/model_evaluation/products/model_products.py +43 -35
  67. cloudnetpy/model_evaluation/products/observation_products.py +41 -35
  68. cloudnetpy/model_evaluation/products/product_resampling.py +17 -7
  69. cloudnetpy/model_evaluation/products/tools.py +29 -20
  70. cloudnetpy/model_evaluation/statistics/statistical_methods.py +30 -20
  71. cloudnetpy/model_evaluation/tests/e2e/conftest.py +3 -3
  72. cloudnetpy/model_evaluation/tests/e2e/process_cf/main.py +9 -5
  73. cloudnetpy/model_evaluation/tests/e2e/process_cf/tests.py +15 -14
  74. cloudnetpy/model_evaluation/tests/e2e/process_iwc/main.py +9 -5
  75. cloudnetpy/model_evaluation/tests/e2e/process_iwc/tests.py +15 -14
  76. cloudnetpy/model_evaluation/tests/e2e/process_lwc/main.py +9 -5
  77. cloudnetpy/model_evaluation/tests/e2e/process_lwc/tests.py +15 -14
  78. cloudnetpy/model_evaluation/tests/unit/conftest.py +42 -41
  79. cloudnetpy/model_evaluation/tests/unit/test_advance_methods.py +41 -48
  80. cloudnetpy/model_evaluation/tests/unit/test_grid_methods.py +216 -194
  81. cloudnetpy/model_evaluation/tests/unit/test_model_products.py +23 -21
  82. cloudnetpy/model_evaluation/tests/unit/test_observation_products.py +37 -38
  83. cloudnetpy/model_evaluation/tests/unit/test_plot_tools.py +43 -40
  84. cloudnetpy/model_evaluation/tests/unit/test_plotting.py +30 -36
  85. cloudnetpy/model_evaluation/tests/unit/test_statistical_methods.py +68 -31
  86. cloudnetpy/model_evaluation/tests/unit/test_tools.py +33 -26
  87. cloudnetpy/model_evaluation/utils.py +2 -1
  88. cloudnetpy/output.py +170 -111
  89. cloudnetpy/plotting/__init__.py +2 -1
  90. cloudnetpy/plotting/plot_meta.py +562 -822
  91. cloudnetpy/plotting/plotting.py +1142 -704
  92. cloudnetpy/products/__init__.py +1 -0
  93. cloudnetpy/products/classification.py +370 -88
  94. cloudnetpy/products/der.py +85 -55
  95. cloudnetpy/products/drizzle.py +77 -34
  96. cloudnetpy/products/drizzle_error.py +15 -11
  97. cloudnetpy/products/drizzle_tools.py +79 -59
  98. cloudnetpy/products/epsilon.py +211 -0
  99. cloudnetpy/products/ier.py +27 -50
  100. cloudnetpy/products/iwc.py +55 -48
  101. cloudnetpy/products/lwc.py +96 -70
  102. cloudnetpy/products/mwr_tools.py +186 -0
  103. cloudnetpy/products/product_tools.py +170 -128
  104. cloudnetpy/utils.py +455 -240
  105. cloudnetpy/version.py +2 -2
  106. {cloudnetpy-1.49.9.dist-info → cloudnetpy-1.87.3.dist-info}/METADATA +44 -40
  107. cloudnetpy-1.87.3.dist-info/RECORD +127 -0
  108. {cloudnetpy-1.49.9.dist-info → cloudnetpy-1.87.3.dist-info}/WHEEL +1 -1
  109. cloudnetpy-1.87.3.dist-info/entry_points.txt +2 -0
  110. docs/source/conf.py +2 -2
  111. cloudnetpy/categorize/atmos.py +0 -361
  112. cloudnetpy/products/mwr_multi.py +0 -68
  113. cloudnetpy/products/mwr_single.py +0 -75
  114. cloudnetpy-1.49.9.dist-info/RECORD +0 -112
  115. {cloudnetpy-1.49.9.dist-info → cloudnetpy-1.87.3.dist-info/licenses}/LICENSE +0 -0
  116. {cloudnetpy-1.49.9.dist-info → cloudnetpy-1.87.3.dist-info}/top_level.txt +0 -0
cloudnetpy/instruments/disdrometer/thies.py
@@ -1,17 +1,82 @@
+ import datetime
+ from collections import defaultdict
+ from os import PathLike
+ from typing import Any
+ from uuid import UUID
+
+ import numpy as np
+ from numpy import ma
+
  from cloudnetpy import output
- from cloudnetpy.exceptions import DisdrometerDataError
+ from cloudnetpy.cloudnetarray import CloudnetArray
+ from cloudnetpy.constants import MM_TO_M, SEC_IN_HOUR
+ from cloudnetpy.exceptions import DisdrometerDataError, ValidTimeStampError
  from cloudnetpy.instruments import instruments
+ from cloudnetpy.instruments.toa5 import read_toa5
+ from cloudnetpy.utils import get_uuid

- from .common import ATTRIBUTES, THIES, Disdrometer, _format_thies_date
+ from .common import ATTRIBUTES, Disdrometer
+
+ TELEGRAM4 = [
+     (1, "_serial_number"),
+     (2, "_software_version"),
+     (3, "_date"),
+     (4, "_time"),
+     (5, "_synop_5min_ww"),
+     (6, "_synop_5min_WaWa"),
+     (7, "_metar_5min_4678"),
+     (8, "_rainfall_rate_5min"),
+     (9, "synop_WW"),  # 1min
+     (10, "synop_WaWa"),  # 1min
+     (11, "_metar_1_min_4678"),
+     (12, "rainfall_rate_1min_total"),
+     (13, "rainfall_rate"),  # liquid, mm h-1
+     (14, "rainfall_rate_1min_solid"),
+     (15, "_precipition_amount"),  # mm
+     (16, "visibility"),
+     (17, "radar_reflectivity"),
+     (18, "measurement_quality"),
+     (19, "maximum_hail_diameter"),
+     (20, "status_laser"),
+     (21, "static_signal"),
+     (22, "status_T_laser_analogue"),
+     (23, "status_T_laser_digital"),
+     (24, "status_I_laser_analogue"),
+     (25, "status_I_laser_digital"),
+     (26, "status_sensor_supply"),
+     (27, "status_laser_heating"),
+     (28, "status_receiver_heating"),
+     (29, "status_temperature_sensor"),
+     (30, "status_heating_supply"),
+     (31, "status_heating_housing"),
+     (32, "status_heating_heads"),
+     (33, "status_heating_carriers"),
+     (34, "status_laser_power"),
+     (35, "_status_reserve"),
+     (36, "T_interior"),
+     (37, "T_laser_driver"),  # 0-80 C
+     (38, "I_mean_laser"),
+     (39, "V_control"),  # mV 4005-4015
+     (40, "V_optical_output"),  # mV 2300-6500
+     (41, "V_sensor_supply"),  # 1/10V
+     (42, "I_heating_laser_head"),  # mA
+     (43, "I_heating_receiver_head"),  # mA
+     (44, "T_ambient"),  # C
+     (45, "_V_heating_supply"),
+     (46, "_I_housing"),
+     (47, "_I_heating_heads"),
+     (48, "_I_heating_carriers"),
+     (49, "n_particles"),
+ ]


  def thies2nc(
-     disdrometer_file: str,
-     output_file: str,
+     disdrometer_file: str | PathLike,
+     output_file: str | PathLike,
      site_meta: dict,
-     uuid: str | None = None,
-     date: str | None = None,
- ) -> str:
+     uuid: str | UUID | None = None,
+     date: str | datetime.date | None = None,
+ ) -> UUID:
      """Converts Thies-LNM disdrometer data into Cloudnet Level 1b netCDF file.

      Args:
@@ -36,103 +101,205 @@ def thies2nc(
          >>> uuid = thies2nc('thies-lnm.log', 'thies-lnm.nc', site_meta)

      """
+     if isinstance(date, str):
+         date = datetime.date.fromisoformat(date)
+     uuid = get_uuid(uuid)
      try:
-         disdrometer = Thies(disdrometer_file, site_meta)
+         disdrometer = Thies(disdrometer_file, site_meta, date)
      except (ValueError, IndexError) as err:
-         raise DisdrometerDataError("Can not read disdrometer file") from err
-     if date is not None:
-         disdrometer.validate_date(date)
-     disdrometer.init_data()
-     if date is not None:
-         disdrometer.sort_timestamps()
-         disdrometer.remove_duplicate_timestamps()
+         msg = "Unable to read disdrometer file"
+         raise DisdrometerDataError(msg) from err
+     disdrometer.sort_timestamps()
+     disdrometer.remove_duplicate_timestamps()
+     disdrometer.mask_invalid_values()
      disdrometer.add_meta()
      disdrometer.convert_units()
      attributes = output.add_time_attribute(ATTRIBUTES, disdrometer.date)
      output.update_attributes(disdrometer.data, attributes)
-     uuid = output.save_level1b(disdrometer, output_file, uuid)
+     output.save_level1b(disdrometer, output_file, uuid)
      return uuid


  class Thies(Disdrometer):
-     def __init__(self, filename: str, site_meta: dict):
-         super().__init__(filename, site_meta, THIES)
+     def __init__(
+         self,
+         filename: str | PathLike,
+         site_meta: dict,
+         expected_date: datetime.date | None = None,
+     ) -> None:
+         super().__init__()
+         self.instrument = instruments.THIES
          self.n_velocity = 20
          self.n_diameter = 22
-         self.date = self._init_date()
+         self.site_meta = site_meta
+         self.raw_data: dict[str, Any] = defaultdict(list)
+         self._read_data(filename)
+         self._screen_time(expected_date)
+         self.data = {}
+         self._append_data()
          self._create_velocity_vectors()
          self._create_diameter_vectors()
-         self.instrument = instruments.THIES

-     def init_data(self):
-         """According to
-         https://www.biral.com/wp-content/uploads/2015/01/5.4110.xx_.xxx_.pdf
-         """
-         column_and_key = [
-             (1, "_serial_number"),
-             (2, "_software_version"),
-             (3, "_date"),
-             (4, "_time"),
-             (5, "_synop_5min_ww"),
-             (6, "_synop_5min_WaWa"),
-             (7, "_metar_5min_4678"),
-             (8, "_rainfall_rate_5min"),
-             (9, "synop_WW"),  # 1min
-             (10, "synop_WaWa"),  # 1min
-             (11, "_metar_1_min_4678"),
-             (12, "rainfall_rate_1min_total"),
-             (13, "rainfall_rate"),  # liquid, mm h-1
-             (14, "rainfall_rate_1min_solid"),
-             (15, "_precipition_amount"),  # mm
-             (16, "visibility"),
-             (17, "radar_reflectivity"),
-             (18, "measurement_quality"),
-             (19, "maximum_hail_diameter"),
-             (20, "status_laser"),
-             (21, "static_signal"),
-             (22, "status_T_laser_analogue"),
-             (23, "status_T_laser_digital"),
-             (24, "status_I_laser_analogue"),
-             (25, "status_I_laser_digital"),
-             (26, "status_sensor_supply"),
-             (27, "status_laser_heating"),
-             (28, "status_receiver_heating"),
-             (29, "status_temperature_sensor"),
-             (30, "status_heating_supply"),
-             (31, "status_heating_housing"),
-             (32, "status_heating_heads"),
-             (33, "status_heating_carriers"),
-             (34, "status_laser_power"),
-             (35, "_status_reserve"),
-             (36, "T_interior"),
-             (37, "T_laser_driver"),  # 0-80 C
-             (38, "I_mean_laser"),
-             (39, "V_control"),  # mV 4005-4015
-             (40, "V_optical_output"),  # mV 2300-6500
-             (41, "V_sensor_supply"),  # 1/10V
-             (42, "I_heating_laser_head"),  # mA
-             (43, "I_heating_receiver_head"),  # mA
-             (44, "T_ambient"),  # C
-             (45, "_V_heating_supply"),
-             (46, "_I_housing"),
-             (47, "_I_heating_heads"),
-             (48, "_I_heating_carriers"),
-             (49, "n_particles"),
-         ]
-         self._append_data(column_and_key)
-         self._append_spectra()
-
-     def _init_date(self) -> list:
-         first_date = self._file_data["scalars"][0][3]
-         first_date = _format_thies_date(first_date)
-         return first_date.split("-")
-
-     def _create_velocity_vectors(self):
+     def convert_units(self) -> None:
+         mmh_to_ms = SEC_IN_HOUR / MM_TO_M
+         c_to_k = 273.15
+         self._convert_data(("rainfall_rate_1min_total",), mmh_to_ms)
+         self._convert_data(("rainfall_rate",), mmh_to_ms)
+         self._convert_data(("rainfall_rate_1min_solid",), mmh_to_ms)
+         self._convert_data(("diameter", "diameter_spread", "diameter_bnds"), 1e3)
+         self._convert_data(("V_sensor_supply",), 10)
+         self._convert_data(("I_mean_laser",), 100)
+         self._convert_data(("T_interior",), c_to_k, method="add")
+         self._convert_data(("T_ambient",), c_to_k, method="add")
+         self._convert_data(("T_laser_driver",), c_to_k, method="add")
+
+     def _read_data(self, filename: str | PathLike) -> None:
+         with open(filename, errors="ignore") as file:
+             first_line = file.readline()
+         if "TOA5" in first_line:
+             _units, _process, rows = read_toa5(filename)
+             for row in rows:
+                 self._read_line(row["RawString"], row["TIMESTAMP"])
+         elif first_line.lower().startswith("datetime [utc];"):
+             with open(filename, errors="ignore") as file:
+                 first_line = file.readline()
+                 for line in file:
+                     timestamp, telegram = line.split(";", maxsplit=1)
+                     fixed_telegram = telegram.strip().rstrip(";") + ";"
+                     parsed_timestamp = datetime.datetime.strptime(
+                         timestamp, "%Y-%m-%d %H:%M:%S"
+                     )
+                     self._read_line(fixed_telegram, parsed_timestamp)
+         else:
+             with open(filename, errors="ignore") as file:
+                 for line in file:
+                     self._read_line(line)
+         if len(self.raw_data["time"]) == 0:
+             raise ValidTimeStampError
+         for key, value in self.raw_data.items():
+             array = np.array(value)
+             if key == "time":
+                 array = array.astype("datetime64[s]")
+             self.raw_data[key] = array
+
+     def _append_data(self) -> None:
+         for key, values in self.raw_data.items():
+             if key.startswith("_"):
+                 continue
+             name_out = key
+             values_out = values
+             match key:
+                 case "spectrum":
+                     name_out = "data_raw"
+                     dimensions = ["time", "diameter", "velocity"]
+                 case "time":
+                     dimensions = []
+                     base = values[0].astype("datetime64[D]")
+                     values_out = (values - base) / np.timedelta64(1, "h")
+                 case _:
+                     dimensions = ["time"]
+             self.data[name_out] = CloudnetArray(
+                 values_out, name_out, dimensions=dimensions
+             )
+
+         first_id = self.raw_data["_serial_number"][0]
+         for sensor_id in self.raw_data["_serial_number"]:
+             if sensor_id != first_id:
+                 msg = "Multiple serial numbers are not supported"
+                 raise DisdrometerDataError(msg)
+         self.serial_number = first_id
+
+     def _read_line(self, line: str, timestamp: datetime.datetime | None = None) -> None:
+         raw_values = line.strip().strip(";").split(";")
+         # Support custom truncated format used in Leipzig LIM.
+         expected_columns = self.site_meta.get("truncate_columns", 521) - 1
+         # Length matches telegram 4 or 5 (has 4 additional columns).
+         if len(raw_values) not in (expected_columns, expected_columns + 4):
+             return
+         for i, key in TELEGRAM4:
+             if i >= expected_columns - 1:
+                 break
+             value: Any
+             if key == "_date":
+                 value = _parse_date(raw_values[i])
+             elif key == "_time":
+                 value = _parse_time(raw_values[i])
+             elif key in (
+                 "I_heating",
+                 "T_ambient",
+                 "T_interior",
+                 "T_laser_driver",
+                 "V_power_supply",
+                 "_precipition_amount",
+                 "_rainfall_rate_5min",
+                 "maximum_hail_diameter",
+                 "radar_reflectivity",
+                 "rainfall_rate",
+                 "rainfall_rate_1min_solid",
+                 "rainfall_rate_1min_total",
+             ):
+                 value = float(raw_values[i])
+             elif key in (
+                 "_serial_number",
+                 "_software_version",
+                 "_metar_5min_4678",
+                 "_metar_1_min_4678",
+             ):
+                 value = raw_values[i]
+             else:
+                 value = int(raw_values[i])
+             self.raw_data[key].append(value)
+         if expected_columns > 79:
+             self.raw_data["spectrum"].append(
+                 np.array(list(map(int, raw_values[79:519])), dtype="i2").reshape(
+                     self.n_diameter, self.n_velocity
+                 )
+             )
+         if timestamp is not None:
+             self.raw_data["time"].append(timestamp)
+         else:
+             self.raw_data["time"].append(
+                 datetime.datetime.combine(
+                     self.raw_data["_date"][-1], self.raw_data["_time"][-1]
+                 )
+             )
+
+     def _screen_time(self, expected_date: datetime.date | None = None) -> None:
+         if expected_date is None:
+             self.date = self.raw_data["time"][0].astype(object).date()
+             return
+         self.date = expected_date
+         valid_mask = self.raw_data["time"].astype("datetime64[D]") == self.date
+         if np.count_nonzero(valid_mask) == 0:
+             msg = f"No data found on {expected_date}"
+             raise DisdrometerDataError(msg)
+         for key in self.raw_data:
+             self.raw_data[key] = self.raw_data[key][valid_mask]
+
+     def mask_invalid_values(self) -> None:
+         rainfall_rate = self.data["rainfall_rate"]
+         rainfall_rate.data = ma.masked_where(
+             rainfall_rate.data > 999, rainfall_rate.data
+         )
+
+     def _create_velocity_vectors(self) -> None:
          n_values = [5, 6, 7, 1, 1]
          spreads = [0.2, 0.4, 0.8, 1, 10]
-         self.store_vectors(self.data, n_values, spreads, "velocity")
+         self.store_vectors(n_values, spreads, "velocity")

-     def _create_diameter_vectors(self):
+     def _create_diameter_vectors(self) -> None:
          n_values = [3, 6, 13]
          spreads = [0.125, 0.25, 0.5]
-         self.store_vectors(self.data, n_values, spreads, "diameter", start=0.125)
+         self.store_vectors(n_values, spreads, "diameter", start=0.125)
+
+
+ def _parse_date(date: str) -> datetime.date:
+     day, month, year = map(int, date.split("."))
+     if year < 100:
+         year += 2000
+     return datetime.date(year, month, day)
+
+
+ def _parse_time(time: str) -> datetime.time:
+     hour, minute, second = map(int, time.split(":"))
+     return datetime.time(hour, minute, second)
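
Note on the updated thies2nc API shown above: the converter now accepts path-like inputs, takes date as a datetime.date or ISO string, and returns a uuid.UUID instead of a str. A minimal usage sketch, assuming thies2nc is still exported from cloudnetpy.instruments as in earlier releases; the file names and site metadata are made up for illustration:

import datetime

from cloudnetpy.instruments import thies2nc

site_meta = {"name": "Lindenberg"}  # illustrative; see the (elided) docstring for accepted keys
file_uuid = thies2nc(
    "thies-lnm.log",                 # hypothetical input file
    "thies-lnm.nc",
    site_meta,
    date=datetime.date(2024, 5, 1),  # also accepted as the ISO string "2024-05-01"
)
print(file_uuid)  # uuid.UUID, no longer a plain str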
cloudnetpy/instruments/fd12p.py (new file)
@@ -0,0 +1,201 @@
+ import datetime
+ import logging
+ import math
+ import re
+ from os import PathLike
+ from uuid import UUID
+
+ import numpy as np
+ from numpy import ma
+
+ from cloudnetpy import output
+ from cloudnetpy.exceptions import ValidTimeStampError
+ from cloudnetpy.instruments import instruments
+ from cloudnetpy.instruments.cloudnet_instrument import CSVFile
+ from cloudnetpy.metadata import MetaData
+ from cloudnetpy.utils import get_uuid
+
+
+ def fd12p2nc(
+     input_file: str | PathLike,
+     output_file: str | PathLike,
+     site_meta: dict,
+     uuid: str | UUID | None = None,
+     date: str | datetime.date | None = None,
+ ) -> UUID:
+     """Converts Vaisala FD12P into Cloudnet Level 1b netCDF file.
+
+     Args:
+         input_file: Filename of input file.
+         output_file: Output filename.
+         site_meta: Dictionary containing information about the site. Required key
+             is `name`.
+         uuid: Set specific UUID for the file.
+         date: Expected date of the measurements as YYYY-MM-DD or datetime.date object.
+
+     Returns:
+         UUID of the generated file.
+
+     Raises:
+         ValidTimeStampError: No valid timestamps found.
+     """
+     if isinstance(date, str):
+         date = datetime.date.fromisoformat(date)
+     uuid = get_uuid(uuid)
+     fd12p = FD12P(site_meta)
+     fd12p.parse_input_file(input_file, date)
+     fd12p.add_data()
+     fd12p.add_date()
+     fd12p.screen_all_masked()
+     fd12p.sort_timestamps()
+     fd12p.remove_duplicate_timestamps()
+     fd12p.convert_units()
+     fd12p.normalize_cumulative_amount("precipitation_amount")
+     fd12p.normalize_cumulative_amount("snowfall_amount")
+     fd12p.add_site_geolocation()
+     attributes = output.add_time_attribute(ATTRIBUTES, fd12p.date)
+     output.update_attributes(fd12p.data, attributes)
+     output.save_level1b(fd12p, output_file, uuid)
+     return uuid
+
+
+ class FD12P(CSVFile):
+     def __init__(self, site_meta: dict) -> None:
+         super().__init__(site_meta)
+         self.instrument = instruments.FD12P
+         self._data = {
+             key: []
+             for key in (
+                 "time",
+                 "visibility",
+                 "synop_WaWa",
+                 "precipitation_rate",
+                 "precipitation_amount",
+                 "snowfall_amount",
+             )
+         }
+
+     def parse_input_file(
+         self, filename: str | PathLike, expected_date: datetime.date | None = None
+     ) -> None:
+         # In Lindenberg, format is date and time followed by Message 2 without
+         # non-printable characters.
+         with open(filename) as file:
+             invalid_lines = 0
+             for line in file:
+                 try:
+                     columns = line.split()
+                     if len(columns) != 13:
+                         msg = "Invalid column count"
+                         raise ValueError(msg)  # noqa: TRY301
+                     date = _parse_date(columns[0])
+                     time = _parse_time(columns[1])
+                     visibility = _parse_int(columns[4])
+                     synop = _parse_int(columns[7])
+                     p_rate = _parse_float(columns[10])  # mm/h
+                     p_amount = _parse_float(columns[11])  # mm
+                     s_amount = _parse_int(columns[12])  # mm
+                     self._data["time"].append(datetime.datetime.combine(date, time))
+                     self._data["visibility"].append(visibility)
+                     self._data["synop_WaWa"].append(synop)
+                     self._data["precipitation_rate"].append(p_rate)
+                     self._data["precipitation_amount"].append(p_amount)
+                     self._data["snowfall_amount"].append(s_amount)
+                 except ValueError:
+                     invalid_lines += 1
+                     continue
+             if invalid_lines:
+                 logging.info("Skipped %d lines", invalid_lines)
+         for key in ("visibility", "synop_WaWa", "snowfall_amount"):
+             values = np.array(
+                 [0 if math.isnan(x) else x for x in self._data[key]], dtype=np.int32
+             )
+             mask = np.array([math.isnan(x) for x in self._data[key]])
+             self._data[key] = ma.array(values, mask=mask)
+         self._data["snowfall_amount"] = self._data["snowfall_amount"].astype(np.float32)
+         if expected_date:
+             self._data["time"] = [
+                 d for d in self._data["time"] if d.date() == expected_date
+             ]
+         if not self._data["time"]:
+             raise ValidTimeStampError
+
+     def convert_units(self) -> None:
+         precipitation_rate = self.data["precipitation_rate"][:]
+         self.data["precipitation_rate"].data = (
+             precipitation_rate / 3600 / 1000
+         )  # mm/h -> m/s
+         for key in ("precipitation_amount", "snowfall_amount"):
+             self.data[key].data = self.data[key][:] / 1000  # mm -> m
+
+     def screen_all_masked(self) -> None:
+         is_valid = np.ones_like(self.data["time"][:], dtype=np.bool_)
+         for key in self.data:
+             if key == "time":
+                 continue
+             is_valid &= ma.getmaskarray(self.data[key][:])
+         self.screen_time_indices(~is_valid)
+
+
+ def _parse_date(date: str) -> datetime.date:
+     match = re.fullmatch(r"(?P<day>\d{2})\.(?P<month>\d{2})\.(?P<year>\d{4})", date)
+     if match is None:
+         msg = f"Invalid date: {date}"
+         raise ValueError(msg)
+     return datetime.date(int(match["year"]), int(match["month"]), int(match["day"]))
+
+
+ def _parse_time(time: str) -> datetime.time:
+     match = re.fullmatch(
+         r"(?P<hour>\d{2}):(?P<minute>\d{2})(:(?P<second>\d{2}))?", time
+     )
+     if match is None:
+         msg = f"Invalid time: {time}"
+         raise ValueError(msg)
+     return datetime.time(
+         int(match["hour"]),
+         int(match["minute"]),
+         int(match["second"]) if match["second"] is not None else 0,
+     )
+
+
+ def _parse_int(value: str) -> float:
+     if "/" in value:
+         return math.nan
+     return int(value)
+
+
+ def _parse_float(value: str) -> float:
+     if "/" in value:
+         return math.nan
+     return float(value)
+
+
+ ATTRIBUTES = {
+     "visibility": MetaData(
+         long_name="Meteorological optical range (MOR) visibility",
+         units="m",
+         standard_name="visibility_in_air",
+         dimensions=("time",),
+     ),
+     "precipitation_rate": MetaData(
+         long_name="Precipitation rate",
+         standard_name="lwe_precipitation_rate",
+         units="m s-1",
+         dimensions=("time",),
+     ),
+     "precipitation_amount": MetaData(
+         long_name="Precipitation amount",
+         standard_name="lwe_thickness_of_precipitation_amount",
+         units="m",
+         comment="Cumulated precipitation since 00:00 UTC",
+         dimensions=("time",),
+     ),
+     "snowfall_amount": MetaData(
+         long_name="Snowfall amount",
+         units="m",
+         standard_name="thickness_of_snowfall_amount",
+         comment="Cumulated snow since 00:00 UTC",
+         dimensions=("time",),
+     ),
+ }
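
For reference, a minimal usage sketch of the new FD12P converter added above. The input file name and site metadata are illustrative; the function is imported directly from its module here, although the +5 lines in cloudnetpy/instruments/__init__.py suggest it may also be re-exported from cloudnetpy.instruments:

from cloudnetpy.instruments.fd12p import fd12p2nc

site_meta = {"name": "Lindenberg"}  # "name" is the only required key per the docstring
file_uuid = fd12p2nc(
    "fd12p_message2.txt",  # hypothetical file: 13 whitespace-separated columns per record
    "fd12p.nc",
    site_meta,
    date="2024-05-01",     # optional; records from other UTC dates are dropped
)
# Raises ValidTimeStampError if no valid timestamps remain after screening.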