cloudnetpy 1.80.8__py3-none-any.whl → 1.81.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cloudnetpy/categorize/__init__.py +1 -1
- cloudnetpy/categorize/atmos_utils.py +31 -27
- cloudnetpy/categorize/attenuations/__init__.py +4 -4
- cloudnetpy/categorize/attenuations/liquid_attenuation.py +7 -5
- cloudnetpy/categorize/attenuations/melting_attenuation.py +3 -3
- cloudnetpy/categorize/attenuations/rain_attenuation.py +4 -4
- cloudnetpy/categorize/categorize.py +25 -11
- cloudnetpy/categorize/classify.py +9 -8
- cloudnetpy/categorize/containers.py +13 -10
- cloudnetpy/categorize/disdrometer.py +5 -3
- cloudnetpy/categorize/droplet.py +12 -9
- cloudnetpy/categorize/falling.py +9 -8
- cloudnetpy/categorize/freezing.py +10 -7
- cloudnetpy/categorize/insects.py +18 -17
- cloudnetpy/categorize/lidar.py +7 -3
- cloudnetpy/categorize/melting.py +16 -15
- cloudnetpy/categorize/model.py +17 -10
- cloudnetpy/categorize/mwr.py +5 -3
- cloudnetpy/categorize/radar.py +15 -13
- cloudnetpy/cli.py +10 -8
- cloudnetpy/cloudnetarray.py +8 -7
- cloudnetpy/concat_lib.py +29 -20
- cloudnetpy/datasource.py +26 -21
- cloudnetpy/exceptions.py +12 -10
- cloudnetpy/instruments/basta.py +19 -9
- cloudnetpy/instruments/bowtie.py +18 -11
- cloudnetpy/instruments/ceilo.py +22 -10
- cloudnetpy/instruments/ceilometer.py +33 -34
- cloudnetpy/instruments/cl61d.py +5 -3
- cloudnetpy/instruments/cloudnet_instrument.py +7 -7
- cloudnetpy/instruments/copernicus.py +16 -7
- cloudnetpy/instruments/disdrometer/common.py +5 -4
- cloudnetpy/instruments/disdrometer/parsivel.py +14 -9
- cloudnetpy/instruments/disdrometer/thies.py +11 -7
- cloudnetpy/instruments/fd12p.py +7 -6
- cloudnetpy/instruments/galileo.py +16 -7
- cloudnetpy/instruments/hatpro.py +33 -24
- cloudnetpy/instruments/lufft.py +6 -4
- cloudnetpy/instruments/mira.py +33 -19
- cloudnetpy/instruments/mrr.py +12 -12
- cloudnetpy/instruments/nc_lidar.py +1 -1
- cloudnetpy/instruments/nc_radar.py +8 -8
- cloudnetpy/instruments/pollyxt.py +19 -12
- cloudnetpy/instruments/radiometrics.py +17 -10
- cloudnetpy/instruments/rain_e_h3.py +9 -5
- cloudnetpy/instruments/rpg.py +32 -21
- cloudnetpy/instruments/rpg_reader.py +15 -12
- cloudnetpy/instruments/vaisala.py +32 -24
- cloudnetpy/instruments/weather_station.py +28 -21
- cloudnetpy/model_evaluation/file_handler.py +27 -29
- cloudnetpy/model_evaluation/plotting/plot_tools.py +7 -5
- cloudnetpy/model_evaluation/plotting/plotting.py +41 -32
- cloudnetpy/model_evaluation/products/advance_methods.py +38 -34
- cloudnetpy/model_evaluation/products/grid_methods.py +10 -9
- cloudnetpy/model_evaluation/products/model_products.py +15 -9
- cloudnetpy/model_evaluation/products/observation_products.py +12 -10
- cloudnetpy/model_evaluation/products/product_resampling.py +11 -7
- cloudnetpy/model_evaluation/products/tools.py +18 -14
- cloudnetpy/model_evaluation/statistics/statistical_methods.py +6 -5
- cloudnetpy/model_evaluation/tests/unit/test_plotting.py +18 -25
- cloudnetpy/model_evaluation/utils.py +3 -3
- cloudnetpy/output.py +15 -32
- cloudnetpy/plotting/plotting.py +22 -12
- cloudnetpy/products/classification.py +15 -9
- cloudnetpy/products/der.py +24 -19
- cloudnetpy/products/drizzle.py +21 -13
- cloudnetpy/products/drizzle_error.py +8 -7
- cloudnetpy/products/drizzle_tools.py +27 -23
- cloudnetpy/products/epsilon.py +6 -5
- cloudnetpy/products/ier.py +11 -5
- cloudnetpy/products/iwc.py +18 -9
- cloudnetpy/products/lwc.py +41 -31
- cloudnetpy/products/mwr_tools.py +30 -19
- cloudnetpy/products/product_tools.py +23 -19
- cloudnetpy/utils.py +84 -98
- cloudnetpy/version.py +2 -2
- {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/METADATA +3 -2
- cloudnetpy-1.81.1.dist-info/RECORD +126 -0
- cloudnetpy-1.80.8.dist-info/RECORD +0 -126
- {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/WHEEL +0 -0
- {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/entry_points.txt +0 -0
- {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/licenses/LICENSE +0 -0
- {cloudnetpy-1.80.8.dist-info → cloudnetpy-1.81.1.dist-info}/top_level.txt +0 -0
cloudnetpy/concat_lib.py
CHANGED
@@ -1,15 +1,19 @@
 """Module for concatenating netCDF files."""
 
+import datetime
 import logging
 import shutil
-from collections.abc import Iterable
+from collections.abc import Iterable, Sequence
 from os import PathLike
 from pathlib import Path
+from types import TracebackType
 from typing import Literal
 
 import netCDF4
 import numpy as np
+import numpy.typing as npt
 from numpy import ma
+from typing_extensions import Self
 
 from cloudnetpy import utils
 
@@ -84,13 +88,13 @@ def update_nc(old_file: str, new_file: str) -> int:
 
 def concatenate_files(
     filenames: Iterable[PathLike | str],
-    output_file: str,
+    output_file: str | PathLike,
     concat_dimension: str = "time",
-    variables: list | None = None,
-    new_attributes: dict | None = None,
-    ignore: list | None = None,
+    variables: list[str] | None = None,
+    new_attributes: dict[str, str | int] | None = None,
+    ignore: list[str] | None = None,
     interp_dimension: str = "range",
-) -> list:
+) -> list[Path]:
     """Concatenate netCDF files in one dimension.
 
     Args:
@@ -123,11 +127,11 @@ class _Concat:
 
     def __init__(
         self,
-        filenames: Iterable[
-        output_file: str,
+        filenames: Iterable[str | PathLike],
+        output_file: str | PathLike,
         concat_dimension: str = "time",
         interp_dim: str = "range",
-    ):
+    ) -> None:
         self.filenames = sorted(map(Path, filenames), key=lambda f: f.name)
         self.concat_dimension = concat_dimension
         self.interp_dim = interp_dim
@@ -146,7 +150,7 @@ class _Concat:
         self,
         keep: list | None = None,
         ignore: list | None = None,
-    ) -> list:
+    ) -> list[Path]:
         """Concatenates data arrays."""
         self._write_initial_data(keep, ignore)
         output = [self.first_filename]
@@ -237,7 +241,7 @@ class _Concat:
 
         self.concatenated_file.variables[key][ind0:ind1, ...] = array
 
-    def _init_output_file(self, output_file: str) -> netCDF4.Dataset:
+    def _init_output_file(self, output_file: str | PathLike) -> netCDF4.Dataset:
         data_model: Literal["NETCDF4", "NETCDF4_CLASSIC"] = (
             "NETCDF4" if self.first_file.data_model == "NETCDF4" else "NETCDF4_CLASSIC"
         )
@@ -255,10 +259,15 @@ class _Concat:
         self.first_file.close()
         self.concatenated_file.close()
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
         return self
 
-    def __exit__(
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> None:
         self._close()
 
 
@@ -275,14 +284,14 @@ def _copy_attributes(
 def _find_valid_time_indices(
     nc_old: netCDF4.Dataset,
     nc_new: netCDF4.Dataset,
-) ->
+) -> npt.NDArray:
     return np.where(nc_new.variables["time"][:] > nc_old.variables["time"][-1])[0]
 
 
 def _update_fields(
     nc_old: netCDF4.Dataset,
     nc_new: netCDF4.Dataset,
-    valid_ind:
+    valid_ind: npt.NDArray,
 ) -> None:
     ind0 = len(nc_old.variables["time"])
     idx = [ind0 + x for x in valid_ind]
@@ -310,9 +319,9 @@ def concatenate_text_files(filenames: list, output_filename: str | PathLike) ->
 
 
 def bundle_netcdf_files(
-    files:
-    date:
-    output_file: str,
+    files: Sequence[str | PathLike],
+    date: datetime.date,
+    output_file: str | PathLike,
     concat_dimensions: tuple[str, ...] = ("time", "profile"),
     variables: list | None = None,
 ) -> list:
@@ -330,7 +339,7 @@ def bundle_netcdf_files(
         raise KeyError(msg)
     if len(files) == 1:
         shutil.copy(files[0], output_file)
-        return files
+        return list(files)
     valid_files = []
     for file in files:
         try:
@@ -342,7 +351,7 @@ def bundle_netcdf_files(
             continue
        epoch = utils.get_epoch(time_units)
        for timestamp in time_array:
-            if utils.seconds2date(timestamp, epoch)
+            if utils.seconds2date(timestamp, epoch).date() == date:
                valid_files.append(file)
                break
    concatenate_files(
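A minimal usage sketch of the retyped concatenate_files API above. Only the signature (PathLike output, list[str] filters, list[Path] return) comes from the diff; the file names and variable keys below are illustrative placeholders.

from pathlib import Path

from cloudnetpy.concat_lib import concatenate_files

# Hypothetical daily ceilometer files to merge along the "time" dimension.
daily_files = [Path("chm15k_000000.nc"), Path("chm15k_120000.nc")]
written: list[Path] = concatenate_files(
    daily_files,
    Path("chm15k_day.nc"),           # output_file now accepts str | PathLike
    concat_dimension="time",
    variables=["time", "beta_raw"],  # annotated as list[str] | None
    ignore=["zenith"],               # annotated as list[str] | None
)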
cloudnetpy/datasource.py
CHANGED
@@ -1,12 +1,16 @@
 """Datasource module, containing the :class:`DataSource` class."""
 
+import datetime
 import logging
 import os
 from collections.abc import Callable
-from
+from os import PathLike
+from types import TracebackType
 
 import netCDF4
 import numpy as np
+import numpy.typing as npt
+from typing_extensions import Self
 
 from cloudnetpy import utils
 from cloudnetpy.cloudnetarray import CloudnetArray
@@ -47,17 +51,17 @@ class DataSource:
     data_sparse: dict
     source_type: str
 
-    def __init__(self, full_path:
+    def __init__(self, full_path: PathLike | str, *, radar: bool = False) -> None:
         self.filename = os.path.basename(full_path)
         self.dataset = netCDF4.Dataset(full_path)
         self.source = getattr(self.dataset, "source", "")
-        self.time:
+        self.time: npt.NDArray = self._init_time()
         self.altitude = self._init_altitude()
         self.height = self._init_height()
         self.data: dict = {}
         self._is_radar = radar
 
-    def getvar(self, *args) ->
+    def getvar(self, *args: str) -> npt.NDArray:
         """Returns data array from the source file variables.
 
         Returns just the data (and no attributes) from the original
@@ -81,7 +85,7 @@ class DataSource:
 
     def append_data(
         self,
-        variable: netCDF4.Variable |
+        variable: netCDF4.Variable | npt.NDArray | float,
         key: str,
         name: str | None = None,
         units: str | None = None,
@@ -100,35 +104,31 @@ class DataSource:
         """
         self.data[key] = CloudnetArray(variable, name or key, units, data_type=dtype)
 
-    def get_date(self) ->
+    def get_date(self) -> datetime.date:
         """Returns date components.
 
         Returns:
-
+            date object
 
         Raises:
             RuntimeError: Not found or invalid date.
 
         """
         try:
-            year =
-            month =
-            day =
-            datetime.
-                tzinfo=timezone.utc,
-            )
-
+            year = int(self.dataset.year)
+            month = int(self.dataset.month)
+            day = int(self.dataset.day)
+            return datetime.date(year, month, day)
         except (AttributeError, ValueError) as read_error:
             msg = "Missing or invalid date in global attributes."
             raise RuntimeError(msg) from read_error
-        return [year, month, day]
 
     def close(self) -> None:
         """Closes the open file."""
         self.dataset.close()
 
     @staticmethod
-    def to_m(var: netCDF4.Variable) ->
+    def to_m(var: netCDF4.Variable) -> npt.NDArray:
         """Converts km to m."""
         alt = var[:]
         if var.units == "km":
@@ -139,7 +139,7 @@ class DataSource:
         return alt
 
     @staticmethod
-    def to_km(var: netCDF4.Variable) ->
+    def to_km(var: netCDF4.Variable) -> npt.NDArray:
         """Converts m to km."""
         alt = var[:]
         if var.units == "m":
@@ -149,7 +149,7 @@ class DataSource:
             raise ValueError(msg)
         return alt
 
-    def _init_time(self) ->
+    def _init_time(self) -> npt.NDArray:
         time = self.getvar("time")
         if len(time) == 0:
             msg = "Empty time vector"
@@ -173,7 +173,7 @@ class DataSource:
             )
         return None
 
-    def _init_height(self) ->
+    def _init_height(self) -> npt.NDArray | None:
         """Returns height array above mean sea level (m)."""
         if "height" in self.dataset.variables:
             return self.to_m(self.dataset.variables["height"])
@@ -182,8 +182,13 @@ class DataSource:
             return np.array(range_instrument + self.altitude)
         return None
 
-    def __enter__(self):
+    def __enter__(self) -> Self:
        return self
 
-    def __exit__(
+    def __exit__(
+        self,
+        exc_type: type[BaseException] | None,
+        exc_val: BaseException | None,
+        exc_tb: TracebackType | None,
+    ) -> None:
        self.close()
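A sketch of how the retyped DataSource is used after this change, assuming an arbitrary Cloudnet-style input file ("radar_file.nc" is a placeholder): the context-manager protocol is now fully annotated and get_date() returns a datetime.date instead of a [year, month, day] list.

import datetime

from cloudnetpy.datasource import DataSource

with DataSource("radar_file.nc") as source:        # __enter__ annotated to return Self
    measurement_date: datetime.date = source.get_date()
    time = source.getvar("time")                   # annotated as npt.NDArray
print(measurement_date.isoformat())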
cloudnetpy/exceptions.py
CHANGED
@@ -5,68 +5,70 @@ class CloudnetException(Exception):
 class InconsistentDataError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(self, msg: str):
+    def __init__(self, msg: str) -> None:
         super().__init__(msg)
 
 
 class DisdrometerDataError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(self, msg: str):
+    def __init__(self, msg: str) -> None:
         super().__init__(msg)
 
 
 class RadarDataError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(self, msg: str):
+    def __init__(self, msg: str) -> None:
         super().__init__(msg)
 
 
 class LidarDataError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(self, msg: str):
+    def __init__(self, msg: str) -> None:
         super().__init__(msg)
 
 
 class PlottingError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(self, msg: str):
+    def __init__(self, msg: str) -> None:
         super().__init__(msg)
 
 
 class ModelDataError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(
+    def __init__(
+        self, msg: str = "Invalid model file: not enough proper profiles"
+    ) -> None:
         super().__init__(msg)
 
 
 class ValidTimeStampError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(self, msg: str = "No valid timestamps found"):
+    def __init__(self, msg: str = "No valid timestamps found") -> None:
         super().__init__(msg)
 
 
 class MissingInputFileError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(self, msg: str = "Missing required input files"):
+    def __init__(self, msg: str = "Missing required input files") -> None:
         super().__init__(msg)
 
 
 class HatproDataError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(self, msg: str = "Invalid HATPRO file"):
+    def __init__(self, msg: str = "Invalid HATPRO file") -> None:
         super().__init__(msg)
 
 
 class InvalidSourceFileError(CloudnetException):
     """Internal exception class."""
 
-    def __init__(self, msg: str = "Invalid source file"):
+    def __init__(self, msg: str = "Invalid source file") -> None:
         super().__init__(msg)
cloudnetpy/instruments/basta.py
CHANGED
@@ -1,5 +1,9 @@
 """Module for reading / converting BASTA radar data."""
 
+import datetime
+from os import PathLike
+from uuid import UUID
+
 import numpy as np
 
 from cloudnetpy import output
@@ -7,15 +11,16 @@ from cloudnetpy.exceptions import ValidTimeStampError
 from cloudnetpy.instruments import instruments
 from cloudnetpy.instruments.nc_radar import NcRadar
 from cloudnetpy.metadata import MetaData
+from cloudnetpy.utils import get_uuid
 
 
 def basta2nc(
-    basta_file: str,
-    output_file: str,
+    basta_file: str | PathLike,
+    output_file: str | PathLike,
     site_meta: dict,
-    uuid: str | None = None,
-    date: str | None = None,
-) ->
+    uuid: str | UUID | None = None,
+    date: str | datetime.date | None = None,
+) -> UUID:
     """Converts BASTA cloud radar data into Cloudnet Level 1b netCDF file.
 
     This function converts daily BASTA file into a much smaller file that
@@ -42,6 +47,10 @@ def basta2nc(
         >>> basta2nc('basta_file.nc', 'radar.nc', site_meta)
 
     """
+    if isinstance(date, str):
+        date = datetime.date.fromisoformat(date)
+    uuid = get_uuid(uuid)
+
     keymap = {
         "reflectivity": "Zh",
         "velocity": "v",
@@ -65,7 +74,8 @@ def basta2nc(
        basta.test_if_all_masked()
    attributes = output.add_time_attribute(ATTRIBUTES, basta.date)
    output.update_attributes(basta.data, attributes)
-
+    output.save_level1b(basta, output_file, uuid)
+    return uuid
 
 
 class Basta(NcRadar):
@@ -77,9 +87,9 @@ class Basta(NcRadar):
 
     """
 
-    def __init__(self, full_path: str, site_meta: dict):
+    def __init__(self, full_path: str | PathLike, site_meta: dict) -> None:
         super().__init__(full_path, site_meta)
-        self.date
+        self.date = self.get_date()
         self.instrument = instruments.BASTA
 
     def screen_data(self, keymap: dict) -> None:
@@ -89,7 +99,7 @@ class Basta(NcRadar):
             if key in self.data and self.data[key].data.ndim == mask.ndim:
                 self.data[key].mask_indices(np.where(mask != 1))
 
-    def validate_date(self, expected_date:
+    def validate_date(self, expected_date: datetime.date) -> None:
         """Validates expected data."""
         date_units = self.dataset.variables["time"].units
         date = date_units.split()[2]
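A usage sketch for the updated basta2nc signature. The date handling and the uuid.UUID return type follow the diff above; the site_meta keys, values and file names are placeholders.

import datetime

from cloudnetpy.instruments.basta import basta2nc

site_meta = {"name": "Palaiseau", "altitude": 158}  # placeholder metadata
file_uuid = basta2nc(
    "basta_file.nc",
    "radar.nc",
    site_meta,
    date=datetime.date(2024, 1, 15),  # an ISO string like "2024-01-15" also works
)
print(file_uuid)  # uuid.UUID per the new return annotation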
cloudnetpy/instruments/bowtie.py
CHANGED
@@ -1,4 +1,6 @@
+import datetime
 from os import PathLike
+from uuid import UUID
 
 import numpy as np
 from numpy import ma
@@ -10,16 +12,16 @@ from cloudnetpy.instruments.instruments import FMCW94
 from cloudnetpy.instruments.nc_radar import NcRadar
 from cloudnetpy.instruments.rpg import RPG_ATTRIBUTES
 from cloudnetpy.metadata import MetaData
-from cloudnetpy.utils import bit_field_definition
+from cloudnetpy.utils import bit_field_definition, get_uuid
 
 
 def bowtie2nc(
     bowtie_file: str | PathLike,
-    output_file: str,
+    output_file: str | PathLike,
     site_meta: dict,
-    uuid: str | None = None,
-    date: str | None = None,
-) ->
+    uuid: str | UUID | None = None,
+    date: str | datetime.date | None = None,
+) -> UUID:
     """Converts data from 'BOW-TIE' campaign cloud radar on RV-Meteor into
     Cloudnet Level 1b netCDF file.
 
@@ -55,6 +57,10 @@ def bowtie2nc(
         "range_offsets": "chirp_start_indices",
     }
 
+    if isinstance(date, str):
+        date = datetime.date.fromisoformat(date)
+    uuid = get_uuid(uuid)
+
     with Bowtie(bowtie_file, site_meta) as bowtie:
         bowtie.init_data(keymap)
         bowtie.add_time_and_range()
@@ -69,16 +75,17 @@ def bowtie2nc(
        bowtie.add_correction_bits()
    attributes = output.add_time_attribute(ATTRIBUTES, bowtie.date)
    output.update_attributes(bowtie.data, attributes)
-
+    output.save_level1b(bowtie, output_file, uuid)
+    return uuid
 
 
 class Bowtie(NcRadar):
-    def __init__(self, full_path: str | PathLike, site_meta: dict):
+    def __init__(self, full_path: str | PathLike, site_meta: dict) -> None:
         super().__init__(full_path, site_meta)
         self.instrument = FMCW94
         self.date = self.get_date()
 
-    def convert_units(self):
+    def convert_units(self) -> None:
         self.data["lwp"].data *= G_TO_KG
         self.data["rainfall_rate"].data *= MM_H_TO_M_S
         self.data["relative_humidity"].data /= 100
@@ -88,13 +95,13 @@ class Bowtie(NcRadar):
         self.data["chirp_start_indices"].data = np.array(array, dtype=np.int32)
         self.data["chirp_start_indices"].data_type = "int32"
 
-    def add_correction_bits(self):
+    def add_correction_bits(self) -> None:
         bits = ma.ones(self.data["v"].data.shape, dtype=np.uint32)
         bits.mask = self.data["v"].data.mask
         self.append_data(bits, "correction_bits")
 
-    def check_date(self, date:
-        if
+    def check_date(self, date: datetime.date) -> None:
+        if self.date != date:
             raise ValidTimeStampError
 
 
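bowtie2nc follows the same pattern. A short sketch, again with placeholder metadata and file names, showing that the processing date can now be given either as an ISO string or as a datetime.date:

import datetime

from cloudnetpy.instruments.bowtie import bowtie2nc

site_meta = {"name": "RV Meteor", "altitude": 0}  # placeholder metadata
# Both calls select the same day; the string form is parsed with date.fromisoformat().
uuid_a = bowtie2nc("bowtie_file.nc", "radar.nc", site_meta, date="2024-08-01")
uuid_b = bowtie2nc("bowtie_file.nc", "radar.nc", site_meta, date=datetime.date(2024, 8, 1))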
cloudnetpy/instruments/ceilo.py
CHANGED
@@ -1,7 +1,11 @@
 """Module for reading and processing Vaisala / Lufft ceilometers."""
 
+import datetime
 import logging
+import os.path
 from itertools import islice
+from os import PathLike
+from uuid import UUID
 
 import netCDF4
 import numpy as np
@@ -13,15 +17,16 @@ from cloudnetpy.instruments.cl61d import Cl61d
 from cloudnetpy.instruments.lufft import LufftCeilo
 from cloudnetpy.instruments.vaisala import ClCeilo, Cs135, Ct25k
 from cloudnetpy.metadata import COMMON_ATTRIBUTES, MetaData
+from cloudnetpy.utils import get_uuid
 
 
 def ceilo2nc(
-    full_path: str,
-    output_file: str,
+    full_path: str | PathLike,
+    output_file: str | PathLike,
     site_meta: dict,
-    uuid: str | None = None,
-    date: str | None = None,
-) ->
+    uuid: str | UUID | None = None,
+    date: str | datetime.date | None = None,
+) -> UUID:
     """Converts Vaisala, Lufft and Campbell Scientific ceilometer data into
     Cloudnet Level 1b netCDF file.
 
@@ -70,6 +75,9 @@ def ceilo2nc(
         >>> ceilo2nc('chm15k_raw.nc', 'chm15k.nc', site_meta)
 
     """
+    if isinstance(date, str):
+        date = datetime.date.fromisoformat(date)
+    uuid = get_uuid(uuid)
     snr_limit = 5
     ceilo_obj = _initialize_ceilo(full_path, site_meta, date)
     calibration_factor = site_meta.get("calibration_factor")
@@ -129,7 +137,8 @@ def ceilo2nc(
    output.update_attributes(ceilo_obj.data, attributes)
    for key in ("beta", "beta_smooth"):
        ceilo_obj.add_snr_info(key, snr_limit)
-
+    output.save_level1b(ceilo_obj, output_file, uuid)
+    return uuid
 
 
 def _get_n_negatives(ceilo_obj: ClCeilo | Ct25k | LufftCeilo | Cl61d | Cs135) -> int:
@@ -145,9 +154,9 @@ def _get_n_negatives(ceilo_obj: ClCeilo | Ct25k | LufftCeilo | Cl61d | Cs135) ->
 
 
 def _initialize_ceilo(
-    full_path: str,
+    full_path: str | PathLike,
     site_meta: dict,
-    date:
+    date: datetime.date | None = None,
 ) -> ClCeilo | Ct25k | LufftCeilo | Cl61d | Cs135:
     if "model" in site_meta:
         if site_meta["model"] not in (
@@ -177,13 +186,16 @@ def _initialize_ceilo(
         return LufftCeilo(full_path, site_meta, date)
 
 
-def _find_ceilo_model(full_path: str) -> str:
+def _find_ceilo_model(full_path: str | PathLike) -> str:
     model = None
     try:
         with netCDF4.Dataset(full_path) as nc:
             title = nc.title
             for identifier in ["cl61d", "cl61-d"]:
-                if
+                if (
+                    identifier in title.lower()
+                    or identifier in os.path.basename(full_path).lower()
+                ):
                     model = "cl61d"
     if model is None:
         model = "chm15k"