doppy 0.3.6__tar.gz → 0.4.0__tar.gz
This diff shows the content of publicly available package versions as released to their public registries; it is provided for informational purposes only and reflects the changes between those versions.
Potentially problematic release: this version of doppy has been flagged as possibly problematic.
- {doppy-0.3.6 → doppy-0.4.0}/Cargo.lock +2 -2
- {doppy-0.3.6 → doppy-0.4.0}/Cargo.toml +1 -1
- {doppy-0.3.6 → doppy-0.4.0}/PKG-INFO +4 -2
- {doppy-0.3.6 → doppy-0.4.0}/pyproject.toml +15 -4
- doppy-0.4.0/src/doppy/defaults.py +18 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/product/stare.py +96 -14
- doppy-0.4.0/src/doppy/raw/__init__.py +7 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/raw/halo_hpl.py +6 -2
- doppy-0.4.0/src/doppy/raw/windcube.py +476 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/raw/wls70.py +7 -2
- doppy-0.4.0/src/doppy/utils.py +24 -0
- doppy-0.3.6/src/doppy/defaults.py +0 -2
- doppy-0.3.6/src/doppy/raw/__init__.py +0 -7
- doppy-0.3.6/src/doppy/raw/windcube.py +0 -259
- doppy-0.3.6/src/doppy/utils.py +0 -9
- {doppy-0.3.6 → doppy-0.4.0}/LICENSE +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/README.md +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doppy_rs/Cargo.toml +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doppy_rs/src/lib.rs +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doppy_rs/src/raw/halo_hpl.rs +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doppy_rs/src/raw/wls70.rs +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doppy_rs/src/raw.rs +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doprs/.gitignore +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doprs/Cargo.toml +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doprs/src/lib.rs +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doprs/src/raw/error.rs +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doprs/src/raw/halo_hpl.rs +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doprs/src/raw/wls70.rs +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/crates/doprs/src/raw.rs +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/__init__.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/__main__.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/bench.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/data/__init__.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/data/api.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/data/cache.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/data/exceptions.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/exceptions.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/netcdf.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/options.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/product/__init__.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/product/stare_depol.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/product/wind.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/py.typed +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/raw/halo_bg.py +0 -0
- {doppy-0.3.6 → doppy-0.4.0}/src/doppy/raw/halo_sys_params.py +0 -0
{doppy-0.3.6 → doppy-0.4.0}/Cargo.lock:

@@ -106,7 +106,7 @@ checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
 
 [[package]]
 name = "doppy_rs"
-version = "0.3.6"
+version = "0.4.0"
 dependencies = [
  "doprs",
  "numpy",
@@ -115,7 +115,7 @@ dependencies = [
 
 [[package]]
 name = "doprs"
-version = "0.3.6"
+version = "0.4.0"
 dependencies = [
  "chrono",
  "rayon",
{doppy-0.3.6 → doppy-0.4.0}/PKG-INFO:

@@ -1,11 +1,12 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: doppy
-Version: 0.3.6
+Version: 0.4.0
 Classifier: Development Status :: 4 - Beta
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
 Classifier: License :: OSI Approved :: MIT License
 Classifier: Intended Audience :: Science/Research
 Classifier: Operating System :: OS Independent
@@ -25,6 +26,7 @@ Requires-Dist: py-spy ; extra == 'dev'
 Requires-Dist: maturin ==1.4 ; extra == 'dev'
 Requires-Dist: release-version ; extra == 'dev'
 Requires-Dist: pre-commit ; extra == 'dev'
+Requires-Dist: xarray[io] ; extra == 'dev'
 Provides-Extra: dev
 License-File: LICENSE
 License-File: LICENSE
{doppy-0.3.6 → doppy-0.4.0}/pyproject.toml:

@@ -6,16 +6,17 @@ build-backend = "maturin"
 name = "doppy"
 summary = "Doppler lidar processing"
 dynamic = ["version"]
-authors = [{name = "Niko Leskinen", email = "niko.leskinen@fmi.fi"}]
+authors = [{ name = "Niko Leskinen", email = "niko.leskinen@fmi.fi" }]
 requires-python = ">=3.10"
 readme = "README.md"
-license = {file = "LICENSE"}
+license = { file = "LICENSE" }
 classifiers = [
   "Development Status :: 4 - Beta",
   "Programming Language :: Python :: 3",
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
   "Programming Language :: Python :: 3.12",
+  "Programming Language :: Python :: 3.13",
   "License :: OSI Approved :: MIT License",
   "Intended Audience :: Science/Research",
   "Operating System :: OS Independent"
@@ -28,11 +29,21 @@ dependencies = [
   "typer",
   "matplotlib",
   "scikit-learn",
-  "scipy"
+  "scipy",
 ]
 
 [project.optional-dependencies]
-dev = [
+dev = [
+  "mypy",
+  "ruff",
+  "pytest",
+  "types-requests",
+  "py-spy",
+  "maturin==1.4",
+  "release-version",
+  "pre-commit",
+  "xarray[io]",
+]
 
 [project.scripts]
 doppy = "doppy.__main__:app"
doppy-0.4.0/src/doppy/defaults.py (new file):

@@ -0,0 +1,18 @@
+DEFAULT_BEAM_ENERGY = 1e-5
+DEFAULT_EFFECTIVE_DIAMETER = 25e-3
+
+
+class Halo:
+    wavelength = 1.565e-6  # [m]
+    receiver_bandwidth = 50e6  # [Hz]
+    beam_energy = DEFAULT_BEAM_ENERGY
+    effective_diameter = DEFAULT_EFFECTIVE_DIAMETER
+
+
+class WindCube:
+    # https://doi.org/10.5194/essd-13-3539-2021
+    wavelength = 1.54e-6  # [m]
+    receiver_bandwidth = 55e6  # [Hz]
+    beam_energy = DEFAULT_BEAM_ENERGY
+    effective_diameter = 50e-3  # [m]
+    focus = 1e3  # [m]
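These defaults map directly onto the symbols used in the backscatter calculation in stare.py below (E = beam_energy, B = receiver_bandwidth, D = effective_diameter). A minimal sketch of reading them, assuming the module is importable as doppy.defaults (which the src layout suggests):

    from doppy import defaults

    # The constants are plain class attributes; nothing is instantiated.
    print(defaults.Halo.wavelength)              # 1.565e-06 [m]
    print(defaults.WindCube.receiver_bandwidth)  # 55e6 [Hz]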
{doppy-0.3.6 → doppy-0.4.0}/src/doppy/product/stare.py:

@@ -9,7 +9,7 @@ from typing import Sequence, Tuple, TypeAlias
 import numpy as np
 import numpy.typing as npt
 import scipy
-from scipy.ndimage import uniform_filter
+from scipy.ndimage import median_filter, uniform_filter
 from sklearn.cluster import KMeans
 
 import doppy
@@ -51,6 +51,46 @@ class Stare:
             )
         raise TypeError
 
+    @classmethod
+    def mask_nan(cls, x: npt.NDArray[np.float64]) -> npt.NDArray[np.bool_]:
+        return np.isnan(x)
+
+    @classmethod
+    def from_windcube_data(
+        cls,
+        data: Sequence[str]
+        | Sequence[Path]
+        | Sequence[bytes]
+        | Sequence[BufferedIOBase],
+    ) -> Stare:
+        raws = doppy.raw.WindCubeFixed.from_srcs(data)
+        raw = (
+            doppy.raw.WindCubeFixed.merge(raws).sorted_by_time().nan_profiles_removed()
+        )
+
+        wavelength = defaults.WindCube.wavelength
+        beta = _compute_beta(
+            snr=raw.cnr,
+            radial_distance=raw.radial_distance,
+            wavelength=wavelength,
+            beam_energy=defaults.WindCube.beam_energy,
+            receiver_bandwidth=defaults.WindCube.receiver_bandwidth,
+            focus=defaults.WindCube.focus,
+            effective_diameter=defaults.WindCube.effective_diameter,
+        )
+
+        mask = _compute_noise_mask_for_windcube(raw)
+        return cls(
+            time=raw.time,
+            radial_distance=raw.radial_distance,
+            elevation=raw.elevation,
+            beta=beta,
+            radial_velocity=raw.radial_velocity,
+            mask=mask,
+            wavelength=wavelength,
+            system_id=raw.system_id,
+        )
+
     @classmethod
     def from_halo_data(
         cls,
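The new classmethod gives a one-call path from raw WindCube "fixed" scan files to a Stare product. A hypothetical usage sketch (the file name is a placeholder, not a real file):

    from doppy.product.stare import Stare

    # Any sequence of paths, byte strings or readable buffers is accepted.
    stare = Stare.from_windcube_data(["WCS_fixed_2024-01-01.nc"])
    print(stare.time.shape, stare.beta.shape, stare.mask.dtype)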
@@ -95,16 +135,21 @@ class Stare:
             raw, intensity_bg_corrected
         )
         wavelength = defaults.Halo.wavelength
+
         beta = _compute_beta(
-            intensity_noise_bias_corrected,
-            raw.radial_distance,
-
-
+            snr=intensity_noise_bias_corrected - 1,
+            radial_distance=raw.radial_distance,
+            wavelength=wavelength,
+            beam_energy=defaults.Halo.beam_energy,
+            receiver_bandwidth=defaults.Halo.receiver_bandwidth,
+            focus=raw.header.focus_range,
+            effective_diameter=defaults.Halo.effective_diameter,
         )
+
         mask = _compute_noise_mask(
             intensity_noise_bias_corrected, raw.radial_velocity, raw.radial_distance
         )
-        return
+        return cls(
             time=raw.time,
             radial_distance=raw.radial_distance,
             elevation=raw.elevation,
@@ -177,6 +222,36 @@ class Stare:
         nc.add_attribute("doppy_version", doppy.__version__)
 
 
+def _compute_noise_mask_for_windcube(
+    raw: doppy.raw.WindCubeFixed,
+) -> npt.NDArray[np.bool_]:
+    if np.any(np.isnan(raw.cnr)) or np.any(np.isnan(raw.radial_velocity)):
+        raise ValueError("Unexpected nans in crn or radial_velocity")
+
+    mask = _mask_with_cnr_norm_dist(raw.cnr) | (np.abs(raw.radial_velocity) > 30)
+
+    cnr = raw.cnr.copy()
+    cnr[mask] = np.finfo(float).eps
+    cnr_filt = median_filter(cnr, size=(3, 3))
+    rel_diff = np.abs(cnr - cnr_filt) / np.abs(cnr)
+    diff_mask = rel_diff > 0.25
+
+    mask = mask | diff_mask
+
+    return np.array(mask, dtype=np.bool_)
+
+
+def _mask_with_cnr_norm_dist(cnr: npt.NDArray[np.float64]) -> npt.NDArray[np.bool_]:
+    th_trunc = -5.5
+    std_factor = 2
+    log_cnr = np.log(cnr)
+    log_cnr_trunc = log_cnr[log_cnr < th_trunc]
+    th_trunc_fit = np.percentile(log_cnr_trunc, 90)
+    log_cnr_for_fit = log_cnr_trunc[log_cnr_trunc < th_trunc_fit]
+    mean, std = scipy.stats.norm.fit(log_cnr_for_fit)
+    return np.array(np.log(cnr) < (mean + std_factor * std), dtype=np.bool_)
+
+
 def _compute_noise_mask(
     intensity: npt.NDArray[np.float64],
     radial_velocity: npt.NDArray[np.float64],
@@ -197,14 +272,19 @@ def _compute_noise_mask(
 
 
 def _compute_beta(
-
+    snr: npt.NDArray[np.float64],
     radial_distance: npt.NDArray[np.float64],
-    focus: float,
     wavelength: float,
+    beam_energy: float,
+    receiver_bandwidth: float,
+    focus: float,
+    effective_diameter: float,
 ) -> npt.NDArray[np.float64]:
     """
     Parameters
     ----------
+    snr
+        for halo: intensity - 1
     radial_distance
         distance from the instrument
     focus
|
|
|
236
316
|
doi: https://doi.org/10.5194/amt-13-2849-2020
|
|
237
317
|
"""
|
|
238
318
|
|
|
239
|
-
snr = intensity - 1
|
|
240
319
|
h = scipy.constants.Planck
|
|
241
320
|
c = scipy.constants.speed_of_light
|
|
242
321
|
eta = 1
|
|
243
|
-
E =
|
|
244
|
-
B =
|
|
322
|
+
E = beam_energy
|
|
323
|
+
B = receiver_bandwidth
|
|
245
324
|
nu = c / wavelength
|
|
246
|
-
A_e = _compute_effective_receiver_energy(
|
|
325
|
+
A_e = _compute_effective_receiver_energy(
|
|
326
|
+
radial_distance, wavelength, focus, effective_diameter
|
|
327
|
+
)
|
|
247
328
|
beta = 2 * h * nu * B * radial_distance**2 * snr / (eta * c * E * A_e)
|
|
248
329
|
return np.array(beta, dtype=np.float64)
|
|
249
330
|
|
|
250
331
|
|
|
251
332
|
def _compute_effective_receiver_energy(
|
|
252
333
|
radial_distance: npt.NDArray[np.float64],
|
|
253
|
-
focus: float,
|
|
254
334
|
wavelength: float,
|
|
335
|
+
focus: float,
|
|
336
|
+
effective_diameter: float,
|
|
255
337
|
) -> npt.NDArray[np.float64]:
|
|
256
338
|
"""
|
|
257
339
|
NOTE
|
|
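The beta formula itself is unchanged by this refactor; only the instrument constants become explicit parameters. A self-contained sketch of the computation with made-up placeholder numbers (none of the values below come from doppy; A_e stands in for the result of _compute_effective_receiver_energy):

    import numpy as np
    import scipy.constants

    snr = np.array([2e-3, 8e-4])                # for HALO data: intensity - 1; for WindCube: cnr
    radial_distance = np.array([105.0, 135.0])  # [m]
    wavelength = 1.565e-6                       # [m]
    E = 1e-5                                    # beam_energy [J]
    B = 50e6                                    # receiver_bandwidth [Hz]
    A_e = np.array([1.0e-4, 1.5e-4])            # placeholder effective receiver area [m2]
    eta = 1

    h = scipy.constants.Planck
    c = scipy.constants.speed_of_light
    nu = c / wavelength
    beta = 2 * h * nu * B * radial_distance**2 * snr / (eta * c * E * A_e)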
@@ -268,7 +350,7 @@ def _compute_effective_receiver_energy(
     wavelength
         laser wavelength
     """
-    D =
+    D = effective_diameter
     return np.array(
         np.pi
         * D**2
doppy-0.4.0/src/doppy/raw/__init__.py (new file):

@@ -0,0 +1,7 @@
+from .halo_bg import HaloBg
+from .halo_hpl import HaloHpl
+from .halo_sys_params import HaloSysParams
+from .windcube import WindCube, WindCubeFixed
+from .wls70 import Wls70
+
+__all__ = ["HaloHpl", "HaloBg", "HaloSysParams", "WindCube", "WindCubeFixed", "Wls70"]
{doppy-0.3.6 → doppy-0.4.0}/src/doppy/raw/halo_hpl.py:

@@ -4,7 +4,7 @@ import functools
 import io
 import re
 from dataclasses import dataclass
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 from io import BufferedIOBase
 from os.path import commonprefix
 from pathlib import Path
@@ -309,7 +309,11 @@ def _raw_tuple2halo_hpl(
         resolution=float(header_dict["resolution"]),
         scan_type=str(header_dict["scan_type"]),
         focus_range=int(header_dict["focus_range"]),
-        start_time=datetime64(
+        start_time=datetime64(
+            datetime.fromtimestamp(header_dict["start_time"], timezone.utc).replace(
+                tzinfo=None
+            )
+        ),
         system_id=str(header_dict["system_id"]),
         instrument_spectral_width=float(header_dict["instrument_spectral_width"])
         if header_dict["instrument_spectral_width"] is not None
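This change, like the wls70.py change further below, switches to an explicitly timezone-aware conversion. datetime.fromtimestamp(ts, timezone.utc).replace(tzinfo=None) yields the same naive UTC datetime that datetime.utcfromtimestamp(ts) used to, and the latter is deprecated since Python 3.12, which matters now that the package declares Python 3.13 support. A small self-contained check (the timestamp is an arbitrary placeholder):

    from datetime import datetime, timezone

    ts = 1_700_000_000.0  # placeholder epoch seconds
    new_style = datetime.fromtimestamp(ts, timezone.utc).replace(tzinfo=None)
    old_style = datetime.utcfromtimestamp(ts)  # DeprecationWarning on Python >= 3.12
    assert new_style == old_style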
doppy-0.4.0/src/doppy/raw/windcube.py (new file):

@@ -0,0 +1,476 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from io import BufferedIOBase
+from pathlib import Path
+from typing import Sequence
+
+import numpy as np
+import numpy.typing as npt
+from netCDF4 import Dataset, Variable, num2date
+from numpy import datetime64
+
+from doppy.utils import merge_all_equal
+
+
+@dataclass
+class WindCubeFixed:
+    time: npt.NDArray[datetime64]  # dim: (time, )
+    radial_distance: npt.NDArray[np.float64]  # dim: (radial_distance,)
+    azimuth: npt.NDArray[np.float64]  # dim: (time, )
+    elevation: npt.NDArray[np.float64]  # dim: (time, )
+    cnr: npt.NDArray[np.float64]  # dim: (time, radial_distance)
+    relative_beta: npt.NDArray[np.float64]  # dim: (time, radial_distance)
+    radial_velocity: npt.NDArray[np.float64]  # dim: (time, radial_distance)
+    doppler_spectrum_width: npt.NDArray[np.float64]  # dim: (time, radial_distance)
+    radial_velocity_confidence: npt.NDArray[np.float64]  # dim: (time, radial_distance)
+    ray_accumulation_time: np.float64  # dim: ()
+    system_id: str
+
+    @classmethod
+    def from_srcs(
+        cls,
+        data: Sequence[str]
+        | Sequence[Path]
+        | Sequence[bytes]
+        | Sequence[BufferedIOBase],
+    ) -> list[WindCubeFixed]:
+        return [WindCubeFixed.from_fixed_src(src) for src in data]
+
+    @classmethod
+    def from_fixed_src(cls, data: str | Path | bytes | BufferedIOBase) -> WindCubeFixed:
+        data_bytes = _src_to_bytes(data)
+        nc = Dataset("inmemory.nc", "r", memory=data_bytes)
+        return _from_fixed_src(nc)
+
+    @classmethod
+    def merge(cls, raws: list[WindCubeFixed]) -> WindCubeFixed:
+        return WindCubeFixed(
+            time=np.concatenate([r.time for r in raws]),
+            radial_distance=_merge_radial_distance_for_fixed(
+                [r.radial_distance for r in raws]
+            ),
+            azimuth=np.concatenate([r.azimuth for r in raws]),
+            elevation=np.concatenate([r.elevation for r in raws]),
+            radial_velocity=np.concatenate([r.radial_velocity for r in raws]),
+            radial_velocity_confidence=np.concatenate(
+                [r.radial_velocity_confidence for r in raws]
+            ),
+            cnr=np.concatenate([r.cnr for r in raws]),
+            relative_beta=np.concatenate([r.relative_beta for r in raws]),
+            doppler_spectrum_width=np.concatenate(
+                [r.doppler_spectrum_width for r in raws]
+            ),
+            ray_accumulation_time=merge_all_equal(
+                "ray_accumulation_time", [r.ray_accumulation_time for r in raws]
+            ),
+            system_id=merge_all_equal("system_id", [r.system_id for r in raws]),
+        )
+
+    def __getitem__(
+        self,
+        index: int
+        | slice
+        | list[int]
+        | npt.NDArray[np.int64]
+        | npt.NDArray[np.bool_]
+        | tuple[slice, slice],
+    ) -> WindCubeFixed:
+        if isinstance(index, (int, slice, list, np.ndarray)):
+            return WindCubeFixed(
+                time=self.time[index],
+                radial_distance=self.radial_distance,
+                azimuth=self.azimuth[index],
+                elevation=self.elevation[index],
+                radial_velocity=self.radial_velocity[index],
+                radial_velocity_confidence=self.radial_velocity_confidence[index],
+                cnr=self.cnr[index],
+                relative_beta=self.relative_beta[index],
+                doppler_spectrum_width=self.doppler_spectrum_width[index],
+                ray_accumulation_time=self.ray_accumulation_time,
+                system_id=self.system_id,
+            )
+        raise TypeError
+
+    def sorted_by_time(self) -> WindCubeFixed:
+        sort_indices = np.argsort(self.time)
+        return self[sort_indices]
+
+    def nan_profiles_removed(self) -> WindCubeFixed:
+        return self[~np.all(np.isnan(self.cnr), axis=1)]
+
+
+@dataclass
+class WindCube:
+    time: npt.NDArray[datetime64]  # dim: (time, )
+    radial_distance: npt.NDArray[np.float64]  # dim: (time, radial_distance)
+    height: npt.NDArray[np.float64]  # dim: (time,radial_distance)
+    azimuth: npt.NDArray[np.float64]  # dim: (time, )
+    elevation: npt.NDArray[np.float64]  # dim: (time, )
+    cnr: npt.NDArray[np.float64]  # dim: (time, radial_distance)
+    radial_velocity: npt.NDArray[np.float64]  # dim: (time, radial_distance)
+    radial_velocity_confidence: npt.NDArray[np.float64]  # dim: (time, radial_distance)
+    scan_index: npt.NDArray[np.int64]
+    system_id: str
+
+    @classmethod
+    def from_vad_or_dbs_srcs(
+        cls,
+        data: Sequence[str]
+        | Sequence[Path]
+        | Sequence[bytes]
+        | Sequence[BufferedIOBase],
+    ) -> list[WindCube]:
+        return [WindCube.from_vad_or_dbs_src(src) for src in data]
+
+    @classmethod
+    def from_vad_or_dbs_src(cls, data: str | Path | bytes | BufferedIOBase) -> WindCube:
+        data_bytes = _src_to_bytes(data)
+        nc = Dataset("inmemory.nc", "r", memory=data_bytes)
+        return _from_vad_or_dbs_src(nc)
+
+    @classmethod
+    def merge(cls, raws: list[WindCube]) -> WindCube:
+        return WindCube(
+            scan_index=_merge_scan_index([r.scan_index for r in raws]),
+            time=np.concatenate([r.time for r in raws]),
+            height=np.concatenate([r.height for r in raws]),
+            radial_distance=np.concatenate([r.radial_distance for r in raws]),
+            azimuth=np.concatenate([r.azimuth for r in raws]),
+            elevation=np.concatenate([r.elevation for r in raws]),
+            radial_velocity=np.concatenate([r.radial_velocity for r in raws]),
+            radial_velocity_confidence=np.concatenate(
+                [r.radial_velocity_confidence for r in raws]
+            ),
+            cnr=np.concatenate([r.cnr for r in raws]),
+            system_id=merge_all_equal("system_id", [r.system_id for r in raws]),
+        )
+
+    def __getitem__(
+        self,
+        index: int
+        | slice
+        | list[int]
+        | npt.NDArray[np.int64]
+        | npt.NDArray[np.bool_]
+        | tuple[slice, slice],
+    ) -> WindCube:
+        if isinstance(index, (int, slice, list, np.ndarray)):
+            return WindCube(
+                time=self.time[index],
+                radial_distance=self.radial_distance[index],
+                height=self.height[index],
+                azimuth=self.azimuth[index],
+                elevation=self.elevation[index],
+                radial_velocity=self.radial_velocity[index],
+                radial_velocity_confidence=self.radial_velocity_confidence[index],
+                cnr=self.cnr[index],
+                scan_index=self.scan_index[index],
+                system_id=self.system_id,
+            )
+        raise TypeError
+
+    def sorted_by_time(self) -> WindCube:
+        sort_indices = np.argsort(self.time)
+        return self[sort_indices]
+
+    def non_strictly_increasing_timesteps_removed(self) -> WindCube:
+        if len(self.time) == 0:
+            return self
+        mask = np.ones_like(self.time, dtype=np.bool_)
+        latest_time = self.time[0]
+        for i, t in enumerate(self.time[1:], start=1):
+            if t <= latest_time:
+                mask[i] = False
+            else:
+                latest_time = t
+        return self[mask]
+
+    def reindex_scan_indices(self) -> WindCube:
+        new_indices = np.zeros_like(self.scan_index)
+        indexed = set()
+        j = 0
+        for i in self.scan_index:
+            if i in indexed:
+                continue
+            new_indices[i == self.scan_index] = j
+            indexed.add(i)
+            j += 1
+        self.scan_index = new_indices
+        return self
+
+
+def _merge_scan_index(index_list: list[npt.NDArray[np.int64]]) -> npt.NDArray[np.int64]:
+    if len(index_list) == 0:
+        raise ValueError("cannot merge empty list")
+
+    new_index_list = []
+    current_max = index_list[0].max()
+    new_index_list.append(index_list[0])
+
+    for index_arr in index_list[1:]:
+        new_arr = index_arr + current_max + 1
+        new_index_list.append(new_arr)
+        current_max += index_arr.max() + 1
+    return np.concatenate(new_index_list)
+
+
+def _merge_radial_distance_for_fixed(
+    radial_distance_list: list[npt.NDArray[np.float64]],
+) -> npt.NDArray[np.float64]:
+    if len(radial_distance_list) == 0:
+        raise ValueError("cannot merge empty list")
+    if not all(
+        np.allclose(arr.shape, radial_distance_list[0].shape)
+        for arr in radial_distance_list
+    ):
+        raise ValueError("Cannot merge radial distances with different shapes")
+    if not all(
+        np.allclose(arr, radial_distance_list[0]) for arr in radial_distance_list
+    ):
+        raise ValueError("Cannot merge radial distances")
+    return radial_distance_list[0]
+
+
+def _src_to_bytes(data: str | Path | bytes | BufferedIOBase) -> bytes:
+    if isinstance(data, str):
+        path = Path(data)
+        with path.open("rb") as f:
+            return f.read()
+    elif isinstance(data, Path):
+        with data.open("rb") as f:
+            return f.read()
+    elif isinstance(data, bytes):
+        return data
+    elif isinstance(data, BufferedIOBase):
+        return data.read()
+
+
+def _from_fixed_src(nc: Dataset) -> WindCubeFixed:
+    time_list = []
+    cnr_list = []
+    relative_beta_list = []
+    radial_wind_speed_list = []
+    radial_wind_speed_confidence_list = []
+    azimuth_list = []
+    elevation_list = []
+    range_list = []
+    doppler_spectrum_width_list = []
+    ray_accumulation_time_list = []
+    time_reference = (
+        nc["time_reference"][:] if "time_reference" in nc.variables else None
+    )
+
+    expected_dimensions = ("time", "range")
+    for _, group in enumerate(
+        nc[group] for group in (nc.variables["sweep_group_name"][:])
+    ):
+        time_reference_ = time_reference
+        if time_reference is None and "time_reference" in group.variables:
+            time_reference_ = group["time_reference"][:]
+
+        time_list.append(_extract_datetime64_or_raise(group["time"], time_reference_))
+        radial_wind_speed_list.append(
+            _extract_float64_or_raise(group["radial_wind_speed"], expected_dimensions)
+        )
+        cnr_list.append(_extract_float64_or_raise(group["cnr"], expected_dimensions))
+        relative_beta_list.append(
+            _extract_float64_or_raise(group["relative_beta"], expected_dimensions)
+        )
+        radial_wind_speed_confidence_list.append(
+            _extract_float64_or_raise(
+                group["radial_wind_speed_ci"], expected_dimensions
+            )
+        )
+        azimuth_list.append(
+            _extract_float64_or_raise(group["azimuth"], expected_dimensions)
+        )
+        elevation_list.append(
+            _extract_float64_or_raise(group["elevation"], expected_dimensions)
+        )
+        range_list.append(
+            _extract_float64_or_raise(group["range"], (expected_dimensions[1],))
+        )
+        doppler_spectrum_width_list.append(
+            _extract_float64_or_raise(
+                group["doppler_spectrum_width"], expected_dimensions
+            )
+        )
+        ray_accumulation_time_list.append(
+            _extract_float64_or_raise(
+                group["ray_accumulation_time"], expected_dimensions
+            )
+        )
+
+    return WindCubeFixed(
+        time=np.concatenate(time_list),
+        radial_distance=np.concatenate(range_list),
+        azimuth=np.concatenate(azimuth_list),
+        elevation=np.concatenate(elevation_list),
+        radial_velocity=np.concatenate(radial_wind_speed_list),
+        radial_velocity_confidence=np.concatenate(radial_wind_speed_confidence_list),
+        cnr=np.concatenate(cnr_list),
+        relative_beta=np.concatenate(relative_beta_list),
+        doppler_spectrum_width=np.concatenate(doppler_spectrum_width_list),
+        ray_accumulation_time=merge_all_equal(
+            "ray_accumulation_time",
+            np.array(ray_accumulation_time_list, dtype=np.float64).tolist(),
+        ),
+        system_id=nc.instrument_name,
+    )
+
+
+def _from_vad_or_dbs_src(nc: Dataset) -> WindCube:
+    scan_index_list: list[npt.NDArray[np.int64]] = []
+    time_list: list[npt.NDArray[np.datetime64]] = []
+    cnr_list: list[npt.NDArray[np.float64]] = []
+    radial_wind_speed_list: list[npt.NDArray[np.float64]] = []
+    radial_wind_speed_confidence_list: list[npt.NDArray[np.float64]] = []
+    azimuth_list: list[npt.NDArray[np.float64]] = []
+    elevation_list: list[npt.NDArray[np.float64]] = []
+    range_list: list[npt.NDArray[np.float64]] = []
+    height_list: list[npt.NDArray[np.float64]] = []
+
+    time_reference = (
+        nc["time_reference"][:] if "time_reference" in nc.variables else None
+    )
+
+    expected_dimensions = ("time", "gate_index")
+    for i, group in enumerate(
+        nc[group] for group in (nc.variables["sweep_group_name"][:])
+    ):
+        time_reference_ = time_reference
+        if time_reference is None and "time_reference" in group.variables:
+            time_reference_ = group["time_reference"][:]
+
+        time_list.append(_extract_datetime64_or_raise(group["time"], time_reference_))
+        radial_wind_speed_list.append(
+            _extract_float64_or_raise(group["radial_wind_speed"], expected_dimensions)
+        )
+        cnr_list.append(_extract_float64_or_raise(group["cnr"], expected_dimensions))
+        radial_wind_speed_confidence_list.append(
+            _extract_float64_or_raise(
+                group["radial_wind_speed_ci"], expected_dimensions
+            )
+        )
+        azimuth_list.append(
+            _extract_float64_or_raise(group["azimuth"], expected_dimensions)
+        )
+        elevation_list.append(
+            _extract_float64_or_raise(group["elevation"], expected_dimensions)
+        )
+        range_list.append(
+            _extract_float64_or_raise(group["range"], expected_dimensions)
+        )
+        height_list.append(
+            _extract_float64_or_raise(group["measurement_height"], expected_dimensions)
+        )
+        scan_index_list.append(np.full(group["time"][:].shape, i, dtype=np.int64))
+
+    return WindCube(
+        scan_index=np.concatenate(scan_index_list),
+        time=np.concatenate(time_list),
+        radial_distance=np.concatenate(range_list),
+        height=np.concatenate(height_list),
+        azimuth=np.concatenate(azimuth_list),
+        elevation=np.concatenate(elevation_list),
+        radial_velocity=np.concatenate(radial_wind_speed_list),
+        radial_velocity_confidence=np.concatenate(radial_wind_speed_confidence_list),
+        cnr=np.concatenate(cnr_list),
+        system_id=nc.instrument_name,
+    )
+
+
+def _extract_datetime64_or_raise(
+    nc: Variable[npt.NDArray[np.float64]], time_reference: str | None
+) -> npt.NDArray[np.datetime64]:
+    match nc.name:
+        case "time":
+            if nc.dimensions != ("time",):
+                raise ValueError
+
+            units = nc.units
+            if "time_reference" in nc.units:
+                if time_reference is not None:
+                    units = nc.units.replace("time_reference", time_reference)
+                else:
+                    raise ValueError("Unknown time_reference")
+            return np.array(num2date(nc[:], units=units), dtype="datetime64[us]")
+        case _:
+            raise ValueError(f"Unexpected variable name {nc.name}")
+
+
+def _dB_to_ratio(decibels: npt.NDArray[np.float64]) -> npt.NDArray[np.float64]:
+    return 10 ** (0.1 * decibels)
+
+
+def _extract_float64_or_raise(
+    nc: Variable[npt.NDArray[np.float64]], expected_dimensions: tuple[str, ...]
+) -> npt.NDArray[np.float64]:
+    match nc.name:
+        case "range" | "measurement_height":
+            if nc.dimensions != expected_dimensions:
+                raise ValueError(f"Unexpected dimensions for {nc.name}")
+            if nc.units != "m":
+                raise ValueError(f"Unexpected units for {nc.name}")
+            if nc[:].mask is not np.bool_(False):
+                raise ValueError
+            return np.array(nc[:].data, dtype=np.float64)
+        case "cnr":
+            if nc.dimensions != expected_dimensions:
+                raise ValueError(f"Unexpected dimensions for {nc.name}")
+            if nc.units != "dB":
+                raise ValueError(f"Unexpected units for {nc.name}")
+            if nc[:].mask is not np.bool_(False):
+                pass  # ignore that array contains masked values
+            return _dB_to_ratio(np.array(nc[:].data, dtype=np.float64))
+        case "relative_beta":
+            if nc.dimensions != expected_dimensions:
+                raise ValueError(f"Unexpected dimensions for {nc.name}")
+            if nc.units != "m-1 sr-1":
+                raise ValueError(f"Unexpected units for {nc.name}")
+            if nc[:].mask is not np.bool_(False):
+                pass  # ignore that array contains masked values
+            return np.array(nc[:].data, dtype=np.float64)
+        case "radial_wind_speed":
+            if nc.dimensions != expected_dimensions:
+                raise ValueError(f"Unexpected dimensions for {nc.name}")
+            if nc.units != "m s-1":
+                raise ValueError(f"Unexpected units for {nc.name}")
+            if nc[:].mask is not np.bool_(False):
+                pass  # ignore that array contains masked values
+            return np.array(nc[:].data, dtype=np.float64)
+        case "radial_wind_speed_ci":
+            if nc.dimensions != expected_dimensions:
+                raise ValueError(f"Unexpected dimensions for {nc.name}")
+            if nc.units != "percent":
+                raise ValueError(f"Unexpected units for {nc.name}")
+            if nc[:].mask is not np.bool_(False):
+                pass  # ignore that array contains masked values
+            return np.array(nc[:].data, dtype=np.float64)
+        case "azimuth" | "elevation":
+            if nc.dimensions != (expected_dimensions[0],):
+                raise ValueError(f"Unexpected dimensions for {nc.name}")
+            if nc.units != "degrees":
+                raise ValueError(f"Unexpected units for {nc.name}")
+            if nc[:].mask is not np.bool_(False):
+                raise ValueError
+            return np.array(nc[:].data, dtype=np.float64)
+        case "doppler_spectrum_width":
+            if nc.dimensions != expected_dimensions:
+                raise ValueError(f"Unexpected dimensions for {nc.name}")
+            if nc.units != "m s-1":
+                raise ValueError(f"Unexpected units for {nc.name}")
+            if nc[:].mask is not np.bool_(False):
+                pass  # ignore that array contains masked values
+            return np.array(nc[:].data, dtype=np.float64)
+        case "ray_accumulation_time":
+            if nc.dimensions != ():
+                raise ValueError(f"Unexpected dimensions for {nc.name}")
+            if nc.units != "ms":
+                raise ValueError(f"Unexpected units for {nc.name}")
+            if nc[:].mask is not np.bool_(False):
+                raise ValueError(f"Variable {nc.name} contains masked values")
+            return np.array(nc[:].data, dtype=np.float64)
+        case _:
+            raise ValueError(f"Unexpected variable name {nc.name}")
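A hypothetical reading sketch for the new "fixed" scan class (the path is a placeholder; the file must be a WindCube fixed-scan netCDF containing the sweep groups and variables validated above; note that cnr is returned as a linear ratio after _dB_to_ratio):

    from pathlib import Path

    from doppy.raw.windcube import WindCubeFixed

    raw = WindCubeFixed.from_fixed_src(Path("WCS_fixed_2024-01-01_12-00.nc"))
    raw = WindCubeFixed.merge([raw]).sorted_by_time().nan_profiles_removed()
    print(raw.time.shape, raw.cnr.shape, raw.ray_accumulation_time)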
{doppy-0.3.6 → doppy-0.4.0}/src/doppy/raw/wls70.py:

@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 from dataclasses import dataclass
-from datetime import datetime
+from datetime import datetime, timezone
 from io import BufferedIOBase
 from pathlib import Path
 from typing import Any, Sequence
@@ -166,7 +166,12 @@ def _raw_rs_to_wls70(
     cnr_threshold = float(info["cnr_threshold"])
     data = data.reshape(-1, len(cols))
     time_ts = data[:, 0]
-    time = np.array(
+    time = np.array(
+        [
+            datetime64(datetime.fromtimestamp(ts, timezone.utc).replace(tzinfo=None))
+            for ts in time_ts
+        ]
+    )
 
     position = data[:, 1]
     temperature = data[:, 2]
doppy-0.4.0/src/doppy/utils.py (new file):

@@ -0,0 +1,24 @@
+from typing import TypeVar, cast
+
+import numpy as np
+from numpy.typing import NDArray
+
+T = TypeVar("T")
+NT = TypeVar("NT", bound=np.generic)
+
+
+def merge_all_equal(key: str, lst: list[T]) -> T:
+    if len(set(lst)) != 1:
+        raise ValueError(f"Cannot merge header key {key} values {lst}")
+    return lst[0]
+
+
+def merge_all_close(key: str, lst: list[NDArray[NT]]) -> NT:
+    if len(lst) == 0:
+        raise ValueError(f"Cannot merge empty list for key {key}")
+    if any(arr.size == 0 for arr in lst):
+        raise ValueError(f"Cannot merge key {key}, one or more arrays are empty.")
+    arr = np.concatenate([arr.flatten() for arr in lst])
+    if not np.allclose(arr, arr[0]):
+        raise ValueError(f"Cannot merge key {key}, values are not close enough")
+    return cast(NT, arr[0])
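A short usage sketch of the two helpers (the values are placeholders): merge_all_equal requires every value to be identical, e.g. one system_id per merge, while merge_all_close only requires numeric arrays to agree within np.allclose.

    import numpy as np

    from doppy.utils import merge_all_close, merge_all_equal

    system_id = merge_all_equal("system_id", ["WCS000123", "WCS000123"])
    acc_time = merge_all_close(
        "ray_accumulation_time", [np.array([1.0]), np.array([1.0 + 1e-12])]
    )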
doppy-0.3.6/src/doppy/raw/windcube.py (deleted, replaced by the new module above):

@@ -1,259 +0,0 @@
-from __future__ import annotations
-
-from dataclasses import dataclass
-from io import BufferedIOBase
-from pathlib import Path
-from typing import Sequence
-
-import numpy as np
-import numpy.typing as npt
-from netCDF4 import Dataset, num2date
-from numpy import datetime64
-
-from doppy.utils import merge_all_equal
-
-
-@dataclass
-class WindCube:
-    time: npt.NDArray[datetime64]  # dim: (time, )
-    radial_distance: npt.NDArray[np.int64]  # dim: (time, radial_distance)
-    height: npt.NDArray[np.int64]  # dim: (time,radial_distance)
-    azimuth: npt.NDArray[np.float64]  # dim: (time, )
-    elevation: npt.NDArray[np.float64]  # dim: (time, )
-    cnr: npt.NDArray[np.float64]  # dim: (time, radial_distance)
-    radial_velocity: npt.NDArray[np.float64]  # dim: (time, radial_distance)
-    radial_velocity_confidence: npt.NDArray[np.float64]  # dim: (time, radial_distance)
-    scan_index: npt.NDArray[np.int64]
-    system_id: str
-
-    @classmethod
-    def from_vad_or_dbs_srcs(
-        cls,
-        data: Sequence[str]
-        | Sequence[Path]
-        | Sequence[bytes]
-        | Sequence[BufferedIOBase],
-    ) -> list[WindCube]:
-        return [WindCube.from_vad_or_dbs_src(src) for src in data]
-
-    @classmethod
-    def from_vad_or_dbs_src(cls, data: str | Path | bytes | BufferedIOBase) -> WindCube:
-        data_bytes = _src_to_bytes(data)
-        nc = Dataset("inmemory.nc", "r", memory=data_bytes)
-        return _from_vad_or_dbs_src(nc)
-
-    @classmethod
-    def merge(cls, raws: list[WindCube]) -> WindCube:
-        return WindCube(
-            scan_index=_merge_scan_index([r.scan_index for r in raws]),
-            time=np.concatenate([r.time for r in raws]),
-            height=np.concatenate([r.height for r in raws]),
-            radial_distance=np.concatenate([r.radial_distance for r in raws]),
-            azimuth=np.concatenate([r.azimuth for r in raws]),
-            elevation=np.concatenate([r.elevation for r in raws]),
-            radial_velocity=np.concatenate([r.radial_velocity for r in raws]),
-            radial_velocity_confidence=np.concatenate(
-                [r.radial_velocity_confidence for r in raws]
-            ),
-            cnr=np.concatenate([r.cnr for r in raws]),
-            system_id=merge_all_equal("system_id", [r.system_id for r in raws]),
-        )
-
-    def __getitem__(
-        self,
-        index: int
-        | slice
-        | list[int]
-        | npt.NDArray[np.int64]
-        | npt.NDArray[np.bool_]
-        | tuple[slice, slice],
-    ) -> WindCube:
-        if isinstance(index, (int, slice, list, np.ndarray)):
-            return WindCube(
-                time=self.time[index],
-                radial_distance=self.radial_distance[index],
-                height=self.height[index],
-                azimuth=self.azimuth[index],
-                elevation=self.elevation[index],
-                radial_velocity=self.radial_velocity[index],
-                radial_velocity_confidence=self.radial_velocity_confidence[index],
-                cnr=self.cnr[index],
-                scan_index=self.scan_index[index],
-                system_id=self.system_id,
-            )
-        raise TypeError
-
-    def sorted_by_time(self) -> WindCube:
-        sort_indices = np.argsort(self.time)
-        return self[sort_indices]
-
-    def non_strictly_increasing_timesteps_removed(self) -> WindCube:
-        if len(self.time) == 0:
-            return self
-        mask = np.ones_like(self.time, dtype=np.bool_)
-        latest_time = self.time[0]
-        for i, t in enumerate(self.time[1:], start=1):
-            if t <= latest_time:
-                mask[i] = False
-            else:
-                latest_time = t
-        return self[mask]
-
-    def reindex_scan_indices(self) -> WindCube:
-        new_indices = np.zeros_like(self.scan_index)
-        indexed = set()
-        j = 0
-        for i in self.scan_index:
-            if i in indexed:
-                continue
-            new_indices[i == self.scan_index] = j
-            indexed.add(i)
-            j += 1
-        self.scan_index = new_indices
-        return self
-
-
-def _merge_scan_index(index_list: list[npt.NDArray[np.int64]]) -> npt.NDArray[np.int64]:
-    if len(index_list) == 0:
-        raise ValueError("cannot merge empty list")
-
-    new_index_list = []
-    current_max = index_list[0].max()
-    new_index_list.append(index_list[0])
-
-    for index_arr in index_list[1:]:
-        new_arr = index_arr + current_max + 1
-        new_index_list.append(new_arr)
-        current_max += index_arr.max() + 1
-    return np.concatenate(new_index_list)
-
-
-def _src_to_bytes(data: str | Path | bytes | BufferedIOBase) -> bytes:
-    if isinstance(data, str):
-        path = Path(data)
-        with path.open("rb") as f:
-            return f.read()
-    elif isinstance(data, Path):
-        with data.open("rb") as f:
-            return f.read()
-    elif isinstance(data, bytes):
-        return data
-    elif isinstance(data, BufferedIOBase):
-        return data.read()
-    raise TypeError("Unsupported data type")
-
-
-def _from_vad_or_dbs_src(nc: Dataset) -> WindCube:
-    scan_index_list = []
-    time_list = []
-    cnr_list = []
-    radial_wind_speed_list = []
-    radial_wind_speed_confidence_list = []
-    azimuth_list = []
-    elevation_list = []
-    range_list = []
-    height_list = []
-    time_reference = (
-        nc["time_reference"][:] if "time_reference" in nc.variables else None
-    )
-
-    for i, group in enumerate(
-        nc[group] for group in (nc.variables["sweep_group_name"][:])
-    ):
-        time_list.append(_extract_datetime64_or_raise(group["time"], time_reference))
-        radial_wind_speed_list.append(
-            _extract_float64_or_raise(group["radial_wind_speed"])
-        )
-        cnr_list.append(_extract_float64_or_raise(group["cnr"]))
-        radial_wind_speed_confidence_list.append(
-            _extract_float64_or_raise(group["radial_wind_speed_ci"])
-        )
-        azimuth_list.append(_extract_float64_or_raise(group["azimuth"]))
-        elevation_list.append(_extract_float64_or_raise(group["elevation"]))
-        range_list.append(_extract_int64_or_raise(group["range"]))
-        height_list.append(_extract_int64_or_raise(group["measurement_height"]))
-        scan_index_list.append(np.full(group["time"][:].shape, i, dtype=np.int64))
-
-    return WindCube(
-        scan_index=np.concatenate(scan_index_list),
-        time=np.concatenate(time_list),
-        radial_distance=np.concatenate(range_list),
-        height=np.concatenate(height_list),
-        azimuth=np.concatenate(azimuth_list),
-        elevation=np.concatenate(elevation_list),
-        radial_velocity=np.concatenate(radial_wind_speed_list),
-        radial_velocity_confidence=np.concatenate(radial_wind_speed_confidence_list),
-        cnr=np.concatenate(cnr_list),
-        system_id=nc.instrument_name,
-    )
-
-
-def _extract_datetime64_or_raise(
-    nc: Dataset, time_reference: str | None
-) -> npt.NDArray[np.datetime64]:
-    match nc.name:
-        case "time":
-            if nc.dimensions != ("time",):
-                raise ValueError
-
-            units = nc.units
-            if "time_reference" in nc.units:
-                if time_reference is not None:
-                    units = nc.units.replace("time_reference", time_reference)
-                else:
-                    raise ValueError("Unknown time_reference")
-            return np.array(num2date(nc[:], units=units), dtype="datetime64[us]")
-        case _:
-            raise ValueError(f"Unexpected variable name {nc.name}")
-
-
-def _extract_float64_or_raise(nc: Dataset) -> npt.NDArray[np.float64]:
-    match nc.name:
-        case "cnr":
-            if nc.dimensions != ("time", "gate_index"):
-                raise ValueError(f"Unexpected dimensions for {nc.name}")
-            if nc.units != "dB":
-                raise ValueError(f"Unexpected units for {nc.name}")
-            if nc[:].mask is not np.bool_(False):
-                pass  # ignore that array contains masked values
-            return np.array(nc[:].data, dtype=np.float64)
-        case "radial_wind_speed":
-            if nc.dimensions != ("time", "gate_index"):
-                raise ValueError(f"Unexpected dimensions for {nc.name}")
-            if nc.units != "m s-1":
-                raise ValueError(f"Unexpected units for {nc.name}")
-            if nc[:].mask is not np.bool_(False):
-                pass  # ignore that array contains masked values
-            return np.array(nc[:].data, dtype=np.float64)
-        case "radial_wind_speed_ci":
-            if nc.dimensions != ("time", "gate_index"):
-                raise ValueError(f"Unexpected dimensions for {nc.name}")
-            if nc.units != "percent":
-                raise ValueError(f"Unexpected units for {nc.name}")
-            if nc[:].mask is not np.bool_(False):
-                pass  # ignore that array contains masked values
-            return np.array(nc[:].data, dtype=np.float64)
-        case "azimuth" | "elevation":
-            if nc.dimensions != ("time",):
-                raise ValueError(f"Unexpected dimensions for {nc.name}")
-            if nc.units != "degrees":
-                raise ValueError(f"Unexpected units for {nc.name}")
-            if nc[:].mask is not np.bool_(False):
-                raise ValueError
-            return np.array(nc[:].data, dtype=np.float64)
-        case _:
-            raise ValueError(f"Unexpected variable name {nc.name}")
-
-
-def _extract_int64_or_raise(nc: Dataset) -> npt.NDArray[np.int64]:
-    match nc.name:
-        case "range" | "measurement_height":
-            if nc.dimensions != ("time", "gate_index"):
-                raise ValueError(f"Unexpected dimensions for {nc.name}")
-            if nc.units != "m":
-                raise ValueError(f"Unexpected units for {nc.name}")
-            if nc[:].mask is not np.bool_(False):
-                raise ValueError
-            return np.array(nc[:].data, dtype=np.int64)
-        case _:
-            raise ValueError(f"Unexpected variable name {nc.name}")
doppy-0.3.6/src/doppy/utils.py: DELETED