doppy-0.5.9-cp310-abi3-macosx_10_12_x86_64.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- doppy/__init__.py +6 -0
- doppy/bench.py +13 -0
- doppy/data/__init__.py +0 -0
- doppy/data/api.py +58 -0
- doppy/data/cache.py +43 -0
- doppy/data/exceptions.py +6 -0
- doppy/defaults.py +18 -0
- doppy/exceptions.py +14 -0
- doppy/netcdf.py +134 -0
- doppy/options.py +13 -0
- doppy/product/__init__.py +6 -0
- doppy/product/noise_utils.py +106 -0
- doppy/product/stare.py +807 -0
- doppy/product/stare_depol.py +308 -0
- doppy/product/turbulence.py +264 -0
- doppy/product/utils.py +12 -0
- doppy/product/wind.py +460 -0
- doppy/py.typed +0 -0
- doppy/raw/__init__.py +16 -0
- doppy/raw/halo_bg.py +173 -0
- doppy/raw/halo_hpl.py +480 -0
- doppy/raw/halo_sys_params.py +135 -0
- doppy/raw/utils.py +14 -0
- doppy/raw/windcube.py +477 -0
- doppy/raw/wls70.py +175 -0
- doppy/raw/wls77.py +163 -0
- doppy/rs.abi3.so +0 -0
- doppy/utils.py +24 -0
- doppy-0.5.9.dist-info/METADATA +144 -0
- doppy-0.5.9.dist-info/RECORD +33 -0
- doppy-0.5.9.dist-info/WHEEL +4 -0
- doppy-0.5.9.dist-info/entry_points.txt +2 -0
- doppy-0.5.9.dist-info/licenses/LICENSE +21 -0
doppy/product/wind.py
ADDED
@@ -0,0 +1,460 @@
from __future__ import annotations

import functools
from collections import Counter
from dataclasses import dataclass
from io import BufferedIOBase
from pathlib import Path
from typing import Sequence

import numpy as np
import numpy.typing as npt
from scipy.ndimage import generic_filter

import doppy
from doppy.product.utils import arr_to_rounded_set


@dataclass
class Options:
    azimuth_offset_deg: float | None


@dataclass
class Wind:
    time: npt.NDArray[np.datetime64]
    height: npt.NDArray[np.float64]
    zonal_wind: npt.NDArray[np.float64]
    meridional_wind: npt.NDArray[np.float64]
    vertical_wind: npt.NDArray[np.float64]
    mask: npt.NDArray[np.bool_]
    system_id: str
    options: Options | None

    @property
    def mask_zonal_wind(self) -> npt.NDArray[np.bool_]:
        return np.isnan(self.zonal_wind)

    @property
    def mask_meridional_wind(self) -> npt.NDArray[np.bool_]:
        return np.isnan(self.meridional_wind)

    @property
    def mask_vertical_wind(self) -> npt.NDArray[np.bool_]:
        return np.isnan(self.vertical_wind)

    @functools.cached_property
    def horizontal_wind_speed(self) -> npt.NDArray[np.float64]:
        return np.sqrt(self.zonal_wind**2 + self.meridional_wind**2)

    @functools.cached_property
    def horizontal_wind_direction(self) -> npt.NDArray[np.float64]:
        direction = np.arctan2(self.zonal_wind, self.meridional_wind)
        direction[direction < 0] += 2 * np.pi
        return np.array(np.degrees(direction), dtype=np.float64)

    @classmethod
    def from_halo_data(
        cls,
        data: Sequence[str]
        | Sequence[Path]
        | Sequence[bytes]
        | Sequence[BufferedIOBase],
        options: Options | None = None,
    ) -> Wind:
        raws = doppy.raw.HaloHpl.from_srcs(data)

        if len(raws) == 0:
            raise doppy.exceptions.NoDataError("HaloHpl data missing")

        raw = (
            doppy.raw.HaloHpl.merge(_select_raws_for_wind(raws))
            .sorted_by_time()
            .non_strictly_increasing_timesteps_removed()
            .nans_removed()
        )
        if len(raw.time) == 0:
            raise doppy.exceptions.NoDataError("No suitable data for the wind product")

        if options and options.azimuth_offset_deg:
            raw.azimuth += options.azimuth_offset_deg

        groups = _group_scans_by_azimuth_rotation(raw)
        time_list = []
        elevation_list = []
        wind_list = []
        rmse_list = []

        for group_index in set(groups):
            pick = group_index == groups
            if pick.sum() < 4:
                continue
            time_, elevation_, wind_, rmse_ = _compute_wind(raw[pick])
            time_list.append(time_)
            elevation_list.append(elevation_)
            wind_list.append(wind_[np.newaxis, :, :])
            rmse_list.append(rmse_[np.newaxis, :])
        time = np.array(time_list)
        if len(time) == 0:
            raise doppy.exceptions.NoDataError(
                "Probably something wrong with scan grouping"
            )
        elevation = np.array(elevation_list)
        wind = np.concatenate(wind_list)
        rmse = np.concatenate(rmse_list)
        if not np.allclose(elevation, elevation[0]):
            raise ValueError("Elevation is expected to stay same")
        height = raw.radial_distance * np.sin(np.deg2rad(elevation[0]))
        mask = _compute_mask(wind, rmse)
        return Wind(
            time=time,
            height=height,
            zonal_wind=wind[:, :, 0],
            meridional_wind=wind[:, :, 1],
            vertical_wind=wind[:, :, 2],
            mask=mask,
            system_id=raw.header.system_id,
            options=options,
        )

    @classmethod
    def from_windcube_data(
        cls,
        data: Sequence[str]
        | Sequence[Path]
        | Sequence[bytes]
        | Sequence[BufferedIOBase],
        options: Options | None = None,
    ) -> Wind:
        raws = doppy.raw.WindCube.from_vad_or_dbs_srcs(data)

        if len(raws) == 0:
            raise doppy.exceptions.NoDataError("WindCube data missing")

        raw = (
            doppy.raw.WindCube.merge(raws)
            .sorted_by_time()
            .non_strictly_increasing_timesteps_removed()
            .reindex_scan_indices()
        )
        # Select scans with the most frequent elevation angle in the range (15, 85)
        raw = raw[(raw.elevation > 15) & (raw.elevation < 85)]
        elevation_ints = raw.elevation.round().astype(int)
        unique_elevations, counts = np.unique(elevation_ints, return_counts=True)
        most_frequent_elevation = unique_elevations[np.argmax(counts)]
        raw = raw[elevation_ints == most_frequent_elevation]

        if len(raw.time) == 0:
            raise doppy.exceptions.NoDataError("No suitable data for the wind product")

        if options and options.azimuth_offset_deg:
            raw.azimuth += options.azimuth_offset_deg

        time_list = []
        elevation_list = []
        wind_list = []
        rmse_list = []

        for scan_index in set(raw.scan_index):
            pick = raw.scan_index == scan_index
            if pick.sum() < 4:
                continue
            time_, elevation_, wind_, rmse_ = _compute_wind(raw[pick])
            time_list.append(time_)
            elevation_list.append(elevation_)
            wind_list.append(wind_[np.newaxis, :, :])
            rmse_list.append(rmse_[np.newaxis, :])

        time = np.array(time_list)
        elevation = np.array(elevation_list)
        wind = np.concatenate(wind_list)
        rmse = np.concatenate(rmse_list)
        mask = _compute_mask(wind, rmse) | np.any(np.isnan(wind), axis=2)
        if not np.allclose(elevation, elevation[0]):
            raise ValueError("Elevation is expected to stay same")
        if not (raw.height == raw.height[0]).all():
            raise ValueError("Unexpected heights")
        height = np.array(raw.height[0], dtype=np.float64)
        return Wind(
            time=time,
            height=height,
            zonal_wind=wind[:, :, 0],
            meridional_wind=wind[:, :, 1],
            vertical_wind=wind[:, :, 2],
            mask=mask,
            system_id=raw.system_id,
            options=options,
        )

    @classmethod
    def from_wls70_data(
        cls,
        data: Sequence[str]
        | Sequence[Path]
        | Sequence[bytes]
        | Sequence[BufferedIOBase],
        options: Options | None = None,
    ) -> Wind:
        raws = doppy.raw.Wls70.from_srcs(data)

        if len(raws) == 0:
            raise doppy.exceptions.NoDataError("Wls70 data missing")

        raw = (
            doppy.raw.Wls70.merge(raws)
            .sorted_by_time()
            .non_strictly_increasing_timesteps_removed()
        )

        if options and options.azimuth_offset_deg:
            theta = np.deg2rad(options.azimuth_offset_deg)
            cos_theta = np.cos(theta)
            sin_theta = np.sin(theta)

            meridional_wind = (
                sin_theta * raw.zonal_wind + cos_theta * raw.meridional_wind
            )
            zonal_wind = cos_theta * raw.zonal_wind - sin_theta * raw.meridional_wind
        else:
            meridional_wind = raw.meridional_wind
            zonal_wind = raw.zonal_wind

        mask = (
            np.isnan(raw.meridional_wind)
            | np.isnan(raw.zonal_wind)
            | np.isnan(raw.vertical_wind)
        )
        return Wind(
            time=raw.time,
            height=raw.altitude,
            zonal_wind=zonal_wind,
            meridional_wind=meridional_wind,
            vertical_wind=raw.vertical_wind,
            mask=mask,
            system_id=raw.system_id,
            options=options,
        )

    def write_to_netcdf(self, filename: str | Path) -> None:
        with doppy.netcdf.Dataset(filename) as nc:
            nc.add_dimension("time")
            nc.add_dimension("height")
            nc.add_time(
                name="time",
                dimensions=("time",),
                standard_name="time",
                long_name="Time UTC",
                data=self.time,
                dtype="f8",
            )
            nc.add_variable(
                name="height",
                dimensions=("height",),
                units="m",
                data=self.height,
                dtype="f4",
            )
            nc.add_variable(
                name="uwind_raw",
                dimensions=("time", "height"),
                units="m s-1",
                data=self.zonal_wind,
                mask=self.mask_zonal_wind,
                dtype="f4",
                long_name="Non-screened zonal wind",
            )
            nc.add_variable(
                name="uwind",
                dimensions=("time", "height"),
                units="m s-1",
                data=self.zonal_wind,
                mask=self.mask | self.mask_zonal_wind,
                dtype="f4",
                long_name="Zonal wind",
            )
            nc.add_variable(
                name="vwind_raw",
                dimensions=("time", "height"),
                units="m s-1",
                data=self.meridional_wind,
                mask=self.mask_meridional_wind,
                dtype="f4",
                long_name="Non-screened meridional wind",
            )
            nc.add_variable(
                name="vwind",
                dimensions=("time", "height"),
                units="m s-1",
                data=self.meridional_wind,
                mask=self.mask | self.mask_meridional_wind,
                dtype="f4",
                long_name="Meridional wind",
            )
            nc.add_attribute("serial_number", self.system_id)
            nc.add_attribute("doppy_version", doppy.__version__)
            if self.options is not None and self.options.azimuth_offset_deg is not None:
                nc.add_scalar_variable(
                    name="azimuth_offset",
                    units="degrees",
                    data=self.options.azimuth_offset_deg,
                    dtype="f4",
                    long_name="Azimuth offset of the instrument "
                    "(positive clockwise from north)",
                )


def _compute_wind(
    raw: doppy.raw.HaloHpl | doppy.raw.WindCube,
) -> tuple[float, float, npt.NDArray[np.float64], npt.NDArray[np.float64]]:
    """
    Returns
    -------
    time
        Timestamp of the scan (taken from the middle profile of the scan).

    elevation
        Rounded elevation angle of the scan in degrees.

    wind (range, component):
        Wind components for each range gate.
        Components:
            0: zonal wind
            1: meridional wind
            2: vertical wind

    rmse (range,):
        Root-mean-square error of radial velocity fit for each range gate.

    References
    ----------
    An assessment of the performance of a 1.5 µm Doppler lidar for
    operational vertical wind profiling based on a 1-year trial
    authors: E. Päschke, R. Leinweber, and V. Lehmann
    doi: 10.5194/amt-8-2251-2015
    """
    elevation = np.deg2rad(raw.elevation)
    azimuth = np.deg2rad(raw.azimuth)
    radial_velocity = raw.radial_velocity

    cos_elevation = np.cos(elevation)
    A = np.hstack(
        (
            (np.sin(azimuth) * cos_elevation).reshape(-1, 1),
            (np.cos(azimuth) * cos_elevation).reshape(-1, 1),
            (np.sin(elevation)).reshape(-1, 1),
        )
    )
    A_inv = np.linalg.pinv(A)

    w = A_inv @ radial_velocity
    r_appr = A @ w
    rmse = np.sqrt(np.sum((r_appr - radial_velocity) ** 2, axis=0) / r_appr.shape[0])
    wind = w.T
    time = raw.time[len(raw.time) // 2]
    elevation = np.round(raw.elevation)
    if not np.allclose(elevation, elevation[0]):
        raise ValueError("Elevations in the scan differ")
    return time, elevation[0], wind, rmse


def _compute_mask(
    wind: npt.NDArray[np.float64], rmse: npt.NDArray[np.float64]
) -> npt.NDArray[np.bool_]:
    """
    Parameters
    ----------
    wind (time, range, component)
    rmse (time, range)
    """

    def neighbour_diff(X: npt.NDArray[np.float64]) -> np.float64:
        mdiff = np.max(np.abs(X - X[len(X) // 2]))
        return np.float64(mdiff)

    WIND_NEIGHBOUR_DIFFERENCE = 20
    neighbour_mask = np.any(
        generic_filter(wind, neighbour_diff, size=(1, 3, 1))
        > WIND_NEIGHBOUR_DIFFERENCE,
        axis=2,
    )

    rmse_th = 5
    return np.array((rmse > rmse_th) | neighbour_mask, dtype=np.bool_)


def _group_scans_by_azimuth_rotation(raw: doppy.raw.HaloHpl) -> npt.NDArray[np.int64]:
    max_timedelta_in_scan = np.timedelta64(30, "s")
    if len(raw.time) < 4:
        raise doppy.exceptions.NoDataError(
            "Less than 4 profiles is not sufficient for wind product."
        )
    groups = -1 * np.ones_like(raw.time, dtype=np.int64)

    group = 0
    first_azimuth_of_scan = _wrap_and_round_angle(raw.azimuth[0])
    groups[0] = group
    for i, (time_prev, time, azimuth) in enumerate(
        zip(raw.time[:-1], raw.time[1:], raw.azimuth[1:]), start=1
    ):
        if (
            angle := _wrap_and_round_angle(azimuth)
        ) == first_azimuth_of_scan or time - time_prev > max_timedelta_in_scan:
            group += 1
            first_azimuth_of_scan = angle
        groups[i] = group
    return groups


def _wrap_and_round_angle(a: np.float64) -> int:
    return int(np.round(a)) % 360


def _select_raws_for_wind(
    raws: Sequence[doppy.raw.HaloHpl],
) -> Sequence[doppy.raw.HaloHpl]:
    counter: Counter[tuple[int, int]] = Counter()
    filtered_raws = []
    for raw in raws:
        select = (1 < raw.elevation) & (raw.elevation < 85)
        for el in arr_to_rounded_set(raw.elevation[select]):
            select_el = raw.elevation.round().astype(int) == el
            select_and = select & select_el

            if _get_nrounded_angles(raw.azimuth[select_and]) > 3:
                filtered_raws.append(raw[select_and])
                counter.update(
                    Counter(
                        (raw.header.mergeable_hash(), el)
                        for el in raw.elevation[select_and].round().astype(int)
                    )
                )
    if len(counter) == 0:
        raise doppy.exceptions.NoDataError(
            "No scans with 1 < elevation angle < 85 and more than 3 azimuth angles"
        )
    if len(counter) == 1:
        return filtered_raws
    # Otherwise, select the angle closest to 75 among the angles
    # whose count is larger than mean_count / 2
    mean_count = counter.total() / len(counter)
    elevation, _, hash = sorted(
        [
            (el, abs(el - 75), hash)
            for (hash, el), count in counter.items()
            if count > mean_count / 2
        ],
        key=lambda x: x[1],
    )[0]
    elevation_set = {elevation}
    raws = [
        raw
        for raw in filtered_raws
        if raw.header.mergeable_hash() == hash
        and arr_to_rounded_set(raw.elevation) == elevation_set
    ]
    return raws


def _get_nrounded_angles(arr: npt.NDArray[np.float64]) -> int:
    return len(set((x + 360) % 360 for x in arr_to_rounded_set(arr)))
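For orientation, here is a minimal usage sketch of the wind product defined above. It is not taken from the package documentation: the input file names are hypothetical placeholders, and the snippet only exercises the constructor, derived properties, and writer shown in this file.

from doppy.product.wind import Options, Wind

# Hypothetical HALO .hpl files containing VAD-type scans.
hpl_files = [
    "VAD_2024-01-01_00.hpl",
    "VAD_2024-01-01_01.hpl",
]

# Fit wind profiles from the scans, applying an optional azimuth correction.
wind = Wind.from_halo_data(hpl_files, options=Options(azimuth_offset_deg=30.0))

# Derived quantities and the quality mask are plain numpy arrays.
print(wind.horizontal_wind_speed.shape, wind.mask.sum())

# Write the screened and non-screened components to a netCDF file.
wind.write_to_netcdf("halo-wind.nc")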
doppy/py.typed
ADDED
File without changes
doppy/raw/__init__.py
ADDED
@@ -0,0 +1,16 @@
from .halo_bg import HaloBg
from .halo_hpl import HaloHpl
from .halo_sys_params import HaloSysParams
from .windcube import WindCube, WindCubeFixed
from .wls70 import Wls70
from .wls77 import Wls77

__all__ = [
    "HaloHpl",
    "HaloBg",
    "HaloSysParams",
    "WindCube",
    "WindCubeFixed",
    "Wls70",
    "Wls77",
]
doppy/raw/halo_bg.py
ADDED
@@ -0,0 +1,173 @@
from __future__ import annotations

import io
import re
from dataclasses import dataclass
from datetime import datetime
from io import BufferedIOBase
from pathlib import Path
from typing import Sequence

import numpy as np
import numpy.typing as npt
from numpy import datetime64

from doppy.exceptions import RawParsingError


@dataclass
class HaloBg:
    time: npt.NDArray[datetime64]  # dim: (time, )
    signal: npt.NDArray[np.float64]  # dim: (time, range)

    @property
    def ngates(self) -> int:
        return int(self.signal.shape[1])

    @classmethod
    def from_srcs(
        cls,
        data: Sequence[str]
        | Sequence[Path]
        | Sequence[tuple[bytes, str]]
        | Sequence[tuple[BufferedIOBase, str]],
    ) -> list[HaloBg]:
        """
        Creates a list of `HaloBg` instances from various data sources.

        Parameters
        ----------
        data
            A sequence of data source identifiers which can be file paths (as strings
            or `Path` objects), tuples of raw byte data with filenames, or tuples of
            buffered reader streams with filenames.

        Returns
        -------
        list[HaloBg]
            A list of `HaloBg` instances created from the provided data sources. Data
            sources that cause a raw parsing error are ignored and not included in
            the resulting list.

        Raises
        ------
        TypeError
            If `data` is not a list or tuple of supported types.
        """
        if not isinstance(data, (list, tuple)):
            raise TypeError("data should be list or tuple")
        # TODO: rust reader and proper type checking
        data_normalised = []
        for item in data:
            if isinstance(item, str):
                path = Path(item)
                with path.open("rb") as f:
                    data_normalised.append((f.read(), path.name))
            elif isinstance(item, Path):
                with item.open("rb") as f:
                    data_normalised.append((f.read(), item.name))
            elif isinstance(item, tuple) and isinstance(item[0], bytes):
                data_normalised.append(item)
            elif isinstance(item, tuple) and isinstance(item[0], BufferedIOBase):
                data_normalised.append((item[0].read(), item[1]))
        bgs = []
        for data_bytes, filename in data_normalised:
            try:
                bgs.append(HaloBg.from_src(data_bytes, filename))
            except RawParsingError:
                continue
        return bgs

    @classmethod
    def from_src(
        cls, data: str | Path | bytes | BufferedIOBase, filename: str | None = None
    ) -> HaloBg:
        if isinstance(data, str):
            path = Path(data)
            if filename is None:
                filename = path.name
            with path.open("rb") as f:
                return _from_src(f, filename)
        elif isinstance(data, Path):
            if filename is None:
                filename = data.name
            with data.open("rb") as f:
                return _from_src(f, filename)
        elif isinstance(data, bytes):
            if filename is None:
                raise TypeError("Filename is mandatory if data is given as bytes")
            return _from_src(io.BytesIO(data), filename)
        elif isinstance(data, BufferedIOBase):
            if filename is None:
                raise TypeError(
                    "Filename is mandatory if data is given as BufferedIOBase"
                )
            return _from_src(data, filename)
        else:
            raise TypeError("Unsupported data type")

    @classmethod
    def merge(cls, raws: Sequence[HaloBg]) -> HaloBg:
        return cls(
            np.concatenate(tuple(r.time for r in raws)),
            np.concatenate(tuple(r.signal for r in raws)),
        )

    def __getitem__(
        self,
        index: int
        | slice
        | list[int]
        | npt.NDArray[np.int64]
        | npt.NDArray[np.bool_]
        | tuple[slice, slice],
    ) -> HaloBg:
        if isinstance(index, (int, slice, list, np.ndarray)):
            return HaloBg(self.time[index], self.signal[index])
        elif isinstance(index, tuple):
            return HaloBg(self.time[index[0]], self.signal[index])
        raise TypeError

    def sorted_by_time(self) -> HaloBg:
        sort_indices = np.argsort(self.time)
        return self[sort_indices]

    def non_strictly_increasing_timesteps_removed(self) -> HaloBg:
        is_increasing = np.insert(np.diff(self.time).astype(int) > 0, 0, True)
        return self[is_increasing]


def _from_src(data: BufferedIOBase, filename: str) -> HaloBg:
    if not (m := re.match(r"^Background_(\d{6}-\d{6}).txt", filename)):
        raise ValueError(f"Cannot parse datetime from filename: {filename}")
    time = np.array(datetime64(datetime.strptime(m.group(1), "%d%m%y-%H%M%S")))[
        np.newaxis
    ]

    data_bytes = data.read().strip()
    if b"\r\n" not in data_bytes:
        signal = _from_src_without_newlines(data_bytes)
    else:
        try:
            signal = np.array(list(map(float, data_bytes.split(b"\r\n"))))[np.newaxis]
        except ValueError:
            try:
                signal = np.array(
                    list(map(float, data_bytes.replace(b",", b".").split(b"\r\n")))
                )[np.newaxis]
            except ValueError as err:
                raise RawParsingError(err) from err

    return HaloBg(time, signal)


def _from_src_without_newlines(data: bytes) -> npt.NDArray[np.float64]:
    NUMBER_OF_DECIMALS = 6
    match = re.finditer(rb"\.", data)
    start = 0
    signal = []
    for i in match:
        end = i.end() + NUMBER_OF_DECIMALS
        signal.append(float(data[start:end]))
        start = end
    return np.array(signal)[np.newaxis]
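For reference, a minimal sketch of how this background reader might be used; the file names below are hypothetical but follow the Background_DDMMYY-HHMMSS.txt pattern that _from_src expects, and only the methods defined in this file are used.

from doppy.raw import HaloBg

# Hypothetical background files; sources that fail to parse are skipped by from_srcs.
bgs = HaloBg.from_srcs(
    [
        "Background_010124-000013.txt",
        "Background_020124-000015.txt",
    ]
)

# Combine into a single time-ordered record with duplicate timesteps removed.
bg = (
    HaloBg.merge(bgs)
    .sorted_by_time()
    .non_strictly_increasing_timesteps_removed()
)
print(bg.ngates, bg.signal.shape)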