doppy-0.0.1-cp310-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their public registries.

Potentially problematic release: this version of doppy might be problematic.

doppy/raw/halo_bg.py ADDED
@@ -0,0 +1,142 @@
+ from __future__ import annotations
+
+ import io
+ import re
+ from dataclasses import dataclass
+ from datetime import datetime
+ from io import BufferedIOBase
+ from pathlib import Path
+ from typing import Sequence
+
+ import numpy as np
+ import numpy.typing as npt
+ from numpy import datetime64
+
+
+ @dataclass
+ class HaloBg:
+     time: npt.NDArray[datetime64] # dim: (time, )
+     signal: npt.NDArray[np.float64] # dim: (time, range)
+
+     @property
+     def ngates(self) -> int:
+         return int(self.signal.shape[1])
+
+     @classmethod
+     def from_srcs(
+         cls,
+         data: Sequence[str]
+         | Sequence[Path]
+         | Sequence[tuple[bytes, str]]
+         | Sequence[tuple[BufferedIOBase, str]],
+     ) -> list[HaloBg]:
+         if not isinstance(data, (list, tuple)):
+             raise TypeError("data should be list or tuple")
+         # TODO: rust reader and proper type checking
+         data_normalised = []
+         for item in data:
+             if isinstance(item, str):
+                 path = Path(item)
+                 with path.open("rb") as f:
+                     data_normalised.append((f.read(), path.name))
+             elif isinstance(item, Path):
+                 with item.open("rb") as f:
+                     data_normalised.append((f.read(), item.name))
+             elif isinstance(item, tuple) and isinstance(item[0], bytes):
+                 data_normalised.append(item)
+             elif isinstance(item, tuple) and isinstance(item[0], BufferedIOBase):
+                 data_normalised.append((item[0].read(), item[1]))
+         return [
+             HaloBg.from_src(data_bytes, filename)
+             for data_bytes, filename in data_normalised
+         ]
+
+     @classmethod
+     def from_src(
+         cls, data: str | Path | bytes | BufferedIOBase, filename: str | None = None
+     ) -> HaloBg:
+         if isinstance(data, str):
+             path = Path(data)
+             if filename is None:
+                 filename = path.name
+             with path.open("rb") as f:
+                 return _from_src(f, filename)
+         elif isinstance(data, Path):
+             if filename is None:
+                 filename = data.name
+             with data.open("rb") as f:
+                 return _from_src(f, filename)
+         elif isinstance(data, bytes):
+             if filename is None:
+                 raise TypeError("Filename is mandatory if data is given as bytes")
+             return _from_src(io.BytesIO(data), filename)
+         elif isinstance(data, BufferedIOBase):
+             if filename is None:
+                 raise TypeError(
+                     "Filename is mandatory if data is given as BufferedIOBase"
+                 )
+             return _from_src(data, filename)
+         else:
+             raise TypeError("Unsupported data type")
+
+     @classmethod
+     def merge(cls, raws: Sequence[HaloBg]) -> HaloBg:
+         return cls(
+             np.concatenate(tuple(r.time for r in raws)),
+             np.concatenate(tuple(r.signal for r in raws)),
+         )
+
+     def __getitem__(
+         self,
+         index: int
+         | slice
+         | list[int]
+         | npt.NDArray[np.int64]
+         | npt.NDArray[np.bool_]
+         | tuple[slice, slice],
+     ) -> HaloBg:
+         if isinstance(index, (int, slice, list, np.ndarray)):
+             return HaloBg(self.time[index], self.signal[index])
+         elif isinstance(index, tuple):
+             return HaloBg(self.time[index[0]], self.signal[index])
+         raise TypeError
+
+     def sorted_by_time(self) -> HaloBg:
+         sort_indices = np.argsort(self.time)
+         return self[sort_indices]
+
+     def non_strictly_increasing_timesteps_removed(self) -> HaloBg:
+         is_increasing = np.insert(np.diff(self.time).astype(int) > 0, 0, True)
+         return self[is_increasing]
+
+
+ def _from_src(data: BufferedIOBase, filename: str) -> HaloBg:
+     if not (m := re.match(r"^Background_(\d{6}-\d{6}).txt", filename)):
+         raise ValueError(f"Cannot parse datetime from filename: {filename}")
+     time = np.array(datetime64(datetime.strptime(m.group(1), "%d%m%y-%H%M%S")))[
+         np.newaxis
+     ]
+
+     data_bytes = data.read().strip()
+     if b"\r\n" not in data_bytes:
+         signal = _from_src_without_newlines(data_bytes)
+     else:
+         try:
+             signal = np.array(list(map(float, data_bytes.split(b"\r\n"))))[np.newaxis]
+         except ValueError:
+             signal = np.array(
+                 list(map(float, data_bytes.replace(b",", b".").split(b"\r\n")))
+             )[np.newaxis]
+     return HaloBg(time, signal)
+
+
+ def _from_src_without_newlines(data: bytes) -> npt.NDArray[np.float64]:
+     NUMBER_OF_DECIMALS = 6
+     match = re.finditer(rb"\.", data)
+     start = 0
+     signal = []
+     for i in match:
+         end = i.end() + NUMBER_OF_DECIMALS
+         signal.append(float(data[start:end]))
+         start = end
+     return np.array(signal)[np.newaxis]
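
For context when reviewing the added module: a minimal usage sketch of the HaloBg reader, assuming doppy is installed and a background file following the Background_DDMMYY-HHMMSS.txt naming scheme is available locally (the path below is hypothetical).

from pathlib import Path

from doppy.raw.halo_bg import HaloBg

# Hypothetical path; the filename must match Background_<DDMMYY-HHMMSS>.txt,
# since _from_src parses the timestamp from the name.
src = Path("Background_110523-010024.txt")

# Parse a single file, or several at once via from_srcs.
bg = HaloBg.from_src(src)
bgs = HaloBg.from_srcs([src])

# Merge profiles and enforce strictly increasing timestamps.
merged = (
    HaloBg.merge(bgs)
    .sorted_by_time()
    .non_strictly_increasing_timesteps_removed()
)
print(merged.ngates, merged.signal.shape)
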
doppy/raw/halo_hpl.py ADDED
@@ -0,0 +1,507 @@
+ from __future__ import annotations
+
+ import functools
+ import io
+ import re
+ from dataclasses import dataclass
+ from datetime import datetime, timedelta
+ from io import BufferedIOBase
+ from os.path import commonprefix
+ from pathlib import Path
+ from typing import Any, Sequence, TypeVar, cast
+
+ import numpy as np
+ import numpy.typing as npt
+ from numpy import datetime64, timedelta64
+
+ import doppy
+ from doppy import exceptions
+
+ T = TypeVar("T")
+
+
+
+ @dataclass
+ class HaloHpl:
+     header: HaloHplHeader
+     time: npt.NDArray[datetime64] # dim: (time, )
+     radial_distance: npt.NDArray[np.float64] # dim: (radial_distance, )
+     azimuth: npt.NDArray[np.float64] # dim: (time, )
+     elevation: npt.NDArray[np.float64] # dim: (time, )
+     pitch: npt.NDArray[np.float64] | None # dim: (time, )
+     roll: npt.NDArray[np.float64] | None # dim: (time, )
+     radial_velocity: npt.NDArray[np.float64] # dim: (time, radial_distance)
+     intensity: npt.NDArray[np.float64] # dim: (time, radial_distance)
+     beta: npt.NDArray[np.float64] # dim: (time, radial_distance)
+     spectral_width: npt.NDArray[np.float64] | None # dim: (time, radial_distance )
+
+     @classmethod
+     def from_srcs(
+         cls,
+         data: Sequence[str]
+         | Sequence[Path]
+         | Sequence[bytes]
+         | Sequence[BufferedIOBase],
+     ) -> list[HaloHpl]:
+         if not isinstance(data, (list, tuple)):
+             raise TypeError("data should be list or tuple")
+         if all(isinstance(src, bytes) for src in data):
+             data_bytes = data
+         elif all(isinstance(src, str) for src in data):
+             data_bytes = []
+             for src in data:
+                 with Path(src).open("rb") as f:
+                     data_bytes.append(f.read())
+         elif all(isinstance(src, Path) for src in data):
+             data_bytes = []
+             for src in data:
+                 with src.open("rb") as f:
+                     data_bytes.append(f.read())
+         elif all(isinstance(src, BufferedIOBase) for src in data):
+             data_bytes = [src.read() for src in data]
+         else:
+             raise TypeError("Unexpected types in data")
+         raw_dicts = doppy.rs.raw.halo_hpl.from_bytes_srcs(data_bytes)
+         try:
+             return [_raw_tuple2halo_hpl(r) for r in raw_dicts]
+         except RuntimeError as err:
+             raise exceptions.RawParsingError(err) from err
+
+     @classmethod
+     def from_src(cls, data: str | Path | bytes | BufferedIOBase) -> HaloHpl:
+         if isinstance(data, str):
+             path = Path(data)
+             with path.open("rb") as f:
+                 data_bytes = f.read()
+         elif isinstance(data, Path):
+             with data.open("rb") as f:
+                 data_bytes = f.read()
+         elif isinstance(data, bytes):
+             data_bytes = data
+         elif isinstance(data, BufferedIOBase):
+             data_bytes = data.read()
+         else:
+             raise TypeError("Unsupported data type")
+         try:
+             return _raw_tuple2halo_hpl(doppy.rs.raw.halo_hpl.from_bytes_src(data_bytes))
+         except RuntimeError as err:
+             raise exceptions.RawParsingError(err) from err
+
+     @classmethod
+     def _py_from_src(cls, data: str | Path | bytes | BufferedIOBase) -> HaloHpl:
+         if isinstance(data, str):
+             path = Path(data)
+             with path.open("rb") as f:
+                 return _from_src(f)
+         elif isinstance(data, Path):
+             with data.open("rb") as f:
+                 return _from_src(f)
+         elif isinstance(data, bytes):
+             return _from_src(io.BytesIO(data))
+         elif isinstance(data, BufferedIOBase):
+             return _from_src(data)
+         else:
+             raise TypeError("Unsupported data type")
+
+     def __getitem__(
+         self,
+         index: int
+         | slice
+         | list[int]
+         | npt.NDArray[np.int64]
+         | npt.NDArray[np.bool_]
+         | tuple[slice, slice],
+     ) -> HaloHpl:
+         if isinstance(index, (int, slice, list, np.ndarray)):
+             return HaloHpl(
+                 header=self.header,
+                 time=self.time[index],
+                 radial_distance=self.radial_distance,
+                 azimuth=self.azimuth[index],
+                 elevation=self.elevation[index],
+                 radial_velocity=self.radial_velocity[index],
+                 intensity=self.intensity[index],
+                 beta=self.beta[index],
+                 pitch=self.pitch[index] if self.pitch is not None else None,
+                 roll=self.roll[index] if self.roll is not None else None,
+                 spectral_width=self.spectral_width[index]
+                 if self.spectral_width is not None
+                 else None,
+             )
+         raise TypeError
+
+     @classmethod
+     def merge(cls, raws: Sequence[HaloHpl]) -> HaloHpl:
+         return cls(
+             header=_merge_headers([r.header for r in raws]),
+             time=np.concatenate(tuple(r.time for r in raws)),
+             radial_distance=raws[0].radial_distance,
+             azimuth=np.concatenate(tuple(r.azimuth for r in raws)),
+             elevation=np.concatenate(tuple(r.elevation for r in raws)),
+             radial_velocity=np.concatenate(tuple(r.radial_velocity for r in raws)),
+             intensity=np.concatenate(tuple(r.intensity for r in raws)),
+             beta=np.concatenate(tuple(r.beta for r in raws)),
+             pitch=_merge_float_arrays_or_nones(tuple(r.pitch for r in raws)),
+             roll=_merge_float_arrays_or_nones(tuple(r.roll for r in raws)),
+             spectral_width=_merge_float_arrays_or_nones(
+                 tuple(r.spectral_width for r in raws)
+             ),
+         )
+
+     @functools.cached_property
+     def azimuth_angles(self) -> set[int]:
+         return set(int(x) for x in np.round(self.azimuth))
+
+     @functools.cached_property
+     def elevation_angles(self) -> set[int]:
+         return set(int(x) for x in np.round(self.elevation))
+
+     @functools.cached_property
+     def time_diffs(self) -> set[int]:
+         return set(np.diff(self.time.astype("datetime64[s]").astype("int")))
+
+     @functools.cached_property
+     def median_time_diff(self) -> float:
+         med = np.round(
+             np.median(
+                 np.diff(1e-6 * self.time.astype("datetime64[us]").astype("float"))
+             ),
+             2,
+         )
+         if isinstance(med, float):
+             return med
+         raise TypeError
+
+     def sorted_by_time(self) -> HaloHpl:
+         sort_indices = np.argsort(self.time)
+         return self[sort_indices]
+
+     def non_strictly_increasing_timesteps_removed(self) -> HaloHpl:
+         if len(self.time) == 0:
+             return self
+         mask = np.ones_like(self.time, dtype=np.bool_)
+         latest_time = self.time[0]
+         for i, t in enumerate(self.time[1:], start=1):
+             if t <= latest_time:
+                 mask[i] = False
+             else:
+                 latest_time = t
+         return self[mask]
+
+
+ @dataclass(slots=True)
+ class HaloHplHeader:
+     filename: str
+     gate_points: int
+     nrays: int | None
+     nwaypoints: int | None
+     ngates: int
+     pulses_per_ray: int
+     range_gate_length: float
+     resolution: float
+     scan_type: str
+     focus_range: int
+     start_time: datetime64
+     system_id: str
+     instrument_spectral_width: float | None
+
+     @classmethod
+     def from_dict(cls, data: dict[bytes, bytes]) -> HaloHplHeader:
+         return cls(
+             filename=data[b"Filename"].decode(),
+             gate_points=int(data[b"Gate length (pts)"]),
+             nrays=(
+                 int(data[b"No. of rays in file"])
+                 if b"No. of rays in file" in data
+                 else None
+             ),
+             nwaypoints=(
+                 int(data[b"No. of waypoints in file"])
+                 if b"No. of waypoints in file" in data
+                 else None
+             ),
+             ngates=int(data[b"Number of gates"]),
+             pulses_per_ray=int(data[b"Pulses/ray"]),
+             range_gate_length=float(data[b"Range gate length (m)"]),
+             resolution=float(data[b"Resolution (m/s)"]),
+             scan_type=data[b"Scan type"].decode(),
+             focus_range=int(data[b"Focus range"]),
+             start_time=_parser_start_time(data[b"Start time"]),
+             system_id=data[b"System ID"].decode(),
+             instrument_spectral_width=(
+                 float(data[b"instrument_spectral_width"])
+                 if b"instrument_spectral_width" in data
+                 else None
+             ),
+         )
+
+
+ def _merger(lst: list[T]) -> T:
+     if len(set(lst)) != 1:
+         raise ValueError(f"Cannot merge header values {lst}")
+     return lst[0]
+
+
+ def _merge_headers(headers: list[HaloHplHeader]) -> HaloHplHeader:
+     return HaloHplHeader(
+         filename=commonprefix([h.filename for h in headers]),
+         start_time=np.min([h.start_time for h in headers]),
+         **{
+             key: _merger([getattr(h, key) for h in headers])
+             for key in (
+                 "gate_points",
+                 "nrays",
+                 "nwaypoints",
+                 "ngates",
+                 "pulses_per_ray",
+                 "range_gate_length",
+                 "resolution",
+                 "scan_type",
+                 "focus_range",
+                 "system_id",
+                 "instrument_spectral_width",
+             )
+         },
+     )
+
+
+ def _merge_float_arrays_or_nones(
+     arrs: tuple[npt.NDArray[np.float64] | None, ...],
+ ) -> npt.NDArray[np.float64] | None:
+     isnone = tuple(x is None for x in arrs)
+     if all(isnone):
+         return None
+     if any(isnone):
+         raise ValueError
+     arrs = cast(tuple[npt.NDArray[np.float64], ...], arrs)
+     return np.concatenate(arrs, axis=0)
+
+
+ def _raw_tuple2halo_hpl(
+     raw_tuple: tuple[dict[str, Any], dict[str, npt.NDArray[np.float64] | None]],
+ ) -> HaloHpl:
+     header_dict, data_dict = raw_tuple
+     header = HaloHplHeader(
+         filename=str(header_dict["filename"]),
+         gate_points=int(header_dict["gate_points"]),
+         nrays=int(header_dict["nrays"]) if header_dict["nrays"] is not None else None,
+         nwaypoints=int(header_dict["nwaypoints"])
+         if header_dict["nwaypoints"] is not None
+         else None,
+         ngates=int(header_dict["ngates"]),
+         pulses_per_ray=int(header_dict["pulses_per_ray"]),
+         range_gate_length=float(header_dict["range_gate_length"]),
+         resolution=float(header_dict["resolution"]),
+         scan_type=str(header_dict["scan_type"]),
+         focus_range=int(header_dict["focus_range"]),
+         start_time=datetime64(datetime.utcfromtimestamp(header_dict["start_time"])),
+         system_id=str(header_dict["system_id"]),
+         instrument_spectral_width=float(header_dict["instrument_spectral_width"])
+         if header_dict["instrument_spectral_width"] is not None
+         else None,
+     )
+     expected_range = np.arange(header.ngates, dtype=np.float64)
+
+     if any(
+         data_dict[key] is None
+         for key in (
+             "range",
+             "time",
+             "radial_distance",
+             "azimuth",
+             "elevation",
+             "radial_velocity",
+             "intensity",
+             "beta",
+         )
+     ):
+         raise TypeError
+     range_ = cast(npt.NDArray[np.float64], data_dict["range"]).reshape(
+         -1, header.ngates
+     )
+     radial_distance = cast(npt.NDArray[np.float64], data_dict["radial_distance"])
+     azimuth = cast(npt.NDArray[np.float64], data_dict["azimuth"])
+     elevation = cast(npt.NDArray[np.float64], data_dict["elevation"])
+     radial_velocity = cast(
+         npt.NDArray[np.float64], data_dict["radial_velocity"]
+     ).reshape(-1, header.ngates)
+     intensity = cast(npt.NDArray[np.float64], data_dict["intensity"]).reshape(
+         -1, header.ngates
+     )
+     beta = cast(npt.NDArray[np.float64], data_dict["beta"]).reshape(-1, header.ngates)
+     if not np.isclose(range_, expected_range).all():
+         raise exceptions.RawParsingError(
+             "Incoherent range gates: Number of gates in the middle of the file"
+         )
+     return HaloHpl(
+         header=header,
+         time=_convert_time(
+             header.start_time, cast(npt.NDArray[np.float64], data_dict["time"])
+         ),
+         radial_distance=radial_distance,
+         azimuth=azimuth,
+         elevation=elevation,
+         pitch=data_dict["pitch"] if data_dict["pitch"] is not None else None,
+         roll=data_dict["roll"] if data_dict["roll"] is not None else None,
+         radial_velocity=radial_velocity,
+         intensity=intensity,
+         beta=beta,
+         spectral_width=data_dict["spectral_width"].reshape(-1, header.ngates)
+         if data_dict["spectral_width"] is not None
+         else None,
+     )
+
+
+ def _convert_time(
+     start_time: datetime64, decimal_time: npt.NDArray[np.float64]
+ ) -> npt.NDArray[datetime64]:
+     """
+     Parameters
+     ----------
+     start_time: unix-time
+     decimal_time: hours since beginning of the day of start_time
+     """
+     HOURS_TO_MICROSECONDS = 3600000000.0
+     start_of_day = datetime64(start_time, "D").astype("datetime64[us]")
+     delta_hours = (decimal_time * HOURS_TO_MICROSECONDS).astype("timedelta64[us]")
+     return np.array(start_of_day + delta_hours, dtype=datetime64)
+
+
+
+ def _parser_start_time(s: bytes) -> datetime64:
+     return datetime64(datetime.strptime(s.decode(), "%Y%m%d %H:%M:%S.%f"))
+
+
+ def _from_src(data: BufferedIOBase) -> HaloHpl:
+     head = data.read(1000)
+     match_header_div = re.search(b"\\*\\*\\*\\*.*\n+", head, re.MULTILINE)
+     if match_header_div is None:
+         raise exceptions.RawParsingError("Cannot find header divider '****'")
+     data.seek(0)
+     _, div = match_header_div.span()
+     header_bytes = data.read(div)
+     header = _read_header(header_bytes)
+     data_bytes = data.read()
+     res = _read_data(data_bytes, header)
+     return res
+
+
+ def _read_header(data: bytes) -> HaloHplHeader:
+     data = data.strip()
+     data_dict = {}
+     expected_header_rows = [
+         b"Altitude of measurement (center of gate) = (range gate + 0.5) * Gate length",
+         b"Range of measurement (center of gate) = (range gate + 0.5) * Gate length",
+         b"Data line 1: Decimal time (hours) Azimuth (degrees) Elevation (degrees) "
+         b"Pitch (degrees) Roll (degrees)",
+         b"Data line 1: Decimal time (hours) Azimuth (degrees) Elevation (degrees)",
+         b"f9.6,1x,f6.2,1x,f6.2",
+         b"Data line 2: Range Gate Doppler (m/s) Intensity (SNR + 1) Beta (m-1 sr-1)",
+         b"Data line 2: Range Gate Doppler (m/s) Intensity (SNR + 1) Beta (m-1 sr-1) "
+         b"Spectral Width",
+         b"i3,1x,f6.4,1x,f8.6,1x,e12.6 - repeat for no. gates",
+         b"i3,1x,f6.4,1x,f8.6,1x,e12.6,1x,f6.4 - repeat for no. gates",
+         b"****",
+     ]
+     for line in data.split(b"\r\n"):
+         split = line.split(b":\t")
+         if len(split) == 2:
+             key, val = split
+             data_dict[key] = val
+         else:
+             (val,) = split
+             if m := re.match(rb"\*\*\*\* Instrument spectral width = (.*)", val):
+                 data_dict[b"instrument_spectral_width"] = m.group(1)
+             elif val not in expected_header_rows:
+                 raise ValueError(f"Unexpected row '{val!r}'")
+     return HaloHplHeader.from_dict(data_dict)
+
+
+ def _read_data(data: bytes, header: HaloHplHeader) -> HaloHpl:
+     if not data:
+         raise exceptions.RawParsingError("No data found")
+     data = data.strip()
+     data = data.replace(
+         b"\x00", b""
+     ) # Some files contain null characters between profiles
+     data_lines = data.split(b"\r\n")
+
+     i = 0
+     while i + 1 < len(data_lines) and data_lines[i + 1].strip().split()[0] != b"0":
+         i += 1
+     del data_lines[:i]
+
+     i = len(data_lines) - 1
+     while (
+         i - 1 >= 0
+         and header.ngates > 1
+         and len(data_lines[i].strip().split()) != len(data_lines[i - 1].strip().split())
+     ):
+         i -= 1
+     del data_lines[i + 1 :]
+
+     trailing_lines = len(data_lines) % (header.ngates + 1)
+     if trailing_lines > 0:
+         del data_lines[-trailing_lines:]
+
+     data1D_lines = data_lines[:: header.ngates + 1]
+     data1D = [list(map(float, line.split())) for line in data1D_lines]
+     try:
+         data1Darr = np.array(data1D)
+     except ValueError as err:
+         if "inhomogeneous" in str(err):
+             raise exceptions.RawParsingError(
+                 "Inhomogeneous raw data. "
+                 "Probable reason: Number of gates changes in middle of the file"
+             ) from err
+         else:
+             raise
+
+     del data_lines[:: header.ngates + 1]
+     data2D = [list(map(float, line.split())) for line in data_lines]
+     data2Darr = np.array(data2D)
+
+     decimal_time = data1Darr[:, 0]
+     time = header.start_time.astype("datetime64[D]") + np.array(
+         list(map(_decimal_time2timedelta, decimal_time))
+     )
+     azimuth = data1Darr[:, 1]
+     elevation = data1Darr[:, 2]
+     pitch = data1Darr[:, 3] if data1Darr.shape[1] > 3 else None
+     roll = data1Darr[:, 4] if data1Darr.shape[1] > 4 else None
+
+     ntimes = len(decimal_time)
+
+     data2Darr_reshape = data2Darr.reshape(ntimes, header.ngates, -1)
+
+     gate = data2Darr_reshape[:, :, 0]
+     gate_expected = np.arange(len(gate[0])).astype("float64")
+     if not all(np.allclose(gate_expected, gate[i, :]) for i in range(gate.shape[0])):
+         raise ValueError("all gate indices should be equal")
+     radial_distance = (gate_expected + 0.5) * header.range_gate_length
+
+     radial_velocity = data2Darr_reshape[:, :, 1]
+     intensity = data2Darr_reshape[:, :, 2]
+     beta = data2Darr_reshape[:, :, 3]
+
+     spectral_width = (
+         data2Darr_reshape[:, :, 4] if data2Darr_reshape.shape[2] > 4 else None
+     )
+
+     return HaloHpl(
+         header,
+         time,
+         radial_distance,
+         azimuth,
+         elevation,
+         pitch,
+         roll,
+         radial_velocity,
+         intensity,
+         beta,
+         spectral_width,
+     )
+
+
+ def _decimal_time2timedelta(h: float) -> timedelta64:
+     return timedelta64(timedelta(hours=h))
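
Similarly, a minimal sketch of how the added HaloHpl reader might be driven, assuming the compiled doppy.rs extension shipped in this wheel and a local Halo .hpl file (the path below is hypothetical).

from pathlib import Path

from doppy.raw.halo_hpl import HaloHpl

# Hypothetical path to a Halo .hpl file.
src = Path("Stare_213_20230511_01.hpl")

# from_src/from_srcs delegate parsing to the bundled Rust reader
# (doppy.rs.raw.halo_hpl) and raise doppy.exceptions.RawParsingError on failure.
raw = HaloHpl.from_src(src)
raws = HaloHpl.from_srcs([src])

# Merge, sort, and drop non-increasing timesteps before further processing.
merged = (
    HaloHpl.merge(raws)
    .sorted_by_time()
    .non_strictly_increasing_timesteps_removed()
)
print(merged.header.system_id, merged.elevation_angles, merged.radial_velocity.shape)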