doppy 0.3.3.tar.gz → 0.3.5.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (41)
  1. {doppy-0.3.3 → doppy-0.3.5}/Cargo.lock +2 -2
  2. {doppy-0.3.3 → doppy-0.3.5}/Cargo.toml +1 -1
  3. {doppy-0.3.3 → doppy-0.3.5}/PKG-INFO +1 -1
  4. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/exceptions.py +4 -0
  5. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/product/stare_depol.py +14 -10
  6. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/product/wind.py +17 -3
  7. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/raw/halo_bg.py +38 -7
  8. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/raw/windcube.py +18 -6
  9. {doppy-0.3.3 → doppy-0.3.5}/LICENSE +0 -0
  10. {doppy-0.3.3 → doppy-0.3.5}/README.md +0 -0
  11. {doppy-0.3.3 → doppy-0.3.5}/crates/doppy_rs/Cargo.toml +0 -0
  12. {doppy-0.3.3 → doppy-0.3.5}/crates/doppy_rs/src/lib.rs +0 -0
  13. {doppy-0.3.3 → doppy-0.3.5}/crates/doppy_rs/src/raw/halo_hpl.rs +0 -0
  14. {doppy-0.3.3 → doppy-0.3.5}/crates/doppy_rs/src/raw/wls70.rs +0 -0
  15. {doppy-0.3.3 → doppy-0.3.5}/crates/doppy_rs/src/raw.rs +0 -0
  16. {doppy-0.3.3 → doppy-0.3.5}/crates/doprs/.gitignore +0 -0
  17. {doppy-0.3.3 → doppy-0.3.5}/crates/doprs/Cargo.toml +0 -0
  18. {doppy-0.3.3 → doppy-0.3.5}/crates/doprs/src/lib.rs +0 -0
  19. {doppy-0.3.3 → doppy-0.3.5}/crates/doprs/src/raw/error.rs +0 -0
  20. {doppy-0.3.3 → doppy-0.3.5}/crates/doprs/src/raw/halo_hpl.rs +0 -0
  21. {doppy-0.3.3 → doppy-0.3.5}/crates/doprs/src/raw/wls70.rs +0 -0
  22. {doppy-0.3.3 → doppy-0.3.5}/crates/doprs/src/raw.rs +0 -0
  23. {doppy-0.3.3 → doppy-0.3.5}/pyproject.toml +0 -0
  24. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/__init__.py +0 -0
  25. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/__main__.py +0 -0
  26. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/bench.py +0 -0
  27. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/data/__init__.py +0 -0
  28. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/data/api.py +0 -0
  29. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/data/cache.py +0 -0
  30. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/data/exceptions.py +0 -0
  31. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/defaults.py +0 -0
  32. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/netcdf.py +0 -0
  33. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/options.py +0 -0
  34. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/product/__init__.py +0 -0
  35. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/product/stare.py +0 -0
  36. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/py.typed +0 -0
  37. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/raw/__init__.py +0 -0
  38. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/raw/halo_hpl.py +0 -0
  39. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/raw/halo_sys_params.py +0 -0
  40. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/raw/wls70.py +0 -0
  41. {doppy-0.3.3 → doppy-0.3.5}/src/doppy/utils.py +0 -0
{doppy-0.3.3 → doppy-0.3.5}/Cargo.lock
@@ -106,7 +106,7 @@ checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
 
 [[package]]
 name = "doppy_rs"
-version = "0.3.3"
+version = "0.3.5"
 dependencies = [
  "doprs",
  "numpy",
@@ -115,7 +115,7 @@ dependencies = [
 
 [[package]]
 name = "doprs"
-version = "0.3.3"
+version = "0.3.5"
 dependencies = [
  "chrono",
  "rayon",
{doppy-0.3.3 → doppy-0.3.5}/Cargo.toml
@@ -4,6 +4,6 @@ resolver = "2"
 
 [workspace.package]
 edition = "2021"
-version = "0.3.3"
+version = "0.3.5"
 authors = ["Niko Leskinen <niko.leskinen@fmi.fi>"]
 license-file = "LICENSE"
{doppy-0.3.3 → doppy-0.3.5}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: doppy
-Version: 0.3.3
+Version: 0.3.5
 Classifier: Development Status :: 4 - Beta
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
{doppy-0.3.3 → doppy-0.3.5}/src/doppy/exceptions.py
@@ -8,3 +8,7 @@ class RawParsingError(DoppyException):
 
 class NoDataError(DoppyException):
     pass
+
+
+class ShapeError(DoppyException):
+    pass
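The new ShapeError sits next to RawParsingError and NoDataError under the shared DoppyException base class, so existing broad handlers still catch it. A minimal handling sketch; the product construction itself is elided, only the exception hierarchy is exercised here:

    import doppy.exceptions

    try:
        ...  # build a StareDepol product from co- and cross-polarised stare data
    except doppy.exceptions.ShapeError as err:
        # Added between 0.3.3 and 0.3.5: co and cross range dimensions do not match.
        print(f"Incompatible co/cross data: {err}")
    except doppy.exceptions.DoppyException as err:
        # ShapeError subclasses DoppyException, so catch-alls like this keep working.
        print(f"doppy error: {err}")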
{doppy-0.3.3 → doppy-0.3.5}/src/doppy/product/stare_depol.py
@@ -71,8 +71,6 @@ class StareDepol:
     radial_velocity: npt.NDArray[np.float64]
     mask: npt.NDArray[np.bool_]
     depolarisation: npt.NDArray[np.float64]
-    mask_depolarisation: npt.NDArray[np.bool_]
-    mask_beta_cross: npt.NDArray[np.bool_]
     polariser_bleed_through: float
     wavelength: float
     system_id: str
@@ -94,6 +92,12 @@ class StareDepol:
             The amount of bleed-through from the polariser.
         """
 
+        if co.beta.shape[1] != cross.beta.shape[1]:
+            raise doppy.exceptions.ShapeError(
+                "Range dimension mismatch in co and cross: "
+                f"{co.beta.shape[1]} vs {cross.beta.shape[1]}"
+            )
+
         if not np.isclose(co.wavelength, cross.wavelength):
             raise ValueError(
                 "Different wavelength in co and cross: "
@@ -107,12 +111,6 @@ class StareDepol:
         if not np.allclose(co.radial_distance, cross.radial_distance, atol=1):
             raise ValueError("Different radial distance in co and cross")
 
-        if co.beta.shape[1] != cross.beta.shape[1]:
-            raise ValueError(
-                "Range dimension mismatch in co and cross: "
-                f"{co.beta.shape[1]} vs {cross.beta.shape[1]}"
-            )
-
         ind = np.searchsorted(cross.time, co.time, side="left")
         pick_ind = ind < len(cross.time)
         time_diff_threshold = 2 * np.median(np.diff(co.time))
@@ -142,12 +140,18 @@ class StareDepol:
         self.radial_velocity = co.radial_velocity
         self.mask = co.mask
         self.depolarisation = depolarisation
-        self.mask_depolarisation = np.isnan(depolarisation)
-        self.mask_beta_cross = np.isnan(self.beta_cross)
         self.polariser_bleed_through = polariser_bleed_through
         self.wavelength = co.wavelength
         self.system_id = co.system_id
 
+    @property
+    def mask_depolarisation(self) -> npt.NDArray[np.bool_]:
+        return np.isnan(self.depolarisation)
+
+    @property
+    def mask_beta_cross(self) -> npt.NDArray[np.bool_]:
+        return np.isnan(self.beta_cross)
+
     @classmethod
     def from_halo_data(
         cls,
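mask_depolarisation and mask_beta_cross are no longer stored in __init__; they are recomputed from the current arrays on every access. A standalone sketch of the same property pattern (a dummy class, not doppy itself) shows why this keeps the masks in sync with the data:

    import numpy as np
    import numpy.typing as npt


    class Product:
        """Dummy stand-in mirroring the property-based masks used by StareDepol."""

        def __init__(self, depolarisation: npt.NDArray[np.float64]) -> None:
            self.depolarisation = depolarisation

        @property
        def mask_depolarisation(self) -> npt.NDArray[np.bool_]:
            # Recomputed on each access instead of being frozen at construction time.
            return np.isnan(self.depolarisation)


    p = Product(np.array([0.1, np.nan, 0.3]))
    print(p.mask_depolarisation)  # [False  True False]
    p.depolarisation[0] = np.nan
    print(p.mask_depolarisation)  # [ True  True False] -- the mask follows the data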
{doppy-0.3.3 → doppy-0.3.5}/src/doppy/product/wind.py
@@ -34,6 +34,18 @@ class Wind:
     system_id: str
     options: Options | None
 
+    @property
+    def mask_zonal_wind(self) -> npt.NDArray[np.bool_]:
+        return np.isnan(self.zonal_wind)
+
+    @property
+    def mask_meridional_wind(self) -> npt.NDArray[np.bool_]:
+        return np.isnan(self.meridional_wind)
+
+    @property
+    def mask_vertical_wind(self) -> npt.NDArray[np.bool_]:
+        return np.isnan(self.vertical_wind)
+
     @functools.cached_property
     def horizontal_wind_speed(self) -> npt.NDArray[np.float64]:
         return np.sqrt(self.zonal_wind**2 + self.meridional_wind**2)
@@ -153,7 +165,7 @@ class Wind:
         elevation = np.array(elevation_list)
         wind = np.concatenate(wind_list)
         rmse = np.concatenate(rmse_list)
-        mask = _compute_mask(wind, rmse)
+        mask = _compute_mask(wind, rmse) | np.any(np.isnan(wind), axis=2)
         if not np.allclose(elevation, elevation[0]):
             raise ValueError("Elevation is expected to stay same")
         height = np.array(raw.height, dtype=np.float64)
@@ -241,6 +253,7 @@
             dimensions=("time", "height"),
             units="m s-1",
             data=self.zonal_wind,
+            mask=self.mask_zonal_wind,
             dtype="f4",
             long_name="Non-screened zonal wind",
         )
@@ -249,7 +262,7 @@
             dimensions=("time", "height"),
             units="m s-1",
             data=self.zonal_wind,
-            mask=self.mask,
+            mask=self.mask | self.mask_zonal_wind,
             dtype="f4",
             long_name="Zonal wind",
         )
@@ -258,6 +271,7 @@
             dimensions=("time", "height"),
             units="m s-1",
             data=self.meridional_wind,
+            mask=self.mask_meridional_wind,
             dtype="f4",
             long_name="Non-screened meridional wind",
         )
@@ -266,7 +280,7 @@
             dimensions=("time", "height"),
             units="m s-1",
             data=self.meridional_wind,
-            mask=self.mask,
+            mask=self.mask | self.mask_meridional_wind,
             dtype="f4",
             long_name="Meridional wind",
         )
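Two separate changes are visible above: the screening mask now also flags gates where any wind component is NaN, and the non-screened netCDF variables carry their own NaN-based masks. A standalone sketch of the mask combination, with array shapes and variable names invented purely for illustration:

    import numpy as np

    rng = np.random.default_rng(0)
    wind = rng.normal(size=(4, 3, 3))  # (time, height, component) with u, v, w stacked last
    wind[1, 2, 0] = np.nan             # one missing zonal component

    quality_mask = np.zeros(wind.shape[:2], dtype=bool)  # stand-in for the rmse-based mask
    nan_mask = np.any(np.isnan(wind), axis=2)            # True where any component is NaN

    mask = quality_mask | nan_mask
    print(mask[1, 2])  # True: the gate with a NaN component is now screened out as well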
{doppy-0.3.3 → doppy-0.3.5}/src/doppy/raw/halo_bg.py
@@ -12,6 +12,8 @@ import numpy as np
 import numpy.typing as npt
 from numpy import datetime64
 
+from doppy.exceptions import RawParsingError
+
 
 @dataclass
 class HaloBg:
@@ -30,6 +32,28 @@ class HaloBg:
         | Sequence[tuple[bytes, str]]
         | Sequence[tuple[BufferedIOBase, str]],
     ) -> list[HaloBg]:
+        """
+        Creates a list of `HaloBg` instances from various data sources.
+
+        Parameters
+        ----------
+        data
+            A sequence of data source identifiers which can be file paths (as strings
+            or `Path` objects), tuples of raw byte data with filenames, or tuples of
+            buffered reader streams with filenames.
+
+        Returns
+        -------
+        list[HaloBg]
+            A list of `HaloBg` instances created from the provided data sources. Data
+            sources that cause a raw parsing error are ignored and not included in
+            the resulting list.
+
+        Raises
+        ------
+        TypeError
+            If `data` is not a list or tuple of supported types.
+        """
         if not isinstance(data, (list, tuple)):
             raise TypeError("data should be list or tuple")
         # TODO: rust reader and proper type checking
@@ -46,10 +70,13 @@ class HaloBg:
                 data_normalised.append(item)
             elif isinstance(item, tuple) and isinstance(item[0], BufferedIOBase):
                 data_normalised.append((item[0].read(), item[1]))
-        return [
-            HaloBg.from_src(data_bytes, filename)
-            for data_bytes, filename in data_normalised
-        ]
+        bgs = []
+        for data_bytes, filename in data_normalised:
+            try:
+                bgs.append(HaloBg.from_src(data_bytes, filename))
+            except RawParsingError:
+                continue
+        return bgs
 
     @classmethod
     def from_src(
@@ -124,9 +151,13 @@ def _from_src(data: BufferedIOBase, filename: str) -> HaloBg:
     try:
         signal = np.array(list(map(float, data_bytes.split(b"\r\n"))))[np.newaxis]
     except ValueError:
-        signal = np.array(
-            list(map(float, data_bytes.replace(b",", b".").split(b"\r\n")))
-        )[np.newaxis]
+        try:
+            signal = np.array(
+                list(map(float, data_bytes.replace(b",", b".").split(b"\r\n")))
+            )[np.newaxis]
+        except ValueError as err:
+            raise RawParsingError(err) from err
+
     return HaloBg(time, signal)
 
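With the guarded loop above, one malformed background file no longer aborts a whole batch; inputs that raise RawParsingError are simply skipped. A hedged usage sketch (the directory, glob pattern, and the doppy.raw import path are assumptions based on the package layout, not taken from the docs):

    from pathlib import Path

    from doppy.raw import HaloBg  # assumed re-export; otherwise import from doppy.raw.halo_bg

    # Hypothetical location and naming of Halo background files.
    src_paths = sorted(Path("/data/halo/bg").glob("Background_*.txt"))

    # Files that fail raw parsing are silently dropped from the result.
    bgs = HaloBg.from_srcs(src_paths)
    print(f"parsed {len(bgs)} of {len(src_paths)} background files")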
 
{doppy-0.3.3 → doppy-0.3.5}/src/doppy/raw/windcube.py
@@ -160,11 +160,14 @@ def _from_vad_src(nc: Dataset) -> WindCube:
     elevation_list = []
     range_list = []
     height_list = []
+    time_reference = (
+        nc["time_reference"][:] if "time_reference" in nc.variables else None
+    )
 
     for i, group in enumerate(
         nc[group] for group in (nc.variables["sweep_group_name"][:])
     ):
-        time_list.append(_extract_datetime64_or_raise(group["time"]))
+        time_list.append(_extract_datetime64_or_raise(group["time"], time_reference))
         radial_wind_speed_list.append(
             _extract_float64_or_raise(group["radial_wind_speed"])
         )
@@ -198,12 +201,21 @@ def _from_vad_src(nc: Dataset) -> WindCube:
     )
 
 
-def _extract_datetime64_or_raise(nc: Dataset) -> npt.NDArray[np.datetime64]:
+def _extract_datetime64_or_raise(
+    nc: Dataset, time_reference: str | None
+) -> npt.NDArray[np.datetime64]:
     match nc.name:
         case "time":
            if nc.dimensions != ("time",):
                raise ValueError
-            return np.array(num2date(nc[:], units=nc.units), dtype="datetime64[us]")
+
+            units = nc.units
+            if "time_reference" in nc.units:
+                if time_reference is not None:
+                    units = nc.units.replace("time_reference", time_reference)
+                else:
+                    raise ValueError("Unknown time_reference")
+            return np.array(num2date(nc[:], units=units), dtype="datetime64[us]")
         case _:
             raise ValueError(f"Unexpected variable name {nc.name}")
 
@@ -216,7 +228,7 @@ def _extract_float64_or_raise(nc: Dataset) -> npt.NDArray[np.float64]:
             if nc.units != "dB":
                 raise ValueError(f"Unexpected units for {nc.name}")
             if nc[:].mask is not np.bool_(False):
-                raise ValueError
+                pass  # ignore that array contains masked values
             return np.array(nc[:].data, dtype=np.float64)
         case "radial_wind_speed":
             if nc.dimensions != ("time", "gate_index"):
@@ -224,7 +236,7 @@ def _extract_float64_or_raise(nc: Dataset) -> npt.NDArray[np.float64]:
             if nc.units != "m s-1":
                 raise ValueError(f"Unexpected units for {nc.name}")
             if nc[:].mask is not np.bool_(False):
-                raise ValueError
+                pass  # ignore that array contains masked values
             return np.array(nc[:].data, dtype=np.float64)
         case "radial_wind_speed_ci":
             if nc.dimensions != ("time", "gate_index"):
@@ -232,7 +244,7 @@ def _extract_float64_or_raise(nc: Dataset) -> npt.NDArray[np.float64]:
             if nc.units != "percent":
                 raise ValueError(f"Unexpected units for {nc.name}")
             if nc[:].mask is not np.bool_(False):
-                raise ValueError
+                pass  # ignore that array contains masked values
            return np.array(nc[:].data, dtype=np.float64)
         case "azimuth" | "elevation":
             if nc.dimensions != ("time",):
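The WindCube time handling now resolves units of the form "seconds since time_reference" by substituting the root-level time_reference value before num2date is called, and masked cnr / radial_wind_speed / ci arrays no longer raise. A standalone sketch of just the substitution step, with example strings invented for illustration:

    # Mirrors the units fix-up applied before num2date in the VAD reader.
    units = "seconds since time_reference"  # units attribute of a sweep's time variable
    time_reference = "2023-06-01 00:00:00"  # value of the root-level time_reference variable

    if "time_reference" in units:
        if time_reference is not None:
            units = units.replace("time_reference", time_reference)
        else:
            raise ValueError("Unknown time_reference")

    print(units)  # "seconds since 2023-06-01 00:00:00" -- now a parseable CF time unit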