doppy 0.1.4.tar.gz → 0.2.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Files changed (40)
  1. {doppy-0.1.4 → doppy-0.2.1}/Cargo.lock +2 -2
  2. {doppy-0.1.4 → doppy-0.2.1}/Cargo.toml +1 -1
  3. {doppy-0.1.4 → doppy-0.2.1}/PKG-INFO +2 -2
  4. doppy-0.2.1/crates/doppy_rs/src/raw/wls70.rs +77 -0
  5. {doppy-0.1.4 → doppy-0.2.1}/crates/doppy_rs/src/raw.rs +2 -0
  6. {doppy-0.1.4 → doppy-0.2.1}/crates/doprs/src/raw/error.rs +1 -1
  7. doppy-0.2.1/crates/doprs/src/raw/wls70.rs +232 -0
  8. {doppy-0.1.4 → doppy-0.2.1}/crates/doprs/src/raw.rs +1 -0
  9. {doppy-0.1.4 → doppy-0.2.1}/pyproject.toml +1 -1
  10. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/data/api.py +7 -1
  11. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/product/stare.py +4 -0
  12. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/product/wind.py +36 -0
  13. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/raw/__init__.py +2 -1
  14. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/raw/halo_hpl.py +3 -10
  15. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/raw/windcube.py +6 -0
  16. doppy-0.2.1/src/doppy/raw/wls70.py +192 -0
  17. doppy-0.2.1/src/doppy/utils.py +9 -0
  18. {doppy-0.1.4 → doppy-0.2.1}/LICENSE +0 -0
  19. {doppy-0.1.4 → doppy-0.2.1}/README.md +0 -0
  20. {doppy-0.1.4 → doppy-0.2.1}/crates/doppy_rs/Cargo.toml +0 -0
  21. {doppy-0.1.4 → doppy-0.2.1}/crates/doppy_rs/src/lib.rs +0 -0
  22. {doppy-0.1.4 → doppy-0.2.1}/crates/doppy_rs/src/raw/halo_hpl.rs +0 -0
  23. {doppy-0.1.4 → doppy-0.2.1}/crates/doprs/.gitignore +0 -0
  24. {doppy-0.1.4 → doppy-0.2.1}/crates/doprs/Cargo.toml +0 -0
  25. {doppy-0.1.4 → doppy-0.2.1}/crates/doprs/src/lib.rs +0 -0
  26. {doppy-0.1.4 → doppy-0.2.1}/crates/doprs/src/raw/halo_hpl.rs +0 -0
  27. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/__init__.py +0 -0
  28. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/__main__.py +0 -0
  29. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/bench.py +0 -0
  30. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/data/__init__.py +0 -0
  31. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/data/cache.py +0 -0
  32. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/data/exceptions.py +0 -0
  33. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/defaults.py +0 -0
  34. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/exceptions.py +0 -0
  35. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/netcdf.py +0 -0
  36. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/options.py +0 -0
  37. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/product/__init__.py +0 -0
  38. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/py.typed +0 -0
  39. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/raw/halo_bg.py +0 -0
  40. {doppy-0.1.4 → doppy-0.2.1}/src/doppy/raw/halo_sys_params.py +0 -0

{doppy-0.1.4 → doppy-0.2.1}/Cargo.lock

@@ -106,7 +106,7 @@ checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345"
 
 [[package]]
 name = "doppy_rs"
-version = "0.1.4"
+version = "0.2.1"
 dependencies = [
  "doprs",
  "numpy",
@@ -115,7 +115,7 @@ dependencies = [
 
 [[package]]
 name = "doprs"
-version = "0.1.4"
+version = "0.2.1"
 dependencies = [
  "chrono",
  "rayon",

{doppy-0.1.4 → doppy-0.2.1}/Cargo.toml

@@ -4,6 +4,6 @@ resolver = "2"
 
 [workspace.package]
 edition = "2021"
-version = "0.1.4"
+version = "0.2.1"
 authors = ["Niko Leskinen <niko.leskinen@fmi.fi>"]
 license-file = "LICENSE"

{doppy-0.1.4 → doppy-0.2.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: doppy
-Version: 0.1.4
+Version: 0.2.1
 Classifier: Development Status :: 4 - Beta
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
@@ -13,7 +13,7 @@ Requires-Dist: requests
 Requires-Dist: urllib3
 Requires-Dist: numpy
 Requires-Dist: netcdf4
-Requires-Dist: typer[all]
+Requires-Dist: typer
 Requires-Dist: matplotlib
 Requires-Dist: scikit-learn
 Requires-Dist: scipy

doppy-0.2.1/crates/doppy_rs/src/raw/wls70.rs

@@ -0,0 +1,77 @@
+use numpy::{PyArray1, ToPyArray};
+use pyo3::exceptions::PyRuntimeError;
+use pyo3::prelude::*;
+use pyo3::types::{PyDict, PyList};
+
+type PyReturnType<'a> = (&'a PyDict, &'a PyList, &'a PyArray1<f64>);
+
+#[pymodule]
+pub fn wls70(_py: Python, m: &PyModule) -> PyResult<()> {
+    m.add_function(wrap_pyfunction!(from_filename_srcs, m)?)?;
+    m.add_function(wrap_pyfunction!(from_filename_src, m)?)?;
+    m.add_function(wrap_pyfunction!(from_bytes_srcs, m)?)?;
+    m.add_function(wrap_pyfunction!(from_bytes_src, m)?)?;
+    Ok(())
+}
+
+#[pyfunction]
+pub fn from_bytes_srcs<'a>(
+    py: Python<'a>,
+    contents: Vec<&'a [u8]>,
+) -> PyResult<Vec<PyReturnType<'a>>> {
+    let raws = doprs::raw::wls70::from_bytes_srcs(contents);
+    let mut result = Vec::new();
+    for raw in raws {
+        result.push(convert_to_python(py, raw)?);
+    }
+    Ok(result)
+}
+
+#[pyfunction]
+pub fn from_bytes_src<'a>(py: Python<'a>, content: &'a [u8]) -> PyResult<PyReturnType<'a>> {
+    let raw = match doprs::raw::wls70::from_bytes_src(content) {
+        Ok(raw) => raw,
+        Err(e) => {
+            return Err(PyRuntimeError::new_err(format!(
+                "Failed to read files: {}",
+                e
+            )))
+        }
+    };
+    convert_to_python(py, raw)
+}
+
+#[pyfunction]
+pub fn from_filename_srcs(py: Python, filenames: Vec<String>) -> PyResult<Vec<PyReturnType>> {
+    let raws = doprs::raw::wls70::from_filename_srcs(filenames);
+    let mut result = Vec::new();
+    for raw in raws {
+        result.push(convert_to_python(py, raw)?);
+    }
+    Ok(result)
+}
+
+#[pyfunction]
+pub fn from_filename_src(py: Python, filename: String) -> PyResult<PyReturnType> {
+    let raw = match doprs::raw::wls70::from_filename_src(filename) {
+        Ok(raw) => raw,
+        Err(e) => {
+            return Err(PyRuntimeError::new_err(format!(
+                "Failed to read files: {}",
+                e
+            )))
+        }
+    };
+    convert_to_python(py, raw)
+}
+
+fn convert_to_python(py: Python, raw: doprs::raw::wls70::Wls70) -> PyResult<PyReturnType> {
+    let info_dict = PyDict::new(py);
+    info_dict.set_item("altitude", raw.info.altitude.as_slice().to_pyarray(py))?;
+    info_dict.set_item("system_id", raw.info.system_id)?;
+    Ok((
+        info_dict,
+        PyList::new(py, raw.data_columns),
+        raw.data.as_slice().to_pyarray(py),
+    ))
+}
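
Not part of the diff: a minimal Python sketch of what these pyo3 bindings hand back. The tuple layout follows convert_to_python above; the file path is hypothetical.

import doppy

# from_bytes_src returns a 3-tuple:
#   info    - dict with "altitude" (1-D float64 array) and "system_id" (str)
#   columns - list of column names taken from the .rtd header line
#   data    - flat float64 array that the Python wrapper reshapes to
#             (n_rows, len(columns)); see src/doppy/raw/wls70.py below
with open("wls70_example.rtd", "rb") as f:  # hypothetical file
    info, columns, data = doppy.rs.raw.wls70.from_bytes_src(f.read())

print(info["system_id"], len(info["altitude"]), len(columns), data.size)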

{doppy-0.1.4 → doppy-0.2.1}/crates/doppy_rs/src/raw.rs

@@ -2,9 +2,11 @@ use pyo3::prelude::*;
 use pyo3::wrap_pymodule;
 
 pub mod halo_hpl;
+pub mod wls70;
 
 #[pymodule]
 pub fn raw(_py: Python, m: &PyModule) -> PyResult<()> {
     m.add_wrapped(wrap_pymodule!(halo_hpl::halo_hpl))?;
+    m.add_wrapped(wrap_pymodule!(wls70::wls70))?;
     Ok(())
 }

{doppy-0.1.4 → doppy-0.2.1}/crates/doprs/src/raw/error.rs

@@ -2,7 +2,7 @@ use std::fmt;
 
 #[derive(Debug, Clone)]
 pub struct RawParseError {
-    message: String,
+    pub message: String,
 }
 
 impl Default for RawParseError {

doppy-0.2.1/crates/doprs/src/raw/wls70.rs

@@ -0,0 +1,232 @@
+use std::fs::File;
+
+use chrono::{DateTime, NaiveDateTime, ParseError, Utc};
+use rayon::prelude::*;
+use std::io::{BufRead, Cursor, Read};
+
+use crate::raw::error::RawParseError;
+
+#[derive(Debug, Default, Clone)]
+pub struct Wls70 {
+    pub info: Info,
+    pub data_columns: Vec<String>,
+    pub data: Vec<f64>,
+}
+
+#[derive(Debug, Default, Clone)]
+pub struct Info {
+    pub altitude: Vec<f64>,
+    pub system_id: String,
+}
+
+pub fn from_file_src(mut file: &File) -> Result<Wls70, RawParseError> {
+    let mut content = vec![];
+    file.read_to_end(&mut content)?;
+    from_bytes_src(&content)
+}
+
+pub fn from_filename_src(filename: String) -> Result<Wls70, RawParseError> {
+    let file = File::open(filename)?;
+    from_file_src(&file)
+}
+
+pub fn from_filename_srcs(filenames: Vec<String>) -> Vec<Wls70> {
+    let results = filenames
+        .par_iter()
+        .filter_map(|filename| from_filename_src(filename.to_string()).ok())
+        .collect();
+    results
+}
+
+pub fn from_file_srcs(files: Vec<&File>) -> Vec<Wls70> {
+    let results = files
+        .par_iter()
+        .filter_map(|file| from_file_src(file).ok())
+        .collect();
+    results
+}
+
+pub fn from_bytes_srcs(contents: Vec<&[u8]>) -> Vec<Wls70> {
+    let results = contents
+        .par_iter()
+        .filter_map(|content| from_bytes_src(content).ok())
+        .collect();
+    results
+}
+
+enum Phase {
+    Info,
+    Data,
+}
+
+pub fn from_bytes_src(content: &[u8]) -> Result<Wls70, RawParseError> {
+    let cur = Cursor::new(content);
+
+    let mut info_str = Vec::new();
+    let mut header = Vec::new();
+    let mut data_str = Vec::new();
+
+    let mut phase = Phase::Info;
+
+    for line in cur.split(b'\n') {
+        let line = line?;
+        match phase {
+            Phase::Info => {
+                if line.starts_with(b"Timestamp\tPosition\tTemperature") {
+                    header.extend_from_slice(&line);
+                    header.push(b'\n');
+                    phase = Phase::Data;
+                } else {
+                    info_str.extend_from_slice(&line);
+                    info_str.push(b'\n');
+                }
+            }
+            Phase::Data => {
+                data_str.extend_from_slice(&line);
+                data_str.push(b'\n');
+            }
+        }
+    }
+    let info = parse_info(&info_str)?;
+    match parse_data(&data_str) {
+        Ok((data, ncols)) => {
+            let header_str: String = header.iter().map(|&c| c as char).collect();
+            let cols: Vec<_> = header_str
+                .split(|c| c == '\t')
+                .map(|s| s.trim().to_string())
+                .filter(|s| !s.is_empty())
+                .collect();
+            if ncols != (cols.len() as i64) {
+                return Err(RawParseError {
+                    message: "Number of columns on header and number of columns in data mismatch"
+                        .to_string(),
+                });
+            }
+            Ok(Wls70 {
+                info,
+                data_columns: cols,
+                data,
+            })
+        }
+        Err(e) => Err(e),
+    }
+}
+
+fn parse_info(info_str: &[u8]) -> Result<Info, RawParseError> {
+    let mut info = Info::default();
+    for line in info_str.split(|&b| b == b'\n') {
+        match line {
+            b if b.starts_with(b"Altitudes(m)=") => {
+                info.altitude = line
+                    .split(|&b| b == b'\t')
+                    .skip(1)
+                    .map(|part| {
+                        String::from_utf8(part.to_vec())
+                            .map_err(|_| RawParseError {
+                                message: "UTF-8 conversion error".into(),
+                            })
+                            .and_then(|s| {
+                                s.trim().parse::<f64>().map_err(|_| RawParseError {
+                                    message: "Parse float error".into(),
+                                })
+                            })
+                    })
+                    .collect::<Result<Vec<f64>, _>>()?;
+            }
+            b if b.starts_with(b"ID System=") => {
+                info.system_id = std::str::from_utf8(&line[10..])
+                    .map(|s| s.trim())
+                    .map_err(|_| RawParseError {
+                        message: "UTF-8 conversion error".into(),
+                    })?
+                    .to_string();
+            }
+            _ => (),
+        }
+    }
+
+    Ok(info)
+}
+
+pub fn parse_data(data: &[u8]) -> Result<(Vec<f64>, i64), RawParseError> {
+    let mut ncols: i64 = -1;
+    let mut data_flat = vec![];
+    for line in data.split(|&b| b == b'\n') {
+        let parts: Vec<_> = line
+            .split(|&b| b == b'\t')
+            .filter(|part| !(part.is_empty() || part == b"\r"))
+            .collect();
+        if parts.is_empty() {
+            continue;
+        }
+        if ncols < 0 {
+            ncols = parts.len() as i64;
+        }
+        if ncols != parts.len() as i64 {
+            return Err(RawParseError {
+                message: "Unexpected number of columns".to_string(),
+            });
+        }
+        for (i, part) in parts.iter().enumerate() {
+            match i {
+                0 => {
+                    let date = String::from_utf8_lossy(part).trim().to_string();
+                    match datetime_to_timestamp(&date) {
+                        Ok(d) => {
+                            data_flat.push(d);
+                        }
+                        Err(_) => println!("Error with datetime"),
+                    }
+                }
+                3 => match String::from_utf8_lossy(part).trim() {
+                    "On" => {
+                        data_flat.push(1.);
+                    }
+                    "Off" => {
+                        data_flat.push(0.);
+                    }
+                    _ => {
+                        println!("Failed to read Wiper state");
+                        return Err(RawParseError {
+                            message: "Unexpected value for Wiper state".to_string(),
+                        });
+                    }
+                },
+                _ => match String::from_utf8_lossy(part).trim().parse::<f64>() {
+                    Ok(x) => {
+                        data_flat.push(x);
+                    }
+                    Err(_) => println!("Cannot parse float"),
+                },
+            }
+        }
+    }
+    if ncols < 1 || (data_flat.len() as i64) % ncols != 0 {
+        return Err(RawParseError {
+            message: "Unexpected number of columns".to_string(),
+        });
+    }
+    Ok((data_flat, ncols))
+}
+
+fn datetime_to_timestamp(s: &str) -> Result<f64, ParseError> {
+    let format = "%d/%m/%Y %H:%M:%S%.f";
+    let ndt = NaiveDateTime::parse_from_str(s, format)?;
+    let dt = DateTime::<Utc>::from_naive_utc_and_offset(ndt, Utc);
+    Ok(dt.timestamp() as f64 + dt.timestamp_subsec_millis() as f64 / 1000.0)
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use std::fs::File;
+
+    #[test]
+    fn test_from_file_src() -> Result<(), Box<dyn std::error::Error>> {
+        let file_path = "../../data/palaiseau/2024-04-01/wlscerea_0a_windLz1Lb87R10s-HR_v01_20240401_000000.rtd";
+        let file = File::open(file_path)?;
+        assert!(from_file_src(&file).is_ok());
+
+        Ok(())
+    }
+}
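
Not part of the diff: a rough Python equivalent of datetime_to_timestamp above, handy for checking the Timestamp column format the parser expects. Note that chrono's %.f also accepts a missing fraction, which this sketch does not handle; the sample value is made up.

from datetime import datetime, timezone

# Mirrors datetime_to_timestamp in crates/doprs/src/raw/wls70.rs, assuming
# the fractional seconds are present in the string.
def wls70_timestamp(s: str) -> float:
    dt = datetime.strptime(s, "%d/%m/%Y %H:%M:%S.%f").replace(tzinfo=timezone.utc)
    return dt.timestamp()

print(wls70_timestamp("01/04/2024 00:00:00.500"))  # 1711929600.5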

{doppy-0.1.4 → doppy-0.2.1}/crates/doprs/src/raw.rs

@@ -1,2 +1,3 @@
 pub mod error;
 pub mod halo_hpl;
+pub mod wls70;

{doppy-0.1.4 → doppy-0.2.1}/pyproject.toml

@@ -25,7 +25,7 @@ dependencies = [
  "urllib3",
  "numpy",
  "netCDF4",
- "typer[all]",
+ "typer",
  "matplotlib",
  "scikit-learn",
  "scipy"

{doppy-0.1.4 → doppy-0.2.1}/src/doppy/data/api.py

@@ -37,7 +37,13 @@ class Api:
         return self.get(
             "raw-files",
             params={
-                "instrument": ["halo-doppler-lidar", "wls100s", "wls200s", "wls400s"],
+                "instrument": [
+                    "halo-doppler-lidar",
+                    "wls100s",
+                    "wls200s",
+                    "wls400s",
+                    "wls70",
+                ],
                 "site": site,
                 "date": date,
             },

{doppy-0.1.4 → doppy-0.2.1}/src/doppy/product/stare.py

@@ -27,6 +27,7 @@ class Stare:
     radial_velocity: npt.NDArray[np.float64]
     mask: npt.NDArray[np.bool_]
     wavelength: float
+    system_id: str
 
     @classmethod
     def from_halo_data(
@@ -66,6 +67,8 @@ class Stare:
             .non_strictly_increasing_timesteps_removed()
         )
         raw, intensity_bg_corrected = _correct_background(raw, bg, bg_correction_method)
+        if len(raw.time) == 0:
+            raise doppy.exceptions.NoDataError("No matching data and bg files")
         intensity_noise_bias_corrected = _correct_intensity_noise_bias(
             raw, intensity_bg_corrected
         )
@@ -87,6 +90,7 @@ class Stare:
             radial_velocity=raw.radial_velocity,
             mask=mask,
             wavelength=wavelength,
+            system_id=raw.header.system_id,
         )
 
 
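
Not part of the diff: a hedged sketch of the new guard in Stare.from_halo_data, which now raises NoDataError when background correction leaves no matching profiles. The argument order is inferred from the locals in the hunk above (raw data, background, correction method) and is an assumption, not a verified signature; halo_files, bg_files and correction stand in for real inputs defined elsewhere.

from doppy import exceptions
from doppy.product.stare import Stare

try:
    # Assumed call shape: raw .hpl sources, background sources, correction method.
    stare = Stare.from_halo_data(halo_files, bg_files, correction)
except exceptions.NoDataError:
    print("No matching data and bg files for this day")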

{doppy-0.1.4 → doppy-0.2.1}/src/doppy/product/wind.py

@@ -26,6 +26,7 @@ class Wind:
     meridional_wind: npt.NDArray[np.float64]
     vertical_wind: npt.NDArray[np.float64]
     mask: npt.NDArray[np.bool_]
+    system_id: str
 
     @functools.cached_property
     def horizontal_wind_speed(self) -> npt.NDArray[np.float64]:
@@ -93,6 +94,7 @@ class Wind:
             meridional_wind=wind[:, :, 1],
             vertical_wind=wind[:, :, 2],
             mask=mask,
+            system_id=raw.header.system_id,
         )
 
     @classmethod
@@ -147,6 +149,40 @@ class Wind:
             meridional_wind=wind[:, :, 1],
             vertical_wind=wind[:, :, 2],
             mask=mask,
+            system_id=raw.system_id,
+        )
+
+    @classmethod
+    def from_wls70_data(
+        cls,
+        data: Sequence[str]
+        | Sequence[Path]
+        | Sequence[bytes]
+        | Sequence[BufferedIOBase],
+    ) -> Wind:
+        raws = doppy.raw.Wls70.from_srcs(data)
+
+        if len(raws) == 0:
+            raise doppy.exceptions.NoDataError("Wls70 data missing")
+
+        raw = (
+            doppy.raw.Wls70.merge(raws)
+            .sorted_by_time()
+            .non_strictly_increasing_timesteps_removed()
+        )
+        mask = (
+            np.isnan(raw.meridional_wind)
+            | np.isnan(raw.zonal_wind)
+            | np.isnan(raw.vertical_wind)
+        )
+        return Wind(
+            time=raw.time,
+            height=raw.altitude,
+            zonal_wind=raw.zonal_wind,
+            meridional_wind=raw.meridional_wind,
+            vertical_wind=raw.vertical_wind,
+            mask=mask,
+            system_id=raw.system_id,
         )
 
 
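
Not part of the diff: a usage sketch for the new Wind.from_wls70_data classmethod. The directory path is hypothetical, and all sources must be of a single type (str, Path, bytes or BufferedIOBase), as Wls70.from_srcs checks.

from pathlib import Path

from doppy.product.wind import Wind

# Hypothetical directory of WLS70 .rtd files.
rtd_files = sorted(Path("/data/wls70/2024-04-01").glob("*.rtd"))

wind = Wind.from_wls70_data(rtd_files)
print(wind.system_id, wind.time.shape, wind.height.shape)
# horizontal_wind_speed is the cached property visible in the hunk above.
print(wind.horizontal_wind_speed.shape)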

{doppy-0.1.4 → doppy-0.2.1}/src/doppy/raw/__init__.py

@@ -2,5 +2,6 @@ from .halo_bg import HaloBg
 from .halo_hpl import HaloHpl
 from .halo_sys_params import HaloSysParams
 from .windcube import WindCube
+from .wls70 import Wls70
 
-__all__ = ["HaloHpl", "HaloBg", "HaloSysParams", "WindCube"]
+__all__ = ["HaloHpl", "HaloBg", "HaloSysParams", "WindCube", "Wls70"]

{doppy-0.1.4 → doppy-0.2.1}/src/doppy/raw/halo_hpl.py

@@ -8,7 +8,7 @@ from datetime import datetime, timedelta
 from io import BufferedIOBase
 from os.path import commonprefix
 from pathlib import Path
-from typing import Any, Sequence, TypeVar, cast
+from typing import Any, Sequence, cast
 
 import numpy as np
 import numpy.typing as npt
@@ -16,8 +16,7 @@ from numpy import datetime64, timedelta64
 
 import doppy
 from doppy import exceptions
-
-T = TypeVar("T")
+from doppy.utils import merge_all_equal
 
 
 @dataclass
@@ -258,18 +257,12 @@ class HaloHplHeader:
         )
 
 
-def _merger(key: str, lst: list[T]) -> T:
-    if len(set(lst)) != 1:
-        raise ValueError(f"Cannot merge header key {key} values {lst}")
-    return lst[0]
-
-
 def _merge_headers(headers: list[HaloHplHeader]) -> HaloHplHeader:
     return HaloHplHeader(
         filename=commonprefix([h.filename for h in headers]),
        start_time=np.min([h.start_time for h in headers]),
         **{
-            key: _merger(key, [getattr(h, key) for h in headers])
+            key: merge_all_equal(key, [getattr(h, key) for h in headers])
             for key in (
                 "gate_points",
                 "nrays",

{doppy-0.1.4 → doppy-0.2.1}/src/doppy/raw/windcube.py

@@ -10,6 +10,8 @@ import numpy.typing as npt
 from netCDF4 import Dataset, num2date
 from numpy import datetime64
 
+from doppy.utils import merge_all_equal
+
 
 @dataclass
 class WindCube:
@@ -22,6 +24,7 @@ class WindCube:
     radial_velocity: npt.NDArray[np.float64]  # dim: (time, radial_distance)
     radial_velocity_confidence: npt.NDArray[np.float64]  # dim: (time, radial_distance)
     scan_index: npt.NDArray[np.int64]
+    system_id: str
 
     @classmethod
     def from_vad_srcs(
@@ -53,6 +56,7 @@ class WindCube:
                 [r.radial_velocity_confidence for r in raws]
             ),
             cnr=np.concatenate([r.cnr for r in raws]),
+            system_id=merge_all_equal("system_id", [r.system_id for r in raws]),
         )
 
     def __getitem__(
@@ -75,6 +79,7 @@ class WindCube:
                 radial_velocity_confidence=self.radial_velocity_confidence[index],
                 cnr=self.cnr[index],
                 scan_index=self.scan_index[index],
+                system_id=self.system_id,
             )
         raise TypeError
 
@@ -189,6 +194,7 @@ def _from_vad_src(nc: Dataset) -> WindCube:
         radial_velocity=np.concatenate(radial_wind_speed_list),
         radial_velocity_confidence=np.concatenate(radial_wind_speed_confidence_list),
        cnr=np.concatenate(cnr_list),
+        system_id=nc.instrument_name,
     )
 
 

doppy-0.2.1/src/doppy/raw/wls70.py

@@ -0,0 +1,192 @@
+from __future__ import annotations
+
+from dataclasses import dataclass
+from datetime import datetime
+from io import BufferedIOBase
+from pathlib import Path
+from typing import Any, Sequence
+
+import numpy as np
+import numpy.typing as npt
+from numpy import datetime64
+
+import doppy
+from doppy import exceptions
+from doppy.utils import merge_all_equal
+
+
+@dataclass
+class Wls70:
+    time: npt.NDArray[datetime64]  # dim: (time, )
+    altitude: npt.NDArray[np.float64]  # dim: (altitude, )
+    position: npt.NDArray[np.float64]  # dim: (time, )
+    temperature: npt.NDArray[np.float64]  # dim: (time, )
+    wiper: npt.NDArray[np.bool_]  # dim: (time, )
+    cnr: npt.NDArray[np.float64]  # dim: (time, altitude)
+    radial_velocity: npt.NDArray[np.float64]  # dim: (time, altitude)
+    radial_velocity_deviation: npt.NDArray[np.float64]  # dim: (time, altitude)
+    vh: npt.NDArray[np.float64]  # dim: (time, altitude)
+    wind_direction: npt.NDArray[np.float64]  # dim: (time, altitude)
+    zonal_wind: npt.NDArray[np.float64]  # u := zonal wind?, dim: (time, altitude)
+    meridional_wind: npt.NDArray[
+        np.float64
+    ]  # v := meridional wind?, dim: (time, altitude)
+    vertical_wind: npt.NDArray[np.float64]  # w := vertical wind?, dim: (time, altitude)
+    system_id: str
+
+    @classmethod
+    def from_srcs(
+        cls,
+        data: Sequence[str]
+        | Sequence[Path]
+        | Sequence[bytes]
+        | Sequence[BufferedIOBase],
+    ) -> list[Wls70]:
+        if not isinstance(data, (list, tuple)):
+            raise TypeError("data should be list or tuple")
+        if all(isinstance(src, bytes) for src in data):
+            data_bytes = data
+        elif all(isinstance(src, str) for src in data):
+            data_bytes = []
+            for src in data:
+                with Path(src).open("rb") as f:
+                    data_bytes.append(f.read())
+        elif all(isinstance(src, Path) for src in data):
+            data_bytes = []
+            for src in data:
+                with src.open("rb") as f:
+                    data_bytes.append(f.read())
+        elif all(isinstance(src, BufferedIOBase) for src in data):
+            data_bytes = [src.read() for src in data]
+        else:
+            raise TypeError("Unexpected types in data")
+        raws = doppy.rs.raw.wls70.from_bytes_srcs(data_bytes)
+        try:
+            return [_raw_rs_to_wls70(r) for r in raws]
+        except RuntimeError as err:
+            raise exceptions.RawParsingError(err) from err
+
+    @classmethod
+    def from_src(cls, data: str | Path | bytes | BufferedIOBase) -> Wls70:
+        if isinstance(data, str):
+            path = Path(data)
+            with path.open("rb") as f:
+                data_bytes = f.read()
+        elif isinstance(data, Path):
+            with data.open("rb") as f:
+                data_bytes = f.read()
+        elif isinstance(data, bytes):
+            data_bytes = data
+        elif isinstance(data, BufferedIOBase):
+            data_bytes = data.read()
+        else:
+            raise TypeError("Unsupported data type")
+        try:
+            return _raw_rs_to_wls70(doppy.rs.raw.wls70.from_bytes_src(data_bytes))
+        except RuntimeError as err:
+            raise exceptions.RawParsingError(err) from err
+
+    def __getitem__(
+        self,
+        index: int
+        | slice
+        | list[int]
+        | npt.NDArray[np.int64]
+        | npt.NDArray[np.bool_]
+        | tuple[slice, slice],
+    ) -> Wls70:
+        if isinstance(index, (int, slice, list, np.ndarray)):
+            return Wls70(
+                time=self.time[index],
+                altitude=self.altitude,
+                position=self.position[index],
+                temperature=self.temperature[index],
+                wiper=self.wiper[index],
+                cnr=self.cnr[index],
+                radial_velocity=self.radial_velocity[index],
+                radial_velocity_deviation=self.radial_velocity_deviation[index],
+                vh=self.vh[index],
+                wind_direction=self.wind_direction[index],
+                zonal_wind=self.zonal_wind[index],
+                meridional_wind=self.meridional_wind[index],
+                vertical_wind=self.vertical_wind[index],
+                system_id=self.system_id,
+            )
+        raise TypeError
+
+    def sorted_by_time(self) -> Wls70:
+        sort_indices = np.argsort(self.time)
+        return self[sort_indices]
+
+    @classmethod
+    def merge(cls, raws: Sequence[Wls70]) -> Wls70:
+        return cls(
+            time=np.concatenate(tuple(r.time for r in raws)),
+            altitude=raws[0].altitude,
+            position=np.concatenate(tuple(r.position for r in raws)),
+            temperature=np.concatenate(tuple(r.temperature for r in raws)),
+            wiper=np.concatenate(tuple(r.wiper for r in raws)),
+            cnr=np.concatenate(tuple(r.cnr for r in raws)),
+            radial_velocity=np.concatenate(tuple(r.radial_velocity for r in raws)),
+            radial_velocity_deviation=np.concatenate(
+                tuple(r.radial_velocity_deviation for r in raws)
+            ),
+            vh=np.concatenate(tuple(r.vh for r in raws)),
+            wind_direction=np.concatenate(tuple(r.wind_direction for r in raws)),
+            zonal_wind=np.concatenate(tuple(r.zonal_wind for r in raws)),
+            meridional_wind=np.concatenate(tuple(r.meridional_wind for r in raws)),
+            vertical_wind=np.concatenate(tuple(r.vertical_wind for r in raws)),
+            system_id=merge_all_equal("system_id", [r.system_id for r in raws]),
+        )
+
+    def non_strictly_increasing_timesteps_removed(self) -> Wls70:
+        if len(self.time) == 0:
+            return self
+        mask = np.ones_like(self.time, dtype=np.bool_)
+        latest_time = self.time[0]
+        for i, t in enumerate(self.time[1:], start=1):
+            if t <= latest_time:
+                mask[i] = False
+            else:
+                latest_time = t
+        return self[mask]
+
+
+def _raw_rs_to_wls70(
+    raw_rs: tuple[dict[str, Any], list[str], npt.NDArray[np.float64]],
+) -> Wls70:
+    info, cols, data = raw_rs
+    altitude = info["altitude"]
+    system_id = info["system_id"]
+    data = data.reshape(-1, len(cols))
+    time_ts = data[:, 0]
+    time = np.array([datetime64(datetime.utcfromtimestamp(ts)) for ts in time_ts])
+
+    position = data[:, 1]
+    temperature = data[:, 2]
+    wiper = np.array(np.isclose(data[:, 3], 1), dtype=np.bool_)
+    cnr = data[:, 4::8]
+    rws = data[:, 5::8]
+    rwsd = data[:, 6::8]
+    vh = data[:, 7::8]
+    direction = data[:, 8::8]
+    u = data[:, 9::8]
+    v = data[:, 10::8]
+    w = data[:, 11::8]
+
+    return Wls70(
+        time=time,
+        altitude=altitude,
+        position=position,
+        temperature=temperature,
+        wiper=wiper,
+        cnr=cnr,
+        radial_velocity=rws,
+        radial_velocity_deviation=rwsd,
+        vh=vh,
+        wind_direction=direction,
+        zonal_wind=u,
+        meridional_wind=v,
+        vertical_wind=w,
+        system_id=system_id,
+    )
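
Not part of the diff: a usage sketch for the new raw reader exposed as doppy.raw.Wls70. The file path is hypothetical; from_src also accepts Path, bytes or a BufferedIOBase, and from_srcs handles a sequence of sources of one type.

import doppy

raw = doppy.raw.Wls70.from_src("/data/wls70/wls70_example.rtd")  # hypothetical path

print(raw.system_id)
print(raw.time.shape, raw.altitude.shape)  # (time,), (altitude,)
print(raw.radial_velocity.shape)           # (time, altitude)
print(raw.wiper[:5])                       # wiper on/off per profile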

doppy-0.2.1/src/doppy/utils.py

@@ -0,0 +1,9 @@
+from typing import TypeVar
+
+T = TypeVar("T")
+
+
+def merge_all_equal(key: str, lst: list[T]) -> T:
+    if len(set(lst)) != 1:
+        raise ValueError(f"Cannot merge header key {key} values {lst}")
+    return lst[0]
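
Not part of the diff: a small example of the helper now shared by halo_hpl.py, windcube.py and wls70.py; the system IDs below are made-up values.

from doppy.utils import merge_all_equal

print(merge_all_equal("system_id", ["WLS70-14", "WLS70-14"]))  # prints WLS70-14

merge_all_equal("system_id", ["WLS70-14", "WLS70-15"])
# ValueError: Cannot merge header key system_id values ['WLS70-14', 'WLS70-15']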