doppy 0.0.1__cp310-abi3-macosx_10_12_x86_64.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of doppy has been flagged as potentially problematic; consult the registry's advisory page for more details.

doppy/__init__.py ADDED
@@ -0,0 +1,6 @@
1
+ from doppy import data, netcdf, options, product, raw
2
+ from doppy.rs import __version__
3
+
4
+ from . import bench
5
+
6
+ __all__ = ["raw", "options", "product", "netcdf", "bench", "data", "__version__"]
doppy/__main__.py ADDED
@@ -0,0 +1,25 @@
1
+ import typer
2
+
3
# Module-level Typer application; subcommands attach via @app.command() below.
app = typer.Typer()
4
+
5
+
6
@app.command()
def parse(file: str, merge: bool = False) -> None:
    """
    Parse raw hpl files into netCDF files
    """
    # NOTE(review): stub — only echoes the filename; the parsing described in
    # the help text above is not implemented yet, and `merge` is unused.
    # (The docstring doubles as the `--help` text, so it is left unchanged.)
    print(f"Hello {file}")
12
+
13
+
14
@app.command()
def stare() -> None:
    # NOTE(review): stub subcommand — prints a placeholder message only.
    print("stare cmd")
17
+
18
+
19
@app.command()
def wind() -> None:
    # NOTE(review): stub subcommand — prints a placeholder message only.
    print("wind cmd")
22
+
23
+
24
# Script entry point: dispatch to the Typer CLI (e.g. `python -m doppy`).
if __name__ == "__main__":
    app()
doppy/bench.py ADDED
@@ -0,0 +1,12 @@
1
+ import time
2
+
3
+
4
class Timer:
    """Context manager that prints the elapsed time of its `with` block.

    Usage::

        with Timer():
            do_work()
    """

    def __init__(self) -> None:
        # Set by __enter__; None until the timer has been started.
        self.start: float | None = None

    def __enter__(self) -> "Timer":
        # BUGFIX: use the monotonic perf_counter instead of time.time so the
        # measurement cannot be skewed by system clock adjustments.
        self.start = time.perf_counter()
        # BUGFIX: return self so `with Timer() as t:` binds the timer
        # (previously it bound None).
        return self

    def __exit__(self, exc_type, exc_value, traceback) -> None:
        # `exc_type` renamed from `type` to avoid shadowing the builtin.
        # Returning None (falsy) means exceptions are not suppressed.
        print(f"Elapsed time: {time.perf_counter() - self.start:.2f}")
doppy/data/__init__.py ADDED
File without changes
doppy/data/api.py ADDED
@@ -0,0 +1,44 @@
1
+ import io
2
+
3
+ import requests
4
+ from requests.adapters import HTTPAdapter
5
+ from urllib3.util.retry import Retry
6
+
7
+ from doppy.data import exceptions
8
+ from doppy.data.cache import cached_record
9
+
10
+
11
class Api:
    """Thin HTTP client for the Cloudnet data portal API.

    A single `requests.Session` is reused for every call, with retries
    (10 attempts, exponential backoff) mounted for both http and https.
    """

    def __init__(self, cache: bool = False) -> None:
        retries = Retry(total=10, backoff_factor=0.2)
        adapter = HTTPAdapter(max_retries=retries)
        session = requests.Session()
        session.mount("https://", adapter)
        session.mount("http://", adapter)
        self.session = session
        self.api_endpoint = "https://cloudnet.fmi.fi/api"
        # When True, downloaded record contents are cached on disk.
        self.cache = cache

    def get(self, path: str, params: dict[str, str]) -> list:
        """GET `{api_endpoint}/{path}` and return the decoded JSON list.

        Raises
        ------
        exceptions.ApiRequestError
            On a non-OK HTTP status, or when the payload is not a list.
        """
        res = self.session.get(
            f"{self.api_endpoint}/{path}", params=params, timeout=1800
        )
        if res.ok:
            data = res.json()
            if isinstance(data, list):
                return data
            raise exceptions.ApiRequestError(
                f"Unexpected response type from api: {type(data)}"
            )
        raise exceptions.ApiRequestError(f"Api request error: {res.status_code}")

    def get_raw_records(self, site: str, date: str) -> list:
        """List raw halo-doppler-lidar file records for `site` on `date`."""
        return self.get(
            "raw-files",
            params={"instrument": "halo-doppler-lidar", "site": site, "date": date},
        )

    def get_record_content(self, rec: dict) -> io.BytesIO:
        """Fetch a record's payload, via the on-disk cache when enabled."""
        if self.cache:
            return cached_record(rec, self.session)
        # BUGFIX: the direct download previously had no timeout and could hang
        # indefinitely; use the same 1800 s limit as `get`.
        return io.BytesIO(
            self.session.get(rec["downloadUrl"], timeout=1800).content
        )
doppy/data/cache.py ADDED
@@ -0,0 +1,34 @@
1
+ import shutil
2
+ from io import BytesIO
3
+ from pathlib import Path
4
+
5
+ from requests import Session
6
+
7
+
8
def cached_record(
    record: dict, session: "Session", check_disk_usage: bool = True
) -> BytesIO:
    """Return the record's content, caching it on disk under ./cache/<uuid>.

    Parameters
    ----------
    record
        Mapping with at least "uuid" and "downloadUrl" keys.
    session
        requests-compatible session used for the download.
    check_disk_usage
        When True, wipe the whole cache directory if less than 100 GB of
        disk space remains free before serving/downloading.
    """
    cache_dir = Path("cache")
    path = cache_dir / record["uuid"]

    if check_disk_usage:
        HUNDRED_GIGABYTES_AS_BYTES = 100 * 1024 * 1024 * 1024
        _, _, disk_free = shutil.disk_usage("./")
        if disk_free < HUNDRED_GIGABYTES_AS_BYTES:
            _clear_dir(cache_dir)

    if path.is_file():
        # Cache hit: serve the previously downloaded bytes.
        return BytesIO(path.read_bytes())

    path.parent.mkdir(parents=True, exist_ok=True)
    # BUGFIX: add a timeout so a stalled download cannot hang forever
    # (matches the 1800 s limit used by the Api client in this package).
    content = session.get(record["downloadUrl"], timeout=1800).content
    path.write_bytes(content)
    return BytesIO(content)
29
+
30
+
31
def _clear_dir(path: Path) -> None:
    """Delete every regular file under *path*, recursively.

    Directories themselves are left in place; only files are unlinked.
    """
    for entry in path.rglob("*"):
        if entry.is_file():
            entry.unlink()
@@ -0,0 +1,6 @@
1
class ApiRequestError(Exception):
    """Raised when a Cloudnet API request fails or returns an unexpected payload."""

    pass


class CliArgumentError(Exception):
    """Error for invalid command-line arguments (no raise site visible in this file)."""

    pass
doppy/defaults.py ADDED
@@ -0,0 +1,3 @@
1
class Halo:
    """Default constants for the HALO Doppler lidar instrument."""

    # Laser wavelength — presumably in metres (1.565 µm); TODO confirm units
    # against where this constant is consumed.
    wavelength: float = 1.565e-6
3
+
doppy/exceptions.py ADDED
@@ -0,0 +1,10 @@
1
class DoppyException(Exception):
    """Base class for all doppy-specific exceptions."""

    pass


class RawParsingError(DoppyException):
    """Raised when raw instrument data cannot be parsed."""

    pass


class NoDataError(DoppyException):
    """Raised when no data is available for the requested operation."""

    pass
doppy/netcdf.py ADDED
@@ -0,0 +1,113 @@
1
+ from __future__ import annotations
2
+
3
+ from pathlib import Path
4
+ from typing import Literal, TypeAlias
5
+
6
+ import netCDF4
7
+ import numpy as np
8
+ import numpy.typing as npt
9
+
10
+ NetCDFDataType: TypeAlias = Literal["f4", "f8", "i4", "i8", "u4", "u8"]
11
+
12
+
13
class Dataset:
    """Builder for an in-memory netCDF4 file with a fluent interface.

    Every ``add_*`` method returns ``self`` so calls can be chained; the
    serialized bytes are obtained with :meth:`close` or flushed to disk
    with :meth:`write`.
    """

    def __init__(self) -> None:
        # memory=1028 is only the initial in-memory buffer size hint (bytes);
        # netCDF4 grows the buffer as needed. The filename is never written.
        self.nc = netCDF4.Dataset("inmemory.nc", mode="w", memory=1028)

    def add_dimension(self, dim: str) -> Dataset:
        """Create an unlimited dimension named ``dim``."""
        self.nc.createDimension(dim, None)
        return self

    def add_time(
        self,
        name: str,
        dimensions: tuple[str, ...],
        data: npt.NDArray[np.datetime64],
        dtype: NetCDFDataType,
        standard_name: str | None = None,
        long_name: str | None = None,
    ) -> Dataset:
        """Add a CF-style time variable.

        ``data`` must be datetime64[us] in UTC; it is stored as hours since
        the start of the earliest day, with matching ``units``/``calendar``
        attributes (see ``_convert_time``).
        """
        time, units, calendar = _convert_time(data)
        var = self.nc.createVariable(name, dtype, dimensions)
        var.units = units
        var.calendar = calendar
        var[:] = time
        if standard_name is not None:
            var.standard_name = standard_name
        if long_name is not None:
            var.long_name = long_name
        return self

    def add_variable(
        self,
        name: str,
        dimensions: tuple[str, ...],
        units: str,
        data: npt.NDArray[np.float64],
        dtype: NetCDFDataType,
        standard_name: str | None = None,
        long_name: str | None = None,
        mask: npt.NDArray[np.bool_] | None = None,
    ) -> Dataset:
        """Add an n-dimensional variable, optionally masked."""
        var = self.nc.createVariable(name, dtype, dimensions)
        var.units = units
        if mask is not None:
            var[:] = np.ma.masked_array(data, mask)  # type: ignore
        else:
            var[:] = data
        if standard_name is not None:
            var.standard_name = standard_name
        if long_name is not None:
            var.long_name = long_name
        return self

    def add_scalar_variable(
        self,
        name: str,
        units: str,
        data: np.float64 | np.int64 | float | int,
        dtype: NetCDFDataType,
        standard_name: str | None = None,
        long_name: str | None = None,
        mask: npt.NDArray[np.bool_] | None = None,
    ) -> Dataset:
        """Add a dimensionless (scalar) variable.

        BUGFIX: ``mask`` was previously accepted but silently ignored; it is
        now applied the same way as in :meth:`add_variable`.
        """
        var = self.nc.createVariable(name, dtype)
        var.units = units
        if mask is not None:
            var[:] = np.ma.masked_array(data, mask)  # type: ignore
        else:
            var[:] = data
        if standard_name is not None:
            var.standard_name = standard_name
        if long_name is not None:
            var.long_name = long_name
        return self

    def close(self) -> memoryview:
        """Close the dataset and return the serialized file as a memoryview."""
        buf = self.nc.close()
        if isinstance(buf, memoryview):
            return buf
        # BUGFIX: the bare `raise TypeError` carried no diagnostic context.
        raise TypeError(f"Expected memoryview from netCDF4 close, got {type(buf)}")

    def write(self, path: str | Path) -> None:
        """Close the dataset and write its serialized contents to ``path``.

        The dataset cannot be used after calling this method.
        """
        buf = self.nc.close()
        with Path(path).open("wb") as f:
            f.write(buf)
93
+
94
+
95
def _convert_time(
    time: npt.NDArray[np.datetime64],
) -> tuple[npt.NDArray[np.float64], str, str]:
    """Convert UTC datetime64[us] timestamps to CF-style time coordinates.

    Parameters
    ----------
    time : npt.NDArray[np.datetime64["us"]]
        Must be represented in UTC

    Returns
    -------
    Tuple of (hours since start of the earliest day, units string, calendar).
    """
    if time.dtype != "<M8[us]":
        raise TypeError("time must be datetime64[us]")
    MICROSECONDS_TO_HOURS = 1 / (1e6 * 3600)
    # Truncating the earliest timestamp to day precision gives midnight of
    # the first day covered by the data.
    midnight = time.min().astype("datetime64[D]")
    elapsed_us = (time - midnight).astype(np.float64)
    return (
        elapsed_us * MICROSECONDS_TO_HOURS,
        f"hours since {np.datetime_as_string(midnight)} 00:00:00 +0000",
        "standard",
    )
doppy/options.py ADDED
@@ -0,0 +1,13 @@
1
+ from enum import Enum
2
+
3
+
4
class BgCorrectionMethod(Enum):
    """Supported background-correction strategies ("fit", "mean", "pre_computed")."""

    FIT = "fit"
    MEAN = "mean"
    PRE_COMPUTED = "pre_computed"
8
+
9
+
10
class BgFitMethod(Enum):
    """Functional forms for background fitting — presumably linear, exponential,
    and a combined exponential-linear model; confirm against the fitting code."""

    LIN = "lin"
    EXP = "exp"
    EXPLIN = "explin"
@@ -0,0 +1,3 @@
1
+ from doppy.product.stare import Stare
2
+
3
+ __all__ = ["Stare"]