salientsdk 0.1.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- salientsdk-0.1.3/PKG-INFO +105 -0
- salientsdk-0.1.3/docs/index.md +80 -0
- salientsdk-0.1.3/pyproject.toml +114 -0
- salientsdk-0.1.3/salientsdk/.gitignore +3 -0
- salientsdk-0.1.3/salientsdk/__init__.py +42 -0
- salientsdk-0.1.3/salientsdk/constants.py +102 -0
- salientsdk-0.1.3/salientsdk/data_timeseries_api.py +224 -0
- salientsdk-0.1.3/salientsdk/downscale_api.py +139 -0
- salientsdk-0.1.3/salientsdk/forecast_timeseries_api.py +208 -0
- salientsdk-0.1.3/salientsdk/location.py +197 -0
- salientsdk-0.1.3/salientsdk/login_api.py +123 -0
- salientsdk-0.1.3/salientsdk/upload_file_api.py +158 -0
|
@@ -0,0 +1,105 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: salientsdk
|
|
3
|
+
Version: 0.1.3
|
|
4
|
+
Summary: Salient Predictions Software Development Kit
|
|
5
|
+
Home-page: https://salientpredictions.com
|
|
6
|
+
License: docs/LICENSE.md
|
|
7
|
+
Keywords: weather,climate,forecasting,sdk,salient,s2s
|
|
8
|
+
Author: Salient Predictions
|
|
9
|
+
Author-email: help@salientpredictions.com
|
|
10
|
+
Requires-Python: >=3.11,<4.0
|
|
11
|
+
Classifier: License :: Other/Proprietary License
|
|
12
|
+
Classifier: Programming Language :: Python :: 3
|
|
13
|
+
Classifier: Programming Language :: Python :: 3.11
|
|
14
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
15
|
+
Requires-Dist: h5netcdf (>=1.3.0,<2.0.0)
|
|
16
|
+
Requires-Dist: netCDF4 (>=1.6.5,<2.0.0)
|
|
17
|
+
Requires-Dist: pandas (>=2.2.1,<3.0.0)
|
|
18
|
+
Requires-Dist: requests (>=2.31.0,<3.0.0)
|
|
19
|
+
Requires-Dist: toml (>=0.10.2,<0.11.0)
|
|
20
|
+
Requires-Dist: xarray[h5netcdf] (>=2024.2.0,<2025.0.0)
|
|
21
|
+
Project-URL: Documentation, https://sdk.salientpredictions.com
|
|
22
|
+
Project-URL: Repository, https://github.com/Salient-Predictions/salientsdk
|
|
23
|
+
Description-Content-Type: text/markdown
|
|
24
|
+
|
|
25
|
+
|
|
26
|
+
# Intended Use
|
|
27
|
+
|
|
28
|
+
The Salient SDK is a convenience wrapper around Salient Predictions' customer-facing
|
|
29
|
+
[web API](https://api.salientpredictions.com/v2/documentation/api/). It also contains utility functions for manipulating and analyzing the data delivered from the API.
|
|
30
|
+
|
|
31
|
+
# Setting up the SDK
|
|
32
|
+
|
|
33
|
+
## Prerequisites
|
|
34
|
+
|
|
35
|
+
The Salient SDK requires Python 3.11 to use. If you have Python installed, you can check your version with:
|
|
36
|
+
|
|
37
|
+
```bash
|
|
38
|
+
python --version
|
|
39
|
+
```
|
|
40
|
+
|
|
41
|
+
To get version 3.11:
|
|
42
|
+
|
|
43
|
+
```bash
|
|
44
|
+
# Ubuntu:
|
|
45
|
+
sudo apt update
|
|
46
|
+
sudo apt install software-properties-common
|
|
47
|
+
sudo add-apt-repository ppa:deadsnakes/ppa
|
|
48
|
+
sudo apt update
|
|
49
|
+
sudo apt install python3.11
|
|
50
|
+
```
|
|
51
|
+
|
|
52
|
+
```bash
|
|
53
|
+
# macOS:
|
|
54
|
+
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
|
|
55
|
+
brew update
|
|
56
|
+
brew install python@3.11
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
## Installing the SDK
|
|
60
|
+
|
|
61
|
+
The easiest way to get the Salient SDK is to install it like any other package:
|
|
62
|
+
|
|
63
|
+
```bash
|
|
64
|
+
pip install salientsdk
|
|
65
|
+
```
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
# Usage
|
|
74
|
+
|
|
75
|
+
To access the Salient API you will need a `username` and `password` provided by
|
|
76
|
+
your Salient representative. The universal credentials `testusr` and `testpwd`
|
|
77
|
+
have limited permissions for testing and validation purposes:
|
|
78
|
+
|
|
79
|
+
```bash
|
|
80
|
+
python -m salientsdk.data_timeseries_api -lat 42 -lon -73 -fld all --start 2020-01-01 --end 2020-12-31 -u testusr -p testpwd
|
|
81
|
+
```
|
|
82
|
+
|
|
83
|
+
In a python script:
|
|
84
|
+
|
|
85
|
+
```python
|
|
86
|
+
import salientsdk as sk
|
|
87
|
+
import xarray as xr
|
|
88
|
+
import netCDF4
|
|
89
|
+
|
|
90
|
+
session = sk.login("testusr","testpwd")
|
|
91
|
+
history = sk.data_timeseries(loc = sk.Location(lat=42, lon=-73), field="all", variable="temp")
|
|
92
|
+
print(xr.open_dataset(history))
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
See all available functions in the [API Reference](api.md).
|
|
96
|
+
|
|
97
|
+
The [examples](https://github.com/Salient-Predictions/salientsdk/tree/main/examples) directory contains `ipynb` notebooks to help you get started with common operations.
|
|
98
|
+
|
|
99
|
+
# License
|
|
100
|
+
|
|
101
|
+
This SDK is licensed for use by Salient customers [details](LICENSE.md).
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
Copyright 2024 [Salient Predictions](https://www.salientpredictions.com/)
|
|
105
|
+
|
|
@@ -0,0 +1,80 @@
|
|
|
1
|
+
|
|
2
|
+
# Intended Use
|
|
3
|
+
|
|
4
|
+
The Salient SDK is a convenience wrapper around Salient Predictions' customer-facing
|
|
5
|
+
[web API](https://api.salientpredictions.com/v2/documentation/api/). It also contains utility functions for manipulating and analyzing the data delivered from the API.
|
|
6
|
+
|
|
7
|
+
# Setting up the SDK
|
|
8
|
+
|
|
9
|
+
## Prerequisites
|
|
10
|
+
|
|
11
|
+
The Salient SDK requires Python 3.11 to use. If you have Python installed, you can check your version with:
|
|
12
|
+
|
|
13
|
+
```bash
|
|
14
|
+
python --version
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
To get version 3.11:
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
# Ubuntu:
|
|
21
|
+
sudo apt update
|
|
22
|
+
sudo apt install software-properties-common
|
|
23
|
+
sudo add-apt-repository ppa:deadsnakes/ppa
|
|
24
|
+
sudo apt update
|
|
25
|
+
sudo apt install python3.11
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
```bash
|
|
29
|
+
# macOS:
|
|
30
|
+
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"
|
|
31
|
+
brew update
|
|
32
|
+
brew install python@3.11
|
|
33
|
+
```
|
|
34
|
+
|
|
35
|
+
## Installing the SDK
|
|
36
|
+
|
|
37
|
+
The easiest way to get the Salient SDK is to install it like any other package:
|
|
38
|
+
|
|
39
|
+
```bash
|
|
40
|
+
pip install salientsdk
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
# Usage
|
|
50
|
+
|
|
51
|
+
To access the Salient API you will need a `username` and `password` provided by
|
|
52
|
+
your Salient representative. The universal credentials `testusr` and `testpwd`
|
|
53
|
+
have limited permissions for testing and validation purposes:
|
|
54
|
+
|
|
55
|
+
```bash
|
|
56
|
+
python -m salientsdk.data_timeseries_api -lat 42 -lon -73 -fld all --start 2020-01-01 --end 2020-12-31 -u testusr -p testpwd
|
|
57
|
+
```
|
|
58
|
+
|
|
59
|
+
In a python script:
|
|
60
|
+
|
|
61
|
+
```python
|
|
62
|
+
import salientsdk as sk
|
|
63
|
+
import xarray as xr
|
|
64
|
+
import netCDF4
|
|
65
|
+
|
|
66
|
+
session = sk.login("testusr","testpwd")
|
|
67
|
+
history = sk.data_timeseries(loc = sk.Location(lat=42, lon=-73), field="all", variable="temp")
|
|
68
|
+
print(xr.open_dataset(history))
|
|
69
|
+
```
|
|
70
|
+
|
|
71
|
+
See all available functions in the [API Reference](api.md).
|
|
72
|
+
|
|
73
|
+
The [examples](https://github.com/Salient-Predictions/salientsdk/tree/main/examples) directory contains `ipynb` notebooks to help you get started with common operations.
|
|
74
|
+
|
|
75
|
+
# License
|
|
76
|
+
|
|
77
|
+
This SDK is licensed for use by Salient customers [details](LICENSE.md).
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
Copyright 2024 [Salient Predictions](https://www.salientpredictions.com/)
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
[tool.poetry]
|
|
2
|
+
name = "salientsdk"
|
|
3
|
+
version = "0.1.3"
|
|
4
|
+
description = "Salient Predictions Software Development Kit"
|
|
5
|
+
authors = ["Salient Predictions <help@salientpredictions.com>"]
|
|
6
|
+
license = "docs/LICENSE.md"
|
|
7
|
+
readme = "docs/index.md"
|
|
8
|
+
keywords = ["weather","climate","forecasting","sdk","salient","s2s"]
|
|
9
|
+
homepage = "https://salientpredictions.com"
|
|
10
|
+
documentation = "https://sdk.salientpredictions.com"
|
|
11
|
+
repository = "https://github.com/Salient-Predictions/salientsdk"
|
|
12
|
+
|
|
13
|
+
[tool.poetry.dependencies]
|
|
14
|
+
# use "poetry add <packagename>" to edit this list
|
|
15
|
+
h5netcdf = "^1.3.0"
|
|
16
|
+
netCDF4 = "^1.6.5"
|
|
17
|
+
pandas = "^2.2.1"
|
|
18
|
+
python = "^3.11"
|
|
19
|
+
requests = "^2.31.0"
|
|
20
|
+
toml = "^0.10.2"
|
|
21
|
+
xarray = {extras = ["h5netcdf"], version = "^2024.2.0"}
|
|
22
|
+
|
|
23
|
+
[tool.poetry.group.dev.dependencies]
|
|
24
|
+
# use "poetry add --dev <packagename>" to edit this list
|
|
25
|
+
google = "^3.0.0"
|
|
26
|
+
google-cloud-secret-manager = "^2.19.0"
|
|
27
|
+
markdown-include = "^0.8.1"
|
|
28
|
+
mkdocs = "^1.5.3"
|
|
29
|
+
mkdocs-material = "^9.5.15"
|
|
30
|
+
mkdocs-jupyter = "^0.24.6"
|
|
31
|
+
mkdocs-glightbox = "^0.3.7"
|
|
32
|
+
mkdocstrings = "^0.24.1"
|
|
33
|
+
mkdocstrings-python = "^1.9.0"
|
|
34
|
+
nbmake = "^1.5.3"
|
|
35
|
+
pydoc-markdown = "^4.8.2"
|
|
36
|
+
pytest = "^8.1.1"
|
|
37
|
+
pytest-cov = "^5.0.0"
|
|
38
|
+
ruff = "^0.3.4"
|
|
39
|
+
|
|
40
|
+
[build-system]
|
|
41
|
+
# poetry build
|
|
42
|
+
# -- to publish to test pypi --
|
|
43
|
+
# poetry config repositories.testpypi https://test.pypi.org/legacy/
|
|
44
|
+
# poetry config pypi-token.testpypi <your-test-pypi-token>
|
|
45
|
+
# poetry publish -r testpypi
|
|
46
|
+
# pip install --index-url https://test.pypi.org/simple/ salientsdk
|
|
47
|
+
# -- to publish to the canonical pypi --
|
|
48
|
+
# poetry config pypi-token.pypi <your-pypi-token>
|
|
49
|
+
# poetry publish
|
|
50
|
+
# pip install salientsdk
|
|
51
|
+
requires = ["poetry-core"]
|
|
52
|
+
build-backend = "poetry.core.masonry.api"
|
|
53
|
+
|
|
54
|
+
[tool.black]
|
|
55
|
+
line-length = 99
|
|
56
|
+
exclude = ""
|
|
57
|
+
|
|
58
|
+
# Make sure that isort and black play nicely together
|
|
59
|
+
# (both are part of our precommit)
|
|
60
|
+
[tool.isort]
|
|
61
|
+
profile = "black"
|
|
62
|
+
|
|
63
|
+
# These are the ruff settings that are explicitly supplied to pre-commit for enforcement
|
|
64
|
+
[tool.ruff]
|
|
65
|
+
# Should match black
|
|
66
|
+
line-length = 99
|
|
67
|
+
# Assume Python 3.9
|
|
68
|
+
target-version = "py39"
|
|
69
|
+
|
|
70
|
+
# Currently enforce:
|
|
71
|
+
# C90=mccabe-complexity
|
|
72
|
+
# E722=do not use base except
|
|
73
|
+
# Eventually add: F=pyflakes, E=pycodestyle, I=isort, W=pycodestyle warnings, N=pep8-naming, D=pydocstyle
|
|
74
|
+
select = ["C90", "E722","D"]
|
|
75
|
+
ignore = []
|
|
76
|
+
|
|
77
|
+
[tool.ruff.mccabe]
|
|
78
|
+
max-complexity = 25
|
|
79
|
+
|
|
80
|
+
[tool.ruff.pydocstyle]
|
|
81
|
+
convention = "google"
|
|
82
|
+
|
|
83
|
+
[tool.pytest.ini_options]
|
|
84
|
+
filterwarnings = [
|
|
85
|
+
"ignore::DeprecationWarning"
|
|
86
|
+
]
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
#[project]
|
|
92
|
+
#name = "salientsdk"
|
|
93
|
+
#version = "0.1.2"
|
|
94
|
+
#dynamic = ["dependencies"]
|
|
95
|
+
#license = {file = "docs/LICENSE.md"}
|
|
96
|
+
#readme = "docs/index.md"
|
|
97
|
+
#description="Salient Predictions Software Development Kit"
|
|
98
|
+
#requires-python=">=3.11"
|
|
99
|
+
#keywords = ["weather","climate","forecasting","sdk","salient","s2s"]
|
|
100
|
+
#authors = [
|
|
101
|
+
# {name = "Salient Predictions", email = "help@salientpredictions.com"}
|
|
102
|
+
#]
|
|
103
|
+
#classifiers=[
|
|
104
|
+
# "Development Status :: 1 - Planning",
|
|
105
|
+
# "Programming Language :: Python",
|
|
106
|
+
#]
|
|
107
|
+
|
|
108
|
+
# to deploy & install with twine, replace build-system-twine with build-system
|
|
109
|
+
# python3 -m build
|
|
110
|
+
# python3 -m twine upload --repository testpypi dist/* --skip-existing
|
|
111
|
+
# pip install --upgrade -i https://test.pypi.org/simple/ --extra-index-url https://pypi.org/simple/ salientsdk
|
|
112
|
+
#[build-system]
|
|
113
|
+
#requires = ["setuptools>=61.0","wheel","toml"]
|
|
114
|
+
#build-backend = "setuptools.build_meta"
|
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
#!/usr/bin/env python
# Copyright Salient Predictions 2024

"""Salient Predictions Software Development Kit."""

import os

from .constants import get_model_version, set_model_version
from .data_timeseries_api import data_timeseries, load_multihistory
from .downscale_api import downscale
from .forecast_timeseries_api import forecast_timeseries
from .location import Location
from .login_api import login
from .upload_file_api import upload_bounding_box, upload_file, upload_location_file


def _read_version() -> str:
    """Resolve the package version string.

    Prefers the adjacent ``pyproject.toml`` (source checkouts).  When the
    package is pip-installed, ``pyproject.toml`` is not copied next to the
    package directory, so the original unconditional ``open()`` raised
    ``FileNotFoundError`` on import; fall back to the installed distribution
    metadata, then to ``"unknown"``, so importing never fails.
    """
    pyproject_path = os.path.join(os.path.dirname(__file__), "..", "pyproject.toml")
    try:
        import toml

        with open(pyproject_path) as f:
            return toml.load(f)["tool"]["poetry"]["version"]
    except (OSError, ImportError, KeyError):
        pass
    try:
        from importlib.metadata import version

        return version("salientsdk")
    except Exception:
        return "unknown"


__version__ = _read_version()
__author__ = "Salient Predictions"
__all__ = [
    "login",
    "data_timeseries",
    "downscale",
    "forecast_timeseries",
    "get_model_version",
    "load_multihistory",
    "Location",
    "set_model_version",
    "upload_file",
    "upload_bounding_box",
    "upload_location_file",
]

if __name__ == "__main__":
    print(f"ver: {__version__} by: {__author__}")
|
|
@@ -0,0 +1,102 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
# Copyright Salient Predictions 2024
|
|
3
|
+
|
|
4
|
+
"""Constants for the Salient SDK.
|
|
5
|
+
|
|
6
|
+
This module contains constants used throughout the Salient SDK.
|
|
7
|
+
|
|
8
|
+
"""
|
|
9
|
+
|
|
10
|
+
import datetime
|
|
11
|
+
import hashlib
|
|
12
|
+
import urllib
|
|
13
|
+
|
|
14
|
+
import requests
|
|
15
|
+
|
|
16
|
+
# This is the base URL for the Salient API:
URL = "https://api.salientpredictions.com/"

# API version path segment appended to URL when building endpoints:
API_VERSION = "v2"

# Default model version; mutated in place by set_model_version() via `global`.
MODEL_VERSION = "v8"
# Closed set of model versions accepted by set_model_version():
MODEL_VERSIONS = ["v7", "v7_1", "v8"]

# Whether to verify SSL certificates on API requests.
# NOTE(review): name is misspelled ("VERFY"); other modules reference it
# as-is, so renaming would break them -- confirm before changing.
VERFY_SSL = True

# Shared low-privilege credentials for testing and validation:
TEST_USER = "help+test@salientpredictions.com"
TEST_PWD = "salty!"

# Module-level cache of the most recent requests.Session, managed by
# get_current_session() / set_current_session():
CURRENT_SESSION = None
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def _build_url(endpoint: str, args: None | dict = None) -> tuple[str, str]:
    """Build a Salient API request URL and a matching local cache file name.

    Args:
        endpoint (str): The API endpoint name (e.g. "data_timeseries").
        args (None | dict): Query parameters to encode into the URL.

    Returns:
        tuple[str, str]: ``(url, file_name)`` where ``file_name`` embeds an
            md5 fingerprint of the arguments so distinct queries cache to
            distinct files.  md5 is used only as a cheap hash, not for
            security.
    """
    # Explicit submodule import: `import urllib` alone does not load
    # `urllib.parse`; previously this attribute access worked only because
    # `requests` happened to import urllib.parse first.
    import urllib.parse

    url = URL + API_VERSION + "/" + endpoint
    file_name = endpoint

    if args:
        # safe="," keeps comma-separated list values readable in the URL.
        url += "?" + urllib.parse.urlencode(args, safe=",")
        file_name += "_" + hashlib.md5(str(args).encode()).hexdigest()
        if "format" in args:
            file_name += "." + args["format"]

    return (url, file_name)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _validate_date(date: str | datetime.date | datetime.datetime) -> str:
    """Normalize a date argument to a ``"YYYY-MM-DD"`` string.

    Args:
        date: An ISO-style date string, the sentinel ``"-today"`` (meaning
            the current date), or a ``date``/``datetime`` object.

    Returns:
        str: The date formatted as ``"YYYY-MM-DD"``.
    """
    if isinstance(date, str) and date == "-today":
        date = datetime.datetime.today()

    # datetime.date covers both date and datetime instances, so a plain
    # date no longer slips through unconverted (previously only
    # datetime.datetime was handled and a date was returned as-is).
    if isinstance(date, datetime.date):
        date = date.strftime("%Y-%m-%d")

    # ENHANCEMENT: accept other date formats like numpy datetime64, pandas Timestamp, etc
    # ENHANCEMENT: make sure date is properly formatted

    return date
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def get_model_version() -> str:
    """Get the current default model version.

    Returns:
        str: The current model version (the module-level MODEL_VERSION,
            which set_model_version() may change at runtime)

    """
    return MODEL_VERSION
|
|
70
|
+
|
|
71
|
+
|
|
72
|
+
def set_model_version(version: str) -> None:
    """Set the default model version.

    Args:
        version (str): The model version to set; must be one of
            MODEL_VERSIONS (e.g. "v7", "v7_1", "v8")

    Raises:
        ValueError: If `version` is not a recognized model version.

    """
    # Validate with an explicit raise: `assert` is stripped under
    # `python -O`, which would silently accept invalid versions.
    if version not in MODEL_VERSIONS:
        raise ValueError(f"Invalid model version {version!r}; expected one of {MODEL_VERSIONS}")
    global MODEL_VERSION
    MODEL_VERSION = version
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def get_current_session() -> None | requests.Session:
    """Get the current session.

    Returns:
        None | requests.Session: The session most recently stored via
            set_current_session(), or None if no session has been set

    """
    return CURRENT_SESSION
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def set_current_session(session: requests.Session) -> None:
    """Set the current session.

    Stores `session` in the module-level CURRENT_SESSION cache so it can
    be retrieved later via get_current_session().

    Args:
        session (requests.Session): The session to set

    """
    global CURRENT_SESSION
    CURRENT_SESSION = session
|
|
@@ -0,0 +1,224 @@
|
|
|
1
|
+
#!/usr/bin/env python
|
|
2
|
+
# Copyright Salient Predictions 2024
|
|
3
|
+
|
|
4
|
+
"""Historical data timeseries.
|
|
5
|
+
|
|
6
|
+
This module is an interface to the Salient `data_timeseries` API, which returns historical
|
|
7
|
+
observed data. It also includes utility functions for operating on the returned data.
|
|
8
|
+
|
|
9
|
+
Command line usage example:
|
|
10
|
+
|
|
11
|
+
```
|
|
12
|
+
cd ~/salientsdk
|
|
13
|
+
# this will get a single variable in a single file:
|
|
14
|
+
python -m salientsdk.data_timeseries_api -lat 42 -lon -73 -fld all --start 2020-01-01 --end 2020-12-31
|
|
15
|
+
# this will get multiple variables in separate files:
|
|
16
|
+
python -m salientsdk.data_timeseries_api -lat 42 -lon -73 -fld all -var temp,precip
|
|
17
|
+
```
|
|
18
|
+
|
|
19
|
+
"""
|
|
20
|
+
|
|
21
|
+
import os
|
|
22
|
+
|
|
23
|
+
import requests
|
|
24
|
+
import xarray as xr
|
|
25
|
+
|
|
26
|
+
from . import constants, location, login_api
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def data_timeseries(
    loc: location.Location,
    variable: str = "temp",
    field: str = "anom",
    debias: bool = False,
    start: str = "1950-01-01",
    end: str = "-today",
    format: str = "nc",
    frequency: str = "daily",
    force: bool = False,
    session: requests.Session | None = None,
    verify: bool = constants.VERFY_SSL,
    verbose: bool = False,
    **kwargs,
) -> str | dict[str, str]:
    """Get a historical time series of ERA5 data.

    This function is a convenience wrapper to the Salient
    [API](https://api.salientpredictions.com/v2/documentation/api/#/Historical/get_data_timeseries).

    Args:
        loc (Location): The location to query
        variable (str): The variable to query, defaults to `temp`
            To request multiple variables, separate them with a comma `temp,precip`
            This will download one file per variable
            See the
            [Data Fields](https://salientpredictions.notion.site/Variables-d88463032846402e80c9c0972412fe60)
            documentation for a full list of available historical variables.
        field (str): The field to query, defaults to "anom"
        debias (bool): If True, debias the data to local observations. Disabled for `shapefile` locations. [detail](https://salientpredictions.notion.site/Debiasing-2888d5759eef4fe89a5ba3e40cd72c8f)
        start (str): The start date of the time series
        end (str): The end date of the time series
        format (str): The format of the response, `nc` or `csv`
        frequency (str): The frequency of the time series
        force (bool): If False (default), don't download the data if it already exists
        session (requests.Session | None): The session object to use for the request.
            Defaults to the current session (resolved at call time, not import time).
        verify (bool): If True (default), verify the SSL certificate
        verbose (bool): If True (default False) print status messages
        **kwargs: Additional arguments to pass to the API

    Keyword Arguments:
        units (str): `SI` or `US`
        apikey (str): use an API key instead of a username & password

    Raises:
        ValueError: If `field`, `format`, or `frequency` is not a recognized value.

    Returns:
        str | dict: the file name of the downloaded data. File names are a hash of the query parameters.
            When `force=False` and the file already exists, the function will return the file name
            almost instantaneously without querying the API.
            If multiple variables are requested, returns a `dict` of `{variable:file_name}`
    """
    # Validate with explicit raises: `assert` is stripped under `python -O`.
    valid_fields = [
        "anom",
        "anom_d",
        "anom_ds",
        "anom_qnt",
        "anom_s",
        "clim",
        "stdv",
        "trend",
        "vals",
        "all",
    ]
    if field not in valid_fields:
        raise ValueError(f"Invalid field {field}")
    if format not in ["nc", "csv"]:
        raise ValueError(f"Invalid format {format}")
    if frequency not in ["daily", "weekly", "monthly", "3-monthly"]:
        raise ValueError(f"Invalid frequency {frequency}")

    # Resolve the session at call time.  The original signature used
    # `session=constants.get_current_session()` as a default, which is
    # evaluated once at import -- a session established by login() after
    # import would never be picked up.
    if session is None:
        session = constants.get_current_session()

    # if there is a comma in variable, vectorize:
    if isinstance(variable, str) and "," in variable:
        variable = variable.split(",")

    if isinstance(variable, list):
        # One recursive call (and one downloaded file) per variable.
        file_names = {
            var: data_timeseries(
                loc=loc,
                variable=var,
                field=field,
                debias=debias,
                start=start,
                end=end,
                format=format,
                frequency=frequency,
                force=force,
                session=session,
                verify=verify,
                verbose=verbose,
                **kwargs,
            )
            for var in variable
        }
        if verbose:
            print(file_names)
        return file_names

    endpoint = "data_timeseries"
    args = loc.asdict(
        start=start,
        end=end,
        debias=debias,
        field=field,
        format=format,
        frequency=frequency,
        variable=variable,
        **kwargs,
    )

    (query, file_name) = constants._build_url(endpoint, args)

    if force or not os.path.exists(file_name):
        if verbose:
            print(f"Downloading {query} to {file_name}")
        # Fetch and validate BEFORE creating the file: the original opened
        # the file first, so a failed request left an empty file behind
        # that later force=False calls mistook for a valid cache entry.
        result = session.get(query, verify=verify)
        result.raise_for_status()
        if format == "nc":
            with open(file_name, "wb") as f:
                f.write(result.content)
        else:
            with open(file_name, "w") as f:
                f.write(result.text)
    elif verbose:
        print(f"File {file_name} already exists")

    return file_name
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def load_multihistory(files: dict, fields: list[str] | None = None) -> xr.Dataset:
    """Load multiple history files and merge them into a single dataset.

    Args:
        files (dict): Dictionary of `{variable:file_name}` of the type returned by
            `data_timeseries` when multiple `variable`s are requested
            e.g. `data_timeseries(..., variable = "temp,precip")`
        fields (list[str] | None): List of fields to extract from the history files.
            Defaults to `["vals"]`.  (A None sentinel replaces the original
            mutable-list default, which Python evaluates once and shares
            across all calls.)

    Returns:
        xr.Dataset: The merged dataset, where each field and variable is renamed
            to `<variable>_<field>` or simply `variable` if field = "vals".
    """
    if fields is None:
        fields = ["vals"]

    def __extract_history_fields(file: str, variable: str, fields: list[str]) -> xr.Dataset:
        # Load eagerly (load_dataset reads into memory) so the file handle
        # can be closed before merging.
        hst = xr.load_dataset(file)
        hst = hst[fields]
        fields_new = [variable if field == "vals" else variable + "_" + field for field in fields]
        hst = hst.rename(dict(zip(fields, fields_new)))
        # Copy dataset-level attrs onto each renamed variable -- presumably
        # so per-variable metadata survives the merge; confirm intent.
        for fld in fields_new:
            hst[fld].attrs = hst.attrs
        hst.attrs = {}
        hst.close()

        return hst

    # Would prefer to use xr.open_mfdataset, but we need to pass in the variable name
    # Can convert when history files have a short_name attribute
    # https://salientpredictions.atlassian.net/browse/RD-1184
    hst = xr.merge(
        [__extract_history_fields(files[variable], variable, fields) for variable in files.keys()]
    )
    return hst
|
|
189
|
+
|
|
190
|
+
|
|
191
|
+
def _main() -> None:
    """Command-line entry point for the data_timeseries endpoint.

    Parses location/credential arguments plus timeseries-specific flags,
    logs in, downloads the requested timeseries, and -- when verbose and a
    single file was produced -- prints the opened dataset.
    """
    # Location.get_argparser supplies the shared location/credential flags;
    # ["debias", "force"] opts into those two extras.
    argparser = location.Location.get_argparser(["debias", "force"])
    argparser.add_argument("-var", "--variable", type=str, default="temp")
    argparser.add_argument("-fld", "--field", type=str, default="anom")
    argparser.add_argument("--start", type=str, default="1950-01-01")
    argparser.add_argument("--end", type=str, default="-today")
    argparser.add_argument("--format", type=str, default="nc")
    argparser.add_argument("--frequency", type=str, default="daily")

    args = argparser.parse_args()

    session = login_api._login_from_args(args)

    loc = location.Location._from_args_(args)
    file_name = data_timeseries(
        loc=loc,
        variable=args.variable,
        field=args.field,
        debias=args.debias,
        start=args.start,
        end=args.end,
        format=args.format,
        frequency=args.frequency,
        force=args.force,
        verbose=args.verbose,
        session=session,
    )

    # data_timeseries returns a dict for multi-variable requests; only
    # print the dataset in the single-file (str) case.
    if args.verbose and isinstance(file_name, str):
        print(xr.open_dataset(file_name))
|