ecmwf-datastores-client 0.2.0__tar.gz → 0.4.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ecmwf-datastores-client might be problematic.
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/.cruft.json +2 -2
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/.github/workflows/on-push.yml +3 -3
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/.pre-commit-config.yaml +4 -4
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/PKG-INFO +7 -6
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/README.md +4 -2
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ci/environment-ci.yml +1 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/client.py +19 -2
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/processing.py +21 -11
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/version.py +1 -1
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/PKG-INFO +7 -6
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/SOURCES.txt +2 -2
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/requires.txt +1 -1
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/environment.yml +1 -1
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/pyproject.toml +7 -6
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/conftest.py +1 -4
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_20_processing.py +9 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_40_results.py +0 -44
- ecmwf_datastores_client-0.2.0/tests/integration_test_60_api_client.py → ecmwf_datastores_client-0.4.0/tests/integration_test_60_client.py +9 -9
- ecmwf_datastores_client-0.2.0/tests/integration_test_70_legacy_api_client.py → ecmwf_datastores_client-0.4.0/tests/integration_test_70_legacy_client.py +15 -16
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/test_20_processing.py +6 -1
- ecmwf_datastores_client-0.4.0/tests/test_40_results.py +143 -0
- ecmwf_datastores_client-0.2.0/tests/test_40_results.py +0 -92
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/.github/workflows/on-pr-closed.yml +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/.gitignore +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/.pre-commit-config-cruft.yaml +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/Dockerfile +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/LICENSE +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/Makefile +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ci/environment-integration.yml +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/docs/Makefile +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/docs/_static/.gitkeep +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/docs/_templates/.gitkeep +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/docs/conf.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/docs/index.md +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/docs/make.bat +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/docs/notebooks/index.md +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/docs/notebooks/quick_start.ipynb +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/__init__.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/catalogue.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/config.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/legacy_client.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/profile.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/py.typed +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/utils.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/dependency_links.txt +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/top_level.txt +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/setup.cfg +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_10_catalogue.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_30_remote.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_50_profile.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_80_adaptors.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_90_features.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/test_00_version.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/test_01_config.py +0 -0
- {ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/test_10_catalogue.py +0 -0
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/.cruft.json
RENAMED
```diff
@@ -1,6 +1,6 @@
 {
   "template": "https://github.com/ecmwf-projects/cookiecutter-conda-package",
-  "commit": "
+  "commit": "28eb7788868b278f924c6171a0dba603591b2f33",
   "checkout": null,
   "context": {
     "cookiecutter": {
@@ -13,7 +13,7 @@
       "integration_tests": "True",
       "pypi": true,
       "_template": "https://github.com/ecmwf-projects/cookiecutter-conda-package",
-      "_commit": "
+      "_commit": "28eb7788868b278f924c6171a0dba603591b2f33"
     }
   },
   "directory": null
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/.github/workflows/on-push.yml
RENAMED
```diff
@@ -145,7 +145,7 @@ jobs:
       env:
         ECMWF_DATASTORES_URL: ${{ secrets.ECMWF_DATASTORES_URL }}
         ECMWF_DATASTORES_KEY: ${{ secrets.ECMWF_DATASTORES_KEY }}
-
+        ANONYMOUS_PAT: ${{ secrets.ECMWF_DATASTORES_ANON_KEY }}
       run: |
         make docs-build
     - uses: rossjrw/pr-preview-action@v1
@@ -170,7 +170,7 @@ jobs:
 
     strategy:
       matrix:
-        python-version: ['3.
+        python-version: ['3.9', '3.13']
         extra: ['-ci']
 
     steps:
@@ -199,7 +199,7 @@ jobs:
       env:
         ECMWF_DATASTORES_URL: ${{ secrets.ECMWF_DATASTORES_URL }}
         ECMWF_DATASTORES_KEY: ${{ secrets.ECMWF_DATASTORES_KEY }}
-
+        ANONYMOUS_PAT: ${{ secrets.ECMWF_DATASTORES_ANON_KEY }}
       run: |
         make ci-integration-tests COV_REPORT=xml
 
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/.pre-commit-config.yaml
RENAMED
```diff
@@ -12,12 +12,12 @@ repos:
   - id: debug-statements
   - id: mixed-line-ending
 - repo: https://github.com/keewis/blackdoc
-  rev: v0.
+  rev: v0.4.1
   hooks:
   - id: blackdoc
     additional_dependencies: [black==23.11.0]
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.
+  rev: v0.12.5
   hooks:
   - id: ruff
     args: [--fix, --show-fixes]
@@ -27,14 +27,14 @@ repos:
   hooks:
   - id: mdformat
 - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-  rev: v2.
+  rev: v2.15.0
   hooks:
   - id: pretty-format-yaml
     args: [--autofix, --preserve-quotes]
   - id: pretty-format-toml
     args: [--autofix]
 - repo: https://github.com/gitleaks/gitleaks
-  rev: v8.
+  rev: v8.28.0
   hooks:
   - id: gitleaks
 - repo: https://github.com/kynan/nbstripout
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/PKG-INFO
RENAMED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ecmwf-datastores-client
-Version: 0.2.0
+Version: 0.4.0
 Summary: ECMWF Data Stores Service (DSS) API Python client
 License: Apache License
                                  Version 2.0, January 2004
@@ -213,18 +213,17 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Scientific/Engineering
-Requires-Python: >=3.
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: attrs
-Requires-Dist: multiurl>=0.3.
+Requires-Dist: multiurl>=0.3.7
 Requires-Dist: requests
 Requires-Dist: typing-extensions
 Provides-Extra: legacy
@@ -322,8 +321,8 @@ Retrieve data:
 ...     "time": ["00:00"],
 ...     "pressure_level": ["1000"],
 ...     "data_format": "grib",
-...     "download_format": "unarchived"
-...
+...     "download_format": "unarchived",
+... }
 
 >>> client.retrieve(collection_id, request, target="target_1.grib")  # blocks
 'target_1.grib'
@@ -359,6 +358,7 @@ List all collection IDs sorted by last update:
 >>> while collections is not None:  # Loop over pages
 ...     collection_ids.extend(collections.collection_ids)
 ...     collections = collections.next  # Move to the next page
+...
 
 >>> collection_ids
 [...]
@@ -425,6 +425,7 @@ List all successful jobs, sorted by newest first:
 >>> while jobs is not None:  # Loop over pages
 ...     request_ids.extend(jobs.request_ids)
 ...     jobs = jobs.next  # Move to the next page
+...
 
 >>> request_ids
 [...]
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/README.md
RENAMED
```diff
@@ -89,8 +89,8 @@ Retrieve data:
 ...     "time": ["00:00"],
 ...     "pressure_level": ["1000"],
 ...     "data_format": "grib",
-...     "download_format": "unarchived"
-...
+...     "download_format": "unarchived",
+... }
 
 >>> client.retrieve(collection_id, request, target="target_1.grib")  # blocks
 'target_1.grib'
@@ -126,6 +126,7 @@ List all collection IDs sorted by last update:
 >>> while collections is not None:  # Loop over pages
 ...     collection_ids.extend(collections.collection_ids)
 ...     collections = collections.next  # Move to the next page
+...
 
 >>> collection_ids
 [...]
@@ -192,6 +193,7 @@ List all successful jobs, sorted by newest first:
 >>> while jobs is not None:  # Loop over pages
 ...     request_ids.extend(jobs.request_ids)
 ...     jobs = jobs.next  # Move to the next page
+...
 
 >>> request_ids
 [...]
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/client.py
RENAMED
```diff
@@ -28,6 +28,8 @@ from ecmwf.datastores.catalogue import Catalogue
 from ecmwf.datastores.processing import Processing, RequestKwargs
 from ecmwf.datastores.profile import Profile
 
+T_STATUS = Literal["accepted", "running", "successful", "failed", "rejected"]
+
 
 @attrs.define(slots=False)
 class Client:
@@ -179,6 +181,21 @@ class Client:
         """
         return self._profile_api.check_authentication()
 
+    def delete(self, *request_ids: str) -> dict[str, Any]:
+        """Delete requests.
+
+        Parameters
+        ----------
+        *request_ids: str
+            Request IDs.
+
+        Returns
+        -------
+        dict[str,Any]
+            Content of the response.
+        """
+        return self._retrieve_api.delete(*request_ids)
+
     def download_results(self, request_id: str, target: str | None = None) -> str:
         """Download the results of a request.
 
@@ -259,7 +276,7 @@ class Client:
         self,
         limit: int | None = None,
         sortby: Literal[None, "created", "-created"] = None,
-        status:
+        status: None | T_STATUS | list[T_STATUS] = None,
     ) -> datastores.Jobs:
         """Retrieve submitted jobs.
 
@@ -269,7 +286,7 @@ class Client:
             Number of jobs per page.
         sortby: {None, 'created', '-created'}
             Field to sort results by.
-        status:
+        status: None or {'accepted', 'running', 'successful', 'failed', 'rejected'} or list
            Status of the results.
 
         Returns
```
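The client.py changes above add a public `Client.delete` method and a typed `status` filter for `Client.get_jobs`. A minimal usage sketch of the new calls, assuming the anonymous test key and the `test-adaptor-dummy` collection used elsewhere in this diff (the URL below is a placeholder, not a real endpoint):

```python
from ecmwf.datastores import Client

# Placeholder credentials: substitute a real Data Stores URL and personal access token.
client = Client(url="https://example.invalid/api", key="00112233-4455-6677-c899-aabbccddeeff")

# Submit two dummy jobs, then remove them with the new Client.delete(*request_ids).
id1 = client.submit("test-adaptor-dummy", {}).request_id
id2 = client.submit("test-adaptor-dummy", {}).request_id
response = client.delete(id1, id2)  # returns the response body as a dict
for job in response["jobs"]:
    print(job["jobID"], job["status"])  # the integration test expects "dismissed" here

# get_jobs now accepts a single status literal or a list of them.
failed_or_rejected = client.get_jobs(status=["failed", "rejected"], sortby="-created")
print(failed_or_rejected.request_ids)
```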
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/processing.py
RENAMED
```diff
@@ -21,7 +21,7 @@ import os
 import time
 import urllib.parse
 import warnings
-from typing import Any, Callable,
+from typing import Any, Callable, TypedDict, TypeVar
 
 try:
     from typing import Self
@@ -144,7 +144,7 @@ class ApiResponse:
 
     @classmethod
     def from_request(
-        cls:
+        cls: type[T_ApiResponse],
         method: str,
         url: str,
         headers: dict[str, str],
@@ -467,7 +467,7 @@ class Remote:
             return True
         if status in ("accepted", "running"):
             return False
-        if status
+        if status in ("failed", "rejected"):
             results = self._make_results(wait=False)
             raise ProcessingFailedError(error_json_to_message(results._json_dict))
         if status in ("dismissed", "deleted"):
@@ -620,13 +620,16 @@ class Results(ApiResponse):
     def _download(self, url: str, target: str) -> requests.Response:
         download_options = {"stream": True, "resume_transfers": True}
         download_options.update(self.download_options)
-
-
-
-
-
-
-
+        try:
+            multiurl.download(
+                url,
+                target=target,
+                maximum_retries=0,
+                **self.request_options,
+                **download_options,
+            )
+        except requests.HTTPError as exc:
+            return exc.response
         return requests.Response()  # mutliurl robust needs a response
 
     def download(
@@ -661,7 +664,7 @@ class Results(ApiResponse):
     @property
     def location(self) -> str:
         """File location."""
-        result_href = self.asset["href"]
+        result_href: str = self.asset["href"]
         return urllib.parse.urljoin(self.response.url, result_href)
 
     @property
@@ -705,6 +708,13 @@ class Processing:
             log_callback=self.log_callback,
         )
 
+    def delete(self, *job_ids: str) -> dict[str, Any]:
+        url = f"{self.url}/jobs/delete"
+        response = ApiResponse.from_request(
+            "post", url, json={"job_ids": job_ids}, **self._request_kwargs
+        )
+        return response._json_dict
+
     def get_processes(self, **params: Any) -> Processes:
         url = f"{self.url}/processes"
         return Processes.from_request("get", url, params=params, **self._request_kwargs)
```
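The new `Processing.delete` shown above POSTs the job IDs to the `{url}/jobs/delete` endpoint. A rough sketch of the equivalent raw HTTP call, assuming a `PRIVATE-TOKEN` header like the one the integration tests check for (the base URL, token, and job ID below are placeholders taken from the test fixtures, not real values):

```python
import requests

# Placeholders: substitute the real retrieve-API root and a valid personal access token.
RETRIEVE_API_URL = "https://example.invalid/api/retrieve/v1"
headers = {"PRIVATE-TOKEN": "00112233-4455-6677-c899-aabbccddeeff"}

# Same request shape as Processing.delete: POST the job IDs to /jobs/delete.
response = requests.post(
    f"{RETRIEVE_API_URL}/jobs/delete",
    json={"job_ids": ["9bfc1362-2832-48e1-a235-359267420bb2"]},
    headers=headers,
)
response.raise_for_status()
print(response.json())  # the integration test expects each deleted job to report "dismissed"
```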
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/version.py
RENAMED
```diff
@@ -1,2 +1,2 @@
 # Do not change! Do not track in version control!
-__version__ = "0.2.0"
+__version__ = "0.4.0"
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/PKG-INFO
RENAMED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ecmwf-datastores-client
-Version: 0.2.0
+Version: 0.4.0
 Summary: ECMWF Data Stores Service (DSS) API Python client
 License: Apache License
                                  Version 2.0, January 2004
@@ -213,18 +213,17 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Scientific/Engineering
-Requires-Python: >=3.
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: attrs
-Requires-Dist: multiurl>=0.3.
+Requires-Dist: multiurl>=0.3.7
 Requires-Dist: requests
 Requires-Dist: typing-extensions
 Provides-Extra: legacy
@@ -322,8 +321,8 @@ Retrieve data:
 ...     "time": ["00:00"],
 ...     "pressure_level": ["1000"],
 ...     "data_format": "grib",
-...     "download_format": "unarchived"
-...
+...     "download_format": "unarchived",
+... }
 
 >>> client.retrieve(collection_id, request, target="target_1.grib")  # blocks
 'target_1.grib'
@@ -359,6 +358,7 @@ List all collection IDs sorted by last update:
 >>> while collections is not None:  # Loop over pages
 ...     collection_ids.extend(collections.collection_ids)
 ...     collections = collections.next  # Move to the next page
+...
 
 >>> collection_ids
 [...]
@@ -425,6 +425,7 @@ List all successful jobs, sorted by newest first:
 >>> while jobs is not None:  # Loop over pages
 ...     request_ids.extend(jobs.request_ids)
 ...     jobs = jobs.next  # Move to the next page
+...
 
 >>> request_ids
 [...]
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/SOURCES.txt
RENAMED
```diff
@@ -41,8 +41,8 @@ tests/integration_test_20_processing.py
 tests/integration_test_30_remote.py
 tests/integration_test_40_results.py
 tests/integration_test_50_profile.py
-tests/integration_test_60_api_client.py
-tests/integration_test_70_legacy_api_client.py
+tests/integration_test_60_client.py
+tests/integration_test_70_legacy_client.py
 tests/integration_test_80_adaptors.py
 tests/integration_test_90_features.py
 tests/test_00_version.py
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/pyproject.toml
RENAMED
```diff
@@ -10,7 +10,6 @@ classifiers = [
   "Operating System :: OS Independent",
   "Programming Language :: Python",
   "Programming Language :: Python :: 3",
-  "Programming Language :: Python :: 3.8",
   "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
@@ -20,7 +19,7 @@ classifiers = [
 ]
 dependencies = [
   "attrs",
-  "multiurl >= 0.3.
+  "multiurl >= 0.3.7",
   "requests",
   "typing-extensions"
 ]
@@ -29,7 +28,7 @@ dynamic = ["version"]
 license = {file = "LICENSE"}
 name = "ecmwf-datastores-client"
 readme = "README.md"
-requires-python = ">=3.
+requires-python = ">=3.9"
 
 [project.optional-dependencies]
 legacy = ["cdsapi >= 0.7.6"]
@@ -48,7 +47,7 @@ strict = true
 
 [[tool.mypy.overrides]]
 ignore_missing_imports = true
-module = ["cdsapi.*", "multiurl.*"]
+module = ["cdsapi.*", "multiurl.*", "pytest_httpbin.*"]
 
 [tool.pytest.ini_options]
 markers = ["extra: additional tests skipped by CI"]
@@ -57,7 +56,7 @@ markers = ["extra: additional tests skipped by CI"]
 # Same as Black.
 indent-width = 4
 line-length = 88
-target-version = "
+target-version = "py39"
 
 [tool.ruff.lint]
 ignore = [
@@ -75,7 +74,9 @@ select = [
   # pydocstyle
   "D",
   # flake8-future-annotations
-  "FA"
+  "FA",
+  # pyupgrade
+  "UP"
 ]
 
 [tool.ruff.lint.pycodestyle]
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/conftest.py
RENAMED
```diff
@@ -21,10 +21,7 @@ def api_root_url() -> str:
 
 @pytest.fixture
 def api_anon_key() -> str:
-    return os.getenv(
-        "ECMWF_DATASTORES_ANON_KEY",
-        "00112233-4455-6677-c899-aabbccddeeff",  # gitleaks:allow
-    )
+    return os.getenv("ANONYMOUS_PAT", "00112233-4455-6677-c899-aabbccddeeff")
 
 
 @pytest.fixture
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_20_processing.py
RENAMED
```diff
@@ -73,3 +73,12 @@ def test_processing_get_jobs_sortby(api_anon_client: Client) -> None:
     ids = api_anon_client.get_jobs(sortby="-created").request_ids
     assert ids.index(id2) < ids.index(id1)
     assert [id2] != api_anon_client.get_jobs(sortby="created", limit=1).request_ids
+
+
+def test_processing_delete(api_anon_client: Client) -> None:
+    id1 = api_anon_client.submit("test-adaptor-dummy", {}).request_id
+    id2 = api_anon_client.submit("test-adaptor-dummy", {}).request_id
+    job1, job2 = api_anon_client.delete(id1, id2)["jobs"]
+    assert job1["status"] == job2["status"] == "dismissed"
+    assert job1["jobID"] == id1
+    assert job2["jobID"] == id2
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_40_results.py
RENAMED
```diff
@@ -2,11 +2,8 @@ from __future__ import annotations
 
 import contextlib
 import pathlib
-import random
-from typing import Any
 
 import pytest
-import requests
 
 from ecmwf.datastores import Client, Results
 
@@ -41,47 +38,6 @@ def test_results_progress(
     assert captured.err if progress else not captured.err
 
 
-@pytest.mark.parametrize(
-    "maximum_tries,raises",
-    [
-        (500, does_not_raise()),
-        (1, pytest.raises(requests.ConnectionError, match="Random error.")),
-    ],
-)
-def test_results_robust_download(
-    api_root_url: str,
-    api_anon_key: str,
-    monkeypatch: pytest.MonkeyPatch,
-    tmp_path: pathlib.Path,
-    maximum_tries: int,
-    raises: contextlib.nullcontext[Any],
-) -> None:
-    from multiurl.http import FullHTTPDownloader
-
-    def patched_iter_content(self, *args, **kwargs):  # type: ignore
-        for chunk in self.iter_content(chunk_size=1):
-            if random.choice([True, False]):
-                raise requests.ConnectionError("Random error.")
-            yield chunk
-
-    def make_stream(self):  # type: ignore
-        request = self.issue_request(self.range)
-        return request.patched_iter_content
-
-    client = Client(
-        url=api_root_url, key=api_anon_key, retry_after=0, maximum_tries=maximum_tries
-    )
-    results = client.submit_and_wait_on_results("test-adaptor-dummy", {"size": 10})
-    monkeypatch.setattr(
-        requests.Response, "patched_iter_content", patched_iter_content, raising=False
-    )
-    monkeypatch.setattr(FullHTTPDownloader, "make_stream", make_stream)
-
-    target = tmp_path / "test.grib"
-    with raises:
-        results.download(str(target))
-
-
 def test_results_override(api_anon_client: Client, tmp_path: pathlib.Path) -> None:
     target_1 = tmp_path / "tmp1.grib"
     api_anon_client.retrieve("test-adaptor-dummy", {"size": 1}, target=str(target_1))
```
ecmwf_datastores_client-0.2.0/tests/integration_test_60_api_client.py → ecmwf_datastores_client-0.4.0/tests/integration_test_60_client.py
RENAMED
```diff
@@ -9,7 +9,7 @@ from urllib3.exceptions import InsecureRequestWarning
 from ecmwf.datastores import Client, Remote, Results, processing
 
 
-def test_api_client_download_results(
+def test_client_download_results(
     api_anon_client: Client, tmp_path: pathlib.Path
 ) -> None:
     remote = api_anon_client.submit("test-adaptor-dummy", {})
@@ -20,27 +20,27 @@ def test_api_client_download_results(
     assert os.path.exists(result)
 
 
-def test_api_client_get_process(api_anon_client: Client) -> None:
+def test_client_get_process(api_anon_client: Client) -> None:
     process = api_anon_client.get_process("test-adaptor-dummy")
     assert isinstance(process, processing.Process)
     assert process.id == "test-adaptor-dummy"
     assert set(process.headers) == {"User-Agent", "PRIVATE-TOKEN"}
 
 
-def test_api_client_get_remote(api_anon_client: Client) -> None:
+def test_client_get_remote(api_anon_client: Client) -> None:
     request_id = api_anon_client.submit("test-adaptor-dummy", {}).request_id
     remote = api_anon_client.get_remote(request_id)
     assert remote.request_id == request_id
     assert set(remote.headers) == {"User-Agent", "PRIVATE-TOKEN"}
 
 
-def test_api_client_get_results(api_anon_client: Client) -> None:
+def test_client_get_results(api_anon_client: Client) -> None:
     request_id = api_anon_client.submit("test-adaptor-dummy", {}).request_id
     results = api_anon_client.get_results(request_id)
     assert isinstance(results, Results)
 
 
-def test_api_client_retrieve(
+def test_client_retrieve(
     api_anon_client: Client,
     tmp_path: pathlib.Path,
 ) -> None:
@@ -54,24 +54,24 @@ def test_api_client_retrieve(
     assert os.path.getsize(actual_target) == 1
 
 
-def test_api_client_submit(api_anon_client: Client) -> None:
+def test_client_submit(api_anon_client: Client) -> None:
     remote = api_anon_client.submit("test-adaptor-dummy", {})
     assert isinstance(remote, Remote)
 
 
-def test_api_client_submit_and_wait_on_results(api_anon_client: Client) -> None:
+def test_client_submit_and_wait_on_results(api_anon_client: Client) -> None:
     results = api_anon_client.submit_and_wait_on_results("test-adaptor-dummy", {})
     assert isinstance(results, Results)
 
 
-def test_api_client_verify(api_root_url: str, api_anon_key: str) -> None:
+def test_client_verify(api_root_url: str, api_anon_key: str) -> None:
     if not api_root_url.startswith("https"):
         pytest.skip(f"{api_root_url=} does not use https protocol")
     with pytest.warns(InsecureRequestWarning):
         Client(url=api_root_url, key=api_anon_key, verify=False, maximum_tries=0)
 
 
-def test_api_client_timeout(
+def test_client_timeout(
     api_root_url: str,
     api_anon_key: str,
     tmp_path: pathlib.Path,
```
ecmwf_datastores_client-0.2.0/tests/integration_test_70_legacy_api_client.py → ecmwf_datastores_client-0.4.0/tests/integration_test_70_legacy_client.py
RENAMED
```diff
@@ -29,7 +29,7 @@ def legacy_update(remote: processing.Remote) -> None:
         remote.update()
 
         reply = remote.reply
-        remote.info("Request ID:
+        remote.info(f"Request ID: {reply['request_id']!s}, state: {reply['state']!s}")
 
         if reply["state"] == "completed":
             break
@@ -49,12 +49,11 @@ def legacy_update(remote: processing.Remote) -> None:
                     break
                 remote.error(" %s", n)
             raise Exception(
-                "
-                % (reply["error"].get("message"), reply["error"].get("reason"))
+                f"{reply['error'].get('message')!s}. {reply['error'].get('reason')!s}."
             )
 
 
-def test_legacy_api_client_retrieve(
+def test_legacy_client_retrieve(
     tmp_path: pathlib.Path, legacy_client: LegacyClient
 ) -> None:
     collection_id = "test-adaptor-dummy"
@@ -65,7 +64,7 @@ def test_legacy_api_client_retrieve(
     assert os.path.getsize(target) == 1
 
 
-def test_legacy_api_client_result(
+def test_legacy_client_result(
     monkeypatch: pytest.MonkeyPatch,
     tmp_path: pathlib.Path,
     legacy_client: LegacyClient,
@@ -92,7 +91,7 @@ def test_legacy_api_client_result(
 
 
 @pytest.mark.parametrize("quiet", [True, False])
-def test_legacy_api_client_quiet(
+def test_legacy_client_quiet(
     caplog: pytest.LogCaptureFixture,
     api_root_url: str,
     api_anon_key: str,
@@ -105,7 +104,7 @@ def test_legacy_api_client_quiet(
 
 
 @pytest.mark.parametrize("debug", [True, False])
-def test_legacy_api_client_debug(
+def test_legacy_client_debug(
     caplog: pytest.LogCaptureFixture,
     api_root_url: str,
     api_anon_key: str,
@@ -120,7 +119,7 @@ def test_legacy_api_client_debug(
     "wait_until_complete,expected_type",
     [(True, processing.Results), (False, processing.Remote)],
 )
-def test_legacy_api_client_wait_until_complete(
+def test_legacy_client_wait_until_complete(
     tmp_path: pathlib.Path,
     api_root_url: str,
     api_anon_key: str,
@@ -156,7 +155,7 @@ def test_legacy_api_client_wait_until_complete(
         ),
     ],
 )
-def test_legacy_api_client_update(
+def test_legacy_client_update(
     api_root_url: str,
     api_anon_key: str,
     collection_id: str,
@@ -173,7 +172,7 @@ def test_legacy_api_client_update(
 
 
 @pytest.mark.filterwarnings("ignore:Unverified HTTPS")
-def test_legacy_api_client_kwargs(api_root_url: str, api_anon_key: str) -> None:
+def test_legacy_client_kwargs(api_root_url: str, api_anon_key: str) -> None:
     session = requests.Session()
     client = LegacyClient(
         url=api_root_url,
@@ -198,7 +197,7 @@ def test_legacy_api_client_kwargs(api_root_url: str, api_anon_key: str) -> None:
     assert client.client.session is session
 
 
-def test_legacy_api_client_logging(
+def test_legacy_client_logging(
     caplog: pytest.LogCaptureFixture,
     api_root_url: str,
     api_anon_key: str,
@@ -226,7 +225,7 @@ def test_legacy_api_client_logging(
     ]
 
 
-def test_legacy_api_client_download(
+def test_legacy_client_download(
     tmp_path: pathlib.Path,
     monkeypatch: pytest.MonkeyPatch,
     api_root_url: str,
@@ -258,7 +257,7 @@ def test_legacy_api_client_download(
     assert all(os.path.getsize(target) == 1 for target in targets)
 
 
-def test_legacy_api_client_status(legacy_client: LegacyClient) -> None:
+def test_legacy_client_status(legacy_client: LegacyClient) -> None:
     status = legacy_client.status()
     assert set(status) <= {
         "critical",
@@ -277,7 +276,7 @@ def test_legacy_api_client_status(legacy_client: LegacyClient) -> None:
     )
 
 
-def test_legacy_api_client_remote(
+def test_legacy_client_remote(
     legacy_client: LegacyClient, tmp_path: pathlib.Path
 ) -> None:
     results = legacy_client.retrieve("test-adaptor-dummy", {"size": 1})
@@ -288,7 +287,7 @@ def test_legacy_api_client_remote(
     assert os.path.getsize(target) == 1
 
 
-def test_legacy_api_client_warning(
+def test_legacy_client_warning(
     api_root_url: str,
     api_anon_key: str,
 ) -> None:
@@ -305,7 +304,7 @@ def test_legacy_api_client_warning(
     )
 
 
-def test_legacy_api_client_toolbox(legacy_client: LegacyClient) -> None:
+def test_legacy_client_toolbox(legacy_client: LegacyClient) -> None:
     with pytest.raises(NotImplementedError):
         legacy_client.service("service")
     with pytest.raises(NotImplementedError):
```
{ecmwf_datastores_client-0.2.0 → ecmwf_datastores_client-0.4.0}/tests/test_20_processing.py
RENAMED
```diff
@@ -411,8 +411,12 @@ def test_wait_on_result(cat: catalogue.Catalogue) -> None:
         remote._wait_on_results()
 
 
+@pytest.mark.parametrize("status", ["failed", "rejected"])
 @responses.activate
-def test_wait_on_result_failed(cat: catalogue.Catalogue) -> None:
+def test_wait_on_result_failed(
+    cat: catalogue.Catalogue, status: str, monkeypatch: pytest.MonkeyPatch
+) -> None:
+    monkeypatch.setitem(JOB_FAILED_JSON, "status", status)
     responses_add()
 
     collection = cat.get_collection(COLLECTION_ID)
@@ -424,6 +428,7 @@ def test_wait_on_result_failed(cat: catalogue.Catalogue) -> None:
     ):
         remote._wait_on_results()
 
+    assert remote.status == status
     assert remote.created_at.isoformat() == "2022-09-02T17:30:48.201213+00:00"
     assert remote.started_at is not None
     assert remote.started_at.isoformat() == "2022-09-02T17:32:43.890617+00:00"
```
ecmwf_datastores_client-0.4.0/tests/test_40_results.py
```diff
@@ -0,0 +1,143 @@
+from __future__ import annotations
+
+import contextlib
+import os
+import pathlib
+import random
+from typing import Any
+
+import pytest
+import pytest_httpbin.serve
+import requests
+import responses
+
+from ecmwf.datastores import Results
+
+does_not_raise = contextlib.nullcontext
+
+RESULTS_URL = "http://localhost:8080/api/retrieve/v1/jobs/9bfc1362-2832-48e1-a235-359267420bb2/results"
+
+
+@pytest.fixture
+def results_json(httpbin: pytest_httpbin.serve.Server) -> dict[str, Any]:
+    return {
+        "asset": {
+            "value": {
+                "type": "application/x-grib",
+                "href": f"{httpbin.url}/range/10",
+                "file:size": 10,
+            }
+        }
+    }
+
+
+@pytest.fixture
+@responses.activate
+def results(results_json: dict[str, Any]) -> Results:
+    responses.add(
+        responses.GET,
+        RESULTS_URL,
+        json=results_json,
+        status=200,
+        content_type="application/json",
+    )
+    return Results.from_request(
+        "get",
+        RESULTS_URL,
+        headers={},
+        session=None,
+        retry_options={"maximum_tries": 1, "retry_after": 0},
+        request_options={},
+        download_options={},
+        sleep_max=120,
+        cleanup=False,
+        log_callback=None,
+    )
+
+
+@pytest.mark.parametrize(
+    "target,expected",
+    [
+        ("dummy.grib", "dummy.grib"),
+        (None, "10"),
+    ],
+)
+def test_results_download(
+    monkeypatch: pytest.MonkeyPatch,
+    results: Results,
+    tmp_path: pathlib.Path,
+    target: str | None,
+    expected: str,
+) -> None:
+    monkeypatch.chdir(tmp_path)
+    actual = results.download(target=target)
+    assert actual == expected
+    assert os.path.getsize(actual) == 10
+
+
+def test_results_asset(httpbin: pytest_httpbin.serve.Server, results: Results) -> None:
+    assert results.asset == {
+        "file:size": 10,
+        "href": f"{httpbin.url}/range/10",
+        "type": "application/x-grib",
+    }
+
+
+def test_results_content_length(results: Results) -> None:
+    assert results.content_length == 10
+
+
+def test_results_content_type(results: Results) -> None:
+    assert results.content_type == "application/x-grib"
+
+
+def test_results_json(results: Results, results_json: dict[str, Any]) -> None:
+    assert results.json == results_json
+
+
+def test_results_location(
+    httpbin: pytest_httpbin.serve.Server, results: Results
+) -> None:
+    assert results.location == f"{httpbin.url}/range/10"
+
+
+def test_results_url(results: Results) -> None:
+    assert results.url == RESULTS_URL
+
+
+@pytest.mark.parametrize(
+    "maximum_tries,raises",
+    [
+        (500, does_not_raise()),
+        (1, pytest.raises(requests.ConnectionError, match="Random error.")),
+    ],
+)
+def test_results_robust_download(
+    results: Results,
+    monkeypatch: pytest.MonkeyPatch,
+    tmp_path: pathlib.Path,
+    maximum_tries: int,
+    raises: contextlib.nullcontext[Any],
+) -> None:
+    from multiurl.http import FullHTTPDownloader
+
+    def patched_iter_content(self, *args, **kwargs):  # type: ignore
+        for chunk in self.iter_content(chunk_size=1):
+            if random.choice([True, False]):
+                raise requests.ConnectionError("Random error.")
+            yield chunk
+
+    def make_stream(self):  # type: ignore
+        request = self.issue_request(self.range)
+        return request.patched_iter_content
+
+    monkeypatch.setattr(
+        requests.Response, "patched_iter_content", patched_iter_content, raising=False
+    )
+    monkeypatch.setattr(FullHTTPDownloader, "make_stream", make_stream)
+    monkeypatch.setitem(results.retry_options, "maximum_tries", maximum_tries)
+
+    target = tmp_path / "test.txt"
+    with raises:
+        results.download(str(target))
+        assert target.stat().st_size == 10
```
ecmwf_datastores_client-0.2.0/tests/test_40_results.py
```diff
@@ -1,92 +0,0 @@
-from __future__ import annotations
-
-import os
-import pathlib
-
-import pytest
-import responses
-
-from ecmwf.datastores import Results
-
-RESULTS_URL = "http://localhost:8080/api/retrieve/v1/jobs/9bfc1362-2832-48e1-a235-359267420bb2/results"
-RESULTS_JSON = {
-    "asset": {
-        "value": {
-            "type": "application/x-grib",
-            "href": "http://httpbin.org/bytes/1",
-            "file:size": 1,
-        }
-    }
-}
-
-
-@pytest.fixture
-@responses.activate
-def results() -> Results:
-    responses.add(
-        responses.GET,
-        RESULTS_URL,
-        json=RESULTS_JSON,
-        status=200,
-        content_type="application/json",
-    )
-    return Results.from_request(
-        "get",
-        RESULTS_URL,
-        headers={},
-        session=None,
-        retry_options={"maximum_tries": 1},
-        request_options={},
-        download_options={},
-        sleep_max=120,
-        cleanup=False,
-        log_callback=None,
-    )
-
-
-@pytest.mark.parametrize(
-    "target,expected",
-    [
-        ("dummy.grib", "dummy.grib"),
-        (None, "1"),
-    ],
-)
-def test_results_download(
-    monkeypatch: pytest.MonkeyPatch,
-    results: Results,
-    tmp_path: pathlib.Path,
-    target: str | None,
-    expected: str,
-) -> None:
-    monkeypatch.chdir(tmp_path)
-    actual = results.download(target=target)
-    assert actual == expected
-    assert os.path.getsize(actual) == 1
-
-
-def test_results_asset(results: Results) -> None:
-    assert results.asset == {
-        "file:size": 1,
-        "href": "http://httpbin.org/bytes/1",
-        "type": "application/x-grib",
-    }
-
-
-def test_results_content_length(results: Results) -> None:
-    assert results.content_length == 1
-
-
-def test_results_content_type(results: Results) -> None:
-    assert results.content_type == "application/x-grib"
-
-
-def test_results_json(results: Results) -> None:
-    assert results.json == RESULTS_JSON
-
-
-def test_results_location(results: Results) -> None:
-    assert results.location == "http://httpbin.org/bytes/1"
-
-
-def test_results_url(results: Results) -> None:
-    assert results.url == RESULTS_URL
```
All remaining files listed above with +0 -0 (the other workflows, docs, LICENSE, Dockerfile, Makefile, setup.cfg, the untouched ecmwf/datastores modules, and the remaining tests) were only moved from the ecmwf_datastores_client-0.2.0 directory to the ecmwf_datastores_client-0.4.0 directory; their contents are unchanged.