ecmwf-datastores-client 0.3.0__tar.gz → 0.4.0__tar.gz

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of ecmwf-datastores-client might be problematic.

Files changed (55)
  1. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/.cruft.json +2 -2
  2. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/.github/workflows/on-push.yml +1 -1
  3. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/.pre-commit-config.yaml +4 -4
  4. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/PKG-INFO +7 -6
  5. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/README.md +4 -2
  6. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ci/environment-ci.yml +1 -0
  7. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/processing.py +13 -10
  8. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/version.py +1 -1
  9. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/PKG-INFO +7 -6
  10. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/requires.txt +1 -1
  11. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/environment.yml +1 -1
  12. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/pyproject.toml +7 -6
  13. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_40_results.py +0 -44
  14. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_70_legacy_client.py +2 -3
  15. ecmwf_datastores_client-0.4.0/tests/test_40_results.py +143 -0
  16. ecmwf_datastores_client-0.3.0/tests/test_40_results.py +0 -92
  17. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/.github/workflows/on-pr-closed.yml +0 -0
  18. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/.gitignore +0 -0
  19. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/.pre-commit-config-cruft.yaml +0 -0
  20. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/Dockerfile +0 -0
  21. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/LICENSE +0 -0
  22. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/Makefile +0 -0
  23. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ci/environment-integration.yml +0 -0
  24. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/docs/Makefile +0 -0
  25. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/docs/_static/.gitkeep +0 -0
  26. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/docs/_templates/.gitkeep +0 -0
  27. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/docs/conf.py +0 -0
  28. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/docs/index.md +0 -0
  29. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/docs/make.bat +0 -0
  30. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/docs/notebooks/index.md +0 -0
  31. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/docs/notebooks/quick_start.ipynb +0 -0
  32. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/__init__.py +0 -0
  33. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/catalogue.py +0 -0
  34. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/client.py +0 -0
  35. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/config.py +0 -0
  36. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/legacy_client.py +0 -0
  37. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/profile.py +0 -0
  38. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/py.typed +0 -0
  39. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf/datastores/utils.py +0 -0
  40. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/SOURCES.txt +0 -0
  41. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/dependency_links.txt +0 -0
  42. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/ecmwf_datastores_client.egg-info/top_level.txt +0 -0
  43. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/setup.cfg +0 -0
  44. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/conftest.py +0 -0
  45. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_10_catalogue.py +0 -0
  46. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_20_processing.py +0 -0
  47. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_30_remote.py +0 -0
  48. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_50_profile.py +0 -0
  49. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_60_client.py +0 -0
  50. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_80_adaptors.py +0 -0
  51. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/integration_test_90_features.py +0 -0
  52. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/test_00_version.py +0 -0
  53. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/test_01_config.py +0 -0
  54. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/test_10_catalogue.py +0 -0
  55. {ecmwf_datastores_client-0.3.0 → ecmwf_datastores_client-0.4.0}/tests/test_20_processing.py +0 -0
@@ -1,6 +1,6 @@
 {
   "template": "https://github.com/ecmwf-projects/cookiecutter-conda-package",
-  "commit": "0b4d61da26c3aacfc1778716cb36749d16846c51",
+  "commit": "28eb7788868b278f924c6171a0dba603591b2f33",
   "checkout": null,
   "context": {
     "cookiecutter": {
@@ -13,7 +13,7 @@
       "integration_tests": "True",
       "pypi": true,
       "_template": "https://github.com/ecmwf-projects/cookiecutter-conda-package",
-      "_commit": "0b4d61da26c3aacfc1778716cb36749d16846c51"
+      "_commit": "28eb7788868b278f924c6171a0dba603591b2f33"
     }
   },
   "directory": null
@@ -170,7 +170,7 @@ jobs:
 
     strategy:
       matrix:
-        python-version: ['3.8', '3.13']
+        python-version: ['3.9', '3.13']
         extra: ['-ci']
 
     steps:
@@ -12,12 +12,12 @@ repos:
   - id: debug-statements
   - id: mixed-line-ending
 - repo: https://github.com/keewis/blackdoc
-  rev: v0.3.9
+  rev: v0.4.1
   hooks:
   - id: blackdoc
     additional_dependencies: [black==23.11.0]
 - repo: https://github.com/astral-sh/ruff-pre-commit
-  rev: v0.11.12
+  rev: v0.12.5
   hooks:
   - id: ruff
     args: [--fix, --show-fixes]
@@ -27,14 +27,14 @@ repos:
   hooks:
   - id: mdformat
 - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-  rev: v2.14.0
+  rev: v2.15.0
   hooks:
   - id: pretty-format-yaml
     args: [--autofix, --preserve-quotes]
   - id: pretty-format-toml
     args: [--autofix]
 - repo: https://github.com/gitleaks/gitleaks
-  rev: v8.27.0
+  rev: v8.28.0
   hooks:
   - id: gitleaks
 - repo: https://github.com/kynan/nbstripout
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ecmwf-datastores-client
-Version: 0.3.0
+Version: 0.4.0
 Summary: ECMWF Data Stores Service (DSS) API Python client
 License: Apache License
 Version 2.0, January 2004
@@ -213,18 +213,17 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Scientific/Engineering
-Requires-Python: >=3.8
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: attrs
-Requires-Dist: multiurl>=0.3.2
+Requires-Dist: multiurl>=0.3.7
 Requires-Dist: requests
 Requires-Dist: typing-extensions
 Provides-Extra: legacy
@@ -322,8 +321,8 @@ Retrieve data:
 ... "time": ["00:00"],
 ... "pressure_level": ["1000"],
 ... "data_format": "grib",
-... "download_format": "unarchived"
-... }
+... "download_format": "unarchived",
+... }
 
 >>> client.retrieve(collection_id, request, target="target_1.grib") # blocks
 'target_1.grib'
@@ -359,6 +358,7 @@ List all collection IDs sorted by last update:
 >>> while collections is not None: # Loop over pages
 ... collection_ids.extend(collections.collection_ids)
 ... collections = collections.next # Move to the next page
+...
 
 >>> collection_ids
 [...]
@@ -425,6 +425,7 @@ List all successful jobs, sorted by newest first:
 >>> while jobs is not None: # Loop over pages
 ... request_ids.extend(jobs.request_ids)
 ... jobs = jobs.next # Move to the next page
+...
 
 >>> request_ids
 [...]
@@ -89,8 +89,8 @@ Retrieve data:
 ... "time": ["00:00"],
 ... "pressure_level": ["1000"],
 ... "data_format": "grib",
-... "download_format": "unarchived"
-... }
+... "download_format": "unarchived",
+... }
 
 >>> client.retrieve(collection_id, request, target="target_1.grib") # blocks
 'target_1.grib'
@@ -126,6 +126,7 @@ List all collection IDs sorted by last update:
 >>> while collections is not None: # Loop over pages
 ... collection_ids.extend(collections.collection_ids)
 ... collections = collections.next # Move to the next page
+...
 
 >>> collection_ids
 [...]
@@ -192,6 +193,7 @@ List all successful jobs, sorted by newest first:
 >>> while jobs is not None: # Loop over pages
 ... request_ids.extend(jobs.request_ids)
 ... jobs = jobs.next # Move to the next page
+...
 
 >>> request_ids
 [...]
@@ -20,3 +20,4 @@ dependencies:
 - pip:
   - cdsapi >= 0.7.6
   - responses
+  - pytest_httpbin
@@ -21,7 +21,7 @@ import os
 import time
 import urllib.parse
 import warnings
-from typing import Any, Callable, Type, TypedDict, TypeVar
+from typing import Any, Callable, TypedDict, TypeVar
 
 try:
     from typing import Self
@@ -144,7 +144,7 @@ class ApiResponse:
 
     @classmethod
     def from_request(
-        cls: Type[T_ApiResponse],
+        cls: type[T_ApiResponse],
         method: str,
         url: str,
         headers: dict[str, str],
@@ -620,13 +620,16 @@ class Results(ApiResponse):
     def _download(self, url: str, target: str) -> requests.Response:
         download_options = {"stream": True, "resume_transfers": True}
         download_options.update(self.download_options)
-        multiurl.download(
-            url,
-            target=target,
-            **self.retry_options,
-            **self.request_options,
-            **download_options,
-        )
+        try:
+            multiurl.download(
+                url,
+                target=target,
+                maximum_retries=0,
+                **self.request_options,
+                **download_options,
+            )
+        except requests.HTTPError as exc:
+            return exc.response
         return requests.Response()  # mutliurl robust needs a response
 
     def download(
@@ -661,7 +664,7 @@
     @property
     def location(self) -> str:
         """File location."""
-        result_href = self.asset["href"]
+        result_href: str = self.asset["href"]
         return urllib.parse.urljoin(self.response.url, result_href)
 
     @property
@@ -1,2 +1,2 @@
 # Do not change! Do not track in version control!
-__version__ = "0.3.0"
+__version__ = "0.4.0"
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: ecmwf-datastores-client
-Version: 0.3.0
+Version: 0.4.0
 Summary: ECMWF Data Stores Service (DSS) API Python client
 License: Apache License
 Version 2.0, January 2004
@@ -213,18 +213,17 @@ Classifier: License :: OSI Approved :: Apache Software License
 Classifier: Operating System :: OS Independent
 Classifier: Programming Language :: Python
 Classifier: Programming Language :: Python :: 3
-Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
 Classifier: Programming Language :: Python :: 3.12
 Classifier: Programming Language :: Python :: 3.13
 Classifier: Topic :: Scientific/Engineering
-Requires-Python: >=3.8
+Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: attrs
-Requires-Dist: multiurl>=0.3.2
+Requires-Dist: multiurl>=0.3.7
 Requires-Dist: requests
 Requires-Dist: typing-extensions
 Provides-Extra: legacy
@@ -322,8 +321,8 @@ Retrieve data:
 ... "time": ["00:00"],
 ... "pressure_level": ["1000"],
 ... "data_format": "grib",
-... "download_format": "unarchived"
-... }
+... "download_format": "unarchived",
+... }
 
 >>> client.retrieve(collection_id, request, target="target_1.grib") # blocks
 'target_1.grib'
@@ -359,6 +358,7 @@ List all collection IDs sorted by last update:
 >>> while collections is not None: # Loop over pages
 ... collection_ids.extend(collections.collection_ids)
 ... collections = collections.next # Move to the next page
+...
 
 >>> collection_ids
 [...]
@@ -425,6 +425,7 @@ List all successful jobs, sorted by newest first:
 >>> while jobs is not None: # Loop over pages
 ... request_ids.extend(jobs.request_ids)
 ... jobs = jobs.next # Move to the next page
+...
 
 >>> request_ids
 [...]
@@ -1,5 +1,5 @@
 attrs
-multiurl>=0.3.2
+multiurl>=0.3.7
 requests
 typing-extensions
 
@@ -9,6 +9,6 @@ channels:
 # DO NOT EDIT ABOVE THIS LINE, ADD DEPENDENCIES BELOW AS SHOWN IN THE EXAMPLE
 dependencies:
 - attrs
-- multiurl
+- multiurl >= 0.3.7
 - requests
 - typing-extensions
@@ -10,7 +10,6 @@ classifiers = [
   "Operating System :: OS Independent",
   "Programming Language :: Python",
   "Programming Language :: Python :: 3",
-  "Programming Language :: Python :: 3.8",
   "Programming Language :: Python :: 3.9",
   "Programming Language :: Python :: 3.10",
   "Programming Language :: Python :: 3.11",
@@ -20,7 +19,7 @@ classifiers = [
 ]
 dependencies = [
   "attrs",
-  "multiurl >= 0.3.2",
+  "multiurl >= 0.3.7",
   "requests",
   "typing-extensions"
 ]
@@ -29,7 +28,7 @@ dynamic = ["version"]
 license = {file = "LICENSE"}
 name = "ecmwf-datastores-client"
 readme = "README.md"
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 
 [project.optional-dependencies]
 legacy = ["cdsapi >= 0.7.6"]
@@ -48,7 +47,7 @@ strict = true
 
 [[tool.mypy.overrides]]
 ignore_missing_imports = true
-module = ["cdsapi.*", "multiurl.*"]
+module = ["cdsapi.*", "multiurl.*", "pytest_httpbin.*"]
 
 [tool.pytest.ini_options]
 markers = ["extra: additional tests skipped by CI"]
@@ -57,7 +56,7 @@ markers = ["extra: additional tests skipped by CI"]
 # Same as Black.
 indent-width = 4
 line-length = 88
-target-version = "py38"
+target-version = "py39"
 
 [tool.ruff.lint]
 ignore = [
@@ -75,7 +74,9 @@ select = [
   # pydocstyle
   "D",
   # flake8-future-annotations
-  "FA"
+  "FA",
+  # pyupgrade
+  "UP"
 ]
 
 [tool.ruff.lint.pycodestyle]
@@ -2,11 +2,8 @@ from __future__ import annotations
 
 import contextlib
 import pathlib
-import random
-from typing import Any
 
 import pytest
-import requests
 
 from ecmwf.datastores import Client, Results
 
@@ -41,47 +38,6 @@ def test_results_progress(
     assert captured.err if progress else not captured.err
 
 
-@pytest.mark.parametrize(
-    "maximum_tries,raises",
-    [
-        (500, does_not_raise()),
-        (1, pytest.raises(requests.ConnectionError, match="Random error.")),
-    ],
-)
-def test_results_robust_download(
-    api_root_url: str,
-    api_anon_key: str,
-    monkeypatch: pytest.MonkeyPatch,
-    tmp_path: pathlib.Path,
-    maximum_tries: int,
-    raises: contextlib.nullcontext[Any],
-) -> None:
-    from multiurl.http import FullHTTPDownloader
-
-    def patched_iter_content(self, *args, **kwargs):  # type: ignore
-        for chunk in self.iter_content(chunk_size=1):
-            if random.choice([True, False]):
-                raise requests.ConnectionError("Random error.")
-            yield chunk
-
-    def make_stream(self):  # type: ignore
-        request = self.issue_request(self.range)
-        return request.patched_iter_content
-
-    client = Client(
-        url=api_root_url, key=api_anon_key, retry_after=0, maximum_tries=maximum_tries
-    )
-    results = client.submit_and_wait_on_results("test-adaptor-dummy", {"size": 10})
-    monkeypatch.setattr(
-        requests.Response, "patched_iter_content", patched_iter_content, raising=False
-    )
-    monkeypatch.setattr(FullHTTPDownloader, "make_stream", make_stream)
-
-    target = tmp_path / "test.grib"
-    with raises:
-        results.download(str(target))
-
-
 def test_results_override(api_anon_client: Client, tmp_path: pathlib.Path) -> None:
     target_1 = tmp_path / "tmp1.grib"
     api_anon_client.retrieve("test-adaptor-dummy", {"size": 1}, target=str(target_1))
@@ -29,7 +29,7 @@ def legacy_update(remote: processing.Remote) -> None:
         remote.update()
 
         reply = remote.reply
-        remote.info("Request ID: %s, state: %s" % (reply["request_id"], reply["state"]))
+        remote.info(f"Request ID: {reply['request_id']!s}, state: {reply['state']!s}")
 
         if reply["state"] == "completed":
             break
@@ -49,8 +49,7 @@ def legacy_update(remote: processing.Remote) -> None:
                     break
                 remote.error(" %s", n)
             raise Exception(
-                "%s. %s."
-                % (reply["error"].get("message"), reply["error"].get("reason"))
+                f"{reply['error'].get('message')!s}. {reply['error'].get('reason')!s}."
             )
 
 
@@ -0,0 +1,143 @@
+from __future__ import annotations
+
+import contextlib
+import os
+import pathlib
+import random
+from typing import Any
+
+import pytest
+import pytest_httpbin.serve
+import requests
+import responses
+
+from ecmwf.datastores import Results
+
+does_not_raise = contextlib.nullcontext
+
+RESULTS_URL = "http://localhost:8080/api/retrieve/v1/jobs/9bfc1362-2832-48e1-a235-359267420bb2/results"
+
+
+@pytest.fixture
+def results_json(httpbin: pytest_httpbin.serve.Server) -> dict[str, Any]:
+    return {
+        "asset": {
+            "value": {
+                "type": "application/x-grib",
+                "href": f"{httpbin.url}/range/10",
+                "file:size": 10,
+            }
+        }
+    }
+
+
+@pytest.fixture
+@responses.activate
+def results(results_json: dict[str, Any]) -> Results:
+    responses.add(
+        responses.GET,
+        RESULTS_URL,
+        json=results_json,
+        status=200,
+        content_type="application/json",
+    )
+    return Results.from_request(
+        "get",
+        RESULTS_URL,
+        headers={},
+        session=None,
+        retry_options={"maximum_tries": 1, "retry_after": 0},
+        request_options={},
+        download_options={},
+        sleep_max=120,
+        cleanup=False,
+        log_callback=None,
+    )
+
+
+@pytest.mark.parametrize(
+    "target,expected",
+    [
+        ("dummy.grib", "dummy.grib"),
+        (None, "10"),
+    ],
+)
+def test_results_download(
+    monkeypatch: pytest.MonkeyPatch,
+    results: Results,
+    tmp_path: pathlib.Path,
+    target: str | None,
+    expected: str,
+) -> None:
+    monkeypatch.chdir(tmp_path)
+    actual = results.download(target=target)
+    assert actual == expected
+    assert os.path.getsize(actual) == 10
+
+
+def test_results_asset(httpbin: pytest_httpbin.serve.Server, results: Results) -> None:
+    assert results.asset == {
+        "file:size": 10,
+        "href": f"{httpbin.url}/range/10",
+        "type": "application/x-grib",
+    }
+
+
+def test_results_content_length(results: Results) -> None:
+    assert results.content_length == 10
+
+
+def test_results_content_type(results: Results) -> None:
+    assert results.content_type == "application/x-grib"
+
+
+def test_results_json(results: Results, results_json: dict[str, Any]) -> None:
+    assert results.json == results_json
+
+
+def test_results_location(
+    httpbin: pytest_httpbin.serve.Server, results: Results
+) -> None:
+    assert results.location == f"{httpbin.url}/range/10"
+
+
+def test_results_url(results: Results) -> None:
+    assert results.url == RESULTS_URL
+
+
+@pytest.mark.parametrize(
+    "maximum_tries,raises",
+    [
+        (500, does_not_raise()),
+        (1, pytest.raises(requests.ConnectionError, match="Random error.")),
+    ],
+)
+def test_results_robust_download(
+    results: Results,
+    monkeypatch: pytest.MonkeyPatch,
+    tmp_path: pathlib.Path,
+    maximum_tries: int,
+    raises: contextlib.nullcontext[Any],
+) -> None:
+    from multiurl.http import FullHTTPDownloader
+
+    def patched_iter_content(self, *args, **kwargs):  # type: ignore
+        for chunk in self.iter_content(chunk_size=1):
+            if random.choice([True, False]):
+                raise requests.ConnectionError("Random error.")
+            yield chunk
+
+    def make_stream(self):  # type: ignore
+        request = self.issue_request(self.range)
+        return request.patched_iter_content
+
+    monkeypatch.setattr(
+        requests.Response, "patched_iter_content", patched_iter_content, raising=False
+    )
+    monkeypatch.setattr(FullHTTPDownloader, "make_stream", make_stream)
+    monkeypatch.setitem(results.retry_options, "maximum_tries", maximum_tries)
+
+    target = tmp_path / "test.txt"
+    with raises:
+        results.download(str(target))
+        assert target.stat().st_size == 10
@@ -1,92 +0,0 @@
-from __future__ import annotations
-
-import os
-import pathlib
-
-import pytest
-import responses
-
-from ecmwf.datastores import Results
-
-RESULTS_URL = "http://localhost:8080/api/retrieve/v1/jobs/9bfc1362-2832-48e1-a235-359267420bb2/results"
-RESULTS_JSON = {
-    "asset": {
-        "value": {
-            "type": "application/x-grib",
-            "href": "http://httpbin.org/bytes/1",
-            "file:size": 1,
-        }
-    }
-}
-
-
-@pytest.fixture
-@responses.activate
-def results() -> Results:
-    responses.add(
-        responses.GET,
-        RESULTS_URL,
-        json=RESULTS_JSON,
-        status=200,
-        content_type="application/json",
-    )
-    return Results.from_request(
-        "get",
-        RESULTS_URL,
-        headers={},
-        session=None,
-        retry_options={"maximum_tries": 1},
-        request_options={},
-        download_options={},
-        sleep_max=120,
-        cleanup=False,
-        log_callback=None,
-    )
-
-
-@pytest.mark.parametrize(
-    "target,expected",
-    [
-        ("dummy.grib", "dummy.grib"),
-        (None, "1"),
-    ],
-)
-def test_results_download(
-    monkeypatch: pytest.MonkeyPatch,
-    results: Results,
-    tmp_path: pathlib.Path,
-    target: str | None,
-    expected: str,
-) -> None:
-    monkeypatch.chdir(tmp_path)
-    actual = results.download(target=target)
-    assert actual == expected
-    assert os.path.getsize(actual) == 1
-
-
-def test_results_asset(results: Results) -> None:
-    assert results.asset == {
-        "file:size": 1,
-        "href": "http://httpbin.org/bytes/1",
-        "type": "application/x-grib",
-    }
-
-
-def test_results_content_length(results: Results) -> None:
-    assert results.content_length == 1
-
-
-def test_results_content_type(results: Results) -> None:
-    assert results.content_type == "application/x-grib"
-
-
-def test_results_json(results: Results) -> None:
-    assert results.json == RESULTS_JSON
-
-
-def test_results_location(results: Results) -> None:
-    assert results.location == "http://httpbin.org/bytes/1"
-
-
-def test_results_url(results: Results) -> None:
-    assert results.url == RESULTS_URL