eodag 3.0.1__py3-none-any.whl → 3.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eodag/api/core.py +174 -138
- eodag/api/product/_assets.py +44 -15
- eodag/api/product/_product.py +58 -47
- eodag/api/product/drivers/__init__.py +81 -4
- eodag/api/product/drivers/base.py +65 -4
- eodag/api/product/drivers/generic.py +65 -0
- eodag/api/product/drivers/sentinel1.py +97 -0
- eodag/api/product/drivers/sentinel2.py +95 -0
- eodag/api/product/metadata_mapping.py +117 -90
- eodag/api/search_result.py +13 -23
- eodag/cli.py +26 -5
- eodag/config.py +86 -92
- eodag/plugins/apis/base.py +1 -1
- eodag/plugins/apis/ecmwf.py +42 -22
- eodag/plugins/apis/usgs.py +17 -16
- eodag/plugins/authentication/aws_auth.py +16 -13
- eodag/plugins/authentication/base.py +5 -3
- eodag/plugins/authentication/header.py +3 -3
- eodag/plugins/authentication/keycloak.py +4 -4
- eodag/plugins/authentication/oauth.py +7 -3
- eodag/plugins/authentication/openid_connect.py +22 -16
- eodag/plugins/authentication/sas_auth.py +4 -4
- eodag/plugins/authentication/token.py +41 -10
- eodag/plugins/authentication/token_exchange.py +1 -1
- eodag/plugins/base.py +4 -4
- eodag/plugins/crunch/base.py +4 -4
- eodag/plugins/crunch/filter_date.py +4 -4
- eodag/plugins/crunch/filter_latest_intersect.py +6 -6
- eodag/plugins/crunch/filter_latest_tpl_name.py +7 -7
- eodag/plugins/crunch/filter_overlap.py +4 -4
- eodag/plugins/crunch/filter_property.py +6 -7
- eodag/plugins/download/aws.py +146 -87
- eodag/plugins/download/base.py +38 -56
- eodag/plugins/download/creodias_s3.py +29 -0
- eodag/plugins/download/http.py +173 -183
- eodag/plugins/download/s3rest.py +10 -11
- eodag/plugins/manager.py +10 -20
- eodag/plugins/search/__init__.py +6 -5
- eodag/plugins/search/base.py +90 -46
- eodag/plugins/search/build_search_result.py +1048 -361
- eodag/plugins/search/cop_marine.py +22 -12
- eodag/plugins/search/creodias_s3.py +9 -73
- eodag/plugins/search/csw.py +11 -11
- eodag/plugins/search/data_request_search.py +19 -18
- eodag/plugins/search/qssearch.py +99 -258
- eodag/plugins/search/stac_list_assets.py +85 -0
- eodag/plugins/search/static_stac_search.py +4 -4
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +1134 -325
- eodag/resources/providers.yml +906 -2006
- eodag/resources/stac_api.yml +2 -2
- eodag/resources/user_conf_template.yml +10 -9
- eodag/rest/cache.py +2 -2
- eodag/rest/config.py +3 -3
- eodag/rest/core.py +112 -82
- eodag/rest/errors.py +5 -5
- eodag/rest/server.py +33 -14
- eodag/rest/stac.py +41 -38
- eodag/rest/types/collections_search.py +3 -3
- eodag/rest/types/eodag_search.py +29 -23
- eodag/rest/types/queryables.py +42 -31
- eodag/rest/types/stac_search.py +15 -25
- eodag/rest/utils/__init__.py +14 -21
- eodag/rest/utils/cql_evaluate.py +6 -6
- eodag/rest/utils/rfc3339.py +2 -2
- eodag/types/__init__.py +141 -32
- eodag/types/bbox.py +2 -2
- eodag/types/download_args.py +3 -3
- eodag/types/queryables.py +183 -72
- eodag/types/search_args.py +4 -4
- eodag/types/whoosh.py +127 -3
- eodag/utils/__init__.py +153 -51
- eodag/utils/exceptions.py +28 -21
- eodag/utils/import_system.py +2 -2
- eodag/utils/repr.py +65 -6
- eodag/utils/requests.py +13 -13
- eodag/utils/rest.py +2 -2
- eodag/utils/s3.py +231 -0
- eodag/utils/stac_reader.py +10 -10
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/METADATA +77 -76
- eodag-3.1.0.dist-info/RECORD +113 -0
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/WHEEL +1 -1
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/entry_points.txt +4 -2
- eodag/utils/constraints.py +0 -244
- eodag-3.0.1.dist-info/RECORD +0 -109
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/LICENSE +0 -0
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/top_level.txt +0 -0
eodag/plugins/search/build_search_result.py

@@ -17,120 +17,979 @@
 # limitations under the License.
 from __future__ import annotations
 
+import functools
 import hashlib
 import logging
-
-from
-
-
-    Any,
-    Dict,
-    List,
-    Optional,
-    Set,
-    Tuple,
-    cast,
-    get_args,
-)
+import re
+from collections import OrderedDict
+from datetime import datetime, timedelta
+from typing import TYPE_CHECKING, Annotated, Any, Optional, Union
 from urllib.parse import quote_plus, unquote_plus
 
 import geojson
 import orjson
 from dateutil.parser import isoparse
 from dateutil.tz import tzutc
-from
-from pydantic import
+from dateutil.utils import today
+from pydantic import Field
 from pydantic.fields import FieldInfo
+from requests.auth import AuthBase
+from shapely.geometry.base import BaseGeometry
+from typing_extensions import get_args
 
 from eodag.api.product import EOProduct
 from eodag.api.product.metadata_mapping import (
     NOT_AVAILABLE,
-
-
-    mtd_cfg_as_conversion_and_querypath,
+    OFFLINE_STATUS,
+    format_metadata,
     properties_from_json,
 )
 from eodag.api.search_result import RawSearchResult
 from eodag.plugins.search import PreparedSearch
-from eodag.plugins.search.
-from eodag.
-from eodag.types import
-from eodag.types.queryables import CommonQueryables
+from eodag.plugins.search.qssearch import PostJsonSearch, QueryStringSearch
+from eodag.types import json_field_definition_to_python
+from eodag.types.queryables import Queryables, QueryablesDict
 from eodag.utils import (
     DEFAULT_MISSION_START_DATE,
+    DEFAULT_SEARCH_TIMEOUT,
     deepcopy,
     dict_items_recursive_sort,
     get_geometry_from_various,
-
-from eodag.utils.constraints import (
-    fetch_constraints,
-    get_constraint_queryables_with_additional_params,
+    is_range_in_range,
 )
 from eodag.utils.exceptions import ValidationError
+from eodag.utils.requests import fetch_json
 
 if TYPE_CHECKING:
     from eodag.config import PluginConfig
 
 logger = logging.getLogger("eodag.search.build_search_result")
 
+ECMWF_PREFIX = "ecmwf:"
+
+# keywords from ECMWF keyword database + "dataset" (not part of database but exists)
+# database: https://confluence.ecmwf.int/display/UDOC/Keywords+in+MARS+and+Dissemination+requests
+ECMWF_KEYWORDS = {
+    "dataset",
+    "accuracy",
+    "activity",
+    "anoffset",
+    "bitmap",
+    "block",
+    "channel",
+    "class",
+    "database",
+    "date",
+    "diagnostic",
+    "direction",
+    "domain",
+    "duplicates",
+    "expect",
+    "expver",
+    "fcmonth",
+    "fcperiod",
+    "fieldset",
+    "filter",
+    "format",
+    "frame",
+    "frequency",
+    "generation",
+    "grid",
+    "hdate",
+    "ident",
+    "interpolation",
+    "intgrid",
+    "iteration",
+    "latitude",
+    "levelist",
+    "levtype",
+    "longitude",
+    "lsm",
+    "method",
+    "number",
+    "obsgroup",
+    "obstype",
+    "origin",
+    "packing",
+    "padding",
+    "param",
+    "priority",
+    "product",
+    "range",
+    "realization",
+    "refdate",
+    "reference",
+    "reportype",
+    "repres",
+    "resolution",
+    "rotation",
+    "section",
+    "source",
+    "step",
+    "stream",
+    "system",
+    "target",
+    "time",
+    "truncation",
+    "type",
+    "use",
+}
+
+# additional keywords from copernicus services
+COP_DS_KEYWORDS = {
+    "aerosol_type",
+    "altitude",
+    "product_type",
+    "band",
+    "cdr_type",
+    "data_format",
+    "dataset_type",
+    "day",
+    "download_format",
+    "ensemble_member",
+    "experiment",
+    "forcing_type",
+    "gcm",
+    "hday",
+    "hmonth",
+    "horizontal_resolution",
+    "hydrological_model",
+    "hydrological_year",
+    "hyear",
+    "input_observations",
+    "leadtime_hour",
+    "leadtime_month",
+    "level",
+    "location",
+    "model",
+    "model_level",
+    "model_levels",
+    "month",
+    "nominal_day",
+    "originating_centre",
+    "period",
+    "pressure_level",
+    "processing_level",
+    "processing_type",
+    "product_version",
+    "quantity",
+    "rcm",
+    "region",
+    "release_version",
+    "satellite",
+    "sensor",
+    "sensor_and_algorithm",
+    "soil_level",
+    "sky_type",
+    "statistic",
+    "system_version",
+    "temporal_aggregation",
+    "time_aggregation",
+    "time_reference",
+    "time_step",
+    "variable",
+    "variable_type",
+    "version",
+    "year",
+}
+
+ALLOWED_KEYWORDS = ECMWF_KEYWORDS | COP_DS_KEYWORDS
+
+END = "completionTimeFromAscendingNode"
+
+START = "startTimeFromAscendingNode"
+
+
+def ecmwf_mtd() -> dict[str, Any]:
+    """
+    Make metadata mapping dict from a list of defined ECMWF Keywords
+
+    We automatically add the #to_geojson convert to prevent modification of entries by eval() in the metadata mapping.
 
-
-
+    keyword:
+      - keyword
+      - $."keyword"#to_geojson
 
-
-
-
+    :return: metadata mapping dict
+    """
+    return {k: [k, f'{{$."{k}"#to_geojson}}'] for k in ALLOWED_KEYWORDS}
+
+
+def _update_properties_from_element(
+    prop: dict[str, Any], element: dict[str, Any], values: list[str]
+) -> None:
+    """updates a property dict with the given values based on the information from the element dict
+    e.g. the type is set based on the type of the element
+    """
+    # multichoice elements are transformed into array
+    if element["type"] in ("StringListWidget", "StringListArrayWidget"):
+        prop["type"] = "array"
+        if values:
+            prop["items"] = {"type": "string", "enum": sorted(values)}
+
+    # single choice elements are transformed into string
+    elif element["type"] in (
+        "StringChoiceWidget",
+        "DateRangeWidget",
+        "FreeformInputWidget",
+    ):
+        prop["type"] = "string"
+        if values:
+            prop["enum"] = sorted(values)
+
+    # a bbox element
+    elif element["type"] in ["GeographicExtentWidget", "GeographicExtentMapWidget"]:
+        prop.update(
+            {
+                "type": "array",
+                "minItems": 4,
+                "additionalItems": False,
+                "items": [
+                    {
+                        "type": "number",
+                        "maximum": 180,
+                        "minimum": -180,
+                        "description": "West border of the bounding box",
+                    },
+                    {
+                        "type": "number",
+                        "maximum": 90,
+                        "minimum": -90,
+                        "description": "South border of the bounding box",
+                    },
+                    {
+                        "type": "number",
+                        "maximum": 180,
+                        "minimum": -180,
+                        "description": "East border of the bounding box",
+                    },
+                    {
+                        "type": "number",
+                        "maximum": 90,
+                        "minimum": -90,
+                        "description": "North border of the bounding box",
+                    },
+                ],
+            }
+        )
 
-
-
-
+    # DateRangeWidget is a calendar date picker
+    if element["type"] == "DateRangeWidget":
+        prop["description"] = "date formatted like yyyy-mm-dd/yyyy-mm-dd"
 
-
+    if description := element.get("help"):
+        prop["description"] = description
+
+
+def ecmwf_format(v: str) -> str:
+    """Add ECMWF prefix to value v if v is a ECMWF keyword."""
+    return ECMWF_PREFIX + v if v in ALLOWED_KEYWORDS else v
+
+
+class ECMWFSearch(PostJsonSearch):
+    """ECMWF search plugin.
+
+    This plugin builds a :class:`~eodag.api.search_result.SearchResult` containing a single product
+    using given query parameters as product properties.
+
+    The available configuration parameters inherits from parent classes, with some particular parameters
+    for this plugin.
+
+    :param provider: An eodag providers configuration dictionary
     :param config: Search plugin configuration:
 
-        * :attr:`~eodag.config.PluginConfig.remove_from_query` (``
+        * :attr:`~eodag.config.PluginConfig.remove_from_query` (``list[str]``): List of parameters
          used to parse metadata but that must not be included to the query
-
+        * :attr:`~eodag.config.PluginConfig.end_date_excluded` (``bool``): Set to `False` if
+          provider does not include end date to search
+        * :attr:`~eodag.config.PluginConfig.discover_queryables`
+          (:class:`~eodag.config.PluginConfig.DiscoverQueryables`): configuration to fetch the queryables from a
+          provider queryables endpoint; It has the following keys:
+
+          * :attr:`~eodag.config.PluginConfig.DiscoverQueryables.fetch_url` (``str``): url to fetch the queryables valid
+            for all product types
+          * :attr:`~eodag.config.PluginConfig.DiscoverQueryables.product_type_fetch_url` (``str``): url to fetch the
+            queryables for a specific product type
+          * :attr:`~eodag.config.PluginConfig.DiscoverQueryables.constraints_url` (``str``): url of the constraint file
+            used to build queryables
     """
 
-    def
-
-
-
-
+    def __init__(self, provider: str, config: PluginConfig) -> None:
+        config.metadata_mapping = {
+            **ecmwf_mtd(),
+            **{
+                "id": "$.id",
+                "title": "$.id",
+                "storageStatus": OFFLINE_STATUS,
+                "downloadLink": "$.null",
+                "geometry": ["feature", "$.geometry"],
+                "defaultGeometry": "POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90))",
+            },
+            **config.metadata_mapping,
+        }
 
-
+        super().__init__(provider, config)
+
+        # ECMWF providers do not feature any api_endpoint or next_page_query_obj.
+        # Searched is faked by EODAG.
+        self.config.__dict__.setdefault("api_endpoint", "")
+        self.config.pagination.setdefault("next_page_query_obj", "{{}}")
+
+    def do_search(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]:
+        """Should perform the actual search request.
+
+        :param args: arguments to be used in the search
+        :param kwargs: keyword arguments to be used in the search
+        :return: list containing the results from the provider in json format
+        """
+        # no real search. We fake it all
+        return [{}]
+
+    def query(
         self,
         prep: PreparedSearch = PreparedSearch(),
         **kwargs: Any,
-    ) ->
-        """
-        urls, _ = super(BuildPostSearchResult, self).collect_search_urls(prep, **kwargs)
-        return urls, 1
+    ) -> tuple[list[EOProduct], Optional[int]]:
+        """Build ready-to-download SearchResult
 
-
-
-
-        """
-
-
-
-
+        :param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information needed for the search
+        :param kwargs: keyword arguments to be used in the search
+        :returns: list of products and number of products (optional)
+        """
+        product_type = prep.product_type
+        if not product_type:
+            product_type = kwargs.get("productType", None)
+        kwargs = self._preprocess_search_params(kwargs, product_type)
+        result, num_items = super().query(prep, **kwargs)
+        if prep.count and not num_items:
+            num_items = 1
+
+        return result, num_items
+
+    def clear(self) -> None:
+        """Clear search context"""
+        super().clear()
+
+    def build_query_string(
+        self, product_type: str, query_dict: dict[str, Any]
+    ) -> tuple[dict[str, Any], str]:
+        """Build The query string using the search parameters
+
+        :param product_type: product type id
+        :param query_dict: keyword arguments to be used in the query string
+        :return: formatted query params and encode query string
+        """
+        query_dict["_date"] = f"{query_dict.get(START)}/{query_dict.get(END)}"
+
+        # Reorder kwargs to make sure year/month/day/time if set overwrite default datetime.
+        priority_keys = [
+            START,
+            END,
+        ]
+        ordered_kwargs = {k: query_dict[k] for k in priority_keys if k in query_dict}
+        ordered_kwargs.update(query_dict)
+
+        return super().build_query_string(
+            product_type=product_type, query_dict=ordered_kwargs
         )
-        response = self._request(prep)
 
-
+    def _preprocess_search_params(
+        self, params: dict[str, Any], product_type: Optional[str]
+    ) -> dict[str, Any]:
+        """Preprocess search parameters before making a request to the CDS API.
+
+        This method is responsible for checking and updating the provided search parameters
+        to ensure that required parameters like 'productType', 'startTimeFromAscendingNode',
+        'completionTimeFromAscendingNode', and 'geometry' are properly set. If not specified
+        in the input parameters, default values or values from the configuration are used.
+
+        :param params: Search parameters to be preprocessed.
+        :param product_type: (optional) product type id
+        """
+        _dc_qs = params.get("_dc_qs", None)
+        if _dc_qs is not None:
+            # if available, update search params using datacube query-string
+            _dc_qp = geojson.loads(unquote_plus(unquote_plus(_dc_qs)))
+            if "/to/" in _dc_qp.get("date", ""):
+                params[START], params[END] = _dc_qp["date"].split("/to/")
+            elif "/" in _dc_qp.get("date", ""):
+                (params[START], params[END],) = _dc_qp[
+                    "date"
+                ].split("/")
+            elif _dc_qp.get("date", None):
+                params[START] = params[END] = _dc_qp["date"]
+
+            if "/" in _dc_qp.get("area", ""):
+                params["geometry"] = _dc_qp["area"].split("/")
+
+        params = {
+            k.removeprefix(ECMWF_PREFIX): v for k, v in params.items() if v is not None
+        }
+
+        # dates
+        # check if default dates have to be added
+        if getattr(self.config, "dates_required", False):
+            self._check_date_params(params, product_type)
+
+        # adapt end date if it is midnight
+        if END in params:
+            end_date_excluded = getattr(self.config, "end_date_excluded", True)
+            is_datetime = True
+            try:
+                end_date = datetime.strptime(params[END], "%Y-%m-%dT%H:%M:%SZ")
+                end_date = end_date.replace(tzinfo=tzutc())
+            except ValueError:
+                try:
+                    end_date = datetime.strptime(
+                        params[END],
+                        "%Y-%m-%dT%H:%M:%S.%fZ",
+                    )
+                    end_date = end_date.replace(tzinfo=tzutc())
+                except ValueError:
+                    end_date = isoparse(params[END])
+                    is_datetime = False
+            start_date = isoparse(params[START])
+            if (
+                not end_date_excluded
+                and is_datetime
+                and end_date > start_date
+                and end_date
+                == end_date.replace(hour=0, minute=0, second=0, microsecond=0)
+            ):
+                end_date += timedelta(days=-1)
+                params[END] = end_date.isoformat()
+
+        # geometry
+        if "geometry" in params:
+            params["geometry"] = get_geometry_from_various(geometry=params["geometry"])
+
+        return params
+
+    def _check_date_params(
+        self, keywords: dict[str, Any], product_type: Optional[str]
+    ) -> None:
+        """checks if start and end date are present in the keywords and adds them if not"""
+
+        if START and END in keywords:
+            return
+
+        product_type_conf = getattr(self.config, "metadata_mapping", {})
+        if (
+            product_type
+            and product_type in self.config.products
+            and "metadata_mapping" in self.config.products[product_type]
+        ):
+            product_type_conf = self.config.products[product_type]["metadata_mapping"]
+
+        # start time given, end time missing
+        if START in keywords:
+            keywords[END] = (
+                keywords[START]
+                if END in product_type_conf
+                else self.get_product_type_cfg_value(
+                    "missionEndDate", today().isoformat()
+                )
+            )
+            return
+
+        if END in product_type_conf:
+            mapping = product_type_conf[START]
+            if not isinstance(mapping, list):
+                mapping = product_type_conf[END]
+            if isinstance(mapping, list):
+                # get time parameters (date, year, month, ...) from metadata mapping
+                input_mapping = mapping[0].replace("{{", "").replace("}}", "")
+                time_params = [
+                    values.split(":")[0].strip() for values in input_mapping.split(",")
+                ]
+                time_params = [
+                    tp.replace('"', "").replace("'", "") for tp in time_params
+                ]
+                # if startTime is not given but other time params (e.g. year/month/(day)) are given,
+                # no default date is required
+                in_keywords = True
+                for tp in time_params:
+                    if tp not in keywords:
+                        in_keywords = False
+                        break
+                if not in_keywords:
+                    keywords[START] = self.get_product_type_cfg_value(
+                        "missionStartDate", DEFAULT_MISSION_START_DATE
+                    )
+                    keywords[END] = (
+                        keywords[START]
+                        if END in product_type_conf
+                        else self.get_product_type_cfg_value(
+                            "missionEndDate", today().isoformat()
+                        )
+                    )
+
+    def _get_product_type_queryables(
+        self, product_type: Optional[str], alias: Optional[str], filters: dict[str, Any]
+    ) -> QueryablesDict:
+        """Override to set additional_properties to false."""
+        default_values: dict[str, Any] = deepcopy(
+            getattr(self.config, "products", {}).get(product_type, {})
+        )
+        default_values.pop("metadata_mapping", None)
+
+        filters["productType"] = product_type
+        queryables = self.discover_queryables(**{**default_values, **filters}) or {}
+
+        return QueryablesDict(additional_properties=False, **queryables)
+
+    def discover_queryables(
+        self, **kwargs: Any
+    ) -> Optional[dict[str, Annotated[Any, FieldInfo]]]:
+        """Fetch queryables list from provider using its constraints file
+
+        :param kwargs: additional filters for queryables (`productType` and other search
+            arguments)
+        :returns: fetched queryable parameters dict
+        """
+        product_type = kwargs.pop("productType")
+
+        pt_config = self.get_product_type_def_params(product_type)
+
+        default_values = deepcopy(pt_config)
+        default_values.pop("metadata_mapping", None)
+        filters = {**default_values, **kwargs}
+
+        if "start" in filters:
+            filters[START] = filters.pop("start")
+        if "end" in filters:
+            filters[END] = filters.pop("end")
+
+        # extract default datetime
+        processed_filters = self._preprocess_search_params(
+            deepcopy(filters), product_type
+        )
+
+        constraints_url = format_metadata(
+            getattr(self.config, "discover_queryables", {}).get("constraints_url", ""),
+            **filters,
+        )
+        constraints: list[dict[str, Any]] = self._fetch_data(constraints_url)
+
+        form_url = format_metadata(
+            getattr(self.config, "discover_queryables", {}).get("form_url", ""),
+            **filters,
+        )
+        form: list[dict[str, Any]] = self._fetch_data(form_url)
+
+        formated_filters = self.format_as_provider_keyword(
+            product_type, processed_filters
+        )
+        # we re-apply kwargs input to consider override of year, month, day and time.
+        for k, v in {**default_values, **kwargs}.items():
+            key = k.removeprefix(ECMWF_PREFIX)
+
+            if key not in ALLOWED_KEYWORDS | {
+                START,
+                END,
+                "geom",
+                "geometry",
+            }:
+                raise ValidationError(
+                    f"{key} is not a queryable parameter for {self.provider}"
+                )
+
+            formated_filters[key] = v
+
+        # we use non empty filters as default to integrate user inputs
+        # it is needed because pydantic json schema does not represent "value"
+        # but only "default"
+        non_empty_formated: dict[str, Any] = {
+            k: v
+            for k, v in formated_filters.items()
+            if v and (not isinstance(v, list) or all(v))
+        }
+
+        required_keywords: set[str] = set()
+
+        # calculate available values
+        if constraints:
+            # Apply constraints filtering
+            available_values = self.available_values_from_constraints(
+                constraints,
+                non_empty_formated,
+                form_keywords=[f["name"] for f in form],
+            )
+
+            # Pre-compute the required keywords (present in all constraint dicts)
+            # when form, required keywords are extracted directly from form
+            if not form:
+                required_keywords = set.intersection(
+                    *(map(lambda d: set(d.keys()), constraints))
+                )
+
+        else:
+            values_url = getattr(self.config, "available_values_url", "")
+            if not values_url:
+                return self.queryables_from_metadata_mapping(product_type)
+            if "{" in values_url:
+                values_url = values_url.format(**filters)
+            data = self._fetch_data(values_url)
+            available_values = data["constraints"]
+            required_keywords = data.get("required", [])
+
+        # To check if all keywords are queryable parameters, we check if they are in the
+        # available values or the product type config (available values calculated from the
+        # constraints might not include all queryables)
+        for keyword in filters:
+            if (
+                keyword
+                not in available_values.keys()
+                | pt_config.keys()
+                | {
+                    START,
+                    END,
+                    "geom",
+                }
+                and keyword not in [f["name"] for f in form]
+                and keyword.removeprefix(ECMWF_PREFIX)
+                not in set(list(available_values.keys()) + [f["name"] for f in form])
+            ):
+                raise ValidationError(f"{keyword} is not a queryable parameter")
+
+        # generate queryables
+        if form:
+            queryables = self.queryables_by_form(
+                form,
+                available_values,
+                non_empty_formated,
+            )
+        else:
+            queryables = self.queryables_by_values(
+                available_values, list(required_keywords), non_empty_formated
+            )
+
+        # ecmwf:date is replaced by start and end.
+        # start and end filters are supported whenever combinations of "year", "month", "day" filters exist
+        if (
+            queryables.pop(f"{ECMWF_PREFIX}date", None)
+            or f"{ECMWF_PREFIX}year" in queryables
+            or f"{ECMWF_PREFIX}hyear" in queryables
+        ):
+            queryables.update(
+                {
+                    "start": Queryables.get_with_default(
+                        "start", processed_filters.get(START)
+                    ),
+                    "end": Queryables.get_with_default(
+                        "end",
+                        processed_filters.get(END),
+                    ),
+                }
+            )
+
+        # area is geom in EODAG.
+        if queryables.pop("area", None):
+            queryables["geom"] = Annotated[
+                Union[str, dict[str, float], BaseGeometry],
+                Field(
+                    None,
+                    description="Read EODAG documentation for all supported geometry format.",
+                ),
+            ]
+
+        return queryables
+
+    def available_values_from_constraints(
+        self,
+        constraints: list[dict[str, Any]],
+        input_keywords: dict[str, Any],
+        form_keywords: list[str],
+    ) -> dict[str, list[str]]:
+        """
+        Filter constraints using input_keywords. Return list of available queryables.
+        All constraint entries must have the same parameters.
+
+        :param constraints: list of constraints received from the provider
+        :param input_keywords: dict of input parameters given by the user
+        :param form_keywords: list of keyword names from the provider form endpoint
+        :return: dict with available values for each parameter
+        """
+        # get ordered constraint keywords
+        constraints_keywords = list(
+            OrderedDict.fromkeys(k for c in constraints for k in c.keys())
+        )
+
+        # prepare ordered input keywords formatted as provider's keywords
+        # required to filter with constraints
+        ordered_keywords = (
+            [kw for kw in form_keywords if kw in constraints_keywords]
+            if form_keywords
+            else constraints_keywords
+        )
+
+        # filter constraint entries matching input keyword values
+        filtered_constraints: list[dict[str, Any]]
+
+        parsed_keywords: list[str] = []
+        for keyword in ordered_keywords:
+            values = input_keywords.get(keyword)
+
+            if values is None:
+                parsed_keywords.append(keyword)
+                continue
+
+            # we only compare list of strings.
+            if isinstance(values, dict):
+                raise ValidationError(
+                    f"Parameter value as object is not supported: {keyword}={values}"
+                )
+
+            # We convert every single value to a list of string
+            filter_v = values if isinstance(values, (list, tuple)) else [values]
+
+            # We strip values of superfluous quotes (added by mapping converter to_geojson).
+            # ECMWF accept values with /to/. We need to split it to an array
+            # ECMWF accept values in format val1/val2. We need to split it to an array
+            sep = re.compile(r"/to/|/")
+            filter_v = [i for v in filter_v for i in sep.split(str(v))]
+
+            # special handling for time 0000 converted to 0 by pre-formating with metadata_mapping
+            if keyword.split(":")[-1] == "time":
+                filter_v = ["0000" if str(v) == "0" else v for v in filter_v]
+
+            # Collect missing values to report errors
+            missing_values = set(filter_v)
+
+            # Filter constraints and check for missing values
+            filtered_constraints = []
+            for entry in constraints:
+                # Filter based on the presence of any value in filter_v
+                entry_values = entry.get(keyword, [])
+
+                # date constraint may be intervals. We identify intervals with a "/" in the value
+                # we assume that if the first value is an interval, all values are intervals
+                present_values = []
+                if keyword == "date" and "/" in entry[keyword][0]:
+                    input_range = values
+                    if isinstance(values, list):
+                        input_range = values[0]
+                    if any(is_range_in_range(x, input_range) for x in entry[keyword]):
+                        present_values = filter_v
+                else:
+                    present_values = [
+                        value for value in filter_v if value in entry_values
+                    ]
+
+                # Remove present values from the missing_values set
+                missing_values -= set(present_values)
+
+                if present_values:
+                    filtered_constraints.append(entry)
+
+            # raise an error as no constraint entry matched the input keywords
+            # raise an error if one value from input is not allowed
+            if not filtered_constraints or missing_values:
+                allowed_values = list(
+                    {value for c in constraints for value in c.get(keyword, [])}
+                )
+                # restore ecmwf: prefix before raising error
+                keyword = ECMWF_PREFIX + keyword
+
+                all_keywords_str = ""
+                if len(parsed_keywords) > 1:
+                    keywords = [
+                        f"{ECMWF_PREFIX + k}={pk}"
+                        for k in parsed_keywords
+                        if (pk := input_keywords.get(k))
+                    ]
+                    all_keywords_str = f" with {', '.join(keywords)}"
+
+                raise ValidationError(
+                    f"{keyword}={values} is not available"
+                    f"{all_keywords_str}."
+                    f" Allowed values are {', '.join(allowed_values)}."
+                )
+
+            parsed_keywords.append(keyword)
+            constraints = filtered_constraints
+
+        available_values: dict[str, Any] = {k: set() for k in ordered_keywords}
+
+        # we aggregate the constraint entries left
+        for entry in constraints:
+            for key, value in entry.items():
+                available_values[key].update(value)
+
+        return {k: list(v) for k, v in available_values.items()}
+
+    def queryables_by_form(
+        self,
+        form: list[dict[str, Any]],
+        available_values: dict[str, list[str]],
+        defaults: dict[str, Any],
+    ) -> dict[str, Annotated[Any, FieldInfo]]:
+        """
+        Generate Annotated field definitions from form entries and available values
+        Used by Copernicus services like cop_cds, cop_ads, cop_ewds.
+
+        :param form: data fetched from the form endpoint of the provider
+        :param available_values: available values for each parameter
+        :param defaults: default values for the parameters
+        :return: dict of annotated queryables
+        """
+        queryables: dict[str, Annotated[Any, FieldInfo]] = {}
+
+        required_list: list[str] = []
+        for element in form:
+            name: str = element["name"]
+
+            # those are not parameter elements.
+            if name in ("area_group", "global", "warning", "licences"):
+                continue
+            if "type" not in element or element["type"] == "FreeEditionWidget":
+                # FreeEditionWidget used to select the whole available region
+                # and to provide comments for the dataset
+                continue
+
+            # ordering done by id -> set id to high value if not present -> element will be last
+            if "id" not in element:
+                element["id"] = 100
+
+            prop = {"title": element.get("label", name)}
+
+            details = element.get("details", {})
+
+            # add values from form if keyword was not in constraints
+            values = (
+                available_values[name]
+                if name in available_values
+                else details.get("values")
+            )
+
+            # updates the properties with the values given based on the information from the element
+            _update_properties_from_element(prop, element, values)
+
+            default = defaults.get(name)
+
+            if details:
+                fields = details.get("fields")
+                if fields and (comment := fields[0].get("comment")):
+                    prop["description"] = comment
+
+            if name == "area" and isinstance(default, dict):
+                default = list(default.values())
+
+            # sometimes form returns default as array instead of string
+            if default and prop.get("type") == "string" and isinstance(default, list):
+                default = ",".join(default)
+
+            is_required = bool(element.get("required"))
+            if is_required:
+                required_list.append(name)
+
+            queryables[ecmwf_format(name)] = Annotated[
+                get_args(
+                    json_field_definition_to_python(
+                        prop,
+                        default_value=default,
+                        required=is_required,
+                    )
+                )
+            ]
+
+        return queryables
+
+    def queryables_by_values(
+        self,
+        available_values: dict[str, list[str]],
+        required_keywords: list[str],
+        defaults: dict[str, Any],
+    ) -> dict[str, Annotated[Any, FieldInfo]]:
+        """
+        Generate Annotated field definitions from available values.
+        Used by ECMWF data providers like dedt_lumi.
+
+        :param available_values: available values for each parameter
+        :param required_keywords: list of required parameters
+        :param defaults: default values for the parameters
+        :return: dict of annotated queryables
+        """
+        # Rename keywords from form with metadata mapping.
+        # Needed to map constraints like "xxxx" to eodag parameter "ecmwf:xxxx"
+        required = [ecmwf_format(k) for k in required_keywords]
+
+        queryables: dict[str, Annotated[Any, FieldInfo]] = {}
+        for name, values in available_values.items():
+            # Rename keywords from form with metadata mapping.
+            # Needed to map constraints like "xxxx" to eodag parameter "ecmwf:xxxx"
+            key = ecmwf_format(name)
+
+            queryables[key] = Annotated[
+                get_args(
+                    json_field_definition_to_python(
+                        {"type": "string", "title": name, "enum": values},
+                        default_value=defaults.get(name),
+                        required=bool(key in required),
+                    )
+                )
+            ]
+
+        return queryables
+
+    def format_as_provider_keyword(
+        self, product_type: str, properties: dict[str, Any]
+    ) -> dict[str, Any]:
+        """Return provider equivalent keyword names from EODAG keywords.
+
+        :param product_type: product type id
+        :param properties: dict of properties to be formatted
+        :return: dict of formatted properties
+        """
+        properties["productType"] = product_type
+
+        # provider product type specific conf
+        product_type_def_params = self.get_product_type_def_params(
+            product_type, format_variables=properties
+        )
+
+        # Add to the query, the queryable parameters set in the provider product type definition
+        properties.update(
+            {
+                k: v
+                for k, v in product_type_def_params.items()
+                if k not in properties.keys()
+                and k in self.config.metadata_mapping.keys()
+                and isinstance(self.config.metadata_mapping[k], list)
+            }
+        )
+        qp, _ = self.build_query_string(product_type, properties)
+
+        return qp
+
+    def _fetch_data(self, url: str) -> Any:
+        """
+        fetches from a provider elements like constraints or forms.
+
+        :param url: url from which the constraints can be fetched
+        :returns: json file content fetched from the provider
+        """
+        if not url:
+            return []
+
+        auth = (
+            self.auth
+            if hasattr(self, "auth") and isinstance(self.auth, AuthBase)
+            else None
+        )
+        timeout = getattr(self.config, "timeout", DEFAULT_SEARCH_TIMEOUT)
+        return functools.lru_cache()(fetch_json)(url, auth=auth, timeout=timeout)
 
     def normalize_results(
         self, results: RawSearchResult, **kwargs: Any
-    ) ->
+    ) -> list[EOProduct]:
         """Build :class:`~eodag.api.product._product.EOProduct` from provider result
 
         :param results: Raw provider result as single dict in list
         :param kwargs: Search arguments
         :returns: list of single :class:`~eodag.api.product._product.EOProduct`
         """
+
         product_type = kwargs.get("productType")
 
         result = results[0]

@@ -151,13 +1010,12 @@ class BuildPostSearchResult(PostJsonSearch):
                 self.config.pagination["next_page_query_obj"].format()
             )
             unpaginated_query_params = {
-                k: v
+                k: v
                 for k, v in results.query_params.items()
                 if (k, v) not in next_page_query_obj.items()
             }
         else:
            unpaginated_query_params = self.query_params
-
         # query hash, will be used to build a product id
         sorted_unpaginated_query_params = dict_items_recursive_sort(
             unpaginated_query_params

@@ -190,353 +1048,182 @@ class BuildPostSearchResult(PostJsonSearch):
             discovery_config=getattr(self.config, "discover_metadata", {}),
         )
 
-
-
+        properties = {
+            # use product_type_config as default properties
+            **getattr(self.config, "product_type_config", {}),
+            **{ecmwf_format(k): v for k, v in parsed_properties.items()},
+        }
+
+        def slugify(date_str: str) -> str:
+            return date_str.split("T")[0].replace("-", "")
 
         # build product id
-
-        product_id = "%s_%s_%s_%s" % (
-            id_prefix,
-            parsed_properties["startTimeFromAscendingNode"]
-            .split("T")[0]
-            .replace("-", ""),
-            parsed_properties["completionTimeFromAscendingNode"]
-            .split("T")[0]
-            .replace("-", ""),
-            query_hash,
-        )
-        parsed_properties["id"] = parsed_properties["title"] = product_id
-
-        # update downloadLink and orderLink
-        parsed_properties["_dc_qs"] = quote_plus(qs)
-        if parsed_properties["downloadLink"] != "Not Available":
-            parsed_properties["downloadLink"] += f"?{qs}"
-
-        # parse metadata needing downloadLink
-        dl_path = Fields("downloadLink")
-        dl_path_from_root = Child(Root(), dl_path)
-        for param, mapping in self.config.metadata_mapping.items():
-            if dl_path in mapping or dl_path_from_root in mapping:
-                parsed_properties.update(
-                    properties_from_json(parsed_properties, {param: mapping})
-                )
+        product_id = (product_type or kwargs.get("dataset") or self.provider).upper()
 
-
-
-
-
-
+        start = properties.get(START, NOT_AVAILABLE)
+        end = properties.get(END, NOT_AVAILABLE)
+
+        if start != NOT_AVAILABLE:
+            product_id += f"_{slugify(start)}"
+        if end != NOT_AVAILABLE:
+            product_id += f"_{slugify(end)}"
+
+        product_id += f"_{query_hash}"
+
+        properties["id"] = properties["title"] = product_id
+
+        # used by server mode to generate downloadlink href
+        properties["_dc_qs"] = quote_plus(qs)
 
         product = EOProduct(
             provider=self.provider,
             productType=product_type,
-            properties=
+            properties=properties,
         )
 
         return [
             product,
         ]
 
+    def count_hits(
+        self, count_url: Optional[str] = None, result_type: Optional[str] = None
+    ) -> int:
+        """Count method that will always return 1.
 
-
-
-
-
-
-
-    The available configuration parameters inherits from parent classes
-    (:class:`~eodag.plugins.search.build_search_result.BuildPostSearchResult`,
-    :class:`~eodag.plugins.search.qssearch.PostJsonSearch` and
-    :class:`~eodag.plugins.search.qssearch.QueryStringSearch`), with particularly for this plugin:
-
-    :param provider: provider name
-    :param config: Search plugin configuration:
+        :param count_url: not used, only here because this method overwrites count_hits from the parent class
+        :param result_type: not used, only here because this method overwrites count_hits from the parent class
+        :return: always 1
+        """
+        return 1
 
-        * :attr:`~eodag.config.PluginConfig.end_date_excluded` (``bool``): Set to ``False`` if provider
-          does not include end date in the search request; In this case, if the end date is at midnight,
-          the previous day will be used. default: ``True``
 
-
+class MeteoblueSearch(ECMWFSearch):
+    """MeteoblueSearch search plugin.
 
-
-
-
+    This plugin, which inherits from :class:`~eodag.plugins.search.build_search_result.ECMWFSearch`,
+    performs a POST request and uses its result to build a single :class:`~eodag.api.search_result.SearchResult`
+    object.
 
-
+    The available configuration parameters are inherited from parent classes, with some a particularity
+    for pagination for this plugin.
 
-
-
-        # needed for compatibility
-        self.config.__dict__.setdefault("pagination", {"next_page_query_obj": "{{}}"})
-
-        # parse jsonpath on init: product type specific metadata-mapping
-        for product_type in self.config.products.keys():
-            if "metadata_mapping" in self.config.products[product_type].keys():
-                self.config.products[product_type][
-                    "metadata_mapping"
-                ] = mtd_cfg_as_conversion_and_querypath(
-                    self.config.products[product_type]["metadata_mapping"]
-                )
-                # Complete and ready to use product type specific metadata-mapping
-                product_type_metadata_mapping = deepcopy(self.config.metadata_mapping)
-
-                # update config using provider product type definition metadata_mapping
-                # from another product
-                other_product_for_mapping = cast(
-                    str,
-                    self.config.products[product_type].get(
-                        "metadata_mapping_from_product", ""
-                    ),
-                )
-                if other_product_for_mapping:
-                    other_product_type_def_params = self.get_product_type_def_params(
-                        other_product_for_mapping,
-                    )
-                    product_type_metadata_mapping.update(
-                        other_product_type_def_params.get("metadata_mapping", {})
-                    )
-                # from current product
-                product_type_metadata_mapping.update(
-                    self.config.products[product_type]["metadata_mapping"]
-                )
+    :param provider: An eodag providers configuration dictionary
+    :param config: Search plugin configuration:
 
-
-
-
+        * :attr:`~eodag.config.PluginConfig.pagination` (:class:`~eodag.config.PluginConfig.Pagination`)
+          (**mandatory**): The configuration of how the pagination is done on the provider. For
+          this plugin it has the node:
 
-
-
-
+            * :attr:`~eodag.config.PluginConfig.Pagination.next_page_query_obj` (``str``): The
+              additional parameters needed to perform search. These parameters won't be included in
+              the result. This must be a json dict formatted like ``{{"foo":"bar"}}`` because it
+              will be passed to a :meth:`str.format` method before being loaded as json.
+    """
 
-    def
+    def collect_search_urls(
         self,
         prep: PreparedSearch = PreparedSearch(),
         **kwargs: Any,
-    ) ->
-        """
-
-        self._preprocess_search_params(kwargs)
+    ) -> tuple[list[str], int]:
+        """Wraps PostJsonSearch.collect_search_urls to force product count to 1
 
-
+        :param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information for the search
+        :param kwargs: keyword arguments used in the search
+        :return: list of search url and number of results
+        """
+        urls, _ = super().collect_search_urls(prep, **kwargs)
+        return urls, 1
 
-    def
-
-
+    def do_search(
+        self, prep: PreparedSearch = PreparedSearch(items_per_page=None), **kwargs: Any
+    ) -> list[dict[str, Any]]:
+        """Perform the actual search request, and return result in a single element.
 
-
-
-
-        """
-        # parse kwargs as properties as they might be needed to build the query
-        parsed_properties = properties_from_json(
-            kwargs,
-            self.config.metadata_mapping,
-        )
-        available_properties = {
-            k: v
-            for k, v in parsed_properties.items()
-            if v not in [NOT_AVAILABLE, NOT_MAPPED]
-        }
+        :param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information for the search
+        :param kwargs: keyword arguments to be used in the search
+        :return: list containing the results from the provider in json format
+        """
 
-
-
-
+        prep.url = prep.search_urls[0]
+        prep.info_message = f"Sending search request: {prep.url}"
+        prep.exception_message = (
+            f"Skipping error while searching for {self.provider}"
+            f" {self.__class__.__name__} instance"
        )
+        response = self._request(prep)
 
-
-        """Preprocess search parameters before making a request to the CDS API.
+        return [response.json()]
 
-
-
-
-
+    def build_query_string(
+        self, product_type: str, query_dict: dict[str, Any]
+    ) -> tuple[dict[str, Any], str]:
+        """Build The query string using the search parameters
 
-        :param
+        :param product_type: product type id
+        :param query_dict: keyword arguments to be used in the query string
+        :return: formatted query params and encode query string
         """
-
-        if _dc_qs is not None:
-            # if available, update search params using datacube query-string
-            _dc_qp = geojson.loads(unquote_plus(unquote_plus(_dc_qs)))
-            if "/to/" in _dc_qp.get("date", ""):
-                (
-                    params["startTimeFromAscendingNode"],
-                    params["completionTimeFromAscendingNode"],
-                ) = _dc_qp["date"].split("/to/")
-            elif "/" in _dc_qp.get("date", ""):
-                (
-                    params["startTimeFromAscendingNode"],
-                    params["completionTimeFromAscendingNode"],
-                ) = _dc_qp["date"].split("/")
-            elif _dc_qp.get("date", None):
-                params["startTimeFromAscendingNode"] = params[
-                    "completionTimeFromAscendingNode"
-                ] = _dc_qp["date"]
+        return QueryStringSearch.build_query_string(self, product_type, query_dict)
 
-            if "/" in _dc_qp.get("area", ""):
-                params["geometry"] = _dc_qp["area"].split("/")
-
-        non_none_params = {k: v for k, v in params.items() if v}
 
-
-
-
+class WekeoECMWFSearch(ECMWFSearch):
+    """
+    WekeoECMWFSearch search plugin.
 
-
-
-
-
-        ).replace(
-            "Z", "+00:00"
-        ) # before 3.11
-    )
+    This plugin, which inherits from :class:`~eodag.plugins.search.build_search_result.ECMWFSearch`,
+    performs a POST request and uses its result to build a single :class:`~eodag.api.search_result.SearchResult`
+    object. In contrast to ECMWFSearch or MeteoblueSearch, the products are only build with information
+    returned by the provider.
 
-
-
-    )
-    default_end_str = (
-        default_end_from_cfg
-        or (
-            datetime.now(timezone.utc)
-            if params.get("startTimeFromAscendingNode")
-            else mission_start_dt + timedelta(days=1)
-        ).isoformat()
-    )
+    The available configuration parameters are inherited from parent classes, with some a particularity
+    for pagination for this plugin.
 
-
-
-        )
-        params["completionTimeFromAscendingNode"] = non_none_params.get(
-            "completionTimeFromAscendingNode", default_end_str
-        )
+    :param provider: An eodag providers configuration dictionary
+    :param config: Search plugin configuration:
 
-
-
-
-        try:
-            end_date = datetime.strptime(
-                params["completionTimeFromAscendingNode"], "%Y-%m-%dT%H:%M:%SZ"
-            )
-            end_date = end_date.replace(tzinfo=tzutc())
-        except ValueError:
-            try:
-                end_date = datetime.strptime(
-                    params["completionTimeFromAscendingNode"], "%Y-%m-%dT%H:%M:%S.%fZ"
-                )
-                end_date = end_date.replace(tzinfo=tzutc())
-            except ValueError:
-                end_date = isoparse(params["completionTimeFromAscendingNode"])
-                is_datetime = False
-        start_date = isoparse(params["startTimeFromAscendingNode"])
-        if (
-            not end_date_excluded
-            and is_datetime
-            and end_date > start_date
-            and end_date == end_date.replace(hour=0, minute=0, second=0, microsecond=0)
-        ):
-            end_date += timedelta(days=-1)
-            params["completionTimeFromAscendingNode"] = end_date.isoformat()
+        * :attr:`~eodag.config.PluginConfig.pagination` (:class:`~eodag.config.PluginConfig.Pagination`)
+          (**mandatory**): The configuration of how the pagination is done on the provider. For
+          this plugin it has the node:
 
-
-
-
+            * :attr:`~eodag.config.PluginConfig.Pagination.next_page_query_obj` (``str``): The
+              additional parameters needed to perform search. These parameters won't be included in
+              the result. This must be a json dict formatted like ``{{"foo":"bar"}}`` because it
+              will be passed to a :meth:`str.format` method before being loaded as json.
+    """
 
-    def
-        self, **kwargs: Any
-    ) ->
-        """
+    def normalize_results(
+        self, results: RawSearchResult, **kwargs: Any
+    ) -> list[EOProduct]:
+        """Build :class:`~eodag.api.product._product.EOProduct` from provider result
 
-        :param
-
-        :returns:
+        :param results: Raw provider result as single dict in list
+        :param kwargs: Search arguments
+        :returns: list of single :class:`~eodag.api.product._product.EOProduct`
        """
-        constraints_file_url = getattr(self.config, "constraints_file_url", "")
-        if not constraints_file_url:
-            return {}
-        product_type = kwargs.pop("productType", None)
-        if not product_type:
-            return {}
 
-
-
-
-        if (
-            user_provider_product_type
-            and user_provider_product_type != provider_product_type
-        ):
-            raise ValidationError(
-                f"Cannot change dataset from {provider_product_type} to {user_provider_product_type}"
-            )
+        # formating of orderLink requires access to the productType value.
+        results.data = [
+            {**result, **results.product_type_def_params} for result in results
+        ]
 
-
-        default_queryables = self._get_defaults_as_queryables(product_type)
-        # remove dataset from queryables
-        default_queryables.pop("dataset", None)
+        normalized = QueryStringSearch.normalize_results(self, results, **kwargs)
 
-
+        if not normalized:
+            return normalized
 
-
-
-
-
-
-        if not constraints:
-            return default_queryables
-
-        constraint_params: Dict[str, Dict[str, Set[Any]]] = {}
-        if len(kwargs) == 0:
-            # get values from constraints without additional filters
-            for constraint in constraints:
-                for key in constraint.keys():
-                    if key in constraint_params:
-                        constraint_params[key]["enum"].update(constraint[key])
-                    else:
-                        constraint_params[key] = {}
-                        constraint_params[key]["enum"] = set(constraint[key])
-        else:
-            # get values from constraints with additional filters
-            constraints_input_params = {k: v for k, v in non_empty_kwargs.items()}
-            constraint_params = get_constraint_queryables_with_additional_params(
-                constraints, constraints_input_params, self, product_type
-            )
-            # query params that are not in constraints but might be default queryables
-            if len(constraint_params) == 1 and "not_available" in constraint_params:
-                not_queryables: Set[str] = set()
-                for constraint_param in constraint_params["not_available"]["enum"]:
-                    param = CommonQueryables.get_queryable_from_alias(constraint_param)
-                    if param in dict(
-                        CommonQueryables.model_fields, **default_queryables
-                    ):
-                        non_empty_kwargs.pop(constraint_param)
-                    else:
-                        not_queryables.add(constraint_param)
-                if not_queryables:
-                    raise ValidationError(
-                        f"parameter(s) {not_queryables} not queryable"
-                    )
-                else:
-                    # get constraints again without common queryables
-                    constraint_params = (
-                        get_constraint_queryables_with_additional_params(
-                            constraints, non_empty_kwargs, self, product_type
-                        )
-                    )
+        query_params_encoded = quote_plus(orjson.dumps(results.query_params))
+        for product in normalized:
+            properties = {**product.properties, **results.query_params}
+            properties["_dc_qs"] = query_params_encoded
+            product.properties = {ecmwf_format(k): v for k, v in properties.items()}
 
-
-        for json_param, json_mtd in constraint_params.items():
-            param = (
-                get_queryable_from_provider(
-                    json_param, self.get_metadata_mapping(product_type)
-                )
-                or json_param
-            )
-            default = kwargs.get(param, None) or self.config.products.get(
-                product_type, {}
-            ).get(param, None)
-            annotated_def = json_field_definition_to_python(
-                json_mtd, default_value=default, required=True
-            )
-            field_definitions[param] = get_args(annotated_def)
+        return normalized
 
-
-
+    def do_search(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]:
+        """Should perform the actual search request.
+
+        :param args: arguments to be used in the search
+        :param kwargs: keyword arguments to be used in the search
+        :return: list containing the results from the provider in json format
+        """
+        return QueryStringSearch.do_search(self, *args, **kwargs)
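
For readers skimming the diff, the keyword-prefixing behaviour it introduces can be summarised with a minimal, standalone sketch. The `ecmwf_format` function below is copied from the added code; the reduced `ALLOWED_KEYWORDS` set is an illustrative subset only (the real set is `ECMWF_KEYWORDS | COP_DS_KEYWORDS`), so this is not part of the released package, just a demonstration of how provider keywords are exposed with the `ecmwf:` prefix.

```python
# Illustrative sketch of the keyword prefixing added in eodag 3.1.0.
# ecmwf_format comes from the diff above; ALLOWED_KEYWORDS is a small
# stand-in subset used only for this example.
ECMWF_PREFIX = "ecmwf:"
ALLOWED_KEYWORDS = {"variable", "step", "product_type"}


def ecmwf_format(v: str) -> str:
    """Add ECMWF prefix to value v if v is a ECMWF keyword."""
    return ECMWF_PREFIX + v if v in ALLOWED_KEYWORDS else v


print(ecmwf_format("variable"))     # -> "ecmwf:variable"
print(ecmwf_format("productType"))  # -> "productType" (not an ECMWF keyword, unchanged)
```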