eodag 3.0.0b3__py3-none-any.whl → 3.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eodag/api/core.py +347 -247
- eodag/api/product/_assets.py +44 -15
- eodag/api/product/_product.py +58 -47
- eodag/api/product/drivers/__init__.py +81 -4
- eodag/api/product/drivers/base.py +65 -4
- eodag/api/product/drivers/generic.py +65 -0
- eodag/api/product/drivers/sentinel1.py +97 -0
- eodag/api/product/drivers/sentinel2.py +95 -0
- eodag/api/product/metadata_mapping.py +129 -93
- eodag/api/search_result.py +28 -12
- eodag/cli.py +61 -24
- eodag/config.py +457 -167
- eodag/plugins/apis/base.py +10 -4
- eodag/plugins/apis/ecmwf.py +53 -23
- eodag/plugins/apis/usgs.py +41 -17
- eodag/plugins/authentication/aws_auth.py +30 -18
- eodag/plugins/authentication/base.py +14 -3
- eodag/plugins/authentication/generic.py +14 -3
- eodag/plugins/authentication/header.py +14 -6
- eodag/plugins/authentication/keycloak.py +44 -25
- eodag/plugins/authentication/oauth.py +18 -4
- eodag/plugins/authentication/openid_connect.py +192 -171
- eodag/plugins/authentication/qsauth.py +12 -4
- eodag/plugins/authentication/sas_auth.py +22 -5
- eodag/plugins/authentication/token.py +95 -17
- eodag/plugins/authentication/token_exchange.py +19 -19
- eodag/plugins/base.py +4 -4
- eodag/plugins/crunch/base.py +8 -5
- eodag/plugins/crunch/filter_date.py +9 -6
- eodag/plugins/crunch/filter_latest_intersect.py +9 -8
- eodag/plugins/crunch/filter_latest_tpl_name.py +8 -8
- eodag/plugins/crunch/filter_overlap.py +9 -11
- eodag/plugins/crunch/filter_property.py +10 -10
- eodag/plugins/download/aws.py +181 -105
- eodag/plugins/download/base.py +49 -67
- eodag/plugins/download/creodias_s3.py +40 -2
- eodag/plugins/download/http.py +247 -223
- eodag/plugins/download/s3rest.py +29 -28
- eodag/plugins/manager.py +176 -41
- eodag/plugins/search/__init__.py +6 -5
- eodag/plugins/search/base.py +123 -60
- eodag/plugins/search/build_search_result.py +1046 -355
- eodag/plugins/search/cop_marine.py +132 -39
- eodag/plugins/search/creodias_s3.py +19 -68
- eodag/plugins/search/csw.py +48 -8
- eodag/plugins/search/data_request_search.py +124 -23
- eodag/plugins/search/qssearch.py +531 -310
- eodag/plugins/search/stac_list_assets.py +85 -0
- eodag/plugins/search/static_stac_search.py +23 -24
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +1295 -355
- eodag/resources/providers.yml +1819 -3010
- eodag/resources/stac.yml +3 -163
- eodag/resources/stac_api.yml +2 -2
- eodag/resources/user_conf_template.yml +115 -99
- eodag/rest/cache.py +2 -2
- eodag/rest/config.py +3 -4
- eodag/rest/constants.py +0 -1
- eodag/rest/core.py +157 -117
- eodag/rest/errors.py +181 -0
- eodag/rest/server.py +57 -339
- eodag/rest/stac.py +133 -581
- eodag/rest/types/collections_search.py +3 -3
- eodag/rest/types/eodag_search.py +41 -30
- eodag/rest/types/queryables.py +42 -32
- eodag/rest/types/stac_search.py +15 -16
- eodag/rest/utils/__init__.py +14 -21
- eodag/rest/utils/cql_evaluate.py +6 -6
- eodag/rest/utils/rfc3339.py +2 -2
- eodag/types/__init__.py +153 -32
- eodag/types/bbox.py +2 -2
- eodag/types/download_args.py +4 -4
- eodag/types/queryables.py +183 -73
- eodag/types/search_args.py +6 -6
- eodag/types/whoosh.py +127 -3
- eodag/utils/__init__.py +228 -106
- eodag/utils/exceptions.py +47 -26
- eodag/utils/import_system.py +2 -2
- eodag/utils/logging.py +37 -77
- eodag/utils/repr.py +65 -6
- eodag/utils/requests.py +13 -15
- eodag/utils/rest.py +2 -2
- eodag/utils/s3.py +231 -0
- eodag/utils/stac_reader.py +11 -11
- {eodag-3.0.0b3.dist-info → eodag-3.1.0.dist-info}/METADATA +81 -81
- eodag-3.1.0.dist-info/RECORD +113 -0
- {eodag-3.0.0b3.dist-info → eodag-3.1.0.dist-info}/WHEEL +1 -1
- {eodag-3.0.0b3.dist-info → eodag-3.1.0.dist-info}/entry_points.txt +5 -2
- eodag/resources/constraints/climate-dt.json +0 -13
- eodag/resources/constraints/extremes-dt.json +0 -8
- eodag/utils/constraints.py +0 -244
- eodag-3.0.0b3.dist-info/RECORD +0 -110
- {eodag-3.0.0b3.dist-info → eodag-3.1.0.dist-info}/LICENSE +0 -0
- {eodag-3.0.0b3.dist-info → eodag-3.1.0.dist-info}/top_level.txt +0 -0
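The largest change in this release is the rework of `eodag/plugins/search/build_search_result.py` (+1046/-355 in the list above), which the hunks below appear to come from: the former `BuildPostSearchResult`-style plugins are replaced by an `ECMWFSearch` plugin that builds queryables from provider constraints and form endpoints and prefixes ECMWF/Copernicus keywords with `ecmwf:`. As a rough illustration only (not part of the diff), the sketch below shows how this queryables discovery is typically driven through eodag's public API; the product type identifier is an assumed placeholder.

```python
# Illustrative sketch, not part of the released diff.
# Assumes eodag >= 3.1.0; "ERA5_SL" is a placeholder product type id —
# use dag.list_product_types() to find real identifiers.
from eodag import EODataAccessGateway

dag = EODataAccessGateway()

# list_queryables() asks the search plugin (ECMWFSearch for CDS-like providers)
# to build queryable parameters, e.g. from the provider constraints/form endpoints.
queryables = dag.list_queryables(productType="ERA5_SL")

# ECMWF/Copernicus-specific keywords are exposed with the "ecmwf:" prefix
# introduced in this version (ECMWF_PREFIX in the diff below).
print([k for k in queryables if k.startswith("ecmwf:")])
```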
@@ -17,115 +17,979 @@
 # limitations under the License.
 from __future__ import annotations
 
+import functools
 import hashlib
 import logging
-
-from
+import re
+from collections import OrderedDict
+from datetime import datetime, timedelta
+from typing import TYPE_CHECKING, Annotated, Any, Optional, Union
 from urllib.parse import quote_plus, unquote_plus
 
 import geojson
 import orjson
 from dateutil.parser import isoparse
-from
-from
+from dateutil.tz import tzutc
+from dateutil.utils import today
+from pydantic import Field
 from pydantic.fields import FieldInfo
+from requests.auth import AuthBase
+from shapely.geometry.base import BaseGeometry
 from typing_extensions import get_args
 
 from eodag.api.product import EOProduct
 from eodag.api.product.metadata_mapping import (
     NOT_AVAILABLE,
-
-
-    mtd_cfg_as_conversion_and_querypath,
+    OFFLINE_STATUS,
+    format_metadata,
     properties_from_json,
 )
 from eodag.api.search_result import RawSearchResult
 from eodag.plugins.search import PreparedSearch
-from eodag.plugins.search.
-from eodag.
-from eodag.types import
-from eodag.types.queryables import CommonQueryables
+from eodag.plugins.search.qssearch import PostJsonSearch, QueryStringSearch
+from eodag.types import json_field_definition_to_python
+from eodag.types.queryables import Queryables, QueryablesDict
 from eodag.utils import (
     DEFAULT_MISSION_START_DATE,
-
+    DEFAULT_SEARCH_TIMEOUT,
     deepcopy,
     dict_items_recursive_sort,
     get_geometry_from_various,
-
-from eodag.utils.constraints import (
-    fetch_constraints,
-    get_constraint_queryables_with_additional_params,
+    is_range_in_range,
 )
 from eodag.utils.exceptions import ValidationError
+from eodag.utils.requests import fetch_json
 
 if TYPE_CHECKING:
     from eodag.config import PluginConfig
 
 logger = logging.getLogger("eodag.search.build_search_result")
 
+ECMWF_PREFIX = "ecmwf:"
+
+# keywords from ECMWF keyword database + "dataset" (not part of database but exists)
+# database: https://confluence.ecmwf.int/display/UDOC/Keywords+in+MARS+and+Dissemination+requests
+ECMWF_KEYWORDS = {
+    "dataset",
+    "accuracy",
+    "activity",
+    "anoffset",
+    "bitmap",
+    "block",
+    "channel",
+    "class",
+    "database",
+    "date",
+    "diagnostic",
+    "direction",
+    "domain",
+    "duplicates",
+    "expect",
+    "expver",
+    "fcmonth",
+    "fcperiod",
+    "fieldset",
+    "filter",
+    "format",
+    "frame",
+    "frequency",
+    "generation",
+    "grid",
+    "hdate",
+    "ident",
+    "interpolation",
+    "intgrid",
+    "iteration",
+    "latitude",
+    "levelist",
+    "levtype",
+    "longitude",
+    "lsm",
+    "method",
+    "number",
+    "obsgroup",
+    "obstype",
+    "origin",
+    "packing",
+    "padding",
+    "param",
+    "priority",
+    "product",
+    "range",
+    "realization",
+    "refdate",
+    "reference",
+    "reportype",
+    "repres",
+    "resolution",
+    "rotation",
+    "section",
+    "source",
+    "step",
+    "stream",
+    "system",
+    "target",
+    "time",
+    "truncation",
+    "type",
+    "use",
+}
+
+# additional keywords from copernicus services
+COP_DS_KEYWORDS = {
+    "aerosol_type",
+    "altitude",
+    "product_type",
+    "band",
+    "cdr_type",
+    "data_format",
+    "dataset_type",
+    "day",
+    "download_format",
+    "ensemble_member",
+    "experiment",
+    "forcing_type",
+    "gcm",
+    "hday",
+    "hmonth",
+    "horizontal_resolution",
+    "hydrological_model",
+    "hydrological_year",
+    "hyear",
+    "input_observations",
+    "leadtime_hour",
+    "leadtime_month",
+    "level",
+    "location",
+    "model",
+    "model_level",
+    "model_levels",
+    "month",
+    "nominal_day",
+    "originating_centre",
+    "period",
+    "pressure_level",
+    "processing_level",
+    "processing_type",
+    "product_version",
+    "quantity",
+    "rcm",
+    "region",
+    "release_version",
+    "satellite",
+    "sensor",
+    "sensor_and_algorithm",
+    "soil_level",
+    "sky_type",
+    "statistic",
+    "system_version",
+    "temporal_aggregation",
+    "time_aggregation",
+    "time_reference",
+    "time_step",
+    "variable",
+    "variable_type",
+    "version",
+    "year",
+}
+
+ALLOWED_KEYWORDS = ECMWF_KEYWORDS | COP_DS_KEYWORDS
+
+END = "completionTimeFromAscendingNode"
+
+START = "startTimeFromAscendingNode"
+
+
+def ecmwf_mtd() -> dict[str, Any]:
+    """
+    Make metadata mapping dict from a list of defined ECMWF Keywords
 
-
-    """BuildPostSearchResult search plugin.
+    We automatically add the #to_geojson convert to prevent modification of entries by eval() in the metadata mapping.
 
-
-
-
+    keyword:
+      - keyword
+      - $."keyword"#to_geojson
+
+    :return: metadata mapping dict
+    """
+    return {k: [k, f'{{$."{k}"#to_geojson}}'] for k in ALLOWED_KEYWORDS}
+
+
+def _update_properties_from_element(
+    prop: dict[str, Any], element: dict[str, Any], values: list[str]
+) -> None:
+    """updates a property dict with the given values based on the information from the element dict
+    e.g. the type is set based on the type of the element
+    """
+    # multichoice elements are transformed into array
+    if element["type"] in ("StringListWidget", "StringListArrayWidget"):
+        prop["type"] = "array"
+        if values:
+            prop["items"] = {"type": "string", "enum": sorted(values)}
+
+    # single choice elements are transformed into string
+    elif element["type"] in (
+        "StringChoiceWidget",
+        "DateRangeWidget",
+        "FreeformInputWidget",
+    ):
+        prop["type"] = "string"
+        if values:
+            prop["enum"] = sorted(values)
+
+    # a bbox element
+    elif element["type"] in ["GeographicExtentWidget", "GeographicExtentMapWidget"]:
+        prop.update(
+            {
+                "type": "array",
+                "minItems": 4,
+                "additionalItems": False,
+                "items": [
+                    {
+                        "type": "number",
+                        "maximum": 180,
+                        "minimum": -180,
+                        "description": "West border of the bounding box",
+                    },
+                    {
+                        "type": "number",
+                        "maximum": 90,
+                        "minimum": -90,
+                        "description": "South border of the bounding box",
+                    },
+                    {
+                        "type": "number",
+                        "maximum": 180,
+                        "minimum": -180,
+                        "description": "East border of the bounding box",
+                    },
+                    {
+                        "type": "number",
+                        "maximum": 90,
+                        "minimum": -90,
+                        "description": "North border of the bounding box",
+                    },
+                ],
+            }
+        )
+
+    # DateRangeWidget is a calendar date picker
+    if element["type"] == "DateRangeWidget":
+        prop["description"] = "date formatted like yyyy-mm-dd/yyyy-mm-dd"
 
-
-
+    if description := element.get("help"):
+        prop["description"] = description
 
-    - **api_endpoint**: (mandatory) The endpoint of the provider's search interface
 
-
-
+def ecmwf_format(v: str) -> str:
+    """Add ECMWF prefix to value v if v is a ECMWF keyword."""
+    return ECMWF_PREFIX + v if v in ALLOWED_KEYWORDS else v
 
-
-
-
-
+
+class ECMWFSearch(PostJsonSearch):
+    """ECMWF search plugin.
+
+    This plugin builds a :class:`~eodag.api.search_result.SearchResult` containing a single product
+    using given query parameters as product properties.
+
+    The available configuration parameters inherits from parent classes, with some particular parameters
+    for this plugin.
 
     :param provider: An eodag providers configuration dictionary
-    :param config:
+    :param config: Search plugin configuration:
+
+        * :attr:`~eodag.config.PluginConfig.remove_from_query` (``list[str]``): List of parameters
+          used to parse metadata but that must not be included to the query
+        * :attr:`~eodag.config.PluginConfig.end_date_excluded` (``bool``): Set to `False` if
+          provider does not include end date to search
+        * :attr:`~eodag.config.PluginConfig.discover_queryables`
+          (:class:`~eodag.config.PluginConfig.DiscoverQueryables`): configuration to fetch the queryables from a
+          provider queryables endpoint; It has the following keys:
+
+          * :attr:`~eodag.config.PluginConfig.DiscoverQueryables.fetch_url` (``str``): url to fetch the queryables valid
+            for all product types
+          * :attr:`~eodag.config.PluginConfig.DiscoverQueryables.product_type_fetch_url` (``str``): url to fetch the
+            queryables for a specific product type
+          * :attr:`~eodag.config.PluginConfig.DiscoverQueryables.constraints_url` (``str``): url of the constraint file
+            used to build queryables
     """
 
-    def
-
-
-
-
+    def __init__(self, provider: str, config: PluginConfig) -> None:
+        config.metadata_mapping = {
+            **ecmwf_mtd(),
+            **{
+                "id": "$.id",
+                "title": "$.id",
+                "storageStatus": OFFLINE_STATUS,
+                "downloadLink": "$.null",
+                "geometry": ["feature", "$.geometry"],
+                "defaultGeometry": "POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90))",
+            },
+            **config.metadata_mapping,
+        }
 
-
+        super().__init__(provider, config)
+
+        # ECMWF providers do not feature any api_endpoint or next_page_query_obj.
+        # Searched is faked by EODAG.
+        self.config.__dict__.setdefault("api_endpoint", "")
+        self.config.pagination.setdefault("next_page_query_obj", "{{}}")
+
+    def do_search(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]:
+        """Should perform the actual search request.
+
+        :param args: arguments to be used in the search
+        :param kwargs: keyword arguments to be used in the search
+        :return: list containing the results from the provider in json format
+        """
+        # no real search. We fake it all
+        return [{}]
+
+    def query(
         self,
         prep: PreparedSearch = PreparedSearch(),
         **kwargs: Any,
-    ) ->
-        """
-        urls, _ = super(BuildPostSearchResult, self).collect_search_urls(prep, **kwargs)
-        return urls, 1
+    ) -> tuple[list[EOProduct], Optional[int]]:
+        """Build ready-to-download SearchResult
 
-
-
-
-        """
-
-
-
-
-
+        :param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information needed for the search
+        :param kwargs: keyword arguments to be used in the search
+        :returns: list of products and number of products (optional)
+        """
+        product_type = prep.product_type
+        if not product_type:
+            product_type = kwargs.get("productType", None)
+        kwargs = self._preprocess_search_params(kwargs, product_type)
+        result, num_items = super().query(prep, **kwargs)
+        if prep.count and not num_items:
+            num_items = 1
+
+        return result, num_items
+
+    def clear(self) -> None:
+        """Clear search context"""
+        super().clear()
+
+    def build_query_string(
+        self, product_type: str, query_dict: dict[str, Any]
+    ) -> tuple[dict[str, Any], str]:
+        """Build The query string using the search parameters
+
+        :param product_type: product type id
+        :param query_dict: keyword arguments to be used in the query string
+        :return: formatted query params and encode query string
+        """
+        query_dict["_date"] = f"{query_dict.get(START)}/{query_dict.get(END)}"
+
+        # Reorder kwargs to make sure year/month/day/time if set overwrite default datetime.
+        priority_keys = [
+            START,
+            END,
+        ]
+        ordered_kwargs = {k: query_dict[k] for k in priority_keys if k in query_dict}
+        ordered_kwargs.update(query_dict)
+
+        return super().build_query_string(
+            product_type=product_type, query_dict=ordered_kwargs
         )
-        response = self._request(prep)
 
-
+    def _preprocess_search_params(
+        self, params: dict[str, Any], product_type: Optional[str]
+    ) -> dict[str, Any]:
+        """Preprocess search parameters before making a request to the CDS API.
+
+        This method is responsible for checking and updating the provided search parameters
+        to ensure that required parameters like 'productType', 'startTimeFromAscendingNode',
+        'completionTimeFromAscendingNode', and 'geometry' are properly set. If not specified
+        in the input parameters, default values or values from the configuration are used.
+
+        :param params: Search parameters to be preprocessed.
+        :param product_type: (optional) product type id
+        """
+        _dc_qs = params.get("_dc_qs", None)
+        if _dc_qs is not None:
+            # if available, update search params using datacube query-string
+            _dc_qp = geojson.loads(unquote_plus(unquote_plus(_dc_qs)))
+            if "/to/" in _dc_qp.get("date", ""):
+                params[START], params[END] = _dc_qp["date"].split("/to/")
+            elif "/" in _dc_qp.get("date", ""):
+                (params[START], params[END],) = _dc_qp[
+                    "date"
+                ].split("/")
+            elif _dc_qp.get("date", None):
+                params[START] = params[END] = _dc_qp["date"]
+
+            if "/" in _dc_qp.get("area", ""):
+                params["geometry"] = _dc_qp["area"].split("/")
+
+        params = {
+            k.removeprefix(ECMWF_PREFIX): v for k, v in params.items() if v is not None
+        }
+
+        # dates
+        # check if default dates have to be added
+        if getattr(self.config, "dates_required", False):
+            self._check_date_params(params, product_type)
+
+        # adapt end date if it is midnight
+        if END in params:
+            end_date_excluded = getattr(self.config, "end_date_excluded", True)
+            is_datetime = True
+            try:
+                end_date = datetime.strptime(params[END], "%Y-%m-%dT%H:%M:%SZ")
+                end_date = end_date.replace(tzinfo=tzutc())
+            except ValueError:
+                try:
+                    end_date = datetime.strptime(
+                        params[END],
+                        "%Y-%m-%dT%H:%M:%S.%fZ",
+                    )
+                    end_date = end_date.replace(tzinfo=tzutc())
+                except ValueError:
+                    end_date = isoparse(params[END])
+                    is_datetime = False
+            start_date = isoparse(params[START])
+            if (
+                not end_date_excluded
+                and is_datetime
+                and end_date > start_date
+                and end_date
+                == end_date.replace(hour=0, minute=0, second=0, microsecond=0)
+            ):
+                end_date += timedelta(days=-1)
+                params[END] = end_date.isoformat()
+
+        # geometry
+        if "geometry" in params:
+            params["geometry"] = get_geometry_from_various(geometry=params["geometry"])
+
+        return params
+
+    def _check_date_params(
+        self, keywords: dict[str, Any], product_type: Optional[str]
+    ) -> None:
+        """checks if start and end date are present in the keywords and adds them if not"""
+
+        if START and END in keywords:
+            return
+
+        product_type_conf = getattr(self.config, "metadata_mapping", {})
+        if (
+            product_type
+            and product_type in self.config.products
+            and "metadata_mapping" in self.config.products[product_type]
+        ):
+            product_type_conf = self.config.products[product_type]["metadata_mapping"]
+
+        # start time given, end time missing
+        if START in keywords:
+            keywords[END] = (
+                keywords[START]
+                if END in product_type_conf
+                else self.get_product_type_cfg_value(
+                    "missionEndDate", today().isoformat()
+                )
+            )
+            return
+
+        if END in product_type_conf:
+            mapping = product_type_conf[START]
+            if not isinstance(mapping, list):
+                mapping = product_type_conf[END]
+            if isinstance(mapping, list):
+                # get time parameters (date, year, month, ...) from metadata mapping
+                input_mapping = mapping[0].replace("{{", "").replace("}}", "")
+                time_params = [
+                    values.split(":")[0].strip() for values in input_mapping.split(",")
+                ]
+                time_params = [
+                    tp.replace('"', "").replace("'", "") for tp in time_params
+                ]
+                # if startTime is not given but other time params (e.g. year/month/(day)) are given,
+                # no default date is required
+                in_keywords = True
+                for tp in time_params:
+                    if tp not in keywords:
+                        in_keywords = False
+                        break
+                if not in_keywords:
+                    keywords[START] = self.get_product_type_cfg_value(
+                        "missionStartDate", DEFAULT_MISSION_START_DATE
+                    )
+                    keywords[END] = (
+                        keywords[START]
+                        if END in product_type_conf
+                        else self.get_product_type_cfg_value(
+                            "missionEndDate", today().isoformat()
+                        )
+                    )
+
+    def _get_product_type_queryables(
+        self, product_type: Optional[str], alias: Optional[str], filters: dict[str, Any]
+    ) -> QueryablesDict:
+        """Override to set additional_properties to false."""
+        default_values: dict[str, Any] = deepcopy(
+            getattr(self.config, "products", {}).get(product_type, {})
+        )
+        default_values.pop("metadata_mapping", None)
+
+        filters["productType"] = product_type
+        queryables = self.discover_queryables(**{**default_values, **filters}) or {}
+
+        return QueryablesDict(additional_properties=False, **queryables)
+
+    def discover_queryables(
+        self, **kwargs: Any
+    ) -> Optional[dict[str, Annotated[Any, FieldInfo]]]:
+        """Fetch queryables list from provider using its constraints file
+
+        :param kwargs: additional filters for queryables (`productType` and other search
+            arguments)
+        :returns: fetched queryable parameters dict
+        """
+        product_type = kwargs.pop("productType")
+
+        pt_config = self.get_product_type_def_params(product_type)
+
+        default_values = deepcopy(pt_config)
+        default_values.pop("metadata_mapping", None)
+        filters = {**default_values, **kwargs}
+
+        if "start" in filters:
+            filters[START] = filters.pop("start")
+        if "end" in filters:
+            filters[END] = filters.pop("end")
+
+        # extract default datetime
+        processed_filters = self._preprocess_search_params(
+            deepcopy(filters), product_type
+        )
+
+        constraints_url = format_metadata(
+            getattr(self.config, "discover_queryables", {}).get("constraints_url", ""),
+            **filters,
+        )
+        constraints: list[dict[str, Any]] = self._fetch_data(constraints_url)
+
+        form_url = format_metadata(
+            getattr(self.config, "discover_queryables", {}).get("form_url", ""),
+            **filters,
+        )
+        form: list[dict[str, Any]] = self._fetch_data(form_url)
+
+        formated_filters = self.format_as_provider_keyword(
+            product_type, processed_filters
+        )
+        # we re-apply kwargs input to consider override of year, month, day and time.
+        for k, v in {**default_values, **kwargs}.items():
+            key = k.removeprefix(ECMWF_PREFIX)
+
+            if key not in ALLOWED_KEYWORDS | {
+                START,
+                END,
+                "geom",
+                "geometry",
+            }:
+                raise ValidationError(
+                    f"{key} is not a queryable parameter for {self.provider}"
+                )
+
+            formated_filters[key] = v
+
+        # we use non empty filters as default to integrate user inputs
+        # it is needed because pydantic json schema does not represent "value"
+        # but only "default"
+        non_empty_formated: dict[str, Any] = {
+            k: v
+            for k, v in formated_filters.items()
+            if v and (not isinstance(v, list) or all(v))
+        }
+
+        required_keywords: set[str] = set()
+
+        # calculate available values
+        if constraints:
+            # Apply constraints filtering
+            available_values = self.available_values_from_constraints(
+                constraints,
+                non_empty_formated,
+                form_keywords=[f["name"] for f in form],
+            )
+
+            # Pre-compute the required keywords (present in all constraint dicts)
+            # when form, required keywords are extracted directly from form
+            if not form:
+                required_keywords = set.intersection(
+                    *(map(lambda d: set(d.keys()), constraints))
+                )
+
+        else:
+            values_url = getattr(self.config, "available_values_url", "")
+            if not values_url:
+                return self.queryables_from_metadata_mapping(product_type)
+            if "{" in values_url:
+                values_url = values_url.format(**filters)
+            data = self._fetch_data(values_url)
+            available_values = data["constraints"]
+            required_keywords = data.get("required", [])
+
+        # To check if all keywords are queryable parameters, we check if they are in the
+        # available values or the product type config (available values calculated from the
+        # constraints might not include all queryables)
+        for keyword in filters:
+            if (
+                keyword
+                not in available_values.keys()
+                | pt_config.keys()
+                | {
+                    START,
+                    END,
+                    "geom",
+                }
+                and keyword not in [f["name"] for f in form]
+                and keyword.removeprefix(ECMWF_PREFIX)
+                not in set(list(available_values.keys()) + [f["name"] for f in form])
+            ):
+                raise ValidationError(f"{keyword} is not a queryable parameter")
+
+        # generate queryables
+        if form:
+            queryables = self.queryables_by_form(
+                form,
+                available_values,
+                non_empty_formated,
+            )
+        else:
+            queryables = self.queryables_by_values(
+                available_values, list(required_keywords), non_empty_formated
+            )
+
+        # ecmwf:date is replaced by start and end.
+        # start and end filters are supported whenever combinations of "year", "month", "day" filters exist
+        if (
+            queryables.pop(f"{ECMWF_PREFIX}date", None)
+            or f"{ECMWF_PREFIX}year" in queryables
+            or f"{ECMWF_PREFIX}hyear" in queryables
+        ):
+            queryables.update(
+                {
+                    "start": Queryables.get_with_default(
+                        "start", processed_filters.get(START)
+                    ),
+                    "end": Queryables.get_with_default(
+                        "end",
+                        processed_filters.get(END),
+                    ),
+                }
+            )
+
+        # area is geom in EODAG.
+        if queryables.pop("area", None):
+            queryables["geom"] = Annotated[
+                Union[str, dict[str, float], BaseGeometry],
+                Field(
+                    None,
+                    description="Read EODAG documentation for all supported geometry format.",
+                ),
+            ]
+
+        return queryables
+
+    def available_values_from_constraints(
+        self,
+        constraints: list[dict[str, Any]],
+        input_keywords: dict[str, Any],
+        form_keywords: list[str],
+    ) -> dict[str, list[str]]:
+        """
+        Filter constraints using input_keywords. Return list of available queryables.
+        All constraint entries must have the same parameters.
+
+        :param constraints: list of constraints received from the provider
+        :param input_keywords: dict of input parameters given by the user
+        :param form_keywords: list of keyword names from the provider form endpoint
+        :return: dict with available values for each parameter
+        """
+        # get ordered constraint keywords
+        constraints_keywords = list(
+            OrderedDict.fromkeys(k for c in constraints for k in c.keys())
+        )
+
+        # prepare ordered input keywords formatted as provider's keywords
+        # required to filter with constraints
+        ordered_keywords = (
+            [kw for kw in form_keywords if kw in constraints_keywords]
+            if form_keywords
+            else constraints_keywords
+        )
+
+        # filter constraint entries matching input keyword values
+        filtered_constraints: list[dict[str, Any]]
+
+        parsed_keywords: list[str] = []
+        for keyword in ordered_keywords:
+            values = input_keywords.get(keyword)
+
+            if values is None:
+                parsed_keywords.append(keyword)
+                continue
+
+            # we only compare list of strings.
+            if isinstance(values, dict):
+                raise ValidationError(
+                    f"Parameter value as object is not supported: {keyword}={values}"
+                )
+
+            # We convert every single value to a list of string
+            filter_v = values if isinstance(values, (list, tuple)) else [values]
+
+            # We strip values of superfluous quotes (added by mapping converter to_geojson).
+            # ECMWF accept values with /to/. We need to split it to an array
+            # ECMWF accept values in format val1/val2. We need to split it to an array
+            sep = re.compile(r"/to/|/")
+            filter_v = [i for v in filter_v for i in sep.split(str(v))]
+
+            # special handling for time 0000 converted to 0 by pre-formating with metadata_mapping
+            if keyword.split(":")[-1] == "time":
+                filter_v = ["0000" if str(v) == "0" else v for v in filter_v]
+
+            # Collect missing values to report errors
+            missing_values = set(filter_v)
+
+            # Filter constraints and check for missing values
+            filtered_constraints = []
+            for entry in constraints:
+                # Filter based on the presence of any value in filter_v
+                entry_values = entry.get(keyword, [])
+
+                # date constraint may be intervals. We identify intervals with a "/" in the value
+                # we assume that if the first value is an interval, all values are intervals
+                present_values = []
+                if keyword == "date" and "/" in entry[keyword][0]:
+                    input_range = values
+                    if isinstance(values, list):
+                        input_range = values[0]
+                    if any(is_range_in_range(x, input_range) for x in entry[keyword]):
+                        present_values = filter_v
+                else:
+                    present_values = [
+                        value for value in filter_v if value in entry_values
+                    ]
+
+                # Remove present values from the missing_values set
+                missing_values -= set(present_values)
+
+                if present_values:
+                    filtered_constraints.append(entry)
+
+            # raise an error as no constraint entry matched the input keywords
+            # raise an error if one value from input is not allowed
+            if not filtered_constraints or missing_values:
+                allowed_values = list(
+                    {value for c in constraints for value in c.get(keyword, [])}
+                )
+                # restore ecmwf: prefix before raising error
+                keyword = ECMWF_PREFIX + keyword
+
+                all_keywords_str = ""
+                if len(parsed_keywords) > 1:
+                    keywords = [
+                        f"{ECMWF_PREFIX + k}={pk}"
+                        for k in parsed_keywords
+                        if (pk := input_keywords.get(k))
+                    ]
+                    all_keywords_str = f" with {', '.join(keywords)}"
+
+                raise ValidationError(
+                    f"{keyword}={values} is not available"
+                    f"{all_keywords_str}."
+                    f" Allowed values are {', '.join(allowed_values)}."
+                )
+
+            parsed_keywords.append(keyword)
+            constraints = filtered_constraints
+
+        available_values: dict[str, Any] = {k: set() for k in ordered_keywords}
+
+        # we aggregate the constraint entries left
+        for entry in constraints:
+            for key, value in entry.items():
+                available_values[key].update(value)
+
+        return {k: list(v) for k, v in available_values.items()}
+
+    def queryables_by_form(
+        self,
+        form: list[dict[str, Any]],
+        available_values: dict[str, list[str]],
+        defaults: dict[str, Any],
+    ) -> dict[str, Annotated[Any, FieldInfo]]:
+        """
+        Generate Annotated field definitions from form entries and available values
+        Used by Copernicus services like cop_cds, cop_ads, cop_ewds.
+
+        :param form: data fetched from the form endpoint of the provider
+        :param available_values: available values for each parameter
+        :param defaults: default values for the parameters
+        :return: dict of annotated queryables
+        """
+        queryables: dict[str, Annotated[Any, FieldInfo]] = {}
+
+        required_list: list[str] = []
+        for element in form:
+            name: str = element["name"]
+
+            # those are not parameter elements.
+            if name in ("area_group", "global", "warning", "licences"):
+                continue
+            if "type" not in element or element["type"] == "FreeEditionWidget":
+                # FreeEditionWidget used to select the whole available region
+                # and to provide comments for the dataset
+                continue
+
+            # ordering done by id -> set id to high value if not present -> element will be last
+            if "id" not in element:
+                element["id"] = 100
+
+            prop = {"title": element.get("label", name)}
+
+            details = element.get("details", {})
+
+            # add values from form if keyword was not in constraints
+            values = (
+                available_values[name]
+                if name in available_values
+                else details.get("values")
+            )
+
+            # updates the properties with the values given based on the information from the element
+            _update_properties_from_element(prop, element, values)
+
+            default = defaults.get(name)
+
+            if details:
+                fields = details.get("fields")
+                if fields and (comment := fields[0].get("comment")):
+                    prop["description"] = comment
+
+            if name == "area" and isinstance(default, dict):
+                default = list(default.values())
+
+            # sometimes form returns default as array instead of string
+            if default and prop.get("type") == "string" and isinstance(default, list):
+                default = ",".join(default)
+
+            is_required = bool(element.get("required"))
+            if is_required:
+                required_list.append(name)
+
+            queryables[ecmwf_format(name)] = Annotated[
+                get_args(
+                    json_field_definition_to_python(
+                        prop,
+                        default_value=default,
+                        required=is_required,
+                    )
+                )
+            ]
+
+        return queryables
+
+    def queryables_by_values(
+        self,
+        available_values: dict[str, list[str]],
+        required_keywords: list[str],
+        defaults: dict[str, Any],
+    ) -> dict[str, Annotated[Any, FieldInfo]]:
+        """
+        Generate Annotated field definitions from available values.
+        Used by ECMWF data providers like dedt_lumi.
+
+        :param available_values: available values for each parameter
+        :param required_keywords: list of required parameters
+        :param defaults: default values for the parameters
+        :return: dict of annotated queryables
+        """
+        # Rename keywords from form with metadata mapping.
+        # Needed to map constraints like "xxxx" to eodag parameter "ecmwf:xxxx"
+        required = [ecmwf_format(k) for k in required_keywords]
+
+        queryables: dict[str, Annotated[Any, FieldInfo]] = {}
+        for name, values in available_values.items():
+            # Rename keywords from form with metadata mapping.
+            # Needed to map constraints like "xxxx" to eodag parameter "ecmwf:xxxx"
+            key = ecmwf_format(name)
+
+            queryables[key] = Annotated[
+                get_args(
+                    json_field_definition_to_python(
+                        {"type": "string", "title": name, "enum": values},
+                        default_value=defaults.get(name),
+                        required=bool(key in required),
+                    )
+                )
+            ]
+
+        return queryables
+
+    def format_as_provider_keyword(
+        self, product_type: str, properties: dict[str, Any]
+    ) -> dict[str, Any]:
+        """Return provider equivalent keyword names from EODAG keywords.
+
+        :param product_type: product type id
+        :param properties: dict of properties to be formatted
+        :return: dict of formatted properties
+        """
+        properties["productType"] = product_type
+
+        # provider product type specific conf
+        product_type_def_params = self.get_product_type_def_params(
+            product_type, format_variables=properties
+        )
+
+        # Add to the query, the queryable parameters set in the provider product type definition
+        properties.update(
+            {
+                k: v
+                for k, v in product_type_def_params.items()
+                if k not in properties.keys()
+                and k in self.config.metadata_mapping.keys()
+                and isinstance(self.config.metadata_mapping[k], list)
+            }
+        )
+        qp, _ = self.build_query_string(product_type, properties)
+
+        return qp
+
+    def _fetch_data(self, url: str) -> Any:
+        """
+        fetches from a provider elements like constraints or forms.
+
+        :param url: url from which the constraints can be fetched
+        :returns: json file content fetched from the provider
+        """
+        if not url:
+            return []
+
+        auth = (
+            self.auth
+            if hasattr(self, "auth") and isinstance(self.auth, AuthBase)
+            else None
+        )
+        timeout = getattr(self.config, "timeout", DEFAULT_SEARCH_TIMEOUT)
+        return functools.lru_cache()(fetch_json)(url, auth=auth, timeout=timeout)
 
     def normalize_results(
         self, results: RawSearchResult, **kwargs: Any
-    ) ->
+    ) -> list[EOProduct]:
         """Build :class:`~eodag.api.product._product.EOProduct` from provider result
 
         :param results: Raw provider result as single dict in list
         :param kwargs: Search arguments
         :returns: list of single :class:`~eodag.api.product._product.EOProduct`
         """
+
         product_type = kwargs.get("productType")
 
         result = results[0]
@@ -146,13 +1010,12 @@ class BuildPostSearchResult(PostJsonSearch):
                 self.config.pagination["next_page_query_obj"].format()
             )
             unpaginated_query_params = {
-                k: v
+                k: v
                 for k, v in results.query_params.items()
                 if (k, v) not in next_page_query_obj.items()
            }
        else:
            unpaginated_query_params = self.query_params
-
        # query hash, will be used to build a product id
        sorted_unpaginated_query_params = dict_items_recursive_sort(
            unpaginated_query_params
@@ -178,361 +1041,189 @@ class BuildPostSearchResult(PostJsonSearch):
|
|
|
178
1041
|
result.update(results.product_type_def_params)
|
|
179
1042
|
result = dict(result, **{k: v for k, v in kwargs.items() if v is not None})
|
|
180
1043
|
|
|
181
|
-
# parse
|
|
1044
|
+
# parse properties
|
|
182
1045
|
parsed_properties = properties_from_json(
|
|
183
1046
|
result,
|
|
184
1047
|
self.config.metadata_mapping,
|
|
185
1048
|
discovery_config=getattr(self.config, "discover_metadata", {}),
|
|
186
1049
|
)
|
|
187
1050
|
|
|
188
|
-
|
|
189
|
-
|
|
1051
|
+
properties = {
|
|
1052
|
+
# use product_type_config as default properties
|
|
1053
|
+
**getattr(self.config, "product_type_config", {}),
|
|
1054
|
+
**{ecmwf_format(k): v for k, v in parsed_properties.items()},
|
|
1055
|
+
}
|
|
1056
|
+
|
|
1057
|
+
def slugify(date_str: str) -> str:
|
|
1058
|
+
return date_str.split("T")[0].replace("-", "")
|
|
190
1059
|
|
|
191
1060
|
# build product id
|
|
192
|
-
|
|
193
|
-
product_id = "%s_%s_%s_%s" % (
|
|
194
|
-
id_prefix,
|
|
195
|
-
parsed_properties["startTimeFromAscendingNode"]
|
|
196
|
-
.split("T")[0]
|
|
197
|
-
.replace("-", ""),
|
|
198
|
-
parsed_properties["completionTimeFromAscendingNode"]
|
|
199
|
-
.split("T")[0]
|
|
200
|
-
.replace("-", ""),
|
|
201
|
-
query_hash,
|
|
202
|
-
)
|
|
203
|
-
parsed_properties["id"] = parsed_properties["title"] = product_id
|
|
204
|
-
|
|
205
|
-
# update downloadLink and orderLink
|
|
206
|
-
parsed_properties["_dc_qs"] = quote_plus(qs)
|
|
207
|
-
if parsed_properties["downloadLink"] != "Not Available":
|
|
208
|
-
parsed_properties["downloadLink"] += f"?{qs}"
|
|
209
|
-
|
|
210
|
-
# parse metadata needing downloadLink
|
|
211
|
-
dl_path = Fields("downloadLink")
|
|
212
|
-
dl_path_from_root = Child(Root(), dl_path)
|
|
213
|
-
for param, mapping in self.config.metadata_mapping.items():
|
|
214
|
-
if dl_path in mapping or dl_path_from_root in mapping:
|
|
215
|
-
parsed_properties.update(
|
|
216
|
-
properties_from_json(parsed_properties, {param: mapping})
|
|
217
|
-
)
|
|
1061
|
+
product_id = (product_type or kwargs.get("dataset") or self.provider).upper()
|
|
218
1062
|
|
|
219
|
-
|
|
220
|
-
|
|
221
|
-
|
|
222
|
-
|
|
223
|
-
|
|
1063
|
+
start = properties.get(START, NOT_AVAILABLE)
|
|
1064
|
+
end = properties.get(END, NOT_AVAILABLE)
|
|
1065
|
+
|
|
1066
|
+
if start != NOT_AVAILABLE:
|
|
1067
|
+
product_id += f"_{slugify(start)}"
|
|
1068
|
+
if end != NOT_AVAILABLE:
|
|
1069
|
+
product_id += f"_{slugify(end)}"
|
|
1070
|
+
|
|
1071
|
+
product_id += f"_{query_hash}"
|
|
1072
|
+
|
|
1073
|
+
properties["id"] = properties["title"] = product_id
|
|
1074
|
+
|
|
1075
|
+
# used by server mode to generate downloadlink href
|
|
1076
|
+
properties["_dc_qs"] = quote_plus(qs)
|
|
224
1077
|
|
|
225
1078
|
product = EOProduct(
|
|
226
1079
|
provider=self.provider,
|
|
227
1080
|
productType=product_type,
|
|
228
|
-
properties=
|
|
1081
|
+
properties=properties,
|
|
229
1082
|
)
|
|
230
1083
|
|
|
231
1084
|
return [
|
|
232
1085
|
product,
|
|
233
1086
|
]
|
|
234
1087
|
|
|
1088
|
+
def count_hits(
|
|
1089
|
+
self, count_url: Optional[str] = None, result_type: Optional[str] = None
|
|
1090
|
+
) -> int:
|
|
1091
|
+
"""Count method that will always return 1.
|
|
235
1092
|
|
|
236
|
-
|
|
237
|
-
|
|
238
|
-
|
|
239
|
-
|
|
240
|
-
|
|
1093
|
+
:param count_url: not used, only here because this method overwrites count_hits from the parent class
|
|
1094
|
+
:param result_type: not used, only here because this method overwrites count_hits from the parent class
|
|
1095
|
+
:return: always 1
|
|
1096
|
+
"""
|
|
1097
|
+
return 1
|
|
241
1098
|
|
|
242
|
-
The available configuration parameters inherits from parent classes, with particularly
|
|
243
|
-
for this plugin:
|
|
244
1099
|
|
|
245
|
-
|
|
246
|
-
|
|
1100
|
+
class MeteoblueSearch(ECMWFSearch):
|
|
1101
|
+
"""MeteoblueSearch search plugin.
|
|
247
1102
|
|
|
248
|
-
|
|
249
|
-
|
|
1103
|
+
This plugin, which inherits from :class:`~eodag.plugins.search.build_search_result.ECMWFSearch`,
|
|
1104
|
+
performs a POST request and uses its result to build a single :class:`~eodag.api.search_result.SearchResult`
|
|
1105
|
+
object.
|
|
250
1106
|
|
|
251
|
-
|
|
1107
|
+
The available configuration parameters are inherited from parent classes, with some a particularity
|
|
1108
|
+
for pagination for this plugin.
|
|
252
1109
|
|
|
253
1110
|
:param provider: An eodag providers configuration dictionary
|
|
254
|
-
:param config:
|
|
255
|
-
"""
|
|
256
|
-
|
|
257
|
-
def __init__(self, provider: str, config: PluginConfig) -> None:
|
|
258
|
-
# init self.config.metadata_mapping using Search Base plugin
|
|
259
|
-
Search.__init__(self, provider, config)
|
|
260
|
-
|
|
261
|
-
self.config.__dict__.setdefault("api_endpoint", "")
|
|
262
|
-
|
|
263
|
-
# needed by QueryStringSearch.build_query_string / format_free_text_search
|
|
264
|
-
self.config.__dict__.setdefault("free_text_search_operations", {})
|
|
265
|
-
# needed for compatibility
|
|
266
|
-
self.config.__dict__.setdefault("pagination", {"next_page_query_obj": "{{}}"})
|
|
267
|
-
|
|
268
|
-
# parse jsonpath on init: product type specific metadata-mapping
|
|
269
|
-
for product_type in self.config.products.keys():
|
|
270
|
-
if "metadata_mapping" in self.config.products[product_type].keys():
|
|
271
|
-
self.config.products[product_type][
|
|
272
|
-
"metadata_mapping"
|
|
273
|
-
] = mtd_cfg_as_conversion_and_querypath(
|
|
274
|
-
self.config.products[product_type]["metadata_mapping"]
|
|
275
|
-
)
|
|
276
|
-
# Complete and ready to use product type specific metadata-mapping
|
|
277
|
-
product_type_metadata_mapping = deepcopy(self.config.metadata_mapping)
|
|
278
|
-
|
|
279
|
-
# update config using provider product type definition metadata_mapping
|
|
280
|
-
# from another product
|
|
281
|
-
other_product_for_mapping = cast(
|
|
282
|
-
str,
|
|
283
|
-
self.config.products[product_type].get(
|
|
284
|
-
"metadata_mapping_from_product", ""
|
|
285
|
-
),
|
|
286
|
-
)
|
|
287
|
-
if other_product_for_mapping:
|
|
288
|
-
other_product_type_def_params = self.get_product_type_def_params(
|
|
289
|
-
other_product_for_mapping,
|
|
290
|
-
)
|
|
291
|
-
product_type_metadata_mapping.update(
|
|
292
|
-
other_product_type_def_params.get("metadata_mapping", {})
|
|
293
|
-
)
|
|
294
|
-
# from current product
|
|
295
|
-
product_type_metadata_mapping.update(
|
|
296
|
-
self.config.products[product_type]["metadata_mapping"]
|
|
297
|
-
)
|
|
1111
|
+
:param config: Search plugin configuration:
|
|
298
1112
|
|
|
299
|
-
|
|
300
|
-
|
|
301
|
-
|
|
1113
|
+
* :attr:`~eodag.config.PluginConfig.pagination` (:class:`~eodag.config.PluginConfig.Pagination`)
|
|
1114
|
+
(**mandatory**): The configuration of how the pagination is done on the provider. For
|
|
1115
|
+
this plugin it has the node:
|
|
302
1116
|
|
|
303
|
-
|
|
304
|
-
|
|
305
|
-
|
|
1117
|
+
* :attr:`~eodag.config.PluginConfig.Pagination.next_page_query_obj` (``str``): The
|
|
1118
|
+
additional parameters needed to perform search. These parameters won't be included in
|
|
1119
|
+
the result. This must be a json dict formatted like ``{{"foo":"bar"}}`` because it
|
|
1120
|
+
will be passed to a :meth:`str.format` method before being loaded as json.
|
|
1121
|
+
"""
|
|
306
1122
|
|
|
307
|
-
def
|
|
1123
|
+
def collect_search_urls(
|
|
308
1124
|
self,
|
|
309
1125
|
prep: PreparedSearch = PreparedSearch(),
|
|
310
1126
|
**kwargs: Any,
|
|
311
|
-
) ->
|
|
312
|
-
"""
|
|
313
|
-
|
|
314
|
-
self._preprocess_search_params(kwargs)
|
|
315
|
-
|
|
316
|
-
return BuildPostSearchResult.query(self, prep, **kwargs)
|
|
317
|
-
|
|
318
|
-
def clear(self) -> None:
|
|
319
|
-
"""Clear search context"""
|
|
320
|
-
pass
|
|
321
|
-
|
|
322
|
-
def build_query_string(
|
|
323
|
-
self, product_type: str, **kwargs: Any
|
|
324
|
-
) -> Tuple[Dict[str, Any], str]:
|
|
325
|
-
"""Build The query string using the search parameters"""
|
|
326
|
-
# parse kwargs as properties as they might be needed to build the query
|
|
327
|
-
parsed_properties = properties_from_json(
|
|
328
|
-
kwargs,
|
|
329
|
-
self.config.metadata_mapping,
|
|
330
|
-
)
|
|
331
|
-
available_properties = {
|
|
332
|
-
k: v
|
|
333
|
-
for k, v in parsed_properties.items()
|
|
334
|
-
if v not in [NOT_AVAILABLE, NOT_MAPPED]
|
|
335
|
-
}
|
|
336
|
-
|
|
337
|
-
# build and return the query
|
|
338
|
-
return BuildPostSearchResult.build_query_string(
|
|
339
|
-
self, product_type=product_type, **available_properties
|
|
340
|
-
)
|
|
1127
|
+
) -> tuple[list[str], int]:
|
|
1128
|
+
"""Wraps PostJsonSearch.collect_search_urls to force product count to 1
|
|
341
1129
|
|
|
342
|
-
|
|
1130
|
+
:param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information for the search
|
|
1131
|
+
:param kwargs: keyword arguments used in the search
|
|
1132
|
+
:return: list of search url and number of results
|
|
343
1133
|
"""
|
|
344
|
-
|
|
345
|
-
|
|
346
|
-
This method retrieves the value of a configuration option from the
|
|
347
|
-
`_product_type_config` attribute. If the option is not found, the provided
|
|
348
|
-
default value is returned.
|
|
1134
|
+
urls, _ = super().collect_search_urls(prep, **kwargs)
|
|
1135
|
+
return urls, 1
|
|
349
1136
|
|
|
350
|
-
|
|
351
|
-
|
|
1137
|
+
def do_search(
|
|
1138
|
+
self, prep: PreparedSearch = PreparedSearch(items_per_page=None), **kwargs: Any
|
|
1139
|
+
) -> list[dict[str, Any]]:
|
|
1140
|
+
"""Perform the actual search request, and return result in a single element.
|
|
352
1141
|
|
|
353
|
-
:
|
|
1142
|
+
:param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information for the search
|
|
1143
|
+
:param kwargs: keyword arguments to be used in the search
|
|
1144
|
+
:return: list containing the results from the provider in json format
|
|
354
1145
|
"""
|
|
355
|
-
product_type_cfg = getattr(self.config, "product_type_config", {})
|
|
356
|
-
non_none_cfg = {k: v for k, v in product_type_cfg.items() if v}
|
|
357
1146
|
|
|
358
|
-
|
|
1147
|
+
prep.url = prep.search_urls[0]
|
|
1148
|
+
prep.info_message = f"Sending search request: {prep.url}"
|
|
1149
|
+
prep.exception_message = (
|
|
1150
|
+
f"Skipping error while searching for {self.provider}"
|
|
1151
|
+
f" {self.__class__.__name__} instance"
|
|
1152
|
+
)
|
|
1153
|
+
response = self._request(prep)
|
|
359
1154
|
|
|
360
|
-
|
|
361
|
-
"""Preprocess search parameters before making a request to the CDS API.
|
|
1155
|
+
return [response.json()]
|
|
362
1156
|
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
1157
|
+
def build_query_string(
|
|
1158
|
+
self, product_type: str, query_dict: dict[str, Any]
|
|
1159
|
+
) -> tuple[dict[str, Any], str]:
|
|
1160
|
+
"""Build The query string using the search parameters
|
|
367
1161
|
|
|
368
|
-
:param
|
|
1162
|
+
:param product_type: product type id
|
|
1163
|
+
:param query_dict: keyword arguments to be used in the query string
|
|
1164
|
+
:return: formatted query params and encode query string
|
|
369
1165
|
"""
|
|
370
|
-
|
|
371
|
-
if _dc_qs is not None:
|
|
372
|
-
# if available, update search params using datacube query-string
|
|
373
|
-
_dc_qp = geojson.loads(unquote_plus(unquote_plus(_dc_qs)))
|
|
374
|
-
if "/to/" in _dc_qp.get("date", ""):
|
|
375
|
-
(
|
|
376
|
-
params["startTimeFromAscendingNode"],
|
|
377
|
-
params["completionTimeFromAscendingNode"],
|
|
378
|
-
) = _dc_qp["date"].split("/to/")
|
|
379
|
-
elif "/" in _dc_qp.get("date", ""):
|
|
380
|
-
(
|
|
381
|
-
params["startTimeFromAscendingNode"],
|
|
382
|
-
params["completionTimeFromAscendingNode"],
|
|
383
|
-
) = _dc_qp["date"].split("/")
|
|
384
|
-
elif _dc_qp.get("date", None):
|
|
385
|
-
params["startTimeFromAscendingNode"] = params[
|
|
386
|
-
"completionTimeFromAscendingNode"
|
|
387
|
-
] = _dc_qp["date"]
|
|
388
|
-
|
|
389
|
-
if "/" in _dc_qp.get("area", ""):
|
|
390
|
-
params["geometry"] = _dc_qp["area"].split("/")
|
|
1166
|
+
return QueryStringSearch.build_query_string(self, product_type, query_dict)
|
|
391
1167
|
|
|
392
|
-
non_none_params = {k: v for k, v in params.items() if v}
|
|
393
1168
|
|
|
394
|
-
|
|
395
|
-
|
|
396
|
-
|
+class WekeoECMWFSearch(ECMWFSearch):
+    """
+    WekeoECMWFSearch search plugin.
 
-
-
-
-
-            ).replace(
-                "Z", "+00:00"
-            )  # before 3.11
-        )
+    This plugin, which inherits from :class:`~eodag.plugins.search.build_search_result.ECMWFSearch`,
+    performs a POST request and uses its result to build a single :class:`~eodag.api.search_result.SearchResult`
+    object. In contrast to ECMWFSearch or MeteoblueSearch, the products are only built with information
+    returned by the provider.
 
-
-
-        )
-        default_end_str = (
-            default_end_from_cfg
-            or (
-                datetime.now(timezone.utc)
-                if params.get("startTimeFromAscendingNode")
-                else mission_start_dt + timedelta(days=1)
-            ).isoformat()
-        )
+    The available configuration parameters are inherited from the parent classes, with a particularity
+    in the pagination configuration for this plugin.
 
-
-
-        )
-        params["completionTimeFromAscendingNode"] = non_none_params.get(
-            "completionTimeFromAscendingNode", default_end_str
-        )
+    :param provider: An eodag providers configuration dictionary
+    :param config: Search plugin configuration:
 
-
-
-
-        if not end_date_excluded and end_date == end_date.replace(
-            hour=0, minute=0, second=0, microsecond=0
-        ):
-            end_date += timedelta(days=-1)
-            params["completionTimeFromAscendingNode"] = end_date.isoformat()
+        * :attr:`~eodag.config.PluginConfig.pagination` (:class:`~eodag.config.PluginConfig.Pagination`)
+          (**mandatory**): The configuration of how the pagination is done on the provider. For
+          this plugin it has the node:
 
-
-
-
+          * :attr:`~eodag.config.PluginConfig.Pagination.next_page_query_obj` (``str``): The
+            additional parameters needed to perform search. These parameters won't be included in
+            the result. This must be a json dict formatted like ``{{"foo":"bar"}}`` because it
+            will be passed to a :meth:`str.format` method before being loaded as json.
+    """
 
-    def
-        self, **kwargs: Any
-    ) ->
-        """
+    def normalize_results(
+        self, results: RawSearchResult, **kwargs: Any
+    ) -> list[EOProduct]:
+        """Build :class:`~eodag.api.product._product.EOProduct` from provider result
 
-        :param
-
-        :returns:
+        :param results: Raw provider result as a single dict in a list
+        :param kwargs: Search arguments
+        :returns: list of single :class:`~eodag.api.product._product.EOProduct`
         """
-        constraints_file_url = getattr(self.config, "constraints_file_url", "")
-        if not constraints_file_url:
-            return {}
-        product_type = kwargs.pop("productType", None)
-        if not product_type:
-            return {}
 
-
-
-
-
-        if (
-            user_provider_product_type
-            and user_provider_product_type != provider_product_type
-        ):
-            raise ValidationError(
-                f"Cannot change dataset from {provider_product_type} to {user_provider_product_type}"
-            )
+        # formatting of orderLink requires access to the productType value.
+        results.data = [
+            {**result, **results.product_type_def_params} for result in results
+        ]
 
-
-        default_queryables = self._get_defaults_as_queryables(product_type)
-        # remove dataset from queryables
-        default_queryables.pop("dataset", None)
+        normalized = QueryStringSearch.normalize_results(self, results, **kwargs)
 
-
+        if not normalized:
+            return normalized
 
-
-
-
-
-
-        if not constraints:
-            return default_queryables
-
-        constraint_params: Dict[str, Dict[str, Set[Any]]] = {}
-        if len(kwargs) == 0:
-            # get values from constraints without additional filters
-            for constraint in constraints:
-                for key in constraint.keys():
-                    if key in constraint_params:
-                        constraint_params[key]["enum"].update(constraint[key])
-                    else:
-                        constraint_params[key] = {}
-                        constraint_params[key]["enum"] = set(constraint[key])
-        else:
-            # get values from constraints with additional filters
-            constraints_input_params = {k: v for k, v in non_empty_kwargs.items()}
-            constraint_params = get_constraint_queryables_with_additional_params(
-                constraints, constraints_input_params, self, product_type
-            )
-            # query params that are not in constraints but might be default queryables
-            if len(constraint_params) == 1 and "not_available" in constraint_params:
-                not_queryables: Set[str] = set()
-                for constraint_param in constraint_params["not_available"]["enum"]:
-                    param = CommonQueryables.get_queryable_from_alias(constraint_param)
-                    if param in dict(
-                        CommonQueryables.model_fields, **default_queryables
-                    ):
-                        non_empty_kwargs.pop(constraint_param)
-                    else:
-                        not_queryables.add(constraint_param)
-                if not_queryables:
-                    raise ValidationError(
-                        f"parameter(s) {not_queryables} not queryable"
-                    )
-                else:
-                    # get constraints again without common queryables
-                    constraint_params = (
-                        get_constraint_queryables_with_additional_params(
-                            constraints, non_empty_kwargs, self, product_type
-                        )
-                    )
+        query_params_encoded = quote_plus(orjson.dumps(results.query_params))
+        for product in normalized:
+            properties = {**product.properties, **results.query_params}
+            properties["_dc_qs"] = query_params_encoded
+            product.properties = {ecmwf_format(k): v for k, v in properties.items()}
 
-
-        for json_param, json_mtd in constraint_params.items():
-            param = (
-                get_queryable_from_provider(
-                    json_param, self.get_metadata_mapping(product_type)
-                )
-                or json_param
-            )
-            default = kwargs.get(param, None) or self.config.products.get(
-                product_type, {}
-            ).get(param, None)
-            annotated_def = json_field_definition_to_python(
-                json_mtd, default_value=default, required=True
-            )
-            field_definitions[param] = get_args(annotated_def)
+        return normalized
 
-
-
+    def do_search(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]:
+        """Should perform the actual search request.
+
+        :param args: arguments to be used in the search
+        :param kwargs: keyword arguments to be used in the search
+        :return: list containing the results from the provider in json format
+        """
+        return QueryStringSearch.do_search(self, *args, **kwargs)