eodag 3.0.0b3__py3-none-any.whl → 3.1.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eodag/api/core.py +292 -198
- eodag/api/product/_assets.py +6 -6
- eodag/api/product/_product.py +18 -18
- eodag/api/product/metadata_mapping.py +51 -14
- eodag/api/search_result.py +29 -3
- eodag/cli.py +57 -20
- eodag/config.py +413 -117
- eodag/plugins/apis/base.py +10 -4
- eodag/plugins/apis/ecmwf.py +49 -16
- eodag/plugins/apis/usgs.py +30 -7
- eodag/plugins/authentication/aws_auth.py +14 -5
- eodag/plugins/authentication/base.py +10 -1
- eodag/plugins/authentication/generic.py +14 -3
- eodag/plugins/authentication/header.py +12 -4
- eodag/plugins/authentication/keycloak.py +41 -22
- eodag/plugins/authentication/oauth.py +11 -1
- eodag/plugins/authentication/openid_connect.py +178 -163
- eodag/plugins/authentication/qsauth.py +12 -4
- eodag/plugins/authentication/sas_auth.py +19 -2
- eodag/plugins/authentication/token.py +93 -15
- eodag/plugins/authentication/token_exchange.py +19 -19
- eodag/plugins/crunch/base.py +4 -1
- eodag/plugins/crunch/filter_date.py +5 -2
- eodag/plugins/crunch/filter_latest_intersect.py +5 -4
- eodag/plugins/crunch/filter_latest_tpl_name.py +1 -1
- eodag/plugins/crunch/filter_overlap.py +5 -7
- eodag/plugins/crunch/filter_property.py +6 -6
- eodag/plugins/download/aws.py +50 -34
- eodag/plugins/download/base.py +41 -50
- eodag/plugins/download/creodias_s3.py +40 -2
- eodag/plugins/download/http.py +221 -195
- eodag/plugins/download/s3rest.py +25 -25
- eodag/plugins/manager.py +168 -23
- eodag/plugins/search/base.py +106 -39
- eodag/plugins/search/build_search_result.py +1065 -324
- eodag/plugins/search/cop_marine.py +112 -29
- eodag/plugins/search/creodias_s3.py +45 -24
- eodag/plugins/search/csw.py +41 -1
- eodag/plugins/search/data_request_search.py +109 -9
- eodag/plugins/search/qssearch.py +549 -257
- eodag/plugins/search/static_stac_search.py +20 -21
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +577 -87
- eodag/resources/providers.yml +1619 -2776
- eodag/resources/stac.yml +3 -163
- eodag/resources/user_conf_template.yml +112 -97
- eodag/rest/config.py +1 -2
- eodag/rest/constants.py +0 -1
- eodag/rest/core.py +138 -98
- eodag/rest/errors.py +181 -0
- eodag/rest/server.py +55 -329
- eodag/rest/stac.py +93 -544
- eodag/rest/types/eodag_search.py +19 -8
- eodag/rest/types/queryables.py +6 -8
- eodag/rest/types/stac_search.py +11 -2
- eodag/rest/utils/__init__.py +3 -0
- eodag/types/__init__.py +71 -18
- eodag/types/download_args.py +3 -3
- eodag/types/queryables.py +180 -73
- eodag/types/search_args.py +3 -3
- eodag/types/whoosh.py +126 -0
- eodag/utils/__init__.py +147 -66
- eodag/utils/exceptions.py +47 -26
- eodag/utils/logging.py +37 -77
- eodag/utils/repr.py +65 -6
- eodag/utils/requests.py +11 -13
- eodag/utils/stac_reader.py +1 -1
- {eodag-3.0.0b3.dist-info → eodag-3.1.0b1.dist-info}/METADATA +80 -81
- eodag-3.1.0b1.dist-info/RECORD +108 -0
- {eodag-3.0.0b3.dist-info → eodag-3.1.0b1.dist-info}/WHEEL +1 -1
- {eodag-3.0.0b3.dist-info → eodag-3.1.0b1.dist-info}/entry_points.txt +4 -2
- eodag/resources/constraints/climate-dt.json +0 -13
- eodag/resources/constraints/extremes-dt.json +0 -8
- eodag/utils/constraints.py +0 -244
- eodag-3.0.0b3.dist-info/RECORD +0 -110
- {eodag-3.0.0b3.dist-info → eodag-3.1.0b1.dist-info}/LICENSE +0 -0
- {eodag-3.0.0b3.dist-info → eodag-3.1.0b1.dist-info}/top_level.txt +0 -0
|
@@ -17,105 +17,964 @@
|
|
|
17
17
|
# limitations under the License.
|
|
18
18
|
from __future__ import annotations
|
|
19
19
|
|
|
20
|
+
import functools
|
|
20
21
|
import hashlib
|
|
21
22
|
import logging
|
|
22
|
-
|
|
23
|
-
from
|
|
23
|
+
import re
|
|
24
|
+
from collections import OrderedDict
|
|
25
|
+
from datetime import datetime, timedelta
|
|
26
|
+
from typing import (
|
|
27
|
+
TYPE_CHECKING,
|
|
28
|
+
Annotated,
|
|
29
|
+
Any,
|
|
30
|
+
Dict,
|
|
31
|
+
List,
|
|
32
|
+
Optional,
|
|
33
|
+
Set,
|
|
34
|
+
Tuple,
|
|
35
|
+
Union,
|
|
36
|
+
cast,
|
|
37
|
+
)
|
|
24
38
|
from urllib.parse import quote_plus, unquote_plus
|
|
25
39
|
|
|
26
40
|
import geojson
|
|
27
41
|
import orjson
|
|
28
42
|
from dateutil.parser import isoparse
|
|
43
|
+
from dateutil.tz import tzutc
|
|
29
44
|
from jsonpath_ng import Child, Fields, Root
|
|
30
|
-
from pydantic import
|
|
45
|
+
from pydantic import Field
|
|
31
46
|
from pydantic.fields import FieldInfo
|
|
47
|
+
from requests.auth import AuthBase
|
|
48
|
+
from shapely.geometry.base import BaseGeometry
|
|
32
49
|
from typing_extensions import get_args
|
|
33
50
|
|
|
34
51
|
from eodag.api.product import EOProduct
|
|
35
52
|
from eodag.api.product.metadata_mapping import (
|
|
36
53
|
NOT_AVAILABLE,
|
|
37
54
|
NOT_MAPPED,
|
|
38
|
-
|
|
55
|
+
format_metadata,
|
|
56
|
+
format_query_params,
|
|
39
57
|
mtd_cfg_as_conversion_and_querypath,
|
|
40
58
|
properties_from_json,
|
|
41
59
|
)
|
|
42
60
|
from eodag.api.search_result import RawSearchResult
|
|
43
61
|
from eodag.plugins.search import PreparedSearch
|
|
44
|
-
from eodag.plugins.search.
|
|
45
|
-
from eodag.
|
|
46
|
-
from eodag.types import
|
|
47
|
-
from eodag.types.queryables import CommonQueryables
|
|
62
|
+
from eodag.plugins.search.qssearch import PostJsonSearch, QueryStringSearch
|
|
63
|
+
from eodag.types import json_field_definition_to_python
|
|
64
|
+
from eodag.types.queryables import Queryables
|
|
48
65
|
from eodag.utils import (
|
|
49
|
-
|
|
50
|
-
Annotated,
|
|
66
|
+
HTTP_REQ_TIMEOUT,
|
|
51
67
|
deepcopy,
|
|
52
68
|
dict_items_recursive_sort,
|
|
53
69
|
get_geometry_from_various,
|
|
54
|
-
|
|
55
|
-
from eodag.utils.constraints import (
|
|
56
|
-
fetch_constraints,
|
|
57
|
-
get_constraint_queryables_with_additional_params,
|
|
70
|
+
is_range_in_range,
|
|
58
71
|
)
|
|
59
72
|
from eodag.utils.exceptions import ValidationError
|
|
73
|
+
from eodag.utils.requests import fetch_json
|
|
60
74
|
|
|
61
75
|
if TYPE_CHECKING:
|
|
62
76
|
from eodag.config import PluginConfig
|
|
63
77
|
|
|
64
78
|
logger = logging.getLogger("eodag.search.build_search_result")
|
|
65
79
|
|
|
80
|
+
# keywords from ECMWF keyword database + "dataset" (not part of database but exists)
|
|
81
|
+
# database: https://confluence.ecmwf.int/display/UDOC/Keywords+in+MARS+and+Dissemination+requests
|
|
82
|
+
ECMWF_KEYWORDS = [
|
|
83
|
+
"dataset",
|
|
84
|
+
"accuracy",
|
|
85
|
+
"activity",
|
|
86
|
+
"anoffset",
|
|
87
|
+
"bitmap",
|
|
88
|
+
"block",
|
|
89
|
+
"channel",
|
|
90
|
+
"class",
|
|
91
|
+
"database",
|
|
92
|
+
"date",
|
|
93
|
+
"diagnostic",
|
|
94
|
+
"direction",
|
|
95
|
+
"domain",
|
|
96
|
+
"duplicates",
|
|
97
|
+
"expect",
|
|
98
|
+
"expver",
|
|
99
|
+
"fcmonth",
|
|
100
|
+
"fcperiod",
|
|
101
|
+
"fieldset",
|
|
102
|
+
"filter",
|
|
103
|
+
"format",
|
|
104
|
+
"frame",
|
|
105
|
+
"frequency",
|
|
106
|
+
"generation",
|
|
107
|
+
"grid",
|
|
108
|
+
"hdate",
|
|
109
|
+
"ident",
|
|
110
|
+
"interpolation",
|
|
111
|
+
"intgrid",
|
|
112
|
+
"iteration",
|
|
113
|
+
"latitude",
|
|
114
|
+
"levelist",
|
|
115
|
+
"levtype",
|
|
116
|
+
"longitude",
|
|
117
|
+
"lsm",
|
|
118
|
+
"method",
|
|
119
|
+
"number",
|
|
120
|
+
"obsgroup",
|
|
121
|
+
"obstype",
|
|
122
|
+
"origin",
|
|
123
|
+
"packing",
|
|
124
|
+
"padding",
|
|
125
|
+
"param",
|
|
126
|
+
"priority",
|
|
127
|
+
"product",
|
|
128
|
+
"range",
|
|
129
|
+
"realization",
|
|
130
|
+
"refdate",
|
|
131
|
+
"reference",
|
|
132
|
+
"reportype",
|
|
133
|
+
"repres",
|
|
134
|
+
"resolution",
|
|
135
|
+
"rotation",
|
|
136
|
+
"section",
|
|
137
|
+
"source",
|
|
138
|
+
"step",
|
|
139
|
+
"stream",
|
|
140
|
+
"system",
|
|
141
|
+
"target",
|
|
142
|
+
"time",
|
|
143
|
+
"truncation",
|
|
144
|
+
"type",
|
|
145
|
+
"use",
|
|
146
|
+
]
|
|
147
|
+
|
|
148
|
+
# additional keywords from copernicus services
|
|
149
|
+
COP_DS_KEYWORDS = [
|
|
150
|
+
"aerosol_type",
|
|
151
|
+
"altitude",
|
|
152
|
+
"product_type",
|
|
153
|
+
"band",
|
|
154
|
+
"cdr_type",
|
|
155
|
+
"data_format",
|
|
156
|
+
"dataset_type",
|
|
157
|
+
"day",
|
|
158
|
+
"download_format",
|
|
159
|
+
"ensemble_member",
|
|
160
|
+
"experiment",
|
|
161
|
+
"forcing_type",
|
|
162
|
+
"gcm",
|
|
163
|
+
"hday",
|
|
164
|
+
"hmonth",
|
|
165
|
+
"horizontal_resolution",
|
|
166
|
+
"hydrological_model",
|
|
167
|
+
"hydrological_year",
|
|
168
|
+
"hyear",
|
|
169
|
+
"input_observations",
|
|
170
|
+
"leadtime_hour",
|
|
171
|
+
"leadtime_month",
|
|
172
|
+
"level",
|
|
173
|
+
"location",
|
|
174
|
+
"model",
|
|
175
|
+
"model_level",
|
|
176
|
+
"model_levels",
|
|
177
|
+
"month",
|
|
178
|
+
"nominal_day",
|
|
179
|
+
"originating_centre",
|
|
180
|
+
"period",
|
|
181
|
+
"pressure_level",
|
|
182
|
+
"processing_level",
|
|
183
|
+
"processing_type",
|
|
184
|
+
"product_version",
|
|
185
|
+
"quantity",
|
|
186
|
+
"rcm",
|
|
187
|
+
"region",
|
|
188
|
+
"release_version",
|
|
189
|
+
"satellite",
|
|
190
|
+
"sensor",
|
|
191
|
+
"sensor_and_algorithm",
|
|
192
|
+
"soil_level",
|
|
193
|
+
"sky_type",
|
|
194
|
+
"statistic",
|
|
195
|
+
"system_version",
|
|
196
|
+
"temporal_aggregation",
|
|
197
|
+
"time_aggregation",
|
|
198
|
+
"time_reference",
|
|
199
|
+
"time_step",
|
|
200
|
+
"variable",
|
|
201
|
+
"variable_type",
|
|
202
|
+
"version",
|
|
203
|
+
"year",
|
|
204
|
+
]
|
|
205
|
+
|
|
206
|
+
|
|
207
|
+
def keywords_to_mdt(
|
|
208
|
+
keywords: List[str], prefix: Optional[str] = None
|
|
209
|
+
) -> Dict[str, Any]:
|
|
210
|
+
"""
|
|
211
|
+
Make metadata mapping dict from a list of keywords
|
|
66
212
|
|
|
67
|
-
|
|
68
|
-
|
|
213
|
+
prefix:keyword:
|
|
214
|
+
- keyword
|
|
215
|
+
- $."prefix:keyword"
|
|
69
216
|
|
|
70
|
-
|
|
71
|
-
|
|
72
|
-
|
|
217
|
+
>>> keywords_to_mdt(["month", "year"])
|
|
218
|
+
{'month': ['month', '$."month"'], 'year': ['year', '$."year"']}
|
|
219
|
+
>>> keywords_to_mdt(["month", "year"], "ecmwf")
|
|
220
|
+
{'ecmwf:month': ['month', '$."ecmwf:month"'], 'ecmwf:year': ['year', '$."ecmwf:year"']}
|
|
73
221
|
|
|
74
|
-
|
|
75
|
-
|
|
222
|
+
:param keywords: List of keywords to be converted
|
|
223
|
+
:param prefix: prefix to be added to the parameter in the mapping
|
|
224
|
+
:return: metadata mapping dict
|
|
225
|
+
"""
|
|
226
|
+
mdt: Dict[str, Any] = {}
|
|
227
|
+
for keyword in keywords:
|
|
228
|
+
key = f"{prefix}:{keyword}" if prefix else keyword
|
|
229
|
+
mdt[key] = [keyword, f'$."{key}"']
|
|
230
|
+
return mdt
|
|
231
|
+
|
|
232
|
+
|
|
233
|
+
def strip_quotes(value: Any) -> Any:
|
|
234
|
+
"""Strip superfluous quotes from elements (added by mapping converter to_geojson).
|
|
235
|
+
|
|
236
|
+
>>> strip_quotes("'abc'")
|
|
237
|
+
'abc'
|
|
238
|
+
>>> strip_quotes(["'abc'", '"def'])
|
|
239
|
+
['abc', 'def']
|
|
240
|
+
|
|
241
|
+
:param value: value from which quotes should be removed (should be either str or list)
|
|
242
|
+
:return: value without quotes
|
|
243
|
+
:raises: NotImplementedError
|
|
244
|
+
"""
|
|
245
|
+
if isinstance(value, (list, tuple)):
|
|
246
|
+
return [strip_quotes(v) for v in value]
|
|
247
|
+
elif isinstance(value, dict):
|
|
248
|
+
raise NotImplementedError("Dict value is not supported.")
|
|
249
|
+
else:
|
|
250
|
+
return str(value).strip("'\"")
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def _update_properties_from_element(
|
|
254
|
+
prop: Dict[str, Any], element: Dict[str, Any], values: List[str]
|
|
255
|
+
) -> None:
|
|
256
|
+
"""updates a property dict with the given values based on the information from the element dict
|
|
257
|
+
e.g. the type is set based on the type of the element
|
|
258
|
+
"""
|
|
259
|
+
# multichoice elements are transformed into array
|
|
260
|
+
if element["type"] in ("StringListWidget", "StringListArrayWidget"):
|
|
261
|
+
prop["type"] = "array"
|
|
262
|
+
if values:
|
|
263
|
+
prop["items"] = {"type": "string", "enum": sorted(values)}
|
|
264
|
+
|
|
265
|
+
# single choice elements are transformed into string
|
|
266
|
+
elif element["type"] in (
|
|
267
|
+
"StringChoiceWidget",
|
|
268
|
+
"DateRangeWidget",
|
|
269
|
+
"FreeformInputWidget",
|
|
270
|
+
):
|
|
271
|
+
prop["type"] = "string"
|
|
272
|
+
if values:
|
|
273
|
+
prop["enum"] = sorted(values)
|
|
274
|
+
|
|
275
|
+
# a bbox element
|
|
276
|
+
elif element["type"] in ["GeographicExtentWidget", "GeographicExtentMapWidget"]:
|
|
277
|
+
prop.update(
|
|
278
|
+
{
|
|
279
|
+
"type": "array",
|
|
280
|
+
"minItems": 4,
|
|
281
|
+
"additionalItems": False,
|
|
282
|
+
"items": [
|
|
283
|
+
{
|
|
284
|
+
"type": "number",
|
|
285
|
+
"maximum": 180,
|
|
286
|
+
"minimum": -180,
|
|
287
|
+
"description": "West border of the bounding box",
|
|
288
|
+
},
|
|
289
|
+
{
|
|
290
|
+
"type": "number",
|
|
291
|
+
"maximum": 90,
|
|
292
|
+
"minimum": -90,
|
|
293
|
+
"description": "South border of the bounding box",
|
|
294
|
+
},
|
|
295
|
+
{
|
|
296
|
+
"type": "number",
|
|
297
|
+
"maximum": 180,
|
|
298
|
+
"minimum": -180,
|
|
299
|
+
"description": "East border of the bounding box",
|
|
300
|
+
},
|
|
301
|
+
{
|
|
302
|
+
"type": "number",
|
|
303
|
+
"maximum": 90,
|
|
304
|
+
"minimum": -90,
|
|
305
|
+
"description": "North border of the bounding box",
|
|
306
|
+
},
|
|
307
|
+
],
|
|
308
|
+
}
|
|
309
|
+
)
|
|
76
310
|
|
|
77
|
-
|
|
311
|
+
# DateRangeWidget is a calendar date picker
|
|
312
|
+
if element["type"] == "DateRangeWidget":
|
|
313
|
+
prop["description"] = "date formatted like yyyy-mm-dd/yyyy-mm-dd"
|
|
78
314
|
|
|
79
|
-
|
|
80
|
-
|
|
315
|
+
if description := element.get("help"):
|
|
316
|
+
prop["description"] = description
|
|
81
317
|
|
|
82
|
-
|
|
83
|
-
|
|
84
|
-
|
|
85
|
-
|
|
318
|
+
|
|
319
|
+
def ecmwf_format(v: str) -> str:
|
|
320
|
+
"""Add ECMWF prefix to value v if v is a ECMWF keyword."""
|
|
321
|
+
return "ecmwf:" + v if v in ECMWF_KEYWORDS + COP_DS_KEYWORDS else v
|
|
322
|
+
|
|
323
|
+
|
|
324
|
+
class ECMWFSearch(PostJsonSearch):
|
|
325
|
+
"""ECMWF search plugin.
|
|
326
|
+
|
|
327
|
+
This plugin builds a :class:`~eodag.api.search_result.SearchResult` containing a single product
|
|
328
|
+
using given query parameters as product properties.
|
|
329
|
+
|
|
330
|
+
The available configuration parameters inherits from parent classes, with some particular parameters
|
|
331
|
+
for this plugin.
|
|
86
332
|
|
|
87
333
|
:param provider: An eodag providers configuration dictionary
|
|
88
|
-
:param config:
|
|
334
|
+
:param config: Search plugin configuration:
|
|
335
|
+
|
|
336
|
+
* :attr:`~eodag.config.PluginConfig.remove_from_query` (``List[str]``): List of parameters
|
|
337
|
+
used to parse metadata but that must not be included to the query
|
|
338
|
+
* :attr:`~eodag.config.PluginConfig.end_date_excluded` (``bool``): Set to `False` if
|
|
339
|
+
provider does not include end date to search
|
|
340
|
+
* :attr:`~eodag.config.PluginConfig.discover_queryables`
|
|
341
|
+
(:class:`~eodag.config.PluginConfig.DiscoverQueryables`): configuration to fetch the queryables from a
|
|
342
|
+
provider queryables endpoint; It has the following keys:
|
|
343
|
+
|
|
344
|
+
* :attr:`~eodag.config.PluginConfig.DiscoverQueryables.fetch_url` (``str``): url to fetch the queryables valid
|
|
345
|
+
for all product types
|
|
346
|
+
* :attr:`~eodag.config.PluginConfig.DiscoverQueryables.product_type_fetch_url` (``str``): url to fetch the
|
|
347
|
+
queryables for a specific product type
|
|
348
|
+
* :attr:`~eodag.config.PluginConfig.DiscoverQueryables.constraints_url` (``str``): url of the constraint file
|
|
349
|
+
used to build queryables
|
|
89
350
|
"""
|
|
90
351
|
|
|
91
|
-
def
|
|
92
|
-
|
|
93
|
-
|
|
94
|
-
"""Count method that will always return 1."""
|
|
95
|
-
return 1
|
|
352
|
+
def __init__(self, provider: str, config: PluginConfig) -> None:
|
|
353
|
+
# cache fetching method
|
|
354
|
+
self.fetch_data = functools.lru_cache()(self._fetch_data)
|
|
96
355
|
|
|
97
|
-
|
|
356
|
+
config.metadata_mapping = {
|
|
357
|
+
**keywords_to_mdt(ECMWF_KEYWORDS + COP_DS_KEYWORDS, "ecmwf"),
|
|
358
|
+
**config.metadata_mapping,
|
|
359
|
+
}
|
|
360
|
+
|
|
361
|
+
super().__init__(provider, config)
|
|
362
|
+
|
|
363
|
+
self.config.__dict__.setdefault("api_endpoint", "")
|
|
364
|
+
|
|
365
|
+
# needed by QueryStringSearch.build_query_string / format_free_text_search
|
|
366
|
+
self.config.__dict__.setdefault("free_text_search_operations", {})
|
|
367
|
+
# needed for compatibility
|
|
368
|
+
self.config.pagination.setdefault("next_page_query_obj", "{{}}")
|
|
369
|
+
|
|
370
|
+
# parse jsonpath on init: product type specific metadata-mapping
|
|
371
|
+
for product_type in self.config.products.keys():
|
|
372
|
+
if "metadata_mapping" in self.config.products[product_type].keys():
|
|
373
|
+
self.config.products[product_type][
|
|
374
|
+
"metadata_mapping"
|
|
375
|
+
] = mtd_cfg_as_conversion_and_querypath(
|
|
376
|
+
self.config.products[product_type]["metadata_mapping"]
|
|
377
|
+
)
|
|
378
|
+
# Complete and ready to use product type specific metadata-mapping
|
|
379
|
+
product_type_metadata_mapping = deepcopy(self.config.metadata_mapping)
|
|
380
|
+
|
|
381
|
+
# update config using provider product type definition metadata_mapping
|
|
382
|
+
# from another product
|
|
383
|
+
other_product_for_mapping = cast(
|
|
384
|
+
str,
|
|
385
|
+
self.config.products[product_type].get(
|
|
386
|
+
"metadata_mapping_from_product", ""
|
|
387
|
+
),
|
|
388
|
+
)
|
|
389
|
+
if other_product_for_mapping:
|
|
390
|
+
other_product_type_def_params = self.get_product_type_def_params(
|
|
391
|
+
other_product_for_mapping,
|
|
392
|
+
)
|
|
393
|
+
product_type_metadata_mapping.update(
|
|
394
|
+
other_product_type_def_params.get("metadata_mapping", {})
|
|
395
|
+
)
|
|
396
|
+
# from current product
|
|
397
|
+
product_type_metadata_mapping.update(
|
|
398
|
+
self.config.products[product_type]["metadata_mapping"]
|
|
399
|
+
)
|
|
400
|
+
|
|
401
|
+
self.config.products[product_type][
|
|
402
|
+
"metadata_mapping"
|
|
403
|
+
] = product_type_metadata_mapping
|
|
404
|
+
|
|
405
|
+
def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]:
|
|
406
|
+
"""Should perform the actual search request.
|
|
407
|
+
|
|
408
|
+
:param args: arguments to be used in the search
|
|
409
|
+
:param kwargs: keyword arguments to be used in the search
|
|
410
|
+
:return: list containing the results from the provider in json format
|
|
411
|
+
"""
|
|
412
|
+
# no real search. We fake it all
|
|
413
|
+
return [{}]
|
|
414
|
+
|
|
415
|
+
def query(
|
|
98
416
|
self,
|
|
99
417
|
prep: PreparedSearch = PreparedSearch(),
|
|
100
418
|
**kwargs: Any,
|
|
101
|
-
) -> Tuple[List[
|
|
102
|
-
"""
|
|
103
|
-
urls, _ = super(BuildPostSearchResult, self).collect_search_urls(prep, **kwargs)
|
|
104
|
-
return urls, 1
|
|
419
|
+
) -> Tuple[List[EOProduct], Optional[int]]:
|
|
420
|
+
"""Build ready-to-download SearchResult
|
|
105
421
|
|
|
106
|
-
|
|
107
|
-
|
|
108
|
-
|
|
109
|
-
"""
|
|
110
|
-
|
|
111
|
-
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
|
|
422
|
+
:param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information needed for the search
|
|
423
|
+
:param kwargs: keyword arguments to be used in the search
|
|
424
|
+
:returns: list of products and number of products (optional)
|
|
425
|
+
"""
|
|
426
|
+
product_type = prep.product_type
|
|
427
|
+
if not product_type:
|
|
428
|
+
product_type = kwargs.get("productType", None)
|
|
429
|
+
self._preprocess_search_params(kwargs, product_type)
|
|
430
|
+
result, num_items = super().query(prep, **kwargs)
|
|
431
|
+
if prep.count and not num_items:
|
|
432
|
+
num_items = 1
|
|
433
|
+
|
|
434
|
+
return result, num_items
|
|
435
|
+
|
|
436
|
+
def clear(self) -> None:
|
|
437
|
+
"""Clear search context"""
|
|
438
|
+
super().clear()
|
|
439
|
+
|
|
440
|
+
def build_query_string(
|
|
441
|
+
self, product_type: str, **kwargs: Any
|
|
442
|
+
) -> Tuple[Dict[str, Any], str]:
|
|
443
|
+
"""Build The query string using the search parameters
|
|
444
|
+
|
|
445
|
+
:param product_type: product type id
|
|
446
|
+
:param kwargs: keyword arguments to be used in the query string
|
|
447
|
+
:return: formatted query params and encode query string
|
|
448
|
+
"""
|
|
449
|
+
# parse kwargs as properties as they might be needed to build the query
|
|
450
|
+
parsed_properties = properties_from_json(
|
|
451
|
+
kwargs,
|
|
452
|
+
self.config.metadata_mapping,
|
|
115
453
|
)
|
|
116
|
-
|
|
454
|
+
available_properties = {
|
|
455
|
+
# We strip values of superfluous quotes (added by mapping converter to_geojson).
|
|
456
|
+
k: strip_quotes(v)
|
|
457
|
+
for k, v in parsed_properties.items()
|
|
458
|
+
if v not in [NOT_AVAILABLE, NOT_MAPPED]
|
|
459
|
+
}
|
|
117
460
|
|
|
118
|
-
return
|
|
461
|
+
# build and return the query
|
|
462
|
+
return super().build_query_string(
|
|
463
|
+
product_type=product_type, **available_properties
|
|
464
|
+
)
|
|
465
|
+
|
|
466
|
+
def _preprocess_search_params(
|
|
467
|
+
self, params: Dict[str, Any], product_type: Optional[str]
|
|
468
|
+
) -> None:
|
|
469
|
+
"""Preprocess search parameters before making a request to the CDS API.
|
|
470
|
+
|
|
471
|
+
This method is responsible for checking and updating the provided search parameters
|
|
472
|
+
to ensure that required parameters like 'productType', 'startTimeFromAscendingNode',
|
|
473
|
+
'completionTimeFromAscendingNode', and 'geometry' are properly set. If not specified
|
|
474
|
+
in the input parameters, default values or values from the configuration are used.
|
|
475
|
+
|
|
476
|
+
:param params: Search parameters to be preprocessed.
|
|
477
|
+
:param product_type: (optional) product type id
|
|
478
|
+
"""
|
|
479
|
+
_dc_qs = params.get("_dc_qs", None)
|
|
480
|
+
if _dc_qs is not None:
|
|
481
|
+
# if available, update search params using datacube query-string
|
|
482
|
+
_dc_qp = geojson.loads(unquote_plus(unquote_plus(_dc_qs)))
|
|
483
|
+
if "/to/" in _dc_qp.get("date", ""):
|
|
484
|
+
(
|
|
485
|
+
params["startTimeFromAscendingNode"],
|
|
486
|
+
params["completionTimeFromAscendingNode"],
|
|
487
|
+
) = _dc_qp["date"].split("/to/")
|
|
488
|
+
elif "/" in _dc_qp.get("date", ""):
|
|
489
|
+
(
|
|
490
|
+
params["startTimeFromAscendingNode"],
|
|
491
|
+
params["completionTimeFromAscendingNode"],
|
|
492
|
+
) = _dc_qp["date"].split("/")
|
|
493
|
+
elif _dc_qp.get("date", None):
|
|
494
|
+
params["startTimeFromAscendingNode"] = params[
|
|
495
|
+
"completionTimeFromAscendingNode"
|
|
496
|
+
] = _dc_qp["date"]
|
|
497
|
+
|
|
498
|
+
if "/" in _dc_qp.get("area", ""):
|
|
499
|
+
params["geometry"] = _dc_qp["area"].split("/")
|
|
500
|
+
|
|
501
|
+
non_none_params = {k: v for k, v in params.items() if v}
|
|
502
|
+
|
|
503
|
+
# productType
|
|
504
|
+
dataset = params.get("ecmwf:dataset", None)
|
|
505
|
+
params["productType"] = non_none_params.get("productType", dataset)
|
|
506
|
+
|
|
507
|
+
# dates
|
|
508
|
+
# check if default dates have to be added
|
|
509
|
+
if getattr(self.config, "dates_required", False):
|
|
510
|
+
self._check_date_params(params, product_type)
|
|
511
|
+
|
|
512
|
+
# adapt end date if it is midnight
|
|
513
|
+
if "completionTimeFromAscendingNode" in params:
|
|
514
|
+
end_date_excluded = getattr(self.config, "end_date_excluded", True)
|
|
515
|
+
is_datetime = True
|
|
516
|
+
try:
|
|
517
|
+
end_date = datetime.strptime(
|
|
518
|
+
params["completionTimeFromAscendingNode"], "%Y-%m-%dT%H:%M:%SZ"
|
|
519
|
+
)
|
|
520
|
+
end_date = end_date.replace(tzinfo=tzutc())
|
|
521
|
+
except ValueError:
|
|
522
|
+
try:
|
|
523
|
+
end_date = datetime.strptime(
|
|
524
|
+
params["completionTimeFromAscendingNode"],
|
|
525
|
+
"%Y-%m-%dT%H:%M:%S.%fZ",
|
|
526
|
+
)
|
|
527
|
+
end_date = end_date.replace(tzinfo=tzutc())
|
|
528
|
+
except ValueError:
|
|
529
|
+
end_date = isoparse(params["completionTimeFromAscendingNode"])
|
|
530
|
+
is_datetime = False
|
|
531
|
+
start_date = isoparse(params["startTimeFromAscendingNode"])
|
|
532
|
+
if (
|
|
533
|
+
not end_date_excluded
|
|
534
|
+
and is_datetime
|
|
535
|
+
and end_date > start_date
|
|
536
|
+
and end_date
|
|
537
|
+
== end_date.replace(hour=0, minute=0, second=0, microsecond=0)
|
|
538
|
+
):
|
|
539
|
+
end_date += timedelta(days=-1)
|
|
540
|
+
params["completionTimeFromAscendingNode"] = end_date.isoformat()
|
|
541
|
+
|
|
542
|
+
# geometry
|
|
543
|
+
if "geometry" in params:
|
|
544
|
+
params["geometry"] = get_geometry_from_various(geometry=params["geometry"])
|
|
545
|
+
|
|
546
|
+
def discover_queryables(
|
|
547
|
+
self, **kwargs: Any
|
|
548
|
+
) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]:
|
|
549
|
+
"""Fetch queryables list from provider using its constraints file
|
|
550
|
+
|
|
551
|
+
:param kwargs: additional filters for queryables (`productType` and other search
|
|
552
|
+
arguments)
|
|
553
|
+
:returns: fetched queryable parameters dict
|
|
554
|
+
"""
|
|
555
|
+
product_type = kwargs.pop("productType")
|
|
556
|
+
product_type_config = self.config.products.get(product_type, {})
|
|
557
|
+
provider_product_type = (
|
|
558
|
+
product_type_config.get("ecmwf:dataset", None)
|
|
559
|
+
or product_type_config["productType"]
|
|
560
|
+
)
|
|
561
|
+
if "start" in kwargs:
|
|
562
|
+
kwargs["startTimeFromAscendingNode"] = kwargs.pop("start")
|
|
563
|
+
if "end" in kwargs:
|
|
564
|
+
kwargs["completionTimeFromAscendingNode"] = kwargs.pop("end")
|
|
565
|
+
|
|
566
|
+
# extract default datetime
|
|
567
|
+
processed_kwargs = deepcopy(kwargs)
|
|
568
|
+
self._preprocess_search_params(processed_kwargs, product_type)
|
|
569
|
+
|
|
570
|
+
constraints_url = format_metadata(
|
|
571
|
+
getattr(self.config, "discover_queryables", {}).get("constraints_url", ""),
|
|
572
|
+
**kwargs,
|
|
573
|
+
)
|
|
574
|
+
constraints: List[Dict[str, Any]] = self.fetch_data(constraints_url)
|
|
575
|
+
|
|
576
|
+
form_url = format_metadata(
|
|
577
|
+
getattr(self.config, "discover_queryables", {}).get("form_url", ""),
|
|
578
|
+
**kwargs,
|
|
579
|
+
)
|
|
580
|
+
form = self.fetch_data(form_url)
|
|
581
|
+
|
|
582
|
+
formated_kwargs = self.format_as_provider_keyword(
|
|
583
|
+
product_type, processed_kwargs
|
|
584
|
+
)
|
|
585
|
+
# we re-apply kwargs input to consider override of year, month, day and time.
|
|
586
|
+
for key in kwargs:
|
|
587
|
+
if key.startswith("ecmwf:"):
|
|
588
|
+
formated_kwargs[key.replace("ecmwf:", "")] = kwargs[key]
|
|
589
|
+
elif key in (
|
|
590
|
+
"startTimeFromAscendingNode",
|
|
591
|
+
"completionTimeFromAscendingNode",
|
|
592
|
+
"geom",
|
|
593
|
+
):
|
|
594
|
+
formated_kwargs[key] = kwargs[key]
|
|
595
|
+
else:
|
|
596
|
+
raise ValidationError(
|
|
597
|
+
f"{key} is not a queryable parameter for {self.provider}"
|
|
598
|
+
)
|
|
599
|
+
|
|
600
|
+
# we use non empty kwargs as default to integrate user inputs
|
|
601
|
+
# it is needed because pydantic json schema does not represent "value"
|
|
602
|
+
# but only "default"
|
|
603
|
+
non_empty_formated: Dict[str, Any] = {
|
|
604
|
+
k: v
|
|
605
|
+
for k, v in formated_kwargs.items()
|
|
606
|
+
if v and (not isinstance(v, list) or all(v))
|
|
607
|
+
}
|
|
608
|
+
non_empty_kwargs: Dict[str, Any] = {
|
|
609
|
+
k: v
|
|
610
|
+
for k, v in processed_kwargs.items()
|
|
611
|
+
if v and (not isinstance(v, list) or all(v))
|
|
612
|
+
}
|
|
613
|
+
|
|
614
|
+
required_keywords: Set[str] = set()
|
|
615
|
+
|
|
616
|
+
# calculate available values
|
|
617
|
+
if constraints:
|
|
618
|
+
# Apply constraints filtering
|
|
619
|
+
available_values = self.available_values_from_constraints(
|
|
620
|
+
constraints,
|
|
621
|
+
non_empty_formated,
|
|
622
|
+
form_keywords=[f["name"] for f in form],
|
|
623
|
+
)
|
|
624
|
+
|
|
625
|
+
# Pre-compute the required keywords (present in all constraint dicts)
|
|
626
|
+
# when form, required keywords are extracted directly from form
|
|
627
|
+
if not form:
|
|
628
|
+
required_keywords = set(constraints[0].keys())
|
|
629
|
+
for constraint in constraints[1:]:
|
|
630
|
+
required_keywords.intersection_update(constraint.keys())
|
|
631
|
+
else:
|
|
632
|
+
values_url = getattr(self.config, "available_values_url", "")
|
|
633
|
+
if not values_url:
|
|
634
|
+
return self.queryables_from_metadata_mapping(product_type)
|
|
635
|
+
if "{" in values_url:
|
|
636
|
+
values_url = values_url.format(productType=provider_product_type)
|
|
637
|
+
data = self.fetch_data(values_url)
|
|
638
|
+
available_values = data["constraints"]
|
|
639
|
+
required_keywords = data.get("required", [])
|
|
640
|
+
|
|
641
|
+
# To check if all keywords are queryable parameters, we check if they are in the
|
|
642
|
+
# available values or the product type config (available values calculated from the
|
|
643
|
+
# constraints might not include all queryables)
|
|
644
|
+
for keyword in kwargs:
|
|
645
|
+
if (
|
|
646
|
+
keyword
|
|
647
|
+
not in available_values.keys()
|
|
648
|
+
| product_type_config.keys()
|
|
649
|
+
| {
|
|
650
|
+
"startTimeFromAscendingNode",
|
|
651
|
+
"completionTimeFromAscendingNode",
|
|
652
|
+
"geom",
|
|
653
|
+
}
|
|
654
|
+
and keyword.replace("ecmwf:", "") not in available_values
|
|
655
|
+
):
|
|
656
|
+
raise ValidationError(f"{keyword} is not a queryable parameter")
|
|
657
|
+
|
|
658
|
+
# generate queryables
|
|
659
|
+
if form:
|
|
660
|
+
queryables = self.queryables_by_form(
|
|
661
|
+
form,
|
|
662
|
+
available_values,
|
|
663
|
+
non_empty_formated,
|
|
664
|
+
)
|
|
665
|
+
else:
|
|
666
|
+
queryables = self.queryables_by_values(
|
|
667
|
+
available_values, list(required_keywords), non_empty_kwargs
|
|
668
|
+
)
|
|
669
|
+
|
|
670
|
+
# ecmwf:date is replaced by start and end.
|
|
671
|
+
# start and end filters are supported whenever combinations of "year", "month", "day" filters exist
|
|
672
|
+
if (
|
|
673
|
+
queryables.pop("ecmwf:date", None)
|
|
674
|
+
or "ecmwf:year" in queryables
|
|
675
|
+
or "ecmwf:hyear" in queryables
|
|
676
|
+
):
|
|
677
|
+
queryables.update(
|
|
678
|
+
{
|
|
679
|
+
"start": Queryables.get_with_default(
|
|
680
|
+
"start", non_empty_kwargs.get("startTimeFromAscendingNode")
|
|
681
|
+
),
|
|
682
|
+
"end": Queryables.get_with_default(
|
|
683
|
+
"end",
|
|
684
|
+
non_empty_kwargs.get("completionTimeFromAscendingNode"),
|
|
685
|
+
),
|
|
686
|
+
}
|
|
687
|
+
)
|
|
688
|
+
|
|
689
|
+
# area is geom in EODAG.
|
|
690
|
+
if queryables.pop("area", None):
|
|
691
|
+
queryables["geom"] = Annotated[
|
|
692
|
+
Union[str, Dict[str, float], BaseGeometry],
|
|
693
|
+
Field(
|
|
694
|
+
None,
|
|
695
|
+
description="Read EODAG documentation for all supported geometry format.",
|
|
696
|
+
),
|
|
697
|
+
]
|
|
698
|
+
|
|
699
|
+
return queryables
|
|
700
|
+
|
|
701
|
+
def available_values_from_constraints(
    self,
    constraints: list[Dict[str, Any]],
    input_keywords: Dict[str, Any],
    form_keywords: List[str],
) -> Dict[str, List[str]]:
    """
    Filter constraints using input_keywords. Return list of available queryables.
    All constraint entries must have the same parameters.

    :param constraints: list of constraints received from the provider
    :param input_keywords: dict of input parameters given by the user
    :param form_keywords: list of keyword names from the provider form endpoint
    :return: dict with available values for each parameter
    :raises ValidationError: if a keyword value is given as a dict, or if a
        keyword value matches no remaining constraint entry
    """
    # get ordered constraint keywords (first-seen order across all entries)
    constraints_keywords = list(
        OrderedDict.fromkeys(k for c in constraints for k in c.keys())
    )

    # prepare ordered input keywords formatted as provider's keywords
    # required to filter with constraints
    ordered_keywords = (
        [kw for kw in form_keywords if kw in constraints_keywords]
        if form_keywords
        else constraints_keywords
    )

    # filter constraint entries matching input keyword values
    filtered_constraints: List[Dict[str, Any]]

    # keywords already processed; used to build a helpful error message
    parsed_keywords: List[str] = []
    for keyword in ordered_keywords:
        values = input_keywords.get(keyword)

        # keyword not supplied by the user: keep all entries, move on
        if values is None:
            parsed_keywords.append(keyword)
            continue

        # we only compare list of strings.
        if isinstance(values, dict):
            raise ValidationError(
                f"Parameter value as object is not supported: {keyword}={values}"
            )
        filter_v = values if isinstance(values, (list, tuple)) else [values]

        # We convert every single value to a list of string
        # We strip values of superfluous quotes (added by mapping converter to_geojson).
        # ECMWF accept values with /to/. We need to split it to an array
        # ECMWF accept values in format val1/val2. We need to split it to an array
        sep = re.compile(r"/to/|/")
        filter_v = [i for v in filter_v for i in sep.split(strip_quotes(v))]

        # special handling for time 0000 converted to 0 by pre-formating with metadata_mapping
        if keyword.split(":")[-1] == "time":
            filter_v = ["0000" if str(v) == "0" else v for v in filter_v]

        # Collect missing values to report errors
        missing_values = set(filter_v)

        # Filter constraints and check for missing values
        filtered_constraints = []
        for entry in constraints:
            # Filter based on the presence of any value in filter_v
            entry_values = entry.get(keyword, [])

            # date constraint may be intervals. We identify intervals with a "/" in the value
            # we assume that if the first value is an interval, all values are intervals
            present_values = []
            if keyword == "date" and "/" in entry[keyword][0]:
                # NOTE(review): the range check uses the raw values[0], not the
                # split filter_v — confirm this is intended for multi-value dates
                if any(is_range_in_range(x, values[0]) for x in entry[keyword]):
                    present_values = filter_v
            else:
                present_values = [
                    value for value in filter_v if value in entry_values
                ]

            # Remove present values from the missing_values set
            missing_values -= set(present_values)

            if present_values:
                filtered_constraints.append(entry)

        # raise an error as no constraint entry matched the input keywords
        # raise an error if one value from input is not allowed
        if not filtered_constraints or missing_values:
            allowed_values = list(
                {value for c in constraints for value in c.get(keyword, [])}
            )
            # restore ecmwf: prefix before raising error
            keyword = f"ecmwf:{keyword}"

            all_keywords_str = ""
            if len(parsed_keywords) > 1:
                keywords = [
                    f"ecmwf:{k}={pk}"
                    for k in parsed_keywords
                    if (pk := input_keywords.get(k))
                ]
                all_keywords_str = f" with {', '.join(keywords)}"

            raise ValidationError(
                f"{keyword}={values} is not available"
                f"{all_keywords_str}."
                f" Allowed values are {', '.join(allowed_values)}."
            )

        parsed_keywords.append(keyword)
        # narrow the working set: later keywords are matched against the
        # entries that survived this keyword's filter
        constraints = filtered_constraints

    available_values: Dict[str, Any] = {k: set() for k in ordered_keywords}

    # we aggregate the constraint entries left
    for entry in constraints:
        for key, value in entry.items():
            available_values[key].update(value)

    return {k: list(v) for k, v in available_values.items()}
|
|
819
|
+
|
|
820
|
+
def queryables_by_form(
    self,
    form: List[Dict[str, Any]],
    available_values: Dict[str, List[str]],
    defaults: Dict[str, Any],
) -> Dict[str, Annotated[Any, FieldInfo]]:
    """
    Generate Annotated field definitions from form entries and available values
    Used by Copernicus services like cop_cds, cop_ads, cop_ewds.

    :param form: data fetched from the form endpoint of the provider
    :param available_values: available values for each parameter
    :param defaults: default values for the parameters
    :return: dict of annotated queryables
    """
    queryables: Dict[str, Annotated[Any, FieldInfo]] = {}

    # NOTE(review): required_list is populated but never read in this method —
    # confirm whether it is kept intentionally (is_required is used directly)
    required_list: List[str] = []
    for element in form:
        name: str = element["name"]

        # those are not parameter elements.
        if name in ("area_group", "global", "warning", "licences"):
            continue
        # free-text widgets carry no queryable definition either
        if "type" not in element or element["type"] == "FreeEditionWidget":
            continue

        # ordering done by id -> set id to high value if not present -> element will be last
        if "id" not in element:
            element["id"] = 100

        prop = {"title": element.get("label", name)}

        details = element.get("details", {})

        # add values from form if keyword was not in constraints
        values = (
            available_values[name]
            if name in available_values
            else details.get("values")
        )

        # updates the properties with the values given based on the information from the element
        _update_properties_from_element(prop, element, values)

        default = defaults.get(name)

        if details:
            fields = details.get("fields")
            if fields and (comment := fields[0].get("comment")):
                prop["description"] = comment

            # user-provided default wins over the form's default
            if d := details.get("default"):
                default = default or (d[0] if fields else d)

        # area defaults come as a dict of bounds; flatten to a list
        if name == "area" and isinstance(default, dict):
            default = list(default.values())

        if default:
            # We strip values of superfluous quotes (addded by mapping converter to_geojson).
            default = strip_quotes(default)

        # sometimes form returns default as array instead of string
        if default and prop["type"] == "string" and isinstance(default, list):
            default = ",".join(default)

        is_required = bool(element.get("required"))
        if is_required:
            required_list.append(name)

        # expose the parameter under its eodag name (e.g. "ecmwf:<name>")
        queryables[ecmwf_format(name)] = Annotated[
            get_args(
                json_field_definition_to_python(
                    prop,
                    default_value=default,
                    required=is_required,
                )
            )
        ]

    return queryables
|
|
901
|
+
|
|
902
|
+
def queryables_by_values(
    self,
    available_values: Dict[str, List[str]],
    required_keywords: List[str],
    defaults: Dict[str, Any],
) -> Dict[str, Annotated[Any, FieldInfo]]:
    """
    Generate Annotated field definitions from available values.
    Used by ECMWF data providers like dedt_lumi.

    :param available_values: available values for each parameter
    :param required_keywords: list of required parameters
    :param defaults: default values for the parameters
    :return: dict of annotated queryables
    """
    # Provider keyword names are exposed under their eodag equivalent
    # (constraints keyword "xxxx" maps to eodag parameter "ecmwf:xxxx").
    mandatory = {ecmwf_format(keyword) for keyword in required_keywords}

    annotated: Dict[str, Annotated[Any, FieldInfo]] = {}
    for raw_name, choices in available_values.items():
        # same renaming for each individual parameter
        param = ecmwf_format(raw_name)
        preset = defaults.get(param)

        definition = {"type": "string", "title": raw_name, "enum": choices}
        annotated[param] = Annotated[
            get_args(
                json_field_definition_to_python(
                    definition,
                    # strip duplicated quotes added by mapping converters
                    default_value=strip_quotes(preset) if preset else None,
                    required=param in mandatory,
                )
            )
        ]

    return annotated
|
|
940
|
+
|
|
941
|
+
def format_as_provider_keyword(
    self, product_type: str, properties: Dict[str, Any]
) -> Dict[str, Any]:
    """Return provider equivalent keyword names from EODAG keywords.

    :param product_type: product type id
    :param properties: dict of properties to be formatted
    :return: dict of formatted properties
    """
    mapped = properties_from_json(properties, self.config.metadata_mapping)
    # Drop placeholder values produced by the metadata mapping for
    # properties that could not be resolved.
    usable = {
        key: value
        for key, value in mapped.items()
        if value not in (NOT_AVAILABLE, NOT_MAPPED)
    }
    return format_query_params(product_type, self.config, usable)
|
|
960
|
+
|
|
961
|
+
def _fetch_data(self, url: str) -> Any:
    """
    fetches from a provider elements like constraints or forms.

    :param url: url from which the constraints can be fetched
    :returns: json file content fetched from the provider, or an empty
        list when no url is configured
    """
    if not url:
        return []

    # only pass authentication along when a usable requests auth object exists
    request_auth = None
    candidate = getattr(self, "auth", None)
    if isinstance(candidate, AuthBase):
        request_auth = candidate

    return fetch_json(
        url,
        auth=request_auth,
        timeout=getattr(self.config, "timeout", HTTP_REQ_TIMEOUT),
    )
|
|
119
978
|
|
|
120
979
|
def normalize_results(
|
|
121
980
|
self, results: RawSearchResult, **kwargs: Any
|
|
@@ -126,6 +985,7 @@ class BuildPostSearchResult(PostJsonSearch):
|
|
|
126
985
|
:param kwargs: Search arguments
|
|
127
986
|
:returns: list of single :class:`~eodag.api.product._product.EOProduct`
|
|
128
987
|
"""
|
|
988
|
+
|
|
129
989
|
product_type = kwargs.get("productType")
|
|
130
990
|
|
|
131
991
|
result = results[0]
|
|
@@ -146,13 +1006,12 @@ class BuildPostSearchResult(PostJsonSearch):
|
|
|
146
1006
|
self.config.pagination["next_page_query_obj"].format()
|
|
147
1007
|
)
|
|
148
1008
|
unpaginated_query_params = {
|
|
149
|
-
k: v
|
|
1009
|
+
k: v
|
|
150
1010
|
for k, v in results.query_params.items()
|
|
151
1011
|
if (k, v) not in next_page_query_obj.items()
|
|
152
1012
|
}
|
|
153
1013
|
else:
|
|
154
1014
|
unpaginated_query_params = self.query_params
|
|
155
|
-
|
|
156
1015
|
# query hash, will be used to build a product id
|
|
157
1016
|
sorted_unpaginated_query_params = dict_items_recursive_sort(
|
|
158
1017
|
unpaginated_query_params
|
|
@@ -178,7 +1037,7 @@ class BuildPostSearchResult(PostJsonSearch):
|
|
|
178
1037
|
result.update(results.product_type_def_params)
|
|
179
1038
|
result = dict(result, **{k: v for k, v in kwargs.items() if v is not None})
|
|
180
1039
|
|
|
181
|
-
# parse
|
|
1040
|
+
# parse properties
|
|
182
1041
|
parsed_properties = properties_from_json(
|
|
183
1042
|
result,
|
|
184
1043
|
self.config.metadata_mapping,
|
|
@@ -190,16 +1049,36 @@ class BuildPostSearchResult(PostJsonSearch):
|
|
|
190
1049
|
|
|
191
1050
|
# build product id
|
|
192
1051
|
id_prefix = (product_type or self.provider).upper()
|
|
193
|
-
|
|
194
|
-
|
|
195
|
-
parsed_properties["startTimeFromAscendingNode"]
|
|
196
|
-
|
|
197
|
-
|
|
198
|
-
|
|
199
|
-
|
|
200
|
-
|
|
201
|
-
|
|
202
|
-
|
|
1052
|
+
if (
|
|
1053
|
+
"startTimeFromAscendingNode" in parsed_properties
|
|
1054
|
+
and parsed_properties["startTimeFromAscendingNode"] != "Not Available"
|
|
1055
|
+
and "completionTimeFromAscendingNode" in parsed_properties
|
|
1056
|
+
and parsed_properties["completionTimeFromAscendingNode"] != "Not Available"
|
|
1057
|
+
):
|
|
1058
|
+
product_id = "%s_%s_%s_%s" % (
|
|
1059
|
+
id_prefix,
|
|
1060
|
+
parsed_properties["startTimeFromAscendingNode"]
|
|
1061
|
+
.split("T")[0]
|
|
1062
|
+
.replace("-", ""),
|
|
1063
|
+
parsed_properties["completionTimeFromAscendingNode"]
|
|
1064
|
+
.split("T")[0]
|
|
1065
|
+
.replace("-", ""),
|
|
1066
|
+
query_hash,
|
|
1067
|
+
)
|
|
1068
|
+
elif (
|
|
1069
|
+
"startTimeFromAscendingNode" in parsed_properties
|
|
1070
|
+
and parsed_properties["startTimeFromAscendingNode"] != "Not Available"
|
|
1071
|
+
):
|
|
1072
|
+
product_id = "%s_%s_%s" % (
|
|
1073
|
+
id_prefix,
|
|
1074
|
+
parsed_properties["startTimeFromAscendingNode"]
|
|
1075
|
+
.split("T")[0]
|
|
1076
|
+
.replace("-", ""),
|
|
1077
|
+
query_hash,
|
|
1078
|
+
)
|
|
1079
|
+
else:
|
|
1080
|
+
product_id = f"{id_prefix}_{query_hash}"
|
|
1081
|
+
|
|
203
1082
|
parsed_properties["id"] = parsed_properties["title"] = product_id
|
|
204
1083
|
|
|
205
1084
|
# update downloadLink and orderLink
|
|
@@ -232,307 +1111,169 @@ class BuildPostSearchResult(PostJsonSearch):
|
|
|
232
1111
|
product,
|
|
233
1112
|
]
|
|
234
1113
|
|
|
1114
|
+
def count_hits(
    self, count_url: Optional[str] = None, result_type: Optional[str] = None
) -> int:
    """Return a constant hit count of 1.

    Build-search plugins always produce a single product per query, so no
    remote count request is ever issued.

    :param count_url: not used, only here because this method overwrites count_hits from the parent class
    :param result_type: not used, only here because this method overwrites count_hits from the parent class
    :return: always 1
    """
    return 1
|
|
241
1124
|
|
|
242
|
-
The available configuration parameters inherits from parent classes, with particularly
|
|
243
|
-
for this plugin:
|
|
244
1125
|
|
|
245
|
-
|
|
246
|
-
|
|
1126
|
+
class MeteoblueSearch(ECMWFSearch):
    """MeteoblueSearch search plugin.

    This plugin, which inherits from :class:`~eodag.plugins.search.build_search_result.ECMWFSearch`,
    performs a POST request and uses its result to build a single :class:`~eodag.api.search_result.SearchResult`
    object.

    The available configuration parameters are inherited from parent classes, with some a particularity
    for pagination for this plugin.

    :param provider: An eodag providers configuration dictionary
    :param config: Search plugin configuration:

        * :attr:`~eodag.config.PluginConfig.pagination` (:class:`~eodag.config.PluginConfig.Pagination`)
          (**mandatory**): The configuration of how the pagination is done on the provider. For
          this plugin it has the node:

          * :attr:`~eodag.config.PluginConfig.Pagination.next_page_query_obj` (``str``): The
            additional parameters needed to perform search. These parameters won't be included in
            the result. This must be a json dict formatted like ``{{"foo":"bar"}}`` because it
            will be passed to a :meth:`str.format` method before being loaded as json.
    """

    # NOTE(review): the PreparedSearch() defaults below are evaluated once at
    # definition time and shared across calls; do_search mutates its prep
    # argument — confirm callers always pass prep explicitly.

    def collect_search_urls(
        self,
        prep: PreparedSearch = PreparedSearch(),
        **kwargs: Any,
    ) -> Tuple[List[str], int]:
        """Wraps PostJsonSearch.collect_search_urls to force product count to 1

        :param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information for the search
        :param kwargs: keyword arguments used in the search
        :return: list of search url and number of results
        """
        # delegate URL collection, but discard the provider count
        urls, _ = super().collect_search_urls(prep, **kwargs)
        return urls, 1

    def do_search(
        self, prep: PreparedSearch = PreparedSearch(items_per_page=None), **kwargs: Any
    ) -> List[Dict[str, Any]]:
        """Perform the actual search request, and return result in a single element.

        :param prep: :class:`~eodag.plugins.search.PreparedSearch` object containing information for the search
        :param kwargs: keyword arguments to be used in the search
        :return: list containing the results from the provider in json format
        """

        # only the first collected URL is queried: one request, one result
        prep.url = prep.search_urls[0]
        prep.info_message = f"Sending search request: {prep.url}"
        prep.exception_message = (
            f"Skipping error while searching for {self.provider}"
            f" {self.__class__.__name__} instance"
        )
        response = self._request(prep)

        # wrap the single JSON response in a list for normalize_results
        return [response.json()]

    def build_query_string(
        self, product_type: str, **kwargs: Any
    ) -> Tuple[Dict[str, Any], str]:
        """Build The query string using the search parameters

        :param product_type: product type id
        :param kwargs: keyword arguments to be used in the query string
        :return: formatted query params and encode query string
        """
        # bypass ECMWFSearch's implementation and use the plain
        # QueryStringSearch behavior
        return QueryStringSearch.build_query_string(
            self, product_type=product_type, **kwargs
        )
|
|
341
1195
|
|
|
342
|
-
def get_product_type_cfg(self, key: str, default: Any = None) -> Any:
|
|
343
|
-
"""
|
|
344
|
-
Get the value of a configuration option specific to the current product type.
|
|
345
1196
|
|
|
346
|
-
|
|
347
|
-
|
|
348
|
-
|
|
1197
|
+
class WekeoECMWFSearch(ECMWFSearch):
|
|
1198
|
+
"""
|
|
1199
|
+
WekeoECMWFSearch search plugin.
|
|
349
1200
|
|
|
350
|
-
|
|
351
|
-
|
|
1201
|
+
This plugin, which inherits from :class:`~eodag.plugins.search.build_search_result.ECMWFSearch`,
|
|
1202
|
+
performs a POST request and uses its result to build a single :class:`~eodag.api.search_result.SearchResult`
|
|
1203
|
+
object. In contrast to ECMWFSearch or MeteoblueSearch, the products are only build with information
|
|
1204
|
+
returned by the provider.
|
|
352
1205
|
|
|
353
|
-
|
|
354
|
-
|
|
355
|
-
product_type_cfg = getattr(self.config, "product_type_config", {})
|
|
356
|
-
non_none_cfg = {k: v for k, v in product_type_cfg.items() if v}
|
|
1206
|
+
The available configuration parameters are inherited from parent classes, with some a particularity
|
|
1207
|
+
for pagination for this plugin.
|
|
357
1208
|
|
|
358
|
-
|
|
1209
|
+
:param provider: An eodag providers configuration dictionary
|
|
1210
|
+
:param config: Search plugin configuration:
|
|
359
1211
|
|
|
360
|
-
|
|
361
|
-
|
|
1212
|
+
* :attr:`~eodag.config.PluginConfig.pagination` (:class:`~eodag.config.PluginConfig.Pagination`)
|
|
1213
|
+
(**mandatory**): The configuration of how the pagination is done on the provider. For
|
|
1214
|
+
this plugin it has the node:
|
|
362
1215
|
|
|
363
|
-
|
|
364
|
-
|
|
365
|
-
|
|
366
|
-
|
|
1216
|
+
* :attr:`~eodag.config.PluginConfig.Pagination.next_page_query_obj` (``str``): The
|
|
1217
|
+
additional parameters needed to perform search. These parameters won't be included in
|
|
1218
|
+
the result. This must be a json dict formatted like ``{{"foo":"bar"}}`` because it
|
|
1219
|
+
will be passed to a :meth:`str.format` method before being loaded as json.
|
|
1220
|
+
"""
|
|
367
1221
|
|
|
368
|
-
|
|
369
|
-
|
|
370
|
-
|
|
371
|
-
|
|
372
|
-
# if available, update search params using datacube query-string
|
|
373
|
-
_dc_qp = geojson.loads(unquote_plus(unquote_plus(_dc_qs)))
|
|
374
|
-
if "/to/" in _dc_qp.get("date", ""):
|
|
375
|
-
(
|
|
376
|
-
params["startTimeFromAscendingNode"],
|
|
377
|
-
params["completionTimeFromAscendingNode"],
|
|
378
|
-
) = _dc_qp["date"].split("/to/")
|
|
379
|
-
elif "/" in _dc_qp.get("date", ""):
|
|
380
|
-
(
|
|
381
|
-
params["startTimeFromAscendingNode"],
|
|
382
|
-
params["completionTimeFromAscendingNode"],
|
|
383
|
-
) = _dc_qp["date"].split("/")
|
|
384
|
-
elif _dc_qp.get("date", None):
|
|
385
|
-
params["startTimeFromAscendingNode"] = params[
|
|
386
|
-
"completionTimeFromAscendingNode"
|
|
387
|
-
] = _dc_qp["date"]
|
|
1222
|
+
def normalize_results(
    self, results: RawSearchResult, **kwargs: Any
) -> List[EOProduct]:
    """Build :class:`~eodag.api.product._product.EOProduct` from provider result

    :param results: Raw provider result as single dict in list
    :param kwargs: Search arguments
    :returns: list of single :class:`~eodag.api.product._product.EOProduct`
    """

    # formating of orderLink requires access to the productType value.
    results.data = [
        {**result, **results.product_type_def_params} for result in results
    ]

    normalized = QueryStringSearch.normalize_results(self, results, **kwargs)

    if not normalized:
        return normalized

    # encode the full query so it can be replayed later through _dc_qs
    query_params_encoded = quote_plus(orjson.dumps(results.query_params))
    for product in normalized:
        # merge query params into product properties and rename keys to
        # their eodag form (e.g. "ecmwf:<name>")
        properties = {**product.properties, **results.query_params}
        properties["_dc_qs"] = query_params_encoded
        product.properties = {ecmwf_format(k): v for k, v in properties.items()}

    return normalized
|
|
425
1249
|
|
|
426
|
-
|
|
427
|
-
|
|
428
|
-
end_date = isoparse(params["completionTimeFromAscendingNode"])
|
|
429
|
-
if not end_date_excluded and end_date == end_date.replace(
|
|
430
|
-
hour=0, minute=0, second=0, microsecond=0
|
|
431
|
-
):
|
|
432
|
-
end_date += timedelta(days=-1)
|
|
433
|
-
params["completionTimeFromAscendingNode"] = end_date.isoformat()
|
|
1250
|
+
def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]:
    """Should perform the actual search request.

    :param args: arguments to be used in the search
    :param kwargs: keyword arguments to be used in the search
    :return: list containing the results from the provider in json format
    """
    # deliberately call QueryStringSearch's implementation, skipping the
    # intermediate parent override — presumably so the provider is actually
    # queried; confirm against ECMWFSearch.do_search
    return QueryStringSearch.do_search(self, *args, **kwargs)
|
|
438
1258
|
|
|
439
|
-
def
|
|
440
|
-
self, **kwargs: Any
|
|
441
|
-
) ->
|
|
442
|
-
"""
|
|
1259
|
+
def build_query_string(
|
|
1260
|
+
self, product_type: str, **kwargs: Any
|
|
1261
|
+
) -> Tuple[Dict[str, Any], str]:
|
|
1262
|
+
"""Build The query string using the search parameters
|
|
443
1263
|
|
|
444
|
-
:param
|
|
445
|
-
|
|
446
|
-
:
|
|
1264
|
+
:param product_type: product type id
|
|
1265
|
+
:param kwargs: keyword arguments to be used in the query string
|
|
1266
|
+
:return: formatted query params and encode query string
|
|
447
1267
|
"""
|
|
448
|
-
|
|
449
|
-
|
|
450
|
-
|
|
451
|
-
|
|
452
|
-
|
|
453
|
-
|
|
1268
|
+
# Reorder kwargs to make sure year/month/day/time if set overwrite default datetime.
|
|
1269
|
+
# strip_quotes to remove duplicated quotes like "'1_1'" produced by convertors like to_geojson.
|
|
1270
|
+
priority_keys = [
|
|
1271
|
+
"startTimeFromAscendingNode",
|
|
1272
|
+
"completionTimeFromAscendingNode",
|
|
1273
|
+
]
|
|
1274
|
+
ordered_kwargs = {k: kwargs[k] for k in priority_keys if k in kwargs}
|
|
1275
|
+
ordered_kwargs.update({k: strip_quotes(v) for k, v in kwargs.items()})
|
|
454
1276
|
|
|
455
|
-
|
|
456
|
-
|
|
1277
|
+
return QueryStringSearch.build_query_string(
|
|
1278
|
+
self, product_type=product_type, **ordered_kwargs
|
|
457
1279
|
)
|
|
458
|
-
user_provider_product_type = kwargs.pop("dataset", None)
|
|
459
|
-
if (
|
|
460
|
-
user_provider_product_type
|
|
461
|
-
and user_provider_product_type != provider_product_type
|
|
462
|
-
):
|
|
463
|
-
raise ValidationError(
|
|
464
|
-
f"Cannot change dataset from {provider_product_type} to {user_provider_product_type}"
|
|
465
|
-
)
|
|
466
|
-
|
|
467
|
-
# defaults
|
|
468
|
-
default_queryables = self._get_defaults_as_queryables(product_type)
|
|
469
|
-
# remove dataset from queryables
|
|
470
|
-
default_queryables.pop("dataset", None)
|
|
471
|
-
|
|
472
|
-
non_empty_kwargs = {k: v for k, v in kwargs.items() if v}
|
|
473
|
-
|
|
474
|
-
if "{" in constraints_file_url:
|
|
475
|
-
constraints_file_url = constraints_file_url.format(
|
|
476
|
-
dataset=provider_product_type
|
|
477
|
-
)
|
|
478
|
-
constraints = fetch_constraints(constraints_file_url, self)
|
|
479
|
-
if not constraints:
|
|
480
|
-
return default_queryables
|
|
481
|
-
|
|
482
|
-
constraint_params: Dict[str, Dict[str, Set[Any]]] = {}
|
|
483
|
-
if len(kwargs) == 0:
|
|
484
|
-
# get values from constraints without additional filters
|
|
485
|
-
for constraint in constraints:
|
|
486
|
-
for key in constraint.keys():
|
|
487
|
-
if key in constraint_params:
|
|
488
|
-
constraint_params[key]["enum"].update(constraint[key])
|
|
489
|
-
else:
|
|
490
|
-
constraint_params[key] = {}
|
|
491
|
-
constraint_params[key]["enum"] = set(constraint[key])
|
|
492
|
-
else:
|
|
493
|
-
# get values from constraints with additional filters
|
|
494
|
-
constraints_input_params = {k: v for k, v in non_empty_kwargs.items()}
|
|
495
|
-
constraint_params = get_constraint_queryables_with_additional_params(
|
|
496
|
-
constraints, constraints_input_params, self, product_type
|
|
497
|
-
)
|
|
498
|
-
# query params that are not in constraints but might be default queryables
|
|
499
|
-
if len(constraint_params) == 1 and "not_available" in constraint_params:
|
|
500
|
-
not_queryables: Set[str] = set()
|
|
501
|
-
for constraint_param in constraint_params["not_available"]["enum"]:
|
|
502
|
-
param = CommonQueryables.get_queryable_from_alias(constraint_param)
|
|
503
|
-
if param in dict(
|
|
504
|
-
CommonQueryables.model_fields, **default_queryables
|
|
505
|
-
):
|
|
506
|
-
non_empty_kwargs.pop(constraint_param)
|
|
507
|
-
else:
|
|
508
|
-
not_queryables.add(constraint_param)
|
|
509
|
-
if not_queryables:
|
|
510
|
-
raise ValidationError(
|
|
511
|
-
f"parameter(s) {not_queryables} not queryable"
|
|
512
|
-
)
|
|
513
|
-
else:
|
|
514
|
-
# get constraints again without common queryables
|
|
515
|
-
constraint_params = (
|
|
516
|
-
get_constraint_queryables_with_additional_params(
|
|
517
|
-
constraints, non_empty_kwargs, self, product_type
|
|
518
|
-
)
|
|
519
|
-
)
|
|
520
|
-
|
|
521
|
-
field_definitions: Dict[str, Any] = {}
|
|
522
|
-
for json_param, json_mtd in constraint_params.items():
|
|
523
|
-
param = (
|
|
524
|
-
get_queryable_from_provider(
|
|
525
|
-
json_param, self.get_metadata_mapping(product_type)
|
|
526
|
-
)
|
|
527
|
-
or json_param
|
|
528
|
-
)
|
|
529
|
-
default = kwargs.get(param, None) or self.config.products.get(
|
|
530
|
-
product_type, {}
|
|
531
|
-
).get(param, None)
|
|
532
|
-
annotated_def = json_field_definition_to_python(
|
|
533
|
-
json_mtd, default_value=default, required=True
|
|
534
|
-
)
|
|
535
|
-
field_definitions[param] = get_args(annotated_def)
|
|
536
|
-
|
|
537
|
-
python_queryables = create_model("m", **field_definitions).model_fields
|
|
538
|
-
return {**default_queryables, **model_fields_to_annotated(python_queryables)}
|