eodag 2.12.1__py3-none-any.whl → 3.0.0b2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eodag/api/core.py +440 -321
- eodag/api/product/__init__.py +5 -1
- eodag/api/product/_assets.py +57 -2
- eodag/api/product/_product.py +89 -68
- eodag/api/product/metadata_mapping.py +181 -66
- eodag/api/search_result.py +48 -1
- eodag/cli.py +20 -6
- eodag/config.py +95 -6
- eodag/plugins/apis/base.py +8 -165
- eodag/plugins/apis/ecmwf.py +36 -24
- eodag/plugins/apis/usgs.py +40 -24
- eodag/plugins/authentication/aws_auth.py +2 -2
- eodag/plugins/authentication/header.py +31 -6
- eodag/plugins/authentication/keycloak.py +13 -84
- eodag/plugins/authentication/oauth.py +3 -3
- eodag/plugins/authentication/openid_connect.py +256 -46
- eodag/plugins/authentication/qsauth.py +3 -0
- eodag/plugins/authentication/sas_auth.py +8 -1
- eodag/plugins/authentication/token.py +92 -46
- eodag/plugins/authentication/token_exchange.py +120 -0
- eodag/plugins/download/aws.py +86 -91
- eodag/plugins/download/base.py +72 -40
- eodag/plugins/download/http.py +607 -264
- eodag/plugins/download/s3rest.py +28 -15
- eodag/plugins/manager.py +74 -57
- eodag/plugins/search/__init__.py +36 -0
- eodag/plugins/search/base.py +225 -18
- eodag/plugins/search/build_search_result.py +389 -32
- eodag/plugins/search/cop_marine.py +378 -0
- eodag/plugins/search/creodias_s3.py +15 -14
- eodag/plugins/search/csw.py +5 -7
- eodag/plugins/search/data_request_search.py +44 -20
- eodag/plugins/search/qssearch.py +508 -203
- eodag/plugins/search/static_stac_search.py +99 -36
- eodag/resources/constraints/climate-dt.json +13 -0
- eodag/resources/constraints/extremes-dt.json +8 -0
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +1897 -34
- eodag/resources/providers.yml +3539 -3277
- eodag/resources/stac.yml +48 -54
- eodag/resources/stac_api.yml +71 -25
- eodag/resources/stac_provider.yml +5 -0
- eodag/resources/user_conf_template.yml +51 -3
- eodag/rest/__init__.py +6 -0
- eodag/rest/cache.py +70 -0
- eodag/rest/config.py +68 -0
- eodag/rest/constants.py +27 -0
- eodag/rest/core.py +757 -0
- eodag/rest/server.py +397 -258
- eodag/rest/stac.py +438 -307
- eodag/rest/types/collections_search.py +44 -0
- eodag/rest/types/eodag_search.py +232 -43
- eodag/rest/types/{stac_queryables.py → queryables.py} +81 -43
- eodag/rest/types/stac_search.py +277 -0
- eodag/rest/utils/__init__.py +216 -0
- eodag/rest/utils/cql_evaluate.py +119 -0
- eodag/rest/utils/rfc3339.py +65 -0
- eodag/types/__init__.py +99 -9
- eodag/types/bbox.py +15 -14
- eodag/types/download_args.py +31 -0
- eodag/types/search_args.py +58 -7
- eodag/types/whoosh.py +81 -0
- eodag/utils/__init__.py +72 -9
- eodag/utils/constraints.py +37 -37
- eodag/utils/exceptions.py +23 -17
- eodag/utils/repr.py +113 -0
- eodag/utils/requests.py +138 -0
- eodag/utils/rest.py +104 -0
- eodag/utils/stac_reader.py +100 -16
- {eodag-2.12.1.dist-info → eodag-3.0.0b2.dist-info}/METADATA +65 -44
- eodag-3.0.0b2.dist-info/RECORD +110 -0
- {eodag-2.12.1.dist-info → eodag-3.0.0b2.dist-info}/WHEEL +1 -1
- {eodag-2.12.1.dist-info → eodag-3.0.0b2.dist-info}/entry_points.txt +6 -5
- eodag/plugins/apis/cds.py +0 -540
- eodag/rest/utils.py +0 -1133
- eodag-2.12.1.dist-info/RECORD +0 -94
- {eodag-2.12.1.dist-info → eodag-3.0.0b2.dist-info}/LICENSE +0 -0
- {eodag-2.12.1.dist-info → eodag-3.0.0b2.dist-info}/top_level.txt +0 -0
eodag/plugins/search/build_search_result.py

@@ -19,17 +19,47 @@ from __future__ import annotations
 
 import hashlib
 import logging
-from
+from datetime import datetime, timedelta, timezone
+from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, cast
 from urllib.parse import quote_plus, unquote_plus
 
 import geojson
 import orjson
-from
+from dateutil.parser import isoparse
+from jsonpath_ng import Child, Fields, Root
+from pydantic import create_model
+from pydantic.fields import FieldInfo
+from typing_extensions import get_args
 
 from eodag.api.product import EOProduct
-from eodag.api.product.metadata_mapping import
+from eodag.api.product.metadata_mapping import (
+    NOT_AVAILABLE,
+    NOT_MAPPED,
+    get_queryable_from_provider,
+    mtd_cfg_as_conversion_and_querypath,
+    properties_from_json,
+)
+from eodag.api.search_result import RawSearchResult
+from eodag.plugins.search import PreparedSearch
+from eodag.plugins.search.base import Search
 from eodag.plugins.search.qssearch import PostJsonSearch
-from eodag.
+from eodag.types import json_field_definition_to_python, model_fields_to_annotated
+from eodag.types.queryables import CommonQueryables
+from eodag.utils import (
+    DEFAULT_MISSION_START_DATE,
+    Annotated,
+    deepcopy,
+    dict_items_recursive_sort,
+    get_geometry_from_various,
+)
+from eodag.utils.constraints import (
+    fetch_constraints,
+    get_constraint_queryables_with_additional_params,
+)
+from eodag.utils.exceptions import ValidationError
+
+if TYPE_CHECKING:
+    from eodag.config import PluginConfig
 
 logger = logging.getLogger("eodag.search.build_search_result")
 
@@ -68,30 +98,29 @@ class BuildPostSearchResult(PostJsonSearch):
 
     def collect_search_urls(
         self,
-
-        items_per_page: Optional[int] = None,
-        count: bool = True,
+        prep: PreparedSearch = PreparedSearch(),
         **kwargs: Any,
     ) -> Tuple[List[str], int]:
         """Wraps PostJsonSearch.collect_search_urls to force product count to 1"""
-        urls, _ = super(BuildPostSearchResult, self).collect_search_urls(
-            page=page, items_per_page=items_per_page, count=count, **kwargs
-        )
+        urls, _ = super(BuildPostSearchResult, self).collect_search_urls(prep, **kwargs)
         return urls, 1
 
-    def do_search(
+    def do_search(
+        self, prep: PreparedSearch = PreparedSearch(items_per_page=None), **kwargs: Any
+    ) -> List[Dict[str, Any]]:
         """Perform the actual search request, and return result in a single element."""
-
-
-
-
-
-            f"{self.__class__.__name__} instance:",
+        prep.url = prep.search_urls[0]
+        prep.info_message = f"Sending search request: {prep.url}"
+        prep.exception_message = (
+            f"Skipping error while searching for {self.provider} "
+            f"{self.__class__.__name__} instance:"
         )
+        response = self._request(prep)
+
         return [response.json()]
 
     def normalize_results(
-        self, results:
+        self, results: RawSearchResult, **kwargs: Any
     ) -> List[EOProduct]:
         """Build :class:`~eodag.api.product._product.EOProduct` from provider result
 
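The hunk above replaces the old positional `page` / `items_per_page` / `count` arguments with a single `PreparedSearch` container. A minimal sketch of how a caller might drive the new `do_search()` interface, using only the attributes visible in this hunk (`items_per_page`, `search_urls`, `url`); the endpoint URL is made up:

```python
# Illustrative sketch only -- not part of the diff. The endpoint URL is
# hypothetical; attribute names are taken from the hunk above.
from eodag.plugins.search import PreparedSearch

prep = PreparedSearch(items_per_page=None)  # pagination is irrelevant for a built result
prep.search_urls = ["https://provider.example/api/search"]  # hypothetical endpoint

# do_search() then picks the single URL and issues one request through the
# plugin's _request() helper:
#   prep.url = prep.search_urls[0]
#   response = plugin._request(prep)
#   results = [response.json()]
```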
@@ -110,22 +139,20 @@ class BuildPostSearchResult(PostJsonSearch):
         _dc_qs = kwargs.pop("_dc_qs", None)
         if _dc_qs is not None:
             qs = unquote_plus(unquote_plus(_dc_qs))
-
-                qs
-            )
+            sorted_unpaginated_query_params = geojson.loads(qs)
         else:
             # update result with query parameters without pagination (or search-only params)
             if isinstance(
                 self.config.pagination["next_page_query_obj"], str
-            ) and hasattr(
-                unpaginated_query_params =
+            ) and hasattr(results, "query_params_unpaginated"):
+                unpaginated_query_params = results.query_params_unpaginated
             elif isinstance(self.config.pagination["next_page_query_obj"], str):
                 next_page_query_obj = orjson.loads(
                     self.config.pagination["next_page_query_obj"].format()
                 )
                 unpaginated_query_params = {
                     k: v[0] if (isinstance(v, list) and len(v) == 1) else v
-                    for k, v in
+                    for k, v in results.query_params.items()
                     if (k, v) not in next_page_query_obj.items()
                 }
             else:
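For reference, a small standalone example of the comprehension shown above, which drops pagination-only entries from the provider query parameters and unwraps single-element lists (the parameter names here are invented for illustration):

```python
# Standalone illustration of the filtering logic above; parameter names are
# made up, only the comprehension mirrors the plugin code.
import orjson

next_page_query_obj = orjson.loads('{"page": 1, "limit": 20}')
query_params = {"variable": ["2m_temperature"], "page": 1, "limit": 20}

unpaginated_query_params = {
    k: v[0] if (isinstance(v, list) and len(v) == 1) else v
    for k, v in query_params.items()
    if (k, v) not in next_page_query_obj.items()
}
print(unpaginated_query_params)  # {'variable': '2m_temperature'}
```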
@@ -135,14 +162,25 @@ class BuildPostSearchResult(PostJsonSearch):
         sorted_unpaginated_query_params = dict_items_recursive_sort(
             unpaginated_query_params
         )
-        qs = geojson.dumps(sorted_unpaginated_query_params)
 
-
+        # use all available query_params to parse properties
+        result = dict(
+            result,
+            **sorted_unpaginated_query_params,
+            qs=sorted_unpaginated_query_params,
+        )
+
+        # remove unwanted query params
+        for param in getattr(self.config, "remove_from_query", []):
+            sorted_unpaginated_query_params.pop(param, None)
 
-
+        qs = geojson.dumps(sorted_unpaginated_query_params)
 
-
+        query_hash = hashlib.sha1(str(qs).encode("UTF-8")).hexdigest()
+
+        # update result with product_type_def_params and search args if not None (and not auth)
         kwargs.pop("auth", None)
+        result.update(results.product_type_def_params)
         result = dict(result, **{k: v for k, v in kwargs.items() if v is not None})
 
         # parse porperties
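The new `query_hash` above is a SHA-1 digest of the sorted, unpaginated query string; a self-contained sketch with a made-up parameter dict:

```python
# How the query hash is computed from the sorted query parameters; the dict
# content is invented for the example.
import hashlib

import geojson

sorted_unpaginated_query_params = {"format": "grib", "variable": "2m_temperature"}
qs = geojson.dumps(sorted_unpaginated_query_params)
query_hash = hashlib.sha1(str(qs).encode("UTF-8")).hexdigest()
print(query_hash)  # stable identifier reused in the product id below
```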
@@ -157,22 +195,28 @@ class BuildPostSearchResult(PostJsonSearch):
 
         # build product id
         id_prefix = (product_type or self.provider).upper()
-        product_id = "%s_%s_%s" % (
+        product_id = "%s_%s_%s_%s" % (
             id_prefix,
             parsed_properties["startTimeFromAscendingNode"]
             .split("T")[0]
             .replace("-", ""),
+            parsed_properties["completionTimeFromAscendingNode"]
+            .split("T")[0]
+            .replace("-", ""),
             query_hash,
         )
         parsed_properties["id"] = parsed_properties["title"] = product_id
 
-        # update downloadLink
-        parsed_properties["downloadLink"] += f"?{qs}"
+        # update downloadLink and orderLink
         parsed_properties["_dc_qs"] = quote_plus(qs)
+        if parsed_properties["downloadLink"] != "Not Available":
+            parsed_properties["downloadLink"] += f"?{qs}"
 
         # parse metadata needing downloadLink
+        dl_path = Fields("downloadLink")
+        dl_path_from_root = Child(Root(), dl_path)
         for param, mapping in self.config.metadata_mapping.items():
-            if
+            if dl_path in mapping or dl_path_from_root in mapping:
                 parsed_properties.update(
                     properties_from_json(parsed_properties, {param: mapping})
                 )
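With the extra `%s` added above, built products are now identified by prefix, start date, end date, and query hash; a quick illustration with made-up values:

```python
# Made-up values: shows the id layout produced by the %-format above.
id_prefix = "ERA5_SL"        # hypothetical product type prefix
start = "2020-01-01T00:00:00Z"
end = "2020-01-31T00:00:00Z"
query_hash = "d2bf0f2dbb"    # truncated sha1 of the query string

product_id = "%s_%s_%s_%s" % (
    id_prefix,
    start.split("T")[0].replace("-", ""),
    end.split("T")[0].replace("-", ""),
    query_hash,
)
print(product_id)  # ERA5_SL_20200101_20200131_d2bf0f2dbb
```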
@@ -192,3 +236,316 @@ class BuildPostSearchResult(PostJsonSearch):
         return [
             product,
         ]
+
+
+class BuildSearchResult(BuildPostSearchResult):
+    """BuildSearchResult search plugin.
+
+    This plugin builds a single :class:`~eodag.api.search_result.SearchResult` object
+    using given query parameters as product properties.
+
+    The available configuration parameters inherits from parent classes, with particularly
+    for this plugin:
+
+    - **end_date_excluded**: Set to `False` if provider does not include end date to
+      search
+
+    - **remove_from_query**: List of parameters used to parse metadata but that must
+      not be included to the query
+
+    - **constraints_file_url**: url of the constraint file used to build queryables
+
+    :param provider: An eodag providers configuration dictionary
+    :type provider: dict
+    :param config: Path to the user configuration file
+    :type config: str
+    """
+
+    def __init__(self, provider: str, config: PluginConfig) -> None:
+        # init self.config.metadata_mapping using Search Base plugin
+        Search.__init__(self, provider, config)
+
+        self.config.__dict__.setdefault("api_endpoint", "")
+
+        # needed by QueryStringSearch.build_query_string / format_free_text_search
+        self.config.__dict__.setdefault("free_text_search_operations", {})
+        # needed for compatibility
+        self.config.__dict__.setdefault("pagination", {"next_page_query_obj": "{{}}"})
+
+        # parse jsonpath on init: product type specific metadata-mapping
+        for product_type in self.config.products.keys():
+            if "metadata_mapping" in self.config.products[product_type].keys():
+                self.config.products[product_type][
+                    "metadata_mapping"
+                ] = mtd_cfg_as_conversion_and_querypath(
+                    self.config.products[product_type]["metadata_mapping"]
+                )
+                # Complete and ready to use product type specific metadata-mapping
+                product_type_metadata_mapping = deepcopy(self.config.metadata_mapping)
+
+                # update config using provider product type definition metadata_mapping
+                # from another product
+                other_product_for_mapping = cast(
+                    str,
+                    self.config.products[product_type].get(
+                        "metadata_mapping_from_product", ""
+                    ),
+                )
+                if other_product_for_mapping:
+                    other_product_type_def_params = self.get_product_type_def_params(
+                        other_product_for_mapping,
+                    )
+                    product_type_metadata_mapping.update(
+                        other_product_type_def_params.get("metadata_mapping", {})
+                    )
+                # from current product
+                product_type_metadata_mapping.update(
+                    self.config.products[product_type]["metadata_mapping"]
+                )
+
+                self.config.products[product_type][
+                    "metadata_mapping"
+                ] = product_type_metadata_mapping
+
+    def do_search(self, *args: Any, **kwargs: Any) -> List[Dict[str, Any]]:
+        """Should perform the actual search request."""
+        return [{}]
+
+    def query(
+        self,
+        prep: PreparedSearch = PreparedSearch(),
+        **kwargs: Any,
+    ) -> Tuple[List[EOProduct], Optional[int]]:
+        """Build ready-to-download SearchResult"""
+
+        self._preprocess_search_params(kwargs)
+
+        return BuildPostSearchResult.query(self, prep, **kwargs)
+
+    def clear(self) -> None:
+        """Clear search context"""
+        pass
+
+    def build_query_string(
+        self, product_type: str, **kwargs: Any
+    ) -> Tuple[Dict[str, Any], str]:
+        """Build The query string using the search parameters"""
+        # parse kwargs as properties as they might be needed to build the query
+        parsed_properties = properties_from_json(
+            kwargs,
+            self.config.metadata_mapping,
+        )
+        available_properties = {
+            k: v
+            for k, v in parsed_properties.items()
+            if v not in [NOT_AVAILABLE, NOT_MAPPED]
+        }
+
+        # build and return the query
+        return BuildPostSearchResult.build_query_string(
+            self, product_type=product_type, **available_properties
+        )
+
+    def get_product_type_cfg(self, key: str, default: Any = None) -> Any:
+        """
+        Get the value of a configuration option specific to the current product type.
+
+        This method retrieves the value of a configuration option from the
+        `_product_type_config` attribute. If the option is not found, the provided
+        default value is returned.
+
+        :param key: The configuration option key.
+        :type key: str
+        :param default: The default value to be returned if the option is not found (default is None).
+        :type default: Any
+
+        :return: The value of the specified configuration option or the default value.
+        :rtype: Any
+        """
+        product_type_cfg = getattr(self.config, "product_type_config", {})
+        non_none_cfg = {k: v for k, v in product_type_cfg.items() if v}
+
+        return non_none_cfg.get(key, default)
+
+    def _preprocess_search_params(self, params: Dict[str, Any]) -> None:
+        """Preprocess search parameters before making a request to the CDS API.
+
+        This method is responsible for checking and updating the provided search parameters
+        to ensure that required parameters like 'productType', 'startTimeFromAscendingNode',
+        'completionTimeFromAscendingNode', and 'geometry' are properly set. If not specified
+        in the input parameters, default values or values from the configuration are used.
+
+        :param params: Search parameters to be preprocessed.
+        :type params: dict
+        """
+        _dc_qs = params.get("_dc_qs", None)
+        if _dc_qs is not None:
+            # if available, update search params using datacube query-string
+            _dc_qp = geojson.loads(unquote_plus(unquote_plus(_dc_qs)))
+            if "/to/" in _dc_qp.get("date", ""):
+                (
+                    params["startTimeFromAscendingNode"],
+                    params["completionTimeFromAscendingNode"],
+                ) = _dc_qp["date"].split("/to/")
+            elif "/" in _dc_qp.get("date", ""):
+                (
+                    params["startTimeFromAscendingNode"],
+                    params["completionTimeFromAscendingNode"],
+                ) = _dc_qp["date"].split("/")
+            elif _dc_qp.get("date", None):
+                params["startTimeFromAscendingNode"] = params[
+                    "completionTimeFromAscendingNode"
+                ] = _dc_qp["date"]
+
+            if "/" in _dc_qp.get("area", ""):
+                params["geometry"] = _dc_qp["area"].split("/")
+
+        non_none_params = {k: v for k, v in params.items() if v}
+
+        # productType
+        dataset = params.get("dataset", None)
+        params["productType"] = non_none_params.get("productType", dataset)
+
+        # dates
+        mission_start_dt = datetime.fromisoformat(
+            self.get_product_type_cfg(
+                "missionStartDate", DEFAULT_MISSION_START_DATE
+            ).replace(
+                "Z", "+00:00"
+            )  # before 3.11
+        )
+
+        default_end_from_cfg = self.config.products.get(params["productType"], {}).get(
+            "_default_end_date", None
+        )
+        default_end_str = (
+            default_end_from_cfg
+            or (
+                datetime.now(timezone.utc)
+                if params.get("startTimeFromAscendingNode")
+                else mission_start_dt + timedelta(days=1)
+            ).isoformat()
+        )
+
+        params["startTimeFromAscendingNode"] = non_none_params.get(
+            "startTimeFromAscendingNode", mission_start_dt.isoformat()
+        )
+        params["completionTimeFromAscendingNode"] = non_none_params.get(
+            "completionTimeFromAscendingNode", default_end_str
+        )
+
+        # temporary _date parameter mixing start & end
+        end_date_excluded = getattr(self.config, "end_date_excluded", True)
+        end_date = isoparse(params["completionTimeFromAscendingNode"])
+        if not end_date_excluded and end_date == end_date.replace(
+            hour=0, minute=0, second=0, microsecond=0
+        ):
+            end_date += timedelta(days=-1)
+            params["completionTimeFromAscendingNode"] = end_date.isoformat()
+
+        # geometry
+        if "geometry" in params:
+            params["geometry"] = get_geometry_from_various(geometry=params["geometry"])
+
+    def discover_queryables(
+        self, **kwargs: Any
+    ) -> Optional[Dict[str, Annotated[Any, FieldInfo]]]:
+        """Fetch queryables list from provider using its constraints file
+
+        :param kwargs: additional filters for queryables (`productType` and other search
+                       arguments)
+        :type kwargs: Any
+        :returns: fetched queryable parameters dict
+        :rtype: Optional[Dict[str, Annotated[Any, FieldInfo]]]
+        """
+        constraints_file_url = getattr(self.config, "constraints_file_url", "")
+        if not constraints_file_url:
+            return {}
+        product_type = kwargs.pop("productType", None)
+        if not product_type:
+            return {}
+
+        provider_product_type = self.config.products.get(product_type, {}).get(
+            "dataset", None
+        )
+        user_provider_product_type = kwargs.pop("dataset", None)
+        if (
+            user_provider_product_type
+            and user_provider_product_type != provider_product_type
+        ):
+            raise ValidationError(
+                f"Cannot change dataset from {provider_product_type} to {user_provider_product_type}"
+            )
+
+        # defaults
+        default_queryables = self._get_defaults_as_queryables(product_type)
+        # remove dataset from queryables
+        default_queryables.pop("dataset", None)
+
+        non_empty_kwargs = {k: v for k, v in kwargs.items() if v}
+
+        if "{" in constraints_file_url:
+            constraints_file_url = constraints_file_url.format(
+                dataset=provider_product_type
+            )
+        constraints = fetch_constraints(constraints_file_url, self)
+        if not constraints:
+            return default_queryables
+
+        constraint_params: Dict[str, Dict[str, Set[Any]]] = {}
+        if len(kwargs) == 0:
+            # get values from constraints without additional filters
+            for constraint in constraints:
+                for key in constraint.keys():
+                    if key in constraint_params:
+                        constraint_params[key]["enum"].update(constraint[key])
+                    else:
+                        constraint_params[key] = {}
+                        constraint_params[key]["enum"] = set(constraint[key])
+        else:
+            # get values from constraints with additional filters
+            constraints_input_params = {k: v for k, v in non_empty_kwargs.items()}
+            constraint_params = get_constraint_queryables_with_additional_params(
+                constraints, constraints_input_params, self, product_type
+            )
+            # query params that are not in constraints but might be default queryables
+            if len(constraint_params) == 1 and "not_available" in constraint_params:
+                not_queryables: Set[str] = set()
+                for constraint_param in constraint_params["not_available"]["enum"]:
+                    param = CommonQueryables.get_queryable_from_alias(constraint_param)
+                    if param in dict(
+                        CommonQueryables.model_fields, **default_queryables
+                    ):
+                        non_empty_kwargs.pop(constraint_param)
+                    else:
+                        not_queryables.add(constraint_param)
+                if not_queryables:
+                    raise ValidationError(
+                        f"parameter(s) {not_queryables} not queryable"
+                    )
+                else:
+                    # get constraints again without common queryables
+                    constraint_params = (
+                        get_constraint_queryables_with_additional_params(
+                            constraints, non_empty_kwargs, self, product_type
+                        )
+                    )
+
+        field_definitions: Dict[str, Any] = {}
+        for json_param, json_mtd in constraint_params.items():
+            param = (
+                get_queryable_from_provider(
+                    json_param, self.get_metadata_mapping(product_type)
+                )
+                or json_param
+            )
+            default = kwargs.get(param, None) or self.config.products.get(
+                product_type, {}
+            ).get(param, None)
+            annotated_def = json_field_definition_to_python(
+                json_mtd, default_value=default, required=True
+            )
+            field_definitions[param] = get_args(annotated_def)
+
+        python_queryables = create_model("m", **field_definitions).model_fields
+        return {**default_queryables, **model_fields_to_annotated(python_queryables)}
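`_preprocess_search_params` above accepts a datacube-style `date` value in either `start/to/end` or `start/end` form and, when `end_date_excluded` is `False`, pulls a midnight end date back by one day. A standalone sketch of that date handling (the input dict is made up):

```python
# Standalone sketch of the date handling in _preprocess_search_params above;
# the _dc_qp dict is an invented example.
from datetime import timedelta

from dateutil.parser import isoparse

_dc_qp = {"date": "2020-01-01/to/2020-02-01"}
date = _dc_qp["date"]
start, end = date.split("/to/") if "/to/" in date else date.split("/")

# With end_date_excluded set to False, an end date falling exactly at midnight
# is moved back one day before being sent to the provider.
end_dt = isoparse(end)
if end_dt == end_dt.replace(hour=0, minute=0, second=0, microsecond=0):
    end_dt += timedelta(days=-1)

print(start, end_dt.isoformat())  # 2020-01-01 2020-01-31T00:00:00
```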
|