eodag 3.0.1__py3-none-any.whl → 3.1.0__py3-none-any.whl
This diff covers the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- eodag/api/core.py +174 -138
- eodag/api/product/_assets.py +44 -15
- eodag/api/product/_product.py +58 -47
- eodag/api/product/drivers/__init__.py +81 -4
- eodag/api/product/drivers/base.py +65 -4
- eodag/api/product/drivers/generic.py +65 -0
- eodag/api/product/drivers/sentinel1.py +97 -0
- eodag/api/product/drivers/sentinel2.py +95 -0
- eodag/api/product/metadata_mapping.py +117 -90
- eodag/api/search_result.py +13 -23
- eodag/cli.py +26 -5
- eodag/config.py +86 -92
- eodag/plugins/apis/base.py +1 -1
- eodag/plugins/apis/ecmwf.py +42 -22
- eodag/plugins/apis/usgs.py +17 -16
- eodag/plugins/authentication/aws_auth.py +16 -13
- eodag/plugins/authentication/base.py +5 -3
- eodag/plugins/authentication/header.py +3 -3
- eodag/plugins/authentication/keycloak.py +4 -4
- eodag/plugins/authentication/oauth.py +7 -3
- eodag/plugins/authentication/openid_connect.py +22 -16
- eodag/plugins/authentication/sas_auth.py +4 -4
- eodag/plugins/authentication/token.py +41 -10
- eodag/plugins/authentication/token_exchange.py +1 -1
- eodag/plugins/base.py +4 -4
- eodag/plugins/crunch/base.py +4 -4
- eodag/plugins/crunch/filter_date.py +4 -4
- eodag/plugins/crunch/filter_latest_intersect.py +6 -6
- eodag/plugins/crunch/filter_latest_tpl_name.py +7 -7
- eodag/plugins/crunch/filter_overlap.py +4 -4
- eodag/plugins/crunch/filter_property.py +6 -7
- eodag/plugins/download/aws.py +146 -87
- eodag/plugins/download/base.py +38 -56
- eodag/plugins/download/creodias_s3.py +29 -0
- eodag/plugins/download/http.py +173 -183
- eodag/plugins/download/s3rest.py +10 -11
- eodag/plugins/manager.py +10 -20
- eodag/plugins/search/__init__.py +6 -5
- eodag/plugins/search/base.py +90 -46
- eodag/plugins/search/build_search_result.py +1048 -361
- eodag/plugins/search/cop_marine.py +22 -12
- eodag/plugins/search/creodias_s3.py +9 -73
- eodag/plugins/search/csw.py +11 -11
- eodag/plugins/search/data_request_search.py +19 -18
- eodag/plugins/search/qssearch.py +99 -258
- eodag/plugins/search/stac_list_assets.py +85 -0
- eodag/plugins/search/static_stac_search.py +4 -4
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +1134 -325
- eodag/resources/providers.yml +906 -2006
- eodag/resources/stac_api.yml +2 -2
- eodag/resources/user_conf_template.yml +10 -9
- eodag/rest/cache.py +2 -2
- eodag/rest/config.py +3 -3
- eodag/rest/core.py +112 -82
- eodag/rest/errors.py +5 -5
- eodag/rest/server.py +33 -14
- eodag/rest/stac.py +41 -38
- eodag/rest/types/collections_search.py +3 -3
- eodag/rest/types/eodag_search.py +29 -23
- eodag/rest/types/queryables.py +42 -31
- eodag/rest/types/stac_search.py +15 -25
- eodag/rest/utils/__init__.py +14 -21
- eodag/rest/utils/cql_evaluate.py +6 -6
- eodag/rest/utils/rfc3339.py +2 -2
- eodag/types/__init__.py +141 -32
- eodag/types/bbox.py +2 -2
- eodag/types/download_args.py +3 -3
- eodag/types/queryables.py +183 -72
- eodag/types/search_args.py +4 -4
- eodag/types/whoosh.py +127 -3
- eodag/utils/__init__.py +153 -51
- eodag/utils/exceptions.py +28 -21
- eodag/utils/import_system.py +2 -2
- eodag/utils/repr.py +65 -6
- eodag/utils/requests.py +13 -13
- eodag/utils/rest.py +2 -2
- eodag/utils/s3.py +231 -0
- eodag/utils/stac_reader.py +10 -10
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/METADATA +77 -76
- eodag-3.1.0.dist-info/RECORD +113 -0
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/WHEEL +1 -1
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/entry_points.txt +4 -2
- eodag/utils/constraints.py +0 -244
- eodag-3.0.1.dist-info/RECORD +0 -109
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/LICENSE +0 -0
- {eodag-3.0.1.dist-info → eodag-3.1.0.dist-info}/top_level.txt +0 -0
eodag/api/product/drivers/sentinel2.py (new file)
@@ -0,0 +1,95 @@
+# -*- coding: utf-8 -*-
+# Copyright 2021, CS GROUP - France, http://www.c-s.fr
+#
+# This file is part of EODAG project
+# https://www.github.com/CS-SI/EODAG
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import annotations
+
+import re
+from typing import TYPE_CHECKING
+
+from eodag.api.product.drivers.base import AssetPatterns, DatasetDriver
+
+if TYPE_CHECKING:
+    from eodag.api.product._product import EOProduct
+
+
+class Sentinel2Driver(DatasetDriver):
+    """Driver for Sentinel2 products"""
+
+    #: Band keys associated with their default Ground Sampling Distance (GSD)
+    BANDS_DEFAULT_GSD = {
+        "10M": ("B02", "B03", "B04", "B08", "TCI"),
+        "20M": ("B05", "B06", "B07", "B11", "B12", "B8A"),
+        "60M": ("B01", "B09", "B10"),
+    }
+
+    #: list of patterns to match asset keys and roles
+    ASSET_KEYS_PATTERNS_ROLES: list[AssetPatterns] = [
+        # masks
+        {
+            "pattern": re.compile(r"^.*?(MSK_[^/\\]+)\.(?:jp2|tiff?)$", re.IGNORECASE),
+            "roles": ["data-mask"],
+        },
+        # visual
+        {
+            "pattern": re.compile(
+                r"^.*?(TCI)(_[0-9]+m)?\.(?:jp2|tiff?)$", re.IGNORECASE
+            ),
+            "roles": ["visual"],
+        },
+        # bands
+        {
+            "pattern": re.compile(
+                r"^.*?([A-Z]+[0-9]*[A-Z]?)(_[0-9]+m)?\.(?:jp2|tiff?)$", re.IGNORECASE
+            ),
+            "roles": ["data"],
+        },
+        # metadata
+        {
+            "pattern": re.compile(
+                r"^(?:.*[/\\])?([^/\\]+)(\.xml|\.xsd|\.safe|\.json)$", re.IGNORECASE
+            ),
+            "roles": ["metadata"],
+        },
+        # thumbnail
+        {
+            "pattern": re.compile(
+                r"^(?:.*[/\\])?(thumbnail)(\.jpe?g|\.png)$", re.IGNORECASE
+            ),
+            "roles": ["thumbnail"],
+        },
+        # quicklook
+        {
+            "pattern": re.compile(
+                r"^(?:.*[/\\])?[^/\\]+(-ql|preview|quick-?look)(\.jpe?g|\.png)$",
+                re.IGNORECASE,
+            ),
+            "roles": ["overview"],
+        },
+        # default
+        {"pattern": re.compile(r"^(?:.*[/\\])?([^/\\]+)$"), "roles": ["auxiliary"]},
+    ]
+
+    def _normalize_key(self, key: str, eo_product: EOProduct) -> str:
+        upper_key = key.upper()
+        # check if key matched any normalized
+        for res in self.BANDS_DEFAULT_GSD:
+            if res in upper_key:
+                for norm_key in self.BANDS_DEFAULT_GSD[res]:
+                    if norm_key in upper_key:
+                        return norm_key
+
+        return super()._normalize_key(key, eo_product)
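A driver such as this resolves an asset's role by testing its key against each entry of ASSET_KEYS_PATTERNS_ROLES in order and keeping the first match. Below is a minimal, self-contained sketch of that kind of matching, not eodag's API: the pattern table is abridged from the class above and the sample asset keys are invented for illustration.

    import re

    # Abridged copy of the pattern table defined in Sentinel2Driver above
    PATTERNS = [
        {"pattern": re.compile(r"^.*?(MSK_[^/\\]+)\.(?:jp2|tiff?)$", re.IGNORECASE), "roles": ["data-mask"]},
        {"pattern": re.compile(r"^.*?(TCI)(_[0-9]+m)?\.(?:jp2|tiff?)$", re.IGNORECASE), "roles": ["visual"]},
        {"pattern": re.compile(r"^.*?([A-Z]+[0-9]*[A-Z]?)(_[0-9]+m)?\.(?:jp2|tiff?)$", re.IGNORECASE), "roles": ["data"]},
        {"pattern": re.compile(r"^(?:.*[/\\])?([^/\\]+)(\.xml|\.xsd|\.safe|\.json)$", re.IGNORECASE), "roles": ["metadata"]},
        {"pattern": re.compile(r"^(?:.*[/\\])?([^/\\]+)$"), "roles": ["auxiliary"]},  # default
    ]

    def classify(asset_key: str) -> tuple[str, list[str]]:
        """Return (matched key fragment, roles) for the first pattern that matches."""
        for entry in PATTERNS:
            match = entry["pattern"].match(asset_key)
            if match:
                return match.group(1), entry["roles"]
        return asset_key, []

    # Sample Sentinel-2 asset keys, made up for illustration
    for key in (
        "GRANULE/L2A_T31TCJ/IMG_DATA/R10m/T31TCJ_B02_10m.jp2",
        "GRANULE/L2A_T31TCJ/QI_DATA/MSK_CLDPRB_20m.jp2",
        "MTD_MSIL2A.xml",
    ):
        print(key, "->", classify(key))
    # -> ('B02', ['data']), ('MSK_CLDPRB_20m', ['data-mask']), ('MTD_MSIL2A', ['metadata'])

The real driver goes one step further and normalizes matched band keys through BANDS_DEFAULT_GSD, as shown in _normalize_key above.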
eodag/api/product/metadata_mapping.py
@@ -23,23 +23,12 @@ import logging
 import re
 from datetime import datetime, timedelta
 from string import Formatter
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    AnyStr,
-    Callable,
-    Dict,
-    Iterator,
-    List,
-    Optional,
-    Tuple,
-    Union,
-    cast,
-)
+from typing import TYPE_CHECKING, Any, AnyStr, Callable, Iterator, Optional, Union, cast

 import geojson
 import orjson
 import pyproj
+import shapely
 from dateutil.parser import isoparse
 from dateutil.relativedelta import relativedelta
 from dateutil.tz import UTC, tzutc
@@ -87,8 +76,8 @@ DEFAULT_GEOMETRY = "POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90))"


 def get_metadata_path(
-    map_value: Union[str,
-) ->
+    map_value: Union[str, list[str]],
+) -> tuple[Union[list[str], None], str]:
     """Return the jsonpath or xpath to the value of a EO product metadata in a provider
     search result.

@@ -136,12 +125,12 @@ def get_metadata_path(
     return None, path


-def get_metadata_path_value(map_value: Union[str,
+def get_metadata_path_value(map_value: Union[str, list[str]]) -> str:
     """Get raw metadata path without converter"""
     return map_value[1] if isinstance(map_value, list) else map_value


-def get_search_param(map_value:
+def get_search_param(map_value: list[str]) -> str:
     """See :func:`~eodag.api.product.metadata_mapping.get_metadata_path`

     :param map_value: The value originating from the definition of `metadata_mapping`
@@ -179,6 +168,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
     - ``recursive_sub_str``: recursively substitue in the structure (e.g. dict)
       values matching a regex
     - ``slice_str``: slice a string (equivalent to s[start, end, step])
+    - ``to_lower``: Convert a string to lowercase
+    - ``to_upper``: Convert a string to uppercase
     - ``fake_l2a_title_from_l1c``: used to generate SAFE format metadata for data from AWS
     - ``s2msil2a_title_to_aws_productinfo``: used to generate SAFE format metadata for data from AWS
     - ``split_cop_dem_id``: get the bbox by splitting the product id
@@ -332,7 +323,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             return wkt_value

         @staticmethod
-        def convert_to_bounds_lists(input_geom: BaseGeometry) ->
+        def convert_to_bounds_lists(input_geom: BaseGeometry) -> list[list[float]]:
             if isinstance(input_geom, MultiPolygon):
                 geoms = [geom for geom in input_geom.geoms]
                 # sort with larger one at first (stac-browser only plots first one)
@@ -342,7 +333,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
                 return [list(input_geom.bounds[0:4])]

         @staticmethod
-        def convert_to_bounds(input_geom_unformatted: Any) ->
+        def convert_to_bounds(input_geom_unformatted: Any) -> list[float]:
             input_geom = get_geometry_from_various(geometry=input_geom_unformatted)
             if isinstance(input_geom, MultiPolygon):
                 geoms = [geom for geom in input_geom.geoms]
@@ -353,16 +344,18 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
                 max_lon = -180
                 max_lat = -90
                 for geom in geoms:
-                    min_lon = min(min_lon, geom.
-                    min_lat = min(min_lat, geom.
-                    max_lon = max(max_lon, geom.
-                    max_lat = max(max_lat, geom.
+                    min_lon = min(min_lon, geom.bounds[0])
+                    min_lat = min(min_lat, geom.bounds[1])
+                    max_lon = max(max_lon, geom.bounds[2])
+                    max_lat = max(max_lat, geom.bounds[3])
                 return [min_lon, min_lat, max_lon, max_lat]
             else:
                 return list(input_geom.bounds[0:4])

         @staticmethod
-        def convert_to_nwse_bounds(input_geom: BaseGeometry) ->
+        def convert_to_nwse_bounds(input_geom: BaseGeometry) -> list[float]:
+            if isinstance(input_geom, str):
+                input_geom = shapely.wkt.loads(input_geom)
             return list(input_geom.bounds[-1:] + input_geom.bounds[:-1])

         @staticmethod
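convert_to_nwse_bounds now also accepts a WKT string and parses it with shapely before reordering the bounds. A small standalone sketch of that reordering; the polygon is made up for illustration, and shapely's bounds are (minx, miny, maxx, maxy), so moving the last value to the front yields north, west, south, east:

    import shapely.wkt

    def to_nwse_bounds(input_geom) -> list[float]:
        # accept either a shapely geometry or a WKT string, as in the new code above
        if isinstance(input_geom, str):
            input_geom = shapely.wkt.loads(input_geom)
        # (minx, miny, maxx, maxy) -> [maxy, minx, miny, maxx] == [north, west, south, east]
        return list(input_geom.bounds[-1:] + input_geom.bounds[:-1])

    print(to_nwse_bounds("POLYGON((1 43, 2 43, 2 44, 1 44, 1 43))"))  # [44.0, 1.0, 43.0, 2.0]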
@@ -374,8 +367,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             )

         @staticmethod
-        def convert_to_geojson(
-            return geojson.dumps(
+        def convert_to_geojson(value: Any) -> str:
+            return geojson.dumps(value)

         @staticmethod
         def convert_from_ewkt(ewkt_string: str) -> Union[BaseGeometry, str]:
@@ -444,7 +437,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
                 else:
                     yield e

-            polygons_list:
+            polygons_list: list[Polygon] = []
             for elem in flatten_elements(georss[0]):
                 coords_list = elem.text.split()
                 polygon_args = [
@@ -469,7 +462,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         @staticmethod
         def convert_to_longitude_latitude(
             input_geom_unformatted: Any,
-        ) ->
+        ) -> dict[str, float]:
             bounds = MetadataFormatter.convert_to_bounds(input_geom_unformatted)
             lon = (bounds[0] + bounds[2]) / 2
             lat = (bounds[1] + bounds[3]) / 2
@@ -503,14 +496,21 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
                 return NOT_AVAILABLE

         @staticmethod
-        def convert_replace_str(
+        def convert_replace_str(value: Any, args: str) -> str:
+            if isinstance(value, dict):
+                value = MetadataFormatter.convert_to_geojson(value)
+            elif not isinstance(value, str):
+                raise TypeError(
+                    f"convert_replace_str expects a string or a dict (apply to_geojson). Got {type(value)}"
+                )
+
             old, new = ast.literal_eval(args)
-            return re.sub(old, new,
+            return re.sub(old, new, value)

         @staticmethod
         def convert_recursive_sub_str(
-            input_obj: Union[
-        ) -> Union[
+            input_obj: Union[dict[Any, Any], list[Any]], args: str
+        ) -> Union[dict[Any, Any], list[Any]]:
             old, new = ast.literal_eval(args)
             return items_recursive_apply(
                 input_obj,
@@ -520,8 +520,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:

         @staticmethod
         def convert_dict_update(
-            input_dict:
-        ) ->
+            input_dict: dict[Any, Any], args: str
+        ) -> dict[Any, Any]:
             """Converts"""
             new_items_list = ast.literal_eval(args)

@@ -531,8 +531,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:

         @staticmethod
         def convert_dict_filter(
-            input_dict:
-        ) ->
+            input_dict: dict[Any, Any], jsonpath_filter_str: str
+        ) -> dict[Any, Any]:
             """Fitlers dict items using jsonpath"""

             jsonpath_filter = string_to_jsonpath(jsonpath_filter_str, force=True)
@@ -558,6 +558,16 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             ]
             return string[cmin:cmax:cstep]

+        @staticmethod
+        def convert_to_lower(string: str) -> str:
+            """Convert a string to lowercase."""
+            return string.lower()
+
+        @staticmethod
+        def convert_to_upper(string: str) -> str:
+            """Convert a string to uppercase."""
+            return string.upper()
+
         @staticmethod
         def convert_fake_l2a_title_from_l1c(string: str) -> str:
             id_regex = re.compile(
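The new to_lower/to_upper entries follow the same pattern as the other converters: a convert_<name> static method on the formatter class, selected by the conversion name embedded in the metadata-mapping string. A minimal, self-contained sketch of that dispatch idea, assuming a "{field#conversion}" syntax analogous to the SEP separator used by format_metadata; eodag's actual MetadataFormatter is more involved:

    from string import Formatter

    class DemoFormatter(Formatter):
        """Toy formatter dispatching convert_* methods by name (illustration only)."""

        @staticmethod
        def convert_to_lower(string: str) -> str:
            return string.lower()

        @staticmethod
        def convert_to_upper(string: str) -> str:
            return string.upper()

        def get_field(self, field_name, args, kwargs):
            if "#" in field_name:  # assumed "field#conversion" syntax
                name, conversion = field_name.split("#", 1)
                value, key = super().get_field(name, args, kwargs)
                return getattr(self, f"convert_{conversion}")(value), key
            return super().get_field(field_name, args, kwargs)

    print(DemoFormatter().vformat("{platform#to_upper}", (), {"platform": "sentinel-2"}))  # SENTINEL-2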
@@ -601,8 +611,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
                 return NOT_AVAILABLE

         @staticmethod
-        def convert_split_id_into_s1_params(product_id: str) ->
-            parts:
+        def convert_split_id_into_s1_params(product_id: str) -> dict[str, str]:
+            parts: list[str] = re.split(r"_(?!_)", product_id)
             if len(parts) < 9:
                 logger.error(
                     "id %s does not match expected Sentinel-1 id format", product_id
@@ -636,8 +646,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             return params

         @staticmethod
-        def convert_split_id_into_s3_params(product_id: str) ->
-            parts:
+        def convert_split_id_into_s3_params(product_id: str) -> dict[str, str]:
+            parts: list[str] = re.split(r"_(?!_)", product_id)
             params = {"productType": product_id[4:15]}
             dates = re.findall("[0-9]{8}T[0-9]{6}", product_id)
             start_date = datetime.strptime(dates[0], "%Y%m%dT%H%M%S") - timedelta(
@@ -653,8 +663,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             return params

         @staticmethod
-        def convert_split_id_into_s5p_params(product_id: str) ->
-            parts:
+        def convert_split_id_into_s5p_params(product_id: str) -> dict[str, str]:
+            parts: list[str] = re.split(r"_(?!_)", product_id)
             params = {
                 "productType": product_id[9:19],
                 "processingMode": parts[1],
@@ -671,7 +681,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             return params

         @staticmethod
-        def convert_split_cop_dem_id(product_id: str) ->
+        def convert_split_cop_dem_id(product_id: str) -> list[int]:
             parts = product_id.split("_")
             lattitude = parts[3]
             longitude = parts[5]
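The convert_split_id_into_s*_params helpers all rely on re.split(r"_(?!_)", ...), which splits a product identifier on an underscore only when it is not followed by another underscore, so runs of consecutive underscores (field padding in Sentinel identifiers) stay attached to the preceding part, with only the last one of the run acting as a separator. A quick illustration; the identifiers are samples following the public naming conventions, not values taken from the diff:

    import re

    s1_id = "S1A_IW_GRDH_1SDV_20200328T050105_20200328T050130_031921_03AEA2_7F4F"
    print(re.split(r"_(?!_)", s1_id))
    # ['S1A', 'IW', 'GRDH', '1SDV', '20200328T050105', '20200328T050130', '031921', '03AEA2', '7F4F']
    # -> 9 parts, matching the `len(parts) < 9` check above

    # consecutive underscores are not split apart: only the last one of the run separates fields
    print(re.split(r"_(?!_)", "OL_1_EFR____X"))
    # ['OL', '1', 'EFR___', 'X']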
@@ -710,7 +720,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         @staticmethod
         def convert_to_datetime_dict(
             date: str, format: str
-        ) ->
+        ) -> dict[str, Union[list[str], str]]:
             """Convert a date (str) to a dictionary where values are in the format given in argument

             date == "2021-04-21T18:27:19.123Z" and format == "list" => {
@@ -762,7 +772,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         @staticmethod
         def convert_interval_to_datetime_dict(
             date: str, separator: str = "/"
-        ) ->
+        ) -> dict[str, list[str]]:
             """Convert a date interval ('/' separated str) to a dictionary where values are lists

             date == "2021-04-21/2021-04-22" => {
@@ -802,7 +812,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             }

         @staticmethod
-        def convert_get_ecmwf_time(date: str) ->
+        def convert_get_ecmwf_time(date: str) -> list[str]:
             """Get the time of a date (str) in the ECMWF format (["HH:00"])

             "2021-04-21T18:27:19.123Z" => ["18:00"]
@@ -834,7 +844,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             date_object = datetime.strptime(utc_date, "%Y-%m-%dT%H:%M:%S.%fZ")
             date_object_second_year = date_object + relativedelta(years=1)
             return [
-                f
+                f"{date_object.strftime('%Y')}_{date_object_second_year.strftime('%y')}"
             ]

         @staticmethod
@@ -846,8 +856,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:

         @staticmethod
         def convert_assets_list_to_dict(
-            assets_list:
-        ) ->
+            assets_list: list[dict[str, str]], asset_name_key: str = "title"
+        ) -> dict[str, dict[str, str]]:
             """Convert a list of assets to a dictionary where keys represent
             name of assets and are found among values of asset dictionaries.

@@ -874,8 +884,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
                 "asset3": {"href": "qux", "title": "qux-title", "name": "asset3"},
             }
             """
-            asset_names:
-            assets_dict:
+            asset_names: list[str] = []
+            assets_dict: dict[str, dict[str, str]] = {}

             for asset in assets_list:
                 asset_name = asset[asset_name_key]
@@ -884,7 +894,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:

             # we only keep the equivalent of the path basename in the case where the
             # asset name has a path pattern and this basename is only found once
-            immutable_asset_indexes:
+            immutable_asset_indexes: list[int] = []
             for i, asset_name in enumerate(asset_names):
                 if i in immutable_asset_indexes:
                     continue
@@ -902,20 +912,18 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             return assets_dict

     # if stac extension colon separator `:` is in search params, parse it to prevent issues with vformat
-    if re.search(r"{[
-        search_param = re.sub(
-            r"{([a-zA-Z0-9_-]*):([a-zA-Z0-9_-]*)}", r"{\1_COLON_\2}", search_param
-        )
+    if re.search(r"{[\w-]*:[\w#-]*}", search_param):
+        search_param = re.sub(r"{([\w-]*):([\w#-]*)}", r"{\1_COLON_\2}", search_param)
         kwargs = {k.replace(":", "_COLON_"): v for k, v in kwargs.items()}

     return MetadataFormatter().vformat(search_param, args, kwargs)


 def properties_from_json(
-    json:
-    mapping:
-    discovery_config: Optional[
-) ->
+    json: dict[str, Any],
+    mapping: dict[str, Any],
+    discovery_config: Optional[dict[str, Any]] = None,
+) -> dict[str, Any]:
     """Extract properties from a provider json result.

     :param json: The representation of a provider result as a json object
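The colon handling above exists because STAC extension keys such as "sar:instrument_mode" collide with str.format's field/format-spec separator. A standalone sketch of the same workaround (not eodag's code; the key and template are made up):

    import re

    def format_with_colon_keys(template: str, **kwargs) -> str:
        # "{sar:instrument_mode}" would be read by str.format as field "sar" with format spec
        # "instrument_mode", so rewrite the colon on both sides before formatting
        if re.search(r"{[\w-]*:[\w#-]*}", template):
            template = re.sub(r"{([\w-]*):([\w#-]*)}", r"{\1_COLON_\2}", template)
            kwargs = {k.replace(":", "_COLON_"): v for k, v in kwargs.items()}
        return template.format(**kwargs)

    print(format_with_colon_keys("mode={sar:instrument_mode}", **{"sar:instrument_mode": "IW"}))  # mode=IW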
@@ -928,7 +936,7 @@ def properties_from_json(
         `discovery_path` (String representation of jsonpath)
     :returns: The metadata of the :class:`~eodag.api.product._product.EOProduct`
     """
-    properties:
+    properties: dict[str, Any] = {}
     templates = {}
     used_jsonpaths = []
     for metadata, value in mapping.items():
@@ -975,10 +983,24 @@ def properties_from_json(
             if re.search(r"({[^{}:]+})+", conversion_or_none):
                 conversion_or_none = conversion_or_none.format(**properties)

-
-
-
-
+            if extracted_value == NOT_AVAILABLE:
+                # try if value can be formatted even if it is not available
+                try:
+                    properties[metadata] = format_metadata(
+                        "{%s%s%s}" % (metadata, SEP, conversion_or_none),
+                        **{metadata: extracted_value},
+                    )
+                except ValueError:
+                    logger.debug(
+                        f"{metadata}: {extracted_value} could not be formatted with {conversion_or_none}"
+                    )
+                    continue
+            else:
+                # in this case formatting should work, otherwise something is wrong in the mapping
+                properties[metadata] = format_metadata(
+                    "{%s%s%s}" % (metadata, SEP, conversion_or_none),
+                    **{metadata: extracted_value},
+                )
         # properties as python objects when possible (format_metadata returns only strings)
         try:
             properties[metadata] = ast.literal_eval(properties[metadata])
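The new branch makes the conversion step tolerant of placeholder values: when the extracted value is NOT_AVAILABLE the formatting is still attempted, and a ValueError from the converter just drops the property instead of aborting. A simplified sketch of that pattern, with a made-up converter and assuming NOT_AVAILABLE is eodag's usual "Not Available" placeholder string:

    NOT_AVAILABLE = "Not Available"  # assumed value of the placeholder constant

    def convert_or_skip(metadata: str, value, converter) -> dict:
        properties = {}
        if value == NOT_AVAILABLE:
            try:
                # try if the value can be formatted even if it is not available
                properties[metadata] = converter(value)
            except ValueError:
                pass  # converter rejected the placeholder: leave the property out
        else:
            # here a failure means the metadata mapping itself is wrong, so let it raise
            properties[metadata] = converter(value)
        return properties

    print(convert_or_skip("cloudCover", "20", lambda v: str(int(v))))            # {'cloudCover': '20'}
    print(convert_or_skip("cloudCover", NOT_AVAILABLE, lambda v: str(int(v))))   # {}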
@@ -1058,8 +1080,8 @@ def properties_from_xml(
     xml_as_text: AnyStr,
     mapping: Any,
     empty_ns_prefix: str = "ns",
-    discovery_config: Optional[
-) ->
+    discovery_config: Optional[dict[str, Any]] = None,
+) -> dict[str, Any]:
     """Extract properties from a provider xml result.

     :param xml_as_text: The representation of a provider result as xml
@@ -1077,7 +1099,7 @@ def properties_from_xml(
         `discovery_path` (String representation of xpath)
     :returns: the metadata of the :class:`~eodag.api.product._product.EOProduct`
     """
-    properties:
+    properties: dict[str, Any] = {}
     templates = {}
     used_xpaths = []
     root = etree.XML(xml_as_text)
@@ -1205,10 +1227,10 @@ def properties_from_xml(


 def mtd_cfg_as_conversion_and_querypath(
-    src_dict:
-    dest_dict:
+    src_dict: dict[str, Any],
+    dest_dict: dict[str, Any] = {},
     result_type: str = "json",
-) ->
+) -> dict[str, Any]:
     """Metadata configuration dictionary to querypath with conversion dictionary
     Transform every src_dict value from jsonpath_str to tuple `(conversion, jsonpath_object)`
     or from xpath_str to tuple `(conversion, xpath_str)`
@@ -1256,8 +1278,8 @@ def mtd_cfg_as_conversion_and_querypath(


 def format_query_params(
-    product_type: str, config: PluginConfig, query_dict:
-) ->
+    product_type: str, config: PluginConfig, query_dict: dict[str, Any]
+) -> dict[str, Any]:
     """format the search parameters to query parameters"""
     if "raise_errors" in query_dict.keys():
         del query_dict["raise_errors"]
@@ -1269,7 +1291,7 @@ def format_query_params(
         **config.products.get(product_type, {}).get("metadata_mapping", {}),
     )

-    query_params:
+    query_params: dict[str, Any] = {}
     # Get all the search parameters that are recognised as queryables by the
     # provider (they appear in the queryables dictionary)
     queryables = _get_queryables(query_dict, config, product_type_metadata_mapping)
@@ -1299,8 +1321,8 @@ def format_query_params(
                 query_params[eodag_search_key] = formatted_query_param
             else:
                 provider_search_key, provider_value = parts
-                query_params
-
+                query_params[provider_search_key] = format_metadata(
+                    provider_value, product_type, **query_dict
                 )
         else:
             query_params[provider_search_key] = user_input
@@ -1359,10 +1381,10 @@ def _resolve_hashes(formatted_query_param: str) -> str:


 def _format_free_text_search(
-    config: PluginConfig, metadata_mapping:
-) ->
+    config: PluginConfig, metadata_mapping: dict[str, Any], **kwargs: Any
+) -> dict[str, Any]:
     """Build the free text search parameter using the search parameters"""
-    query_params:
+    query_params: dict[str, Any] = {}
     if not getattr(config, "free_text_search_operations", None):
         return query_params
     for param, operations_config in config.free_text_search_operations.items():
@@ -1401,13 +1423,13 @@ def _format_free_text_search(


 def _get_queryables(
-    search_params:
+    search_params: dict[str, Any],
     config: PluginConfig,
-    metadata_mapping:
-) ->
+    metadata_mapping: dict[str, Any],
+) -> dict[str, Any]:
     """Retrieve the metadata mappings that are query-able"""
     logger.debug("Retrieving queryable metadata from metadata_mapping")
-    queryables:
+    queryables: dict[str, Any] = {}
     for eodag_search_key, user_input in search_params.items():
         if user_input is not None:
             md_mapping = metadata_mapping.get(eodag_search_key, (None, NOT_MAPPED))
@@ -1454,7 +1476,7 @@ def _get_queryables(


 def get_queryable_from_provider(
-    provider_queryable: str, metadata_mapping:
+    provider_queryable: str, metadata_mapping: dict[str, Union[str, list[str]]]
 ) -> Optional[str]:
     """Get EODAG configured queryable parameter from provider queryable parameter

@@ -1462,7 +1484,7 @@ def get_queryable_from_provider(
     :param metadata_mapping: metadata-mapping configuration
     :returns: EODAG configured queryable parameter or None
     """
-    pattern = rf"\
+    pattern = rf"\"{provider_queryable}\""
     # if 1:1 mapping exists privilege this one instead of other mapping
     # e.g. provider queryable = year -> use year and not date in which year also appears
     mapping_values = [
@@ -1472,13 +1494,17 @@ def get_queryable_from_provider(
         ind = mapping_values.index(provider_queryable)
         return Queryables.get_queryable_from_alias(list(metadata_mapping.keys())[ind])
     for param, param_conf in metadata_mapping.items():
-        if
+        if (
+            isinstance(param_conf, list)
+            and param_conf[0]
+            and re.search(pattern, param_conf[0])
+        ):
             return Queryables.get_queryable_from_alias(param)
     return None


 def get_provider_queryable_path(
-    queryable: str, metadata_mapping:
+    queryable: str, metadata_mapping: dict[str, Union[str, list[str]]]
 ) -> Optional[str]:
     """Get EODAG configured queryable path from its parameter

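The new guard in get_queryable_from_provider only inspects mapping entries that are lists (a search definition plus a response path) and looks for the provider key quoted inside the search definition. Roughly, on a made-up metadata_mapping:

    import re
    from typing import Optional

    def find_eodag_param(provider_queryable: str, metadata_mapping: dict) -> Optional[str]:
        pattern = rf"\"{provider_queryable}\""
        for param, param_conf in metadata_mapping.items():
            # only queryable entries are two-element lists: [search definition, response path]
            if isinstance(param_conf, list) and param_conf[0] and re.search(pattern, param_conf[0]):
                return param
        return None

    metadata_mapping = {
        "processingLevel": "$.properties.processingLevel",  # plain path: not queryable
        "cloudCover": ['{{"cloudCoverPercentage": {cloudCover}}}', "$.properties.cloudCoverPercentage"],
    }
    print(find_eodag_param("cloudCoverPercentage", metadata_mapping))  # cloudCover

Unlike the real function, this sketch returns the eodag parameter name directly instead of resolving it through Queryables.get_queryable_from_alias.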
@@ -1495,10 +1521,11 @@ def get_provider_queryable_path(

 def get_provider_queryable_key(
     eodag_key: str,
-    provider_queryables:
-    metadata_mapping:
+    provider_queryables: dict[str, Any],
+    metadata_mapping: dict[str, Union[list[Any], str]],
 ) -> str:
-    """
+    """Finds the provider queryable corresponding to the given eodag key based on the metadata mapping
+
     :param eodag_key: key in eodag
     :param provider_queryables: queryables returned from the provider
     :param metadata_mapping: metadata mapping from which the keys are retrieved