eodag-3.0.1-py3-none-any.whl → eodag-3.1.0b2-py3-none-any.whl
This diff shows the content changes between two publicly released versions of the package, as they appear in their respective public registries, and is provided for informational purposes only.
- eodag/api/core.py +164 -127
- eodag/api/product/_assets.py +11 -11
- eodag/api/product/_product.py +45 -30
- eodag/api/product/drivers/__init__.py +81 -4
- eodag/api/product/drivers/base.py +65 -4
- eodag/api/product/drivers/generic.py +65 -0
- eodag/api/product/drivers/sentinel1.py +97 -0
- eodag/api/product/drivers/sentinel2.py +95 -0
- eodag/api/product/metadata_mapping.py +101 -85
- eodag/api/search_result.py +13 -23
- eodag/cli.py +26 -5
- eodag/config.py +78 -81
- eodag/plugins/apis/base.py +1 -1
- eodag/plugins/apis/ecmwf.py +46 -22
- eodag/plugins/apis/usgs.py +16 -15
- eodag/plugins/authentication/aws_auth.py +16 -13
- eodag/plugins/authentication/base.py +5 -3
- eodag/plugins/authentication/header.py +3 -3
- eodag/plugins/authentication/keycloak.py +4 -4
- eodag/plugins/authentication/oauth.py +7 -3
- eodag/plugins/authentication/openid_connect.py +16 -16
- eodag/plugins/authentication/sas_auth.py +4 -4
- eodag/plugins/authentication/token.py +41 -10
- eodag/plugins/authentication/token_exchange.py +1 -1
- eodag/plugins/base.py +4 -4
- eodag/plugins/crunch/base.py +4 -4
- eodag/plugins/crunch/filter_date.py +4 -4
- eodag/plugins/crunch/filter_latest_intersect.py +6 -6
- eodag/plugins/crunch/filter_latest_tpl_name.py +7 -7
- eodag/plugins/crunch/filter_overlap.py +4 -4
- eodag/plugins/crunch/filter_property.py +6 -7
- eodag/plugins/download/aws.py +58 -78
- eodag/plugins/download/base.py +38 -56
- eodag/plugins/download/creodias_s3.py +29 -0
- eodag/plugins/download/http.py +173 -183
- eodag/plugins/download/s3rest.py +10 -11
- eodag/plugins/manager.py +10 -20
- eodag/plugins/search/__init__.py +6 -5
- eodag/plugins/search/base.py +87 -44
- eodag/plugins/search/build_search_result.py +1067 -329
- eodag/plugins/search/cop_marine.py +22 -12
- eodag/plugins/search/creodias_s3.py +9 -73
- eodag/plugins/search/csw.py +11 -11
- eodag/plugins/search/data_request_search.py +16 -15
- eodag/plugins/search/qssearch.py +103 -187
- eodag/plugins/search/stac_list_assets.py +85 -0
- eodag/plugins/search/static_stac_search.py +3 -3
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +663 -304
- eodag/resources/providers.yml +823 -1749
- eodag/resources/stac_api.yml +2 -2
- eodag/resources/user_conf_template.yml +11 -0
- eodag/rest/cache.py +2 -2
- eodag/rest/config.py +3 -3
- eodag/rest/core.py +112 -82
- eodag/rest/errors.py +5 -5
- eodag/rest/server.py +33 -14
- eodag/rest/stac.py +40 -38
- eodag/rest/types/collections_search.py +3 -3
- eodag/rest/types/eodag_search.py +29 -23
- eodag/rest/types/queryables.py +15 -16
- eodag/rest/types/stac_search.py +15 -25
- eodag/rest/utils/__init__.py +14 -21
- eodag/rest/utils/cql_evaluate.py +6 -6
- eodag/rest/utils/rfc3339.py +2 -2
- eodag/types/__init__.py +75 -28
- eodag/types/bbox.py +2 -2
- eodag/types/download_args.py +3 -3
- eodag/types/queryables.py +183 -72
- eodag/types/search_args.py +4 -4
- eodag/types/whoosh.py +127 -3
- eodag/utils/__init__.py +152 -50
- eodag/utils/exceptions.py +28 -21
- eodag/utils/import_system.py +2 -2
- eodag/utils/repr.py +65 -6
- eodag/utils/requests.py +13 -13
- eodag/utils/rest.py +2 -2
- eodag/utils/s3.py +208 -0
- eodag/utils/stac_reader.py +10 -10
- {eodag-3.0.1.dist-info → eodag-3.1.0b2.dist-info}/METADATA +77 -76
- eodag-3.1.0b2.dist-info/RECORD +113 -0
- {eodag-3.0.1.dist-info → eodag-3.1.0b2.dist-info}/WHEEL +1 -1
- {eodag-3.0.1.dist-info → eodag-3.1.0b2.dist-info}/entry_points.txt +4 -2
- eodag/utils/constraints.py +0 -244
- eodag-3.0.1.dist-info/RECORD +0 -109
- {eodag-3.0.1.dist-info → eodag-3.1.0b2.dist-info}/LICENSE +0 -0
- {eodag-3.0.1.dist-info → eodag-3.1.0b2.dist-info}/top_level.txt +0 -0
eodag/api/product/metadata_mapping.py
CHANGED
@@ -23,23 +23,12 @@ import logging
 import re
 from datetime import datetime, timedelta
 from string import Formatter
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    AnyStr,
-    Callable,
-    Dict,
-    Iterator,
-    List,
-    Optional,
-    Tuple,
-    Union,
-    cast,
-)
+from typing import TYPE_CHECKING, Any, AnyStr, Callable, Iterator, Optional, Union, cast

 import geojson
 import orjson
 import pyproj
+import shapely
 from dateutil.parser import isoparse
 from dateutil.relativedelta import relativedelta
 from dateutil.tz import UTC, tzutc
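The dropped `Dict`, `List` and `Tuple` imports are replaced throughout the module by PEP 585 builtin generics, usable directly in annotations on Python 3.9+. A minimal sketch with a hypothetical function (not from eodag):

    # before: from typing import Dict, List, Tuple
    # def bbox_index(items: List[Dict[str, float]]) -> Tuple[float, ...]: ...

    # after: builtin generics need no typing import
    def bbox_index(items: list[dict[str, float]]) -> tuple[float, ...]:
        # hypothetical helper, for illustration only
        return tuple(item["west"] for item in items)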
@@ -87,8 +76,8 @@ DEFAULT_GEOMETRY = "POLYGON((180 -90, 180 90, -180 90, -180 -90, 180 -90))"


 def get_metadata_path(
-    map_value: Union[str,
-) ->
+    map_value: Union[str, list[str]],
+) -> tuple[Union[list[str], None], str]:
     """Return the jsonpath or xpath to the value of a EO product metadata in a provider
     search result.

@@ -136,12 +125,12 @@ def get_metadata_path(
     return None, path


-def get_metadata_path_value(map_value: Union[str,
+def get_metadata_path_value(map_value: Union[str, list[str]]) -> str:
     """Get raw metadata path without converter"""
     return map_value[1] if isinstance(map_value, list) else map_value


-def get_search_param(map_value:
+def get_search_param(map_value: list[str]) -> str:
     """See :func:`~eodag.api.product.metadata_mapping.get_metadata_path`

     :param map_value: The value originating from the definition of `metadata_mapping`
@@ -179,6 +168,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
     - ``recursive_sub_str``: recursively substitue in the structure (e.g. dict)
       values matching a regex
     - ``slice_str``: slice a string (equivalent to s[start, end, step])
+    - ``to_lower``: Convert a string to lowercase
+    - ``to_upper``: Convert a string to uppercase
     - ``fake_l2a_title_from_l1c``: used to generate SAFE format metadata for data from AWS
     - ``s2msil2a_title_to_aws_productinfo``: used to generate SAFE format metadata for data from AWS
     - ``split_cop_dem_id``: get the bbox by splitting the product id
@@ -332,7 +323,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         return wkt_value

     @staticmethod
-    def convert_to_bounds_lists(input_geom: BaseGeometry) ->
+    def convert_to_bounds_lists(input_geom: BaseGeometry) -> list[list[float]]:
         if isinstance(input_geom, MultiPolygon):
             geoms = [geom for geom in input_geom.geoms]
             # sort with larger one at first (stac-browser only plots first one)
@@ -342,7 +333,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             return [list(input_geom.bounds[0:4])]

     @staticmethod
-    def convert_to_bounds(input_geom_unformatted: Any) ->
+    def convert_to_bounds(input_geom_unformatted: Any) -> list[float]:
         input_geom = get_geometry_from_various(geometry=input_geom_unformatted)
         if isinstance(input_geom, MultiPolygon):
             geoms = [geom for geom in input_geom.geoms]
@@ -353,16 +344,18 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             max_lon = -180
             max_lat = -90
             for geom in geoms:
-                min_lon = min(min_lon, geom.
-                min_lat = min(min_lat, geom.
-                max_lon = max(max_lon, geom.
-                max_lat = max(max_lat, geom.
+                min_lon = min(min_lon, geom.bounds[0])
+                min_lat = min(min_lat, geom.bounds[1])
+                max_lon = max(max_lon, geom.bounds[2])
+                max_lat = max(max_lat, geom.bounds[3])
             return [min_lon, min_lat, max_lon, max_lat]
         else:
             return list(input_geom.bounds[0:4])

     @staticmethod
-    def convert_to_nwse_bounds(input_geom: BaseGeometry) ->
+    def convert_to_nwse_bounds(input_geom: BaseGeometry) -> list[float]:
+        if isinstance(input_geom, str):
+            input_geom = shapely.wkt.loads(input_geom)
         return list(input_geom.bounds[-1:] + input_geom.bounds[:-1])

     @staticmethod
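With the new `isinstance(input_geom, str)` branch, `convert_to_nwse_bounds` also accepts WKT strings. A minimal sketch of the reordering it performs, using plain shapely rather than eodag internals:

    import shapely.wkt

    geom = shapely.wkt.loads("POLYGON((1 43, 2 43, 2 44, 1 44, 1 43))")
    bounds = geom.bounds                    # (minx, miny, maxx, maxy) = (1.0, 43.0, 2.0, 44.0)
    nwse = list(bounds[-1:] + bounds[:-1])  # [44.0, 1.0, 43.0, 2.0] -> north, west, south, east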
@@ -444,7 +437,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             else:
                 yield e

-        polygons_list:
+        polygons_list: list[Polygon] = []
         for elem in flatten_elements(georss[0]):
             coords_list = elem.text.split()
             polygon_args = [
@@ -469,7 +462,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
     @staticmethod
     def convert_to_longitude_latitude(
         input_geom_unformatted: Any,
-    ) ->
+    ) -> dict[str, float]:
         bounds = MetadataFormatter.convert_to_bounds(input_geom_unformatted)
         lon = (bounds[0] + bounds[2]) / 2
         lat = (bounds[1] + bounds[3]) / 2
@@ -509,8 +502,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:

     @staticmethod
     def convert_recursive_sub_str(
-        input_obj: Union[
-    ) -> Union[
+        input_obj: Union[dict[Any, Any], list[Any]], args: str
+    ) -> Union[dict[Any, Any], list[Any]]:
         old, new = ast.literal_eval(args)
         return items_recursive_apply(
             input_obj,
@@ -520,8 +513,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:

     @staticmethod
     def convert_dict_update(
-        input_dict:
-    ) ->
+        input_dict: dict[Any, Any], args: str
+    ) -> dict[Any, Any]:
         """Converts"""
         new_items_list = ast.literal_eval(args)

@@ -531,8 +524,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:

     @staticmethod
     def convert_dict_filter(
-        input_dict:
-    ) ->
+        input_dict: dict[Any, Any], jsonpath_filter_str: str
+    ) -> dict[Any, Any]:
         """Fitlers dict items using jsonpath"""

         jsonpath_filter = string_to_jsonpath(jsonpath_filter_str, force=True)
@@ -558,6 +551,16 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         ]
         return string[cmin:cmax:cstep]

+    @staticmethod
+    def convert_to_lower(string: str) -> str:
+        """Convert a string to lowercase."""
+        return string.lower()
+
+    @staticmethod
+    def convert_to_upper(string: str) -> str:
+        """Convert a string to uppercase."""
+        return string.upper()
+
     @staticmethod
     def convert_fake_l2a_title_from_l1c(string: str) -> str:
         id_regex = re.compile(
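The new `to_lower`/`to_upper` converters are reachable from metadata-mapping templates through `format_metadata`; a minimal sketch, assuming the usual `#` separator between a key and its converter:

    from eodag.api.product.metadata_mapping import format_metadata

    format_metadata("{productType#to_lower}", productType="S2MSI2A")  # -> "s2msi2a"
    format_metadata("{platform#to_upper}", platform="s2a")            # -> "S2A"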
@@ -601,8 +604,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         return NOT_AVAILABLE

     @staticmethod
-    def convert_split_id_into_s1_params(product_id: str) ->
-        parts:
+    def convert_split_id_into_s1_params(product_id: str) -> dict[str, str]:
+        parts: list[str] = re.split(r"_(?!_)", product_id)
         if len(parts) < 9:
             logger.error(
                 "id %s does not match expected Sentinel-1 id format", product_id
@@ -636,8 +639,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         return params

     @staticmethod
-    def convert_split_id_into_s3_params(product_id: str) ->
-        parts:
+    def convert_split_id_into_s3_params(product_id: str) -> dict[str, str]:
+        parts: list[str] = re.split(r"_(?!_)", product_id)
         params = {"productType": product_id[4:15]}
         dates = re.findall("[0-9]{8}T[0-9]{6}", product_id)
         start_date = datetime.strptime(dates[0], "%Y%m%dT%H%M%S") - timedelta(
@@ -653,8 +656,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         return params

     @staticmethod
-    def convert_split_id_into_s5p_params(product_id: str) ->
-        parts:
+    def convert_split_id_into_s5p_params(product_id: str) -> dict[str, str]:
+        parts: list[str] = re.split(r"_(?!_)", product_id)
         params = {
             "productType": product_id[9:19],
             "processingMode": parts[1],
@@ -671,7 +674,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         return params

     @staticmethod
-    def convert_split_cop_dem_id(product_id: str) ->
+    def convert_split_cop_dem_id(product_id: str) -> list[int]:
         parts = product_id.split("_")
         lattitude = parts[3]
         longitude = parts[5]
@@ -710,7 +713,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
     @staticmethod
     def convert_to_datetime_dict(
         date: str, format: str
-    ) ->
+    ) -> dict[str, Union[list[str], str]]:
         """Convert a date (str) to a dictionary where values are in the format given in argument

         date == "2021-04-21T18:27:19.123Z" and format == "list" => {
@@ -762,7 +765,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
     @staticmethod
     def convert_interval_to_datetime_dict(
         date: str, separator: str = "/"
-    ) ->
+    ) -> dict[str, list[str]]:
         """Convert a date interval ('/' separated str) to a dictionary where values are lists

         date == "2021-04-21/2021-04-22" => {
@@ -802,7 +805,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         }

     @staticmethod
-    def convert_get_ecmwf_time(date: str) ->
+    def convert_get_ecmwf_time(date: str) -> list[str]:
         """Get the time of a date (str) in the ECMWF format (["HH:00"])

         "2021-04-21T18:27:19.123Z" => ["18:00"]
@@ -834,7 +837,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         date_object = datetime.strptime(utc_date, "%Y-%m-%dT%H:%M:%S.%fZ")
         date_object_second_year = date_object + relativedelta(years=1)
         return [
-            f
+            f"{date_object.strftime('%Y')}_{date_object_second_year.strftime('%y')}"
         ]

     @staticmethod
@@ -846,8 +849,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:

     @staticmethod
     def convert_assets_list_to_dict(
-        assets_list:
-    ) ->
+        assets_list: list[dict[str, str]], asset_name_key: str = "title"
+    ) -> dict[str, dict[str, str]]:
         """Convert a list of assets to a dictionary where keys represent
         name of assets and are found among values of asset dictionaries.

@@ -874,8 +877,8 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
             "asset3": {"href": "qux", "title": "qux-title", "name": "asset3"},
         }
         """
-        asset_names:
-        assets_dict:
+        asset_names: list[str] = []
+        assets_dict: dict[str, dict[str, str]] = {}

         for asset in assets_list:
             asset_name = asset[asset_name_key]
@@ -884,7 +887,7 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:

         # we only keep the equivalent of the path basename in the case where the
         # asset name has a path pattern and this basename is only found once
-        immutable_asset_indexes:
+        immutable_asset_indexes: list[int] = []
         for i, asset_name in enumerate(asset_names):
             if i in immutable_asset_indexes:
                 continue
@@ -902,20 +905,18 @@ def format_metadata(search_param: str, *args: Any, **kwargs: Any) -> str:
         return assets_dict

     # if stac extension colon separator `:` is in search params, parse it to prevent issues with vformat
-    if re.search(r"{[
-        search_param = re.sub(
-            r"{([a-zA-Z0-9_-]*):([a-zA-Z0-9_-]*)}", r"{\1_COLON_\2}", search_param
-        )
+    if re.search(r"{[\w-]*:[\w#-]*}", search_param):
+        search_param = re.sub(r"{([\w-]*):([\w#-]*)}", r"{\1_COLON_\2}", search_param)
     kwargs = {k.replace(":", "_COLON_"): v for k, v in kwargs.items()}

     return MetadataFormatter().vformat(search_param, args, kwargs)


 def properties_from_json(
-    json:
-    mapping:
-    discovery_config: Optional[
-) ->
+    json: dict[str, Any],
+    mapping: dict[str, Any],
+    discovery_config: Optional[dict[str, Any]] = None,
+) -> dict[str, Any]:
     """Extract properties from a provider json result.

     :param json: The representation of a provider result as a json object
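The tightened regexes escape the `:` of STAC-extension keys before `vformat` runs, since a colon inside a `str.format` field would otherwise be read as the start of a format spec. For example:

    import re

    search_param = "{eo:cloud_cover}"
    re.sub(r"{([\w-]*):([\w#-]*)}", r"{\1_COLON_\2}", search_param)
    # -> "{eo_COLON_cloud_cover}", matching the "_COLON_" rewrite applied to the kwargs keys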
@@ -928,7 +929,7 @@ def properties_from_json(
         `discovery_path` (String representation of jsonpath)
     :returns: The metadata of the :class:`~eodag.api.product._product.EOProduct`
     """
-    properties:
+    properties: dict[str, Any] = {}
     templates = {}
     used_jsonpaths = []
    for metadata, value in mapping.items():
@@ -975,10 +976,24 @@ def properties_from_json(
            if re.search(r"({[^{}:]+})+", conversion_or_none):
                conversion_or_none = conversion_or_none.format(**properties)

-
-
-
-
+            if extracted_value == NOT_AVAILABLE:
+                # try if value can be formatted even if it is not available
+                try:
+                    properties[metadata] = format_metadata(
+                        "{%s%s%s}" % (metadata, SEP, conversion_or_none),
+                        **{metadata: extracted_value},
+                    )
+                except ValueError:
+                    logger.debug(
+                        f"{metadata}: {extracted_value} could not be formatted with {conversion_or_none}"
+                    )
+                    continue
+            else:
+                # in this case formatting should work, otherwise something is wrong in the mapping
+                properties[metadata] = format_metadata(
+                    "{%s%s%s}" % (metadata, SEP, conversion_or_none),
+                    **{metadata: extracted_value},
+                )
            # properties as python objects when possible (format_metadata returns only strings)
            try:
                properties[metadata] = ast.literal_eval(properties[metadata])
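`format_metadata` always returns strings, so the `ast.literal_eval` call kept as context above is what turns formatted values back into Python objects whenever the string parses as a literal; for example:

    import ast

    ast.literal_eval("[1.0, 43.0, 2.0, 44.0]")  # -> [1.0, 43.0, 2.0, 44.0]
    ast.literal_eval("{'orbit': 51}")           # -> {'orbit': 51}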
@@ -1058,8 +1073,8 @@ def properties_from_xml(
     xml_as_text: AnyStr,
     mapping: Any,
     empty_ns_prefix: str = "ns",
-    discovery_config: Optional[
-) ->
+    discovery_config: Optional[dict[str, Any]] = None,
+) -> dict[str, Any]:
     """Extract properties from a provider xml result.

     :param xml_as_text: The representation of a provider result as xml
@@ -1077,7 +1092,7 @@ def properties_from_xml(
         `discovery_path` (String representation of xpath)
     :returns: the metadata of the :class:`~eodag.api.product._product.EOProduct`
     """
-    properties:
+    properties: dict[str, Any] = {}
     templates = {}
     used_xpaths = []
     root = etree.XML(xml_as_text)
@@ -1205,10 +1220,10 @@


 def mtd_cfg_as_conversion_and_querypath(
-    src_dict:
-    dest_dict:
+    src_dict: dict[str, Any],
+    dest_dict: dict[str, Any] = {},
     result_type: str = "json",
-) ->
+) -> dict[str, Any]:
     """Metadata configuration dictionary to querypath with conversion dictionary
     Transform every src_dict value from jsonpath_str to tuple `(conversion, jsonpath_object)`
     or from xpath_str to tuple `(conversion, xpath_str)`
@@ -1256,8 +1271,8 @@


 def format_query_params(
-    product_type: str, config: PluginConfig, query_dict:
-) ->
+    product_type: str, config: PluginConfig, query_dict: dict[str, Any]
+) -> dict[str, Any]:
     """format the search parameters to query parameters"""
     if "raise_errors" in query_dict.keys():
         del query_dict["raise_errors"]
@@ -1269,7 +1284,7 @@ def format_query_params(
         **config.products.get(product_type, {}).get("metadata_mapping", {}),
     )

-    query_params:
+    query_params: dict[str, Any] = {}
     # Get all the search parameters that are recognised as queryables by the
     # provider (they appear in the queryables dictionary)
     queryables = _get_queryables(query_dict, config, product_type_metadata_mapping)
@@ -1299,8 +1314,8 @@ def format_query_params(
                query_params[eodag_search_key] = formatted_query_param
            else:
                provider_search_key, provider_value = parts
-                query_params
-
+                query_params[provider_search_key] = format_metadata(
+                    provider_value, product_type, **query_dict
                )
        else:
            query_params[provider_search_key] = user_input
@@ -1359,10 +1374,10 @@ def _resolve_hashes(formatted_query_param: str) -> str:


 def _format_free_text_search(
-    config: PluginConfig, metadata_mapping:
-) ->
+    config: PluginConfig, metadata_mapping: dict[str, Any], **kwargs: Any
+) -> dict[str, Any]:
     """Build the free text search parameter using the search parameters"""
-    query_params:
+    query_params: dict[str, Any] = {}
     if not getattr(config, "free_text_search_operations", None):
         return query_params
     for param, operations_config in config.free_text_search_operations.items():
@@ -1401,13 +1416,13 @@


 def _get_queryables(
-    search_params:
+    search_params: dict[str, Any],
     config: PluginConfig,
-    metadata_mapping:
-) ->
+    metadata_mapping: dict[str, Any],
+) -> dict[str, Any]:
     """Retrieve the metadata mappings that are query-able"""
     logger.debug("Retrieving queryable metadata from metadata_mapping")
-    queryables:
+    queryables: dict[str, Any] = {}
     for eodag_search_key, user_input in search_params.items():
         if user_input is not None:
             md_mapping = metadata_mapping.get(eodag_search_key, (None, NOT_MAPPED))
@@ -1454,7 +1469,7 @@


 def get_queryable_from_provider(
-    provider_queryable: str, metadata_mapping:
+    provider_queryable: str, metadata_mapping: dict[str, Union[str, list[str]]]
 ) -> Optional[str]:
     """Get EODAG configured queryable parameter from provider queryable parameter

@@ -1462,7 +1477,7 @@ def get_queryable_from_provider(
     :param metadata_mapping: metadata-mapping configuration
     :returns: EODAG configured queryable parameter or None
     """
-    pattern = rf"\
+    pattern = rf"\"{provider_queryable}\""
     # if 1:1 mapping exists privilege this one instead of other mapping
     # e.g. provider queryable = year -> use year and not date in which year also appears
     mapping_values = [
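The raw f-string builds a regex that only matches the provider queryable when it appears as a double-quoted token, so an exact 1:1 mapping wins over keys that merely contain it. A small sketch:

    import re

    provider_queryable = "year"
    pattern = rf"\"{provider_queryable}\""            # the regex \"year\"
    bool(re.search(pattern, '"year"'))                # True: exact quoted token
    bool(re.search(pattern, '"yearly_aggregation"'))  # False: substring only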
@@ -1478,7 +1493,7 @@


 def get_provider_queryable_path(
-    queryable: str, metadata_mapping:
+    queryable: str, metadata_mapping: dict[str, Union[str, list[str]]]
 ) -> Optional[str]:
     """Get EODAG configured queryable path from its parameter

@@ -1495,10 +1510,11 @@

 def get_provider_queryable_key(
     eodag_key: str,
-    provider_queryables:
-    metadata_mapping:
+    provider_queryables: dict[str, Any],
+    metadata_mapping: dict[str, Union[list[Any], str]],
 ) -> str:
-    """
+    """Finds the provider queryable corresponding to the given eodag key based on the metadata mapping
+
     :param eodag_key: key in eodag
     :param provider_queryables: queryables returned from the provider
     :param metadata_mapping: metadata mapping from which the keys are retrieved
eodag/api/search_result.py
CHANGED
@@ -18,17 +18,7 @@
 from __future__ import annotations

 from collections import UserList
-from typing import (
-    TYPE_CHECKING,
-    Annotated,
-    Any,
-    Dict,
-    Iterable,
-    List,
-    Optional,
-    Tuple,
-    Union,
-)
+from typing import TYPE_CHECKING, Annotated, Any, Iterable, Optional, Union

 from shapely.geometry import GeometryCollection, shape
 from typing_extensions import Doc
@@ -56,17 +46,17 @@ class SearchResult(UserList):
     :ivar number_matched: Estimated total number of matching results
     """

-    data:
+    data: list[EOProduct]

     errors: Annotated[
-
+        list[tuple[str, Exception]], Doc("Tuple of provider name, exception")
     ]

     def __init__(
         self,
-        products:
+        products: list[EOProduct],
         number_matched: Optional[int] = None,
-        errors:
+        errors: list[tuple[str, Exception]] = [],
     ) -> None:
         super().__init__(products)
         self.number_matched = number_matched
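With the modernized annotations, a `SearchResult` still takes the product list plus the optional match count and per-provider errors; a minimal sketch with made-up values:

    from eodag.api.search_result import SearchResult

    results = SearchResult(
        products=[],
        number_matched=0,
        errors=[("some_provider", RuntimeError("search timed out"))],  # hypothetical error entry
    )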
@@ -92,7 +82,7 @@ class SearchResult(UserList):
         return self.crunch(FilterDate(dict(start=start, end=end)))

     def filter_latest_intersect(
-        self, geometry: Union[
+        self, geometry: Union[dict[str, Any], BaseGeometry, Any]
     ) -> SearchResult:
         """
         Apply :class:`~eodag.plugins.crunch.filter_latest_intersect.FilterLatestIntersect` crunch,
@@ -148,7 +138,7 @@
         return self.filter_property(storageStatus="ONLINE")

     @staticmethod
-    def from_geojson(feature_collection:
+    def from_geojson(feature_collection: dict[str, Any]) -> SearchResult:
         """Builds an :class:`~eodag.api.search_result.SearchResult` object from its representation as geojson

         :param feature_collection: A collection representing a search result.
@@ -161,7 +151,7 @@
             ]
         )

-    def as_geojson_object(self) ->
+    def as_geojson_object(self) -> dict[str, Any]:
         """GeoJSON representation of SearchResult"""
         return {
             "type": "FeatureCollection",
@@ -182,7 +172,7 @@
         return self.as_shapely_geometry_object().wkt

     @property
-    def __geo_interface__(self) ->
+    def __geo_interface__(self) -> dict[str, Any]:
         """Implements the geo-interface protocol.

         See https://gist.github.com/sgillies/2217756
@@ -230,9 +220,9 @@ class RawSearchResult(UserList):
     :param results: A list of raw/unparsed search results
     """

-    data:
-    query_params:
-    product_type_def_params:
+    data: list[Any]
+    query_params: dict[str, Any]
+    product_type_def_params: dict[str, Any]

-    def __init__(self, results:
+    def __init__(self, results: list[Any]) -> None:
         super(RawSearchResult, self).__init__(results)
eodag/cli.py
CHANGED
@@ -48,7 +48,7 @@ import shutil
 import sys
 import textwrap
 from importlib.metadata import metadata
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Mapping

 import click

@@ -57,6 +57,11 @@ from eodag.utils import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE, parse_qs
 from eodag.utils.exceptions import NoMatchingProductType, UnsupportedProvider
 from eodag.utils.logging import setup_logging

+try:
+    from eodag.rest.utils import LIVENESS_PROBE_PATH
+except ImportError:
+    pass
+
 if TYPE_CHECKING:
     from click import Context

@@ -70,6 +75,18 @@ CRUNCHERS = [
 ]


+class LivenessFilter:
+    """
+    Filter out requests to the liveness probe endpoint
+    """
+
+    def filter(self, record):
+        """
+        Filter method required by the Python logging API.
+        """
+        return LIVENESS_PROBE_PATH not in record.getMessage()
+
+
 class MutuallyExclusiveOption(click.Option):
     """Mutually Exclusive Options for Click
     from https://gist.github.com/jacobtolar/fb80d5552a9a9dfc32b12a829fa21c0c
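A standard logging filter drops a record when `filter()` returns a falsy value; the `serve_rest` hunk further below attaches this one to uvicorn's access logger via `LOGGING_CONFIG`, where the `"()"` key tells `dictConfig` which factory to instantiate. A standalone sketch of the mechanism, with `"/liveness"` standing in for the real `LIVENESS_PROBE_PATH` from eodag.rest.utils:

    import logging

    PROBE_PATH = "/liveness"  # made-up stand-in value, for illustration only

    class ProbeFilter:
        def filter(self, record: logging.LogRecord) -> bool:
            return PROBE_PATH not in record.getMessage()

    rec = logging.LogRecord("uvicorn.access", logging.INFO, __file__, 0,
                            "GET /liveness HTTP/1.1 200", None, None)
    ProbeFilter().filter(rec)  # False -> this access-log line is suppressed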
@@ -87,7 +104,7 @@ class MutuallyExclusiveOption(click.Option):
         super(MutuallyExclusiveOption, self).__init__(*args, **kwargs)

     def handle_parse_result(
-        self, ctx: Context, opts: Mapping[str, Any], args:
+        self, ctx: Context, opts: Mapping[str, Any], args: list[str]
     ):
         """Raise error or use parent handle_parse_result()"""
         if self.mutually_exclusive.intersection(opts) and self.name in opts:
@@ -342,9 +359,9 @@ def search_crunch(ctx: Context, **kwargs: Any) -> None:
     count = kwargs.pop("count")

     # Process inputs for crunch
-    cruncher_names:
+    cruncher_names: set[Any] = set(kwargs.pop("cruncher") or [])
     cruncher_args = kwargs.pop("cruncher_args")
-    cruncher_args_dict:
+    cruncher_args_dict: dict[str, dict[str, Any]] = {}
     if cruncher_args:
         for cruncher, argname, argval in cruncher_args:
             cruncher_args_dict.setdefault(cruncher, {}).setdefault(argname, argval)
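The annotated `cruncher_args_dict` is filled from `(cruncher, argname, argval)` triples via chained `setdefault` calls; for instance (illustrative values):

    cruncher_args = [("FilterOverlap", "minimum_overlap", "10")]
    cruncher_args_dict: dict[str, dict[str, str]] = {}
    for cruncher, argname, argval in cruncher_args:
        cruncher_args_dict.setdefault(cruncher, {}).setdefault(argname, argval)
    # -> {"FilterOverlap": {"minimum_overlap": "10"}}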
@@ -679,7 +696,9 @@ def serve_rest(
     try:
         pid = os.fork()
     except OSError as e:
-        raise Exception(
+        raise Exception(
+            "%s [%d]" % (e.strerror, e.errno) if e.errno is not None else e.strerror
+        )

     if pid == 0:
         os.setsid()
@@ -691,8 +710,10 @@

     logging_config = uvicorn.config.LOGGING_CONFIG
     uvicorn_fmt = "%(asctime)-15s %(name)-32s [%(levelname)-8s] %(message)s"
+    logging_config["filters"] = {"liveness": {"()": LivenessFilter}}
     logging_config["formatters"]["access"]["fmt"] = uvicorn_fmt
     logging_config["formatters"]["default"]["fmt"] = uvicorn_fmt
+    logging_config["loggers"]["uvicorn.access"]["filters"] = ["liveness"]

     eodag_formatter = logging.Formatter(
         "%(asctime)-15s %(name)-32s [%(levelname)-8s] (tid=%(thread)d) %(message)s"