eodag 3.0.0b2__py3-none-any.whl → 3.0.0b3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eodag/__init__.py +6 -8
- eodag/api/core.py +113 -169
- eodag/api/product/__init__.py +10 -4
- eodag/api/product/_assets.py +2 -14
- eodag/api/product/_product.py +16 -30
- eodag/api/product/drivers/__init__.py +7 -2
- eodag/api/product/drivers/base.py +0 -3
- eodag/api/product/metadata_mapping.py +0 -28
- eodag/api/search_result.py +4 -9
- eodag/config.py +45 -41
- eodag/plugins/apis/base.py +3 -3
- eodag/plugins/apis/ecmwf.py +2 -3
- eodag/plugins/apis/usgs.py +43 -14
- eodag/plugins/authentication/aws_auth.py +11 -2
- eodag/plugins/authentication/openid_connect.py +5 -4
- eodag/plugins/authentication/token.py +2 -1
- eodag/plugins/crunch/base.py +3 -1
- eodag/plugins/crunch/filter_date.py +3 -9
- eodag/plugins/crunch/filter_latest_intersect.py +0 -3
- eodag/plugins/crunch/filter_latest_tpl_name.py +1 -4
- eodag/plugins/crunch/filter_overlap.py +4 -8
- eodag/plugins/crunch/filter_property.py +5 -11
- eodag/plugins/download/aws.py +46 -78
- eodag/plugins/download/base.py +27 -68
- eodag/plugins/download/http.py +48 -57
- eodag/plugins/download/s3rest.py +17 -25
- eodag/plugins/manager.py +5 -18
- eodag/plugins/search/__init__.py +9 -9
- eodag/plugins/search/base.py +7 -26
- eodag/plugins/search/build_search_result.py +0 -13
- eodag/plugins/search/cop_marine.py +1 -3
- eodag/plugins/search/creodias_s3.py +0 -3
- eodag/plugins/search/data_request_search.py +10 -5
- eodag/plugins/search/qssearch.py +95 -53
- eodag/plugins/search/static_stac_search.py +6 -3
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +24 -0
- eodag/resources/providers.yml +198 -154
- eodag/resources/user_conf_template.yml +27 -27
- eodag/rest/core.py +11 -43
- eodag/rest/server.py +1 -6
- eodag/rest/stac.py +13 -87
- eodag/rest/types/eodag_search.py +4 -7
- eodag/rest/types/queryables.py +4 -12
- eodag/rest/types/stac_search.py +7 -11
- eodag/rest/utils/rfc3339.py +0 -1
- eodag/types/__init__.py +9 -3
- eodag/types/download_args.py +14 -5
- eodag/types/search_args.py +7 -8
- eodag/types/whoosh.py +0 -2
- eodag/utils/__init__.py +20 -79
- eodag/utils/constraints.py +0 -8
- eodag/utils/import_system.py +0 -4
- eodag/utils/logging.py +0 -3
- eodag/utils/notebook.py +4 -4
- eodag/utils/requests.py +12 -20
- eodag/utils/rest.py +0 -4
- eodag/utils/stac_reader.py +2 -14
- {eodag-3.0.0b2.dist-info → eodag-3.0.0b3.dist-info}/METADATA +32 -14
- eodag-3.0.0b3.dist-info/RECORD +110 -0
- {eodag-3.0.0b2.dist-info → eodag-3.0.0b3.dist-info}/WHEEL +1 -1
- eodag-3.0.0b2.dist-info/RECORD +0 -110
- {eodag-3.0.0b2.dist-info → eodag-3.0.0b3.dist-info}/LICENSE +0 -0
- {eodag-3.0.0b2.dist-info → eodag-3.0.0b3.dist-info}/entry_points.txt +0 -0
- {eodag-3.0.0b2.dist-info → eodag-3.0.0b3.dist-info}/top_level.txt +0 -0
eodag/utils/__init__.py
CHANGED

@@ -55,6 +55,7 @@ from typing import (
     Any,
     Callable,
     Dict,
+    Iterable,
     Iterator,
     List,
     Mapping,
@@ -321,9 +322,7 @@ def mutate_dict_in_place(func: Callable[[Any], Any], mapping: Dict[Any, Any]) ->
     mapping.

     :param func: A function to apply to each value of mapping which is not a dict object
-    :type func: func
     :param mapping: A Python dict object
-    :type mapping: dict
     :returns: None
     """
     for key, value in mapping.items():
@@ -349,10 +348,8 @@ def merge_mappings(mapping1: Dict[Any, Any], mapping2: Dict[Any, Any]) -> None:
     to mapping1 as is.

     :param mapping1: The mapping containing values to be overridden
-    :type mapping1: dict
     :param mapping2: The mapping containing values that will override the
                      first mapping
-    :type mapping2: dict
     """
     # A mapping between mapping1 keys as lowercase strings and original mapping1 keys
     m1_keys_lowercase = {key.lower(): key for key in mapping1}
@@ -419,9 +416,7 @@ def get_timestamp(date_time: str) -> float:
     If the datetime has no offset, it is assumed to be an UTC datetime.

     :param date_time: The datetime string to return as timestamp
-    :type date_time: str
     :returns: The timestamp corresponding to the date_time string in seconds
-    :rtype: float
     """
     dt = isoparse(date_time)
     if not dt.tzinfo:
@@ -443,7 +438,6 @@ class DownloadedCallback:
         """Callback

         :param product: The downloaded EO product
-        :type product: :class:`~eodag.api.product._product.EOProduct`
         """
         logger.debug("Download finished for the product %s", product)

@@ -483,9 +477,7 @@ class ProgressCallback(tqdm):
         """Update the progress bar.

         :param increment: Amount of data already processed
-        :type increment: int
         :param total: (optional) Maximum amount of data to be processed
-        :type total: int
         """
         if total is not None and total != self.total:
             self.reset(total=total)
@@ -538,9 +530,7 @@ def rename_subfolder(dirpath: str, name: str) -> None:
     raise RuntimeError if no subfolder can be found

     :param dirpath: path to the directory containing the subfolder
-    :type dirpath: str
     :param name: new name of the subfolder
-    :type name: str
     :raises: RuntimeError

     Example:
@@ -589,11 +579,8 @@ def format_dict_items(
     True

     :param config_dict: Dictionnary having values that need to be parsed
-    :type config_dict: dict
     :param format_variables: Variables used as args for parsing
-    :type format_variables: dict
     :returns: Updated dict
-    :rtype: dict
     """
     return dict_items_recursive_apply(config_dict, format_string, **format_variables)

@@ -611,11 +598,8 @@ def jsonpath_parse_dict_items(
     True

     :param jsonpath_dict: Dictionnary having values that need to be parsed
-    :type jsonpath_dict: dict
     :param values_dict: Values dict used as args for parsing
-    :type values_dict: dict
     :returns: Updated dict
-    :rtype: dict
     """
     return dict_items_recursive_apply(jsonpath_dict, parse_jsonpath, **values_dict)

@@ -661,15 +645,10 @@ def update_nested_dict(
     True

     :param old_dict: Dict to be updated
-    :type old_dict: dict
     :param new_dict: Incomming dict
-    :type new_dict: dict
     :param extend_list_values: (optional) Extend old_dict value if both old/new values are lists
-    :type extend_list_values: bool
     :param allow_empty_values: (optional) Allow update with empty values
-    :type allow_empty_values: bool
     :returns: Updated dict
-    :rtype: dict
     """
     for k, v in new_dict.items():
         if k in old_dict.keys():
@@ -734,13 +713,9 @@ def items_recursive_apply(
     'foo'

     :param input_obj: Input object (dict or list)
-    :type input_obj: Union[dict,list]
     :param apply_method: Method to be applied to dict elements
-    :type apply_method: :func:`apply_method`
     :param apply_method_parameters: Optional parameters passed to the method
-    :type apply_method_parameters: dict
     :returns: Updated object
-    :rtype: Union[dict, list]
     """
     if isinstance(input_obj, dict):
         return dict_items_recursive_apply(
@@ -769,13 +744,9 @@ def dict_items_recursive_apply(
     True

     :param config_dict: Input nested dictionnary
-    :type config_dict: dict
     :param apply_method: Method to be applied to dict elements
-    :type apply_method: :func:`apply_method`
     :param apply_method_parameters: Optional parameters passed to the method
-    :type apply_method_parameters: dict
     :returns: Updated dict
-    :rtype: dict
     """
     result_dict: Dict[Any, Any] = deepcopy(config_dict)
     for dict_k, dict_v in result_dict.items():
@@ -809,13 +780,9 @@ def list_items_recursive_apply(
     [{'foo': {'bar': 'BAZ!'}}, 'QUX!']

     :param config_list: Input list containing nested lists/dicts
-    :type config_list: list
     :param apply_method: Method to be applied to list elements
-    :type apply_method: :func:`apply_method`
     :param apply_method_parameters: Optional parameters passed to the method
-    :type apply_method_parameters: dict
     :returns: Updated list
-    :rtype: list
     """
     result_list = deepcopy(config_list)
     for list_idx, list_v in enumerate(result_list):
@@ -850,9 +817,7 @@ def items_recursive_sort(
     'foo'

     :param input_obj: Input object (dict or list)
-    :type input_obj: Union[dict,list]
     :returns: Updated object
-    :rtype: Union[dict, list]
     """
     if isinstance(input_obj, dict):
         return dict_items_recursive_sort(input_obj)
@@ -872,9 +837,7 @@ def dict_items_recursive_sort(config_dict: Dict[Any, Any]) -> Dict[Any, Any]:
     True

     :param config_dict: Input nested dictionnary
-    :type config_dict: dict
     :returns: Updated dict
-    :rtype: dict
     """
     result_dict: Dict[Any, Any] = deepcopy(config_dict)
     for dict_k, dict_v in result_dict.items():
@@ -895,9 +858,7 @@ def list_items_recursive_sort(config_list: List[Any]) -> List[Any]:
     ['b', {0: 1, 1: 2, 2: 0}]

     :param config_list: Input list containing nested lists/dicts
-    :type config_list: list
     :returns: Updated list
-    :rtype: list
     """
     result_list: List[Any] = deepcopy(config_list)
     for list_idx, list_v in enumerate(result_list):
@@ -926,11 +887,8 @@ def string_to_jsonpath(*args: Any, force: bool = False) -> Union[str, JSONPath]:
     Fields('foo')

     :param args: Last arg as input string value, to be converted
-    :type args: str
     :param force: force conversion even if input string is not detected as a jsonpath
-    :type force: bool
     :returns: Parsed value
-    :rtype: str or Child or Root
     """
     path_str: str = args[-1]
     if JSONPATH_MATCH.match(str(path_str)) or force:
@@ -998,11 +956,8 @@ def format_string(key: str, str_to_format: Any, **format_variables: Any) -> Any:
     'foo qux, quux ?'

     :param key: Input item key
-    :type key: str
     :param str_to_format: Input item value, to be parsed
-    :type str_to_format: str
     :returns: Parsed value
-    :rtype: str
     """
     if not isinstance(str_to_format, str):
         return str_to_format
@@ -1040,13 +995,9 @@ def parse_jsonpath(
     'baz'

     :param key: Input item key
-    :type key: str
     :param jsonpath_obj: Input item value, to be parsed
-    :type jsonpath_obj: str or jsonpath.Child
     :param values_dict: Values used as args for parsing
-    :type values_dict: dict
     :returns: Parsed value
-    :rtype: str
     """
     if isinstance(jsonpath_obj, jsonpath.Child):
         match = jsonpath_obj.find(values_dict)
@@ -1062,9 +1013,7 @@ def nested_pairs2dict(pairs: Union[List[Any], Any]) -> Union[Any, Dict[Any, Any]
     {'foo': {'bar': 'baz'}}

     :param pairs: Pairs of key / value
-    :type pairs: list or Any
     :returns: Created dict
-    :rtype: dict or Any
     """
     d = {}
     try:
@@ -1084,11 +1033,8 @@ def get_geometry_from_various(
     """Creates a shapely geometry using given query kwargs arguments

     :param locations_config: (optional) EODAG locations configuration
-    :type locations_config: list
     :param query_args: Query kwargs arguments from core.search() method
-    :type query_args: dict
     :returns: shapely Geometry found
-    :rtype: :class:`shapely.geometry.BaseGeometry`
     :raises: :class:`ValueError`
     """
     geom = None
@@ -1150,11 +1096,11 @@ def get_geometry_from_various(
     for arg in query_locations.keys():
         if arg in locations_dict.keys():
             found = False
-            pattern = query_locations[arg]
+            pattern = rf"{query_locations[arg]}"
             attr = locations_dict[arg]["attr"]
             with shapefile.Reader(locations_dict[arg]["path"]) as shp:
                 for shaperec in shp.shapeRecords():
-                    if re.search(pattern, shaperec.record[attr]):
+                    if re.search(pattern, str(shaperec.record[attr])):
                         found = True
                         new_geom = shape(shaperec.shape)
                         # get geoms union
@@ -1194,9 +1140,7 @@ def md5sum(file_path: str) -> str:
     'd41d8cd98f00b204e9800998ecf8427e'

     :param file_path: input file path
-    :type file_path: str
     :returns: MD5 checksum
-    :rtype: str
     """
     hash_md5 = hashlib.md5()
     with open(file_path, "rb") as f:
@@ -1212,9 +1156,7 @@ def obj_md5sum(data: Any) -> str:
     '37a6259cc0c1dae299a7866489dff0bd'

     :param data: JSON serializable input object
-    :type data: Any
     :returns: MD5 checksum
-    :rtype: str
     """
     return hashlib.md5(orjson.dumps(data, option=orjson.OPT_SORT_KEYS)).hexdigest()

@@ -1238,9 +1180,7 @@ def cached_parse(str_to_parse: str) -> JSONPath:
     CacheInfo(hits=1, misses=2, maxsize=128, currsize=2)

     :param str_to_parse: string to parse as jsonpath
-    :type str_to_parse: str
     :returns: parsed jsonpath
-    :rtype: :class:`jsonpath_ng.JSONPath`
     """
     return parse(str_to_parse)

@@ -1257,9 +1197,7 @@ def cached_yaml_load(config_path: str) -> Dict[str, Any]:
     """Cached yaml.load

     :param config_path: path to the yaml configuration file
-    :type config_path: str
     :returns: loaded yaml configuration
-    :rtype: dict
     """
     return copy_deepcopy(_mutable_cached_yaml_load(config_path))

@@ -1276,9 +1214,7 @@ def cached_yaml_load_all(config_path: str) -> List[Any]:
     Load all configurations stored in the configuration file as separated yaml documents

     :param config_path: path to the yaml configuration file
-    :type config_path: str
     :returns: list of configurations
-    :rtype: list
     """
     return copy_deepcopy(_mutable_cached_yaml_load_all(config_path))

@@ -1289,11 +1225,8 @@ def get_bucket_name_and_prefix(
     """Extract bucket name and prefix from URL

     :param url: (optional) URL to use as product.location
-    :type url: str
     :param bucket_path_level: (optional) bucket location index in path.split('/')
-    :type bucket_path_level: int
     :returns: bucket_name and prefix as str
-    :rtype: tuple
     """
     bucket, prefix = None, None

@@ -1322,9 +1255,7 @@ def flatten_top_directories(
     """Flatten directory structure, removing common empty sub-directories

     :param nested_dir_root: Absolute path of the directory structure to flatten
-    :type nested_dir_root: str
     :param common_subdirs_path: (optional) Absolute path of the desired subdirectory to remove
-    :type common_subdirs_path: str
     """
     if not common_subdirs_path:
         subpaths_list = [p for p in Path(nested_dir_root).glob("**/*") if p.is_file()]
@@ -1346,9 +1277,7 @@ def deepcopy(sth: Any) -> Any:
     `_copy_list` and `_copy_dict` available for the moment

     :param sth: Object to copy
-    :type sth: Any
     :returns: Copied object
-    :rtype: Any
     """
     _dispatcher: Dict[Type[Any], Callable[..., Any]] = {}

@@ -1391,9 +1320,7 @@ def parse_header(header: str) -> Message:
     'example.txt'

     :param header: header to parse
-    :type header: str
     :returns: parsed header
-    :rtype: :class:`~email.message.Message`
     """
     m = Message()
     m["content-type"] = header
@@ -1435,7 +1362,7 @@ def cast_scalar_value(value: Any, new_type: Any) -> Any:
 class StreamResponse:
     """Represents a streaming response"""

-    content:
+    content: Iterable[bytes]
     headers: Optional[Mapping[str, str]] = None
     media_type: Optional[str] = None
     status_code: Optional[int] = None
@@ -1460,9 +1387,7 @@ def get_ssl_context(ssl_verify: bool) -> ssl.SSLContext:
     """
     Returns an SSL context based on ssl_verify argument.
     :param ssl_verify: ssl_verify parameter
-    :type ssl_verify: bool
     :returns: An SSL context object.
-    :rtype: ssl.SSLContext
     """
     ctx = ssl.create_default_context()
     if not ssl_verify:
@@ -1472,3 +1397,19 @@ def get_ssl_context(ssl_verify: bool) -> ssl.SSLContext:
         ctx.check_hostname = True
         ctx.verify_mode = ssl.CERT_REQUIRED
     return ctx
+
+
+def sort_dict(input_dict: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Recursively sorts a dict by keys.
+
+    :param input_dict: input dict
+    :returns: sorted dict
+
+    >>> sort_dict({"b": {"c": 1, "a": 2, "b": 3}, "a": 4})
+    {'a': 4, 'b': {'a': 2, 'b': 3, 'c': 1}}
+    """
+    return {
+        k: sort_dict(v) if isinstance(v, dict) else v
+        for k, v in sorted(input_dict.items())
+    }
eodag/utils/constraints.py
CHANGED

@@ -41,15 +41,10 @@ def get_constraint_queryables_with_additional_params(
     gets the queryables from the constraints using the given parameters
     For all queryables only values matching the given parameters based on the constraints will be returned
     :param constraints: list of constraints fetched from the provider
-    :type constraints: List[Any]
     :param input_params: conditions the constraints should fulfil
-    :type input_params: dict
     :param plugin: search or api plugin that is used
-    :type plugin: Union[Search, Api]
     :param product_type: product type for which the data should be fetched
-    :type product_type: str
     :returns: dict containing queryable data
-    :rtype: Dict[str, Dict[str, Set[Any]]]
     """
     defaults = copy.deepcopy(input_params)
     constraint_matches = {}
@@ -174,11 +169,8 @@ def fetch_constraints(
     """
     fetches the constraints from a provider
     :param constraints_url: url from which the constraints can be fetched
-    :type constraints_url: str
     :param plugin: api or search plugin of the provider
-    :type plugin: Union[Search, Api]
     :returns: list of constraints fetched from the provider
-    :rtype: List[Dict[Any, Any]]
     """
     auth = (
         plugin.auth
eodag/utils/import_system.py
CHANGED

@@ -52,12 +52,9 @@ def import_all_modules(
         import_all_modules(base_package)

     :param base_package: The package from where we must import all the modules
-    :type base_package: `module`
     :param depth: (optional) If `base_package` has sub packages, import all the modules recursively up to this level.
                   Defaults to 1 (limits to the level of `base_package`)
-    :type depth: int
     :param exclude: (optional) The sub packages and modules to ignore while importing. Empty by default
-    :type exclude: tuple(str, ...)

     .. note::
         if `package` and `subpackage` have a module of the same name and this name is included in the exclude
@@ -88,7 +85,6 @@ def patch_owslib_requests(verify: bool = True) -> Generator[None, Any, None]:
     these functions in `owslib <https://geopython.github.io/OWSLib/>`_.

     :param verify: (optional) Whether to verify the use of https or not
-    :type verify: bool
     """
     from owslib.util import requests

eodag/utils/logging.py
CHANGED

@@ -32,9 +32,7 @@ def setup_logging(verbose: int, no_progress_bar: bool = False) -> None:
         * 1: no logging but still displays progress bars
         * 2: INFO level
         * 3: DEBUG level
-    :type verbose: int
     :param no_progress_bar: (optional) Disable progress bars
-    :type no_progress_bar: bool
     """
     global disable_tqdm
     disable_tqdm = no_progress_bar
@@ -142,7 +140,6 @@ def get_logging_verbose() -> Optional[int]:
         3

     :returns: Verbose level in ``[0, 1, 2, 3]`` or None if not set
-    :rtype: int or None
     """
     global disable_tqdm
     logger = logging.getLogger("eodag")
eodag/utils/notebook.py
CHANGED

@@ -23,7 +23,7 @@ from typing import Any, Optional
 def check_ipython() -> bool:
     """Check if called from ipython"""
     try:
-        __IPYTHON__
+        __IPYTHON__  # type: ignore[name-defined]
         return True
     except NameError:
         return False
@@ -32,7 +32,7 @@ def check_ipython() -> bool:
 def check_notebook() -> bool:
     """Check if called from a notebook"""
     try:
-        shell = get_ipython().__class__.__name__
+        shell = get_ipython().__class__.__name__  # type: ignore[name-defined]
         if shell == "ZMQInteractiveShell":
             return True  # Jupyter notebook or qtconsole
         elif shell == "TerminalInteractiveShell":
@@ -69,7 +69,7 @@ class NotebookWidgets:
         if not self.is_notebook:
             return None

-        self.html_box
+        setattr(self.html_box, "data", html_value)

         if not self.html_box_shown:
             self._html_handle = self.display(self.html_box, display_id=True)
@@ -83,5 +83,5 @@ class NotebookWidgets:
         if not self.is_notebook:
             return None

-        self.html_box
+        setattr(self.html_box, "data", "")
         self._update_display(self.html_box, display_id=self._html_handle.display_id)
eodag/utils/requests.py
CHANGED

@@ -32,22 +32,17 @@ logger = logging.getLogger("eodag.utils.requests")
 def fetch_json(
     file_url: str,
     req_session: Optional[requests.Session] = None,
-    auth: Optional[requests.AuthBase] = None,
+    auth: Optional[requests.auth.AuthBase] = None,
     timeout: float = HTTP_REQ_TIMEOUT,
 ) -> Any:
     """
     Fetches http/distant or local json file

     :param file_url: url from which the file can be fetched
-    :type file_url: str
     :param req_session: (optional) requests session
-    :type req_session: requests.Session
     :param auth: (optional) authenticated object if request needs authentication
-    :type auth: Optional[requests.AuthBase]
     :param timeout: (optional) authenticated object
-    :type timeout: float
     :returns: json file content
-    :rtype: Any
     """
     if req_session is None:
         req_session = requests.Session()
@@ -86,11 +81,8 @@ class LocalFileAdapter(requests.adapters.BaseAdapter):
         """Return an HTTP status for the given filesystem path.

         :param method: method of the request
-        :type method: str
         :param path: path of the given file
-        :type path: str
         :returns: HTTP status and its associated message
-        :rtype: Tuple[int, str]
         """
         if method.lower() in ("put", "delete"):
             return 501, "Not Implemented"  # TODO
@@ -105,31 +97,31 @@ class LocalFileAdapter(requests.adapters.BaseAdapter):
         else:
             return 200, "OK"

-    def send(
+    def send(
+        self, request: requests.PreparedRequest, *args: Any, **kwargs: Any
+    ) -> requests.Response:
         """Wraps a file, described in request, in a Response object.

         :param req: The PreparedRequest being "sent".
-        :type req: :class:`~requests.PreparedRequest`
         :param kwargs: (not used) additionnal arguments of the request
-        :type kwargs: Any
         :returns: a Response object containing the file
-        :rtype: :class:`~requests.Response`
         """
         response = requests.Response()

-
-
-        if req.method is None or req.url is None:
+        if request.method is None or request.url is None:
             raise RequestError("Method or url of the request is missing")
-
-
+
+        path_url = uri_to_path(request.url)
+
+        response.status_code, response.reason = self._chkpath(request.method, path_url)
+        if response.status_code == 200 and request.method.lower() != "head":
             try:
                 response.raw = open(path_url, "rb")
             except (OSError, IOError) as err:
                 response.status_code = 500
                 response.reason = str(err)
-        response.url =
-        response.request =
+        response.url = request.url
+        response.request = request

         return response

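
The reworked LocalFileAdapter.send now takes a typed requests.PreparedRequest, skips opening the file for HEAD requests, and always fills in response.url and response.request. Such an adapter is typically mounted on a requests session so that file:// URLs go through the normal requests API; a minimal usage sketch (the no-argument constructor and the local path are assumptions, not shown in this diff):

    import requests

    from eodag.utils.requests import LocalFileAdapter

    # Route every file:// URL of this session through LocalFileAdapter.send()
    session = requests.Session()
    session.mount("file://", LocalFileAdapter())

    # Fetch a local file as if it were an HTTP resource (hypothetical path)
    resp = session.get("file:///tmp/example.json")
    print(resp.status_code, resp.reason)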
eodag/utils/rest.py
CHANGED

@@ -39,9 +39,7 @@ def get_datetime(arguments: Dict[str, Any]) -> Tuple[Optional[str], Optional[str
     """Get start and end dates from a dict containing `/` separated dates in `datetime` item

     :param arguments: dict containing a single date or `/` separated dates in `datetime` item
-    :type arguments: dict
     :returns: Start date and end date from datetime string (duplicate value if only one date as input)
-    :rtype: Tuple[Optional[str], Optional[str]]
     """
     datetime_str = arguments.pop("datetime", None)

@@ -86,10 +84,8 @@ def rfc3339_str_to_datetime(s: str) -> datetime.datetime:
     """Convert a string conforming to RFC 3339 to a :class:`datetime.datetime`.

     :param s: The string to convert to :class:`datetime.datetime`
-    :type s: str

     :returns: The datetime represented by the ISO8601 (RFC 3339) formatted string
-    :rtype: :class:`datetime.datetime`

     raises: :class:`ValidationError`
     """
eodag/utils/stac_reader.py
CHANGED

@@ -112,23 +112,17 @@ def fetch_stac_items(
     """Fetch STAC item from a single item file or items from a catalog.

     :param stac_path: A STAC object filepath
-    :type stac_path: str
     :param recursive: (optional) Browse recursively in child nodes if True
-    :type recursive: bool
     :param max_connections: (optional) Maximum number of connections for HTTP requests
-    :type max_connections: int
     :param timeout: (optional) Timeout in seconds for each internal HTTP request
-    :type timeout: int
     :param ssl_verify: (optional) SSL Verification for HTTP request
-    :type ssl_verify: bool
     :returns: The items found in `stac_path`
-    :rtype: :class:`list`
     """

     # URI opener used by PySTAC internally, instantiated here
     # to retrieve the timeout.
     _text_opener = _TextOpener(timeout, ssl_verify)
-    pystac.StacIO.read_text = _text_opener
+    pystac.StacIO.read_text = _text_opener  # type: ignore[assignment]

     stac_obj = pystac.read_file(stac_path)
     # Single STAC item
@@ -198,22 +192,16 @@ def fetch_stac_collections(
     """Fetch STAC collection(s) from a catalog.

     :param stac_path: A STAC object filepath
-    :type stac_path: str
     :param collection: the collection to fetch
-    :type collection: Optional[str]
     :param max_connections: (optional) Maximum number of connections for HTTP requests
-    :type max_connections: int
     :param timeout: (optional) Timeout in seconds for each internal HTTP request
-    :type timeout: int
     :param ssl_verify: (optional) SSL Verification for HTTP request
-    :type ssl_verify: bool
     :returns: The collection(s) found in `stac_path`
-    :rtype: :class:`list`
     """

     # URI opener used by PySTAC internally, instantiated here to retrieve the timeout.
     _text_opener = _TextOpener(timeout, ssl_verify)
-    pystac.StacIO.read_text = _text_opener
+    pystac.StacIO.read_text = _text_opener  # type: ignore[assignment]

     stac_obj = pystac.read_file(stac_path)
     if isinstance(stac_obj, pystac.Catalog):