eodag 2.12.0__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
Files changed (93)
  1. eodag/__init__.py +6 -8
  2. eodag/api/core.py +654 -538
  3. eodag/api/product/__init__.py +12 -2
  4. eodag/api/product/_assets.py +59 -16
  5. eodag/api/product/_product.py +100 -93
  6. eodag/api/product/drivers/__init__.py +7 -2
  7. eodag/api/product/drivers/base.py +0 -3
  8. eodag/api/product/metadata_mapping.py +192 -96
  9. eodag/api/search_result.py +69 -10
  10. eodag/cli.py +55 -25
  11. eodag/config.py +391 -116
  12. eodag/plugins/apis/base.py +11 -165
  13. eodag/plugins/apis/ecmwf.py +36 -25
  14. eodag/plugins/apis/usgs.py +80 -35
  15. eodag/plugins/authentication/aws_auth.py +13 -4
  16. eodag/plugins/authentication/base.py +10 -1
  17. eodag/plugins/authentication/generic.py +2 -2
  18. eodag/plugins/authentication/header.py +31 -6
  19. eodag/plugins/authentication/keycloak.py +17 -84
  20. eodag/plugins/authentication/oauth.py +3 -3
  21. eodag/plugins/authentication/openid_connect.py +268 -49
  22. eodag/plugins/authentication/qsauth.py +4 -1
  23. eodag/plugins/authentication/sas_auth.py +9 -2
  24. eodag/plugins/authentication/token.py +98 -47
  25. eodag/plugins/authentication/token_exchange.py +122 -0
  26. eodag/plugins/crunch/base.py +3 -1
  27. eodag/plugins/crunch/filter_date.py +3 -9
  28. eodag/plugins/crunch/filter_latest_intersect.py +0 -3
  29. eodag/plugins/crunch/filter_latest_tpl_name.py +1 -4
  30. eodag/plugins/crunch/filter_overlap.py +4 -8
  31. eodag/plugins/crunch/filter_property.py +5 -11
  32. eodag/plugins/download/aws.py +149 -185
  33. eodag/plugins/download/base.py +88 -97
  34. eodag/plugins/download/creodias_s3.py +1 -1
  35. eodag/plugins/download/http.py +638 -310
  36. eodag/plugins/download/s3rest.py +47 -45
  37. eodag/plugins/manager.py +228 -88
  38. eodag/plugins/search/__init__.py +36 -0
  39. eodag/plugins/search/base.py +239 -30
  40. eodag/plugins/search/build_search_result.py +382 -37
  41. eodag/plugins/search/cop_marine.py +441 -0
  42. eodag/plugins/search/creodias_s3.py +25 -20
  43. eodag/plugins/search/csw.py +5 -7
  44. eodag/plugins/search/data_request_search.py +61 -30
  45. eodag/plugins/search/qssearch.py +713 -255
  46. eodag/plugins/search/static_stac_search.py +106 -40
  47. eodag/resources/ext_product_types.json +1 -1
  48. eodag/resources/product_types.yml +1921 -34
  49. eodag/resources/providers.yml +4091 -3655
  50. eodag/resources/stac.yml +50 -216
  51. eodag/resources/stac_api.yml +71 -25
  52. eodag/resources/stac_provider.yml +5 -0
  53. eodag/resources/user_conf_template.yml +89 -32
  54. eodag/rest/__init__.py +6 -0
  55. eodag/rest/cache.py +70 -0
  56. eodag/rest/config.py +68 -0
  57. eodag/rest/constants.py +26 -0
  58. eodag/rest/core.py +735 -0
  59. eodag/rest/errors.py +178 -0
  60. eodag/rest/server.py +264 -431
  61. eodag/rest/stac.py +442 -836
  62. eodag/rest/types/collections_search.py +44 -0
  63. eodag/rest/types/eodag_search.py +238 -47
  64. eodag/rest/types/queryables.py +164 -0
  65. eodag/rest/types/stac_search.py +273 -0
  66. eodag/rest/utils/__init__.py +216 -0
  67. eodag/rest/utils/cql_evaluate.py +119 -0
  68. eodag/rest/utils/rfc3339.py +64 -0
  69. eodag/types/__init__.py +106 -10
  70. eodag/types/bbox.py +15 -14
  71. eodag/types/download_args.py +40 -0
  72. eodag/types/search_args.py +57 -7
  73. eodag/types/whoosh.py +79 -0
  74. eodag/utils/__init__.py +110 -91
  75. eodag/utils/constraints.py +37 -45
  76. eodag/utils/exceptions.py +39 -22
  77. eodag/utils/import_system.py +0 -4
  78. eodag/utils/logging.py +37 -80
  79. eodag/utils/notebook.py +4 -4
  80. eodag/utils/repr.py +113 -0
  81. eodag/utils/requests.py +128 -0
  82. eodag/utils/rest.py +100 -0
  83. eodag/utils/stac_reader.py +93 -21
  84. {eodag-2.12.0.dist-info → eodag-3.0.0.dist-info}/METADATA +88 -53
  85. eodag-3.0.0.dist-info/RECORD +109 -0
  86. {eodag-2.12.0.dist-info → eodag-3.0.0.dist-info}/WHEEL +1 -1
  87. {eodag-2.12.0.dist-info → eodag-3.0.0.dist-info}/entry_points.txt +7 -5
  88. eodag/plugins/apis/cds.py +0 -540
  89. eodag/rest/types/stac_queryables.py +0 -134
  90. eodag/rest/utils.py +0 -1133
  91. eodag-2.12.0.dist-info/RECORD +0 -94
  92. {eodag-2.12.0.dist-info → eodag-3.0.0.dist-info}/LICENSE +0 -0
  93. {eodag-2.12.0.dist-info → eodag-3.0.0.dist-info}/top_level.txt +0 -0
eodag/utils/__init__.py CHANGED
@@ -20,6 +20,7 @@
 Everything that does not fit into one of the specialised categories of utilities in
 this package should go here
 """
+
 from __future__ import annotations
 
 import ast
@@ -29,15 +30,19 @@ import functools
 import hashlib
 import inspect
 import logging as py_logging
+import mimetypes
 import os
 import re
 import shutil
+import ssl
 import string
+import sys
 import types
 import unicodedata
 import warnings
 from collections import defaultdict
 from copy import deepcopy as copy_deepcopy
+from dataclasses import dataclass
 from datetime import datetime as dt
 from email.message import Message
 from glob import glob
@@ -50,12 +55,15 @@ from typing import (
     Any,
     Callable,
     Dict,
+    Iterable,
     Iterator,
     List,
+    Mapping,
     Optional,
     Tuple,
     Type,
     Union,
+    cast,
 )
 
 # All modules using these should import them from utils package
@@ -71,12 +79,16 @@ from urllib.parse import ( # noqa; noqa
 )
 from urllib.request import url2pathname
 
-try:
+if sys.version_info >= (3, 9):
     from typing import Annotated, get_args, get_origin  # noqa
-except ImportError:
-    # for python < 3.9
+else:
     from typing_extensions import Annotated, get_args, get_origin  # type: ignore # noqa
 
+if sys.version_info >= (3, 12):
+    from typing import Unpack  # type: ignore # noqa
+else:
+    from typing_extensions import Unpack  # noqa
+
 import click
 import orjson
 import shapefile
@@ -87,9 +99,9 @@ from dateutil.tz import UTC
 from jsonpath_ng import jsonpath
 from jsonpath_ng.ext import parse
 from jsonpath_ng.jsonpath import Child, Fields, Index, Root, Slice
-from requests import HTTPError
+from requests import HTTPError, Response
 from shapely.geometry import Polygon, shape
-from shapely.geometry.base import BaseGeometry
+from shapely.geometry.base import GEOMETRY_TYPES, BaseGeometry
 from tqdm.auto import tqdm
 
 from eodag.utils import logging as eodag_logging
@@ -128,6 +140,9 @@ DEFAULT_ITEMS_PER_PAGE = 20
 # (DEFAULT_ITEMS_PER_PAGE) to increase it to the known and current minimum value (mundi)
 DEFAULT_MAX_ITEMS_PER_PAGE = 50
 
+# default product-types start date
+DEFAULT_MISSION_START_DATE = "2015-01-01T00:00:00Z"
+
 
 def _deprecated(reason: str = "", version: Optional[str] = None) -> Callable[..., Any]:
     """Simple decorator to mark functions/methods/classes as deprecated.
@@ -307,9 +322,7 @@ def mutate_dict_in_place(func: Callable[[Any], Any], mapping: Dict[Any, Any]) ->
     mapping.
 
     :param func: A function to apply to each value of mapping which is not a dict object
-    :type func: func
     :param mapping: A Python dict object
-    :type mapping: dict
     :returns: None
     """
     for key, value in mapping.items():
@@ -335,10 +348,8 @@ def merge_mappings(mapping1: Dict[Any, Any], mapping2: Dict[Any, Any]) -> None:
     to mapping1 as is.
 
     :param mapping1: The mapping containing values to be overridden
-    :type mapping1: dict
     :param mapping2: The mapping containing values that will override the
         first mapping
-    :type mapping2: dict
     """
     # A mapping between mapping1 keys as lowercase strings and original mapping1 keys
     m1_keys_lowercase = {key.lower(): key for key in mapping1}
@@ -405,9 +416,7 @@ def get_timestamp(date_time: str) -> float:
     If the datetime has no offset, it is assumed to be an UTC datetime.
 
     :param date_time: The datetime string to return as timestamp
-    :type date_time: str
     :returns: The timestamp corresponding to the date_time string in seconds
-    :rtype: float
     """
     dt = isoparse(date_time)
     if not dt.tzinfo:
@@ -429,7 +438,6 @@ class DownloadedCallback:
         """Callback
 
         :param product: The downloaded EO product
-        :type product: :class:`~eodag.api.product._product.EOProduct`
         """
         logger.debug("Download finished for the product %s", product)
 
@@ -469,9 +477,7 @@ class ProgressCallback(tqdm):
         """Update the progress bar.
 
         :param increment: Amount of data already processed
-        :type increment: int
         :param total: (optional) Maximum amount of data to be processed
-        :type total: int
         """
         if total is not None and total != self.total:
             self.reset(total=total)
@@ -524,9 +530,7 @@ def rename_subfolder(dirpath: str, name: str) -> None:
     raise RuntimeError if no subfolder can be found
 
     :param dirpath: path to the directory containing the subfolder
-    :type dirpath: str
     :param name: new name of the subfolder
-    :type name: str
     :raises: RuntimeError
 
     Example:
@@ -574,12 +578,9 @@ def format_dict_items(
     ... ) == {"foo": {"bar": "qux"}, "baz": ["quux?", "quux!"]}
     True
 
-    :param config_dict: Dictionnary having values that need to be parsed
-    :type config_dict: dict
+    :param config_dict: Dictionary having values that need to be parsed
     :param format_variables: Variables used as args for parsing
-    :type format_variables: dict
     :returns: Updated dict
-    :rtype: dict
     """
     return dict_items_recursive_apply(config_dict, format_string, **format_variables)
 
@@ -596,12 +597,9 @@ def jsonpath_parse_dict_items(
     ... ) == {'foo': {'bar': 'baz'}, 'qux': ['quux', 'quux']}
     True
 
-    :param jsonpath_dict: Dictionnary having values that need to be parsed
-    :type jsonpath_dict: dict
+    :param jsonpath_dict: Dictionary having values that need to be parsed
     :param values_dict: Values dict used as args for parsing
-    :type values_dict: dict
     :returns: Updated dict
-    :rtype: dict
     """
     return dict_items_recursive_apply(jsonpath_dict, parse_jsonpath, **values_dict)
 
@@ -647,15 +645,10 @@ def update_nested_dict(
     True
 
     :param old_dict: Dict to be updated
-    :type old_dict: dict
     :param new_dict: Incomming dict
-    :type new_dict: dict
     :param extend_list_values: (optional) Extend old_dict value if both old/new values are lists
-    :type extend_list_values: bool
     :param allow_empty_values: (optional) Allow update with empty values
-    :type allow_empty_values: bool
     :returns: Updated dict
-    :rtype: dict
     """
     for k, v in new_dict.items():
         if k in old_dict.keys():
@@ -720,13 +713,9 @@ def items_recursive_apply(
     'foo'
 
     :param input_obj: Input object (dict or list)
-    :type input_obj: Union[dict,list]
     :param apply_method: Method to be applied to dict elements
-    :type apply_method: :func:`apply_method`
     :param apply_method_parameters: Optional parameters passed to the method
-    :type apply_method_parameters: dict
     :returns: Updated object
-    :rtype: Union[dict, list]
     """
     if isinstance(input_obj, dict):
         return dict_items_recursive_apply(
@@ -754,14 +743,10 @@ def dict_items_recursive_apply(
     ... ) == {'foo': {'bar': 'BAZ!'}, 'qux': ['A!', 'B!']}
     True
 
-    :param config_dict: Input nested dictionnary
-    :type config_dict: dict
+    :param config_dict: Input nested dictionary
     :param apply_method: Method to be applied to dict elements
-    :type apply_method: :func:`apply_method`
     :param apply_method_parameters: Optional parameters passed to the method
-    :type apply_method_parameters: dict
     :returns: Updated dict
-    :rtype: dict
     """
     result_dict: Dict[Any, Any] = deepcopy(config_dict)
     for dict_k, dict_v in result_dict.items():
@@ -795,13 +780,9 @@ def list_items_recursive_apply(
     [{'foo': {'bar': 'BAZ!'}}, 'QUX!']
 
     :param config_list: Input list containing nested lists/dicts
-    :type config_list: list
     :param apply_method: Method to be applied to list elements
-    :type apply_method: :func:`apply_method`
     :param apply_method_parameters: Optional parameters passed to the method
-    :type apply_method_parameters: dict
     :returns: Updated list
-    :rtype: list
     """
     result_list = deepcopy(config_list)
     for list_idx, list_v in enumerate(result_list):
@@ -822,7 +803,7 @@ def list_items_recursive_apply(
 
 
 def items_recursive_sort(
-    input_obj: Union[List[Any], Dict[Any, Any]]
+    input_obj: Union[List[Any], Dict[Any, Any]],
 ) -> Union[List[Any], Dict[Any, Any]]:
     """Recursive sort dict items contained in input object (dict or list)
 
@@ -836,9 +817,7 @@ def items_recursive_sort(
     'foo'
 
     :param input_obj: Input object (dict or list)
-    :type input_obj: Union[dict,list]
     :returns: Updated object
-    :rtype: Union[dict, list]
     """
     if isinstance(input_obj, dict):
         return dict_items_recursive_sort(input_obj)
@@ -857,10 +836,8 @@ def dict_items_recursive_sort(config_dict: Dict[Any, Any]) -> Dict[Any, Any]:
     ... ) == {"a": ["b", {0: 1, 1: 2, 2: 0}], "b": {"a": 0, "b": "c"}}
     True
 
-    :param config_dict: Input nested dictionnary
-    :type config_dict: dict
+    :param config_dict: Input nested dictionary
     :returns: Updated dict
-    :rtype: dict
     """
     result_dict: Dict[Any, Any] = deepcopy(config_dict)
     for dict_k, dict_v in result_dict.items():
@@ -881,9 +858,7 @@ def list_items_recursive_sort(config_list: List[Any]) -> List[Any]:
     ['b', {0: 1, 1: 2, 2: 0}]
 
     :param config_list: Input list containing nested lists/dicts
-    :type config_list: list
     :returns: Updated list
-    :rtype: list
     """
     result_list: List[Any] = deepcopy(config_list)
     for list_idx, list_v in enumerate(result_list):
@@ -912,11 +887,8 @@ def string_to_jsonpath(*args: Any, force: bool = False) -> Union[str, JSONPath]:
     Fields('foo')
 
     :param args: Last arg as input string value, to be converted
-    :type args: str
     :param force: force conversion even if input string is not detected as a jsonpath
-    :type force: bool
     :returns: Parsed value
-    :rtype: str or Child or Root
     """
     path_str: str = args[-1]
     if JSONPATH_MATCH.match(str(path_str)) or force:
@@ -984,11 +956,8 @@ def format_string(key: str, str_to_format: Any, **format_variables: Any) -> Any:
     'foo qux, quux ?'
 
     :param key: Input item key
-    :type key: str
     :param str_to_format: Input item value, to be parsed
-    :type str_to_format: str
     :returns: Parsed value
-    :rtype: str
     """
     if not isinstance(str_to_format, str):
         return str_to_format
@@ -1026,13 +995,9 @@ def parse_jsonpath(
     'baz'
 
     :param key: Input item key
-    :type key: str
     :param jsonpath_obj: Input item value, to be parsed
-    :type jsonpath_obj: str or jsonpath.Child
     :param values_dict: Values used as args for parsing
-    :type values_dict: dict
     :returns: Parsed value
-    :rtype: str
     """
     if isinstance(jsonpath_obj, jsonpath.Child):
         match = jsonpath_obj.find(values_dict)
@@ -1048,9 +1013,7 @@ def nested_pairs2dict(pairs: Union[List[Any], Any]) -> Union[Any, Dict[Any, Any]
     {'foo': {'bar': 'baz'}}
 
     :param pairs: Pairs of key / value
-    :type pairs: list or Any
     :returns: Created dict
-    :rtype: dict or Any
     """
     d = {}
     try:
@@ -1070,11 +1033,8 @@ def get_geometry_from_various(
     """Creates a shapely geometry using given query kwargs arguments
 
     :param locations_config: (optional) EODAG locations configuration
-    :type locations_config: list
     :param query_args: Query kwargs arguments from core.search() method
-    :type query_args: dict
     :returns: shapely Geometry found
-    :rtype: :class:`shapely.geometry.BaseGeometry`
     :raises: :class:`ValueError`
     """
     geom = None
@@ -1083,7 +1043,10 @@ def get_geometry_from_various(
         geom_arg = query_args["geometry"]
 
         bbox_keys = ["lonmin", "latmin", "lonmax", "latmax"]
-        if isinstance(geom_arg, dict) and all(k in geom_arg for k in bbox_keys):
+        if isinstance(geom_arg, dict) and geom_arg.get("type") in GEOMETRY_TYPES:
+            # geojson geometry
+            geom = cast(BaseGeometry, shape(geom_arg))
+        elif isinstance(geom_arg, dict) and all(k in geom_arg for k in bbox_keys):
             # bbox dict
             geom = Polygon(
                 (
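With this change, get_geometry_from_various() also accepts a GeoJSON-style geometry mapping (any "type" listed in shapely's GEOMETRY_TYPES) and converts it with shape(), in addition to the existing bbox-dict handling. A minimal illustrative sketch (not part of the diff; the polygon coordinates are made up):

from eodag.utils import get_geometry_from_various

geojson_geom = {
    "type": "Polygon",
    "coordinates": [[[1.0, 43.0], [1.0, 44.0], [2.0, 44.0], [2.0, 43.0], [1.0, 43.0]]],
}
# the new branch returns a shapely geometry built from the GeoJSON mapping
geom = get_geometry_from_various(geometry=geojson_geom)
print(geom.bounds)  # (1.0, 43.0, 2.0, 44.0)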
@@ -1133,11 +1096,11 @@ def get_geometry_from_various(
     for arg in query_locations.keys():
         if arg in locations_dict.keys():
             found = False
-            pattern = query_locations[arg]
+            pattern = rf"{query_locations[arg]}"
             attr = locations_dict[arg]["attr"]
             with shapefile.Reader(locations_dict[arg]["path"]) as shp:
                 for shaperec in shp.shapeRecords():
-                    if re.search(pattern, shaperec.record[attr]):
+                    if re.search(pattern, str(shaperec.record[attr])):
                         found = True
                         new_geom = shape(shaperec.shape)
                         # get geoms union
@@ -1166,7 +1129,7 @@ class MockResponse:
     def raise_for_status(self) -> None:
         """raises an exception when the status is not ok"""
         if self.status_code != 200:
-            raise HTTPError()
+            raise HTTPError(response=Response())
 
 
 def md5sum(file_path: str) -> str:
@@ -1177,9 +1140,7 @@ def md5sum(file_path: str) -> str:
     'd41d8cd98f00b204e9800998ecf8427e'
 
     :param file_path: input file path
-    :type file_path: str
     :returns: MD5 checksum
-    :rtype: str
     """
     hash_md5 = hashlib.md5()
     with open(file_path, "rb") as f:
@@ -1195,9 +1156,7 @@ def obj_md5sum(data: Any) -> str:
     '37a6259cc0c1dae299a7866489dff0bd'
 
     :param data: JSON serializable input object
-    :type data: Any
     :returns: MD5 checksum
-    :rtype: str
     """
     return hashlib.md5(orjson.dumps(data, option=orjson.OPT_SORT_KEYS)).hexdigest()
 
@@ -1221,16 +1180,16 @@ def cached_parse(str_to_parse: str) -> JSONPath:
     CacheInfo(hits=1, misses=2, maxsize=128, currsize=2)
 
     :param str_to_parse: string to parse as jsonpath
-    :type str_to_parse: str
     :returns: parsed jsonpath
-    :rtype: :class:`jsonpath_ng.JSONPath`
     """
     return parse(str_to_parse)
 
 
 @functools.lru_cache()
 def _mutable_cached_yaml_load(config_path: str) -> Any:
-    with open(os.path.abspath(os.path.realpath(config_path)), "r") as fh:
+    with open(
+        os.path.abspath(os.path.realpath(config_path)), mode="r", encoding="utf-8"
+    ) as fh:
         return yaml.load(fh, Loader=yaml.SafeLoader)
 
 
@@ -1238,9 +1197,7 @@ def cached_yaml_load(config_path: str) -> Dict[str, Any]:
     """Cached yaml.load
 
     :param config_path: path to the yaml configuration file
-    :type config_path: str
     :returns: loaded yaml configuration
-    :rtype: dict
     """
     return copy_deepcopy(_mutable_cached_yaml_load(config_path))
 
@@ -1257,9 +1214,7 @@ def cached_yaml_load_all(config_path: str) -> List[Any]:
     Load all configurations stored in the configuration file as separated yaml documents
 
    :param config_path: path to the yaml configuration file
-    :type config_path: str
     :returns: list of configurations
-    :rtype: list
     """
     return copy_deepcopy(_mutable_cached_yaml_load_all(config_path))
 
@@ -1270,11 +1225,8 @@ def get_bucket_name_and_prefix(
     """Extract bucket name and prefix from URL
 
     :param url: (optional) URL to use as product.location
-    :type url: str
     :param bucket_path_level: (optional) bucket location index in path.split('/')
-    :type bucket_path_level: int
     :returns: bucket_name and prefix as str
-    :rtype: tuple
     """
     bucket, prefix = None, None
 
@@ -1289,8 +1241,9 @@ def get_bucket_name_and_prefix(
         prefix = path
     elif bucket_path_level is not None:
         parts = path.split("/")
-        bucket, prefix = parts[bucket_path_level], "/".join(
-            parts[(bucket_path_level + 1) :]
+        bucket, prefix = (
+            parts[bucket_path_level],
+            "/".join(parts[(bucket_path_level + 1) :]),
         )
 
     return bucket, prefix
@@ -1302,9 +1255,7 @@ def flatten_top_directories(
     """Flatten directory structure, removing common empty sub-directories
 
     :param nested_dir_root: Absolute path of the directory structure to flatten
-    :type nested_dir_root: str
     :param common_subdirs_path: (optional) Absolute path of the desired subdirectory to remove
-    :type common_subdirs_path: str
     """
     if not common_subdirs_path:
         subpaths_list = [p for p in Path(nested_dir_root).glob("**/*") if p.is_file()]
@@ -1326,9 +1277,7 @@ def deepcopy(sth: Any) -> Any:
     `_copy_list` and `_copy_dict` available for the moment
 
     :param sth: Object to copy
-    :type sth: Any
     :returns: Copied object
-    :rtype: Any
     """
     _dispatcher: Dict[Type[Any], Callable[..., Any]] = {}
 
@@ -1371,9 +1320,7 @@ def parse_header(header: str) -> Message:
     'example.txt'
 
     :param header: header to parse
-    :type header: str
     :returns: parsed header
-    :rtype: :class:`~email.message.Message`
     """
     m = Message()
     m["content-type"] = header
@@ -1409,3 +1356,75 @@ def cast_scalar_value(value: Any, new_type: Any) -> Any:
         return eval(value.capitalize())
 
     return new_type(value)
+
+
+@dataclass
+class StreamResponse:
+    """Represents a streaming response"""
+
+    content: Iterable[bytes]
+    headers: Optional[Mapping[str, str]] = None
+    media_type: Optional[str] = None
+    status_code: Optional[int] = None
+
+
+def guess_file_type(file: str) -> Optional[str]:
+    """guess the mime type of a file or URL based on its extension"""
+    mimetypes.add_type("text/xml", ".xsd")
+    mimetypes.add_type("application/x-grib", ".grib")
+    mime_type, _ = mimetypes.guess_type(file, False)
+    return mime_type
+
+
+def guess_extension(type: str) -> Optional[str]:
+    """guess extension from mime type"""
+    mimetypes.add_type("text/xml", ".xsd")
+    mimetypes.add_type("application/x-grib", ".grib")
+    return mimetypes.guess_extension(type, strict=False)
+
+
+def get_ssl_context(ssl_verify: bool) -> ssl.SSLContext:
+    """
+    Returns an SSL context based on ssl_verify argument.
+    :param ssl_verify: ssl_verify parameter
+    :returns: An SSL context object.
+    """
+    ctx = ssl.create_default_context()
+    if not ssl_verify:
+        ctx.check_hostname = False
+        ctx.verify_mode = ssl.CERT_NONE
+    else:
+        ctx.check_hostname = True
+        ctx.verify_mode = ssl.CERT_REQUIRED
+    return ctx
+
+
+def sort_dict(input_dict: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Recursively sorts a dict by keys.
+
+    :param input_dict: input dict
+    :returns: sorted dict
+
+    >>> sort_dict({"b": {"c": 1, "a": 2, "b": 3}, "a": 4})
+    {'a': 4, 'b': {'a': 2, 'b': 3, 'c': 1}}
+    """
+    return {
+        k: sort_dict(v) if isinstance(v, dict) else v
+        for k, v in sorted(input_dict.items())
+    }
+
+
+def dict_md5sum(input_dict: Dict[str, Any]) -> str:
+    """
+    Hash nested dictionary
+
+    :param input_dict: input dict
+    :returns: hash
+
+    >>> hd = dict_md5sum({"b": {"c": 1, "a": 2, "b": 3}, "a": 4})
+    >>> hd
+    'a195bcef1bb3b419e9e74b7cc5db8098'
+    >>> assert(dict_md5sum({"a": 4, "b": {"b": 3, "c": 1, "a": 2}}) == hd)
+    """
+    return obj_md5sum(sort_dict(input_dict))
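The added block above introduces several small helpers in eodag.utils (StreamResponse, guess_file_type, guess_extension, get_ssl_context, sort_dict, dict_md5sum). A short usage sketch, based only on the definitions and doctests shown above:

from eodag.utils import dict_md5sum, guess_file_type, sort_dict

params = {"b": {"c": 1, "a": 2, "b": 3}, "a": 4}
print(sort_dict(params))             # {'a': 4, 'b': {'a': 2, 'b': 3, 'c': 1}}
print(dict_md5sum(params))           # 'a195bcef1bb3b419e9e74b7cc5db8098', independent of key order
print(guess_file_type("data.grib"))  # 'application/x-grib', registered via mimetypes.add_type

sort_dict() and dict_md5sum() give a deterministic hash for nested parameter dicts, while get_ssl_context(ssl_verify=False) builds an SSLContext with hostname checking and certificate verification disabled.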
eodag/utils/constraints.py CHANGED
@@ -19,13 +19,14 @@ import copy
 import logging
 from typing import Any, Dict, List, Set, Union
 
-import requests
+from requests.auth import AuthBase
 
 from eodag.api.product.metadata_mapping import get_provider_queryable_key
 from eodag.plugins.apis.base import Api
 from eodag.plugins.search.base import Search
-from eodag.utils import HTTP_REQ_TIMEOUT, USER_AGENT, deepcopy
-from eodag.utils.exceptions import TimeOutError, ValidationError
+from eodag.utils import deepcopy
+from eodag.utils.exceptions import RequestError, ValidationError
+from eodag.utils.requests import fetch_json
 
 logger = logging.getLogger("eodag.constraints")
 
@@ -40,15 +41,10 @@ def get_constraint_queryables_with_additional_params(
     gets the queryables from the constraints using the given parameters
     For all queryables only values matching the given parameters based on the constraints will be returned
     :param constraints: list of constraints fetched from the provider
-    :type constraints: List[Any]
     :param input_params: conditions the constraints should fulfil
-    :type input_params: dict
     :param plugin: search or api plugin that is used
-    :type plugin: Union[Search, Api]
     :param product_type: product type for which the data should be fetched
-    :type product_type: str
     :returns: dict containing queryable data
-    :rtype: Dict[str, Dict[str, Set[Any]]]
     """
     defaults = copy.deepcopy(input_params)
     constraint_matches = {}
@@ -73,8 +69,20 @@ def get_constraint_queryables_with_additional_params(
             if provider_key and provider_key in constraint:
                 eodag_provider_key_mapping[provider_key] = param
                 params_available[param] = True
-                if value in constraint[provider_key]:
+                if (
+                    isinstance(value, list)
+                    and all([v in constraint[provider_key] for v in value])
+                    or not isinstance(value, list)
+                    and value in constraint[provider_key]
+                ):
                     params_matched[param] = True
+                elif isinstance(value, str):
+                    # for Copernicus providers, values can be multiple and represented with a string
+                    # separated by slashes (example: time = "0000/0100/0200")
+                    values = value.split("/")
+                    params_matched[param] = all(
+                        [v in constraint[provider_key] for v in values]
+                    )
                 values_available[param].update(constraint[provider_key])
         # match with default values of params
        for default_param, default_value in defaults.items():
@@ -161,48 +169,32 @@ def fetch_constraints(
     """
     fetches the constraints from a provider
     :param constraints_url: url from which the constraints can be fetched
-    :type constraints_url: str
     :param plugin: api or search plugin of the provider
-    :type plugin: Union[Search, Api]
     :returns: list of constraints fetched from the provider
-    :rtype: List[Dict[Any, Any]]
     """
+    auth = (
+        plugin.auth
+        if hasattr(plugin, "auth") and isinstance(plugin.auth, AuthBase)
+        else None
+    )
     try:
-        headers = USER_AGENT
-        logger.debug("fetching constraints from %s", constraints_url)
-        if hasattr(plugin, "auth"):
-            res = requests.get(
-                constraints_url,
-                headers=headers,
-                auth=plugin.auth,
-                timeout=HTTP_REQ_TIMEOUT,
-            )
-        else:
-            res = requests.get(
-                constraints_url, headers=headers, timeout=HTTP_REQ_TIMEOUT
-            )
-        res.raise_for_status()
-    except requests.exceptions.Timeout as exc:
-        raise TimeOutError(exc, timeout=HTTP_REQ_TIMEOUT) from exc
-    except requests.exceptions.HTTPError as err:
-        logger.error(
-            "constraints could not be fetched from %s, error: %s",
-            constraints_url,
-            str(err),
-        )
+        constraints_data = fetch_json(constraints_url, auth=auth)
+    except RequestError as err:
+        logger.error(str(err))
+        return []
+
+    config = plugin.config.__dict__
+    if (
+        "constraints_entry" in config
+        and config["constraints_entry"]
+        and config["constraints_entry"] in constraints_data
+    ):
+        constraints = constraints_data[config["constraints_entry"]]
+    elif config.get("stop_without_constraints_entry_key", False):
         return []
     else:
-        constraints_data = res.json()
-        config = plugin.config.__dict__
-        if (
-            "constraints_entry" in config
-            and config["constraints_entry"]
-            and config["constraints_entry"] in constraints_data
-        ):
-            constraints = constraints_data[config["constraints_entry"]]
-        else:
-            constraints = constraints_data
-        return constraints
+        constraints = constraints_data
+    return constraints
 
 
 def _get_other_possible_values_for_values_with_defaults(
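In 3.0.0, fetch_constraints() delegates the HTTP request and error handling to the new eodag.utils.requests.fetch_json helper, passes the plugin's auth only when it is a requests AuthBase instance, and returns [] when the stop_without_constraints_entry_key flag is set in the plugin configuration and no constraints entry is found. A simplified, hypothetical sketch of the resulting call pattern (load_constraints is an illustrative name, not an eodag function):

from typing import Any, Optional

from requests.auth import AuthBase

from eodag.utils.exceptions import RequestError
from eodag.utils.requests import fetch_json


def load_constraints(url: str, auth: Optional[AuthBase] = None) -> Any:
    """Fetch a JSON constraints document, returning [] when the request fails."""
    try:
        # fetch_json raises RequestError on failure (see the except clause in the hunk above)
        return fetch_json(url, auth=auth)
    except RequestError as err:
        print(f"constraints could not be fetched from {url}: {err}")
        return []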