eodag 3.8.1__py3-none-any.whl → 3.9.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eodag/api/core.py +1 -1
- eodag/api/product/drivers/generic.py +5 -1
- eodag/api/product/metadata_mapping.py +132 -35
- eodag/cli.py +36 -4
- eodag/config.py +5 -2
- eodag/plugins/apis/ecmwf.py +3 -1
- eodag/plugins/apis/usgs.py +2 -1
- eodag/plugins/authentication/aws_auth.py +235 -37
- eodag/plugins/authentication/base.py +12 -2
- eodag/plugins/authentication/oauth.py +5 -0
- eodag/plugins/base.py +3 -2
- eodag/plugins/download/aws.py +44 -285
- eodag/plugins/download/base.py +3 -2
- eodag/plugins/download/creodias_s3.py +1 -38
- eodag/plugins/download/http.py +111 -103
- eodag/plugins/download/s3rest.py +3 -1
- eodag/plugins/manager.py +2 -1
- eodag/plugins/search/__init__.py +2 -1
- eodag/plugins/search/base.py +2 -1
- eodag/plugins/search/build_search_result.py +2 -2
- eodag/plugins/search/creodias_s3.py +9 -1
- eodag/plugins/search/qssearch.py +3 -1
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +220 -30
- eodag/resources/providers.yml +633 -88
- eodag/resources/stac_provider.yml +5 -2
- eodag/resources/user_conf_template.yml +0 -5
- eodag/rest/core.py +8 -0
- eodag/rest/errors.py +9 -0
- eodag/rest/server.py +8 -0
- eodag/rest/stac.py +8 -0
- eodag/rest/utils/__init__.py +2 -4
- eodag/rest/utils/rfc3339.py +1 -1
- eodag/utils/__init__.py +69 -54
- eodag/utils/dates.py +204 -0
- eodag/utils/s3.py +187 -168
- {eodag-3.8.1.dist-info → eodag-3.9.1.dist-info}/METADATA +4 -3
- {eodag-3.8.1.dist-info → eodag-3.9.1.dist-info}/RECORD +42 -42
- {eodag-3.8.1.dist-info → eodag-3.9.1.dist-info}/entry_points.txt +1 -1
- eodag/utils/rest.py +0 -100
- {eodag-3.8.1.dist-info → eodag-3.9.1.dist-info}/WHEEL +0 -0
- {eodag-3.8.1.dist-info → eodag-3.9.1.dist-info}/licenses/LICENSE +0 -0
- {eodag-3.8.1.dist-info → eodag-3.9.1.dist-info}/top_level.txt +0 -0
eodag/plugins/download/http.py
CHANGED
@@ -23,7 +23,6 @@ import re
 import shutil
 import tarfile
 import zipfile
-from datetime import datetime
 from email.message import Message
 from itertools import chain
 from json import JSONDecodeError
@@ -46,7 +45,7 @@ from lxml import etree
 from requests import RequestException
 from requests.auth import AuthBase
 from requests.structures import CaseInsensitiveDict
-from
+from zipstream import ZipStream
 
 from eodag.api.product.metadata_mapping import (
     NOT_AVAILABLE,
@@ -87,6 +86,7 @@ from eodag.utils.exceptions import (
 
 if TYPE_CHECKING:
     from jsonpath_ng import JSONPath
+    from mypy_boto3_s3 import S3ServiceResource
     from requests import Response
 
     from eodag.api.product import Asset, EOProduct  # type: ignore
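
Note: the new `mypy_boto3_s3` import sits under `if TYPE_CHECKING:`, so the boto3 stubs package stays a type-checking-only dependency and is never imported at runtime. A minimal sketch of that pattern, with a hypothetical helper function that is not part of eodag:

```python
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # evaluated by mypy/IDEs only; the stubs package is not needed at runtime
    from mypy_boto3_s3 import S3ServiceResource


def list_bucket_keys(s3: "S3ServiceResource", bucket: str) -> list:
    """Hypothetical helper: the quoted annotation resolves for type checkers only."""
    return [obj.key for obj in s3.Bucket(bucket).objects.all()]
```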
@@ -155,6 +155,7 @@ class HTTPDownload(Download):
         auth: Optional[AuthBase] = None,
         **kwargs: Unpack[DownloadConf],
     ) -> Optional[dict[str, Any]]:
+
         """Send product order request.
 
         It will be executed once before the download retry loop, if the product is OFFLINE
@@ -332,6 +333,7 @@
             logger.debug(
                 f"Order download status request responded with {response.status_code}"
             )
+
             response.raise_for_status()  # Raise an exception if status code indicates an error
 
             # Handle redirection (if needed)
@@ -589,7 +591,7 @@
     def download(
         self,
         product: EOProduct,
-        auth: Optional[Union[AuthBase, S3SessionKwargs]] = None,
+        auth: Optional[Union[AuthBase, S3SessionKwargs, S3ServiceResource]] = None,
         progress_callback: Optional[ProgressCallback] = None,
         wait: float = DEFAULT_DOWNLOAD_WAIT,
         timeout: float = DEFAULT_DOWNLOAD_TIMEOUT,
@@ -753,7 +755,7 @@
     def _stream_download_dict(
         self,
         product: EOProduct,
-        auth: Optional[Union[AuthBase, S3SessionKwargs]] = None,
+        auth: Optional[Union[AuthBase, S3SessionKwargs, S3ServiceResource]] = None,
         byte_range: tuple[Optional[int], Optional[int]] = (None, None),
         compress: Literal["zip", "raw", "auto"] = "auto",
         wait: float = DEFAULT_DOWNLOAD_WAIT,
@@ -786,7 +788,7 @@
         ):
             try:
                 assets_values = product.assets.get_values(kwargs.get("asset"))
-
+                assets_stream_list = self._stream_download_assets(
                     product,
                     auth,
                     None,
@@ -794,40 +796,41 @@
                     **kwargs,
                 )
 
-
-
-
-
-                # update headers
-                assets_values[0].headers[
-                    "content-disposition"
-                ] = f"attachment; filename={assets_values[0].filename}"
+                # single asset
+                if len(assets_stream_list) == 1:
+                    asset_stream = assets_stream_list[0]
                     if assets_values[0].get("type"):
-
-
-                        ]
-
-                    return StreamResponse(
-                        content=chain(iter([first_chunks_tuple]), chunks_tuples),
-                        headers=assets_values[0].headers,
-                    )
+                        asset_stream.headers["content-type"] = assets_values[0]["type"]
+                    return asset_stream
 
+                # multiple assets in zip
                 else:
-                    # get first chunk to check if it does not contain an error (if it does, that error will be raised)
-                    first_chunks_tuple = next(chunks_tuples)
                     outputs_filename = (
                         sanitize(product.properties["title"])
                         if "title" in product.properties
                         else sanitize(product.properties.get("id", "download"))
                     )
+
+                    # do not use global size if one of the assets has no size
+                    missing_length = any(not (asset.size) for asset in assets_values)
+
+                    zip_stream = (
+                        ZipStream(sized=True) if not missing_length else ZipStream()
+                    )
+                    for asset_stream in assets_stream_list:
+                        zip_stream.add(
+                            asset_stream.content,
+                            arcname=asset_stream.arcname,
+                            size=asset_stream.size,
+                        )
+
+                    zip_length = len(zip_stream) if not missing_length else None
+
                     return StreamResponse(
-                        content=
-                            chain(iter([first_chunks_tuple]), chunks_tuples)
-                        ),
+                        content=zip_stream,
                         media_type="application/zip",
-
-
-                        },
+                        filename=f"{outputs_filename}.zip",
+                        size=zip_length,
                     )
             except NotAvailableError as e:
                 if kwargs.get("asset") is not None:
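
For context, a minimal sketch of the zipstream-ng usage the new branch relies on, mirroring the calls in the hunk above (`ZipStream(sized=True)`, `add(..., arcname=..., size=...)`, `len(...)`); member names and payloads are invented, and exact keyword support depends on the zipstream-ng version eodag pins:

```python
from zipstream import ZipStream  # zipstream-ng


def make_chunks(data: bytes, chunk_size: int = 4):
    """Yield data lazily, standing in for the per-asset HTTP generators."""
    for i in range(0, len(data), chunk_size):
        yield data[i : i + chunk_size]


payloads = {"B04.tif": b"fake-red-band", "B08.tif": b"fake-nir-band"}

zs = ZipStream(sized=True)  # sized=True needs every member size up front
for name, data in payloads.items():
    zs.add(make_chunks(data), arcname=name, size=len(data))

content_length = len(zs)  # known before streaming, usable as a Content-Length header
archive = b"".join(zs)    # consuming the stream yields the zip bytes
print(content_length, len(archive))
```

This is also why the hunk falls back to an unsized `ZipStream()` and a `None` length when any asset has no known size.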
@@ -848,6 +851,8 @@
             return StreamResponse(
                 content=chain(iter([first_chunk]), chunk_iterator),
                 headers=product.headers,
+                filename=getattr(product, "filename", None),
+                size=getattr(product, "size", None),
             )
 
     def _check_auth_exception(self, e: Optional[RequestException]) -> None:
@@ -1042,7 +1047,6 @@
 
         product.headers = self.stream.headers
         filename = self._check_product_filename(product)
-        product.headers["content-disposition"] = f"attachment; filename={filename}"
         content_type = product.headers.get("Content-Type")
         guessed_content_type = (
             guess_file_type(filename) if filename and not content_type else None
@@ -1051,6 +1055,7 @@
             product.headers["Content-Type"] = guessed_content_type
 
         progress_callback.reset(total=stream_size)
+        product.size = stream_size
 
         product.filename = filename
         return self.stream.iter_content(chunk_size=64 * 1024)
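
With the `content-disposition` header no longer forged into `product.headers`, the filename and size recorded on the product let the serving layer derive download headers itself. A hypothetical helper (not eodag API) showing that derivation; real code should also RFC 6266-encode the filename:

```python
from typing import Optional


def download_headers(filename: Optional[str], size: Optional[int]) -> dict:
    """Illustrative only: build response headers from a recorded filename and size."""
    headers = {}
    if filename:
        headers["Content-Disposition"] = f'attachment; filename="{filename}"'
    if size is not None:
        headers["Content-Length"] = str(size)
    return headers


print(download_headers("S2A_MSIL1C_example.zip", 123456))
```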
@@ -1062,18 +1067,13 @@
         progress_callback: Optional[ProgressCallback] = None,
         assets_values: list[Asset] = [],
         **kwargs: Unpack[DownloadConf],
-    ) ->
+    ) -> list[StreamResponse]:
+        """Stream download assets as a zip file."""
+
         if progress_callback is None:
             logger.info("Progress bar unavailable, please call product.download()")
             progress_callback = ProgressCallback(disable=True)
 
-        assets_urls = [
-            a["href"] for a in getattr(product, "assets", {}).values() if "href" in a
-        ]
-
-        if not assets_urls:
-            raise NotAvailableError("No assets available for %s" % product)
-
         # get extra parameters to pass to the query
         params = kwargs.pop("dl_url_params", None) or getattr(
             self.config, "dl_url_params", {}
@@ -1083,16 +1083,6 @@
 
         progress_callback.reset(total=total_size)
 
-        def get_chunks(stream: Response) -> Any:
-            for chunk in stream.iter_content(chunk_size=64 * 1024):
-                if chunk:
-                    progress_callback(len(chunk))
-                    yield chunk
-
-        # zipped files properties
-        modified_at = datetime.now()
-        perms = 0o600
-
         # loop for assets paths and get common_subdir
         asset_rel_paths_list = []
         for asset in assets_values:
@@ -1124,37 +1114,35 @@
             else None
         )
 
-
-
-
-
-
-
-
-
-
-
+        def get_chunks_generator(asset: Asset) -> Iterator[bytes]:
+            """Create a generator function that will be called by ZipStream when needed."""
+
+            asset_href = asset.get("href")
+            # This function will be called by zipstream when it needs the data
+            if not asset_href or asset_href.startswith("file:"):
+                logger.info(f"Local asset detected. Download skipped for {asset_href}")
+                return
+
+            # Determine auth
+            if matching_conf or (matching_url and re.match(matching_url, asset_href)):
                 auth_object = auth
             else:
                 auth_object = None
-
-
-
-
-
-
-
-
-
-
+
+            # Make the request inside the generator
+            try:
+                with requests.get(
+                    asset_href,
+                    stream=True,
+                    auth=auth_object,
+                    params=params,
+                    headers=USER_AGENT,
+                    timeout=DEFAULT_STREAM_REQUESTS_TIMEOUT,
+                    verify=ssl_verify,
+                ) as stream:
                     stream.raise_for_status()
-
-
-                    exc, timeout=DEFAULT_STREAM_REQUESTS_TIMEOUT
-                ) from exc
-            except RequestException as e:
-                self._handle_asset_exception(e, asset)
-            else:
+
+                    # Process asset path
                    asset_rel_path = (
                        asset.rel_path.replace(assets_common_subdir, "").strip(os.sep)
                        if flatten_top_dirs
@@ -1183,19 +1171,46 @@
                        asset_rel_dir, cast(str, asset.filename)
                    )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
+                    for chunk in stream.iter_content(chunk_size=64 * 1024):
+                        if chunk:
+                            progress_callback(len(chunk))
+                            yield chunk
+
+            except requests.exceptions.Timeout as exc:
+                raise TimeOutError(
+                    exc, timeout=DEFAULT_STREAM_REQUESTS_TIMEOUT
+                ) from exc
+            except RequestException as e:
+                self._handle_asset_exception(e, asset)
+
+        assets_stream_list = []
+
+        # Process each asset
+        for asset in assets_values:
+            if not asset["href"] or asset["href"].startswith("file:"):
+                logger.info(
+                    f"Local asset detected. Download skipped for {asset['href']}"
+                )
+                continue
+            asset_chunks = get_chunks_generator(asset)
+            try:
+                # start reading chunks to set assets attributes
+                first_chunk = next(asset_chunks)
+                asset_chunks = chain(iter([first_chunk]), asset_chunks)
+            except StopIteration:
+                # Empty generator
+                asset_chunks = iter([])
+
+            assets_stream_list.append(
+                StreamResponse(
+                    content=asset_chunks,
+                    filename=getattr(asset, "filename", None),
+                    arcname=getattr(asset, "rel_path", None),
+                    size=getattr(asset, "size", 0) or None,
+                )
+            )
+
+        return assets_stream_list
 
     def _download_assets(
         self,
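
The `next()` plus `chain()` step above exists to pull the first HTTP chunk eagerly, so request errors surface while the list of streams is being built, without losing that chunk from the otherwise lazy generator. A standalone sketch of the same pattern (the generators below are invented):

```python
from itertools import chain
from typing import Iterator


def ok_stream() -> Iterator[bytes]:
    yield from (b"part-1", b"part-2")


def failing_stream() -> Iterator[bytes]:
    raise RuntimeError("simulated HTTP error on first read")
    yield b""  # never reached; makes this a generator function


def peek(gen: Iterator[bytes]) -> Iterator[bytes]:
    """Force one chunk now (raising early on failure), then re-attach it."""
    try:
        first = next(gen)
    except StopIteration:
        return iter([])  # empty stream stays a valid iterator
    return chain(iter([first]), gen)


print(b"".join(peek(ok_stream())))  # b'part-1part-2', nothing is lost
try:
    peek(failing_stream())
except RuntimeError as err:
    print(f"caught while preparing the stream: {err}")
```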
@@ -1219,7 +1234,7 @@
 
         assets_values = product.assets.get_values(kwargs.get("asset"))
 
-
+        assets_stream_list = self._stream_download_assets(
             product, auth, progress_callback, assets_values=assets_values, **kwargs
         )
 
@@ -1245,17 +1260,9 @@
                 local_assets_count += 1
                 continue
 
-
-
-
-            chunks = chain(iter([first_chunks_tuple]), chunks_tuples)
-            chunks_tuples = iter(
-                [(assets_values[0].rel_path, None, None, None, chunks)]
-            )
-
-        for chunk_tuple in chunks_tuples:
-            asset_path = chunk_tuple[0]
-            asset_chunks = chunk_tuple[4]
+        for asset_stream in assets_stream_list:
+            asset_chunks = asset_stream.content
+            asset_path = cast(str, asset_stream.arcname)
             asset_abs_path = os.path.join(fs_dir_path, asset_path)
             asset_abs_path_temp = asset_abs_path + "~"
             # create asset subdir if not exist
@@ -1271,7 +1278,6 @@
                     for chunk in asset_chunks:
                         if chunk:
                             fhandle.write(chunk)
-                            progress_callback(len(chunk))
             logger.debug(
                 "Download completed. Renaming temporary file '%s' to '%s'",
                 os.path.basename(asset_abs_path_temp),
@@ -1344,14 +1350,16 @@
             if asset["href"] and not asset["href"].startswith("file:"):
                 # HEAD request for size & filename
                 try:
-
+                    asset_headers_resp = requests.head(
                         asset["href"],
                         auth=auth,
                         params=params,
                         headers=USER_AGENT,
                         timeout=timeout,
                         verify=ssl_verify,
-                    )
+                    )
+                    asset_headers_resp.raise_for_status()
+                    asset_headers = asset_headers_resp.headers
                 except RequestException as e:
                     logger.debug(f"HEAD request failed: {str(e)}")
                     asset_headers = CaseInsensitiveDict()

eodag/plugins/download/s3rest.py
CHANGED
@@ -52,6 +52,8 @@ from eodag.utils.exceptions import (
 )
 
 if TYPE_CHECKING:
+    from mypy_boto3_s3 import S3ServiceResource
+
     from eodag.api.product import EOProduct
     from eodag.config import PluginConfig
     from eodag.types import S3SessionKwargs
@@ -98,7 +100,7 @@ class S3RestDownload(Download):
     def download(
         self,
         product: EOProduct,
-        auth: Optional[Union[AuthBase, S3SessionKwargs]] = None,
+        auth: Optional[Union[AuthBase, S3SessionKwargs, S3ServiceResource]] = None,
         progress_callback: Optional[ProgressCallback] = None,
         wait: float = DEFAULT_DOWNLOAD_WAIT,
         timeout: float = DEFAULT_DOWNLOAD_TIMEOUT,

eodag/plugins/manager.py
CHANGED
@@ -44,6 +44,7 @@ from eodag.utils.exceptions import (
 )
 
 if TYPE_CHECKING:
+    from mypy_boto3_s3 import S3ServiceResource
     from requests.auth import AuthBase
 
     from eodag.api.product import EOProduct
@@ -361,7 +362,7 @@ class PluginManager:
         provider: str,
         matching_url: Optional[str] = None,
         matching_conf: Optional[PluginConfig] = None,
-    ) -> Optional[Union[AuthBase, S3SessionKwargs]]:
+    ) -> Optional[Union[AuthBase, S3SessionKwargs, S3ServiceResource]]:
         """Authenticate and return the authenticated object for the first matching
         authentication plugin
 
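The widened return type means callers of this authentication entry point can now receive three shapes of credentials: a `requests` `AuthBase`, a dict of boto3 session kwargs, or a ready-built S3 service resource. A hedged sketch of how downstream code might branch on them; the function and messages are illustrative, not eodag's actual logic:

```python
from typing import Any, Optional, Union

from requests.auth import AuthBase


def describe_auth(auth: Optional[Union[AuthBase, dict, Any]]) -> str:
    """Illustrative only: distinguish the three auth shapes the new union allows."""
    if auth is None:
        return "anonymous access"
    if isinstance(auth, AuthBase):
        return "requests auth handler, attached to each HTTP call"
    if isinstance(auth, dict):
        # e.g. {"aws_access_key_id": ..., "aws_secret_access_key": ...}
        return "boto3 session kwargs, used to build an S3 session"
    return "pre-built S3 service resource, used directly for bucket access"


print(describe_auth({"aws_access_key_id": "AKIA..."}))
```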
eodag/plugins/search/__init__.py
CHANGED
@@ -26,6 +26,7 @@ from eodag.utils import DEFAULT_ITEMS_PER_PAGE, DEFAULT_PAGE
 if TYPE_CHECKING:
     from typing import Any, Optional, Union
 
+    from mypy_boto3_s3 import S3ServiceResource
     from requests.auth import AuthBase
 
     from eodag.plugins.authentication.base import Authentication
@@ -39,7 +40,7 @@
     product_type: Optional[str] = None
     page: Optional[int] = DEFAULT_PAGE
     items_per_page: Optional[int] = DEFAULT_ITEMS_PER_PAGE
-    auth: Optional[Union[AuthBase, S3SessionKwargs]] = None
+    auth: Optional[Union[AuthBase, S3SessionKwargs, S3ServiceResource]] = None
     auth_plugin: Optional[Authentication] = None
    count: bool = True
    url: Optional[str] = None

eodag/plugins/search/base.py
CHANGED
@@ -47,6 +47,7 @@ from eodag.utils.exceptions import ValidationError
 if TYPE_CHECKING:
     from typing import Any, Optional, Union
 
+    from mypy_boto3_s3 import S3ServiceResource
     from requests.auth import AuthBase
 
     from eodag.api.product import EOProduct
@@ -63,7 +64,7 @@
     :param config: An EODAG plugin configuration
     """
 
-    auth: Union[AuthBase, S3SessionKwargs]
+    auth: Union[AuthBase, S3SessionKwargs, S3ServiceResource]
     next_page_url: Optional[str]
     next_page_query_obj: Optional[dict[str, Any]]
     total_items_nb: int

eodag/plugins/search/build_search_result.py
CHANGED
@@ -58,9 +58,9 @@ from eodag.utils import (
     deepcopy,
     dict_items_recursive_sort,
     get_geometry_from_various,
-    is_range_in_range,
 )
 from eodag.utils.cache import instance_cached_method
+from eodag.utils.dates import is_range_in_range
 from eodag.utils.exceptions import DownloadError, NotAvailableError, ValidationError
 from eodag.utils.requests import fetch_json
 
@@ -1269,7 +1269,7 @@ def _check_id(product: EOProduct) -> EOProduct:
             isinstance(e, DownloadError) or isinstance(e, ValidationError)
         ) and "order status could not be checked" in e.args[0]:
             raise ValidationError(
-                f"
+                f"Requested data is not available on {product.provider} ({product_id})."
             ) from e
         raise ValidationError(e.args[0]) from e
 
eodag/plugins/search/creodias_s3.py
CHANGED
@@ -24,7 +24,7 @@ from botocore.exceptions import BotoCoreError
 from eodag.api.product import EOProduct  # type: ignore
 from eodag.api.search_result import RawSearchResult
 from eodag.plugins.search.qssearch import ODataV4Search
-from eodag.utils.exceptions import RequestError
+from eodag.utils.exceptions import MisconfiguredError, RequestError
 from eodag.utils.s3 import update_assets_from_s3
 
 logger = logging.getLogger("eodag.search.creodiass3")
@@ -40,6 +40,14 @@ def patched_register_downloader(self, downloader, authenticator):
     :param authenticator: The authentication method needed to perform the download
                           :class:`~eodag.plugins.authentication.base.Authentication`
     """
+    # verify credentials
+    required_creds = ["aws_access_key_id", "aws_secret_access_key"]
+    credentials = getattr(authenticator.config, "credentials", {}) or {}
+    if not all(x in credentials and credentials[x] for x in required_creds):
+        raise MisconfiguredError(
+            f"Incomplete credentials for {self.provider}, missing "
+            f"{[x for x in required_creds if x not in credentials or not credentials[x]]}"
+        )
     # register downloader
     self.register_downloader_only(downloader, authenticator)
     # and also update assets
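
The new guard fails fast with `MisconfiguredError` when S3 credentials are incomplete, instead of failing later during asset listing. The same check can be exercised in isolation; the helper below is a stand-in, not eodag code:

```python
REQUIRED_CREDS = ["aws_access_key_id", "aws_secret_access_key"]


def missing_credentials(credentials: dict) -> list:
    """Return the required S3 credential keys that are absent or empty."""
    return [k for k in REQUIRED_CREDS if not credentials.get(k)]


print(missing_credentials({"aws_access_key_id": "AKIA..."}))
# -> ['aws_secret_access_key']
```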
eodag/plugins/search/qssearch.py
CHANGED
@@ -1133,6 +1133,8 @@ class QueryStringSearch(Search):
                 )
                 if norm_key:
                     product.assets[norm_key] = asset
+                    # Normalize title with key
+                    product.assets[norm_key]["title"] = norm_key
             # sort assets
             product.assets.data = dict(sorted(product.assets.data.items()))
             products.append(product)
@@ -1982,7 +1984,7 @@ class StacSearch(PostJsonSearch):
         return queryables_dict
 
 
-class
+class WekeoSearch(StacSearch, PostJsonSearch):
     """A specialisation of a :class:`~eodag.plugins.search.qssearch.PostJsonSearch` that uses
     generic STAC configuration for queryables (inherited from :class:`~eodag.plugins.search.qssearch.StacSearch`).
     """