eodag 3.2.0__py3-none-any.whl → 3.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- eodag/api/core.py +2 -1
- eodag/api/search_result.py +3 -4
- eodag/config.py +3 -0
- eodag/plugins/authentication/token.py +16 -1
- eodag/plugins/download/http.py +46 -17
- eodag/plugins/search/build_search_result.py +390 -86
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/providers.yml +66 -19
- eodag/utils/__init__.py +8 -2
- {eodag-3.2.0.dist-info → eodag-3.3.0.dist-info}/METADATA +2 -2
- {eodag-3.2.0.dist-info → eodag-3.3.0.dist-info}/RECORD +15 -15
- {eodag-3.2.0.dist-info → eodag-3.3.0.dist-info}/WHEEL +0 -0
- {eodag-3.2.0.dist-info → eodag-3.3.0.dist-info}/entry_points.txt +0 -0
- {eodag-3.2.0.dist-info → eodag-3.3.0.dist-info}/licenses/LICENSE +0 -0
- {eodag-3.2.0.dist-info → eodag-3.3.0.dist-info}/top_level.txt +0 -0
eodag/plugins/search/build_search_result.py

@@ -22,7 +22,8 @@ import hashlib
 import logging
 import re
 from collections import OrderedDict
-from datetime import datetime, timedelta
+from datetime import date, datetime, timedelta, timezone
+from types import MethodType
 from typing import TYPE_CHECKING, Annotated, Any, Optional, Union
 from urllib.parse import quote_plus, unquote_plus

@@ -35,19 +36,22 @@ from pydantic import Field
 from pydantic.fields import FieldInfo
 from requests.auth import AuthBase
 from shapely.geometry.base import BaseGeometry
-from typing_extensions import get_args
+from typing_extensions import get_args  # noqa: F401

 from eodag.api.product import EOProduct
 from eodag.api.product.metadata_mapping import (
+    DEFAULT_GEOMETRY,
     NOT_AVAILABLE,
     OFFLINE_STATUS,
+    STAGING_STATUS,
     format_metadata,
+    mtd_cfg_as_conversion_and_querypath,
     properties_from_json,
 )
 from eodag.api.search_result import RawSearchResult
 from eodag.plugins.search import PreparedSearch
 from eodag.plugins.search.qssearch import PostJsonSearch, QueryStringSearch
-from eodag.types import json_field_definition_to_python
+from eodag.types import json_field_definition_to_python  # noqa: F401
 from eodag.types.queryables import Queryables, QueryablesDict
 from eodag.utils import (
     DEFAULT_MISSION_START_DATE,
@@ -57,7 +61,7 @@ from eodag.utils import (
     get_geometry_from_various,
     is_range_in_range,
 )
-from eodag.utils.exceptions import ValidationError
+from eodag.utils.exceptions import DownloadError, NotAvailableError, ValidationError
 from eodag.utils.requests import fetch_json

 if TYPE_CHECKING:
@@ -286,6 +290,133 @@ def ecmwf_format(v: str) -> str:
     return ECMWF_PREFIX + v if v in ALLOWED_KEYWORDS else v


+def get_min_max(
+    value: Optional[Union[str, list[str]]] = None,
+) -> tuple[Optional[str], Optional[str]]:
+    """Returns the min and max from a list of strings or the same string if a single string is given."""
+    if isinstance(value, list):
+        sorted_values = sorted(value)
+        return sorted_values[0], sorted_values[-1]
+    return value, value
+
+
+def append_time(input_date: date, time: Optional[str]) -> datetime:
+    """
+    Parses a time string in format HHMM and appends it to a date.
+
+    if the time string is in format HH:MM we convert it to HHMM
+    """
+    if not time:
+        time = "0000"
+    time = time.replace(":", "")
+    if time == "2400":
+        time = "0000"
+    dt = datetime.combine(input_date, datetime.strptime(time, "%H%M").time())
+    dt.replace(tzinfo=timezone.utc)
+    return dt
+
+
+def parse_date(
+    date_str: str, time: Optional[Union[str, list[str]]]
+) -> tuple[datetime, datetime]:
+    """Parses a date string in formats YYYY-MM-DD, YYYMMDD, solo or in start/end or start/to/end intervals."""
+    if "to" in date_str:
+        start_date_str, end_date_str = date_str.split("/to/")
+    elif "/" in date_str:
+        dates = date_str.split("/")
+        start_date_str = dates[0]
+        end_date_str = dates[-1]
+    else:
+        start_date_str = end_date_str = date_str
+
+    # Update YYYYMMDD formatted dates
+    if re.match(r"^\d{8}$", start_date_str):
+        start_date_str = (
+            f"{start_date_str[:4]}-{start_date_str[4:6]}-{start_date_str[6:]}"
+        )
+    if re.match(r"^\d{8}$", end_date_str):
+        end_date_str = f"{end_date_str[:4]}-{end_date_str[4:6]}-{end_date_str[6:]}"
+
+    start_date = datetime.fromisoformat(start_date_str.rstrip("Z"))
+    end_date = datetime.fromisoformat(end_date_str.rstrip("Z"))
+
+    if time:
+        start_t, end_t = get_min_max(time)
+        start_date = append_time(start_date.date(), start_t)
+        end_date = append_time(end_date.date(), end_t)
+
+    return start_date, end_date
+
+
+def parse_year_month_day(
+    year: Union[str, list[str]],
+    month: Optional[Union[str, list[str]]] = None,
+    day: Optional[Union[str, list[str]]] = None,
+    time: Optional[Union[str, list[str]]] = None,
+) -> tuple[datetime, datetime]:
+    """Extracts and returns the year, month, day, and time from the parameters."""
+
+    def build_date(year, month=None, day=None, time=None) -> datetime:
+        """Datetime from default_date with updated year, month, day and time."""
+        updated_date = datetime(int(year), 1, 1).replace(
+            month=int(month) if month is not None else 1,
+            day=int(day) if day is not None else 1,
+        )
+        if time is not None:
+            updated_date = append_time(updated_date.date(), time)
+        return updated_date
+
+    start_y, end_y = get_min_max(year)
+    start_m, end_m = get_min_max(month)
+    start_d, end_d = get_min_max(day)
+    start_t, end_t = get_min_max(time)
+
+    start_date = build_date(start_y, start_m, start_d, start_t)
+    end_date = build_date(end_y, end_m, end_d, end_t)
+
+    return start_date, end_date
+
+
+def ecmwf_temporal_to_eodag(
+    params: dict[str, Any]
+) -> tuple[Optional[str], Optional[str]]:
+    """
+    Converts ECMWF temporal parameters to EODAG temporal parameters.
+
+    ECMWF temporal parameters:
+    - **year** or **hyear**: Union[str, list[str]] — Year(s) as a string or list of strings.
+    - **month** or **hmonth**: Union[str, list[str]] — Month(s) as a string or list of strings.
+    - **day** or **hday**: Union[str, list[str]] — Day(s) as a string or list of strings.
+    - **time**: str — A string representing the time in the format `HHMM` (e.g., `0200`, `0800`, `1400`).
+    - **date**: str — A string in one of the formats:
+        - `YYYY-MM-DD`
+        - `YYYY-MM-DD/YYYY-MM-DD`
+        - `YYYY-MM-DD/to/YYYY-MM-DD`
+
+    :param params: Dictionary containing ECMWF temporal parameters.
+    :return: A tuple with:
+        - **start**: A string in the format `YYYY-MM-DDTHH:MM:SSZ`.
+        - **end**: A string in the format `YYYY-MM-DDTHH:MM:SSZ`.
+    """
+    start = end = None
+
+    if date := params.get("date"):
+        start, end = parse_date(date, params.get("time"))
+
+    elif year := params.get("year") or params.get("hyear"):
+        year = params.get("year") or params.get("hyear")
+        month = params.get("month") or params.get("hmonth")
+        day = params.get("day") or params.get("hday")
+        time = params.get("time")
+
+        start, end = parse_year_month_day(year, month, day, time)
+
+    if start and end:
+        return start.strftime("%Y-%m-%dT%H:%M:%SZ"), end.strftime("%Y-%m-%dT%H:%M:%SZ")
+    else:
+        return None, None
+
+
 class ECMWFSearch(PostJsonSearch):
     """ECMWF search plugin.

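The temporal helpers added above are plain module-level functions and can be exercised on their own. A minimal sketch, assuming they are imported from the updated `eodag.plugins.search.build_search_result` module (the parameter values below are made up):

```python
# Illustrative only: not part of the diff, just exercising the new helpers.
from eodag.plugins.search.build_search_result import ecmwf_temporal_to_eodag

# ECMWF "date" interval plus a list of times: the min/max of the list bound the range
print(ecmwf_temporal_to_eodag({"date": "20240101/to/20240131", "time": ["00:00", "12:00"]}))
# ('2024-01-01T00:00:00Z', '2024-01-31T12:00:00Z')

# ECMWF year/month keywords: a missing day defaults to the first of the month
print(ecmwf_temporal_to_eodag({"year": ["2020", "2021"], "month": "06"}))
# ('2020-06-01T00:00:00Z', '2021-06-01T00:00:00Z')

# no temporal keywords at all
print(ecmwf_temporal_to_eodag({"variable": "2m_temperature"}))
# (None, None)
```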
@@ -335,6 +466,16 @@ class ECMWFSearch(PostJsonSearch):
         self.config.__dict__.setdefault("api_endpoint", "")
         self.config.pagination.setdefault("next_page_query_obj", "{{}}")

+        # defaut conf for accepting custom query params
+        self.config.__dict__.setdefault(
+            "discover_metadata",
+            {
+                "auto_discovery": True,
+                "search_param": "{metadata}",
+                "metadata_pattern": "^[a-zA-Z0-9][a-zA-Z0-9_]*$",
+            },
+        )
+
     def do_search(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]:
         """Should perform the actual search request.

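The new `discover_metadata` default above is what lets custom, not-explicitly-mapped query parameters pass through to the provider, provided they look like plain identifiers. A standalone check of the `metadata_pattern` regex (illustrative candidates only):

```python
import re

metadata_pattern = re.compile(r"^[a-zA-Z0-9][a-zA-Z0-9_]*$")

for candidate in ("pressure_level", "leadtime_hour", "_private", "bad-name"):
    # only identifier-like names that do not start with an underscore match
    print(candidate, bool(metadata_pattern.match(candidate)))
# pressure_level True / leadtime_hour True / _private False / bad-name False
```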
@@ -497,22 +638,10 @@ class ECMWFSearch(PostJsonSearch):
         if not isinstance(mapping, list):
             mapping = product_type_conf[END]
         if isinstance(mapping, list):
-            # get time parameters (date, year, month, ...) from metadata mapping
-            input_mapping = mapping[0].replace("{{", "").replace("}}", "")
-            time_params = [
-                values.split(":")[0].strip() for values in input_mapping.split(",")
-            ]
-            time_params = [
-                tp.replace('"', "").replace("'", "") for tp in time_params
-            ]
             # if startTime is not given but other time params (e.g. year/month/(day)) are given,
             # no default date is required
-
-
-                if tp not in keywords:
-                    in_keywords = False
-                    break
-            if not in_keywords:
+            start, end = ecmwf_temporal_to_eodag(keywords)
+            if start is None:
                 keywords[START] = self.get_product_type_cfg_value(
                     "missionStartDate", DEFAULT_MISSION_START_DATE
                 )
@@ -912,7 +1041,7 @@ class ECMWFSearch(PostJsonSearch):
         """
         # Rename keywords from form with metadata mapping.
         # Needed to map constraints like "xxxx" to eodag parameter "ecmwf:xxxx"
-        required = [ecmwf_format(k) for k in required_keywords]
+        required = [ecmwf_format(k) for k in required_keywords]  # noqa: F841

         queryables: dict[str, Annotated[Any, FieldInfo]] = {}
         for name, values in available_values.items():
@@ -998,92 +1127,74 @@ class ECMWFSearch(PostJsonSearch):
         _dc_qs = kwargs.pop("_dc_qs", None)
         if _dc_qs is not None:
             qs = unquote_plus(unquote_plus(_dc_qs))
-
+            sorted_unpaginated_qp = geojson.loads(qs)
         else:
-
-            if isinstance(
-                self.config.pagination["next_page_query_obj"], str
-            ) and hasattr(results, "query_params_unpaginated"):
-                unpaginated_query_params = results.query_params_unpaginated
-            elif isinstance(self.config.pagination["next_page_query_obj"], str):
-                next_page_query_obj = orjson.loads(
-                    self.config.pagination["next_page_query_obj"].format()
-                )
-                unpaginated_query_params = {
-                    k: v
-                    for k, v in results.query_params.items()
-                    if (k, v) not in next_page_query_obj.items()
-                }
-            else:
-                unpaginated_query_params = self.query_params
-            # query hash, will be used to build a product id
-            sorted_unpaginated_query_params = dict_items_recursive_sort(
-                unpaginated_query_params
-            )
-
-        # use all available query_params to parse properties
-        result = dict(
-            result,
-            **sorted_unpaginated_query_params,
-            qs=sorted_unpaginated_query_params,
-        )
+            sorted_unpaginated_qp = dict_items_recursive_sort(results.query_params)

         # remove unwanted query params
         for param in getattr(self.config, "remove_from_query", []):
-
+            sorted_unpaginated_qp.pop(param, None)

-
+        if result:
+            properties = result
+            properties.update(result.pop("request_params", None) or {})

-
+            properties = {k: v for k, v in properties.items() if not k.startswith("__")}

-
-        kwargs.pop("auth", None)
-        result.update(results.product_type_def_params)
-        result = dict(result, **{k: v for k, v in kwargs.items() if v is not None})
+            properties["geometry"] = properties.get("area") or DEFAULT_GEOMETRY

-
-
-
-            self.config.metadata_mapping,
-            discovery_config=getattr(self.config, "discover_metadata", {}),
-        )
+            start, end = ecmwf_temporal_to_eodag(properties)
+            properties["startTimeFromAscendingNode"] = start
+            properties["completionTimeFromAscendingNode"] = end

-
-        # use
-
-
-
-
-
-            return date_str.split("T")[0].replace("-", "")
+        else:
+            # use all available query_params to parse properties
+            result_data: dict[str, Any] = {
+                **results.product_type_def_params,
+                **sorted_unpaginated_qp,
+                **{"qs": sorted_unpaginated_qp},
+            }

-
-
+            # update result with product_type_def_params and search args if not None (and not auth)
+            kwargs.pop("auth", None)
+            result_data.update(results.product_type_def_params)
+            result_data = {
+                **result_data,
+                **{k: v for k, v in kwargs.items() if v is not None},
+            }

-
-
+            properties = properties_from_json(
+                result_data,
+                self.config.metadata_mapping,
+                discovery_config=getattr(self.config, "discover_metadata", {}),
+            )

-
-            product_id += f"_{slugify(start)}"
-        if end != NOT_AVAILABLE:
-            product_id += f"_{slugify(end)}"
+        query_hash = hashlib.sha1(str(result_data).encode("UTF-8")).hexdigest()

-
+        properties["title"] = properties["id"] = (
+            (product_type or kwargs.get("dataset", self.provider)).upper()
+            + "_ORDERABLE_"
+            + query_hash
+        )

-
+        qs = geojson.dumps(sorted_unpaginated_qp)

         # used by server mode to generate downloadlink href
+        # TODO: to remove once the legacy server is removed
         properties["_dc_qs"] = quote_plus(qs)

         product = EOProduct(
             provider=self.provider,
-
-
+            properties={ecmwf_format(k): v for k, v in properties.items()},
+            **kwargs,
         )

-
-
-
+        # backup original register_downloader to register_downloader_only
+        product.register_downloader_only = product.register_downloader
+        # patched register_downloader that will also update properties
+        product.register_downloader = MethodType(patched_register_downloader, product)
+
+        return [product]

     def count_hits(
         self, count_url: Optional[str] = None, result_type: Optional[str] = None
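Orderable products now get a deterministic identifier built from the request itself: the product type (or `dataset`, or provider name) in upper case, the literal `_ORDERABLE_` marker, and a SHA-1 digest of the stringified query parameters. A rough sketch of that naming scheme with made-up values:

```python
import hashlib

# hypothetical sorted query parameters for an ECMWF-style request
sorted_params = {"dataset": "reanalysis-era5-single-levels", "variable": "2m_temperature"}

query_hash = hashlib.sha1(str(sorted_params).encode("UTF-8")).hexdigest()
product_id = "reanalysis-era5-single-levels".upper() + "_ORDERABLE_" + query_hash
print(product_id)  # REANALYSIS-ERA5-SINGLE-LEVELS_ORDERABLE_<40 hex chars>
```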
@@ -1097,6 +1208,83 @@ class ECMWFSearch(PostJsonSearch):
         return 1


+def _check_id(product: EOProduct) -> EOProduct:
+    """Check if the id is the one of an existing job.
+
+    If the job exists, poll it, otherwise, raise an error.
+
+    :param product: The product to check the id for
+    :raises: :class:`~eodag.utils.exceptions.ValidationError`
+    """
+    if not (product_id := product.search_kwargs.get("id")):
+        return product
+
+    on_response_mm = getattr(product.downloader.config, "order_on_response", {}).get(
+        "metadata_mapping", {}
+    )
+    if not on_response_mm:
+        return product
+
+    logger.debug(f"Update product properties using given orderId {product_id}")
+    on_response_mm_jsonpath = mtd_cfg_as_conversion_and_querypath(
+        on_response_mm,
+    )
+    properties_update = properties_from_json(
+        {}, {**on_response_mm_jsonpath, **{"orderId": (None, product_id)}}
+    )
+    product.properties.update(
+        {k: v for k, v in properties_update.items() if v != NOT_AVAILABLE}
+    )
+
+    auth = product.downloader_auth.authenticate() if product.downloader_auth else None
+
+    # try to poll the job corresponding to the given id
+    try:
+        product.downloader._order_status(product=product, auth=auth)  # type: ignore
+    # when a NotAvailableError is catched, it means the product is not ready and still needs to be polled
+    except NotAvailableError:
+        product.properties["storageStatus"] = STAGING_STATUS
+    except Exception as e:
+        if (
+            isinstance(e, DownloadError) or isinstance(e, ValidationError)
+        ) and "order status could not be checked" in e.args[0]:
+            raise ValidationError(
+                f"Item {product_id} does not exist with {product.provider}."
+            ) from e
+        raise ValidationError(e.args[0]) from e
+
+    # update product id
+    product.properties["id"] = product_id
+    # update product type if needed
+    if product.product_type is None:
+        product.product_type = product.properties.get("ecmwf:dataset")
+    # update product title
+    product.properties["title"] = (
+        (product.product_type or product.provider).upper() + "_" + product_id
+    )
+    # use NOT_AVAILABLE as fallback product_type to avoid using guess_product_type
+    if product.product_type is None:
+        product.product_type = NOT_AVAILABLE
+
+    return product
+
+
+def patched_register_downloader(self, downloader, authenticator):
+    """Register product donwloader and update properties if searched by id.
+
+    :param self: product to which information should be added
+    :param downloader: The download method that it can use
+        :class:`~eodag.plugins.download.base.Download` or
+        :class:`~eodag.plugins.api.base.Api`
+    :param authenticator: The authentication method needed to perform the download
+        :class:`~eodag.plugins.authentication.base.Authentication`
+    """
+    # register downloader
+    self.register_downloader_only(downloader, authenticator)
+    # and also update properties
+    _check_id(self)
+
+
 class MeteoblueSearch(ECMWFSearch):
     """MeteoblueSearch search plugin.

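`register_downloader` is swapped out per product instance with `types.MethodType`, so that registering a downloader also triggers the order-status check performed by `_check_id` when the search was made by id. The pattern itself is plain Python; a self-contained sketch with simplified stand-in classes (not the real eodag types):

```python
from types import MethodType


class Product:
    """Stand-in for EOProduct, reduced to the registration hook."""

    def register_downloader(self, downloader, authenticator):
        self.downloader = downloader
        self.authenticator = authenticator


def patched_register_downloader(self, downloader, authenticator):
    # call the original bound method, kept under a different attribute name...
    self.register_downloader_only(downloader, authenticator)
    # ...then run the extra post-registration step (in eodag: update properties)
    print("post-registration hook ran for", downloader)


product = Product()
# keep the original implementation reachable, then swap in the patched one
product.register_downloader_only = product.register_downloader
product.register_downloader = MethodType(patched_register_downloader, product)
product.register_downloader("http-downloader", "token-auth")
```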
@@ -1165,6 +1353,97 @@ class MeteoblueSearch(ECMWFSearch):
         """
         return QueryStringSearch.build_query_string(self, product_type, query_dict)

+    def normalize_results(self, results, **kwargs):
+        """Build :class:`~eodag.api.product._product.EOProduct` from provider result
+
+        :param results: Raw provider result as single dict in list
+        :param kwargs: Search arguments
+        :returns: list of single :class:`~eodag.api.product._product.EOProduct`
+        """
+
+        product_type = kwargs.get("productType")
+
+        result = results[0]
+
+        # datacube query string got from previous search
+        _dc_qs = kwargs.pop("_dc_qs", None)
+        if _dc_qs is not None:
+            qs = unquote_plus(unquote_plus(_dc_qs))
+            sorted_unpaginated_query_params = geojson.loads(qs)
+        else:
+            next_page_query_obj = orjson.loads(
+                self.config.pagination["next_page_query_obj"].format()
+            )
+            unpaginated_query_params = {
+                k: v
+                for k, v in results.query_params.items()
+                if (k, v) not in next_page_query_obj.items()
+            }
+            # query hash, will be used to build a product id
+            sorted_unpaginated_query_params = dict_items_recursive_sort(
+                unpaginated_query_params
+            )
+
+        # use all available query_params to parse properties
+        result = dict(
+            result,
+            **sorted_unpaginated_query_params,
+            qs=sorted_unpaginated_query_params,
+        )
+
+        qs = geojson.dumps(sorted_unpaginated_query_params)
+
+        query_hash = hashlib.sha1(str(qs).encode("UTF-8")).hexdigest()
+
+        # update result with product_type_def_params and search args if not None (and not auth)
+        kwargs.pop("auth", None)
+        result.update(results.product_type_def_params)
+        result = dict(result, **{k: v for k, v in kwargs.items() if v is not None})
+
+        # parse properties
+        parsed_properties = properties_from_json(
+            result,
+            self.config.metadata_mapping,
+            discovery_config=getattr(self.config, "discover_metadata", {}),
+        )
+
+        properties = {
+            # use product_type_config as default properties
+            **getattr(self.config, "product_type_config", {}),
+            **{ecmwf_format(k): v for k, v in parsed_properties.items()},
+        }
+
+        def slugify(date_str: str) -> str:
+            return date_str.split("T")[0].replace("-", "")
+
+        # build product id
+        product_id = (product_type or self.provider).upper()
+
+        start = properties.get(START, NOT_AVAILABLE)
+        end = properties.get(END, NOT_AVAILABLE)
+
+        if start != NOT_AVAILABLE:
+            product_id += f"_{slugify(start)}"
+        if end != NOT_AVAILABLE:
+            product_id += f"_{slugify(end)}"
+
+        product_id += f"_{query_hash}"
+
+        properties["id"] = properties["title"] = product_id
+
+        # used by server mode to generate downloadlink href
+        properties["_dc_qs"] = quote_plus(qs)
+
+        product = EOProduct(
+            provider=self.provider,
+            productType=product_type,
+            properties=properties,
+        )
+
+        return [
+            product,
+        ]
+

 class WekeoECMWFSearch(ECMWFSearch):
     """
@@ -1201,6 +1480,10 @@ class WekeoECMWFSearch(ECMWFSearch):
         :returns: list of single :class:`~eodag.api.product._product.EOProduct`
         """

+        if kwargs.get("id") and "ORDERABLE" not in kwargs["id"]:
+            # id is order id (only letters and numbers) -> use parent normalize results
+            return super().normalize_results(results, **kwargs)
+
         # formating of orderLink requires access to the productType value.
         results.data = [
             {**result, **results.product_type_def_params} for result in results
@@ -1211,12 +1494,28 @@ class WekeoECMWFSearch(ECMWFSearch):
         if not normalized:
             return normalized

-
+        # remove unwanted query params
+        excluded_query_params = getattr(self.config, "remove_from_query", [])
+        filtered_query_params = {
+            k: v
+            for k, v in results.query_params.items()
+            if k not in excluded_query_params
+        }
         for product in normalized:
             properties = {**product.properties, **results.query_params}
-            properties["_dc_qs"] =
+            properties["_dc_qs"] = quote_plus(orjson.dumps(filtered_query_params))
             product.properties = {ecmwf_format(k): v for k, v in properties.items()}

+            # update product and title the same way as in parent class
+            splitted_id = product.properties.get("title", "").split("-")
+            dataset = "_".join(splitted_id[:-1])
+            query_hash = splitted_id[-1]
+            product.properties["title"] = product.properties["id"] = (
+                (product.product_type or dataset or self.provider).upper()
+                + "_ORDERABLE_"
+                + query_hash
+            )
+
         return normalized

     def do_search(self, *args: Any, **kwargs: Any) -> list[dict[str, Any]]:
@@ -1226,4 +1525,9 @@ class WekeoECMWFSearch(ECMWFSearch):
         :param kwargs: keyword arguments to be used in the search
         :return: list containing the results from the provider in json format
         """
-
+        if "id" in kwargs and "ORDERABLE" not in kwargs["id"]:
+            # id is order id (only letters and numbers) -> use parent normalize results.
+            # No real search. We fake it all, then check order status using given id
+            return [{}]
+        else:
+            return QueryStringSearch.do_search(self, *args, **kwargs)
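Taken together with the `normalize_results` and `do_search` overrides above, an id that does not contain the `ORDERABLE` marker is treated as a provider order id: no real search is issued, and the order status is polled instead. A hypothetical usage sketch (the provider name and order id below are placeholders, assuming the usual eodag search-by-id entry point):

```python
from eodag import EODataAccessGateway

dag = EODataAccessGateway()
# id without "ORDERABLE": interpreted as an existing order/job id and polled via _check_id
results = dag.search(id="abc123def456", provider="wekeo_ecmwf")
```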