eodag 2.12.0__py3-none-any.whl → 3.0.0b1__py3-none-any.whl
- eodag/api/core.py +434 -319
- eodag/api/product/__init__.py +5 -1
- eodag/api/product/_assets.py +7 -2
- eodag/api/product/_product.py +46 -68
- eodag/api/product/metadata_mapping.py +181 -66
- eodag/api/search_result.py +21 -1
- eodag/cli.py +20 -6
- eodag/config.py +95 -6
- eodag/plugins/apis/base.py +8 -162
- eodag/plugins/apis/ecmwf.py +36 -24
- eodag/plugins/apis/usgs.py +40 -24
- eodag/plugins/authentication/aws_auth.py +2 -2
- eodag/plugins/authentication/header.py +31 -6
- eodag/plugins/authentication/keycloak.py +13 -84
- eodag/plugins/authentication/oauth.py +3 -3
- eodag/plugins/authentication/openid_connect.py +256 -46
- eodag/plugins/authentication/qsauth.py +3 -0
- eodag/plugins/authentication/sas_auth.py +8 -1
- eodag/plugins/authentication/token.py +92 -46
- eodag/plugins/authentication/token_exchange.py +120 -0
- eodag/plugins/download/aws.py +86 -91
- eodag/plugins/download/base.py +72 -40
- eodag/plugins/download/http.py +607 -264
- eodag/plugins/download/s3rest.py +28 -15
- eodag/plugins/manager.py +73 -57
- eodag/plugins/search/__init__.py +36 -0
- eodag/plugins/search/base.py +225 -18
- eodag/plugins/search/build_search_result.py +389 -32
- eodag/plugins/search/cop_marine.py +378 -0
- eodag/plugins/search/creodias_s3.py +15 -14
- eodag/plugins/search/csw.py +5 -7
- eodag/plugins/search/data_request_search.py +44 -20
- eodag/plugins/search/qssearch.py +508 -203
- eodag/plugins/search/static_stac_search.py +99 -36
- eodag/resources/constraints/climate-dt.json +13 -0
- eodag/resources/constraints/extremes-dt.json +8 -0
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +1897 -34
- eodag/resources/providers.yml +3539 -3277
- eodag/resources/stac.yml +48 -54
- eodag/resources/stac_api.yml +71 -25
- eodag/resources/stac_provider.yml +5 -0
- eodag/resources/user_conf_template.yml +51 -3
- eodag/rest/__init__.py +6 -0
- eodag/rest/cache.py +70 -0
- eodag/rest/config.py +68 -0
- eodag/rest/constants.py +27 -0
- eodag/rest/core.py +757 -0
- eodag/rest/server.py +397 -258
- eodag/rest/stac.py +438 -307
- eodag/rest/types/collections_search.py +44 -0
- eodag/rest/types/eodag_search.py +232 -43
- eodag/rest/types/{stac_queryables.py → queryables.py} +81 -43
- eodag/rest/types/stac_search.py +277 -0
- eodag/rest/utils/__init__.py +216 -0
- eodag/rest/utils/cql_evaluate.py +119 -0
- eodag/rest/utils/rfc3339.py +65 -0
- eodag/types/__init__.py +99 -9
- eodag/types/bbox.py +15 -14
- eodag/types/download_args.py +31 -0
- eodag/types/search_args.py +58 -7
- eodag/types/whoosh.py +81 -0
- eodag/utils/__init__.py +72 -9
- eodag/utils/constraints.py +37 -37
- eodag/utils/exceptions.py +23 -17
- eodag/utils/requests.py +138 -0
- eodag/utils/rest.py +104 -0
- eodag/utils/stac_reader.py +100 -16
- {eodag-2.12.0.dist-info → eodag-3.0.0b1.dist-info}/METADATA +64 -44
- eodag-3.0.0b1.dist-info/RECORD +109 -0
- {eodag-2.12.0.dist-info → eodag-3.0.0b1.dist-info}/WHEEL +1 -1
- {eodag-2.12.0.dist-info → eodag-3.0.0b1.dist-info}/entry_points.txt +6 -5
- eodag/plugins/apis/cds.py +0 -540
- eodag/rest/utils.py +0 -1133
- eodag-2.12.0.dist-info/RECORD +0 -94
- {eodag-2.12.0.dist-info → eodag-3.0.0b1.dist-info}/LICENSE +0 -0
- {eodag-2.12.0.dist-info → eodag-3.0.0b1.dist-info}/top_level.txt +0 -0
eodag/rest/stac.py
CHANGED
```diff
@@ -21,14 +21,15 @@ import logging
 import os
 from collections import defaultdict
 from datetime import datetime, timezone
-from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, cast
-from urllib.parse import parse_qs, urlencode, urlparse
+from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, cast
+from urllib.parse import parse_qs, urlencode, urlparse, urlunparse
 
 import dateutil.parser
 import geojson
 import shapefile
 from dateutil import tz
 from dateutil.relativedelta import relativedelta
+from jsonpath_ng.jsonpath import Child
 from shapely.geometry import shape
 from shapely.geometry.base import BaseGeometry
 from shapely.ops import unary_union
@@ -38,10 +39,14 @@ from eodag.api.product.metadata_mapping import (
     format_metadata,
     get_metadata_path,
 )
+from eodag.rest.config import Settings
+from eodag.rest.utils.rfc3339 import str_to_interval
 from eodag.utils import (
+    DEFAULT_MISSION_START_DATE,
     deepcopy,
     dict_items_recursive_apply,
     format_dict_items,
+    guess_file_type,
     jsonpath_parse_dict_items,
     string_to_jsonpath,
     update_nested_dict,
@@ -50,8 +55,11 @@ from eodag.utils import (
 from eodag.utils.exceptions import (
     NoMatchingProductType,
     NotAvailableError,
+    RequestError,
+    TimeOutError,
     ValidationError,
 )
+from eodag.utils.requests import fetch_json
 
 if TYPE_CHECKING:
     from eodag.api.core import EODataAccessGateway
@@ -61,9 +69,38 @@ if TYPE_CHECKING:
 
 logger = logging.getLogger("eodag.rest.stac")
 
-DEFAULT_MISSION_START_DATE = "2015-01-01T00:00:00Z"
 STAC_CATALOGS_PREFIX = "catalogs"
 
+# fields not to put in item properties
+COLLECTION_PROPERTIES = [
+    "abstract",
+    "instrument",
+    "platform",
+    "platformSerialIdentifier",
+    "processingLevel",
+    "sensorType",
+    "md5",
+    "license",
+    "title",
+    "missionStartDate",
+    "missionEndDate",
+    "keywords",
+    "stacCollection",
+]
+IGNORED_ITEM_PROPERTIES = [
+    "_id",
+    "id",
+    "keyword",
+    "quicklook",
+    "thumbnail",
+    "downloadLink",
+    "orderLink",
+    "_dc_qs",
+    "qs",
+    "defaultGeometry",
+    "_date",
+]
+
 
 class StacCommon:
     """Stac common object
@@ -113,14 +150,15 @@ class StacCommon:
         for i, bbox in enumerate(self.data["extent"]["spatial"]["bbox"]):
             self.data["extent"]["spatial"]["bbox"][i] = [float(x) for x in bbox]
         # "None" values to None
-
-
+        apply_method: Callable[[str, str], Optional[str]] = lambda _, v: (
+            None if v == "None" else v
         )
-
+        self.data = dict_items_recursive_apply(self.data, apply_method)
         # ids and titles as str
-
-
+        apply_method: Callable[[str, str], Optional[str]] = lambda k, v: (
+            str(v) if k in ["title", "id"] else v
         )
+        self.data = dict_items_recursive_apply(self.data, apply_method)
 
         # empty stac_extensions: "" to []
         if not self.data.get("stac_extensions", True):
@@ -153,15 +191,20 @@ class StacCommon:
         }
         return format_dict_items(extension_model, **format_args)
 
-    def
-        """
-
-
-
-
-
-
-
+    def get_provider_dict(self, provider: str) -> Dict[str, Any]:
+        """Generate STAC provider dict"""
+        provider_config = next(
+            p
+            for p in self.eodag_api.providers_config.values()
+            if provider in [p.name, getattr(p, "group", None)]
+        )
+        return {
+            "name": getattr(provider_config, "group", provider_config.name),
+            "description": getattr(provider_config, "description", None),
+            "roles": getattr(provider_config, "roles", None),
+            "url": getattr(provider_config, "url", None),
+            "priority": getattr(provider_config, "priority", None),
+        }
 
 
 class StacItem(StacCommon):
@@ -213,7 +256,6 @@ class StacItem(StacCommon):
         item_model = self.__filter_item_model_properties(
             self.stac_config["item"], str(search_results[0].product_type)
         )
-        provider_model = deepcopy(self.stac_config["provider"])
 
         # check if some items need to be converted
         need_conversion: Dict[str, Any] = {}
@@ -229,77 +271,62 @@ class StacItem(StacCommon):
                     k, item_model["properties"][k]
                 )
 
+        item_props = [
+            p.right.fields[0]
+            for p in item_model["properties"].values()
+            if isinstance(p, Child)
+        ]
+        ignored_props = COLLECTION_PROPERTIES + item_props + IGNORED_ITEM_PROPERTIES
+
         item_list: List[Dict[str, Any]] = []
         for product in search_results:
-            # parse jsonpath
-            provider_dict = jsonpath_parse_dict_items(
-                provider_model,
-                {
-                    "provider": self.eodag_api.providers_config[
-                        product.provider
-                    ].__dict__
-                },
-            )
-
             product_dict = deepcopy(product.__dict__)
-            if isinstance(product.assets, dict):
-                product_dict["assets"] = product.assets
-            else:
-                product_dict["assets"] = product.assets.as_dict()
 
-            product_item = jsonpath_parse_dict_items(
+            product_item: Dict[str, Any] = jsonpath_parse_dict_items(
                 item_model,
                 {
                     "product": product_dict,
-                    "providers": [
+                    "providers": [self.get_provider_dict(product.provider)],
                 },
             )
 
+            # add additional item props
+            for p in set(product.properties) - set(ignored_props):
+                prefix = getattr(
+                    self.eodag_api.providers_config[product.provider],
+                    "group",
+                    product.provider,
+                )
+                key = p if ":" in p else f"{prefix}:{p}"
+                product_item["properties"][key] = product.properties[p]
+
             # parse download link
-
+            downloadlink_href = (
+                f"{catalog['url']}/items/{product.properties['id']}/download"
+            )
+            _dc_qs = product.properties.get("_dc_qs")
+            url_parts = urlparse(downloadlink_href)
             query_dict = parse_qs(url_parts.query)
             without_arg_url = (
                 f"{url_parts.scheme}://{url_parts.netloc}{url_parts.path}"
                 if url_parts.scheme
                 else f"{url_parts.netloc}{url_parts.path}"
            )
-
             # add provider to query-args
-
-
+            p_config = self.eodag_api.providers_config[product.provider]
+            query_dict.update(provider=[getattr(p_config, "group", p_config.name)])
             # add datacube query-string to query-args
-            _dc_qs = product_item["assets"]["downloadLink"].pop("_dc_qs", None)
             if _dc_qs:
-                query_dict.update(_dc_qs=_dc_qs)
-
-            # update download link with up-to-date query-args
+                query_dict.update(_dc_qs=[_dc_qs])
             if query_dict:
-
-                "
-
+                downloadlink_href = (
+                    f"{without_arg_url}?{urlencode(query_dict, doseq=True)}"
+                )
 
-            #
-
-
-
-            for asset_key, asset_value in origin_assets.items():
-                # use origin asset as default
-                product_item["assets"][asset_key] = asset_value
-                # origin assets as alternate link
-                product_item["assets"][asset_key]["alternate"] = {
-                    "origin": {
-                        "title": "Origin asset link",
-                        "href": asset_value["href"],
-                    }
-                }
-                # use server-mode assets download links
-                asset_value["href"] = without_arg_url
-                if query_dict:
-                    product_item["assets"][asset_key][
-                        "href"
-                    ] += f"/{asset_key}?{urlencode(query_dict, doseq=True)}"
-                else:
-                    product_item["assets"][asset_key]["href"] += f"/{asset_key}"
+            # generate STAC assets
+            product_item["assets"] = self._get_assets(
+                product, downloadlink_href, without_arg_url, query_dict, _dc_qs
+            )
 
             # apply conversion if needed
             for prop_key, prop_val in need_conversion.items():
@@ -321,16 +348,19 @@ class StacItem(StacCommon):
             format_args = deepcopy(self.stac_config)
             format_args["catalog"] = catalog
             format_args["item"] = product_item
-            product_item
-                product_item, **format_args
-            )
+            product_item = format_dict_items(product_item, **format_args)
             product_item["bbox"] = [float(i) for i in product_item["bbox"]]
 
+            # transform shapely geometry to geojson
+            product_item["geometry"] = geojson.loads(
+                geojson.dumps(product_item["geometry"])
+            )
+
             # remove empty properties
             product_item = self.__filter_item_properties_values(product_item)
 
             # update item link with datacube query-string
-            if _dc_qs:
+            if _dc_qs or self.provider:
                 url_parts = urlparse(str(product_item["links"][0]["href"]))
                 without_arg_url = (
                     f"{url_parts.scheme}://{url_parts.netloc}{url_parts.path}"
@@ -345,8 +375,91 @@ class StacItem(StacCommon):
 
         return item_list
 
+    def _get_assets(
+        self,
+        product: EOProduct,
+        downloadlink_href: str,
+        without_arg_url: str,
+        query_dict: Optional[Dict[str, Any]] = None,
+        _dc_qs: Optional[str] = None,
+    ) -> Dict[str, Any]:
+        assets: Dict[str, Any] = {}
+        settings = Settings.from_environment()
+
+        if _dc_qs:
+            parsed = urlparse(product.remote_location)
+            fragments = parsed.fragment.split("?")
+            parsed = parsed._replace(fragment=f"{fragments[0]}?_dc_qs={_dc_qs}")
+            origin_href = urlunparse(parsed)
+        else:
+            origin_href = product.remote_location
+
+        # update download link with up-to-date query-args
+        assets["downloadLink"] = {
+            "title": "Download link",
+            "href": downloadlink_href,
+            "type": "application/zip",
+        }
+
+        if not origin_href.startswith(tuple(settings.origin_url_blacklist)):
+            assets["downloadLink"]["alternate"] = {
+                "origin": {
+                    "title": "Origin asset link",
+                    "href": origin_href,
+                }
+            }
+
+        if "storageStatus" in product.properties:
+            assets["downloadLink"]["storage:tier"] = product.properties["storageStatus"]
+
+        # move origin asset urls to alternate links and replace with eodag-server ones
+        if product.assets:
+            origin_assets = product.assets.as_dict()
+            # replace origin asset urls with eodag-server ones
+            for asset_key, asset_value in origin_assets.items():
+                # use origin asset as default
+                assets[asset_key] = asset_value
+                # origin assets as alternate link
+                if not asset_value["href"].startswith(
+                    tuple(settings.origin_url_blacklist)
+                ):
+                    assets[asset_key]["alternate"] = {
+                        "origin": {
+                            "title": "Origin asset link",
+                            "href": asset_value["href"],
+                        }
+                    }
+                # use server-mode assets download links
+                asset_value["href"] = without_arg_url
+                if query_dict:
+                    assets[asset_key][
+                        "href"
+                    ] += f"/{asset_key}?{urlencode(query_dict, doseq=True)}"
+                else:
+                    assets[asset_key]["href"] += f"/{asset_key}"
+                if asset_type := asset_value.get("type", None):
+                    assets[asset_key]["type"] = asset_type
+                    if origin := assets[asset_key].get("alternate", {}).get("origin"):
+                        origin["type"] = asset_type
+
+        if thumbnail_url := product.properties.get(
+            "quicklook", product.properties.get("thumbnail", None)
+        ):
+            assets["thumbnail"] = {
+                "title": "Thumbnail",
+                "href": thumbnail_url,
+                "role": "thumbnail",
+            }
+            if mime_type := guess_file_type(thumbnail_url):
+                assets["thumbnail"]["type"] = mime_type
+        return assets
+
     def get_stac_items(
-        self,
+        self,
+        search_results: SearchResult,
+        total: int,
+        catalog: Dict[str, Any],
+        next_link: Optional[Dict[str, Any]],
     ) -> Dict[str, Any]:
         """Build STAC items from EODAG search results
 
@@ -359,36 +472,30 @@ class StacItem(StacCommon):
         """
         items_model = deepcopy(self.stac_config["items"])
 
-        search_results.numberMatched = search_results.properties["totalResults"]
-        search_results.numberReturned = len(search_results)
-
-        # next page link
         if "?" in self.url:
             # search endpoint: use page url as self link
             for i, _ in enumerate(items_model["links"]):
                 if items_model["links"][i]["rel"] == "self":
                     items_model["links"][i]["href"] = catalog["url"]
 
-
-            datetime.now(timezone.utc).isoformat().replace("+00:00", "") + "Z"
-        )
+        timestamp = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
 
         # parse jsonpath
         items = jsonpath_parse_dict_items(
-            items_model,
+            items_model,
+            {
+                "numberMatched": total,
+                "numberReturned": len(search_results),
+                "timeStamp": timestamp,
+            },
         )
         # parse f-strings
         format_args = deepcopy(self.stac_config)
         format_args["catalog"] = catalog
         items = format_dict_items(items, **format_args)
 
-
-
-            search_results.properties["itemsPerPage"]
-            * search_results.properties["page"]
-            >= search_results.properties["totalResults"]
-        ):
-            items["links"] = [link for link in items["links"] if link["rel"] != "next"]
+        if next_link:
+            items["links"].append(next_link)
 
         # provide static catalog to build features
         if "search?" in catalog["url"]:
@@ -402,7 +509,7 @@ class StacItem(StacCommon):
         items["features"] = self.__get_item_list(search_results, catalog)
 
         self.update_data(items)
-        return
+        return self.data
 
     def __filter_item_model_properties(
         self, item_model: Dict[str, Any], product_type: str
@@ -427,12 +534,10 @@ class StacItem(StacCommon):
                 if pt["ID"] == product_type
                 or ("alias" in pt and pt["alias"] == product_type)
             ][0]
-        except IndexError:
+        except IndexError as e:
             raise NoMatchingProductType(
-                "Product type {} not available for {}"
-
-                )
-            )
+                f"Product type {product_type} not available for {self.provider}"
+            ) from e
 
         result_item_model = deepcopy(item_model)
         result_item_model["stac_extensions"] = list(
@@ -521,12 +626,6 @@ class StacItem(StacCommon):
         item_model = self.__filter_item_model_properties(
             self.stac_config["item"], product_type
         )
-        provider_model = deepcopy(self.stac_config["provider"])
-
-        provider_dict = jsonpath_parse_dict_items(
-            provider_model,
-            {"provider": self.eodag_api.providers_config[product.provider].__dict__},
-        )
 
         catalog = StacCatalog(
             url=self.url.split("/items")[0],
@@ -545,14 +644,15 @@ class StacItem(StacCommon):
             item_model,
             {
                 "product": product_dict,
-                "providers":
+                "providers": [self.get_provider_dict(product.provider)],
             },
         )
         # parse f-strings
         format_args = deepcopy(self.stac_config)
-        format_args["catalog"] =
-
-
+        format_args["catalog"] = {
+            **catalog.data,
+            **{"url": catalog.url, "root": catalog.root},
+        }
         format_args["item"] = product_item
         product_item = format_dict_items(product_item, **format_args)
         product_item["bbox"] = [float(i) for i in product_item["bbox"]]
@@ -561,7 +661,7 @@ class StacItem(StacCommon):
         product_item = self.__filter_item_properties_values(product_item)
 
         self.update_data(product_item)
-        return self.
+        return self.data
 
 
 class StacCollection(StacCommon):
@@ -579,6 +679,34 @@ class StacCollection(StacCommon):
     :type root: str
     """
 
+    # External STAC collections
+    ext_stac_collections: Dict[str, Dict[str, Any]] = dict()
+
+    @classmethod
+    def fetch_external_stac_collections(cls, eodag_api: EODataAccessGateway) -> None:
+        """Load external STAC collections
+
+        :param eodag_api: EODAG python API instance
+        :type eodag_api: :class:`eodag.api.core.EODataAccessGateway`
+        """
+        list_product_types = eodag_api.list_product_types(fetch_providers=False)
+        for product_type in list_product_types:
+            ext_stac_collection_path = product_type.get("stacCollection")
+            if not ext_stac_collection_path:
+                continue
+            logger.info(f"Fetching external STAC collection for {product_type['ID']}")
+
+            try:
+                ext_stac_collection = fetch_json(ext_stac_collection_path)
+            except (RequestError, TimeOutError) as e:
+                logger.debug(e)
+                logger.warning(
+                    f"Could not read remote external STAC collection from {ext_stac_collection_path}",
+                )
+                ext_stac_collection = {}
+
+            cls.ext_stac_collections[product_type["ID"]] = ext_stac_collection
+
     def __init__(
         self,
         url: str,
@@ -595,145 +723,161 @@ class StacCollection(StacCommon):
             root=root,
         )
 
-    def
-
-    ) -> List[Dict[str, Any]]:
-        """Returns a list of supported product types
+    def __list_product_type_providers(self, product_type: Dict[str, Any]) -> List[str]:
+        """Retrieve a list of providers for a given product type.
 
-        :param
-        :type
-        :
+        :param product_type: Dictionary containing information about the product type.
+        :type product_type: dict
+        :return: A list of provider names.
         :rtype: list
         """
-        if
-
-            try:
-                guessed_product_types = self.eodag_api.guess_product_type(**filters)
-            except NoMatchingProductType:
-                guessed_product_types = []
-        if guessed_product_types:
-            product_types = [
-                pt
-                for pt in self.eodag_api.list_product_types(provider=self.provider)
-                if pt["ID"] in guessed_product_types
-            ]
-        else:
-            product_types = self.eodag_api.list_product_types(provider=self.provider)
-        return product_types
+        if self.provider:
+            return [self.provider]
 
-
-
-
-
+        return [
+            plugin.provider
+            for plugin in self.eodag_api._plugins_manager.get_search_plugins(
+                product_type=product_type.get("_id", product_type["ID"])
+            )
+        ]
 
-
-        :
-
-
+    def __generate_stac_collection(
+        self, collection_model: Any, product_type: Dict[str, Any]
+    ) -> Dict[str, Any]:
+        """Generate a STAC collection dictionary for a given product type.
+
+        :param collection_model: The base model for the STAC collection.
+        :type collection_model: Any
+        :param product_type: Dictionary containing information about the product type.
+        :type product_type: dict
+        :return: A dictionary representing the STAC collection for the given product type.
+        :rtype: dict
         """
-
-        provider_model = deepcopy(self.stac_config["provider"])
+        providers = self.__list_product_type_providers(product_type)
 
-
+        providers_dict: Dict[str, Dict[str, Any]] = {}
+        for provider in providers:
+            p_dict = self.get_provider_dict(provider)
+            providers_dict.setdefault(p_dict["name"], p_dict)
+        providers_list = list(providers_dict.values())
 
-
-
-
-
-
-
-
-
-
-
-
-
+        # parse jsonpath
+        product_type_collection = jsonpath_parse_dict_items(
+            collection_model,
+            {
+                "product_type": product_type,
+                "providers": providers_list,
+            },
+        )
+        # override EODAG's collection with the external collection
+        ext_stac_collection = deepcopy(
+            self.ext_stac_collections.get(product_type["ID"], {})
+        )
+
+        # update links (keep eodag links as defaults)
+        ext_stac_collection.setdefault("links", {})
+        for link in product_type_collection["links"]:
+            ext_stac_collection["links"] = [
+                x for x in ext_stac_collection["links"] if x["rel"] != link["rel"]
+            ]
+            ext_stac_collection["links"].append(link)
+
+        # merge "keywords" lists
+        if "keywords" in ext_stac_collection:
+            try:
+                ext_stac_collection["keywords"] = [
+                    k
+                    for k in set(
+                        ext_stac_collection["keywords"]
+                        + product_type_collection["keywords"]
                     )
+                    if k is not None
                 ]
-
-
-
-
-
+            except TypeError as e:
+                logger.warning(
+                    f"Could not merge keywords from external collection for {product_type['ID']}: {str(e)}"
+                )
+                logger.debug(
+                    f"External collection keywords: {str(ext_stac_collection['keywords'])}, ",
+                    f"Product type keywords: {str(product_type_collection['keywords'])}",
                 )
-            providers_models.append(provider_m)
 
-
-
-
-            {
-                "product_type": product_type,
-                "providers": providers_models,
-            },
-        )
-        # parse f-strings
-        format_args = deepcopy(self.stac_config)
-        format_args["collection"] = dict(
-            product_type_collection,
-            **{"url": f"{self.url}/{product_type['ID']}", "root": self.root},
-        )
-        product_type_collection = format_dict_items(
-            product_type_collection, **format_args
-        )
+        # merge providers
+        if "providers" in ext_stac_collection:
+            ext_stac_collection["providers"] += product_type_collection["providers"]
 
-
+        product_type_collection.update(ext_stac_collection)
 
-
+        # parse f-strings
+        format_args = deepcopy(self.stac_config)
+        format_args["collection"] = {
+            **product_type_collection,
+            **{
+                "url": self.url
+                if self.url.endswith(product_type["ID"])
+                else f"{self.url}/{product_type['ID']}",
+                "root": self.root,
+            },
+        }
+        product_type_collection = format_dict_items(
+            product_type_collection, **format_args
+        )
 
-
-
-
-
+        return product_type_collection
+
+    def get_collection_list(
+        self,
+        collection: Optional[str] = None,
+        q: Optional[str] = None,
+        platform: Optional[str] = None,
+        instrument: Optional[str] = None,
+        constellation: Optional[str] = None,
+        datetime: Optional[str] = None,
+    ) -> List[Dict[str, Any]]:
+        """Build STAC collections list
 
         :param filters: (optional) Additional filters for collections search
         :type filters: dict
-        :returns:
-        :rtype:
+        :returns: STAC collection dicts list
+        :rtype: list
         """
-
-        collections["collections"] = self.__get_collection_list(filters)
-
-        # # parse f-strings
-        format_args = deepcopy(self.stac_config)
-        format_args["collections"].update({"url": self.url, "root": self.root})
-
-        collections["links"] = [
-            format_dict_items(link, **format_args) for link in collections["links"]
-        ]
-
-        collections["links"] += [
-            {
-                "rel": "child",
-                "title": collec["id"],
-                "href": [
-                    link["href"] for link in collec["links"] if link["rel"] == "self"
-                ][0],
-            }
-            for collec in collections["collections"]
-        ]
-
-        self.update_data(collections)
-        return self.as_dict()
+        collection_model = deepcopy(self.stac_config["collection"])
 
-
-        """Build STAC collection by its id
+        start, end = str_to_interval(datetime)
 
-
-
-        :returns: Collection dictionary
-        :rtype: dict
-        """
-        collection_list = self.__get_collection_list(
-            filters={"productType": collection_id}
+        all_pt = self.eodag_api.list_product_types(
+            provider=self.provider, fetch_providers=False
         )
 
-
-
-
-
+        if any((collection, q, platform, instrument, constellation, datetime)):
+            try:
+                guessed_product_types = self.eodag_api.guess_product_type(
+                    free_text=q,
+                    platformSerialIdentifier=platform,
+                    instrument=instrument,
+                    platform=constellation,
+                    productType=collection,
+                    missionStartDate=start.isoformat() if start else None,
+                    missionEndDate=end.isoformat() if end else None,
+                )
+            except NoMatchingProductType:
+                product_types = []
+            else:
+                product_types = [
+                    pt for pt in all_pt if pt["ID"] in guessed_product_types
+                ]
+        else:
+            product_types = all_pt
+
+        # list product types with all metadata using guessed ids
+        collection_list: List[Dict[str, Any]] = []
+        for product_type in product_types:
+            stac_collection = self.__generate_stac_collection(
+                collection_model, product_type
+            )
+            collection_list.append(stac_collection)
 
-
-        return self.as_dict()
+        return collection_list
 
 
 class StacCatalog(StacCommon):
@@ -751,9 +895,6 @@ class StacCatalog(StacCommon):
     :type root: str
     :param catalogs: (optional) Catalogs list
     :type catalogs: list
-    :param fetch_providers: (optional) Whether to fetch providers for new product
-        types or not
-    :type fetch_providers: bool
     """
 
     def __init__(
@@ -763,8 +904,7 @@ class StacCatalog(StacCommon):
         provider: Optional[str],
         eodag_api: EODataAccessGateway,
         root: str = "/",
-        catalogs: List[str] =
-        fetch_providers: bool = True,
+        catalogs: Optional[List[str]] = None,
     ) -> None:
         super(StacCatalog, self).__init__(
             url=url,
@@ -791,7 +931,7 @@ class StacCatalog(StacCommon):
         self.data["links"] += self.children
 
         # build catalog
-        self.__build_stac_catalog(catalogs
+        self.__build_stac_catalog(catalogs)
 
     def __update_data_from_catalog_config(self, catalog_config: Dict[str, Any]) -> bool:
         """Updates configuration and data using given input catalog config
@@ -817,40 +957,43 @@ class StacCatalog(StacCommon):
 
         return True
 
-    def set_children(self, children: List[Dict[str, Any]] =
+    def set_children(self, children: Optional[List[Dict[str, Any]]] = None) -> bool:
         """Set catalog children / links
 
         :param children: (optional) Children list
         :type children: list
         """
-        self.children = children
+        self.children = children or []
        self.data["links"] = [
             link for link in self.data["links"] if link["rel"] != "child"
         ]
-        self.data["links"] += children
+        self.data["links"] += self.children
         return True
 
     def set_stac_product_type_by_id(
-        self, product_type: str, **
+        self, product_type: str, **_: Any
     ) -> Dict[str, Any]:
         """Updates catalog with given product_type
 
         :param product_type: Product type
         :type product_type: str
         """
-
+        collections = StacCollection(
             url=self.url,
             stac_config=self.stac_config,
             provider=self.provider,
             eodag_api=self.eodag_api,
             root=self.root,
-        ).
+        ).get_collection_list(collection=product_type)
+
+        if not collections:
+            raise NotAvailableError(f"Collection {product_type} does not exist.")
 
         cat_model = deepcopy(self.stac_config["catalogs"]["product_type"]["model"])
         # parse f-strings
         format_args = deepcopy(self.stac_config)
         format_args["catalog"] = defaultdict(str, **self.data)
-        format_args["collection"] =
+        format_args["collection"] = collections[0]
         try:
             parsed_dict: Dict[str, Any] = format_dict_items(cat_model, **format_args)
         except Exception:
@@ -860,13 +1003,13 @@ class StacCatalog(StacCommon):
         self.update_data(parsed_dict)
 
         # update search args
-        self.search_args.update({"
+        self.search_args.update({"productType": product_type})
 
         return parsed_dict
 
     # get / set dates filters -------------------------------------------------
 
-    def get_stac_years_list(self, **
+    def get_stac_years_list(self, **_: Any) -> List[int]:
         """Get catalog available years list
 
         :returns: Years list
@@ -876,7 +1019,7 @@ class StacCatalog(StacCommon):
 
         return list(range(extent_date_min.year, extent_date_max.year + 1))
 
-    def get_stac_months_list(self, **
+    def get_stac_months_list(self, **_: Any) -> List[int]:
         """Get catalog available months list
 
         :returns: Months list
@@ -891,7 +1034,7 @@ class StacCatalog(StacCommon):
             )
         )
 
-    def get_stac_days_list(self, **
+    def get_stac_days_list(self, **_: Any) -> List[int]:
         """Get catalog available days list
 
         :returns: Days list
@@ -905,7 +1048,7 @@ class StacCatalog(StacCommon):
             )
         )
 
-    def set_stac_year_by_id(self, year: str, **
+    def set_stac_year_by_id(self, year: str, **_: Any) -> Dict[str, Any]:
         """Updates and returns catalog with given year
 
         :param year: Year number
@@ -916,12 +1059,12 @@ class StacCatalog(StacCommon):
         extent_date_min, extent_date_max = self.get_datetime_extent()
 
         datetime_min = max(
-            [extent_date_min, dateutil.parser.parse("{}-01-01T00:00:00Z"
+            [extent_date_min, dateutil.parser.parse(f"{year}-01-01T00:00:00Z")]
         )
         datetime_max = min(
             [
                 extent_date_max,
-                dateutil.parser.parse("{}-01-01T00:00:00Z"
+                dateutil.parser.parse(f"{year}-01-01T00:00:00Z")
                 + relativedelta(years=1),
             ]
         )
@@ -932,7 +1075,7 @@ class StacCatalog(StacCommon):
 
         return parsed_dict
 
-    def set_stac_month_by_id(self, month: str, **
+    def set_stac_month_by_id(self, month: str, **_: Any) -> Dict[str, Any]:
         """Updates and returns catalog with given month
 
         :param month: Month number
@@ -946,13 +1089,13 @@ class StacCatalog(StacCommon):
         datetime_min = max(
             [
                 extent_date_min,
-                dateutil.parser.parse("{}-{}-01T00:00:00Z"
+                dateutil.parser.parse(f"{year}-{month}-01T00:00:00Z"),
             ]
         )
         datetime_max = min(
             [
                 extent_date_max,
-                dateutil.parser.parse("{}-{}-01T00:00:00Z"
+                dateutil.parser.parse(f"{year}-{month}-01T00:00:00Z")
                 + relativedelta(months=1),
             ]
         )
@@ -963,7 +1106,7 @@ class StacCatalog(StacCommon):
 
         return parsed_dict
 
-    def set_stac_day_by_id(self, day: str, **
+    def set_stac_day_by_id(self, day: str, **_: Any) -> Dict[str, Any]:
         """Updates and returns catalog with given day
 
         :param day: Day number
@@ -978,13 +1121,13 @@ class StacCatalog(StacCommon):
         datetime_min = max(
             [
                 extent_date_min,
-                dateutil.parser.parse("{}-{}-{}T00:00:00Z"
+                dateutil.parser.parse(f"{year}-{month}-{day}T00:00:00Z"),
             ]
         )
         datetime_max = min(
             [
                 extent_date_max,
-                dateutil.parser.parse("{}-{}-{}T00:00:00Z"
+                dateutil.parser.parse(f"{year}-{month}-{day}T00:00:00Z")
                 + relativedelta(days=1),
             ]
         )
@@ -1049,8 +1192,8 @@ class StacCatalog(StacCommon):
                 "year": datetime_min.year,
                 "month": datetime_min.month,
                 "day": datetime_min.day,
-                "min": datetime_min.isoformat().replace("+00:00", "")
-                "max": datetime_max.isoformat().replace("+00:00", "")
+                "min": datetime_min.isoformat().replace("+00:00", "Z"),
+                "max": datetime_max.isoformat().replace("+00:00", "Z"),
             },
         )
         parsed_dict: Dict[str, Any] = format_dict_items(catalog_model, **format_args)
@@ -1060,15 +1203,15 @@ class StacCatalog(StacCommon):
         # update search args
         self.search_args.update(
             {
-                "
-                "
+                "start": datetime_min.isoformat().replace("+00:00", "Z"),
+                "end": datetime_max.isoformat().replace("+00:00", "Z"),
             }
         )
         return parsed_dict
 
     # get / set cloud_cover filter --------------------------------------------
 
-    def get_stac_cloud_covers_list(self, **
+    def get_stac_cloud_covers_list(self, **_: Any) -> List[int]:
         """Get cloud_cover list
 
         :returns: cloud_cover list
@@ -1076,9 +1219,7 @@ class StacCatalog(StacCommon):
         """
         return list(range(0, 101, 10))
 
-    def set_stac_cloud_cover_by_id(
-        self, cloud_cover: str, **kwargs: Any
-    ) -> Dict[str, Any]:
+    def set_stac_cloud_cover_by_id(self, cloud_cover: str, **_: Any) -> Dict[str, Any]:
         """Updates and returns catalog with given max cloud_cover
 
         :param cloud_cover: Cloud_cover number
@@ -1096,7 +1237,7 @@ class StacCatalog(StacCommon):
         self.update_data(parsed_dict)
 
         # update search args
-        self.search_args.update({"
+        self.search_args.update({"cloudCover": cloud_cover})
 
         return parsed_dict
 
@@ -1112,23 +1253,21 @@ class StacCatalog(StacCommon):
         """
 
         if catalog_name not in self.stac_config["catalogs"]:
-            logger.warning(
-                "no entry found for {} in location_config".format(catalog_name)
-            )
+            logger.warning("no entry found for %s in location_config", catalog_name)
             return []
         location_config = self.stac_config["catalogs"][catalog_name]
 
         for k in ["path", "attr"]:
             if k not in location_config.keys():
                 logger.warning(
-                    "no
+                    "no %s key found for %s in location_config", k, catalog_name
                 )
                 return []
         path = location_config["path"]
         attr = location_config["attr"]
 
         with shapefile.Reader(path) as shp:
-            countries_list: List[str] = [rec[attr] for rec in shp.records()]
+            countries_list: List[str] = [rec[attr] for rec in shp.records()]  # type: ignore
 
         # remove duplicates
         countries_list = list(set(countries_list))
@@ -1153,7 +1292,7 @@ class StacCatalog(StacCommon):
 
         if location_list_cat_key not in self.stac_config["catalogs"]:
             logger.warning(
-                "no entry found for
+                "no entry found for %s's list in location_config", catalog_name
             )
             return {}
         location_config = self.stac_config["catalogs"][location_list_cat_key]
@@ -1161,9 +1300,7 @@ class StacCatalog(StacCommon):
         for k in ["path", "attr"]:
             if k not in location_config.keys():
                 logger.warning(
-                    "no
-                    k, catalog_name
-                )
+                    "no %s key found for %s's list in location_config", k, catalog_name
                 )
                 return {}
         path = location_config["path"]
@@ -1176,9 +1313,9 @@ class StacCatalog(StacCommon):
             if shaperec.record.as_dict().get(attr, None) == location
         ]
 
-        if
+        if not geom_hits:
             logger.warning(
-                "no feature found in %s matching %s=%s"
+                "no feature found in %s matching %s=%s", path, attr, location
             )
             return {}
 
@@ -1216,41 +1353,44 @@ class StacCatalog(StacCommon):
             )
 
             # set default child/parent for this location
-            parsed["location"]["parent_key"] = "{
+            parsed["location"]["parent_key"] = f"{loc['name']}_list"
 
-            locations_config["{
+            locations_config[f"{loc['name']}_list"] = parsed["locations_list"]
             locations_config[loc["name"]] = parsed["location"]
 
         return locations_config
 
-    def __build_stac_catalog(
-        self, catalogs: List[str] = [], fetch_providers: bool = True
-    ) -> StacCatalog:
+    def __build_stac_catalog(self, catalogs: Optional[List[str]] = None) -> StacCatalog:
         """Build nested catalog from catalag list
 
         :param catalogs: (optional) Catalogs list
         :type catalogs: list
-        :param fetch_providers: (optional) Whether to fetch providers for new product
-            types or not
-        :type fetch_providers: bool
         :returns: This catalog obj
         :rtype: :class:`eodag.stac.StacCatalog`
         """
+        settings = Settings.from_environment()
+
         # update conf with user shp locations
         locations_config = self.build_locations_config()
 
-        self.stac_config["catalogs"] =
-            deepcopy(self.stac_config["catalogs"]),
-
+        self.stac_config["catalogs"] = {
+            **deepcopy(self.stac_config["catalogs"]),
+            **locations_config,
+        }
 
-        if
+        if not catalogs:
             # Build root catalog combined with landing page
             self.__update_data_from_catalog_config(
                 {
-                    "model":
-                        deepcopy(self.stac_config["landing_page"]),
-                        **{
-
+                    "model": {
+                        **deepcopy(self.stac_config["landing_page"]),
+                        **{
+                            "provider": self.provider,
+                            "id": settings.stac_api_landing_id,
+                            "title": settings.stac_api_title,
+                            "description": settings.stac_api_description,
+                        },
+                    }
                 }
             )
 
@@ -1258,7 +1398,7 @@ class StacCatalog(StacCommon):
             product_types_list = [
                 pt
                 for pt in self.eodag_api.list_product_types(
-                    provider=self.provider, fetch_providers=
+                    provider=self.provider, fetch_providers=False
                 )
             ]
             self.set_children(
@@ -1268,16 +1408,17 @@ class StacCatalog(StacCommon):
                         "href": urljoin(
                             self.url, f"{STAC_CATALOGS_PREFIX}/{product_type['ID']}"
                         ),
-                        "title": product_type["
+                        "title": product_type["title"],
                     }
                     for product_type in product_types_list
                 ]
             )
-
-
-
-
-        )
+            return self
+
+        # use product_types_list as base for building nested catalogs
+        self.__update_data_from_catalog_config(
+            deepcopy(self.stac_config["catalogs"]["product_types_list"])
+        )
 
         for idx, cat in enumerate(catalogs):
             if idx % 2 == 0:
@@ -1286,12 +1427,11 @@ class StacCatalog(StacCommon):
                 cat_data_value = cat
 
                 # update data
+                cat_data_name_dict = self.stac_config["catalogs"][cat_data_name]
                 set_data_method_name = (
-                    "set_stac_
-                    if "catalog_type"
-
-                    else "set_stac_%s_by_id"
-                    % self.stac_config["catalogs"][cat_data_name]["catalog_type"]
+                    f"set_stac_{cat_data_name}_by_id"
+                    if "catalog_type" not in cat_data_name_dict.keys()
+                    else f"set_stac_{cat_data_name_dict['catalog_type']}_by_id"
                 )
                 set_data_method = getattr(self, set_data_method_name)
                 set_data_method(cat_data_value, catalog_name=cat_data_name)
@@ -1355,21 +1495,20 @@ class StacCatalog(StacCommon):
                     for c in self.stac_config["catalogs"].keys()
                     if self.stac_config["catalogs"][c]["model"]["id"] == cat
                 ][0]
-            except IndexError:
+            except IndexError as e:
                 raise ValidationError(
-                    "Bad settings for
-                )
+                    f"Bad settings for {cat} in stac_config catalogs"
+                ) from e
             cat_config = deepcopy(self.stac_config["catalogs"][cat_key])
             # update data
             self.__update_data_from_catalog_config(cat_config)
 
             # get filtering values list
             get_data_method_name = (
-                "get_stac_
+                f"get_stac_{cat_key}"
                 if "catalog_type"
                 not in self.stac_config["catalogs"][cat_key].keys()
-                else "get_stac_
-                % self.stac_config["catalogs"][cat_key]["catalog_type"]
+                else f"get_stac_{self.stac_config['catalogs'][cat_key]['catalog_type']}"
             )
             get_data_method = getattr(self, get_data_method_name)
             cat_data_list = get_data_method(catalog_name=cat_key)
@@ -1388,11 +1527,3 @@ class StacCatalog(StacCommon):
             )
 
         return self
-
-    def get_stac_catalog(self) -> Dict[str, Any]:
-        """Get nested STAC catalog as data dict
-
-        :returns: Catalog dictionnary
-        :rtype: dict
-        """
-        return self.as_dict()
```