eodag 3.0.0b2__py3-none-any.whl → 3.0.1__py3-none-any.whl
- eodag/__init__.py +6 -8
- eodag/api/core.py +295 -287
- eodag/api/product/__init__.py +10 -4
- eodag/api/product/_assets.py +2 -14
- eodag/api/product/_product.py +16 -30
- eodag/api/product/drivers/__init__.py +7 -2
- eodag/api/product/drivers/base.py +0 -3
- eodag/api/product/metadata_mapping.py +12 -31
- eodag/api/search_result.py +33 -12
- eodag/cli.py +35 -19
- eodag/config.py +455 -155
- eodag/plugins/apis/base.py +13 -7
- eodag/plugins/apis/ecmwf.py +16 -7
- eodag/plugins/apis/usgs.py +68 -16
- eodag/plugins/authentication/aws_auth.py +25 -7
- eodag/plugins/authentication/base.py +10 -1
- eodag/plugins/authentication/generic.py +14 -3
- eodag/plugins/authentication/header.py +12 -4
- eodag/plugins/authentication/keycloak.py +41 -22
- eodag/plugins/authentication/oauth.py +11 -1
- eodag/plugins/authentication/openid_connect.py +183 -167
- eodag/plugins/authentication/qsauth.py +12 -4
- eodag/plugins/authentication/sas_auth.py +19 -2
- eodag/plugins/authentication/token.py +59 -11
- eodag/plugins/authentication/token_exchange.py +19 -19
- eodag/plugins/crunch/base.py +7 -2
- eodag/plugins/crunch/filter_date.py +8 -11
- eodag/plugins/crunch/filter_latest_intersect.py +5 -7
- eodag/plugins/crunch/filter_latest_tpl_name.py +2 -5
- eodag/plugins/crunch/filter_overlap.py +9 -15
- eodag/plugins/crunch/filter_property.py +9 -14
- eodag/plugins/download/aws.py +84 -99
- eodag/plugins/download/base.py +36 -77
- eodag/plugins/download/creodias_s3.py +11 -2
- eodag/plugins/download/http.py +134 -109
- eodag/plugins/download/s3rest.py +37 -43
- eodag/plugins/manager.py +173 -41
- eodag/plugins/search/__init__.py +9 -9
- eodag/plugins/search/base.py +35 -35
- eodag/plugins/search/build_search_result.py +55 -64
- eodag/plugins/search/cop_marine.py +113 -32
- eodag/plugins/search/creodias_s3.py +20 -8
- eodag/plugins/search/csw.py +41 -1
- eodag/plugins/search/data_request_search.py +119 -14
- eodag/plugins/search/qssearch.py +619 -197
- eodag/plugins/search/static_stac_search.py +25 -23
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +211 -56
- eodag/resources/providers.yml +1762 -1809
- eodag/resources/stac.yml +3 -163
- eodag/resources/user_conf_template.yml +134 -119
- eodag/rest/config.py +1 -2
- eodag/rest/constants.py +0 -1
- eodag/rest/core.py +70 -92
- eodag/rest/errors.py +181 -0
- eodag/rest/server.py +24 -330
- eodag/rest/stac.py +105 -630
- eodag/rest/types/eodag_search.py +17 -15
- eodag/rest/types/queryables.py +5 -14
- eodag/rest/types/stac_search.py +18 -13
- eodag/rest/utils/rfc3339.py +0 -1
- eodag/types/__init__.py +24 -6
- eodag/types/download_args.py +14 -5
- eodag/types/queryables.py +1 -2
- eodag/types/search_args.py +10 -11
- eodag/types/whoosh.py +0 -2
- eodag/utils/__init__.py +97 -136
- eodag/utils/constraints.py +0 -8
- eodag/utils/exceptions.py +23 -9
- eodag/utils/import_system.py +0 -4
- eodag/utils/logging.py +37 -80
- eodag/utils/notebook.py +4 -4
- eodag/utils/requests.py +13 -23
- eodag/utils/rest.py +0 -4
- eodag/utils/stac_reader.py +3 -15
- {eodag-3.0.0b2.dist-info → eodag-3.0.1.dist-info}/METADATA +41 -24
- eodag-3.0.1.dist-info/RECORD +109 -0
- {eodag-3.0.0b2.dist-info → eodag-3.0.1.dist-info}/WHEEL +1 -1
- {eodag-3.0.0b2.dist-info → eodag-3.0.1.dist-info}/entry_points.txt +1 -0
- eodag/resources/constraints/climate-dt.json +0 -13
- eodag/resources/constraints/extremes-dt.json +0 -8
- eodag-3.0.0b2.dist-info/RECORD +0 -110
- {eodag-3.0.0b2.dist-info → eodag-3.0.1.dist-info}/LICENSE +0 -0
- {eodag-3.0.0b2.dist-info → eodag-3.0.1.dist-info}/top_level.txt +0 -0
eodag/utils/__init__.py
CHANGED
@@ -55,6 +55,7 @@ from typing import (
     Any,
     Callable,
     Dict,
+    Iterable,
     Iterator,
     List,
     Mapping,
@@ -78,11 +79,6 @@ from urllib.parse import ( # noqa; noqa
 )
 from urllib.request import url2pathname
 
-if sys.version_info >= (3, 9):
-    from typing import Annotated, get_args, get_origin  # noqa
-else:
-    from typing_extensions import Annotated, get_args, get_origin  # type: ignore # noqa
-
 if sys.version_info >= (3, 12):
     from typing import Unpack  # type: ignore # noqa
 else:
@@ -98,7 +94,7 @@ from dateutil.tz import UTC
 from jsonpath_ng import jsonpath
 from jsonpath_ng.ext import parse
 from jsonpath_ng.jsonpath import Child, Fields, Index, Root, Slice
-from requests import HTTPError
+from requests import HTTPError, Response
 from shapely.geometry import Polygon, shape
 from shapely.geometry.base import GEOMETRY_TYPES, BaseGeometry
 from tqdm.auto import tqdm
@@ -109,7 +105,7 @@ from eodag.utils.exceptions import MisconfiguredError
 if TYPE_CHECKING:
     from jsonpath_ng import JSONPath
 
-    from eodag.api.product import EOProduct
+    from eodag.api.product._product import EOProduct
 
 
 logger = py_logging.getLogger("eodag.utils")
@@ -124,6 +120,10 @@ USER_AGENT = {"User-Agent": f"eodag/{eodag_version}"}
 HTTP_REQ_TIMEOUT = 5  # in seconds
 DEFAULT_STREAM_REQUESTS_TIMEOUT = 60  # in seconds
 
+REQ_RETRY_TOTAL = 3
+REQ_RETRY_BACKOFF_FACTOR = 2
+REQ_RETRY_STATUS_FORCELIST = [401, 429, 500, 502, 503, 504]
+
 # default wait times in minutes
 DEFAULT_DOWNLOAD_WAIT = 2  # in minutes
 DEFAULT_DOWNLOAD_TIMEOUT = 20  # in minutes
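Note: the new ``REQ_RETRY_*`` constants above are plain module-level defaults; this hunk does not show how eodag consumes them. As a hedged illustration only, values like these are commonly wired into ``urllib3``'s ``Retry`` through a ``requests`` ``HTTPAdapter``:

    # Illustration only, not taken from eodag's own code.
    import requests
    from requests.adapters import HTTPAdapter
    from urllib3.util.retry import Retry

    retries = Retry(
        total=3,  # REQ_RETRY_TOTAL
        backoff_factor=2,  # REQ_RETRY_BACKOFF_FACTOR
        status_forcelist=[401, 429, 500, 502, 503, 504],  # REQ_RETRY_STATUS_FORCELIST
    )
    session = requests.Session()
    session.mount("https://", HTTPAdapter(max_retries=retries))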
@@ -236,9 +236,10 @@ class FloatRange(click.types.FloatParamType):
 def slugify(value: Any, allow_unicode: bool = False) -> str:
     """Copied from Django Source code, only modifying last line (no need for safe
     strings).
+
     source: https://github.com/django/django/blob/master/django/utils/text.py
 
-    Convert to ASCII if
+    Convert to ASCII if ``allow_unicode`` is ``False``. Convert spaces to hyphens.
     Remove characters that aren't alphanumerics, underscores, or hyphens.
     Convert to lowercase. Also strip leading and trailing whitespace.
     """
@@ -297,7 +298,7 @@ def strip_accents(s: str) -> str:
 
 def uri_to_path(uri: str) -> str:
     """
-    Convert a file URI (e.g.
+    Convert a file URI (e.g. ``file:///tmp``) to a local path (e.g. ``/tmp``)
     """
     if not uri.startswith("file"):
         raise ValueError("A file URI must be provided (e.g. 'file:///tmp'")
@@ -321,9 +322,7 @@ def mutate_dict_in_place(func: Callable[[Any], Any], mapping: Dict[Any, Any]) ->
     mapping.
 
     :param func: A function to apply to each value of mapping which is not a dict object
-    :type func: func
     :param mapping: A Python dict object
-    :type mapping: dict
     :returns: None
     """
     for key, value in mapping.items():
@@ -334,10 +333,10 @@ def mutate_dict_in_place(func: Callable[[Any], Any], mapping: Dict[Any, Any]) ->
 
 
 def merge_mappings(mapping1: Dict[Any, Any], mapping2: Dict[Any, Any]) -> None:
-    """Merge two mappings with string keys, values from
-    from
+    """Merge two mappings with string keys, values from ``mapping2`` overriding values
+    from ``mapping1``.
 
-    Do its best to detect the key in
+    Do its best to detect the key in ``mapping1`` to override. For example:
 
     >>> mapping2 = {"keya": "new"}
     >>> mapping1 = {"keyA": "obsolete"}
@@ -345,14 +344,11 @@ def merge_mappings(mapping1: Dict[Any, Any], mapping2: Dict[Any, Any]) -> None:
     >>> mapping1
     {'keyA': 'new'}
 
-    If mapping2 has a key that cannot be detected in mapping1
-    to mapping1 as is.
+    If ``mapping2`` has a key that cannot be detected in ``mapping1``, this new key is
+    added to ``mapping1`` as is.
 
     :param mapping1: The mapping containing values to be overridden
-    :
-    :param mapping2: The mapping containing values that will override the
-        first mapping
-    :type mapping2: dict
+    :param mapping2: The mapping containing values that will override the first mapping
     """
     # A mapping between mapping1 keys as lowercase strings and original mapping1 keys
     m1_keys_lowercase = {key.lower(): key for key in mapping1}
@@ -419,9 +415,7 @@ def get_timestamp(date_time: str) -> float:
     If the datetime has no offset, it is assumed to be an UTC datetime.
 
     :param date_time: The datetime string to return as timestamp
-    :
-    :returns: The timestamp corresponding to the date_time string in seconds
-    :rtype: float
+    :returns: The timestamp corresponding to the ``date_time`` string in seconds
     """
     dt = isoparse(date_time)
     if not dt.tzinfo:
@@ -430,7 +424,7 @@ def get_timestamp(date_time: str) -> float:
 
 
 def datetime_range(start: dt, end: dt) -> Iterator[dt]:
-    """Generator function for all dates in-between start and end date."""
+    """Generator function for all dates in-between ``start`` and ``end`` date."""
     delta = end - start
     for nday in range(delta.days + 1):
         yield start + datetime.timedelta(days=nday)
@@ -443,7 +437,6 @@ class DownloadedCallback:
         """Callback
 
         :param product: The downloaded EO product
-        :type product: :class:`~eodag.api.product._product.EOProduct`
         """
         logger.debug("Download finished for the product %s", product)
 
@@ -451,15 +444,15 @@ class DownloadedCallback:
 class ProgressCallback(tqdm):
     """A callable used to render progress to users for long running processes.
 
-    It inherits from
-    instantiation:
-
-
-
+    It inherits from :class:`tqdm.auto.tqdm`, and accepts the same arguments on
+    instantiation: ``iterable``, ``desc``, ``total``, ``leave``, ``file``, ``ncols``,
+    ``mininterval``, ``maxinterval``, ``miniters``, ``ascii``, ``disable``, ``unit``,
+    ``unit_scale``, ``dynamic_ncols``, ``smoothing``, ``bar_format``, ``initial``,
+    ``position``, ``postfix``, ``unit_divisor``.
 
-    It can be globally disabled using
-
-    individually disabled using
+    It can be globally disabled using ``eodag.utils.logging.setup_logging(0)`` or
+    ``eodag.utils.logging.setup_logging(level, no_progress_bar=True)``, and
+    individually disabled using ``disable=True``.
     """
 
     def __init__(self, *args: Any, **kwargs: Any) -> None:
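Note: per the rewritten docstring above, progress bars can be silenced globally instead of per callback. A minimal usage sketch, with the call signature inferred from that docstring:

    # Keep verbose logs but disable all progress bars globally.
    from eodag.utils.logging import setup_logging

    setup_logging(2, no_progress_bar=True)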
@@ -483,9 +476,7 @@ class ProgressCallback(tqdm):
         """Update the progress bar.
 
         :param increment: Amount of data already processed
-        :type increment: int
         :param total: (optional) Maximum amount of data to be processed
-        :type total: int
         """
         if total is not None and total != self.total:
             self.reset(total=total)
@@ -496,8 +487,8 @@ class ProgressCallback(tqdm):
         """Returns another progress callback using the same initial
         keyword-arguments.
 
-        Optional
-        new
+        Optional ``args`` and ``kwargs`` parameters will be used to create a
+        new :class:`~eodag.utils.ProgressCallback` instance, overriding initial
         `kwargs`.
         """
 
@@ -519,7 +510,7 @@ def get_progress_callback() -> tqdm:
 
 
 def repeatfunc(func: Callable[..., Any], n: int, *args: Any) -> starmap:
-    """Call
+    """Call ``func`` ``n`` times with ``args``"""
     return starmap(func, repeat(args, n))
 
 
@@ -534,14 +525,12 @@ def makedirs(dirpath: str) -> None:
 
 
 def rename_subfolder(dirpath: str, name: str) -> None:
-    """Rename first subfolder found in dirpath with given name
-    raise RuntimeError if no subfolder can be found
+    """Rename first subfolder found in ``dirpath`` with given ``name``,
+    raise :class:`RuntimeError` if no subfolder can be found
 
     :param dirpath: path to the directory containing the subfolder
-    :type dirpath: str
     :param name: new name of the subfolder
-    :
-    :raises: RuntimeError
+    :raises: :class:`RuntimeError`
 
     Example:
 
@@ -555,16 +544,20 @@ def rename_subfolder(dirpath: str, name: str) -> None:
     ... rename_subfolder(tmpdir, "otherfolder")
     ... assert not os.path.isdir(somefolder) and os.path.isdir(otherfolder)
 
-    Before
+    Before::
+
         $ tree <tmp-folder>
         <tmp-folder>
         └── somefolder
             └── somefile
-
+
+    After::
+
         $ tree <tmp-folder>
         <tmp-folder>
         └── otherfolder
             └── somefile
+
     """
     try:
         subdir, *_ = (p for p in glob(os.path.join(dirpath, "*")) if os.path.isdir(p))
@@ -580,7 +573,7 @@ def rename_subfolder(dirpath: str, name: str) -> None:
 def format_dict_items(
     config_dict: Dict[str, Any], **format_variables: Any
 ) -> Dict[Any, Any]:
-    r"""
+    r"""Recursively apply :meth:`str.format` to ``**format_variables`` on ``config_dict`` values
 
     >>> format_dict_items(
     ...     {"foo": {"bar": "{a}"}, "baz": ["{b}?", "{b}!"]},
@@ -588,12 +581,9 @@ def format_dict_items(
     ... ) == {"foo": {"bar": "qux"}, "baz": ["quux?", "quux!"]}
     True
 
-    :param config_dict:
-    :type config_dict: dict
+    :param config_dict: Dictionary having values that need to be parsed
     :param format_variables: Variables used as args for parsing
-    :type format_variables: dict
     :returns: Updated dict
-    :rtype: dict
     """
     return dict_items_recursive_apply(config_dict, format_string, **format_variables)
 
@@ -601,7 +591,7 @@ def format_dict_items(
 def jsonpath_parse_dict_items(
     jsonpath_dict: Dict[str, Any], values_dict: Dict[str, Any]
 ) -> Dict[Any, Any]:
-    """
+    """Recursively parse :class:`jsonpath_ng.JSONPath` elements in dict
 
     >>> import jsonpath_ng.ext as jsonpath
     >>> jsonpath_parse_dict_items(
@@ -610,12 +600,9 @@ def jsonpath_parse_dict_items(
     ... ) == {'foo': {'bar': 'baz'}, 'qux': ['quux', 'quux']}
     True
 
-    :param jsonpath_dict:
-    :type jsonpath_dict: dict
+    :param jsonpath_dict: Dictionary having :class:`jsonpath_ng.JSONPath` values that need to be parsed
     :param values_dict: Values dict used as args for parsing
-    :type values_dict: dict
     :returns: Updated dict
-    :rtype: dict
     """
     return dict_items_recursive_apply(jsonpath_dict, parse_jsonpath, **values_dict)
 
@@ -627,7 +614,7 @@ def update_nested_dict(
     allow_empty_values: bool = False,
     allow_extend_duplicates: bool = True,
 ) -> Dict[Any, Any]:
-    """Update recursively old_dict items with new_dict ones
+    """Update recursively ``old_dict`` items with ``new_dict`` ones
 
     >>> update_nested_dict(
     ...     {"a": {"a.a": 1, "a.b": 2}, "b": 3},
@@ -661,15 +648,10 @@ def update_nested_dict(
     True
 
     :param old_dict: Dict to be updated
-    :type old_dict: dict
     :param new_dict: Incomming dict
-    :type new_dict: dict
     :param extend_list_values: (optional) Extend old_dict value if both old/new values are lists
-    :type extend_list_values: bool
     :param allow_empty_values: (optional) Allow update with empty values
-    :type allow_empty_values: bool
     :returns: Updated dict
-    :rtype: dict
     """
     for k, v in new_dict.items():
         if k in old_dict.keys():
@@ -734,13 +716,9 @@ def items_recursive_apply(
     'foo'
 
     :param input_obj: Input object (dict or list)
-    :type input_obj: Union[dict,list]
     :param apply_method: Method to be applied to dict elements
-    :type apply_method: :func:`apply_method`
     :param apply_method_parameters: Optional parameters passed to the method
-    :type apply_method_parameters: dict
     :returns: Updated object
-    :rtype: Union[dict, list]
     """
     if isinstance(input_obj, dict):
         return dict_items_recursive_apply(
@@ -768,14 +746,10 @@ def dict_items_recursive_apply(
     ... ) == {'foo': {'bar': 'BAZ!'}, 'qux': ['A!', 'B!']}
     True
 
-    :param config_dict: Input nested
-    :type config_dict: dict
+    :param config_dict: Input nested dictionary
     :param apply_method: Method to be applied to dict elements
-    :type apply_method: :func:`apply_method`
     :param apply_method_parameters: Optional parameters passed to the method
-    :type apply_method_parameters: dict
     :returns: Updated dict
-    :rtype: dict
     """
     result_dict: Dict[Any, Any] = deepcopy(config_dict)
     for dict_k, dict_v in result_dict.items():
@@ -809,13 +783,9 @@ def list_items_recursive_apply(
     [{'foo': {'bar': 'BAZ!'}}, 'QUX!']
 
     :param config_list: Input list containing nested lists/dicts
-    :type config_list: list
     :param apply_method: Method to be applied to list elements
-    :type apply_method: :func:`apply_method`
     :param apply_method_parameters: Optional parameters passed to the method
-    :type apply_method_parameters: dict
     :returns: Updated list
-    :rtype: list
     """
     result_list = deepcopy(config_list)
     for list_idx, list_v in enumerate(result_list):
@@ -850,9 +820,7 @@ def items_recursive_sort(
     'foo'
 
     :param input_obj: Input object (dict or list)
-    :type input_obj: Union[dict,list]
     :returns: Updated object
-    :rtype: Union[dict, list]
     """
     if isinstance(input_obj, dict):
         return dict_items_recursive_sort(input_obj)
@@ -871,10 +839,8 @@ def dict_items_recursive_sort(config_dict: Dict[Any, Any]) -> Dict[Any, Any]:
     ... ) == {"a": ["b", {0: 1, 1: 2, 2: 0}], "b": {"a": 0, "b": "c"}}
     True
 
-    :param config_dict: Input nested
-    :type config_dict: dict
+    :param config_dict: Input nested dictionary
     :returns: Updated dict
-    :rtype: dict
     """
     result_dict: Dict[Any, Any] = deepcopy(config_dict)
     for dict_k, dict_v in result_dict.items():
@@ -895,9 +861,7 @@ def list_items_recursive_sort(config_list: List[Any]) -> List[Any]:
     ['b', {0: 1, 1: 2, 2: 0}]
 
     :param config_list: Input list containing nested lists/dicts
-    :type config_list: list
     :returns: Updated list
-    :rtype: list
     """
     result_list: List[Any] = deepcopy(config_list)
     for list_idx, list_v in enumerate(result_list):
@@ -912,7 +876,7 @@ def list_items_recursive_sort(config_list: List[Any]) -> List[Any]:
 
 
 def string_to_jsonpath(*args: Any, force: bool = False) -> Union[str, JSONPath]:
-    """Get
+    """Get :class:`jsonpath_ng.JSONPath` for ``$.foo.bar`` like string
 
     >>> string_to_jsonpath(None, "$.foo.bar")
     Child(Child(Root(), Fields('foo')), Fields('bar'))
@@ -926,11 +890,8 @@ def string_to_jsonpath(*args: Any, force: bool = False) -> Union[str, JSONPath]:
     Fields('foo')
 
     :param args: Last arg as input string value, to be converted
-    :
-    :param force: force conversion even if input string is not detected as a jsonpath
-    :type force: bool
+    :param force: force conversion even if input string is not detected as a :class:`jsonpath_ng.JSONPath`
     :returns: Parsed value
-    :rtype: str or Child or Root
     """
     path_str: str = args[-1]
     if JSONPATH_MATCH.match(str(path_str)) or force:
@@ -992,17 +953,14 @@ def string_to_jsonpath(*args: Any, force: bool = False) -> Union[str, JSONPath]:
 
 
 def format_string(key: str, str_to_format: Any, **format_variables: Any) -> Any:
-    """Format "{foo}"
+    """Format ``"{foo}"``-like string
 
     >>> format_string(None, "foo {bar}, {baz} ?", **{"bar": "qux", "baz": "quux"})
     'foo qux, quux ?'
 
     :param key: Input item key
-    :type key: str
     :param str_to_format: Input item value, to be parsed
-    :type str_to_format: str
     :returns: Parsed value
-    :rtype: str
     """
     if not isinstance(str_to_format, str):
         return str_to_format
@@ -1033,20 +991,16 @@ def format_string(key: str, str_to_format: Any, **format_variables: Any) -> Any:
 def parse_jsonpath(
     key: str, jsonpath_obj: Union[str, jsonpath.Child], **values_dict: Dict[str, Any]
 ) -> Optional[str]:
-    """Parse jsonpah in jsonpath_obj using values_dict
+    """Parse jsonpah in ``jsonpath_obj`` using ``values_dict``
 
     >>> import jsonpath_ng.ext as jsonpath
     >>> parse_jsonpath(None, parse("$.foo.bar"), **{"foo": {"bar": "baz"}})
     'baz'
 
     :param key: Input item key
-    :type key: str
     :param jsonpath_obj: Input item value, to be parsed
-    :type jsonpath_obj: str or jsonpath.Child
     :param values_dict: Values used as args for parsing
-    :type values_dict: dict
     :returns: Parsed value
-    :rtype: str
     """
     if isinstance(jsonpath_obj, jsonpath.Child):
         match = jsonpath_obj.find(values_dict)
@@ -1062,9 +1016,7 @@ def nested_pairs2dict(pairs: Union[List[Any], Any]) -> Union[Any, Dict[Any, Any]
     {'foo': {'bar': 'baz'}}
 
     :param pairs: Pairs of key / value
-    :type pairs: list or Any
     :returns: Created dict
-    :rtype: dict or Any
     """
     d = {}
     try:
@@ -1081,14 +1033,11 @@ def nested_pairs2dict(pairs: Union[List[Any], Any]) -> Union[Any, Dict[Any, Any]
 def get_geometry_from_various(
     locations_config: List[Dict[str, Any]] = [], **query_args: Any
 ) -> BaseGeometry:
-    """Creates a shapely
+    """Creates a ``shapely.geometry`` using given query kwargs arguments
 
     :param locations_config: (optional) EODAG locations configuration
-    :
-    :param query_args: Query kwargs arguments from core.search() method
-    :type query_args: dict
+    :param query_args: Query kwargs arguments from :meth:`~eodag.api.core.EODataAccessGateway.search`
     :returns: shapely Geometry found
-    :rtype: :class:`shapely.geometry.BaseGeometry`
     :raises: :class:`ValueError`
     """
     geom = None
@@ -1150,11 +1099,11 @@ def get_geometry_from_various(
     for arg in query_locations.keys():
         if arg in locations_dict.keys():
             found = False
-            pattern = query_locations[arg]
+            pattern = rf"{query_locations[arg]}"
             attr = locations_dict[arg]["attr"]
             with shapefile.Reader(locations_dict[arg]["path"]) as shp:
                 for shaperec in shp.shapeRecords():
-                    if re.search(pattern, shaperec.record[attr]):
+                    if re.search(pattern, str(shaperec.record[attr])):
                         found = True
                         new_geom = shape(shaperec.shape)
                         # get geoms union
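Note: the ``str()`` cast in the hunk above guards against shapefile record attributes that are not strings (numeric region codes, for instance), since ``re.search`` only accepts text. A small standalone illustration:

    import re

    # re.search("75", 75056) would raise TypeError: expected string or bytes-like object
    assert re.search("75", str(75056)) is not None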
@@ -1183,7 +1132,7 @@ class MockResponse:
     def raise_for_status(self) -> None:
         """raises an exception when the status is not ok"""
        if self.status_code != 200:
-            raise HTTPError()
+            raise HTTPError(response=Response())
 
 
 def md5sum(file_path: str) -> str:
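Note: attaching an empty ``Response`` to the mocked ``HTTPError`` means handlers that probe ``err.response`` no longer hit ``None``. A self-contained illustration, not eodag code:

    from requests import HTTPError, Response

    try:
        raise HTTPError(response=Response())
    except HTTPError as err:
        # err.response is a Response object instead of None; a bare Response()
        # has status_code None, but attribute access no longer fails.
        print(err.response.status_code)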
@@ -1194,9 +1143,7 @@ def md5sum(file_path: str) -> str:
     'd41d8cd98f00b204e9800998ecf8427e'
 
     :param file_path: input file path
-    :type file_path: str
     :returns: MD5 checksum
-    :rtype: str
     """
     hash_md5 = hashlib.md5()
     with open(file_path, "rb") as f:
@@ -1212,16 +1159,14 @@ def obj_md5sum(data: Any) -> str:
     '37a6259cc0c1dae299a7866489dff0bd'
 
     :param data: JSON serializable input object
-    :type data: Any
     :returns: MD5 checksum
-    :rtype: str
     """
     return hashlib.md5(orjson.dumps(data, option=orjson.OPT_SORT_KEYS)).hexdigest()
 
 
 @functools.lru_cache()
 def cached_parse(str_to_parse: str) -> JSONPath:
-    """Cached jsonpath_ng.ext.parse
+    """Cached :func:`jsonpath_ng.ext.parse`
 
     >>> cached_parse.cache_clear()
     >>> cached_parse("$.foo")
@@ -1237,10 +1182,8 @@ def cached_parse(str_to_parse: str) -> JSONPath:
     >>> cached_parse.cache_info()
     CacheInfo(hits=1, misses=2, maxsize=128, currsize=2)
 
-    :param str_to_parse: string to parse as
-    :
-    :returns: parsed jsonpath
-    :rtype: :class:`jsonpath_ng.JSONPath`
+    :param str_to_parse: string to parse as :class:`jsonpath_ng.JSONPath`
+    :returns: parsed :class:`jsonpath_ng.JSONPath`
     """
     return parse(str_to_parse)
 
@@ -1254,12 +1197,10 @@ def _mutable_cached_yaml_load(config_path: str) -> Any:
 
 
 def cached_yaml_load(config_path: str) -> Dict[str, Any]:
-    """Cached yaml.load
+    """Cached :func:`yaml.load`
 
     :param config_path: path to the yaml configuration file
-    :type config_path: str
     :returns: loaded yaml configuration
-    :rtype: dict
     """
     return copy_deepcopy(_mutable_cached_yaml_load(config_path))
 
@@ -1271,14 +1212,12 @@ def _mutable_cached_yaml_load_all(config_path: str) -> List[Any]:
 
 
 def cached_yaml_load_all(config_path: str) -> List[Any]:
-    """Cached yaml.load_all
+    """Cached :func:`yaml.load_all`
 
     Load all configurations stored in the configuration file as separated yaml documents
 
     :param config_path: path to the yaml configuration file
-    :type config_path: str
     :returns: list of configurations
-    :rtype: list
     """
     return copy_deepcopy(_mutable_cached_yaml_load_all(config_path))
 
@@ -1289,11 +1228,8 @@ def get_bucket_name_and_prefix(
     """Extract bucket name and prefix from URL
 
     :param url: (optional) URL to use as product.location
-    :type url: str
     :param bucket_path_level: (optional) bucket location index in path.split('/')
-    :type bucket_path_level: int
     :returns: bucket_name and prefix as str
-    :rtype: tuple
     """
     bucket, prefix = None, None
 
@@ -1322,9 +1258,7 @@ def flatten_top_directories(
     """Flatten directory structure, removing common empty sub-directories
 
     :param nested_dir_root: Absolute path of the directory structure to flatten
-    :type nested_dir_root: str
     :param common_subdirs_path: (optional) Absolute path of the desired subdirectory to remove
-    :type common_subdirs_path: str
     """
     if not common_subdirs_path:
         subpaths_list = [p for p in Path(nested_dir_root).glob("**/*") if p.is_file()]
@@ -1343,12 +1277,11 @@ def flatten_top_directories(
 
 def deepcopy(sth: Any) -> Any:
     """Customized and faster deepcopy inspired by https://stackoverflow.com/a/45858907
-
+
+    ``_copy_list`` and ``_copy_dict`` dispatchers available for the moment
 
     :param sth: Object to copy
-    :type sth: Any
     :returns: Copied object
-    :rtype: Any
     """
     _dispatcher: Dict[Type[Any], Callable[..., Any]] = {}
 
@@ -1391,9 +1324,7 @@ def parse_header(header: str) -> Message:
     'example.txt'
 
     :param header: header to parse
-    :type header: str
     :returns: parsed header
-    :rtype: :class:`~email.message.Message`
     """
     m = Message()
     m["content-type"] = header
@@ -1412,7 +1343,7 @@ def cast_scalar_value(value: Any, new_type: Any) -> Any:
 
     :param value: the scalar value to convert
     :param new_type: the wanted type
-    :returns: scalar value converted to new_type
+    :returns: scalar ``value`` converted to ``new_type``
     """
     if isinstance(value, str) and new_type is bool:
         # Bool is a type with special meaning in Python, thus the special
@@ -1435,7 +1366,7 @@ def cast_scalar_value(value: Any, new_type: Any) -> Any:
 class StreamResponse:
     """Represents a streaming response"""
 
-    content:
+    content: Iterable[bytes]
     headers: Optional[Mapping[str, str]] = None
     media_type: Optional[str] = None
     status_code: Optional[int] = None
@@ -1458,11 +1389,10 @@ def guess_extension(type: str) -> Optional[str]:
 
 def get_ssl_context(ssl_verify: bool) -> ssl.SSLContext:
     """
-    Returns an SSL context based on ssl_verify argument.
-
-    :
+    Returns an SSL context based on ``ssl_verify`` argument.
+
+    :param ssl_verify: :attr:`~eodag.config.PluginConfig.ssl_verify` parameter
     :returns: An SSL context object.
-    :rtype: ssl.SSLContext
     """
     ctx = ssl.create_default_context()
     if not ssl_verify:
@@ -1472,3 +1402,34 @@ def get_ssl_context(ssl_verify: bool) -> ssl.SSLContext:
     ctx.check_hostname = True
     ctx.verify_mode = ssl.CERT_REQUIRED
     return ctx
+
+
+def sort_dict(input_dict: Dict[str, Any]) -> Dict[str, Any]:
+    """
+    Recursively sorts a dict by keys.
+
+    :param input_dict: input dict
+    :returns: sorted dict
+
+    >>> sort_dict({"b": {"c": 1, "a": 2, "b": 3}, "a": 4})
+    {'a': 4, 'b': {'a': 2, 'b': 3, 'c': 1}}
+    """
+    return {
+        k: sort_dict(v) if isinstance(v, dict) else v
+        for k, v in sorted(input_dict.items())
+    }
+
+
+def dict_md5sum(input_dict: Dict[str, Any]) -> str:
+    """
+    Hash nested dictionary
+
+    :param input_dict: input dict
+    :returns: hash
+
+    >>> hd = dict_md5sum({"b": {"c": 1, "a": 2, "b": 3}, "a": 4})
+    >>> hd
+    'a195bcef1bb3b419e9e74b7cc5db8098'
+    >>> assert(dict_md5sum({"a": 4, "b": {"b": 3, "c": 1, "a": 2}}) == hd)
+    """
+    return obj_md5sum(sort_dict(input_dict))