eodag 3.10.0__py3-none-any.whl → 4.0.0a1__py3-none-any.whl
This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- eodag/api/core.py +378 -419
- eodag/api/product/__init__.py +3 -3
- eodag/api/product/_product.py +68 -40
- eodag/api/product/drivers/__init__.py +3 -5
- eodag/api/product/drivers/base.py +1 -18
- eodag/api/product/metadata_mapping.py +151 -215
- eodag/api/search_result.py +13 -7
- eodag/cli.py +72 -395
- eodag/config.py +46 -50
- eodag/plugins/apis/base.py +2 -2
- eodag/plugins/apis/ecmwf.py +20 -21
- eodag/plugins/apis/usgs.py +37 -33
- eodag/plugins/authentication/base.py +1 -3
- eodag/plugins/crunch/filter_date.py +3 -3
- eodag/plugins/crunch/filter_latest_intersect.py +2 -2
- eodag/plugins/crunch/filter_latest_tpl_name.py +1 -1
- eodag/plugins/download/aws.py +45 -41
- eodag/plugins/download/base.py +13 -14
- eodag/plugins/download/http.py +65 -65
- eodag/plugins/manager.py +28 -29
- eodag/plugins/search/__init__.py +3 -4
- eodag/plugins/search/base.py +128 -77
- eodag/plugins/search/build_search_result.py +105 -107
- eodag/plugins/search/cop_marine.py +44 -47
- eodag/plugins/search/csw.py +33 -33
- eodag/plugins/search/qssearch.py +335 -354
- eodag/plugins/search/stac_list_assets.py +1 -1
- eodag/plugins/search/static_stac_search.py +31 -31
- eodag/resources/{product_types.yml → collections.yml} +2353 -2429
- eodag/resources/ext_collections.json +1 -1
- eodag/resources/providers.yml +2427 -2719
- eodag/resources/stac_provider.yml +46 -90
- eodag/types/queryables.py +55 -91
- eodag/types/search_args.py +1 -1
- eodag/utils/__init__.py +94 -21
- eodag/utils/exceptions.py +6 -6
- eodag/utils/free_text_search.py +3 -3
- {eodag-3.10.0.dist-info → eodag-4.0.0a1.dist-info}/METADATA +10 -87
- eodag-4.0.0a1.dist-info/RECORD +92 -0
- {eodag-3.10.0.dist-info → eodag-4.0.0a1.dist-info}/entry_points.txt +0 -4
- eodag/plugins/authentication/oauth.py +0 -60
- eodag/plugins/download/creodias_s3.py +0 -71
- eodag/plugins/download/s3rest.py +0 -351
- eodag/plugins/search/data_request_search.py +0 -565
- eodag/resources/stac.yml +0 -294
- eodag/resources/stac_api.yml +0 -2105
- eodag/rest/__init__.py +0 -24
- eodag/rest/cache.py +0 -70
- eodag/rest/config.py +0 -67
- eodag/rest/constants.py +0 -26
- eodag/rest/core.py +0 -764
- eodag/rest/errors.py +0 -210
- eodag/rest/server.py +0 -604
- eodag/rest/server.wsgi +0 -6
- eodag/rest/stac.py +0 -1032
- eodag/rest/templates/README +0 -1
- eodag/rest/types/__init__.py +0 -18
- eodag/rest/types/collections_search.py +0 -44
- eodag/rest/types/eodag_search.py +0 -386
- eodag/rest/types/queryables.py +0 -174
- eodag/rest/types/stac_search.py +0 -272
- eodag/rest/utils/__init__.py +0 -207
- eodag/rest/utils/cql_evaluate.py +0 -119
- eodag/rest/utils/rfc3339.py +0 -64
- eodag-3.10.0.dist-info/RECORD +0 -116
- {eodag-3.10.0.dist-info → eodag-4.0.0a1.dist-info}/WHEEL +0 -0
- {eodag-3.10.0.dist-info → eodag-4.0.0a1.dist-info}/licenses/LICENSE +0 -0
- {eodag-3.10.0.dist-info → eodag-4.0.0a1.dist-info}/top_level.txt +0 -0
eodag/rest/types/stac_search.py
DELETED
@@ -1,272 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023, CS GROUP - France, https://www.csgroup.eu/
-#
-# This file is part of EODAG project
-# https://www.github.com/CS-SI/EODAG
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Model describing a STAC search POST request"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, Union
-
-import geojson
-from pydantic import (
-    BaseModel,
-    ConfigDict,
-    Field,
-    PositiveInt,
-    StringConstraints,
-    field_serializer,
-    field_validator,
-    model_validator,
-)
-from shapely.geometry import (
-    GeometryCollection,
-    LineString,
-    MultiLineString,
-    MultiPoint,
-    MultiPolygon,
-    Point,
-    Polygon,
-    shape,
-)
-from shapely.geometry.base import GEOMETRY_TYPES, BaseGeometry
-
-from eodag.rest.utils.rfc3339 import rfc3339_str_to_datetime, str_to_interval
-from eodag.utils.exceptions import ValidationError
-
-if TYPE_CHECKING:
-    from typing_extensions import Self
-
-NumType = Union[float, int]
-
-BBox = Union[
-    tuple[NumType, NumType, NumType, NumType],
-    tuple[NumType, NumType, NumType, NumType, NumType, NumType],
-]
-
-Geometry = Union[
-    Point,
-    MultiPoint,
-    LineString,
-    MultiLineString,
-    Polygon,
-    MultiPolygon,
-    GeometryCollection,
-]
-
-
-Direction = Annotated[Literal["asc", "desc"], StringConstraints(min_length=1)]
-
-
-class SortBy(BaseModel):
-    """
-    A class representing a parameter with which we want to sort results and its sorting order in a
-    POST search
-
-    :param field: The name of the parameter with which we want to sort results
-    :param direction: The sorting order of the parameter
-    """
-
-    __pydantic_config__ = ConfigDict(extra="forbid")
-
-    field: Annotated[str, StringConstraints(min_length=1, strip_whitespace=True)]
-    direction: Direction
-
-
-class SearchPostRequest(BaseModel):
-    """
-    class which describes the body of a search request
-
-    Overrides the validation for datetime and spatial filter from the base request model.
-    """
-
-    model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True)
-
-    provider: Optional[str] = None
-    collections: Optional[list[str]] = None
-    ids: Optional[list[str]] = None
-    bbox: Optional[BBox] = None
-    intersects: Optional[Geometry] = None
-    datetime: Optional[str] = None
-    limit: Optional[PositiveInt] = Field(  # type: ignore
-        default=None, description="Maximum number of items per page."
-    )
-    page: Optional[PositiveInt] = Field(  # type: ignore
-        default=None, description="Page number, must be a positive integer."
-    )
-    query: Optional[dict[str, Any]] = None
-    filter: Optional[dict[str, Any]] = None
-    filter_lang: Optional[str] = Field(
-        default=None,
-        alias="filter-lang",
-        description="The language used for filtering.",
-        validate_default=True,
-    )
-    sortby: Optional[list[SortBy]] = None
-    crunch: Optional[str] = None
-
-    @field_serializer("intersects")
-    def serialize_intersects(
-        self, intersects: Optional[Geometry]
-    ) -> Optional[dict[str, Any]]:
-        """Serialize intersects from shapely to a proper dict"""
-        if intersects:
-            return geojson.loads(geojson.dumps(intersects))  # type: ignore
-        return None
-
-    @model_validator(mode="after")
-    def check_filter_lang(self) -> Self:
-        """Verify filter-lang has correct value"""
-        if not self.filter_lang and self.filter:
-            self.filter_lang = "cql2-json"
-        if self.filter_lang and not self.filter:
-            raise ValueError("filter-lang is set but filter is missing")
-        if self.filter_lang != "cql2-json" and self.filter:
-            raise ValueError('Only filter language "cql2-json" is accepted')
-        return self
-
-    @model_validator(mode="before")
-    @classmethod
-    def only_one_spatial(cls, values: dict[str, Any]) -> dict[str, Any]:
-        """Check bbox and intersects are not both supplied."""
-        if "intersects" in values and "bbox" in values:
-            raise ValueError("intersects and bbox parameters are mutually exclusive")
-        return values
-
-    @property
-    def start_date(self) -> Optional[str]:
-        """Extract the start date from the datetime string."""
-        start = str_to_interval(self.datetime)[0]
-        return start.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z" if start else None
-
-    @property
-    def end_date(self) -> Optional[str]:
-        """Extract the end date from the datetime string."""
-        end = str_to_interval(self.datetime)[1]
-        return end.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z" if end else None
-
-    @field_validator("ids", "collections", mode="before")
-    @classmethod
-    def str_to_str_list(cls, v: Union[str, list[str]]) -> list[str]:
-        """Convert ids and collections strings to list of strings"""
-        if isinstance(v, str):
-            return [i.strip() for i in v.split(",")]
-        return v
-
-    @field_validator("intersects", mode="before")
-    @classmethod
-    def validate_intersects(cls, v: Union[dict[str, Any], Geometry]) -> Geometry:
-        """Verify format of intersects"""
-        if isinstance(v, BaseGeometry):
-            return v
-
-        if isinstance(v, dict) and v.get("type") in GEOMETRY_TYPES:  # type: ignore
-            return shape(v)
-
-        raise ValueError("Not a valid geometry")
-
-    @field_validator("bbox")
-    @classmethod
-    def validate_bbox(cls, v: BBox) -> BBox:
-        """Check order of supplied bbox coordinates."""
-        # Validate order
-        if len(v) == 4:
-            xmin, ymin, xmax, ymax = v
-        else:
-            xmin, ymin, min_elev, xmax, ymax, max_elev = v
-            if max_elev < min_elev:
-                raise ValueError(
-                    "Maximum elevation must greater than minimum elevation"
-                )
-
-        if xmax < xmin:
-            raise ValueError("Maximum longitude must be greater than minimum longitude")
-
-        if ymax < ymin:
-            raise ValueError("Maximum longitude must be greater than minimum longitude")
-
-        # Validate against WGS84
-        if xmin < -180 or ymin < -90 or xmax > 180 or ymax > 90:
-            raise ValueError("Bounding box must be within (-180, -90, 180, 90)")
-
-        return v
-
-    @field_validator("datetime")
-    @classmethod
-    def validate_datetime(cls, v: str) -> str:
-        """Validate datetime."""
-        if "/" in v:
-            values = v.split("/")
-        else:
-            # Single date is interpreted as end date
-            values = ["..", v]
-
-        dates: list[str] = []
-        for value in values:
-            if value == ".." or value == "":
-                dates.append("..")
-                continue
-
-            try:
-                dates.append(
-                    rfc3339_str_to_datetime(value).strftime("%Y-%m-%dT%H:%M:%SZ")
-                )
-            except ValidationError as e:
-                raise ValueError(e)
-
-        if dates[0] == ".." and dates[1] == "..":
-            raise ValueError(
-                "Invalid datetime range, both ends of range may not be open"
-            )
-
-        if ".." not in dates and dates[0] > dates[1]:
-            raise ValueError(
-                "Invalid datetime range, must match format (begin_date, end_date)"
-            )
-
-        return v
-
-    @property
-    def spatial_filter(self) -> Optional[Geometry]:
-        """Return a geojson-pydantic object representing the spatial filter for the search
-        request.
-
-        Check for both because the ``bbox`` and ``intersects`` parameters are
-        mutually exclusive.
-        """
-        if self.bbox:
-            return Polygon.from_bounds(*self.bbox)  # type: ignore
-
-        if self.intersects:
-            return self.intersects
-        return None
-
-
-def sortby2list(
-    v: Optional[str],
-) -> Optional[list[SortBy]]:
-    """
-    Convert sortby filter parameter GET syntax to POST syntax
-    """
-    if not v:
-        return None
-    sortby: list[SortBy] = []
-    for sortby_param in v.split(","):
-        sortby_param = sortby_param.strip()
-        direction: Direction = "desc" if sortby_param.startswith("-") else "asc"
-        field = sortby_param.lstrip("+-")
-        sortby.append(SortBy(field=field, direction=direction))
-    return sortby
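Context note (not part of the diff): a minimal, hypothetical sketch of how the removed SearchPostRequest model and sortby2list helper could be exercised against eodag <= 3.10.0; the request body values are illustrative only.

# Hypothetical usage of the removed module (eodag <= 3.10.0 only; values are illustrative).
from eodag.rest.types.stac_search import SearchPostRequest, sortby2list

body = {
    "collections": "S2_MSI_L1C",               # a comma-separated string is split into a list
    "bbox": [0.25, 43.2, 2.8, 43.9],           # validated against WGS84 bounds and ordering
    "datetime": "2021-01-01T00:00:00Z/..",     # open-ended end of the interval
    "sortby": sortby2list("-start_datetime"),  # GET syntax converted to the POST SortBy model
}
request = SearchPostRequest(**body)
print(request.start_date)                      # "2021-01-01T00:00:00.000Z"
print(request.spatial_filter.bounds)           # bbox turned into a shapely Polygon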
eodag/rest/utils/__init__.py
DELETED
@@ -1,207 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023, CS Systemes d'Information, https://www.csgroup.eu/
-#
-# This file is part of EODAG project
-# https://www.github.com/CS-SI/EODAG
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""EODAG REST utils"""
-from __future__ import annotations
-
-import glob
-import logging
-import os
-from io import BufferedReader
-from shutil import make_archive, rmtree
-from typing import TYPE_CHECKING, Any, Callable, Iterator, NamedTuple, Optional, Union
-from urllib.parse import unquote_plus, urlencode
-
-import orjson
-from fastapi import Request
-from pydantic import ValidationError as pydanticValidationError
-
-from eodag.plugins.crunch.filter_latest_intersect import FilterLatestIntersect
-from eodag.plugins.crunch.filter_latest_tpl_name import FilterLatestByName
-from eodag.plugins.crunch.filter_overlap import FilterOverlap
-from eodag.utils import StreamResponse
-from eodag.utils.dates import get_date, get_datetime
-from eodag.utils.exceptions import ValidationError
-
-if TYPE_CHECKING:
-    from eodag.rest.types.stac_search import SearchPostRequest
-
-# exportable content
-__all__ = ["get_date", "get_datetime"]
-
-logger = logging.getLogger("eodag.rest.utils")
-
-# Path of the liveness endpoint
-LIVENESS_PROBE_PATH = "/_mgmt/ping"
-
-
-class Cruncher(NamedTuple):
-    """Type hinted Cruncher namedTuple"""
-
-    clazz: Callable[..., Any]
-    config_params: list[str]
-
-
-crunchers = {
-    "latestIntersect": Cruncher(FilterLatestIntersect, []),
-    "latestByName": Cruncher(FilterLatestByName, ["name_pattern"]),
-    "overlap": Cruncher(FilterOverlap, ["minimum_overlap"]),
-}
-
-
-def format_pydantic_error(e: pydanticValidationError) -> str:
-    """Format Pydantic ValidationError
-
-    :param e: A Pydantic ValidationError object
-    :tyype e: pydanticValidationError
-    """
-    error_header = f"{e.error_count()} error(s). "
-
-    error_messages = [
-        f'{err["loc"][0]}: {err["msg"]}' if err["loc"] else err["msg"]
-        for err in e.errors()
-    ]
-    return error_header + "; ".join(set(error_messages))
-
-
-def is_dict_str_any(var: Any) -> bool:
-    """Verify whether the variable is of type dict[str, Any]"""
-    if isinstance(var, dict):
-        return all(isinstance(k, str) for k in var.keys())  # type: ignore
-    return False
-
-
-def str2list(v: Optional[str]) -> Optional[list[str]]:
-    """Convert string to list base on , delimiter."""
-    if v:
-        return v.split(",")
-    return None
-
-
-def str2json(k: str, v: Optional[str] = None) -> Optional[dict[str, Any]]:
-    """decoding a URL parameter and then parsing it as JSON."""
-    if not v:
-        return None
-    try:
-        return orjson.loads(unquote_plus(v))
-    except orjson.JSONDecodeError as e:
-        raise ValidationError(f"{k}: Incorrect JSON object") from e
-
-
-def flatten_list(nested_list: Union[Any, list[Any]]) -> list[Any]:
-    """Flatten a nested list structure into a single list."""
-    if not isinstance(nested_list, list):
-        return [nested_list]
-    else:
-        flattened: list[Any] = []
-        for element in nested_list:
-            flattened.extend(flatten_list(element))
-        return flattened
-
-
-def list_to_str_list(input_list: list[Any]) -> list[str]:
-    """Attempt to convert a list of any type to a list of strings."""
-    try:
-        # Try to convert each element to a string
-        return [str(element) for element in input_list]
-    except Exception as e:
-        # Raise an exception if any element cannot be converted
-        raise TypeError(f"Failed to convert to list[str]: {e}") from e
-
-
-def get_next_link(
-    request: Request,
-    search_request: SearchPostRequest,
-    total_results: Optional[int],
-    items_per_page: int,
-) -> Optional[dict[str, Any]]:
-    """Generate next link URL and body"""
-    body = search_request.model_dump(exclude_none=True)
-    if "bbox" in body:
-        # bbox is tuple
-        body["bbox"] = list(body["bbox"])
-
-    params = dict(request.query_params)
-
-    page = int(body.get("page", 0) or params.get("page", 0)) or 1
-
-    if total_results is None or items_per_page * page >= total_results:
-        return None
-
-    url = str(request.state.url)
-    if request.method == "POST":
-        body["page"] = page + 1
-    else:
-        params["page"] = str(page + 1)
-        url += f"?{urlencode(params)}"
-
-    next: dict[str, Any] = {
-        "rel": "next",
-        "href": url,
-        "title": "Next page",
-        "method": request.method,
-        "type": "application/geo+json",
-    }
-    if request.method == "POST":
-        next["body"] = body
-    return next
-
-
-def read_file_chunks_and_delete(
-    opened_file: BufferedReader, chunk_size: int = 64 * 1024
-) -> Iterator[bytes]:
-    """Yield file chunks and delete file when finished."""
-    while True:
-        data = opened_file.read(chunk_size)
-        if not data:
-            opened_file.close()
-            os.remove(opened_file.name)
-            logger.debug("%s deleted after streaming complete", opened_file.name)
-            break
-        yield data
-    yield data
-
-
-def file_to_stream(
-    file_path: str,
-) -> StreamResponse:
-    """Break a file into chunck and return it as a byte stream"""
-    if os.path.isdir(file_path):
-        # do not zip if dir contains only one file
-        all_filenames = [
-            f
-            for f in glob.glob(os.path.join(file_path, "**", "*"), recursive=True)
-            if os.path.isfile(f)
-        ]
-        if len(all_filenames) == 1:
-            filepath_to_stream = all_filenames[0]
-        else:
-            filepath_to_stream = f"{file_path}.zip"
-            logger.debug(
-                "Building archive for downloaded product path %s",
-                filepath_to_stream,
-            )
-            make_archive(file_path, "zip", file_path)
-            rmtree(file_path)
-    else:
-        filepath_to_stream = file_path
-
-    filename = os.path.basename(filepath_to_stream)
-    return StreamResponse(
-        content=read_file_chunks_and_delete(open(filepath_to_stream, "rb")),
-        filename=filename,
-    )
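Context note (not part of the diff): a small, hypothetical sketch of the removed streaming helper, runnable only against eodag <= 3.10.0; the temporary file and its size are illustrative.

# Hypothetical usage of the removed helper (eodag <= 3.10.0 only; file content is illustrative).
import tempfile
from eodag.rest.utils import read_file_chunks_and_delete

with tempfile.NamedTemporaryFile(delete=False, suffix=".bin") as tmp:
    tmp.write(b"x" * 200_000)                  # a few chunks at the default 64 KiB chunk size

chunks = read_file_chunks_and_delete(open(tmp.name, "rb"))
print(sum(len(chunk) for chunk in chunks))     # 200000; the file is deleted once fully streamed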
eodag/rest/utils/cql_evaluate.py
DELETED
@@ -1,119 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023, CS Systemes d'Information, https://www.csgroup.eu/
-#
-# This file is part of EODAG project
-# https://www.github.com/CS-SI/EODAG
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-from datetime import datetime as dt
-from typing import Any, Optional, Union
-
-from pygeofilter import ast
-from pygeofilter.backends.evaluator import Evaluator, handle
-from pygeofilter.values import Geometry, Interval
-
-simpleNode = Union[ast.Attribute, str, int, complex, float, list[Any], tuple[Any, ...]]
-
-
-class EodagEvaluator(Evaluator):
-    """
-    Evaluate a cql2 json expression and transform it to a STAC args object
-    """
-
-    @handle(ast.Attribute, str, int, complex, float, list, tuple)
-    def attribute(self, node: simpleNode, *_) -> simpleNode:
-        """handle attribute and literal"""
-        return node
-
-    @handle(Geometry)
-    def spatial(self, node: Geometry) -> dict[str, Any]:
-        """handle geometry"""
-        return node.geometry
-
-    @handle(dt)
-    def temporal(self, node: dt) -> str:
-        """handle datetime"""
-        return node.strftime("%Y-%m-%dT%H:%M:%SZ")
-
-    @handle(Interval)
-    def interval(self, _, *interval: Any) -> list[Any]:
-        """handle datetime interval"""
-        return list(interval)
-
-    @handle(
-        ast.GeometryIntersects,
-        ast.Equal,
-        ast.LessEqual,
-        ast.GreaterEqual,
-        ast.TimeOverlaps,
-        ast.In,
-    )
-    def predicate(
-        self, node: ast.Predicate, lhs: Any, rhs: Any
-    ) -> Optional[dict[str, Any]]:
-        """
-        Handle predicates
-        Verify the property is first attribute in each predicate
-        """
-        if not isinstance(lhs, ast.Attribute):
-            raise ValueError(
-                f'invalid cql syntax, first argument in "{node.op.value}" must be a property'
-            )
-
-        if isinstance(node, ast.Equal) and not isinstance(
-            rhs, (int, float, complex, str)
-        ):
-            raise ValueError(
-                f'second argument in property "{lhs.name}" must be a string or a numeric value'
-            )
-
-        if isinstance(node, ast.GeometryIntersects) and not lhs.name == "geometry":
-            raise ValueError(
-                f'operator {node.op.value} is not supported for property "{lhs.name}"'
-            )
-
-        if isinstance(node, (ast.Equal, ast.GeometryIntersects)):
-            return {lhs.name: rhs}
-
-        if isinstance(node, ast.LessEqual):
-            if not isinstance(node.rhs, dt):
-                raise ValueError(
-                    f'operator "<=" is not supported for property "{lhs.name}"'
-                )
-            return {"end_datetime": rhs}
-
-        if isinstance(node, ast.GreaterEqual):
-            if not isinstance(node.rhs, dt):
-                raise ValueError(
-                    f'operator ">=" is not supported for property "{lhs.name}"'
-                )
-            return {"start_datetime": rhs}
-
-        if isinstance(node, ast.TimeOverlaps):
-            return {"start_datetime": rhs[0], "end_datetime": rhs[1]}
-
-        return None
-
-    @handle(ast.In)
-    def contains(self, node: ast.In, lhs: Any, *rhs: Any):
-        """handle in keyword"""
-        if not isinstance(node.sub_nodes, list):  # type: ignore
-            raise ValueError(
-                f'property "{lhs.name}" expects a value in list format with operator "in"'
-            )
-        return {lhs.name: list(rhs)}
-
-    @handle(ast.And)
-    def combination(self, _, lhs: dict[str, str], rhs: dict[str, str]):
-        """handle combinations"""
-        return {**lhs, **rhs}
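Context note (not part of the diff): a hypothetical sketch of how the removed EodagEvaluator could turn a CQL2 filter into EODAG search arguments, assuming pygeofilter's cql2-json parser is available; the exact filter encoding accepted may vary with the pygeofilter version, and it only runs against eodag <= 3.10.0.

# Hypothetical usage of the removed evaluator (eodag <= 3.10.0 only; filter content is illustrative).
from pygeofilter.parsers.cql2_json import parse as parse_cql2_json
from eodag.rest.utils.cql_evaluate import EodagEvaluator

cql2_filter = {
    "op": "and",
    "args": [
        {"op": "=", "args": [{"property": "collection"}, "S2_MSI_L1C"]},
        {"op": ">=", "args": [{"property": "datetime"}, {"timestamp": "2021-01-01T00:00:00Z"}]},
    ],
}
# ">=" on a datetime becomes "start_datetime", "=" becomes a plain key/value,
# and "and" merges the two dicts into one args object.
print(EodagEvaluator().evaluate(parse_cql2_json(cql2_filter)))
# {'collection': 'S2_MSI_L1C', 'start_datetime': '2021-01-01T00:00:00Z'}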
eodag/rest/utils/rfc3339.py
DELETED
@@ -1,64 +0,0 @@
-# -*- coding: utf-8 -*-
-# Copyright 2023, CS Systemes d'Information, https://www.csgroup.eu/
-#
-# This file is part of EODAG project
-# https://www.github.com/CS-SI/EODAG
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-import datetime
-from typing import Optional
-
-from eodag.utils.dates import rfc3339_str_to_datetime
-
-
-def str_to_interval(
-    interval: Optional[str],
-) -> tuple[Optional[datetime.datetime], Optional[datetime.datetime]]:
-    """Extract a tuple of datetimes from an interval string.
-
-    Interval strings are defined by
-    OGC API - Features Part 1 for the datetime query parameter value. These follow the
-    form '1985-04-12T23:20:50.52Z/1986-04-12T23:20:50.52Z', and allow either the start
-    or end (but not both) to be open-ended with '..' or ''.
-
-    :param interval: The interval string to convert to a :class:`datetime.datetime`
-        tuple.
-
-    :raises: :class:`ValueError`
-    """
-    if not interval:
-        return (None, None)
-
-    if "/" not in interval:
-        date = rfc3339_str_to_datetime(interval)
-        return (date, date)
-
-    values = interval.split("/")
-    if len(values) != 2:
-        raise ValueError(
-            f"Interval string '{interval}' contains more than one forward slash."
-        )
-
-    start = None
-    end = None
-    if values[0] not in ["..", ""]:
-        start = rfc3339_str_to_datetime(values[0])
-    if values[1] not in ["..", ""]:
-        end = rfc3339_str_to_datetime(values[1])
-
-    if start is None and end is None:
-        raise ValueError("Double open-ended intervals are not allowed.")
-    if start is not None and end is not None and start > end:
-        raise ValueError("Start datetime cannot be before end datetime.")
-    else:
-        return start, end
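Context note (not part of the diff): a short, hypothetical sketch of the removed interval parser's behaviour as described by its docstring, runnable only against eodag <= 3.10.0.

# Hypothetical usage of the removed parser (eodag <= 3.10.0 only).
from eodag.rest.utils.rfc3339 import str_to_interval

print(str_to_interval("2021-01-01T00:00:00Z/2021-02-01T00:00:00Z"))  # (start datetime, end datetime)
print(str_to_interval("2021-01-01T00:00:00Z/.."))                    # open-ended end -> (start, None)
print(str_to_interval(None))                                         # (None, None)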