eodag 3.1.0b1__py3-none-any.whl → 3.2.0__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- eodag/api/core.py +69 -63
- eodag/api/product/_assets.py +49 -13
- eodag/api/product/_product.py +41 -30
- eodag/api/product/drivers/__init__.py +81 -4
- eodag/api/product/drivers/base.py +65 -4
- eodag/api/product/drivers/generic.py +65 -0
- eodag/api/product/drivers/sentinel1.py +97 -0
- eodag/api/product/drivers/sentinel2.py +95 -0
- eodag/api/product/metadata_mapping.py +85 -79
- eodag/api/search_result.py +13 -23
- eodag/cli.py +4 -4
- eodag/config.py +77 -80
- eodag/plugins/apis/base.py +1 -1
- eodag/plugins/apis/ecmwf.py +12 -15
- eodag/plugins/apis/usgs.py +12 -11
- eodag/plugins/authentication/aws_auth.py +16 -13
- eodag/plugins/authentication/base.py +5 -3
- eodag/plugins/authentication/header.py +3 -3
- eodag/plugins/authentication/keycloak.py +4 -4
- eodag/plugins/authentication/oauth.py +7 -3
- eodag/plugins/authentication/openid_connect.py +20 -14
- eodag/plugins/authentication/sas_auth.py +4 -4
- eodag/plugins/authentication/token.py +7 -7
- eodag/plugins/authentication/token_exchange.py +1 -1
- eodag/plugins/base.py +4 -4
- eodag/plugins/crunch/base.py +4 -4
- eodag/plugins/crunch/filter_date.py +4 -4
- eodag/plugins/crunch/filter_latest_intersect.py +6 -6
- eodag/plugins/crunch/filter_latest_tpl_name.py +7 -7
- eodag/plugins/crunch/filter_overlap.py +4 -4
- eodag/plugins/crunch/filter_property.py +4 -4
- eodag/plugins/download/aws.py +137 -77
- eodag/plugins/download/base.py +8 -17
- eodag/plugins/download/creodias_s3.py +2 -2
- eodag/plugins/download/http.py +30 -32
- eodag/plugins/download/s3rest.py +5 -4
- eodag/plugins/manager.py +10 -20
- eodag/plugins/search/__init__.py +6 -5
- eodag/plugins/search/base.py +38 -42
- eodag/plugins/search/build_search_result.py +286 -336
- eodag/plugins/search/cop_marine.py +22 -12
- eodag/plugins/search/creodias_s3.py +8 -78
- eodag/plugins/search/csw.py +11 -11
- eodag/plugins/search/data_request_search.py +19 -18
- eodag/plugins/search/qssearch.py +84 -151
- eodag/plugins/search/stac_list_assets.py +85 -0
- eodag/plugins/search/static_stac_search.py +4 -4
- eodag/resources/ext_product_types.json +1 -1
- eodag/resources/product_types.yml +848 -398
- eodag/resources/providers.yml +1038 -1115
- eodag/resources/stac_api.yml +2 -2
- eodag/resources/user_conf_template.yml +10 -9
- eodag/rest/cache.py +2 -2
- eodag/rest/config.py +3 -3
- eodag/rest/core.py +24 -24
- eodag/rest/errors.py +5 -5
- eodag/rest/server.py +3 -11
- eodag/rest/stac.py +41 -38
- eodag/rest/types/collections_search.py +3 -3
- eodag/rest/types/eodag_search.py +23 -23
- eodag/rest/types/queryables.py +40 -28
- eodag/rest/types/stac_search.py +15 -25
- eodag/rest/utils/__init__.py +11 -21
- eodag/rest/utils/cql_evaluate.py +6 -6
- eodag/rest/utils/rfc3339.py +2 -2
- eodag/types/__init__.py +97 -29
- eodag/types/bbox.py +2 -2
- eodag/types/download_args.py +2 -2
- eodag/types/queryables.py +5 -2
- eodag/types/search_args.py +4 -4
- eodag/types/whoosh.py +1 -3
- eodag/utils/__init__.py +82 -41
- eodag/utils/exceptions.py +2 -2
- eodag/utils/import_system.py +2 -2
- eodag/utils/requests.py +2 -2
- eodag/utils/rest.py +2 -2
- eodag/utils/s3.py +231 -0
- eodag/utils/stac_reader.py +10 -10
- {eodag-3.1.0b1.dist-info → eodag-3.2.0.dist-info}/METADATA +12 -10
- eodag-3.2.0.dist-info/RECORD +113 -0
- {eodag-3.1.0b1.dist-info → eodag-3.2.0.dist-info}/WHEEL +1 -1
- {eodag-3.1.0b1.dist-info → eodag-3.2.0.dist-info}/entry_points.txt +1 -0
- eodag-3.1.0b1.dist-info/RECORD +0 -108
- {eodag-3.1.0b1.dist-info → eodag-3.2.0.dist-info/licenses}/LICENSE +0 -0
- {eodag-3.1.0b1.dist-info → eodag-3.2.0.dist-info}/top_level.txt +0 -0
eodag/rest/types/eodag_search.py
CHANGED
```diff
@@ -17,7 +17,7 @@
 # limitations under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Any,
+from typing import TYPE_CHECKING, Any, Optional, Union, cast
 
 from pydantic import (
     AliasChoices,
@@ -52,7 +52,7 @@ if TYPE_CHECKING:
     from typing_extensions import Self
 
 Geometry = Union[
-
+    dict[str, Any],
     Point,
     MultiPoint,
     LineString,
@@ -73,8 +73,8 @@ class EODAGSearch(BaseModel):
 
     productType: Optional[str] = Field(None, alias="collections", validate_default=True)
     provider: Optional[str] = Field(None)
-    ids: Optional[
-    id: Optional[
+    ids: Optional[list[str]] = Field(None)
+    id: Optional[list[str]] = Field(
         None, alias="ids"
     ) # TODO: remove when updating queryables
     geom: Optional[Geometry] = Field(None, alias="geometry")
@@ -101,7 +101,7 @@
     orbitNumber: Optional[int] = Field(None, alias="sat:absolute_orbit")
     # TODO: colision in property name. Need to handle "sar:product_type"
     sensorMode: Optional[str] = Field(None, alias="sar:instrument_mode")
-    polarizationChannels: Optional[
+    polarizationChannels: Optional[list[str]] = Field(None, alias="sar:polarizations")
     dopplerFrequency: Optional[str] = Field(None, alias="sar:frequency_band")
     doi: Optional[str] = Field(None, alias="sci:doi")
     illuminationElevationAngle: Optional[float] = Field(
@@ -110,10 +110,10 @@
     illuminationAzimuthAngle: Optional[float] = Field(None, alias="view:sun_azimuth")
     page: Optional[int] = Field(1)
     items_per_page: int = Field(DEFAULT_ITEMS_PER_PAGE, alias="limit")
-    sort_by: Optional[
+    sort_by: Optional[list[tuple[str, str]]] = Field(None, alias="sortby")
     raise_errors: bool = False
 
-    _to_eodag_map:
+    _to_eodag_map: dict[str, str]
 
     @model_validator(mode="after")
     def remove_timeFromAscendingNode(self) -> Self: # pylint: disable=invalid-name
@@ -129,7 +129,7 @@
         if not self.__pydantic_extra__:
             return self
 
-        keys_to_update:
+        keys_to_update: dict[str, str] = {}
         for key in self.__pydantic_extra__.keys():
             if key.startswith("unk:"):
                 keys_to_update[key] = key[len("unk:") :]
@@ -145,7 +145,7 @@
 
     @model_validator(mode="before")
     @classmethod
-    def remove_keys(cls, values:
+    def remove_keys(cls, values: dict[str, Any]) -> dict[str, Any]:
        """Remove 'datetime', 'crunch', 'intersects', and 'bbox' keys"""
        for key in ["datetime", "crunch", "intersects", "bbox", "filter_lang"]:
            values.pop(key, None)
@@ -154,8 +154,8 @@
     @model_validator(mode="before")
     @classmethod
     def parse_collections(
-        cls, values:
-    ) ->
+        cls, values: dict[str, Any], info: ValidationInfo
+    ) -> dict[str, Any]:
         """convert collections to productType"""
 
         if collections := values.pop("collections", None):
@@ -172,7 +172,7 @@
 
     @model_validator(mode="before")
     @classmethod
-    def parse_query(cls, values:
+    def parse_query(cls, values: dict[str, Any]) -> dict[str, Any]:
         """
         Convert a STAC query parameter filter with the "eq" operator to a dict.
         """
@@ -190,9 +190,9 @@
         if not query:
             return values
 
-        query_props:
-        errors:
-        for property_name, conditions in cast(
+        query_props: dict[str, Any] = {}
+        errors: list[InitErrorDetails] = []
+        for property_name, conditions in cast(dict[str, Any], query).items():
             # Remove the prefix "properties." if present
             prop = property_name.replace("properties.", "", 1)
 
@@ -205,7 +205,7 @@
                 continue
 
             # Retrieve the operator and its value
-            operator, value = next(iter(cast(
+            operator, value = next(iter(cast(dict[str, Any], conditions).items()))
 
             # Validate the operator
             # only eq, in and lte are allowed
@@ -239,7 +239,7 @@
 
     @model_validator(mode="before")
     @classmethod
-    def parse_cql(cls, values:
+    def parse_cql(cls, values: dict[str, Any]) -> dict[str, Any]:
         """
         Process cql2 filter
         """
@@ -256,7 +256,7 @@
         if not filter_:
             return values
 
-        errors:
+        errors: list[InitErrorDetails] = []
         try:
             parsing_result = EodagEvaluator().evaluate(parse_json(filter_)) # type: ignore
         except (ValueError, NotImplementedError) as e:
@@ -271,7 +271,7 @@
                 title=cls.__name__, line_errors=errors
             )
 
-        cql_args:
+        cql_args: dict[str, Any] = cast(dict[str, Any], parsing_result)
 
         invalid_keys = {
             "collections": 'Use "collection" instead of "collections"',
@@ -298,7 +298,7 @@
 
     @field_validator("instrument", mode="before")
     @classmethod
-    def join_instruments(cls, v: Union[str,
+    def join_instruments(cls, v: Union[str, list[str]]) -> str:
         """convert instruments to instrument"""
         if isinstance(v, list):
             return ",".join(v)
@@ -308,8 +308,8 @@
     @classmethod
     def parse_sortby(
         cls,
-        sortby_post_params:
-    ) ->
+        sortby_post_params: list[dict[str, str]],
+    ) -> list[tuple[str, str]]:
         """
         Convert STAC POST sortby to EODAG sort_by
         """
@@ -363,7 +363,7 @@
     def to_stac(
         cls,
         field_name: str,
-        stac_item_properties: Optional[
+        stac_item_properties: Optional[list[str]] = None,
         provider: Optional[str] = None,
     ) -> str:
         """Get the alias of a field in a Pydantic model"""
```
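Most of the changes in this file — and in the `eodag/rest` modules that follow — are the same mechanical migration: `Dict`, `List` and `Tuple` from `typing` are replaced by the built-in generics (`dict[str, Any]`, `list[str]`, `tuple[str, str]`), which `from __future__ import annotations` makes safe to use in annotations. A minimal sketch of the pattern (names borrowed from `EODAGSearch` above, bodies elided, not the actual eodag code):

```python
from __future__ import annotations  # postpones evaluation, so built-in generics are fine in annotations

from typing import Any, Optional


class EODAGSearchSketch:
    # before: sort_by: Optional[List[Tuple[str, str]]]
    sort_by: Optional[list[tuple[str, str]]] = None

    # before: def parse_query(cls, values: Dict[str, Any]) -> Dict[str, Any]:
    @classmethod
    def parse_query(cls, values: dict[str, Any]) -> dict[str, Any]:
        """Placeholder body; the real validator lives in eodag.rest.types.eodag_search."""
        return values
```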
eodag/rest/types/queryables.py
CHANGED
```diff
@@ -17,7 +17,7 @@
 # limitations under the License.
 from __future__ import annotations
 
-from typing import TYPE_CHECKING, Annotated, Any, ClassVar,
+from typing import TYPE_CHECKING, Annotated, Any, ClassVar, Optional, Union
 
 from pydantic import (
     BaseModel,
@@ -25,8 +25,9 @@ from pydantic import (
     Field,
     SerializationInfo,
     SerializerFunctionWrapHandler,
-
+    field_validator,
     model_serializer,
+    model_validator,
 )
 
 from eodag.rest.types.eodag_search import EODAGSearch
@@ -35,6 +36,7 @@ from eodag.types import python_field_definition_to_json
 
 if TYPE_CHECKING:
     from pydantic.fields import FieldInfo
+    from typing_extensions import Self
 
 
 class QueryablesGetParams(BaseModel):
@@ -42,29 +44,39 @@ class QueryablesGetParams(BaseModel):
 
     collection: Optional[str] = Field(default=None, serialization_alias="productType")
     datetime: Optional[str] = Field(default=None)
+    start_datetime: Optional[str] = Field(default=None)
+    end_datetime: Optional[str] = Field(default=None)
 
     model_config = ConfigDict(extra="allow", frozen=True)
 
     @model_serializer(mode="wrap")
-    def _serialize(self, handler: SerializerFunctionWrapHandler) ->
-        dumped:
+    def _serialize(self, handler: SerializerFunctionWrapHandler) -> dict[str, Any]:
+        dumped: dict[str, Any] = handler(self)
         return {EODAGSearch.to_eodag(k): v for k, v in dumped.items()}
 
-
-
-
-
-
-        start = str_to_interval(self.datetime)[0]
-        return start.strftime("%Y-%m-%dT%H:%M:%SZ") if start else None
+    @field_validator("datetime", "start_datetime", "end_datetime", mode="before")
+    def validate_datetime(cls, value: Any) -> Optional[str]:
+        """datetime, start_datetime and end_datetime must be a string"""
+        if isinstance(value, list):
+            return value[0]
 
-
-
-    @
-    def
-        """
-
-
+        return value
+
+    @model_validator(mode="after")
+    def compute_datetimes(self: Self) -> Self:
+        """Start datetime must be a string"""
+        if not self.datetime:
+            return self
+
+        start, end = str_to_interval(self.datetime)
+
+        if not self.start_datetime and start:
+            self.start_datetime = start.strftime("%Y-%m-%dT%H:%M:%SZ")
+
+        if not self.end_datetime and end:
+            self.end_datetime = end.strftime("%Y-%m-%dT%H:%M:%SZ")
+
+        return self
 
 
 class StacQueryableProperty(BaseModel):
@@ -77,12 +89,12 @@
 
     description: str
     ref: Optional[str] = Field(default=None, serialization_alias="$ref")
-    type: Optional[Union[str,
-    enum: Optional[
+    type: Optional[Union[str, list[str]]] = None
+    enum: Optional[list[Any]] = None
     value: Optional[Any] = None
-    min: Optional[Union[int,
-    max: Optional[Union[int,
-    oneOf: Optional[
+    min: Optional[Union[int, list[Union[int, None]]]] = None
+    max: Optional[Union[int, list[Union[int, None]]]] = None
+    oneOf: Optional[list[Any]] = None
     items: Optional[Any] = None
 
     @classmethod
@@ -104,7 +116,7 @@
         _: SerializationInfo,
     ):
         """Remove none value property fields during serialization"""
-        props:
+        props: dict[str, Any] = handler(self)
         return {k: v for k, v in props.items() if v is not None}
 
 
@@ -130,13 +142,13 @@ class StacQueryables(BaseModel):
     description: str = Field(
         default="Queryable names for the EODAG STAC API Item Search filter."
     )
-    default_properties: ClassVar[
+    default_properties: ClassVar[dict[str, StacQueryableProperty]] = {
         "collection": StacQueryableProperty(
             description="Collection",
             ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/collection",
         )
     }
-    possible_properties: ClassVar[
+    possible_properties: ClassVar[dict[str, StacQueryableProperty]] = {
         "geometry": StacQueryableProperty(
             description="Geometry",
             ref="https://schemas.stacspec.org/v1.0.0/item-spec/json-schema/item.json#/geometry",
@@ -152,8 +164,8 @@
             items={"type": "number"},
         ),
     }
-    properties:
-    required: Optional[
+    properties: dict[str, Any] = Field()
+    required: Optional[list[str]] = Field(None)
     additional_properties: bool = Field(
         default=True, serialization_alias="additionalProperties"
     )
```
eodag/rest/types/stac_search.py
CHANGED
```diff
@@ -19,17 +19,7 @@
 
 from __future__ import annotations
 
-from typing import (
-    TYPE_CHECKING,
-    Annotated,
-    Any,
-    Dict,
-    List,
-    Literal,
-    Optional,
-    Tuple,
-    Union,
-)
+from typing import TYPE_CHECKING, Annotated, Any, Literal, Optional, Union
 
 import geojson
 from pydantic import (
@@ -63,8 +53,8 @@ if TYPE_CHECKING:
 NumType = Union[float, int]
 
 BBox = Union[
-
-
+    tuple[NumType, NumType, NumType, NumType],
+    tuple[NumType, NumType, NumType, NumType, NumType, NumType],
 ]
 
 Geometry = Union[
@@ -106,8 +96,8 @@ class SearchPostRequest(BaseModel):
     model_config = ConfigDict(populate_by_name=True, arbitrary_types_allowed=True)
 
     provider: Optional[str] = None
-    collections: Optional[
-    ids: Optional[
+    collections: Optional[list[str]] = None
+    ids: Optional[list[str]] = None
     bbox: Optional[BBox] = None
     intersects: Optional[Geometry] = None
     datetime: Optional[str] = None
@@ -117,21 +107,21 @@
     page: Optional[PositiveInt] = Field( # type: ignore
         default=None, description="Page number, must be a positive integer."
     )
-    query: Optional[
-    filter: Optional[
+    query: Optional[dict[str, Any]] = None
+    filter: Optional[dict[str, Any]] = None
     filter_lang: Optional[str] = Field(
         default=None,
         alias="filter-lang",
         description="The language used for filtering.",
         validate_default=True,
     )
-    sortby: Optional[
+    sortby: Optional[list[SortBy]] = None
     crunch: Optional[str] = None
 
     @field_serializer("intersects")
     def serialize_intersects(
         self, intersects: Optional[Geometry]
-    ) -> Optional[
+    ) -> Optional[dict[str, Any]]:
         """Serialize intersects from shapely to a proper dict"""
         if intersects:
             return geojson.loads(geojson.dumps(intersects)) # type: ignore
@@ -150,7 +140,7 @@ class SearchPostRequest(BaseModel):
 
     @model_validator(mode="before")
     @classmethod
-    def only_one_spatial(cls, values:
+    def only_one_spatial(cls, values: dict[str, Any]) -> dict[str, Any]:
         """Check bbox and intersects are not both supplied."""
         if "intersects" in values and "bbox" in values:
             raise ValueError("intersects and bbox parameters are mutually exclusive")
@@ -170,7 +160,7 @@
 
     @field_validator("ids", "collections", mode="before")
     @classmethod
-    def str_to_str_list(cls, v: Union[str,
+    def str_to_str_list(cls, v: Union[str, list[str]]) -> list[str]:
         """Convert ids and collections strings to list of strings"""
         if isinstance(v, str):
             return [i.strip() for i in v.split(",")]
@@ -178,7 +168,7 @@
 
     @field_validator("intersects", mode="before")
     @classmethod
-    def validate_intersects(cls, v: Union[
+    def validate_intersects(cls, v: Union[dict[str, Any], Geometry]) -> Geometry:
         """Verify format of intersects"""
         if isinstance(v, BaseGeometry):
             return v
@@ -224,7 +214,7 @@
             # Single date is interpreted as end date
             values = ["..", v]
 
-        dates:
+        dates: list[str] = []
         for value in values:
             if value == ".." or value == "":
                 dates.append("..")
@@ -267,13 +257,13 @@
 
 def sortby2list(
     v: Optional[str],
-) -> Optional[
+) -> Optional[list[SortBy]]:
     """
     Convert sortby filter parameter GET syntax to POST syntax
     """
    if not v:
        return None
-    sortby:
+    sortby: list[SortBy] = []
    for sortby_param in v.split(","):
        sortby_param = sortby_param.strip()
        direction: Direction = "desc" if sortby_param.startswith("-") else "asc"
```
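`str_to_str_list` and `sortby2list` above normalise the GET flavour of the STAC parameters (comma-separated strings, `-`-prefixed sort fields) into the POST shapes now annotated as `list[str]` and `list[SortBy]`. A rough standalone illustration, with plain tuples standing in for the `SortBy` model (which is not shown in this diff):

```python
from __future__ import annotations

from typing import Literal, Optional

Direction = Literal["asc", "desc"]


def str_to_str_list(v: str) -> list[str]:
    """GET 'ids=a,b,c' -> ['a', 'b', 'c'], as in the validator above."""
    return [i.strip() for i in v.split(",")]


def sortby2list(v: Optional[str]) -> Optional[list[tuple[str, Direction]]]:
    """GET 'sortby=-field,+other' -> [(field, direction), ...] (tuples stand in for SortBy)."""
    if not v:
        return None
    result: list[tuple[str, Direction]] = []
    for sortby_param in v.split(","):
        sortby_param = sortby_param.strip()
        direction: Direction = "desc" if sortby_param.startswith("-") else "asc"
        result.append((sortby_param.lstrip("+-"), direction))
    return result


print(str_to_str_list("S2_MSI_L1C, S2_MSI_L2A"))    # ['S2_MSI_L1C', 'S2_MSI_L2A']
print(sortby2list("-properties.datetime,+id"))      # [('properties.datetime', 'desc'), ('id', 'asc')]
```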
eodag/rest/utils/__init__.py
CHANGED
```diff
@@ -23,17 +23,7 @@ import logging
 import os
 from io import BufferedReader
 from shutil import make_archive, rmtree
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    Callable,
-    Dict,
-    Iterator,
-    List,
-    NamedTuple,
-    Optional,
-    Union,
-)
+from typing import TYPE_CHECKING, Any, Callable, Iterator, NamedTuple, Optional, Union
 from urllib.parse import unquote_plus, urlencode
 
 import orjson
@@ -63,7 +53,7 @@ class Cruncher(NamedTuple):
     """Type hinted Cruncher namedTuple"""
 
     clazz: Callable[..., Any]
-    config_params:
+    config_params: list[str]
 
 
 crunchers = {
@@ -90,19 +80,19 @@ def format_pydantic_error(e: pydanticValidationError) -> str:
 
 def is_dict_str_any(var: Any) -> bool:
     """Verify whether the variable is of type dict[str, Any]"""
-    if isinstance(var,
+    if isinstance(var, dict):
         return all(isinstance(k, str) for k in var.keys()) # type: ignore
     return False
 
 
-def str2list(v: Optional[str]) -> Optional[
+def str2list(v: Optional[str]) -> Optional[list[str]]:
     """Convert string to list base on , delimiter."""
     if v:
         return v.split(",")
     return None
 
 
-def str2json(k: str, v: Optional[str] = None) -> Optional[
+def str2json(k: str, v: Optional[str] = None) -> Optional[dict[str, Any]]:
     """decoding a URL parameter and then parsing it as JSON."""
     if not v:
         return None
@@ -112,25 +102,25 @@ def str2json(k: str, v: Optional[str] = None) -> Optional[Dict[str, Any]]:
         raise ValidationError(f"{k}: Incorrect JSON object") from e
 
 
-def flatten_list(nested_list: Union[Any,
+def flatten_list(nested_list: Union[Any, list[Any]]) -> list[Any]:
     """Flatten a nested list structure into a single list."""
     if not isinstance(nested_list, list):
         return [nested_list]
     else:
-        flattened:
+        flattened: list[Any] = []
         for element in nested_list:
             flattened.extend(flatten_list(element))
         return flattened
 
 
-def list_to_str_list(input_list:
+def list_to_str_list(input_list: list[Any]) -> list[str]:
     """Attempt to convert a list of any type to a list of strings."""
     try:
         # Try to convert each element to a string
         return [str(element) for element in input_list]
     except Exception as e:
         # Raise an exception if any element cannot be converted
-        raise TypeError(f"Failed to convert to
+        raise TypeError(f"Failed to convert to list[str]: {e}") from e
 
 
 def get_next_link(
@@ -138,7 +128,7 @@ def get_next_link(
     search_request: SearchPostRequest,
     total_results: Optional[int],
     items_per_page: int,
-) -> Optional[
+) -> Optional[dict[str, Any]]:
     """Generate next link URL and body"""
     body = search_request.model_dump(exclude_none=True)
     if "bbox" in body:
@@ -159,7 +149,7 @@ def get_next_link(
         params["page"] = str(page + 1)
         url += f"?{urlencode(params)}"
 
-    next:
+    next: dict[str, Any] = {
         "rel": "next",
         "href": url,
         "title": "Next page",
```
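`flatten_list` appears in full above, so its recursion can be checked directly; the sketch below reuses the same logic (with the `else` branch flattened away) plus a quick usage example:

```python
from __future__ import annotations

from typing import Any, Union


def flatten_list(nested_list: Union[Any, list[Any]]) -> list[Any]:
    """Flatten a nested list structure into a single list (same logic as in the diff)."""
    if not isinstance(nested_list, list):
        return [nested_list]
    flattened: list[Any] = []
    for element in nested_list:
        flattened.extend(flatten_list(element))
    return flattened


print(flatten_list([1, [2, [3, 4]], 5]))  # [1, 2, 3, 4, 5]
```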
eodag/rest/utils/cql_evaluate.py
CHANGED
```diff
@@ -16,13 +16,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 from datetime import datetime as dt
-from typing import Any,
+from typing import Any, Optional, Union
 
 from pygeofilter import ast
 from pygeofilter.backends.evaluator import Evaluator, handle
 from pygeofilter.values import Geometry, Interval
 
-simpleNode = Union[ast.Attribute, str, int, complex, float,
+simpleNode = Union[ast.Attribute, str, int, complex, float, list[Any], tuple[Any, ...]]
 
 
 class EodagEvaluator(Evaluator):
@@ -36,7 +36,7 @@ class EodagEvaluator(Evaluator):
         return node
 
     @handle(Geometry)
-    def spatial(self, node: Geometry) ->
+    def spatial(self, node: Geometry) -> dict[str, Any]:
         """handle geometry"""
         return node.geometry
 
@@ -46,7 +46,7 @@ class EodagEvaluator(Evaluator):
         return node.strftime("%Y-%m-%dT%H:%M:%SZ")
 
     @handle(Interval)
-    def interval(self, _, *interval: Any) ->
+    def interval(self, _, *interval: Any) -> list[Any]:
         """handle datetime interval"""
         return list(interval)
 
@@ -60,7 +60,7 @@ class EodagEvaluator(Evaluator):
     )
     def predicate(
         self, node: ast.Predicate, lhs: Any, rhs: Any
-    ) -> Optional[
+    ) -> Optional[dict[str, Any]]:
         """
         Handle predicates
         Verify the property is first attribute in each predicate
@@ -114,6 +114,6 @@ class EodagEvaluator(Evaluator):
         return {lhs.name: list(rhs)}
 
     @handle(ast.And)
-    def combination(self, _, lhs:
+    def combination(self, _, lhs: dict[str, str], rhs: dict[str, str]):
         """handle combinations"""
         return {**lhs, **rhs}
```
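The `EodagEvaluator` handlers keep returning plain dicts: a comparison predicate becomes `{property: value}` (see `return {lhs.name: list(rhs)}` above) and `ast.And` merges its two sides, so a CQL2 filter collapses into one flat mapping of EODAG search arguments. Roughly, with illustrative property values not taken from this diff:

```python
from typing import Any

# Shapes produced by the handlers shown above (illustrative values only):
predicate_lhs: dict[str, Any] = {"productType": "S2_MSI_L1C"}  # eq predicate -> {name: value}
predicate_rhs: dict[str, Any] = {"cloudCover": [0, 20]}        # "in"/interval style -> {name: list}

# ast.And handler: merge both sides into one flat dict of search arguments ({**lhs, **rhs}).
merged = {**predicate_lhs, **predicate_rhs}
print(merged)  # {'productType': 'S2_MSI_L1C', 'cloudCover': [0, 20]}
```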
eodag/rest/utils/rfc3339.py
CHANGED
```diff
@@ -16,14 +16,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 import datetime
-from typing import Optional
+from typing import Optional
 
 from eodag.utils.rest import rfc3339_str_to_datetime
 
 
 def str_to_interval(
     interval: Optional[str],
-) ->
+) -> tuple[Optional[datetime.datetime], Optional[datetime.datetime]]:
     """Extract a tuple of datetimes from an interval string.
 
     Interval strings are defined by
```
|