eodag 4.0.0a5__py3-none-any.whl → 4.0.0b1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. eodag/api/collection.py +65 -1
  2. eodag/api/core.py +48 -16
  3. eodag/api/product/_assets.py +1 -1
  4. eodag/api/product/_product.py +108 -15
  5. eodag/api/product/drivers/__init__.py +3 -1
  6. eodag/api/product/drivers/base.py +3 -1
  7. eodag/api/product/drivers/generic.py +9 -5
  8. eodag/api/product/drivers/sentinel1.py +14 -9
  9. eodag/api/product/drivers/sentinel2.py +14 -7
  10. eodag/api/product/metadata_mapping.py +5 -2
  11. eodag/api/provider.py +1 -0
  12. eodag/api/search_result.py +4 -1
  13. eodag/cli.py +7 -7
  14. eodag/config.py +22 -4
  15. eodag/plugins/download/aws.py +3 -1
  16. eodag/plugins/download/http.py +4 -10
  17. eodag/plugins/search/base.py +8 -3
  18. eodag/plugins/search/build_search_result.py +108 -120
  19. eodag/plugins/search/cop_marine.py +3 -1
  20. eodag/plugins/search/qssearch.py +7 -6
  21. eodag/resources/collections.yml +255 -0
  22. eodag/resources/ext_collections.json +1 -1
  23. eodag/resources/ext_product_types.json +1 -1
  24. eodag/resources/providers.yml +60 -25
  25. eodag/resources/user_conf_template.yml +6 -0
  26. eodag/types/__init__.py +22 -16
  27. eodag/types/download_args.py +3 -1
  28. eodag/types/queryables.py +125 -55
  29. eodag/types/stac_extensions.py +408 -0
  30. eodag/types/stac_metadata.py +312 -0
  31. eodag/utils/__init__.py +42 -4
  32. eodag/utils/dates.py +202 -2
  33. {eodag-4.0.0a5.dist-info → eodag-4.0.0b1.dist-info}/METADATA +7 -13
  34. {eodag-4.0.0a5.dist-info → eodag-4.0.0b1.dist-info}/RECORD +38 -36
  35. {eodag-4.0.0a5.dist-info → eodag-4.0.0b1.dist-info}/WHEEL +1 -1
  36. {eodag-4.0.0a5.dist-info → eodag-4.0.0b1.dist-info}/entry_points.txt +1 -1
  37. {eodag-4.0.0a5.dist-info → eodag-4.0.0b1.dist-info}/licenses/LICENSE +0 -0
  38. {eodag-4.0.0a5.dist-info → eodag-4.0.0b1.dist-info}/top_level.txt +0 -0
eodag/types/stac_metadata.py ADDED
@@ -0,0 +1,312 @@
+ # -*- coding: utf-8 -*-
+ # Copyright 2025, CS GROUP - France, https://www.cs-soprasteria.com
+ #
+ # This file is part of stac-fastapi-eodag project
+ # https://www.github.com/CS-SI/stac-fastapi-eodag
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ #     http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ """property fields."""
+
+ import logging
+ from collections.abc import Callable
+ from datetime import datetime as dt
+ from typing import Annotated, Any, ClassVar, Optional, Type, TypeVar, Union, cast
+
+ from pydantic import (
+     AliasChoices,
+     AliasPath,
+     BaseModel,
+     Field,
+     TypeAdapter,
+     ValidationError,
+     create_model,
+     field_serializer,
+     model_validator,
+ )
+ from pydantic.fields import FieldInfo
+ from stac_pydantic.item import ItemProperties
+ from stac_pydantic.shared import Provider
+ from typing_extensions import Self
+
+ from eodag.types.stac_extensions import STAC_EXTENSIONS, BaseStacExtension
+
+ logger = logging.getLogger("eodag.types.stac_metadata")
+
+ T = TypeVar("T", bound="CommonStacMetadata")
+
+
+ class CommonStacMetadata(ItemProperties):
+     """Common STAC properties."""
+
+     # TODO: replace dt by stac_pydantic.shared.UtcDatetime.
+     # Requires timezone to be set in EODAG datetime properties
+     # Tested with EFAS FORECAST
+     datetime: Annotated[dt, Field(None, validation_alias="start_datetime")]
+     start_datetime: Annotated[dt, Field(None)]  # TODO do not set if start = end
+     end_datetime: Annotated[dt, Field(None)]  # TODO do not set if start = end
+     created: Annotated[dt, Field(None)]
+     updated: Annotated[dt, Field(None)]
+     platform: Annotated[str, Field(None)]
+     instruments: Annotated[list[str], Field(None)]
+     constellation: Annotated[str, Field(None)]
+     providers: Annotated[list[Provider], Field(None)]
+     gsd: Annotated[float, Field(None, gt=0)]
+     collection: Annotated[str, Field(None)]
+     title: Annotated[str, Field(None)]
+     description: Annotated[str, Field(None)]
+     license: Annotated[str, Field(None)]
+
+     _conformance_classes: ClassVar[dict[str, str]]
+     get_conformance_classes: ClassVar[Callable[[Any], list[str]]]
+
+     @field_serializer(
+         "datetime", "start_datetime", "end_datetime", "created", "updated"
+     )
+     def format_datetime(self, value: dt):
+         """format datetime properties"""
+         return value.strftime("%Y-%m-%dT%H:%M:%S.%fZ")
+
+     @model_validator(mode="before")
+     @classmethod
+     def parse_instruments(cls, values: dict[str, Any]) -> dict[str, Any]:
+         """
+         Convert instrument ``str`` to ``list``.
+         """
+         if instrument := values.get("instruments"):
+             values["instruments"] = (
+                 ",".join(instrument.split()).split(",")
+                 if isinstance(instrument, str)
+                 else instrument
+             )
+             if None in values["instruments"]:
+                 values["instruments"].remove(None)
+         return values
+
+     @model_validator(mode="before")
+     @classmethod
+     def parse_platform(cls, values: dict[str, Any]) -> dict[str, Any]:
+         """
+         Convert platform ``list`` to ``str``.
+         TODO: This should be removed after the refactoring of cop_marine because an item should only have one platform
+         """
+         if platform := values.get("platform"):
+             values["platform"] = (
+                 ",".join(platform) if isinstance(platform, list) else platform
+             )
+         return values
+
+     @model_validator(mode="before")
+     @classmethod
+     def convert_processing_level(cls, values: dict[str, Any]) -> dict[str, Any]:
+ """Convert processing level to ``str`` if it is ``int`"""
112
+         if processing_level := values.get("processing:level"):
+             if isinstance(processing_level, int):
+                 values["processing:level"] = f"L{processing_level}"
+         return values
+
+     @model_validator(mode="before")
+     @classmethod
+     def remove_id_property(cls, values: dict[str, Any]) -> dict[str, Any]:
+         """
+ Remove "id" property which is not STAC compliant if exists.
122
+ """
123
+ values.pop("id", None)
124
+ return values
125
+
126
+ @model_validator(mode="after")
127
+ def validate_datetime_or_start_end(self) -> Self:
128
+ """disable validation of datetime.
129
+
130
+ This model is used for properties conversion not validation.
131
+ """
132
+ return self
133
+
134
+ @model_validator(mode="after")
135
+ def validate_start_end(self) -> Self:
136
+ """disable validation of datetime.
137
+
138
+ This model is used for properties conversion not validation.
139
+ """
140
+ return self
141
+
142
+ @classmethod
143
+ def _create_from_stac_map(
144
+ cls,
145
+ ) -> dict[str, Optional[Union[str, AliasChoices, AliasPath]]]:
146
+ """Create mapping to convert fields from STAC to python-style"""
147
+ return {
148
+ v.serialization_alias or k: v.validation_alias
149
+ for k, v in cls.model_fields.items()
150
+ }
151
+
152
+ @classmethod
153
+ def has_field(cls, field_name: str) -> bool:
154
+ """Check if a given string is in model fields or validation_alias.
155
+
156
+ :param field_name: Field name to check (can be STAC name or python-style name)
157
+ :returns: True if the field is accepted, False otherwise
158
+ """
159
+ # Check if it's a model field name
160
+ if field_name in cls.model_fields:
161
+ return True
162
+
163
+ # Check if it's a validation alias
164
+ for field_info in cls.model_fields.values():
165
+ if field_info.validation_alias == field_name:
166
+ return True
167
+
168
+ return False
169
+
170
+ @classmethod
171
+ def from_stac(cls, field_name: str) -> str:
172
+ """Convert a STAC parameter to its matching python-style name.
173
+
174
+ :param field_name: STAC field name
175
+ :returns: EODAG field name
176
+ """
177
+ field_dict: dict[str, Optional[Union[str, AliasChoices, AliasPath]]] = {
178
+ stac_name: py_name
179
+ for stac_name, py_name in cls._create_from_stac_map().items()
180
+ if field_name == stac_name
181
+ }
182
+ if field_dict:
183
+ if field_dict[field_name] is None:
184
+ return field_name
185
+ if isinstance(field_dict[field_name], (AliasChoices, AliasPath)):
186
+ raise NotImplementedError(
187
+ f"Error for stac name {field_name}: AliasChoices and AliasPath are not currently handled to"
188
+ "convert stac names to eodag names"
189
+                 )
+             return field_dict[field_name]  # type: ignore
+         return field_name
+
+     @classmethod
+     def to_stac(cls, field_name: str) -> str:
+ """Convert an python-style parameter to its matching STAC name.
196
+
+         :param field_name: python-style field name
+         :returns: STAC field name
+         """
+         field_dict: dict[str, Optional[Union[str, AliasChoices, AliasPath]]] = {
+             stac_name: py_name
+             for stac_name, py_name in cls._create_from_stac_map().items()
+             if field_name == py_name
+         }
+         if field_dict:
+             return list(field_dict.keys())[0]
+         return field_name
+
+     @classmethod
+     def safe_validate(
+         cls: Type[T],
+         data: dict,
+     ) -> T:
+         """Validate only fields with correct types, drop others with a warning.
+
+         :param data: data to validate
+         :returns: validated model"""
+         valid = {}
+
+         for name, field in cls.model_fields.items():
+             value = data.get(name, data.get(field.validation_alias))
+             if value is None:
+                 continue
+             try:
+                 TypeAdapter(field.annotation).validate_python(value)
+                 valid[name] = value
+             except ValidationError as e:
+                 logger.warning(
+                     "Dropped property %s: %s, %s",
+                     name,
+                     value,
+                     e.errors()[0]["msg"],
+                 )
+         return cls.model_validate(valid)
+
+
+ def create_stac_metadata_model(
+     extensions: list[BaseStacExtension] = STAC_EXTENSIONS,
+     base_models: list[type[BaseModel]] = [CommonStacMetadata],
+     class_name: str = "StacMetadata",
+ ) -> type[BaseModel]:
+     """Create a pydantic model to validate item properties.
+
+     :param extensions: list of STAC extensions to include in the model
+     :param base_models: base models to extend
+     :param class_name: name of the created model
+     :returns: pydantic model class
+     """
+     extension_models: list[type[BaseModel]] = []
+
+     # Check extensions for additional parameters to include
+     for extension in extensions:
+         if extension_model := extension.FIELDS:
+             extension_models.append(extension_model)
+
+     models = base_models + extension_models
+
+     # check for duplicate field aliases (e.g., start_datetime and start in Queryables)
+     aliases: dict[str, Optional[str]] = dict()
+     duplicates = set()
+     for bm in base_models:
+         for key, field in bm.model_fields.items():
+             if key not in aliases.keys() and key in aliases.values():
+                 duplicates.add(key)
+             else:
+                 aliases[key] = field.alias
+
+     model = create_model(
+         class_name,
+         __base__=tuple(models),
+         _conformance_classes=(
+             ClassVar[dict[str, str]],
+             {e.__class__.__name__: e.schema_href for e in extensions},
+         ),
+         get_conformance_classes=(
+             ClassVar[Callable[[Any], list[str]]],
+             _get_conformance_classes,
+         ),
+     )
+
+     for key in duplicates:
+         model.model_fields.pop(key)
+
+     return model
+
+
+ def _get_conformance_classes(self) -> list[str]:
+     """Extract list of conformance classes from set fields metadata"""
+     conformance_classes: set[str] = set()
+
+     model_fields_by_alias = {
+         field_info.serialization_alias: field_info
+         for name, field_info in self.__class__.model_fields.items()
+         if field_info.serialization_alias
+     }
+
+     for f in self.model_fields_set:
+         mf = model_fields_by_alias.get(f) or self.__class__.model_fields.get(f)
+         if not mf or not isinstance(mf, FieldInfo) or not mf.metadata:
+             continue
+         extension = next(
+             (
+                 cast(str, m["extension"])
+                 for m in mf.metadata
+                 if isinstance(m, dict) and "extension" in m
+             ),
+             None,
+         )
+         if c := self._conformance_classes.get(extension, None):
+             conformance_classes.add(c)
+
+     return list(conformance_classes)
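
For orientation, a rough usage sketch of the new properties model. It assumes an installed eodag 4.0.0b1 exposing the eodag.types.stac_metadata and eodag.types.stac_extensions modules added above; the property values are invented for illustration and are not part of the diff.

# Hypothetical usage of the model added in eodag/types/stac_metadata.py (assumes eodag 4.0.0b1).
import logging

from eodag.types.stac_metadata import CommonStacMetadata, create_stac_metadata_model

logging.basicConfig(level=logging.WARNING)

# Combined pydantic model: CommonStacMetadata extended with the registered STAC extension fields.
StacMetadata = create_stac_metadata_model()

raw_properties = {
    "start_datetime": "2024-01-01T00:00:00Z",
    "platform": "S2A",
    "instruments": ["MSI"],
    "gsd": "not-a-number",  # deliberately ill-typed: safe_validate should drop it and log a warning
}

# Keep only the fields whose values validate against their declared types.
props = StacMetadata.safe_validate(raw_properties)
print(props.platform, props.instruments)

# Helpers for translating between STAC and python-style field names.
print(CommonStacMetadata.has_field("gsd"))             # True: a declared model field
print(CommonStacMetadata.from_stac("eo:cloud_cover"))  # unmapped names are returned unchanged
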
eodag/utils/__init__.py CHANGED
@@ -143,6 +143,10 @@ DEFAULT_TOKEN_EXPIRATION_MARGIN = 60
  # knwown next page token keys used to guess key in STAC providers next link responses
  KNOWN_NEXT_PAGE_TOKEN_KEYS = ["token", "next", "page", "skip"]

+ ONLINE_STATUS = "succeeded"
+
+ STAC_VERSION = "1.1.0"
+
  # update missing mimetypes
  mimetypes.add_type("text/xml", ".xsd")
  mimetypes.add_type("application/x-grib", ".grib")
@@ -1162,9 +1166,7 @@ def get_geometry_from_ecmwf_feature(geom: dict[str, Any]) -> BaseGeometry:
      :returns: A Shapely polygon.
      """
      if not isinstance(geom, dict):
-         raise TypeError(
-             "Geometry must be a dictionary, instead it's {}".format(type(geom))
-         )
+         raise TypeError("Geometry must be a dictionary")
      if "type" not in geom or geom["type"] != "polygon":
          raise TypeError("Geometry type must be 'polygon'")
      if "shape" not in geom:
@@ -1193,6 +1195,27 @@ def get_geometry_from_ecmwf_area(area: list[float]) -> Optional[BaseGeometry]:
      return get_geometry_from_various(geometry=bbox)


+ def get_geometry_from_ecmwf_location(
+     location: dict[str, float]
+ ) -> Optional[BaseGeometry]:
+     """
+     Creates a ``shapely.geometry`` from a single location.
+
+     location format: {"latitude": float, "longitude": float}
+
+     :param location: dictionary with latitude and longitude
+     :returns: A Shapely polygon.
+     """
+     if not isinstance(location, dict):
+         raise TypeError("Location type must be a dictionary")
+     if not all(f in location for f in ("latitude", "longitude")):
+ raise ValueError("The location must contains the latitude and the longitude")
+     lat = location["latitude"]
+     lon = location["longitude"]
+     bbox = [lon, lat, lon, lat]
+     return get_geometry_from_various(geometry=bbox)
+
+
  class MockResponse:
      """Fake requests response"""

@@ -1667,8 +1690,23 @@ def format_pydantic_error(e: PydanticValidationError) -> str:
      """
      error_header = f"{e.error_count()} error(s). "

+     def concat_loc_names(location: tuple):
+         """Concatenate location names, excluding list indexes.
+
+         :param location: Location components
+         :returns: Concatenation of the string elements of the location
+
+         Examples:
+             >>> concat_loc_names(("variable", 0))
+             'variable'
+             >>> concat_loc_names(("location", "latitude"))
+             'location.latitude'
+         """
+         str_loc = (loc for loc in location if type(loc) is str)
+         return ".".join(str_loc)
+
      error_messages = [
-         f'{err["loc"][0]}: {err["msg"]}' if err["loc"] else err["msg"]
+         f'{concat_loc_names(err["loc"])}: {err["msg"]}' if err["loc"] else err["msg"]
          for err in e.errors()
      ]
      return error_header + "; ".join(set(error_messages))
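
The new get_geometry_from_ecmwf_location helper complements the existing feature/area variants by accepting a single point. A small sketch, assuming eodag 4.0.0b1 is installed; the coordinates are arbitrary:

# Hypothetical usage of the helper added above; a point becomes a degenerate (point-like) bbox geometry.
from eodag.utils import get_geometry_from_ecmwf_location

geom = get_geometry_from_ecmwf_location({"latitude": 43.6, "longitude": 1.44})
print(geom.bounds)  # expected: the single point repeated as (minx, miny, maxx, maxy)

# Missing keys fail early, mirroring the checks in the diff:
# get_geometry_from_ecmwf_location({"latitude": 43.6})  -> ValueError
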
eodag/utils/dates.py CHANGED
@@ -19,8 +19,10 @@

  import datetime
  import re
+ from datetime import date
  from datetime import datetime as dt
- from typing import Any, Iterator, Optional
+ from datetime import timezone
+ from typing import Any, Iterator, Optional, Union

  import dateutil.parser
  from dateutil import tz
@@ -35,6 +37,18 @@ RFC3339_PATTERN = (
      r"(Z|([+-])(\d{2}):(\d{2}))?)?$"
  )

+ # yyyy-mm-dd
+ DATE_PATTERN = r"\d{4}-(0[1-9]|1[0-2])-([0-2][0-9]|3[0-1])"
+
+ # yyyymmdd
+ COMPACT_DATE_PATTERN = r"\d{4}(0[1-9]|1[0-2])([0-2][0-9]|3[0-1])"
+
+ # yyyy-mm-dd/yyyy-mm-dd, yyyy-mm-dd/to/yyyy-mm-dd
+ DATE_RANGE_PATTERN = DATE_PATTERN + r"(/to/|/)" + DATE_PATTERN
+
+ # yyyymmdd/yyyymmdd, yyyymmdd/to/yyyymmdd
+ COMPACT_DATE_RANGE_PATTERN = COMPACT_DATE_PATTERN + r"(/to/|/)" + COMPACT_DATE_PATTERN
+

  def get_timestamp(date_time: str) -> float:
      """Return the Unix timestamp of an ISO8601 date/datetime in seconds.
@@ -59,7 +73,26 @@ def get_timestamp(date_time: str) -> float:


  def datetime_range(start: dt, end: dt) -> Iterator[dt]:
-     """Generator function for all dates in-between ``start`` and ``end`` date."""
+     """Generator function for all dates in-between ``start`` and ``end`` date.
+
+     :param start: Start date
+     :param end: End date
+     :returns: Generator of dates
+
+     Examples:
+         >>> from datetime import datetime
+         >>> dtr = datetime_range(datetime(2020, 12, 31), datetime(2021, 1, 2))
+         >>> next(dtr)
+         datetime.datetime(2020, 12, 31, 0, 0)
+         >>> next(dtr)
+         datetime.datetime(2021, 1, 1, 0, 0)
+         >>> next(dtr)
+         datetime.datetime(2021, 1, 2, 0, 0)
+         >>> next(dtr)
+         Traceback (most recent call last):
+             ...
+         StopIteration
+     """
      delta = end - start
      for nday in range(delta.days + 1):
          yield start + datetime.timedelta(days=nday)
@@ -150,6 +183,9 @@ def get_date(date: Optional[str]) -> Optional[str]:
      """
      Check if the input date can be parsed as a date

+     :param date: The date to parse
+     :returns: The datetime represented with ISO format
+
      Examples:
          >>> from eodag.utils.exceptions import ValidationError
          >>> get_date("2023-09-23")
@@ -202,3 +238,167 @@ def rfc3339_str_to_datetime(s: str) -> datetime.datetime:
          raise ValidationError("Invalid RFC3339 datetime.")

      return dateutil.parser.isoparse(s).replace(tzinfo=datetime.timezone.utc)
+
+
+ def get_min_max(
+     value: Optional[Union[str, list[str]]] = None,
+ ) -> tuple[Optional[str], Optional[str]]:
+     """Returns the min and max from a list of strings or the same string if a single string is given.
+
+     :param value: a single string or a list of strings
+     :returns: a tuple with the min and max values
+
+     Examples:
+         >>> get_min_max(["a", "c", "b"])
+         ('a', 'c')
+         >>> get_min_max(["a"])
+         ('a', 'a')
+         >>> get_min_max("a")
+         ('a', 'a')
+     """
+     if isinstance(value, list):
+         sorted_values = sorted(value)
+         return sorted_values[0], sorted_values[-1]
+     return value, value
+
+
+ def append_time(input_date: date, time: Optional[str] = None) -> dt:
+     """Appends a string-formatted time to a date.
+
+     :param input_date: Date to combine with the time
+     :param time: (optional) time string in format HHMM, HH:MM or HH_MM
+     :returns: Datetime obtained by appending the time to the date
+
+     Examples:
+         >>> from eodag.utils.dates import append_time
+         >>> from datetime import date
+         >>> append_time(date(2020, 12, 13))
+         datetime.datetime(2020, 12, 13, 0, 0)
+         >>> append_time(date(2020, 12, 13), "")
+         datetime.datetime(2020, 12, 13, 0, 0)
+         >>> append_time(date(2020, 12, 13), "2400")
+         datetime.datetime(2020, 12, 13, 0, 0)
+         >>> append_time(date(2020, 12, 13), "14_31")
+         datetime.datetime(2020, 12, 13, 14, 31)
+     """
+     if not time:
+         time = "0000"
+     time = re.sub(":|_", "", time)
+     if time == "2400":
+         time = "0000"
+     combined_dt = dt.combine(input_date, dt.strptime(time, "%H%M").time())
+     combined_dt.replace(tzinfo=timezone.utc)
+     return combined_dt
+
+
+ def parse_date(
+     date: str, time: Optional[Union[str, list[str]]] = None
+ ) -> tuple[dt, dt]:
+ """Parses a date string in formats YYYY-MM-DD, YYYMMDD, solo or in start/end or start/to/end intervals.
298
+
+     :param date: Single or interval date string
+     :returns: A tuple with the start and end datetime
+
+     Examples:
+         >>> parse_date("2020-12-15")
+         (datetime.datetime(2020, 12, 15, 0, 0), datetime.datetime(2020, 12, 15, 0, 0))
+         >>> parse_date("2020-12-15/to/20201230")
+         (datetime.datetime(2020, 12, 15, 0, 0), datetime.datetime(2020, 12, 30, 0, 0))
+     """
+     if "to" in date:
+         start_date_str, end_date_str = date.split("/to/")
+     elif "/" in date:
+         dates = date.split("/")
+         start_date_str = dates[0]
+         end_date_str = dates[-1]
+     else:
+         start_date_str = end_date_str = date
+
+     # Update YYYYMMDD formatted dates
+     if re.match(r"^\d{8}$", start_date_str):
+         start_date_str = (
+             f"{start_date_str[:4]}-{start_date_str[4:6]}-{start_date_str[6:]}"
+         )
+     if re.match(r"^\d{8}$", end_date_str):
+         end_date_str = f"{end_date_str[:4]}-{end_date_str[4:6]}-{end_date_str[6:]}"
+
+     start_date = dt.fromisoformat(start_date_str.rstrip("Z"))
+     end_date = dt.fromisoformat(end_date_str.rstrip("Z"))
+
+     if time:
+         start_t, end_t = get_min_max(time)
+         start_date = append_time(start_date.date(), start_t)
+         end_date = append_time(end_date.date(), end_t)
+
+     return start_date, end_date
+
+
+ def parse_year_month_day(
+     year: Union[str, list[str]],
+     month: Optional[Union[str, list[str]]] = None,
+     day: Optional[Union[str, list[str]]] = None,
+     time: Optional[Union[str, list[str]]] = None,
+ ) -> tuple[dt, dt]:
+     """Returns minimum and maximum datetimes from given lists of years, months, days, times.
+
+     :param year: List of years or a single one
+     :param month: (optional) List of months or a single one
+     :param day: (optional) List of days or a single one
+     :param time: (optional) List of times or a single one in the format HHMM, HH:MM or HH_MM
+     :returns: A tuple with the start and end datetime
+
+     Examples:
+         >>> parse_year_month_day(["2020", "2021", "2022"], ["01", "03", "05"], "01", ["0000", "1200"])
+         (datetime.datetime(2020, 1, 1, 0, 0), datetime.datetime(2022, 5, 1, 12, 0))
+     """
+
+     def build_date(year, month=None, day=None, time=None) -> dt:
+         """Datetime from default_date with updated year, month, day and time."""
+         updated_date = dt(int(year), 1, 1).replace(
+             month=int(month) if month is not None else 1,
+             day=int(day) if day is not None else 1,
+         )
+         if time is not None:
+             updated_date = append_time(updated_date.date(), time)
+         return updated_date
+
+     start_y, end_y = get_min_max(year)
+     start_m, end_m = get_min_max(month)
+     start_d, end_d = get_min_max(day)
+     start_t, end_t = get_min_max(time)
+
+     start_date = build_date(start_y, start_m, start_d, start_t)
+     end_date = build_date(end_y, end_m, end_d, end_t)
+
+     return start_date, end_date
+
+
+ def format_date(date: dt) -> str:
+     """Format a ``datetime`` with the format 'YYYY-MM-DD'.
+
+     :param date: Datetime to format
+     :returns: Date string in the format 'YYYY-MM-DD'
+
+     Examples:
+         >>> from datetime import datetime
+         >>> format_date(datetime(2020, 12, 2))
+         '2020-12-02'
+         >>> format_date(datetime(2020, 12, 2, 11, 22, 33))
+         '2020-12-02'
+     """
+     return date.isoformat()[:10]
+
+
+ def format_date_range(start: dt, end: dt) -> str:
+     """Format a range with the format 'YYYY-MM-DD/YYYY-MM-DD'.
+
+     :param start: Start datetime
+     :param end: End datetime
+     :returns: Date range in the format 'YYYY-MM-DD/YYYY-MM-DD'
+
+     Examples:
+         >>> from datetime import datetime
+         >>> format_date_range(datetime(2020, 12, 2, 11, 22, 33), datetime(2020, 12, 31))
+         '2020-12-02/2020-12-31'
+     """
+     return f"{format_date(start)}/{format_date(end)}"
{eodag-4.0.0a5.dist-info → eodag-4.0.0b1.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: eodag
- Version: 4.0.0a5
+ Version: 4.0.0b1
  Summary: Earth Observation Data Access Gateway
  Home-page: https://github.com/CS-SI/eodag
  Author: CS GROUP - France
@@ -23,6 +23,7 @@ Classifier: Programming Language :: Python :: 3.10
  Classifier: Programming Language :: Python :: 3.11
  Classifier: Programming Language :: Python :: 3.12
  Classifier: Programming Language :: Python :: 3.13
+ Classifier: Programming Language :: Python :: 3.14
  Classifier: Programming Language :: Python :: Implementation :: CPython
  Classifier: Topic :: Internet :: WWW/HTTP :: Indexing/Search
  Classifier: Topic :: Scientific/Engineering :: GIS
@@ -40,7 +41,7 @@ Requires-Dist: importlib_metadata>=5.0
  Requires-Dist: jsonpath-ng
  Requires-Dist: lxml
  Requires-Dist: orjson
- Requires-Dist: pydantic!=2.10.0,<2.12.0,>=2.1.0
+ Requires-Dist: pydantic!=2.10.0,>=2.1.0
  Requires-Dist: pydantic_core
  Requires-Dist: PyJWT[crypto]>=2.5.0
  Requires-Dist: pyproj>=2.1.0
@@ -56,7 +57,7 @@ Requires-Dist: typing_extensions>=4.8.0
  Requires-Dist: urllib3
  Requires-Dist: zipstream-ng
  Provides-Extra: all
- Requires-Dist: eodag[all-providers,csw,server,tutorials]; extra == "all"
+ Requires-Dist: eodag[all-providers,csw,tutorials]; extra == "all"
  Provides-Extra: all-providers
  Requires-Dist: eodag[ecmwf,usgs]; extra == "all-providers"
  Provides-Extra: csw
@@ -65,13 +66,6 @@ Provides-Extra: ecmwf
  Requires-Dist: ecmwf-api-client; extra == "ecmwf"
  Provides-Extra: usgs
  Requires-Dist: usgs>=0.3.6; extra == "usgs"
- Provides-Extra: server
- Requires-Dist: fastapi>=0.93.0; extra == "server"
- Requires-Dist: pygeofilter; extra == "server"
- Requires-Dist: starlette; extra == "server"
- Requires-Dist: uvicorn[standard]; extra == "server"
- Requires-Dist: pydantic-settings; extra == "server"
- Requires-Dist: cachetools; extra == "server"
  Provides-Extra: notebook
  Requires-Dist: tqdm[notebook]; extra == "notebook"
  Provides-Extra: tutorials
@@ -88,7 +82,7 @@ Requires-Dist: netcdf4; extra == "tutorials"
  Requires-Dist: cartopy; extra == "tutorials"
  Requires-Dist: covjsonkit; extra == "tutorials"
  Provides-Extra: dev
- Requires-Dist: eodag[all-providers,csw,server,stubs]; extra == "dev"
+ Requires-Dist: eodag[all-providers,csw,stubs]; extra == "dev"
  Requires-Dist: pytest; extra == "dev"
  Requires-Dist: pytest-cov; extra == "dev"
  Requires-Dist: py>=1.8.2; extra == "dev"
@@ -105,7 +99,7 @@ Requires-Dist: wheel; extra == "dev"
  Requires-Dist: flake8; extra == "dev"
  Requires-Dist: prek; extra == "dev"
  Requires-Dist: responses!=0.24.0; extra == "dev"
- Requires-Dist: fastapi[all]; extra == "dev"
+ Requires-Dist: stac-validator; extra == "dev"
  Requires-Dist: stdlib-list; extra == "dev"
  Requires-Dist: mypy; extra == "dev"
  Provides-Extra: stubs
@@ -123,7 +117,7 @@ Provides-Extra: docs
  Requires-Dist: eodag[all,stubs]; extra == "docs"
  Requires-Dist: autodoc_pydantic; extra == "docs"
  Requires-Dist: nbsphinx; extra == "docs"
- Requires-Dist: sphinx; extra == "docs"
+ Requires-Dist: sphinx<9.0.0; extra == "docs"
  Requires-Dist: sphinx-autodoc-typehints; extra == "docs"
  Requires-Dist: sphinx-book-theme>=1.0.0; extra == "docs"
  Requires-Dist: sphinx-copybutton; extra == "docs"