datacosmos 0.0.3__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

config/config.py CHANGED
@@ -6,7 +6,7 @@ and supports environment variable-based overrides.
  """
 
  import os
- from typing import ClassVar, Literal, Optional
+ from typing import ClassVar, Optional
 
  import yaml
  from pydantic import field_validator
@@ -29,7 +29,6 @@ class Config(BaseSettings):
      stac: Optional[URL] = None
      datacosmos_cloud_storage: Optional[URL] = None
      mission_id: int = 0
-     environment: Literal["local", "test", "prod"] = "test"
 
      DEFAULT_AUTH_TYPE: ClassVar[str] = "m2m"
      DEFAULT_AUTH_TOKEN_URL: ClassVar[str] = "https://login.open-cosmos.com/oauth/token"

config/models/m2m_authentication_config.py CHANGED
@@ -6,7 +6,7 @@ without user interaction.
 
  from typing import Literal
 
- from pydantic import BaseModel
+ from pydantic import BaseModel, Field
 
 
  class M2MAuthenticationConfig(BaseModel):
@@ -16,8 +16,12 @@ class M2MAuthenticationConfig(BaseModel):
      with client credentials.
      """
 
-     type: Literal["m2m"]
+     DEFAULT_TYPE: Literal["m2m"] = "m2m"
+     DEFAULT_TOKEN_URL: str = "https://login.open-cosmos.com/oauth/token"
+     DEFAULT_AUDIENCE: str = "https://beeapp.open-cosmos.com"
+
+     type: Literal["m2m"] = Field(default=DEFAULT_TYPE)
      client_id: str
-     token_url: str
-     audience: str
+     token_url: str = Field(default=DEFAULT_TOKEN_URL)
+     audience: str = Field(default=DEFAULT_AUDIENCE)
      client_secret: str
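
With these defaults in place, only the client credentials have to be supplied explicitly; type, token_url, and audience fall back to the class-level constants. A minimal sketch (the credential values below are placeholders):

from config.models.m2m_authentication_config import M2MAuthenticationConfig

# Only the credentials are required; the other fields use the new defaults.
auth = M2MAuthenticationConfig(
    client_id="my-client-id",          # placeholder
    client_secret="my-client-secret",  # placeholder
)
assert auth.type == "m2m"
assert auth.token_url == "https://login.open-cosmos.com/oauth/token"
assert auth.audience == "https://beeapp.open-cosmos.com"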

datacosmos/stac/collection/collection_client.py CHANGED
@@ -6,6 +6,7 @@ from pystac import Collection, Extent, SpatialExtent, TemporalExtent
  from pystac.utils import str_to_datetime
 
  from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.exceptions.datacosmos_exception import DatacosmosException
  from datacosmos.stac.collection.models.collection_update import CollectionUpdate
  from datacosmos.utils.http_response.check_api_response import check_api_response
 
@@ -145,5 +146,8 @@ class CollectionClient:
          """
          try:
              return next_href.split("?")[1].split("=")[-1]
-         except (IndexError, AttributeError):
-             raise InvalidRequest(f"Failed to parse pagination token from {next_href}")
+         except (IndexError, AttributeError) as e:
+             raise DatacosmosException(
+                 f"Failed to parse pagination token from {next_href}",
+                 response=e.response,
+             ) from e
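
For context, the pagination helper assumes the "next" link carries the token as the last "="-separated value of its query string; when that assumption fails it now raises DatacosmosException instead of InvalidRequest. A rough illustration of the parsing (the URL is made up):

# Hypothetical "next" link; the token is taken from the query string.
next_href = "https://example.open-cosmos.com/stac/collections?cursor=abc123"
token = next_href.split("?")[1].split("=")[-1]
assert token == "abc123"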

datacosmos/stac/collection/models/collection_update.py CHANGED
@@ -2,6 +2,7 @@
 
  Allows partial updates where only the provided fields are modified.
  """
+
  from typing import Any, Dict, List, Optional
 
  from pydantic import BaseModel, Field

datacosmos/stac/constants/__init__.py ADDED
@@ -0,0 +1 @@
+ """Constants for STAC."""

datacosmos/stac/constants/satellite_name_mapping.py ADDED
@@ -0,0 +1,20 @@
+ """Satellite name mapping."""
+
+ SATELLITE_NAME_MAPPING = {
+     "GEOSAT-2": "2014-033D",
+     "SUPERVIEW-1-01": "2016-083A",
+     "SUPERVIEW-1-02": "2016-083B",
+     "SUPERVIEW-1-03": "2018-002A",
+     "SUPERVIEW-1-04": "2018-002B",
+     "MANTIS": "2023-174B",
+     "MENUT": "2023-001B",
+     "HAMMER": "2024-043BC",
+     "HAMMER-EM": "COSPAR-HAMMER-EM-TBD",
+     "Alisio": "2023-185M",
+     "Platero": "2023-174G",
+     "PHISAT-2": "2024-149C",
+     "PHISAT-2 EM": "COSPAR-PHISAT2-EM-TBD",
+     "Sentinel-2A": "2015-028A",
+     "Sentinel-2B": "2017-013A",
+     "Sentinel-2C": "2024-157A",
+ }
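
The keys are human-readable satellite names and the values are COSPAR international designators, which CatalogSearchParameters.to_query() (added below) uses for the sat:platform_international_designator filter; names missing from the mapping are silently skipped. For example:

from datacosmos.stac.constants.satellite_name_mapping import SATELLITE_NAME_MAPPING

assert SATELLITE_NAME_MAPPING["MANTIS"] == "2023-174B"
assert "UNKNOWN-SAT" not in SATELLITE_NAME_MAPPING  # unmapped names are ignored by to_query()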

datacosmos/stac/enums/processing_level.py ADDED
@@ -0,0 +1,15 @@
+ """Level enum class."""
+
+ from enum import Enum
+
+
+ class ProcessingLevel(Enum):
+     """Enum class for the processing levels of the data."""
+
+     L0 = "L0"
+     L1A = "L1A"
+     L2A = "L2A"
+     L1B = "L1B"
+     L1C = "L1C"
+     L1D = "L1D"
+     L3 = "L3"

datacosmos/stac/enums/product_type.py ADDED
@@ -0,0 +1,11 @@
+ """Product type enum class."""
+
+ from enum import Enum
+
+
+ class ProductType(str, Enum):
+     """Different product types."""
+
+     SATELLITE = "Satellite"
+     VECTOR = "Vector"
+     INSIGHT = "Insight"

datacosmos/stac/enums/season.py ADDED
@@ -0,0 +1,14 @@
+ """Season enum class."""
+
+ from enum import Enum
+
+
+ class Season(str, Enum):
+     """Different Open Cosmos seasons."""
+
+     SUMMER = "Summer"
+     WINTER = "Winter"
+     AUTUMN = "Autumn"
+     SPRING = "Spring"
+     RAINY = "Rainy"
+     DRY = "Dry"

datacosmos/stac/item/item_client.py CHANGED
@@ -9,9 +9,11 @@ from pystac import Item
 
  from datacosmos.datacosmos_client import DatacosmosClient
  from datacosmos.exceptions.datacosmos_exception import DatacosmosException
+ from datacosmos.stac.item.models.catalog_search_parameters import (
+     CatalogSearchParameters,
+ )
  from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem
  from datacosmos.stac.item.models.item_update import ItemUpdate
- from datacosmos.stac.item.models.search_parameters import SearchParameters
  from datacosmos.utils.http_response.check_api_response import check_api_response
 
 
@@ -42,34 +44,22 @@ class ItemClient:
          check_api_response(response)
          return Item.from_dict(response.json())
 
-     def fetch_collection_items(
-         self, collection_id: str, parameters: Optional[SearchParameters] = None
+     def search_items(
+         self, parameters: CatalogSearchParameters, project_id: str
      ) -> Generator[Item, None, None]:
-         """Fetch all items in a collection with optional filtering.
-
-         Args:
-             collection_id (str): The ID of the collection.
-             parameters (Optional[SearchParameters]): Filtering parameters (spatial, temporal, etc.).
-
-         Yields:
-             Item: Parsed STAC item.
-         """
-         if parameters is None:
-             parameters = SearchParameters(collections=[collection_id])
-
-         return self.search_items(parameters)
-
-     def search_items(self, parameters: SearchParameters) -> Generator[Item, None, None]:
          """Query the STAC catalog using the POST endpoint with filtering and pagination.
 
          Args:
-             parameters (SearchParameters): The search parameters.
+             parameters (CatalogSearchParameters): The search parameters.
 
          Yields:
              Item: Parsed STAC item.
          """
          url = self.base_url.with_suffix("/search")
-         body = parameters.model_dump(by_alias=True, exclude_none=True)
+         parameters_query = parameters.to_query()
+         body = {"project": project_id, "limit": 50, "query": parameters_query}
+         if parameters.collections is not None:
+             body = body | {"collections": parameters.collections}
          return self._paginate_items(url, body)
 
      def create_item(self, collection_id: str, item: Item | DatacosmosItem) -> None:
@@ -84,7 +74,6 @@ class ItemClient:
          """
          url = self.base_url.with_suffix(f"/collections/{collection_id}/items")
          item_json: dict = item.to_dict()
-
          response = self.client.post(url, json=item_json)
          check_api_response(response)
 
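
A minimal sketch of the new search flow, assuming a configured DatacosmosClient, an ItemClient wrapping it, and a real project ID (all placeholders here; the client constructors are not shown in this diff):

from datacosmos.datacosmos_client import DatacosmosClient
from datacosmos.stac.item.item_client import ItemClient
from datacosmos.stac.item.models.catalog_search_parameters import CatalogSearchParameters

client = DatacosmosClient()        # assumes credentials are already configured
item_client = ItemClient(client)   # assumed constructor signature

params = CatalogSearchParameters(
    start_date="05/15/2024",       # mm/dd/yyyy, per the validators in the new model
    end_date="06/15/2024",
    satellite=["MANTIS"],
    product_type=["Satellite"],
)

for item in item_client.search_items(params, project_id="my-project"):  # placeholder project ID
    print(item.id)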

datacosmos/stac/item/models/catalog_search_parameters.py ADDED
@@ -0,0 +1,138 @@
+ """Query parameters for catalog search."""
+
+ from datetime import datetime, timedelta
+ from typing import Any, List, Optional
+
+ from pydantic import BaseModel, Field, field_validator, model_validator
+
+ from datacosmos.stac.constants.satellite_name_mapping import SATELLITE_NAME_MAPPING
+ from datacosmos.stac.enums.processing_level import ProcessingLevel
+ from datacosmos.stac.enums.product_type import ProductType
+ from datacosmos.stac.enums.season import Season
+
+
+ class CatalogSearchParameters(BaseModel):
+     """Query parameters for catalog search."""
+
+     start_date: Optional[str] = None
+     end_date: Optional[str] = None
+     seasons: Optional[List[Season]] = None
+     satellite: Optional[List[str]] = None
+     product_type: Optional[List[ProductType]] = None
+     processing_level: Optional[List[ProcessingLevel]] = None
+     collections: Optional[list[str]] = Field(
+         None,
+         description="Array of collection IDs to filter by.",
+         example=["collection1", "collection2"],
+     )
+
+     # --- Field Validators ---
+
+     @field_validator("seasons", mode="before")
+     @classmethod
+     def parse_seasons(cls, value):
+         """Parses seasons values into a list of Season object."""
+         if value is None:
+             return None
+         return [Season(v) if not isinstance(v, Season) else v for v in value]
+
+     @field_validator("product_type", mode="before")
+     @classmethod
+     def parse_product_types(cls, value):
+         """Parses product types values into a list of ProductType object."""
+         if value is None:
+             return None
+         return [ProductType(v) if not isinstance(v, ProductType) else v for v in value]
+
+     @field_validator("processing_level", mode="before")
+     @classmethod
+     def parse_processing_levels(cls, value):
+         """Parses processing levels values into a list of ProcessingLevel object."""
+         if value is None:
+             return None
+         return [
+             ProcessingLevel(v) if not isinstance(v, ProcessingLevel) else v
+             for v in value
+         ]
+
+     @field_validator("start_date", mode="before")
+     @classmethod
+     def parse_start_date(cls, value: Any) -> Optional[str]:
+         """Validations on start_date."""
+         if value is None:
+             return None
+         try:
+             dt = datetime.strptime(value, "%m/%d/%Y")
+         except Exception as e:
+             raise ValueError(
+                 "Invalid start_date format. Use mm/dd/yyyy (e.g., 05/15/2024)"
+             ) from e
+         if dt < datetime(2015, 5, 15):
+             raise ValueError("Date must be 5/15/2015 or later.")
+         return dt.isoformat() + "Z"
+
+     @field_validator("end_date", mode="before")
+     @classmethod
+     def parse_end_date(cls, value: Any) -> Optional[str]:
+         """Validations on end_date."""
+         if value is None:
+             return None
+         try:
+             dt = datetime.strptime(value, "%m/%d/%Y")
+         except ValueError:
+             raise ValueError(
+                 "Invalid end_date format. Use mm/dd/yyyy (e.g., 05/15/2024)"
+             )
+
+         if dt < datetime(2015, 5, 15):
+             raise ValueError("Date must be 5/15/2015 or later.")
+         dt = dt + timedelta(days=1) - timedelta(milliseconds=1)
+         return dt.isoformat() + "Z"
+
+     # --- Model Validator ---
+
+     @model_validator(mode="after")
+     def validate_date_range(self) -> "CatalogSearchParameters":
+         """Checks if end_date is after the start_date."""
+         if self.start_date and self.end_date:
+             start_dt = datetime.fromisoformat(self.start_date.rstrip("Z"))
+             end_dt = datetime.fromisoformat(self.end_date.rstrip("Z"))
+             if start_dt > end_dt:
+                 raise ValueError("end_date cannot be before start_date.")
+         return self
+
+     # --- Query Mapper ---
+
+     def to_query(self) -> dict:
+         """Map user-friendly input to STAC query structure."""
+         query = {}
+
+         if self.start_date or self.end_date:
+             query["datetime"] = {"gte": self.start_date, "lte": self.end_date}
+
+         if self.seasons:
+             query["opencosmos:season"] = {
+                 "in": [seasons.value for seasons in self.seasons]
+             }
+
+         if self.product_type:
+             query["opencosmos:product_type"] = {
+                 "in": [product_type.value for product_type in self.product_type]
+             }
+
+         if self.processing_level:
+             query["processing:level"] = {
+                 "in": [
+                     processing_level.value for processing_level in self.processing_level
+                 ]
+             }
+
+         if self.satellite:
+             cospars = [
+                 SATELLITE_NAME_MAPPING[ui]
+                 for ui in self.satellite
+                 if ui in SATELLITE_NAME_MAPPING
+             ]
+             query["sat:platform_international_designator"] = {"in": cospars}
+
+         return query
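
To illustrate the mapping, to_query() converts the user-facing fields into STAC query operators; dates become ISO 8601 timestamps with a trailing "Z", and the end date is pushed to the last millisecond of that day:

from datacosmos.stac.item.models.catalog_search_parameters import CatalogSearchParameters

params = CatalogSearchParameters(
    start_date="05/15/2024",
    end_date="05/16/2024",
    seasons=["Summer"],
    satellite=["MENUT"],
)
print(params.to_query())
# {'datetime': {'gte': '2024-05-15T00:00:00Z', 'lte': '2024-05-16T23:59:59.999000Z'},
#  'opencosmos:season': {'in': ['Summer']},
#  'sat:platform_international_designator': {'in': ['2023-001B']}}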

datacosmos/stac/item/models/datacosmos_item.py CHANGED
@@ -4,7 +4,7 @@ from datetime import datetime
 
  from pydantic import BaseModel
 
- from datacosmos.stac.enums.level import Level
+ from datacosmos.stac.enums.processing_level import ProcessingLevel
  from datacosmos.stac.item.models.asset import Asset
 
 
@@ -36,9 +36,9 @@ class DatacosmosItem(BaseModel):
          return datetime.strptime(self.properties["datetime"], "%Y-%m-%dT%H:%M:%SZ")
 
      @property
-     def level(self) -> Level:
+     def level(self) -> ProcessingLevel:
          """Get the processing level of the Datacosmos item."""
-         return Level(self.properties["processing:level"].lower())
+         return ProcessingLevel(self.properties["processing:level"].lower())
 
      @property
      def sat_int_designator(self) -> str:

datacosmos/uploader/dataclasses/upload_path.py CHANGED
@@ -6,7 +6,7 @@ from pathlib import Path
 
  import structlog
 
- from datacosmos.stac.enums.level import Level
+ from datacosmos.stac.enums.processing_level import ProcessingLevel
  from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem
  from datacosmos.utils.missions import get_mission_id
 
@@ -18,7 +18,7 @@ class UploadPath:
      """Dataclass for retrieving the upload path of a file."""
 
      mission: str
-     level: Level
+     level: ProcessingLevel
      day: int
      month: int
      year: int
@@ -43,7 +43,7 @@ class UploadPath:
          dt = datetime.strptime(item.properties["datetime"], "%Y-%m-%dT%H:%M:%SZ")
          path = UploadPath(
              mission=mission,
-             level=Level(item.properties["processing:level"].lower()),
+             level=ProcessingLevel(item.properties["processing:level"].upper()),
              day=dt.day,
              month=dt.month,
              year=dt.year,
@@ -60,7 +60,7 @@ class UploadPath:
              raise ValueError(f"Invalid path {path}")
          return cls(
              mission=parts[0],
-             level=Level(parts[1]),
+             level=ProcessingLevel(parts[1]),
              day=int(parts[4]),
              month=int(parts[3]),
              year=int(parts[2]),
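
Because the replacement enum uses upper-case values ("L1A" rather than "l1a"), the item's processing:level property is now upper-cased before the lookup. A small sketch of that conversion (the property value is illustrative):

from datacosmos.stac.enums.processing_level import ProcessingLevel

raw_level = "l1a"                            # illustrative "processing:level" property value
level = ProcessingLevel(raw_level.upper())   # ProcessingLevel.L1A
assert level is ProcessingLevel.L1A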

{datacosmos-0.0.3.dist-info → datacosmos-0.0.5.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datacosmos
- Version: 0.0.3
+ Version: 0.0.5
  Summary: A library for interacting with DataCosmos from Python code
  Author-email: Open Cosmos <support@open-cosmos.com>
  Classifier: Programming Language :: Python :: 3
@@ -13,6 +13,8 @@ Requires-Dist: oauthlib==3.2.0
  Requires-Dist: requests-oauthlib==1.3.1
  Requires-Dist: pydantic==2.10.6
  Requires-Dist: pystac==1.12.1
+ Requires-Dist: pyyaml==6.0.2
+ Requires-Dist: structlog==25.3.0
  Provides-Extra: dev
  Requires-Dist: black==22.3.0; extra == "dev"
  Requires-Dist: ruff==0.9.5; extra == "dev"

{datacosmos-0.0.3.dist-info → datacosmos-0.0.5.dist-info}/RECORD RENAMED
@@ -1,7 +1,7 @@
  config/__init__.py,sha256=KCsaTb9-ZgFui1GM8wZFIPLJy0D0O8l8Z1Sv3NRD9UM,140
- config/config.py,sha256=h5XwNSA6QFBCDyennyFDNMAmbQOdtg8DsFAvjHlSEx4,7233
+ config/config.py,sha256=vtimmFY2zOXV3OjT7sS5P0p1sW_-ecB5VCF6cseSk4g,7165
  config/models/__init__.py,sha256=r3lThPkyKjBjUZXRNscFzOrmn_-m_i9DvG3RePfCFYc,41
- config/models/m2m_authentication_config.py,sha256=1eJ_9df7Twn5WeWPbqMuR63ZdxhnTpBtKzqAPMnzP_k,565
+ config/models/m2m_authentication_config.py,sha256=n76N4bakpPPycTOeKpiM8pazYtNqiJGMzZXmI_ogbHM,847
  config/models/url.py,sha256=fwr2C06e_RDS8AWxOV_orVxMWhc57bzYoWSjFxQbkwg,835
  datacosmos/__init__.py,sha256=dVHKpbz5FVtfoJAWHRdsUENG6H-vs4UrkuwnIvOGJr4,66
  datacosmos/datacosmos_client.py,sha256=sivVYf45QEHTkUO62fnb1fnObKVmUngTR1Ga-ZRnoQE,4967
@@ -10,24 +10,28 @@ datacosmos/exceptions/datacosmos_exception.py,sha256=rKjJvQDvCEbxXWWccxB5GI_sth6
  datacosmos/stac/__init__.py,sha256=B4x_Mr4X7TzQoYtRC-VzI4W-fEON5WUOaz8cWJbk3Fc,214
  datacosmos/stac/stac_client.py,sha256=Cz_p96RmAgWX8t7Sye4OJRanQpCLihKStvfEw7IgYZc,472
  datacosmos/stac/collection/__init__.py,sha256=VQMLnsU3sER5kh4YxHrHP7XCA3DG1y0n9yoSmvycOY0,212
- datacosmos/stac/collection/collection_client.py,sha256=XTO2s309-cktJosvnwnFFXHDVmJc4vjvbEsZjpsCDmY,5904
+ datacosmos/stac/collection/collection_client.py,sha256=-Nn3yqL4mQS05YAMd0IUmv03hdHKYBtVG2_EqoaAQWc,6064
  datacosmos/stac/collection/models/__init__.py,sha256=TQaihUS_CM9Eaekm4SbzFTNfv7BmabHv3Z-f37Py5Qs,40
- datacosmos/stac/collection/models/collection_update.py,sha256=Tqmfg4H4UQj5jsgy1dpKJCR59NSfWeiCSi9y8CY8-Cg,1656
+ datacosmos/stac/collection/models/collection_update.py,sha256=XC6-29nLz1VGWMxYAw7r1OuL8PdJ3b2oI-RPvnM-XXI,1657
+ datacosmos/stac/constants/__init__.py,sha256=dDRSsF7CKqNF44yIlNdE-PD1sp0Q5mhTEPT7hHIK7YE,26
+ datacosmos/stac/constants/satellite_name_mapping.py,sha256=EJqNdO9uW5B-sIeDF72AjnW7va5BM9mm4oNwijtl51w,575
  datacosmos/stac/enums/__init__.py,sha256=GUEL2xGtdjsrszrxivs0X6daxkaZs2JsTu2JoBtsvB4,22
- datacosmos/stac/enums/level.py,sha256=dqrkSRtoutMTWatGyRRUz3uNKVlNXn3qa_ubXNbw618,237
+ datacosmos/stac/enums/processing_level.py,sha256=5gHG-0kG5rCUxmXYwF3t94ALKk6zUqguOdyTL-jwgps,247
+ datacosmos/stac/enums/product_type.py,sha256=7lL0unJ1hxevW8Pepn9rmydUUWIORu2x4MEtp6rSFbA,196
+ datacosmos/stac/enums/season.py,sha256=QvUzXBYtPEfixhlbV0SAw2u_HK3tRFEnHKshJyIatdg,241
  datacosmos/stac/item/__init__.py,sha256=lRuD_yp-JxoLqBA23q0XMkCNImf4T-X3BJnSw9u_3Yk,200
- datacosmos/stac/item/item_client.py,sha256=E6zHf3ANzVXd5Di_u05mLen5-EOKTdCs0VKcXXJ6lUc,6826
+ datacosmos/stac/item/item_client.py,sha256=mFcbXqV1Ascd5hUSlZFzNgni_DncHAIyIvhtUHpgHI0,6457
  datacosmos/stac/item/models/__init__.py,sha256=bcOrOcIxGxGBrRVIyQVxSM3C3Xj_qzxIHgQeWo6f7Q8,34
  datacosmos/stac/item/models/asset.py,sha256=mvg_fenYCGOTMGwXXpK2nyqBk5RMsUYxl6KhQTWW_b0,631
- datacosmos/stac/item/models/datacosmos_item.py,sha256=jHuOkNvbVXUHdpplClPnA5mR4mcrfYQNm51EgQaVtNk,1704
+ datacosmos/stac/item/models/catalog_search_parameters.py,sha256=3HrUm37VezujwuCR45jhMryS5m1FGc1XmX8-fdTy4jU,4870
+ datacosmos/stac/item/models/datacosmos_item.py,sha256=AImz0GRxrpZfIETdzzNfaKX35wpr39Q4f4u0z6r8eys,1745
  datacosmos/stac/item/models/eo_band.py,sha256=YC3Scn_wFhIo51pIVcJeuJienF7JGWoEv39JngDM6rI,309
  datacosmos/stac/item/models/item_update.py,sha256=_CpjQn9SsfedfuxlHSiGeptqY4M-p15t9YX__mBRueI,2088
  datacosmos/stac/item/models/raster_band.py,sha256=CoEVs-YyPE5Fse0He9DdOs4dGZpzfCsCuVzOcdXa_UM,354
- datacosmos/stac/item/models/search_parameters.py,sha256=yMmcb-Tr2as8585MD5wuZLWcqzwtRRkj07WBkootVS0,2022
  datacosmos/uploader/__init__.py,sha256=ZtfCVJ_pWKKh2F1r_NArnbG3_JtpcEiXcA_tmSwSKmQ,128
  datacosmos/uploader/datacosmos_uploader.py,sha256=LUtBDvAjZI7AYxKnC9TZQDP4z6lV2aHusz92XqivFGw,4398
  datacosmos/uploader/dataclasses/__init__.py,sha256=IjcyA8Vod-z1_Gi1FMZhK58Owman0foL25Hs0YtkYYs,43
- datacosmos/uploader/dataclasses/upload_path.py,sha256=WPl9u-oB-ti07ssKNDjL4vRQXhlOmLCgjt8MxFGrf3A,3153
+ datacosmos/uploader/dataclasses/upload_path.py,sha256=X8zkfw3_FO9qTiKHu-nL_uDmQJYfaov6e4Y2-f-opaU,3204
  datacosmos/utils/__init__.py,sha256=XQbAnoqJrPpnSpEzAbjh84yqYWw8cBM8mNp8ynTG-54,50
  datacosmos/utils/constants.py,sha256=f7pOqCpdXk7WFGoaTyuCpr65jb-TtfhoVGuYTz3_T6Y,272
  datacosmos/utils/missions.py,sha256=7GOnrjxB8V11C_Jr3HHI4vpXifgkOSeirNjIDx17C58,940
@@ -37,8 +41,8 @@ datacosmos/utils/http_response/check_api_response.py,sha256=dKWW01jn2_lWV0xpOBAB
  datacosmos/utils/http_response/models/__init__.py,sha256=Wj8YT6dqw7rAz_rctllxo5Or_vv8DwopvQvBzwCTvpw,45
  datacosmos/utils/http_response/models/datacosmos_error.py,sha256=Uqi2uM98nJPeCbM7zngV6vHSk97jEAb_nkdDEeUjiQM,740
  datacosmos/utils/http_response/models/datacosmos_response.py,sha256=oV4n-sue7K1wwiIQeHpxdNU8vxeqF3okVPE2rydw5W0,336
- datacosmos-0.0.3.dist-info/licenses/LICENSE.md,sha256=vpbRI-UUbZVQfr3VG_CXt9HpRnL1b5kt8uTVbirxeyI,1486
- datacosmos-0.0.3.dist-info/METADATA,sha256=ejuFFFnmdaInVXwXQtO6m35BQdTyotYYMLDVT-oWec4,843
- datacosmos-0.0.3.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
- datacosmos-0.0.3.dist-info/top_level.txt,sha256=Iu5b533Fmdfz0rFKTnuBPjSUOQL2lEkTfHxsokP72s4,18
- datacosmos-0.0.3.dist-info/RECORD,,
+ datacosmos-0.0.5.dist-info/licenses/LICENSE.md,sha256=vpbRI-UUbZVQfr3VG_CXt9HpRnL1b5kt8uTVbirxeyI,1486
+ datacosmos-0.0.5.dist-info/METADATA,sha256=vng8nMwWZjm4IZgXymw6cjWEJZOm6LZSahL0vUNUmPA,905
+ datacosmos-0.0.5.dist-info/WHEEL,sha256=zaaOINJESkSfm_4HQVc5ssNzHCPXhJm0kEUakpsEHaU,91
+ datacosmos-0.0.5.dist-info/top_level.txt,sha256=Iu5b533Fmdfz0rFKTnuBPjSUOQL2lEkTfHxsokP72s4,18
+ datacosmos-0.0.5.dist-info/RECORD,,

{datacosmos-0.0.3.dist-info → datacosmos-0.0.5.dist-info}/WHEEL RENAMED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (78.1.0)
+ Generator: setuptools (80.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
 

datacosmos/stac/enums/level.py DELETED
@@ -1,15 +0,0 @@
- """Level enum class."""
-
- from enum import Enum
-
-
- class Level(Enum):
-     """Enum class for the processing levels of the data."""
-
-     L0 = "l0"
-     L1A = "l1a"
-     L2A = "l2a"
-     L1B = "l1b"
-     L1C = "l1c"
-     L1D = "l1d"
-     L3 = "l3"

datacosmos/stac/item/models/search_parameters.py DELETED
@@ -1,58 +0,0 @@
- """Module defining the SearchParameters model for STAC API queries, encapsulating filtering criteria.
-
- It includes spatial, temporal, and property-based filters for querying STAC items efficiently.
- """
-
- from typing import Optional, Union
-
- from pydantic import BaseModel, Field, model_validator
-
-
- class SearchParameters(BaseModel):
-     """Encapsulates the parameters for the STAC search API with validation."""
-
-     bbox: Optional[list[float]] = Field(
-         None,
-         description="Bounding box filter [minX, minY, maxX, maxY]. Optional six values for 3D bounding box.",
-         example=[-180.0, -90.0, 180.0, 90.0],
-     )
-     datetime_range: Optional[str] = Field(
-         None,
-         alias="datetime",
-         description=(
-             "Temporal filter, either a single RFC 3339 datetime or an interval. "
-             'Example: "2025-01-01T00:00:00Z/.."'
-         ),
-     )
-     intersects: Optional[dict] = Field(
-         None, description="GeoJSON geometry filter, e.g., a Polygon or Point."
-     )
-     ids: Optional[list[str]] = Field(
-         None,
-         description="Array of item IDs to filter by.",
-         example=["item1", "item2"],
-     )
-     collections: Optional[list[str]] = Field(
-         None,
-         description="Array of collection IDs to filter by.",
-         example=["collection1", "collection2"],
-     )
-     limit: Optional[int] = Field(
-         None,
-         ge=1,
-         le=10000,
-         description="Maximum number of items per page. Default: 10, Max: 10000.",
-         example=10,
-     )
-     query: Optional[dict[str, dict[str, Union[str, int, float]]]] = Field(
-         None,
-         description="Additional property filters, e.g., { 'cloud_coverage': { 'lt': 10 } }.",
-     )
-
-     @model_validator(mode="before")
-     def validate_bbox(cls, values):
-         """Validate that the `bbox` field contains either 4 or 6 values."""
-         bbox = values.get("bbox")
-         if bbox and len(bbox) not in {4, 6}:
-             raise ValueError("bbox must contain 4 or 6 values.")
-         return values