datacosmos 0.0.3__tar.gz → 0.0.5__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datacosmos might be problematic.

Files changed (57)
  1. {datacosmos-0.0.3/datacosmos.egg-info → datacosmos-0.0.5}/PKG-INFO +3 -1
  2. datacosmos-0.0.5/README.md +385 -0
  3. {datacosmos-0.0.3 → datacosmos-0.0.5}/config/config.py +1 -2
  4. {datacosmos-0.0.3 → datacosmos-0.0.5}/config/models/m2m_authentication_config.py +8 -4
  5. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/collection/collection_client.py +6 -2
  6. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/collection/models/collection_update.py +1 -0
  7. datacosmos-0.0.5/datacosmos/stac/constants/__init__.py +1 -0
  8. datacosmos-0.0.5/datacosmos/stac/constants/satellite_name_mapping.py +20 -0
  9. datacosmos-0.0.5/datacosmos/stac/enums/processing_level.py +15 -0
  10. datacosmos-0.0.5/datacosmos/stac/enums/product_type.py +11 -0
  11. datacosmos-0.0.5/datacosmos/stac/enums/season.py +14 -0
  12. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/item/item_client.py +10 -21
  13. datacosmos-0.0.5/datacosmos/stac/item/models/catalog_search_parameters.py +138 -0
  14. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/item/models/datacosmos_item.py +3 -3
  15. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/uploader/dataclasses/upload_path.py +4 -4
  16. {datacosmos-0.0.3 → datacosmos-0.0.5/datacosmos.egg-info}/PKG-INFO +3 -1
  17. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos.egg-info/SOURCES.txt +6 -2
  18. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos.egg-info/requires.txt +2 -0
  19. {datacosmos-0.0.3 → datacosmos-0.0.5}/pyproject.toml +4 -2
  20. datacosmos-0.0.3/README.md +0 -209
  21. datacosmos-0.0.3/datacosmos/stac/enums/level.py +0 -15
  22. datacosmos-0.0.3/datacosmos/stac/item/models/search_parameters.py +0 -58
  23. {datacosmos-0.0.3 → datacosmos-0.0.5}/LICENSE.md +0 -0
  24. {datacosmos-0.0.3 → datacosmos-0.0.5}/config/__init__.py +0 -0
  25. {datacosmos-0.0.3 → datacosmos-0.0.5}/config/models/__init__.py +0 -0
  26. {datacosmos-0.0.3 → datacosmos-0.0.5}/config/models/url.py +0 -0
  27. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/__init__.py +0 -0
  28. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/datacosmos_client.py +0 -0
  29. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/exceptions/__init__.py +0 -0
  30. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/exceptions/datacosmos_exception.py +0 -0
  31. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/__init__.py +0 -0
  32. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/collection/__init__.py +0 -0
  33. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/collection/models/__init__.py +0 -0
  34. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/enums/__init__.py +0 -0
  35. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/item/__init__.py +0 -0
  36. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/item/models/__init__.py +0 -0
  37. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/item/models/asset.py +0 -0
  38. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/item/models/eo_band.py +0 -0
  39. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/item/models/item_update.py +0 -0
  40. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/item/models/raster_band.py +0 -0
  41. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/stac/stac_client.py +0 -0
  42. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/uploader/__init__.py +0 -0
  43. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/uploader/dataclasses/__init__.py +0 -0
  44. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/uploader/datacosmos_uploader.py +0 -0
  45. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/utils/__init__.py +0 -0
  46. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/utils/constants.py +0 -0
  47. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/utils/http_response/__init__.py +0 -0
  48. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/utils/http_response/check_api_response.py +0 -0
  49. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/utils/http_response/models/__init__.py +0 -0
  50. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/utils/http_response/models/datacosmos_error.py +0 -0
  51. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/utils/http_response/models/datacosmos_response.py +0 -0
  52. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/utils/missions.py +0 -0
  53. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos/utils/url.py +0 -0
  54. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos.egg-info/dependency_links.txt +0 -0
  55. {datacosmos-0.0.3 → datacosmos-0.0.5}/datacosmos.egg-info/top_level.txt +0 -0
  56. {datacosmos-0.0.3 → datacosmos-0.0.5}/setup.cfg +0 -0
  57. {datacosmos-0.0.3 → datacosmos-0.0.5}/tests/test_pass.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datacosmos
- Version: 0.0.3
+ Version: 0.0.5
  Summary: A library for interacting with DataCosmos from Python code
  Author-email: Open Cosmos <support@open-cosmos.com>
  Classifier: Programming Language :: Python :: 3
@@ -13,6 +13,8 @@ Requires-Dist: oauthlib==3.2.0
  Requires-Dist: requests-oauthlib==1.3.1
  Requires-Dist: pydantic==2.10.6
  Requires-Dist: pystac==1.12.1
+ Requires-Dist: pyyaml==6.0.2
+ Requires-Dist: structlog==25.3.0
  Provides-Extra: dev
  Requires-Dist: black==22.3.0; extra == "dev"
  Requires-Dist: ruff==0.9.5; extra == "dev"
@@ -0,0 +1,385 @@
+ # DataCosmos SDK
+
+ ## Overview
+
+ The **DataCosmos SDK** enables Open Cosmos customers to interact with the **DataCosmos APIs** for efficient data management and retrieval. It provides authentication handling, HTTP request utilities, and a client for interacting with the **STAC API** (SpatioTemporal Asset Catalog).
+
+ ## Installation
+
+ Install the SDK using **pip**:
+
+ ```sh
+ pip install datacosmos=={version}
+ ```
+
+ ## Initializing the Client
+
+ To start using the SDK, initialize the client. The easiest way to do this is by loading the configuration from a YAML file. Alternatively, you can manually instantiate the Config object or use environment variables.
+
+ ### Default Initialization (Recommended)
+
+ By default, the client loads configuration from a YAML file (`config/config.yaml`).
+
+ ```python
+ from datacosmos.datacosmos_client import DatacosmosClient
+
+ client = DatacosmosClient()
+ ```
+
+ ### Loading from YAML (Recommended)
+
+ Create a YAML file (`config/config.yaml`) with the following content:
+
+ ```yaml
+ authentication:
+   client_id: {client_id}
+   client_secret: {client_secret}
+ ```
+
+ The client will automatically read this file when initialized.
+
+ ### Loading from Environment Variables
+
+ Set the following environment variables:
+
+ ```sh
+ export OC_AUTH_CLIENT_ID={client_id}
+ export OC_AUTH_CLIENT_SECRET={client_secret}
+ ```
+
+ The client will automatically read these values when initialized.
+
+ ### Manual Instantiation
+
+ If manually instantiating `Config`, default values are now applied where possible.
+
+ ```python
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from config.config import Config
+ from config.models.m2m_authentication_config import M2MAuthenticationConfig
+ from config.models.url import URL
+
+ config = Config(
+     authentication=M2MAuthenticationConfig(
+         client_id="your-client-id",
+         client_secret="your-client-secret"
+     )
+ )
+
+ client = DatacosmosClient(config=config)
+ ```
+
+ ### Configuration Options and Defaults
+
+ | Setting | Default Value | Override Method |
+ |------------------------------|-------------------------------------------------|----------------|
+ | `authentication.type` | `m2m` | YAML / ENV |
+ | `authentication.client_id` | _Required in manual instantiation_ | YAML / ENV |
+ | `authentication.client_secret` | _Required in manual instantiation_ | YAML / ENV |
+ | `stac.protocol` | `https` | YAML / ENV |
+ | `stac.host` | `app.open-cosmos.com` | YAML / ENV |
+ | `stac.port` | `443` | YAML / ENV |
+ | `stac.path` | `/api/data/v0/stac` | YAML / ENV |
+ | `datacosmos_cloud_storage.protocol` | `https` | YAML / ENV |
+ | `datacosmos_cloud_storage.host` | `app.open-cosmos.com` | YAML / ENV |
+ | `datacosmos_cloud_storage.port` | `443` | YAML / ENV |
+ | `datacosmos_cloud_storage.path` | `/api/data/v0/storage` | YAML / ENV |
+ | `mission_id` | `0` | YAML / ENV |
+ | `environment` | `test` | YAML / ENV |
+
+ ## STAC Client
+
+ The `STACClient` enables interaction with the STAC API, allowing for searching, retrieving, creating, updating, and deleting STAC items and collections.
+
+ ### Initialize STACClient
+
+ ```python
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+ ```
+
+ ### STACClient Methods
+
+ #### 1. **Search Items**
+ ```python
+ from datacosmos.stac.item.models.catalog_search_parameters import CatalogSearchParameters
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ params = CatalogSearchParameters(
+     start_date="2/9/2025",
+     end_date="2/9/2025",
+     satellite=["MANTIS"],
+     product_type=["Satellite"],
+     processing_level=["L1A"],
+     collections=["mantis-l1a"]
+ )
+
+ items = list(stac_client.search_items(parameters=params, project_id="your-project-id"))
+ ```
+
+ #### 2. **Fetch a Single Item**
+ ```python
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ item = stac_client.fetch_item(item_id="example-item", collection_id="example-collection")
+ ```
+ #### 3. **Create a New STAC Item**
+ ```python
+ from pystac import Item, Asset
+ from datetime import datetime
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ stac_item = Item(
+     id="new-item",
+     geometry={"type": "Point", "coordinates": [102.0, 0.5]},
+     bbox=[101.0, 0.0, 103.0, 1.0],
+     datetime=datetime.utcnow(),
+     properties={"datetime": datetime.utcnow(), "processing:level": "example-processing-level"},
+     collection="example-collection"
+ )
+
+ stac_item.add_asset(
+     "image",
+     Asset(
+         href="https://example.com/sample-image.tiff",
+         media_type="image/tiff",
+         roles=["data"],
+         title="Sample Image"
+     )
+ )
+
+ stac_client.create_item(collection_id="example-collection", item=stac_item)
+ ```
+
+ #### 4. **Update an Existing STAC Item**
+ ```python
+ from datacosmos.stac.item.models.item_update import ItemUpdate
+ from pystac import Asset, Link
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ update_payload = ItemUpdate(
+     properties={
+         "new_property": "updated_value",
+         "datetime": "2024-11-10T14:58:00Z"
+     },
+     assets={
+         "image": Asset(
+             href="https://example.com/updated-image.tiff",
+             media_type="image/tiff"
+         )
+     },
+     links=[
+         Link(rel="self", target="https://example.com/updated-image.tiff")
+     ],
+     geometry={
+         "type": "Point",
+         "coordinates": [10, 20]
+     },
+     bbox=[10.0, 20.0, 30.0, 40.0]
+ )
+
+ stac_client.update_item(item_id="new-item", collection_id="example-collection", update_data=update_payload)
+ ```
+
+ #### 5. **Delete an Item**
+ ```python
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ stac_client.delete_item(item_id="new-item", collection_id="example-collection")
+ ```
+
+ #### 6. Fetch a Collection
+
+ ```python
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ collection = stac_client.fetch_collection("test-collection")
+ ```
+
+ #### 7. Fetch All Collections
+
+ ```python
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ collections = list(stac_client.fetch_all_collections())
+ ```
+
+ #### 8. Create a Collection
+
+ ```python
+ from pystac import Collection
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ new_collection = Collection(
+     id="test-collection",
+     title="Test Collection",
+     description="This is a test collection",
+     license="proprietary",
+     extent={
+         "spatial": {"bbox": [[-180, -90, 180, 90]]},
+         "temporal": {"interval": [["2023-01-01T00:00:00Z", None]]},
+     },
+ )
+
+ stac_client.create_collection(new_collection)
+ ```
+
+ #### 9. Update a Collection
+
+ ```python
+ from datacosmos.stac.collection.models.collection_update import CollectionUpdate
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ update_data = CollectionUpdate(
+     title="Updated Collection Title",
+     description="Updated description",
+ )
+
+ stac_client.update_collection("test-collection", update_data)
+ ```
+
+ #### 10. Delete a Collection
+
+ ```python
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.stac_client import STACClient
+
+ client = DatacosmosClient()
+ stac_client = STACClient(client)
+
+ stac_client.delete_collection("test-collection")
+ ```
+
+ ## Uploading Files and Registering STAC Items
+
+ You can use the `DatacosmosUploader` class to upload files to the DataCosmos cloud storage and register a STAC item. The `upload_and_register_item` method will take care of both uploading files and creating the STAC item.
+
+ ### **Upload Files and Register STAC Item**
+
+ 1. Make sure you have a directory with the same name as your STAC item JSON file (this directory should contain the files you want to upload).
+ 2. Call the `upload_and_register_item` method, providing the path to the STAC item JSON file.
+
+ ```python
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.uploader.datacosmos_uploader import DatacosmosUploader
+
+ # Initialize the client with the configuration
+ client = DatacosmosClient(config=config)
+
+ # Create the uploader instance
+ uploader = DatacosmosUploader(client)
+
+ # Path to your STAC item JSON file
+ item_json_file_path = "/home/peres/repos/datacosmos-sdk/MENUT_L1A_000001943_20250304134812_20250304134821_49435814.json"
+
+ # Upload the item and its assets, and register it in the STAC API
+ uploader.upload_and_register_item(item_json_file_path)
+ ```
+
+ ### **Folder Structure**
+
+ For the `upload_and_register_item` method to work correctly, ensure that the directory structure matches the name of the STAC item JSON file. For example:
+
+ ```
+ /home/peres/repos/datacosmos-sdk/MENUT_L1A_000001943_20250304134812_20250304134821_49435814.json
+ /home/peres/repos/datacosmos-sdk/MENUT_L1A_000001943_20250304134812_20250304134821_49435814/
+ ├── asset1.tiff
+ ├── asset2.tiff
+ └── ...
+ ```
+
+ The folder `MENUT_L1A_000001943_20250304134812_20250304134821_49435814` should contain the assets (files) for upload.
+
+ The `upload_and_register_item` method will:
+ 1. Delete any existing item with the same ID (if it exists).
+ 2. Upload the assets in the folder to DataCosmos cloud storage.
+ 3. Register the item in the STAC API.
+
+ ## Error Handling
+
+ Use `try-except` blocks to handle API errors gracefully:
+
+ ```python
+ try:
+     data = client.get_data("dataset_id")
+     print(data)
+ except Exception as e:
+     print(f"An error occurred: {e}")
+ ```
+
+ ## Contributing
+
+ To contribute:
+
+ 1. Fork the repository.
+ 2. Create a feature branch.
+ 3. Submit a pull request.
+
+ ### Development Setup
+
+ Use `uv` for dependency management:
+
+ ```sh
+ pip install uv
+ uv venv
+ uv pip install -r pyproject.toml .[dev]
+ source .venv/bin/activate
+ ```
+
+ Before making changes, run:
+
+ ```sh
+ black .
+ isort .
+ ruff check .
+ pydocstyle .
+ bandit -r -c pyproject.toml .
+ pytest
+ ```
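The configuration table above only shows a minimal YAML example with the client credentials. As a rough sketch of overriding the endpoint defaults during manual instantiation (not part of the released README; the `URL` keyword arguments are assumed to mirror the `protocol`/`host`/`port`/`path` settings in the table):

```python
# Sketch only: the Config field names come from config/config.py in this diff
# (authentication, stac, datacosmos_cloud_storage, mission_id); the URL keyword
# arguments are assumed from the settings table above.
from config.config import Config
from config.models.m2m_authentication_config import M2MAuthenticationConfig
from config.models.url import URL

config = Config(
    authentication=M2MAuthenticationConfig(
        client_id="your-client-id",
        client_secret="your-client-secret",
    ),
    stac=URL(protocol="https", host="app.open-cosmos.com", port=443, path="/api/data/v0/stac"),
    datacosmos_cloud_storage=URL(
        protocol="https", host="app.open-cosmos.com", port=443, path="/api/data/v0/storage"
    ),
    mission_id=0,
)
```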
@@ -6,7 +6,7 @@ and supports environment variable-based overrides.
  """
 
  import os
- from typing import ClassVar, Literal, Optional
+ from typing import ClassVar, Optional
 
  import yaml
  from pydantic import field_validator
@@ -29,7 +29,6 @@ class Config(BaseSettings):
      stac: Optional[URL] = None
      datacosmos_cloud_storage: Optional[URL] = None
      mission_id: int = 0
-     environment: Literal["local", "test", "prod"] = "test"
 
      DEFAULT_AUTH_TYPE: ClassVar[str] = "m2m"
      DEFAULT_AUTH_TOKEN_URL: ClassVar[str] = "https://login.open-cosmos.com/oauth/token"
@@ -6,7 +6,7 @@ without user interaction.
 
  from typing import Literal
 
- from pydantic import BaseModel
+ from pydantic import BaseModel, Field
 
 
  class M2MAuthenticationConfig(BaseModel):
@@ -16,8 +16,12 @@ class M2MAuthenticationConfig(BaseModel):
      with client credentials.
      """
 
-     type: Literal["m2m"]
+     DEFAULT_TYPE: Literal["m2m"] = "m2m"
+     DEFAULT_TOKEN_URL: str = "https://login.open-cosmos.com/oauth/token"
+     DEFAULT_AUDIENCE: str = "https://beeapp.open-cosmos.com"
+
+     type: Literal["m2m"] = Field(default=DEFAULT_TYPE)
      client_id: str
-     token_url: str
-     audience: str
+     token_url: str = Field(default=DEFAULT_TOKEN_URL)
+     audience: str = Field(default=DEFAULT_AUDIENCE)
      client_secret: str
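With the defaults introduced above, only the client credentials are strictly required when building the authentication model directly; a minimal sketch (import path as used in the README):

```python
from config.models.m2m_authentication_config import M2MAuthenticationConfig

# type, token_url and audience now fall back to the defaults declared on the model,
# so only the client credentials need to be supplied.
auth = M2MAuthenticationConfig(
    client_id="your-client-id",
    client_secret="your-client-secret",
)
assert auth.type == "m2m"
assert auth.token_url == "https://login.open-cosmos.com/oauth/token"
assert auth.audience == "https://beeapp.open-cosmos.com"
```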
@@ -6,6 +6,7 @@ from pystac import Collection, Extent, SpatialExtent, TemporalExtent
  from pystac.utils import str_to_datetime
 
  from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.exceptions.datacosmos_exception import DatacosmosException
  from datacosmos.stac.collection.models.collection_update import CollectionUpdate
  from datacosmos.utils.http_response.check_api_response import check_api_response
 
@@ -145,5 +146,8 @@ class CollectionClient:
          """
          try:
              return next_href.split("?")[1].split("=")[-1]
-         except (IndexError, AttributeError):
-             raise InvalidRequest(f"Failed to parse pagination token from {next_href}")
+         except (IndexError, AttributeError) as e:
+             raise DatacosmosException(
+                 f"Failed to parse pagination token from {next_href}",
+                 response=e.response,
+             ) from e
@@ -2,6 +2,7 @@
 
  Allows partial updates where only the provided fields are modified.
  """
+
  from typing import Any, Dict, List, Optional
  from pydantic import BaseModel, Field
 
@@ -0,0 +1 @@
+ """Constants for STAC."""
@@ -0,0 +1,20 @@
+ """Satellite name mapping."""
+
+ SATELLITE_NAME_MAPPING = {
+     "GEOSAT-2": "2014-033D",
+     "SUPERVIEW-1-01": "2016-083A",
+     "SUPERVIEW-1-02": "2016-083B",
+     "SUPERVIEW-1-03": "2018-002A",
+     "SUPERVIEW-1-04": "2018-002B",
+     "MANTIS": "2023-174B",
+     "MENUT": "2023-001B",
+     "HAMMER": "2024-043BC",
+     "HAMMER-EM": "COSPAR-HAMMER-EM-TBD",
+     "Alisio": "2023-185M",
+     "Platero": "2023-174G",
+     "PHISAT-2": "2024-149C",
+     "PHISAT-2 EM": "COSPAR-PHISAT2-EM-TBD",
+     "Sentinel-2A": "2015-028A",
+     "Sentinel-2B": "2017-013A",
+     "Sentinel-2C": "2024-157A",
+ }
@@ -0,0 +1,15 @@
+ """Level enum class."""
+
+ from enum import Enum
+
+
+ class ProcessingLevel(Enum):
+     """Enum class for the processing levels of the data."""
+
+     L0 = "L0"
+     L1A = "L1A"
+     L2A = "L2A"
+     L1B = "L1B"
+     L1C = "L1C"
+     L1D = "L1D"
+     L3 = "L3"
@@ -0,0 +1,11 @@
+ """Product type enum class."""
+
+ from enum import Enum
+
+
+ class ProductType(str, Enum):
+     """Different product types."""
+
+     SATELLITE = "Satellite"
+     VECTOR = "Vector"
+     INSIGHT = "Insight"
@@ -0,0 +1,14 @@
+ """Season enum class."""
+
+ from enum import Enum
+
+
+ class Season(str, Enum):
+     """Different Open Cosmos seasons."""
+
+     SUMMER = "Summer"
+     WINTER = "Winter"
+     AUTUMN = "Autumn"
+     SPRING = "Spring"
+     RAINY = "Rainy"
+     DRY = "Dry"
@@ -9,9 +9,11 @@ from pystac import Item
 
  from datacosmos.datacosmos_client import DatacosmosClient
  from datacosmos.exceptions.datacosmos_exception import DatacosmosException
+ from datacosmos.stac.item.models.catalog_search_parameters import (
+     CatalogSearchParameters,
+ )
  from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem
  from datacosmos.stac.item.models.item_update import ItemUpdate
- from datacosmos.stac.item.models.search_parameters import SearchParameters
  from datacosmos.utils.http_response.check_api_response import check_api_response
 
 
@@ -42,34 +44,22 @@ class ItemClient:
          check_api_response(response)
          return Item.from_dict(response.json())
 
-     def fetch_collection_items(
-         self, collection_id: str, parameters: Optional[SearchParameters] = None
+     def search_items(
+         self, parameters: CatalogSearchParameters, project_id: str
      ) -> Generator[Item, None, None]:
-         """Fetch all items in a collection with optional filtering.
-
-         Args:
-             collection_id (str): The ID of the collection.
-             parameters (Optional[SearchParameters]): Filtering parameters (spatial, temporal, etc.).
-
-         Yields:
-             Item: Parsed STAC item.
-         """
-         if parameters is None:
-             parameters = SearchParameters(collections=[collection_id])
-
-         return self.search_items(parameters)
-
-     def search_items(self, parameters: SearchParameters) -> Generator[Item, None, None]:
          """Query the STAC catalog using the POST endpoint with filtering and pagination.
 
          Args:
-             parameters (SearchParameters): The search parameters.
+             parameters (CatalogSearchParameters): The search parameters.
 
          Yields:
              Item: Parsed STAC item.
          """
          url = self.base_url.with_suffix("/search")
-         body = parameters.model_dump(by_alias=True, exclude_none=True)
+         parameters_query = parameters.to_query()
+         body = {"project": project_id, "limit": 50, "query": parameters_query}
+         if parameters.collections is not None:
+             body = body | {"collections": parameters.collections}
          return self._paginate_items(url, body)
 
      def create_item(self, collection_id: str, item: Item | DatacosmosItem) -> None:
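For reference, the payload that the reworked `search_items` POSTs to `/search` for the README search example above would look roughly like this (a sketch; the datetime strings follow the serialisation in `CatalogSearchParameters.to_query`, shown later in this diff):

```python
# Approximate body built by search_items(parameters=params, project_id="your-project-id")
# for the MANTIS / L1A / "2/9/2025" example from the README.
body = {
    "project": "your-project-id",
    "limit": 50,
    "query": {
        "datetime": {"gte": "2025-02-09T00:00:00Z", "lte": "2025-02-09T23:59:59.999000Z"},
        "opencosmos:product_type": {"in": ["Satellite"]},
        "processing:level": {"in": ["L1A"]},
        "sat:platform_international_designator": {"in": ["2023-174B"]},
    },
    "collections": ["mantis-l1a"],
}
```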
@@ -84,7 +74,6 @@ class ItemClient:
          """
          url = self.base_url.with_suffix(f"/collections/{collection_id}/items")
          item_json: dict = item.to_dict()
-
          response = self.client.post(url, json=item_json)
          check_api_response(response)
 
@@ -0,0 +1,138 @@
+ """Query parameters for catalog search."""
+
+ from datetime import datetime, timedelta
+ from typing import Any, List, Optional
+
+ from pydantic import BaseModel, Field, field_validator, model_validator
+
+ from datacosmos.stac.constants.satellite_name_mapping import SATELLITE_NAME_MAPPING
+ from datacosmos.stac.enums.processing_level import ProcessingLevel
+ from datacosmos.stac.enums.product_type import ProductType
+ from datacosmos.stac.enums.season import Season
+
+
+ class CatalogSearchParameters(BaseModel):
+     """Query parameters for catalog search."""
+
+     start_date: Optional[str] = None
+     end_date: Optional[str] = None
+     seasons: Optional[List[Season]] = None
+     satellite: Optional[List[str]] = None
+     product_type: Optional[List[ProductType]] = None
+     processing_level: Optional[List[ProcessingLevel]] = None
+     collections: Optional[list[str]] = Field(
+         None,
+         description="Array of collection IDs to filter by.",
+         example=["collection1", "collection2"],
+     )
+
+     # --- Field Validators ---
+
+     @field_validator("seasons", mode="before")
+     @classmethod
+     def parse_seasons(cls, value):
+         """Parses seasons values into a list of Season object."""
+         if value is None:
+             return None
+         return [Season(v) if not isinstance(v, Season) else v for v in value]
+
+     @field_validator("product_type", mode="before")
+     @classmethod
+     def parse_product_types(cls, value):
+         """Parses product types values into a list of ProductType object."""
+         if value is None:
+             return None
+         return [ProductType(v) if not isinstance(v, ProductType) else v for v in value]
+
+     @field_validator("processing_level", mode="before")
+     @classmethod
+     def parse_processing_levels(cls, value):
+         """Parses processing levels values into a list of ProcessingLevel object."""
+         if value is None:
+             return None
+         return [
+             ProcessingLevel(v) if not isinstance(v, ProcessingLevel) else v
+             for v in value
+         ]
+
+     @field_validator("start_date", mode="before")
+     @classmethod
+     def parse_start_date(cls, value: Any) -> Optional[str]:
+         """Validations on start_date."""
+         if value is None:
+             return None
+         try:
+             dt = datetime.strptime(value, "%m/%d/%Y")
+         except Exception as e:
+             raise ValueError(
+                 "Invalid start_date format. Use mm/dd/yyyy (e.g., 05/15/2024)"
+             ) from e
+         if dt < datetime(2015, 5, 15):
+             raise ValueError("Date must be 5/15/2015 or later.")
+         return dt.isoformat() + "Z"
+
+     @field_validator("end_date", mode="before")
+     @classmethod
+     def parse_end_date(cls, value: Any) -> Optional[str]:
+         """Validations on end_date."""
+         if value is None:
+             return None
+         try:
+             dt = datetime.strptime(value, "%m/%d/%Y")
+         except ValueError:
+             raise ValueError(
+                 "Invalid end_date format. Use mm/dd/yyyy (e.g., 05/15/2024)"
+             )
+
+         if dt < datetime(2015, 5, 15):
+             raise ValueError("Date must be 5/15/2015 or later.")
+         dt = dt + timedelta(days=1) - timedelta(milliseconds=1)
+         return dt.isoformat() + "Z"
+
+     # --- Model Validator ---
+
+     @model_validator(mode="after")
+     def validate_date_range(self) -> "CatalogSearchParameters":
+         """Checks if end_date is after the start_date."""
+         if self.start_date and self.end_date:
+             start_dt = datetime.fromisoformat(self.start_date.rstrip("Z"))
+             end_dt = datetime.fromisoformat(self.end_date.rstrip("Z"))
+             if start_dt > end_dt:
+                 raise ValueError("end_date cannot be before start_date.")
+         return self
+
+     # --- Query Mapper ---
+
+     def to_query(self) -> dict:
+         """Map user-friendly input to STAC query structure."""
+         query = {}
+
+         if self.start_date or self.end_date:
+             query["datetime"] = {"gte": self.start_date, "lte": self.end_date}
+
+         if self.seasons:
+             query["opencosmos:season"] = {
+                 "in": [seasons.value for seasons in self.seasons]
+             }
+
+         if self.product_type:
+             query["opencosmos:product_type"] = {
+                 "in": [product_type.value for product_type in self.product_type]
+             }
+
+         if self.processing_level:
+             query["processing:level"] = {
+                 "in": [
+                     processing_level.value for processing_level in self.processing_level
+                 ]
+             }
+
+         if self.satellite:
+             cospars = [
+                 SATELLITE_NAME_MAPPING[ui]
+                 for ui in self.satellite
+                 if ui in SATELLITE_NAME_MAPPING
+             ]
+             query["sat:platform_international_designator"] = {"in": cospars}
+
+         return query
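A short usage sketch for the mapper above (enum values and COSPAR designators taken from the files earlier in this diff); note that satellite names missing from `SATELLITE_NAME_MAPPING` are silently dropped from the designator filter rather than rejected:

```python
from datacosmos.stac.item.models.catalog_search_parameters import CatalogSearchParameters

params = CatalogSearchParameters(
    start_date="05/15/2024",
    end_date="05/16/2024",
    seasons=["Summer"],
    satellite=["MENUT", "UNKNOWN-SAT"],  # unknown names are filtered out by to_query()
)
print(params.to_query())
# {
#     "datetime": {"gte": "2024-05-15T00:00:00Z", "lte": "2024-05-16T23:59:59.999000Z"},
#     "opencosmos:season": {"in": ["Summer"]},
#     "sat:platform_international_designator": {"in": ["2023-001B"]},
# }
```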
@@ -4,7 +4,7 @@ from datetime import datetime
 
  from pydantic import BaseModel
 
- from datacosmos.stac.enums.level import Level
+ from datacosmos.stac.enums.processing_level import ProcessingLevel
  from datacosmos.stac.item.models.asset import Asset
 
 
@@ -36,9 +36,9 @@ class DatacosmosItem(BaseModel):
          return datetime.strptime(self.properties["datetime"], "%Y-%m-%dT%H:%M:%SZ")
 
      @property
-     def level(self) -> Level:
+     def level(self) -> ProcessingLevel:
          """Get the processing level of the Datacosmos item."""
-         return Level(self.properties["processing:level"].lower())
+         return ProcessingLevel(self.properties["processing:level"].lower())
 
      @property
      def sat_int_designator(self) -> str:
@@ -6,7 +6,7 @@ from pathlib import Path
 
  import structlog
 
- from datacosmos.stac.enums.level import Level
+ from datacosmos.stac.enums.processing_level import ProcessingLevel
  from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem
  from datacosmos.utils.missions import get_mission_id
 
@@ -18,7 +18,7 @@ class UploadPath:
      """Dataclass for retrieving the upload path of a file."""
 
      mission: str
-     level: Level
+     level: ProcessingLevel
      day: int
      month: int
      year: int
@@ -43,7 +43,7 @@
          dt = datetime.strptime(item.properties["datetime"], "%Y-%m-%dT%H:%M:%SZ")
          path = UploadPath(
              mission=mission,
-             level=Level(item.properties["processing:level"].lower()),
+             level=ProcessingLevel(item.properties["processing:level"].upper()),
              day=dt.day,
              month=dt.month,
              year=dt.year,
@@ -60,7 +60,7 @@
              raise ValueError(f"Invalid path {path}")
          return cls(
              mission=parts[0],
-             level=Level(parts[1]),
+             level=ProcessingLevel(parts[1]),
              day=int(parts[4]),
              month=int(parts[3]),
              year=int(parts[2]),
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datacosmos
- Version: 0.0.3
+ Version: 0.0.5
  Summary: A library for interacting with DataCosmos from Python code
  Author-email: Open Cosmos <support@open-cosmos.com>
  Classifier: Programming Language :: Python :: 3
@@ -13,6 +13,8 @@ Requires-Dist: oauthlib==3.2.0
  Requires-Dist: requests-oauthlib==1.3.1
  Requires-Dist: pydantic==2.10.6
  Requires-Dist: pystac==1.12.1
+ Requires-Dist: pyyaml==6.0.2
+ Requires-Dist: structlog==25.3.0
  Provides-Extra: dev
  Requires-Dist: black==22.3.0; extra == "dev"
  Requires-Dist: ruff==0.9.5; extra == "dev"
@@ -21,17 +21,21 @@ datacosmos/stac/collection/__init__.py
  datacosmos/stac/collection/collection_client.py
  datacosmos/stac/collection/models/__init__.py
  datacosmos/stac/collection/models/collection_update.py
+ datacosmos/stac/constants/__init__.py
+ datacosmos/stac/constants/satellite_name_mapping.py
  datacosmos/stac/enums/__init__.py
- datacosmos/stac/enums/level.py
+ datacosmos/stac/enums/processing_level.py
+ datacosmos/stac/enums/product_type.py
+ datacosmos/stac/enums/season.py
  datacosmos/stac/item/__init__.py
  datacosmos/stac/item/item_client.py
  datacosmos/stac/item/models/__init__.py
  datacosmos/stac/item/models/asset.py
+ datacosmos/stac/item/models/catalog_search_parameters.py
  datacosmos/stac/item/models/datacosmos_item.py
  datacosmos/stac/item/models/eo_band.py
  datacosmos/stac/item/models/item_update.py
  datacosmos/stac/item/models/raster_band.py
- datacosmos/stac/item/models/search_parameters.py
  datacosmos/uploader/__init__.py
  datacosmos/uploader/datacosmos_uploader.py
  datacosmos/uploader/dataclasses/__init__.py
@@ -4,6 +4,8 @@ oauthlib==3.2.0
  requests-oauthlib==1.3.1
  pydantic==2.10.6
  pystac==1.12.1
+ pyyaml==6.0.2
+ structlog==25.3.0
 
  [dev]
  black==22.3.0
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
  [project]
  name = "datacosmos"
- version = "0.0.3"
+ version = "0.0.5"
  authors = [
      { name="Open Cosmos", email="support@open-cosmos.com" },
  ]
@@ -20,7 +20,9 @@ dependencies = [
      "oauthlib==3.2.0",
      "requests-oauthlib==1.3.1",
      "pydantic==2.10.6",
-     "pystac==1.12.1"
+     "pystac==1.12.1",
+     "pyyaml==6.0.2",
+     "structlog==25.3.0"
  ]
 
  [project.optional-dependencies]
@@ -1,209 +0,0 @@
- # DataCosmos SDK
-
- ## Overview
-
- The **DataCosmos SDK** enables Open Cosmos customers to interact with the **DataCosmos APIs** for efficient data management and retrieval. It provides authentication handling, HTTP request utilities, and a client for interacting with the **STAC API** (SpatioTemporal Asset Catalog).
-
- ## Installation
-
- Install the SDK using **pip**:
-
- ```sh
- pip install datacosmos=={version}
- ```
-
- ## Initializing the Client
-
- To start using the SDK, initialize the client. The easiest way to do this is by loading the configuration from a YAML file. Alternatively, you can manually instantiate the Config object or use environment variables.
-
- ### Default Initialization (Recommended)
-
- By default, the client loads configuration from a YAML file (`config/config.yaml`).
-
- ```python
- from datacosmos.datacosmos_client import DatacosmosClient
-
- client = DatacosmosClient()
- ```
-
- ### Loading from YAML (Recommended)
-
- Create a YAML file (`config/config.yaml`) with the following content:
-
- ```yaml
- authentication:
-   client_id: {client_id}
-   client_secret: {client_secret}
- ```
-
- The client will automatically read this file when initialized.
-
- ### Loading from Environment Variables
-
- Set the following environment variables:
-
- ```sh
- export OC_AUTH_CLIENT_ID={client_id}
- export OC_AUTH_CLIENT_SECRET={client_secret}
- ```
-
- The client will automatically read these values when initialized.
-
- ### Manual Instantiation
-
- If manually instantiating `Config`, default values are now applied where possible.
-
- ```python
- from config.config import Config
- from config.models.m2m_authentication_config import M2MAuthenticationConfig
- from config.models.url import URL
-
- config = Config(
-     authentication=M2MAuthenticationConfig(
-         client_id="your-client-id",
-         client_secret="your-client-secret"
-     )
- )
-
- client = DatacosmosClient(config=config)
- ```
-
- ### Configuration Options and Defaults
-
- | Setting | Default Value | Override Method |
- |------------------------------|-------------------------------------------------|----------------|
- | `authentication.type` | `m2m` | YAML / ENV |
- | `authentication.client_id` | _Required in manual instantiation_ | YAML / ENV |
- | `authentication.client_secret` | _Required in manual instantiation_ | YAML / ENV |
- | `stac.protocol` | `https` | YAML / ENV |
- | `stac.host` | `app.open-cosmos.com` | YAML / ENV |
- | `stac.port` | `443` | YAML / ENV |
- | `stac.path` | `/api/data/v0/stac` | YAML / ENV |
- | `datacosmos_cloud_storage.protocol` | `https` | YAML / ENV |
- | `datacosmos_cloud_storage.host` | `app.open-cosmos.com` | YAML / ENV |
- | `datacosmos_cloud_storage.port` | `443` | YAML / ENV |
- | `datacosmos_cloud_storage.path` | `/api/data/v0/storage` | YAML / ENV |
- | `mission_id` | `0` | YAML / ENV |
- | `environment` | `test` | YAML / ENV |
-
- ## STAC Client
-
- The `STACClient` enables interaction with the STAC API, allowing for searching, retrieving, creating, updating, and deleting STAC items and collections.
-
- ### Initialize STACClient
-
- ```python
- from datacosmos.datacosmos_client import DatacosmosClient
- from datacosmos.stac.stac_client import STACClient
-
- client = DatacosmosClient()
- stac_client = STACClient(client)
- ```
-
- ### STACClient Methods
-
- #### 1. Fetch a Collection
-
- ```python
- collection = stac_client.fetch_collection("test-collection")
- ```
-
- #### 2. Fetch All Collections
-
- ```python
- collections = list(stac_client.fetch_all_collections())
- ```
-
- #### 3. Create a Collection
-
- ```python
- from pystac import Collection
-
- new_collection = Collection(
-     id="test-collection",
-     title="Test Collection",
-     description="This is a test collection",
-     license="proprietary",
-     extent={
-         "spatial": {"bbox": [[-180, -90, 180, 90]]},
-         "temporal": {"interval": [["2023-01-01T00:00:00Z", None]]},
-     },
- )
-
- stac_client.create_collection(new_collection)
- ```
-
- #### 4. Update a Collection
-
- ```python
- from datacosmos.stac.collection.models.collection_update import CollectionUpdate
-
- update_data = CollectionUpdate(
-     title="Updated Collection Title",
-     description="Updated description",
- )
-
- stac_client.update_collection("test-collection", update_data)
- ```
-
- #### 5. Delete a Collection
-
- ```python
- stac_client.delete_collection("test-collection")
- ```
-
- ### Uploading Files and Registering STAC Items
-
- You can use the `DatacosmosUploader` class to upload files to the DataCosmos cloud storage and register a STAC item.
-
- #### Upload Files and Register STAC Item
-
- ```python
- from datacosmos.uploader.datacosmos_uploader import DatacosmosUploader
-
- uploader = DatacosmosUploader(client)
- item_json_file_path = "/path/to/stac_item.json"
- uploader.upload_and_register_item(item_json_file_path)
- ```
-
- ## Error Handling
-
- Use `try-except` blocks to handle API errors gracefully:
-
- ```python
- try:
-     data = client.get_data("dataset_id")
-     print(data)
- except Exception as e:
-     print(f"An error occurred: {e}")
- ```
-
- ## Contributing
-
- To contribute:
-
- 1. Fork the repository.
- 2. Create a feature branch.
- 3. Submit a pull request.
-
- ### Development Setup
-
- Use `uv` for dependency management:
-
- ```sh
- pip install uv
- uv venv
- uv pip install -r pyproject.toml .[dev]
- source .venv/bin/activate
- ```
-
- Before making changes, run:
-
- ```sh
- black .
- isort .
- ruff check .
- pydocstyle .
- bandit -r -c pyproject.toml .
- pytest
- ```
@@ -1,15 +0,0 @@
- """Level enum class."""
-
- from enum import Enum
-
-
- class Level(Enum):
-     """Enum class for the processing levels of the data."""
-
-     L0 = "l0"
-     L1A = "l1a"
-     L2A = "l2a"
-     L1B = "l1b"
-     L1C = "l1c"
-     L1D = "l1d"
-     L3 = "l3"
@@ -1,58 +0,0 @@
- """Module defining the SearchParameters model for STAC API queries, encapsulating filtering criteria.
-
- It includes spatial, temporal, and property-based filters for querying STAC items efficiently.
- """
-
- from typing import Optional, Union
-
- from pydantic import BaseModel, Field, model_validator
-
-
- class SearchParameters(BaseModel):
-     """Encapsulates the parameters for the STAC search API with validation."""
-
-     bbox: Optional[list[float]] = Field(
-         None,
-         description="Bounding box filter [minX, minY, maxX, maxY]. Optional six values for 3D bounding box.",
-         example=[-180.0, -90.0, 180.0, 90.0],
-     )
-     datetime_range: Optional[str] = Field(
-         None,
-         alias="datetime",
-         description=(
-             "Temporal filter, either a single RFC 3339 datetime or an interval. "
-             'Example: "2025-01-01T00:00:00Z/.."'
-         ),
-     )
-     intersects: Optional[dict] = Field(
-         None, description="GeoJSON geometry filter, e.g., a Polygon or Point."
-     )
-     ids: Optional[list[str]] = Field(
-         None,
-         description="Array of item IDs to filter by.",
-         example=["item1", "item2"],
-     )
-     collections: Optional[list[str]] = Field(
-         None,
-         description="Array of collection IDs to filter by.",
-         example=["collection1", "collection2"],
-     )
-     limit: Optional[int] = Field(
-         None,
-         ge=1,
-         le=10000,
-         description="Maximum number of items per page. Default: 10, Max: 10000.",
-         example=10,
-     )
-     query: Optional[dict[str, dict[str, Union[str, int, float]]]] = Field(
-         None,
-         description="Additional property filters, e.g., { 'cloud_coverage': { 'lt': 10 } }.",
-     )
-
-     @model_validator(mode="before")
-     def validate_bbox(cls, values):
-         """Validate that the `bbox` field contains either 4 or 6 values."""
-         bbox = values.get("bbox")
-         if bbox and len(bbox) not in {4, 6}:
-             raise ValueError("bbox must contain 4 or 6 values.")
-         return values