datacosmos 0.0.4__tar.gz → 0.0.5__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {datacosmos-0.0.4/datacosmos.egg-info → datacosmos-0.0.5}/PKG-INFO +2 -1
- {datacosmos-0.0.4 → datacosmos-0.0.5}/README.md +47 -30
- {datacosmos-0.0.4 → datacosmos-0.0.5}/config/config.py +1 -2
- {datacosmos-0.0.4 → datacosmos-0.0.5}/config/models/m2m_authentication_config.py +8 -4
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/item_client.py +2 -19
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/models/catalog_search_parameters.py +16 -10
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/uploader/dataclasses/upload_path.py +4 -4
- {datacosmos-0.0.4 → datacosmos-0.0.5/datacosmos.egg-info}/PKG-INFO +2 -1
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos.egg-info/SOURCES.txt +0 -1
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos.egg-info/requires.txt +1 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/pyproject.toml +3 -2
- datacosmos-0.0.4/datacosmos/stac/item/models/search_parameters.py +0 -58
- {datacosmos-0.0.4 → datacosmos-0.0.5}/LICENSE.md +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/config/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/config/models/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/config/models/url.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/datacosmos_client.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/exceptions/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/exceptions/datacosmos_exception.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/collection/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/collection/collection_client.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/collection/models/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/collection/models/collection_update.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/constants/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/constants/satellite_name_mapping.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/enums/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/enums/processing_level.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/enums/product_type.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/enums/season.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/models/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/models/asset.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/models/datacosmos_item.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/models/eo_band.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/models/item_update.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/models/raster_band.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/stac_client.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/uploader/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/uploader/dataclasses/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/uploader/datacosmos_uploader.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/utils/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/utils/constants.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/utils/http_response/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/utils/http_response/check_api_response.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/utils/http_response/models/__init__.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/utils/http_response/models/datacosmos_error.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/utils/http_response/models/datacosmos_response.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/utils/missions.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/utils/url.py +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos.egg-info/dependency_links.txt +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos.egg-info/top_level.txt +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/setup.cfg +0 -0
- {datacosmos-0.0.4 → datacosmos-0.0.5}/tests/test_pass.py +0 -0
{datacosmos-0.0.4/datacosmos.egg-info → datacosmos-0.0.5}/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datacosmos
-Version: 0.0.4
+Version: 0.0.5
 Summary: A library for interacting with DataCosmos from Python code
 Author-email: Open Cosmos <support@open-cosmos.com>
 Classifier: Programming Language :: Python :: 3
@@ -14,6 +14,7 @@ Requires-Dist: requests-oauthlib==1.3.1
 Requires-Dist: pydantic==2.10.6
 Requires-Dist: pystac==1.12.1
 Requires-Dist: pyyaml==6.0.2
+Requires-Dist: structlog==25.3.0
 Provides-Extra: dev
 Requires-Dist: black==22.3.0; extra == "dev"
 Requires-Dist: ruff==0.9.5; extra == "dev"
{datacosmos-0.0.4 → datacosmos-0.0.5}/README.md
RENAMED

@@ -54,6 +54,7 @@ The client will automatically read these values when initialized.
 If manually instantiating `Config`, default values are now applied where possible.
 
 ```python
+from datacosmos.datacosmos_client import DatacosmosClient
 from config.config import Config
 from config.models.m2m_authentication_config import M2MAuthenticationConfig
 from config.models.url import URL
@@ -116,7 +117,8 @@ params = CatalogSearchParameters(
     end_date="2/9/2025",
     satellite=["MANTIS"],
     product_type=["Satellite"],
-    processing_level=["L1A"]
+    processing_level=["L1A"],
+    collections=["mantis-l1a"]
 )
 
 items = list(stac_client.search_items(parameters=params, project_id="your-project-id"))
@@ -132,19 +134,7 @@ stac_client = STACClient(client)
 
 item = stac_client.fetch_item(item_id="example-item", collection_id="example-collection")
 ```
-
-#### 3. **Fetch All Items in a Collection**
-```python
-from datacosmos.datacosmos_client import DatacosmosClient
-from datacosmos.stac.stac_client import STACClient
-
-client = DatacosmosClient()
-stac_client = STACClient(client)
-
-items = stac_client.fetch_collection_items(collection_id="example-collection")
-```
-
-#### 4. **Create a New STAC Item**
+#### 3. **Create a New STAC Item**
 ```python
 from pystac import Item, Asset
 from datetime import datetime
@@ -160,7 +150,7 @@ stac_item = Item(
     geometry={"type": "Point", "coordinates": [102.0, 0.5]},
     bbox=[101.0, 0.0, 103.0, 1.0],
     datetime=datetime.utcnow(),
-    properties={},
+    properties={"datetime": datetime.utcnow(), "processing:level": "example-processing-level"},
     collection="example-collection"
 )
 
@@ -177,9 +167,9 @@ stac_item.add_asset(
 stac_client.create_item(collection_id="example-collection", item=stac_item)
 ```
 
-####
+#### 4. **Update an Existing STAC Item**
 ```python
-from datacosmos.stac.models.item_update import ItemUpdate
+from datacosmos.stac.item.models.item_update import ItemUpdate
 from pystac import Asset, Link
 
 from datacosmos.datacosmos_client import DatacosmosClient
@@ -212,7 +202,7 @@ update_payload = ItemUpdate(
 stac_client.update_item(item_id="new-item", collection_id="example-collection", update_data=update_payload)
 ```
 
-####
+#### 5. **Delete an Item**
 ```python
 
 from datacosmos.datacosmos_client import DatacosmosClient
@@ -224,7 +214,7 @@ stac_client = STACClient(client)
 stac_client.delete_item(item_id="new-item", collection_id="example-collection")
 ```
 
-####
+#### 6. Fetch a Collection
 
 ```python
 
@@ -237,7 +227,7 @@ stac_client = STACClient(client)
 collection = stac_client.fetch_collection("test-collection")
 ```
 
-####
+#### 7. Fetch All Collections
 
 ```python
 
@@ -250,7 +240,7 @@ stac_client = STACClient(client)
 collections = list(stac_client.fetch_all_collections())
 ```
 
-####
+#### 8. Create a Collection
 
 ```python
 from pystac import Collection
@@ -275,7 +265,7 @@ new_collection = Collection(
 stac_client.create_collection(new_collection)
 ```
 
-####
+#### 9. Update a Collection
 
 ```python
 from datacosmos.stac.collection.models.collection_update import CollectionUpdate
@@ -294,7 +284,7 @@ update_data = CollectionUpdate(
 stac_client.update_collection("test-collection", update_data)
 ```
 
-####
+#### 10. Delete a Collection
 
 ```python
 
@@ -307,24 +297,51 @@ stac_client = STACClient(client)
 stac_client.delete_collection("test-collection")
 ```
 
-
+## Uploading Files and Registering STAC Items
 
-You can use the `DatacosmosUploader` class to upload files to the DataCosmos cloud storage and register a STAC item.
+You can use the `DatacosmosUploader` class to upload files to the DataCosmos cloud storage and register a STAC item. The `upload_and_register_item` method will take care of both uploading files and creating the STAC item.
 
-
+### **Upload Files and Register STAC Item**
 
-
-
+1. Make sure you have a directory with the same name as your STAC item JSON file (this directory should contain the files you want to upload).
+2. Call the `upload_and_register_item` method, providing the path to the STAC item JSON file.
 
+```python
 from datacosmos.datacosmos_client import DatacosmosClient
+from datacosmos.uploader.datacosmos_uploader import DatacosmosUploader
 
-client
+# Initialize the client with the configuration
+client = DatacosmosClient(config=config)
 
+# Create the uploader instance
 uploader = DatacosmosUploader(client)
-
+
+# Path to your STAC item JSON file
+item_json_file_path = "/home/peres/repos/datacosmos-sdk/MENUT_L1A_000001943_20250304134812_20250304134821_49435814.json"
+
+# Upload the item and its assets, and register it in the STAC API
 uploader.upload_and_register_item(item_json_file_path)
 ```
 
+### **Folder Structure**
+
+For the `upload_and_register_item` method to work correctly, ensure that the directory structure matches the name of the STAC item JSON file. For example:
+
+```
+/home/peres/repos/datacosmos-sdk/MENUT_L1A_000001943_20250304134812_20250304134821_49435814.json
+/home/peres/repos/datacosmos-sdk/MENUT_L1A_000001943_20250304134812_20250304134821_49435814/
+├── asset1.tiff
+├── asset2.tiff
+└── ...
+```
+
+The folder `MENUT_L1A_000001943_20250304134812_20250304134821_49435814` should contain the assets (files) for upload.
+
+The `upload_and_register_item` method will:
+1. Delete any existing item with the same ID (if it exists).
+2. Upload the assets in the folder to DataCosmos cloud storage.
+3. Register the item in the STAC API.
+
 ## Error Handling
 
 Use `try-except` blocks to handle API errors gracefully:
{datacosmos-0.0.4 → datacosmos-0.0.5}/config/config.py
RENAMED

@@ -6,7 +6,7 @@ and supports environment variable-based overrides.
 """
 
 import os
-from typing import ClassVar,
+from typing import ClassVar, Optional
 
 import yaml
 from pydantic import field_validator
@@ -29,7 +29,6 @@ class Config(BaseSettings):
     stac: Optional[URL] = None
     datacosmos_cloud_storage: Optional[URL] = None
     mission_id: int = 0
-    environment: Literal["local", "test", "prod"] = "test"
 
     DEFAULT_AUTH_TYPE: ClassVar[str] = "m2m"
     DEFAULT_AUTH_TOKEN_URL: ClassVar[str] = "https://login.open-cosmos.com/oauth/token"
{datacosmos-0.0.4 → datacosmos-0.0.5}/config/models/m2m_authentication_config.py
RENAMED

@@ -6,7 +6,7 @@ without user interaction.
 
 from typing import Literal
 
-from pydantic import BaseModel
+from pydantic import BaseModel, Field
 
 
 class M2MAuthenticationConfig(BaseModel):
@@ -16,8 +16,12 @@ class M2MAuthenticationConfig(BaseModel):
     with client credentials.
     """
 
-
+    DEFAULT_TYPE: Literal["m2m"] = "m2m"
+    DEFAULT_TOKEN_URL: str = "https://login.open-cosmos.com/oauth/token"
+    DEFAULT_AUDIENCE: str = "https://beeapp.open-cosmos.com"
+
+    type: Literal["m2m"] = Field(default=DEFAULT_TYPE)
     client_id: str
-    token_url: str
-    audience: str
+    token_url: str = Field(default=DEFAULT_TOKEN_URL)
+    audience: str = Field(default=DEFAULT_AUDIENCE)
     client_secret: str
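Taken together with the `Config` changes above, these defaults mean a machine-to-machine configuration can now be built from the client credentials alone, with `type`, `token_url`, and `audience` falling back to the declared defaults. A minimal sketch of the resulting behaviour (field names and default values are taken from the diff; nothing else is assumed):

```python
from config.models.m2m_authentication_config import M2MAuthenticationConfig

# Only the credentials are required; the remaining fields fall back to
# the defaults declared on the model.
auth = M2MAuthenticationConfig(
    client_id="your-client-id",
    client_secret="your-client-secret",
)

assert auth.type == "m2m"
assert auth.token_url == "https://login.open-cosmos.com/oauth/token"
assert auth.audience == "https://beeapp.open-cosmos.com"
```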
{datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/item_client.py
RENAMED

@@ -14,7 +14,6 @@ from datacosmos.stac.item.models.catalog_search_parameters import (
 )
 from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem
 from datacosmos.stac.item.models.item_update import ItemUpdate
-from datacosmos.stac.item.models.search_parameters import SearchParameters
 from datacosmos.utils.http_response.check_api_response import check_api_response
 
 
@@ -45,23 +44,6 @@ class ItemClient:
         check_api_response(response)
         return Item.from_dict(response.json())
 
-    def fetch_collection_items(
-        self, collection_id: str, parameters: Optional[SearchParameters] = None
-    ) -> Generator[Item, None, None]:
-        """Fetch all items in a collection with optional filtering.
-
-        Args:
-            collection_id (str): The ID of the collection.
-            parameters (Optional[SearchParameters]): Filtering parameters (spatial, temporal, etc.).
-
-        Yields:
-            Item: Parsed STAC item.
-        """
-        if parameters is None:
-            parameters = SearchParameters(collections=[collection_id])
-
-        return self.search_items(parameters)
-
     def search_items(
         self, parameters: CatalogSearchParameters, project_id: str
     ) -> Generator[Item, None, None]:
@@ -76,6 +58,8 @@ class ItemClient:
         url = self.base_url.with_suffix("/search")
         parameters_query = parameters.to_query()
         body = {"project": project_id, "limit": 50, "query": parameters_query}
+        if parameters.collections is not None:
+            body = body | {"collections": parameters.collections}
         return self._paginate_items(url, body)
 
     def create_item(self, collection_id: str, item: Item | DatacosmosItem) -> None:
@@ -90,7 +74,6 @@ class ItemClient:
         """
         url = self.base_url.with_suffix(f"/collections/{collection_id}/items")
         item_json: dict = item.to_dict()
-
         response = self.client.post(url, json=item_json)
         check_api_response(response)
 
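With `fetch_collection_items` removed, collection-scoped queries now go through `search_items` plus the new `collections` field on `CatalogSearchParameters`, which is merged into the search body when present. A rough equivalent of the removed helper, assuming the import path shown in the diff and using a placeholder project ID:

```python
from datacosmos.datacosmos_client import DatacosmosClient
from datacosmos.stac.item.models.catalog_search_parameters import CatalogSearchParameters
from datacosmos.stac.stac_client import STACClient

client = DatacosmosClient()
stac_client = STACClient(client)

# Scope the search to a single collection instead of calling the
# removed fetch_collection_items helper.
params = CatalogSearchParameters(collections=["example-collection"])
items = list(stac_client.search_items(parameters=params, project_id="your-project-id"))
```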
{datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/stac/item/models/catalog_search_parameters.py
RENAMED

@@ -3,7 +3,7 @@
 from datetime import datetime, timedelta
 from typing import Any, List, Optional
 
-from pydantic import BaseModel, field_validator, model_validator
+from pydantic import BaseModel, Field, field_validator, model_validator
 
 from datacosmos.stac.constants.satellite_name_mapping import SATELLITE_NAME_MAPPING
 from datacosmos.stac.enums.processing_level import ProcessingLevel
@@ -20,6 +20,11 @@ class CatalogSearchParameters(BaseModel):
     satellite: Optional[List[str]] = None
     product_type: Optional[List[ProductType]] = None
     processing_level: Optional[List[ProcessingLevel]] = None
+    collections: Optional[list[str]] = Field(
+        None,
+        description="Array of collection IDs to filter by.",
+        example=["collection1", "collection2"],
+    )
 
     # --- Field Validators ---
 
@@ -58,13 +63,13 @@ class CatalogSearchParameters(BaseModel):
             return None
         try:
             dt = datetime.strptime(value, "%m/%d/%Y")
-
-            raise ValueError("Date must be 5/15/2015 or later.")
-            return dt.isoformat() + "Z"
-        except ValueError:
+        except Exception as e:
             raise ValueError(
                 "Invalid start_date format. Use mm/dd/yyyy (e.g., 05/15/2024)"
-            )
+            ) from e
+        if dt < datetime(2015, 5, 15):
+            raise ValueError("Date must be 5/15/2015 or later.")
+        return dt.isoformat() + "Z"
 
     @field_validator("end_date", mode="before")
     @classmethod
@@ -74,15 +79,16 @@ class CatalogSearchParameters(BaseModel):
             return None
         try:
             dt = datetime.strptime(value, "%m/%d/%Y")
-            if dt < datetime(2015, 5, 15):
-                raise ValueError("Date must be 5/15/2015 or later.")
-            dt = dt + timedelta(days=1) - timedelta(milliseconds=1)
-            return dt.isoformat() + "Z"
         except ValueError:
             raise ValueError(
                 "Invalid end_date format. Use mm/dd/yyyy (e.g., 05/15/2024)"
             )
 
+        if dt < datetime(2015, 5, 15):
+            raise ValueError("Date must be 5/15/2015 or later.")
+        dt = dt + timedelta(days=1) - timedelta(milliseconds=1)
+        return dt.isoformat() + "Z"
+
     # --- Model Validator ---
 
     @model_validator(mode="after")
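The restructured validators separate format errors from the 2015-05-15 lower bound and expand `end_date` to the last millisecond of the given day. A short sketch of the expected behaviour, assuming the validators run at model construction as is usual for pydantic:

```python
from datacosmos.stac.item.models.catalog_search_parameters import CatalogSearchParameters

params = CatalogSearchParameters(
    start_date="2/1/2025",   # mm/dd/yyyy, must be 5/15/2015 or later
    end_date="2/9/2025",     # pushed to the end of the day
    collections=["mantis-l1a"],
)
print(params.start_date)  # e.g. 2025-02-01T00:00:00Z
print(params.end_date)    # e.g. 2025-02-09T23:59:59.999000Z

# A malformed or too-early date raises a ValueError from the validator.
```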
{datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos/uploader/dataclasses/upload_path.py
RENAMED

@@ -6,7 +6,7 @@ from pathlib import Path
 
 import structlog
 
-from datacosmos.stac.enums.
+from datacosmos.stac.enums.processing_level import ProcessingLevel
 from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem
 from datacosmos.utils.missions import get_mission_id
 
@@ -18,7 +18,7 @@ class UploadPath:
     """Dataclass for retrieving the upload path of a file."""
 
     mission: str
-    level:
+    level: ProcessingLevel
     day: int
     month: int
     year: int
@@ -43,7 +43,7 @@ class UploadPath:
         dt = datetime.strptime(item.properties["datetime"], "%Y-%m-%dT%H:%M:%SZ")
         path = UploadPath(
             mission=mission,
-            level=
+            level=ProcessingLevel(item.properties["processing:level"].upper()),
             day=dt.day,
             month=dt.month,
             year=dt.year,
@@ -60,7 +60,7 @@ class UploadPath:
             raise ValueError(f"Invalid path {path}")
         return cls(
             mission=parts[0],
-            level=
+            level=ProcessingLevel(parts[1]),
             day=int(parts[4]),
             month=int(parts[3]),
             year=int(parts[2]),
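Typing the `level` field as `ProcessingLevel` means the value taken from `item.properties["processing:level"]` (or from a path segment) is validated through the enum rather than passed along as a raw string. A small illustration, assuming the enum's values match level names such as `L1A` used elsewhere in the README:

```python
from datacosmos.stac.enums.processing_level import ProcessingLevel

# The uploader upper-cases the STAC property before the lookup, so a
# lower-case "l1a" still resolves; unknown levels raise a ValueError.
level = ProcessingLevel("l1a".upper())
print(level)
```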
{datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos.egg-info/PKG-INFO
RENAMED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datacosmos
-Version: 0.0.4
+Version: 0.0.5
 Summary: A library for interacting with DataCosmos from Python code
 Author-email: Open Cosmos <support@open-cosmos.com>
 Classifier: Programming Language :: Python :: 3
@@ -14,6 +14,7 @@ Requires-Dist: requests-oauthlib==1.3.1
 Requires-Dist: pydantic==2.10.6
 Requires-Dist: pystac==1.12.1
 Requires-Dist: pyyaml==6.0.2
+Requires-Dist: structlog==25.3.0
 Provides-Extra: dev
 Requires-Dist: black==22.3.0; extra == "dev"
 Requires-Dist: ruff==0.9.5; extra == "dev"
{datacosmos-0.0.4 → datacosmos-0.0.5}/datacosmos.egg-info/SOURCES.txt
RENAMED

@@ -36,7 +36,6 @@ datacosmos/stac/item/models/datacosmos_item.py
 datacosmos/stac/item/models/eo_band.py
 datacosmos/stac/item/models/item_update.py
 datacosmos/stac/item/models/raster_band.py
-datacosmos/stac/item/models/search_parameters.py
 datacosmos/uploader/__init__.py
 datacosmos/uploader/datacosmos_uploader.py
 datacosmos/uploader/dataclasses/__init__.py
{datacosmos-0.0.4 → datacosmos-0.0.5}/pyproject.toml
RENAMED

@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "datacosmos"
-version = "0.0.4"
+version = "0.0.5"
 authors = [
     { name="Open Cosmos", email="support@open-cosmos.com" },
 ]
@@ -21,7 +21,8 @@ dependencies = [
     "requests-oauthlib==1.3.1",
     "pydantic==2.10.6",
     "pystac==1.12.1",
-    "pyyaml==6.0.2"
+    "pyyaml==6.0.2",
+    "structlog==25.3.0"
 ]
 
 [project.optional-dependencies]
datacosmos-0.0.4/datacosmos/stac/item/models/search_parameters.py
DELETED

@@ -1,58 +0,0 @@
-"""Module defining the SearchParameters model for STAC API queries, encapsulating filtering criteria.
-
-It includes spatial, temporal, and property-based filters for querying STAC items efficiently.
-"""
-
-from typing import Optional, Union
-
-from pydantic import BaseModel, Field, model_validator
-
-
-class SearchParameters(BaseModel):
-    """Encapsulates the parameters for the STAC search API with validation."""
-
-    bbox: Optional[list[float]] = Field(
-        None,
-        description="Bounding box filter [minX, minY, maxX, maxY]. Optional six values for 3D bounding box.",
-        example=[-180.0, -90.0, 180.0, 90.0],
-    )
-    datetime_range: Optional[str] = Field(
-        None,
-        alias="datetime",
-        description=(
-            "Temporal filter, either a single RFC 3339 datetime or an interval. "
-            'Example: "2025-01-01T00:00:00Z/.."'
-        ),
-    )
-    intersects: Optional[dict] = Field(
-        None, description="GeoJSON geometry filter, e.g., a Polygon or Point."
-    )
-    ids: Optional[list[str]] = Field(
-        None,
-        description="Array of item IDs to filter by.",
-        example=["item1", "item2"],
-    )
-    collections: Optional[list[str]] = Field(
-        None,
-        description="Array of collection IDs to filter by.",
-        example=["collection1", "collection2"],
-    )
-    limit: Optional[int] = Field(
-        None,
-        ge=1,
-        le=10000,
-        description="Maximum number of items per page. Default: 10, Max: 10000.",
-        example=10,
-    )
-    query: Optional[dict[str, dict[str, Union[str, int, float]]]] = Field(
-        None,
-        description="Additional property filters, e.g., { 'cloud_coverage': { 'lt': 10 } }.",
-    )
-
-    @model_validator(mode="before")
-    def validate_bbox(cls, values):
-        """Validate that the `bbox` field contains either 4 or 6 values."""
-        bbox = values.get("bbox")
-        if bbox and len(bbox) not in {4, 6}:
-            raise ValueError("bbox must contain 4 or 6 values.")
-        return values