datacosmos 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datacosmos might be problematic.

Files changed (43)
  1. config/__init__.py +5 -0
  2. config/config.py +195 -0
  3. config/models/__init__.py +1 -0
  4. config/models/m2m_authentication_config.py +23 -0
  5. config/models/url.py +35 -0
  6. datacosmos/exceptions/__init__.py +1 -0
  7. datacosmos/exceptions/datacosmos_exception.py +27 -0
  8. datacosmos/stac/__init__.py +5 -0
  9. datacosmos/stac/collection/__init__.py +4 -0
  10. datacosmos/stac/collection/collection_client.py +149 -0
  11. datacosmos/stac/collection/models/__init__.py +1 -0
  12. datacosmos/stac/collection/models/collection_update.py +46 -0
  13. datacosmos/stac/enums/__init__.py +1 -0
  14. datacosmos/stac/enums/level.py +15 -0
  15. datacosmos/stac/item/__init__.py +4 -0
  16. datacosmos/stac/item/item_client.py +186 -0
  17. datacosmos/stac/item/models/__init__.py +1 -0
  18. datacosmos/stac/item/models/asset.py +23 -0
  19. datacosmos/stac/item/models/datacosmos_item.py +55 -0
  20. datacosmos/stac/item/models/eo_band.py +15 -0
  21. datacosmos/stac/item/models/item_update.py +57 -0
  22. datacosmos/stac/item/models/raster_band.py +17 -0
  23. datacosmos/stac/item/models/search_parameters.py +58 -0
  24. datacosmos/stac/stac_client.py +12 -0
  25. datacosmos/uploader/__init__.py +1 -0
  26. datacosmos/uploader/dataclasses/__init__.py +1 -0
  27. datacosmos/uploader/dataclasses/upload_path.py +93 -0
  28. datacosmos/uploader/datacosmos_uploader.py +106 -0
  29. datacosmos/utils/__init__.py +1 -0
  30. datacosmos/utils/constants.py +16 -0
  31. datacosmos/utils/http_response/__init__.py +1 -0
  32. datacosmos/utils/http_response/check_api_response.py +34 -0
  33. datacosmos/utils/http_response/models/__init__.py +1 -0
  34. datacosmos/utils/http_response/models/datacosmos_error.py +26 -0
  35. datacosmos/utils/http_response/models/datacosmos_response.py +11 -0
  36. datacosmos/utils/missions.py +27 -0
  37. datacosmos/utils/url.py +60 -0
  38. {datacosmos-0.0.1.dist-info → datacosmos-0.0.3.dist-info}/METADATA +3 -2
  39. datacosmos-0.0.3.dist-info/RECORD +44 -0
  40. {datacosmos-0.0.1.dist-info → datacosmos-0.0.3.dist-info}/WHEEL +1 -1
  41. {datacosmos-0.0.1.dist-info → datacosmos-0.0.3.dist-info}/top_level.txt +1 -0
  42. datacosmos-0.0.1.dist-info/RECORD +0 -7
  43. {datacosmos-0.0.1.dist-info → datacosmos-0.0.3.dist-info/licenses}/LICENSE.md +0 -0
datacosmos/stac/item/item_client.py
@@ -0,0 +1,186 @@
+ """STAC Client module for interacting with a STAC (SpatioTemporal Asset Catalog) API.
+
+ Provides methods for querying, fetching, creating, updating, and deleting STAC items.
+ """
+
+ from typing import Generator, Optional
+
+ from pystac import Item
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.exceptions.datacosmos_exception import DatacosmosException
+ from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem
+ from datacosmos.stac.item.models.item_update import ItemUpdate
+ from datacosmos.stac.item.models.search_parameters import SearchParameters
+ from datacosmos.utils.http_response.check_api_response import check_api_response
+
+
+ class ItemClient:
+     """Client for interacting with the STAC API."""
+
+     def __init__(self, client: DatacosmosClient):
+         """Initialize the STACClient with a DatacosmosClient.
+
+         Args:
+             client (DatacosmosClient): The authenticated Datacosmos client instance.
+         """
+         self.client = client
+         self.base_url = client.config.stac.as_domain_url()
+
+     def fetch_item(self, item_id: str, collection_id: str) -> Item:
+         """Fetch a single STAC item by ID.
+
+         Args:
+             item_id (str): The ID of the item to fetch.
+             collection_id (str): The ID of the collection containing the item.
+
+         Returns:
+             Item: The fetched STAC item.
+         """
+         url = self.base_url.with_suffix(f"/collections/{collection_id}/items/{item_id}")
+         response = self.client.get(url)
+         check_api_response(response)
+         return Item.from_dict(response.json())
+
+     def fetch_collection_items(
+         self, collection_id: str, parameters: Optional[SearchParameters] = None
+     ) -> Generator[Item, None, None]:
+         """Fetch all items in a collection with optional filtering.
+
+         Args:
+             collection_id (str): The ID of the collection.
+             parameters (Optional[SearchParameters]): Filtering parameters (spatial, temporal, etc.).
+
+         Yields:
+             Item: Parsed STAC item.
+         """
+         if parameters is None:
+             parameters = SearchParameters(collections=[collection_id])
+
+         return self.search_items(parameters)
+
+     def search_items(self, parameters: SearchParameters) -> Generator[Item, None, None]:
+         """Query the STAC catalog using the POST endpoint with filtering and pagination.
+
+         Args:
+             parameters (SearchParameters): The search parameters.
+
+         Yields:
+             Item: Parsed STAC item.
+         """
+         url = self.base_url.with_suffix("/search")
+         body = parameters.model_dump(by_alias=True, exclude_none=True)
+         return self._paginate_items(url, body)
+
+     def create_item(self, collection_id: str, item: Item | DatacosmosItem) -> None:
+         """Create a new STAC item in a specified collection.
+
+         Args:
+             collection_id (str): The ID of the collection where the item will be created.
+             item (Item): The STAC Item to be created.
+
+         Raises:
+             RequestError: If the API returns an error response.
+         """
+         url = self.base_url.with_suffix(f"/collections/{collection_id}/items")
+         item_json: dict = item.to_dict()
+
+         response = self.client.post(url, json=item_json)
+         check_api_response(response)
+
+     def update_item(
+         self, item_id: str, collection_id: str, update_data: ItemUpdate
+     ) -> None:
+         """Partially update an existing STAC item.
+
+         Args:
+             item_id (str): The ID of the item to update.
+             collection_id (str): The ID of the collection containing the item.
+             update_data (ItemUpdate): The structured update payload.
+         """
+         url = self.base_url.with_suffix(f"/collections/{collection_id}/items/{item_id}")
+
+         update_payload = update_data.model_dump(by_alias=True, exclude_none=True)
+
+         if "assets" in update_payload:
+             update_payload["assets"] = {
+                 key: asset.to_dict() for key, asset in update_payload["assets"].items()
+             }
+         if "links" in update_payload:
+             update_payload["links"] = [
+                 link.to_dict() for link in update_payload["links"]
+             ]
+
+         response = self.client.patch(url, json=update_payload)
+         check_api_response(response)
+
+     def delete_item(self, item_id: str, collection_id: str) -> None:
+         """Delete a STAC item by its ID.
+
+         Args:
+             item_id (str): The ID of the item to delete.
+             collection_id (str): The ID of the collection containing the item.
+
+         Raises:
+             OCError: If the item is not found or deletion is forbidden.
+         """
+         url = self.base_url.with_suffix(f"/collections/{collection_id}/items/{item_id}")
+         response = self.client.delete(url)
+         check_api_response(response)
+
+     def _paginate_items(self, url: str, body: dict) -> Generator[Item, None, None]:
+         """Handle pagination for the STAC search POST endpoint.
+
+         Fetches items one page at a time using the 'next' link.
+
+         Args:
+             url (str): The base URL for the search endpoint.
+             body (dict): The request body containing search parameters.
+
+         Yields:
+             Item: Parsed STAC item.
+         """
+         params = {"limit": body.get("limit", 10)}
+
+         while True:
+             response = self.client.post(url, json=body, params=params)
+             check_api_response(response)
+             data = response.json()
+
+             yield from (Item.from_dict(feature) for feature in data.get("features", []))
+
+             next_href = self._get_next_link(data)
+             if not next_href:
+                 break
+
+             token = self._extract_pagination_token(next_href)
+             if not token:
+                 break
+             params["cursor"] = token
+
+     def _get_next_link(self, data: dict) -> Optional[str]:
+         """Extract the next page link from the response."""
+         next_link = next(
+             (link for link in data.get("links", []) if link.get("rel") == "next"), None
+         )
+         return next_link.get("href", "") if next_link else None
+
+     def _extract_pagination_token(self, next_href: str) -> Optional[str]:
+         """Extract the pagination token from the next link URL.
+
+         Args:
+             next_href (str): The next page URL.
+
+         Returns:
+             Optional[str]: The extracted token, or None if parsing fails.
+
+         Raises:
+             DatacosmosException: If pagination token extraction fails.
+         """
+         try:
+             return next_href.split("?")[1].split("=")[-1]
+         except (IndexError, AttributeError) as e:
+             # These parsing errors carry no HTTP response object, so only the message is raised.
+             raise DatacosmosException(
+                 f"Failed to parse pagination token from {next_href}"
+             ) from e
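
For orientation, a minimal usage sketch of the ItemClient defined above. It assumes an already-configured DatacosmosClient (its construction and configuration are not part of this hunk), and the item and collection IDs are placeholders.

```python
# Hedged sketch: DatacosmosClient construction is assumed, not shown in this diff.
from datacosmos.datacosmos_client import DatacosmosClient
from datacosmos.stac.item.item_client import ItemClient

client = DatacosmosClient()  # assumes the client can be built from default config
item_client = ItemClient(client)

# Fetch a single item by ID from a collection (IDs are placeholders).
item = item_client.fetch_item(item_id="img-001", collection_id="my-collection")
print(item.id, item.properties.get("datetime"))
```
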
datacosmos/stac/item/models/__init__.py
@@ -0,0 +1 @@
+ """Models for the Item Client."""
datacosmos/stac/item/models/asset.py
@@ -0,0 +1,23 @@
+ """Model representing a datacosmos item asset."""
+
+ from pydantic import BaseModel, Field
+
+ from datacosmos.stac.item.models.eo_band import EoBand
+ from datacosmos.stac.item.models.raster_band import RasterBand
+
+
+ class Asset(BaseModel):
+     """Model representing a datacosmos item asset."""
+
+     href: str
+     title: str
+     description: str
+     type: str
+     roles: list[str] | None
+     eo_bands: list[EoBand] | None = Field(default=None, alias="eo:bands")
+     raster_bands: list[RasterBand] | None = Field(default=None, alias="raster:bands")
+
+     class Config:
+         """Pydantic configuration."""
+
+         populate_by_name = True
datacosmos/stac/item/models/datacosmos_item.py
@@ -0,0 +1,55 @@
+ """Model representing a datacosmos item."""
+
+ from datetime import datetime
+
+ from pydantic import BaseModel
+
+ from datacosmos.stac.enums.level import Level
+ from datacosmos.stac.item.models.asset import Asset
+
+
+ class DatacosmosItem(BaseModel):
+     """Model representing a datacosmos item."""
+
+     id: str
+     type: str
+     stac_version: str
+     stac_extensions: list | None
+     geometry: dict
+     properties: dict
+     links: list
+     assets: dict[str, Asset]
+     collection: str
+     bbox: tuple[float, float, float, float]
+
+     def get_property(self, key: str) -> str | None:
+         """Get a property value from the Datacosmos item."""
+         return self.properties.get(key)
+
+     def get_asset(self, key: str) -> Asset | None:
+         """Get an asset from the Datacosmos item."""
+         return self.assets.get(key)
+
+     @property
+     def datetime(self) -> datetime:
+         """Get the datetime of the Datacosmos item."""
+         return datetime.strptime(self.properties["datetime"], "%Y-%m-%dT%H:%M:%SZ")
+
+     @property
+     def level(self) -> Level:
+         """Get the processing level of the Datacosmos item."""
+         return Level(self.properties["processing:level"].lower())
+
+     @property
+     def sat_int_designator(self) -> str:
+         """Get the satellite international designator of the Datacosmos item."""
+         property = self.get_property("sat:platform_international_designator")
+         if property is None:
+             raise ValueError(
+                 "sat:platform_international_designator is missing in STAC item"
+             )
+         return property
+
+     def to_dict(self) -> dict:
+         """Converts the DatacosmosItem instance to a dictionary."""
+         return self.model_dump()
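
A small, illustrative sketch of the model above: the dictionary is a made-up minimal item, and the derived datetime property relies on the properties["datetime"] format used throughout this package.

```python
from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem

raw = {  # illustrative values only
    "id": "img-001",
    "type": "Feature",
    "stac_version": "1.0.0",
    "stac_extensions": [],
    "geometry": {"type": "Point", "coordinates": [0.0, 0.0]},
    "properties": {"datetime": "2025-01-01T00:00:00Z", "processing:level": "L1A"},
    "links": [],
    "assets": {},
    "collection": "my-collection",
    "bbox": (0.0, 0.0, 0.0, 0.0),
}
item = DatacosmosItem(**raw)
print(item.datetime)                          # parsed from properties["datetime"]
print(item.get_property("processing:level"))  # raw property access
```
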
datacosmos/stac/item/models/eo_band.py
@@ -0,0 +1,15 @@
+ """Model representing an EO band."""
+
+ from typing import Optional
+
+ from pydantic import BaseModel
+
+
+ class EoBand(BaseModel):
+     """Model representing an EO band."""
+
+     name: str
+     common_name: str
+     center_wavelength: float
+     full_width_half_max: float
+     solar_illumination: Optional[float] = None
datacosmos/stac/item/models/item_update.py
@@ -0,0 +1,57 @@
+ """Model representing a partial update for a STAC item."""
+
+ from typing import Any, Optional
+
+ from pydantic import BaseModel, Field, model_validator
+ from pystac import Asset, Link
+
+
+ class ItemUpdate(BaseModel):
+     """Model representing a partial update for a STAC item."""
+
+     model_config = {"arbitrary_types_allowed": True}
+
+     stac_extensions: Optional[list[str]] = None
+     geometry: Optional[dict[str, Any]] = None
+     bbox: Optional[list[float]] = Field(
+         None, min_items=4, max_items=4
+     )  # Must be [minX, minY, maxX, maxY]
+     properties: Optional[dict[str, Any]] = None
+     assets: Optional[dict[str, Asset]] = None
+     links: Optional[list[Link]] = None
+
+     def set_geometry(self, geom_type: str, coordinates: list[Any]) -> None:
+         """Set the geometry manually without using shapely.
+
+         Args:
+             geom_type (str): The type of geometry (e.g., 'Point', 'Polygon').
+             coordinates (list[Any]): The coordinates defining the geometry.
+         """
+         self.geometry = {"type": geom_type, "coordinates": coordinates}
+
+     @staticmethod
+     def has_valid_datetime(properties: dict[str, Any]) -> bool:
+         """Check if 'datetime' is present and not None."""
+         return properties.get("datetime") is not None
+
+     @staticmethod
+     def has_valid_datetime_range(properties: dict[str, Any]) -> bool:
+         """Check if both 'start_datetime' and 'end_datetime' are present and not None."""
+         return all(
+             properties.get(key) is not None
+             for key in ["start_datetime", "end_datetime"]
+         )
+
+     @model_validator(mode="before")
+     def validate_datetime_fields(cls, values):
+         """Ensure at least one of 'datetime' or 'start_datetime'/'end_datetime' exists."""
+         properties = values.get("properties", {})
+
+         if not cls.has_valid_datetime(properties) and not cls.has_valid_datetime_range(
+             properties
+         ):
+             raise ValueError(
+                 "Either 'datetime' or both 'start_datetime' and 'end_datetime' must be provided."
+             )
+
+         return values
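
As a sketch, a partial update built with this model and applied through ItemClient.update_item from earlier in this diff; the property values are placeholders, and the before-validator requires either 'datetime' or a start/end pair in properties.

```python
from datacosmos.stac.item.models.item_update import ItemUpdate

update = ItemUpdate(
    properties={
        "datetime": "2025-01-01T00:00:00Z",  # required by the before-validator above
        "eo:cloud_cover": 12.5,              # illustrative extra property
    }
)
# item_client is the ItemClient instance sketched earlier in this diff.
item_client.update_item(
    item_id="img-001", collection_id="my-collection", update_data=update
)
```
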
datacosmos/stac/item/models/raster_band.py
@@ -0,0 +1,17 @@
+ """Model representing a raster band."""
+
+ from pydantic import BaseModel, Field
+
+
+ class RasterBand(BaseModel):
+     """Model representing a raster band."""
+
+     gain: float = Field(alias="scale")
+     bias: float = Field(alias="offset")
+     nodata: int
+     unit: str
+
+     class Config:
+         """Pydantic configuration."""
+
+         populate_by_name = True
datacosmos/stac/item/models/search_parameters.py
@@ -0,0 +1,58 @@
+ """Module defining the SearchParameters model for STAC API queries, encapsulating filtering criteria.
+
+ It includes spatial, temporal, and property-based filters for querying STAC items efficiently.
+ """
+
+ from typing import Optional, Union
+
+ from pydantic import BaseModel, Field, model_validator
+
+
+ class SearchParameters(BaseModel):
+     """Encapsulates the parameters for the STAC search API with validation."""
+
+     bbox: Optional[list[float]] = Field(
+         None,
+         description="Bounding box filter [minX, minY, maxX, maxY]. Optional six values for 3D bounding box.",
+         example=[-180.0, -90.0, 180.0, 90.0],
+     )
+     datetime_range: Optional[str] = Field(
+         None,
+         alias="datetime",
+         description=(
+             "Temporal filter, either a single RFC 3339 datetime or an interval. "
+             'Example: "2025-01-01T00:00:00Z/.."'
+         ),
+     )
+     intersects: Optional[dict] = Field(
+         None, description="GeoJSON geometry filter, e.g., a Polygon or Point."
+     )
+     ids: Optional[list[str]] = Field(
+         None,
+         description="Array of item IDs to filter by.",
+         example=["item1", "item2"],
+     )
+     collections: Optional[list[str]] = Field(
+         None,
+         description="Array of collection IDs to filter by.",
+         example=["collection1", "collection2"],
+     )
+     limit: Optional[int] = Field(
+         None,
+         ge=1,
+         le=10000,
+         description="Maximum number of items per page. Default: 10, Max: 10000.",
+         example=10,
+     )
+     query: Optional[dict[str, dict[str, Union[str, int, float]]]] = Field(
+         None,
+         description="Additional property filters, e.g., { 'cloud_coverage': { 'lt': 10 } }.",
+     )
+
+     @model_validator(mode="before")
+     def validate_bbox(cls, values):
+         """Validate that the `bbox` field contains either 4 or 6 values."""
+         bbox = values.get("bbox")
+         if bbox and len(bbox) not in {4, 6}:
+             raise ValueError("bbox must contain 4 or 6 values.")
+         return values
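
An illustrative query using this model with ItemClient.search_items; note that the temporal filter is passed via the "datetime" alias, and all values here are placeholders.

```python
from datacosmos.stac.item.models.search_parameters import SearchParameters

params = SearchParameters(
    collections=["my-collection"],
    bbox=[-10.0, 35.0, 5.0, 45.0],
    datetime="2025-01-01T00:00:00Z/2025-02-01T00:00:00Z",  # populated via the field alias
    limit=50,
)
# item_client is the ItemClient instance sketched earlier in this diff.
for found in item_client.search_items(params):
    print(found.id)
```
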
datacosmos/stac/stac_client.py
@@ -0,0 +1,12 @@
+ """Unified interface for STAC API, combining Item & Collection operations."""
+
+ from datacosmos.stac.collection.collection_client import CollectionClient
+ from datacosmos.stac.item.item_client import ItemClient
+
+
+ class STACClient(ItemClient, CollectionClient):
+     """Unified interface for STAC API, combining Item & Collection operations."""
+
+     def __init__(self, client):
+         """Initialize the STACClient with a DatacosmosClient."""
+         super().__init__(client)
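
Since STACClient combines ItemClient and CollectionClient by inheritance, a caller can use one object for both sides of the API; a minimal sketch, again assuming an already-configured DatacosmosClient from the earlier sketches:

```python
from datacosmos.stac.stac_client import STACClient

stac = STACClient(client)  # client: the DatacosmosClient instance assumed earlier
item = stac.fetch_item(item_id="img-001", collection_id="my-collection")
```
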
datacosmos/uploader/__init__.py
@@ -0,0 +1 @@
+ """Uploader package for interacting with the Uploader API, providing upload functionalities to the datacosmos cloud storage."""
datacosmos/uploader/dataclasses/__init__.py
@@ -0,0 +1 @@
+ """Dataclasses for the uploader module."""
datacosmos/uploader/dataclasses/upload_path.py
@@ -0,0 +1,93 @@
+ """Dataclass for retrieving the upload path of a file."""
+
+ from dataclasses import dataclass
+ from datetime import datetime
+ from pathlib import Path
+
+ import structlog
+
+ from datacosmos.stac.enums.level import Level
+ from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem
+ from datacosmos.utils.missions import get_mission_id
+
+ logger = structlog.get_logger()
+
+
+ @dataclass
+ class UploadPath:
+     """Dataclass for retrieving the upload path of a file."""
+
+     mission: str
+     level: Level
+     day: int
+     month: int
+     year: int
+     id: str
+     path: str
+
+     def __str__(self):
+         """Return a human-readable string representation of the Path."""
+         path = f"full/{self.mission.lower()}/{self.level.value.lower()}/{self.year:02}/{self.month:02}/{self.day:02}/{self.id}/{self.path}"
+         return path.removesuffix("/")
+
+     @classmethod
+     def from_item_path(
+         cls, item: DatacosmosItem, mission: str, item_path: str
+     ) -> "Path":
+         """Create a Path instance from a DatacosmosItem and a path."""
+         for asset in item.assets.values():
+             if mission == "":
+                 mission = cls._get_mission_name(asset.href)
+             else:
+                 break
+         dt = datetime.strptime(item.properties["datetime"], "%Y-%m-%dT%H:%M:%SZ")
+         path = UploadPath(
+             mission=mission,
+             level=Level(item.properties["processing:level"].lower()),
+             day=dt.day,
+             month=dt.month,
+             year=dt.year,
+             id=item.id,
+             path=item_path,
+         )
+         return cls(**path.__dict__)
+
+     @classmethod
+     def from_path(cls, path: str) -> "Path":
+         """Create a Path instance from a string path."""
+         parts = path.split("/")
+         if len(parts) < 7:
+             raise ValueError(f"Invalid path {path}")
+         return cls(
+             mission=parts[0],
+             level=Level(parts[1]),
+             day=int(parts[4]),
+             month=int(parts[3]),
+             year=int(parts[2]),
+             id=parts[5],
+             path="/".join(parts[6:]),
+         )
+
+     @classmethod
+     def _get_mission_name(cls, href: str) -> str:
+         mission = ""
+         # bruteforce mission name from asset path
+         # traverse the path and check if any part is a mission name (generates a mission id)
+         href_parts = href.split("/")
+         for idx, part in enumerate(href_parts):
+             try:
+                 # when an id is found, then the mission name is valid
+                 get_mission_id(
+                     part, "test"
+                 )  # using test as it is more wide and anything on prod should exists on test
+             except KeyError:
+                 continue
+             # validate the mission name by checking if the path is correct
+             # using the same logic as the __str__ method
+             mission = part.lower()
+             h = "/".join(["full", *href_parts[idx:]])
+             p = UploadPath.from_path("/".join([mission, *href_parts[idx + 1 :]]))
+             if str(p) != h:
+                 raise ValueError(f"Could not find mission name in asset path {href}")
+             break
+         return mission
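
A round-trip sketch of the dataclass above: parse a storage path and render it back. The mission and level values are placeholders, and it assumes "l1a" is a valid member of the Level enum (defined in datacosmos/stac/enums/level.py, which is not shown in this section).

```python
from datacosmos.uploader.dataclasses.upload_path import UploadPath

p = UploadPath.from_path("mantis/l1a/2025/01/31/img-001/assets/thumbnail.png")
print(p.mission, p.year, p.month, p.day, p.id)
print(str(p))  # -> "full/mantis/l1a/2025/01/31/img-001/assets/thumbnail.png"
```
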
datacosmos/uploader/datacosmos_uploader.py
@@ -0,0 +1,106 @@
+ """Module for uploading files to Datacosmos cloud storage and registering STAC items."""
+
+ from concurrent.futures import ThreadPoolExecutor
+ from pathlib import Path
+
+ from pydantic import TypeAdapter
+
+ from datacosmos.datacosmos_client import DatacosmosClient
+ from datacosmos.stac.item.item_client import ItemClient
+ from datacosmos.stac.item.models.datacosmos_item import DatacosmosItem
+ from datacosmos.uploader.dataclasses.upload_path import UploadPath
+ from datacosmos.utils.missions import get_mission_name
+
+
+ class DatacosmosUploader:
+     """Handles uploading files to Datacosmos storage and registering STAC items."""
+
+     def __init__(self, client: DatacosmosClient):
+         """Initialize the uploader with DatacosmosClient."""
+         mission_id = client.config.mission_id
+         environment = client.config.environment
+
+         self.datacosmos_client = client
+         self.item_client = ItemClient(client)
+         self.mission_name = (
+             get_mission_name(mission_id, environment) if mission_id != 0 else ""
+         )
+         self.base_url = client.config.datacosmos_cloud_storage.as_domain_url()
+
+     def upload_and_register_item(self, item_json_file_path: str) -> None:
+         """Uploads files to Datacosmos storage and registers a STAC item.
+
+         Args:
+             item_json_file_path (str): Path to the STAC item JSON file.
+         """
+         item = self._load_item(item_json_file_path)
+         collection_id, item_id = item.collection, item.id
+         dirname = str(Path(item_json_file_path).parent / Path(item_json_file_path).stem)
+
+         self._delete_existing_item(collection_id, item_id)
+         upload_path = self._get_upload_path(item)
+         self.upload_from_folder(dirname, upload_path)
+
+         self._update_item_assets(item)
+
+         self.item_client.create_item(collection_id, item)
+
+     def upload_file(self, src: str, dst: str) -> None:
+         """Uploads a single file to the specified destination path."""
+         url = self.base_url.with_suffix(str(dst))
+
+         with open(src, "rb") as f:
+             response = self.datacosmos_client.put(url, data=f)
+             response.raise_for_status()
+
+     def upload_from_folder(self, src: str, dst: UploadPath, workers: int = 4) -> None:
+         """Uploads all files from a folder to the destination path in parallel."""
+         if Path(dst.path).is_file():
+             raise ValueError(f"Destination path should not be a file path {dst}")
+
+         if Path(src).is_file():
+             raise ValueError(f"Source path should not be a file path {src}")
+
+         with ThreadPoolExecutor(max_workers=workers) as executor:
+             futures = []
+             for file in Path(src).rglob("*"):
+                 if file.is_file():
+                     dst = UploadPath(
+                         mission=dst.mission,
+                         level=dst.level,
+                         day=dst.day,
+                         month=dst.month,
+                         year=dst.year,
+                         id=dst.id,
+                         path=str(file.relative_to(src)),
+                     )
+                     futures.append(executor.submit(self.upload_file, str(file), dst))
+             for future in futures:
+                 future.result()
+
+     @staticmethod
+     def _load_item(item_json_file_path: str) -> DatacosmosItem:
+         """Loads and validates the STAC item from a JSON file."""
+         with open(item_json_file_path, "rb") as file:
+             data = file.read().decode("utf-8")
+             return TypeAdapter(DatacosmosItem).validate_json(data)
+
+     def _delete_existing_item(self, collection_id: str, item_id: str) -> None:
+         """Deletes an existing item if it already exists."""
+         try:
+             self.item_client.delete_item(item_id, collection_id)
+         except Exception:  # nosec
+             pass  # Ignore if item doesn't exist
+
+     def _get_upload_path(self, item: DatacosmosItem) -> str:
+         """Constructs the storage upload path based on the item and mission name."""
+         return UploadPath.from_item_path(item, self.mission_name, "")
+
+     def _update_item_assets(self, item: DatacosmosItem) -> None:
+         """Updates the item's assets with uploaded file URLs."""
+         for asset in item.assets.values():
+             try:
+                 url = self.base_url
+                 asset.href = url.with_base(asset.href)  # type: ignore
+             except ValueError:
+                 pass
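
An end-to-end sketch of the uploader above: it assumes the client configuration exposes mission_id, environment, and a datacosmos_cloud_storage URL (as read by __init__), and that products/img-001.json sits next to a folder products/img-001/ containing the assets; none of those paths or settings come from this diff.

```python
from datacosmos.datacosmos_client import DatacosmosClient
from datacosmos.uploader.datacosmos_uploader import DatacosmosUploader

client = DatacosmosClient()  # construction/configuration assumed, not shown here
uploader = DatacosmosUploader(client)

# Uploads every file under products/img-001/ and then registers the STAC item.
uploader.upload_and_register_item("products/img-001.json")
```
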
datacosmos/utils/__init__.py
@@ -0,0 +1 @@
+ """Http response and url utils for datacosmos."""
datacosmos/utils/constants.py
@@ -0,0 +1,16 @@
+ """Package for storing constants."""
+
+ TEST_MISSION_NAMES = {
+     55: "MENUT",
+     56: "PHISAT-2",
+     57: "HAMMER",
+     63: "MANTIS",
+     64: "PLATERO",
+ }
+ PROD_MISSION_NAMES = {
+     23: "MENUT",
+     29: "MANTIS",
+     35: "PHISAT-2",
+     37: "PLATERO",
+     48: "HAMMER",
+ }
datacosmos/utils/http_response/__init__.py
@@ -0,0 +1 @@
+ """Validates an API response."""