datacosmos 0.0.1__py3-none-any.whl → 0.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of datacosmos might be problematic.
- config/__init__.py +5 -0
- config/config.py +167 -0
- config/models/__init__.py +1 -0
- config/models/m2m_authentication_config.py +23 -0
- config/models/url.py +35 -0
- datacosmos/exceptions/__init__.py +1 -0
- datacosmos/exceptions/datacosmos_exception.py +27 -0
- datacosmos/stac/__init__.py +5 -0
- datacosmos/stac/collection/__init__.py +4 -0
- datacosmos/stac/collection/collection_client.py +149 -0
- datacosmos/stac/collection/models/__init__.py +1 -0
- datacosmos/stac/collection/models/collection_update.py +46 -0
- datacosmos/stac/item/__init__.py +4 -0
- datacosmos/stac/item/item_client.py +185 -0
- datacosmos/stac/item/models/__init__.py +1 -0
- datacosmos/stac/item/models/item_update.py +57 -0
- datacosmos/stac/item/models/search_parameters.py +58 -0
- datacosmos/stac/stac_client.py +12 -0
- datacosmos/utils/__init__.py +1 -0
- datacosmos/utils/http_response/__init__.py +1 -0
- datacosmos/utils/http_response/check_api_response.py +34 -0
- datacosmos/utils/http_response/models/__init__.py +1 -0
- datacosmos/utils/http_response/models/datacosmos_error.py +26 -0
- datacosmos/utils/http_response/models/datacosmos_response.py +11 -0
- datacosmos/utils/url.py +37 -0
- {datacosmos-0.0.1.dist-info → datacosmos-0.0.2.dist-info}/METADATA +1 -1
- datacosmos-0.0.2.dist-info/RECORD +32 -0
- {datacosmos-0.0.1.dist-info → datacosmos-0.0.2.dist-info}/top_level.txt +1 -0
- datacosmos-0.0.1.dist-info/RECORD +0 -7
- {datacosmos-0.0.1.dist-info → datacosmos-0.0.2.dist-info}/LICENSE.md +0 -0
- {datacosmos-0.0.1.dist-info → datacosmos-0.0.2.dist-info}/WHEEL +0 -0
config/__init__.py
ADDED
config/config.py
ADDED
@@ -0,0 +1,167 @@
"""Configuration module for the Datacosmos SDK.

Handles configuration management using Pydantic and Pydantic Settings.
It loads default values, allows overrides via YAML configuration files,
and supports environment variable-based overrides.
"""

import os
from typing import ClassVar, Optional

import yaml
from pydantic import field_validator
from pydantic_settings import BaseSettings, SettingsConfigDict

from config.models.m2m_authentication_config import M2MAuthenticationConfig
from config.models.url import URL


class Config(BaseSettings):
    """Centralized configuration for the Datacosmos SDK."""

    model_config = SettingsConfigDict(
        env_nested_delimiter="__",
        nested_model_default_partial_update=True,
        extra="allow",
    )

    authentication: Optional[M2MAuthenticationConfig] = None
    stac: Optional[URL] = None

    DEFAULT_AUTH_TYPE: ClassVar[str] = "m2m"
    DEFAULT_AUTH_TOKEN_URL: ClassVar[str] = "https://login.open-cosmos.com/oauth/token"
    DEFAULT_AUTH_AUDIENCE: ClassVar[str] = "https://beeapp.open-cosmos.com"

    @classmethod
    def from_yaml(cls, file_path: str = "config/config.yaml") -> "Config":
        """Load configuration from a YAML file and override defaults.

        Args:
            file_path (str): The path to the YAML configuration file.

        Returns:
            Config: An instance of the Config class with loaded settings.
        """
        config_data: dict = {}
        if os.path.exists(file_path):
            with open(file_path, "r") as f:
                yaml_data = yaml.safe_load(f) or {}
                # Remove empty values from YAML to avoid overwriting with `None`
                config_data = {
                    key: value
                    for key, value in yaml_data.items()
                    if value not in [None, ""]
                }

        return cls(**config_data)

    @classmethod
    def from_env(cls) -> "Config":
        """Load configuration from environment variables.

        Returns:
            Config: An instance of the Config class with settings loaded from environment variables.
        """
        authentication_config = M2MAuthenticationConfig(
            type=os.getenv("OC_AUTH_TYPE", cls.DEFAULT_AUTH_TYPE),
            client_id=os.getenv("OC_AUTH_CLIENT_ID"),
            client_secret=os.getenv("OC_AUTH_CLIENT_SECRET"),
            token_url=os.getenv("OC_AUTH_TOKEN_URL", cls.DEFAULT_AUTH_TOKEN_URL),
            audience=os.getenv("OC_AUTH_AUDIENCE", cls.DEFAULT_AUTH_AUDIENCE),
        )

        stac_config = URL(
            protocol=os.getenv("OC_STAC_PROTOCOL", "https"),
            host=os.getenv("OC_STAC_HOST", "app.open-cosmos.com"),
            port=int(os.getenv("OC_STAC_PORT", "443")),
            path=os.getenv("OC_STAC_PATH", "/api/data/v0/stac"),
        )

        return cls(authentication=authentication_config, stac=stac_config)

    @field_validator("authentication", mode="before")
    @classmethod
    def validate_authentication(
        cls, auth_data: Optional[dict]
    ) -> M2MAuthenticationConfig:
        """Ensure authentication is provided and apply defaults.

        Args:
            auth_data (Optional[dict]): The authentication config as a dictionary.

        Returns:
            M2MAuthenticationConfig: The validated authentication configuration.

        Raises:
            ValueError: If authentication is missing or required fields are not set.
        """
        if not auth_data:
            cls.raise_missing_auth_error()

        auth = cls.parse_auth_config(auth_data)
        auth = cls.apply_auth_defaults(auth)

        cls.check_required_auth_fields(auth)
        return auth

    @staticmethod
    def raise_missing_auth_error():
        """Raise an error when authentication is missing."""
        raise ValueError(
            "M2M authentication is required. Provide it via:\n"
            "1. Explicit instantiation (Config(authentication=...))\n"
            "2. A YAML config file (config.yaml)\n"
            "3. Environment variables (OC_AUTH_CLIENT_ID, OC_AUTH_CLIENT_SECRET, etc.)"
        )

    @staticmethod
    def parse_auth_config(auth_data: dict) -> M2MAuthenticationConfig:
        """Convert dictionary input to M2MAuthenticationConfig object."""
        return (
            M2MAuthenticationConfig(**auth_data)
            if isinstance(auth_data, dict)
            else auth_data
        )

    @classmethod
    def apply_auth_defaults(
        cls, auth: M2MAuthenticationConfig
    ) -> M2MAuthenticationConfig:
        """Apply default authentication values if they are missing."""
        auth.type = auth.type or cls.DEFAULT_AUTH_TYPE
        auth.token_url = auth.token_url or cls.DEFAULT_AUTH_TOKEN_URL
        auth.audience = auth.audience or cls.DEFAULT_AUTH_AUDIENCE
        return auth

    @staticmethod
    def check_required_auth_fields(auth: M2MAuthenticationConfig):
        """Ensure required fields (client_id, client_secret) are provided."""
        missing_fields = [
            field
            for field in ("client_id", "client_secret")
            if not getattr(auth, field)
        ]
        if missing_fields:
            raise ValueError(
                f"Missing required authentication fields: {', '.join(missing_fields)}"
            )

    @field_validator("stac", mode="before")
    @classmethod
    def validate_stac(cls, stac_config: Optional[URL]) -> URL:
        """Ensure STAC configuration has a default if not explicitly set.

        Args:
            stac_config (Optional[URL]): The STAC config to validate.

        Returns:
            URL: The validated STAC configuration.
        """
        if stac_config is None:
            return URL(
                protocol="https",
                host="app.open-cosmos.com",
                port=443,
                path="/api/data/v0/stac",
            )
        return stac_config
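Usage sketch (not part of the diff): loading this Config from environment variables. The OC_AUTH_* values below are placeholders, not real credentials; Config.from_yaml("config/config.yaml") works the same way when a YAML file is preferred.

import os

from config.config import Config

os.environ.setdefault("OC_AUTH_CLIENT_ID", "my-client-id")          # placeholder credential
os.environ.setdefault("OC_AUTH_CLIENT_SECRET", "my-client-secret")  # placeholder credential

config = Config.from_env()
print(config.stac.as_domain_url().string())
# https://app.open-cosmos.com/api/data/v0/stac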
config/models/__init__.py
ADDED
@@ -0,0 +1 @@
"""Models for configuration settings."""
config/models/m2m_authentication_config.py
ADDED
@@ -0,0 +1,23 @@
"""Module for configuring machine-to-machine (M2M) authentication.

Used when running scripts in the cluster that require automated authentication
without user interaction.
"""

from typing import Literal

from pydantic import BaseModel


class M2MAuthenticationConfig(BaseModel):
    """Configuration for machine-to-machine authentication.

    This is used when running scripts in the cluster that require authentication
    with client credentials.
    """

    type: Literal["m2m"]
    client_id: str
    token_url: str
    audience: str
    client_secret: str
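Usage sketch (not part of the diff): constructing the model directly. The client_id and client_secret are placeholder values; the token URL and audience shown are the defaults defined in config/config.py.

from config.models.m2m_authentication_config import M2MAuthenticationConfig

auth = M2MAuthenticationConfig(
    type="m2m",                                             # only "m2m" is accepted
    client_id="my-client-id",                               # placeholder
    client_secret="my-client-secret",                       # placeholder
    token_url="https://login.open-cosmos.com/oauth/token",
    audience="https://beeapp.open-cosmos.com",
)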
config/models/url.py
ADDED
@@ -0,0 +1,35 @@
"""Module defining a structured URL configuration model.

Ensures that URLs contain required components such as protocol, host,
port, and path.
"""

from pydantic import BaseModel

from datacosmos.utils.url import URL as DomainURL


class URL(BaseModel):
    """Generic configuration model for a URL.

    This class provides attributes to store URL components and a method
    to convert them into a `DomainURL` instance.
    """

    protocol: str
    host: str
    port: int
    path: str

    def as_domain_url(self) -> DomainURL:
        """Convert the URL instance to a `DomainURL` object.

        Returns:
            DomainURL: A domain-specific URL object.
        """
        return DomainURL(
            protocol=self.protocol,
            host=self.host,
            port=self.port,
            base=self.path,
        )
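Usage sketch (not part of the diff): the configuration model converts into the domain URL defined in datacosmos/utils/url.py, which the STAC clients then use to build request URLs.

from config.models.url import URL

stac = URL(protocol="https", host="app.open-cosmos.com", port=443, path="/api/data/v0/stac")
print(stac.as_domain_url().with_suffix("/collections"))
# https://app.open-cosmos.com/api/data/v0/stac/collections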
datacosmos/exceptions/__init__.py
ADDED
@@ -0,0 +1 @@
"""Exceptions for the datacosmos package."""
datacosmos/exceptions/datacosmos_exception.py
ADDED
@@ -0,0 +1,27 @@
"""Base exception class for all Datacosmos SDK exceptions."""

from typing import Optional

from requests import Response
from requests.exceptions import RequestException


class DatacosmosException(RequestException):
    """Base exception class for all Datacosmos SDK exceptions."""

    def __init__(self, message: str, response: Optional[Response] = None):
        """Initialize DatacosmosException.

        Args:
            message (str): The error message.
            response (Optional[Response]): The HTTP response object, if available.
        """
        self.response = response
        self.status_code = response.status_code if response else None
        self.details = response.text if response else None
        full_message = (
            f"{message} (Status: {self.status_code}, Details: {self.details})"
            if response
            else message
        )
        super().__init__(full_message)
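Usage sketch (not part of the diff): catching the exception. When no HTTP response is attached, status_code and details are simply None.

from datacosmos.exceptions.datacosmos_exception import DatacosmosException

try:
    raise DatacosmosException("Failed to fetch item")   # no Response attached
except DatacosmosException as exc:
    print(exc)              # Failed to fetch item
    print(exc.status_code)  # None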
datacosmos/stac/collection/collection_client.py
ADDED
@@ -0,0 +1,149 @@
"""Handles operations related to STAC collections."""

from typing import Generator, Optional

from pystac import Collection, Extent, SpatialExtent, TemporalExtent
from pystac.utils import str_to_datetime

from datacosmos.datacosmos_client import DatacosmosClient
from datacosmos.stac.collection.models.collection_update import CollectionUpdate
from datacosmos.utils.http_response.check_api_response import check_api_response


class CollectionClient:
    """Handles operations related to STAC collections."""

    def __init__(self, client: DatacosmosClient):
        """Initialize the CollectionClient with a DatacosmosClient."""
        self.client = client
        self.base_url = client.config.stac.as_domain_url()

    def fetch_collection(self, collection_id: str) -> Collection:
        """Fetch details of an existing STAC collection."""
        url = self.base_url.with_suffix(f"/collections/{collection_id}")
        response = self.client.get(url)
        check_api_response(response)
        return Collection.from_dict(response.json())

    def create_collection(self, collection: Collection) -> None:
        """Create a new STAC collection.

        Args:
            collection (Collection): The STAC collection to create.

        Raises:
            InvalidRequest: If the collection data is malformed.
        """
        if isinstance(collection.extent, dict):
            spatial_data = collection.extent.get("spatial", {}).get("bbox", [[]])
            temporal_data = collection.extent.get("temporal", {}).get("interval", [[]])

            # Convert string timestamps to datetime objects
            parsed_temporal = []
            for interval in temporal_data:
                start = str_to_datetime(interval[0]) if interval[0] else None
                end = (
                    str_to_datetime(interval[1])
                    if len(interval) > 1 and interval[1]
                    else None
                )
                parsed_temporal.append([start, end])

            collection.extent = Extent(
                spatial=SpatialExtent(spatial_data),
                temporal=TemporalExtent(parsed_temporal),
            )

        url = self.base_url.with_suffix("/collections")
        response = self.client.post(url, json=collection.to_dict())
        check_api_response(response)

    def update_collection(
        self, collection_id: str, update_data: CollectionUpdate
    ) -> None:
        """Update an existing STAC collection."""
        url = self.base_url.with_suffix(f"/collections/{collection_id}")
        response = self.client.patch(
            url, json=update_data.model_dump(by_alias=True, exclude_none=True)
        )
        check_api_response(response)

    def delete_collection(self, collection_id: str) -> None:
        """Delete a STAC collection by its ID."""
        url = self.base_url.with_suffix(f"/collections/{collection_id}")
        response = self.client.delete(url)
        check_api_response(response)

    def fetch_all_collections(self) -> Generator[Collection, None, None]:
        """Fetch all STAC collections with pagination support."""
        url = self.base_url.with_suffix("/collections")
        params = {"limit": 10}

        while True:
            data = self._fetch_collections_page(url, params)
            yield from self._parse_collections(data)

            next_cursor = self._get_next_pagination_cursor(data)
            if not next_cursor:
                break

            params["cursor"] = next_cursor

    def _fetch_collections_page(self, url: str, params: dict) -> dict:
        """Fetch a single page of collections from the API."""
        response = self.client.get(url, params=params)
        check_api_response(response)

        data = response.json()

        if isinstance(data, list):
            return {"collections": data}

        return data

    def _parse_collections(self, data: dict) -> Generator[Collection, None, None]:
        """Convert API response data to STAC Collection objects, ensuring required fields exist."""
        return (
            Collection.from_dict(
                {
                    **collection,
                    "type": collection.get("type", "Collection"),
                    "id": collection.get("id", ""),
                    "stac_version": collection.get("stac_version", "1.0.0"),
                    "extent": collection.get(
                        "extent",
                        {"spatial": {"bbox": []}, "temporal": {"interval": []}},
                    ),
                    "links": collection.get("links", []) or [],
                    "properties": collection.get("properties", {}),
                }
            )
            for collection in data.get("collections", [])
            if collection.get("type") == "Collection"
        )

    def _get_next_pagination_cursor(self, data: dict) -> Optional[str]:
        """Extract the next pagination token from the response."""
        next_href = self._get_next_link(data)
        return self._extract_pagination_token(next_href) if next_href else None

    def _get_next_link(self, data: dict) -> Optional[str]:
        """Extract the next page link from the response."""
        next_link = next(
            (link for link in data.get("links", []) if link.get("rel") == "next"), None
        )
        return next_link.get("href", "") if next_link else None

    def _extract_pagination_token(self, next_href: str) -> Optional[str]:
        """Extract the pagination token from the next link URL.

        Args:
            next_href (str): The next page URL.

        Returns:
            Optional[str]: The extracted token, or None if parsing fails.
        """
        try:
            return next_href.split("?")[1].split("=")[-1]
        except (IndexError, AttributeError):
            raise InvalidRequest(f"Failed to parse pagination token from {next_href}")
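Usage sketch (not part of the diff): listing collections. DatacosmosClient ships in 0.0.1 and its constructor is not shown in this diff, so DatacosmosClient(config) is an assumed signature.

from config.config import Config
from datacosmos.datacosmos_client import DatacosmosClient
from datacosmos.stac.collection.collection_client import CollectionClient

config = Config.from_env()          # assumes OC_AUTH_* variables are set
client = DatacosmosClient(config)   # assumed constructor; DatacosmosClient is not part of this diff
collections = CollectionClient(client)

for collection in collections.fetch_all_collections():
    print(collection.id)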
datacosmos/stac/collection/models/__init__.py
ADDED
@@ -0,0 +1 @@
"""Models for the Collection Client."""
datacosmos/stac/collection/models/collection_update.py
ADDED
@@ -0,0 +1,46 @@
"""Represents a structured update model for STAC collections.

Allows partial updates where only the provided fields are modified.
"""
from typing import Any, Dict, List, Optional

from pydantic import BaseModel, Field
from pystac import Extent, Link, Provider, Summaries


class CollectionUpdate(BaseModel):
    """Represents a structured update model for STAC collections.

    Allows partial updates where only the provided fields are modified.
    """

    model_config = {"arbitrary_types_allowed": True}

    title: Optional[str] = Field(None, description="Title of the STAC collection.")
    description: Optional[str] = Field(
        None, description="Description of the collection."
    )
    keywords: Optional[List[str]] = Field(
        None, description="List of keywords associated with the collection."
    )
    license: Optional[str] = Field(None, description="Collection license information.")
    providers: Optional[List[Provider]] = Field(
        None, description="List of data providers."
    )
    extent: Optional[Extent] = Field(
        None, description="Spatial and temporal extent of the collection."
    )
    summaries: Optional[Summaries] = Field(
        None, description="Summaries for the collection."
    )
    links: Optional[List[Link]] = Field(
        None, description="List of links associated with the collection."
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert the model into a dictionary, excluding `None` values.

        Returns:
            Dict[str, Any]: Dictionary representation of the update payload.
        """
        return self.model_dump(by_alias=True, exclude_none=True)
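Usage sketch (not part of the diff): a partial update payload with placeholder values; to_dict() drops every field left as None, so only the provided values are sent by CollectionClient.update_collection.

from datacosmos.stac.collection.models.collection_update import CollectionUpdate

update = CollectionUpdate(
    title="My imagery collection",                        # placeholder values
    description="Refreshed description for the collection.",
    keywords=["optical", "level-2"],
)
print(update.to_dict())   # only the three fields set above appear in the payload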
datacosmos/stac/item/item_client.py
ADDED
@@ -0,0 +1,185 @@
"""STAC Client module for interacting with a STAC (SpatioTemporal Asset Catalog) API.

Provides methods for querying, fetching, creating, updating, and deleting STAC items.
"""

from typing import Generator, Optional

from pystac import Item

from datacosmos.datacosmos_client import DatacosmosClient
from datacosmos.exceptions.datacosmos_exception import DatacosmosException
from datacosmos.stac.item.models.item_update import ItemUpdate
from datacosmos.stac.item.models.search_parameters import SearchParameters
from datacosmos.utils.http_response.check_api_response import check_api_response


class ItemClient:
    """Client for interacting with the STAC API."""

    def __init__(self, client: DatacosmosClient):
        """Initialize the STACClient with a DatacosmosClient.

        Args:
            client (DatacosmosClient): The authenticated Datacosmos client instance.
        """
        self.client = client
        self.base_url = client.config.stac.as_domain_url()

    def fetch_item(self, item_id: str, collection_id: str) -> Item:
        """Fetch a single STAC item by ID.

        Args:
            item_id (str): The ID of the item to fetch.
            collection_id (str): The ID of the collection containing the item.

        Returns:
            Item: The fetched STAC item.
        """
        url = self.base_url.with_suffix(f"/collections/{collection_id}/items/{item_id}")
        response = self.client.get(url)
        check_api_response(response)
        return Item.from_dict(response.json())

    def fetch_collection_items(
        self, collection_id: str, parameters: Optional[SearchParameters] = None
    ) -> Generator[Item, None, None]:
        """Fetch all items in a collection with optional filtering.

        Args:
            collection_id (str): The ID of the collection.
            parameters (Optional[SearchParameters]): Filtering parameters (spatial, temporal, etc.).

        Yields:
            Item: Parsed STAC item.
        """
        if parameters is None:
            parameters = SearchParameters(collections=[collection_id])

        return self.search_items(parameters)

    def search_items(self, parameters: SearchParameters) -> Generator[Item, None, None]:
        """Query the STAC catalog using the POST endpoint with filtering and pagination.

        Args:
            parameters (SearchParameters): The search parameters.

        Yields:
            Item: Parsed STAC item.
        """
        url = self.base_url.with_suffix("/search")
        body = parameters.model_dump(by_alias=True, exclude_none=True)
        return self._paginate_items(url, body)

    def create_item(self, collection_id: str, item: Item) -> None:
        """Create a new STAC item in a specified collection.

        Args:
            collection_id (str): The ID of the collection where the item will be created.
            item (Item): The STAC Item to be created.

        Raises:
            RequestError: If the API returns an error response.
        """
        url = self.base_url.with_suffix(f"/collections/{collection_id}/items")
        item_json: dict = item.to_dict()

        response = self.client.post(url, json=item_json)
        check_api_response(response)

    def update_item(
        self, item_id: str, collection_id: str, update_data: ItemUpdate
    ) -> None:
        """Partially update an existing STAC item.

        Args:
            item_id (str): The ID of the item to update.
            collection_id (str): The ID of the collection containing the item.
            update_data (ItemUpdate): The structured update payload.
        """
        url = self.base_url.with_suffix(f"/collections/{collection_id}/items/{item_id}")

        update_payload = update_data.model_dump(by_alias=True, exclude_none=True)

        if "assets" in update_payload:
            update_payload["assets"] = {
                key: asset.to_dict() for key, asset in update_payload["assets"].items()
            }
        if "links" in update_payload:
            update_payload["links"] = [
                link.to_dict() for link in update_payload["links"]
            ]

        response = self.client.patch(url, json=update_payload)
        check_api_response(response)

    def delete_item(self, item_id: str, collection_id: str) -> None:
        """Delete a STAC item by its ID.

        Args:
            item_id (str): The ID of the item to delete.
            collection_id (str): The ID of the collection containing the item.

        Raises:
            OCError: If the item is not found or deletion is forbidden.
        """
        url = self.base_url.with_suffix(f"/collections/{collection_id}/items/{item_id}")
        response = self.client.delete(url)
        check_api_response(response)

    def _paginate_items(self, url: str, body: dict) -> Generator[Item, None, None]:
        """Handle pagination for the STAC search POST endpoint.

        Fetches items one page at a time using the 'next' link.

        Args:
            url (str): The base URL for the search endpoint.
            body (dict): The request body containing search parameters.

        Yields:
            Item: Parsed STAC item.
        """
        params = {"limit": body.get("limit", 10)}

        while True:
            response = self.client.post(url, json=body, params=params)
            check_api_response(response)
            data = response.json()

            yield from (Item.from_dict(feature) for feature in data.get("features", []))

            next_href = self._get_next_link(data)
            if not next_href:
                break

            token = self._extract_pagination_token(next_href)
            if not token:
                break
            params["cursor"] = token

    def _get_next_link(self, data: dict) -> Optional[str]:
        """Extract the next page link from the response."""
        next_link = next(
            (link for link in data.get("links", []) if link.get("rel") == "next"), None
        )
        return next_link.get("href", "") if next_link else None

    def _extract_pagination_token(self, next_href: str) -> Optional[str]:
        """Extract the pagination token from the next link URL.

        Args:
            next_href (str): The next page URL.

        Returns:
            Optional[str]: The extracted token, or None if parsing fails.

        Raises:
            DatacosmosException: If pagination token extraction fails.
        """
        try:
            return next_href.split("?")[1].split("=")[-1]
        except (IndexError, AttributeError) as e:
            raise DatacosmosException(
                f"Failed to parse pagination token from {next_href}",
                response=e.response,
            ) from e
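Usage sketch (not part of the diff): searching items in one collection, reusing the assumed `client` from the CollectionClient sketch above; "my-collection" is a placeholder ID.

from datacosmos.stac.item.item_client import ItemClient
from datacosmos.stac.item.models.search_parameters import SearchParameters

items = ItemClient(client)   # `client` as in the CollectionClient sketch
params = SearchParameters(collections=["my-collection"], limit=50)

for item in items.search_items(params):
    print(item.id, item.datetime)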
datacosmos/stac/item/models/__init__.py
ADDED
@@ -0,0 +1 @@
"""Models for the Item Client."""
datacosmos/stac/item/models/item_update.py
ADDED
@@ -0,0 +1,57 @@
"""Model representing a partial update for a STAC item."""

from typing import Any, Optional

from pydantic import BaseModel, Field, model_validator
from pystac import Asset, Link


class ItemUpdate(BaseModel):
    """Model representing a partial update for a STAC item."""

    model_config = {"arbitrary_types_allowed": True}

    stac_extensions: Optional[list[str]] = None
    geometry: Optional[dict[str, Any]] = None
    bbox: Optional[list[float]] = Field(
        None, min_items=4, max_items=4
    )  # Must be [minX, minY, maxX, maxY]
    properties: Optional[dict[str, Any]] = None
    assets: Optional[dict[str, Asset]] = None
    links: Optional[list[Link]] = None

    def set_geometry(self, geom_type: str, coordinates: list[Any]) -> None:
        """Set the geometry manually without using shapely.

        Args:
            geom_type (str): The type of geometry (e.g., 'Point', 'Polygon').
            coordinates (list[Any]): The coordinates defining the geometry.
        """
        self.geometry = {"type": geom_type, "coordinates": coordinates}

    @staticmethod
    def has_valid_datetime(properties: dict[str, Any]) -> bool:
        """Check if 'datetime' is present and not None."""
        return properties.get("datetime") is not None

    @staticmethod
    def has_valid_datetime_range(properties: dict[str, Any]) -> bool:
        """Check if both 'start_datetime' and 'end_datetime' are present and not None."""
        return all(
            properties.get(key) is not None
            for key in ["start_datetime", "end_datetime"]
        )

    @model_validator(mode="before")
    def validate_datetime_fields(cls, values):
        """Ensure at least one of 'datetime' or 'start_datetime'/'end_datetime' exists."""
        properties = values.get("properties", {})

        if not cls.has_valid_datetime(properties) and not cls.has_valid_datetime_range(
            properties
        ):
            raise ValueError(
                "Either 'datetime' or both 'start_datetime' and 'end_datetime' must be provided."
            )

        return values
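Usage sketch (not part of the diff): the validator requires either 'datetime' or both 'start_datetime' and 'end_datetime' in properties, so a minimal update looks like this; the property values are placeholders.

from datacosmos.stac.item.models.item_update import ItemUpdate

update = ItemUpdate(properties={"datetime": "2025-01-01T00:00:00Z", "gsd": 10})
update.set_geometry("Point", [0.0, 51.5])   # plain GeoJSON dict, no shapely involved
print(update.model_dump(exclude_none=True))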
datacosmos/stac/item/models/search_parameters.py
ADDED
@@ -0,0 +1,58 @@
"""Module defining the SearchParameters model for STAC API queries, encapsulating filtering criteria.

It includes spatial, temporal, and property-based filters for querying STAC items efficiently.
"""

from typing import Optional, Union

from pydantic import BaseModel, Field, model_validator


class SearchParameters(BaseModel):
    """Encapsulates the parameters for the STAC search API with validation."""

    bbox: Optional[list[float]] = Field(
        None,
        description="Bounding box filter [minX, minY, maxX, maxY]. Optional six values for 3D bounding box.",
        example=[-180.0, -90.0, 180.0, 90.0],
    )
    datetime_range: Optional[str] = Field(
        None,
        alias="datetime",
        description=(
            "Temporal filter, either a single RFC 3339 datetime or an interval. "
            'Example: "2025-01-01T00:00:00Z/.."'
        ),
    )
    intersects: Optional[dict] = Field(
        None, description="GeoJSON geometry filter, e.g., a Polygon or Point."
    )
    ids: Optional[list[str]] = Field(
        None,
        description="Array of item IDs to filter by.",
        example=["item1", "item2"],
    )
    collections: Optional[list[str]] = Field(
        None,
        description="Array of collection IDs to filter by.",
        example=["collection1", "collection2"],
    )
    limit: Optional[int] = Field(
        None,
        ge=1,
        le=10000,
        description="Maximum number of items per page. Default: 10, Max: 10000.",
        example=10,
    )
    query: Optional[dict[str, dict[str, Union[str, int, float]]]] = Field(
        None,
        description="Additional property filters, e.g., { 'cloud_coverage': { 'lt': 10 } }.",
    )

    @model_validator(mode="before")
    def validate_bbox(cls, values):
        """Validate that the `bbox` field contains either 4 or 6 values."""
        bbox = values.get("bbox")
        if bbox and len(bbox) not in {4, 6}:
            raise ValueError("bbox must contain 4 or 6 values.")
        return values
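Usage sketch (not part of the diff): the temporal filter is passed under its alias "datetime", and a bbox whose length is not 4 or 6 is rejected by the validator; the collection ID is a placeholder.

from datacosmos.stac.item.models.search_parameters import SearchParameters

params = SearchParameters(
    collections=["my-collection"],
    bbox=[-10.0, 35.0, 5.0, 45.0],
    datetime="2025-01-01T00:00:00Z/..",   # alias of datetime_range
    limit=50,
)
print(params.model_dump(by_alias=True, exclude_none=True))

# SearchParameters(bbox=[1.0, 2.0, 3.0]) would fail validation: bbox must contain 4 or 6 values.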
datacosmos/stac/stac_client.py
ADDED
@@ -0,0 +1,12 @@
"""Unified interface for STAC API, combining Item & Collection operations."""

from datacosmos.stac.collection.collection_client import CollectionClient
from datacosmos.stac.item.item_client import ItemClient


class STACClient(ItemClient, CollectionClient):
    """Unified interface for STAC API, combining Item & Collection operations."""

    def __init__(self, client):
        """Initialize the STACClient with a DatacosmosClient."""
        super().__init__(client)
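Usage sketch (not part of the diff): the combined client exposes both item and collection operations through one object; `client` is the assumed DatacosmosClient from the earlier sketches, and the IDs are placeholders.

from datacosmos.stac.stac_client import STACClient

stac = STACClient(client)
collection = stac.fetch_collection("my-collection")   # CollectionClient method
item = stac.fetch_item("my-item", "my-collection")    # ItemClient method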
datacosmos/utils/__init__.py
ADDED
@@ -0,0 +1 @@
"""Http response and url utils for datacosmos."""
datacosmos/utils/http_response/__init__.py
ADDED
@@ -0,0 +1 @@
"""Validates an API response."""
datacosmos/utils/http_response/check_api_response.py
ADDED
@@ -0,0 +1,34 @@
"""Validates an API response and raises a DatacosmosException if an error occurs."""

from pydantic import ValidationError
from requests import Response

from datacosmos.exceptions.datacosmos_exception import DatacosmosException
from datacosmos.utils.http_response.models.datacosmos_response import DatacosmosResponse


def check_api_response(response: Response) -> None:
    """Validates an API response and raises a DatacosmosException if an error occurs.

    Args:
        resp (requests.Response): The response object.

    Raises:
        DatacosmosException: If the response status code indicates an error.
    """
    if 200 <= response.status_code < 400:
        return

    try:
        response = DatacosmosResponse.model_validate_json(response.text)
        msg = response.errors[0].human_readable()
        if len(response.errors) > 1:
            msg = "\n * " + "\n * ".join(
                error.human_readable() for error in response.errors
            )
        raise DatacosmosException(msg, response=response)

    except ValidationError:
        raise DatacosmosException(
            f"HTTP {response.status_code}: {response.text}", response=response
        )
datacosmos/utils/http_response/models/__init__.py
ADDED
@@ -0,0 +1 @@
"""Models for validation of API response."""
datacosmos/utils/http_response/models/datacosmos_error.py
ADDED
@@ -0,0 +1,26 @@
"""Structured API error message for Datacosmos."""

from pydantic import BaseModel


class DatacosmosError(BaseModel):
    """Structured API error message for Datacosmos."""

    message: str
    field: str | None = None
    type: str | None = None
    source: str | None = None
    trace_id: str | None = None

    def human_readable(self) -> str:
        """Formats the error message into a readable format."""
        msg = self.message
        if self.type:
            msg += f" (type: {self.type})"
        if self.field:
            msg += f" (field: {self.field})"
        if self.source:
            msg += f" (source: {self.source})"
        if self.trace_id:
            msg += f" (trace_id: {self.trace_id})"
        return msg
datacosmos/utils/http_response/models/datacosmos_response.py
ADDED
@@ -0,0 +1,11 @@
"""Structured response for Datacosmos handling multiple API errors."""

from pydantic import BaseModel

from datacosmos.utils.http_response.models.datacosmos_error import DatacosmosError


class DatacosmosResponse(BaseModel):
    """Structured response for Datacosmos handling multiple API errors."""

    errors: list[DatacosmosError]
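Usage sketch (not part of the diff): parsing an error body into the two models above; the JSON payload is an invented example of the shape check_api_response expects.

from datacosmos.utils.http_response.models.datacosmos_response import DatacosmosResponse

payload = '{"errors": [{"message": "Collection not found", "type": "not_found", "trace_id": "abc123"}]}'
parsed = DatacosmosResponse.model_validate_json(payload)
print(parsed.errors[0].human_readable())
# Collection not found (type: not_found) (trace_id: abc123)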
datacosmos/utils/url.py
ADDED
@@ -0,0 +1,37 @@
"""URL utility class for building and handling URLs in the SDK."""


class URL:
    """Class to represent and build URLs in a convenient way."""

    def __init__(self, protocol: str, host: str, port: int, base: str):
        """Creates a new basis to build URLs.

        Args:
            protocol (str): Protocol to use in the URL (http/https).
            host (str): Hostname (e.g., example.com).
            port (int): Port number.
            base (str): Base path (e.g., /api/v1).
        """
        self.protocol = protocol
        self.host = host
        self.port = port
        self.base = base

    def string(self) -> str:
        """Returns the full URL as a string."""
        port = "" if self.port in [80, 443] else f":{self.port}"
        base = f"/{self.base.lstrip('/')}" if self.base else ""
        return f"{self.protocol}://{self.host}{port}{base}"

    def with_suffix(self, suffix: str) -> str:
        """Appends a suffix to the URL, ensuring proper formatting.

        Args:
            suffix (str): The path to append.

        Returns:
            str: Full URL with the suffix.
        """
        base = self.string()
        return f"{base.rstrip('/')}/{suffix.lstrip('/')}"
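Usage sketch (not part of the diff): default ports (80/443) are omitted from the rendered URL, and with_suffix normalises the slashes between base and suffix; the localhost values are placeholders.

from datacosmos.utils.url import URL

api = URL("https", "app.open-cosmos.com", 443, "/api/data/v0/stac")
print(api.string())                     # https://app.open-cosmos.com/api/data/v0/stac
print(api.with_suffix("/collections"))  # https://app.open-cosmos.com/api/data/v0/stac/collections

local = URL("http", "localhost", 8080, "api")
print(local.string())                   # http://localhost:8080/api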
datacosmos-0.0.2.dist-info/RECORD
ADDED
@@ -0,0 +1,32 @@
config/__init__.py,sha256=KCsaTb9-ZgFui1GM8wZFIPLJy0D0O8l8Z1Sv3NRD9UM,140
config/config.py,sha256=0M2wKmrcCJjte3UmLNQVags_qce7Id2ampBPqadzPJw,5908
config/models/__init__.py,sha256=r3lThPkyKjBjUZXRNscFzOrmn_-m_i9DvG3RePfCFYc,41
config/models/m2m_authentication_config.py,sha256=1eJ_9df7Twn5WeWPbqMuR63ZdxhnTpBtKzqAPMnzP_k,565
config/models/url.py,sha256=fwr2C06e_RDS8AWxOV_orVxMWhc57bzYoWSjFxQbkwg,835
datacosmos/__init__.py,sha256=dVHKpbz5FVtfoJAWHRdsUENG6H-vs4UrkuwnIvOGJr4,66
datacosmos/datacosmos_client.py,sha256=sivVYf45QEHTkUO62fnb1fnObKVmUngTR1Ga-ZRnoQE,4967
datacosmos/exceptions/__init__.py,sha256=Crz8W7mOvPUXYcfDVotvjUt_3HKawBpmJA_-uel9UJk,45
datacosmos/exceptions/datacosmos_exception.py,sha256=rKjJvQDvCEbxXWWccxB5GI_sth662bW8Yml0hX-vRw4,923
datacosmos/stac/__init__.py,sha256=B4x_Mr4X7TzQoYtRC-VzI4W-fEON5WUOaz8cWJbk3Fc,214
datacosmos/stac/stac_client.py,sha256=Cz_p96RmAgWX8t7Sye4OJRanQpCLihKStvfEw7IgYZc,472
datacosmos/stac/collection/__init__.py,sha256=VQMLnsU3sER5kh4YxHrHP7XCA3DG1y0n9yoSmvycOY0,212
datacosmos/stac/collection/collection_client.py,sha256=XTO2s309-cktJosvnwnFFXHDVmJc4vjvbEsZjpsCDmY,5904
datacosmos/stac/collection/models/__init__.py,sha256=TQaihUS_CM9Eaekm4SbzFTNfv7BmabHv3Z-f37Py5Qs,40
datacosmos/stac/collection/models/collection_update.py,sha256=Tqmfg4H4UQj5jsgy1dpKJCR59NSfWeiCSi9y8CY8-Cg,1656
datacosmos/stac/item/__init__.py,sha256=lRuD_yp-JxoLqBA23q0XMkCNImf4T-X3BJnSw9u_3Yk,200
datacosmos/stac/item/item_client.py,sha256=AYyRR92Wy-rDwc9wFoljb6lAgSCuWft4VT5muqGeyv8,6738
datacosmos/stac/item/models/__init__.py,sha256=bcOrOcIxGxGBrRVIyQVxSM3C3Xj_qzxIHgQeWo6f7Q8,34
datacosmos/stac/item/models/item_update.py,sha256=_CpjQn9SsfedfuxlHSiGeptqY4M-p15t9YX__mBRueI,2088
datacosmos/stac/item/models/search_parameters.py,sha256=yMmcb-Tr2as8585MD5wuZLWcqzwtRRkj07WBkootVS0,2022
datacosmos/utils/__init__.py,sha256=XQbAnoqJrPpnSpEzAbjh84yqYWw8cBM8mNp8ynTG-54,50
datacosmos/utils/url.py,sha256=luaGa6UqPIf0h_1u2z3CZ32YQXNl7nGV03lVW7mlRIM,1214
datacosmos/utils/http_response/__init__.py,sha256=BvOWwC5coYqq_kFn8gIw5m54TLpdfJKlW9vgRkfhXiA,33
datacosmos/utils/http_response/check_api_response.py,sha256=dKWW01jn2_lWV0xpOBABhEP42CFSsx9dP0iSxykbN54,1186
datacosmos/utils/http_response/models/__init__.py,sha256=Wj8YT6dqw7rAz_rctllxo5Or_vv8DwopvQvBzwCTvpw,45
datacosmos/utils/http_response/models/datacosmos_error.py,sha256=Uqi2uM98nJPeCbM7zngV6vHSk97jEAb_nkdDEeUjiQM,740
datacosmos/utils/http_response/models/datacosmos_response.py,sha256=oV4n-sue7K1wwiIQeHpxdNU8vxeqF3okVPE2rydw5W0,336
datacosmos-0.0.2.dist-info/LICENSE.md,sha256=vpbRI-UUbZVQfr3VG_CXt9HpRnL1b5kt8uTVbirxeyI,1486
datacosmos-0.0.2.dist-info/METADATA,sha256=SIuvO1SE647Q9OvrKYuj1VmzWFgKjErvrRxrKoNgcyk,821
datacosmos-0.0.2.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
datacosmos-0.0.2.dist-info/top_level.txt,sha256=Iu5b533Fmdfz0rFKTnuBPjSUOQL2lEkTfHxsokP72s4,18
datacosmos-0.0.2.dist-info/RECORD,,
datacosmos-0.0.1.dist-info/RECORD
@@ -1,7 +0,0 @@
datacosmos/__init__.py,sha256=dVHKpbz5FVtfoJAWHRdsUENG6H-vs4UrkuwnIvOGJr4,66
datacosmos/datacosmos_client.py,sha256=sivVYf45QEHTkUO62fnb1fnObKVmUngTR1Ga-ZRnoQE,4967
datacosmos-0.0.1.dist-info/LICENSE.md,sha256=vpbRI-UUbZVQfr3VG_CXt9HpRnL1b5kt8uTVbirxeyI,1486
datacosmos-0.0.1.dist-info/METADATA,sha256=N8KMHCWaO8ED0OYT0hUikP31GgD1fB6SDpFuNAhL924,821
datacosmos-0.0.1.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
datacosmos-0.0.1.dist-info/top_level.txt,sha256=ueobs5CNeyDbPMgXPcVV0d0yNdm8CvGtDT3CaksRVtA,11
datacosmos-0.0.1.dist-info/RECORD,,
{datacosmos-0.0.1.dist-info → datacosmos-0.0.2.dist-info}/LICENSE.md
File without changes
{datacosmos-0.0.1.dist-info → datacosmos-0.0.2.dist-info}/WHEEL
File without changes