terrakio-core 0.2.1__py3-none-any.whl → 0.2.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of terrakio-core might be problematic. Click here for more details.
- {terrakio_core-0.2.1.dist-info → terrakio_core-0.2.3.dist-info}/METADATA +3 -7
- terrakio_core-0.2.3.dist-info/RECORD +4 -0
- {terrakio_core-0.2.1.dist-info → terrakio_core-0.2.3.dist-info}/WHEEL +1 -1
- terrakio_core-0.2.3.dist-info/top_level.txt +1 -0
- terrakio_core/__init__.py +0 -0
- terrakio_core/auth.py +0 -237
- terrakio_core/client.py +0 -829
- terrakio_core/config.py +0 -81
- terrakio_core/dataset_management.py +0 -235
- terrakio_core/exceptions.py +0 -18
- terrakio_core/mass_stats.py +0 -262
- terrakio_core/user_management.py +0 -227
- terrakio_core-0.2.1.dist-info/RECORD +0 -12
- terrakio_core-0.2.1.dist-info/top_level.txt +0 -1
terrakio_core/config.py
DELETED
|
@@ -1,81 +0,0 @@
|
|
|
1
|
-
import os
|
|
2
|
-
import json
|
|
3
|
-
from pathlib import Path
|
|
4
|
-
from typing import Dict, Any, Optional
|
|
5
|
-
|
|
6
|
-
from .exceptions import ConfigurationError
|
|
7
|
-
|
|
8
|
-
# Default configuration file locations
|
|
9
|
-
DEFAULT_CONFIG_FILE = os.path.join(os.environ.get("HOME", ""), ".tkio_config.json")
|
|
10
|
-
DEFAULT_API_URL = "https://api.terrak.io"
|
|
11
|
-
|
|
12
|
-
def read_config_file(config_file: str = DEFAULT_CONFIG_FILE) -> Dict[str, Any]:
    """
    Read and parse the configuration file.

    Args:
        config_file: Path to the configuration file

    Returns:
        Dict[str, Any]: Configuration parameters with keys 'url' and 'key'

    Raises:
        ConfigurationError: If the configuration file is missing, unreadable,
            not valid JSON, or not a JSON object
    """
    config_path = Path(os.path.expanduser(config_file))

    if not config_path.exists():
        raise ConfigurationError(
            f"Configuration file not found: {config_file}\n"
            f"Please create a file at {config_file} with the following format:\n"
            '{\n "EMAIL": "your-email@example.com",\n "TERRAKIO_API_KEY": "your-api-key-here"\n}'
        )

    # Narrow catch: OSError covers read failures, JSONDecodeError covers bad
    # syntax. The original blanket `except Exception` hid unrelated bugs.
    try:
        with open(config_path, 'r') as f:
            config_data = json.load(f)
    except (OSError, json.JSONDecodeError) as e:
        raise ConfigurationError(f"Failed to parse configuration file: {e}") from e

    # A valid JSON file could still hold a list/string; reject explicitly
    # instead of failing later on `.get`.
    if not isinstance(config_data, dict):
        raise ConfigurationError(
            f"Failed to parse configuration file: expected a JSON object, got {type(config_data).__name__}"
        )

    # Convert the JSON config to our expected format
    return {
        # Allow config to override default URL if provided
        'url': config_data.get('TERRAKIO_API_URL', DEFAULT_API_URL),
        'key': config_data.get('TERRAKIO_API_KEY'),
    }
|
|
48
|
-
|
|
49
|
-
def create_default_config(email: str, api_key: str, api_url: Optional[str] = None, config_file: str = DEFAULT_CONFIG_FILE) -> None:
    """
    Create a default configuration file in JSON format.

    Args:
        email: User email
        api_key: Terrakio API key
        api_url: Optional API URL (if different from default)
        config_file: Path to configuration file

    Raises:
        ConfigurationError: If the configuration file (or its parent
            directory) can't be created
    """
    config_path = Path(os.path.expanduser(config_file))

    config_data = {
        "EMAIL": email,
        "TERRAKIO_API_KEY": api_key
    }
    # Add API URL if provided
    if api_url:
        config_data["TERRAKIO_API_URL"] = api_url

    try:
        # mkdir is inside the try so directory-creation failures also surface
        # as ConfigurationError (they previously escaped as raw OSError).
        config_path.parent.mkdir(parents=True, exist_ok=True)
        with open(config_path, 'w') as f:
            json.dump(config_data, f, indent=2)
    except OSError as e:
        raise ConfigurationError(f"Failed to create configuration file: {e}") from e
|
@@ -1,235 +0,0 @@
|
|
|
1
|
-
import requests
|
|
2
|
-
from typing import Dict, Any, List, Optional
|
|
3
|
-
from .exceptions import APIError
|
|
4
|
-
|
|
5
|
-
class DatasetManagement:
    """Client for the Terrakio dataset CRUD endpoints.

    Every public method returns the decoded JSON payload and raises
    APIError when the request cannot be performed or the server responds
    with a non-2xx status (except the 404 special case in delete_dataset).
    """

    def __init__(self, api_url: str, api_key: str, verify: bool = True, timeout: int = 60):
        """
        Initialize the Dataset Management client.

        Args:
            api_url: API base URL
            api_key: API key for authentication
            verify: Verify SSL certificates
            timeout: Request timeout in seconds
        """
        self.api_url = api_url.rstrip('/')
        self.api_key = api_key
        self.verify = verify
        self.timeout = timeout
        self.session = requests.Session()
        self.session.headers.update({
            'x-api-key': self.api_key,
            'Content-Type': 'application/json'
        })

    def _request(self, method: str, path: str,
                 params: Optional[Dict[str, Any]] = None,
                 payload: Optional[Dict[str, Any]] = None):
        """Send one HTTP request, translating transport errors into APIError.

        Args:
            method: HTTP verb, e.g. 'GET' or 'POST'
            path: Path relative to the API base URL (must start with '/')
            params: Optional query-string parameters
            payload: Optional JSON request body

        Returns:
            The raw requests.Response; status handling is left to callers so
            delete_dataset can special-case 404.

        Raises:
            APIError: If the request could not be performed at all
        """
        try:
            return self.session.request(
                method,
                f"{self.api_url}{path}",
                params=params,
                json=payload,
                timeout=self.timeout,
                verify=self.verify
            )
        except requests.RequestException as e:
            raise APIError(f"Request failed: {str(e)}")

    @staticmethod
    def _parse(response) -> Dict[str, Any]:
        """Raise APIError on a non-2xx response, otherwise decode the JSON body."""
        if not response.ok:
            raise APIError(f"API request failed: {response.status_code} {response.reason}")
        try:
            return response.json()
        except requests.RequestException as e:
            # requests >= 2.27 raises a RequestException subclass on bad JSON
            raise APIError(f"Request failed: {str(e)}")

    def get_dataset(self, name: str, collection: str = "terrakio-datasets") -> Dict[str, Any]:
        """
        Retrieve dataset info by dataset name.

        Args:
            name: The name of the dataset (required)
            collection: The dataset collection (default: 'terrakio-datasets')

        Returns:
            Dataset information as a dictionary

        Raises:
            APIError: If the API request fails
        """
        params = {"collection": collection} if collection else {}
        return self._parse(self._request("GET", f"/datasets/{name}", params=params))

    def list_datasets(self, substring: Optional[str] = None, collection: str = "terrakio-datasets") -> List[Dict[str, Any]]:
        """
        List datasets, optionally filtering by a substring and collection.

        Args:
            substring: Substring to filter by (optional)
            collection: Dataset collection (default: 'terrakio-datasets')

        Returns:
            List of datasets matching the criteria

        Raises:
            APIError: If the API request fails
        """
        params: Dict[str, Any] = {"collection": collection}
        if substring:
            params["substring"] = substring
        return self._parse(self._request("GET", "/datasets", params=params))

    def create_dataset(self, name: str, collection: str = "terrakio-datasets", **kwargs) -> Dict[str, Any]:
        """
        Create a new dataset.

        Args:
            name: Name of the dataset (required)
            collection: Dataset collection (default: 'terrakio-datasets')
            **kwargs: Additional dataset parameters including:
                - products: List of products
                - dates_iso8601: List of dates
                - bucket: Storage bucket
                - path: Storage path
                - data_type: Data type
                - no_data: No data value
                - l_max: Maximum level
                - y_size: Y size
                - x_size: X size
                - proj4: Projection string
                - abstract: Dataset abstract
                - geotransform: Geotransform parameters

        Returns:
            Created dataset information

        Raises:
            APIError: If the API request fails
        """
        # Create payload with required name parameter; only whitelisted
        # optional parameters are forwarded.
        payload: Dict[str, Any] = {"name": name}
        for param in ("products", "dates_iso8601", "bucket", "path", "data_type",
                      "no_data", "l_max", "y_size", "x_size", "proj4", "abstract", "geotransform"):
            if param in kwargs:
                payload[param] = kwargs[param]
        return self._parse(self._request("POST", "/datasets",
                                         params={"collection": collection}, payload=payload))

    def update_dataset(self, name: str, append: bool = True, collection: str = "terrakio-datasets", **kwargs) -> Dict[str, Any]:
        """
        Update a dataset. By default, values are appended unless append is set to False.

        Args:
            name: Name of the dataset (required)
            append: Whether to append values (default: True)
            collection: Dataset collection (default: 'terrakio-datasets')
            **kwargs: Additional dataset parameters to update

        Returns:
            Updated dataset information

        Raises:
            APIError: If the API request fails
        """
        # The API expects append as a lowercase string flag.
        params = {"append": str(append).lower(), "collection": collection}
        payload: Dict[str, Any] = {"name": name, **kwargs}
        return self._parse(self._request("PATCH", "/datasets", params=params, payload=payload))

    def overwrite_dataset(self, name: str, collection: str = "terrakio-datasets", **kwargs) -> Dict[str, Any]:
        """
        Overwrite a dataset (replace all values).

        Args:
            name: Name of the dataset (required)
            collection: Dataset collection (default: 'terrakio-datasets')
            **kwargs: New dataset parameters

        Returns:
            Updated dataset information

        Raises:
            APIError: If the API request fails
        """
        payload: Dict[str, Any] = {"name": name, **kwargs}
        return self._parse(self._request("PUT", "/datasets",
                                         params={"collection": collection}, payload=payload))

    def delete_dataset(self, name: str, collection: str = "terrakio-datasets") -> Dict[str, Any]:
        """
        Delete a dataset by name.

        Args:
            name: The name of the dataset (required)
            collection: Dataset collection (default: 'terrakio-datasets')

        Returns:
            API response as a dictionary; a 404 yields an error-status dict
            rather than an exception

        Raises:
            APIError: If the API request fails (other than a 404)
        """
        response = self._request("DELETE", f"/datasets/{name}",
                                 params={"collection": collection})
        if response.status_code == 404:
            return {"status": "error", "message": f"Dataset '{name}' does not exist in collection '{collection}'"}
        return self._parse(response)
|
terrakio_core/exceptions.py
DELETED
|
@@ -1,18 +0,0 @@
|
|
|
1
|
-
class APIError(Exception):
    """Raised when an API request fails or the server returns an error response."""


class ConfigurationError(Exception):
    """Raised when the client configuration is missing or invalid."""


class DownloadError(Exception):
    """Raised when downloading data does not complete successfully."""


class ValidationError(Exception):
    """Raised when request parameters fail validation."""
|
terrakio_core/mass_stats.py
DELETED
|
@@ -1,262 +0,0 @@
|
|
|
1
|
-
import requests
|
|
2
|
-
from typing import Optional, Dict, Any
|
|
3
|
-
|
|
4
|
-
class MassStats:
    """Client for the Terrakio mass-stats, pyramid and random-sample job endpoints.

    NOTE(review): several endpoints deliberately do NOT call
    ``raise_for_status`` and instead return the decoded (possibly error)
    payload to the caller; those spots are marked below.
    """

    def __init__(self, base_url: str, api_key: str, verify: bool = True, timeout: int = 60):
        """
        Args:
            base_url: API base URL
            api_key: API key for authentication
            verify: Verify SSL certificates
            timeout: Request timeout in seconds
        """
        self.base_url = base_url.rstrip('/')
        self.api_key = api_key
        self.verify = verify
        self.timeout = timeout
        self.session = requests.Session()
        self.session.headers.update({
            'x-api-key': self.api_key
        })

    def upload_request(
        self,
        name: str,
        size: int,
        bucket: str,
        output: str,
        location: Optional[str] = None,
        force_loc: bool = False,
        config: Optional[Dict[str, Any]] = None,
        overwrite: bool = False,
        server: Optional[str] = None,
        skip_existing: bool = False
    ) -> Dict[str, Any]:
        """
        Initiate a mass stats upload job.

        Args:
            name: Name of the job
            size: Size of the data
            bucket: Storage bucket
            output: Output path or identifier
            location: (Optional) Location for the upload
            force_loc: Force location usage
            config: Optional configuration dictionary
            overwrite: Overwrite existing data
            server: Optional server
            skip_existing: Skip existing files

        Returns:
            Decoded JSON response (may be an error payload; no status check here)
        """
        url = f"{self.base_url}/mass_stats/upload"
        data: Dict[str, Any] = {
            "name": name,
            "size": size,
            "bucket": bucket,
            "output": output,
            "force_loc": force_loc,
            "overwrite": overwrite,
            "skip_existing": skip_existing
        }
        if location is not None:
            data["location"] = location
        if config is not None:
            data["config"] = config
        if server is not None:
            data["server"] = server
        response = self.session.post(url, json=data, verify=self.verify, timeout=self.timeout)
        # NOTE(review): HTTP errors are not raised here; the caller receives
        # the decoded error body instead — confirm this is intentional.
        return response.json()

    def start_job(self, task_id: str) -> Dict[str, Any]:
        """
        Start a mass stats job by task ID.

        Raises:
            requests.HTTPError: On a non-2xx response
        """
        url = f"{self.base_url}/mass_stats/start/{task_id}"
        response = self.session.post(url, verify=self.verify, timeout=self.timeout)
        response.raise_for_status()
        return response.json()

    def get_task_id(self, name: str, stage: str, uid: Optional[str] = None) -> Dict[str, Any]:
        """
        Get the task ID for a mass stats job by name and stage (and optionally user ID).
        """
        # Use params= so name/stage/uid are URL-encoded (the previous
        # hand-built query string broke on special characters).
        url = f"{self.base_url}/mass_stats/job_id"
        params = {"name": name, "stage": stage}
        if uid is not None:
            params["uid"] = uid
        response = self.session.get(url, params=params, verify=self.verify, timeout=self.timeout)
        return response.json()

    def track_job(self, ids: Optional[list] = None) -> Dict[str, Any]:
        """
        Track the status of one or more mass stats jobs.
        If ids is None, gets progress for all of the user's jobs.

        Raises:
            requests.HTTPError: On a non-2xx response
        """
        url = f"{self.base_url}/mass_stats/track"
        data = {"ids": ids} if ids is not None else {}
        response = self.session.post(url, json=data, verify=self.verify, timeout=self.timeout)
        response.raise_for_status()
        return response.json()

    def get_history(self, limit: int = 100) -> Dict[str, Any]:
        """
        Get the history of mass stats jobs.

        Args:
            limit: Maximum number of history entries to return

        Raises:
            requests.HTTPError: On a non-2xx response
        """
        url = f"{self.base_url}/mass_stats/history"
        params = {"limit": limit}
        response = self.session.get(url, params=params, verify=self.verify, timeout=self.timeout)
        response.raise_for_status()
        return response.json()

    def start_post_processing(
        self,
        process_name: str,
        data_name: str,
        output: str,
        consumer_path: str,
        overwrite: bool = False
    ) -> Dict[str, Any]:
        """
        Start post processing for a mass stats job.

        Args:
            process_name: Folder to store output
            data_name: Name of job used to create data
            output: Output type
            consumer_path: Path to the post processing script (Python file)
            overwrite: Overwrite existing post processing output in same location

        Returns:
            Dict with task_id
        """
        url = f"{self.base_url}/mass_stats/post_process"
        data = {
            'process_name': process_name,
            'data_name': data_name,
            'output': output,
            'overwrite': str(overwrite).lower()
        }
        # Context manager fixes the previous file-handle leak (the script
        # file was opened but never closed).
        with open(consumer_path, 'rb') as consumer_file:
            files = {
                'consumer': (consumer_path, consumer_file, 'text/x-python')
            }
            response = self.session.post(url, data=data, files=files, verify=self.verify, timeout=self.timeout)
        # NOTE(review): HTTP errors are not raised here — confirm intentional.
        return response.json()

    def download_results(
        self,
        id: Optional[str] = None,
        force_loc: bool = False,
        bucket: Optional[str] = None,
        location: Optional[str] = None,
        output: Optional[str] = None,
        file_name: Optional[str] = None
    ) -> bytes:
        """
        Download results from a mass stats job or arbitrary results if force_loc is True.
        Returns the content of the .zip file.
        """
        url = f"{self.base_url}/mass_stats/download"
        data: Dict[str, Any] = {}
        if id is not None:
            data["id"] = id
        if force_loc:
            data["force_loc"] = True
        if bucket is not None:
            data["bucket"] = bucket
        if location is not None:
            data["location"] = location
        if output is not None:
            data["output"] = output
        if file_name is not None:
            data["file_name"] = file_name
        response = self.session.post(url, json=data, verify=self.verify, timeout=self.timeout)
        # NOTE(review): HTTP errors are not raised here — confirm intentional.
        return response.content

    def cancel_job(self, id: str) -> Dict[str, Any]:
        """
        Cancel a mass stats job by ID.

        Raises:
            requests.HTTPError: On a non-2xx response
        """
        url = f"{self.base_url}/mass_stats/cancel/{id}"
        response = self.session.post(url, verify=self.verify, timeout=self.timeout)
        response.raise_for_status()
        return response.json()

    def cancel_all_jobs(self) -> Dict[str, Any]:
        """
        Cancel all mass stats jobs for the user.

        Raises:
            requests.HTTPError: On a non-2xx response
        """
        url = f"{self.base_url}/mass_stats/cancel"
        response = self.session.post(url, verify=self.verify, timeout=self.timeout)
        response.raise_for_status()
        return response.json()

    def create_pyramids(self, name: str, levels: int, config: Dict[str, Any]) -> Dict[str, Any]:
        """
        Create pyramids for a dataset.

        Args:
            name: Name for the pyramid job
            levels: Number of zoom levels to compute
            config: Dataset config (mapping)

        Returns:
            Dict with task_id
        """
        url = f"{self.base_url}/pyramids/create"
        data = {
            "name": name,
            "levels": levels,
            "config": config
        }
        response = self.session.post(url, json=data, verify=self.verify, timeout=self.timeout)
        # NOTE(review): HTTP errors are not raised here — confirm intentional.
        return response.json()

    def random_sample(
        self,
        name: str,
        config: dict,
        aoi: dict,
        samples: int,
        year_range: list,
        crs: str,
        tile_size: int,
        res: float,
        output: str,
        server: str,
        region: str,
        bucket: str,
        overwrite: bool = False
    ) -> Dict[str, Any]:
        """
        Submit a random sample job.

        Raises:
            ValueError: If year_range is not a two-element list of years
        """
        if year_range is None or len(year_range) != 2:
            raise ValueError("year_range must be a list of two integers")
        start_year, end_year = year_range
        if start_year is None or end_year is None:
            raise ValueError("Both start_year and end_year must be provided for year_range.")

        url = f"{self.base_url}/random_sample"
        data = {
            "name": name,
            "overwrite": overwrite,
            "config": config,
            "aoi": aoi,
            "samples": samples,
            "year_range": [start_year, end_year],
            "crs": crs,
            "tile_size": tile_size,
            "res": res,
            "output": output,
            "server": server,
            "region": region,
            "bucket": bucket
        }
        response = self.session.post(url, json=data, verify=self.verify, timeout=self.timeout)
        # NOTE(review): HTTP errors are not raised here — confirm intentional.
        return response.json()
|
|
262
|
-
|