terrakio-core 0.4.98__py3-none-any.whl → 0.4.98.1b1__py3-none-any.whl

This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of terrakio-core might be problematic.

@@ -1,51 +1,68 @@
-from typing import Dict, Any, List, Optional
-from ..helper.decorators import require_token, require_api_key, require_auth
+from typing import Any, Dict, List, Optional
+
+from ..exceptions import (
+    CommandPermissionError,
+    DatasetNotFoundError,
+    DatasetPermissionError,
+    GetDatasetError,
+    ListDatasetsError,
+    CreateDatasetError,
+    DatasetAlreadyExistsError,
+    DeleteDatasetError,
+    OverwriteDatasetError,
+)
+from ..helper.decorators import require_api_key, require_auth, require_token

 class DatasetManagement:
     def __init__(self, client):
         self._client = client

-
     @require_api_key
-    def list_datasets(self, substring: Optional[str] = None, collection: str = "terrakio-datasets") -> List[Dict[str, Any]]:
+    async def list_datasets(self, substring: Optional[str] = None) -> List[Dict[str, Any]]:
         """
         List datasets, optionally filtering by a substring and collection.

         Args:
             substring: Substring to filter by (optional)
-            collection: Dataset collection (default: 'terrakio-datasets')

         Returns:
             List of datasets matching the criteria
         """
-        params = {"collection": collection}
-        if substring:
-            params.update({"substring": substring})
-        return self._client._terrakio_request("GET", "/datasets", params = params)
+        params = {"substring": substring} if substring else None
+        response, status = await self._client._terrakio_request("GET", "/datasets", params=params)
+        if status != 200:
+            raise ListDatasetsError(f"List datasets failed with status {status}", status_code=status)
+        return response

     @require_api_key
-    def get_dataset(self, name: str, collection: str = "terrakio-datasets") -> Dict[str, Any]:
+    async def get_dataset(self, name: str) -> Dict[str, Any]:
         """
         Retrieve dataset info by dataset name.

         Args:
             name: The name of the dataset (required)
-            collection: The dataset collection (default: 'terrakio-datasets')

         Returns:
             Dataset information as a dictionary

         Raises:
-            APIError: If the API request fails
+            GetDatasetError: If the API request fails
+            DatasetNotFoundError: If the dataset is not found
+            DatasetPermissionError: If the user does not have permission to get the dataset
         """
-        params = {"collection": collection}
-        return self._client._terrakio_request("GET", f"/datasets/{name}", params = params)
+        response, status = await self._client._terrakio_request("GET", f"/datasets/{name}")
+        if status != 200:
+            if status == 404:
+                raise DatasetNotFoundError(f"Dataset {name} not found", status_code = status)
+            if status == 403:
+                raise DatasetPermissionError(f"You do not have permission to get dataset {name}", status_code = status)
+            raise GetDatasetError(f"Get dataset failed with status {status}", status_code = status)
+        return response

     @require_api_key
     async def create_dataset(
         self,
         name: str,
-        collection: str = "terrakio-datasets",
         products: Optional[List[str]] = None,
         dates_iso8601: Optional[List[str]] = None,
         bucket: Optional[str] = None,
@@ -67,7 +84,6 @@ class DatasetManagement:

         Args:
             name: Name of the dataset (required)
-            collection: Dataset collection (default: 'terrakio-datasets')
             products: List of products
             dates_iso8601: List of dates (will be automatically sorted chronologically)
             bucket: Storage bucket
@@ -89,7 +105,6 @@
         Raises:
             APIError: If the API request fails
         """
-        params = {"collection": collection}
         payload = {"name": name}
         param_mapping = {
             "products": products,
@@ -111,14 +126,42 @@
         for param, value in param_mapping.items():
             if value is not None:
                 payload[param] = value
-        return await self._client._terrakio_request("POST", "/datasets", params = params, json = payload)
-
+        response, status = await self._client._terrakio_request("POST", "/datasets", json = payload)
+        if status != 200:
+            if status ==403:
+                raise CommandPermissionError(f"You do not have permission to create dataset {name}", status_code = status)
+            elif status == 409:
+                raise DatasetAlreadyExistsError(f"Dataset {name} already exists", status_code = status)
+            raise CreateDatasetError(f"Create dataset failed with status {status}", status_code = status)
+        return response
+
+    @require_api_key
+    async def delete_dataset(self, name: str) -> Dict[str, Any]:
+        """
+        Delete a dataset by name.
+
+        Args:
+            name: The name of the dataset (required)
+
+        Returns:
+            Deleted dataset information
+
+        Raises:
+            CommandPermissionError: If the user does not have permission to delete the dataset
+            DeleteDatasetError: If the API request fails
+        """
+        response, status = await self._client._terrakio_request("DELETE", f"/datasets/{name}")
+        if status != 200:
+            if status == 403:
+                raise CommandPermissionError(f"You do not have permission to delete dataset {name}", status_code = status)
+            raise DeleteDatasetError(f"Delete dataset failed with status {status}", status_code = status)
+        return response
+
     @require_api_key
     def update_dataset(
         self,
         name: str,
         append: bool = True,
-        collection: str = "terrakio-datasets",
         products: Optional[List[str]] = None,
         dates_iso8601: Optional[List[str]] = None,
         bucket: Optional[str] = None,
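
The rewritten create_dataset and the new delete_dataset shown above map HTTP 403/409 responses to typed exceptions instead of a generic APIError. Below is a minimal caller-side sketch; only the method and exception names come from the diff, while the client.datasets wiring and the top-level terrakio_core.exceptions import path are assumptions for illustration.

    # Hypothetical usage sketch; client.datasets wiring and the import path are assumed.
    from terrakio_core.exceptions import (
        CommandPermissionError,
        DatasetAlreadyExistsError,
    )

    async def ensure_dataset(client, name: str) -> dict:
        """Create the dataset, tolerating the case where it already exists."""
        try:
            return await client.datasets.create_dataset(name=name)
        except DatasetAlreadyExistsError:
            # 409 from the API: the dataset is already there, fetch it instead.
            return await client.datasets.get_dataset(name)
        except CommandPermissionError as exc:
            # 403 from the API: re-raise with a clearer caller-facing message.
            raise RuntimeError(f"Not allowed to create dataset {name!r}") from exc
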
@@ -140,7 +183,6 @@
         Args:
             name: Name of the dataset (required)
             append: Whether to append data or replace (default: True)
-            collection: Dataset collection (default: 'terrakio-datasets')
             products: List of products
             dates_iso8601: List of dates (will be automatically sorted chronologically)
             bucket: Storage bucket
@@ -162,11 +204,10 @@
         Raises:
             APIError: If the API request fails
         """
-        # Sort dates_iso8601 chronologically if provided
         if dates_iso8601 is not None:
             dates_iso8601 = sorted(dates_iso8601)

-        params = {"collection": collection, "append": str(append).lower()}
+        params = {"append": str(append).lower()}
         payload = {"name": name}
         param_mapping = {
             "products": products,
@@ -194,7 +235,6 @@
         self,
         name: str,
         append: bool = True,
-        collection: str = "terrakio-datasets",
         products: Optional[List[str]] = None,
         dates_iso8601: Optional[List[str]] = None,
         bucket: Optional[str] = None,
@@ -217,7 +257,6 @@
         Args:
             name: Name of the dataset (required)
             append: Whether to append data or replace (default: True)
-            collection: Dataset collection (default: 'terrakio-datasets')
             products: List of products
             dates_iso8601: List of dates (will be automatically sorted chronologically)
             bucket: Storage bucket
@@ -244,7 +283,7 @@
         if dates_iso8601 is not None:
             dates_iso8601 = sorted(dates_iso8601)

-        params = {"collection": collection, "append": str(append).lower()}
+        params = {"append": str(append).lower()}
         payload = {"name": name}
         param_mapping = {
             "products": products,
@@ -267,14 +306,55 @@
             if value is not None:
                 payload[param] = value
         return self._client._terrakio_request("PATCH", "/datasets", params = params, json = payload)
-


     @require_api_key
-    def overwrite_dataset(
+    async def _get_url_for_upload_inference_script(self, script_path: str) -> str:
+        """
+        Get the url for the upload of the inference script
+        """
+        # we have the path, and we just need to get the bucket name
+        payload = {
+            "script_path": script_path,
+        }
+        return await self._client._terrakio_request("POST", "models/update_inference_script", json = payload)
+
+    @require_api_key
+    async def update_virtual_dataset_inference(
+        self,
+        name: str,
+        inference_script_path: str,
+        append: bool = True,
+    ):
+        """
+        Update the inference script for a virtual dataset.
+        """
+        params = {"append": str(append).lower()}
+        dataset_info = await self.get_dataset(name)
+        print("the current dataset info is: ", dataset_info)
+        script_path = dataset_info["path"]
+        product_name = dataset_info["products"][0]
+        script_path = script_path + f"/{product_name}.py"
+        upload_url_dict = await self._get_url_for_upload_inference_script(script_path)
+        upload_url = upload_url_dict["script_upload_url"]
+        try:
+            with open(inference_script_path, "r", encoding="utf-8") as f:
+                script_content = f.read()
+            script_bytes = script_content.encode('utf-8')
+            headers = {
+                "Content-Type": "text/x-python",
+                "Content-Length": str(len(script_bytes))
+            }
+            response = await self._client._regular_request("PUT", endpoint=upload_url, data=script_bytes, headers=headers)
+        except FileNotFoundError:
+            raise FileNotFoundError(f"Inference script file not found: {inference_script_path}")
+        except Exception as e:
+            raise Exception(f"Failed to upload inference script: {str(e)}")
+
+    @require_api_key
+    async def overwrite_dataset(
         self,
         name: str,
-        collection: str = "terrakio-datasets",
         products: Optional[List[str]] = None,
         dates_iso8601: Optional[List[str]] = None,
         bucket: Optional[str] = None,
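
As added in the hunk above, update_virtual_dataset_inference looks up the dataset, derives the script path from the dataset's "path" and first product name, requests a signed upload URL, then PUTs the local script's bytes to that URL. A hedged call sketch follows; the client.datasets attribute is an assumption, while the method name and parameters come from the diff.

    # Hypothetical call; only the method name and parameters are taken from the diff.
    await client.datasets.update_virtual_dataset_inference(
        name="my_virtual_dataset",               # dataset whose first product names the <product>.py script
        inference_script_path="./inference.py",  # local file read as UTF-8 and PUT to the signed URL
    )
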
@@ -295,7 +375,6 @@

         Args:
             name: Name of the dataset (required)
-            collection: Dataset collection (default: 'terrakio-datasets')
             products: List of products
             dates_iso8601: List of dates (will be automatically sorted chronologically)
             bucket: Storage bucket
@@ -315,9 +394,10 @@
             Overwritten dataset information

         Raises:
-            APIError: If the API request fails
+            CommandPermissionError: If the user does not have permission to overwrite the dataset
+            DatasetNotFoundError: If the dataset is not found
+            OverwriteDatasetError: If the API request fails
         """
-        params = {"collection": collection}
         payload = {"name": name}
         param_mapping = {
             "products": products,
@@ -338,25 +418,14 @@
         for param, value in param_mapping.items():
             if value is not None:
                 payload[param] = value
-        return self._client._terrakio_request("PUT", "/datasets", params = params, json = payload)
-
-    @require_api_key
-    def delete_dataset(self, name: str, collection: str = "terrakio-datasets") -> Dict[str, Any]:
-        """
-        Delete a dataset by name.
-
-        Args:
-            name: The name of the dataset (required)
-            collection: The dataset collection (default: 'terrakio-datasets')
-
-        Returns:
-            Deleted dataset information
-
-        Raises:
-            APIError: If the API request fails
-        """
-        params = {"collection": collection}
-        return self._client._terrakio_request("DELETE", f"/datasets/{name}", params = params)
+        response, status = await self._client._terrakio_request("PUT", "/datasets", json = payload)
+        if status != 200:
+            if status == 403:
+                raise CommandPermissionError(f"You do not have permission to overwrite dataset {name}", status_code = status)
+            elif status == 404:
+                raise DatasetNotFoundError(f"Dataset {name} not found", status_code = status)
+            raise OverwriteDatasetError(f"Failed to overwrite dataset: {response}", status_code = status)
+        return response

     @require_api_key
     def download_file_to_path(self, job_name, stage, file_name, output_path):
@@ -370,9 +439,6 @@
             verify=self.verify,
             timeout=self.timeout
         )
-
-        # fetch bucket info based on job name and stage
-
        taskid = self.mass_stats.get_task_id(job_name, stage).get('task_id')
         trackinfo = self.mass_stats.track_job([taskid])
         bucket = trackinfo[taskid]['bucket']
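
Overall, the dataset-management methods in this release move from synchronous calls to async coroutines, drop the collection query parameter, and unpack a (response, status) tuple from the internal _terrakio_request helper. A before/after sketch for existing callers, assuming the methods are reached through a client.datasets attribute and that the exceptions live in terrakio_core.exceptions (neither is shown in the diff):

    # terrakio-core 0.4.98 (before): synchronous, with an explicit collection parameter.
    #   datasets = client.datasets.list_datasets(substring="ndvi", collection="terrakio-datasets")

    # terrakio-core 0.4.98.1b1 (after): awaitable, no collection parameter, typed exceptions.
    from terrakio_core.exceptions import DatasetNotFoundError  # import path assumed

    async def fetch(client):
        datasets = await client.datasets.list_datasets(substring="ndvi")
        try:
            info = await client.datasets.get_dataset("my_dataset")
        except DatasetNotFoundError:
            info = None  # 404 now surfaces as a typed exception rather than a generic APIError
        return datasets, info
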