terrakio-core 0.5.3.1__tar.gz → 0.5.4__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of terrakio-core might be problematic.

Files changed (24)
  1. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/PKG-INFO +1 -1
  2. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/pyproject.toml +1 -1
  3. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/__init__.py +1 -1
  4. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/endpoints/mass_stats.py +404 -15
  5. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/exceptions.py +6 -0
  6. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/.gitignore +0 -0
  7. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/README.md +0 -0
  8. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/accessors.py +0 -0
  9. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/async_client.py +0 -0
  10. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/client.py +0 -0
  11. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/config.py +0 -0
  12. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/convenience_functions/create_dataset_file.py +0 -0
  13. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/convenience_functions/geoquries.py +0 -0
  14. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/convenience_functions/zonal_stats.py +0 -0
  15. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/endpoints/auth.py +0 -0
  16. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/endpoints/dataset_management.py +0 -0
  17. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/endpoints/group_management.py +0 -0
  18. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/endpoints/model_management.py +0 -0
  19. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/endpoints/space_management.py +0 -0
  20. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/endpoints/user_management.py +0 -0
  21. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/helper/bounded_taskgroup.py +0 -0
  22. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/helper/decorators.py +0 -0
  23. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/helper/tiles.py +0 -0
  24. {terrakio_core-0.5.3.1 → terrakio_core-0.5.4}/terrakio_core/sync_client.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: terrakio-core
-Version: 0.5.3.1
+Version: 0.5.4
 Summary: Core package for the terrakio-python-api
 Requires-Python: >=3.11
 Requires-Dist: aiofiles>=24.1.0
pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "terrakio-core"
-version = "0.5.3.1"
+version = "0.5.4"
 description = "Core package for the terrakio-python-api"
 readme = "README.md"
 requires-python = ">=3.11"
terrakio_core/__init__.py
@@ -8,7 +8,7 @@ Core components for Terrakio API clients.
 # Suppress ONNX Runtime GPU device discovery warnings - MUST BE FIRST!
 import os
 os.environ['ORT_LOGGING_LEVEL'] = '3'
-__version__ = "0.5.3.1"
+__version__ = "0.5.4"
 
 from .async_client import AsyncClient
 from .sync_client import SyncClient as Client
terrakio_core/endpoints/mass_stats.py
@@ -1,13 +1,23 @@
 import json
 import os
 import time
-from typing import Dict, Any, Optional, List, Union
+from typing import Any, Dict, List, Optional, Union
 
 import aiohttp
+import dateutil.parser
+import geopandas as gpd
+import pyproj
+import shapely.geometry
 import typer
 from dateutil import parser
 from rich.console import Console
-from rich.progress import Progress, TextColumn, BarColumn, TaskProgressColumn, TimeElapsedColumn
+from rich.progress import (
+    BarColumn,
+    Progress,
+    TaskProgressColumn,
+    TextColumn,
+    TimeElapsedColumn,
+)
 
 from ..exceptions import (
     CancelAllTasksError,
@@ -26,15 +36,58 @@ from ..exceptions import (
     TaskNotFoundError,
     UploadArtifactsError,
     UploadRequestsError,
+    QuotaInsufficientError,
 )
 from ..helper.decorators import require_api_key
 
+import enum
+
+class OutputTypes(enum.Enum):
+    geotiff = 'geotiff'
+    png = 'png'
+    netcdf = 'netcdf'
+    json = 'json'
+    json_v2 = 'json_v2'
+    csv = 'csv'
+
+class Region(str, enum.Enum):
+    aus = "aus"
+    eu = "eu"
+    us = "us"
+
+regions = {
+    Region.aus : {
+        "name" : "australia-southeast1",
+        "url" : "https://terrakio-server-candidate-573248941006.australia-southeast1.run.app",
+        "bucket" : "terrakio-mass-requests"
+    },
+
+    Region.eu : {
+        "name" : "europe-west4",
+        "url" : "https://terrakio-server-candidate-573248941006.europe-west4.run.app",
+        "bucket" : "terrakio-mass-requests-eu"
+    },
+
+    Region.us : {
+        "name" : "us-central1",
+        "url" : "https://terrakio-server-candidate-573248941006.us-central1.run.app",
+        "bucket" : "terrakio-mass-requests-us"
+    },
+
+}
+
+class Dataset_Dtype(enum.Enum):
+    uint8 = 'uint8'
+    float32 = 'float32'
 
 class MassStats:
     def __init__(self, client):
         self._client = client
         self.console = Console()
-
+        self.OutputTypes = OutputTypes
+        self.Region = Region
+        self.regions = regions
+        self.Dataset_Dtype = Dataset_Dtype
     async def track_progress(self, task_id):
         task_info = await self.get_task(task_id=task_id)
         number_of_jobs = task_info["task"]["total"]
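
The new module-level enums and the `regions` mapping are also exposed as attributes on `MassStats` instances. A minimal sketch of how they might be used to look up regional settings; the import path follows the changed file above, and the exact re-export surface is an assumption:

```python
# Sketch: reading the new region/output-type metadata added in 0.5.4.
# Import path assumed from terrakio_core/endpoints/mass_stats.py above.
from terrakio_core.endpoints.mass_stats import OutputTypes, Region, regions

eu_cfg = regions[Region.eu]
print(eu_cfg["name"])            # europe-west4
print(eu_cfg["bucket"])          # terrakio-mass-requests-eu
print(OutputTypes.netcdf.value)  # "netcdf" - the value placed in a request's "output" field
```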
@@ -156,7 +209,6 @@ class MassStats:
 
         self.console.print(f"[bold green]All {number_of_jobs} jobs finished![/bold green]")
 
-    # below are functions related to collection
     @require_api_key
     async def create_collection(
         self,
@@ -193,9 +245,8 @@ class MassStats:
             payload["location"] = location
 
         response, status = await self._client._terrakio_request("POST", f"collections/{collection}", json=payload)
-
        if status != 200:
-            if status == 400:
+            if status == 400 or status == 409:
                 raise CollectionAlreadyExistsError(f"Collection {collection} already exists", status_code=status)
             if status == 422:
                 raise InvalidCollectionTypeError(f"Invalid collection type: {collection_type}", status_code=status)
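
`create_collection` now maps HTTP 409 (as well as 400) to `CollectionAlreadyExistsError`. A hedged sketch of treating an existing collection as a no-op; the `mass_stats` attribute name on the client and the omitted optional arguments (bucket, location, collection type) are assumptions not confirmed by this diff:

```python
# Sketch: tolerate a pre-existing collection when (re)creating it.
from terrakio_core.exceptions import CollectionAlreadyExistsError

async def ensure_collection(client, name: str) -> None:
    try:
        # "client.mass_stats" is an assumed attribute name for this endpoint group.
        await client.mass_stats.create_collection(collection=name)
    except CollectionAlreadyExistsError:
        # As of 0.5.4 this is raised for both 400 and 409 responses.
        pass
```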
@@ -289,13 +340,15 @@ class MassStats:
             CollectionNotFoundError: If the collection is not found
             DeleteCollectionError: If the API request fails due to unknown reasons
         """
-        payload = {
-            "full": full,
-            "outputs": outputs,
-            "data": data
+        params = {
+            "full": str(full).lower(),
+            "data": str(data).lower()
         }
 
-        response, status = await self._client._terrakio_request("DELETE", f"collections/{collection}", json=payload)
+        if outputs:
+            params["outputs"] = outputs
+
+        response, status = await self._client._terrakio_request("DELETE", f"collections/{collection}", params=params)
 
         if status != 200:
             if status == 404:
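
`delete_collection` now sends its flags as query parameters rather than a JSON body, stringifying the booleans so they serialize as lowercase `true`/`false` and only including `outputs` when it is truthy. The conversion in isolation:

```python
# Illustration of the boolean-to-query-parameter conversion used above.
full, data, outputs = True, False, None

params = {"full": str(full).lower(), "data": str(data).lower()}
assert params == {"full": "true", "data": "false"}

if outputs:                      # "outputs" is only added when a value is supplied
    params["outputs"] = outputs
```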
@@ -564,7 +617,7 @@
 
         return response
 
-    async def _upload_requests(
+    async def _get_upload_url(
         self,
         collection: str
     ) -> Dict[str, Any]:
@@ -641,6 +694,8 @@
         response = await self._client._regular_request("PUT", url, data=body, headers=headers)
         return response
 
+
+
     @require_api_key
     async def generate_data(
         self,
@@ -672,7 +727,7 @@
 
         await self.get_collection(collection = collection)
 
-        upload_urls = await self._upload_requests(
+        upload_urls = await self._get_upload_url(
             collection = collection
         )
 
@@ -696,6 +751,7 @@
 
         return response
 
+
     @require_api_key
     async def post_processing(
         self,
@@ -1017,7 +1073,7 @@
         """
         await self.get_collection(collection = collection)
 
-        upload_urls = await self._upload_requests(collection=collection)
+        upload_urls = await self._get_upload_url(collection=collection)
         url = upload_urls['url']
 
         # Handle requests_file - either file path (str) or file object
@@ -1124,4 +1180,337 @@
         task_id = response["task_id"]
         await self.track_progress(task_id)
 
-        return {"task_id": task_id}
+        return {"task_id": task_id}
+
+    def tile_generator(self, x_min, y_min, x_max, y_max, aoi, crs, res, tile_size, expression, output = OutputTypes.netcdf, fully_cover=True):
+
+        i_max = int((x_max-x_min)/(tile_size*res))
+        j_max = int((y_max-y_min)/(tile_size*res))
+        if fully_cover:
+            i_max += 1
+            j_max += 1
+        for j in range(0, int(j_max)):
+            for i in range(0, int(i_max)):
+                x = x_min + i*(tile_size*res)
+                y = y_max - j*(tile_size*res)
+                bbox = shapely.geometry.box(x, y-(tile_size*res), x + (tile_size*res), y)
+                if not aoi.geometry[0].intersects(bbox):
+                    continue
+                feat = {"type": "Feature", "geometry": bbox.__geo_interface__}
+                data = {
+                    "feature": feat,
+                    "in_crs": crs,
+                    "out_crs": crs,
+                    "resolution": res,
+                    "expr" : expression,
+                    "output" : output.value,
+                }
+                yield data, i, j
+
+
+    def get_bounds(self, aoi, crs, to_crs = None):
+        gdf : gpd.GeoDataFrame = gpd.read_file(aoi)
+        gdf = gdf.set_crs(crs, allow_override=True)
+        if to_crs:
+            gdf = gdf.to_crs(to_crs)
+        bounds = gdf.geometry[0].bounds
+        return *bounds, gdf
+
+    def validate_date(self, date: str) -> str:
+        try:
+            date = dateutil.parser.parse(date)
+            return date
+        except ValueError:
+            print(f"Invalid date: {date}")
+            raise typer.BadParameter(f"Invalid date: {date}")
+
+
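
`tile_generator` walks a regular grid over the AOI bounds: each tile spans `tile_size * res` CRS units, the grid is anchored at the top-left corner `(x_min, y_max)`, `fully_cover=True` adds one extra row and column so partial tiles at the far edges are still emitted, and tiles that do not intersect the AOI geometry are skipped. A standalone sketch of the same grid arithmetic with made-up bounds:

```python
# Sketch of the tile-grid arithmetic used by tile_generator above (example numbers).
import shapely.geometry

x_min, y_min, x_max, y_max = 0.0, 0.0, 2500.0, 1200.0  # hypothetical AOI bounds in CRS units
tile_size, res = 100, 10                                # 100 px per tile side at 10 units/px
step = tile_size * res                                  # 1000 CRS units per tile

i_max = int((x_max - x_min) / step) + 1                 # +1 column because fully_cover=True
j_max = int((y_max - y_min) / step) + 1                 # +1 row

tiles = []
for j in range(j_max):
    for i in range(i_max):
        x = x_min + i * step
        y = y_max - j * step                            # rows count down from the top edge
        tiles.append(shapely.geometry.box(x, y - step, x + step, y))

print(len(tiles))  # 3 columns x 2 rows = 6 candidate tiles before the AOI intersection filter
```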
+    @require_api_key
+    async def dataset(
+        self,
+        products: List[str],
+        name: str,
+        bucket: str = "terrakio",
+        location: str = "testing/MSWXsmall",
+        aoi: Optional[str] = None,
+        expression: Optional[str] = None,
+        date: Optional[str] = "2021-01-01",
+        tile_size: float = 100,
+        crs: str = "epsg:4326",
+        res: float = 10,
+        out_res: float = 10,
+        no_data: float = -9999,
+        dtype: str = "float32",
+        create_doc: bool = False,
+        skip_test: bool = False,
+        force_res: bool = False,
+        to_crs: Optional[str] = None,
+        fully_cover: bool = True,
+        skip_existing: bool = False,
+    ) -> Dict[str, Any]:
+        """
+        Generate a dataset with the specified parameters.
+
+        Args:
+            products: List of product names
+            name: Name of the dataset
+            bucket: Storage bucket
+            location: Storage location
+            aoi: Path to GeoJSON file containing area of interest
+            expression: Expression for data processing
+            date: Date in YYYY-MM-DD format
+            tile_size: Size of tiles (default: 100)
+            crs: Coordinate reference system (default: "epsg:4326")
+            res: Resolution (default: 10)
+            out_res: Output resolution (default: 10)
+            no_data: No data value (default: -9999)
+            dtype: Data type (default: "float32")
+            create_doc: Add dataset to the DB (default: False)
+            skip_test: Skip testing the expression (default: False)
+            force_res: Force resolution in case requests are too large (default: False)
+            to_crs: Target coordinate reference system
+            fully_cover: Fully cover the area (default: True)
+            skip_existing: Skip existing data (default: False)
+
+        Returns:
+            Response containing task_id and collection name
+
+        Raises:
+            CollectionNotFoundError: If the collection is not found
+            GetTaskError: If the API request fails
+        """
+        await self.create_collection(collection = name, bucket = bucket, location = location)
+        date = self.validate_date(date)
+        sample = None
+        reqs = []
+        x_min, y_min, x_max, y_max, aoi = self.get_bounds(aoi, crs, to_crs)
+        if to_crs is None:
+            to_crs = crs
+        c=0
+        for tile_req, i, j in self.tile_generator(x_min, y_min, x_max, y_max, aoi, to_crs, res, tile_size, expression, fully_cover = fully_cover):
+            c+=1
+            if force_res:
+                tile_req["force_res"] = True
+            req_names = []
+            for product in products:
+                req_names.append(f"{product}_{date.strftime('%Y%m%d')}000000_{i:03d}_{j:03d}_00")
+            reqs.append({"group": name, "file": req_names, "request": tile_req})
+            if sample is None:
+                sample = tile_req["expr"]
+        i_max = int((x_max-x_min)/(tile_size*res))
+        j_max = int((y_max-y_min)/(tile_size*res))
+        geot = [x_min, out_res, 0, y_max, 0, -out_res]
+        if not skip_test:
+            result = await self._client.geoquery(**reqs[0]["request"], debug = "requests")
+            request_count = result.get('request_count', 0)
+
+            user_quota = await self._client.auth.get_user_quota()
+            user_quota = user_quota.get('quota', -9999)
+
+            if user_quota !=-9999 and user_quota < len(reqs) * request_count:
+                raise QuotaInsufficientError(f"User quota is insufficient. Please contact support to increase your quota.")
+
+        upload_urls = await self._get_upload_url(collection=name)
+        url = upload_urls['url']
+        await self._upload_json_data(reqs, url, use_gzip=True)
+
+        payload = {"output": "snp", "skip_existing": skip_existing}
+
+        task_id, status = await self._client._terrakio_request("POST", f"collections/{name}/generate_data", json=payload)
+        task_id = task_id["task_id"]
+        if dtype == self.Dataset_Dtype.uint8.value:
+            no_data = int(no_data)
+        if create_doc:
+            await self._client.datasets.create_dataset(
+                name=name,
+                products=products,
+                dates_iso8601=[date.isoformat()],
+                proj4=pyproj.CRS.to_proj4(aoi.crs),
+                i_max=i_max,
+                j_max=j_max,
+                x_size=int((res*tile_size)/out_res),
+                y_size=int((res*tile_size)/out_res),
+                geotransform=geot,
+                no_data=no_data,
+                data_type=dtype,
+                bucket=bucket,
+                path=f"{location}/%s_%s_%03d_%03d_%02d.snp",
+                max_zoom=0,
+            )
+
+        await self.track_progress(task_id)
+
+
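
A hedged usage sketch for the new `dataset()` workflow. The `AsyncClient` constructor arguments, the `mass_stats` attribute name, and the example product and expression are assumptions not confirmed by this diff; the keyword arguments mirror the signature above.

```python
# Hypothetical end-to-end call of MassStats.dataset(); client wiring is an assumption.
import asyncio
from terrakio_core import AsyncClient

async def main():
    client = AsyncClient()                 # assumed constructor; configure credentials as usual
    await client.mass_stats.dataset(       # "mass_stats" attribute name is assumed
        products=["air_temperature"],      # example product name
        name="mswx_demo",
        aoi="aoi.geojson",                 # GeoJSON file whose first geometry defines the AOI
        expression="MSWX.air_temperature@(year=2021)",  # example expression
        date="2021-01-01",
        crs="epsg:4326",
        res=10,
        create_doc=False,                  # set True to also register the dataset document
    )

asyncio.run(main())
```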
+    @require_api_key
+    async def tiles(
+        self,
+        collection: str,
+        name: str = "irrigation_2019",
+        aoi: Optional[str] = None,
+        expression: str = "NSWIrrigation.landuse@(year=2019)",
+        output: OutputTypes = OutputTypes.netcdf,
+        tile_size: float = 10000,
+        crs: str = "epsg:3577",
+        res: float = 10,
+        skip_test: bool = False,
+        force_res: bool = False,
+        to_crs: Optional[str] = None,
+        fully_cover: bool = True,
+        skip_existing: bool = False,
+    ) -> Dict[str, Any]:
+        """
+        Generate tiles with the specified parameters.
+
+        Args:
+            collection: Name of the collection
+            name: Name of the dataset (default: "irrigation_2019")
+            aoi: Path to GeoJSON file containing area of interest
+            expression: Expression for data processing (default: "NSWIrrigation.landuse@(year=2019)")
+            output: Output format (default: "netcdf")
+            tile_size: Size of tiles (default: 10000)
+            crs: Coordinate reference system (default: "epsg:3577")
+            res: Resolution (default: 10)
+            skip_test: Skip testing the expression (default: False)
+            force_res: Force resolution in case requests are too large (default: False)
+            to_crs: Target coordinate reference system
+            fully_cover: Fully cover the area (default: True)
+            skip_existing: Skip existing data (default: False)
+
+        Returns:
+            Response containing task_id
+
+        Raises:
+            CollectionNotFoundError: If the collection is not found
+            GetTaskError: If the API request fails
+        """
+
+
+        await self.get_collection(collection=collection)
+
+        reqs = []
+        sample = None
+        x_min, y_min, x_max, y_max, aoi = self.get_bounds(aoi, crs, to_crs)
+        if to_crs is None:
+            to_crs = crs
+        for tile_req, i, j in self.tile_generator(x_min, y_min, x_max, y_max, aoi, to_crs, res, tile_size, expression, output, fully_cover):
+            if force_res:
+                tile_req["force_res"] = True
+            req_name = f"{name}_{i:02d}_{j:02d}"
+            reqs.append({"group": "tiles", "file": req_name, "request": tile_req})
+            if sample is None:
+                sample = tile_req["expr"]
+
+        if not skip_test:
+            print("the reqs are ", reqs[0])
+            result = await self._client.geoquery(**reqs[0]["request"], debug = "requests")
+            request_count = result.get('request_count', 0)
+
+            user_quota = await self._client.auth.get_user_quota()
+            user_quota = user_quota.get('quota', -9999)
+
+            if user_quota !=-9999 and user_quota < len(reqs) * request_count:
+                raise QuotaInsufficientError(f"User quota is insufficient. Please contact support to increase your quota.")
+
+        count = len(reqs)
+        groups = list(set(dic["group"] for dic in reqs))
+        print(f"[green]{count}[/green] requests with [blue]{len(groups)}[/blue] groups identified.")
+        upload_urls = await self._get_upload_url(collection=collection)
+        url = upload_urls['url']
+        await self._upload_json_data(reqs, url, use_gzip=True)
+
+        payload = {"output": output.value, "skip_existing": skip_existing}
+
+        task_id, status = await self._client._terrakio_request("POST", f"collections/{collection}/generate_data", json=payload)
+        task_id = task_id["task_id"]
+
+        await self.track_progress(task_id)
+
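
A similar hedged sketch for `tiles()`, assuming an authenticated client as in the previous example. The output-type enums are reachable through the `MassStats` instance attributes added in `__init__`.

```python
# Hypothetical usage of MassStats.tiles(); assumes an authenticated AsyncClient
# and the "mass_stats" attribute name from the previous sketch.
async def export_tiles(client):
    await client.mass_stats.tiles(
        collection="irrigation",                       # collection must already exist
        name="irrigation_2019",
        aoi="nsw.geojson",                             # hypothetical AOI file
        expression="NSWIrrigation.landuse@(year=2019)",
        output=client.mass_stats.OutputTypes.geotiff,  # enums exposed on the instance
        tile_size=10000,
        crs="epsg:3577",
        res=10,
    )
```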
+    @require_api_key
+    async def polygons(
+        self,
+        collection: str,
+        aoi: str,
+        expression: str = "mean:space(MSWX.air_temperature@(year=2022))",
+        output: OutputTypes = OutputTypes.netcdf,
+        id_field: str = "GID_0",
+        crs: str = "epsg:4326",
+        res: float = -1,
+        skip_test: bool = False,
+        skip_existing: bool = False,
+    ) -> Dict[str, Any]:
+        """
+        Generate mass-stats for polygons in a GeoJSON file using the same expression.
+
+        Args:
+            collection: Name of the collection
+            name: Name of the dataset
+            aoi: Path to GeoJSON file containing area of interest
+            expression: Expression for data processing (default: "mean:space(MSWX.air_temperature@(year=2022))")
+            output: Output format (default: "netcdf")
+            id_field: Field name to use as identifier (default: "GID_0")
+            crs: Coordinate reference system (default: "epsg:4326")
+            res: Resolution (default: -1)
+            skip_test: Skip testing the expression (default: False)
+            skip_existing: Skip existing data (default: False)
+
+        Returns:
+            Response containing task_id
+
+        Raises:
+            CollectionNotFoundError: If the collection is not found
+            GetTaskError: If the API request fails
+            ValueError: If id_field not found in feature properties
+        """
+        await self.get_collection(collection=collection)
+
+        gdf = gpd.read_file(aoi)
+        sample = None
+        features = gdf.__geo_interface__
+        reqs = []
+        for feature in features["features"]:
+            feat = {
+                "type": "Feature",
+                "properties": {},
+                "geometry": feature["geometry"],
+            }
+            request = {
+                "feature": feat,
+                "expr": expression,
+                "output": output.value,
+                "in_crs": crs,
+                "out_crs": crs,
+                "resolution": res,
+            }
+            if id_field not in feature["properties"]:
+                raise ValueError(f"ID field {id_field} not found in feature properties.")
+            reqs.append({"group": "polygons", "file": feature["properties"][id_field], "request": request, "metadata": feature["properties"]})
+            if sample is None:
+                sample = request["expr"]
+
+        # Test first request to ensure expression is valid
+        if not skip_test:
+            result = await self._client.geoquery(**reqs[0]["request"], debug="requests")
+            request_count = result.get('request_count', 0)
+
+            user_quota = await self._client.auth.get_user_quota()
+            user_quota = user_quota.get('quota', -9999)
+
+            if user_quota != -9999 and user_quota < len(reqs) * request_count:
+                raise QuotaInsufficientError(f"User quota is insufficient. Please contact support to increase your quota.")
+
+        count = len(reqs)
+        groups = list(set(dic["group"] for dic in reqs))
+        print(f"[green]{count}[/green] requests with [blue]{len(groups)}[/blue] groups identified.")
+
+        upload_urls = await self._get_upload_url(collection=collection)
+        url = upload_urls['url']
+        await self._upload_json_data(reqs, url, use_gzip=True)
+
+        payload = {"output": output.value, "skip_existing": skip_existing}
+
+        task_id, status = await self._client._terrakio_request("POST", f"collections/{collection}/generate_data", json=payload)
+        task_id = task_id["task_id"]
+
+        await self.track_progress(task_id)
+
+        return {"task_id": task_id, "collection": collection}
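
And a hedged sketch for `polygons()`, which builds one request per feature in the GeoJSON file and requires `id_field` to exist in every feature's properties. Collection and file names below are hypothetical.

```python
# Hypothetical usage of MassStats.polygons(); same client-wiring assumptions as above.
async def zonal_means(client):
    return await client.mass_stats.polygons(      # returns {"task_id": ..., "collection": ...}
        collection="country-stats",               # collection must already exist
        aoi="countries.geojson",                  # one request is built per feature
        expression="mean:space(MSWX.air_temperature@(year=2022))",
        id_field="GID_0",                         # must be present in every feature's properties
        output=client.mass_stats.OutputTypes.csv,
    )
```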
terrakio_core/exceptions.py
@@ -391,6 +391,12 @@ class CancelAllTasksError(Exception):
 
 class GeoQueryError(Exception):
     """Exception raised for geo query errors."""
+    def __init__(self, message: str, status_code: int = None):
+        super().__init__(message)
+        self.status_code = status_code
+
+class QuotaInsufficientError(Exception):
+    """Exception raised for quota insufficient errors."""
     def __init__(self, message: str, status_code: int = None):
         super().__init__(message)
         self.status_code = status_code
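
`QuotaInsufficientError` follows the same shape as the surrounding exceptions (message plus optional `status_code`). A hedged sketch of catching it around one of the bulk methods; per the diff above it is raised during the pre-flight test (`skip_test=False`) when the user quota is lower than `len(reqs) * request_count`:

```python
# Sketch: handling the new quota check; client wiring as in the earlier sketches.
from terrakio_core.exceptions import QuotaInsufficientError

async def run_polygons_job(client, **kwargs):
    try:
        return await client.mass_stats.polygons(**kwargs)
    except QuotaInsufficientError as exc:
        # Raised before any requests are uploaded when the quota is too small for the job.
        print(f"Quota too low for this job: {exc}")
        return None
```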