terrakio-core 0.4.98__tar.gz → 0.5.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/PKG-INFO +5 -2
  2. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/pyproject.toml +5 -5
  3. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/__init__.py +4 -2
  4. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/accessors.py +4 -4
  5. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/async_client.py +31 -172
  6. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/config.py +3 -44
  7. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/convenience_functions/create_dataset_file.py +5 -5
  8. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/convenience_functions/zonal_stats.py +12 -12
  9. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/endpoints/auth.py +97 -47
  10. terrakio_core-0.5.8/terrakio_core/endpoints/collections/__init__.py +72 -0
  11. terrakio_core-0.5.8/terrakio_core/endpoints/collections/collections.py +166 -0
  12. terrakio_core-0.5.8/terrakio_core/endpoints/collections/common.py +100 -0
  13. terrakio_core-0.5.8/terrakio_core/endpoints/collections/data_operations.py +428 -0
  14. terrakio_core-0.5.8/terrakio_core/endpoints/collections/generation.py +467 -0
  15. terrakio_core-0.5.8/terrakio_core/endpoints/collections/ingestion.py +228 -0
  16. terrakio_core-0.5.8/terrakio_core/endpoints/collections/tasks.py +288 -0
  17. terrakio_core-0.5.8/terrakio_core/endpoints/collections/zonal_stats.py +104 -0
  18. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/endpoints/dataset_management.py +132 -63
  19. terrakio_core-0.5.8/terrakio_core/endpoints/group_management.py +421 -0
  20. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/endpoints/model_management.py +118 -140
  21. terrakio_core-0.5.8/terrakio_core/endpoints/user_management.py +216 -0
  22. terrakio_core-0.5.8/terrakio_core/exceptions.py +402 -0
  23. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/sync_client.py +13 -128
  24. terrakio_core-0.4.98/terrakio_core/endpoints/group_management.py +0 -228
  25. terrakio_core-0.4.98/terrakio_core/endpoints/mass_stats.py +0 -712
  26. terrakio_core-0.4.98/terrakio_core/endpoints/user_management.py +0 -131
  27. terrakio_core-0.4.98/terrakio_core/exceptions.py +0 -20
  28. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/.gitignore +0 -0
  29. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/README.md +0 -0
  30. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/client.py +0 -0
  31. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/convenience_functions/geoquries.py +0 -0
  32. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/endpoints/space_management.py +0 -0
  33. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/helper/bounded_taskgroup.py +0 -0
  34. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/helper/decorators.py +0 -0
  35. {terrakio_core-0.4.98 → terrakio_core-0.5.8}/terrakio_core/helper/tiles.py +0 -0
--- terrakio_core-0.4.98/PKG-INFO
+++ terrakio_core-0.5.8/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: terrakio-core
-Version: 0.4.98
+Version: 0.5.8
 Summary: Core package for the terrakio-python-api
 Requires-Python: >=3.11
 Requires-Dist: aiofiles>=24.1.0
@@ -10,10 +10,13 @@ Requires-Dist: h5netcdf>=1.6.3
 Requires-Dist: h5py>=3.14.0
 Requires-Dist: nest-asyncio>=1.6.0
 Requires-Dist: netcdf4>=1.7.2
-Requires-Dist: onnxruntime>=1.22.1
+Requires-Dist: onnxruntime>=1.23.1
 Requires-Dist: psutil>=7.0.0
+Requires-Dist: python-snappy>=0.7.3
+Requires-Dist: rasterio>=1.4.3
 Requires-Dist: scipy>=1.16.1
 Requires-Dist: shapely>=2.1.1
+Requires-Dist: typer>=0.19.2
 Requires-Dist: xarray>=2025.7.1
 Provides-Extra: ml
 Requires-Dist: scikit-learn>=1.7.1; extra == 'ml'

--- terrakio_core-0.4.98/pyproject.toml
+++ terrakio_core-0.5.8/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "terrakio-core"
-version = "0.4.98"
+version = "0.5.8"
 description = "Core package for the terrakio-python-api"
 readme = "README.md"
 requires-python = ">=3.11"
@@ -11,15 +11,15 @@ dependencies = [
     "h5netcdf>=1.6.3",
     "h5py>=3.14.0",
     "netcdf4>=1.7.2",
-    "h5py>=3.14.0",
-    "netcdf4>=1.7.2",
-    "onnxruntime>=1.22.1",
     "psutil>=7.0.0",
     "scipy>=1.16.1",
-    "scipy>=1.16.1",
     "shapely>=2.1.1",
     "xarray>=2025.7.1",
     "nest-asyncio>=1.6.0",
+    "typer>=0.19.2",
+    "onnxruntime>=1.23.1",
+    "rasterio>=1.4.3",
+    "python-snappy>=0.7.3",
 ]

 [project.optional-dependencies]

--- terrakio_core-0.4.98/terrakio_core/__init__.py
+++ terrakio_core-0.5.8/terrakio_core/__init__.py
@@ -5,13 +5,15 @@ Terrakio Core
 Core components for Terrakio API clients.
 """

-__version__ = "0.4.98"
+# Suppress ONNX Runtime GPU device discovery warnings - MUST BE FIRST!
+import os
+os.environ['ORT_LOGGING_LEVEL'] = '3'
+__version__ = "0.5.8"

 from .async_client import AsyncClient
 from .sync_client import SyncClient as Client
 from . import accessors

-
 __all__ = [
     "AsyncClient",
     "Client"

--- terrakio_core-0.4.98/terrakio_core/accessors.py
+++ terrakio_core-0.5.8/terrakio_core/accessors.py
@@ -12,7 +12,7 @@ import xarray as xr

 # Local/relative imports
 from .convenience_functions.zonal_stats import cloud_object
-from .endpoints.mass_stats import MassStats
+from .endpoints.collections import Collections

 @pd.api.extensions.register_dataframe_accessor("geo")
 class GeoXarrayAccessor:
@@ -686,7 +686,7 @@ class GeoXarrayAccessor:
             asyncio.set_event_loop(loop)
             try:
                 return loop.run_until_complete(
-                    self._client.mass_stats.track_job([self._obj.job_id])
+                    self._client.collections.track_job([self._obj.job_id])
                 )
             finally:
                 loop.close()
@@ -845,7 +845,7 @@ class GeoXarrayAccessor:
         script_content = self._generate_post_processing_script()
         client = self._client
         if client:
-            mass_stats = MassStats(client)
+            collections = Collections(client)

             import asyncio
             import concurrent.futures
@@ -856,7 +856,7 @@
             # we don't actually have the dataset name, currently it is just getting job named zonal stats job
             try:
                 return loop.run_until_complete(
-                    mass_stats.zonal_stats_transform(
+                    collections.zonal_stats_transform(
                         data_name=self._obj.job_name,
                         output="netcdf",
                         consumer = script_content.encode('utf-8'),

--- terrakio_core-0.4.98/terrakio_core/async_client.py
+++ terrakio_core-0.5.8/terrakio_core/async_client.py
@@ -9,10 +9,10 @@ from geopandas import GeoDataFrame
 from shapely.geometry.base import BaseGeometry as ShapelyGeometry
 from shapely.geometry import mapping
 from .client import BaseClient
-from .exceptions import APIError
+from .exceptions import APIError, NetworkError, GeoQueryError
 from .endpoints.dataset_management import DatasetManagement
 from .endpoints.user_management import UserManagement
-from .endpoints.mass_stats import MassStats
+from .endpoints.collections import Collections
 from .endpoints.group_management import GroupManagement
 from .endpoints.space_management import SpaceManagement
 from .endpoints.model_management import ModelManagement
@@ -26,7 +26,7 @@ class AsyncClient(BaseClient):
         super().__init__(url, api_key, verbose)
         self.datasets = DatasetManagement(self)
         self.users = UserManagement(self)
-        self.mass_stats = MassStats(self)
+        self.collections = Collections(self)
         self.groups = GroupManagement(self)
         self.space = SpaceManagement(self)
         self.model = ModelManagement(self)
@@ -36,13 +36,10 @@ class AsyncClient(BaseClient):

     async def _terrakio_request(self, method: str, endpoint: str, **kwargs):
         if self.session is None:
-            # To this:
             headers = {
                 'x-api-key': self.key,
                 'Authorization': self.token
             }
-
-            # Only add Content-Type if it's a JSON request
             if 'json' in kwargs:
                 headers['Content-Type'] = 'application/json'
             clean_headers = {k: v for k, v in headers.items() if v is not None}
@@ -57,20 +54,16 @@
         for attempt in range(self.retry + 1):
             try:
                 async with session.request(method, url, **kwargs) as response:
+
                     if not response.ok and self._should_retry(response.status, attempt):
                         self.logger.info(f"Request failed (attempt {attempt+1}/{self.retry+1}): {response.status}. Retrying...")
                         continue
-                    if not response.ok:
-                        error_msg = f"API request failed: {response.status} {response.reason}"
-                        try:
-                            error_data = await response.json()
-                            if "detail" in error_data:
-                                error_msg += f" - {error_data['detail']}"
-                        except:
-                            pass
-                        raise APIError(error_msg, status_code=response.status)
-                    return await self._parse_response(response)
-
+                    if response.ok:
+                        data = await self._parse_response(response)
+                        return data, response.status
+                    else:
+                        error_data = await response.json()
+                        return error_data, response.status
             except aiohttp.ClientError as e:
                 last_exception = e
                 if attempt < self.retry:
@@ -78,8 +71,8 @@
                     continue
                 else:
                     break
-
-        raise APIError(f"Networking error, request failed after {self.retry+1} attempts: {last_exception}", status_code=None)
+
+        raise NetworkError(f"Network failure after {self.retry+1} attempts: {last_exception}")

     def _should_retry(self, status_code: int, attempt: int) -> bool:
         """Determine if the request should be retried based on status code."""
@@ -189,162 +182,28 @@
             "validated": validated,
             **kwargs
         }
-        result = await self._terrakio_request("POST", "geoquery", json=payload)
-
-        return result
-
-    async def zonal_stats(
-        self,
-        gdf: GeoDataFrame,
-        expr: str,
-        conc: int = 20,
-        in_crs: str = "epsg:4326",
-        out_crs: str = "epsg:4326",
-        resolution: int = -1,
-        geom_fix: bool = False,
-        mass_stats: bool = False,
-        id_column: Optional[str] = None,
-    ):
-        """
-        Compute zonal statistics for all geometries in a GeoDataFrame.
-
-        Args:
-            gdf (GeoDataFrame): GeoDataFrame containing geometries
-            expr (str): Terrakio expression to evaluate, can include spatial aggregations
-            conc (int): Number of concurrent requests to make
-            in_crs (str): Input coordinate reference system
-            out_crs (str): Output coordinate reference system
-            resolution (int): Resolution parameter
-            geom_fix (bool): Whether to fix the geometry (default False)
-            mass_stats (bool): Whether to use mass stats for processing (default False)
-            id_column (Optional[str]): Name of the ID column to use (default None)
-
-        Returns:
-            geopandas.GeoDataFrame: GeoDataFrame with added columns for results
-
-        Raises:
-            ValueError: If concurrency is too high or if data exceeds memory limit without streaming
-            APIError: If the API request fails
-        """
-        # the sync client didn't pass the self here, so the client is now async
-        return await _zonal_stats(
-            client=self,
-            gdf=gdf,
-            expr=expr,
-            conc=conc,
-            in_crs=in_crs,
-            out_crs=out_crs,
-            resolution=resolution,
-            geom_fix=geom_fix,
-            mass_stats=mass_stats,
-            id_column=id_column,
-        )
-
-    async def create_dataset_file(
-        self,
-        name: str,
-        aoi: str,
-        expression: str,
-        output: str,
-        in_crs: str = "epsg:4326",
-        res: float = 0.0001,
-        region: str = "aus",
-        to_crs: str = "epsg:4326",
-        overwrite: bool = True,
-        skip_existing: bool = False,
-        non_interactive: bool = True,
-        poll_interval: int = 30,
-        download_path: str = "/home/user/Downloads",
-        mask = True,
-        max_file_size_mb: int = 5120, # Default to 5GB
-        tile_size: int = 1024,
-    ) -> dict:
-        """
-        Create a dataset file using mass stats operations.
-
-        Args:
-            aoi (str): Area of interest
-            expression (str): Terrakio expression to evaluate
-            output (str): Output format
-            in_crs (str): Input coordinate reference system (default "epsg:4326")
-            res (float): Resolution (default 0.0001)
-            region (str): Region (default "aus")
-            to_crs (str): Target coordinate reference system (default "epsg:4326")
-            overwrite (bool): Whether to overwrite existing files (default True)
-            skip_existing (bool): Whether to skip existing files (default False)
-            non_interactive (bool): Whether to run non-interactively (default True)
-            poll_interval (int): Polling interval in seconds (default 30)
-            download_path (str): Download path (default "/home/user/Downloads")
-
-        Returns:
-            dict: Dictionary containing generation_task_id and combine_task_id
-
-        Raises:
-            ConfigurationError: If mass stats client is not properly configured
-            RuntimeError: If job fails
-        """
-        return await _create_dataset_file(
-            client=self,
-            aoi=aoi,
-            expression=expression,
-            output=output,
-            in_crs=in_crs,
-            res=res,
-            region=region,
-            to_crs=to_crs,
-            overwrite=overwrite,
-            skip_existing=skip_existing,
-            non_interactive=non_interactive,
-            poll_interval=poll_interval,
-            download_path=download_path,
-            name=name,
-            mask=mask,
-            max_file_size_mb=max_file_size_mb,
-            tile_size=tile_size
-        )
-
-    async def geo_queries(
-        self,
-        queries: list[dict],
-        conc: int = 20,
-    ):
-        """
-        Execute multiple geo queries concurrently.
-
-        Args:
-            queries (list[dict]): List of dictionaries containing query parameters.
-                Each query must have 'expr', 'feature', and 'in_crs' keys.
-            conc (int): Number of concurrent requests to make (default 20, max 100)
-
-        Returns:
-            Union[float, geopandas.GeoDataFrame]:
-                - float: Average of all results if results are integers
-                - GeoDataFrame: GeoDataFrame with geometry and dataset columns if results are xarray datasets
-
-        Raises:
-            ValueError: If queries list is empty, concurrency is too high, or queries are malformed
-            APIError: If the API request fails
-
-        Example:
-            queries = [
-                {
-                    'expr': 'WCF.wcf',
-                    'feature': {'type': 'Feature', 'geometry': {...}, 'properties': {}},
-                    'in_crs': 'epsg:4326'
-                },
-                {
-                    'expr': 'NDVI.ndvi',
-                    'feature': {'type': 'Feature', 'geometry': {...}, 'properties': {}},
-                    'in_crs': 'epsg:4326'
-                }
-            ]
-            result = await client.geo_queries(queries)
-        """
-        return await _request_geoquery_list(
-            client=self,
-            quries=queries, # Note: keeping original parameter name for compatibility
-            conc=conc,
-        )
+        result, status_code = await self._terrakio_request("POST", "geoquery", json=payload)
+
+        if status_code != 200:
+            raise GeoQueryError(result['detail'], status_code=status_code)
+
+        return result
+
+    async def zonal_stats(self, *args, **kwargs):
+        """Proxy to convenience zonal_stats with full argument passthrough."""
+        return await _zonal_stats(self, *args, **kwargs)
+
+    async def create_dataset_file(self, *args, **kwargs) -> dict:
+        """Proxy to convenience create_dataset_file with full argument passthrough."""
+        kwargs.setdefault('download_path', "/home/user/Downloads")
+        kwargs.setdefault('region', "aus")
+        return await _create_dataset_file(self, *args, **kwargs)
+
+    async def geo_queries(self, *args, **kwargs):
+        """Proxy to convenience request_geoquery_list with full argument passthrough."""
+        if 'queries' in kwargs:
+            kwargs['quries'] = kwargs.pop('queries')
+        return await _request_geoquery_list(self, *args, **kwargs)

     async def __aenter__(self):
         if self._session is None:
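
The contract of `_terrakio_request` changes here: it no longer raises `APIError` for non-OK responses, but returns a `(body, status_code)` tuple and reserves `NetworkError` for transport-level failures, so each endpoint classifies HTTP errors itself. A sketch of a caller under the new contract, mirroring the `geoquery` hunk above (the `detail` key is whatever the server returns there):

```python
async def example_call(self, payload: dict):
    # New contract: (parsed_body, status_code) for every HTTP response;
    # NetworkError is raised only if the request never completed.
    result, status_code = await self._terrakio_request(
        "POST", "geoquery", json=payload
    )
    if status_code != 200:
        # The endpoint chooses the exception type itself.
        raise GeoQueryError(result['detail'], status_code=status_code)
    return result
```

Note also that the `geo_queries` proxy keeps backward compatibility by mapping the public `queries` keyword onto the convenience function's (misspelled) `quries` parameter.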

--- terrakio_core-0.4.98/terrakio_core/config.py
+++ terrakio_core-0.5.8/terrakio_core/config.py
@@ -7,7 +7,7 @@ from .exceptions import ConfigurationError

 # Default configuration file locations
 DEFAULT_CONFIG_FILE = os.path.join(os.environ.get("HOME", ""), ".tkio_config.json")
-DEFAULT_API_URL = "https://api.terrak.io"
+DEFAULT_API_URL = "https://dev-au.terrak.io"

 def read_config_file(config_file: str = DEFAULT_CONFIG_FILE, logger: logging.Logger = None) -> Dict[str, Any]:
     """
@@ -27,14 +27,8 @@
     file if one doesn't exist and returns appropriate status flags.
     """
     config_path = Path(os.path.expanduser(config_file))
-    # the first circumstance is that the config file does not exist
-    # that we need to login before using any of the functions
-    # Check if config file exists
+
     if not config_path.exists():
-        # Create an empty config file
-        config_path.parent.mkdir(parents=True, exist_ok=True)
-        with open(config_path, 'w') as f:
-            json.dump({}, f)
         logger.info("No API key found. Please provide an API key to use this client.")
         return {
             'url': DEFAULT_API_URL,
@@ -45,12 +39,9 @@
         }

     try:
-        # Read the config file
         with open(config_path, 'r') as f:
             config_data = json.load(f)

-        # Read the config file data
-        # Check if config has an API key
         if not config_data or 'TERRAKIO_API_KEY' not in config_data or not config_data.get('TERRAKIO_API_KEY'):
             logger.info("No API key found. Please provide an API key to use this client.")
             return {
@@ -61,11 +52,8 @@
             'token': config_data.get('PERSONAL_TOKEN')
         }
     logger.info(f"Currently logged in as: {config_data.get('EMAIL')}")
-    # this meanb that we have already logged in to the tkio account

-    # Convert the JSON config to our expected format
     config = {
-        # Always use the default URL, not from config file
         'url': DEFAULT_API_URL,
         'key': config_data.get('TERRAKIO_API_KEY'),
         'is_logged_in': True,
@@ -84,33 +72,4 @@
         'is_logged_in': False,
         'user_email': None,
         'token': None
-    }
-
-def create_default_config(email: str, api_key: str, config_file: str = DEFAULT_CONFIG_FILE) -> None:
-    """
-    Create a default configuration file in JSON format.
-
-    Args:
-        email: User email
-        api_key: Terrakio API key
-        config_file: Path to configuration file
-
-    Raises:
-        ConfigurationError: If the configuration file can't be created
-    """
-    config_path = Path(os.path.expanduser(config_file))
-
-    # Ensure directory exists
-    config_path.parent.mkdir(parents=True, exist_ok=True)
-
-    try:
-        config_data = {
-            "EMAIL": email,
-            "TERRAKIO_API_KEY": api_key
-        }
-
-        with open(config_path, 'w') as f:
-            json.dump(config_data, f, indent=2)
-
-    except Exception as e:
-        raise ConfigurationError(f"Failed to create configuration file: {e}")
+    }
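
Two things to note here: `DEFAULT_API_URL` now points at `https://dev-au.terrak.io` rather than `https://api.terrak.io` (callers pinning the production endpoint should pass the URL explicitly), and `create_default_config` is removed, leaving `read_config_file` as the only contract for `~/.tkio_config.json` — a flat JSON object. A hypothetical writer matching the keys `read_config_file` expects, standing in for the removed helper:

```python
import json
from pathlib import Path

def write_tkio_config(email: str, api_key: str,
                      config_file: str = "~/.tkio_config.json") -> None:
    # Hypothetical replacement for the removed create_default_config;
    # EMAIL, TERRAKIO_API_KEY and PERSONAL_TOKEN are the keys that
    # read_config_file reads in the hunks above.
    path = Path(config_file).expanduser()
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps({
        "EMAIL": email,
        "TERRAKIO_API_KEY": api_key,
        # "PERSONAL_TOKEN": "...",  # optional; surfaced as 'token'
    }, indent=2))
```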

--- terrakio_core-0.4.98/terrakio_core/convenience_functions/create_dataset_file.py
+++ terrakio_core-0.5.8/terrakio_core/convenience_functions/create_dataset_file.py
@@ -48,7 +48,7 @@ async def create_dataset_file(
         tempreq.write(reqs)
         tempreqname = tempreq.name

-    task_id = await client.mass_stats.execute_job(
+    task_id = await client.collections.execute_job(
         name=body["name"],
         region=body["region"],
         output=body["output"],
@@ -64,7 +64,7 @@
     while True:
         try:
             taskid = task_id['task_id']
-            trackinfo = await client.mass_stats.track_job([taskid])
+            trackinfo = await client.collections.track_job([taskid])
             status = trackinfo[taskid]['status']
             if status == 'Completed':
                 client.logger.info('Data generated successfully!')
@@ -86,14 +86,14 @@

     os.unlink(tempreqname)

-    combine_result = await client.mass_stats.combine_tiles(body["name"], body["overwrite"], body["output"], max_file_size_mb=max_file_size_mb)
+    combine_result = await client.collections.combine_tiles(body["name"], body["overwrite"], body["output"], max_file_size_mb=max_file_size_mb)
     combine_task_id = combine_result.get("task_id")

     combine_start_time = time.time()
     client.logger.info(f"Tracking file generation job {combine_task_id}...")
     while True:
         try:
-            trackinfo = await client.mass_stats.track_job([combine_task_id])
+            trackinfo = await client.collections.track_job([combine_task_id])
             if body["output"] == "netcdf":
                 download_file_name = trackinfo[combine_task_id]['folder'] + '.nc'
             elif body["output"] == "geotiff":
@@ -117,7 +117,7 @@
             raise

     if download_path:
-        await client.mass_stats.download_file(
+        await client.collections.download_file(
             job_name=body["name"],
             bucket=bucket,
             file_type='processed',
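
Both loops in this file follow the same shape: submit via `client.collections.execute_job(...)` or `combine_tiles(...)`, then poll `track_job` until the task reports `Completed`. A condensed sketch of that polling loop, assuming `track_job` returns `{task_id: {'status': ..., 'folder': ..., ...}}` as the hunks above use it:

```python
import asyncio

async def wait_until_completed(client, task_id: str,
                               poll_interval: int = 30) -> dict:
    # track_job takes a list of ids and returns a dict keyed by id;
    # 'Completed' is the terminal success status used above.
    while True:
        trackinfo = await client.collections.track_job([task_id])
        info = trackinfo[task_id]
        if info['status'] == 'Completed':
            return info
        await asyncio.sleep(poll_interval)
```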

--- terrakio_core-0.4.98/terrakio_core/convenience_functions/zonal_stats.py
+++ terrakio_core-0.5.8/terrakio_core/convenience_functions/zonal_stats.py
@@ -92,7 +92,7 @@ class cloud_object(gpd.GeoDataFrame):
         GeoDataFrame: A GeoDataFrame containing the first n files.
         """

-        track_info = await self.client.mass_stats.track_job([self.job_id])
+        track_info = await self.client.collections.track_job([self.job_id])
         job_info = track_info[self.job_id]
         status = job_info['status']

@@ -491,7 +491,7 @@ def gdf_to_json(
     id_column: Optional[str] = None,
 ):
     """
-    Convert a GeoDataFrame to a list of JSON requests for mass_stats processing.
+    Convert a GeoDataFrame to a list of JSON requests for collections processing.

     Args:
         gdf: GeoDataFrame containing geometries and optional metadata
@@ -503,9 +503,9 @@
         id_column: Optional column name to use for group and file names

     Returns:
-        list: List of dictionaries formatted for mass_stats requests
+        list: List of dictionaries formatted for collections requests
     """
-    mass_stats_requests = []
+    collections_requests = []

     for idx, row in gdf.iterrows():
         request_feature = {
@@ -535,11 +535,11 @@
             "request": request_feature,
         }

-        mass_stats_requests.append(request_entry)
+        collections_requests.append(request_entry)

-    return mass_stats_requests
+    return collections_requests

-async def handle_mass_stats(
+async def handle_collections(
     client,
     gdf: GeoDataFrame,
     expr: str,
@@ -552,7 +552,7 @@
     request_json = gdf_to_json(gdf=gdf, expr=expr, in_crs=in_crs, out_crs=out_crs,
                                resolution=resolution, geom_fix=geom_fix, id_column=id_column)

-    job_response = await client.mass_stats.execute_job(
+    job_response = await client.collections.execute_job(
         name=f"zonal-stats-{str(uuid.uuid4())[:6]}",
         output="netcdf",
         config={},
@@ -581,7 +581,7 @@ async def zonal_stats(
 ):
     """Compute zonal statistics for all geometries in a GeoDataFrame."""
     if mass_stats:
-        mass_stats_id = await handle_mass_stats(
+        collections_id = await handle_collections(
             client = client,
             gdf = gdf,
             expr = expr,
@@ -591,9 +591,9 @@
             geom_fix = geom_fix,
             id_column = id_column,
         )
-        job_name = await client.mass_stats.track_job([mass_stats_id])
-        job_name = job_name[mass_stats_id]["name"]
-        cloud_files_object = cloud_object(job_id = mass_stats_id, job_name = job_name, client = client)
+        job_name = await client.collections.track_job([collections_id])
+        job_name = job_name[collections_id]["name"]
+        cloud_files_object = cloud_object(job_id = collections_id, job_name = job_name, client = client)

         return cloud_files_object
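
Taken together: `zonal_stats(..., mass_stats=True)` (the keyword keeps its old name despite the rename) now submits through `handle_collections` and returns a `cloud_object` tied to the job. A usage sketch, assuming a configured `AsyncClient` and an illustrative expression and input file:

```python
import asyncio
import geopandas as gpd
from terrakio_core import AsyncClient

async def main():
    gdf = gpd.read_file("fields.geojson")  # hypothetical input
    async with AsyncClient() as client:    # assumes credentials from ~/.tkio_config.json
        cloud_files = await client.zonal_stats(
            gdf=gdf,
            expr="NDVI.ndvi",              # illustrative expression
            mass_stats=True,               # route through collections.execute_job
        )
        print(cloud_files.job_id, cloud_files.job_name)

asyncio.run(main())
```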