terrakio-core 0.4.98.1b7__tar.gz → 0.5.8__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72)
  1. terrakio_core-0.5.8/.gitignore +166 -0
  2. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/PKG-INFO +4 -2
  3. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/pyproject.toml +4 -5
  4. {terrakio_core-0.4.98.1b7/build/lib → terrakio_core-0.5.8}/terrakio_core/__init__.py +4 -2
  5. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/accessors.py +4 -4
  6. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/async_client.py +7 -5
  7. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/convenience_functions/create_dataset_file.py +5 -5
  8. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/convenience_functions/zonal_stats.py +12 -12
  9. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/endpoints/auth.py +1 -0
  10. terrakio_core-0.5.8/terrakio_core/endpoints/collections/__init__.py +72 -0
  11. terrakio_core-0.5.8/terrakio_core/endpoints/collections/collections.py +166 -0
  12. terrakio_core-0.5.8/terrakio_core/endpoints/collections/common.py +100 -0
  13. terrakio_core-0.5.8/terrakio_core/endpoints/collections/data_operations.py +428 -0
  14. terrakio_core-0.5.8/terrakio_core/endpoints/collections/generation.py +467 -0
  15. terrakio_core-0.5.8/terrakio_core/endpoints/collections/ingestion.py +228 -0
  16. terrakio_core-0.5.8/terrakio_core/endpoints/collections/tasks.py +288 -0
  17. terrakio_core-0.5.8/terrakio_core/endpoints/collections/zonal_stats.py +104 -0
  18. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/endpoints/dataset_management.py +12 -9
  19. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/endpoints/model_management.py +2 -3
  20. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/exceptions.py +12 -0
  21. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/sync_client.py +4 -4
  22. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/__init__.py +0 -1
  23. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/async_client.py +0 -1
  24. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/client.py +0 -1
  25. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/config.py +0 -1
  26. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/convenience_functions/convenience_functions.py +0 -1
  27. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/endpoints/auth.py +0 -1
  28. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/endpoints/dataset_management.py +0 -1
  29. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/endpoints/group_management.py +0 -1
  30. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/endpoints/mass_stats.py +0 -1
  31. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/endpoints/model_management.py +0 -1
  32. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/endpoints/space_management.py +0 -1
  33. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/endpoints/user_management.py +0 -1
  34. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/exceptions.py +0 -1
  35. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/helper/bounded_taskgroup.py +0 -1
  36. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/helper/decorators.py +0 -1
  37. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/helper/tiles.py +0 -1
  38. terrakio_core-0.4.98.1b7/build/__editable__.terrakio_core-0.3.9-py3-none-any/terrakio_core/sync_client.py +0 -1
  39. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/accessors.py +0 -477
  40. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/async_client.py +0 -356
  41. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/config.py +0 -116
  42. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/convenience_functions/convenience_functions.py +0 -659
  43. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/endpoints/auth.py +0 -187
  44. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/endpoints/dataset_management.py +0 -371
  45. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/endpoints/group_management.py +0 -228
  46. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/endpoints/mass_stats.py +0 -693
  47. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/endpoints/model_management.py +0 -1128
  48. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/endpoints/user_management.py +0 -131
  49. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/exceptions.py +0 -20
  50. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/helper/tiles.py +0 -84
  51. terrakio_core-0.4.98.1b7/build/lib/terrakio_core/sync_client.py +0 -1443
  52. terrakio_core-0.4.98.1b7/terrakio_core/__init__.py +0 -18
  53. terrakio_core-0.4.98.1b7/terrakio_core/client.py +0 -133
  54. terrakio_core-0.4.98.1b7/terrakio_core/endpoints/mass_stats.py +0 -1116
  55. terrakio_core-0.4.98.1b7/terrakio_core/endpoints/space_management.py +0 -72
  56. terrakio_core-0.4.98.1b7/terrakio_core/helper/bounded_taskgroup.py +0 -20
  57. terrakio_core-0.4.98.1b7/terrakio_core/helper/decorators.py +0 -58
  58. terrakio_core-0.4.98.1b7/terrakio_core.egg-info/PKG-INFO +0 -46
  59. terrakio_core-0.4.98.1b7/terrakio_core.egg-info/SOURCES.txt +0 -25
  60. terrakio_core-0.4.98.1b7/terrakio_core.egg-info/dependency_links.txt +0 -1
  61. terrakio_core-0.4.98.1b7/terrakio_core.egg-info/requires.txt +0 -20
  62. terrakio_core-0.4.98.1b7/terrakio_core.egg-info/top_level.txt +0 -1
  63. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/README.md +0 -0
  64. {terrakio_core-0.4.98.1b7/build/lib → terrakio_core-0.5.8}/terrakio_core/client.py +0 -0
  65. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/config.py +0 -0
  66. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/convenience_functions/geoquries.py +0 -0
  67. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/endpoints/group_management.py +0 -0
  68. {terrakio_core-0.4.98.1b7/build/lib → terrakio_core-0.5.8}/terrakio_core/endpoints/space_management.py +0 -0
  69. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/endpoints/user_management.py +0 -0
  70. {terrakio_core-0.4.98.1b7/build/lib → terrakio_core-0.5.8}/terrakio_core/helper/bounded_taskgroup.py +0 -0
  71. {terrakio_core-0.4.98.1b7/build/lib → terrakio_core-0.5.8}/terrakio_core/helper/decorators.py +0 -0
  72. {terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/helper/tiles.py +0 -0

terrakio_core-0.5.8/.gitignore
@@ -0,0 +1,166 @@
+ # Python bytecode cache directories
+ __pycache__/
+ *.py[cod]
+ *$py.class
+
+ # Distribution / packaging
+ dist/
+ build/
+ *.egg-info/
+ *.egg
+ eggs/
+ lib/
+ lib64/
+ parts/
+ sdist/
+ var/
+ wheels/
+ *.dist-info/
+
+ # UV package manager
+ .python-version
+ .python-versions
+
+ # Virtual environments
+ venv/
+ env/
+ ENV/
+ .venv/
+ .env/
+ api_venv/
+ *.venv
+ virtualenv/
+ pipenv/
+
+ # PyInstaller
+ *.manifest
+ *.spec
+
+ # Installer logs
+ pip-log.txt
+ pip-delete-this-directory.txt
+
+ # Unit test / coverage reports
+ htmlcov/
+ .tox/
+ .nox/
+ .coverage
+ .coverage.*
+ .cache
+ nosetests.xml
+ coverage.xml
+ *.cover
+ *.py,cover
+ .hypothesis/
+ .pytest_cache/
+
+ # Jupyter Notebook
+ .ipynb_checkpoints
+
+ # IPython
+ profile_default/
+ ipython_config.py
+
+ # pyenv
+ .python-version
+
+ # pipenv
+ Pipfile.lock
+
+ # poetry
+ poetry.lock
+
+ # Celery
+ celerybeat-schedule
+ celerybeat.pid
+
+ # SageMath parsed files
+ *.sage.py
+
+ # Environments
+ .env
+ .env.local
+ .env.development.local
+ .env.test.local
+ .env.production.local
+
+ # mypy
+ .mypy_cache/
+ .dmypy.json
+ dmypy.json
+
+ # Pyre type checker
+ .pyre/
+
+ # pytype static type analyzer
+ .pytype/
+
+ # IDE specific files
+ .idea/
+ .vscode/
+ *.swp
+ *.swo
+ *~
+
+ # OS specific files
+ .DS_Store
+ .DS_Store?
+ Thumbs.db
+ ehthumbs.db
+ Desktop.ini
+
+ # macOS specific files
+ .AppleDouble
+ .LSOverride
+ Icon
+ ._*
+ .DocumentRevisions-V100
+ .fseventsd
+ .Spotlight-V100
+ .TemporaryItems
+ .Trashes
+ .VolumeIcon.icns
+ .com.apple.timemachine.donotpresent
+
+ # Windows specific files
+ Thumbs.db
+ ehthumbs.db
+ Desktop.ini
+ $RECYCLE.BIN/
+ *.cab
+ *.msi
+ *.msix
+ *.msm
+ *.msp
+ *.lnk
+
+ # Linux specific files
+ *~
+ .fuse_hidden*
+ .directory
+ .Trash-*
+ .nfs*
+
+ # Temporary files
+ *.tmp
+ *.temp
+ *.log
+ *.bak
+ *.swp
+ *.swo
+
+ # Local configuration files
+ .local/
+ local_settings.py
+ instance/
+ .webassets-cache
+
+ # Database files
+ *.db
+ *.sqlite3
+ *.sqlite
+
+ # Secrets and sensitive files
+ .secrets/
+ secrets.json
+ .env.secrets

{terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: terrakio-core
- Version: 0.4.98.1b7
+ Version: 0.5.8
  Summary: Core package for the terrakio-python-api
  Requires-Python: >=3.11
  Requires-Dist: aiofiles>=24.1.0
@@ -10,8 +10,10 @@ Requires-Dist: h5netcdf>=1.6.3
  Requires-Dist: h5py>=3.14.0
  Requires-Dist: nest-asyncio>=1.6.0
  Requires-Dist: netcdf4>=1.7.2
- Requires-Dist: onnxruntime>=1.22.1
+ Requires-Dist: onnxruntime>=1.23.1
  Requires-Dist: psutil>=7.0.0
+ Requires-Dist: python-snappy>=0.7.3
+ Requires-Dist: rasterio>=1.4.3
  Requires-Dist: scipy>=1.16.1
  Requires-Dist: shapely>=2.1.1
  Requires-Dist: typer>=0.19.2

{terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/pyproject.toml
@@ -1,6 +1,6 @@
  [project]
  name = "terrakio-core"
- version = "0.4.98.1b7"
+ version = "0.5.8"
  description = "Core package for the terrakio-python-api"
  readme = "README.md"
  requires-python = ">=3.11"
@@ -11,16 +11,15 @@ dependencies = [
  "h5netcdf>=1.6.3",
  "h5py>=3.14.0",
  "netcdf4>=1.7.2",
- "h5py>=3.14.0",
- "netcdf4>=1.7.2",
- "onnxruntime>=1.22.1",
  "psutil>=7.0.0",
  "scipy>=1.16.1",
- "scipy>=1.16.1",
  "shapely>=2.1.1",
  "xarray>=2025.7.1",
  "nest-asyncio>=1.6.0",
  "typer>=0.19.2",
+ "onnxruntime>=1.23.1",
+ "rasterio>=1.4.3",
+ "python-snappy>=0.7.3",
  ]

  [project.optional-dependencies]

{terrakio_core-0.4.98.1b7/build/lib → terrakio_core-0.5.8}/terrakio_core/__init__.py
@@ -5,13 +5,15 @@ Terrakio Core
  Core components for Terrakio API clients.
  """

- __version__ = "0.4.5"
+ # Suppress ONNX Runtime GPU device discovery warnings - MUST BE FIRST!
+ import os
+ os.environ['ORT_LOGGING_LEVEL'] = '3'
+ __version__ = "0.5.8"

  from .async_client import AsyncClient
  from .sync_client import SyncClient as Client
  from . import accessors

-
  __all__ = [
  "AsyncClient",
  "Client"

{terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/accessors.py
@@ -12,7 +12,7 @@ import xarray as xr

  # Local/relative imports
  from .convenience_functions.zonal_stats import cloud_object
- from .endpoints.mass_stats import MassStats
+ from .endpoints.collections import Collections

  @pd.api.extensions.register_dataframe_accessor("geo")
  class GeoXarrayAccessor:
@@ -686,7 +686,7 @@ class GeoXarrayAccessor:
  asyncio.set_event_loop(loop)
  try:
  return loop.run_until_complete(
- self._client.mass_stats.track_job([self._obj.job_id])
+ self._client.collections.track_job([self._obj.job_id])
  )
  finally:
  loop.close()
@@ -845,7 +845,7 @@ class GeoXarrayAccessor:
  script_content = self._generate_post_processing_script()
  client = self._client
  if client:
- mass_stats = MassStats(client)
+ collections = Collections(client)

  import asyncio
  import concurrent.futures
@@ -856,7 +856,7 @@ class GeoXarrayAccessor:
  # we don't actually have the dataset name, currently it is just getting job named zonal stats job
  try:
  return loop.run_until_complete(
- mass_stats.zonal_stats_transform(
+ collections.zonal_stats_transform(
  data_name=self._obj.job_name,
  output="netcdf",
  consumer = script_content.encode('utf-8'),
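
The recurring change in this release is the rename of the mass-stats endpoint group to "collections": the MassStats class and the client.mass_stats attribute are replaced by Collections and client.collections, while the method names themselves (track_job, execute_job, zonal_stats_transform, combine_tiles, download_file) are unchanged in the surrounding hunks. A minimal migration sketch; the client constructor values are placeholders, not taken from this diff:

# Hypothetical migration sketch based on the rename visible in this diff.
from terrakio_core import AsyncClient

async def check_job(job_id: str) -> dict:
    client = AsyncClient(url="https://api.example.com", api_key="...")  # placeholder values
    # terrakio-core 0.4.x:  info = await client.mass_stats.track_job([job_id])
    # terrakio-core 0.5.x:  the same method now lives on client.collections
    info = await client.collections.track_job([job_id])
    return info[job_id]

# run with e.g. asyncio.run(check_job("my-job-id"))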

{terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/async_client.py
@@ -9,10 +9,10 @@ from geopandas import GeoDataFrame
  from shapely.geometry.base import BaseGeometry as ShapelyGeometry
  from shapely.geometry import mapping
  from .client import BaseClient
- from .exceptions import APIError, NetworkError
+ from .exceptions import APIError, NetworkError, GeoQueryError
  from .endpoints.dataset_management import DatasetManagement
  from .endpoints.user_management import UserManagement
- from .endpoints.mass_stats import MassStats
+ from .endpoints.collections import Collections
  from .endpoints.group_management import GroupManagement
  from .endpoints.space_management import SpaceManagement
  from .endpoints.model_management import ModelManagement
@@ -26,7 +26,7 @@ class AsyncClient(BaseClient):
  super().__init__(url, api_key, verbose)
  self.datasets = DatasetManagement(self)
  self.users = UserManagement(self)
- self.mass_stats = MassStats(self)
+ self.collections = Collections(self)
  self.groups = GroupManagement(self)
  self.space = SpaceManagement(self)
  self.model = ModelManagement(self)
@@ -54,7 +54,6 @@ class AsyncClient(BaseClient):
  for attempt in range(self.retry + 1):
  try:
  async with session.request(method, url, **kwargs) as response:
- content = await response.text()

  if not response.ok and self._should_retry(response.status, attempt):
  self.logger.info(f"Request failed (attempt {attempt+1}/{self.retry+1}): {response.status}. Retrying...")
@@ -183,7 +182,10 @@
  "validated": validated,
  **kwargs
  }
- result = await self._terrakio_request("POST", "geoquery", json=payload)
+ result, status_code = await self._terrakio_request("POST", "geoquery", json=payload)
+
+ if status_code != 200:
+ raise GeoQueryError(result['detail'], status_code=status_code)

  return result
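
In the geoquery path, _terrakio_request now returns a (result, status_code) pair and any non-200 response is surfaced as a GeoQueryError built from the API's detail field. A hedged sketch of how calling code might handle the new exception, assuming the surrounding method is exposed as AsyncClient.geoquery (the method name and its keyword arguments are not shown in this hunk):

# Hypothetical error-handling sketch for the new GeoQueryError path.
from terrakio_core.exceptions import GeoQueryError

async def run_query(client, **query_kwargs):
    try:
        # geoquery() keyword arguments elided; see the payload assembled in the hunk above
        return await client.geoquery(**query_kwargs)
    except GeoQueryError as exc:
        # Raised with the response's 'detail' message and the HTTP status code
        print(f"geoquery rejected by the API: {exc}")
        raise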

{terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/convenience_functions/create_dataset_file.py
@@ -48,7 +48,7 @@ async def create_dataset_file(
  tempreq.write(reqs)
  tempreqname = tempreq.name

- task_id = await client.mass_stats.execute_job(
+ task_id = await client.collections.execute_job(
  name=body["name"],
  region=body["region"],
  output=body["output"],
@@ -64,7 +64,7 @@
  while True:
  try:
  taskid = task_id['task_id']
- trackinfo = await client.mass_stats.track_job([taskid])
+ trackinfo = await client.collections.track_job([taskid])
  status = trackinfo[taskid]['status']
  if status == 'Completed':
  client.logger.info('Data generated successfully!')
@@ -86,14 +86,14 @@

  os.unlink(tempreqname)

- combine_result = await client.mass_stats.combine_tiles(body["name"], body["overwrite"], body["output"], max_file_size_mb=max_file_size_mb)
+ combine_result = await client.collections.combine_tiles(body["name"], body["overwrite"], body["output"], max_file_size_mb=max_file_size_mb)
  combine_task_id = combine_result.get("task_id")

  combine_start_time = time.time()
  client.logger.info(f"Tracking file generation job {combine_task_id}...")
  while True:
  try:
- trackinfo = await client.mass_stats.track_job([combine_task_id])
+ trackinfo = await client.collections.track_job([combine_task_id])
  if body["output"] == "netcdf":
  download_file_name = trackinfo[combine_task_id]['folder'] + '.nc'
  elif body["output"] == "geotiff":
@@ -117,7 +117,7 @@
  raise

  if download_path:
- await client.mass_stats.download_file(
+ await client.collections.download_file(
  job_name=body["name"],
  bucket=bucket,
  file_type='processed',

{terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/convenience_functions/zonal_stats.py
@@ -92,7 +92,7 @@ class cloud_object(gpd.GeoDataFrame):
  GeoDataFrame: A GeoDataFrame containing the first n files.
  """

- track_info = await self.client.mass_stats.track_job([self.job_id])
+ track_info = await self.client.collections.track_job([self.job_id])
  job_info = track_info[self.job_id]
  status = job_info['status']

@@ -491,7 +491,7 @@ def gdf_to_json(
  id_column: Optional[str] = None,
  ):
  """
- Convert a GeoDataFrame to a list of JSON requests for mass_stats processing.
+ Convert a GeoDataFrame to a list of JSON requests for collections processing.

  Args:
  gdf: GeoDataFrame containing geometries and optional metadata
@@ -503,9 +503,9 @@
  id_column: Optional column name to use for group and file names

  Returns:
- list: List of dictionaries formatted for mass_stats requests
+ list: List of dictionaries formatted for collections requests
  """
- mass_stats_requests = []
+ collections_requests = []

  for idx, row in gdf.iterrows():
  request_feature = {
@@ -535,11 +535,11 @@
  "request": request_feature,
  }

- mass_stats_requests.append(request_entry)
+ collections_requests.append(request_entry)

- return mass_stats_requests
+ return collections_requests

- async def handle_mass_stats(
+ async def handle_collections(
  client,
  gdf: GeoDataFrame,
  expr: str,
@@ -552,7 +552,7 @@ async def handle_mass_stats(
  request_json = gdf_to_json(gdf=gdf, expr=expr, in_crs=in_crs, out_crs=out_crs,
  resolution=resolution, geom_fix=geom_fix, id_column=id_column)

- job_response = await client.mass_stats.execute_job(
+ job_response = await client.collections.execute_job(
  name=f"zonal-stats-{str(uuid.uuid4())[:6]}",
  output="netcdf",
  config={},
@@ -581,7 +581,7 @@ async def zonal_stats(
  ):
  """Compute zonal statistics for all geometries in a GeoDataFrame."""
  if mass_stats:
- mass_stats_id = await handle_mass_stats(
+ collections_id = await handle_collections(
  client = client,
  gdf = gdf,
  expr = expr,
@@ -591,9 +591,9 @@
  geom_fix = geom_fix,
  id_column = id_column,
  )
- job_name = await client.mass_stats.track_job([mass_stats_id])
- job_name = job_name[mass_stats_id]["name"]
- cloud_files_object = cloud_object(job_id = mass_stats_id, job_name = job_name, client = client)
+ job_name = await client.collections.track_job([collections_id])
+ job_name = job_name[collections_id]["name"]
+ cloud_files_object = cloud_object(job_id = collections_id, job_name = job_name, client = client)

  return cloud_files_object

{terrakio_core-0.4.98.1b7 → terrakio_core-0.5.8}/terrakio_core/endpoints/auth.py
@@ -68,6 +68,7 @@ class AuthClient:
  "password": password
  }
  response, status = await self._client._terrakio_request("POST", "/users/login", json=payload)
+
  if status != 200:
  if status == 401:
  raise InvalidUsernamePasswordError(f"Invalid username or password: {response}", status_code=status)

terrakio_core-0.5.8/terrakio_core/endpoints/collections/__init__.py
@@ -0,0 +1,72 @@
+ from rich.console import Console
+
+ from .collections import CollectionsMixin
+ from .common import Dataset_Dtype, OutputTypes, Region, regions
+ from .data_operations import DataOperationsMixin
+ from .generation import DataGenerationMixin
+ from .ingestion import IngestionMixin
+ from .tasks import TasksMixin
+ from .zonal_stats import ZonalStatsMixin
+
+ # Import methods directly for better IDE navigation support
+ from .collections import (
+     CollectionsMixin as _CollectionsMixin,
+ )
+ from .tasks import (
+     TasksMixin as _TasksMixin,
+ )
+ from .data_operations import (
+     DataOperationsMixin as _DataOperationsMixin,
+ )
+ from .zonal_stats import (
+     ZonalStatsMixin as _ZonalStatsMixin,
+ )
+ from .ingestion import (
+     IngestionMixin as _IngestionMixin,
+ )
+ from .generation import (
+     DataGenerationMixin as _DataGenerationMixin,
+ )
+
+
+ class Collections(
+     CollectionsMixin,
+     TasksMixin,
+     DataOperationsMixin,
+     ZonalStatsMixin,
+     DataGenerationMixin,
+     IngestionMixin
+ ):
+     """
+     Collections and collection management client.
+
+     This class provides methods for managing collections, tasks, data operations,
+     zonal statistics, data generation, and ingestion operations.
+
+     All methods are inherited from the following mixins:
+     - CollectionsMixin: Collection CRUD operations (create_collection, get_collection, list_collections, delete_collection)
+     - TasksMixin: Task management operations (track_progress, list_tasks, get_task, cancel_task, etc.)
+     - DataOperationsMixin: Data generation and processing (generate_data, post_processing, gen_and_process, download_files, upload_artifacts)
+     - ZonalStatsMixin: Zonal statistics operations (zonal_stats, zonal_stats_transform)
+     - DataGenerationMixin: Data generation operations (training_samples, dataset, tiles, polygons)
+     - IngestionMixin: Data ingestion and visualization (create_pyramids, tif)
+
+     Note: Methods are defined in their respective mixin modules:
+     - Collection methods: .collections module
+     - Task methods: .tasks module
+     - Data operation methods: .data_operations module
+     - Zonal stats methods: .zonal_stats module
+     - Data generation methods: .generation module
+     - Ingestion methods: .ingestion module
+     """
+
+     def __init__(self, client):
+         self._client = client
+         self.console = Console()
+         self.OutputTypes = OutputTypes
+         self.Region = Region
+         self.regions = regions
+         self.Dataset_Dtype = Dataset_Dtype
+
+ # Explicitly re-export for better IDE support
+ __all__ = ['Collections', 'OutputTypes', 'Region', 'Dataset_Dtype', 'regions']
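
Because Collections is assembled purely from mixins, the single object that AsyncClient exposes as client.collections carries every mixin method plus the constants re-attached in __init__. A small sketch of what that composition implies for callers; nothing beyond the names shown in this file is assumed:

# Hypothetical sketch: all mixin methods and re-exported constants resolve on one object.
from terrakio_core.endpoints.collections import Collections

def describe(collections: Collections) -> None:
    # Method resolution order walks CollectionsMixin, TasksMixin, DataOperationsMixin, ...
    print([cls.__name__ for cls in type(collections).__mro__])
    # Constants from .common are attached as instance attributes for convenience
    print(collections.OutputTypes, collections.Region, collections.regions)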

terrakio_core-0.5.8/terrakio_core/endpoints/collections/collections.py
@@ -0,0 +1,166 @@
+ from typing import Any, Dict, List, Optional
+
+ from ...exceptions import (
+     CollectionAlreadyExistsError,
+     CollectionNotFoundError,
+     CreateCollectionError,
+     DeleteCollectionError,
+     GetCollectionError,
+     InvalidCollectionTypeError,
+     ListCollectionsError,
+ )
+ from ...helper.decorators import require_api_key
+
+
+ class CollectionsMixin:
+     """Collection CRUD operations."""
+
+     @require_api_key
+     async def create_collection(
+         self,
+         collection: str,
+         bucket: Optional[str] = None,
+         location: Optional[str] = None,
+         collection_type: str = "basic"
+     ) -> Dict[str, Any]:
+         """
+         Create a collection for the current user.
+
+         Args:
+             collection: The name of the collection (required)
+             bucket: The bucket to use (optional, admin only)
+             location: The location to use (optional, admin only)
+             collection_type: The type of collection to create (optional, defaults to "basic")
+
+         Returns:
+             API response as a dictionary containing the collection id
+
+         Raises:
+             CollectionAlreadyExistsError: If the collection already exists
+             InvalidCollectionTypeError: If the collection type is invalid
+             CreateCollectionError: If the API request fails due to unknown reasons
+         """
+         payload = {
+             "collection_type": collection_type
+         }
+
+         if bucket is not None:
+             payload["bucket"] = bucket
+
+         if location is not None:
+             payload["location"] = location
+
+         response, status = await self._client._terrakio_request("POST", f"collections/{collection}", json=payload)
+         if status != 200:
+             if status == 400 or status == 409:
+                 raise CollectionAlreadyExistsError(f"Collection {collection} already exists", status_code=status)
+             if status == 422:
+                 raise InvalidCollectionTypeError(f"Invalid collection type: {collection_type}", status_code=status)
+             raise CreateCollectionError(f"Create collection failed with status {status}", status_code=status)
+
+         return response
+
+     @require_api_key
+     async def get_collection(self, collection: str) -> Dict[str, Any]:
+         """
+         Get a collection by name.
+
+         Args:
+             collection: The name of the collection to retrieve(required)
+
+         Returns:
+             API response as a dictionary containing collection information
+
+         Raises:
+             CollectionNotFoundError: If the collection is not found
+             GetCollectionError: If the API request fails due to unknown reasons
+         """
+         response, status = await self._client._terrakio_request("GET", f"collections/{collection}")
+
+         if status != 200:
+             if status == 404:
+                 raise CollectionNotFoundError(f"Collection {collection} not found", status_code=status)
+             raise GetCollectionError(f"Get collection failed with status {status}", status_code=status)
+
+         return response
+
+     @require_api_key
+     async def list_collections(
+         self,
+         collection_type: Optional[str] = None,
+         limit: Optional[int] = 10,
+         page: Optional[int] = 0
+     ) -> List[Dict[str, Any]]:
+         """
+         List collections for the current user.
+
+         Args:
+             collection_type: Filter by collection type (optional)
+             limit: Number of collections to return (optional, defaults to 10)
+             page: Page number (optional, defaults to 0)
+
+         Returns:
+             API response as a list of dictionaries containing collection information
+
+         Raises:
+             ListCollectionsError: If the API request fails due to unknown reasons
+         """
+         params = {}
+
+         if collection_type is not None:
+             params["collection_type"] = collection_type
+
+         if limit is not None:
+             params["limit"] = limit
+
+         if page is not None:
+             params["page"] = page
+
+         response, status = await self._client._terrakio_request("GET", "collections", params=params)
+
+         if status != 200:
+             raise ListCollectionsError(f"List collections failed with status {status}", status_code=status)
+
+         return response
+
+     @require_api_key
+     async def delete_collection(
+         self,
+         collection: str,
+         full: Optional[bool] = False,
+         outputs: Optional[list] = [],
+         data: Optional[bool] = False
+     ) -> Dict[str, Any]:
+         """
+         Delete a collection by name.
+
+         Args:
+             collection: The name of the collection to delete (required)
+             full: Delete the full collection (optional, defaults to False)
+             outputs: Specific output folders to delete (optional, defaults to empty list)
+             data: Whether to delete raw data (xdata folder) (optional, defaults to False)
+
+         Returns:
+             API response as a dictionary confirming deletion
+
+         Raises:
+             CollectionNotFoundError: If the collection is not found
+             DeleteCollectionError: If the API request fails due to unknown reasons
+         """
+         params = {
+             "full": str(full).lower(),
+             "data": str(data).lower()
+         }
+
+         if outputs:
+             params["outputs"] = outputs
+
+         response, status = await self._client._terrakio_request("DELETE", f"collections/{collection}", params=params)
+
+         if status != 200:
+             if status == 404:
+                 raise CollectionNotFoundError(f"Collection {collection} not found", status_code=status)
+             raise DeleteCollectionError(f"Delete collection failed with status {status}", status_code=status)
+
+         return response
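
Taken together, the CRUD methods above suggest a simple lifecycle for a collection. A hedged usage sketch against the signatures shown in this file; the client construction and the collection name are placeholders, and no response fields beyond what the docstrings describe are assumed:

# Hypothetical lifecycle sketch for the new collection CRUD methods.
import asyncio
from terrakio_core import AsyncClient
from terrakio_core.exceptions import CollectionAlreadyExistsError

async def main() -> None:
    client = AsyncClient(url="https://api.example.com", api_key="...")  # placeholder values
    try:
        created = await client.collections.create_collection("demo", collection_type="basic")
    except CollectionAlreadyExistsError:
        created = await client.collections.get_collection("demo")
    print(created)

    first_page = await client.collections.list_collections(limit=10, page=0)
    print(len(first_page), "collections on the first page")

    # full=True removes the whole collection; outputs/data narrow what is deleted
    await client.collections.delete_collection("demo", full=True)

asyncio.run(main())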