terrakio-core 0.4.8__py3-none-any.whl → 0.4.94__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of terrakio-core might be problematic.
- terrakio_core/__init__.py +1 -1
- terrakio_core/accessors.py +800 -328
- terrakio_core/async_client.py +10 -3
- terrakio_core/convenience_functions/create_dataset_file.py +132 -0
- terrakio_core/convenience_functions/geoquries.py +102 -0
- terrakio_core/convenience_functions/{convenience_functions.py → zonal_stats.py} +168 -263
- terrakio_core/endpoints/mass_stats.py +94 -162
- terrakio_core/sync_client.py +0 -340
- terrakio_core-0.4.94.dist-info/METADATA +31 -0
- {terrakio_core-0.4.8.dist-info → terrakio_core-0.4.94.dist-info}/RECORD +11 -10
- {terrakio_core-0.4.8.dist-info → terrakio_core-0.4.94.dist-info}/WHEEL +1 -2
- terrakio_core-0.4.8.dist-info/METADATA +0 -47
- terrakio_core-0.4.8.dist-info/top_level.txt +0 -1
terrakio_core/endpoints/mass_stats.py
CHANGED

@@ -2,6 +2,8 @@ from typing import Dict, Any, Optional
 import json
 import gzip
 import os
+import weakref
+import weakref
 from pathlib import Path
 from urllib.parse import urlparse
 from ..helper.decorators import require_token, require_api_key, require_auth
@@ -12,7 +14,6 @@ import xarray as xr
 from io import BytesIO
 import geopandas as gpd
 from shapely.geometry import shape
-from ..convenience_functions.convenience_functions import expand_on_variables_and_time

 class MassStats:
     def __init__(self, client):
@@ -144,8 +145,9 @@ class MassStats:
         params = {"limit": limit}
         return self._client._terrakio_request("GET", "mass_stats/history", params=params)

-
-
+
+    @require_api_key
+    async def start_post_processing(
         self,
         process_name: str,
         data_name: str,
@@ -153,30 +155,43 @@ class MassStats:
         consumer: str,
         overwrite: bool = False
     ) -> Dict[str, Any]:
-
-
-
-
-
-
-
-
-
-
-
-
+
+        with open(consumer, 'rb') as f:
+            script_bytes = f.read()
+
+        data = aiohttp.FormData()
+        data.add_field('process_name', process_name)
+        data.add_field('data_name', data_name)
+        data.add_field('output', output)
+        data.add_field('overwrite', str(overwrite).lower())
+        data.add_field('consumer', script_bytes, filename=os.path.basename(consumer), content_type='text/x-python')
+
+        return await self._client._terrakio_request(
+            "POST",
+            "mass_stats/post_process",
+            data=data,
+        )

-
-
-
-
-
-
-
-
-
-
-
+    @require_api_key
+    async def zonal_stats_transform(
+        self,
+        data_name: str,
+        output: str,
+        consumer: bytes,
+        overwrite: bool = False
+    ) -> Dict[str, Any]:
+
+        data = aiohttp.FormData()
+        data.add_field('data_name', data_name)
+        data.add_field('output', output)
+        data.add_field('overwrite', str(overwrite).lower())
+        data.add_field('consumer', consumer, filename="consumer.py", content_type='text/x-python')
+
+        return await self._client._terrakio_request(
+            "POST",
+            "mass_stats/transform",
+            data=data,
+        )

     @require_api_key
     def download_results(
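Both new endpoints ship a Python consumer script as a multipart form via aiohttp.FormData: start_post_processing reads it from a path on disk, while zonal_stats_transform takes the script bytes directly. A minimal usage sketch, assuming an initialized async client that exposes these methods as client.mass_stats; all argument values are illustrative:

# Sketch only: "client" and every field value below are assumptions.
async def run_post_processing(client):
    # start_post_processing takes a path; the method reads the file itself
    await client.mass_stats.start_post_processing(
        process_name="post_proc_demo",
        data_name="demo_job",
        output="csv",
        consumer="consumer.py",      # file path on disk
    )

    # zonal_stats_transform takes the script contents as bytes instead
    with open("consumer.py", "rb") as f:
        script = f.read()
    await client.mass_stats.zonal_stats_transform(
        data_name="demo_job",
        output="csv",
        consumer=script,             # bytes; uploaded under the name consumer.py
    )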
@@ -377,11 +392,22 @@ class MassStats:
         except Exception as e:
             raise Exception(f"Error in download process: {e}")

-    def validate_request(self,
-
-
+    def validate_request(self, request_json: Union[str, List[Dict]]):
+        # Handle both file path and direct JSON data
+        if isinstance(request_json, str):
+            # It's a file path
+            with open(request_json, 'r') as file:
+                request_data = json.load(file)
+        elif isinstance(request_json, list):
+            # It's already JSON data
+            request_data = request_json
+        else:
+            raise ValueError("request_json must be either a file path (str) or JSON data (list)")
+
+        # Rest of validation logic stays exactly the same
         if not isinstance(request_data, list):
-            raise ValueError(
+            raise ValueError("Request JSON should contain a list of dictionaries")
+
         for i, request in enumerate(request_data):
             if not isinstance(request, dict):
                 raise ValueError(f"Request {i} should be a dictionary")
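validate_request now dispatches on the argument's type: a str is treated as a path to a JSON file, a list as already-parsed request data, and anything else raises ValueError. Both call forms, sketched against an assumed client object (file name and request fields are illustrative):

requests = [{"expr": "red@(year=2023)", "feature": {"type": "Feature"}}]  # illustrative

client.mass_stats.validate_request(requests)          # in-memory list of dicts
client.mass_stats.validate_request("requests.json")   # path to a JSON file on disk
# client.mass_stats.validate_request(42)              # would raise ValueError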
@@ -405,7 +431,7 @@ class MassStats:
         name: str,
         output: str,
         config: Dict[str, Any],
-        request_json: str, #
+        request_json: Union[str, List[Dict]], # ← Accept both file path OR data
         region: str = None,
         overwrite: bool = False,
         skip_existing: bool = False,
@@ -453,19 +479,38 @@ class MassStats:

            return groups

-        # Load and validate request JSON
+        # # Load and validate request JSON
+        # try:
+        #     with open(request_json, 'r') as file:
+        #         request_data = json.load(file)
+        #     if isinstance(request_data, list):
+        #         size = len(request_data)
+        #     else:
+        #         raise ValueError(f"Request JSON file {request_json} should contain a list of dictionaries")
+        # except FileNotFoundError as e:
+        #     return e
+        # except json.JSONDecodeError as e:
+        #     return e
         try:
-
-
-
-
-
-
+            if isinstance(request_json, str):
+                # It's a file path
+                with open(request_json, 'r') as file:
+                    request_data = json.load(file)
+            elif isinstance(request_json, list):
+                # It's already JSON data
+                request_data = request_json
+            else:
+                raise ValueError("request_json must be either a file path (str) or JSON data (list)")
+
+            if isinstance(request_data, list):
+                size = len(request_data)
+            else:
+                raise ValueError("Request JSON should contain a list of dictionaries")
         except FileNotFoundError as e:
             return e
         except json.JSONDecodeError as e:
             return e
-
+
         # Generate manifest from request data (kept in memory)
         try:
             manifest_groups = extract_manifest_from_request(request_data)
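One behavioral wrinkle this hunk preserves: FileNotFoundError and json.JSONDecodeError are returned (`return e`) rather than raised, so a caller receives the exception object as an ordinary value, while the new ValueError branches propagate normally. A standalone toy condensation of that control flow, not the package's code:

import json

def load_requests(request_json):
    try:
        if isinstance(request_json, str):
            with open(request_json) as f:
                data = json.load(f)
        elif isinstance(request_json, list):
            data = request_json
        else:
            raise ValueError("request_json must be a path or a list")
        return len(data)
    except FileNotFoundError as e:
        return e  # mirrors the diff: the exception is returned, not raised

print(load_requests([{"a": 1}]))      # 1
print(load_requests("missing.json"))  # prints a FileNotFoundError instance
# load_requests(42) would raise ValueError, since it is not caught above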
@@ -500,8 +545,16 @@ class MassStats:
         # Upload request JSON file
         try:
             self.validate_request(request_json)
-
+
+            if isinstance(request_json, str):
+                # File path - use existing _upload_file method
+                requests_response = await self._upload_file(request_json, requests_url, use_gzip=True)
+            else:
+                # JSON data - use _upload_json_data method
+                requests_response = await self._upload_json_data(request_json, requests_url, use_gzip=True)
+
             if requests_response.status not in [200, 201, 204]:
+                # ... rest stays the same
                 self._client.logger.error(f"Requests upload error: {requests_response.text()}")
                 raise Exception(f"Failed to upload request JSON: {requests_response.text()}")
         except Exception as e:
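The upload now branches: a str path goes through the existing _upload_file, while in-memory data goes through _upload_json_data, whose body this diff does not show. A plausible sketch of such a helper under the same use_gzip contract; the implementation below is an assumption, not the package's code:

import gzip
import json
import aiohttp

async def _upload_json_data(data, url, use_gzip=False):
    # Serialize the in-memory request list, optionally gzip it,
    # and PUT it to the signed upload URL.
    body = json.dumps(data).encode("utf-8")
    headers = {"Content-Type": "application/json"}
    if use_gzip:
        body = gzip.compress(body)
        headers["Content-Encoding"] = "gzip"
    async with aiohttp.ClientSession() as session:
        async with session.put(url, data=body, headers=headers) as resp:
            await resp.read()  # drain so status and body stay accessible
            return resp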
@@ -656,125 +709,4 @@ class MassStats:
             'overwrite': str(overwrite).lower(),
             'max_file_size_mb': max_file_size_mb
         }
-        return await self._client._terrakio_request("POST", "mass_stats/combine_tiles", json=payload)
-
-    @require_api_key
-    async def load_zonal_stats(self, job_id: str, max_files: int = 5, poll_interval: int = 30):
-        """
-        Load zonal stats results from a completed mass stats job.
-
-        Args:
-            job_id: The job ID returned from the mass stats execution
-            max_files: Maximum number of files to download (default: 5)
-            poll_interval: Seconds to wait between status checks (default: 30)
-
-        Returns:
-            GeoDataFrame with geometry and dataset columns, or None if failed
-        """
-        try:
-            while True:
-                try:
-                    track_info = await self.track_job([job_id])
-                    job_info = track_info[job_id]
-                    status = job_info['status']
-
-                    self._client.logger.info(f"Job {job_id} status: {status}")
-
-                    if status == 'Completed':
-                        self._client.logger.info('Job completed successfully!')
-                        break
-                    elif status in ['Failed', 'Cancelled', 'Error']:
-                        raise RuntimeError(f"Job {job_id} failed with status: {status}")
-
-                    await asyncio.sleep(poll_interval)
-
-                except KeyboardInterrupt:
-                    self._client.logger.info(f"\nInterrupted! Job {job_id} is still running.")
-                    raise
-
-            async with aiohttp.ClientSession() as session:
-                payload = {
-                    "job_name": job_info['name'],
-                    "file_type": "raw",
-                    "bucket": job_info['bucket']
-                }
-
-                result = await self._client._terrakio_request("POST", "mass_stats/download_files", json=payload)
-                download_urls = result['download_urls'][:max_files]
-
-                self._client.logger.info(f"Downloading {len(download_urls)} dataset files...")
-
-                datasets = []
-                for i, url in enumerate(download_urls):
-                    try:
-                        self._client.logger.info(f"Downloading dataset {i+1}/{len(download_urls)}...")
-                        async with session.get(url) as response:
-                            if response.status == 200:
-                                content = await response.read()
-                                dataset = xr.open_dataset(BytesIO(content))
-                                datasets.append(dataset)
-                                self._client.logger.info(f"Successfully processed dataset {i+1}")
-                            else:
-                                self._client.logger.warning(f"Failed to download dataset {i+1}: HTTP {response.status}")
-                    except Exception as e:
-                        self._client.logger.error(f"Error downloading dataset {i+1}: {e}")
-                        continue
-
-                if not datasets:
-                    self._client.logger.warning("No datasets were successfully downloaded")
-                    return gpd.GeoDataFrame({'geometry': [], 'dataset': []})
-
-                try:
-                    json_response = await self._client._terrakio_request(
-                        "POST", "mass_stats/download_json",
-                        params={"job_name": job_info['name']}
-                    )
-                    json_url = json_response["download_url"]
-
-                    async with session.get(json_url) as response:
-                        if response.status == 200:
-                            json_data = await response.json()
-                            self._client.logger.info("Successfully downloaded geometry data")
-
-                            geometries = []
-                            max_geometries = min(max_files, len(json_data), len(datasets))
-
-                            for i in range(max_geometries):
-                                try:
-                                    geom_dict = json_data[i]["request"]["feature"]["geometry"]
-                                    shapely_geom = shape(geom_dict)
-                                    geometries.append(shapely_geom)
-                                except (KeyError, ValueError) as e:
-                                    self._client.logger.warning(f"Error parsing geometry {i}: {e}")
-                                    continue
-
-                            min_length = min(len(datasets), len(geometries))
-                            if min_length == 0:
-                                self._client.logger.warning("No matching datasets and geometries found")
-                                return gpd.GeoDataFrame({'geometry': [], 'dataset': []})
-
-                            gdf = gpd.GeoDataFrame({
-                                'geometry': geometries[:min_length],
-                                'dataset': datasets[:min_length]
-                            })
-
-                            self._client.logger.info(f"Created GeoDataFrame with {len(gdf)} rows")
-
-                            try:
-                                expanded_gdf = expand_on_variables_and_time(gdf)
-                                return expanded_gdf
-                            except NameError:
-                                self._client.logger.warning("expand_on_variables_and_time function not found, returning raw GeoDataFrame")
-                                return gdf
-
-                        else:
-                            self._client.logger.warning(f"Failed to download geometry data: HTTP {response.status}")
-                            return gpd.GeoDataFrame({'geometry': [], 'dataset': []})
-
-                except Exception as e:
-                    self._client.logger.error(f"Error downloading geometry data: {e}")
-                    return gpd.GeoDataFrame({'geometry': [], 'dataset': []})
-
-        except Exception as e:
-            self._client.logger.error(f"Failed to load zonal stats for job {job_id}: {e}")
-            return None
+        return await self._client._terrakio_request("POST", "mass_stats/combine_tiles", json=payload)
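Note that load_zonal_stats disappears from MassStats entirely; given the new convenience_functions/zonal_stats.py in the RECORD, the logic has presumably moved there. Code pinned to the old method can replicate the polling half with track_job, whose usage below follows the removed lines; the wrapper itself is an assumption:

import asyncio

async def wait_for_job(mass_stats, job_id, poll_interval=30):
    # Poll track_job until the job reaches a terminal state,
    # mirroring the loop deleted in this hunk.
    while True:
        info = (await mass_stats.track_job([job_id]))[job_id]
        if info["status"] == "Completed":
            return info
        if info["status"] in ("Failed", "Cancelled", "Error"):
            raise RuntimeError(f"Job {job_id} failed with status: {info['status']}")
        await asyncio.sleep(poll_interval)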
terrakio_core/sync_client.py
CHANGED
@@ -1,343 +1,3 @@
-# import asyncio
-# import concurrent.futures
-# import threading
-# import functools
-# import inspect
-# from typing import Optional, Dict, Any, Union
-# from geopandas import GeoDataFrame
-# from shapely.geometry.base import BaseGeometry as ShapelyGeometry
-# from .async_client import AsyncClient
-# from typing import TYPE_CHECKING
-
-# # # Add this after your other imports
-# # if TYPE_CHECKING:
-# #     from .endpoints.dataset_management import DatasetManagement
-# #     from .endpoints.user_management import UserManagement
-# #     from .endpoints.mass_stats import MassStats
-# #     from .endpoints.group_management import GroupManagement
-# #     from .endpoints.space_management import SpaceManagement
-# #     from .endpoints.model_management import ModelManagement
-# #     from .endpoints.auth import AuthClient
-
-
-# class SyncWrapper:
-#     """Generic synchronous wrapper with __dir__ support for runtime autocomplete."""
-
-#     def __init__(self, async_obj, sync_client):
-#         self._async_obj = async_obj
-#         self._sync_client = sync_client
-
-#     def __dir__(self):
-#         """Return list of attributes for autocomplete in interactive environments."""
-#         async_attrs = [attr for attr in dir(self._async_obj) if not attr.startswith('_')]
-#         wrapper_attrs = [attr for attr in object.__dir__(self) if not attr.startswith('_')]
-#         return list(set(async_attrs + wrapper_attrs))
-
-#     def __getattr__(self, name):
-#         """Dynamically wrap any method call to convert async to sync."""
-#         attr = getattr(self._async_obj, name)
-
-#         if callable(attr):
-#             @functools.wraps(attr)
-#             def sync_wrapper(*args, **kwargs):
-#                 result = attr(*args, **kwargs)
-#                 if hasattr(result, '__await__'):
-#                     return self._sync_client._run_async(result)
-#                 return result
-#             return sync_wrapper
-
-#         return attr
-
-
-# class SyncClient:
-#     """
-#     Thread-safe synchronous wrapper for AsyncClient.
-#     Uses a persistent event loop in a dedicated thread to avoid event loop conflicts.
-#     """
-
-#     # datasets: 'DatasetManagement'
-#     # users: 'UserManagement'
-#     # mass_stats: 'MassStats'
-#     # groups: 'GroupManagement'
-#     # space: 'SpaceManagement'
-#     # model: 'ModelManagement'
-#     # auth: 'AuthClient'
-
-#     def __init__(self, url: Optional[str] = None, api_key: Optional[str] = None, verbose: bool = False):
-#         self._async_client = AsyncClient(url=url, api_key=api_key, verbose=verbose)
-#         self._context_entered = False
-#         self._closed = False
-
-#         # Thread and event loop management
-#         self._loop = None
-#         self._thread = None
-#         self._loop_ready = None
-#         self._loop_exception = None
-
-#         # Initialize endpoint managers
-#         print("we are here!!!!!!!!!!!!!!!!!")
-#         self.datasets = SyncWrapper(self._async_client.datasets, self)
-#         self.users = SyncWrapper(self._async_client.users, self)
-#         self.mass_stats = SyncWrapper(self._async_client.mass_stats, self)
-#         self.groups = SyncWrapper(self._async_client.groups, self)
-#         self.space = SyncWrapper(self._async_client.space, self)
-#         self.model = SyncWrapper(self._async_client.model, self)
-#         self.auth = SyncWrapper(self._async_client.auth, self)
-
-#         # Register cleanup
-#         import atexit
-#         atexit.register(self._cleanup)
-
-#     def _ensure_event_loop(self):
-#         """Ensure we have a persistent event loop in a dedicated thread."""
-#         if self._loop is None or self._loop.is_closed():
-#             self._loop_ready = threading.Event()
-#             self._loop_exception = None
-
-#             def run_loop():
-#                 """Run the event loop in a dedicated thread."""
-#                 try:
-#                     # Create a new event loop for this thread
-#                     self._loop = asyncio.new_event_loop()
-#                     asyncio.set_event_loop(self._loop)
-
-#                     # Signal that the loop is ready
-#                     self._loop_ready.set()
-
-#                     # Run the loop forever (until stopped)
-#                     self._loop.run_forever()
-#                 except Exception as e:
-#                     self._loop_exception = e
-#                     self._loop_ready.set()
-#                 finally:
-#                     # Clean up when the loop stops
-#                     if self._loop and not self._loop.is_closed():
-#                         self._loop.close()
-
-#             # Start the thread
-#             self._thread = threading.Thread(target=run_loop, daemon=True)
-#             self._thread.start()
-
-#             # Wait for the loop to be ready
-#             self._loop_ready.wait(timeout=10)
-
-#             if self._loop_exception:
-#                 raise self._loop_exception
-
-#             if not self._loop_ready.is_set():
-#                 raise RuntimeError("Event loop failed to start within timeout")
-
-#     def _run_async(self, coro):
-#         """
-#         Run async coroutine using persistent event loop.
-#         This is the core method that makes everything work.
-#         """
-#         # Ensure we have an event loop
-#         self._ensure_event_loop()
-
-#         if self._loop.is_closed():
-#             raise RuntimeError("Event loop is closed")
-
-#         # Create a future to get the result back from the event loop thread
-#         future = concurrent.futures.Future()
-
-#         async def run_with_context():
-#             """Run the coroutine with proper context management."""
-#             try:
-#                 # Ensure the async client is properly initialized
-#                 await self._ensure_context()
-
-#                 # Run the actual coroutine
-#                 result = await coro
-
-#                 # Set the result on the future
-#                 future.set_result(result)
-#             except Exception as e:
-#                 # Set the exception on the future
-#                 future.set_exception(e)
-
-#         # Schedule the coroutine on the persistent event loop
-#         self._loop.call_soon_threadsafe(
-#             lambda: asyncio.create_task(run_with_context())
-#         )
-
-#         # Wait for the result (with timeout to avoid hanging)
-#         try:
-#             return future.result(timeout=300)  # 5 minute timeout
-#         except concurrent.futures.TimeoutError:
-#             raise RuntimeError("Async operation timed out after 5 minutes")
-
-#     async def _ensure_context(self):
-#         """Ensure the async client context is entered."""
-#         if not self._context_entered and not self._closed:
-#             await self._async_client.__aenter__()
-#             self._context_entered = True
-
-#     async def _exit_context(self):
-#         """Exit the async client context."""
-#         if self._context_entered and not self._closed:
-#             await self._async_client.__aexit__(None, None, None)
-#             self._context_entered = False
-
-#     def close(self):
-#         """Close the underlying async client session and stop the event loop."""
-#         if not self._closed:
-#             if self._loop and not self._loop.is_closed():
-#                 # Schedule cleanup on the event loop
-#                 future = concurrent.futures.Future()
-
-#                 async def cleanup():
-#                     """Clean up the async client."""
-#                     try:
-#                         await self._exit_context()
-#                         future.set_result(None)
-#                     except Exception as e:
-#                         future.set_exception(e)
-
-#                 # Run cleanup
-#                 self._loop.call_soon_threadsafe(
-#                     lambda: asyncio.create_task(cleanup())
-#                 )
-
-#                 # Wait for cleanup to complete
-#                 try:
-#                     future.result(timeout=10)
-#                 except:
-#                     pass  # Ignore cleanup errors
-
-#                 # Stop the event loop
-#                 self._loop.call_soon_threadsafe(self._loop.stop)
-
-#                 # Wait for thread to finish
-#                 if self._thread and self._thread.is_alive():
-#                     self._thread.join(timeout=5)
-
-#             self._closed = True
-
-#     def _cleanup(self):
-#         """Internal cleanup method called by atexit."""
-#         if not self._closed:
-#             try:
-#                 self.close()
-#             except Exception:
-#                 pass  # Ignore cleanup errors
-
-#     def __dir__(self):
-#         """Return list of attributes for autocomplete in interactive environments."""
-#         default_attrs = [attr for attr in object.__dir__(self) if not attr.startswith('_')]
-#         async_client_attrs = [attr for attr in dir(self._async_client) if not attr.startswith('_')]
-#         endpoint_attrs = ['datasets', 'users', 'mass_stats', 'groups', 'space', 'model', 'auth']
-#         all_attrs = default_attrs + async_client_attrs + endpoint_attrs
-#         return list(set(all_attrs))
-
-#     # Your existing methods (geoquery, zonal_stats, etc.)
-#     def geoquery(
-#         self,
-#         expr: str,
-#         feature: Union[Dict[str, Any], ShapelyGeometry],
-#         in_crs: str = "epsg:4326",
-#         out_crs: str = "epsg:4326",
-#         resolution: int = -1,
-#         geom_fix: bool = False,
-#         **kwargs
-#     ):
-#         """Compute WCS query for a single geometry (synchronous version)."""
-#         coro = self._async_client.geoquery(
-#             expr=expr,
-#             feature=feature,
-#             in_crs=in_crs,
-#             out_crs=out_crs,
-#             output="netcdf",
-#             resolution=resolution,
-#             geom_fix=geom_fix,
-#             **kwargs
-#         )
-#         return self._run_async(coro)
-
-#     def zonal_stats(
-#         self,
-#         gdf: GeoDataFrame,
-#         expr: str,
-#         conc: int = 20,
-#         inplace: bool = False,
-#         in_crs: str = "epsg:4326",
-#         out_crs: str = "epsg:4326",
-#         resolution: int = -1,
-#         geom_fix: bool = False,
-#         drop_nan: bool = False,
-#         spatial_reduction: str = None,
-#         temporal_reduction: str = None,
-#         max_memory_mb: int = 500,
-#         stream_to_disk: bool = False,
-#     ):
-#         """Compute zonal statistics for all geometries in a GeoDataFrame (synchronous version)."""
-#         coro = self._async_client.zonal_stats(
-#             gdf=gdf,
-#             expr=expr,
-#             conc=conc,
-#             inplace=inplace,
-#             in_crs=in_crs,
-#             out_crs=out_crs,
-#             resolution=resolution,
-#             geom_fix=geom_fix,
-#             drop_nan=drop_nan,
-#             spatial_reduction=spatial_reduction,
-#             temporal_reduction=temporal_reduction,
-#             max_memory_mb=max_memory_mb,
-#             stream_to_disk=stream_to_disk
-#         )
-#         return self._run_async(coro)
-
-#     def create_dataset_file(
-#         self,
-#         aoi: str,
-#         expression: str,
-#         output: str,
-#         in_crs: str = "epsg:4326",
-#         res: float = 0.0001,
-#         region: str = "aus",
-#         to_crs: str = "epsg:4326",
-#         overwrite: bool = True,
-#         skip_existing: bool = False,
-#         non_interactive: bool = True,
-#         poll_interval: int = 30,
-#         download_path: str = "/home/user/Downloads",
-#     ) -> dict:
-#         """Create a dataset file using mass stats operations (synchronous version)."""
-#         coro = self._async_client.create_dataset_file(
-#             aoi=aoi,
-#             expression=expression,
-#             output=output,
-#             in_crs=in_crs,
-#             res=res,
-#             region=region,
-#             to_crs=to_crs,
-#             overwrite=overwrite,
-#             skip_existing=skip_existing,
-#             non_interactive=non_interactive,
-#             poll_interval=poll_interval,
-#             download_path=download_path,
-#         )
-#         return self._run_async(coro)
-
-#     # Context manager support
-#     def __enter__(self):
-#         """Context manager entry."""
-#         return self
-
-#     def __exit__(self, exc_type, exc_val, exc_tb):
-#         """Context manager exit."""
-#         self.close()
-
-#     def __del__(self):
-#         """Destructor to ensure session is closed."""
-#         if not self._closed:
-#             try:
-#                 self._cleanup()
-#             except Exception:
-#                 pass
-
-
 import asyncio
 import concurrent.futures
 import threading