terrakio-core 0.4.95.tar.gz → 0.4.96.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of terrakio-core has been flagged as potentially problematic.

Files changed (24)
  1. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/PKG-INFO +1 -1
  2. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/pyproject.toml +1 -1
  3. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/__init__.py +1 -1
  4. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/accessors.py +98 -16
  5. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/convenience_functions/geoquries.py +1 -0
  6. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/convenience_functions/zonal_stats.py +17 -2
  7. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/.gitignore +0 -0
  8. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/README.md +0 -0
  9. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/async_client.py +0 -0
  10. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/client.py +0 -0
  11. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/config.py +0 -0
  12. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/convenience_functions/create_dataset_file.py +0 -0
  13. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/endpoints/auth.py +0 -0
  14. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/endpoints/dataset_management.py +0 -0
  15. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/endpoints/group_management.py +0 -0
  16. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/endpoints/mass_stats.py +0 -0
  17. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/endpoints/model_management.py +0 -0
  18. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/endpoints/space_management.py +0 -0
  19. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/endpoints/user_management.py +0 -0
  20. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/exceptions.py +0 -0
  21. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/helper/bounded_taskgroup.py +0 -0
  22. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/helper/decorators.py +0 -0
  23. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/helper/tiles.py +0 -0
  24. {terrakio_core-0.4.95 → terrakio_core-0.4.96}/terrakio_core/sync_client.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: terrakio-core
-Version: 0.4.95
+Version: 0.4.96
 Summary: Core package for the terrakio-python-api
 Requires-Python: >=3.11
 Requires-Dist: aiofiles>=24.1.0
pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "terrakio-core"
-version = "0.4.95"
+version = "0.4.96"
 description = "Core package for the terrakio-python-api"
 readme = "README.md"
 requires-python = ">=3.11"
terrakio_core/__init__.py
@@ -5,7 +5,7 @@ Terrakio Core
 Core components for Terrakio API clients.
 """
 
-__version__ = "0.4.95"
+__version__ = "0.4.96"
 
 from .async_client import AsyncClient
 from .sync_client import SyncClient as Client
terrakio_core/accessors.py
@@ -673,9 +673,61 @@ class GeoXarrayAccessor:
             result_gdf[col] = new_data
 
         return result_gdf
-
+
     def _apply_cloud_reduction(self, reduction_func: str, dim: Optional[Union[str, List[str]]] = None,
-                              columns: Optional[List[str]] = None, **kwargs):
+                               columns: Optional[List[str]] = None, **kwargs):
+
+        if hasattr(self._obj, 'job_id') and self._obj.job_id and self._client:
+            import asyncio
+            import concurrent.futures
+
+            def check_job_status():
+                loop = asyncio.new_event_loop()
+                asyncio.set_event_loop(loop)
+                try:
+                    return loop.run_until_complete(
+                        self._client.mass_stats.track_job([self._obj.job_id])
+                    )
+                finally:
+                    loop.close()
+
+            try:
+                with concurrent.futures.ThreadPoolExecutor() as executor:
+                    future = executor.submit(check_job_status)
+                    track_info = future.result(timeout=10)  # Short timeout for status check
+
+                job_info = track_info[self._obj.job_id]
+                status = job_info['status']
+
+                if status in ["Failed", "Cancelled", "Error"]:
+                    raise RuntimeError(f"The zonal stats job (job_id: {self._obj.job_id}) has failed, cancelled, or errored. Please check the job status!")
+
+                elif status != "Completed":
+                    # Job is still running - include progress information
+                    completed = job_info.get('completed', 0)
+                    total = job_info.get('total', 1)
+                    progress = completed / total if total > 0 else 0
+                    percentage = progress * 100
+
+                    # Create progress bar
+                    bar_length = 30  # Shorter bar for error message
+                    filled_length = int(bar_length * progress)
+                    bar = '█' * filled_length + '░' * (bar_length - filled_length)
+
+                    raise RuntimeError(
+                        f"The zonal stats job (job_id: {self._obj.job_id}) is still running. "
+                        f"Progress: [{bar}] {percentage:.1f}% ({completed}/{total}). "
+                        f"Please come back at a later time!"
+                    )
+
+            except concurrent.futures.TimeoutError:
+                self._client.logger.warning("Timeout checking job status, proceeding with reduction")
+            except Exception as e:
+                if "still running" in str(e) or "failed" in str(e).lower():
+                    raise  # Re-raise our custom errors
+                else:
+                    self._client.logger.warning(f"Could not check job status: {e}, proceeding with reduction")
+
         current_time = time.time()
         chain_reset_threshold = 0.01
 
terrakio_core/accessors.py
@@ -835,20 +887,25 @@ class GeoXarrayAccessor:
             "from io import BytesIO",
             "import tempfile",
             "import os",
+            "import traceback",
             "",
             "def consume(filename, file_bytes, metadata):",
         ]
 
         script_lines.extend([
+            "    tmp_file = None",
+            "    nc_tmp_file = None",
+            "    ds = None",
             "    ",
             "    try:",
             "        with tempfile.NamedTemporaryFile(suffix='.nc', delete=False) as tmp_file:",
             "            tmp_file.write(file_bytes)",
             "            tmp_file.flush()",
-            "        ds = xr.open_dataset(tmp_file.name, engine='scipy')",
+            "        ds = xr.open_dataset(tmp_file.name, engine='h5netcdf')",
             "        ",
         ])
 
+        # Add operations without excessive debugging
        for i, op in enumerate(self._pending_operations):
            op_type = op['type']
            params = op['params']
terrakio_core/accessors.py
@@ -882,8 +939,13 @@ class GeoXarrayAccessor:
            '            output_filename = f"{base_filename}_processed.csv"',
            "            csv_data = result_df.to_csv(index=False).encode()",
            "            ",
-           "            ds.close()",
-           "            os.unlink(tmp_file.name)",
+           "            if ds is not None:",
+           "                ds.close()",
+           "            if tmp_file and hasattr(tmp_file, 'name'):",
+           "                try:",
+           "                    os.unlink(tmp_file.name)",
+           "                except:",
+           "                    pass",
            "            return output_filename, csv_data",
            "        else:",
            "            # Output as NetCDF - still has dimensions",
terrakio_core/accessors.py
@@ -897,24 +959,44 @@ class GeoXarrayAccessor:
            "                netcdf_data = f.read()",
            "            ",
            "            # Clean up temp files",
-           "            os.unlink(nc_tmp_file.name)",
+           "            try:",
+           "                os.unlink(nc_tmp_file.name)",
+           "            except:",
+           "                pass",
            "            ",
-           "            ds.close()",
-           "            os.unlink(tmp_file.name)",
+           "            if ds is not None:",
+           "                ds.close()",
+           "            if tmp_file and hasattr(tmp_file, 'name'):",
+           "                try:",
+           "                    os.unlink(tmp_file.name)",
+           "                except:",
+           "                    pass",
            "            return output_filename, netcdf_data",
        ])
 
        script_lines.extend([
            "    ",
            "    except Exception as e:",
-           "        try:",
-           "            os.unlink(tmp_file.name)",
-           "        except:",
-           "            pass",
-           "        try:",
-           "            os.unlink(nc_tmp_file.name)",
-           "        except:",
-           "            pass",
+           "        ",
+           "        # Clean up resources",
+           "        if ds is not None:",
+           "            try:",
+           "                ds.close()",
+           "            except:",
+           "                pass",
+           "        ",
+           "        if tmp_file and hasattr(tmp_file, 'name'):",
+           "            try:",
+           "                os.unlink(tmp_file.name)",
+           "            except:",
+           "                pass",
+           "        ",
+           "        if nc_tmp_file and hasattr(nc_tmp_file, 'name'):",
+           "            try:",
+           "                os.unlink(nc_tmp_file.name)",
+           "            except:",
+           "                pass",
+           "        ",
            "        return None, None",
        ])
 
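The three hunks above rework the script that consume() generation emits: the xarray engine switches from scipy (which reads only NetCDF3) to h5netcdf (which also handles NetCDF4/HDF5), the temp-file and dataset handles are initialised to None up front, and every exit path guards close/unlink so cleanup can no longer raise NameError. A standalone sketch of the skeleton the generated script follows, assuming xarray with the h5netcdf backend is installed; the pending-operation steps are elided, and the single try/finally here is an alternative to the diff's repeated guarded-unlink blocks:

    import os
    import tempfile
    import xarray as xr  # assumes the h5netcdf backend is installed

    def consume(filename, file_bytes, metadata=None):
        tmp_path = None
        ds = None
        try:
            # Write the raw NetCDF bytes to disk so xarray can open them by path
            with tempfile.NamedTemporaryFile(suffix='.nc', delete=False) as tmp:
                tmp.write(file_bytes)
                tmp_path = tmp.name
            ds = xr.open_dataset(tmp_path, engine='h5netcdf')
            # ... pending operations would be applied to ds here ...
            csv_data = ds.to_dataframe().to_csv(index=False).encode()
            base = os.path.splitext(filename)[0]
            return f"{base}_processed.csv", csv_data
        except Exception:
            return None, None
        finally:
            # Runs on success and failure alike, replacing per-branch cleanup
            if ds is not None:
                ds.close()
            if tmp_path:
                try:
                    os.unlink(tmp_path)
                except OSError:
                    pass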
terrakio_core/convenience_functions/geoquries.py
@@ -1,4 +1,5 @@
 import asyncio
+import xarray as xr
 
 import geopandas as gpd
 from shapely.geometry import shape
terrakio_core/convenience_functions/zonal_stats.py
@@ -139,8 +139,23 @@ class cloud_object(gpd.GeoDataFrame):
            raise RuntimeError(f"The zonal stats job (job_id: {self.job_id}) has failed, cancelled, or errored. Please check the job status!")
 
        else:
-           raise RuntimeError(f"The zonal stats job (job_id: {self.job_id}) is still running. Please come back at a later time!")
-
+           # Job is still running - include progress information
+           completed = job_info.get('completed', 0)
+           total = job_info.get('total', 1)
+           progress = completed / total if total > 0 else 0
+           percentage = progress * 100
+
+           # Create progress bar
+           bar_length = 30  # Shorter bar for error message
+           filled_length = int(bar_length * progress)
+           bar = '█' * filled_length + '░' * (bar_length - filled_length)
+
+           raise RuntimeError(
+               f"The zonal stats job (job_id: {self.job_id}) is still running. "
+               f"Progress: [{bar}] {percentage:.1f}% ({completed}/{total}). "
+               f"Please come back at a later time!"
+           )
+
 def expand_on_time(gdf):
     """
     Expand datasets on time dimension - each time becomes a new row.
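This hunk and the accessors.py hunk above embed the same inline progress-bar rendering in their "still running" errors. Factored into a standalone helper purely for illustration (no such helper exists in the package):

    def render_progress(completed, total, bar_length=30):
        # Guard against total == 0 so the bar never divides by zero
        progress = completed / total if total > 0 else 0
        filled = int(bar_length * progress)
        bar = '█' * filled + '░' * (bar_length - filled)
        return f"[{bar}] {progress * 100:.1f}% ({completed}/{total})"

    print(render_progress(7, 10))  # 21 of 30 cells filled, "70.0% (7/10)"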