geoai-py 0.4.2__py2.py3-none-any.whl → 0.4.3__py2.py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- geoai/__init__.py +1 -1
- geoai/download.py +138 -34
- geoai/extract.py +153 -46
- geoai/geoai.py +13 -0
- {geoai_py-0.4.2.dist-info → geoai_py-0.4.3.dist-info}/METADATA +1 -1
- geoai_py-0.4.3.dist-info/RECORD +15 -0
- {geoai_py-0.4.2.dist-info → geoai_py-0.4.3.dist-info}/WHEEL +1 -1
- geoai_py-0.4.2.dist-info/RECORD +0 -15
- {geoai_py-0.4.2.dist-info → geoai_py-0.4.3.dist-info}/entry_points.txt +0 -0
- {geoai_py-0.4.2.dist-info → geoai_py-0.4.3.dist-info}/licenses/LICENSE +0 -0
- {geoai_py-0.4.2.dist-info → geoai_py-0.4.3.dist-info}/top_level.txt +0 -0
geoai/__init__.py
CHANGED
geoai/download.py
CHANGED

@@ -10,8 +10,9 @@ import matplotlib.pyplot as plt
 import numpy as np
 import pandas as pd
 import planetary_computer as pc
+import pystac
 import requests
-import rioxarray
+import rioxarray as rxr
 import xarray as xr
 from pystac_client import Client
 from shapely.geometry import box
@@ -121,7 +122,7 @@ def download_naip(
             #
             else:
                 # Fallback to direct rioxarray opening (less common case)
-                data =
+                data = rxr.open_rasterio(rgb_asset.href)
                 data.rio.to_raster(output_path)

             downloaded_files.append(output_path)
@@ -129,7 +130,7 @@ def download_naip(

             # Optional: Display a preview (uncomment if needed)
             if preview:
-                data =
+                data = rxr.open_rasterio(output_path)
                 preview_raster(data)

         except Exception as e:
@@ -516,7 +517,7 @@ def download_pc_stac_item(
                 )
                 # Still need to open the file to get the data for merging
                 if merge_bands:
-                    band_data =
+                    band_data = rxr.open_rasterio(file_path)
                     band_data_arrays.append((band, band_data))
                     band_names.append(band)
                 result[band] = file_path
@@ -525,7 +526,7 @@ def download_pc_stac_item(
             if show_progress and not isinstance(progress_iter, list):
                 progress_iter.set_description(f"Downloading {band}")

-            band_data =
+            band_data = rxr.open_rasterio(band_url)

            # Store the data array for potential merging later
            if merge_bands:
@@ -866,10 +867,9 @@ def pc_stac_search(
 def pc_stac_download(
     items,
     output_dir=".",
-
+    assets=None,
     max_workers=4,
     skip_existing=True,
-    sign_urls=True,
 ):
     """
     Download assets from STAC items retrieved from the Planetary Computer.
@@ -882,7 +882,7 @@ def pc_stac_download(
         items (list or pystac.Item): STAC Item object or list of STAC Item objects.
         output_dir (str, optional): Directory where assets will be saved.
             Defaults to current directory.
-
+        assets (list, optional): List of asset keys to download. If None,
             downloads all available assets. Defaults to None.
         max_workers (int, optional): Maximum number of concurrent download threads.
             Defaults to 4.
@@ -899,7 +899,7 @@ def pc_stac_download(
         TypeError: If items is not a STAC Item or list of STAC Items.
         IOError: If there's an error writing the downloaded assets to disk.
     """
-
+
     from concurrent.futures import ThreadPoolExecutor, as_completed

     # Handle single item case
@@ -911,31 +911,13 @@ def pc_stac_download(
     # Create output directory if it doesn't exist
     os.makedirs(output_dir, exist_ok=True)

-    # Function to sign URLs if needed
-    def get_signed_url(href):
-        if not sign_urls:
-            return href
-
-        # Planetary Computer typically requires signing URLs for accessing data
-        # Check if the URL is from Microsoft Planetary Computer
-        if "planetarycomputer" in href:
-            try:
-                sign_url = "https://planetarycomputer.microsoft.com/api/sas/v1/sign"
-                response = requests.get(sign_url, params={"href": href})
-                response.raise_for_status()
-                return response.json().get("href", href)
-            except Exception as e:
-                print(f"Warning: Failed to sign URL {href}: {str(e)}")
-                return href
-        return href
-
     # Function to download a single asset
     def download_asset(item, asset_key, asset):
+        item = pc.sign(item)
         item_id = item.id

         # Get the asset URL and sign it if needed
-        asset_url =
-
+        asset_url = item.assets[asset_key].href
         # Determine output filename
         if asset.media_type:
             # Use appropriate file extension based on media type
@@ -995,13 +977,11 @@ def pc_stac_download(
         print(f"Processing STAC item: {item_id}")

         # Determine which assets to download
-        if
-            assets_to_download = {
-                k: v for k, v in item.assets.items() if k in asset_keys
-            }
+        if assets:
+            assets_to_download = {k: v for k, v in item.assets.items() if k in assets}
             if not assets_to_download:
                 print(
-                    f"Warning: None of the specified asset keys {
+                    f"Warning: None of the specified asset keys {assets} found in item {item_id}"
                 )
                 print(f"Available asset keys: {list(item.assets.keys())}")
                 continue
@@ -1038,3 +1018,127 @@ def pc_stac_download(
     print(f"\nDownloaded {total_assets} assets for {len(results)} items")

     return results
+
+
+def pc_item_asset_list(item):
+    """
+    Retrieve the list of asset keys from a STAC item in the Planetary Computer catalog.
+
+    Args:
+        item (str): The URL of the STAC item.
+
+    Returns:
+        list: A list of asset keys available in the signed STAC item.
+    """
+    if isinstance(item, str):
+        item = pystac.Item.from_file(item)
+
+    if not isinstance(item, pystac.Item):
+        raise ValueError("item_url must be a string (URL) or a pystac.Item object")
+
+    return list(item.assets.keys())
+
+
+def read_pc_item_asset(item, asset, output=None, as_cog=True, **kwargs):
+    """
+    Read a specific asset from a STAC item in the Planetary Computer catalog.
+
+    Args:
+        item (str): The URL of the STAC item.
+        asset (str): The key of the asset to read.
+        output (str, optional): If specified, the path to save the asset as a raster file.
+        as_cog (bool, optional): If True, save the asset as a Cloud Optimized GeoTIFF (COG).
+
+    Returns:
+        xarray.DataArray: The data array for the specified asset.
+    """
+    if isinstance(item, str):
+        item = pystac.Item.from_file(item)
+
+    if not isinstance(item, pystac.Item):
+        raise ValueError("item must be a string (URL) or a pystac.Item object")
+
+    signed_item = pc.sign(item)
+
+    if asset not in signed_item.assets:
+        raise ValueError(
+            f"Asset '{asset}' not found in item '{item.id}'. It has available assets: {list(signed_item.assets.keys())}"
+        )
+
+    asset_url = signed_item.assets[asset].href
+    ds = rxr.open_rasterio(asset_url)
+
+    if as_cog:
+        kwargs["driver"] = "COG"  # Ensure the output is a Cloud Optimized GeoTIFF
+
+    if output:
+        print(f"Saving asset '{asset}' to {output}...")
+        ds.rio.to_raster(output, **kwargs)
+        print(f"Asset '{asset}' saved successfully.")
+    return ds
+
+
+def view_pc_item(
+    url=None,
+    collection=None,
+    item=None,
+    assets=None,
+    bands=None,
+    titiler_endpoint=None,
+    name="STAC Item",
+    attribution="Planetary Computer",
+    opacity=1.0,
+    shown=True,
+    fit_bounds=True,
+    layer_index=None,
+    backend="folium",
+    basemap=None,
+    map_args=None,
+    **kwargs,
+):
+
+    if backend == "folium":
+        import leafmap.foliumap as leafmap
+
+    elif backend == "ipyleaflet":
+        import leafmap.leafmap as leafmap
+
+    else:
+        raise ValueError(
+            f"Unsupported backend: {backend}. Supported backends are 'folium' and 'ipyleaflet'."
+        )
+
+    if map_args is None:
+        map_args = {}
+
+    if "draw_control" not in map_args:
+        map_args["draw_control"] = False
+
+    if url is not None:
+
+        item = pystac.Item.from_file(url)
+
+    if isinstance(item, pystac.Item):
+        collection = item.collection_id
+        if assets is None:
+            assets = [list(item.assets.keys())[0]]
+        item = item.id
+
+    m = leafmap.Map(**map_args)
+    if basemap is not None:
+        m.add_basemap(basemap)
+    m.add_stac_layer(
+        collection=collection,
+        item=item,
+        assets=assets,
+        bands=bands,
+        titiler_endpoint=titiler_endpoint,
+        name=name,
+        attribution=attribution,
+        opacity=opacity,
+        shown=shown,
+        fit_bounds=fit_bounds,
+        layer_index=layer_index,
+        **kwargs,
+    )
+    return m
geoai/extract.py
CHANGED

@@ -19,6 +19,7 @@ from torchvision.models.detection import (
     maskrcnn_resnet50_fpn,
 )
 from tqdm import tqdm
+import time

 # Local Imports
 from .utils import get_raster_stats
@@ -2117,6 +2118,7 @@ class ObjectDetector:
         confidence_threshold=0.5,
         min_object_area=100,
         max_object_area=None,
+        n_workers=None,
         **kwargs,
     ):
         """
@@ -2128,14 +2130,103 @@ class ObjectDetector:
             confidence_threshold: Minimum confidence score (0.0-1.0). Default: 0.5
             min_object_area: Minimum area in pixels to keep an object. Default: 100
             max_object_area: Maximum area in pixels to keep an object. Default: None
+            n_workers: int, default=None
+                The number of worker threads to use.
+                "None" means single-threaded processing.
+                "-1" means using all available CPU processors.
+                Positive integer means using that specific number of threads.
             **kwargs: Additional parameters

         Returns:
             GeoDataFrame with car detections and confidence values
         """

+        def _process_single_component(
+            component_mask,
+            conf_data,
+            transform,
+            confidence_threshold,
+            min_object_area,
+            max_object_area,
+        ):
+            # Get confidence value
+            conf_region = conf_data[component_mask > 0]
+            if len(conf_region) > 0:
+                confidence = np.mean(conf_region) / 255.0
+            else:
+                confidence = 0.0
+
+            # Skip if confidence is below threshold
+            if confidence < confidence_threshold:
+                return None
+
+            # Find contours
+            contours, _ = cv2.findContours(
+                component_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE
+            )
+
+            results = []
+
+            for contour in contours:
+                # Filter by size
+                area = cv2.contourArea(contour)
+                if area < min_object_area:
+                    continue
+
+                if max_object_area is not None and area > max_object_area:
+                    continue
+
+                # Get minimum area rectangle
+                rect = cv2.minAreaRect(contour)
+                box_points = cv2.boxPoints(rect)
+
+                # Convert to geographic coordinates
+                geo_points = []
+                for x, y in box_points:
+                    gx, gy = transform * (x, y)
+                    geo_points.append((gx, gy))
+
+                # Create polygon
+                poly = Polygon(geo_points)
+                results.append((poly, confidence, area))
+
+            return results
+
+        import concurrent.futures
+        from functools import partial
+
+        def process_component(args):
+            """
+            Helper function to process a single component
+            """
+            (
+                label,
+                labeled_mask,
+                conf_data,
+                transform,
+                confidence_threshold,
+                min_object_area,
+                max_object_area,
+            ) = args
+
+            # Create mask for this component
+            component_mask = (labeled_mask == label).astype(np.uint8)
+
+            return _process_single_component(
+                component_mask,
+                conf_data,
+                transform,
+                confidence_threshold,
+                min_object_area,
+                max_object_area,
+            )
+
+        start_time = time.time()
         print(f"Processing masks from: {masks_path}")

+        if n_workers == -1:
+            n_workers = os.cpu_count()
+
         with rasterio.open(masks_path) as src:
             # Read mask and confidence bands
             mask_data = src.read(1)
@@ -2155,56 +2246,68 @@ class ObjectDetector:
             confidences = []
             pixels = []

-
-
-
-                component_mask = (labeled_mask == label).astype(np.uint8)
-
-                # Get confidence value (mean of non-zero values in this region)
-                conf_region = conf_data[component_mask > 0]
-                if len(conf_region) > 0:
-                    confidence = (
-                        np.mean(conf_region) / 255.0
-                    )  # Convert back to 0-1 range
-                else:
-                    confidence = 0.0
-
-                # Skip if confidence is below threshold
-                if confidence < confidence_threshold:
-                    continue
-
-                # Find contours
-                contours, _ = cv2.findContours(
-                    component_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE
+            if n_workers is None or n_workers == 1:
+                print(
+                    "Using single-threaded processing, you can speed up processing by setting n_workers > 1"
                 )
+                # Add progress bar
+                for label in tqdm(
+                    range(1, num_features + 1), desc="Processing components"
+                ):
+                    # Create mask for this component
+                    component_mask = (labeled_mask == label).astype(np.uint8)
+
+                    result = _process_single_component(
+                        component_mask,
+                        conf_data,
+                        transform,
+                        confidence_threshold,
+                        min_object_area,
+                        max_object_area,
+                    )

-
-
-
-
-
-
-                if max_object_area is not None:
-                    if area > max_object_area:
-                        continue
-
-                # Get minimum area rectangle
-                rect = cv2.minAreaRect(contour)
-                box_points = cv2.boxPoints(rect)
+                    if result:
+                        for poly, confidence, area in result:
+                            # Add to lists
+                            polygons.append(poly)
+                            confidences.append(confidence)
+                            pixels.append(area)

-
-
-
-
-
-
-
-
+            else:
+                # Process components in parallel
+                print(f"Using {n_workers} workers for parallel processing")
+
+                process_args = [
+                    (
+                        label,
+                        labeled_mask,
+                        conf_data,
+                        transform,
+                        confidence_threshold,
+                        min_object_area,
+                        max_object_area,
+                    )
+                    for label in range(1, num_features + 1)
+                ]
+
+                with concurrent.futures.ThreadPoolExecutor(
+                    max_workers=n_workers
+                ) as executor:
+                    results = list(
+                        tqdm(
+                            executor.map(process_component, process_args),
+                            total=num_features,
+                            desc="Processing components",
+                        )
+                    )

-
-
-
-
+                for result in results:
+                    if result:
+                        for poly, confidence, area in result:
+                            # Add to lists
+                            polygons.append(poly)
+                            confidences.append(confidence)
+                            pixels.append(area)

             # Create GeoDataFrame
             if polygons:
@@ -2223,8 +2326,12 @@ class ObjectDetector:
                 gdf.to_file(output_path, driver="GeoJSON")
                 print(f"Saved {len(gdf)} objects with confidence to {output_path}")

+                end_time = time.time()
+                print(f"Total processing time: {end_time - start_time:.2f} seconds")
                 return gdf
             else:
+                end_time = time.time()
+                print(f"Total processing time: {end_time - start_time:.2f} seconds")
                 print("No valid polygons found")
                 return None

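
In short, extract.py moves the per-component mask post-processing into a local _process_single_component helper and adds an n_workers argument to an ObjectDetector method: None or 1 keeps the original single-threaded loop, -1 expands to os.cpu_count(), and any other positive value sizes a concurrent.futures.ThreadPoolExecutor. A standalone sketch of the same dispatch pattern, with a stand-in worker function rather than the library's own code:

import concurrent.futures
import os

from tqdm import tqdm


def process_one(label):
    # Stand-in for _process_single_component: return None or a list of results.
    return [(label, label * 2)]


def run(labels, n_workers=None):
    if n_workers == -1:
        n_workers = os.cpu_count()

    if n_workers is None or n_workers == 1:
        # Single-threaded path (the default), mirroring the original loop.
        results = [process_one(lbl) for lbl in tqdm(labels, desc="Processing components")]
    else:
        # Parallel path: threads share the in-memory arrays, so nothing needs to be pickled.
        with concurrent.futures.ThreadPoolExecutor(max_workers=n_workers) as executor:
            results = list(
                tqdm(
                    executor.map(process_one, labels),
                    total=len(labels),
                    desc="Processing components",
                )
            )

    # Flatten the per-component results, skipping components that returned None.
    return [item for res in results if res for item in res]


print(run(range(1, 11), n_workers=2))
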
geoai/geoai.py
CHANGED

@@ -1,15 +1,28 @@
 """Main module."""

+import leafmap
+
 from .download import (
     download_naip,
     download_overture_buildings,
     download_pc_stac_item,
     pc_collection_list,
+    pc_item_asset_list,
     pc_stac_search,
     pc_stac_download,
+    read_pc_item_asset,
+    view_pc_item,
 )
 from .extract import *
 from .hf import *
 from .segment import *
 from .train import object_detection, train_MaskRCNN_model
 from .utils import *
+
+
+class Map(leafmap.Map):
+    """A subclass of leafmap.Map for GeoAI applications."""
+
+    def __init__(self, *args, **kwargs):
+        """Initialize the Map class."""
+        super().__init__(*args, **kwargs)
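
In short, geoai.py now imports leafmap, re-exports the new Planetary Computer helpers, and adds a Map class that subclasses leafmap.Map. A brief sketch of how the new class might be used; the constructor arguments and add_basemap call rely on leafmap's standard API, and the basemap name is illustrative:

from geoai.geoai import Map

# Map is a thin wrapper around leafmap.Map, so leafmap's usual arguments apply.
m = Map(center=[40, -100], zoom=4)
m.add_basemap("Esri.WorldImagery")  # inherited from leafmap.Map
m
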
geoai_py-0.4.3.dist-info/RECORD
ADDED

@@ -0,0 +1,15 @@
+geoai/__init__.py,sha256=b12E2HztHEPaaKMVKpk6GPjC7ElUMxOW9pYKl4VZkmE,3592
+geoai/download.py,sha256=BvCEpcBwZlEOWieixsdyvQDiE0CXRjU7oayLmy5_Dgs,40110
+geoai/extract.py,sha256=GocJufMmrwEWxNBL1J91EXXHL8AKcO8m_lmtUF5AKPw,119102
+geoai/geoai.py,sha256=BqKdWzNruDdGqwqoyTaJzUq4lKGj-RDBZlSO3t3-GxQ,626
+geoai/hf.py,sha256=mLKGxEAS5eHkxZLwuLpYc1o7e3-7QIXdBv-QUY-RkFk,17072
+geoai/segment.py,sha256=g3YW17ftr--CKq6VB32TJEPY8owGQ7uQ0sg_tUT2ooE,13681
+geoai/segmentation.py,sha256=AtPzCvguHAEeuyXafa4bzMFATvltEYcah1B8ZMfkM_s,11373
+geoai/train.py,sha256=-l2j1leTxDnFDLaBslu1q6CobXjm3LEdiQwUWOU8P6M,40088
+geoai/utils.py,sha256=Wg9jbMBKUZSGUmU8Vkp6v19QcDNg5KmcyZxuHqJvgnc,233016
+geoai_py-0.4.3.dist-info/licenses/LICENSE,sha256=vN2L5U7cZ6ZkOHFmc8WiGlsogWsZc5dllMeNxnKVOZg,1070
+geoai_py-0.4.3.dist-info/METADATA,sha256=geDmJ-1zHImsOdcj4gypgq8JqSy8MznnxAnICwh0EbA,6049
+geoai_py-0.4.3.dist-info/WHEEL,sha256=aoLN90hLOL0c0qxXMxWYUM3HA3WmFGZQqEJHX1V_OJE,109
+geoai_py-0.4.3.dist-info/entry_points.txt,sha256=uGp3Az3HURIsRHP9v-ys0hIbUuBBNUfXv6VbYHIXeg4,41
+geoai_py-0.4.3.dist-info/top_level.txt,sha256=1YkCUWu-ii-0qIex7kbwAvfei-gos9ycyDyUCJPNWHY,6
+geoai_py-0.4.3.dist-info/RECORD,,
geoai_py-0.4.2.dist-info/RECORD
DELETED

@@ -1,15 +0,0 @@
-geoai/__init__.py,sha256=kHB6b3uOdCoxMpIpfpXtPxg-KJQVsKJveb8zTw5Mxzs,3592
-geoai/download.py,sha256=SE81OrhH2XSH02xtAXQyh5ltBqA8K7ksGT1Lm6SMqx8,37593
-geoai/extract.py,sha256=bR8TkUXncW0rinsBO5oxlVFGsSVoj6xnm5xdDc-J1Xk,115490
-geoai/geoai.py,sha256=qY1HWmJQ0ZgIPtlgWd4gpuCGw6dovdg8D5pAtHqfr8U,334
-geoai/hf.py,sha256=mLKGxEAS5eHkxZLwuLpYc1o7e3-7QIXdBv-QUY-RkFk,17072
-geoai/segment.py,sha256=g3YW17ftr--CKq6VB32TJEPY8owGQ7uQ0sg_tUT2ooE,13681
-geoai/segmentation.py,sha256=AtPzCvguHAEeuyXafa4bzMFATvltEYcah1B8ZMfkM_s,11373
-geoai/train.py,sha256=-l2j1leTxDnFDLaBslu1q6CobXjm3LEdiQwUWOU8P6M,40088
-geoai/utils.py,sha256=Wg9jbMBKUZSGUmU8Vkp6v19QcDNg5KmcyZxuHqJvgnc,233016
-geoai_py-0.4.2.dist-info/licenses/LICENSE,sha256=vN2L5U7cZ6ZkOHFmc8WiGlsogWsZc5dllMeNxnKVOZg,1070
-geoai_py-0.4.2.dist-info/METADATA,sha256=Sc6m-MyCti-_Dnan5wp6GIy88LwuBAF_IYrFF34SRJE,6049
-geoai_py-0.4.2.dist-info/WHEEL,sha256=9bhjOwO--Rs91xaPcBdlYFUmIudhuXqFlPriQeYQITw,109
-geoai_py-0.4.2.dist-info/entry_points.txt,sha256=uGp3Az3HURIsRHP9v-ys0hIbUuBBNUfXv6VbYHIXeg4,41
-geoai_py-0.4.2.dist-info/top_level.txt,sha256=1YkCUWu-ii-0qIex7kbwAvfei-gos9ycyDyUCJPNWHY,6
-geoai_py-0.4.2.dist-info/RECORD,,

{geoai_py-0.4.2.dist-info → geoai_py-0.4.3.dist-info}/entry_points.txt
File without changes

{geoai_py-0.4.2.dist-info → geoai_py-0.4.3.dist-info}/licenses/LICENSE
File without changes

{geoai_py-0.4.2.dist-info → geoai_py-0.4.3.dist-info}/top_level.txt
File without changes