geoai-py 0.2.2__py2.py3-none-any.whl → 0.2.3__py2.py3-none-any.whl
This diff reflects the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- geoai/__init__.py +1 -1
- geoai/common.py +188 -3
- geoai/extract.py +2 -2
- geoai/geoai.py +1 -0
- geoai/preprocess.py +185 -116
- {geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/METADATA +3 -1
- geoai_py-0.2.3.dist-info/RECORD +13 -0
- geoai_py-0.2.2.dist-info/RECORD +0 -13
- {geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/LICENSE +0 -0
- {geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/WHEEL +0 -0
- {geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/entry_points.txt +0 -0
- {geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/top_level.txt +0 -0
geoai/__init__.py
CHANGED
geoai/common.py
CHANGED
@@ -4,7 +4,7 @@ import os
 from collections.abc import Iterable
 from typing import Any, Dict, List, Optional, Tuple, Type, Union, Callable
 import matplotlib.pyplot as plt
-
+import geopandas as gpd
 import leafmap
 import torch
 import numpy as np
@@ -17,7 +17,7 @@ from torchgeo.samplers import RandomGeoSampler, Units
 from torchgeo.transforms import indices


-def viz_raster(
+def view_raster(
     source: str,
     indexes: Optional[int] = None,
     colormap: Optional[str] = None,
@@ -85,7 +85,7 @@ def viz_raster(
     return m


-def
+def view_image(
     image: Union[np.ndarray, torch.Tensor],
     transpose: bool = False,
     bdx: Optional[int] = None,
@@ -436,3 +436,188 @@ def dict_to_image(
     else:
         image = leafmap.array_to_image(da, **kwargs)
     return image
+
+
+def view_vector(
+    vector_data,
+    column=None,
+    cmap="viridis",
+    figsize=(10, 10),
+    title=None,
+    legend=True,
+    basemap=False,
+    alpha=0.7,
+    edge_color="black",
+    classification="quantiles",
+    n_classes=5,
+    highlight_index=None,
+    highlight_color="red",
+    scheme=None,
+    save_path=None,
+    dpi=300,
+):
+    """
+    Visualize vector datasets with options for styling, classification, basemaps and more.
+
+    This function visualizes GeoDataFrame objects with customizable symbology.
+    It supports different vector types (points, lines, polygons), attribute-based
+    classification, and background basemaps.
+
+    Args:
+        vector_data (geopandas.GeoDataFrame): The vector dataset to visualize.
+        column (str, optional): Column to use for choropleth mapping. If None,
+            a single color will be used. Defaults to None.
+        cmap (str or matplotlib.colors.Colormap, optional): Colormap to use for
+            choropleth mapping. Defaults to "viridis".
+        figsize (tuple, optional): Figure size as (width, height) in inches.
+            Defaults to (10, 10).
+        title (str, optional): Title for the plot. Defaults to None.
+        legend (bool, optional): Whether to display a legend. Defaults to True.
+        basemap (bool, optional): Whether to add a web basemap. Requires contextily.
+            Defaults to False.
+        alpha (float, optional): Transparency of the vector features, between 0-1.
+            Defaults to 0.7.
+        edge_color (str, optional): Color for feature edges. Defaults to "black".
+        classification (str, optional): Classification method for choropleth maps.
+            Options: "quantiles", "equal_interval", "natural_breaks".
+            Defaults to "quantiles".
+        n_classes (int, optional): Number of classes for choropleth maps.
+            Defaults to 5.
+        highlight_index (list, optional): List of indices to highlight.
+            Defaults to None.
+        highlight_color (str, optional): Color to use for highlighted features.
+            Defaults to "red".
+        scheme (str, optional): MapClassify classification scheme. Overrides
+            classification parameter if provided. Defaults to None.
+        save_path (str, optional): Path to save the figure. If None, the figure
+            is not saved. Defaults to None.
+        dpi (int, optional): DPI for saved figure. Defaults to 300.
+
+    Returns:
+        matplotlib.axes.Axes: The Axes object containing the plot.
+
+    Examples:
+        >>> import geopandas as gpd
+        >>> cities = gpd.read_file("cities.shp")
+        >>> view_vector(cities, "population", cmap="Reds", basemap=True)
+
+        >>> roads = gpd.read_file("roads.shp")
+        >>> view_vector(roads, "type", basemap=True, figsize=(12, 8))
+    """
+    import contextily as ctx
+
+    if isinstance(vector_data, str):
+        vector_data = gpd.read_file(vector_data)
+
+    # Check if input is a GeoDataFrame
+    if not isinstance(vector_data, gpd.GeoDataFrame):
+        raise TypeError("Input data must be a GeoDataFrame")
+
+    # Make a copy to avoid changing the original data
+    gdf = vector_data.copy()
+
+    # Set up figure and axis
+    fig, ax = plt.subplots(figsize=figsize)
+
+    # Determine geometry type
+    geom_type = gdf.geometry.iloc[0].geom_type
+
+    # Plotting parameters
+    plot_kwargs = {"alpha": alpha, "ax": ax}
+
+    # Set up keyword arguments based on geometry type
+    if "Point" in geom_type:
+        plot_kwargs["markersize"] = 50
+        plot_kwargs["edgecolor"] = edge_color
+    elif "Line" in geom_type:
+        plot_kwargs["linewidth"] = 1
+    elif "Polygon" in geom_type:
+        plot_kwargs["edgecolor"] = edge_color
+
+    # Classification options
+    if column is not None:
+        if scheme is not None:
+            # Use mapclassify scheme if provided
+            plot_kwargs["scheme"] = scheme
+        else:
+            # Use classification parameter
+            if classification == "quantiles":
+                plot_kwargs["scheme"] = "quantiles"
+            elif classification == "equal_interval":
+                plot_kwargs["scheme"] = "equal_interval"
+            elif classification == "natural_breaks":
+                plot_kwargs["scheme"] = "fisher_jenks"
+
+        plot_kwargs["k"] = n_classes
+        plot_kwargs["cmap"] = cmap
+        plot_kwargs["column"] = column
+        plot_kwargs["legend"] = legend
+
+    # Plot the main data
+    gdf.plot(**plot_kwargs)
+
+    # Highlight specific features if requested
+    if highlight_index is not None:
+        gdf.iloc[highlight_index].plot(
+            ax=ax, color=highlight_color, edgecolor="black", linewidth=2, zorder=5
+        )
+
+    # Add basemap if requested
+    if basemap:
+        try:
+            ctx.add_basemap(ax, crs=gdf.crs, source=ctx.providers.OpenStreetMap.Mapnik)
+        except Exception as e:
+            print(f"Could not add basemap: {e}")
+
+    # Set title if provided
+    if title:
+        ax.set_title(title, fontsize=14)
+
+    # Remove axes if not needed
+    ax.set_axis_off()
+
+    # Adjust layout
+    plt.tight_layout()
+
+    # Save figure if a path is provided
+    if save_path:
+        plt.savefig(save_path, dpi=dpi, bbox_inches="tight")
+
+    return ax
+
+
+def view_vector_interactive(
+    vector_data,
+    **kwargs,
+):
+    """
+    Visualize vector datasets with options for styling, classification, basemaps and more.
+
+    This function visualizes GeoDataFrame objects with customizable symbology.
+    It supports different vector types (points, lines, polygons), attribute-based
+    classification, and background basemaps.
+
+    Args:
+        vector_data (geopandas.GeoDataFrame): The vector dataset to visualize.
+        **kwargs: Additional keyword arguments to pass to GeoDataFrame.explore() function.
+            See https://geopandas.org/en/stable/docs/reference/api/geopandas.GeoDataFrame.explore.html
+
+    Returns:
+        folium.Map: The map object with the vector data added.
+
+    Examples:
+        >>> import geopandas as gpd
+        >>> cities = gpd.read_file("cities.shp")
+        >>> view_vector_interactive(cities)
+
+        >>> roads = gpd.read_file("roads.shp")
+        >>> view_vector_interactive(roads, figsize=(12, 8))
+    """
+    if isinstance(vector_data, str):
+        vector_data = gpd.read_file(vector_data)
+
+    # Check if input is a GeoDataFrame
+    if not isinstance(vector_data, gpd.GeoDataFrame):
+        raise TypeError("Input data must be a GeoDataFrame")
+
+    return vector_data.explore(**kwargs)
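
For orientation, the two functions added above accept either a `geopandas.GeoDataFrame` or a file path (strings are passed through `gpd.read_file`). A minimal usage sketch based on the signatures and docstrings in this hunk; the `buildings.geojson` path and the `height` column are hypothetical placeholders:

```python
from geoai.common import view_vector, view_vector_interactive

# Static matplotlib choropleth with quantile classes and a web basemap
ax = view_vector(
    "buildings.geojson",        # hypothetical path; a GeoDataFrame also works
    column="height",            # hypothetical attribute column
    cmap="Reds",
    classification="quantiles",
    n_classes=5,
    basemap=True,               # needs contextily, now a declared dependency
)

# Interactive folium map, kwargs forwarded to GeoDataFrame.explore()
m = view_vector_interactive("buildings.geojson", column="height")
```

The `contextily` and `mapclassify` requirements added to the package metadata below back the `basemap` and `classification`/`scheme` options, respectively.
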
geoai/extract.py
CHANGED
@@ -198,7 +198,7 @@ class BuildingFootprintExtractor:

         # Define the repository ID and model filename
         repo_id = "giswqs/geoai"  # Update with your actual username/repo
-        filename = "
+        filename = "building_footprints_usa.pth"

         # Ensure cache directory exists
         # cache_dir = os.path.join(
@@ -718,7 +718,7 @@ class BuildingFootprintExtractor:
         if "confidence" in gdf.columns:
             # Create a colorbar legend
             sm = plt.cm.ScalarMappable(
-                cmap=plt.
+                cmap=plt.get_cmap("viridis"),
                 norm=plt.Normalize(gdf.confidence.min(), gdf.confidence.max()),
             )
             sm.set_array([])
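
The first hunk pins the pretrained checkpoint filename in the Hugging Face Hub repo `giswqs/geoai`. The download call itself is outside this hunk, but assuming the extractor fetches the weights with `huggingface_hub` (a declared dependency of the package), the equivalent standalone call would look roughly like this sketch:

```python
from huggingface_hub import hf_hub_download

# repo_id and filename taken from the hunk above; caching is left to
# huggingface_hub's default cache directory
model_path = hf_hub_download(
    repo_id="giswqs/geoai",
    filename="building_footprints_usa.pth",
)
print(model_path)  # local path to the cached .pth checkpoint
```
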
geoai/geoai.py
CHANGED
geoai/preprocess.py
CHANGED
@@ -13,7 +13,7 @@ import pandas as pd
 from rasterio.windows import Window
 from rasterio import features
 from rasterio.plot import show
-from shapely.geometry import box, shape
+from shapely.geometry import box, shape, mapping
 import matplotlib.pyplot as plt
 from tqdm import tqdm
 from torchvision.transforms import RandomRotation
@@ -945,32 +945,6 @@ def batch_raster_to_vector(
     return None


-# # Example usage
-# if __name__ == "__main__":
-# # Single file conversion example
-# gdf = raster_to_vector(
-# raster_path="output/labels/tile_000001.tif",
-# output_path="output/labels/tile_000001.geojson",
-# threshold=0,
-# min_area=10,
-# simplify_tolerance=0.5,
-# class_values=[1],  # For a binary mask, use [1]
-# attribute_name='class',
-# plot_result=True
-# )
-
-# Batch conversion example
-# batch_raster_to_vector(
-# input_dir="path/to/labels",
-# output_dir="path/to/vectors",
-# pattern="*.tif",
-# threshold=0,
-# min_area=10,
-# class_values=[1, 2, 3],  # For a multiclass mask
-# merge_output=True
-# )
-
-
 def vector_to_raster(
     vector_path,
     output_path=None,
@@ -1280,39 +1254,6 @@ def batch_vector_to_raster(
     return output_files


-# # Example usage
-# if __name__ == "__main__":
-# # Single file conversion example
-# raster_data = vector_to_raster(
-# vector_path="buildings_train.geojson",
-# output_path="buildings_train.tif",
-# reference_raster="naip_train.tif",  # Optional, can use other parameters instead
-# # attribute_field="class",  # Optional, uses field values for pixel values
-# all_touched=True,  # Ensures small features are captured
-# plot_result=True
-# )
-
-# Example with custom dimensions
-# raster_data = vector_to_raster(
-# vector_path="path/to/buildings.geojson",
-# output_path="path/to/rasterized_buildings.tif",
-# pixel_size=0.5,  # 0.5 meter resolution
-# bounds=(454780, 5277567, 456282, 5278242),  # from original data
-# crs="EPSG:26911",
-# output_shape=(1350, 3000),  # custom dimensions
-# attribute_field="class"
-# )
-
-# Batch conversion example
-# output_files = batch_vector_to_raster(
-# vector_path="path/to/buildings.geojson",
-# output_dir="path/to/output",
-# reference_rasters=["path/to/ref1.tif", "path/to/ref2.tif"],
-# attribute_field="class",
-# all_touched=True
-# )
-
-
 def export_geotiff_tiles(
     in_raster,
     out_folder,
@@ -1823,8 +1764,22 @@ def export_geotiff_tiles(
     return stats


-def create_overview_image(
-
+def create_overview_image(
+    src, tile_coordinates, output_path, tile_size, stride, geojson_path=None
+):
+    """Create an overview image showing all tiles and their status, with optional GeoJSON export.
+
+    Args:
+        src (rasterio.io.DatasetReader): The source raster dataset.
+        tile_coordinates (list): A list of dictionaries containing tile information.
+        output_path (str): The path where the overview image will be saved.
+        tile_size (int): The size of each tile in pixels.
+        stride (int): The stride between tiles in pixels. Controls overlap between adjacent tiles.
+        geojson_path (str, optional): If provided, exports the tile rectangles as GeoJSON to this path.
+
+    Returns:
+        str: Path to the saved overview image.
+    """
     # Read a reduced version of the source image
     overview_scale = max(
         1, int(max(src.width, src.height) / 2000)
@@ -1857,6 +1812,10 @@ def create_overview_image(src, tile_coordinates, output_path, tile_size, stride)
     plt.figure(figsize=(12, 12))
     plt.imshow(rgb)

+    # If GeoJSON export is requested, prepare GeoJSON structures
+    if geojson_path:
+        features = []
+
     # Draw tile boundaries
     for tile in tile_coordinates:
         # Convert bounds to pixel coordinates in overview
@@ -1886,6 +1845,43 @@ def create_overview_image(src, tile_coordinates, output_path, tile_size, stride)
             fontsize=8,
         )

+        # Add to GeoJSON features if exporting
+        if geojson_path:
+            # Create a polygon from the bounds (already in geo-coordinates)
+            minx, miny, maxx, maxy = bounds
+            polygon = box(minx, miny, maxx, maxy)
+
+            # Calculate overlap with neighboring tiles
+            overlap = 0
+            if stride < tile_size:
+                overlap = tile_size - stride
+
+            # Create a GeoJSON feature
+            feature = {
+                "type": "Feature",
+                "geometry": mapping(polygon),
+                "properties": {
+                    "index": tile["index"],
+                    "has_features": tile["has_features"],
+                    "bounds_pixel": [
+                        tile["x"],
+                        tile["y"],
+                        tile["x"] + tile_size,
+                        tile["y"] + tile_size,
+                    ],
+                    "tile_size_px": tile_size,
+                    "stride_px": stride,
+                    "overlap_px": overlap,
+                },
+            }
+
+            # Add any additional properties from the tile
+            for key, value in tile.items():
+                if key not in ["x", "y", "index", "has_features", "bounds"]:
+                    feature["properties"][key] = value
+
+            features.append(feature)
+
     plt.title("Tile Overview (Green = Contains Features, Red = Empty)")
     plt.axis("off")
     plt.tight_layout()
@@ -1894,42 +1890,134 @@ def create_overview_image(src, tile_coordinates, output_path, tile_size, stride)

     print(f"Overview image saved to {output_path}")

+    # Export GeoJSON if requested
+    if geojson_path:
+        geojson_collection = {
+            "type": "FeatureCollection",
+            "features": features,
+            "properties": {
+                "crs": (
+                    src.crs.to_string()
+                    if hasattr(src.crs, "to_string")
+                    else str(src.crs)
+                ),
+                "total_tiles": len(features),
+                "source_raster_dimensions": [src.width, src.height],
+            },
+        }
+
+        # Save to file
+        with open(geojson_path, "w") as f:
+            json.dump(geojson_collection, f)
+
+        print(f"GeoJSON saved to {geojson_path}")
+
+    return output_path
+

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-#
-
-
-
-
-
-
-
-
-#
-
-#
-
-
-#
-
+def export_tiles_to_geojson(
+    tile_coordinates, src, output_path, tile_size=None, stride=None
+):
+    """
+    Export tile rectangles directly to GeoJSON without creating an overview image.
+
+    Args:
+        tile_coordinates (list): A list of dictionaries containing tile information.
+        src (rasterio.io.DatasetReader): The source raster dataset.
+        output_path (str): The path where the GeoJSON will be saved.
+        tile_size (int, optional): The size of each tile in pixels. Only needed if not in tile_coordinates.
+        stride (int, optional): The stride between tiles in pixels. Used to calculate overlaps between tiles.
+
+    Returns:
+        str: Path to the saved GeoJSON file.
+    """
+    features = []
+
+    for tile in tile_coordinates:
+        # Get the size from the tile or use the provided parameter
+        tile_width = tile.get("width", tile.get("size", tile_size))
+        tile_height = tile.get("height", tile.get("size", tile_size))
+
+        if tile_width is None or tile_height is None:
+            raise ValueError(
+                "Tile size not found in tile data and no tile_size parameter provided"
+            )
+
+        # Get bounds from the tile
+        if "bounds" in tile:
+            # If bounds are already in geo coordinates
+            minx, miny, maxx, maxy = tile["bounds"]
+        else:
+            # Try to calculate bounds from transform if available
+            if hasattr(src, "transform"):
+                # Convert pixel coordinates to geo coordinates
+                window_transform = src.transform
+                x, y = tile["x"], tile["y"]
+                minx = window_transform[2] + x * window_transform[0]
+                maxy = window_transform[5] + y * window_transform[4]
+                maxx = minx + tile_width * window_transform[0]
+                miny = maxy + tile_height * window_transform[4]
+            else:
+                raise ValueError(
+                    "Cannot determine bounds. Neither 'bounds' in tile nor transform in src."
+                )
+
+        # Calculate overlap with neighboring tiles if stride is provided
+        overlap = 0
+        if stride is not None and stride < tile_width:
+            overlap = tile_width - stride
+
+        # Create a polygon from the bounds
+        polygon = box(minx, miny, maxx, maxy)
+
+        # Create a GeoJSON feature
+        feature = {
+            "type": "Feature",
+            "geometry": mapping(polygon),
+            "properties": {
+                "index": tile["index"],
+                "has_features": tile.get("has_features", False),
+                "tile_width_px": tile_width,
+                "tile_height_px": tile_height,
+            },
+        }
+
+        # Add overlap information if stride is provided
+        if stride is not None:
+            feature["properties"]["stride_px"] = stride
+            feature["properties"]["overlap_px"] = overlap
+
+        # Add additional properties from the tile
+        for key, value in tile.items():
+            if key not in ["bounds", "geometry"]:
+                feature["properties"][key] = value
+
+        features.append(feature)
+
+    # Create the GeoJSON collection
+    geojson_collection = {
+        "type": "FeatureCollection",
+        "features": features,
+        "properties": {
+            "crs": (
+                src.crs.to_string() if hasattr(src.crs, "to_string") else str(src.crs)
+            ),
+            "total_tiles": len(features),
+            "source_raster_dimensions": (
+                [src.width, src.height] if hasattr(src, "width") else None
+            ),
+        },
+    }
+
+    # Create directory if it doesn't exist
+    os.makedirs(os.path.dirname(os.path.abspath(output_path)) or ".", exist_ok=True)
+
+    # Save to file
+    with open(output_path, "w") as f:
+        json.dump(geojson_collection, f)
+
+    print(f"GeoJSON saved to {output_path}")
+    return output_path


 def export_training_data(
@@ -2674,22 +2762,3 @@ def export_training_data(

     # Return statistics
     return stats, out_folder
-
-
-# if __name__ == "__main__":
-# # Example parameters
-# export_training_data(
-# in_raster="naip_train.tif",
-# out_folder="output",
-# in_class_data="buildings_train.geojson",
-# image_chip_format="GEOTIFF",  # Use GeoTIFF format to preserve georeference
-# tile_size_x=256,
-# tile_size_y=256,
-# stride_x=128,  # Use overlapping tiles to increase chance of capturing features
-# stride_y=128,
-# metadata_format="PASCAL_VOC",
-# class_value_field="class",
-# buffer_radius=2,  # Add small buffer to buildings to ensure they're captured
-# all_touched=True,  # Ensure small features are rasterized
-# save_geotiff=True,  # Always save as GeoTIFF regardless of image_chip_format
-# )
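
Both the new `geojson_path` argument of `create_overview_image()` and the standalone `export_tiles_to_geojson()` write tile footprints as a GeoJSON FeatureCollection. A usage sketch for the standalone helper, based only on the keys the function reads (`index`, `x`/`y` or `bounds`, optional `has_features`); the raster path and the hand-built tile list are hypothetical, since the tiling step that normally produces `tile_coordinates` is not shown in this diff:

```python
import rasterio
from geoai.preprocess import export_tiles_to_geojson

with rasterio.open("naip_train.tif") as src:  # hypothetical raster path
    # Each tile dict may carry pixel offsets ("x", "y") or geographic "bounds";
    # width/height fall back to the tile_size argument when absent.
    tiles = [
        {"index": 0, "x": 0, "y": 0, "has_features": True},
        {"index": 1, "x": 128, "y": 0, "has_features": False},
    ]
    export_tiles_to_geojson(tiles, src, "tiles.geojson", tile_size=256, stride=128)
```
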
{geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.2
 Name: geoai-py
-Version: 0.2.2
+Version: 0.2.3
 Summary: A Python package for using Artificial Intelligence (AI) with geospatial data
 Author-email: Qiusheng Wu <giswqs@gmail.com>
 License: MIT License
@@ -18,11 +18,13 @@ Requires-Python: >=3.9
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: albumentations
+Requires-Dist: contextily
 Requires-Dist: geopandas
 Requires-Dist: huggingface_hub
 Requires-Dist: jupyter-server-proxy
 Requires-Dist: leafmap
 Requires-Dist: localtileserver
+Requires-Dist: mapclassify
 Requires-Dist: overturemaps
 Requires-Dist: planetary-computer
 Requires-Dist: pystac-client
geoai_py-0.2.3.dist-info/RECORD
ADDED
@@ -0,0 +1,13 @@
+geoai/__init__.py,sha256=TyhISsNnMOVPLx0wyM79k-Dgfak4s7QhfKPEIh0rBg8,923
+geoai/common.py,sha256=stnGB-OiTdpb9hTSaaVn5jKAGweFNM760NFKP6suiv0,21735
+geoai/download.py,sha256=4GiDmLrp2wKslgfm507WeZrwOdYcMekgQXxWGbl5cBw,13094
+geoai/extract.py,sha256=wjo5KyaUMsci3oclnOGFqwPe1VIV4vlkFcbJlPWDOzI,31572
+geoai/geoai.py,sha256=r9mFviDJrs7xvbK8kN3kIzZp-yswqTAleUejQvrZD4U,91
+geoai/preprocess.py,sha256=2O3gaGN5imIpmTdudQdTbvCtrBqdmOb0AGNEz81MW2M,109762
+geoai/segmentation.py,sha256=Vcymnhwl_xikt4v9x8CYJq_vId9R1gB7-YzLfwg-F9M,11372
+geoai_py-0.2.3.dist-info/LICENSE,sha256=vN2L5U7cZ6ZkOHFmc8WiGlsogWsZc5dllMeNxnKVOZg,1070
+geoai_py-0.2.3.dist-info/METADATA,sha256=T7lSXz-PE_AUSStKbnE5HUJi7-q5w04VIyiI7TZ4Xrw,5754
+geoai_py-0.2.3.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
+geoai_py-0.2.3.dist-info/entry_points.txt,sha256=uGp3Az3HURIsRHP9v-ys0hIbUuBBNUfXv6VbYHIXeg4,41
+geoai_py-0.2.3.dist-info/top_level.txt,sha256=1YkCUWu-ii-0qIex7kbwAvfei-gos9ycyDyUCJPNWHY,6
+geoai_py-0.2.3.dist-info/RECORD,,
geoai_py-0.2.2.dist-info/RECORD
DELETED
@@ -1,13 +0,0 @@
-geoai/__init__.py,sha256=yEbFyHPNijxgK-75tatrRELZ9TUdZVYo2uPlxCeBFDA,923
-geoai/common.py,sha256=NdfkQKMPHkwr0B5sDpH5Q_7Nt2AmYt9Gw-KE88NsQ5s,15222
-geoai/download.py,sha256=4GiDmLrp2wKslgfm507WeZrwOdYcMekgQXxWGbl5cBw,13094
-geoai/extract.py,sha256=Fh29d5Fj60YiqhMs62lzkd9T_ONTp2UZ4j98We769sg,31563
-geoai/geoai.py,sha256=BCEtHil0P5cettJdMIhblg1pRaV-vHNQFaYmBrtYP3g,68
-geoai/preprocess.py,sha256=pYtf3-eZY76SKd17MvEZ1qNUvblYW5kzQLvZ-ZM4Wwg,106833
-geoai/segmentation.py,sha256=Vcymnhwl_xikt4v9x8CYJq_vId9R1gB7-YzLfwg-F9M,11372
-geoai_py-0.2.2.dist-info/LICENSE,sha256=vN2L5U7cZ6ZkOHFmc8WiGlsogWsZc5dllMeNxnKVOZg,1070
-geoai_py-0.2.2.dist-info/METADATA,sha256=baREpHpvCvfktqiMSWNI-FGOVme8NAj0UkaJhS6Bkm4,5701
-geoai_py-0.2.2.dist-info/WHEEL,sha256=rF4EZyR2XVS6irmOHQIJx2SUqXLZKRMUrjsg8UwN-XQ,109
-geoai_py-0.2.2.dist-info/entry_points.txt,sha256=uGp3Az3HURIsRHP9v-ys0hIbUuBBNUfXv6VbYHIXeg4,41
-geoai_py-0.2.2.dist-info/top_level.txt,sha256=1YkCUWu-ii-0qIex7kbwAvfei-gos9ycyDyUCJPNWHY,6
-geoai_py-0.2.2.dist-info/RECORD,,
{geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/LICENSE
File without changes
{geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/WHEEL
File without changes
{geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/entry_points.txt
File without changes
{geoai_py-0.2.2.dist-info → geoai_py-0.2.3.dist-info}/top_level.txt
File without changes