voxcity 0.3.27__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
Potentially problematic release: this version of voxcity may be problematic; see the package registry for details.
- voxcity/exporter/obj.py +2 -2
- voxcity/geoprocessor/grid.py +5 -1
- voxcity/geoprocessor/mesh.py +21 -1
- voxcity/geoprocessor/polygon.py +95 -1
- voxcity/simulator/solar.py +656 -7
- voxcity/simulator/view.py +635 -2
- voxcity/utils/visualization.py +767 -168
- {voxcity-0.3.27.dist-info → voxcity-0.4.1.dist-info}/METADATA +12 -12
- {voxcity-0.3.27.dist-info → voxcity-0.4.1.dist-info}/RECORD +13 -13
- {voxcity-0.3.27.dist-info → voxcity-0.4.1.dist-info}/WHEEL +1 -1
- {voxcity-0.3.27.dist-info → voxcity-0.4.1.dist-info}/AUTHORS.rst +0 -0
- {voxcity-0.3.27.dist-info → voxcity-0.4.1.dist-info}/LICENSE +0 -0
- {voxcity-0.3.27.dist-info → voxcity-0.4.1.dist-info}/top_level.txt +0 -0
voxcity/exporter/obj.py
CHANGED
```diff
@@ -9,7 +9,7 @@ import numpy as np
 import os
 from numba import njit, prange
 import matplotlib.pyplot as plt
-from ..utils.visualization import
+from ..utils.visualization import get_voxel_color_map
 
 def convert_colormap_indices(original_map):
     """
@@ -209,7 +209,7 @@ def export_obj(array, output_dir, file_name, voxel_size, voxel_color_map=None):
         If None, uses default color map.
     """
     if voxel_color_map is None:
-        voxel_color_map =
+        voxel_color_map = get_voxel_color_map()
 
     # Extract unique voxel values (excluding zero)
     unique_voxel_values = np.unique(array)
```
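With this change, `export_obj` resolves a missing `voxel_color_map` via `get_voxel_color_map()`. A minimal, hypothetical usage sketch: the call signature follows the hunk header above, while the voxel values, the building class ID -3, and the output paths are illustrative.

```python
import numpy as np
from voxcity.exporter.obj import export_obj

# Toy voxel grid; -3 is assumed here to be the building class ID used elsewhere in voxcity
voxel_array = np.zeros((10, 10, 10), dtype=np.int32)
voxel_array[4:6, 4:6, 0:3] = -3

# voxel_color_map is omitted, so the exporter now falls back to get_voxel_color_map()
export_obj(voxel_array, "output", "sample_city", voxel_size=1.0)
```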
voxcity/geoprocessor/grid.py
CHANGED
```diff
@@ -26,7 +26,8 @@ from ..geoprocessor.polygon import (
     filter_buildings,
     extract_building_heights_from_geotiff,
     extract_building_heights_from_gdf,
-    complement_building_heights_from_gdf
+    complement_building_heights_from_gdf,
+    process_building_footprints_by_overlap
 )
 from ..utils.lc import (
     get_class_priority,
@@ -555,6 +556,9 @@ def create_building_height_grid_from_gdf_polygon(
         filtered_gdf = extract_building_heights_from_gdf(filtered_gdf, filtered_gdf_comp)
     elif geotiff_path_comp:
         filtered_gdf = extract_building_heights_from_geotiff(geotiff_path_comp, filtered_gdf)
+
+    # After filtering and complementing heights, process overlapping buildings
+    filtered_gdf = process_building_footprints_by_overlap(filtered_gdf, overlap_threshold=0.5)
 
     # --------------------------------------------------------------------------
     # 2) PREPARE BUILDING POLYGONS & SPATIAL INDEX
```
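In `create_building_height_grid_from_gdf_polygon`, footprint merging now runs after height filtering and complementing, with a 50% overlap threshold. A minimal shapely-only sketch of that criterion, using invented polygons:

```python
from shapely.geometry import box

larger = box(0, 0, 10, 10)      # larger footprint
smaller = box(8, 8, 12, 12)     # 4 x 4 footprint, area 16

# Fraction of the smaller footprint covered by the larger one
overlap_ratio = smaller.intersection(larger).area / smaller.area
print(overlap_ratio)            # 0.25: below the 0.5 threshold, so the ids would stay separate
```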
voxcity/geoprocessor/mesh.py
CHANGED
```diff
@@ -4,7 +4,7 @@ import matplotlib.colors as mcolors
 import matplotlib.cm as cm
 import matplotlib.pyplot as plt
 
-def create_voxel_mesh(voxel_array, class_id, meshsize=1.0):
+def create_voxel_mesh(voxel_array, class_id, meshsize=1.0, building_id_grid=None):
     """
     Create a mesh from voxels preserving sharp edges, scaled by meshsize.
 
@@ -16,15 +16,21 @@ def create_voxel_mesh(voxel_array, class_id, meshsize=1.0):
         The ID of the class to extract.
     meshsize : float
         The real-world size of each voxel in meters, for x, y, and z.
+    building_id_grid : np.ndarray (2D), optional
+        2D grid of building IDs, shape (X, Y). Used when class_id=-3 (buildings).
 
     Returns
     -------
     mesh : trimesh.Trimesh or None
         The resulting mesh for the given class_id (or None if no voxels).
+        If class_id=-3, mesh.metadata['building_id'] contains building IDs.
     """
     # Find voxels of the current class
     voxel_coords = np.argwhere(voxel_array == class_id)
 
+    if building_id_grid is not None:
+        building_id_grid_flipud = np.flipud(building_id_grid)
+
     if len(voxel_coords) == 0:
         return None
 
@@ -57,8 +63,14 @@ def create_voxel_mesh(voxel_array, class_id, meshsize=1.0):
     vertices = []
     faces = []
     face_normals_list = []
+    building_ids = []  # List to store building IDs for each face
 
     for x, y, z in voxel_coords:
+        # For buildings, get the building ID from the grid
+        building_id = None
+        if class_id == -3 and building_id_grid is not None:
+            building_id = building_id_grid_flipud[x, y]
+
         # Check each face of the current voxel
         adjacent_coords = [
             (x, y, z+1),  # Front
@@ -95,6 +107,10 @@ def create_voxel_mesh(voxel_array, class_id, meshsize=1.0):
                 ])
                 # Add face normals for both triangles
                 face_normals_list.extend([face_normals[face_idx], face_normals[face_idx]])
+
+                # Store building ID for both triangles if this is a building
+                if class_id == -3 and building_id_grid is not None:
+                    building_ids.extend([building_id, building_id])
 
     if not vertices:
         return None
@@ -112,6 +128,10 @@ def create_voxel_mesh(voxel_array, class_id, meshsize=1.0):
 
     # Merge vertices that are at the same position
     mesh.merge_vertices()
+
+    # Add building IDs as metadata for buildings
+    if class_id == -3 and building_id_grid is not None and building_ids:
+        mesh.metadata = {'building_id': np.array(building_ids)}
 
     return mesh
 
```
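With the new `building_id_grid` parameter, the building mesh (`class_id=-3`) carries one building ID per triangle face in `mesh.metadata['building_id']`. A hypothetical sketch follows; the array shapes and ID values are invented, and note that the grid is flipped vertically (`np.flipud`) before indexing.

```python
import numpy as np
from voxcity.geoprocessor.mesh import create_voxel_mesh

voxel_array = np.zeros((20, 20, 10), dtype=np.int32)
voxel_array[5:8, 5:8, 0:4] = -3                 # a small building volume (class -3)

building_id_grid = np.zeros((20, 20), dtype=np.int32)
building_id_grid[12:15, 5:8] = 42               # footprint of building 42; rows 12:15 map to
                                                # voxel x indices 5:8 after the internal np.flipud

mesh = create_voxel_mesh(voxel_array, class_id=-3, meshsize=2.0,
                         building_id_grid=building_id_grid)
if mesh is not None:
    print(np.unique(mesh.metadata['building_id']))   # expected: [42]
```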
voxcity/geoprocessor/polygon.py
CHANGED
```diff
@@ -19,6 +19,7 @@ from pyproj import Transformer, CRS
 import rasterio
 from rasterio.mask import mask
 import copy
+from rtree import index
 
 from .utils import validate_polygon_coordinates
 
@@ -794,4 +795,97 @@ def get_buildings_in_drawn_polygon(building_gdf, drawn_polygon_vertices,
     else:
         raise ValueError("operation must be 'intersect' or 'within'")
 
-    return included_building_ids
+    return included_building_ids
+
+def process_building_footprints_by_overlap(filtered_gdf, overlap_threshold=0.5):
+    """
+    Process building footprints to merge overlapping buildings.
+
+    Args:
+        filtered_gdf (geopandas.GeoDataFrame): GeoDataFrame containing building footprints
+        overlap_threshold (float): Threshold for overlap ratio (0.0-1.0) to merge buildings
+
+    Returns:
+        geopandas.GeoDataFrame: Processed GeoDataFrame with updated IDs
+    """
+    # Make a copy to avoid modifying the original
+    gdf = filtered_gdf.copy()
+
+    # Ensure 'id' column exists
+    if 'id' not in gdf.columns:
+        gdf['id'] = gdf.index
+
+    # Calculate areas and sort by area (descending)
+    gdf['area'] = gdf.geometry.area
+    gdf = gdf.sort_values(by='area', ascending=False)
+    gdf = gdf.reset_index(drop=True)
+
+    # Create spatial index for efficient querying
+    spatial_idx = index.Index()
+    for i, geom in enumerate(gdf.geometry):
+        if geom.is_valid:
+            spatial_idx.insert(i, geom.bounds)
+        else:
+            # Fix invalid geometries
+            fixed_geom = geom.buffer(0)
+            if fixed_geom.is_valid:
+                spatial_idx.insert(i, fixed_geom.bounds)
+
+    # Track ID replacements to avoid repeated processing
+    id_mapping = {}
+
+    # Process each building (skip the largest one)
+    for i in range(1, len(gdf)):
+        current_poly = gdf.iloc[i].geometry
+        current_area = gdf.iloc[i].area
+        current_id = gdf.iloc[i]['id']
+
+        # Skip if already mapped
+        if current_id in id_mapping:
+            continue
+
+        # Ensure geometry is valid
+        if not current_poly.is_valid:
+            current_poly = current_poly.buffer(0)
+            if not current_poly.is_valid:
+                continue
+
+        # Find potential overlaps with larger polygons
+        potential_overlaps = [j for j in spatial_idx.intersection(current_poly.bounds) if j < i]
+
+        for j in potential_overlaps:
+            larger_poly = gdf.iloc[j].geometry
+            larger_id = gdf.iloc[j]['id']
+
+            # Skip if already processed
+            if larger_id in id_mapping:
+                larger_id = id_mapping[larger_id]
+
+            # Ensure geometry is valid
+            if not larger_poly.is_valid:
+                larger_poly = larger_poly.buffer(0)
+                if not larger_poly.is_valid:
+                    continue
+
+            try:
+                # Calculate overlap
+                if current_poly.intersects(larger_poly):
+                    overlap = current_poly.intersection(larger_poly)
+                    overlap_ratio = overlap.area / current_area
+
+                    # Replace ID if overlap exceeds threshold
+                    if overlap_ratio > overlap_threshold:
+                        id_mapping[current_id] = larger_id
+                        gdf.at[i, 'id'] = larger_id
+                        break  # Stop at first significant overlap
+            except (GEOSException, ValueError) as e:
+                # Handle geometry errors gracefully
+                continue
+
+    # Propagate ID changes through the original DataFrame
+    for i, row in filtered_gdf.iterrows():
+        orig_id = row.get('id')
+        if orig_id in id_mapping:
+            filtered_gdf.at[i, 'id'] = id_mapping[orig_id]
+
+    return filtered_gdf
```
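A usage sketch for the new helper on a toy GeoDataFrame (not from the package): the `id` column name and the 0.5 default follow the code above, while the geometries and CRS are made up. Footprints that overlap a larger footprint by more than the threshold inherit that footprint's `id`.

```python
import geopandas as gpd
from shapely.geometry import box
from voxcity.geoprocessor.polygon import process_building_footprints_by_overlap

gdf = gpd.GeoDataFrame(
    {
        "id": [1, 2, 3],
        "geometry": [
            box(0, 0, 10, 10),    # large footprint
            box(1, 1, 4, 4),      # fully inside the large one -> overlap ratio 1.0
            box(20, 20, 25, 25),  # disjoint footprint, keeps its own id
        ],
    },
    crs="EPSG:3857",
)

merged = process_building_footprints_by_overlap(gdf, overlap_threshold=0.5)
print(merged["id"].tolist())  # expected: [1, 1, 3]
```

Sorting by descending area and querying only earlier (larger) entries in the rtree index means each smaller footprint is merged into at most one larger footprint, and the largest footprint is never reassigned.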