voxcity-0.6.26-py3-none-any.whl → voxcity-1.0.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- voxcity/__init__.py +10 -4
- voxcity/downloader/__init__.py +2 -1
- voxcity/downloader/gba.py +210 -0
- voxcity/downloader/gee.py +5 -1
- voxcity/downloader/mbfp.py +1 -1
- voxcity/downloader/oemj.py +80 -8
- voxcity/downloader/utils.py +73 -73
- voxcity/errors.py +30 -0
- voxcity/exporter/__init__.py +9 -1
- voxcity/exporter/cityles.py +129 -34
- voxcity/exporter/envimet.py +51 -26
- voxcity/exporter/magicavoxel.py +42 -5
- voxcity/exporter/netcdf.py +27 -0
- voxcity/exporter/obj.py +103 -28
- voxcity/generator/__init__.py +47 -0
- voxcity/generator/api.py +721 -0
- voxcity/generator/grids.py +381 -0
- voxcity/generator/io.py +94 -0
- voxcity/generator/pipeline.py +282 -0
- voxcity/generator/update.py +429 -0
- voxcity/generator/voxelizer.py +392 -0
- voxcity/geoprocessor/__init__.py +75 -6
- voxcity/geoprocessor/conversion.py +153 -0
- voxcity/geoprocessor/draw.py +1488 -1169
- voxcity/geoprocessor/heights.py +199 -0
- voxcity/geoprocessor/io.py +101 -0
- voxcity/geoprocessor/merge_utils.py +91 -0
- voxcity/geoprocessor/mesh.py +26 -10
- voxcity/geoprocessor/network.py +35 -6
- voxcity/geoprocessor/overlap.py +84 -0
- voxcity/geoprocessor/raster/__init__.py +82 -0
- voxcity/geoprocessor/raster/buildings.py +435 -0
- voxcity/geoprocessor/raster/canopy.py +258 -0
- voxcity/geoprocessor/raster/core.py +150 -0
- voxcity/geoprocessor/raster/export.py +93 -0
- voxcity/geoprocessor/raster/landcover.py +159 -0
- voxcity/geoprocessor/raster/raster.py +110 -0
- voxcity/geoprocessor/selection.py +85 -0
- voxcity/geoprocessor/utils.py +824 -820
- voxcity/models.py +113 -0
- voxcity/simulator/common/__init__.py +22 -0
- voxcity/simulator/common/geometry.py +98 -0
- voxcity/simulator/common/raytracing.py +450 -0
- voxcity/simulator/solar/__init__.py +66 -0
- voxcity/simulator/solar/integration.py +336 -0
- voxcity/simulator/solar/kernels.py +62 -0
- voxcity/simulator/solar/radiation.py +648 -0
- voxcity/simulator/solar/sky.py +668 -0
- voxcity/simulator/solar/temporal.py +792 -0
- voxcity/simulator/view.py +36 -2286
- voxcity/simulator/visibility/__init__.py +29 -0
- voxcity/simulator/visibility/landmark.py +392 -0
- voxcity/simulator/visibility/view.py +508 -0
- voxcity/utils/__init__.py +11 -0
- voxcity/utils/classes.py +194 -0
- voxcity/utils/lc.py +80 -39
- voxcity/utils/logging.py +61 -0
- voxcity/utils/orientation.py +51 -0
- voxcity/utils/shape.py +230 -0
- voxcity/utils/weather/__init__.py +26 -0
- voxcity/utils/weather/epw.py +146 -0
- voxcity/utils/weather/files.py +36 -0
- voxcity/utils/weather/onebuilding.py +486 -0
- voxcity/visualizer/__init__.py +24 -0
- voxcity/visualizer/builder.py +43 -0
- voxcity/visualizer/grids.py +141 -0
- voxcity/visualizer/maps.py +187 -0
- voxcity/visualizer/palette.py +228 -0
- voxcity/visualizer/renderer.py +1145 -0
- {voxcity-0.6.26.dist-info → voxcity-1.0.2.dist-info}/METADATA +162 -48
- voxcity-1.0.2.dist-info/RECORD +81 -0
- voxcity/generator.py +0 -1302
- voxcity/geoprocessor/grid.py +0 -1739
- voxcity/geoprocessor/polygon.py +0 -1344
- voxcity/simulator/solar.py +0 -2339
- voxcity/utils/visualization.py +0 -2849
- voxcity/utils/weather.py +0 -1038
- voxcity-0.6.26.dist-info/RECORD +0 -38
- {voxcity-0.6.26.dist-info → voxcity-1.0.2.dist-info}/WHEEL +0 -0
- {voxcity-0.6.26.dist-info → voxcity-1.0.2.dist-info}/licenses/AUTHORS.rst +0 -0
- {voxcity-0.6.26.dist-info → voxcity-1.0.2.dist-info}/licenses/LICENSE +0 -0
voxcity/geoprocessor/heights.py ADDED
@@ -0,0 +1,199 @@
+"""
+Height extraction and complement utilities for building footprints.
+"""
+
+from typing import List, Dict
+
+import numpy as np
+import geopandas as gpd
+import pandas as pd
+from shapely.errors import GEOSException
+from shapely.geometry import shape
+from rtree import index
+import rasterio.mask  # also binds rasterio; required for rasterio.mask.mask below
+from pyproj import Transformer, CRS
+
+
+def extract_building_heights_from_gdf(gdf_0: gpd.GeoDataFrame, gdf_1: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
+    """
+    Extract building heights from one GeoDataFrame and apply them to another based on spatial overlap.
+    """
+    gdf_primary = gdf_0.copy()
+    gdf_ref = gdf_1.copy()
+
+    if 'height' not in gdf_primary.columns:
+        gdf_primary['height'] = 0.0
+    if 'height' not in gdf_ref.columns:
+        gdf_ref['height'] = 0.0
+
+    count_0 = 0
+    count_1 = 0
+    count_2 = 0
+
+    spatial_index = index.Index()
+    for i, geom in enumerate(gdf_ref.geometry):
+        if geom.is_valid:
+            spatial_index.insert(i, geom.bounds)
+
+    for idx_primary, row in gdf_primary.iterrows():
+        if row['height'] <= 0 or pd.isna(row['height']):
+            count_0 += 1
+            geom = row.geometry
+
+            overlapping_height_area = 0
+            overlapping_area = 0
+
+            potential_matches = list(spatial_index.intersection(geom.bounds))
+
+            for ref_idx in potential_matches:
+                if ref_idx >= len(gdf_ref):
+                    continue
+
+                ref_row = gdf_ref.iloc[ref_idx]
+                try:
+                    if geom.intersects(ref_row.geometry):
+                        overlap_area = geom.intersection(ref_row.geometry).area
+                        overlapping_height_area += ref_row['height'] * overlap_area
+                        overlapping_area += overlap_area
+                except GEOSException:
+                    try:
+                        fixed_ref_geom = ref_row.geometry.buffer(0)
+                        if geom.intersects(fixed_ref_geom):
+                            overlap_area = geom.intersection(fixed_ref_geom).area
+                            overlapping_height_area += ref_row['height'] * overlap_area
+                            overlapping_area += overlap_area
+                    except Exception:
+                        print("Failed to fix polygon")
+                        continue
+
+            if overlapping_height_area > 0:
+                count_1 += 1
+                new_height = overlapping_height_area / overlapping_area
+                gdf_primary.at[idx_primary, 'height'] = new_height
+            else:
+                count_2 += 1
+                gdf_primary.at[idx_primary, 'height'] = np.nan
+
+    if count_0 > 0:
+        print(f"For {count_1} of these building footprints without height, values from the complementary source were assigned.")
+        print(f"For {count_2} of these building footprints without height, no data exists in the complementary data.")
+
+    return gdf_primary
+
+
+def complement_building_heights_from_gdf(gdf_0, gdf_1, primary_id='id', ref_id='id'):
+    """
+    Vectorized approach with GeoPandas to compute weighted heights and add non-intersecting buildings.
+    Returns a single combined GeoDataFrame.
+    """
+    gdf_primary = gdf_0.copy()
+    gdf_ref = gdf_1.copy()
+
+    if 'height' not in gdf_primary.columns:
+        gdf_primary['height'] = 0.0
+    if 'height' not in gdf_ref.columns:
+        gdf_ref['height'] = 0.0
+
+    gdf_primary = gdf_primary.rename(columns={'height': 'height_primary'})
+    gdf_ref = gdf_ref.rename(columns={'height': 'height_ref'})
+
+    intersect_gdf = gpd.overlay(gdf_primary, gdf_ref, how='intersection')
+    intersect_gdf['intersect_area'] = intersect_gdf.area
+    intersect_gdf['height_area'] = intersect_gdf['height_ref'] * intersect_gdf['intersect_area']
+
+    group_cols = {
+        'height_area': 'sum',
+        'intersect_area': 'sum'
+    }
+    grouped = intersect_gdf.groupby(f'{primary_id}_1').agg(group_cols)
+    grouped['weighted_height'] = grouped['height_area'] / grouped['intersect_area']
+
+    gdf_primary = gdf_primary.merge(grouped['weighted_height'],
+                                    left_on=primary_id,
+                                    right_index=True,
+                                    how='left')
+
+    zero_or_nan_mask = (gdf_primary['height_primary'] == 0) | (gdf_primary['height_primary'].isna())
+    valid_weighted_height_mask = zero_or_nan_mask & gdf_primary['weighted_height'].notna()
+    gdf_primary.loc[valid_weighted_height_mask, 'height_primary'] = gdf_primary.loc[valid_weighted_height_mask, 'weighted_height']
+    gdf_primary['height_primary'] = gdf_primary['height_primary'].fillna(np.nan)
+
+    sjoin_gdf = gpd.sjoin(gdf_ref, gdf_primary, how='left', predicate='intersects')
+    non_intersect_mask = sjoin_gdf[f'{primary_id}_right'].isna()
+    non_intersect_ids = sjoin_gdf[non_intersect_mask][f'{ref_id}_left'].unique()
+    gdf_ref_non_intersect = gdf_ref[gdf_ref[ref_id].isin(non_intersect_ids)]
+    gdf_ref_non_intersect = gdf_ref_non_intersect.rename(columns={'height_ref': 'height'})
+
+    gdf_primary = gdf_primary.rename(columns={'height_primary': 'height'})
+    if 'weighted_height' in gdf_primary.columns:
+        gdf_primary.drop(columns='weighted_height', inplace=True)
+
+    final_gdf = pd.concat([gdf_primary, gdf_ref_non_intersect], ignore_index=True)
+
+    count_total = len(gdf_primary)
+    count_0 = len(gdf_primary[zero_or_nan_mask])
+    count_1 = len(gdf_primary[valid_weighted_height_mask])
+    count_2 = count_0 - count_1
+    count_3 = len(gdf_ref_non_intersect)
+    count_4 = count_3
+    height_mask = gdf_ref_non_intersect['height'].notna() & (gdf_ref_non_intersect['height'] > 0)
+    count_5 = len(gdf_ref_non_intersect[height_mask])
+    count_6 = count_4 - count_5
+    final_height_mask = final_gdf['height'].notna() & (final_gdf['height'] > 0)
+    count_7 = len(final_gdf[final_height_mask])
+    count_8 = len(final_gdf)
+
+    if count_0 > 0:
+        print(f"{count_0} of the total {count_total} building footprints from the base data source did not have height data.")
+        print(f"For {count_1} of these building footprints without height, values from complementary data were assigned.")
+        print(f"For the remaining {count_2}, no data exists in the complementary data.")
+        print(f"Footprints of {count_3} buildings were added from the complementary source.")
+        print(f"Of these {count_4} additional building footprints, {count_5} had height data while {count_6} had no height data.")
+        print(f"In total, {count_7} buildings had height data out of {count_8} total building footprints.")
+
+    return final_gdf
+
+
+def extract_building_heights_from_geotiff(geotiff_path, gdf):
+    """
+    Extract building heights from a GeoTIFF raster for building footprints in a GeoDataFrame.
+    """
+    gdf = gdf.copy()
+
+    count_0 = 0
+    count_1 = 0
+    count_2 = 0
+
+    with rasterio.open(geotiff_path) as src:
+        transformer = Transformer.from_crs(CRS.from_epsg(4326), src.crs, always_xy=True)
+
+        mask_condition = (gdf.geometry.geom_type == 'Polygon') & ((gdf.get('height', 0) <= 0) | gdf.get('height').isna())
+        buildings_to_process = gdf[mask_condition]
+        count_0 = len(buildings_to_process)
+
+        for idx, row in buildings_to_process.iterrows():
+            coords = list(row.geometry.exterior.coords)
+            transformed_coords = [transformer.transform(lon, lat) for lon, lat in coords]
+            polygon = shape({"type": "Polygon", "coordinates": [transformed_coords]})
+
+            try:
+                masked_data, _ = rasterio.mask.mask(src, [polygon], crop=True, all_touched=True)
+                heights = masked_data[0][masked_data[0] != src.nodata]
+                if len(heights) > 0:
+                    count_1 += 1
+                    gdf.at[idx, 'height'] = float(np.mean(heights))
+                else:
+                    count_2 += 1
+                    gdf.at[idx, 'height'] = np.nan
+            except ValueError as e:
+                print(f"Error processing building at index {idx}. Error: {str(e)}")
+                gdf.at[idx, 'height'] = None
+
+    if count_0 > 0:
+        print(f"{count_0} of the total {len(gdf)} building footprints from OSM did not have height data.")
+        print(f"For {count_1} of these building footprints without height, values from complementary data were assigned.")
+        print(f"For {count_2} of these building footprints without height, no data exists in the complementary data.")
+
+    return gdf
+
+
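The overlap-weighted fill in this new module is easiest to see on toy data. A minimal usage sketch follows (the geometries and expected values are illustrative assumptions; the import path and function signatures follow this diff):

    # Sketch: fill a missing height from a complementary footprint layer.
    import geopandas as gpd
    from shapely.geometry import box
    from voxcity.geoprocessor.heights import (
        extract_building_heights_from_gdf,
        complement_building_heights_from_gdf,
    )

    # Primary footprints: one building without height data (height <= 0).
    primary = gpd.GeoDataFrame({"id": [0], "height": [0.0]},
                               geometry=[box(0, 0, 10, 10)], crs="EPSG:4326")
    # Reference footprints: an overlapping building with a known height.
    ref = gpd.GeoDataFrame({"id": [100], "height": [12.0]},
                           geometry=[box(5, 0, 15, 10)], crs="EPSG:4326")

    # Overlap-weighted assignment: the single overlap contributes
    # (12.0 * overlap_area) / overlap_area = 12.0 for the missing height.
    filled = extract_building_heights_from_gdf(primary, ref)
    print(filled["height"].tolist())  # [12.0]

    # complement_* additionally appends reference footprints that do not
    # intersect any primary footprint, returning one combined GeoDataFrame.
    combined = complement_building_heights_from_gdf(primary, ref, primary_id="id", ref_id="id")
    print(len(combined))  # 1: the reference footprint overlaps, so nothing is appended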
voxcity/geoprocessor/io.py ADDED
@@ -0,0 +1,101 @@
+"""
+I/O helpers for reading/writing vector data (GPKG, gzipped GeoJSON lines) and
+saving FeatureCollections.
+"""
+
+import copy
+import gzip
+import json
+from typing import List
+
+import geopandas as gpd
+
+from .conversion import filter_and_convert_gdf_to_geojson
+
+
+def get_geojson_from_gpkg(gpkg_path, rectangle_vertices):
+    """
+    Read a GeoPackage file and convert it to GeoJSON features within a bounding rectangle.
+    """
+    print(f"Opening GPKG file: {gpkg_path}")
+    gdf = gpd.read_file(gpkg_path)
+    geojson = filter_and_convert_gdf_to_geojson(gdf, rectangle_vertices)
+    return geojson
+
+
+def get_gdf_from_gpkg(gpkg_path, rectangle_vertices):
+    """
+    Read a GeoPackage file and convert it to a GeoDataFrame with consistent CRS.
+
+    Note: rectangle_vertices is currently unused but kept for signature compatibility.
+    """
+    print(f"Opening GPKG file: {gpkg_path}")
+    gdf = gpd.read_file(gpkg_path)
+
+    if gdf.crs is None:
+        gdf.set_crs(epsg=4326, inplace=True)
+    elif gdf.crs != "EPSG:4326":
+        gdf = gdf.to_crs(epsg=4326)
+
+    gdf['id'] = gdf.index
+    return gdf
+
+
+def load_gdf_from_multiple_gz(file_paths):
+    """
+    Load GeoJSON features from multiple gzipped files into a single GeoDataFrame.
+    Each line in each file must be a single GeoJSON Feature.
+    """
+    geojson_objects = []
+
+    for gz_file_path in file_paths:
+        with gzip.open(gz_file_path, 'rt', encoding='utf-8') as file:
+            for line in file:
+                try:
+                    data = json.loads(line)
+                    if 'properties' in data and 'height' in data['properties']:
+                        if data['properties']['height'] is None:
+                            data['properties']['height'] = 0
+                    else:
+                        if 'properties' not in data:
+                            data['properties'] = {}
+                        data['properties']['height'] = 0
+                    geojson_objects.append(data)
+                except json.JSONDecodeError as e:
+                    print(f"Skipping line in {gz_file_path} due to JSONDecodeError: {e}")
+
+    gdf = gpd.GeoDataFrame.from_features(geojson_objects)
+    gdf.set_crs(epsg=4326, inplace=True)
+    return gdf
+
+
+def swap_coordinates(features):
+    """
+    Swap coordinate ordering in GeoJSON features from (lat, lon) to (lon, lat).
+    Modifies the input features in-place.
+    """
+    for feature in features:
+        if feature['geometry']['type'] == 'Polygon':
+            new_coords = [[[lon, lat] for lat, lon in polygon] for polygon in feature['geometry']['coordinates']]
+            feature['geometry']['coordinates'] = new_coords
+        elif feature['geometry']['type'] == 'MultiPolygon':
+            new_coords = [[[[lon, lat] for lat, lon in polygon] for polygon in multipolygon] for multipolygon in feature['geometry']['coordinates']]
+            feature['geometry']['coordinates'] = new_coords
+
+
+def save_geojson(features, save_path):
+    """
+    Save GeoJSON features to a file with coordinate swapping and pretty printing.
+    """
+    geojson_features = copy.deepcopy(features)
+    swap_coordinates(geojson_features)
+
+    geojson = {
+        "type": "FeatureCollection",
+        "features": geojson_features
+    }
+
+    with open(save_path, 'w') as f:
+        json.dump(geojson, f, indent=2)
+
+
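A short round-trip sketch for these helpers (file names are placeholders; the import path and the None-to-0 height coercion follow this diff):

    # Sketch: write one line-delimited GeoJSON feature to a gzipped file,
    # load it back, then save a FeatureCollection with coordinate swapping.
    import gzip, json
    from voxcity.geoprocessor.io import load_gdf_from_multiple_gz, save_geojson

    feature = {
        "type": "Feature",
        "properties": {"height": None},  # None heights are coerced to 0 on load
        "geometry": {"type": "Polygon",
                     "coordinates": [[[103.0, 1.0], [103.1, 1.0], [103.1, 1.1], [103.0, 1.0]]]},
    }
    with gzip.open("tile_a.gz", "wt", encoding="utf-8") as f:
        f.write(json.dumps(feature) + "\n")

    gdf = load_gdf_from_multiple_gz(["tile_a.gz"])
    print(len(gdf), gdf.crs, gdf["height"].tolist())  # 1 EPSG:4326 [0]

    # save_geojson expects (lat, lon) input coordinates and writes the file
    # in GeoJSON's usual (lon, lat) ordering, pretty-printed with indent=2.
    swapped = {
        "type": "Feature",
        "properties": {"height": 10.0},
        "geometry": {"type": "Polygon",
                     "coordinates": [[[1.0, 103.0], [1.0, 103.1], [1.1, 103.1], [1.0, 103.0]]]},
    }
    save_geojson([swapped], "buildings.geojson")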
voxcity/geoprocessor/merge_utils.py ADDED
@@ -0,0 +1,91 @@
+"""
+Utilities to merge GeoDataFrames while resolving ID conflicts.
+"""
+
+import pandas as pd
+
+
+def _merge_gdfs_with_missing_columns(gdf_1, gdf_2):
+    """
+    Helper to merge two GeoDataFrames while handling missing columns by filling with None.
+    """
+    columns_1 = set(gdf_1.columns)
+    columns_2 = set(gdf_2.columns)
+
+    only_in_1 = columns_1 - columns_2
+    only_in_2 = columns_2 - columns_1
+
+    for col in only_in_2:
+        gdf_1[col] = None
+    for col in only_in_1:
+        gdf_2[col] = None
+
+    all_columns = sorted(list(columns_1.union(columns_2)))
+    gdf_1 = gdf_1[all_columns]
+    gdf_2 = gdf_2[all_columns]
+
+    merged_gdf = pd.concat([gdf_1, gdf_2], ignore_index=True)
+    return merged_gdf
+
+
+def merge_gdfs_with_id_conflict_resolution(gdf_1, gdf_2, id_columns=['id', 'building_id']):
+    """
+    Merge two GeoDataFrames while resolving ID conflicts by modifying IDs in the second GeoDataFrame.
+    """
+    gdf_primary = gdf_1.copy()
+    gdf_secondary = gdf_2.copy()
+
+    missing_columns = []
+    for col in id_columns:
+        if col not in gdf_primary.columns:
+            missing_columns.append(f"'{col}' missing from gdf_1")
+        if col not in gdf_secondary.columns:
+            missing_columns.append(f"'{col}' missing from gdf_2")
+
+    if missing_columns:
+        print(f"Warning: Missing ID columns: {', '.join(missing_columns)}")
+        id_columns = [col for col in id_columns if col in gdf_primary.columns and col in gdf_secondary.columns]
+
+    if not id_columns:
+        print("Warning: No valid ID columns found. Merging without ID conflict resolution.")
+        merged_gdf = _merge_gdfs_with_missing_columns(gdf_primary, gdf_secondary)
+        return merged_gdf
+
+    max_ids = {}
+    for col in id_columns:
+        if gdf_primary[col].dtype in ['int64', 'int32', 'float64', 'float32']:
+            max_ids[col] = gdf_primary[col].max()
+        else:
+            max_ids[col] = len(gdf_primary)
+
+    next_ids = {col: max_ids[col] + 1 for col in id_columns}
+    modified_buildings = 0
+
+    for idx, row in gdf_secondary.iterrows():
+        needs_new_ids = False
+        for col in id_columns:
+            current_id = row[col]
+            if current_id in gdf_primary[col].values:
+                needs_new_ids = True
+                break
+        if needs_new_ids:
+            modified_buildings += 1
+            for col in id_columns:
+                new_id = next_ids[col]
+                gdf_secondary.at[idx, col] = new_id
+                next_ids[col] += 1
+
+    merged_gdf = _merge_gdfs_with_missing_columns(gdf_primary, gdf_secondary)
+
+    total_buildings = len(merged_gdf)
+    primary_buildings = len(gdf_primary)
+    secondary_buildings = len(gdf_secondary)
+
+    print(f"Merged {primary_buildings} buildings from primary dataset with {secondary_buildings} buildings from secondary dataset.")
+    print(f"Total buildings in merged dataset: {total_buildings}")
+    if modified_buildings > 0:
+        print(f"Modified IDs for {modified_buildings} buildings in secondary dataset to resolve conflicts.")
+
+    return merged_gdf
+
+
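A minimal sketch of the conflict-resolution behavior on toy data (values chosen for illustration; import path per this diff):

    # Sketch: merging two footprint sets whose 'id' values collide.
    # Conflicting IDs in the second set are reassigned starting from
    # max(primary id) + 1.
    import geopandas as gpd
    from shapely.geometry import box
    from voxcity.geoprocessor.merge_utils import merge_gdfs_with_id_conflict_resolution

    a = gpd.GeoDataFrame({"id": [1, 2], "building_id": [1, 2]},
                         geometry=[box(0, 0, 1, 1), box(2, 0, 3, 1)], crs="EPSG:4326")
    b = gpd.GeoDataFrame({"id": [2, 9], "building_id": [2, 9]},
                         geometry=[box(4, 0, 5, 1), box(6, 0, 7, 1)], crs="EPSG:4326")

    merged = merge_gdfs_with_id_conflict_resolution(a, b)
    # The conflicting id 2 in `b` becomes 3 (max primary id 2, plus 1);
    # id 9 is untouched because it does not appear in `a`.
    print(sorted(merged["id"].tolist()))  # [1, 2, 3, 9]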
voxcity/geoprocessor/mesh.py CHANGED
@@ -1,9 +1,19 @@
+"""Mesh generation utilities for voxel and 2D grid visualization.
+
+Orientation contract:
+- Mesh builders expect 2D inputs (e.g., simulation grids, building_id grids)
+  to be provided in north_up orientation (row 0 = north/top) with columns
+  increasing eastward (col 0 = west/left). Any internal flips are
+  implementation details to match mesh coordinates.
+"""
+
 import numpy as np
 import os
 import trimesh
 import matplotlib.colors as mcolors
 import matplotlib.cm as cm
 import matplotlib.pyplot as plt
+from ..utils.orientation import ensure_orientation, ORIENTATION_NORTH_UP, ORIENTATION_SOUTH_UP
 
 def create_voxel_mesh(voxel_array, class_id, meshsize=1.0, building_id_grid=None, mesh_type=None):
     """
@@ -38,9 +48,9 @@ def create_voxel_mesh(voxel_array, class_id, meshsize=1.0, building_id_grid=None
     mesh_type : str, optional
         Type of mesh to create, controlling which faces are included:
         - None (default): create faces at boundaries between different classes
-        - 'building_solar': only create faces at boundaries between
-          buildings (-3) and either void (0) or trees (-2). Useful for
-          solar analysis where only exposed surfaces matter.
+        - 'building_solar' or 'open_air': only create faces at boundaries between
+          buildings (-3) and either void (0) or trees (-2). Useful for
+          solar analysis where only exposed surfaces matter.
 
     Returns
     -------
@@ -81,7 +91,11 @@ def create_voxel_mesh(voxel_array, class_id, meshsize=1.0, building_id_grid=None
     voxel_coords = np.argwhere(voxel_array == class_id)
 
     if building_id_grid is not None:
-        building_id_grid_flipud = np.flipud(building_id_grid)
+        building_id_grid_flipud = ensure_orientation(
+            building_id_grid,
+            ORIENTATION_NORTH_UP,
+            ORIENTATION_SOUTH_UP,
+        )
 
     if len(voxel_coords) == 0:
         return None
@@ -142,8 +156,8 @@ def create_voxel_mesh(voxel_array, class_id, meshsize=1.0, building_id_grid=None
             else:
                 adj_value = voxel_array[adj_coord]
 
-                if class_id == -3 and mesh_type == 'building_solar':
-                    # Only create faces at boundaries with void (0) or trees (-2)
+                if class_id == -3 and mesh_type in ('building_solar', 'open_air'):
+                    # Only create faces at boundaries with void (0) or trees (-2)
                     is_boundary = (adj_value == 0 or adj_value == -2)
                 else:
                     # Default behavior - create faces at any class change
@@ -278,9 +292,9 @@ def create_sim_surface_mesh(sim_grid, dem_grid,
     - The mesh is positioned at dem_grid + z_offset to float above the terrain
     - Face colors are interpolated from the colormap based on sim_grid values
     """
-    # Flip arrays vertically
-    sim_grid_flipped = np.flipud(sim_grid)
-    dem_grid_flipped = np.flipud(dem_grid)
+    # Flip arrays vertically using orientation helper
+    sim_grid_flipped = ensure_orientation(sim_grid, ORIENTATION_NORTH_UP, ORIENTATION_SOUTH_UP)
+    dem_grid_flipped = ensure_orientation(dem_grid, ORIENTATION_NORTH_UP, ORIENTATION_SOUTH_UP)
 
     # Identify valid (non-NaN) values
     valid_mask = ~np.isnan(sim_grid_flipped)
@@ -318,7 +332,9 @@ def create_sim_surface_mesh(sim_grid, dem_grid,
         if np.isnan(val):
             continue
 
-        z_base = dem_grid_flipped[x, y] + z_offset
+        # Match voxel ground rounding: int(dem/mesh + 0.5) + 1 == int(dem/mesh + 1.5)
+        # Then lower the plane by one mesh layer as requested
+        z_base = meshsize * int(dem_grid_flipped[x, y] / meshsize + 1.5) + z_offset - meshsize
 
         # 4 corners in (x,y)*meshsize
         v0 = [ x * meshsize, y * meshsize, z_base ]
voxcity/geoprocessor/network.py CHANGED
@@ -15,7 +15,7 @@ import pyproj
 from pyproj import Transformer
 from joblib import Parallel, delayed
 
-from .grid import grid_to_geodataframe
+from .raster import grid_to_geodataframe
 
 def vectorized_edge_values(G, polygons_gdf, value_col='value'):
     """
@@ -99,8 +99,9 @@
 
 def get_network_values(
     grid,
-    rectangle_vertices,
-    meshsize,
+    rectangle_vertices=None,
+    meshsize=None,
+    voxcity=None,
     value_name='value',
     **kwargs
 ):
@@ -115,10 +116,13 @@ def get_network_values(
     ----------
     grid : array-like or geopandas.GeoDataFrame
        Either a grid array of values or a pre-built GeoDataFrame with polygons and values.
-    rectangle_vertices : list of tuples
+    rectangle_vertices : list of tuples, optional
        List of (lon, lat) coordinates defining the bounding rectangle in EPSG:4326.
-    meshsize : float
-        Size of each grid cell (used only if grid is array-like).
+        Optional if `voxcity` is provided.
+    meshsize : float, optional
+        Size of each grid cell (used only if grid is array-like). Optional if `voxcity` is provided.
+    voxcity : VoxCity, optional
+        VoxCity object from which `rectangle_vertices` and `meshsize` will be derived if not supplied.
     value_name : str, default='value'
         Name to use for the edge attribute storing computed values.
     **kwargs : dict
@@ -162,6 +166,31 @@ def get_network_values(
     }
     settings = {**defaults, **kwargs}
 
+    # Derive geometry parameters from VoxCity if supplied (inline to avoid extra helper)
+    if voxcity is not None:
+        derived_rv = None
+        derived_meshsize = None
+        # Try extras['rectangle_vertices'] when available
+        if hasattr(voxcity, "extras") and isinstance(voxcity.extras, dict):
+            derived_rv = voxcity.extras.get("rectangle_vertices")
+        # Pull meshsize and bounds from voxels.meta
+        voxels = getattr(voxcity, "voxels", None)
+        meta = getattr(voxels, "meta", None) if voxels is not None else None
+        if meta is not None:
+            derived_meshsize = getattr(meta, "meshsize", None)
+            if derived_rv is None:
+                bounds = getattr(meta, "bounds", None)
+                if bounds is not None:
+                    west, south, east, north = bounds
+                    derived_rv = [(west, south), (west, north), (east, north), (east, south)]
+        if rectangle_vertices is None:
+            rectangle_vertices = derived_rv
+        if meshsize is None:
+            meshsize = derived_meshsize
+
+    if rectangle_vertices is None:
+        raise ValueError("rectangle_vertices must be provided, either directly or via `voxcity`.")
+
     # Build polygons GDF if needed
     polygons_gdf = (grid if isinstance(grid, gpd.GeoDataFrame)
                     else grid_to_geodataframe(grid, rectangle_vertices, meshsize))
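With this change, get_network_values can derive its geometry from a VoxCity object instead of explicit arguments. A hedged sketch of that fallback, using stand-in dataclasses that mirror only the attributes the new code reads (extras, voxels.meta.meshsize, voxels.meta.bounds); these are not the real VoxCity model:

    # Sketch: how the voxcity fallback derives rectangle_vertices.
    from dataclasses import dataclass, field

    @dataclass
    class _Meta:
        meshsize: float = 5.0
        bounds: tuple = (103.80, 1.28, 103.82, 1.30)  # (west, south, east, north)

    @dataclass
    class _Voxels:
        meta: _Meta = field(default_factory=_Meta)

    @dataclass
    class _VoxCityStandIn:
        extras: dict = field(default_factory=dict)
        voxels: _Voxels = field(default_factory=_Voxels)

    vc = _VoxCityStandIn()
    west, south, east, north = vc.voxels.meta.bounds
    rectangle_vertices = [(west, south), (west, north), (east, north), (east, south)]
    print(rectangle_vertices)
    # [(103.8, 1.28), (103.8, 1.3), (103.82, 1.3), (103.82, 1.28)]
    # get_network_values(grid, voxcity=vc) performs exactly this derivation when
    # rectangle_vertices/meshsize are omitted, preferring extras["rectangle_vertices"]
    # when that key is present.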
voxcity/geoprocessor/overlap.py ADDED
@@ -0,0 +1,84 @@
+"""
+Utilities for processing overlaps between building footprints.
+"""
+
+from rtree import index
+from shapely.errors import GEOSException
+
+
+def process_building_footprints_by_overlap(filtered_gdf, overlap_threshold=0.5):
+    """
+    Merge overlapping buildings based on area overlap ratio, assigning the ID of the larger building
+    to smaller overlapping ones.
+    """
+    gdf = filtered_gdf.copy()
+
+    if 'id' not in gdf.columns:
+        gdf['id'] = gdf.index
+
+    if gdf.crs is None:
+        gdf_projected = gdf.copy()
+    else:
+        gdf_projected = gdf.to_crs("EPSG:3857")
+
+    gdf_projected['area'] = gdf_projected.geometry.area
+    gdf_projected = gdf_projected.sort_values(by='area', ascending=False)
+    gdf_projected = gdf_projected.reset_index(drop=True)
+
+    spatial_idx = index.Index()
+    for i, geom in enumerate(gdf_projected.geometry):
+        if geom.is_valid:
+            spatial_idx.insert(i, geom.bounds)
+        else:
+            fixed_geom = geom.buffer(0)
+            if fixed_geom.is_valid:
+                spatial_idx.insert(i, fixed_geom.bounds)
+
+    id_mapping = {}
+
+    for i in range(1, len(gdf_projected)):
+        current_poly = gdf_projected.iloc[i].geometry
+        current_area = gdf_projected.iloc[i].area
+        current_id = gdf_projected.iloc[i]['id']
+
+        if current_id in id_mapping:
+            continue
+
+        if not current_poly.is_valid:
+            current_poly = current_poly.buffer(0)
+            if not current_poly.is_valid:
+                continue
+
+        potential_overlaps = [j for j in spatial_idx.intersection(current_poly.bounds) if j < i]
+
+        for j in potential_overlaps:
+            larger_poly = gdf_projected.iloc[j].geometry
+            larger_id = gdf_projected.iloc[j]['id']
+
+            if larger_id in id_mapping:
+                larger_id = id_mapping[larger_id]
+
+            if not larger_poly.is_valid:
+                larger_poly = larger_poly.buffer(0)
+                if not larger_poly.is_valid:
+                    continue
+
+            try:
+                if current_poly.intersects(larger_poly):
+                    overlap = current_poly.intersection(larger_poly)
+                    overlap_ratio = overlap.area / current_area
+                    if overlap_ratio > overlap_threshold:
+                        id_mapping[current_id] = larger_id
+                        gdf_projected.at[i, 'id'] = larger_id
+                        break
+            except (GEOSException, ValueError):
+                continue
+
+    for i, row in filtered_gdf.iterrows():
+        orig_id = row.get('id')
+        if orig_id in id_mapping:
+            filtered_gdf.at[i, 'id'] = id_mapping[orig_id]
+
+    return filtered_gdf
+
+
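A toy demonstration of the overlap threshold (geometries and threshold chosen for illustration; import path per this diff):

    # Sketch: a smaller footprint overlapping a larger one by more than the
    # threshold inherits the larger footprint's id.
    import geopandas as gpd
    from shapely.geometry import box
    from voxcity.geoprocessor.overlap import process_building_footprints_by_overlap

    gdf = gpd.GeoDataFrame(
        {"id": [1, 2]},
        geometry=[
            box(0, 0, 10, 10),   # larger building
            box(8, 0, 12, 10),   # smaller building; half of it overlaps the larger one
        ],
        crs="EPSG:4326",
    )

    # Overlap ratio is 0.5 here: with the default threshold (0.5) nothing is
    # remapped, since the check is strictly greater-than; lowering it merges.
    merged = process_building_footprints_by_overlap(gdf, overlap_threshold=0.4)
    print(merged["id"].tolist())  # [1, 1]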