voxcity-0.6.16-py3-none-any.whl → voxcity-0.6.18-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of voxcity might be problematic.
- voxcity/downloader/osm.py +23 -7
- voxcity/downloader/overture.py +26 -1
- voxcity/exporter/__init__.py +2 -1
- voxcity/exporter/netcdf.py +211 -0
- voxcity/exporter/obj.py +538 -1
- voxcity/generator.py +102 -7
- voxcity/geoprocessor/grid.py +1738 -1732
- voxcity/utils/visualization.py +31 -0
- {voxcity-0.6.16.dist-info → voxcity-0.6.18.dist-info}/METADATA +4 -2
- {voxcity-0.6.16.dist-info → voxcity-0.6.18.dist-info}/RECORD +13 -12
- {voxcity-0.6.16.dist-info → voxcity-0.6.18.dist-info}/AUTHORS.rst +0 -0
- {voxcity-0.6.16.dist-info → voxcity-0.6.18.dist-info}/LICENSE +0 -0
- {voxcity-0.6.16.dist-info → voxcity-0.6.18.dist-info}/WHEEL +0 -0
voxcity/geoprocessor/grid.py
CHANGED
@@ -1,1733 +1,1739 @@
-"""
-This module provides functions for creating and manipulating grids of building heights, land cover, and elevation data.
-It includes functionality for:
-- Grid creation and manipulation for various data types (buildings, land cover, elevation)
-- Coordinate transformations and spatial operations
-- Data interpolation and aggregation
-- Vector to raster conversion
-"""
-
-import numpy as np
-import pandas as pd
-import os
-from shapely.geometry import Polygon, Point, MultiPolygon, box, mapping
-from scipy.ndimage import label, generate_binary_structure
-from pyproj import Geod, Transformer, CRS
-import rasterio
-from rasterio import features
-from rasterio.transform import from_bounds
-from affine import Affine
-import geopandas as gpd
-from collections import defaultdict
-from scipy.interpolate import griddata
-from shapely.errors import GEOSException
-from rtree import index
-import warnings
-
-from .utils import (
-    initialize_geod,
-    calculate_distance,
-    normalize_to_one_meter,
-    create_building_polygons,
-    convert_format_lat_lon
-)
-from ..geoprocessor.polygon import (
-    filter_buildings,
-    extract_building_heights_from_geotiff,
-    extract_building_heights_from_gdf,
-    complement_building_heights_from_gdf,
-    process_building_footprints_by_overlap
-)
-from ..utils.lc import (
-    get_class_priority,
-    create_land_cover_polygons,
-    get_dominant_class,
-)
-from ..downloader.gee import (
-    get_roi,
-    save_geotiff_open_buildings_temporal
-)
-
-def apply_operation(arr, meshsize):
-    """
-    Applies a sequence of operations to an array based on a mesh size to normalize and discretize values.
-
-    This function performs the following sequence of operations:
-    1. Divides array by mesh size to normalize values
-    2. Adds 0.5 to round values to nearest integer
-    3. Floors the result to get integer values
-    4. Scales back to original units by multiplying by mesh size
-
-    Args:
-        arr (numpy.ndarray): Input array to transform
-        meshsize (float): Size of mesh to use for calculations
-
-    Returns:
-        numpy.ndarray: Transformed array after applying operations
-
-    Example:
-        >>> arr = np.array([1.2, 2.7, 3.4])
-        >>> meshsize = 0.5
-        >>> result = apply_operation(arr, meshsize)
-    """
-    # Divide array by mesh size to normalize values
-    step1 = arr / meshsize
-    # Add 0.5 to round values to nearest integer
-    step2 = step1 + 0.5
-    # Floor to get integer values
-    step3 = np.floor(step2)
-    # Scale back to original units
-    return step3 * meshsize
-
-def translate_array(input_array, translation_dict):
-    """
-    Translates values in an array according to a dictionary mapping.
-
-    This function creates a new array where each value from the input array
-    is replaced by its corresponding value from the translation dictionary.
-    Values not found in the dictionary are replaced with empty strings.
-
-    Args:
-        input_array (numpy.ndarray): Array containing values to translate
-        translation_dict (dict): Dictionary mapping input values to output values
-
-    Returns:
-        numpy.ndarray: Array with translated values, with same shape as input array
-
-    Example:
-        >>> arr = np.array([[1, 2], [3, 4]])
-        >>> trans_dict = {1: 'A', 2: 'B', 3: 'C', 4: 'D'}
-        >>> result = translate_array(arr, trans_dict)
-        >>> # result = array([['A', 'B'], ['C', 'D']], dtype=object)
-    """
-    # Create empty array of same shape that can hold objects (e.g. strings)
-    translated_array = np.empty_like(input_array, dtype=object)
-    # Iterate through array and replace values using dictionary
-    for i in range(input_array.shape[0]):
-        for j in range(input_array.shape[1]):
-            value = input_array[i, j]
-            # Use dict.get() to handle missing keys, defaulting to empty string
-            translated_array[i, j] = translation_dict.get(value, '')
-    return translated_array
-
-def group_and_label_cells(array):
-    """
-    Convert non-zero numbers in a 2D numpy array to sequential IDs starting from 1.
-
-    This function creates a new array where all non-zero values are replaced with
-    sequential IDs (1, 2, 3, etc.) while preserving zero values. This is useful
-    for labeling distinct regions or features in a grid.
-
-    Args:
-        array (numpy.ndarray): Input 2D array with non-zero values to be labeled
-
-    Returns:
-        numpy.ndarray: Array with non-zero values converted to sequential IDs,
-            maintaining the same shape as input array
-
-    Example:
-        >>> arr = np.array([[0, 5, 5], [0, 5, 8], [0, 0, 8]])
-        >>> result = group_and_label_cells(arr)
-        >>> # result = array([[0, 1, 1], [0, 1, 2], [0, 0, 2]])
-    """
-    # Create a copy to avoid modifying input
-    result = array.copy()
-
-    # Get sorted set of unique non-zero values
-    unique_values = sorted(set(array.flatten()) - {0})
-
-    # Create mapping from original values to sequential IDs (1, 2, 3, etc)
-    value_to_id = {value: idx + 1 for idx, value in enumerate(unique_values)}
-
-    # Replace each non-zero value with its new sequential ID
-    for value in unique_values:
-        result[array == value] = value_to_id[value]
-
-    return result
-
-def process_grid_optimized(grid_bi, dem_grid):
-    """
-    Optimized version that computes per-building averages without allocating
-    huge arrays when building IDs are large and sparse.
-    """
-    result = dem_grid.copy()
-
-    # Only process if there are non-zero values
-    if np.any(grid_bi != 0):
-        # Convert to integer IDs (handle NaN for float arrays)
-        if grid_bi.dtype.kind == 'f':
-            grid_bi_int = np.nan_to_num(grid_bi, nan=0).astype(np.int64)
-        else:
-            grid_bi_int = grid_bi.astype(np.int64)
-
-        # Work only on non-zero cells
-        flat_ids = grid_bi_int.ravel()
-        flat_dem = dem_grid.ravel()
-        nz_mask = flat_ids != 0
-        if np.any(nz_mask):
-            ids_nz = flat_ids[nz_mask]
-            vals_nz = flat_dem[nz_mask]
-
-            # Densify IDs via inverse indices to avoid np.bincount on large max(id)
-            unique_ids, inverse_idx = np.unique(ids_nz, return_inverse=True)
-            sums = np.bincount(inverse_idx, weights=vals_nz)
-            counts = np.bincount(inverse_idx)
-            counts[counts == 0] = 1
-            means = sums / counts
-
-            # Scatter means back to result for non-zero cells
-            result.ravel()[nz_mask] = means[inverse_idx]
-
-    return result - np.min(result)
-
-def process_grid(grid_bi, dem_grid):
-    """
-    Safe version that tries optimization first, then falls back to original method.
-    """
-    try:
-        # Try the optimized version first
-        return process_grid_optimized(grid_bi, dem_grid)
-    except Exception as e:
-        print(f"Optimized process_grid failed: {e}, using original method")
-        # Fall back to original implementation
-        unique_ids = np.unique(grid_bi[grid_bi != 0])
-        result = dem_grid.copy()
-
-        for id_num in unique_ids:
-            mask = (grid_bi == id_num)
-            avg_value = np.mean(dem_grid[mask])
-            result[mask] = avg_value
-
-        return result - np.min(result)
-    """
-    Optimized version that avoids converting to Python lists.
-    Works directly with numpy arrays.
-    """
-    if not isinstance(arr, np.ndarray):
-        return arr
-
-    # Create output array
-    result = np.empty_like(arr, dtype=object)
-
-    # Vectorized operation for empty cells
-    for i in range(arr.shape[0]):
-        for j in range(arr.shape[1]):
-            cell = arr[i, j]
-
-            if cell is None or (isinstance(cell, list) and len(cell) == 0):
-                result[i, j] = []
-            elif isinstance(cell, list):
-                # Process list without converting entire array
-                new_cell = []
-                for segment in cell:
-                    if isinstance(segment, (list, np.ndarray)):
-                        # Use numpy operations where possible
-                        if isinstance(segment, np.ndarray):
-                            new_segment = np.where(np.isnan(segment), replace_value, segment).tolist()
-                        else:
-                            new_segment = [replace_value if (isinstance(v, float) and np.isnan(v)) else v for v in segment]
-                        new_cell.append(new_segment)
-                    else:
-                        new_cell.append(segment)
-                result[i, j] = new_cell
-            else:
-                result[i, j] = cell
-
-    return result
-
-def calculate_grid_size(side_1, side_2, u_vec, v_vec, meshsize):
-    """
-    Calculate grid size and adjusted mesh size based on input parameters.
-
-    This function determines the number of grid cells needed in each direction and
-    adjusts the mesh size to exactly fit the desired area. The calculation takes into
-    account the input vectors and desired mesh size to ensure proper coverage.
-
-    Args:
-        side_1 (numpy.ndarray): First side vector defining the grid extent
-        side_2 (numpy.ndarray): Second side vector defining the grid extent
-        u_vec (numpy.ndarray): Unit vector in first direction
-        v_vec (numpy.ndarray): Unit vector in second direction
-        meshsize (float): Desired mesh size in the same units as the vectors
-
-    Returns:
-        tuple: A tuple containing:
-            - grid_size (tuple of ints): Number of cells in each direction (nx, ny)
-            - adjusted_mesh_size (tuple of floats): Actual mesh sizes that fit the area exactly
-
-    Example:
-        >>> side1 = np.array([100, 0])  # 100 units in x direction
-        >>> side2 = np.array([0, 50])   # 50 units in y direction
-        >>> u = np.array([1, 0])        # Unit vector in x
-        >>> v = np.array([0, 1])        # Unit vector in y
-        >>> mesh = 10                   # Desired 10-unit mesh
-        >>> grid_size, adj_mesh = calculate_grid_size(side1, side2, u, v, mesh)
-    """
-    # Calculate total side lengths in meters using the relationship between side vectors and unit vectors
-    # u_vec and v_vec represent degrees per meter along each side direction
-    dist_side_1_m = np.linalg.norm(side_1) / (np.linalg.norm(u_vec) + 1e-12)
-    dist_side_2_m = np.linalg.norm(side_2) / (np.linalg.norm(v_vec) + 1e-12)
-
-    # Calculate number of cells (nx along u, ny along v), rounding to nearest integer and ensuring at least 1
-    grid_size_0 = max(1, int(dist_side_1_m / meshsize + 0.5))
-    grid_size_1 = max(1, int(dist_side_2_m / meshsize + 0.5))
-
-    # Adjust mesh sizes (in meters) to exactly fit the sides with the calculated number of cells
-    adjusted_mesh_size_0 = dist_side_1_m / grid_size_0
-    adjusted_mesh_size_1 = dist_side_2_m / grid_size_1
-
-    return (grid_size_0, grid_size_1), (adjusted_mesh_size_0, adjusted_mesh_size_1)
-
-def create_coordinate_mesh(origin, grid_size, adjusted_meshsize, u_vec, v_vec):
-    """
-    Create a coordinate mesh based on input parameters.
-
-    This function generates a 3D array representing a coordinate mesh, where each point
-    in the mesh is calculated by adding scaled vectors to the origin point. The mesh
-    is created using the specified grid size and adjusted mesh sizes.
-
-    Args:
-        origin (numpy.ndarray): Origin point coordinates (shape: (2,) or (3,))
-        grid_size (tuple): Size of grid in each dimension (nx, ny)
-        adjusted_meshsize (tuple): Adjusted mesh size in each dimension (dx, dy)
-        u_vec (numpy.ndarray): Unit vector in first direction
-        v_vec (numpy.ndarray): Unit vector in second direction
-
-    Returns:
-        numpy.ndarray: 3D array of shape (coord_dim, ny, nx) containing the coordinates
-            of each point in the mesh. coord_dim is the same as the
-            dimensionality of the input vectors.
-
-    Example:
-        >>> origin = np.array([0, 0])
-        >>> grid_size = (5, 4)
-        >>> mesh_size = (10, 10)
-        >>> u = np.array([1, 0])
-        >>> v = np.array([0, 1])
-        >>> coords = create_coordinate_mesh(origin, grid_size, mesh_size, u, v)
-    """
-    # Create evenly spaced points along each axis
-    x = np.linspace(0, grid_size[0], grid_size[0])
-    y = np.linspace(0, grid_size[1], grid_size[1])
-
-    # Create 2D coordinate grids
-    xx, yy = np.meshgrid(x, y)
-
-    # Calculate coordinates of each cell by adding scaled vectors
-    cell_coords = origin[:, np.newaxis, np.newaxis] + \
-                  xx[np.newaxis, :, :] * adjusted_meshsize[0] * u_vec[:, np.newaxis, np.newaxis] + \
-                  yy[np.newaxis, :, :] * adjusted_meshsize[1] * v_vec[:, np.newaxis, np.newaxis]
-
-    return cell_coords
-
-def create_cell_polygon(origin, i, j, adjusted_meshsize, u_vec, v_vec):
-    """
-    Create a polygon representing a grid cell.
-
-    This function generates a rectangular polygon for a specific grid cell by calculating
-    its four corners based on the cell indices and grid parameters. The polygon is
-    created in counter-clockwise order starting from the bottom-left corner.
-
-    Args:
-        origin (numpy.ndarray): Origin point coordinates (shape: (2,) or (3,))
-        i (int): Row index of the cell
-        j (int): Column index of the cell
-        adjusted_meshsize (tuple): Adjusted mesh size in each dimension (dx, dy)
-        u_vec (numpy.ndarray): Unit vector in first direction
-        v_vec (numpy.ndarray): Unit vector in second direction
-
-    Returns:
-        shapely.geometry.Polygon: Polygon representing the grid cell, with vertices
-            ordered counter-clockwise from bottom-left
-
-    Example:
-        >>> origin = np.array([0, 0])
-        >>> i, j = 1, 2  # Cell at row 1, column 2
-        >>> mesh_size = (10, 10)
-        >>> u = np.array([1, 0])
-        >>> v = np.array([0, 1])
-        >>> cell_poly = create_cell_polygon(origin, i, j, mesh_size, u, v)
-    """
-    # Calculate the four corners of the cell by adding scaled vectors
-    bottom_left = origin + i * adjusted_meshsize[0] * u_vec + j * adjusted_meshsize[1] * v_vec
-    bottom_right = origin + (i + 1) * adjusted_meshsize[0] * u_vec + j * adjusted_meshsize[1] * v_vec
-    top_right = origin + (i + 1) * adjusted_meshsize[0] * u_vec + (j + 1) * adjusted_meshsize[1] * v_vec
-    top_left = origin + i * adjusted_meshsize[0] * u_vec + (j + 1) * adjusted_meshsize[1] * v_vec
-
-    # Create polygon from corners in counter-clockwise order
-    return Polygon([bottom_left, bottom_right, top_right, top_left])
-
-def tree_height_grid_from_land_cover(land_cover_grid_ori):
-    """
-    Convert a land cover grid to a tree height grid.
-
-    This function transforms a land cover classification grid into a grid of tree heights
-    by mapping land cover classes to predefined tree heights. The function first flips
-    the input grid vertically and adjusts class values, then applies a translation
-    dictionary to convert classes to heights.
-
-    Land cover class to tree height mapping:
-    - Class 4 (Forest): 10m height
-    - All other classes: 0m height
-
-    Args:
-        land_cover_grid_ori (numpy.ndarray): Original land cover grid with class values
-
-    Returns:
-        numpy.ndarray: Grid of tree heights in meters, with same dimensions as input
-
-    Example:
-        >>> lc_grid = np.array([[1, 4, 2], [4, 3, 4], [2, 1, 3]])
-        >>> tree_heights = tree_height_grid_from_land_cover(lc_grid)
-        >>> # Result: array([[0, 10, 0], [10, 0, 10], [0, 0, 0]])
-    """
-    # Flip array vertically and add 1 to all values
-    land_cover_grid = np.flipud(land_cover_grid_ori) + 1
-
-    # Define mapping from land cover classes to tree heights
-    tree_translation_dict = {
-        1: 0,   # No trees
-        2: 0,   # No trees
-        3: 0,   # No trees
-        4: 10,  # Forest - 10m height
-        5: 0,   # No trees
-        6: 0,   # No trees
-        7: 0,   # No trees
-        8: 0,   # No trees
-        9: 0,   # No trees
-        10: 0   # No trees
-    }
-
-    # Convert land cover classes to tree heights and flip back
-    tree_height_grid = translate_array(np.flipud(land_cover_grid), tree_translation_dict).astype(int)
-
-    return tree_height_grid
-
-def create_land_cover_grid_from_geotiff_polygon(tiff_path, mesh_size, land_cover_classes, polygon):
-    """
-    Create a land cover grid from a GeoTIFF file within a polygon boundary.
-
-    Args:
-        tiff_path (str): Path to GeoTIFF file
-        mesh_size (float): Size of mesh cells
-        land_cover_classes (dict): Dictionary mapping land cover classes
-        polygon (list): List of polygon vertices
-
-    Returns:
-        numpy.ndarray: Grid of land cover classes within the polygon
-    """
-    with rasterio.open(tiff_path) as src:
-        # Read RGB bands from GeoTIFF
-        img = src.read((1,2,3))
-        left, bottom, right, top = src.bounds
-        src_crs = src.crs
-
-        # Create a Shapely polygon from input coordinates
-        poly = Polygon(polygon)
-
-        # Get bounds of the polygon in WGS84 coordinates
-        left_wgs84, bottom_wgs84, right_wgs84, top_wgs84 = poly.bounds
-        # print(left, bottom, right, top)
-
-        # Calculate width and height using geodesic calculations for accuracy
-        geod = Geod(ellps="WGS84")
-        _, _, width = geod.inv(left_wgs84, bottom_wgs84, right_wgs84, bottom_wgs84)
-        _, _, height = geod.inv(left_wgs84, bottom_wgs84, left_wgs84, top_wgs84)
-
-        # Calculate number of grid cells based on mesh size
-        num_cells_x = int(width / mesh_size + 0.5)
-        num_cells_y = int(height / mesh_size + 0.5)
-
-        # Adjust mesh_size to fit the image exactly
-        adjusted_mesh_size_x = (right - left) / num_cells_x
-        adjusted_mesh_size_y = (top - bottom) / num_cells_y
-
-        # Create affine transform for mapping between pixel and world coordinates
-        new_affine = Affine(adjusted_mesh_size_x, 0, left, 0, -adjusted_mesh_size_y, top)
-
-        # Create coordinate grids for the new mesh
-        cols, rows = np.meshgrid(np.arange(num_cells_x), np.arange(num_cells_y))
-        xs, ys = new_affine * (cols, rows)
-        xs_flat, ys_flat = xs.flatten(), ys.flatten()
-
-        # Convert world coordinates to image pixel indices
-        row, col = src.index(xs_flat, ys_flat)
-        row, col = np.array(row), np.array(col)
-
-        # Filter out indices that fall outside the image bounds
-        valid = (row >= 0) & (row < src.height) & (col >= 0) & (col < src.width)
-        row, col = row[valid], col[valid]
-
-        # Initialize output grid with 'No Data' values
-        grid = np.full((num_cells_y, num_cells_x), 'No Data', dtype=object)
-
-        # Fill grid with dominant land cover classes
-        for i, (r, c) in enumerate(zip(row, col)):
-            cell_data = img[:, r, c]
-            dominant_class = get_dominant_class(cell_data, land_cover_classes)
-            grid_row, grid_col = np.unravel_index(i, (num_cells_y, num_cells_x))
-            grid[grid_row, grid_col] = dominant_class
-
-        # Flip grid vertically to match geographic orientation
-        return np.flipud(grid)
-
-def create_land_cover_grid_from_gdf_polygon(gdf, meshsize, source, rectangle_vertices, default_class='Developed space'):
-    """Create a grid of land cover classes from GeoDataFrame polygon data.
-
-    Args:
-        gdf (GeoDataFrame): GeoDataFrame containing land cover polygons
-        meshsize (float): Size of each grid cell in meters
-        source (str): Source of the land cover data to determine class priorities
-        rectangle_vertices (list): List of 4 (lon,lat) coordinate pairs defining the rectangle bounds
-        default_class (str, optional): Default land cover class for cells with no intersecting polygons.
-            Defaults to 'Developed space'.
-
-    Returns:
-        numpy.ndarray: 2D grid of land cover classes as strings
-
-    The function creates a regular grid over the given rectangle area and determines the dominant
-    land cover class for each cell based on polygon intersections. Classes are assigned based on
-    priority rules and majority area coverage.
-    """
-
-    # Default priority mapping for land cover classes (lower number = higher priority)
-    class_priority = {
-        'Bareland': 4,
-        'Rangeland': 6,
-        'Developed space': 8,
-        'Road': 1,  # Roads have highest priority
-        'Tree': 7,
-        'Water': 3,
-        'Agriculture land': 5,
-        'Building': 2  # Buildings have second highest priority
-    }
-
-    # Get source-specific priority mapping if available
-    class_priority = get_class_priority(source)
-
-    # Calculate grid dimensions and normalize direction vectors
-    geod = initialize_geod()
-    vertex_0, vertex_1, vertex_3 = rectangle_vertices[0], rectangle_vertices[1], rectangle_vertices[3]
-
-    # Calculate actual distances between vertices using geodesic calculations
-    dist_side_1 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_1[0], vertex_1[1])
-    dist_side_2 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_3[0], vertex_3[1])
-
-    # Create vectors representing the sides of the rectangle
-    side_1 = np.array(vertex_1) - np.array(vertex_0)
-    side_2 = np.array(vertex_3) - np.array(vertex_0)
-
-    # Normalize vectors to represent 1 meter in each direction
-    u_vec = normalize_to_one_meter(side_1, dist_side_1)
-    v_vec = normalize_to_one_meter(side_2, dist_side_2)
-
-    origin = np.array(rectangle_vertices[0])
-    grid_size, adjusted_meshsize = calculate_grid_size(side_1, side_2, u_vec, v_vec, meshsize)
-
-    print(f"Adjusted mesh size: {adjusted_meshsize}")
-
-    # Initialize grid with default land cover class
-    grid = np.full(grid_size, default_class, dtype=object)
-
-    # Calculate bounding box for spatial indexing
-    extent = [min(coord[1] for coord in rectangle_vertices), max(coord[1] for coord in rectangle_vertices),
-              min(coord[0] for coord in rectangle_vertices), max(coord[0] for coord in rectangle_vertices)]
-    plotting_box = box(extent[2], extent[0], extent[3], extent[1])
-
-    # Create spatial index for efficient polygon lookup
-    land_cover_polygons = []
-    idx = index.Index()
-    for i, row in gdf.iterrows():
-        polygon = row.geometry
-        land_cover_class = row['class']
-        land_cover_polygons.append((polygon, land_cover_class))
-        idx.insert(i, polygon.bounds)
-
-    # Iterate through each grid cell
-    for i in range(grid_size[0]):
-        for j in range(grid_size[1]):
-            land_cover_class = default_class
-            cell = create_cell_polygon(origin, i, j, adjusted_meshsize, u_vec, v_vec)
-
-            # Check intersections with polygons that could overlap this cell
-            for k in idx.intersection(cell.bounds):
-                polygon, land_cover_class_temp = land_cover_polygons[k]
-                try:
-                    if cell.intersects(polygon):
-                        intersection = cell.intersection(polygon)
-                        # If polygon covers more than 50% of cell, consider its land cover class
-                        if intersection.area > cell.area/2:
-                            rank = class_priority[land_cover_class]
-                            rank_temp = class_priority[land_cover_class_temp]
-                            # Update cell class if new class has higher priority (lower rank)
-                            if rank_temp < rank:
-                                land_cover_class = land_cover_class_temp
-                                grid[i, j] = land_cover_class
-                except GEOSException as e:
-                    print(f"GEOS error at grid cell ({i}, {j}): {str(e)}")
-                    # Attempt to fix invalid polygon geometry
-                    try:
-                        fixed_polygon = polygon.buffer(0)
-                        if cell.intersects(fixed_polygon):
-                            intersection = cell.intersection(fixed_polygon)
-                            if intersection.area > cell.area/2:
-                                rank = class_priority[land_cover_class]
-                                rank_temp = class_priority[land_cover_class_temp]
-                                if rank_temp < rank:
-                                    land_cover_class = land_cover_class_temp
-                                    grid[i, j] = land_cover_class
-                    except Exception as fix_error:
-                        print(f"Failed to fix polygon at grid cell ({i}, {j}): {str(fix_error)}")
-                    continue
-    return grid
-
-def create_height_grid_from_geotiff_polygon(tiff_path, mesh_size, polygon):
-    """
-    Create a height grid from a GeoTIFF file within a polygon boundary.
-
-    Args:
-        tiff_path (str): Path to GeoTIFF file
-        mesh_size (float): Size of mesh cells
-        polygon (list): List of polygon vertices
-
-    Returns:
-        numpy.ndarray: Grid of heights within the polygon
-    """
-    with rasterio.open(tiff_path) as src:
-        # Read height data
-        img = src.read(1)
-        left, bottom, right, top = src.bounds
-        src_crs = src.crs
-
-        # Create polygon from input coordinates
-        poly = Polygon(polygon)
-
-        # Get polygon bounds in WGS84
-        left_wgs84, bottom_wgs84, right_wgs84, top_wgs84 = poly.bounds
-        # print(left, bottom, right, top)
-        # print(left_wgs84, bottom_wgs84, right_wgs84, top_wgs84)
-
-        # Calculate actual distances using geodesic methods
-        geod = Geod(ellps="WGS84")
-        _, _, width = geod.inv(left_wgs84, bottom_wgs84, right_wgs84, bottom_wgs84)
-        _, _, height = geod.inv(left_wgs84, bottom_wgs84, left_wgs84, top_wgs84)
-
-        # Calculate grid dimensions and adjust mesh size
-        num_cells_x = int(width / mesh_size + 0.5)
-        num_cells_y = int(height / mesh_size + 0.5)
-
-        adjusted_mesh_size_x = (right - left) / num_cells_x
-        adjusted_mesh_size_y = (top - bottom) / num_cells_y
-
-        # Create affine transform for coordinate mapping
-        new_affine = Affine(adjusted_mesh_size_x, 0, left, 0, -adjusted_mesh_size_y, top)
-
-        # Generate coordinate grids
-        cols, rows = np.meshgrid(np.arange(num_cells_x), np.arange(num_cells_y))
-        xs, ys = new_affine * (cols, rows)
-        xs_flat, ys_flat = xs.flatten(), ys.flatten()
-
-        # Convert to image coordinates
-        row, col = src.index(xs_flat, ys_flat)
-        row, col = np.array(row), np.array(col)
-
-        # Filter valid indices
-        valid = (row >= 0) & (row < src.height) & (col >= 0) & (col < src.width)
-        row, col = row[valid], col[valid]
-
-        # Create output grid and fill with height values
-        grid = np.full((num_cells_y, num_cells_x), np.nan)
-        flat_indices = np.ravel_multi_index((row, col), img.shape)
-        np.put(grid, np.ravel_multi_index((rows.flatten()[valid], cols.flatten()[valid]), grid.shape), img.flat[flat_indices])
-
-    return np.flipud(grid)
-
-def create_building_height_grid_from_gdf_polygon(
-    gdf,
-    meshsize,
-    rectangle_vertices,
-    overlapping_footprint=False,
-    gdf_comp=None,
-    geotiff_path_comp=None,
-    complement_building_footprints=None,
-    complement_height=None
-):
-    """
-    Create a building height grid from GeoDataFrame data within a polygon boundary.
-
-    Args:
-        gdf (geopandas.GeoDataFrame): GeoDataFrame containing building information
-        meshsize (float): Size of mesh cells
-        rectangle_vertices (list): List of rectangle vertices defining the boundary
-        overlapping_footprint (bool): If True, use precise geometry-based processing for overlaps.
-            If False, use faster rasterio-based approach.
-        gdf_comp (geopandas.GeoDataFrame, optional): Complementary GeoDataFrame
-        geotiff_path_comp (str, optional): Path to complementary GeoTIFF file
-        complement_building_footprints (bool, optional): Whether to complement footprints
-        complement_height (float, optional): Height value to use for buildings with height=0
-
-    Returns:
-        tuple: (building_height_grid, building_min_height_grid, building_id_grid, filtered_buildings)
-            - building_height_grid (numpy.ndarray): Grid of building heights
-            - building_min_height_grid (numpy.ndarray): Grid of min building heights (list per cell)
-            - building_id_grid (numpy.ndarray): Grid of building IDs
-            - filtered_buildings (geopandas.GeoDataFrame): The buildings used (filtered_gdf)
-    """
-    # --------------------------------------------------------------------------
-    # 1) COMMON INITIAL SETUP AND DATA FILTERING
-    # --------------------------------------------------------------------------
-    geod = initialize_geod()
-    vertex_0, vertex_1, vertex_3 = rectangle_vertices[0], rectangle_vertices[1], rectangle_vertices[3]
-
-    # Distances for each side
-    dist_side_1 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_1[0], vertex_1[1])
-    dist_side_2 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_3[0], vertex_3[1])
-
-    # Normalized vectors
-    side_1 = np.array(vertex_1) - np.array(vertex_0)
-    side_2 = np.array(vertex_3) - np.array(vertex_0)
-    u_vec = normalize_to_one_meter(side_1, dist_side_1)
-    v_vec = normalize_to_one_meter(side_2, dist_side_2)
-
-    # Grid parameters
-    origin = np.array(rectangle_vertices[0])
-    grid_size, adjusted_meshsize = calculate_grid_size(side_1, side_2, u_vec, v_vec, meshsize)
-
-    # Filter the input GeoDataFrame by bounding box
-    extent = [
-        min(coord[1] for coord in rectangle_vertices),
-        max(coord[1] for coord in rectangle_vertices),
-        min(coord[0] for coord in rectangle_vertices),
-        max(coord[0] for coord in rectangle_vertices)
-    ]
-    plotting_box = box(extent[2], extent[0], extent[3], extent[1])
-    filtered_gdf = gdf[gdf.geometry.intersects(plotting_box)].copy()
-
-    # Count buildings with height=0 or NaN
-    zero_height_count = len(filtered_gdf[filtered_gdf['height'] == 0])
-    nan_height_count = len(filtered_gdf[filtered_gdf['height'].isna()])
-    print(f"{zero_height_count+nan_height_count} of the total {len(filtered_gdf)} building footprint from the base data source did not have height data.")
-
-    # Optionally merge heights from complementary sources
-    if gdf_comp is not None:
-        filtered_gdf_comp = gdf_comp[gdf_comp.geometry.intersects(plotting_box)].copy()
-        if complement_building_footprints:
-            filtered_gdf = complement_building_heights_from_gdf(filtered_gdf, filtered_gdf_comp)
-        else:
-            filtered_gdf = extract_building_heights_from_gdf(filtered_gdf, filtered_gdf_comp)
-    elif geotiff_path_comp:
-        filtered_gdf = extract_building_heights_from_geotiff(geotiff_path_comp, filtered_gdf)
-
-    # After filtering and complementing heights, process overlapping buildings
-    filtered_gdf = process_building_footprints_by_overlap(filtered_gdf, overlap_threshold=0.5)
-
-    # --------------------------------------------------------------------------
-    # 2) BRANCH BASED ON OVERLAPPING_FOOTPRINT PARAMETER
-    # --------------------------------------------------------------------------
-
-    if overlapping_footprint:
-        # Use precise geometry-based approach for better overlap handling
-        return _process_with_geometry_intersection(
-            filtered_gdf, grid_size, adjusted_meshsize, origin, u_vec, v_vec, complement_height
-        )
-    else:
-        # Use faster rasterio-based approach
-        return _process_with_rasterio(
-            filtered_gdf, grid_size, adjusted_meshsize, origin, u_vec, v_vec,
-            rectangle_vertices, complement_height
-        )
-
-
-def _process_with_geometry_intersection(filtered_gdf, grid_size, adjusted_meshsize, origin, u_vec, v_vec, complement_height):
-    """
-    Process buildings using precise geometry intersection approach.
-    Better for handling overlapping footprints but slower.
-    """
-    # Initialize output grids
-    building_height_grid = np.zeros(grid_size)
-    building_id_grid = np.zeros(grid_size)
-
-    # Use a Python list-of-lists or object array for min_height tracking
-    building_min_height_grid = np.empty(grid_size, dtype=object)
-    for i in range(grid_size[0]):
-        for j in range(grid_size[1]):
-            building_min_height_grid[i, j] = []
-
-    # --------------------------------------------------------------------------
-    # PREPARE BUILDING POLYGONS & SPATIAL INDEX
-    # --------------------------------------------------------------------------
-    building_polygons = []
-    for idx_b, row in filtered_gdf.iterrows():
-        polygon = row.geometry
-        height = row.get('height', None)
-
-        # Replace height=0 with complement_height if specified
-        if complement_height is not None and (height == 0 or height is None):
-            height = complement_height
-
-        min_height = row.get('min_height', 0)
-        if pd.isna(min_height):
-            min_height = 0
-
-        is_inner = row.get('is_inner', False)
-        feature_id = row.get('id', idx_b)
-
-        # Fix invalid geometry
-        if not polygon.is_valid:
-            try:
-                polygon = polygon.buffer(0)
-                if not polygon.is_valid:
-                    polygon = polygon.simplify(1e-8)
-            except Exception as e:
-                pass
-
-        bounding_box = polygon.bounds  # (minx, miny, maxx, maxy)
-        building_polygons.append((
-            polygon, bounding_box, height, min_height, is_inner, feature_id
-        ))
-
-    # Build R-tree index using bounding boxes
-    idx = index.Index()
-    for i_b, (poly, bbox, _, _, _, _) in enumerate(building_polygons):
-        idx.insert(i_b, bbox)
-
-    # --------------------------------------------------------------------------
-    # MAIN GRID LOOP WITH PRECISE INTERSECTION
-    # --------------------------------------------------------------------------
-    INTERSECTION_THRESHOLD = 0.3
-
-    for i in range(grid_size[0]):
-        for j in range(grid_size[1]):
-            # Create the cell polygon once
-            cell = create_cell_polygon(origin, i, j, adjusted_meshsize, u_vec, v_vec)
-            if not cell.is_valid:
-                cell = cell.buffer(0)
-            cell_area = cell.area
-
-            # Find possible intersections from the index
-            potential = list(idx.intersection(cell.bounds))
-            if not potential:
-                continue
-
-            # Sort buildings by height descending
-            cell_buildings = []
-            for k in potential:
-                bpoly, bbox, height, minh, inr, fid = building_polygons[k]
-                sort_val = height if (height is not None) else -float('inf')
-                cell_buildings.append((k, bpoly, bbox, height, minh, inr, fid, sort_val))
-            cell_buildings.sort(key=lambda x: x[-1], reverse=True)
-
-            found_intersection = False
-            all_zero_or_nan = True
-
-            for (k, polygon, bbox, height, min_height, is_inner, feature_id, _) in cell_buildings:
-                try:
-                    # Quick bounding-box check
-                    minx_p, miny_p, maxx_p, maxy_p = bbox
-                    minx_c, miny_c, maxx_c, maxy_c = cell.bounds
-
-                    # Overlap bounding box
-                    overlap_minx = max(minx_p, minx_c)
-                    overlap_miny = max(miny_p, miny_c)
-                    overlap_maxx = min(maxx_p, maxx_c)
-                    overlap_maxy = min(maxy_p, maxy_c)
-
-                    if (overlap_maxx <= overlap_minx) or (overlap_maxy <= overlap_miny):
-                        continue
-
-                    # Area of bounding-box intersection
-                    bbox_intersect_area = (overlap_maxx - overlap_minx) * (overlap_maxy - overlap_miny)
-                    if bbox_intersect_area < INTERSECTION_THRESHOLD * cell_area:
-                        continue
-
-                    # Ensure valid geometry
-                    if not polygon.is_valid:
-                        polygon = polygon.buffer(0)
-
-                    if cell.intersects(polygon):
-                        intersection = cell.intersection(polygon)
-                        inter_area = intersection.area
-
-                        # If the fraction of cell covered > threshold
-                        if (inter_area / cell_area) > INTERSECTION_THRESHOLD:
-                            found_intersection = True
-
-                            # If not an inner courtyard
-                            if not is_inner:
-                                building_min_height_grid[i, j].append([min_height, height])
-                                building_id_grid[i, j] = feature_id
-
-                                # Update building height if valid
-                                if (height is not None and not np.isnan(height) and height > 0):
-                                    all_zero_or_nan = False
-                                    current_height = building_height_grid[i, j]
-
-                                    # Replace if we had 0, nan, or smaller height
-                                    if (current_height == 0 or np.isnan(current_height) or current_height < height):
-                                        building_height_grid[i, j] = height
-                            else:
-                                # Inner courtyards => override with 0
-                                building_min_height_grid[i, j] = [[0, 0]]
-                                building_height_grid[i, j] = 0
-                                found_intersection = True
-                                all_zero_or_nan = False
-                                break
-
-                except (GEOSException, ValueError) as e:
-                    # Attempt fallback fix
-                    try:
-                        simplified_polygon = polygon.simplify(1e-8)
-                        if simplified_polygon.is_valid:
-                            intersection = cell.intersection(simplified_polygon)
-                            inter_area = intersection.area
-                            if (inter_area / cell_area) > INTERSECTION_THRESHOLD:
-                                found_intersection = True
-                                if not is_inner:
-                                    building_min_height_grid[i, j].append([min_height, height])
-                                    building_id_grid[i, j] = feature_id
-                                    if (height is not None and not np.isnan(height) and height > 0):
-                                        all_zero_or_nan = False
-                                        if (building_height_grid[i, j] == 0 or
-                                            np.isnan(building_height_grid[i, j]) or
-                                            building_height_grid[i, j] < height):
-                                            building_height_grid[i, j] = height
-                                else:
-                                    building_min_height_grid[i, j] = [[0, 0]]
-                                    building_height_grid[i, j] = 0
-                                    found_intersection = True
-                                    all_zero_or_nan = False
-                                    break
-                    except Exception as fix_error:
-                        print(f"Failed to process cell ({i}, {j}) - Building {k}: {str(fix_error)}")
-                        continue
-
-            # If we found intersecting buildings but all were zero/NaN, mark as NaN
-            if found_intersection and all_zero_or_nan:
-                building_height_grid[i, j] = np.nan
-
-    return building_height_grid, building_min_height_grid, building_id_grid, filtered_gdf
-
-
-def _process_with_rasterio(filtered_gdf, grid_size, adjusted_meshsize, origin, u_vec, v_vec, rectangle_vertices, complement_height):
-    """
-    Process buildings using fast rasterio-based approach.
-    Faster but less precise for overlapping footprints.
-    """
-    # Set up transform for rasterio using rotated basis defined by u_vec and v_vec
-    # Step vectors in coordinate units (degrees) per cell
-    u_step = adjusted_meshsize[0] * u_vec
-    v_step = adjusted_meshsize[1] * v_vec
-
-    # Define the top-left corner so that row=0 is the northern edge
-    top_left = origin + grid_size[1] * v_step
-
-    # Affine transform mapping (col, row) -> (x, y)
-    # x = a*col + b*row + c ; y = d*col + e*row + f
-    # col increases along u_step; row increases southward, hence -v_step
-    transform = Affine(u_step[0], -v_step[0], top_left[0],
-                       u_step[1], -v_step[1], top_left[1])
-
-    # Process buildings data
-    filtered_gdf = filtered_gdf.copy()
-    if complement_height is not None:
-        mask = (filtered_gdf['height'] == 0) | (filtered_gdf['height'].isna())
-        filtered_gdf.loc[mask, 'height'] = complement_height
-
-    # Add missing columns with defaults
-    filtered_gdf['min_height'] = 0
-
-    if 'is_inner' not in filtered_gdf.columns:
-        filtered_gdf['is_inner'] = False
-
+"""
+This module provides functions for creating and manipulating grids of building heights, land cover, and elevation data.
+It includes functionality for:
+- Grid creation and manipulation for various data types (buildings, land cover, elevation)
+- Coordinate transformations and spatial operations
+- Data interpolation and aggregation
+- Vector to raster conversion
+"""
+
+import numpy as np
+import pandas as pd
+import os
+from shapely.geometry import Polygon, Point, MultiPolygon, box, mapping
+from scipy.ndimage import label, generate_binary_structure
+from pyproj import Geod, Transformer, CRS
+import rasterio
+from rasterio import features
+from rasterio.transform import from_bounds
+from affine import Affine
+import geopandas as gpd
+from collections import defaultdict
+from scipy.interpolate import griddata
+from shapely.errors import GEOSException
+from rtree import index
+import warnings
+
+from .utils import (
+    initialize_geod,
+    calculate_distance,
+    normalize_to_one_meter,
+    create_building_polygons,
+    convert_format_lat_lon
+)
+from ..geoprocessor.polygon import (
+    filter_buildings,
+    extract_building_heights_from_geotiff,
+    extract_building_heights_from_gdf,
+    complement_building_heights_from_gdf,
+    process_building_footprints_by_overlap
+)
+from ..utils.lc import (
+    get_class_priority,
+    create_land_cover_polygons,
+    get_dominant_class,
+)
+from ..downloader.gee import (
+    get_roi,
+    save_geotiff_open_buildings_temporal
+)
+
+def apply_operation(arr, meshsize):
+    """
+    Applies a sequence of operations to an array based on a mesh size to normalize and discretize values.
+
+    This function performs the following sequence of operations:
+    1. Divides array by mesh size to normalize values
+    2. Adds 0.5 to round values to nearest integer
+    3. Floors the result to get integer values
+    4. Scales back to original units by multiplying by mesh size
+
+    Args:
+        arr (numpy.ndarray): Input array to transform
+        meshsize (float): Size of mesh to use for calculations
+
+    Returns:
+        numpy.ndarray: Transformed array after applying operations
+
+    Example:
+        >>> arr = np.array([1.2, 2.7, 3.4])
+        >>> meshsize = 0.5
+        >>> result = apply_operation(arr, meshsize)
+    """
+    # Divide array by mesh size to normalize values
+    step1 = arr / meshsize
+    # Add 0.5 to round values to nearest integer
+    step2 = step1 + 0.5
+    # Floor to get integer values
+    step3 = np.floor(step2)
+    # Scale back to original units
+    return step3 * meshsize
+
+def translate_array(input_array, translation_dict):
+    """
+    Translates values in an array according to a dictionary mapping.
+
+    This function creates a new array where each value from the input array
+    is replaced by its corresponding value from the translation dictionary.
+    Values not found in the dictionary are replaced with empty strings.
+
+    Args:
+        input_array (numpy.ndarray): Array containing values to translate
+        translation_dict (dict): Dictionary mapping input values to output values
+
+    Returns:
+        numpy.ndarray: Array with translated values, with same shape as input array
+
+    Example:
+        >>> arr = np.array([[1, 2], [3, 4]])
+        >>> trans_dict = {1: 'A', 2: 'B', 3: 'C', 4: 'D'}
+        >>> result = translate_array(arr, trans_dict)
+        >>> # result = array([['A', 'B'], ['C', 'D']], dtype=object)
+    """
+    # Create empty array of same shape that can hold objects (e.g. strings)
+    translated_array = np.empty_like(input_array, dtype=object)
|
|
105
|
+
# Iterate through array and replace values using dictionary
|
|
106
|
+
for i in range(input_array.shape[0]):
|
|
107
|
+
for j in range(input_array.shape[1]):
|
|
108
|
+
value = input_array[i, j]
|
|
109
|
+
# Use dict.get() to handle missing keys, defaulting to empty string
|
|
110
|
+
translated_array[i, j] = translation_dict.get(value, '')
|
|
111
|
+
return translated_array
|
|
112
|
+
|
|
113
|
+
def group_and_label_cells(array):
|
|
114
|
+
"""
|
|
115
|
+
Convert non-zero numbers in a 2D numpy array to sequential IDs starting from 1.
|
|
116
|
+
|
|
117
|
+
This function creates a new array where all non-zero values are replaced with
|
|
118
|
+
sequential IDs (1, 2, 3, etc.) while preserving zero values. This is useful
|
|
119
|
+
for labeling distinct regions or features in a grid.
|
|
120
|
+
|
|
121
|
+
Args:
|
|
122
|
+
array (numpy.ndarray): Input 2D array with non-zero values to be labeled
|
|
123
|
+
|
|
124
|
+
Returns:
|
|
125
|
+
numpy.ndarray: Array with non-zero values converted to sequential IDs,
|
|
126
|
+
maintaining the same shape as input array
|
|
127
|
+
|
|
128
|
+
Example:
|
|
129
|
+
>>> arr = np.array([[0, 5, 5], [0, 5, 8], [0, 0, 8]])
|
|
130
|
+
>>> result = group_and_label_cells(arr)
|
|
131
|
+
>>> # result = array([[0, 1, 1], [0, 1, 2], [0, 0, 2]])
|
|
132
|
+
"""
|
|
133
|
+
# Create a copy to avoid modifying input
|
|
134
|
+
result = array.copy()
|
|
135
|
+
|
|
136
|
+
# Get sorted set of unique non-zero values
|
|
137
|
+
unique_values = sorted(set(array.flatten()) - {0})
|
|
138
|
+
|
|
139
|
+
# Create mapping from original values to sequential IDs (1, 2, 3, etc)
|
|
140
|
+
value_to_id = {value: idx + 1 for idx, value in enumerate(unique_values)}
|
|
141
|
+
|
|
142
|
+
# Replace each non-zero value with its new sequential ID
|
|
143
|
+
for value in unique_values:
|
|
144
|
+
result[array == value] = value_to_id[value]
|
|
145
|
+
|
|
146
|
+
return result
|
|
147
|
+
|
|
148
|
+
def process_grid_optimized(grid_bi, dem_grid):
|
|
149
|
+
"""
|
|
150
|
+
Optimized version that computes per-building averages without allocating
|
|
151
|
+
huge arrays when building IDs are large and sparse.
|
|
152
|
+
"""
|
|
153
|
+
result = dem_grid.copy()
|
|
154
|
+
|
|
155
|
+
# Only process if there are non-zero values
|
|
156
|
+
if np.any(grid_bi != 0):
|
|
157
|
+
# Convert to integer IDs (handle NaN for float arrays)
|
|
158
|
+
if grid_bi.dtype.kind == 'f':
|
|
159
|
+
grid_bi_int = np.nan_to_num(grid_bi, nan=0).astype(np.int64)
|
|
160
|
+
else:
|
|
161
|
+
grid_bi_int = grid_bi.astype(np.int64)
|
|
162
|
+
|
|
163
|
+
# Work only on non-zero cells
|
|
164
|
+
flat_ids = grid_bi_int.ravel()
|
|
165
|
+
flat_dem = dem_grid.ravel()
|
|
166
|
+
nz_mask = flat_ids != 0
|
|
167
|
+
if np.any(nz_mask):
|
|
168
|
+
ids_nz = flat_ids[nz_mask]
|
|
169
|
+
vals_nz = flat_dem[nz_mask]
|
|
170
|
+
|
|
171
|
+
# Densify IDs via inverse indices to avoid np.bincount on large max(id)
|
|
172
|
+
unique_ids, inverse_idx = np.unique(ids_nz, return_inverse=True)
|
|
173
|
+
sums = np.bincount(inverse_idx, weights=vals_nz)
|
|
174
|
+
counts = np.bincount(inverse_idx)
|
|
175
|
+
counts[counts == 0] = 1
|
|
176
|
+
means = sums / counts
|
|
177
|
+
|
|
178
|
+
# Scatter means back to result for non-zero cells
|
|
179
|
+
result.ravel()[nz_mask] = means[inverse_idx]
|
|
180
|
+
|
|
181
|
+
return result - np.min(result)
|
|
182
|
+
|
|
183
|
+
def process_grid(grid_bi, dem_grid):
|
|
184
|
+
"""
|
|
185
|
+
Safe version that tries optimization first, then falls back to original method.
|
|
186
|
+
"""
|
|
187
|
+
try:
|
|
188
|
+
# Try the optimized version first
|
|
189
|
+
return process_grid_optimized(grid_bi, dem_grid)
|
|
190
|
+
except Exception as e:
|
|
191
|
+
print(f"Optimized process_grid failed: {e}, using original method")
|
|
192
|
+
# Fall back to original implementation
|
|
193
|
+
unique_ids = np.unique(grid_bi[grid_bi != 0])
|
|
194
|
+
result = dem_grid.copy()
|
|
195
|
+
|
|
196
|
+
for id_num in unique_ids:
|
|
197
|
+
mask = (grid_bi == id_num)
|
|
198
|
+
avg_value = np.mean(dem_grid[mask])
|
|
199
|
+
result[mask] = avg_value
|
|
200
|
+
|
|
201
|
+
return result - np.min(result)
|
|
202
|
+
"""
|
|
203
|
+
Optimized version that avoids converting to Python lists.
|
|
204
|
+
Works directly with numpy arrays.
|
|
205
|
+
"""
|
|
206
|
+
if not isinstance(arr, np.ndarray):
|
|
207
|
+
return arr
|
|
208
|
+
|
|
209
|
+
# Create output array
|
|
210
|
+
result = np.empty_like(arr, dtype=object)
|
|
211
|
+
|
|
212
|
+
# Vectorized operation for empty cells
|
|
213
|
+
for i in range(arr.shape[0]):
|
|
214
|
+
for j in range(arr.shape[1]):
|
|
215
|
+
cell = arr[i, j]
|
|
216
|
+
|
|
217
|
+
if cell is None or (isinstance(cell, list) and len(cell) == 0):
|
|
218
|
+
result[i, j] = []
|
|
219
|
+
elif isinstance(cell, list):
|
|
220
|
+
# Process list without converting entire array
|
|
221
|
+
new_cell = []
|
|
222
|
+
for segment in cell:
|
|
223
|
+
if isinstance(segment, (list, np.ndarray)):
|
|
224
|
+
# Use numpy operations where possible
|
|
225
|
+
if isinstance(segment, np.ndarray):
|
|
226
|
+
new_segment = np.where(np.isnan(segment), replace_value, segment).tolist()
|
|
227
|
+
else:
|
|
228
|
+
new_segment = [replace_value if (isinstance(v, float) and np.isnan(v)) else v for v in segment]
|
|
229
|
+
new_cell.append(new_segment)
|
|
230
|
+
else:
|
|
231
|
+
new_cell.append(segment)
|
|
232
|
+
result[i, j] = new_cell
|
|
233
|
+
else:
|
|
234
|
+
result[i, j] = cell
|
|
235
|
+
|
|
236
|
+
return result
|
|
237
|
+
|
|
238
|
+
def calculate_grid_size(side_1, side_2, u_vec, v_vec, meshsize):
|
|
239
|
+
"""
|
|
240
|
+
Calculate grid size and adjusted mesh size based on input parameters.
|
|
241
|
+
|
|
242
|
+
This function determines the number of grid cells needed in each direction and
|
|
243
|
+
adjusts the mesh size to exactly fit the desired area. The calculation takes into
|
|
244
|
+
account the input vectors and desired mesh size to ensure proper coverage.
|
|
245
|
+
|
|
246
|
+
Args:
|
|
247
|
+
side_1 (numpy.ndarray): First side vector defining the grid extent
|
|
248
|
+
side_2 (numpy.ndarray): Second side vector defining the grid extent
|
|
249
|
+
u_vec (numpy.ndarray): Unit vector in first direction
|
|
250
|
+
v_vec (numpy.ndarray): Unit vector in second direction
|
|
251
|
+
meshsize (float): Desired mesh size in the same units as the vectors
|
|
252
|
+
|
|
253
|
+
Returns:
|
|
254
|
+
tuple: A tuple containing:
|
|
255
|
+
- grid_size (tuple of ints): Number of cells in each direction (nx, ny)
|
|
256
|
+
- adjusted_mesh_size (tuple of floats): Actual mesh sizes that fit the area exactly
|
|
257
|
+
|
|
258
|
+
Example:
|
|
259
|
+
>>> side1 = np.array([100, 0]) # 100 units in x direction
|
|
260
|
+
>>> side2 = np.array([0, 50]) # 50 units in y direction
|
|
261
|
+
>>> u = np.array([1, 0]) # Unit vector in x
|
|
262
|
+
>>> v = np.array([0, 1]) # Unit vector in y
|
|
263
|
+
>>> mesh = 10 # Desired 10-unit mesh
|
|
264
|
+
>>> grid_size, adj_mesh = calculate_grid_size(side1, side2, u, v, mesh)
|
|
265
|
+
"""
|
|
266
|
+
# Calculate total side lengths in meters using the relationship between side vectors and unit vectors
|
|
267
|
+
# u_vec and v_vec represent degrees per meter along each side direction
|
|
268
|
+
dist_side_1_m = np.linalg.norm(side_1) / (np.linalg.norm(u_vec) + 1e-12)
|
|
269
|
+
dist_side_2_m = np.linalg.norm(side_2) / (np.linalg.norm(v_vec) + 1e-12)
|
|
270
|
+
|
|
271
|
+
# Calculate number of cells (nx along u, ny along v), rounding to nearest integer and ensuring at least 1
|
|
272
|
+
grid_size_0 = max(1, int(dist_side_1_m / meshsize + 0.5))
|
|
273
|
+
grid_size_1 = max(1, int(dist_side_2_m / meshsize + 0.5))
|
|
274
|
+
|
|
275
|
+
# Adjust mesh sizes (in meters) to exactly fit the sides with the calculated number of cells
|
|
276
|
+
adjusted_mesh_size_0 = dist_side_1_m / grid_size_0
|
|
277
|
+
adjusted_mesh_size_1 = dist_side_2_m / grid_size_1
|
|
278
|
+
|
|
279
|
+
return (grid_size_0, grid_size_1), (adjusted_mesh_size_0, adjusted_mesh_size_1)
|
|
280
|
+
|
|
281
|
+
def create_coordinate_mesh(origin, grid_size, adjusted_meshsize, u_vec, v_vec):
|
|
282
|
+
"""
|
|
283
|
+
Create a coordinate mesh based on input parameters.
|
|
284
|
+
|
|
285
|
+
This function generates a 3D array representing a coordinate mesh, where each point
|
|
286
|
+
in the mesh is calculated by adding scaled vectors to the origin point. The mesh
|
|
287
|
+
is created using the specified grid size and adjusted mesh sizes.
|
|
288
|
+
|
|
289
|
+
Args:
|
|
290
|
+
origin (numpy.ndarray): Origin point coordinates (shape: (2,) or (3,))
|
|
291
|
+
grid_size (tuple): Size of grid in each dimension (nx, ny)
|
|
292
|
+
adjusted_meshsize (tuple): Adjusted mesh size in each dimension (dx, dy)
|
|
293
|
+
u_vec (numpy.ndarray): Unit vector in first direction
|
|
294
|
+
v_vec (numpy.ndarray): Unit vector in second direction
|
|
295
|
+
|
|
296
|
+
Returns:
|
|
297
|
+
numpy.ndarray: 3D array of shape (coord_dim, ny, nx) containing the coordinates
|
|
298
|
+
of each point in the mesh. coord_dim is the same as the
|
|
299
|
+
dimensionality of the input vectors.
|
|
300
|
+
|
|
301
|
+
Example:
|
|
302
|
+
>>> origin = np.array([0, 0])
|
|
303
|
+
>>> grid_size = (5, 4)
|
|
304
|
+
>>> mesh_size = (10, 10)
|
|
305
|
+
>>> u = np.array([1, 0])
|
|
306
|
+
>>> v = np.array([0, 1])
|
|
307
|
+
>>> coords = create_coordinate_mesh(origin, grid_size, mesh_size, u, v)
|
|
308
|
+
"""
|
|
309
|
+
# Create evenly spaced points along each axis
|
|
310
|
+
x = np.linspace(0, grid_size[0], grid_size[0])
|
|
311
|
+
y = np.linspace(0, grid_size[1], grid_size[1])
|
|
312
|
+
|
|
313
|
+
# Create 2D coordinate grids
|
|
314
|
+
xx, yy = np.meshgrid(x, y)
|
|
315
|
+
|
|
316
|
+
# Calculate coordinates of each cell by adding scaled vectors
|
|
317
|
+
cell_coords = origin[:, np.newaxis, np.newaxis] + \
|
|
318
|
+
xx[np.newaxis, :, :] * adjusted_meshsize[0] * u_vec[:, np.newaxis, np.newaxis] + \
|
|
319
|
+
yy[np.newaxis, :, :] * adjusted_meshsize[1] * v_vec[:, np.newaxis, np.newaxis]
|
|
320
|
+
|
|
321
|
+
return cell_coords
|
|
322
|
+
|
|
323
|
+
def create_cell_polygon(origin, i, j, adjusted_meshsize, u_vec, v_vec):
|
|
324
|
+
"""
|
|
325
|
+
Create a polygon representing a grid cell.
|
|
326
|
+
|
|
327
|
+
This function generates a rectangular polygon for a specific grid cell by calculating
|
|
328
|
+
its four corners based on the cell indices and grid parameters. The polygon is
|
|
329
|
+
created in counter-clockwise order starting from the bottom-left corner.
|
|
330
|
+
|
|
331
|
+
Args:
|
|
332
|
+
origin (numpy.ndarray): Origin point coordinates (shape: (2,) or (3,))
|
|
333
|
+
i (int): Row index of the cell
|
|
334
|
+
j (int): Column index of the cell
|
|
335
|
+
adjusted_meshsize (tuple): Adjusted mesh size in each dimension (dx, dy)
|
|
336
|
+
u_vec (numpy.ndarray): Unit vector in first direction
|
|
337
|
+
v_vec (numpy.ndarray): Unit vector in second direction
|
|
338
|
+
|
|
339
|
+
Returns:
|
|
340
|
+
shapely.geometry.Polygon: Polygon representing the grid cell, with vertices
|
|
341
|
+
ordered counter-clockwise from bottom-left
|
|
342
|
+
|
|
343
|
+
Example:
|
|
344
|
+
>>> origin = np.array([0, 0])
|
|
345
|
+
>>> i, j = 1, 2 # Cell at row 1, column 2
|
|
346
|
+
>>> mesh_size = (10, 10)
|
|
347
|
+
>>> u = np.array([1, 0])
|
|
348
|
+
>>> v = np.array([0, 1])
|
|
349
|
+
>>> cell_poly = create_cell_polygon(origin, i, j, mesh_size, u, v)
|
|
350
|
+
"""
|
|
351
|
+
# Calculate the four corners of the cell by adding scaled vectors
|
|
352
|
+
bottom_left = origin + i * adjusted_meshsize[0] * u_vec + j * adjusted_meshsize[1] * v_vec
|
|
353
|
+
bottom_right = origin + (i + 1) * adjusted_meshsize[0] * u_vec + j * adjusted_meshsize[1] * v_vec
|
|
354
|
+
top_right = origin + (i + 1) * adjusted_meshsize[0] * u_vec + (j + 1) * adjusted_meshsize[1] * v_vec
|
|
355
|
+
top_left = origin + i * adjusted_meshsize[0] * u_vec + (j + 1) * adjusted_meshsize[1] * v_vec
|
|
356
|
+
|
|
357
|
+
# Create polygon from corners in counter-clockwise order
|
|
358
|
+
return Polygon([bottom_left, bottom_right, top_right, top_left])
|
|
359
|
+
|
|
360
|
+
def tree_height_grid_from_land_cover(land_cover_grid_ori):
|
|
361
|
+
"""
|
|
362
|
+
Convert a land cover grid to a tree height grid.
|
|
363
|
+
|
|
364
|
+
This function transforms a land cover classification grid into a grid of tree heights
|
|
365
|
+
by mapping land cover classes to predefined tree heights. The function first flips
|
|
366
|
+
the input grid vertically and adjusts class values, then applies a translation
|
|
367
|
+
dictionary to convert classes to heights.
|
|
368
|
+
|
|
369
|
+
Land cover class to tree height mapping:
|
|
370
|
+
- Class 4 (Forest): 10m height
|
|
371
|
+
- All other classes: 0m height
|
|
372
|
+
|
|
373
|
+
Args:
|
|
374
|
+
land_cover_grid_ori (numpy.ndarray): Original land cover grid with class values
|
|
375
|
+
|
|
376
|
+
Returns:
|
|
377
|
+
numpy.ndarray: Grid of tree heights in meters, with same dimensions as input
|
|
378
|
+
|
|
379
|
+
Example:
|
|
380
|
+
>>> lc_grid = np.array([[1, 4, 2], [4, 3, 4], [2, 1, 3]])
|
|
381
|
+
>>> tree_heights = tree_height_grid_from_land_cover(lc_grid)
|
|
382
|
+
>>> # Result: array([[0, 10, 0], [10, 0, 10], [0, 0, 0]])
|
|
383
|
+
"""
|
|
384
|
+
# Flip array vertically and add 1 to all values
|
|
385
|
+
land_cover_grid = np.flipud(land_cover_grid_ori) + 1
|
|
386
|
+
|
|
387
|
+
# Define mapping from land cover classes to tree heights
|
|
388
|
+
tree_translation_dict = {
|
|
389
|
+
1: 0, # No trees
|
|
390
|
+
2: 0, # No trees
|
|
391
|
+
3: 0, # No trees
|
|
392
|
+
4: 10, # Forest - 10m height
|
|
393
|
+
5: 0, # No trees
|
|
394
|
+
6: 0, # No trees
|
|
395
|
+
7: 0, # No trees
|
|
396
|
+
8: 0, # No trees
|
|
397
|
+
9: 0, # No trees
|
|
398
|
+
10: 0 # No trees
|
|
399
|
+
}
|
|
400
|
+
|
|
401
|
+
# Convert land cover classes to tree heights and flip back
|
|
402
|
+
tree_height_grid = translate_array(np.flipud(land_cover_grid), tree_translation_dict).astype(int)
|
|
403
|
+
|
|
404
|
+
return tree_height_grid
|
|
405
|
+
|
|
406
|
+
def create_land_cover_grid_from_geotiff_polygon(tiff_path, mesh_size, land_cover_classes, polygon):
|
|
407
|
+
"""
|
|
408
|
+
Create a land cover grid from a GeoTIFF file within a polygon boundary.
|
|
409
|
+
|
|
410
|
+
Args:
|
|
411
|
+
tiff_path (str): Path to GeoTIFF file
|
|
412
|
+
mesh_size (float): Size of mesh cells
|
|
413
|
+
land_cover_classes (dict): Dictionary mapping land cover classes
|
|
414
|
+
polygon (list): List of polygon vertices
|
|
415
|
+
|
|
416
|
+
Returns:
|
|
417
|
+
numpy.ndarray: Grid of land cover classes within the polygon
|
|
418
|
+
"""
|
|
419
|
+
with rasterio.open(tiff_path) as src:
|
|
420
|
+
# Read RGB bands from GeoTIFF
|
|
421
|
+
img = src.read((1,2,3))
|
|
422
|
+
left, bottom, right, top = src.bounds
|
|
423
|
+
src_crs = src.crs
|
|
424
|
+
|
|
425
|
+
# Create a Shapely polygon from input coordinates
|
|
426
|
+
poly = Polygon(polygon)
|
|
427
|
+
|
|
428
|
+
# Get bounds of the polygon in WGS84 coordinates
|
|
429
|
+
left_wgs84, bottom_wgs84, right_wgs84, top_wgs84 = poly.bounds
|
|
430
|
+
# print(left, bottom, right, top)
|
|
431
|
+
|
|
432
|
+
# Calculate width and height using geodesic calculations for accuracy
|
|
433
|
+
geod = Geod(ellps="WGS84")
|
|
434
|
+
_, _, width = geod.inv(left_wgs84, bottom_wgs84, right_wgs84, bottom_wgs84)
|
|
435
|
+
_, _, height = geod.inv(left_wgs84, bottom_wgs84, left_wgs84, top_wgs84)
|
|
436
|
+
|
|
437
|
+
# Calculate number of grid cells based on mesh size
|
|
438
|
+
num_cells_x = int(width / mesh_size + 0.5)
|
|
439
|
+
num_cells_y = int(height / mesh_size + 0.5)
|
|
440
|
+
|
|
441
|
+
# Adjust mesh_size to fit the image exactly
|
|
442
|
+
adjusted_mesh_size_x = (right - left) / num_cells_x
|
|
443
|
+
adjusted_mesh_size_y = (top - bottom) / num_cells_y
|
|
444
|
+
|
|
445
|
+
# Create affine transform for mapping between pixel and world coordinates
|
|
446
|
+
new_affine = Affine(adjusted_mesh_size_x, 0, left, 0, -adjusted_mesh_size_y, top)
|
|
447
|
+
|
|
448
|
+
# Create coordinate grids for the new mesh
|
|
449
|
+
cols, rows = np.meshgrid(np.arange(num_cells_x), np.arange(num_cells_y))
|
|
450
|
+
xs, ys = new_affine * (cols, rows)
|
|
451
|
+
xs_flat, ys_flat = xs.flatten(), ys.flatten()
|
|
452
|
+
|
|
453
|
+
# Convert world coordinates to image pixel indices
|
|
454
|
+
row, col = src.index(xs_flat, ys_flat)
|
|
455
|
+
row, col = np.array(row), np.array(col)
|
|
456
|
+
|
|
457
|
+
# Filter out indices that fall outside the image bounds
|
|
458
|
+
valid = (row >= 0) & (row < src.height) & (col >= 0) & (col < src.width)
|
|
459
|
+
row, col = row[valid], col[valid]
|
|
460
|
+
|
|
461
|
+
# Initialize output grid with 'No Data' values
|
|
462
|
+
grid = np.full((num_cells_y, num_cells_x), 'No Data', dtype=object)
|
|
463
|
+
|
|
464
|
+
# Fill grid with dominant land cover classes
|
|
465
|
+
for i, (r, c) in enumerate(zip(row, col)):
|
|
466
|
+
cell_data = img[:, r, c]
|
|
467
|
+
dominant_class = get_dominant_class(cell_data, land_cover_classes)
|
|
468
|
+
grid_row, grid_col = np.unravel_index(i, (num_cells_y, num_cells_x))
|
|
469
|
+
grid[grid_row, grid_col] = dominant_class
|
|
470
|
+
|
|
471
|
+
# Flip grid vertically to match geographic orientation
|
|
472
|
+
return np.flipud(grid)
|
|
473
|
+
|
|
474
|
+
def create_land_cover_grid_from_gdf_polygon(gdf, meshsize, source, rectangle_vertices, default_class='Developed space'):
|
|
475
|
+
"""Create a grid of land cover classes from GeoDataFrame polygon data.
|
|
476
|
+
|
|
477
|
+
Args:
|
|
478
|
+
gdf (GeoDataFrame): GeoDataFrame containing land cover polygons
|
|
479
|
+
meshsize (float): Size of each grid cell in meters
|
|
480
|
+
source (str): Source of the land cover data to determine class priorities
|
|
481
|
+
rectangle_vertices (list): List of 4 (lon,lat) coordinate pairs defining the rectangle bounds
|
|
482
|
+
default_class (str, optional): Default land cover class for cells with no intersecting polygons.
|
|
483
|
+
Defaults to 'Developed space'.
|
|
484
|
+
|
|
485
|
+
Returns:
|
|
486
|
+
numpy.ndarray: 2D grid of land cover classes as strings
|
|
487
|
+
|
|
488
|
+
The function creates a regular grid over the given rectangle area and determines the dominant
|
|
489
|
+
land cover class for each cell based on polygon intersections. Classes are assigned based on
|
|
490
|
+
priority rules and majority area coverage.
|
|
491
|
+
"""
|
|
492
|
+
|
|
493
|
+
# Default priority mapping for land cover classes (lower number = higher priority)
|
|
494
|
+
class_priority = {
|
|
495
|
+
'Bareland': 4,
|
|
496
|
+
'Rangeland': 6,
|
|
497
|
+
'Developed space': 8,
|
|
498
|
+
'Road': 1, # Roads have highest priority
|
|
499
|
+
'Tree': 7,
|
|
500
|
+
'Water': 3,
|
|
501
|
+
'Agriculture land': 5,
|
|
502
|
+
'Building': 2 # Buildings have second highest priority
|
|
503
|
+
}
|
|
504
|
+
|
|
505
|
+
# Get source-specific priority mapping if available
|
|
506
|
+
class_priority = get_class_priority(source)
|
|
507
|
+
|
|
508
|
+
# Calculate grid dimensions and normalize direction vectors
|
|
509
|
+
geod = initialize_geod()
|
|
510
|
+
vertex_0, vertex_1, vertex_3 = rectangle_vertices[0], rectangle_vertices[1], rectangle_vertices[3]
|
|
511
|
+
|
|
512
|
+
# Calculate actual distances between vertices using geodesic calculations
|
|
513
|
+
dist_side_1 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_1[0], vertex_1[1])
|
|
514
|
+
dist_side_2 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_3[0], vertex_3[1])
|
|
515
|
+
|
|
516
|
+
# Create vectors representing the sides of the rectangle
|
|
517
|
+
side_1 = np.array(vertex_1) - np.array(vertex_0)
|
|
518
|
+
side_2 = np.array(vertex_3) - np.array(vertex_0)
|
|
519
|
+
|
|
520
|
+
# Normalize vectors to represent 1 meter in each direction
|
|
521
|
+
u_vec = normalize_to_one_meter(side_1, dist_side_1)
|
|
522
|
+
v_vec = normalize_to_one_meter(side_2, dist_side_2)
|
|
523
|
+
|
|
524
|
+
origin = np.array(rectangle_vertices[0])
|
|
525
|
+
grid_size, adjusted_meshsize = calculate_grid_size(side_1, side_2, u_vec, v_vec, meshsize)
|
|
526
|
+
|
|
527
|
+
print(f"Adjusted mesh size: {adjusted_meshsize}")
|
|
528
|
+
|
|
529
|
+
# Initialize grid with default land cover class
|
|
530
|
+
grid = np.full(grid_size, default_class, dtype=object)
|
|
531
|
+
|
|
532
|
+
# Calculate bounding box for spatial indexing
|
|
533
|
+
extent = [min(coord[1] for coord in rectangle_vertices), max(coord[1] for coord in rectangle_vertices),
|
|
534
|
+
min(coord[0] for coord in rectangle_vertices), max(coord[0] for coord in rectangle_vertices)]
|
|
535
|
+
plotting_box = box(extent[2], extent[0], extent[3], extent[1])
|
|
536
|
+
|
|
537
|
+
# Create spatial index for efficient polygon lookup
|
|
538
|
+
land_cover_polygons = []
|
|
539
|
+
idx = index.Index()
|
|
540
|
+
for i, row in gdf.iterrows():
|
|
541
|
+
polygon = row.geometry
|
|
542
|
+
land_cover_class = row['class']
|
|
543
|
+
land_cover_polygons.append((polygon, land_cover_class))
|
|
544
|
+
idx.insert(i, polygon.bounds)
|
|
545
|
+
|
|
546
|
+
# Iterate through each grid cell
|
|
547
|
+
for i in range(grid_size[0]):
|
|
548
|
+
for j in range(grid_size[1]):
|
|
549
|
+
land_cover_class = default_class
|
|
550
|
+
cell = create_cell_polygon(origin, i, j, adjusted_meshsize, u_vec, v_vec)
|
|
551
|
+
|
|
552
|
+
# Check intersections with polygons that could overlap this cell
|
|
553
|
+
for k in idx.intersection(cell.bounds):
|
|
554
|
+
polygon, land_cover_class_temp = land_cover_polygons[k]
|
|
555
|
+
try:
|
|
556
|
+
if cell.intersects(polygon):
|
|
557
|
+
intersection = cell.intersection(polygon)
|
|
558
|
+
# If polygon covers more than 50% of cell, consider its land cover class
|
|
559
|
+
if intersection.area > cell.area/2:
|
|
560
|
+
rank = class_priority[land_cover_class]
|
|
561
|
+
rank_temp = class_priority[land_cover_class_temp]
|
|
562
|
+
# Update cell class if new class has higher priority (lower rank)
|
|
563
|
+
if rank_temp < rank:
|
|
564
|
+
land_cover_class = land_cover_class_temp
|
|
565
|
+
grid[i, j] = land_cover_class
|
|
566
|
+
except GEOSException as e:
|
|
567
|
+
print(f"GEOS error at grid cell ({i}, {j}): {str(e)}")
|
|
568
|
+
# Attempt to fix invalid polygon geometry
|
|
569
|
+
try:
|
|
570
|
+
fixed_polygon = polygon.buffer(0)
|
|
571
|
+
if cell.intersects(fixed_polygon):
|
|
572
|
+
intersection = cell.intersection(fixed_polygon)
|
|
573
|
+
if intersection.area > cell.area/2:
|
|
574
|
+
rank = class_priority[land_cover_class]
|
|
575
|
+
rank_temp = class_priority[land_cover_class_temp]
|
|
576
|
+
if rank_temp < rank:
|
|
577
|
+
land_cover_class = land_cover_class_temp
|
|
578
|
+
grid[i, j] = land_cover_class
|
|
579
|
+
except Exception as fix_error:
|
|
580
|
+
print(f"Failed to fix polygon at grid cell ({i}, {j}): {str(fix_error)}")
|
|
581
|
+
continue
|
|
582
|
+
return grid
|
|
583
|
+
|
|
584
|
+
def create_height_grid_from_geotiff_polygon(tiff_path, mesh_size, polygon):
|
|
585
|
+
"""
|
|
586
|
+
Create a height grid from a GeoTIFF file within a polygon boundary.
|
|
587
|
+
|
|
588
|
+
Args:
|
|
589
|
+
tiff_path (str): Path to GeoTIFF file
|
|
590
|
+
mesh_size (float): Size of mesh cells
|
|
591
|
+
polygon (list): List of polygon vertices
|
|
592
|
+
|
|
593
|
+
Returns:
|
|
594
|
+
numpy.ndarray: Grid of heights within the polygon
|
|
595
|
+
"""
|
|
596
|
+
with rasterio.open(tiff_path) as src:
|
|
597
|
+
# Read height data
|
|
598
|
+
img = src.read(1)
|
|
599
|
+
left, bottom, right, top = src.bounds
|
|
600
|
+
src_crs = src.crs
|
|
601
|
+
|
|
602
|
+
# Create polygon from input coordinates
|
|
603
|
+
poly = Polygon(polygon)
|
|
604
|
+
|
|
605
|
+
# Get polygon bounds in WGS84
|
|
606
|
+
left_wgs84, bottom_wgs84, right_wgs84, top_wgs84 = poly.bounds
|
|
607
|
+
# print(left, bottom, right, top)
|
|
608
|
+
# print(left_wgs84, bottom_wgs84, right_wgs84, top_wgs84)
|
|
609
|
+
|
|
610
|
+
# Calculate actual distances using geodesic methods
|
|
611
|
+
geod = Geod(ellps="WGS84")
|
|
612
|
+
_, _, width = geod.inv(left_wgs84, bottom_wgs84, right_wgs84, bottom_wgs84)
|
|
613
|
+
_, _, height = geod.inv(left_wgs84, bottom_wgs84, left_wgs84, top_wgs84)
|
|
614
|
+
|
|
615
|
+
# Calculate grid dimensions and adjust mesh size
|
|
616
|
+
num_cells_x = int(width / mesh_size + 0.5)
|
|
617
|
+
num_cells_y = int(height / mesh_size + 0.5)
|
|
618
|
+
|
|
619
|
+
adjusted_mesh_size_x = (right - left) / num_cells_x
|
|
620
|
+
adjusted_mesh_size_y = (top - bottom) / num_cells_y
|
|
621
|
+
|
|
622
|
+
# Create affine transform for coordinate mapping
|
|
623
|
+
new_affine = Affine(adjusted_mesh_size_x, 0, left, 0, -adjusted_mesh_size_y, top)
|
|
624
|
+
|
|
625
|
+
# Generate coordinate grids
|
|
626
|
+
cols, rows = np.meshgrid(np.arange(num_cells_x), np.arange(num_cells_y))
|
|
627
|
+
xs, ys = new_affine * (cols, rows)
|
|
628
|
+
xs_flat, ys_flat = xs.flatten(), ys.flatten()
|
|
629
|
+
|
|
630
|
+
# Convert to image coordinates
|
|
631
|
+
row, col = src.index(xs_flat, ys_flat)
|
|
632
|
+
row, col = np.array(row), np.array(col)
|
|
633
|
+
|
|
634
|
+
# Filter valid indices
|
|
635
|
+
valid = (row >= 0) & (row < src.height) & (col >= 0) & (col < src.width)
|
|
636
|
+
row, col = row[valid], col[valid]
|
|
637
|
+
|
|
638
|
+
# Create output grid and fill with height values
|
|
639
|
+
grid = np.full((num_cells_y, num_cells_x), np.nan)
|
|
640
|
+
flat_indices = np.ravel_multi_index((row, col), img.shape)
|
|
641
|
+
np.put(grid, np.ravel_multi_index((rows.flatten()[valid], cols.flatten()[valid]), grid.shape), img.flat[flat_indices])
|
|
642
|
+
|
|
643
|
+
return np.flipud(grid)
|
|
644
|
+
|
|
645
|
+
def create_building_height_grid_from_gdf_polygon(
|
|
646
|
+
gdf,
|
|
647
|
+
meshsize,
|
|
648
|
+
rectangle_vertices,
|
|
649
|
+
overlapping_footprint=False,
|
|
650
|
+
gdf_comp=None,
|
|
651
|
+
geotiff_path_comp=None,
|
|
652
|
+
complement_building_footprints=None,
|
|
653
|
+
complement_height=None
|
|
654
|
+
):
|
|
655
|
+
"""
|
|
656
|
+
Create a building height grid from GeoDataFrame data within a polygon boundary.
|
|
657
|
+
|
|
658
|
+
Args:
|
|
659
|
+
gdf (geopandas.GeoDataFrame): GeoDataFrame containing building information
|
|
660
|
+
meshsize (float): Size of mesh cells
|
|
661
|
+
rectangle_vertices (list): List of rectangle vertices defining the boundary
|
|
662
|
+
overlapping_footprint (bool): If True, use precise geometry-based processing for overlaps.
|
|
663
|
+
If False, use faster rasterio-based approach.
|
|
664
|
+
gdf_comp (geopandas.GeoDataFrame, optional): Complementary GeoDataFrame
|
|
665
|
+
geotiff_path_comp (str, optional): Path to complementary GeoTIFF file
|
|
666
|
+
complement_building_footprints (bool, optional): Whether to complement footprints
|
|
667
|
+
complement_height (float, optional): Height value to use for buildings with height=0
|
|
668
|
+
|
|
669
|
+
Returns:
|
|
670
|
+
tuple: (building_height_grid, building_min_height_grid, building_id_grid, filtered_buildings)
|
|
671
|
+
- building_height_grid (numpy.ndarray): Grid of building heights
|
|
672
|
+
- building_min_height_grid (numpy.ndarray): Grid of min building heights (list per cell)
|
|
673
|
+
- building_id_grid (numpy.ndarray): Grid of building IDs
|
|
674
|
+
- filtered_buildings (geopandas.GeoDataFrame): The buildings used (filtered_gdf)
|
|
675
|
+
"""
|
|
676
|
+
# --------------------------------------------------------------------------
|
|
677
|
+
# 1) COMMON INITIAL SETUP AND DATA FILTERING
|
|
678
|
+
# --------------------------------------------------------------------------
|
|
679
|
+
geod = initialize_geod()
|
|
680
|
+
vertex_0, vertex_1, vertex_3 = rectangle_vertices[0], rectangle_vertices[1], rectangle_vertices[3]
|
|
681
|
+
|
|
682
|
+
# Distances for each side
|
|
683
|
+
dist_side_1 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_1[0], vertex_1[1])
|
|
684
|
+
dist_side_2 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_3[0], vertex_3[1])
|
|
685
|
+
|
|
686
|
+
# Normalized vectors
|
|
687
|
+
side_1 = np.array(vertex_1) - np.array(vertex_0)
|
|
688
|
+
side_2 = np.array(vertex_3) - np.array(vertex_0)
|
|
689
|
+
u_vec = normalize_to_one_meter(side_1, dist_side_1)
|
|
690
|
+
v_vec = normalize_to_one_meter(side_2, dist_side_2)
|
|
691
|
+
|
|
692
|
+
# Grid parameters
|
|
693
|
+
origin = np.array(rectangle_vertices[0])
|
|
694
|
+
grid_size, adjusted_meshsize = calculate_grid_size(side_1, side_2, u_vec, v_vec, meshsize)
|
|
695
|
+
|
|
696
|
+
# Filter the input GeoDataFrame by bounding box
|
|
697
|
+
extent = [
|
|
698
|
+
min(coord[1] for coord in rectangle_vertices),
|
|
699
|
+
max(coord[1] for coord in rectangle_vertices),
|
|
700
|
+
min(coord[0] for coord in rectangle_vertices),
|
|
701
|
+
max(coord[0] for coord in rectangle_vertices)
|
|
702
|
+
]
|
|
703
|
+
plotting_box = box(extent[2], extent[0], extent[3], extent[1])
|
|
704
|
+
filtered_gdf = gdf[gdf.geometry.intersects(plotting_box)].copy()
|
|
705
|
+
|
|
706
|
+
# Count buildings with height=0 or NaN
|
|
707
|
+
zero_height_count = len(filtered_gdf[filtered_gdf['height'] == 0])
|
|
708
|
+
nan_height_count = len(filtered_gdf[filtered_gdf['height'].isna()])
|
|
709
|
+
print(f"{zero_height_count+nan_height_count} of the total {len(filtered_gdf)} building footprint from the base data source did not have height data.")
|
|
710
|
+
|
|
711
|
+
# Optionally merge heights from complementary sources
|
|
712
|
+
if gdf_comp is not None:
|
|
713
|
+
filtered_gdf_comp = gdf_comp[gdf_comp.geometry.intersects(plotting_box)].copy()
|
|
714
|
+
if complement_building_footprints:
|
|
715
|
+
filtered_gdf = complement_building_heights_from_gdf(filtered_gdf, filtered_gdf_comp)
|
|
716
|
+
else:
|
|
717
|
+
filtered_gdf = extract_building_heights_from_gdf(filtered_gdf, filtered_gdf_comp)
|
|
718
|
+
elif geotiff_path_comp:
|
|
719
|
+
filtered_gdf = extract_building_heights_from_geotiff(geotiff_path_comp, filtered_gdf)
|
|
720
|
+
|
|
721
|
+
# After filtering and complementing heights, process overlapping buildings
|
|
722
|
+
filtered_gdf = process_building_footprints_by_overlap(filtered_gdf, overlap_threshold=0.5)
|
|
723
|
+
|
|
724
|
+
# --------------------------------------------------------------------------
|
|
725
|
+
# 2) BRANCH BASED ON OVERLAPPING_FOOTPRINT PARAMETER
|
|
726
|
+
# --------------------------------------------------------------------------
|
|
727
|
+
|
|
728
|
+
if overlapping_footprint:
|
|
729
|
+
# Use precise geometry-based approach for better overlap handling
|
|
730
|
+
return _process_with_geometry_intersection(
|
|
731
|
+
filtered_gdf, grid_size, adjusted_meshsize, origin, u_vec, v_vec, complement_height
|
|
732
|
+
)
|
|
733
|
+
else:
|
|
734
|
+
# Use faster rasterio-based approach
|
|
735
|
+
return _process_with_rasterio(
|
|
736
|
+
filtered_gdf, grid_size, adjusted_meshsize, origin, u_vec, v_vec,
|
|
737
|
+
rectangle_vertices, complement_height
|
|
738
|
+
)
|
|
739
|
+
|
|
740
|
+
|
|
741
|
+
def _process_with_geometry_intersection(filtered_gdf, grid_size, adjusted_meshsize, origin, u_vec, v_vec, complement_height):
|
|
742
|
+
"""
|
|
743
|
+
Process buildings using precise geometry intersection approach.
|
|
744
|
+
Better for handling overlapping footprints but slower.
|
|
745
|
+
"""
|
|
746
|
+
# Initialize output grids
|
|
747
|
+
building_height_grid = np.zeros(grid_size)
|
|
748
|
+
building_id_grid = np.zeros(grid_size)
|
|
749
|
+
|
|
750
|
+
# Use a Python list-of-lists or object array for min_height tracking
|
|
751
|
+
building_min_height_grid = np.empty(grid_size, dtype=object)
|
|
752
|
+
for i in range(grid_size[0]):
|
|
753
|
+
for j in range(grid_size[1]):
|
|
754
|
+
building_min_height_grid[i, j] = []
|
|
755
|
+
|
|
756
|
+
# --------------------------------------------------------------------------
|
|
757
|
+
# PREPARE BUILDING POLYGONS & SPATIAL INDEX
|
|
758
|
+
# --------------------------------------------------------------------------
|
|
759
|
+
building_polygons = []
|
|
760
|
+
for idx_b, row in filtered_gdf.iterrows():
|
|
761
|
+
polygon = row.geometry
|
|
762
|
+
height = row.get('height', None)
|
|
763
|
+
|
|
764
|
+
# Replace height=0 with complement_height if specified
|
|
765
|
+
if complement_height is not None and (height == 0 or height is None):
|
|
766
|
+
height = complement_height
|
|
767
|
+
|
|
768
|
+
min_height = row.get('min_height', 0)
|
|
769
|
+
if pd.isna(min_height):
|
|
770
|
+
min_height = 0
|
|
771
|
+
|
|
772
|
+
is_inner = row.get('is_inner', False)
|
|
773
|
+
feature_id = row.get('id', idx_b)
|
|
774
|
+
|
|
775
|
+
# Fix invalid geometry
|
|
776
|
+
if not polygon.is_valid:
|
|
777
|
+
try:
|
|
778
|
+
polygon = polygon.buffer(0)
|
|
779
|
+
if not polygon.is_valid:
|
|
780
|
+
polygon = polygon.simplify(1e-8)
|
|
781
|
+
except Exception as e:
|
|
782
|
+
pass
|
|
783
|
+
|
|
784
|
+
bounding_box = polygon.bounds # (minx, miny, maxx, maxy)
|
|
785
|
+
building_polygons.append((
|
|
786
|
+
polygon, bounding_box, height, min_height, is_inner, feature_id
|
|
787
|
+
))
|
|
788
|
+
|
|
789
|
+
# Build R-tree index using bounding boxes
|
|
790
|
+
idx = index.Index()
|
|
791
|
+
for i_b, (poly, bbox, _, _, _, _) in enumerate(building_polygons):
|
|
792
|
+
idx.insert(i_b, bbox)
|
|
793
|
+
|
|
794
|
+
# --------------------------------------------------------------------------
|
|
795
|
+
# MAIN GRID LOOP WITH PRECISE INTERSECTION
|
|
796
|
+
# --------------------------------------------------------------------------
|
|
797
|
+
INTERSECTION_THRESHOLD = 0.3
|
|
798
|
+
|
|
799
|
+
for i in range(grid_size[0]):
|
|
800
|
+
for j in range(grid_size[1]):
|
|
801
|
+
# Create the cell polygon once
|
|
802
|
+
cell = create_cell_polygon(origin, i, j, adjusted_meshsize, u_vec, v_vec)
|
|
803
|
+
if not cell.is_valid:
|
|
804
|
+
cell = cell.buffer(0)
|
|
805
|
+
cell_area = cell.area
|
|
806
|
+
|
|
807
|
+
# Find possible intersections from the index
|
|
808
|
+
potential = list(idx.intersection(cell.bounds))
|
|
809
|
+
if not potential:
|
|
810
|
+
continue
|
|
811
|
+
|
|
812
|
+
# Sort buildings by height descending
|
|
813
|
+
cell_buildings = []
|
|
814
|
+
for k in potential:
|
|
815
|
+
bpoly, bbox, height, minh, inr, fid = building_polygons[k]
|
|
816
|
+
sort_val = height if (height is not None) else -float('inf')
|
|
817
|
+
cell_buildings.append((k, bpoly, bbox, height, minh, inr, fid, sort_val))
|
|
818
|
+
cell_buildings.sort(key=lambda x: x[-1], reverse=True)
|
|
819
|
+
|
|
820
|
+
found_intersection = False
|
|
821
|
+
all_zero_or_nan = True
|
|
822
|
+
|
|
823
|
+
for (k, polygon, bbox, height, min_height, is_inner, feature_id, _) in cell_buildings:
|
|
824
|
+
try:
|
|
825
|
+
# Quick bounding-box check
|
|
826
|
+
minx_p, miny_p, maxx_p, maxy_p = bbox
|
|
827
|
+
minx_c, miny_c, maxx_c, maxy_c = cell.bounds
|
|
828
|
+
|
|
829
|
+
# Overlap bounding box
|
|
830
|
+
overlap_minx = max(minx_p, minx_c)
|
|
831
|
+
overlap_miny = max(miny_p, miny_c)
|
|
832
|
+
overlap_maxx = min(maxx_p, maxx_c)
|
|
833
|
+
overlap_maxy = min(maxy_p, maxy_c)
|
|
834
|
+
|
|
835
|
+
if (overlap_maxx <= overlap_minx) or (overlap_maxy <= overlap_miny):
|
|
836
|
+
continue
|
|
837
|
+
|
|
838
|
+
# Area of bounding-box intersection
|
|
839
|
+
bbox_intersect_area = (overlap_maxx - overlap_minx) * (overlap_maxy - overlap_miny)
|
|
840
|
+
if bbox_intersect_area < INTERSECTION_THRESHOLD * cell_area:
|
|
841
|
+
continue
|
|
842
|
+
|
|
843
|
+
# Ensure valid geometry
|
|
844
|
+
if not polygon.is_valid:
|
|
845
|
+
polygon = polygon.buffer(0)
|
|
846
|
+
|
|
847
|
+
if cell.intersects(polygon):
|
|
848
|
+
intersection = cell.intersection(polygon)
|
|
849
|
+
inter_area = intersection.area
|
|
850
|
+
|
|
851
|
+
# If the fraction of cell covered > threshold
|
|
852
|
+
if (inter_area / cell_area) > INTERSECTION_THRESHOLD:
|
|
853
|
+
found_intersection = True
|
|
854
|
+
|
|
855
|
+
# If not an inner courtyard
|
|
856
|
+
if not is_inner:
|
|
857
|
+
building_min_height_grid[i, j].append([min_height, height])
|
|
858
|
+
building_id_grid[i, j] = feature_id
|
|
859
|
+
|
|
860
|
+
# Update building height if valid
|
|
861
|
+
if (height is not None and not np.isnan(height) and height > 0):
|
|
862
|
+
all_zero_or_nan = False
|
|
863
|
+
current_height = building_height_grid[i, j]
|
|
864
|
+
|
|
865
|
+
# Replace if we had 0, nan, or smaller height
|
|
866
|
+
if (current_height == 0 or np.isnan(current_height) or current_height < height):
|
|
867
|
+
building_height_grid[i, j] = height
|
|
868
|
+
else:
|
|
869
|
+
# Inner courtyards => override with 0
|
|
870
|
+
building_min_height_grid[i, j] = [[0, 0]]
|
|
871
|
+
building_height_grid[i, j] = 0
|
|
872
|
+
found_intersection = True
|
|
873
|
+
all_zero_or_nan = False
|
|
874
|
+
break
|
|
875
|
+
|
|
876
|
+
except (GEOSException, ValueError) as e:
|
|
877
|
+
# Attempt fallback fix
|
|
878
|
+
try:
|
|
879
|
+
simplified_polygon = polygon.simplify(1e-8)
|
|
880
|
+
if simplified_polygon.is_valid:
|
|
881
|
+
intersection = cell.intersection(simplified_polygon)
|
|
882
|
+
inter_area = intersection.area
|
|
883
|
+
if (inter_area / cell_area) > INTERSECTION_THRESHOLD:
|
|
884
|
+
found_intersection = True
|
|
885
|
+
if not is_inner:
|
|
886
|
+
building_min_height_grid[i, j].append([min_height, height])
|
|
887
|
+
building_id_grid[i, j] = feature_id
|
|
888
|
+
if (height is not None and not np.isnan(height) and height > 0):
|
|
889
|
+
all_zero_or_nan = False
|
|
890
|
+
if (building_height_grid[i, j] == 0 or
|
|
891
|
+
np.isnan(building_height_grid[i, j]) or
|
|
892
|
+
building_height_grid[i, j] < height):
|
|
893
|
+
building_height_grid[i, j] = height
|
|
894
|
+
else:
|
|
895
|
+
building_min_height_grid[i, j] = [[0, 0]]
|
|
896
|
+
building_height_grid[i, j] = 0
|
|
897
|
+
found_intersection = True
|
|
898
|
+
all_zero_or_nan = False
|
|
899
|
+
break
|
|
900
|
+
except Exception as fix_error:
|
|
901
|
+
print(f"Failed to process cell ({i}, {j}) - Building {k}: {str(fix_error)}")
|
|
902
|
+
continue
|
|
903
|
+
|
|
904
|
+
# If we found intersecting buildings but all were zero/NaN, mark as NaN
|
|
905
|
+
if found_intersection and all_zero_or_nan:
|
|
906
|
+
building_height_grid[i, j] = np.nan
|
|
907
|
+
|
|
908
|
+
return building_height_grid, building_min_height_grid, building_id_grid, filtered_gdf
|
|
909
|
+
|
|
910
|
+
|
|
911
|
+
def _process_with_rasterio(filtered_gdf, grid_size, adjusted_meshsize, origin, u_vec, v_vec, rectangle_vertices, complement_height):
|
|
912
|
+
"""
|
|
913
|
+
Process buildings using fast rasterio-based approach.
|
|
914
|
+
Faster but less precise for overlapping footprints.
|
|
915
|
+
"""
|
|
916
|
+
# Set up transform for rasterio using rotated basis defined by u_vec and v_vec
|
|
917
|
+
# Step vectors in coordinate units (degrees) per cell
|
|
918
|
+
u_step = adjusted_meshsize[0] * u_vec
|
|
919
|
+
v_step = adjusted_meshsize[1] * v_vec
|
|
920
|
+
|
|
921
|
+
# Define the top-left corner so that row=0 is the northern edge
|
|
922
|
+
top_left = origin + grid_size[1] * v_step
|
|
923
|
+
|
|
924
|
+
# Affine transform mapping (col, row) -> (x, y)
|
|
925
|
+
# x = a*col + b*row + c ; y = d*col + e*row + f
|
|
926
|
+
# col increases along u_step; row increases southward, hence -v_step
|
|
927
|
+
transform = Affine(u_step[0], -v_step[0], top_left[0],
|
|
928
|
+
u_step[1], -v_step[1], top_left[1])
|
|
929
|
+
|
|
930
|
+
# Process buildings data
|
|
931
|
+
filtered_gdf = filtered_gdf.copy()
|
|
932
|
+
if complement_height is not None:
|
|
933
|
+
mask = (filtered_gdf['height'] == 0) | (filtered_gdf['height'].isna())
|
|
934
|
+
filtered_gdf.loc[mask, 'height'] = complement_height
|
|
935
|
+
|
|
936
|
+
# Add missing columns with defaults
|
|
937
|
+
filtered_gdf['min_height'] = 0
|
|
938
|
+
|
|
939
|
+
if 'is_inner' not in filtered_gdf.columns:
|
|
940
|
+
filtered_gdf['is_inner'] = False
|
|
941
|
+
else:
|
|
942
|
+
# Ensure boolean dtype with NaN treated as False for safe boolean operations
|
|
943
|
+
try:
|
|
944
|
+
filtered_gdf['is_inner'] = filtered_gdf['is_inner'].fillna(False).astype(bool)
|
|
945
|
+
except Exception:
|
|
946
|
+
filtered_gdf['is_inner'] = False
|
|
947
|
+
|
|
948
|
+
if 'id' not in filtered_gdf.columns:
|
|
949
|
+
filtered_gdf['id'] = range(len(filtered_gdf))
|
|
950
|
+
|
|
951
|
+
# Sort by height for proper layering
|
|
952
|
+
regular_buildings = filtered_gdf[~filtered_gdf['is_inner']].copy()
|
|
953
|
+
regular_buildings = regular_buildings.sort_values('height', ascending=True, na_position='first')
|
|
954
|
+
|
|
955
|
+
# Temporary raster grids in rasterio's (rows=ny, cols=nx) order
|
|
956
|
+
height_raster = np.zeros((grid_size[1], grid_size[0]), dtype=np.float64)
|
|
957
|
+
id_raster = np.zeros((grid_size[1], grid_size[0]), dtype=np.float64)
|
|
958
|
+
|
|
959
|
+
# Vectorized rasterization
|
|
960
|
+
if len(regular_buildings) > 0:
|
|
961
|
+
valid_buildings = regular_buildings[regular_buildings.geometry.is_valid].copy()
|
|
962
|
+
|
|
963
|
+
if len(valid_buildings) > 0:
|
|
964
|
+
# Height grid
|
|
965
|
+
height_shapes = [(mapping(geom), height) for geom, height in
|
|
966
|
+
zip(valid_buildings.geometry, valid_buildings['height'])
|
|
967
|
+
if pd.notna(height) and height > 0]
|
|
968
|
+
|
|
969
|
+
if height_shapes:
|
|
970
|
+
height_raster = features.rasterize(
|
|
971
|
+
height_shapes,
|
|
972
|
+
out_shape=(grid_size[1], grid_size[0]),
|
|
973
|
+
transform=transform,
|
|
974
|
+
fill=0,
|
|
975
|
+
dtype=np.float64
|
|
976
|
+
)
|
|
977
|
+
|
|
978
|
+
# ID grid
|
|
979
|
+
id_shapes = [(mapping(geom), id_val) for geom, id_val in
|
|
980
|
+
zip(valid_buildings.geometry, valid_buildings['id'])]
|
|
981
|
+
|
|
982
|
+
if id_shapes:
|
|
983
|
+
id_raster = features.rasterize(
|
|
984
|
+
id_shapes,
|
|
985
|
+
out_shape=(grid_size[1], grid_size[0]),
|
|
986
|
+
transform=transform,
|
|
987
|
+
fill=0,
|
|
988
|
+
dtype=np.float64
|
|
989
|
+
)
|
|
990
|
+
|
|
991
|
+
# Handle inner courtyards
|
|
992
|
+
inner_buildings = filtered_gdf[filtered_gdf['is_inner']].copy()
|
|
993
|
+
if len(inner_buildings) > 0:
|
|
994
|
+
inner_shapes = [(mapping(geom), 1) for geom in inner_buildings.geometry if geom.is_valid]
|
|
995
|
+
if inner_shapes:
|
|
996
|
+
inner_mask = features.rasterize(
|
|
997
|
+
inner_shapes,
|
|
998
|
+
out_shape=(grid_size[1], grid_size[0]),
|
|
999
|
+
transform=transform,
|
|
1000
|
+
fill=0,
|
|
1001
|
+
dtype=np.uint8
|
|
1002
|
+
)
|
|
1003
|
+
height_raster[inner_mask > 0] = 0
|
|
1004
|
+
id_raster[inner_mask > 0] = 0
|
|
1005
|
+
|
|
1006
|
+
# Simplified min_height grid
|
|
1007
|
+
building_min_height_grid = np.empty(grid_size, dtype=object)
|
|
1008
|
+
min_heights_raster = np.zeros((grid_size[1], grid_size[0]), dtype=np.float64)
|
|
1009
|
+
|
|
1010
|
+
if len(regular_buildings) > 0:
|
|
1011
|
+
valid_buildings = regular_buildings[regular_buildings.geometry.is_valid].copy()
|
|
1012
|
+
if len(valid_buildings) > 0:
|
|
1013
|
+
min_height_shapes = [(mapping(geom), min_h) for geom, min_h in
|
|
1014
|
+
zip(valid_buildings.geometry, valid_buildings['min_height'])
|
|
1015
|
+
if pd.notna(min_h)]
|
|
1016
|
+
|
|
1017
|
+
if min_height_shapes:
|
|
1018
|
+
min_heights_raster = features.rasterize(
|
|
1019
|
+
min_height_shapes,
|
|
1020
|
+
out_shape=(grid_size[1], grid_size[0]),
|
|
1021
|
+
transform=transform,
|
|
1022
|
+
fill=0,
|
|
1023
|
+
dtype=np.float64
|
|
1024
|
+
)
|
|
1025
|
+
|
|
1026
|
+
# Convert to list format (simplified)
|
|
1027
|
+
# Convert raster (ny, nx) to internal orientation (nx, ny) with north-up
|
|
1028
|
+
building_height_grid = np.flipud(height_raster).T
|
|
1029
|
+
building_id_grid = np.flipud(id_raster).T
|
|
1030
|
+
min_heights = np.flipud(min_heights_raster).T
|
|
1031
|
+
|
|
1032
|
+
for i in range(grid_size[0]):
|
|
1033
|
+
for j in range(grid_size[1]):
|
|
1034
|
+
if building_height_grid[i, j] > 0:
|
|
1035
|
+
building_min_height_grid[i, j] = [[min_heights[i, j], building_height_grid[i, j]]]
|
|
1036
|
+
else:
|
|
1037
|
+
building_min_height_grid[i, j] = []
|
|
1038
|
+
|
|
1039
|
+
return building_height_grid, building_min_height_grid, building_id_grid, filtered_gdf
|
|
1040
|
+
|
|
1041
|
+
def create_building_height_grid_from_open_building_temporal_polygon(meshsize, rectangle_vertices, output_dir):
|
|
1042
|
+
"""
|
|
1043
|
+
Create a building height grid from OpenBuildings temporal data within a polygon.
|
|
1044
|
+
|
|
1045
|
+
Args:
|
|
1046
|
+
meshsize (float): Size of mesh cells
|
|
1047
|
+
rectangle_vertices (list): List of rectangle vertices defining the boundary
|
|
1048
|
+
output_dir (str): Directory to save intermediate GeoTIFF files
|
|
1049
|
+
|
|
1050
|
+
Returns:
|
|
1051
|
+
tuple: (building_height_grid, building_min_height_grid, building_id_grid, filtered_buildings)
|
|
1052
|
+
"""
|
|
1053
|
+
# Get region of interest from vertices
|
|
1054
|
+
roi = get_roi(rectangle_vertices)
|
|
1055
|
+
|
|
1056
|
+
# Create output directory and save intermediate GeoTIFF
|
|
1057
|
+
os.makedirs(output_dir, exist_ok=True)
|
|
1058
|
+
geotiff_path = os.path.join(output_dir, "building_height.tif")
|
|
1059
|
+
save_geotiff_open_buildings_temporal(roi, geotiff_path)
|
|
1060
|
+
|
|
1061
|
+
# Create height grid from GeoTIFF
|
|
1062
|
+
building_height_grid = create_height_grid_from_geotiff_polygon(geotiff_path, meshsize, rectangle_vertices)
|
|
1063
|
+
|
|
1064
|
+
# Initialize min height grid with appropriate height ranges
|
|
1065
|
+
building_min_height_grid = np.empty(building_height_grid.shape, dtype=object)
|
|
1066
|
+
for i in range(building_height_grid.shape[0]):
|
|
1067
|
+
for j in range(building_height_grid.shape[1]):
|
|
1068
|
+
if building_height_grid[i, j] <= 0:
|
|
1069
|
+
building_min_height_grid[i, j] = []
|
|
1070
|
+
else:
|
|
1071
|
+
building_min_height_grid[i, j] = [[0, building_height_grid[i, j]]]
|
|
1072
|
+
|
|
1073
|
+
# Create building ID grid with sequential numbering for non-zero heights
|
|
1074
|
+
filtered_buildings = gpd.GeoDataFrame()
|
|
1075
|
+
building_id_grid = np.zeros_like(building_height_grid, dtype=int)
|
|
1076
|
+
non_zero_positions = np.nonzero(building_height_grid)
|
|
1077
|
+
sequence = np.arange(1, len(non_zero_positions[0]) + 1)
|
|
1078
|
+
building_id_grid[non_zero_positions] = sequence
|
|
1079
|
+
|
|
1080
|
+
return building_height_grid, building_min_height_grid, building_id_grid, filtered_buildings
|
|
1081
|
+
|
|
1082
|
+
def create_dem_grid_from_geotiff_polygon(tiff_path, mesh_size, rectangle_vertices, dem_interpolation=False):
|
|
1083
|
+
"""
|
|
1084
|
+
Create a Digital Elevation Model (DEM) grid from a GeoTIFF file within a polygon boundary.
|
|
1085
|
+
|
|
1086
|
+
Args:
|
|
1087
|
+
tiff_path (str): Path to GeoTIFF file
|
|
1088
|
+
mesh_size (float): Size of mesh cells
|
|
1089
|
+
rectangle_vertices (list): List of rectangle vertices defining the boundary
|
|
1090
|
+
dem_interpolation (bool): Whether to use cubic interpolation for smoother results
|
|
1091
|
+
|
|
1092
|
+
Returns:
|
|
1093
|
+
numpy.ndarray: Grid of elevation values
|
|
1094
|
+
"""
|
|
1095
|
+
# Convert vertex coordinates to lat/lon format
|
|
1096
|
+
converted_coords = convert_format_lat_lon(rectangle_vertices)
|
|
1097
|
+
roi_shapely = Polygon(converted_coords)
|
|
1098
|
+
|
|
1099
|
+
with rasterio.open(tiff_path) as src:
|
|
1100
|
+
# Read DEM data and handle no-data values
|
|
1101
|
+
dem = src.read(1)
|
|
1102
|
+
dem = np.where(dem < -1000, 0, dem) # Replace extreme negative values with 0
|
|
1103
|
+
transform = src.transform
|
|
1104
|
+
src_crs = src.crs
|
|
1105
|
+
|
|
1106
|
+
# Handle coordinate system conversion
|
|
1107
|
+
if src_crs.to_epsg() != 3857:
|
|
1108
|
+
transformer_to_3857 = Transformer.from_crs(src_crs, CRS.from_epsg(3857), always_xy=True)
|
|
1109
|
+
else:
|
|
1110
|
+
transformer_to_3857 = lambda x, y: (x, y)
|
|
1111
|
+
|
|
1112
|
+
# Transform ROI bounds to EPSG:3857 (Web Mercator)
|
|
1113
|
+
roi_bounds = roi_shapely.bounds
|
|
1114
|
+
roi_left, roi_bottom = transformer_to_3857.transform(roi_bounds[0], roi_bounds[1])
|
|
1115
|
+
roi_right, roi_top = transformer_to_3857.transform(roi_bounds[2], roi_bounds[3])
|
|
1116
|
+
|
|
1117
|
+
# Convert to WGS84 for accurate distance calculations
|
|
1118
|
+
wgs84 = CRS.from_epsg(4326)
|
|
1119
|
+
transformer_to_wgs84 = Transformer.from_crs(CRS.from_epsg(3857), wgs84, always_xy=True)
|
|
1120
|
+
roi_left_wgs84, roi_bottom_wgs84 = transformer_to_wgs84.transform(roi_left, roi_bottom)
|
|
1121
|
+
roi_right_wgs84, roi_top_wgs84 = transformer_to_wgs84.transform(roi_right, roi_top)
|
|
1122
|
+
|
|
1123
|
+
# Calculate actual distances using geodesic methods
|
|
1124
|
+
geod = Geod(ellps="WGS84")
|
|
1125
|
+
_, _, roi_width_m = geod.inv(roi_left_wgs84, roi_bottom_wgs84, roi_right_wgs84, roi_bottom_wgs84)
|
|
1126
|
+
_, _, roi_height_m = geod.inv(roi_left_wgs84, roi_bottom_wgs84, roi_left_wgs84, roi_top_wgs84)
|
|
1127
|
+
|
|
1128
|
+
# Calculate grid dimensions
|
|
1129
|
+
num_cells_x = int(roi_width_m / mesh_size + 0.5)
|
|
1130
|
+
num_cells_y = int(roi_height_m / mesh_size + 0.5)
|
|
1131
|
+
|
|
1132
|
+
# Create coordinate grid in EPSG:3857
|
|
1133
|
+
x = np.linspace(roi_left, roi_right, num_cells_x, endpoint=False)
|
|
1134
|
+
y = np.linspace(roi_top, roi_bottom, num_cells_y, endpoint=False)
|
|
1135
|
+
xx, yy = np.meshgrid(x, y)
|
|
1136
|
+
|
|
1137
|
+
# Transform original DEM coordinates to EPSG:3857
|
|
1138
|
+
rows, cols = np.meshgrid(range(dem.shape[0]), range(dem.shape[1]), indexing='ij')
|
|
1139
|
+
orig_x, orig_y = rasterio.transform.xy(transform, rows.ravel(), cols.ravel())
|
|
1140
|
+
orig_x, orig_y = transformer_to_3857.transform(orig_x, orig_y)
|
|
1141
|
+
|
|
1142
|
+
# Interpolate DEM values onto new grid
|
|
1143
|
+
points = np.column_stack((orig_x, orig_y))
|
|
1144
|
+
values = dem.ravel()
|
|
1145
|
+
if dem_interpolation:
|
|
1146
|
+
# Use cubic interpolation for smoother results
|
|
1147
|
+
grid = griddata(points, values, (xx, yy), method='cubic')
|
|
1148
|
+
else:
|
|
1149
|
+
# Use nearest neighbor interpolation for raw data
|
|
1150
|
+
grid = griddata(points, values, (xx, yy), method='nearest')
|
|
1151
|
+
|
|
1152
|
+
return np.flipud(grid)
|
|
1153
|
+
|
|
1154
|
+
def grid_to_geodataframe(grid_ori, rectangle_vertices, meshsize):
    """
    Converts a 2D grid to a GeoDataFrame with cell polygons and values.

    This function transforms a regular grid into a GeoDataFrame where each cell is
    represented as a polygon. The transformation handles coordinate systems properly,
    converting between WGS84 (EPSG:4326) and Web Mercator (EPSG:3857) for accurate
    distance calculations.

    Args:
        grid_ori (numpy.ndarray): 2D array containing grid values
        rectangle_vertices (list): List of [lon, lat] coordinates defining area corners.
            Should be in WGS84 (EPSG:4326) format.
        meshsize (float): Size of each grid cell in meters

    Returns:
        GeoDataFrame: A GeoDataFrame with columns:
            - geometry: Polygon geometry of each grid cell in WGS84 (EPSG:4326)
            - value: Value from the original grid

    Example:
        >>> grid = np.array([[1, 2], [3, 4]])
        >>> vertices = [[lon1, lat1], [lon2, lat2], [lon3, lat3], [lon4, lat4]]
        >>> mesh_size = 100  # 100 meters
        >>> gdf = grid_to_geodataframe(grid, vertices, mesh_size)

    Notes:
        - The input grid is flipped vertically before processing to match geographic
          orientation (north at top)
        - The output GeoDataFrame uses WGS84 (EPSG:4326) coordinate system
    """
    grid = np.flipud(grid_ori.copy())

    # Extract bounds from rectangle vertices
    min_lon = min(v[0] for v in rectangle_vertices)
    max_lon = max(v[0] for v in rectangle_vertices)
    min_lat = min(v[1] for v in rectangle_vertices)
    max_lat = max(v[1] for v in rectangle_vertices)

    rows, cols = grid.shape

    # Set up transformers for accurate coordinate calculations
    wgs84 = CRS.from_epsg(4326)
    web_mercator = CRS.from_epsg(3857)
    transformer_to_mercator = Transformer.from_crs(wgs84, web_mercator, always_xy=True)
    transformer_to_wgs84 = Transformer.from_crs(web_mercator, wgs84, always_xy=True)

    # Convert bounds to Web Mercator for accurate distance calculations
    min_x, min_y = transformer_to_mercator.transform(min_lon, min_lat)
    max_x, max_y = transformer_to_mercator.transform(max_lon, max_lat)

    # Calculate cell sizes in Web Mercator coordinates
    cell_size_x = (max_x - min_x) / cols
    cell_size_y = (max_y - min_y) / rows

    # Create lists to store data
    polygons = []
    values = []

    # Create grid cells
    for i in range(rows):
        for j in range(cols):
            # Calculate cell bounds in Web Mercator
            cell_min_x = min_x + j * cell_size_x
            cell_max_x = min_x + (j + 1) * cell_size_x
            # Flip vertical axis since grid is stored with origin at top-left
            cell_min_y = max_y - (i + 1) * cell_size_y
            cell_max_y = max_y - i * cell_size_y

            # Convert cell corners back to WGS84
            cell_min_lon, cell_min_lat = transformer_to_wgs84.transform(cell_min_x, cell_min_y)
            cell_max_lon, cell_max_lat = transformer_to_wgs84.transform(cell_max_x, cell_max_y)

            # Create polygon for cell
            cell_poly = box(cell_min_lon, cell_min_lat, cell_max_lon, cell_max_lat)

            polygons.append(cell_poly)
            values.append(grid[i, j])

    # Create GeoDataFrame
    gdf = gpd.GeoDataFrame({
        'geometry': polygons,
        'value': values
    }, crs=CRS.from_epsg(4326))

    return gdf

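# --- Illustrative usage sketch (editor's note, not part of the module) -------
# How the polygon-cell export above might be used end to end; the rectangle
# coordinates below are hypothetical placeholders, and the output filename is
# arbitrary.
#
#   import numpy as np
#   grid = np.random.rand(10, 10)
#   rectangle_vertices = [
#       [139.76, 35.68], [139.77, 35.68],
#       [139.77, 35.69], [139.76, 35.69],
#   ]
#   gdf = grid_to_geodataframe(grid, rectangle_vertices, meshsize=100)
#   gdf.to_file("grid_cells.geojson", driver="GeoJSON")  # e.g. for inspection in QGIS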
def grid_to_point_geodataframe(grid_ori, rectangle_vertices, meshsize):
    """
    Converts a 2D grid to a GeoDataFrame with point geometries at cell centers and values.

    This function transforms a regular grid into a GeoDataFrame where each cell is
    represented by a point at its center. The transformation handles coordinate systems
    properly, converting between WGS84 (EPSG:4326) and Web Mercator (EPSG:3857) for
    accurate distance calculations.

    Args:
        grid_ori (numpy.ndarray): 2D array containing grid values
        rectangle_vertices (list): List of [lon, lat] coordinates defining area corners.
            Should be in WGS84 (EPSG:4326) format.
        meshsize (float): Size of each grid cell in meters

    Returns:
        GeoDataFrame: A GeoDataFrame with columns:
            - geometry: Point geometry at center of each grid cell in WGS84 (EPSG:4326)
            - value: Value from the original grid

    Example:
        >>> grid = np.array([[1, 2], [3, 4]])
        >>> vertices = [[lon1, lat1], [lon2, lat2], [lon3, lat3], [lon4, lat4]]
        >>> mesh_size = 100  # 100 meters
        >>> gdf = grid_to_point_geodataframe(grid, vertices, mesh_size)

    Notes:
        - The input grid is flipped vertically before processing to match geographic
          orientation (north at top)
        - The output GeoDataFrame uses WGS84 (EPSG:4326) coordinate system
        - Points are placed at the center of each grid cell
    """
    grid = np.flipud(grid_ori.copy())

    # Extract bounds from rectangle vertices
    min_lon = min(v[0] for v in rectangle_vertices)
    max_lon = max(v[0] for v in rectangle_vertices)
    min_lat = min(v[1] for v in rectangle_vertices)
    max_lat = max(v[1] for v in rectangle_vertices)

    rows, cols = grid.shape

    # Set up transformers for accurate coordinate calculations
    wgs84 = CRS.from_epsg(4326)
    web_mercator = CRS.from_epsg(3857)
    transformer_to_mercator = Transformer.from_crs(wgs84, web_mercator, always_xy=True)
    transformer_to_wgs84 = Transformer.from_crs(web_mercator, wgs84, always_xy=True)

    # Convert bounds to Web Mercator for accurate distance calculations
    min_x, min_y = transformer_to_mercator.transform(min_lon, min_lat)
    max_x, max_y = transformer_to_mercator.transform(max_lon, max_lat)

    # Calculate cell sizes in Web Mercator coordinates
    cell_size_x = (max_x - min_x) / cols
    cell_size_y = (max_y - min_y) / rows

    # Create lists to store data
    points = []
    values = []

    # Create grid points at cell centers (Point is imported at module level)
    for i in range(rows):
        for j in range(cols):
            # Calculate cell center in Web Mercator
            cell_center_x = min_x + (j + 0.5) * cell_size_x
            # Flip vertical axis since grid is stored with origin at top-left
            cell_center_y = max_y - (i + 0.5) * cell_size_y

            # Convert cell center back to WGS84
            center_lon, center_lat = transformer_to_wgs84.transform(cell_center_x, cell_center_y)

            # Create point for cell center
            cell_point = Point(center_lon, center_lat)

            points.append(cell_point)
            values.append(grid[i, j])

    # Create GeoDataFrame
    gdf = gpd.GeoDataFrame({
        'geometry': points,
        'value': values
    }, crs=CRS.from_epsg(4326))

    return gdf

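# --- Illustrative sketch (editor's note, not part of the module) -------------
# The point variant is convenient for spatial joins and zonal statistics. A sketch
# under the assumption that `districts` is some polygon GeoDataFrame in EPSG:4326
# with its default integer index; the aggregation column name 'index_right' is the
# GeoPandas default for an unnamed right index.
#
#   pts = grid_to_point_geodataframe(grid, rectangle_vertices, meshsize=100)
#   per_district = (
#       gpd.sjoin(pts, districts, predicate='within')
#          .groupby('index_right')['value'].mean()
#   )  # mean grid value per district polygon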
def create_vegetation_height_grid_from_gdf_polygon(veg_gdf, mesh_size, polygon):
    """
    Create a vegetation height grid from a GeoDataFrame of vegetation polygons/objects
    within the bounding box of a given polygon, at a specified mesh spacing.
    Cells that intersect one or more vegetation polygons receive (by default) the
    maximum vegetation height among the intersecting polygons.
    Cells that do not intersect any vegetation are set to 0.

    Args:
        veg_gdf (GeoDataFrame): A GeoDataFrame containing vegetation features
                                (usually polygons) with a 'height' column
                                (or a similarly named attribute). Must be in
                                EPSG:4326 or reprojectable to it.
        mesh_size (float): Desired grid spacing in meters.
        polygon (list or Polygon):
            - If a list of (lon, lat) coords, will be converted to a shapely Polygon
              in EPSG:4326.
            - If a shapely Polygon, it must be in or reprojectable to EPSG:4326.

    Returns:
        np.ndarray: 2D array of vegetation height values covering the bounding box
                    of the polygon. The array is indexed [row, col] from top row
                    (north) to bottom row (south). Cells with no intersecting
                    vegetation are set to 0.
    """
    # ------------------------------------------------------------------------
    # 1. Ensure veg_gdf is in WGS84 (EPSG:4326)
    # ------------------------------------------------------------------------
    if veg_gdf.crs is None:
        warnings.warn("veg_gdf has no CRS. Assuming EPSG:4326. "
                      "If this is incorrect, please set the correct CRS and re-run.")
        veg_gdf = veg_gdf.set_crs(epsg=4326)
    else:
        if veg_gdf.crs.to_epsg() != 4326:
            veg_gdf = veg_gdf.to_crs(epsg=4326)

    # Must have a 'height' column (or change to your column name)
    if 'height' not in veg_gdf.columns:
        raise ValueError("Vegetation GeoDataFrame must have a 'height' column.")

    # ------------------------------------------------------------------------
    # 2. Convert input polygon to shapely Polygon in WGS84
    # ------------------------------------------------------------------------
    if isinstance(polygon, list):
        poly = Polygon(polygon)
    elif isinstance(polygon, Polygon):
        poly = polygon
    else:
        raise ValueError("polygon must be a list of (lon, lat) or a shapely Polygon.")

    # ------------------------------------------------------------------------
    # 3. Compute bounding box & grid dimensions
    # ------------------------------------------------------------------------
    left, bottom, right, top = poly.bounds
    geod = Geod(ellps="WGS84")

    # Horizontal (width) distance in meters
    _, _, width_m = geod.inv(left, bottom, right, bottom)
    # Vertical (height) distance in meters
    _, _, height_m = geod.inv(left, bottom, left, top)

    # Number of cells horizontally and vertically
    num_cells_x = int(width_m / mesh_size + 0.5)
    num_cells_y = int(height_m / mesh_size + 0.5)

    if num_cells_x < 1 or num_cells_y < 1:
        warnings.warn("Polygon bounding box is smaller than mesh_size; returning empty array.")
        return np.array([])

    # ------------------------------------------------------------------------
    # 4. Generate the grid (cell centers) covering the bounding box
    # ------------------------------------------------------------------------
    xs = np.linspace(left, right, num_cells_x)
    ys = np.linspace(top, bottom, num_cells_y)  # top→bottom
    X, Y = np.meshgrid(xs, ys)

    # Flatten these for convenience
    xs_flat = X.ravel()
    ys_flat = Y.ravel()

    # Create cell-center points as a GeoDataFrame
    grid_points = gpd.GeoDataFrame(
        geometry=[Point(lon, lat) for lon, lat in zip(xs_flat, ys_flat)],
        crs="EPSG:4326"
    )

    # ------------------------------------------------------------------------
    # 5. Spatial join (intersection) to find which vegetation objects each cell intersects.
    #    A cell is only filled if its center point actually intersects a vegetation polygon.
    #    If your data is more consistent with "contains" or "within" (e.g. a point
    #    vegetation layer), adjust the predicate accordingly.
    #
    #    We use a left join so that unmatched cells remain in the result with NaN values,
    #    then group by the index of the original grid_points to handle multiple
    #    intersections; 'index_right' comes from the vegetation layer.
    # ------------------------------------------------------------------------
    joined = gpd.sjoin(
        grid_points,
        veg_gdf[['height', 'geometry']],
        how='left',
        predicate='intersects'
    )

    # Because one cell (row in grid_points) can intersect multiple polygons,
    # we need to aggregate them. We take the *maximum* height by default.
    joined_agg = (
        joined.groupby(joined.index)   # group by the index from grid_points
              .agg({'height': 'max'})  # or 'mean' if you prefer an average
    )

    # joined_agg now has the same index as grid_points.
    # If a row didn't intersect any polygon, 'height' is NaN.

    # ------------------------------------------------------------------------
    # 6. Build the 2D height array, initializing with zeros
    # ------------------------------------------------------------------------
    veg_grid = np.zeros((num_cells_y, num_cells_x), dtype=float)

    # The row, col in the final array corresponds to how we built 'grid_points':
    #   row = i // num_cells_x
    #   col = i % num_cells_x
    for i, row_data in joined_agg.iterrows():
        if not np.isnan(row_data['height']):  # Only set values for cells with vegetation
            row_idx = i // num_cells_x
            col_idx = i % num_cells_x
            veg_grid[row_idx, col_idx] = row_data['height']

    # Result: row=0 is the top-most row, row=-1 is the bottom row.
    return np.flipud(veg_grid)

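# --- Illustrative sketch (editor's note, not part of the module) -------------
# The core of step 5 above is an intersects-join followed by a per-cell maximum.
# A toy, self-contained version of that pattern; the coordinates, buffer radii,
# and heights are arbitrary.
#
#   import geopandas as gpd
#   from shapely.geometry import Point
#
#   cells = gpd.GeoDataFrame(geometry=[Point(0, 0), Point(1, 0)], crs="EPSG:4326")
#   crowns = gpd.GeoDataFrame(
#       {'height': [8.0, 12.0]},
#       geometry=[Point(0, 0).buffer(0.5), Point(0, 0).buffer(0.3)],
#       crs="EPSG:4326",
#   )
#   joined = gpd.sjoin(cells, crowns, how='left', predicate='intersects')
#   max_h = joined.groupby(joined.index)['height'].max()
#   # -> 12.0 for cell 0 (two overlapping crowns), NaN for cell 1 (no vegetation)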
def create_dem_grid_from_gdf_polygon(terrain_gdf, mesh_size, polygon):
    """
    Create a height grid from a terrain GeoDataFrame within the bounding box
    of the given polygon, using nearest-neighbor sampling of elevations.
    Edges of the bounding box also receive a nearest elevation, so there
    should be no NaNs around the edges if data coverage is sufficient.

    Args:
        terrain_gdf (GeoDataFrame): A GeoDataFrame containing terrain features
                                    (points or centroids) with an 'elevation' column.
                                    Must be in EPSG:4326 or reprojectable to it.
        mesh_size (float): Desired grid spacing in meters.
        polygon (list or Polygon): Polygon specifying the region of interest.
            - If a list of (lon, lat), will be made into a Polygon.
            - If a shapely Polygon, must be in WGS84 (EPSG:4326)
              or reprojected to it.

    Returns:
        np.ndarray: 2D array of height values covering the bounding box of the polygon.
                    Any location not matched by terrain_gdf data remains NaN, but edges
                    will not automatically be NaN if terrain coverage exists.
    """

    # ------------------------------------------------------------------------
    # 1. Ensure terrain_gdf is in WGS84 (EPSG:4326)
    # ------------------------------------------------------------------------
    if terrain_gdf.crs is None:
        warnings.warn("terrain_gdf has no CRS. Assuming EPSG:4326. "
                      "If this is incorrect, please set the correct CRS and re-run.")
        terrain_gdf = terrain_gdf.set_crs(epsg=4326)
    else:
        # Reproject if needed
        if terrain_gdf.crs.to_epsg() != 4326:
            terrain_gdf = terrain_gdf.to_crs(epsg=4326)

    # Convert input polygon to shapely Polygon in WGS84
    if isinstance(polygon, list):
        poly = Polygon(polygon)  # assume coords are (lon, lat) in EPSG:4326
    elif isinstance(polygon, Polygon):
        poly = polygon
    else:
        raise ValueError("`polygon` must be a list of (lon, lat) or a shapely Polygon.")

    # ------------------------------------------------------------------------
    # 2. Compute bounding box and number of grid cells
    # ------------------------------------------------------------------------
    left, bottom, right, top = poly.bounds
    geod = Geod(ellps="WGS84")

    # Geodesic distances in meters
    _, _, width_m = geod.inv(left, bottom, right, bottom)
    _, _, height_m = geod.inv(left, bottom, left, top)

    # Number of cells in X and Y directions
    num_cells_x = int(width_m / mesh_size + 0.5)
    num_cells_y = int(height_m / mesh_size + 0.5)

    if num_cells_x < 1 or num_cells_y < 1:
        warnings.warn("Polygon bounding box is smaller than mesh_size; returning empty array.")
        return np.array([])

    # ------------------------------------------------------------------------
    # 3. Generate grid points covering the bounding box
    #    (all points, not just those inside the polygon)
    # ------------------------------------------------------------------------
    xs = np.linspace(left, right, num_cells_x)
    ys = np.linspace(top, bottom, num_cells_y)  # top→bottom
    X, Y = np.meshgrid(xs, ys)

    # Flatten for convenience
    xs_flat = X.ravel()
    ys_flat = Y.ravel()

    # Create GeoDataFrame of all bounding-box points
    grid_points = gpd.GeoDataFrame(
        geometry=[Point(lon, lat) for lon, lat in zip(xs_flat, ys_flat)],
        crs="EPSG:4326"
    )

    # ------------------------------------------------------------------------
    # 4. Nearest-neighbor join from terrain_gdf to grid points
    # ------------------------------------------------------------------------
    if 'elevation' not in terrain_gdf.columns:
        raise ValueError("terrain_gdf must have an 'elevation' column.")

    # Nearest spatial join (requires GeoPandas >= 0.10).
    # This assigns each grid point the nearest terrain_gdf elevation.
    grid_points_elev = gpd.sjoin_nearest(
        grid_points,
        terrain_gdf[['elevation', 'geometry']],
        how="left",
        distance_col="dist_to_terrain"
    )

    # ------------------------------------------------------------------------
    # 5. Build the final 2D height array
    #    (rows: top->bottom, columns: left->right)
    # ------------------------------------------------------------------------
    dem_grid = np.full((num_cells_y, num_cells_x), np.nan, dtype=float)

    # grid_points_elev keeps the index of grid_points, so:
    #   row = i // num_cells_x, col = i % num_cells_x
    for i, elevation_val in zip(grid_points_elev.index, grid_points_elev['elevation']):
        row = i // num_cells_x
        col = i % num_cells_x
        dem_grid[row, col] = elevation_val  # could be NaN if no data

    # Before flipping, row=0 is the north/top row and row=-1 the south/bottom row;
    # flip so the returned orientation matches the other grid functions in this module.
    return np.flipud(dem_grid)

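# --- Illustrative sketch (editor's note, not part of the module) -------------
# gpd.sjoin_nearest (GeoPandas >= 0.10) attaches the closest terrain elevation to
# each grid point. A toy example with arbitrary coordinates, using a projected CRS
# so nearest distances are metric:
#
#   import geopandas as gpd
#   from shapely.geometry import Point
#
#   pts = gpd.GeoDataFrame(geometry=[Point(0, 0), Point(2, 0)], crs="EPSG:3857")
#   terrain = gpd.GeoDataFrame(
#       {'elevation': [10.0, 40.0]},
#       geometry=[Point(0.1, 0), Point(2.2, 0)],
#       crs="EPSG:3857",
#   )
#   nearest = gpd.sjoin_nearest(pts, terrain, how="left", distance_col="dist")
#   # nearest['elevation'] -> [10.0, 40.0]; exact ties would duplicate rows, which
#   # the fill loop above tolerates because it simply overwrites the same cell.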
def create_canopy_grids_from_tree_gdf(tree_gdf, meshsize, rectangle_vertices):
    """
    Create canopy top and bottom height grids from a tree GeoDataFrame.

    Assumptions:
    - Each tree is a point with attributes: 'top_height', 'bottom_height', 'crown_diameter'.
    - The crown is modeled as a solid of revolution with an ellipsoidal vertical profile.
      For a tree with top H_t, bottom H_b and crown radius R = crown_diameter/2,
      at a horizontal distance r (r <= R) from the tree center:
          z_top(r) = z0 + a * sqrt(1 - (r/R)^2)
          z_bot(r) = z0 - a * sqrt(1 - (r/R)^2)
      where a = (H_t - H_b)/2 and z0 = (H_t + H_b)/2.

    The function outputs two grids (shape: (nx, ny), consistent with other grid functions):
    - canopy_height_grid: maximum canopy top height per cell across trees
    - canopy_bottom_height_grid: maximum canopy bottom height per cell across trees

    Args:
        tree_gdf (geopandas.GeoDataFrame): Tree points with required columns.
        meshsize (float): Grid spacing in meters.
        rectangle_vertices (list[tuple]): 4 vertices [(lon, lat), ...] defining the grid rectangle.

    Returns:
        tuple[np.ndarray, np.ndarray]: (canopy_height_grid, canopy_bottom_height_grid)
    """

    # Validate and prepare input GeoDataFrame
    if tree_gdf is None or len(tree_gdf) == 0:
        return np.array([]), np.array([])

    required_cols = ['top_height', 'bottom_height', 'crown_diameter', 'geometry']
    for col in required_cols:
        if col not in tree_gdf.columns:
            raise ValueError(f"tree_gdf must contain '{col}' column.")

    # Ensure CRS is WGS84
    if tree_gdf.crs is None:
        warnings.warn("tree_gdf has no CRS. Assuming EPSG:4326.")
        tree_gdf = tree_gdf.set_crs(epsg=4326)
    elif tree_gdf.crs.to_epsg() != 4326:
        tree_gdf = tree_gdf.to_crs(epsg=4326)

    # Grid setup consistent with building/land cover grid functions
    geod = initialize_geod()
    vertex_0, vertex_1, vertex_3 = rectangle_vertices[0], rectangle_vertices[1], rectangle_vertices[3]

    dist_side_1 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_1[0], vertex_1[1])
    dist_side_2 = calculate_distance(geod, vertex_0[0], vertex_0[1], vertex_3[0], vertex_3[1])

    side_1 = np.array(vertex_1) - np.array(vertex_0)
    side_2 = np.array(vertex_3) - np.array(vertex_0)
    u_vec = normalize_to_one_meter(side_1, dist_side_1)
    v_vec = normalize_to_one_meter(side_2, dist_side_2)

    origin = np.array(rectangle_vertices[0])
    grid_size, adjusted_meshsize = calculate_grid_size(side_1, side_2, u_vec, v_vec, meshsize)

    nx, ny = grid_size[0], grid_size[1]

    # Precompute cell-center coordinates in the grid's (u, v) metric space (meters from origin)
    i_centers_m = (np.arange(nx) + 0.5) * adjusted_meshsize[0]
    j_centers_m = (np.arange(ny) + 0.5) * adjusted_meshsize[1]

    # Initialize output grids
    canopy_top = np.zeros((nx, ny), dtype=float)
    canopy_bottom = np.zeros((nx, ny), dtype=float)

    # Matrix to convert lon/lat offsets to metric (u, v) using u_vec, v_vec:
    # delta_lonlat ≈ [u_vec v_vec] @ [alpha; beta], where alpha/beta are meters along u/v
    transform_mat = np.column_stack((u_vec, v_vec))  # shape (2, 2)
    try:
        transform_inv = np.linalg.inv(transform_mat)
    except np.linalg.LinAlgError:
        # Fallback if u_vec/v_vec are degenerate (shouldn't happen for proper rectangles)
        transform_inv = np.linalg.pinv(transform_mat)

    # Iterate trees and accumulate ellipsoidal canopy surfaces
    for _, row in tree_gdf.iterrows():
        geom = row['geometry']
        if geom is None or not hasattr(geom, 'x'):
            continue

        top_h = float(row.get('top_height', 0.0) or 0.0)
        bot_h = float(row.get('bottom_height', 0.0) or 0.0)
        dia = float(row.get('crown_diameter', 0.0) or 0.0)

        # Sanity checks and clamps
        if dia <= 0 or top_h <= 0:
            continue
        if bot_h < 0:
            bot_h = 0.0
        if bot_h > top_h:
            top_h, bot_h = bot_h, top_h

        R = dia / 2.0                         # crown radius (meters)
        a = max((top_h - bot_h) / 2.0, 0.0)   # vertical semi-axis of the crown
        z0 = (top_h + bot_h) / 2.0            # height of the crown center
        if a == 0:
            # Degenerate crown (top == bottom); the profile collapses to a constant height
            a = 0.0

        # Tree center in lon/lat
        tree_lon = float(geom.x)
        tree_lat = float(geom.y)

        # Map tree center to (u, v) metric coordinates relative to origin
        delta = np.array([tree_lon, tree_lat]) - origin
        alpha_beta = transform_inv @ delta  # meters along u (alpha) and v (beta)
        alpha_m = alpha_beta[0]
        beta_m = alpha_beta[1]

        # Determine affected index ranges (bounding box in grid indices);
        # convert the radius in meters to index offsets along u and v
        du_cells = int(R / adjusted_meshsize[0] + 2)
        dv_cells = int(R / adjusted_meshsize[1] + 2)

        i_center_idx = int(alpha_m / adjusted_meshsize[0])
        j_center_idx = int(beta_m / adjusted_meshsize[1])

        i_min = max(0, i_center_idx - du_cells)
        i_max = min(nx - 1, i_center_idx + du_cells)
        j_min = max(0, j_center_idx - dv_cells)
        j_max = min(ny - 1, j_center_idx + dv_cells)

        if i_min > i_max or j_min > j_max:
            continue

        # Slice cell-center coords for the local window
        ic = i_centers_m[i_min:i_max + 1][:, None]  # shape (Ii, 1)
        jc = j_centers_m[j_min:j_max + 1][None, :]  # shape (1, Jj)

        # Compute radial distance in meters in grid metric space
        di = ic - alpha_m
        dj = jc - beta_m
        r = np.sqrt(di * di + dj * dj)

        # Mask for points within crown radius
        within = r <= R
        if not np.any(within):
            continue

        # Ellipsoidal vertical profile;
        # avoid numerical issues for r slightly > R due to precision
        ratio = np.clip(r / max(R, 1e-9), 0.0, 1.0)
        factor = np.sqrt(1.0 - ratio * ratio)
        local_top = z0 + a * factor
        local_bot = z0 - a * factor

        # Apply mask; cells outside contribute zero
        local_top_masked = np.where(within, local_top, 0.0)
        local_bot_masked = np.where(within, local_bot, 0.0)

        # Merge with maxima to represent the union of crowns
        canopy_top[i_min:i_max + 1, j_min:j_max + 1] = np.maximum(
            canopy_top[i_min:i_max + 1, j_min:j_max + 1], local_top_masked
        )
        canopy_bottom[i_min:i_max + 1, j_min:j_max + 1] = np.maximum(
            canopy_bottom[i_min:i_max + 1, j_min:j_max + 1], local_bot_masked
        )

    # Ensure bottom <= top everywhere
    canopy_bottom = np.minimum(canopy_bottom, canopy_top)

    return canopy_top, canopy_bottom
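# --- Worked example of the crown profile (editor's note, not part of the module) ---
# For a tree with top_height = 10 m, bottom_height = 4 m and crown_diameter = 4 m:
#   a  = (10 - 4) / 2 = 3.0,  z0 = (10 + 4) / 2 = 7.0,  R = 4 / 2 = 2.0
# At a cell whose center lies r = 1 m from the trunk:
#   factor = sqrt(1 - (1 / 2)**2) ≈ 0.866
#   z_top  = 7.0 + 3.0 * 0.866 ≈ 9.60 m
#   z_bot  = 7.0 - 3.0 * 0.866 ≈ 4.40 m
# so that cell's canopy_top/canopy_bottom contributions are about 9.6 m and 4.4 m,
# unless an overlapping crown has already written larger values there.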