voxcity 0.5.6.tar.gz → 0.5.9.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of voxcity might be problematic.
- {voxcity-0.5.6 → voxcity-0.5.9}/PKG-INFO +2 -2
- {voxcity-0.5.6 → voxcity-0.5.9}/pyproject.toml +2 -3
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/citygml.py +347 -194
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/exporter/obj.py +2 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/generator.py +4 -4
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/geoprocessor/grid.py +1 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/utils/visualization.py +92 -4
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity.egg-info/PKG-INFO +2 -2
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity.egg-info/requires.txt +1 -1
- {voxcity-0.5.6 → voxcity-0.5.9}/AUTHORS.rst +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/CONTRIBUTING.rst +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/HISTORY.rst +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/LICENSE +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/MANIFEST.in +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/README.md +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/docs/Makefile +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/docs/archive/README.rst +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/docs/authors.rst +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/docs/conf.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/docs/index.rst +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/docs/make.bat +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/setup.cfg +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/__init__.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/__init__.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/eubucco.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/gee.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/mbfp.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/oemj.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/omt.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/osm.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/overture.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/utils.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/exporter/__init_.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/exporter/envimet.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/exporter/magicavoxel.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/geoprocessor/__init_.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/geoprocessor/draw.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/geoprocessor/mesh.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/geoprocessor/network.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/geoprocessor/polygon.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/geoprocessor/utils.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/simulator/__init_.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/simulator/solar.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/simulator/utils.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/simulator/view.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/utils/__init_.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/utils/lc.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/utils/material.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/utils/weather.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity.egg-info/SOURCES.txt +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity.egg-info/dependency_links.txt +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity.egg-info/top_level.txt +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/tests/__init__.py +0 -0
- {voxcity-0.5.6 → voxcity-0.5.9}/tests/voxelcity.py +0 -0
{voxcity-0.5.6 → voxcity-0.5.9}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: voxcity
-Version: 0.5.6
+Version: 0.5.9
 Summary: voxcity is an easy and one-stop tool to output 3d city models for microclimate simulation by integrating multiple geospatial open-data
 Author-email: Kunihiko Fujiwara <kunihiko@nus.edu.sg>
 Maintainer-email: Kunihiko Fujiwara <kunihiko@nus.edu.sg>
@@ -45,7 +45,7 @@ Requires-Dist: pycountry
 Requires-Dist: osm2geojson
 Requires-Dist: seaborn
 Requires-Dist: overturemaps
-Requires-Dist: protobuf
+Requires-Dist: protobuf<6,>=4.21
 Requires-Dist: timezonefinder
 Requires-Dist: astral
 Requires-Dist: osmnx
{voxcity-0.5.6 → voxcity-0.5.9}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "voxcity"
-version = "0.5.6"
+version = "0.5.9"
 requires-python = ">=3.10,<3.13"
 classifiers = [
     "Programming Language :: Python :: 3.10",
@@ -47,7 +47,7 @@ dependencies = [
     "osm2geojson",
     "seaborn",
     "overturemaps",
-    "protobuf",
+    "protobuf>=4.21,<6",
     "timezonefinder",
     "astral",
     "osmnx",
@@ -56,7 +56,6 @@ dependencies = [
     "pyvista",
     "IPython",
     "lxml"
-
 ]
 
 [project.optional-dependencies]
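The only functional dependency change in this release is the protobuf pin. As an illustration only (not part of voxcity), the new specifier can be checked against whatever protobuf is installed using the packaging library:

# Illustration only: check an installed protobuf against the 0.5.9 pin.
from importlib.metadata import version
from packaging.specifiers import SpecifierSet

pin = SpecifierSet(">=4.21,<6")
installed = version("protobuf")
print(f"protobuf {installed} satisfies '{pin}': {installed in pin}")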
{voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/downloader/citygml.py
@@ -11,30 +11,126 @@ from shapely.geometry import Polygon, Point, MultiPolygon
 import pandas as pd
 from tqdm import tqdm
 
+# --------------------------------------------------------------------
+# Script to get tile boundary from file name
+# --------------------------------------------------------------------
+import re
+from shapely.geometry import Polygon
+
+def decode_2nd_level_mesh(mesh6):
+    """
+    Given exactly 6 digits (string) for a standard (2nd-level) mesh code,
+    return (lat_sw, lon_sw, lat_ne, lon_ne) in degrees.
+    """
+    code = int(mesh6)
+    # Extract each piece
+    N1 = code // 10000           # first 2 digits
+    M1 = (code // 100) % 100     # next 2 digits
+    row_2nd = (code // 10) % 10  # 5th digit
+    col_2nd = code % 10          # 6th digit
+
+    # 1st-level mesh "southwest" corner
+    lat_sw_1 = (N1 * 40.0) / 60.0  # each N1 => 40' => 2/3 degrees
+    lon_sw_1 = 100.0 + M1          # each M1 => offset from 100°E
+
+    # 2nd-level mesh subdivides that 8×8 => each cell = 1/12° lat x 0.125° lon
+    dlat_2nd = (40.0 / 60.0) / 8.0  # 1/12°
+    dlon_2nd = 1.0 / 8.0            # 0.125°
+
+    lat_sw = lat_sw_1 + row_2nd * dlat_2nd
+    lon_sw = lon_sw_1 + col_2nd * dlon_2nd
+    lat_ne = lat_sw + dlat_2nd
+    lon_ne = lon_sw + dlon_2nd
+
+    return (lat_sw, lon_sw, lat_ne, lon_ne)
+
+def decode_mesh_code(mesh_str):
+    """
+    Handles:
+      - 6-digit codes (standard 2nd-level mesh),
+      - 8-digit codes (2nd-level subdivided 10×10).
+
+    Returns a list of (lon, lat) forming a *closed* bounding polygon in WGS84.
+    """
+    if len(mesh_str) < 6:
+        raise ValueError(f"Mesh code '{mesh_str}' is too short.")
+
+    # Decode the first 6 digits as a 2nd-level mesh
+    mesh6 = mesh_str[:6]
+    lat_sw_2, lon_sw_2, lat_ne_2, lon_ne_2 = decode_2nd_level_mesh(mesh6)
+
+    # If exactly 6 digits => full 2nd-level tile
+    if len(mesh_str) == 6:
+        return [
+            (lon_sw_2, lat_sw_2),
+            (lon_ne_2, lat_sw_2),
+            (lon_ne_2, lat_ne_2),
+            (lon_sw_2, lat_ne_2),
+            (lon_sw_2, lat_sw_2)
+        ]
+
+    # If 8 digits => last 2 subdivide the tile 10×10
+    elif len(mesh_str) == 8:
+        row_10 = int(mesh_str[6])  # 7th digit
+        col_10 = int(mesh_str[7])  # 8th digit
+
+        # Sub-tile size in lat/lon
+        dlat_10 = (lat_ne_2 - lat_sw_2) / 10.0
+        dlon_10 = (lon_ne_2 - lon_sw_2) / 10.0
+
+        lat_sw = lat_sw_2 + row_10 * dlat_10
+        lon_sw = lon_sw_2 + col_10 * dlon_10
+        lat_ne = lat_sw + dlat_10
+        lon_ne = lon_sw + dlon_10
+
+        return [
+            (lon_sw, lat_sw),
+            (lon_ne, lat_sw),
+            (lon_ne, lat_ne),
+            (lon_sw, lat_ne),
+            (lon_sw, lat_sw)
+        ]
+
+    else:
+        raise ValueError(
+            f"Unsupported mesh code length '{mesh_str}'. "
+            "This script only handles 6-digit or 8-digit codes."
+        )
+
+def get_tile_polygon_from_filename(filename):
+    """
+    Extract the mesh code from a typical Project PLATEAU filename
+    (e.g. '51357348_bldg_6697_op.gml') and decode it.
+    Returns the bounding polygon in WGS84 as a list of (lon, lat).
+    """
+    # Look for leading digits until the first underscore
+    m = re.match(r'^(\d+)_', filename)
+    if not m:
+        # If no match, you can either raise an error or return None
+        raise ValueError(f"No leading digit code found in filename: {filename}")
+
+    mesh_code = m.group(1)
+    return decode_mesh_code(mesh_code)
+
+# --------------------------------------------------------------------
+# Original script logic
+# --------------------------------------------------------------------
+
 def download_and_extract_zip(url, extract_to='.'):
     """
     Download and extract a zip file from a URL
     """
-    # Send a GET request to the URL
     response = requests.get(url)
-
-    # Check if the request was successful
     if response.status_code == 200:
-        # Extract the base name of the zip file from the URL
         parsed_url = urlparse(url)
         zip_filename = os.path.basename(parsed_url.path)
         folder_name = os.path.splitext(zip_filename)[0]  # Remove the .zip extension
 
-        # Create the extraction directory
         extraction_path = os.path.join(extract_to, folder_name)
         os.makedirs(extraction_path, exist_ok=True)
 
-        # Create a BytesIO object from the response content
         zip_file = io.BytesIO(response.content)
-
-        # Open the zip file
         with zipfile.ZipFile(zip_file) as z:
-            # Extract all the contents of the zip file to the specified directory
             z.extractall(extraction_path)
         print(f"Extracted to {extraction_path}")
     else:
@@ -55,7 +151,6 @@ def swap_coordinates(polygon):
     Swap coordinates in a polygon (lat/lon to lon/lat or vice versa)
     """
     if isinstance(polygon, MultiPolygon):
-        # Handle MultiPolygon objects
         new_polygons = []
         for geom in polygon.geoms:
             coords = list(geom.exterior.coords)
@@ -63,7 +158,6 @@ def swap_coordinates(polygon):
             new_polygons.append(Polygon(swapped_coords))
         return MultiPolygon(new_polygons)
     else:
-        # Handle regular Polygon objects
         coords = list(polygon.exterior.coords)
         swapped_coords = [(y, x) for x, y in coords]
         return Polygon(swapped_coords)
@@ -87,25 +181,20 @@ def extract_terrain_info(file_path, namespaces):
             for tin in relief.findall('.//dem:TINRelief', namespaces):
                 tin_id = tin.get('{http://www.opengis.net/gml}id')
 
-                # Extract triangulated surface
                 triangles = tin.findall('.//gml:Triangle', namespaces)
                 for i, triangle in enumerate(triangles):
-                    # Extract the coordinates of each triangle
                     pos_lists = triangle.findall('.//gml:posList', namespaces)
-
                     for pos_list in pos_lists:
                         try:
-                            # Process the coordinates
                             coords_text = pos_list.text.strip().split()
                             coords = []
                             elevations = []
 
-                            # Process coordinates in triplets (x, y, z)
                            for j in range(0, len(coords_text), 3):
                                 if j + 2 < len(coords_text):
                                     x = float(coords_text[j])
-                                    y = float(coords_text[j
-                                    z = float(coords_text[j
+                                    y = float(coords_text[j+1])
+                                    z = float(coords_text[j+2])
 
                                     if not np.isinf(x) and not np.isinf(y) and not np.isinf(z):
                                         coords.append((x, y))
@@ -114,10 +203,8 @@ def extract_terrain_info(file_path, namespaces):
                             if len(coords) >= 3 and validate_coords(coords):
                                 polygon = Polygon(coords)
                                 if polygon.is_valid:
-                                    # Calculate centroid for point representation
                                     centroid = polygon.centroid
                                     avg_elevation = np.mean(elevations)
-
                                     terrain_elements.append({
                                         'relief_id': relief_id,
                                         'tin_id': tin_id,
@@ -136,7 +223,6 @@ def extract_terrain_info(file_path, namespaces):
             for line in breakline.findall('.//gml:LineString', namespaces):
                 line_id = line.get('{http://www.opengis.net/gml}id')
                 pos_list = line.find('.//gml:posList', namespaces)
-
                 if pos_list is not None:
                     try:
                         coords_text = pos_list.text.strip().split()
@@ -146,9 +232,8 @@ def extract_terrain_info(file_path, namespaces):
                         for j in range(0, len(coords_text), 3):
                             if j + 2 < len(coords_text):
                                 x = float(coords_text[j])
-                                y = float(coords_text[j
-                                z = float(coords_text[j
-
+                                y = float(coords_text[j+1])
+                                z = float(coords_text[j+2])
                                 if not np.isinf(x) and not np.isinf(y) and not np.isinf(z):
                                     points.append(Point(x, y))
                                     elevations.append(z)
@@ -173,7 +258,6 @@ def extract_terrain_info(file_path, namespaces):
             for point in mass_point.findall('.//gml:Point', namespaces):
                 point_id = point.get('{http://www.opengis.net/gml}id')
                 pos = point.find('.//gml:pos', namespaces)
-
                 if pos is not None:
                     try:
                         coords = pos.text.strip().split()
@@ -181,7 +265,6 @@ def extract_terrain_info(file_path, namespaces):
                         x = float(coords[0])
                         y = float(coords[1])
                         z = float(coords[2])
-
                         if not np.isinf(x) and not np.isinf(y) and not np.isinf(z):
                             point_geom = Point(x, y)
                             if point_geom.is_valid:
@@ -204,13 +287,13 @@ def extract_terrain_info(file_path, namespaces):
         print(f"Error processing terrain in file {Path(file_path).name}: {e}")
         return []
 
+
 def extract_vegetation_info(file_path, namespaces):
     """
     Extract vegetation features (PlantCover, SolitaryVegetationObject)
     from a CityGML file, handling LOD0..LOD3 geometry and MultiSurface/CompositeSurface.
     """
     vegetation_elements = []
-
     try:
         tree = ET.parse(file_path)
         root = tree.getroot()
@@ -218,32 +301,23 @@ def extract_vegetation_info(file_path, namespaces):
         print(f"Error parsing CityGML file {Path(file_path).name}: {e}")
         return vegetation_elements
 
-    #
-    # Helper: parse all polygons from a <gml:MultiSurface> or <veg:lodXMultiSurface>
-    # ----------------------------------------------------------------------------
+    # Helper: parse polygons in <gml:MultiSurface> or <veg:lodXMultiSurface>
     def parse_lod_multisurface(lod_elem):
-        """Return a Shapely (Multi)Polygon from gml:Polygon elements under lod_elem."""
         polygons = []
-        # Find all Polygons (including nested in CompositeSurface)
         for poly_node in lod_elem.findall('.//gml:Polygon', namespaces):
             ring_node = poly_node.find('.//gml:exterior//gml:LinearRing//gml:posList', namespaces)
             if ring_node is None or ring_node.text is None:
                 continue
-
-            # Parse coordinate text
             coords_text = ring_node.text.strip().split()
             coords = []
-            # Typically posList is in triplets: (x, y, z)
             for i in range(0, len(coords_text), 3):
                 try:
                     x = float(coords_text[i])
                     y = float(coords_text[i+1])
-                    # z = float(coords_text[i+2]) #
+                    # z = float(coords_text[i+2]) # If you need Z
                     coords.append((x, y))
                 except:
-                    # Skip any parse error or incomplete coordinate
                     pass
-
             if len(coords) >= 3:
                 polygon = Polygon(coords)
                 if polygon.is_valid:
@@ -256,36 +330,27 @@ def extract_vegetation_info(file_path, namespaces):
         else:
             return MultiPolygon(polygons)
 
-    # ----------------------------------------------------------------------------
-    # Helper: retrieve geometry from all LOD tags
-    # ----------------------------------------------------------------------------
     def get_veg_geometry(veg_elem):
         """
         Search for geometry under lod0Geometry, lod1Geometry, lod2Geometry,
-        lod3Geometry, lod4Geometry, as well as lod0MultiSurface ...
+        lod3Geometry, lod4Geometry, as well as lod0MultiSurface ... lod4MultiSurface.
         Return a Shapely geometry (Polygon or MultiPolygon) if found.
         """
         geometry_lods = [
             "lod0Geometry", "lod1Geometry", "lod2Geometry", "lod3Geometry", "lod4Geometry",
             "lod0MultiSurface", "lod1MultiSurface", "lod2MultiSurface", "lod3MultiSurface", "lod4MultiSurface"
         ]
-
         for lod_tag in geometry_lods:
-            # e.g. .//veg:lod3Geometry
             lod_elem = veg_elem.find(f'.//veg:{lod_tag}', namespaces)
             if lod_elem is not None:
                 geom = parse_lod_multisurface(lod_elem)
                 if geom is not None:
                     return geom
-
         return None
 
-    # ----------------------------------------------------------------------------
     # 1) PlantCover
-    # ----------------------------------------------------------------------------
     for plant_cover in root.findall('.//veg:PlantCover', namespaces):
         cover_id = plant_cover.get('{http://www.opengis.net/gml}id')
-        # averageHeight (if present)
         avg_height_elem = plant_cover.find('.//veg:averageHeight', namespaces)
         if avg_height_elem is not None and avg_height_elem.text:
             try:
@@ -295,9 +360,7 @@ def extract_vegetation_info(file_path, namespaces):
         else:
             vegetation_height = None
 
-        # parse geometry from LOD0..LOD3
         geometry = get_veg_geometry(plant_cover)
-
         if geometry is not None and not geometry.is_empty:
             vegetation_elements.append({
                 'object_type': 'PlantCover',
@@ -307,9 +370,7 @@ def extract_vegetation_info(file_path, namespaces):
                 'source_file': Path(file_path).name
             })
 
-    # ----------------------------------------------------------------------------
     # 2) SolitaryVegetationObject
-    # ----------------------------------------------------------------------------
     for solitary in root.findall('.//veg:SolitaryVegetationObject', namespaces):
         veg_id = solitary.get('{http://www.opengis.net/gml}id')
         height_elem = solitary.find('.//veg:height', namespaces)
@@ -335,6 +396,51 @@ def extract_vegetation_info(file_path, namespaces):
     print(f"Extracted {len(vegetation_elements)} vegetation objects from {Path(file_path).name}")
     return vegetation_elements
 
+
+def extract_building_footprint(building, namespaces):
+    """
+    Extract building footprint from possible LOD representations
+    """
+    lod_tags = [
+        # LOD0
+        './/bldg:lod0FootPrint//gml:MultiSurface//gml:surfaceMember//gml:Polygon//gml:exterior//gml:LinearRing//gml:posList',
+        './/bldg:lod0RoofEdge//gml:MultiSurface//gml:surfaceMember//gml:Polygon//gml:exterior//gml:LinearRing//gml:posList',
+        './/bldg:lod0Solid//gml:Solid//gml:exterior//gml:CompositeSurface//gml:surfaceMember//gml:Polygon//gml:exterior//gml:LinearRing//gml:posList',
+
+        # LOD1
+        './/bldg:lod1Solid//gml:Solid//gml:exterior//gml:CompositeSurface//gml:surfaceMember//gml:Polygon//gml:exterior//gml:LinearRing//gml:posList',
+
+        # LOD2
+        './/bldg:lod2Solid//gml:Solid//gml:exterior//gml:CompositeSurface//gml:surfaceMember//gml:Polygon//gml:exterior//gml:LinearRing//gml:posList',
+
+        # fallback
+        './/gml:MultiSurface//gml:surfaceMember//gml:Polygon//gml:exterior//gml:LinearRing//gml:posList',
+        './/gml:Polygon//gml:exterior//gml:LinearRing//gml:posList'
+    ]
+
+    for tag in lod_tags:
+        pos_list_elements = building.findall(tag, namespaces)
+        if pos_list_elements:
+            # If in LOD1/LOD2 solid, we look for the bottom face
+            if 'lod1Solid' in tag or 'lod2Solid' in tag or 'lod0Solid' in tag:
+                lowest_z = float('inf')
+                footprint_pos_list = None
+                for pos_list_elem in pos_list_elements:
+                    coords_text = pos_list_elem.text.strip().split()
+                    z_values = [float(coords_text[i+2])
+                                for i in range(0, len(coords_text), 3)
+                                if i+2 < len(coords_text)]
+                    if z_values and all(z == z_values[0] for z in z_values) and z_values[0] < lowest_z:
+                        lowest_z = z_values[0]
+                        footprint_pos_list = pos_list_elem
+                if footprint_pos_list:
+                    return footprint_pos_list, lowest_z
+            else:
+                # For simpler LOD0 footprints, just return the first
+                return pos_list_elements[0], None
+    return None, None
+
+
 def process_citygml_file(file_path):
     """
     Process a CityGML file to extract building, terrain, and vegetation information
@@ -343,7 +449,6 @@ def process_citygml_file(file_path):
     terrain_elements = []
     vegetation_elements = []
 
-    # Namespaces (now includes 'veg')
     namespaces = {
         'core': 'http://www.opengis.net/citygml/2.0',
         'bldg': 'http://www.opengis.net/citygml/building/2.0',
@@ -354,27 +459,36 @@ def process_citygml_file(file_path):
     }
 
     try:
-        # Parse the file once at the start (optional; if you want to share 'root' among sub-extractors)
         tree = ET.parse(file_path)
         root = tree.getroot()
 
-        #
+        # Extract Buildings
         for building in root.findall('.//bldg:Building', namespaces):
             building_id = building.get('{http://www.opengis.net/gml}id')
+
             measured_height = building.find('.//bldg:measuredHeight', namespaces)
-            height = float(measured_height.text) if measured_height is not None else None
-
-
-
-
+            height = float(measured_height.text) if measured_height is not None and measured_height.text else None
+
+            storeys = building.find('.//bldg:storeysAboveGround', namespaces)
+            num_storeys = int(storeys.text) if storeys is not None and storeys.text else None
+
+            pos_list, ground_elevation = extract_building_footprint(building, namespaces)
+            if pos_list is not None:
                 try:
-
+                    coords_text = pos_list.text.strip().split()
                     coords = []
-
-
-
-
-
+
+                    # Decide if we have (x,y) pairs or (x,y,z) triplets
+                    coord_step = 3 if (len(coords_text) % 3) == 0 else 2
+
+                    for i in range(0, len(coords_text), coord_step):
+                        if i + coord_step - 1 < len(coords_text):
+                            lon = float(coords_text[i])
+                            lat = float(coords_text[i+1])
+                            if coord_step == 3 and i+2 < len(coords_text):
+                                z = float(coords_text[i+2])
+                                if ground_elevation is None:
+                                    ground_elevation = z
                             if not np.isinf(lon) and not np.isinf(lat):
                                 coords.append((lon, lat))
 
@@ -384,17 +498,18 @@ def process_citygml_file(file_path):
                     buildings.append({
                         'building_id': building_id,
                         'height': height,
-                        '
+                        'storeys': num_storeys,
+                        'ground_elevation': ground_elevation,
                         'geometry': polygon,
                         'source_file': Path(file_path).name
                     })
                 except (ValueError, IndexError) as e:
-                    print(f"Error processing building {building_id} in
+                    print(f"Error processing building {building_id} footprint in {Path(file_path).name}: {e}")
 
-        #
+        # Extract Terrain
         terrain_elements = extract_terrain_info(file_path, namespaces)
 
-        #
+        # Extract Vegetation
        vegetation_elements = extract_vegetation_info(file_path, namespaces)
 
         print(f"Processed {Path(file_path).name}: "
@@ -406,151 +521,189 @@ def process_citygml_file(file_path):
 
     return buildings, terrain_elements, vegetation_elements
 
-
+
+def parse_file(file_path, file_type=None):
+    """
+    Parse a file based on its detected type
+    """
+    if file_type is None:
+        file_ext = os.path.splitext(file_path)[1].lower()
+        if file_ext == '.gml':
+            file_type = 'citygml'
+        elif file_ext == '.xml':
+            try:
+                tree = ET.parse(file_path)
+                root = tree.getroot()
+                ns = root.nsmap
+                if any('citygml' in ns_uri.lower() for ns_uri in ns.values()):
+                    file_type = 'citygml'
+                else:
+                    file_type = 'xml'
+            except:
+                file_type = 'xml'
+        elif file_ext in ['.json', '.geojson']:
+            file_type = 'geojson'
+        else:
+            print(f"Unsupported file type: {file_ext}")
+            return None, None, None
+
+    if file_type == 'citygml':
+        return process_citygml_file(file_path)
+    elif file_type == 'geojson':
+        print(f"GeoJSON processing not implemented for {file_path}")
+        return [], [], []
+    elif file_type == 'xml':
+        print(f"Generic XML processing not implemented for {file_path}")
+        return [], [], []
+    else:
+        print(f"Unsupported file type: {file_type}")
+        return [], [], []
+
+
+def swap_coordinates_if_needed(gdf, geometry_col='geometry'):
+    """
+    Swap lat/lon coordinates in a GeoDataFrame if its geometry is in lat-lon order.
+    We assume the original data is EPSG:6697 (which is a projected coordinate system).
+    But we frequently find that data is actually lat-lon. This function ensures
+    final geometry is in the correct coordinate order (lon, lat).
+    """
+    swapped_geometries = []
+    for geom in gdf[geometry_col]:
+        # If it's a Polygon or MultiPolygon, use swap_coordinates()
+        if isinstance(geom, (Polygon, MultiPolygon)):
+            swapped_geometries.append(swap_coordinates(geom))
+        elif isinstance(geom, Point):
+            swapped_geometries.append(Point(geom.y, geom.x))
+        else:
+            swapped_geometries.append(geom)
+    return swapped_geometries
+
+
+def load_buid_dem_veg_from_citygml(url=None,
+                                   base_dir='.',
+                                   citygml_path=None,
+                                   rectangle_vertices=None):
     """
-    Load PLATEAU data, extracting Buildings, Terrain, and Vegetation data
+    Load PLATEAU data, extracting Buildings, Terrain, and Vegetation data.
+    Can process from URL (download & extract) or directly from local file.
+
+    If rectangle_vertices is provided (as [(lon1, lat1), (lon2, lat2), ...]),
+    only tiles intersecting that rectangle will be processed.
     """
-
-
+    all_buildings = []
+    all_terrain = []
+    all_vegetation = []
+
+    # Build the rectangle polygon if given
+    rectangle_polygon = None
+    if rectangle_vertices and len(rectangle_vertices) >= 3:
+        rectangle_polygon = Polygon(rectangle_vertices)
+
+    if url:
+        citygml_path, foldername = download_and_extract_zip(url, extract_to=base_dir)
+    elif citygml_path:
+        foldername = os.path.basename(citygml_path)
+    else:
+        print("Either url or citygml_path must be specified")
+        return None, None, None
 
-    #
+    # Identify CityGML files in typical folder structure
     try:
         citygml_dir = os.path.join(citygml_path, 'udx')
         if not os.path.exists(citygml_dir):
-
-
+            citygml_dir_2 = os.path.join(citygml_path, foldername, 'udx')
+            if os.path.exists(citygml_dir_2):
+                citygml_dir = citygml_dir_2
+
+        # Potential sub-folders
         bldg_dir = os.path.join(citygml_dir, 'bldg')
         dem_dir = os.path.join(citygml_dir, 'dem')
-
-        # NEW: check for vegetation folder
         veg_dir = os.path.join(citygml_dir, 'veg')
-
+
         citygml_files = []
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        for folder in [bldg_dir, dem_dir, veg_dir, citygml_dir]:
+            if os.path.exists(folder):
+                citygml_files += [
+                    os.path.join(folder, f) for f in os.listdir(folder)
+                    if f.endswith(('.gml', '.xml'))
+                ]
+
+        print(f"Found {len(citygml_files)} CityGML files to process")
+
+        for file_path in tqdm(citygml_files, desc="Processing files"):
+            filename = os.path.basename(file_path)
+
+            # If a rectangle is given, check tile intersection
+            if rectangle_polygon is not None:
+                try:
+                    tile_polygon_lonlat = get_tile_polygon_from_filename(filename)  # returns [(lon, lat), ...]
+                    tile_polygon = Polygon(tile_polygon_lonlat)
+
+                    # If no overlap, skip processing
+                    if not tile_polygon.intersects(rectangle_polygon):
+                        continue
+                except Exception as e:
+                    # If we cannot parse a tile boundary, skip or handle as you wish
+                    print(f"Warning: could not get tile boundary from {filename}: {e}, extracting the tile whether it is in the rectangle or not.")
+                    # continue
+
+            # Parse the file
+            buildings, terrain_elements, vegetation_elements = parse_file(file_path)
+            all_buildings.extend(buildings)
+            all_terrain.extend(terrain_elements)
+            all_vegetation.extend(vegetation_elements)
+
     except Exception as e:
         print(f"Error finding CityGML files: {e}")
         return None, None, None
 
-
-
-
-
-    # 3) Process each CityGML
-    for file_path in tqdm(citygml_files, desc="Processing CityGML files"):
-        buildings, terrain_elements, vegetation_elements = process_citygml_file(file_path)
-        all_buildings.extend(buildings)
-        all_terrain.extend(terrain_elements)
-        all_vegetation.extend(vegetation_elements)
+    # Convert to GeoDataFrames
+    gdf_buildings = None
+    gdf_terrain = None
+    gdf_vegetation = None
 
-    # 4) Create GeoDataFrame for Buildings
     if all_buildings:
         gdf_buildings = gpd.GeoDataFrame(all_buildings, geometry='geometry')
-        gdf_buildings.set_crs(epsg=6697, inplace=True)
-
-
-
-
-        {
-            'building_id': gdf_buildings['building_id'],
-            'height': gdf_buildings['height'],
-            'ground_elevation': gdf_buildings['ground_elevation'],
-            'source_file': gdf_buildings['source_file'],
-            'geometry': swapped_geometries
-        },
-        crs='EPSG:6697'
-        )
-
-        # Save
-        gdf_buildings_swapped['id'] = gdf_buildings_swapped.index
-        # gdf_buildings_swapped.to_file('all_buildings_with_elevation.geojson', driver='GeoJSON')
-        # print(f"\nBuildings saved to all_buildings_with_elevation.geojson")
-    else:
-        gdf_buildings_swapped = None
+        gdf_buildings.set_crs(epsg=6697, inplace=True)  # or "EPSG:4326", depending on your data
+        # Swap if needed
+        gdf_buildings['geometry'] = swap_coordinates_if_needed(gdf_buildings, geometry_col='geometry')
+        # Add an ID
+        gdf_buildings['id'] = range(len(gdf_buildings))
 
-    # 5) Create GeoDataFrame for Terrain
     if all_terrain:
         gdf_terrain = gpd.GeoDataFrame(all_terrain, geometry='geometry')
         gdf_terrain.set_crs(epsg=6697, inplace=True)
+        gdf_terrain['geometry'] = swap_coordinates_if_needed(gdf_terrain, geometry_col='geometry')
 
-        swapped_geometries = []
-        for geom in gdf_terrain.geometry:
-            if isinstance(geom, (Polygon, MultiPolygon)):
-                swapped_geometries.append(swap_coordinates(geom))
-            elif isinstance(geom, Point):
-                swapped_geometries.append(Point(geom.y, geom.x))
-            else:
-                swapped_geometries.append(geom)
-
-        terrain_data = {
-            'relief_id': gdf_terrain.get('relief_id', ''),
-            'tin_id': gdf_terrain.get('tin_id', ''),
-            'triangle_id': gdf_terrain.get('triangle_id', ''),
-            'breakline_id': gdf_terrain.get('breakline_id', ''),
-            'mass_point_id': gdf_terrain.get('mass_point_id', ''),
-            'point_id': gdf_terrain.get('point_id', ''),
-            'elevation': gdf_terrain['elevation'],
-            'source_file': gdf_terrain['source_file'],
-            'geometry': swapped_geometries
-        }
-
-        gdf_terrain_swapped = gpd.GeoDataFrame(terrain_data, geometry='geometry', crs='EPSG:6697')
-        # gdf_terrain_swapped.to_file('terrain_elevation.geojson', driver='GeoJSON')
-        # print(f"Terrain saved to terrain_elevation.geojson")
-    else:
-        gdf_terrain_swapped = None
-
-    # 6) Create GeoDataFrame for Vegetation
     if all_vegetation:
-
-
-
-        swapped_geometries = []
-        for geom in gdf_veg.geometry:
-            if isinstance(geom, (Polygon, MultiPolygon)):
-                swapped_geometries.append(swap_coordinates(geom))
-            elif isinstance(geom, Point):
-                swapped_geometries.append(Point(geom.y, geom.x))
-            else:
-                swapped_geometries.append(geom)
-
-        vegetation_data = {
-            'object_type': gdf_veg.get('object_type', ''),
-            'vegetation_id': gdf_veg.get('vegetation_id', ''),
-            'height': gdf_veg.get('height', None),
-            'avg_elevation': gdf_veg.get('avg_elevation', None),  # Use .get() with a default
-            'source_file': gdf_veg.get('source_file', ''),
-            'geometry': swapped_geometries
-        }
-        gdf_vegetation_swapped = gpd.GeoDataFrame(vegetation_data, geometry='geometry', crs='EPSG:6697')
-        # gdf_vegetation_swapped.to_file('vegetation_elevation.geojson', driver='GeoJSON')
-        # print(f"Vegetation saved to vegetation_elevation.geojson")
-    else:
-        gdf_vegetation_swapped = None
+        gdf_vegetation = gpd.GeoDataFrame(all_vegetation, geometry='geometry')
+        gdf_vegetation.set_crs(epsg=6697, inplace=True)
+        gdf_vegetation['geometry'] = swap_coordinates_if_needed(gdf_vegetation, geometry_col='geometry')
 
-    return
+    return gdf_buildings, gdf_terrain, gdf_vegetation
+
+
+def process_single_file(file_path):
+    """
+    Process a single file (for testing)
+    """
+    file_ext = os.path.splitext(file_path)[1].lower()
+    if file_ext in ['.gml', '.xml']:
+        buildings, terrain, vegetation = parse_file(file_path)
+        print(f"\nProcessed {file_path}:")
+        print(f"  - {len(buildings)} buildings extracted")
+        print(f"  - {len(terrain)} terrain elements extracted")
+        print(f"  - {len(vegetation)} vegetation objects extracted")
+
+        # Example: create building GeoDataFrame and save to GeoJSON
+        if buildings:
+            gdf_buildings = gpd.GeoDataFrame(buildings, geometry='geometry')
+            gdf_buildings.set_crs(epsg=6697, inplace=True)
+            output_file = os.path.splitext(file_path)[0] + "_buildings.geojson"
+            gdf_buildings.to_file(output_file, driver='GeoJSON')
+            print(f"Buildings saved to {output_file}")
+
+        return buildings, terrain, vegetation
+    else:
+        print(f"Unsupported file type: {file_ext}")
+        return None, None, None
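Usage sketch (not part of the diff): the tile-boundary helpers added above can be called directly. The filename below is a made-up example in the PLATEAU naming style; the expected bounds follow the mesh-code arithmetic in decode_2nd_level_mesh.

from shapely.geometry import Polygon
from voxcity.downloader.citygml import get_tile_polygon_from_filename

# "53394611" = 2nd-level mesh 533946 (lat 35.6667-35.75, lon 139.75-139.875)
# plus the 10x10 sub-cell selected by the trailing "11".
corners = get_tile_polygon_from_filename("53394611_bldg_6697_op.gml")
tile = Polygon(corners)
print(tile.bounds)  # roughly (139.7625, 35.675, 139.775, 35.6833)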
{voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/generator.py
@@ -23,7 +23,7 @@ from .downloader.oemj import save_oemj_as_geotiff
 from .downloader.omt import load_gdf_from_openmaptiles
 from .downloader.eubucco import load_gdf_from_eubucco
 from .downloader.overture import load_gdf_from_overture
-from .downloader.citygml import
+from .downloader.citygml import load_buid_dem_veg_from_citygml
 from .downloader.gee import (
     initialize_earth_engine,
     get_roi,
@@ -691,7 +691,7 @@ def get_voxcity(rectangle_vertices, building_source, land_cover_source, canopy_h
 
     return voxcity_grid, building_height_grid, building_min_height_grid, building_id_grid, canopy_height_grid, land_cover_grid, dem_grid, building_gdf
 
-def get_voxcity_CityGML(rectangle_vertices,
+def get_voxcity_CityGML(rectangle_vertices, land_cover_source, canopy_height_source, meshsize, url_citygml=None, citygml_path=None, **kwargs):
     """Main function to generate a complete voxel city model.
 
     Args:
@@ -728,7 +728,7 @@ def get_voxcity_CityGML(rectangle_vertices, url_citygml, land_cover_source, cano
     kwargs.pop('output_dir', None)
 
     # get all required gdfs
-    building_gdf, terrain_gdf, vegetation_gdf =
+    building_gdf, terrain_gdf, vegetation_gdf = load_buid_dem_veg_from_citygml(url=url_citygml, citygml_path=citygml_path, base_dir=output_dir, rectangle_vertices=rectangle_vertices)
 
     land_cover_grid = get_land_cover_grid(rectangle_vertices, meshsize, land_cover_source, output_dir, **kwargs)
 
@@ -817,7 +817,7 @@ def get_voxcity_CityGML(rectangle_vertices, url_citygml, land_cover_source, cano
     # Generate 3D voxel grid
     voxcity_grid = create_3d_voxel(building_height_grid, building_min_height_grid, building_id_grid, land_cover_grid, dem_grid, canopy_height_grid, meshsize, land_cover_source)
 
-    return voxcity_grid, building_height_grid, building_min_height_grid, building_id_grid, canopy_height_grid, land_cover_grid, dem_grid,
+    return voxcity_grid, building_height_grid, building_min_height_grid, building_id_grid, canopy_height_grid, land_cover_grid, dem_grid, filtered_buildings
 
 def replace_nan_in_nested(arr, replace_value=10.0):
     """Replace NaN values in a nested array structure with a specified value.
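Usage sketch (not part of the diff) for the reworked get_voxcity_CityGML signature; the source names and the local CityGML path below are placeholders, not values documented here.

from voxcity.generator import get_voxcity_CityGML

rectangle_vertices = [
    (139.75, 35.675), (139.78, 35.675),
    (139.78, 35.695), (139.75, 35.695),
]  # (lon, lat) pairs

result = get_voxcity_CityGML(
    rectangle_vertices,
    "<land cover source>",       # placeholder
    "<canopy height source>",    # placeholder
    5,                           # meshsize (grid cell size)
    citygml_path="path/to/plateau/dataset",  # or url_citygml="https://..."
    output_dir="output",
)
voxcity_grid = result[0]  # remaining items follow the return tuple shown above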
{voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/geoprocessor/grid.py
@@ -11,6 +11,7 @@ from pyproj import Geod, Transformer, CRS
 import rasterio
 from affine import Affine
 from shapely.geometry import box, Polygon, Point, MultiPolygon
+import warnings
 
 from scipy.interpolate import griddata
 from shapely.errors import GEOSException
{voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity/utils/visualization.py
@@ -43,6 +43,7 @@ from ..geoprocessor.mesh import (
     create_city_meshes,
     export_meshes
 )
+# from ..exporter.obj import save_obj_from_colored_mesh
 from .material import get_material_dict
 
 # def get_voxel_color_map():
@@ -74,6 +75,7 @@ from .material import get_material_dict
 # 13: [150, 166, 190], # 'Building (ground surface)'
 # 14: [239, 228, 176], # 'No Data (ground surface)'
 # }
+
 def get_voxel_color_map(color_scheme='default'):
     """
     Returns a color map for voxel visualization based on the specified color scheme.
@@ -1528,6 +1530,7 @@ def visualize_voxcity_multi_view(voxel_array, meshsize, **kwargs):
     vmax = kwargs.get("vmax", np.nanmax(sim_grid))
     projection_type = kwargs.get("projection_type", "perspective")
     distance_factor = kwargs.get("distance_factor", 1.0)
+    save_obj = kwargs.get("save_obj", False)
 
     # Create meshes
     print("Creating voxel meshes...")
@@ -1579,6 +1582,13 @@ def visualize_voxcity_multi_view(voxel_array, meshsize, **kwargs):
         plt.axis('off')
         plt.show()
         plt.close()
+
+    # After creating the meshes and before visualization
+    if save_obj:
+        output_directory = kwargs.get('output_directory', 'output')
+        output_file_name = kwargs.get('output_file_name', 'voxcity_mesh')
+        obj_path, mtl_path = save_obj_from_colored_mesh(meshes, output_directory, output_file_name)
+        print(f"Saved mesh files to:\n  {obj_path}\n  {mtl_path}")
 
 def visualize_voxcity_multi_view_with_multiple_sim_grids(voxel_array, meshsize, sim_configs, **kwargs):
     """
@@ -1610,7 +1620,9 @@ def visualize_voxcity_multi_view_with_multiple_sim_grids(voxel_array, meshsize,
     # Configure PyVista settings
     pv.set_plot_theme('document')
     pv.global_theme.background = 'white'
-
+    window_width = kwargs.get("window_width", 1024)
+    window_height = kwargs.get("window_height", 768)
+    pv.global_theme.window_size = [window_width, window_height]
     pv.global_theme.jupyter_backend = 'static'
 
     # Parse general kwargs
@@ -1743,7 +1755,9 @@ def visualize_voxcity_with_sim_meshes(voxel_array, meshsize, custom_meshes=None,
     # Configure PyVista settings
     pv.set_plot_theme('document')
     pv.global_theme.background = 'white'
-
+    window_width = kwargs.get("window_width", 1024)
+    window_height = kwargs.get("window_height", 768)
+    pv.global_theme.window_size = [window_width, window_height]
     pv.global_theme.jupyter_backend = 'static'
 
     # Parse kwargs
@@ -1764,6 +1778,7 @@ def visualize_voxcity_with_sim_meshes(voxel_array, meshsize, custom_meshes=None,
     colorbar_title = kwargs.get("colorbar_title", "")
     value_name = kwargs.get("value_name", None)
     nan_color = kwargs.get("nan_color", "gray")
+    save_obj = kwargs.get("save_obj", False)
 
     if value_name is None:
         print("Set value_name")
@@ -1891,7 +1906,7 @@ def visualize_voxcity_with_sim_meshes(voxel_array, meshsize, custom_meshes=None,
 
     # Display each view separately
     for view_name, img_file in image_files:
-        plt.figure(figsize=(
+        plt.figure(figsize=(24, 16))
         img = plt.imread(img_file)
         plt.imshow(img)
         plt.title(view_name.replace('_', ' ').title(), pad=20)
@@ -1899,6 +1914,13 @@ def visualize_voxcity_with_sim_meshes(voxel_array, meshsize, custom_meshes=None,
         plt.show()
         plt.close()
 
+    # After creating the meshes and before visualization
+    if save_obj:
+        output_directory = kwargs.get('output_directory', 'output')
+        output_file_name = kwargs.get('output_file_name', 'voxcity_mesh')
+        obj_path, mtl_path = save_obj_from_colored_mesh(meshes, output_directory, output_file_name)
+        print(f"Saved mesh files to:\n  {obj_path}\n  {mtl_path}")
+
     return image_files
 
 def visualize_building_sim_results(voxel_array, meshsize, building_sim_mesh, **kwargs):
@@ -1952,4 +1974,70 @@ def visualize_building_sim_results(voxel_array, meshsize, building_sim_mesh, **k
         meshsize,
         custom_meshes=custom_meshes,
         **kwargs
-    )
+    )
+
+def save_obj_from_colored_mesh(meshes, output_path, base_filename):
+    """
+    Save colored meshes as OBJ and MTL files.
+
+    Parameters
+    ----------
+    meshes : dict
+        Dictionary of trimesh.Trimesh objects with face colors.
+    output_path : str
+        Directory path where to save the files.
+    base_filename : str
+        Base name for the output files (without extension).
+
+    Returns
+    -------
+    tuple
+        Paths to the saved (obj_file, mtl_file).
+    """
+
+    os.makedirs(output_path, exist_ok=True)
+    obj_path = os.path.join(output_path, f"{base_filename}.obj")
+    mtl_path = os.path.join(output_path, f"{base_filename}.mtl")
+
+    # Combine all meshes
+    combined_mesh = trimesh.util.concatenate(list(meshes.values()))
+
+    # Create unique materials for each unique face color
+    face_colors = combined_mesh.visual.face_colors
+    unique_colors = np.unique(face_colors, axis=0)
+
+    # Write MTL file
+    with open(mtl_path, 'w') as mtl_file:
+        for i, color in enumerate(unique_colors):
+            material_name = f'material_{i}'
+            mtl_file.write(f'newmtl {material_name}\n')
+            # Convert RGBA to RGB float values
+            rgb = color[:3].astype(float) / 255.0
+            mtl_file.write(f'Kd {rgb[0]:.6f} {rgb[1]:.6f} {rgb[2]:.6f}\n')
+            mtl_file.write(f'd {color[3]/255.0:.6f}\n\n')  # Alpha value
+
+    # Create material groups based on face colors
+    color_to_material = {tuple(c): f'material_{i}' for i, c in enumerate(unique_colors)}
+
+    # Write OBJ file
+    with open(obj_path, 'w') as obj_file:
+        obj_file.write(f'mtllib {os.path.basename(mtl_path)}\n')
+
+        # Write vertices
+        for vertex in combined_mesh.vertices:
+            obj_file.write(f'v {vertex[0]:.6f} {vertex[1]:.6f} {vertex[2]:.6f}\n')
+
+        # Write faces grouped by material
+        current_material = None
+        for face_idx, face in enumerate(combined_mesh.faces):
+            face_color = tuple(face_colors[face_idx])
+            material_name = color_to_material[face_color]
+
+            if material_name != current_material:
+                obj_file.write(f'usemtl {material_name}\n')
+                current_material = material_name
+
+            # OBJ indices are 1-based
+            obj_file.write(f'f {face[0]+1} {face[1]+1} {face[2]+1}\n')
+
+    return obj_path, mtl_path
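Usage sketch (not part of the diff) for the new save_obj option; voxcity_grid and meshsize are assumed to come from an earlier get_voxcity / get_voxcity_CityGML call.

from voxcity.utils.visualization import visualize_voxcity_multi_view

visualize_voxcity_multi_view(
    voxcity_grid,
    meshsize,
    save_obj=True,                    # triggers save_obj_from_colored_mesh
    output_directory="output",        # defaults shown in the diff
    output_file_name="voxcity_mesh",
)
# Expected output: output/voxcity_mesh.obj and output/voxcity_mesh.mtl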
{voxcity-0.5.6 → voxcity-0.5.9}/src/voxcity.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: voxcity
-Version: 0.5.6
+Version: 0.5.9
 Summary: voxcity is an easy and one-stop tool to output 3d city models for microclimate simulation by integrating multiple geospatial open-data
 Author-email: Kunihiko Fujiwara <kunihiko@nus.edu.sg>
 Maintainer-email: Kunihiko Fujiwara <kunihiko@nus.edu.sg>
@@ -45,7 +45,7 @@ Requires-Dist: pycountry
 Requires-Dist: osm2geojson
 Requires-Dist: seaborn
 Requires-Dist: overturemaps
-Requires-Dist: protobuf
+Requires-Dist: protobuf<6,>=4.21
 Requires-Dist: timezonefinder
 Requires-Dist: astral
 Requires-Dist: osmnx