voxcity 0.6.15__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- voxcity/__init__.py +14 -8
- voxcity/downloader/__init__.py +2 -1
- voxcity/downloader/citygml.py +32 -18
- voxcity/downloader/gba.py +210 -0
- voxcity/downloader/gee.py +5 -1
- voxcity/downloader/mbfp.py +1 -1
- voxcity/downloader/oemj.py +80 -8
- voxcity/downloader/osm.py +23 -7
- voxcity/downloader/overture.py +26 -1
- voxcity/downloader/utils.py +73 -73
- voxcity/errors.py +30 -0
- voxcity/exporter/__init__.py +13 -4
- voxcity/exporter/cityles.py +633 -535
- voxcity/exporter/envimet.py +728 -708
- voxcity/exporter/magicavoxel.py +334 -297
- voxcity/exporter/netcdf.py +238 -0
- voxcity/exporter/obj.py +1481 -655
- voxcity/generator/__init__.py +44 -0
- voxcity/generator/api.py +675 -0
- voxcity/generator/grids.py +379 -0
- voxcity/generator/io.py +94 -0
- voxcity/generator/pipeline.py +282 -0
- voxcity/generator/voxelizer.py +380 -0
- voxcity/geoprocessor/__init__.py +75 -6
- voxcity/geoprocessor/conversion.py +153 -0
- voxcity/geoprocessor/draw.py +62 -12
- voxcity/geoprocessor/heights.py +199 -0
- voxcity/geoprocessor/io.py +101 -0
- voxcity/geoprocessor/merge_utils.py +91 -0
- voxcity/geoprocessor/mesh.py +806 -790
- voxcity/geoprocessor/network.py +708 -679
- voxcity/geoprocessor/overlap.py +84 -0
- voxcity/geoprocessor/raster/__init__.py +82 -0
- voxcity/geoprocessor/raster/buildings.py +428 -0
- voxcity/geoprocessor/raster/canopy.py +258 -0
- voxcity/geoprocessor/raster/core.py +150 -0
- voxcity/geoprocessor/raster/export.py +93 -0
- voxcity/geoprocessor/raster/landcover.py +156 -0
- voxcity/geoprocessor/raster/raster.py +110 -0
- voxcity/geoprocessor/selection.py +85 -0
- voxcity/geoprocessor/utils.py +18 -14
- voxcity/models.py +113 -0
- voxcity/simulator/common/__init__.py +22 -0
- voxcity/simulator/common/geometry.py +98 -0
- voxcity/simulator/common/raytracing.py +450 -0
- voxcity/simulator/solar/__init__.py +43 -0
- voxcity/simulator/solar/integration.py +336 -0
- voxcity/simulator/solar/kernels.py +62 -0
- voxcity/simulator/solar/radiation.py +648 -0
- voxcity/simulator/solar/temporal.py +434 -0
- voxcity/simulator/view.py +36 -2286
- voxcity/simulator/visibility/__init__.py +29 -0
- voxcity/simulator/visibility/landmark.py +392 -0
- voxcity/simulator/visibility/view.py +508 -0
- voxcity/utils/logging.py +61 -0
- voxcity/utils/orientation.py +51 -0
- voxcity/utils/weather/__init__.py +26 -0
- voxcity/utils/weather/epw.py +146 -0
- voxcity/utils/weather/files.py +36 -0
- voxcity/utils/weather/onebuilding.py +486 -0
- voxcity/visualizer/__init__.py +24 -0
- voxcity/visualizer/builder.py +43 -0
- voxcity/visualizer/grids.py +141 -0
- voxcity/visualizer/maps.py +187 -0
- voxcity/visualizer/palette.py +228 -0
- voxcity/visualizer/renderer.py +928 -0
- {voxcity-0.6.15.dist-info → voxcity-0.7.0.dist-info}/METADATA +113 -36
- voxcity-0.7.0.dist-info/RECORD +77 -0
- {voxcity-0.6.15.dist-info → voxcity-0.7.0.dist-info}/WHEEL +1 -1
- voxcity/generator.py +0 -1137
- voxcity/geoprocessor/grid.py +0 -1568
- voxcity/geoprocessor/polygon.py +0 -1344
- voxcity/simulator/solar.py +0 -2329
- voxcity/utils/visualization.py +0 -2660
- voxcity/utils/weather.py +0 -817
- voxcity-0.6.15.dist-info/RECORD +0 -37
- {voxcity-0.6.15.dist-info → voxcity-0.7.0.dist-info/licenses}/AUTHORS.rst +0 -0
- {voxcity-0.6.15.dist-info → voxcity-0.7.0.dist-info/licenses}/LICENSE +0 -0
voxcity/__init__.py
CHANGED
|
@@ -1,8 +1,14 @@
|
|
|
1
|
-
__author__ = """Kunihiko Fujiwara"""
|
|
2
|
-
__email__ = 'kunihiko@nus.edu.sg'
|
|
3
|
-
__version__ = '0.1.0'
|
|
4
|
-
|
|
5
|
-
#
|
|
6
|
-
#
|
|
7
|
-
#
|
|
8
|
-
#
|
|
1
|
+
__author__ = """Kunihiko Fujiwara"""
|
|
2
|
+
__email__ = 'kunihiko@nus.edu.sg'
|
|
3
|
+
__version__ = '0.1.0'
|
|
4
|
+
|
|
5
|
+
# Keep package __init__ lightweight to avoid import-time failures.
|
|
6
|
+
# Re-exports of heavy modules/classes are intentionally omitted here.
|
|
7
|
+
# Downstream modules should import directly from their subpackages, e.g.:
|
|
8
|
+
# from voxcity.geoprocessor.draw import draw_rectangle_map_cityname
|
|
9
|
+
|
|
10
|
+
__all__ = [
|
|
11
|
+
"__author__",
|
|
12
|
+
"__email__",
|
|
13
|
+
"__version__",
|
|
14
|
+
]
|
voxcity/downloader/__init__.py
CHANGED
voxcity/downloader/citygml.py
CHANGED
|
@@ -156,13 +156,16 @@ def get_tile_polygon_from_filename(filename):
|
|
|
156
156
|
# Original script logic
|
|
157
157
|
# --------------------------------------------------------------------
|
|
158
158
|
|
|
159
|
-
def download_and_extract_zip(url, extract_to='.'):
|
|
159
|
+
def download_and_extract_zip(url, extract_to='.', ssl_verify=True, ca_bundle=None, timeout=60):
|
|
160
160
|
"""
|
|
161
161
|
Download and extract a zip file from a URL to specified directory.
|
|
162
162
|
|
|
163
163
|
Args:
|
|
164
164
|
url (str): URL of the zip file to download.
|
|
165
165
|
extract_to (str): Directory to extract files to (default: current directory).
|
|
166
|
+
ssl_verify (bool): Whether to verify SSL certificates (default: True).
|
|
167
|
+
ca_bundle (str|None): Path to a CA bundle file. Overrides verify when provided.
|
|
168
|
+
timeout (int|float): Request timeout in seconds (default: 60).
|
|
166
169
|
|
|
167
170
|
Returns:
|
|
168
171
|
tuple: (extraction_path, folder_name) where files were extracted.
|
|
@@ -171,21 +174,27 @@ def download_and_extract_zip(url, extract_to='.'):
|
|
|
171
174
|
- Creates a subdirectory named after the zip file (without .zip)
|
|
172
175
|
- Prints status messages for success/failure
|
|
173
176
|
"""
|
|
174
|
-
|
|
175
|
-
|
|
176
|
-
|
|
177
|
-
|
|
178
|
-
|
|
179
|
-
|
|
180
|
-
|
|
181
|
-
|
|
182
|
-
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
187
|
-
|
|
188
|
-
|
|
177
|
+
verify_arg = ca_bundle if ca_bundle else ssl_verify
|
|
178
|
+
try:
|
|
179
|
+
response = requests.get(url, verify=verify_arg, timeout=timeout)
|
|
180
|
+
if response.status_code == 200:
|
|
181
|
+
parsed_url = urlparse(url)
|
|
182
|
+
zip_filename = os.path.basename(parsed_url.path)
|
|
183
|
+
folder_name = os.path.splitext(zip_filename)[0] # Remove the .zip extension
|
|
184
|
+
|
|
185
|
+
extraction_path = os.path.join(extract_to, folder_name)
|
|
186
|
+
os.makedirs(extraction_path, exist_ok=True)
|
|
187
|
+
|
|
188
|
+
zip_file = io.BytesIO(response.content)
|
|
189
|
+
with zipfile.ZipFile(zip_file) as z:
|
|
190
|
+
z.extractall(extraction_path)
|
|
191
|
+
print(f"Extracted to {extraction_path}")
|
|
192
|
+
else:
|
|
193
|
+
print(f"Failed to download the file. Status code: {response.status_code}")
|
|
194
|
+
except requests.exceptions.SSLError as e:
|
|
195
|
+
print("SSL error when downloading CityGML zip. You can pass 'ssl_verify=False' to skip verification, "
|
|
196
|
+
"or provide a CA bundle path via 'ca_bundle'. Error:", e)
|
|
197
|
+
raise
|
|
189
198
|
|
|
190
199
|
return extraction_path, folder_name
|
|
191
200
|
|
|
@@ -848,7 +857,10 @@ def swap_coordinates_if_needed(gdf, geometry_col='geometry'):
|
|
|
848
857
|
def load_buid_dem_veg_from_citygml(url=None,
|
|
849
858
|
base_dir='.',
|
|
850
859
|
citygml_path=None,
|
|
851
|
-
rectangle_vertices=None
|
|
860
|
+
rectangle_vertices=None,
|
|
861
|
+
ssl_verify=True,
|
|
862
|
+
ca_bundle=None,
|
|
863
|
+
timeout=60):
|
|
852
864
|
"""
|
|
853
865
|
Load and process PLATEAU data from URL or local files.
|
|
854
866
|
|
|
@@ -879,7 +891,9 @@ def load_buid_dem_veg_from_citygml(url=None,
|
|
|
879
891
|
rectangle_polygon = Polygon(rectangle_vertices)
|
|
880
892
|
|
|
881
893
|
if url:
|
|
882
|
-
citygml_path, foldername = download_and_extract_zip(
|
|
894
|
+
citygml_path, foldername = download_and_extract_zip(
|
|
895
|
+
url, extract_to=base_dir, ssl_verify=ssl_verify, ca_bundle=ca_bundle, timeout=timeout
|
|
896
|
+
)
|
|
883
897
|
elif citygml_path:
|
|
884
898
|
foldername = os.path.basename(citygml_path)
|
|
885
899
|
else:
|
|
@@ -0,0 +1,210 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Downloader for Global Building Atlas (GBA) LOD1 polygons.
|
|
3
|
+
|
|
4
|
+
This module downloads GeoParquet tiles from the Global Building Atlas (GBA)
|
|
5
|
+
hosted at data.source.coop, selects tiles intersecting a user-specified
|
|
6
|
+
rectangle, loads them into a GeoDataFrame, and filters features to the
|
|
7
|
+
rectangle extent.
|
|
8
|
+
|
|
9
|
+
Tile scheme:
|
|
10
|
+
- Global 5x5-degree tiles named like: e010_n50_e015_n45.parquet
|
|
11
|
+
- longitudes: e/w with 3-digit zero padding (e.g., e010, w060)
|
|
12
|
+
- latitudes: n/s with 2-digit zero padding (e.g., n50, s25)
|
|
13
|
+
- filename order: west_lon, north_lat, east_lon, south_lat
|
|
14
|
+
|
|
15
|
+
Usage:
|
|
16
|
+
gdf = load_gdf_from_gba(rectangle_vertices=[(lon1, lat1), (lon2, lat2), ...])
|
|
17
|
+
|
|
18
|
+
Notes:
|
|
19
|
+
- Output CRS is EPSG:4326.
|
|
20
|
+
- Requires pyarrow or fastparquet for parquet reading via GeoPandas.
|
|
21
|
+
"""
|
|
22
|
+
|
|
23
|
+
from __future__ import annotations
|
|
24
|
+
|
|
25
|
+
import math
|
|
26
|
+
import os
|
|
27
|
+
import tempfile
|
|
28
|
+
from typing import Iterable, List, Optional, Sequence, Tuple
|
|
29
|
+
|
|
30
|
+
import geopandas as gpd
|
|
31
|
+
import pandas as pd
|
|
32
|
+
import requests
|
|
33
|
+
from shapely.geometry import Polygon
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
def _bbox_from_rectangle_vertices(vertices: Sequence[Tuple[float, float]]) -> Tuple[float, float, float, float]:
|
|
37
|
+
"""
|
|
38
|
+
Convert rectangle vertices in (lon, lat) into bbox as (min_lon, min_lat, max_lon, max_lat).
|
|
39
|
+
"""
|
|
40
|
+
if not vertices:
|
|
41
|
+
raise ValueError("rectangle_vertices must be a non-empty sequence of (lon, lat)")
|
|
42
|
+
lons = [v[0] for v in vertices]
|
|
43
|
+
lats = [v[1] for v in vertices]
|
|
44
|
+
return (min(lons), min(lats), max(lons), max(lats))
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def _pad_lon(deg: int) -> str:
|
|
48
|
+
return f"{abs(deg):03d}"
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def _pad_lat(deg: int) -> str:
|
|
52
|
+
return f"{abs(deg):02d}"
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def _lon_tag(deg: int) -> str:
    """Hemisphere-prefixed longitude tag, e.g. 10 -> 'e010', -60 -> 'w060' (0 maps to 'e')."""
    hemisphere = "w" if deg < 0 else "e"
    return hemisphere + _pad_lon(deg)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def _lat_tag(deg: int) -> str:
    """Hemisphere-prefixed latitude tag, e.g. 50 -> 'n50', -25 -> 's25' (0 maps to 'n')."""
    hemisphere = "s" if deg < 0 else "n"
    return hemisphere + _pad_lat(deg)
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def _snap_down(value: float, step: int) -> int:
|
|
64
|
+
return int(math.floor(value / step) * step)
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def _snap_up(value: float, step: int) -> int:
|
|
68
|
+
return int(math.ceil(value / step) * step)
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def _generate_tile_bounds_for_bbox(
    min_lon: float, min_lat: float, max_lon: float, max_lat: float, tile_size_deg: int = 5
) -> Iterable[Tuple[int, int, int, int]]:
    """Yield integer-degree tile bounds (west, south, east, north) covering the bbox.

    Bounds are snapped outward to the tile_size_deg grid, so every tile that
    overlaps the bbox is produced exactly once.
    """
    lon_steps = range(
        _snap_down(min_lon, tile_size_deg), _snap_up(max_lon, tile_size_deg), tile_size_deg
    )
    lat_steps = range(
        _snap_down(min_lat, tile_size_deg), _snap_up(max_lat, tile_size_deg), tile_size_deg
    )
    for tile_west in lon_steps:
        for tile_south in lat_steps:
            yield (tile_west, tile_south, tile_west + tile_size_deg, tile_south + tile_size_deg)
|
|
86
|
+
|
|
87
|
+
|
|
88
|
+
def _tile_filename(west: int, south: int, east: int, north: int) -> str:
    """Build the GBA tile filename for the given integer-degree bounds.

    Naming convention examples:
        e010_n50_e015_n45.parquet
        e140_s25_e145_s30.parquet
        w060_s30_w055_s35.parquet
    """
    # Filename order is west, north, east, south.
    corner_tags = (_lon_tag(west), _lat_tag(north), _lon_tag(east), _lat_tag(south))
    return "_".join(corner_tags) + ".parquet"
|
|
97
|
+
|
|
98
|
+
|
|
99
|
+
def _tile_url(base_url: str, west: int, south: int, east: int, north: int) -> str:
    """Return the download URL for the tile with the given integer-degree bounds.

    Joins *base_url* (trailing slash stripped) with the tile's parquet filename.
    """
    filename = _tile_filename(west, south, east, north)
    # Bug fix: previously the f-string interpolated the literal "(unknown)"
    # instead of the computed filename (which was built and then never used),
    # so every generated tile URL was invalid.
    return f"{base_url.rstrip('/')}/{filename}"
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def _download_parquet(url: str, download_dir: str, timeout: int = 60) -> Optional[str]:
    """Stream the parquet file at *url* into *download_dir*.

    Returns:
        The local file path on success, or None when the server does not
        answer with HTTP 200 or a requests-level network error occurs.
    """
    destination = os.path.join(download_dir, os.path.basename(url))
    try:
        with requests.get(url, stream=True, timeout=timeout) as response:
            if response.status_code != 200:
                return None
            # Write in 1 MiB chunks to keep memory bounded for large tiles.
            with open(destination, "wb") as out_file:
                for block in response.iter_content(chunk_size=1024 * 1024):
                    if block:
                        out_file.write(block)
        return destination
    except requests.RequestException:
        return None
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
def _filter_to_rectangle(gdf: gpd.GeoDataFrame, rectangle: Polygon, clip: bool) -> gpd.GeoDataFrame:
    """Restrict *gdf* to features intersecting *rectangle* (EPSG:4326).

    Args:
        gdf: Input features; reprojected to EPSG:4326 when a different CRS is set.
        rectangle: Area-of-interest polygon in lon/lat coordinates.
        clip: If True, surviving geometries are also trimmed to the rectangle outline.

    Returns:
        A new GeoDataFrame in EPSG:4326 containing only intersecting features.
    """
    # Drop rows without geometry; copy so the in-place CRS assignment below is safe.
    gdf = gdf[gdf.geometry.notnull()].copy()
    # Ensure CRS is WGS84
    if gdf.crs is None:
        # NOTE(review): a missing CRS is assumed to already be WGS84 — confirm for GBA tiles.
        gdf.set_crs(epsg=4326, inplace=True)
    elif gdf.crs.to_epsg() != 4326:
        gdf = gdf.to_crs(epsg=4326)

    # Keep any feature that touches the rectangle (boundary contact included).
    intersects = gdf.intersects(rectangle)
    gdf = gdf[intersects].copy()
    if clip and not gdf.empty:
        # GeoPandas clip performs overlay to trim geometries to rectangle
        gdf = gpd.clip(gdf, gpd.GeoSeries([rectangle], crs="EPSG:4326").to_frame("geometry"))
    return gdf
|
|
137
|
+
|
|
138
|
+
|
|
139
|
+
def load_gdf_from_gba(
    rectangle_vertices: Sequence[Tuple[float, float]],
    base_url: str = "https://data.source.coop/tge-labs/globalbuildingatlas-lod1",
    download_dir: Optional[str] = None,
    clip_to_rectangle: bool = False,
) -> Optional[gpd.GeoDataFrame]:
    """
    Download GBA tiles intersecting a rectangle and return combined GeoDataFrame.

    Args:
        rectangle_vertices: Sequence of (lon, lat) defining the area of interest.
        base_url: Base URL hosting GBA parquet tiles.
        download_dir: Optional directory to store downloaded tiles. If None, a
            fresh temporary directory is created; note this function does not
            remove it afterwards.
        clip_to_rectangle: If True, geometries are clipped to rectangle extent.

    Returns:
        GeoDataFrame with EPSG:4326 geometry and an 'id' column, or None if no data.
    """
    # Axis-aligned bounding box of the request area, expressed as a closed polygon.
    min_lon, min_lat, max_lon, max_lat = _bbox_from_rectangle_vertices(rectangle_vertices)
    rectangle = Polygon([
        (min_lon, min_lat),
        (max_lon, min_lat),
        (max_lon, max_lat),
        (min_lon, max_lat),
        (min_lon, min_lat),
    ])

    # NOTE(review): tmp_dir_created is set but never read — dead flag, candidate for removal.
    tmp_dir_created = False
    if download_dir is None:
        download_dir = tempfile.mkdtemp(prefix="gba_tiles_")
        tmp_dir_created = True
    else:
        os.makedirs(download_dir, exist_ok=True)

    # Fetch every 5-degree tile overlapping the bbox; tiles that are missing or
    # fail to download return None and are skipped.
    local_files: List[str] = []
    for west, south, east, north in _generate_tile_bounds_for_bbox(min_lon, min_lat, max_lon, max_lat):
        url = _tile_url(base_url, west, south, east, north)
        local = _download_parquet(url, download_dir)
        if local is not None:
            local_files.append(local)

    if not local_files:
        return None

    gdfs: List[gpd.GeoDataFrame] = []
    for path in local_files:
        try:
            # GeoParquet read
            gdf = gpd.read_parquet(path)
            if gdf is not None and not gdf.empty:
                gdfs.append(gdf)
        except Exception:
            # Skip unreadable tiles
            continue

    if not gdfs:
        return None

    # Merge all tiles, then keep only features intersecting the rectangle
    # (optionally clipped to its outline).
    combined = pd.concat(gdfs, ignore_index=True)
    combined = gpd.GeoDataFrame(combined, geometry="geometry")
    combined = _filter_to_rectangle(combined, rectangle, clip=clip_to_rectangle)

    if combined.empty:
        return None

    # Ensure sequential ids
    # NOTE(review): the index may be non-contiguous after filtering, so ids are
    # unique but not necessarily sequential — confirm intended contract.
    combined["id"] = combined.index.astype(int)
    combined.set_crs(epsg=4326, inplace=True, allow_override=True)
    return combined
|
|
209
|
+
|
|
210
|
+
|
voxcity/downloader/gee.py
CHANGED
|
@@ -217,7 +217,11 @@ def get_dem_image(roi_buffered, source):
|
|
|
217
217
|
# dem = collection.mosaic()
|
|
218
218
|
|
|
219
219
|
# elif source == 'FABDEM':
|
|
220
|
-
|
|
220
|
+
# If we reach here without assigning `dem`, the source is unsupported
|
|
221
|
+
try:
|
|
222
|
+
return dem.clip(roi_buffered)
|
|
223
|
+
except UnboundLocalError:
|
|
224
|
+
raise ValueError(f"Unsupported or unimplemented DEM source: {source}")
|
|
221
225
|
|
|
222
226
|
def save_geotiff_esa_land_cover(roi, geotiff_path):
|
|
223
227
|
"""Save ESA WorldCover land cover data as a colored GeoTIFF.
|
voxcity/downloader/mbfp.py
CHANGED
|
@@ -20,7 +20,7 @@ import pandas as pd
|
|
|
20
20
|
import os
|
|
21
21
|
from .utils import download_file
|
|
22
22
|
from ..geoprocessor.utils import tile_from_lat_lon, quadkey_to_tile
|
|
23
|
-
from ..geoprocessor.
|
|
23
|
+
from ..geoprocessor.io import load_gdf_from_multiple_gz, swap_coordinates
|
|
24
24
|
|
|
25
25
|
def get_geojson_links(output_dir):
|
|
26
26
|
"""Download and load the dataset links CSV file containing building footprint URLs.
|
voxcity/downloader/oemj.py
CHANGED
|
@@ -19,6 +19,7 @@ Example Usage:
|
|
|
19
19
|
"""
|
|
20
20
|
|
|
21
21
|
import requests
|
|
22
|
+
import os
|
|
22
23
|
from PIL import Image, ImageDraw
|
|
23
24
|
from io import BytesIO
|
|
24
25
|
import math
|
|
@@ -73,7 +74,7 @@ def num2deg(xtile, ytile, zoom):
|
|
|
73
74
|
lat_deg = math.degrees(lat_rad)
|
|
74
75
|
return (lon_deg, lat_deg)
|
|
75
76
|
|
|
76
|
-
def download_tiles(polygon, zoom):
|
|
77
|
+
def download_tiles(polygon, zoom, *, ssl_verify=True, allow_insecure_ssl=False, allow_http_fallback=False, timeout_s=30):
|
|
77
78
|
"""Download satellite imagery tiles covering a polygon region.
|
|
78
79
|
|
|
79
80
|
Downloads all tiles that intersect with the given polygon at the specified zoom level
|
|
@@ -112,11 +113,70 @@ def download_tiles(polygon, zoom):
|
|
|
112
113
|
for x in range(min(min_x, max_x), max(min_x, max_x) + 1):
|
|
113
114
|
for y in range(min(min_y, max_y), max(min_y, max_y) + 1):
|
|
114
115
|
url = f"https://www.open-earth-map.org/demo/Japan/{zoom}/{x}/{y}.png"
|
|
115
|
-
|
|
116
|
-
|
|
117
|
-
|
|
118
|
-
|
|
119
|
-
|
|
116
|
+
# Try secure HTTPS first with user-provided verification option
|
|
117
|
+
content = None
|
|
118
|
+
try:
|
|
119
|
+
resp = requests.get(url, timeout=timeout_s, verify=ssl_verify)
|
|
120
|
+
if resp.status_code == 200:
|
|
121
|
+
content = resp.content
|
|
122
|
+
else:
|
|
123
|
+
print(f"Failed to download tile (status {resp.status_code}): {url}")
|
|
124
|
+
except requests.exceptions.SSLError:
|
|
125
|
+
# Optionally retry with certificate verification disabled
|
|
126
|
+
if allow_insecure_ssl:
|
|
127
|
+
try:
|
|
128
|
+
resp = requests.get(url, timeout=timeout_s, verify=False)
|
|
129
|
+
if resp.status_code == 200:
|
|
130
|
+
content = resp.content
|
|
131
|
+
else:
|
|
132
|
+
print(f"Failed to download tile (status {resp.status_code}) with insecure SSL: {url}")
|
|
133
|
+
except requests.exceptions.RequestException as e:
|
|
134
|
+
# Optionally try HTTP fallback
|
|
135
|
+
if allow_http_fallback and url.lower().startswith("https://"):
|
|
136
|
+
http_url = "http://" + url.split("://", 1)[1]
|
|
137
|
+
try:
|
|
138
|
+
resp = requests.get(http_url, timeout=timeout_s)
|
|
139
|
+
if resp.status_code == 200:
|
|
140
|
+
content = resp.content
|
|
141
|
+
else:
|
|
142
|
+
print(f"Failed to download tile over HTTP (status {resp.status_code}): {http_url}")
|
|
143
|
+
except requests.exceptions.RequestException as e2:
|
|
144
|
+
print(f"HTTP fallback failed for tile: {http_url} ({e2})")
|
|
145
|
+
else:
|
|
146
|
+
print(f"SSL error downloading tile: {url} ({e})")
|
|
147
|
+
else:
|
|
148
|
+
if allow_http_fallback and url.lower().startswith("https://"):
|
|
149
|
+
http_url = "http://" + url.split("://", 1)[1]
|
|
150
|
+
try:
|
|
151
|
+
resp = requests.get(http_url, timeout=timeout_s)
|
|
152
|
+
if resp.status_code == 200:
|
|
153
|
+
content = resp.content
|
|
154
|
+
else:
|
|
155
|
+
print(f"Failed to download tile over HTTP (status {resp.status_code}): {http_url}")
|
|
156
|
+
except requests.exceptions.RequestException as e:
|
|
157
|
+
print(f"HTTP fallback failed for tile: {http_url} ({e})")
|
|
158
|
+
else:
|
|
159
|
+
print(f"SSL error downloading tile: {url}")
|
|
160
|
+
except requests.exceptions.RequestException as e:
|
|
161
|
+
# Network error (timeout/connection). Try HTTP if allowed.
|
|
162
|
+
if allow_http_fallback and url.lower().startswith("https://"):
|
|
163
|
+
http_url = "http://" + url.split("://", 1)[1]
|
|
164
|
+
try:
|
|
165
|
+
resp = requests.get(http_url, timeout=timeout_s)
|
|
166
|
+
if resp.status_code == 200:
|
|
167
|
+
content = resp.content
|
|
168
|
+
else:
|
|
169
|
+
print(f"Failed to download tile over HTTP (status {resp.status_code}): {http_url}")
|
|
170
|
+
except requests.exceptions.RequestException as e2:
|
|
171
|
+
print(f"HTTP fallback failed for tile: {http_url} ({e2})")
|
|
172
|
+
else:
|
|
173
|
+
print(f"Error downloading tile: {url} ({e})")
|
|
174
|
+
|
|
175
|
+
if content is not None:
|
|
176
|
+
try:
|
|
177
|
+
tiles[(x, y)] = Image.open(BytesIO(content))
|
|
178
|
+
except Exception as e:
|
|
179
|
+
print(f"Error decoding tile image for {url}: {e}")
|
|
120
180
|
|
|
121
181
|
return tiles, (min(min_x, max_x), min(min_y, max_y), max(min_x, max_x), max(min_y, max_y))
|
|
122
182
|
|
|
@@ -231,6 +291,11 @@ def save_as_geotiff(image, polygon, zoom, bbox, bounds, output_path):
|
|
|
231
291
|
pixel_size_x = (lower_right_x - upper_left_x) / image.width
|
|
232
292
|
pixel_size_y = (upper_left_y - lower_right_y) / image.height
|
|
233
293
|
|
|
294
|
+
# Ensure output directory exists
|
|
295
|
+
out_dir = os.path.dirname(output_path)
|
|
296
|
+
if out_dir:
|
|
297
|
+
os.makedirs(out_dir, exist_ok=True)
|
|
298
|
+
|
|
234
299
|
# Create GeoTIFF
|
|
235
300
|
driver = gdal.GetDriverByName('GTiff')
|
|
236
301
|
dataset = driver.Create(output_path, image.width, image.height, 3, gdal.GDT_Byte)
|
|
@@ -249,7 +314,7 @@ def save_as_geotiff(image, polygon, zoom, bbox, bounds, output_path):
|
|
|
249
314
|
|
|
250
315
|
dataset = None
|
|
251
316
|
|
|
252
|
-
def save_oemj_as_geotiff(polygon, filepath, zoom=16):
|
|
317
|
+
def save_oemj_as_geotiff(polygon, filepath, zoom=16, *, ssl_verify=True, allow_insecure_ssl=False, allow_http_fallback=False, timeout_s=30):
|
|
253
318
|
"""Download and save OpenEarthMap Japan imagery as a georeferenced GeoTIFF file.
|
|
254
319
|
|
|
255
320
|
This is the main function that orchestrates the entire process of downloading,
|
|
@@ -281,7 +346,14 @@ def save_oemj_as_geotiff(polygon, filepath, zoom=16):
|
|
|
281
346
|
- The output GeoTIFF will be in Web Mercator projection (EPSG:3857)
|
|
282
347
|
"""
|
|
283
348
|
try:
|
|
284
|
-
tiles, bounds = download_tiles(
|
|
349
|
+
tiles, bounds = download_tiles(
|
|
350
|
+
polygon,
|
|
351
|
+
zoom,
|
|
352
|
+
ssl_verify=ssl_verify,
|
|
353
|
+
allow_insecure_ssl=allow_insecure_ssl,
|
|
354
|
+
allow_http_fallback=allow_http_fallback,
|
|
355
|
+
timeout_s=timeout_s,
|
|
356
|
+
)
|
|
285
357
|
if not tiles:
|
|
286
358
|
raise ValueError("No tiles were downloaded. Please check the polygon coordinates and zoom level.")
|
|
287
359
|
|
voxcity/downloader/osm.py
CHANGED
|
@@ -370,7 +370,7 @@ def create_rings_from_ways(way_ids, ways, nodes):
|
|
|
370
370
|
|
|
371
371
|
return rings
|
|
372
372
|
|
|
373
|
-
def load_gdf_from_openstreetmap(rectangle_vertices):
|
|
373
|
+
def load_gdf_from_openstreetmap(rectangle_vertices, floor_height=3.0):
|
|
374
374
|
"""Download and process building footprint data from OpenStreetMap.
|
|
375
375
|
|
|
376
376
|
This function:
|
|
@@ -471,7 +471,7 @@ def load_gdf_from_openstreetmap(rectangle_vertices):
|
|
|
471
471
|
"""
|
|
472
472
|
return [coord for coord in geometry] # Keep original order since already (lon, lat)
|
|
473
473
|
|
|
474
|
-
def get_height_from_properties(properties):
|
|
474
|
+
def get_height_from_properties(properties, floor_height=3.0):
|
|
475
475
|
"""Helper function to extract height from properties.
|
|
476
476
|
|
|
477
477
|
Args:
|
|
@@ -487,9 +487,25 @@ def load_gdf_from_openstreetmap(rectangle_vertices):
|
|
|
487
487
|
except ValueError:
|
|
488
488
|
pass
|
|
489
489
|
|
|
490
|
+
# Infer from floors when available
|
|
491
|
+
floors_candidates = [
|
|
492
|
+
properties.get('building:levels'),
|
|
493
|
+
properties.get('levels'),
|
|
494
|
+
properties.get('num_floors')
|
|
495
|
+
]
|
|
496
|
+
for floors in floors_candidates:
|
|
497
|
+
if floors is None:
|
|
498
|
+
continue
|
|
499
|
+
try:
|
|
500
|
+
floors_val = float(floors)
|
|
501
|
+
if floors_val > 0:
|
|
502
|
+
return float(floor_height) * floors_val
|
|
503
|
+
except ValueError:
|
|
504
|
+
continue
|
|
505
|
+
|
|
490
506
|
return 0 # Default height if no valid height found
|
|
491
507
|
|
|
492
|
-
def extract_properties(element):
|
|
508
|
+
def extract_properties(element, floor_height=3.0):
|
|
493
509
|
"""Helper function to extract and process properties from an element.
|
|
494
510
|
|
|
495
511
|
Args:
|
|
@@ -501,7 +517,7 @@ def load_gdf_from_openstreetmap(rectangle_vertices):
|
|
|
501
517
|
properties = element.get('tags', {})
|
|
502
518
|
|
|
503
519
|
# Get height (now using the helper function)
|
|
504
|
-
height = get_height_from_properties(properties)
|
|
520
|
+
height = get_height_from_properties(properties, floor_height=floor_height)
|
|
505
521
|
|
|
506
522
|
# Get min_height and min_level
|
|
507
523
|
min_height = properties.get('min_height', '0')
|
|
@@ -526,7 +542,7 @@ def load_gdf_from_openstreetmap(rectangle_vertices):
|
|
|
526
542
|
"is_inner": False,
|
|
527
543
|
"levels": levels,
|
|
528
544
|
"height_source": "explicit" if properties.get('height') or properties.get('building:height')
|
|
529
|
-
else "levels" if levels is not None
|
|
545
|
+
else "levels" if (levels is not None) or (properties.get('num_floors') is not None)
|
|
530
546
|
else "default",
|
|
531
547
|
"min_level": min_level if min_level != '0' else None,
|
|
532
548
|
"building": properties.get('building', 'no'),
|
|
@@ -584,13 +600,13 @@ def load_gdf_from_openstreetmap(rectangle_vertices):
|
|
|
584
600
|
if element['type'] == 'way':
|
|
585
601
|
if 'geometry' in element:
|
|
586
602
|
coords = [(node['lon'], node['lat']) for node in element['geometry']]
|
|
587
|
-
properties = extract_properties(element)
|
|
603
|
+
properties = extract_properties(element, floor_height=floor_height)
|
|
588
604
|
feature = create_polygon_feature(coords, properties)
|
|
589
605
|
if feature:
|
|
590
606
|
features.append(feature)
|
|
591
607
|
|
|
592
608
|
elif element['type'] == 'relation':
|
|
593
|
-
properties = extract_properties(element)
|
|
609
|
+
properties = extract_properties(element, floor_height=floor_height)
|
|
594
610
|
|
|
595
611
|
# Process each member of the relation
|
|
596
612
|
for member in element['members']:
|
voxcity/downloader/overture.py
CHANGED
|
@@ -254,7 +254,7 @@ def join_gdfs_vertically(gdf1, gdf2):
|
|
|
254
254
|
|
|
255
255
|
return combined_gdf
|
|
256
256
|
|
|
257
|
-
def load_gdf_from_overture(rectangle_vertices):
|
|
257
|
+
def load_gdf_from_overture(rectangle_vertices, floor_height=3.0):
|
|
258
258
|
"""
|
|
259
259
|
Download and process building footprint data from Overture Maps.
|
|
260
260
|
|
|
@@ -287,6 +287,31 @@ def load_gdf_from_overture(rectangle_vertices):
|
|
|
287
287
|
# Combine both datasets into a single comprehensive building dataset
|
|
288
288
|
joined_building_gdf = join_gdfs_vertically(building_gdf, building_part_gdf)
|
|
289
289
|
|
|
290
|
+
# Ensure numeric height and infer from floors when missing
|
|
291
|
+
try:
|
|
292
|
+
joined_building_gdf['height'] = pd.to_numeric(joined_building_gdf.get('height', None), errors='coerce')
|
|
293
|
+
except Exception:
|
|
294
|
+
# Create height column if missing
|
|
295
|
+
joined_building_gdf['height'] = None
|
|
296
|
+
joined_building_gdf['height'] = pd.to_numeric(joined_building_gdf['height'], errors='coerce')
|
|
297
|
+
|
|
298
|
+
# Combine possible floors columns (first non-null among candidates)
|
|
299
|
+
floors_candidates = []
|
|
300
|
+
for col in ['building:levels', 'levels', 'num_floors', 'floors']:
|
|
301
|
+
if col in joined_building_gdf.columns:
|
|
302
|
+
floors_candidates.append(pd.to_numeric(joined_building_gdf[col], errors='coerce'))
|
|
303
|
+
if floors_candidates:
|
|
304
|
+
floors_series = floors_candidates[0]
|
|
305
|
+
for s in floors_candidates[1:]:
|
|
306
|
+
floors_series = floors_series.combine_first(s)
|
|
307
|
+
# Infer height where height is NaN/<=0 and floors > 0
|
|
308
|
+
mask_missing_height = (~joined_building_gdf['height'].notna()) | (joined_building_gdf['height'] <= 0)
|
|
309
|
+
if isinstance(floor_height, (int, float)):
|
|
310
|
+
inferred = floors_series * float(floor_height)
|
|
311
|
+
else:
|
|
312
|
+
inferred = floors_series * 3.0
|
|
313
|
+
joined_building_gdf.loc[mask_missing_height & (floors_series > 0), 'height'] = inferred
|
|
314
|
+
|
|
290
315
|
# Assign sequential IDs based on the final dataset index
|
|
291
316
|
joined_building_gdf['id'] = joined_building_gdf.index
|
|
292
317
|
|