voxcity-0.6.26-py3-none-any.whl → voxcity-0.7.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- voxcity/__init__.py +14 -8
- voxcity/downloader/__init__.py +2 -1
- voxcity/downloader/gba.py +210 -0
- voxcity/downloader/gee.py +5 -1
- voxcity/downloader/mbfp.py +1 -1
- voxcity/downloader/oemj.py +80 -8
- voxcity/downloader/utils.py +73 -73
- voxcity/errors.py +30 -0
- voxcity/exporter/__init__.py +13 -5
- voxcity/exporter/cityles.py +633 -538
- voxcity/exporter/envimet.py +728 -708
- voxcity/exporter/magicavoxel.py +334 -297
- voxcity/exporter/netcdf.py +238 -211
- voxcity/exporter/obj.py +1481 -1406
- voxcity/generator/__init__.py +44 -0
- voxcity/generator/api.py +675 -0
- voxcity/generator/grids.py +379 -0
- voxcity/generator/io.py +94 -0
- voxcity/generator/pipeline.py +282 -0
- voxcity/generator/voxelizer.py +380 -0
- voxcity/geoprocessor/__init__.py +75 -6
- voxcity/geoprocessor/conversion.py +153 -0
- voxcity/geoprocessor/draw.py +62 -12
- voxcity/geoprocessor/heights.py +199 -0
- voxcity/geoprocessor/io.py +101 -0
- voxcity/geoprocessor/merge_utils.py +91 -0
- voxcity/geoprocessor/mesh.py +806 -790
- voxcity/geoprocessor/network.py +708 -679
- voxcity/geoprocessor/overlap.py +84 -0
- voxcity/geoprocessor/raster/__init__.py +82 -0
- voxcity/geoprocessor/raster/buildings.py +428 -0
- voxcity/geoprocessor/raster/canopy.py +258 -0
- voxcity/geoprocessor/raster/core.py +150 -0
- voxcity/geoprocessor/raster/export.py +93 -0
- voxcity/geoprocessor/raster/landcover.py +156 -0
- voxcity/geoprocessor/raster/raster.py +110 -0
- voxcity/geoprocessor/selection.py +85 -0
- voxcity/geoprocessor/utils.py +18 -14
- voxcity/models.py +113 -0
- voxcity/simulator/common/__init__.py +22 -0
- voxcity/simulator/common/geometry.py +98 -0
- voxcity/simulator/common/raytracing.py +450 -0
- voxcity/simulator/solar/__init__.py +43 -0
- voxcity/simulator/solar/integration.py +336 -0
- voxcity/simulator/solar/kernels.py +62 -0
- voxcity/simulator/solar/radiation.py +648 -0
- voxcity/simulator/solar/temporal.py +434 -0
- voxcity/simulator/view.py +36 -2286
- voxcity/simulator/visibility/__init__.py +29 -0
- voxcity/simulator/visibility/landmark.py +392 -0
- voxcity/simulator/visibility/view.py +508 -0
- voxcity/utils/logging.py +61 -0
- voxcity/utils/orientation.py +51 -0
- voxcity/utils/weather/__init__.py +26 -0
- voxcity/utils/weather/epw.py +146 -0
- voxcity/utils/weather/files.py +36 -0
- voxcity/utils/weather/onebuilding.py +486 -0
- voxcity/visualizer/__init__.py +24 -0
- voxcity/visualizer/builder.py +43 -0
- voxcity/visualizer/grids.py +141 -0
- voxcity/visualizer/maps.py +187 -0
- voxcity/visualizer/palette.py +228 -0
- voxcity/visualizer/renderer.py +928 -0
- {voxcity-0.6.26.dist-info → voxcity-0.7.0.dist-info}/METADATA +107 -34
- voxcity-0.7.0.dist-info/RECORD +77 -0
- voxcity/generator.py +0 -1302
- voxcity/geoprocessor/grid.py +0 -1739
- voxcity/geoprocessor/polygon.py +0 -1344
- voxcity/simulator/solar.py +0 -2339
- voxcity/utils/visualization.py +0 -2849
- voxcity/utils/weather.py +0 -1038
- voxcity-0.6.26.dist-info/RECORD +0 -38
- {voxcity-0.6.26.dist-info → voxcity-0.7.0.dist-info}/WHEEL +0 -0
- {voxcity-0.6.26.dist-info → voxcity-0.7.0.dist-info}/licenses/AUTHORS.rst +0 -0
- {voxcity-0.6.26.dist-info → voxcity-0.7.0.dist-info}/licenses/LICENSE +0 -0
voxcity/__init__.py
CHANGED
@@ -1,8 +1,14 @@
-__author__ = """Kunihiko Fujiwara"""
-__email__ = 'kunihiko@nus.edu.sg'
-__version__ = '0.1.0'
-
-#
-#
-#
-#
+__author__ = """Kunihiko Fujiwara"""
+__email__ = 'kunihiko@nus.edu.sg'
+__version__ = '0.1.0'
+
+# Keep package __init__ lightweight to avoid import-time failures.
+# Re-exports of heavy modules/classes are intentionally omitted here.
+# Downstream modules should import directly from their subpackages, e.g.:
+# from voxcity.geoprocessor.draw import draw_rectangle_map_cityname
+
+__all__ = [
+    "__author__",
+    "__email__",
+    "__version__",
+]
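The comments above codify the 0.7.0 import policy: the package root exports only metadata, and functionality is imported from the owning subpackage. A minimal sketch of downstream usage under this policy (the imports assume the relevant optional dependencies are installed):

    # Root import stays cheap: voxcity/__init__.py defines only metadata.
    import voxcity
    print(voxcity.__author__, voxcity.__version__)

    # Heavy functionality comes from the owning subpackage, per the comments above.
    from voxcity.geoprocessor.draw import draw_rectangle_map_cityname
    from voxcity.downloader.gba import load_gdf_from_gba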
voxcity/downloader/gba.py
ADDED
@@ -0,0 +1,210 @@
+"""
+Downloader for Global Building Atlas (GBA) LOD1 polygons.
+
+This module downloads GeoParquet tiles from the Global Building Atlas (GBA)
+hosted at data.source.coop, selects tiles intersecting a user-specified
+rectangle, loads them into a GeoDataFrame, and filters features to the
+rectangle extent.
+
+Tile scheme:
+- Global 5x5-degree tiles named like: e010_n50_e015_n45.parquet
+- longitudes: e/w with 3-digit zero padding (e.g., e010, w060)
+- latitudes: n/s with 2-digit zero padding (e.g., n50, s25)
+- filename order: west_lon, north_lat, east_lon, south_lat
+
+Usage:
+    gdf = load_gdf_from_gba(rectangle_vertices=[(lon1, lat1), (lon2, lat2), ...])
+
+Notes:
+- Output CRS is EPSG:4326.
+- Requires pyarrow or fastparquet for parquet reading via GeoPandas.
+"""
+
+from __future__ import annotations
+
+import math
+import os
+import tempfile
+from typing import Iterable, List, Optional, Sequence, Tuple
+
+import geopandas as gpd
+import pandas as pd
+import requests
+from shapely.geometry import Polygon
+
+
+def _bbox_from_rectangle_vertices(vertices: Sequence[Tuple[float, float]]) -> Tuple[float, float, float, float]:
+    """
+    Convert rectangle vertices in (lon, lat) into bbox as (min_lon, min_lat, max_lon, max_lat).
+    """
+    if not vertices:
+        raise ValueError("rectangle_vertices must be a non-empty sequence of (lon, lat)")
+    lons = [v[0] for v in vertices]
+    lats = [v[1] for v in vertices]
+    return (min(lons), min(lats), max(lons), max(lats))
+
+
+def _pad_lon(deg: int) -> str:
+    return f"{abs(deg):03d}"
+
+
+def _pad_lat(deg: int) -> str:
+    return f"{abs(deg):02d}"
+
+
+def _lon_tag(deg: int) -> str:
+    return ("e" if deg >= 0 else "w") + _pad_lon(deg)
+
+
+def _lat_tag(deg: int) -> str:
+    return ("n" if deg >= 0 else "s") + _pad_lat(deg)
+
+
+def _snap_down(value: float, step: int) -> int:
+    return int(math.floor(value / step) * step)
+
+
+def _snap_up(value: float, step: int) -> int:
+    return int(math.ceil(value / step) * step)
+
+
+def _generate_tile_bounds_for_bbox(
+    min_lon: float, min_lat: float, max_lon: float, max_lat: float, tile_size_deg: int = 5
+) -> Iterable[Tuple[int, int, int, int]]:
+    """
+    Generate 5-degree tile bounds (west, south, east, north) covering bbox.
+    All values are integer degrees aligned to tile_size_deg.
+    """
+    west = _snap_down(min_lon, tile_size_deg)
+    east = _snap_up(max_lon, tile_size_deg)
+    south = _snap_down(min_lat, tile_size_deg)
+    north = _snap_up(max_lat, tile_size_deg)
+
+    for lon in range(west, east, tile_size_deg):
+        for lat in range(south, north, tile_size_deg):
+            yield (lon, lat, lon + tile_size_deg, lat + tile_size_deg)
+
+
+def _tile_filename(west: int, south: int, east: int, north: int) -> str:
+    """
+    Construct GBA tile filename for given integer-degree bounds.
+    Naming convention examples:
+        e010_n50_e015_n45.parquet
+        e140_s25_e145_s30.parquet
+        w060_s30_w055_s35.parquet
+    """
+    return f"{_lon_tag(west)}_{_lat_tag(north)}_{_lon_tag(east)}_{_lat_tag(south)}.parquet"
+
+
+def _tile_url(base_url: str, west: int, south: int, east: int, north: int) -> str:
+    filename = _tile_filename(west, south, east, north)
+    return f"{base_url.rstrip('/')}/{filename}"
+
+
+def _download_parquet(url: str, download_dir: str, timeout: int = 60) -> Optional[str]:
+    """
+    Download a parquet file to download_dir. Returns local filepath or None if not found.
+    """
+    try:
+        with requests.get(url, stream=True, timeout=timeout) as r:
+            if r.status_code != 200:
+                return None
+            filename = os.path.basename(url)
+            local_path = os.path.join(download_dir, filename)
+            with open(local_path, "wb") as f:
+                for chunk in r.iter_content(chunk_size=1024 * 1024):
+                    if chunk:
+                        f.write(chunk)
+            return local_path
+    except requests.RequestException:
+        return None
+
+
+def _filter_to_rectangle(gdf: gpd.GeoDataFrame, rectangle: Polygon, clip: bool) -> gpd.GeoDataFrame:
+    gdf = gdf[gdf.geometry.notnull()].copy()
+    # Ensure CRS is WGS84
+    if gdf.crs is None:
+        gdf.set_crs(epsg=4326, inplace=True)
+    elif gdf.crs.to_epsg() != 4326:
+        gdf = gdf.to_crs(epsg=4326)
+
+    intersects = gdf.intersects(rectangle)
+    gdf = gdf[intersects].copy()
+    if clip and not gdf.empty:
+        # GeoPandas clip performs overlay to trim geometries to rectangle
+        gdf = gpd.clip(gdf, gpd.GeoSeries([rectangle], crs="EPSG:4326").to_frame("geometry"))
+    return gdf
+
+
+def load_gdf_from_gba(
+    rectangle_vertices: Sequence[Tuple[float, float]],
+    base_url: str = "https://data.source.coop/tge-labs/globalbuildingatlas-lod1",
+    download_dir: Optional[str] = None,
+    clip_to_rectangle: bool = False,
+) -> Optional[gpd.GeoDataFrame]:
+    """
+    Download GBA tiles intersecting a rectangle and return combined GeoDataFrame.
+
+    Args:
+        rectangle_vertices: Sequence of (lon, lat) defining the area of interest.
+        base_url: Base URL hosting GBA parquet tiles.
+        download_dir: Optional directory to store downloaded tiles. If None, a
+            temporary directory is used and cleaned up by the OS later.
+        clip_to_rectangle: If True, geometries are clipped to rectangle extent.
+
+    Returns:
+        GeoDataFrame with EPSG:4326 geometry and an 'id' column, or None if no data.
+    """
+    min_lon, min_lat, max_lon, max_lat = _bbox_from_rectangle_vertices(rectangle_vertices)
+    rectangle = Polygon([
+        (min_lon, min_lat),
+        (max_lon, min_lat),
+        (max_lon, max_lat),
+        (min_lon, max_lat),
+        (min_lon, min_lat),
+    ])
+
+    tmp_dir_created = False
+    if download_dir is None:
+        download_dir = tempfile.mkdtemp(prefix="gba_tiles_")
+        tmp_dir_created = True
+    else:
+        os.makedirs(download_dir, exist_ok=True)
+
+    local_files: List[str] = []
+    for west, south, east, north in _generate_tile_bounds_for_bbox(min_lon, min_lat, max_lon, max_lat):
+        url = _tile_url(base_url, west, south, east, north)
+        local = _download_parquet(url, download_dir)
+        if local is not None:
+            local_files.append(local)
+
+    if not local_files:
+        return None
+
+    gdfs: List[gpd.GeoDataFrame] = []
+    for path in local_files:
+        try:
+            # GeoParquet read
+            gdf = gpd.read_parquet(path)
+            if gdf is not None and not gdf.empty:
+                gdfs.append(gdf)
+        except Exception:
+            # Skip unreadable tiles
+            continue

+
+    if not gdfs:
+        return None
+
+    combined = pd.concat(gdfs, ignore_index=True)
+    combined = gpd.GeoDataFrame(combined, geometry="geometry")
+    combined = _filter_to_rectangle(combined, rectangle, clip=clip_to_rectangle)
+
+    if combined.empty:
+        return None
+
+    # Ensure sequential ids
+    combined["id"] = combined.index.astype(int)
+    combined.set_crs(epsg=4326, inplace=True, allow_override=True)
+    return combined
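A usage sketch of the new module (function names and keywords are as defined above; the coordinates are illustrative). Note how the tile math resolves: for a rectangle near 139.7°E, 35.7°N, the bbox snaps to the 5-degree tile (135, 35, 140, 40), i.e. the file e135_n40_e140_n35.parquet.

    from voxcity.downloader.gba import load_gdf_from_gba

    # (lon, lat) rectangle over central Tokyo; values are illustrative.
    rect = [(139.74, 35.67), (139.78, 35.67), (139.78, 35.70), (139.74, 35.70)]

    gdf = load_gdf_from_gba(rect, clip_to_rectangle=True)
    if gdf is None:
        print("no GBA tiles intersect this rectangle")
    else:
        print(len(gdf), "LOD1 footprints, CRS:", gdf.crs)  # EPSG:4326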
voxcity/downloader/gee.py
CHANGED
@@ -217,7 +217,11 @@ def get_dem_image(roi_buffered, source):
     # dem = collection.mosaic()
 
     # elif source == 'FABDEM':
-
+    # If we reach here without assigning `dem`, the source is unsupported
+    try:
+        return dem.clip(roi_buffered)
+    except UnboundLocalError:
+        raise ValueError(f"Unsupported or unimplemented DEM source: {source}")
 
 def save_geotiff_esa_land_cover(roi, geotiff_path):
     """Save ESA WorldCover land cover data as a colored GeoTIFF.
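The guard works through Python scoping: `dem` is only bound inside a branch that recognizes `source`, so an unknown source reaches `return dem.clip(roi_buffered)` with `dem` unbound, raising UnboundLocalError, which is then surfaced as a clearer ValueError. A minimal, Earth-Engine-free sketch of the same pattern (names here are hypothetical stand-ins):

    def pick_dem(source):
        if source == "SRTM":       # hypothetical supported source
            dem = "srtm-image"     # stand-in for an ee.Image
        try:
            return dem             # unbound if no branch matched
        except UnboundLocalError:
            raise ValueError(f"Unsupported or unimplemented DEM source: {source}")

    pick_dem("SRTM")      # returns "srtm-image"
    # pick_dem("BOGUS")   # would raise ValueError naming the bad source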
voxcity/downloader/mbfp.py
CHANGED
@@ -20,7 +20,7 @@ import pandas as pd
 import os
 from .utils import download_file
 from ..geoprocessor.utils import tile_from_lat_lon, quadkey_to_tile
-from ..geoprocessor.
+from ..geoprocessor.io import load_gdf_from_multiple_gz, swap_coordinates
 
 def get_geojson_links(output_dir):
     """Download and load the dataset links CSV file containing building footprint URLs.
voxcity/downloader/oemj.py
CHANGED
@@ -19,6 +19,7 @@ Example Usage:
 """
 
 import requests
+import os
 from PIL import Image, ImageDraw
 from io import BytesIO
 import math
@@ -73,7 +74,7 @@ def num2deg(xtile, ytile, zoom):
     lat_deg = math.degrees(lat_rad)
     return (lon_deg, lat_deg)
 
-def download_tiles(polygon, zoom):
+def download_tiles(polygon, zoom, *, ssl_verify=True, allow_insecure_ssl=False, allow_http_fallback=False, timeout_s=30):
     """Download satellite imagery tiles covering a polygon region.
 
     Downloads all tiles that intersect with the given polygon at the specified zoom level
@@ -112,11 +113,70 @@ def download_tiles(polygon, zoom):
     for x in range(min(min_x, max_x), max(min_x, max_x) + 1):
         for y in range(min(min_y, max_y), max(min_y, max_y) + 1):
             url = f"https://www.open-earth-map.org/demo/Japan/{zoom}/{x}/{y}.png"
-
-
-
-
-
+            # Try secure HTTPS first with user-provided verification option
+            content = None
+            try:
+                resp = requests.get(url, timeout=timeout_s, verify=ssl_verify)
+                if resp.status_code == 200:
+                    content = resp.content
+                else:
+                    print(f"Failed to download tile (status {resp.status_code}): {url}")
+            except requests.exceptions.SSLError:
+                # Optionally retry with certificate verification disabled
+                if allow_insecure_ssl:
+                    try:
+                        resp = requests.get(url, timeout=timeout_s, verify=False)
+                        if resp.status_code == 200:
+                            content = resp.content
+                        else:
+                            print(f"Failed to download tile (status {resp.status_code}) with insecure SSL: {url}")
+                    except requests.exceptions.RequestException as e:
+                        # Optionally try HTTP fallback
+                        if allow_http_fallback and url.lower().startswith("https://"):
+                            http_url = "http://" + url.split("://", 1)[1]
+                            try:
+                                resp = requests.get(http_url, timeout=timeout_s)
+                                if resp.status_code == 200:
+                                    content = resp.content
+                                else:
+                                    print(f"Failed to download tile over HTTP (status {resp.status_code}): {http_url}")
+                            except requests.exceptions.RequestException as e2:
+                                print(f"HTTP fallback failed for tile: {http_url} ({e2})")
+                        else:
+                            print(f"SSL error downloading tile: {url} ({e})")
+                else:
+                    if allow_http_fallback and url.lower().startswith("https://"):
+                        http_url = "http://" + url.split("://", 1)[1]
+                        try:
+                            resp = requests.get(http_url, timeout=timeout_s)
+                            if resp.status_code == 200:
+                                content = resp.content
+                            else:
+                                print(f"Failed to download tile over HTTP (status {resp.status_code}): {http_url}")
+                        except requests.exceptions.RequestException as e:
+                            print(f"HTTP fallback failed for tile: {http_url} ({e})")
+                    else:
+                        print(f"SSL error downloading tile: {url}")
+            except requests.exceptions.RequestException as e:
+                # Network error (timeout/connection). Try HTTP if allowed.
+                if allow_http_fallback and url.lower().startswith("https://"):
+                    http_url = "http://" + url.split("://", 1)[1]
+                    try:
+                        resp = requests.get(http_url, timeout=timeout_s)
+                        if resp.status_code == 200:
+                            content = resp.content
+                        else:
+                            print(f"Failed to download tile over HTTP (status {resp.status_code}): {http_url}")
+                    except requests.exceptions.RequestException as e2:
+                        print(f"HTTP fallback failed for tile: {http_url} ({e2})")
+                else:
+                    print(f"Error downloading tile: {url} ({e})")
+
+            if content is not None:
+                try:
+                    tiles[(x, y)] = Image.open(BytesIO(content))
+                except Exception as e:
+                    print(f"Error decoding tile image for {url}: {e}")
 
     return tiles, (min(min_x, max_x), min(min_y, max_y), max(min_x, max_x), max(min_y, max_y))
 
@@ -231,6 +291,11 @@ def save_as_geotiff(image, polygon, zoom, bbox, bounds, output_path):
     pixel_size_x = (lower_right_x - upper_left_x) / image.width
     pixel_size_y = (upper_left_y - lower_right_y) / image.height
 
+    # Ensure output directory exists
+    out_dir = os.path.dirname(output_path)
+    if out_dir:
+        os.makedirs(out_dir, exist_ok=True)
+
     # Create GeoTIFF
     driver = gdal.GetDriverByName('GTiff')
     dataset = driver.Create(output_path, image.width, image.height, 3, gdal.GDT_Byte)
@@ -249,7 +314,7 @@ def save_as_geotiff(image, polygon, zoom, bbox, bounds, output_path):
 
     dataset = None
 
-def save_oemj_as_geotiff(polygon, filepath, zoom=16):
+def save_oemj_as_geotiff(polygon, filepath, zoom=16, *, ssl_verify=True, allow_insecure_ssl=False, allow_http_fallback=False, timeout_s=30):
     """Download and save OpenEarthMap Japan imagery as a georeferenced GeoTIFF file.
 
     This is the main function that orchestrates the entire process of downloading,
@@ -281,7 +346,14 @@ def save_oemj_as_geotiff(polygon, filepath, zoom=16):
     - The output GeoTIFF will be in Web Mercator projection (EPSG:3857)
     """
     try:
-        tiles, bounds = download_tiles(
+        tiles, bounds = download_tiles(
+            polygon,
+            zoom,
+            ssl_verify=ssl_verify,
+            allow_insecure_ssl=allow_insecure_ssl,
+            allow_http_fallback=allow_http_fallback,
+            timeout_s=timeout_s,
+        )
         if not tiles:
             raise ValueError("No tiles were downloaded. Please check the polygon coordinates and zoom level.")
 
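A usage sketch of the hardened entry point (the polygon is assumed to be a list of (lon, lat) vertices, matching the module's coordinate convention; the values are illustrative):

    from voxcity.downloader.oemj import save_oemj_as_geotiff

    # Small area in central Tokyo; (lon, lat) vertices, illustrative values.
    polygon = [(139.75, 35.68), (139.76, 35.68), (139.76, 35.69), (139.75, 35.69)]

    # Strict HTTPS by default; the insecure retries are opt-in, keyword-only
    # escape hatches for broken certificate chains or intercepting proxies.
    save_oemj_as_geotiff(
        polygon,
        "output/oemj_tokyo.tif",
        zoom=16,
        allow_insecure_ssl=True,    # retry with verify=False on SSLError
        allow_http_fallback=False,  # never downgrade to plain HTTP
        timeout_s=60,
    )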
voxcity/downloader/utils.py
CHANGED
@@ -1,74 +1,74 @@
[Whitespace-only change: lines 1-73 are removed and re-added with identical text; the re-added side is shown once below, and line 74 is unchanged context.]
+# Utility functions for downloading files from various sources
+import requests
+import gdown
+
+def download_file(url, filename):
+    """Download a file from a URL and save it locally.
+
+    This function uses the requests library to download a file from any publicly
+    accessible URL and save it to the local filesystem. It handles the download
+    process and provides feedback on the operation's success or failure.
+
+    Args:
+        url (str): URL of the file to download. Must be a valid, accessible URL.
+        filename (str): Local path where the downloaded file will be saved.
+            Include the full path and filename with extension.
+
+    Returns:
+        None
+
+    Prints:
+        - Success message with filename if download is successful (status code 200)
+        - Error message with status code if download fails
+
+    Example:
+        >>> download_file('https://example.com/file.pdf', 'local_file.pdf')
+        File downloaded successfully and saved as local_file.pdf
+    """
+    # Attempt to download the file from the provided URL
+    response = requests.get(url)
+
+    # Check if the download was successful (HTTP status code 200)
+    if response.status_code == 200:
+        # Open the local file in binary write mode and save the content
+        with open(filename, 'wb') as file:
+            file.write(response.content)
+        print(f"File downloaded successfully and saved as {filename}")
+    else:
+        print(f"Failed to download file. Status code: {response.status_code}")
+
+def download_file_google_drive(file_id, output_path):
+    """Download a file from Google Drive using its file ID.
+
+    This function specifically handles downloads from Google Drive using the gdown
+    library, which is designed to bypass Google Drive's download restrictions.
+    It's useful for downloading large files or files that require authentication.
+
+    Args:
+        file_id (str): Google Drive file ID. This is the unique identifier in the
+            sharing URL after '/d/' or 'id='.
+        output_path (str): Local path where the downloaded file will be saved.
+            Include the full path and filename with extension.
+
+    Returns:
+        bool: True if download was successful, False if any error occurred
+
+    Prints:
+        Error message with exception details if download fails
+
+    Example:
+        >>> success = download_file_google_drive('1234abcd...', 'downloaded_file.zip')
+        >>> if success:
+        >>>     print("Download completed successfully")
+    """
+    # Construct the direct download URL using the file ID
+    url = f"https://drive.google.com/uc?id={file_id}"
+
+    try:
+        # Use gdown to handle the Google Drive download
+        # quiet=False enables download progress display
+        gdown.download(url, output_path, quiet=False)
+        return True
+    except Exception as e:
+        print(f"Error downloading file {file_id}: {str(e)}")
         return False
voxcity/errors.py
ADDED
@@ -0,0 +1,30 @@
+"""
+Package-specific error hierarchy for voxcity.
+
+This enables precise exception handling without leaking low-level
+implementation details across boundaries.
+"""
+
+from __future__ import annotations
+
+
+class VoxCityError(Exception):
+    """Base exception for all voxcity errors."""
+
+
+class ConfigurationError(VoxCityError):
+    """Raised when configuration values are missing or invalid."""
+
+
+class DownloaderError(VoxCityError):
+    """Raised by downloader modules when remote data retrieval fails."""
+
+
+class ProcessingError(VoxCityError):
+    """Raised for failures during grid/voxel processing or geoprocessing."""
+
+
+class VisualizationError(VoxCityError):
+    """Raised for visualization/rendering failures."""
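A sketch of how callers can use the hierarchy: catch a specific subclass for targeted recovery and fall back to the base class for anything else raised by the package (the raise site below is hypothetical):

    from voxcity.errors import DownloaderError, VoxCityError

    def fetch_footprints():
        # Hypothetical raise site: a downloader wrapping a network failure.
        raise DownloaderError("GBA tile server returned HTTP 503")

    try:
        fetch_footprints()
    except DownloaderError as e:
        print(f"retryable download problem: {e}")  # narrow, actionable handling
    except VoxCityError as e:
        print(f"other voxcity failure: {e}")       # broad package-level fallback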
voxcity/exporter/__init__.py
CHANGED
@@ -1,5 +1,13 @@
-from
-
-from .
-from .
-from .
+from typing import Protocol, runtime_checkable
+
+from .envimet import *
+from .magicavoxel import *
+from .obj import *
+from .cityles import *
+from .netcdf import *
+
+
+@runtime_checkable
+class Exporter(Protocol):
+    def export(self, obj, output_directory: str, base_filename: str):  # pragma: no cover - protocol
+        ...