voxcity-0.3.0-py3-none-any.whl → voxcity-0.3.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of voxcity might be problematic.

voxcity/utils/weather.py ADDED

@@ -0,0 +1,523 @@
+ import requests
+ import xml.etree.ElementTree as ET
+ import re
+ from math import radians, sin, cos, sqrt, atan2
+ from pathlib import Path
+ from typing import Optional, Dict, List, Tuple, Union
+ import json
+ import zipfile
+ import pandas as pd
+ import io
+ import os
+ import numpy as np
+ from datetime import datetime
+
+ def safe_rename(src: Path, dst: Path) -> Path:
+     """
+     Safely rename a file, handling existing files by adding a number suffix.
+
+     Args:
+         src: Source file path
+         dst: Destination file path
+
+     Returns:
+         Path: Final destination path used
+     """
+     if not dst.exists():
+         src.rename(dst)
+         return dst
+
+     # If the destination exists, add a numeric suffix until the name is free
+     base = dst.stem
+     ext = dst.suffix
+     counter = 1
+     while True:
+         new_dst = dst.with_name(f"{base}_{counter}{ext}")
+         if not new_dst.exists():
+             src.rename(new_dst)
+             return new_dst
+         counter += 1
+
+ def safe_extract(zip_ref: zipfile.ZipFile, filename: str, extract_dir: Path) -> Path:
+     """
+     Safely extract a file from a zip archive, handling existing files.
+
+     Args:
+         zip_ref: Open ZipFile reference
+         filename: Name of file to extract
+         extract_dir: Directory to extract to
+
+     Returns:
+         Path: Path to extracted file
+     """
+     try:
+         zip_ref.extract(filename, extract_dir)
+         return extract_dir / filename
+     except FileExistsError:
+         # If the file exists, write the member out under a temporary name.
+         # ZipFile.extract takes no alternate-name argument (its third
+         # parameter is pwd), so copy the member manually via ZipFile.open.
+         temp_name = f"temp_{os.urandom(4).hex()}_{Path(filename).name}"
+         temp_path = extract_dir / temp_name
+         with zip_ref.open(filename) as member, open(temp_path, 'wb') as out:
+             out.write(member.read())
+         return temp_path
+
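A minimal usage sketch of the two helpers above; the archive and paths are hypothetical:

from pathlib import Path
import zipfile

with zipfile.ZipFile("weather_data.zip") as zf:           # hypothetical archive
    epw = safe_extract(zf, "station.epw", Path("./out"))  # hypothetical member name

# Collisions get _1, _2, ... suffixes instead of overwriting
final_path = safe_rename(epw, Path("./out/station.epw"))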
+ def process_epw(epw_path: Union[str, Path]) -> Tuple[pd.DataFrame, Dict]:
+     """
+     Process an EPW file into a pandas DataFrame.
+
+     Args:
+         epw_path: Path to the EPW file
+
+     Returns:
+         Tuple containing:
+             - DataFrame with hourly weather data
+             - Dictionary with EPW header metadata
+     """
+     # EPW column names (these are standardized)
+     columns = [
+         'Year', 'Month', 'Day', 'Hour', 'Minute',
+         'Data Source and Uncertainty Flags',
+         'Dry Bulb Temperature', 'Dew Point Temperature',
+         'Relative Humidity', 'Atmospheric Station Pressure',
+         'Extraterrestrial Horizontal Radiation',
+         'Extraterrestrial Direct Normal Radiation',
+         'Horizontal Infrared Radiation Intensity',
+         'Global Horizontal Radiation',
+         'Direct Normal Radiation', 'Diffuse Horizontal Radiation',
+         'Global Horizontal Illuminance',
+         'Direct Normal Illuminance', 'Diffuse Horizontal Illuminance',
+         'Zenith Luminance', 'Wind Direction', 'Wind Speed',
+         'Total Sky Cover', 'Opaque Sky Cover', 'Visibility',
+         'Ceiling Height', 'Present Weather Observation',
+         'Present Weather Codes', 'Precipitable Water',
+         'Aerosol Optical Depth', 'Snow Depth',
+         'Days Since Last Snowfall', 'Albedo',
+         'Liquid Precipitation Depth', 'Liquid Precipitation Quantity'
+     ]
+
+     # Read EPW file
+     with open(epw_path, 'r') as f:
+         lines = f.readlines()
+
+     # Extract header metadata (first 8 lines)
+     headers = {
+         'LOCATION': lines[0].strip(),
+         'DESIGN_CONDITIONS': lines[1].strip(),
+         'TYPICAL_EXTREME_PERIODS': lines[2].strip(),
+         'GROUND_TEMPERATURES': lines[3].strip(),
+         'HOLIDAYS_DAYLIGHT_SAVINGS': lines[4].strip(),
+         'COMMENTS_1': lines[5].strip(),
+         'COMMENTS_2': lines[6].strip(),
+         'DATA_PERIODS': lines[7].strip()
+     }
+
+     # Parse location data
+     location = headers['LOCATION'].split(',')
+     if len(location) >= 10:
+         headers['LOCATION'] = {
+             'City': location[1].strip(),
+             'State': location[2].strip(),
+             'Country': location[3].strip(),
+             'Data Source': location[4].strip(),
+             'WMO': location[5].strip(),
+             'Latitude': float(location[6]),
+             'Longitude': float(location[7]),
+             'Time Zone': float(location[8]),
+             'Elevation': float(location[9])
+         }
+
+     # Create DataFrame from weather data (skipping headers)
+     data = [line.strip().split(',') for line in lines[8:]]
+     df = pd.DataFrame(data, columns=columns)
+
+     # Convert numeric columns
+     numeric_columns = [
+         'Year', 'Month', 'Day', 'Hour', 'Minute',
+         'Dry Bulb Temperature', 'Dew Point Temperature',
+         'Relative Humidity', 'Atmospheric Station Pressure',
+         'Extraterrestrial Horizontal Radiation',
+         'Extraterrestrial Direct Normal Radiation',
+         'Horizontal Infrared Radiation Intensity',
+         'Global Horizontal Radiation',
+         'Direct Normal Radiation', 'Diffuse Horizontal Radiation',
+         'Global Horizontal Illuminance',
+         'Direct Normal Illuminance', 'Diffuse Horizontal Illuminance',
+         'Zenith Luminance', 'Wind Direction', 'Wind Speed',
+         'Total Sky Cover', 'Opaque Sky Cover', 'Visibility',
+         'Ceiling Height', 'Precipitable Water',
+         'Aerosol Optical Depth', 'Snow Depth',
+         'Days Since Last Snowfall', 'Albedo',
+         'Liquid Precipitation Depth', 'Liquid Precipitation Quantity'
+     ]
+
+     for col in numeric_columns:
+         df[col] = pd.to_numeric(df[col], errors='coerce')
+
+     # Create datetime index
+     df['datetime'] = pd.to_datetime({
+         'year': df['Year'],
+         'month': df['Month'],
+         'day': df['Day'],
+         'hour': df['Hour'] - 1,  # EPW hours are 1-24, pandas expects 0-23
+         'minute': df['Minute']
+     })
+     df.set_index('datetime', inplace=True)
+
+     return df, headers
+
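A short usage sketch; the file path is hypothetical:

df, headers = process_epw("station.epw")       # hypothetical path
print(headers['LOCATION'])                     # parsed location metadata
print(df['Dry Bulb Temperature'].describe())   # sanity check on the 8760 hourly values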
+ def get_nearest_epw_from_climate_onebuilding(latitude: float, longitude: float, output_dir: str = "./", max_distance: Optional[float] = None,
+                                              extract_zip: bool = True, load_data: bool = True) -> Tuple[Optional[str], Optional[pd.DataFrame], Optional[Dict]]:
+     """
+     Download and process an EPW weather file from Climate.OneBuilding.Org based on coordinates.
+
+     Args:
+         latitude (float): Latitude of the location
+         longitude (float): Longitude of the location
+         output_dir (str): Directory to save the EPW file (defaults to current directory)
+         max_distance (float, optional): Maximum distance in kilometers to search for stations
+         extract_zip (bool): Whether to extract the ZIP file (default True)
+         load_data (bool): Whether to load the EPW data into a DataFrame (default True)
+
+     Returns:
+         Tuple containing:
+             - Path to the EPW file (or None if download fails)
+             - DataFrame with hourly weather data (if load_data=True)
+             - Dictionary with EPW header metadata (if load_data=True)
+     """
+
+     # List of primary KML sources
+     KML_SOURCES = {
+         "Africa": "https://climate.onebuilding.org/WMO_Region_1_Africa/Region1_Africa_EPW_Processing_locations.kml",
+         "Asia": "https://climate.onebuilding.org/WMO_Region_2_Asia/Region2_Asia_EPW_Processing_locations.kml",
+         "Japan": "https://climate.onebuilding.org/sources/JGMY_EPW_Processing_locations.kml",
+         "India": "https://climate.onebuilding.org/sources/ITMY_EPW_Processing_locations.kml",
+         "Argentina": "https://climate.onebuilding.org/sources/ArgTMY_EPW_Processing_locations.kml",
+         "Canada": "https://climate.onebuilding.org/sources/Region4_Canada_TMYx_EPW_Processing_locations.kml",
+         "USA": "https://climate.onebuilding.org/sources/Region4_USA_TMYx_EPW_Processing_locations.kml",
+         "Caribbean": "https://climate.onebuilding.org/sources/Region4_NA_CA_Caribbean_TMYx_EPW_Processing_locations.kml",
+         "Southwest_Pacific": "https://climate.onebuilding.org/sources/Region5_Southwest_Pacific_TMYx_EPW_Processing_locations.kml",
+         "Europe": "https://climate.onebuilding.org/sources/Region6_Europe_TMYx_EPW_Processing_locations.kml",
+         "Antarctica": "https://climate.onebuilding.org/sources/Region7_Antarctica_TMYx_EPW_Processing_locations.kml"
+     }
+
+     def try_decode(content: bytes) -> str:
+         """Try different encodings to decode content."""
+         encodings = ['utf-8', 'latin1', 'iso-8859-1', 'cp1252']
+         for encoding in encodings:
+             try:
+                 return content.decode(encoding)
+             except UnicodeDecodeError:
+                 continue
+
+         # If all else fails, decode with replacement characters
+         return content.decode('utf-8', errors='replace')
+
+     def clean_xml(content: str) -> str:
+         """Clean XML content of invalid characters."""
+         # Replace problematic characters
+         content = content.replace('ñ', 'n')
+         content = content.replace('Ñ', 'N')
+
+         # Remove other invalid XML characters
+         content = re.sub(r'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\xFF]', '', content)
+         return content
+
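The fallback chain in try_decode matters because some station names contain non-UTF-8 bytes. A minimal illustration of the same logic, inlined here because the helpers are nested inside get_nearest_epw_from_climate_onebuilding (the sample string is hypothetical):

# "Cañon City" encoded as latin1 is not valid UTF-8, so utf-8 fails and latin1 succeeds
raw = "Cañon City".encode("latin1")
text = None
for enc in ("utf-8", "latin1", "iso-8859-1", "cp1252"):
    try:
        text = raw.decode(enc)
        break
    except UnicodeDecodeError:
        continue
print(text.replace("ñ", "n"))  # -> "Canon City", safe for the XML parser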
+     def haversine_distance(lat1: float, lon1: float, lat2: float, lon2: float) -> float:
+         """Calculate the great circle distance between two points on Earth."""
+         R = 6371  # Earth's radius in kilometers
+
+         lat1, lon1, lat2, lon2 = map(radians, [lat1, lon1, lat2, lon2])
+         dlat = lat2 - lat1
+         dlon = lon2 - lon1
+
+         a = sin(dlat/2)**2 + cos(lat1) * cos(lat2) * sin(dlon/2)**2
+         c = 2 * atan2(sqrt(a), sqrt(1-a))
+         return R * c
+
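As a worked check of the haversine formula above (treating the nested helper as callable for illustration; coordinates are approximate):

# Tokyo (35.68 N, 139.65 E) to Osaka (34.69 N, 135.50 E) is roughly 400 km
d = haversine_distance(35.68, 139.65, 34.69, 135.50)
print(f"{d:.0f} km")  # prints roughly 393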
+     def parse_coordinates(point_text: str) -> Optional[Tuple[float, float, float]]:
+         """Parse coordinates from KML Point text (KML order is lon,lat[,elevation])."""
+         try:
+             coords = point_text.strip().split(',')
+             if len(coords) >= 2:
+                 lon, lat = map(float, coords[:2])
+                 elevation = float(coords[2]) if len(coords) > 2 else 0
+                 return lat, lon, elevation
+         except (ValueError, IndexError):
+             pass
+         return None
+
+     def parse_station_from_description(desc: str, point_coords: Optional[Tuple[float, float, float]] = None) -> Optional[Dict]:
+         """Parse station metadata from KML description."""
+         if not desc:
+             return None
+
+         # Extract URL
+         url_match = re.search(r'URL (https://.*?\.zip)', desc)
+         if not url_match:
+             return None
+
+         url = url_match.group(1)
+
+         # First try to parse coordinates in degree/minute format
+         coord_match = re.search(r'([NS]) (\d+)°\s*(\d+\.\d+)\'.*?([EW]) (\d+)°\s*(\d+\.\d+)\'', desc)
+
+         if coord_match:
+             ns, lat_deg, lat_min, ew, lon_deg, lon_min = coord_match.groups()
+             lat = float(lat_deg) + float(lat_min)/60
+             if ns == 'S':
+                 lat = -lat
+             lon = float(lon_deg) + float(lon_min)/60
+             if ew == 'W':
+                 lon = -lon
+         elif point_coords:
+             lat, lon, _ = point_coords
+         else:
+             return None
+
+         # Extract metadata with error handling
+         def extract_value(pattern: str, default: Optional[str] = None) -> Optional[str]:
+             match = re.search(pattern, desc)
+             return match.group(1) if match else default
+
+         metadata = {
+             'url': url,
+             'latitude': lat,
+             'longitude': lon,
+             'elevation': int(extract_value(r'Elevation <b>(-?\d+)</b>', '0')),
+             'name': extract_value(r'<b>(.*?)</b>'),
+             'wmo': extract_value(r'WMO <b>(\d+)</b>'),
+             'climate_zone': extract_value(r'Climate Zone <b>(.*?)</b>'),
+             'period': extract_value(r'Period of Record=(\d{4}-\d{4})'),
+             'heating_db': extract_value(r'99% Heating DB <b>(.*?)</b>'),
+             'cooling_db': extract_value(r'1% Cooling DB <b>(.*?)</b>'),
+             'hdd18': extract_value(r'HDD18 <b>(\d+)</b>'),
+             'cdd10': extract_value(r'CDD10 <b>(\d+)</b>'),
+             'time_zone': extract_value(r'Time Zone {GMT <b>([-+]?\d+\.\d+)</b>')
+         }
+
+         return metadata
+
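To see what the regexes above expect, here is a hypothetical description string in that shape (station name, URL, and values are invented for illustration; fields absent from the string simply come back as None):

desc = ("<b>Example AP</b> Data Source TMYx WMO <b>476710</b> "
        "N 35° 33.00', E 139° 47.00' Elevation <b>11</b> "
        "URL https://climate.onebuilding.org/example/Example.AP.zip")
station = parse_station_from_description(desc)
# -> latitude 35.55 (35 + 33/60), longitude ~139.783, wmo '476710', elevation 11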
+     def get_stations_from_kml(kml_url: str) -> List[Dict]:
+         """Get weather stations from a KML file."""
+         try:
+             response = requests.get(kml_url, timeout=30)
+             response.raise_for_status()
+
+             # Try to decode content
+             content = try_decode(response.content)
+             content = clean_xml(content)
+
+             try:
+                 root = ET.fromstring(content.encode('utf-8'))
+             except ET.ParseError as e:
+                 print(f"Error parsing KML file {kml_url}: {e}")
+                 return []
+
+             # Define namespace
+             ns = {'kml': 'http://earth.google.com/kml/2.1'}
+
+             stations = []
+
+             # Find all Placemarks
+             for placemark in root.findall('.//kml:Placemark', ns):
+                 name = placemark.find('kml:name', ns)
+                 desc = placemark.find('kml:description', ns)
+                 point = placemark.find('.//kml:Point/kml:coordinates', ns)
+
+                 # Skip placemarks without a usable description
+                 if desc is None or not desc.text or "Data Source" not in desc.text:
+                     continue
+
+                 # Get coordinates from the Point element if available
+                 point_coords = None
+                 if point is not None and point.text:
+                     point_coords = parse_coordinates(point.text)
+
+                 # Parse station data
+                 station_data = parse_station_from_description(desc.text, point_coords)
+                 if station_data:
+                     station_data['name'] = name.text if name is not None else "Unknown"
+                     station_data['kml_source'] = kml_url
+                     stations.append(station_data)
+
+             return stations
+
+         except requests.exceptions.RequestException as e:
+             print(f"Error accessing KML file {kml_url}: {e}")
+             return []
+         except Exception as e:
+             print(f"Error processing KML file {kml_url}: {e}")
+             return []
+
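An illustrative call for a single region (this performs a live network request, and the helper is nested inside the main function, so treat it as a sketch rather than importable code):

stations = get_stations_from_kml(KML_SOURCES["Japan"])
print(f"{len(stations)} stations parsed from the Japan KML")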
+     try:
+         # Create output directory if it doesn't exist
+         Path(output_dir).mkdir(parents=True, exist_ok=True)
+
+         # Get stations from all KML sources
+         print("Fetching weather station data from Climate.OneBuilding.Org...")
+         all_stations = []
+
+         for region, url in KML_SOURCES.items():
+             print(f"Scanning {region}...")
+             stations = get_stations_from_kml(url)
+             all_stations.extend(stations)
+             print(f"Found {len(stations)} stations in {region}")
+
+         print(f"\nTotal stations found: {len(all_stations)}")
+
+         if not all_stations:
+             raise ValueError("No weather stations found")
+
+         # Calculate distances and find the nearest station
+         stations_with_distances = [
+             (station, haversine_distance(latitude, longitude, station['latitude'], station['longitude']))
+             for station in all_stations
+         ]
+
+         # Filter by max distance if specified
+         if max_distance is not None:
+             close_stations = [
+                 (station, distance)
+                 for station, distance in stations_with_distances
+                 if distance <= max_distance
+             ]
+             if not close_stations:
+                 # If no stations fall within max_distance, use the closest one anyway
+                 closest_station, min_distance = min(stations_with_distances, key=lambda x: x[1])
+                 print(f"\nNo stations found within {max_distance} km. Closest station is {min_distance:.1f} km away.")
+                 print("Using closest available station.")
+                 stations_with_distances = [(closest_station, min_distance)]
+             else:
+                 stations_with_distances = close_stations
+
+         # Find the nearest station
+         nearest_station, distance = min(stations_with_distances, key=lambda x: x[1])
+
+         # Download the EPW file
+         print(f"\nDownloading EPW file for {nearest_station['name']}...")
+         epw_response = requests.get(nearest_station['url'], timeout=60)  # timeout added so a stalled download cannot hang
+         epw_response.raise_for_status()
+
+         # Create a temporary directory for zip extraction
+         temp_dir = Path(output_dir) / "temp"
+         temp_dir.mkdir(parents=True, exist_ok=True)
+
+         # Save the zip file
+         zip_file = temp_dir / "weather_data.zip"
+         with open(zip_file, 'wb') as f:
+             f.write(epw_response.content)
+
+         final_epw = None
+         try:
+             # Extract the zip file
+             if extract_zip:
+                 with zipfile.ZipFile(zip_file, 'r') as zip_ref:
+                     # Find the EPW file in the archive
+                     epw_files = [f for f in zip_ref.namelist() if f.lower().endswith('.epw')]
+                     if not epw_files:
+                         raise ValueError("No EPW file found in the downloaded archive")
+
+                     # Extract the EPW file
+                     epw_filename = epw_files[0]
+                     extracted_epw = safe_extract(zip_ref, epw_filename, temp_dir)
+
+                     # Move the EPW file to the final location
+                     final_epw = Path(output_dir) / f"{nearest_station['name'].replace(' ', '_').replace(',', '').lower()}.epw"
+                     final_epw = safe_rename(extracted_epw, final_epw)
+         finally:
+             # Clean up temporary files regardless of success or failure
+             try:
+                 if zip_file.exists():
+                     zip_file.unlink()
+                 if temp_dir.exists() and not any(temp_dir.iterdir()):
+                     temp_dir.rmdir()
+             except Exception as e:
+                 print(f"Warning: Could not clean up temporary files: {e}")
+
+         if final_epw is None:
+             raise ValueError("Failed to extract EPW file")
+
+         # Save metadata
+         metadata_file = final_epw.with_suffix('.json')
+         with open(metadata_file, 'w') as f:
+             json.dump(nearest_station, f, indent=2)
+
+         # Print station information
+         print(f"\nDownloaded EPW file for {nearest_station['name']}")
+         print(f"Distance: {distance:.2f} km")
+         print(f"Station coordinates: {nearest_station['latitude']}, {nearest_station['longitude']}")
+         if nearest_station['wmo']:
+             print(f"WMO: {nearest_station['wmo']}")
+         if nearest_station['climate_zone']:
+             print(f"Climate zone: {nearest_station['climate_zone']}")
+         if nearest_station['period']:
+             print(f"Data period: {nearest_station['period']}")
+         print("Files saved:")
+         print(f"- EPW: {final_epw}")
+         print(f"- Metadata: {metadata_file}")
+
+         # Load the EPW data if requested
+         df = None
+         headers = None
+         if load_data:
+             print("\nLoading EPW data...")
+             df, headers = process_epw(final_epw)
+             print(f"Loaded {len(df)} hourly records")
+
+         return str(final_epw), df, headers
+
+     except Exception as e:
+         print(f"Error processing data: {e}")
+         return None, None, None
+
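An end-to-end usage sketch of the public entry point; the coordinates and output directory are illustrative:

epw_path, df, headers = get_nearest_epw_from_climate_onebuilding(
    latitude=1.29, longitude=103.85,   # central Singapore, for illustration
    output_dir="./weather",
    max_distance=100.0                 # falls back to the closest station if none is this near
)
if epw_path is not None:
    print(epw_path, len(df), "hourly rows")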
+ def read_epw_for_solar_simulation(epw_file_path):
+     with open(epw_file_path, 'r', encoding='utf-8') as f:
+         lines = f.readlines()
+
+     # Find the LOCATION line
+     location_line = None
+     for line in lines:
+         if line.startswith("LOCATION"):
+             location_line = line.strip().split(',')
+             break
+
+     if location_line is None:
+         raise ValueError("Could not find LOCATION line in EPW file.")
+
+     # LOCATION line format:
+     # LOCATION,City,State/Country,Country,DataSource,WMO,Latitude,Longitude,Time Zone,Elevation
+     # Example: LOCATION,Marina.Muni.AP,CA,USA,SRC-TMYx,690070,36.68300,-121.7670,-8.0,43.0
+     lat = float(location_line[6])
+     lon = float(location_line[7])
+     tz = float(location_line[8])  # local standard time offset
+     elevation_m = float(location_line[9])
+
+     # Find start of data
+     data_start_index = None
+     for i, line in enumerate(lines):
+         vals = line.strip().split(',')
+         if i >= 8 and len(vals) > 30:
+             data_start_index = i
+             break
+
+     if data_start_index is None:
+         raise ValueError("Could not find start of weather data lines in EPW file.")
+
+     data = []
+     for l in lines[data_start_index:]:
+         vals = l.strip().split(',')
+         if len(vals) < 16:
+             continue
+         year = int(vals[0])
+         month = int(vals[1])
+         day = int(vals[2])
+         hour = int(vals[3]) - 1  # EPW hours are 1-24
+         # 0-based EPW fields: 13 = Global Horizontal, 14 = Direct Normal,
+         # 15 = Diffuse Horizontal (matching the column list in process_epw)
+         dni = float(vals[14])
+         dhi = float(vals[15])
+         timestamp = pd.Timestamp(year, month, day, hour)
+         data.append([timestamp, dni, dhi])
+
+     df = pd.DataFrame(data, columns=['time', 'DNI', 'DHI']).set_index('time')
+     df = df.sort_index()
+
+     return df, lat, lon, tz, elevation_m
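A short usage sketch; the EPW path is hypothetical:

df, lat, lon, tz, elev = read_epw_for_solar_simulation("station.epw")
print(f"site: {lat:.3f}, {lon:.3f} (UTC{tz:+.1f}), {elev:.0f} m")
print(df[['DNI', 'DHI']].resample('D').sum().head())  # daily irradiation totals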
voxcity/voxcity.py CHANGED
@@ -306,8 +306,11 @@ def get_dem_grid(rectangle_vertices, meshsize, source, output_dir, **kwargs):
      image = get_dem_image(roi_buffered, source)

      # Save DEM data with appropriate resolution based on source
- if source in ["England 1m DTM", 'USGS 3DEP 1m', 'DEM France 1m', 'DEM France 5m', 'AUSTRALIA 5M DEM']:
+ if source in ["England 1m DTM", 'DEM France 1m', 'DEM France 5m', 'AUSTRALIA 5M DEM']:
          save_geotiff(image, geotiff_path, scale=meshsize, region=roi_buffered, crs='EPSG:4326')
+     elif source == 'USGS 3DEP 1m':
+         scale = max(meshsize, 1.25)
+         save_geotiff(image, geotiff_path, scale=scale, region=roi_buffered, crs='EPSG:4326')
      else:
          # Default to 30m resolution for other sources
          save_geotiff(image, geotiff_path, scale=30, region=roi_buffered)
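The new branch gives USGS 3DEP 1m exports a floor of 1.25 m on the sampling scale instead of following the mesh size directly; coarser meshes are unaffected. A quick illustration of the clamp:

for meshsize in (0.5, 1.0, 1.25, 2.0, 5.0):
    print(meshsize, "->", max(meshsize, 1.25))  # 0.5 and 1.0 clamp to 1.25; the rest pass through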
voxcity-0.3.1.dist-info/METADATA CHANGED

@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: voxcity
- Version: 0.3.0
+ Version: 0.3.1
  Summary: voxcity is an easy and one-stop tool to output 3d city models for microclimate simulation by integrating multiple geospatial open-data
  Author-email: Kunihiko Fujiwara <kunihiko@nus.edu.sg>
  Maintainer-email: Kunihiko Fujiwara <kunihiko@nus.edu.sg>
@@ -47,6 +47,7 @@ Requires-Dist: seaborn
  Requires-Dist: overturemaps
  Requires-Dist: protobuf==3.20.3
  Requires-Dist: timezonefinder
+ Requires-Dist: astral
  Provides-Extra: dev
  Requires-Dist: coverage; extra == "dev"
  Requires-Dist: mypy; extra == "dev"
voxcity-0.3.1.dist-info/RECORD CHANGED

@@ -1,5 +1,5 @@
  voxcity/__init__.py,sha256=HJM0D2Mv9qpk4JdVzt2SRAAk-hA1D_pCO0ezZH9F7KA,248
- voxcity/voxcity.py,sha256=G4G7BAm0b_BPn_NRDg7izSyjbUu0mEwVu_-XqFaos1I,32105
+ voxcity/voxcity.py,sha256=ewwSxA_lMIkQ5yiLZutq4UCLfnUm0r5f2Jiy-q6cFm0,32256
  voxcity/download/__init__.py,sha256=OgGcGxOXF4tjcEL6DhOnt13DYPTvOigUelp5xIpTqM0,171
  voxcity/download/eubucco.py,sha256=vd-LoWwUk1A1WC1cSeJTVRFPlAiU04NyQj3RMjohx4M,15149
  voxcity/download/gee.py,sha256=mHrG8mMhhOAvA6wASurZvUPpCKCcg75GriD6VN8VbCM,14297
@@ -18,15 +18,17 @@ voxcity/geo/__init_.py,sha256=rsj0OMzrTNACccdvEfmf632mb03BRUtKLuecppsxX40,62
  voxcity/geo/draw.py,sha256=yRaJHFAztLuFRO6gJtTGqLQPQkLvGrvw3E0fucnbKPQ,9090
  voxcity/geo/grid.py,sha256=l9iqi2OCmtJixCc3Y3RthF403pdrx6sB0565wZ1uHgM,40042
  voxcity/geo/utils.py,sha256=sR9InBHxV76XjlGPLD7blg_6EjbM0MG5DOyJffhBjWk,19372
- voxcity/sim/__init_.py,sha256=kArhaQ1zwMqYJYRA9Txtf6w6vdcZf173cp_jfSVK6Ls,43
- voxcity/sim/solar.py,sha256=QcfZLjx3LIbFI_hOR2yZNfIy4_RGvg5ej60QNGAgwTY,10219
- voxcity/sim/view.py,sha256=GSvqvuSOm_Gf224G6WZXDpqC9sGXfY73e79lR2rPjHE,29944
- voxcity/utils/__init_.py,sha256=2njagPXvdNur8otn77X9mjps5-cDBQIuk09206TTkvs,47
+ voxcity/sim/__init_.py,sha256=APdkcdaovj0v_RPOaA4SBvFUKT2RM7Hxuuz3Sux4gCo,65
+ voxcity/sim/solar.py,sha256=cTONZh1CXjcNXXOCD8Pn9FbVdMg3JUbtaxMUwifm0dk,20392
+ voxcity/sim/utils.py,sha256=sEYBB2-hLJxTiXQps1_-Fi7t1HN3-1OPOvBCWtgIisA,130
+ voxcity/sim/view.py,sha256=3NATlsxlIfkYEo6nb_VIghgElSLCyeZtThLauLpPNXc,29415
+ voxcity/utils/__init_.py,sha256=xjEadXQ9wXTw0lsx0JTbyTqASWw0GJLfT6eRr0CyQzw,71
  voxcity/utils/lc.py,sha256=RwPd-VY3POV3gTrBhM7TubgGb9MCd3nVah_G8iUEF7k,11562
  voxcity/utils/visualization.py,sha256=GVERj0noHAvJtDT0fV3K6w7pTfuAUfwKez-UMuEakEg,42214
- voxcity-0.3.0.dist-info/AUTHORS.rst,sha256=m82vkI5QokEGdcHof2OxK39lf81w1P58kG9ZNNAKS9U,175
- voxcity-0.3.0.dist-info/LICENSE,sha256=-hGliOFiwUrUSoZiB5WF90xXGqinKyqiDI2t6hrnam8,1087
- voxcity-0.3.0.dist-info/METADATA,sha256=v1eHWrygVUiclbRseVsZzvOXrMylJrPvzdAWbcVU2r8,19853
- voxcity-0.3.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- voxcity-0.3.0.dist-info/top_level.txt,sha256=00b2U-LKfDllt6RL1R33MXie5MvxzUFye0NGD96t_8I,8
- voxcity-0.3.0.dist-info/RECORD,,
+ voxcity/utils/weather.py,sha256=Qwnr0paGdRQstwD0A9q2QfJIV-aQUyxH-6viRwXOuwM,21482
+ voxcity-0.3.1.dist-info/AUTHORS.rst,sha256=m82vkI5QokEGdcHof2OxK39lf81w1P58kG9ZNNAKS9U,175
+ voxcity-0.3.1.dist-info/LICENSE,sha256=-hGliOFiwUrUSoZiB5WF90xXGqinKyqiDI2t6hrnam8,1087
+ voxcity-0.3.1.dist-info/METADATA,sha256=DkgUBxIg7p7L-ftv9zR0Vxzyx4cDbQzNrZuIr2miNEs,19876
+ voxcity-0.3.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ voxcity-0.3.1.dist-info/top_level.txt,sha256=00b2U-LKfDllt6RL1R33MXie5MvxzUFye0NGD96t_8I,8
+ voxcity-0.3.1.dist-info/RECORD,,