ObjectNat 0.1.5__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ObjectNat might be problematic.
- objectnat/__init__.py +11 -17
- objectnat/_api.py +15 -0
- objectnat/_config.py +67 -0
- objectnat/_version.py +1 -0
- objectnat/methods/balanced_buildings.py +4 -1
- objectnat/methods/cluster_points_in_polygons.py +4 -1
- objectnat/methods/coverage_zones.py +6 -21
- objectnat/methods/isochrones.py +121 -47
- objectnat/methods/living_buildings_osm.py +24 -94
- objectnat/methods/provision/__init__.py +0 -0
- objectnat/methods/provision/city_provision.py +325 -0
- objectnat/methods/provision/provision.py +90 -0
- objectnat/methods/provision/provision_exceptions.py +59 -0
- objectnat/methods/visibility_analysis.py +4 -1
- {objectnat-0.1.5.dist-info → objectnat-0.2.1.dist-info}/METADATA +35 -21
- objectnat-0.2.1.dist-info/RECORD +21 -0
- {objectnat-0.1.5.dist-info → objectnat-0.2.1.dist-info}/WHEEL +1 -1
- objectnat/methods/adjacency_matrix.py +0 -39
- objectnat/methods/demands.py +0 -43
- objectnat/methods/osm_graph.py +0 -23
- objectnat/methods/provision.py +0 -135
- objectnat-0.1.5.dist-info/RECORD +0 -18
- {objectnat-0.1.5.dist-info → objectnat-0.2.1.dist-info}/LICENSE.txt +0 -0
objectnat/__init__.py
CHANGED

@@ -1,19 +1,13 @@
-
+"""
+ObjectNat
+========
 
-from dongraphio.enums import GraphType
 
-
-
-
-
-
-from .
-from .
-from .
-from .methods.provision import NoOsmIdException, NoWeightAdjacencyException, get_provision
-from .methods.visibility_analysis import (
-    calculate_visibility_catchment_area,
-    get_visibilities_from_points,
-    get_visibility,
-    get_visibility_accurate,
-)
+ObjectNat is an open-source library created for geospatial analysis created by IDU team.
+
+Homepage https://github.com/DDonnyy/ObjectNat.
+"""
+
+from ._config import config
+from ._api import *
+from ._version import VERSION as __version__
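The package root now exposes the version string and the shared configuration object directly. A minimal sketch of what the rewritten `__init__.py` implies, assuming the star-import from `_api` re-exports every public name defined there:

```python
import objectnat

print(objectnat.__version__)                 # "0.2.1", taken from objectnat/_version.py
objectnat.config.change_logger_lvl("INFO")   # the config singleton created in objectnat/_config.py
```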
objectnat/_api.py
ADDED

@@ -0,0 +1,15 @@
+# pylint: disable=unused-import,wildcard-import,unused-wildcard-import
+from iduedu import *
+
+from .methods.balanced_buildings import get_balanced_buildings
+from .methods.cluster_points_in_polygons import get_clusters_polygon
+from .methods.coverage_zones import get_isochrone_zone_coverage, get_radius_zone_coverage
+from .methods.isochrones import get_accessibility_isochrones
+from .methods.living_buildings_osm import download_buildings
+from .methods.provision.provision import clip_provision, get_service_provision, recalculate_links
+from .methods.visibility_analysis import (
+    calculate_visibility_catchment_area,
+    get_visibilities_from_points,
+    get_visibility,
+    get_visibility_accurate,
+)
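With this module the public API is flattened into the package root: `objectnat/__init__.py` star-imports `_api`, so the functions listed above can be imported straight from `objectnat`. A usage sketch based only on the re-exports shown above; the territory name is the illustrative value used in the docstrings elsewhere in this diff, and the call requires network access to OpenStreetMap:

```python
# Sketch only: relies on the re-exports added in objectnat/_api.py above.
from objectnat import download_buildings

# Name-based lookup, mirroring the example kept in living_buildings_osm.py's docstring.
buildings = download_buildings(osm_territory_name="Saint-Petersburg, Russia")
print(buildings.head())
```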
objectnat/_config.py
ADDED

@@ -0,0 +1,67 @@
+import sys
+from typing import Literal
+
+from iduedu import config as iduedu_config
+from loguru import logger
+
+
+class Config:
+    """
+    A configuration class to manage global settings for the application, such as Overpass API URL,
+    timeouts, and logging options.
+
+    Attributes
+    ----------
+    overpass_url : str
+        URL for accessing the Overpass API. Defaults to "http://lz4.overpass-api.de/api/interpreter".
+    timeout : int or None
+        Timeout in seconds for API requests. If None, no timeout is applied.
+    enable_tqdm_bar : bool
+        Enables or disables progress bars (via tqdm). Defaults to True.
+    logger : Logger
+        Logging instance to handle application logging.
+
+    Methods
+    -------
+    change_logger_lvl(lvl: Literal["TRACE", "DEBUG", "INFO", "WARN", "ERROR"])
+        Changes the logging level to the specified value.
+    set_overpass_url(url: str)
+        Sets a new Overpass API URL.
+    set_timeout(timeout: int)
+        Sets the timeout for API requests.
+    set_enable_tqdm(enable: bool)
+        Enables or disables progress bars in the application.
+    """
+
+    def __init__(
+        self,
+        overpass_url="http://lz4.overpass-api.de/api/interpreter",
+        timeout=None,
+        enable_tqdm_bar=True,
+    ):
+        self.overpass_url = overpass_url
+        self.timeout = timeout
+        self.enable_tqdm_bar = enable_tqdm_bar
+        self.logger = logger
+        self.iduedu_config = iduedu_config
+
+    def change_logger_lvl(self, lvl: Literal["TRACE", "DEBUG", "INFO", "WARN", "ERROR"]):
+        self.logger.remove()
+        self.logger.add(sys.stderr, level=lvl)
+        self.iduedu_config.change_logger_lvl(lvl)
+
+    def set_overpass_url(self, url: str):
+        self.overpass_url = url
+        self.iduedu_config.set_overpass_url(url)
+
+    def set_timeout(self, timeout: int):
+        self.timeout = timeout
+        self.iduedu_config.set_timeout(timeout)
+
+    def set_enable_tqdm(self, enable: bool):
+        self.enable_tqdm_bar = enable
+        self.iduedu_config.set_enable_tqdm(enable)
+
+
+config = Config()
+config.change_logger_lvl("INFO")
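The new `config` singleton centralizes settings that were previously scattered across modules and forwards them to the bundled `iduedu` configuration. A small usage sketch that only calls the methods defined above; the concrete values are illustrative, and the URL is the Overpass mirror hard-coded in the 0.1.5 code removed later in this diff:

```python
from objectnat import config

config.change_logger_lvl("DEBUG")                               # loguru level, also forwarded to iduedu
config.set_overpass_url("http://overpass-api.de/api/interpreter")  # pick a different Overpass mirror
config.set_timeout(120)                                         # illustrative request timeout in seconds
config.set_enable_tqdm(False)                                   # turn progress bars off
```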
objectnat/_version.py
ADDED

@@ -0,0 +1 @@
+VERSION = "0.2.1"
objectnat/methods/balanced_buildings.py
CHANGED

@@ -1,7 +1,10 @@
 import geopandas as gpd
 import population_restorator.balancer.houses as b_build
 import population_restorator.balancer.territories as b_terr
-
+
+from objectnat import config
+
+logger = config.logger
 
 
 def get_balanced_buildings(
objectnat/methods/cluster_points_in_polygons.py
CHANGED

@@ -2,9 +2,12 @@ from typing import Literal
 
 import geopandas as gpd
 import pandas as pd
-from loguru import logger
 from sklearn.cluster import DBSCAN, HDBSCAN
 
+from objectnat import config
+
+logger = config.logger
+
 
 def _get_cluster(services_select, min_dist, min_point, method):
     services_coords = pd.DataFrame(
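Both modules drop their direct `loguru` import in favour of the package-level logger, so log verbosity is now controlled in one place. A hedged sketch, assuming the loguru handlers are shared exactly as set up in `_config.py`:

```python
from objectnat import config

# Silencing the library here also silences balanced_buildings and
# cluster_points_in_polygons, since both now use config.logger.
config.change_logger_lvl("ERROR")
```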
objectnat/methods/coverage_zones.py
CHANGED

@@ -2,8 +2,6 @@ from typing import Literal
 
 import geopandas as gpd
 import networkx as nx
-import pandas as pd
-from dongraphio import GraphType
 
 from .isochrones import get_accessibility_isochrones
 

@@ -48,7 +46,6 @@ def get_isochrone_zone_coverage(
     weight_type: Literal["time_min", "length_meter"],
     weight_value: int,
     city_graph: nx.Graph,
-    graph_type: list[GraphType],
 ) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame | None, gpd.GeoDataFrame | None]:
     """
     Create isochrones for each service location based on travel time/distance.

@@ -56,15 +53,13 @@ def get_isochrone_zone_coverage(
     Parameters
     ----------
     services : gpd.GeoDataFrame
-
+        Layer containing the service locations.
     weight_type : str
         Type of weight used for calculating isochrones, either "time_min" or "length_meter".
     weight_value : int
         The value of the weight, representing time in minutes or distance in meters.
     city_graph : nx.Graph
         The graph representing the city's transportation network.
-    graph_type : list[GraphType]
-        List of graph types to be used for isochrone calculations.
 
     Returns
     -------

@@ -76,10 +71,10 @@ def get_isochrone_zone_coverage(
     >>> import networkx as nx
     >>> import geopandas as gpd
     >>> from shapely.geometry import Point
-
+    >>> from iduedu import get_intermodal_graph
 
-    >>> # Create a sample city graph
-    >>>
+    >>> # Create a sample city graph with get_intermodal_graph()
+    >>> graph = get_intermodal_graph(polygon=my_territory_polygon)
 
     >>> # Create a sample GeoDataFrame for services
     >>> services = gpd.read_file('services.geojson')

@@ -87,19 +82,9 @@ def get_isochrone_zone_coverage(
     >>> # Define parameters
     >>> weight_type = "time_min"
     >>> weight_value = 10
-    >>> graph_type = [GraphType.PUBLIC_TRANSPORT, GraphType.WALK]
 
     >>> # Get isochrone zone coverage
-    >>>
-    >>> isochrone_zones[0] # represent isochrones geodataframe
+    >>> isochrones, pt_stops, pt_routes = get_isochrone_zone_coverage(services, weight_type, weight_value, city_graph)
     """
-
-    points = services.geometry.representative_point()
-    iso, routes, stops = get_accessibility_isochrones(
-        points, graph_type, weight_value, weight_type, city_graph, points.crs.to_epsg()
-    )
-    services_ = services.copy()
-    iso = gpd.GeoDataFrame(
-        pd.concat([iso.drop(columns=["point", "point_number"]), services_.drop(columns=["geometry"])], axis=1)
-    )
+    iso, routes, stops = get_accessibility_isochrones(services, weight_value, weight_type, city_graph)
    return iso, routes, stops
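After this change the coverage function no longer takes a `graph_type` list and simply delegates to the new `get_accessibility_isochrones`. A usage sketch following the updated docstring; `my_territory_polygon` and `services.geojson` are placeholders carried over from those examples, and re-projecting the services onto the graph CRS is an assumption based on the CRS check in `get_accessibility_isochrones`:

```python
import geopandas as gpd
from iduedu import get_intermodal_graph
from objectnat import get_isochrone_zone_coverage

graph = get_intermodal_graph(polygon=my_territory_polygon)                 # placeholder polygon
services = gpd.read_file("services.geojson").to_crs(graph.graph["crs"])    # align CRS with the graph

isochrones, pt_stops, pt_routes = get_isochrone_zone_coverage(
    services, weight_type="time_min", weight_value=10, city_graph=graph
)
```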
objectnat/methods/isochrones.py
CHANGED

@@ -1,66 +1,140 @@
+from typing import Literal
+
 import geopandas as gpd
 import networkx as nx
-
+import numpy as np
+import pandas as pd
+from osmnx import graph_to_gdfs
+from pyproj import CRS
+from scipy.spatial import KDTree
 from shapely import Point
+from shapely.ops import unary_union
+
+from objectnat import config
+
+logger = config.logger
 
 
 def get_accessibility_isochrones(
-    points:
-
-
-
-    city_graph: nx.MultiDiGraph,
-    city_crs: int,
+    points: gpd.GeoDataFrame,
+    weight_value: float,
+    weight_type: Literal["time_min", "length_meter"],
+    graph_nx: nx.Graph,
 ) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame | None, gpd.GeoDataFrame | None]:
     """
-    Calculate accessibility isochrones based on the provided city graph
+    Calculate accessibility isochrones from a gpd.GeoDataFrame based on the provided city graph.
+
+    Isochrones represent areas that can be reached from a given point within a specific time or distance,
+    using a graph that contains road and transport network data.
 
     Parameters
     ----------
-    points :
-
-
-
-
-
-
-
-
-
-
-        The CRS (Coordinate Reference System) for the city.
+    points : gpd.GeoDataFrame
+        A GeoDataFrame containing the geometry from which accessibility isochrones should be calculated.
+        The CRS of this GeoDataFrame must match the CRS of the provided graph.
+    weight_value : float
+        The maximum distance or time threshold for calculating isochrones.
+    weight_type : Literal["time_min", "length_meter"]
+        The type of weight to use for distance calculations. Either time in minutes ("time_min") or distance
+        in meters ("length_meter").
+    graph_nx : nx.Graph
+        A NetworkX graph representing the city network.
+        The graph must contain the appropriate CRS and, for time-based isochrones, a speed attribute.
 
     Returns
     -------
     tuple[gpd.GeoDataFrame, gpd.GeoDataFrame | None, gpd.GeoDataFrame | None]
-        A tuple containing
+        A tuple containing:
+        - isochrones : GeoDataFrame with the calculated isochrone geometries.
+        - public transport stops (if applicable) : GeoDataFrame with public transport stops within the isochrone, or None if not applicable.
+        - public transport routes (if applicable) : GeoDataFrame with public transport routes within the isochrone, or None if not applicable.
 
     Examples
    --------
-    >>>
-    >>>
-    >>>
-    >>>
-
-    >>> # Create a sample city graph or download it from osm with get_intermodal_graph_from_osm()
-    >>> city_graph = nx.MultiDiGraph()
-
-    >>> # Define parameters
-    >>> graph_type = [GraphType.PUBLIC_TRANSPORT, GraphType.WALK]
-    >>> points = gpd.GeoSeries([Point(0, 0)])
-    >>> weight_value = 15
-    >>> weight_type = "time_min"
-    >>> city_crs = 4326 # Should be the same with CRS of the city graph
-
-    >>> # Calculate isochrones
-    >>> isochrones, routes, stops = get_accessibility_isochrones(
-    ...     graph_type, points, weight_value, weight_type, city_graph, city_crs
-    ... )
-
-    >>> print(isochrones)
-    >>> print(routes)
-    >>> print(stops)
+    >>> from iduedu import get_intermodal_graph
+    >>> graph = get_intermodal_graph(polygon=my_territory_polygon)
+    >>> points = gpd.GeoDataFrame(geometry=[Point(30.33, 59.95)], crs=4326).to_crs(graph.graph['crs'])
+    >>> isochrones, pt_stops, pt_routes = get_accessibility_isochrones(points, weight_value=15, weight_type="time_min", graph_nx=my_graph)
+
     """
-
-
-
+
+    assert points.crs == CRS.from_epsg(
+        graph_nx.graph["crs"]
+    ), f'CRS mismatch , points.crs = {points.crs.to_epsg()}, graph["crs"] = {graph_nx.graph["crs"]}'
+
+    nodes_with_data = list(graph_nx.nodes(data=True))
+    logger.info("Calculating isochrones distances...")
+    coordinates = np.array([(data["x"], data["y"]) for node, data in nodes_with_data])
+    tree = KDTree(coordinates)
+
+    target_coord = [(p.x, p.y) for p in points.representative_point()]
+    distances, indices = tree.query(target_coord)
+
+    nearest_nodes = [nodes_with_data[idx][0] for idx in indices]
+    del nodes_with_data
+    dist_nearest = pd.DataFrame(data=distances, index=nearest_nodes, columns=["dist"])
+    speed = 0
+    if graph_nx.graph["type"] in ["walk", "intermodal"] and weight_type == "time_min":
+        try:
+            speed = graph_nx.graph["walk_speed"]
+        except KeyError:
+            logger.warning("There is no walk_speed in graph, set to the default speed - 83.33 m/min")
+            speed = 83.33
+        dist_nearest = dist_nearest / speed
+    elif weight_type == "time_min":
+        speed = 20 * 1000 / 60
+        dist_nearest = dist_nearest / speed
+
+    if (dist_nearest > weight_value).all().all():
+        raise RuntimeError(
+            "The point(s) lie further from the graph than weight_value, it's impossible to "
+            "construct isochrones. Check the coordinates of the point(s)/their projection"
+        )
+
+    data = []
+    for source in nearest_nodes:
+        dist, path = nx.single_source_dijkstra(graph_nx, source, weight=weight_type, cutoff=weight_value)
+        for target_node, way in path.items():
+            source = way[0]
+            distance = dist.get(target_node, np.nan)
+            data.append((source, target_node, distance))
+    del dist
+    dist_matrix = pd.DataFrame(data, columns=["source", "destination", "distance"])
+    del data
+    dist_matrix = dist_matrix.pivot_table(index="source", columns="destination", values="distance", sort=False)
+
+    dist_matrix = dist_matrix.add(dist_nearest.dist, axis=0)
+    dist_matrix = dist_matrix.mask(dist_matrix >= weight_value, np.nan)
+    dist_matrix.dropna(how="all", inplace=True)
+
+    results = []
+    logger.info("Building isochrones geometry...")
+    for _, row in dist_matrix.iterrows():
+        geometry = []
+        for node_to, value in row.items():
+            if not pd.isna(value):
+                node = graph_nx.nodes[node_to]
+                point = Point(node["x"], node["y"])
+                geometry.append(
+                    point.buffer(round((weight_value - value) * speed * 0.8, 2))
+                    if weight_type == "time_min"
+                    else point.buffer(round((weight_value - value) * 0.8, 2))
+                )
+        geometry = unary_union(geometry)
+        results.append(geometry)
+
+    isochrones = gpd.GeoDataFrame(data=points, geometry=results, crs=graph_nx.graph["crs"])
+    isochrones["weight_type"] = weight_type
+    isochrones["weight_value"] = weight_value
+
+    isochrones_subgraph = graph_nx.subgraph(dist_matrix.columns)
+    nodes = pd.DataFrame.from_dict(dict(isochrones_subgraph.nodes(data=True)), orient="index")
+    if "desc" in nodes.columns and "stop" in nodes["desc"].unique():
+        pt_nodes = nodes[nodes["desc"] == "stop"]
+        nodes, edges = graph_to_gdfs(isochrones_subgraph.subgraph(pt_nodes.index))
+        nodes.reset_index(drop=True, inplace=True)
+        nodes = nodes[["desc", "route", "geometry"]]
+        edges.reset_index(drop=True, inplace=True)
+        edges = edges[["type", "route", "geometry"]]
+        return isochrones, nodes, edges
+    return isochrones, None, None
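In the rewritten function, every source point is snapped to its nearest graph node with a KDTree, Dijkstra with a cutoff collects all nodes reachable within `weight_value`, and the isochrone geometry is the union of buffers around those nodes, where each buffer radius is the travel budget still left after reaching the node, damped by a 0.8 factor. A small worked sketch of that conversion with illustrative numbers; 83.33 m/min is the fallback walking speed named in the warning above:

```python
# Illustrative values only: mirrors the buffer-radius expression in the new isochrones code.
weight_value = 15    # minutes allowed in total
value = 10           # minutes already spent reaching this node
speed = 83.33        # fallback walking speed in m/min (~5 km/h)

radius = round((weight_value - value) * speed * 0.8, 2)
print(radius)  # 333.32 metres of remaining reach around this node
```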
objectnat/methods/living_buildings_osm.py
CHANGED

@@ -1,73 +1,15 @@
 import geopandas as gpd
-import
+import numpy as np
 import osmnx as ox
 import pandas as pd
-import
-from loguru import logger
+from iduedu import get_boundary
 from shapely import MultiPolygon, Polygon
 
-from
-
-DEFAULT_OVERPASS_URL = "http://overpass-api.de/api/interpreter"
-
-
-def get_terr_polygon_osm_name(territory_name: str) -> Polygon | MultiPolygon:
-    """
-    Retrieve the polygon geometry of a specified territory using its name from OpenStreetMap.
-
-    Parameters
-    ----------
-    territory_name : str
-        The name of the territory to retrieve the polygon for.
-
-    Returns
-    -------
-    gpd.GeoDataFrame
-        A GeoDataFrame containing the polygon geometry of the specified territory.
-
-    Examples
-    --------
-    >>> territory_name = "Saint-Petersburg, Russia"
-    >>> polygon = get_terr_polygon_osm_name(territory_name)
-    """
-    logger.info(f"Retrieving polygon geometry for '{territory_name}'")
-    place = ox.geocode_to_gdf(territory_name)
-    polygon = place.geometry.values[0]
-    return polygon.unary_union
+from objectnat import config
 
+from ..utils import get_utm_crs_for_4326_gdf
 
-
-    """
-    Retrieve the polygon geometry of a specified territory using its OSM ID from OpenStreetMap.
-
-    Parameters
-    ----------
-    osm_id : int
-        The OpenStreetMap ID of the territory to retrieve the polygon for.
-
-    Returns
-    -------
-    Polygon | MultiPolygon
-        A Polygon or MultiPolygon geometry of the specified territory.
-
-    Examples
-    --------
-    >>> osm_id = 421007
-    >>> polygon = get_terr_polygon_osm_id(osm_id)
-    """
-    overpass_query = f"""
-    [out:json];
-    (
-    relation({osm_id});
-    );
-    out geom;
-    """
-    logger.info(f"Retrieving polygon geometry for osm id '{osm_id}'")
-    result = requests.get(DEFAULT_OVERPASS_URL, params={"data": overpass_query}, timeout=500)
-    json_result = result.json()
-    json_result = osm2geojson.json2geojson(json_result)
-    json_result = gpd.GeoDataFrame.from_features(json_result["features"]).set_crs(4326)
-    return json_result.geometry.unary_union
+logger = config.logger
 
 
 def eval_is_living(row: gpd.GeoSeries):

@@ -143,16 +85,20 @@ def eval_population(source: gpd.GeoDataFrame, population_column: str, area_per_p
     df["building:levels"] = pd.to_numeric(df["building:levels"], errors="coerce")
     df = df.dropna(subset=["building:levels"])
     df["building:levels"] = df["building:levels"].astype(int)
-    df[population_column] =
-    df.loc[df["is_living"] == 1, population_column] =
-
-
-
-
-
+    df[population_column] = np.nan
+    df.loc[df["is_living"] == 1, population_column] = (
+        df[df["is_living"] == 1]
+        .apply(
+            lambda row: (
+                3
+                if ((row["area"] <= 400) & (row["building:levels"] <= 2))
+                else (row["building:levels"] * row["area"] * 0.8 / area_per_person)
+            ),
+            axis=1,
+        )
+        .round(0)
+        .astype(int)
     )
-    df[population_column] = df[population_column].fillna(0).round(0).astype(int)
     return df
 
 

@@ -191,26 +137,10 @@ def download_buildings(
     >>> buildings_df = download_buildings(osm_territory_name="Saint-Petersburg, Russia")
     >>> buildings_df.head()
     """
-
-        polygon = get_terr_polygon_osm_id(osm_territory_id)
-        return download_buildings(
-            terr_polygon=polygon,
-            area_per_person=area_per_person,
-            is_living_column=is_living_column,
-            population_column=population_column,
-        )
-
-    if osm_territory_name is not None:
-        polygon = get_terr_polygon_osm_name(osm_territory_name)
-        return download_buildings(
-            terr_polygon=polygon,
-            area_per_person=area_per_person,
-            is_living_column=is_living_column,
-            population_column=population_column,
-        )
+    polygon = get_boundary(osm_territory_id, osm_territory_name, terr_polygon)
 
-    logger.
-    buildings = ox.features_from_polygon(
+    logger.debug("Downloading buildings from OpenStreetMap and counting population...")
+    buildings = ox.features_from_polygon(polygon, tags={"building": True})
     if not buildings.empty:
         buildings = buildings.loc[
             (buildings["geometry"].geom_type == "Polygon") | (buildings["geometry"].geom_type == "MultiPolygon")

@@ -222,7 +152,7 @@ def download_buildings(
     buildings[is_living_column] = buildings.apply(eval_is_living, axis=1)
     buildings = eval_population(buildings, population_column, area_per_person)
     buildings.reset_index(drop=True, inplace=True)
-    logger.
+    logger.debug("Done!")
     return buildings[
         [
             "building",

@@ -234,9 +164,9 @@ def download_buildings(
             "building:levels",
             "leisure",
             "design:year",
-
+            is_living_column,
             "building:levels_is_real",
-
+            population_column,
             "geometry",
         ]
     ]
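The population estimate for living buildings now follows a single heuristic: small houses get a fixed occupancy, everything else scales with floor count and footprint. A worked sketch with illustrative numbers; the `area_per_person` value is an assumption here, it is a parameter of `download_buildings`:

```python
# Illustrative values only: mirrors the lambda applied in eval_population above.
area = 1200.0          # building footprint in m^2
levels = 5             # "building:levels"
area_per_person = 33   # assumed square metres per resident

if area <= 400 and levels <= 2:
    population = 3                                          # small detached house
else:
    population = round(levels * area * 0.8 / area_per_person)

print(population)  # 145 for these inputs
```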