ObjectNat 1.2.1__py3-none-any.whl → 1.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of ObjectNat might be problematic. Click here for more details.
- objectnat/_api.py +14 -14
- objectnat/_config.py +47 -47
- objectnat/_version.py +1 -1
- objectnat/methods/coverage_zones/__init__.py +3 -3
- objectnat/methods/isochrones/__init__.py +1 -1
- objectnat/methods/isochrones/isochrone_utils.py +167 -167
- objectnat/methods/noise/__init__.py +3 -3
- objectnat/methods/noise/noise_init_data.py +10 -10
- objectnat/methods/noise/noise_reduce.py +155 -155
- objectnat/methods/point_clustering/__init__.py +1 -1
- objectnat/methods/provision/__init__.py +1 -1
- objectnat/methods/provision/provision.py +4 -0
- objectnat/methods/provision/provision_exceptions.py +59 -59
- objectnat/methods/provision/provision_model.py +348 -337
- objectnat/methods/utils/__init__.py +1 -1
- objectnat/methods/utils/geom_utils.py +173 -173
- objectnat/methods/utils/math_utils.py +32 -32
- objectnat/methods/visibility/__init__.py +6 -6
- {objectnat-1.2.1.dist-info → objectnat-1.2.2.dist-info}/METADATA +9 -8
- objectnat-1.2.2.dist-info/RECORD +33 -0
- {objectnat-1.2.1.dist-info → objectnat-1.2.2.dist-info}/WHEEL +1 -1
- objectnat-1.2.1.dist-info/RECORD +0 -33
- {objectnat-1.2.1.dist-info → objectnat-1.2.2.dist-info/licenses}/LICENSE.txt +0 -0
objectnat/_api.py
CHANGED
|
@@ -1,14 +1,14 @@
|
|
|
1
|
-
# pylint: disable=unused-import,wildcard-import,unused-wildcard-import
|
|
2
|
-
|
|
3
|
-
from .methods.coverage_zones import get_graph_coverage, get_radius_coverage, get_stepped_graph_coverage
|
|
4
|
-
from .methods.isochrones import get_accessibility_isochrone_stepped, get_accessibility_isochrones
|
|
5
|
-
from .methods.noise import calculate_simplified_noise_frame, simulate_noise
|
|
6
|
-
from .methods.point_clustering import get_clusters_polygon
|
|
7
|
-
from .methods.provision import clip_provision, get_service_provision, recalculate_links
|
|
8
|
-
from .methods.utils import gdf_to_graph, graph_to_gdf
|
|
9
|
-
from .methods.visibility import (
|
|
10
|
-
calculate_visibility_catchment_area,
|
|
11
|
-
get_visibilities_from_points,
|
|
12
|
-
get_visibility,
|
|
13
|
-
get_visibility_accurate,
|
|
14
|
-
)
|
|
1
|
+
# pylint: disable=unused-import,wildcard-import,unused-wildcard-import
|
|
2
|
+
|
|
3
|
+
from .methods.coverage_zones import get_graph_coverage, get_radius_coverage, get_stepped_graph_coverage
|
|
4
|
+
from .methods.isochrones import get_accessibility_isochrone_stepped, get_accessibility_isochrones
|
|
5
|
+
from .methods.noise import calculate_simplified_noise_frame, simulate_noise
|
|
6
|
+
from .methods.point_clustering import get_clusters_polygon
|
|
7
|
+
from .methods.provision import clip_provision, get_service_provision, recalculate_links
|
|
8
|
+
from .methods.utils import gdf_to_graph, graph_to_gdf
|
|
9
|
+
from .methods.visibility import (
|
|
10
|
+
calculate_visibility_catchment_area,
|
|
11
|
+
get_visibilities_from_points,
|
|
12
|
+
get_visibility,
|
|
13
|
+
get_visibility_accurate,
|
|
14
|
+
)
|
objectnat/_config.py
CHANGED
|
@@ -1,47 +1,47 @@
|
|
|
1
|
-
import sys
|
|
2
|
-
from typing import Literal
|
|
3
|
-
|
|
4
|
-
from loguru import logger
|
|
5
|
-
|
|
6
|
-
|
|
7
|
-
class Config:
|
|
8
|
-
"""
|
|
9
|
-
A configuration class to manage global settings for the application, such as Overpass API URL,
|
|
10
|
-
timeouts, and logging options.
|
|
11
|
-
|
|
12
|
-
Attributes
|
|
13
|
-
----------
|
|
14
|
-
enable_tqdm_bar : bool
|
|
15
|
-
Enables or disables progress bars (via tqdm). Defaults to True.
|
|
16
|
-
logger : Logger
|
|
17
|
-
Logging instance to handle application logging.
|
|
18
|
-
|
|
19
|
-
Methods
|
|
20
|
-
-------
|
|
21
|
-
change_logger_lvl(lvl: Literal["TRACE", "DEBUG", "INFO", "WARN", "ERROR"])
|
|
22
|
-
Changes the logging level to the specified value.
|
|
23
|
-
set_enable_tqdm(enable: bool)
|
|
24
|
-
Enables or disables progress bars in the application.
|
|
25
|
-
"""
|
|
26
|
-
|
|
27
|
-
def __init__(
|
|
28
|
-
self,
|
|
29
|
-
enable_tqdm_bar=True,
|
|
30
|
-
):
|
|
31
|
-
self.enable_tqdm_bar = enable_tqdm_bar
|
|
32
|
-
self.logger = logger
|
|
33
|
-
self.pandarallel_use_file_system = False
|
|
34
|
-
|
|
35
|
-
def change_logger_lvl(self, lvl: Literal["TRACE", "DEBUG", "INFO", "WARN", "ERROR"]):
|
|
36
|
-
self.logger.remove()
|
|
37
|
-
self.logger.add(sys.stderr, level=lvl)
|
|
38
|
-
|
|
39
|
-
def set_enable_tqdm(self, enable: bool):
|
|
40
|
-
self.enable_tqdm_bar = enable
|
|
41
|
-
|
|
42
|
-
def set_pandarallel_use_file_system(self, enable: bool):
|
|
43
|
-
self.pandarallel_use_file_system = enable
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
config = Config()
|
|
47
|
-
config.change_logger_lvl("INFO")
|
|
1
|
+
import sys
|
|
2
|
+
from typing import Literal
|
|
3
|
+
|
|
4
|
+
from loguru import logger
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class Config:
|
|
8
|
+
"""
|
|
9
|
+
A configuration class to manage global settings for the application, such as Overpass API URL,
|
|
10
|
+
timeouts, and logging options.
|
|
11
|
+
|
|
12
|
+
Attributes
|
|
13
|
+
----------
|
|
14
|
+
enable_tqdm_bar : bool
|
|
15
|
+
Enables or disables progress bars (via tqdm). Defaults to True.
|
|
16
|
+
logger : Logger
|
|
17
|
+
Logging instance to handle application logging.
|
|
18
|
+
|
|
19
|
+
Methods
|
|
20
|
+
-------
|
|
21
|
+
change_logger_lvl(lvl: Literal["TRACE", "DEBUG", "INFO", "WARN", "ERROR"])
|
|
22
|
+
Changes the logging level to the specified value.
|
|
23
|
+
set_enable_tqdm(enable: bool)
|
|
24
|
+
Enables or disables progress bars in the application.
|
|
25
|
+
"""
|
|
26
|
+
|
|
27
|
+
def __init__(
|
|
28
|
+
self,
|
|
29
|
+
enable_tqdm_bar=True,
|
|
30
|
+
):
|
|
31
|
+
self.enable_tqdm_bar = enable_tqdm_bar
|
|
32
|
+
self.logger = logger
|
|
33
|
+
self.pandarallel_use_file_system = False
|
|
34
|
+
|
|
35
|
+
def change_logger_lvl(self, lvl: Literal["TRACE", "DEBUG", "INFO", "WARN", "ERROR"]):
|
|
36
|
+
self.logger.remove()
|
|
37
|
+
self.logger.add(sys.stderr, level=lvl)
|
|
38
|
+
|
|
39
|
+
def set_enable_tqdm(self, enable: bool):
|
|
40
|
+
self.enable_tqdm_bar = enable
|
|
41
|
+
|
|
42
|
+
def set_pandarallel_use_file_system(self, enable: bool):
|
|
43
|
+
self.pandarallel_use_file_system = enable
|
|
44
|
+
|
|
45
|
+
|
|
46
|
+
config = Config()
|
|
47
|
+
config.change_logger_lvl("INFO")
|
objectnat/_version.py
CHANGED
|
@@ -1 +1 @@
|
|
|
1
|
-
VERSION = "1.2.1"
|
|
1
|
+
VERSION = "1.2.2"
|
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
from .graph_coverage import get_graph_coverage
|
|
2
|
-
from .radius_voronoi_coverage import get_radius_coverage
|
|
3
|
-
from .stepped_coverage import get_stepped_graph_coverage
|
|
1
|
+
from .graph_coverage import get_graph_coverage
|
|
2
|
+
from .radius_voronoi_coverage import get_radius_coverage
|
|
3
|
+
from .stepped_coverage import get_stepped_graph_coverage
|
|
@@ -1 +1 @@
|
|
|
1
|
-
from .isochrones import get_accessibility_isochrones, get_accessibility_isochrone_stepped
|
|
1
|
+
from .isochrones import get_accessibility_isochrones, get_accessibility_isochrone_stepped
|
|
@@ -1,167 +1,167 @@
|
|
|
1
|
-
from typing import Literal
|
|
2
|
-
|
|
3
|
-
import geopandas as gpd
|
|
4
|
-
import networkx as nx
|
|
5
|
-
import numpy as np
|
|
6
|
-
import pandas as pd
|
|
7
|
-
from pyproj.exceptions import CRSError
|
|
8
|
-
from shapely.ops import polygonize
|
|
9
|
-
|
|
10
|
-
from objectnat import config
|
|
11
|
-
from objectnat.methods.utils.geom_utils import polygons_to_multilinestring
|
|
12
|
-
from objectnat.methods.utils.graph_utils import get_closest_nodes_from_gdf, remove_weakly_connected_nodes
|
|
13
|
-
|
|
14
|
-
logger = config.logger
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
def _validate_inputs(
|
|
18
|
-
points: gpd.GeoDataFrame, weight_value: float, weight_type: Literal["time_min", "length_meter"], nx_graph: nx.Graph
|
|
19
|
-
) -> tuple[str, str]:
|
|
20
|
-
"""Validate common inputs for accessibility functions."""
|
|
21
|
-
if weight_value <= 0:
|
|
22
|
-
raise ValueError("Weight value must be greater than 0")
|
|
23
|
-
if weight_type not in ["time_min", "length_meter"]:
|
|
24
|
-
raise UserWarning("Weight type should be either 'time_min' or 'length_meter'")
|
|
25
|
-
|
|
26
|
-
try:
|
|
27
|
-
local_crs = nx_graph.graph["crs"]
|
|
28
|
-
except KeyError as exc:
|
|
29
|
-
raise ValueError("Graph does not have crs attribute") from exc
|
|
30
|
-
try:
|
|
31
|
-
graph_type = nx_graph.graph["type"]
|
|
32
|
-
except KeyError as exc:
|
|
33
|
-
raise ValueError("Graph does not have type attribute") from exc
|
|
34
|
-
|
|
35
|
-
try:
|
|
36
|
-
points.to_crs(local_crs, inplace=True)
|
|
37
|
-
except CRSError as e:
|
|
38
|
-
raise CRSError(f"Graph crs ({local_crs}) has invalid format.") from e
|
|
39
|
-
|
|
40
|
-
return local_crs, graph_type
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
def _prepare_graph_and_nodes(
|
|
44
|
-
points: gpd.GeoDataFrame, nx_graph: nx.Graph, graph_type: str, weight_type: str, weight_value: float
|
|
45
|
-
) -> tuple[nx.Graph, gpd.GeoDataFrame, pd.DataFrame, float]:
|
|
46
|
-
"""Prepare graph and calculate nearest nodes with distances."""
|
|
47
|
-
nx_graph = remove_weakly_connected_nodes(nx_graph)
|
|
48
|
-
distances, nearest_nodes = get_closest_nodes_from_gdf(points, nx_graph)
|
|
49
|
-
points["nearest_node"] = nearest_nodes
|
|
50
|
-
|
|
51
|
-
dist_nearest = pd.DataFrame(data=distances, index=nearest_nodes, columns=["dist"]).drop_duplicates()
|
|
52
|
-
|
|
53
|
-
# Calculate speed adjustment if needed
|
|
54
|
-
speed = 0
|
|
55
|
-
if graph_type in ["walk", "intermodal"] and weight_type == "time_min":
|
|
56
|
-
try:
|
|
57
|
-
speed = nx_graph.graph["walk_speed"]
|
|
58
|
-
except KeyError:
|
|
59
|
-
logger.warning("There is no walk_speed in graph, set to the default speed - 83.33 m/min")
|
|
60
|
-
speed = 83.33
|
|
61
|
-
dist_nearest = dist_nearest / speed
|
|
62
|
-
elif weight_type == "time_min":
|
|
63
|
-
speed = 20 * 1000 / 60
|
|
64
|
-
dist_nearest = dist_nearest / speed
|
|
65
|
-
|
|
66
|
-
if (dist_nearest > weight_value).all().all():
|
|
67
|
-
raise RuntimeError(
|
|
68
|
-
"The point(s) lie further from the graph than weight_value, it's impossible to "
|
|
69
|
-
"construct isochrones. Check the coordinates of the point(s)/their projection"
|
|
70
|
-
)
|
|
71
|
-
|
|
72
|
-
return nx_graph, points, dist_nearest, speed
|
|
73
|
-
|
|
74
|
-
|
|
75
|
-
def _process_pt_data(
|
|
76
|
-
nodes: gpd.GeoDataFrame, edges: gpd.GeoDataFrame, graph_type: str
|
|
77
|
-
) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame] | tuple[None, None]:
|
|
78
|
-
"""Process public transport data if available."""
|
|
79
|
-
if "type" in nodes.columns and "platform" in nodes["type"].unique():
|
|
80
|
-
pt_nodes = nodes[(nodes["type"] != "platform") & (~nodes["type"].isna())]
|
|
81
|
-
if graph_type == "intermodal":
|
|
82
|
-
edges = edges[~edges["type"].isin(["walk", "boarding"])]
|
|
83
|
-
pt_nodes = pt_nodes[["type", "route", "geometry"]]
|
|
84
|
-
edges = edges[["type", "route", "geometry"]]
|
|
85
|
-
return pt_nodes, edges
|
|
86
|
-
return None, None
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
def _calculate_distance_matrix(
|
|
90
|
-
nx_graph: nx.Graph,
|
|
91
|
-
nearest_nodes: np.ndarray,
|
|
92
|
-
weight_type: str,
|
|
93
|
-
weight_value: float,
|
|
94
|
-
dist_nearest: pd.DataFrame,
|
|
95
|
-
) -> tuple[pd.DataFrame, nx.Graph]:
|
|
96
|
-
"""Calculate distance matrix from nearest nodes."""
|
|
97
|
-
|
|
98
|
-
data = {}
|
|
99
|
-
for source in nearest_nodes:
|
|
100
|
-
dist = nx.single_source_dijkstra_path_length(nx_graph, source, weight=weight_type, cutoff=weight_value)
|
|
101
|
-
data.update({source: dist})
|
|
102
|
-
|
|
103
|
-
dist_matrix = pd.DataFrame.from_dict(data, orient="index")
|
|
104
|
-
dist_matrix = dist_matrix.add(dist_nearest.dist, axis=0)
|
|
105
|
-
dist_matrix = dist_matrix.mask(dist_matrix > weight_value, np.nan)
|
|
106
|
-
dist_matrix.dropna(how="all", inplace=True)
|
|
107
|
-
dist_matrix.dropna(how="all", axis=1, inplace=True)
|
|
108
|
-
|
|
109
|
-
subgraph = nx_graph.subgraph(dist_matrix.columns.to_list())
|
|
110
|
-
|
|
111
|
-
return dist_matrix, subgraph
|
|
112
|
-
|
|
113
|
-
|
|
114
|
-
def _create_isochrones_gdf(
|
|
115
|
-
points: gpd.GeoDataFrame,
|
|
116
|
-
results: list,
|
|
117
|
-
dist_matrix: pd.DataFrame,
|
|
118
|
-
local_crs: str,
|
|
119
|
-
weight_type: str,
|
|
120
|
-
weight_value: float,
|
|
121
|
-
) -> gpd.GeoDataFrame:
|
|
122
|
-
"""Create final isochrones GeoDataFrame."""
|
|
123
|
-
isochrones = gpd.GeoDataFrame(geometry=results, index=dist_matrix.index, crs=local_crs)
|
|
124
|
-
isochrones = (
|
|
125
|
-
points.drop(columns="geometry")
|
|
126
|
-
.merge(isochrones, left_on="nearest_node", right_index=True, how="left")
|
|
127
|
-
.drop(columns="nearest_node")
|
|
128
|
-
)
|
|
129
|
-
isochrones = gpd.GeoDataFrame(isochrones, geometry="geometry", crs=local_crs)
|
|
130
|
-
isochrones["weight_type"] = weight_type
|
|
131
|
-
isochrones["weight_value"] = weight_value
|
|
132
|
-
return isochrones
|
|
133
|
-
|
|
134
|
-
|
|
135
|
-
def create_separated_dist_polygons(
|
|
136
|
-
points: gpd.GeoDataFrame, weight_value, weight_type, step, speed
|
|
137
|
-
) -> gpd.GeoDataFrame:
|
|
138
|
-
points["dist"] = points["dist"].clip(lower=0.1)
|
|
139
|
-
steps = np.arange(0, weight_value + step, step)
|
|
140
|
-
if steps[-1] > weight_value:
|
|
141
|
-
steps[-1] = weight_value # Ensure last step doesn't exceed weight_value
|
|
142
|
-
for i in range(len(steps) - 1):
|
|
143
|
-
min_dist = steps[i]
|
|
144
|
-
max_dist = steps[i + 1]
|
|
145
|
-
nodes_in_step = points["dist"].between(min_dist, max_dist, inclusive="left")
|
|
146
|
-
nodes_in_step = nodes_in_step[nodes_in_step].index
|
|
147
|
-
if not nodes_in_step.empty:
|
|
148
|
-
buffer_size = (max_dist - points.loc[nodes_in_step, "dist"]) * 0.7
|
|
149
|
-
if weight_type == "time_min":
|
|
150
|
-
buffer_size = buffer_size * speed
|
|
151
|
-
points.loc[nodes_in_step, "buffer_size"] = buffer_size
|
|
152
|
-
points.geometry = points.geometry.buffer(points["buffer_size"])
|
|
153
|
-
points["dist"] = np.minimum(np.ceil(points["dist"] / step) * step, weight_value)
|
|
154
|
-
points = points.dissolve(by="dist", as_index=False)
|
|
155
|
-
polygons = gpd.GeoDataFrame(
|
|
156
|
-
geometry=list(polygonize(points.geometry.apply(polygons_to_multilinestring).union_all())),
|
|
157
|
-
crs=points.crs,
|
|
158
|
-
)
|
|
159
|
-
polygons_points = polygons.copy()
|
|
160
|
-
polygons_points.geometry = polygons.representative_point()
|
|
161
|
-
stepped_polygons = polygons_points.sjoin(points, predicate="within").reset_index()
|
|
162
|
-
stepped_polygons = stepped_polygons.groupby("index").agg({"dist": "mean"})
|
|
163
|
-
stepped_polygons["dist"] = np.minimum(np.floor(stepped_polygons["dist"] / step) * step, weight_value)
|
|
164
|
-
stepped_polygons["geometry"] = polygons
|
|
165
|
-
stepped_polygons = gpd.GeoDataFrame(stepped_polygons, geometry="geometry", crs=points.crs).reset_index(drop=True)
|
|
166
|
-
stepped_polygons = stepped_polygons.dissolve(by="dist", as_index=False).explode(ignore_index=True)
|
|
167
|
-
return stepped_polygons
|
|
1
|
+
from typing import Literal
|
|
2
|
+
|
|
3
|
+
import geopandas as gpd
|
|
4
|
+
import networkx as nx
|
|
5
|
+
import numpy as np
|
|
6
|
+
import pandas as pd
|
|
7
|
+
from pyproj.exceptions import CRSError
|
|
8
|
+
from shapely.ops import polygonize
|
|
9
|
+
|
|
10
|
+
from objectnat import config
|
|
11
|
+
from objectnat.methods.utils.geom_utils import polygons_to_multilinestring
|
|
12
|
+
from objectnat.methods.utils.graph_utils import get_closest_nodes_from_gdf, remove_weakly_connected_nodes
|
|
13
|
+
|
|
14
|
+
logger = config.logger
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
def _validate_inputs(
|
|
18
|
+
points: gpd.GeoDataFrame, weight_value: float, weight_type: Literal["time_min", "length_meter"], nx_graph: nx.Graph
|
|
19
|
+
) -> tuple[str, str]:
|
|
20
|
+
"""Validate common inputs for accessibility functions."""
|
|
21
|
+
if weight_value <= 0:
|
|
22
|
+
raise ValueError("Weight value must be greater than 0")
|
|
23
|
+
if weight_type not in ["time_min", "length_meter"]:
|
|
24
|
+
raise UserWarning("Weight type should be either 'time_min' or 'length_meter'")
|
|
25
|
+
|
|
26
|
+
try:
|
|
27
|
+
local_crs = nx_graph.graph["crs"]
|
|
28
|
+
except KeyError as exc:
|
|
29
|
+
raise ValueError("Graph does not have crs attribute") from exc
|
|
30
|
+
try:
|
|
31
|
+
graph_type = nx_graph.graph["type"]
|
|
32
|
+
except KeyError as exc:
|
|
33
|
+
raise ValueError("Graph does not have type attribute") from exc
|
|
34
|
+
|
|
35
|
+
try:
|
|
36
|
+
points.to_crs(local_crs, inplace=True)
|
|
37
|
+
except CRSError as e:
|
|
38
|
+
raise CRSError(f"Graph crs ({local_crs}) has invalid format.") from e
|
|
39
|
+
|
|
40
|
+
return local_crs, graph_type
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def _prepare_graph_and_nodes(
|
|
44
|
+
points: gpd.GeoDataFrame, nx_graph: nx.Graph, graph_type: str, weight_type: str, weight_value: float
|
|
45
|
+
) -> tuple[nx.Graph, gpd.GeoDataFrame, pd.DataFrame, float]:
|
|
46
|
+
"""Prepare graph and calculate nearest nodes with distances."""
|
|
47
|
+
nx_graph = remove_weakly_connected_nodes(nx_graph)
|
|
48
|
+
distances, nearest_nodes = get_closest_nodes_from_gdf(points, nx_graph)
|
|
49
|
+
points["nearest_node"] = nearest_nodes
|
|
50
|
+
|
|
51
|
+
dist_nearest = pd.DataFrame(data=distances, index=nearest_nodes, columns=["dist"]).drop_duplicates()
|
|
52
|
+
|
|
53
|
+
# Calculate speed adjustment if needed
|
|
54
|
+
speed = 0
|
|
55
|
+
if graph_type in ["walk", "intermodal"] and weight_type == "time_min":
|
|
56
|
+
try:
|
|
57
|
+
speed = nx_graph.graph["walk_speed"]
|
|
58
|
+
except KeyError:
|
|
59
|
+
logger.warning("There is no walk_speed in graph, set to the default speed - 83.33 m/min")
|
|
60
|
+
speed = 83.33
|
|
61
|
+
dist_nearest = dist_nearest / speed
|
|
62
|
+
elif weight_type == "time_min":
|
|
63
|
+
speed = 20 * 1000 / 60
|
|
64
|
+
dist_nearest = dist_nearest / speed
|
|
65
|
+
|
|
66
|
+
if (dist_nearest > weight_value).all().all():
|
|
67
|
+
raise RuntimeError(
|
|
68
|
+
"The point(s) lie further from the graph than weight_value, it's impossible to "
|
|
69
|
+
"construct isochrones. Check the coordinates of the point(s)/their projection"
|
|
70
|
+
)
|
|
71
|
+
|
|
72
|
+
return nx_graph, points, dist_nearest, speed
|
|
73
|
+
|
|
74
|
+
|
|
75
|
+
def _process_pt_data(
|
|
76
|
+
nodes: gpd.GeoDataFrame, edges: gpd.GeoDataFrame, graph_type: str
|
|
77
|
+
) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame] | tuple[None, None]:
|
|
78
|
+
"""Process public transport data if available."""
|
|
79
|
+
if "type" in nodes.columns and "platform" in nodes["type"].unique():
|
|
80
|
+
pt_nodes = nodes[(nodes["type"] != "platform") & (~nodes["type"].isna())]
|
|
81
|
+
if graph_type == "intermodal":
|
|
82
|
+
edges = edges[~edges["type"].isin(["walk", "boarding"])]
|
|
83
|
+
pt_nodes = pt_nodes[["type", "route", "geometry"]]
|
|
84
|
+
edges = edges[["type", "route", "geometry"]]
|
|
85
|
+
return pt_nodes, edges
|
|
86
|
+
return None, None
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def _calculate_distance_matrix(
|
|
90
|
+
nx_graph: nx.Graph,
|
|
91
|
+
nearest_nodes: np.ndarray,
|
|
92
|
+
weight_type: str,
|
|
93
|
+
weight_value: float,
|
|
94
|
+
dist_nearest: pd.DataFrame,
|
|
95
|
+
) -> tuple[pd.DataFrame, nx.Graph]:
|
|
96
|
+
"""Calculate distance matrix from nearest nodes."""
|
|
97
|
+
|
|
98
|
+
data = {}
|
|
99
|
+
for source in nearest_nodes:
|
|
100
|
+
dist = nx.single_source_dijkstra_path_length(nx_graph, source, weight=weight_type, cutoff=weight_value)
|
|
101
|
+
data.update({source: dist})
|
|
102
|
+
|
|
103
|
+
dist_matrix = pd.DataFrame.from_dict(data, orient="index")
|
|
104
|
+
dist_matrix = dist_matrix.add(dist_nearest.dist, axis=0)
|
|
105
|
+
dist_matrix = dist_matrix.mask(dist_matrix > weight_value, np.nan)
|
|
106
|
+
dist_matrix.dropna(how="all", inplace=True)
|
|
107
|
+
dist_matrix.dropna(how="all", axis=1, inplace=True)
|
|
108
|
+
|
|
109
|
+
subgraph = nx_graph.subgraph(dist_matrix.columns.to_list())
|
|
110
|
+
|
|
111
|
+
return dist_matrix, subgraph
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def _create_isochrones_gdf(
|
|
115
|
+
points: gpd.GeoDataFrame,
|
|
116
|
+
results: list,
|
|
117
|
+
dist_matrix: pd.DataFrame,
|
|
118
|
+
local_crs: str,
|
|
119
|
+
weight_type: str,
|
|
120
|
+
weight_value: float,
|
|
121
|
+
) -> gpd.GeoDataFrame:
|
|
122
|
+
"""Create final isochrones GeoDataFrame."""
|
|
123
|
+
isochrones = gpd.GeoDataFrame(geometry=results, index=dist_matrix.index, crs=local_crs)
|
|
124
|
+
isochrones = (
|
|
125
|
+
points.drop(columns="geometry")
|
|
126
|
+
.merge(isochrones, left_on="nearest_node", right_index=True, how="left")
|
|
127
|
+
.drop(columns="nearest_node")
|
|
128
|
+
)
|
|
129
|
+
isochrones = gpd.GeoDataFrame(isochrones, geometry="geometry", crs=local_crs)
|
|
130
|
+
isochrones["weight_type"] = weight_type
|
|
131
|
+
isochrones["weight_value"] = weight_value
|
|
132
|
+
return isochrones
|
|
133
|
+
|
|
134
|
+
|
|
135
|
+
def create_separated_dist_polygons(
|
|
136
|
+
points: gpd.GeoDataFrame, weight_value, weight_type, step, speed
|
|
137
|
+
) -> gpd.GeoDataFrame:
|
|
138
|
+
points["dist"] = points["dist"].clip(lower=0.1)
|
|
139
|
+
steps = np.arange(0, weight_value + step, step)
|
|
140
|
+
if steps[-1] > weight_value:
|
|
141
|
+
steps[-1] = weight_value # Ensure last step doesn't exceed weight_value
|
|
142
|
+
for i in range(len(steps) - 1):
|
|
143
|
+
min_dist = steps[i]
|
|
144
|
+
max_dist = steps[i + 1]
|
|
145
|
+
nodes_in_step = points["dist"].between(min_dist, max_dist, inclusive="left")
|
|
146
|
+
nodes_in_step = nodes_in_step[nodes_in_step].index
|
|
147
|
+
if not nodes_in_step.empty:
|
|
148
|
+
buffer_size = (max_dist - points.loc[nodes_in_step, "dist"]) * 0.7
|
|
149
|
+
if weight_type == "time_min":
|
|
150
|
+
buffer_size = buffer_size * speed
|
|
151
|
+
points.loc[nodes_in_step, "buffer_size"] = buffer_size
|
|
152
|
+
points.geometry = points.geometry.buffer(points["buffer_size"])
|
|
153
|
+
points["dist"] = np.minimum(np.ceil(points["dist"] / step) * step, weight_value)
|
|
154
|
+
points = points.dissolve(by="dist", as_index=False)
|
|
155
|
+
polygons = gpd.GeoDataFrame(
|
|
156
|
+
geometry=list(polygonize(points.geometry.apply(polygons_to_multilinestring).union_all())),
|
|
157
|
+
crs=points.crs,
|
|
158
|
+
)
|
|
159
|
+
polygons_points = polygons.copy()
|
|
160
|
+
polygons_points.geometry = polygons.representative_point()
|
|
161
|
+
stepped_polygons = polygons_points.sjoin(points, predicate="within").reset_index()
|
|
162
|
+
stepped_polygons = stepped_polygons.groupby("index").agg({"dist": "mean"})
|
|
163
|
+
stepped_polygons["dist"] = np.minimum(np.floor(stepped_polygons["dist"] / step) * step, weight_value)
|
|
164
|
+
stepped_polygons["geometry"] = polygons
|
|
165
|
+
stepped_polygons = gpd.GeoDataFrame(stepped_polygons, geometry="geometry", crs=points.crs).reset_index(drop=True)
|
|
166
|
+
stepped_polygons = stepped_polygons.dissolve(by="dist", as_index=False).explode(ignore_index=True)
|
|
167
|
+
return stepped_polygons
|
|
@@ -1,3 +1,3 @@
|
|
|
1
|
-
from .noise_simulation import simulate_noise
|
|
2
|
-
from .noise_reduce import dist_to_target_db, green_noise_reduce_db
|
|
3
|
-
from .noise_simulation_simplified import calculate_simplified_noise_frame
|
|
1
|
+
from .noise_simulation import simulate_noise
|
|
2
|
+
from .noise_reduce import dist_to_target_db, green_noise_reduce_db
|
|
3
|
+
from .noise_simulation_simplified import calculate_simplified_noise_frame
|
|
@@ -1,10 +1,10 @@
|
|
|
1
|
-
import pandas as pd
|
|
2
|
-
|
|
3
|
-
data = {
|
|
4
|
-
30: {63: 0, 125: 0.0002, 250: 0.0009, 500: 0.003, 1000: 0.0075, 2000: 0.014, 4000: 0.025, 8000: 0.064},
|
|
5
|
-
20: {63: 0, 125: 0.0003, 250: 0.0011, 500: 0.0028, 1000: 0.0052, 2000: 0.0096, 4000: 0.025, 8000: 0.083},
|
|
6
|
-
10: {63: 0, 125: 0.0004, 250: 0.001, 500: 0.002, 1000: 0.0039, 2000: 0.01, 4000: 0.035, 8000: 0.125},
|
|
7
|
-
0: {63: 0, 125: 0.0004, 250: 0.0008, 500: 0.0017, 1000: 0.0049, 2000: 0.017, 4000: 0.058, 8000: 0.156},
|
|
8
|
-
}
|
|
9
|
-
|
|
10
|
-
air_resist_ratio = pd.DataFrame(data)
|
|
1
|
+
import pandas as pd
|
|
2
|
+
|
|
3
|
+
data = {
|
|
4
|
+
30: {63: 0, 125: 0.0002, 250: 0.0009, 500: 0.003, 1000: 0.0075, 2000: 0.014, 4000: 0.025, 8000: 0.064},
|
|
5
|
+
20: {63: 0, 125: 0.0003, 250: 0.0011, 500: 0.0028, 1000: 0.0052, 2000: 0.0096, 4000: 0.025, 8000: 0.083},
|
|
6
|
+
10: {63: 0, 125: 0.0004, 250: 0.001, 500: 0.002, 1000: 0.0039, 2000: 0.01, 4000: 0.035, 8000: 0.125},
|
|
7
|
+
0: {63: 0, 125: 0.0004, 250: 0.0008, 500: 0.0017, 1000: 0.0049, 2000: 0.017, 4000: 0.058, 8000: 0.156},
|
|
8
|
+
}
|
|
9
|
+
|
|
10
|
+
air_resist_ratio = pd.DataFrame(data)
|