ObjectNat 1.1.0__tar.gz → 1.2.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of ObjectNat has been flagged as potentially problematic.
- {objectnat-1.1.0 → objectnat-1.2.0}/PKG-INFO +1 -1
- {objectnat-1.1.0 → objectnat-1.2.0}/pyproject.toml +1 -2
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/_api.py +2 -1
- objectnat-1.2.0/src/objectnat/_version.py +1 -0
- objectnat-1.2.0/src/objectnat/methods/noise/__init__.py +4 -0
- objectnat-1.1.0/src/objectnat/methods/noise/noise_sim.py → objectnat-1.2.0/src/objectnat/methods/noise/noise_simulation.py +9 -17
- objectnat-1.2.0/src/objectnat/methods/noise/noise_simulation_simplified.py +135 -0
- objectnat-1.2.0/src/objectnat/methods/utils/__init__.py +1 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/utils/geom_utils.py +45 -2
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/utils/graph_utils.py +115 -1
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/visibility/visibility_analysis.py +2 -2
- objectnat-1.1.0/src/objectnat/_version.py +0 -1
- objectnat-1.1.0/src/objectnat/methods/noise/__init__.py +0 -3
- objectnat-1.1.0/src/objectnat/methods/utils/__init__.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/LICENSE.txt +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/README.md +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/__init__.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/_config.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/__init__.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/coverage_zones/__init__.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/coverage_zones/graph_coverage.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/coverage_zones/radius_voronoi_coverage.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/coverage_zones/stepped_coverage.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/isochrones/__init__.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/isochrones/isochrone_utils.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/isochrones/isochrones.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/noise/noise_exceptions.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/noise/noise_init_data.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/noise/noise_reduce.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/point_clustering/__init__.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/point_clustering/cluster_points_in_polygons.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/provision/__init__.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/provision/provision.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/provision/provision_exceptions.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/provision/provision_model.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/utils/math_utils.py +0 -0
- {objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/visibility/__init__.py +0 -0
{objectnat-1.1.0 → objectnat-1.2.0}/pyproject.toml

@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ObjectNat"
-version = "1.1.0"
+version = "1.2.0"
 description = "ObjectNat is an open-source library created for geospatial analysis created by IDU team"
 license = "BSD-3-Clause"
 authors = ["DDonnyy <63115678+DDonnyy@users.noreply.github.com>"]

@@ -19,7 +19,6 @@ networkx = "^3.4.2"
 scikit-learn = "^1.4.0"
 loguru = "^0.7.3"
 
-
 [tool.poetry.group.dev.dependencies]
 iduedu = "^0.5.0"
 pyarrow = "^19.0.1"
{objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/_api.py

@@ -2,9 +2,10 @@
 
 from .methods.coverage_zones import get_graph_coverage, get_radius_coverage, get_stepped_graph_coverage
 from .methods.isochrones import get_accessibility_isochrone_stepped, get_accessibility_isochrones
-from .methods.noise import simulate_noise
+from .methods.noise import calculate_simplified_noise_frame, simulate_noise
 from .methods.point_clustering import get_clusters_polygon
 from .methods.provision import clip_provision, get_service_provision, recalculate_links
+from .methods.utils import gdf_to_graph, graph_to_gdf
 from .methods.visibility import (
     calculate_visibility_catchment_area,
     get_visibilities_from_points,
objectnat-1.2.0/src/objectnat/_version.py

@@ -0,0 +1 @@
+VERSION = "1.2.0"
objectnat-1.1.0/src/objectnat/methods/noise/noise_sim.py → objectnat-1.2.0/src/objectnat/methods/noise/noise_simulation.py

@@ -13,6 +13,7 @@ from tqdm import tqdm
 from objectnat import config
 from objectnat.methods.noise.noise_exceptions import InvalidStepError
 from objectnat.methods.noise.noise_reduce import dist_to_target_db, green_noise_reduce_db
+from objectnat.methods.noise.noise_simulation_simplified import _eval_donuts_gdf
 from objectnat.methods.utils.geom_utils import (
     gdf_to_circle_zones_from_point,
     get_point_from_a_thorough_b,

@@ -102,9 +103,15 @@ def simulate_noise(
             raise ValueError(
                 f"One or more values in 'source_noise_db' column exceed the physical limit of {MAX_DB_VALUE} dB."
             )
+        if source_points["source_noise_db"].isnull().any():
+            raise ValueError(f"Column 'source_noise_db' contains missing (NaN) values")
         use_column_db = True
 
-    use_column_freq =
+    use_column_freq = False
+    if "geometric_mean_freq_hz" in source_points.columns:
+        if source_points["geometric_mean_freq_hz"].isnull().any():
+            raise ValueError(f"Column 'geometric_mean_freq_hz' contains missing (NaN) values")
+        use_column_freq = True
 
     if not use_column_db:
         if source_noise_db is None:

@@ -298,22 +305,7 @@ def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple
         noise_reduce = int(round(green_noise_reduce_db(geometric_mean_freq_hz, r_tree_new)))
         reduce_polygons.append((red_polygon, noise_reduce))
 
-
-    donuts = []
-    don_values = []
-    to_cut_off = point_from
-    for i in range(len(donuts_dist_values[:-1])):
-        cur_buffer = point_from.buffer(donuts_dist_values[i + 1][0])
-        donuts.append(cur_buffer.difference(to_cut_off))
-        don_values.append(donuts_dist_values[i][1])
-        to_cut_off = cur_buffer
-
-    noise_from_point = (
-        gpd.GeoDataFrame(geometry=donuts, data={"noise_level": don_values}, crs=local_crs)
-        .clip(vis_poly, keep_geom_type=True)
-        .explode(ignore_index=True)
-    )
-
+    noise_from_point = _eval_donuts_gdf(point_from, donuts_dist_values, local_crs, vis_poly)
     # intersect noise poly with noise reduce
     if len(reduce_polygons) > 0:
         reduce_polygons = gpd.GeoDataFrame(
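The removed block and the `_eval_donuts_gdf` helper that replaces it build the same thing: concentric "donut" rings around a noise source, each tagged with the dB level that reaches that distance band, clipped to the visibility polygon. A self-contained sketch of that construction, with made-up distance/dB pairs that are not output of the library:

```python
import geopandas as gpd
from shapely.geometry import Point

source = Point(0, 0)
# Hypothetical (distance_m, noise_db) pairs, for illustration only.
dist_db = [(0, 90), (10, 85), (25, 80), (50, 75)]

donuts, levels = [], []
to_cut_off = source
for i in range(len(dist_db) - 1):
    cur_buffer = source.buffer(dist_db[i + 1][0])
    donuts.append(cur_buffer.difference(to_cut_off))  # ring between two distance bands
    levels.append(dist_db[i][1])                      # dB level assigned to that ring
    to_cut_off = cur_buffer

rings = gpd.GeoDataFrame({"noise_level": levels}, geometry=donuts, crs=3857)
print(rings)
```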
objectnat-1.2.0/src/objectnat/methods/noise/noise_simulation_simplified.py

@@ -0,0 +1,135 @@
+# simplified version
+import geopandas as gpd
+import pandas as pd
+from shapely.ops import polygonize, unary_union
+
+from objectnat.methods.noise.noise_reduce import dist_to_target_db
+from objectnat.methods.utils.geom_utils import (
+    distribute_points_on_linestrings,
+    distribute_points_on_polygons,
+    polygons_to_multilinestring,
+)
+from objectnat.methods.visibility.visibility_analysis import get_visibility_accurate
+
+MAX_DB_VALUE = 194
+
+
+def calculate_simplified_noise_frame(
+    noise_sources: gpd.GeoDataFrame, obstacles: gpd.GeoDataFrame, air_temperature, **kwargs
+) -> gpd.GeoDataFrame:
+    target_noise_db = kwargs.get("target_noise_db", 40)
+    db_sim_step = kwargs.get("db_sim_step", 5)
+    linestring_point_radius = kwargs.get("linestring_point_radius", 20)
+    polygon_point_radius = kwargs.get("polygon_point_radius", 10)
+
+    required_columns = ["source_noise_db", "geometric_mean_freq_hz"]
+    for col in required_columns:
+        if col not in noise_sources.columns:
+            raise ValueError(f"'{col}' column is missing in provided GeoDataFrame")
+        if noise_sources[col].isnull().any():
+            raise ValueError(f"Column '{col}' contains missing (NaN) values")
+    if (noise_sources["source_noise_db"] > MAX_DB_VALUE).any():
+        raise ValueError(
+            f"One or more values in 'source_noise_db' column exceed the physical limit of {MAX_DB_VALUE} dB."
+        )
+    original_crs = noise_sources.crs
+    if len(obstacles) > 0:
+        obstacles = obstacles.copy()
+        obstacles.geometry = obstacles.geometry.simplify(tolerance=1)
+        local_crs = obstacles.estimate_utm_crs()
+        obstacles.to_crs(local_crs, inplace=True)
+        noise_sources.to_crs(local_crs, inplace=True)
+    else:
+        local_crs = noise_sources.estimate_utm_crs()
+        noise_sources.to_crs(local_crs, inplace=True)
+    noise_sources.reset_index(drop=True)
+
+    noise_sources = noise_sources.explode(ignore_index=True)
+    noise_sources["geom_type"] = noise_sources.geom_type
+
+    grouped_sources = noise_sources.groupby(by=["source_noise_db", "geometric_mean_freq_hz", "geom_type"])
+
+    frame_result = []
+
+    for (source_db, freq_hz, geom_type), group_gdf in grouped_sources:
+        # calculating layer dist and db values
+        dist_db = [(0, source_db)]
+        cur_db = source_db - db_sim_step
+        max_dist = 0
+        while cur_db > target_noise_db - db_sim_step:
+            if cur_db - db_sim_step < target_noise_db:
+                cur_db = target_noise_db
+            max_dist = dist_to_target_db(source_db, cur_db, freq_hz, air_temperature)
+            dist_db.append((max_dist, cur_db))
+            cur_db -= db_sim_step
+
+        if geom_type == "Point":
+            for _, row in group_gdf.iterrows():
+                point_from = row.geometry
+                point_buffer = point_from.buffer(max_dist, resolution=16)
+                local_obstacles = obstacles[obstacles.intersects(point_buffer)]
+                vis_poly = get_visibility_accurate(point_from, obstacles=local_obstacles, view_distance=max_dist)
+                noise_from_feature = _eval_donuts_gdf(point_from, dist_db, local_crs, vis_poly)
+                frame_result.append(noise_from_feature)
+
+        elif geom_type == "LineString":
+            layer_points = distribute_points_on_linestrings(group_gdf, radius=linestring_point_radius, lloyd_relax_n=1)
+            noise_from_feature = _process_lines_or_polygons(
+                group_gdf, max_dist, obstacles, layer_points, dist_db, local_crs
+            )
+            frame_result.append(noise_from_feature)
+        elif geom_type == "Polygon":
+            group_gdf.geometry = group_gdf.buffer(0.1, resolution=1)
+            layer_points = distribute_points_on_polygons(
+                group_gdf, only_exterior=False, radius=polygon_point_radius, lloyd_relax_n=1
+            )
+            noise_from_feature = _process_lines_or_polygons(
+                group_gdf, max_dist, obstacles, layer_points, dist_db, local_crs
+            )
+            frame_result.append(noise_from_feature)
+        else:
+            pass
+
+    noise_gdf = gpd.GeoDataFrame(pd.concat(frame_result, ignore_index=True), crs=local_crs)
+    polygons = gpd.GeoDataFrame(
+        geometry=list(polygonize(noise_gdf.geometry.apply(polygons_to_multilinestring).union_all())), crs=local_crs
+    )
+    polygons_points = polygons.copy()
+    polygons_points.geometry = polygons.representative_point()
+    sim_result = polygons_points.sjoin(noise_gdf, predicate="within").reset_index()
+    sim_result = sim_result.groupby("index").agg({"noise_level": "max"})
+    sim_result["geometry"] = polygons
+    sim_result = (
+        gpd.GeoDataFrame(sim_result, geometry="geometry", crs=local_crs).dissolve(by="noise_level").reset_index()
+    )
+
+    return sim_result.to_crs(original_crs)
+
+
+def _process_lines_or_polygons(group_gdf, max_dist, obstacles, layer_points, dist_db, local_crs) -> gpd.GeoDataFrame:
+    features_vision_polys = []
+    layer_buffer = group_gdf.buffer(max_dist, resolution=16).union_all()
+    local_obstacles = obstacles[obstacles.intersects(layer_buffer)]
+    for _, row in layer_points.iterrows():
+        point_from = row.geometry
+        vis_poly = get_visibility_accurate(point_from, obstacles=local_obstacles, view_distance=max_dist)
+        features_vision_polys.append(vis_poly)
+    features_vision_polys = unary_union(features_vision_polys)
+    return _eval_donuts_gdf(group_gdf.union_all(), dist_db, local_crs, features_vision_polys)
+
+
+def _eval_donuts_gdf(initial_geometry, dist_db, local_crs, clip_poly) -> gpd.GeoDataFrame:
+    donuts = []
+    don_values = []
+    to_cut_off = initial_geometry
+    for i in range(len(dist_db[:-1])):
+        cur_buffer = initial_geometry.buffer(dist_db[i + 1][0])
+        donuts.append(cur_buffer.difference(to_cut_off))
+        don_values.append(dist_db[i][1])
+        to_cut_off = cur_buffer
+    noise_from_feature = (
+        gpd.GeoDataFrame(geometry=donuts, data={"noise_level": don_values}, crs=local_crs)
+        .clip(clip_poly, keep_geom_type=True)
+        .explode(ignore_index=True)
+    )
+    return noise_from_feature
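A minimal usage sketch for the new entry point above. The input files and attribute values are hypothetical; the required columns ('source_noise_db', 'geometric_mean_freq_hz') and the keyword arguments follow the code in this file:

```python
import geopandas as gpd
from objectnat import calculate_simplified_noise_frame

# Hypothetical inputs: noise sources (points, lines or polygons) and obstacle footprints.
sources = gpd.read_file("noise_sources.geojson")   # needs 'source_noise_db' and 'geometric_mean_freq_hz'
buildings = gpd.read_file("buildings.geojson")

noise_frame = calculate_simplified_noise_frame(
    noise_sources=sources,
    obstacles=buildings,
    air_temperature=20,
    target_noise_db=40,          # stop once the level drops below 40 dB
    db_sim_step=5,               # one ring per 5 dB step
    linestring_point_radius=20,  # spacing of sample points along line sources
    polygon_point_radius=10,     # spacing of sample points along polygon sources
)
# Result: polygons dissolved by 'noise_level', returned in the CRS of the input sources.
```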
objectnat-1.2.0/src/objectnat/methods/utils/__init__.py

@@ -0,0 +1 @@
+from .graph_utils import gdf_to_graph, graph_to_gdf
{objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/utils/geom_utils.py

@@ -15,9 +15,9 @@ def polygons_to_multilinestring(geom: Polygon | MultiPolygon):
 
     def convert_polygon(polygon: Polygon):
         lines = []
-        exterior = LineString(polygon.exterior
+        exterior = LineString(polygon.exterior)
         lines.append(exterior)
-        interior = [LineString(p
+        interior = [LineString(p) for p in polygon.interiors]
         lines = lines + interior
         return lines
 

@@ -128,3 +128,46 @@ def combine_geometry(gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
     joined["geometry"] = enclosures
     joined = gpd.GeoDataFrame(joined, geometry="geometry", crs=crs)
     return joined
+
+
+def distribute_points_on_linestrings(lines: gpd.GeoDataFrame, radius, lloyd_relax_n=2) -> gpd.GeoDataFrame:
+    lines = lines.copy()
+    lines = lines.explode(ignore_index=True)
+    lines = lines[lines.geom_type == "LineString"]
+    original_crs = lines.crs
+    lines = lines.to_crs(crs=lines.estimate_utm_crs())
+    lines = lines.reset_index(drop=True)
+    lines = lines[["geometry"]]
+    radius = radius * 1.1
+    segmentized = lines.geometry.apply(lambda x: x.simplify(radius).segmentize(radius))
+    points = [Point(pt) for line in segmentized for pt in line.coords]
+
+    points = gpd.GeoDataFrame(geometry=points, crs=lines.crs)
+    lines["lines"] = lines.geometry
+    geom_concave = lines.buffer(5, resolution=1).union_all()
+
+    for i in range(lloyd_relax_n):
+        points.geometry = points.voronoi_polygons().clip(geom_concave).centroid
+        points = points.sjoin_nearest(lines, how="left")
+        points = points[~points.index.duplicated(keep="first")]
+        points["geometry"] = points["lines"].interpolate(points["lines"].project(points.geometry))
+        points.drop(columns=["lines", "index_right"], inplace=True)
+
+    return points.dropna().to_crs(original_crs)
+
+
+def distribute_points_on_polygons(
+    polygons: gpd.GeoDataFrame, radius, only_exterior=True, lloyd_relax_n=2
+) -> gpd.GeoDataFrame:
+    polygons = polygons.copy()
+    polygons = polygons.explode(ignore_index=True)
+    polygons = polygons[polygons.geom_type == "Polygon"]
+
+    if only_exterior:
+        polygons.geometry = polygons.geometry.apply(lambda x: LineString(x.exterior))
+    else:
+        polygons = gpd.GeoDataFrame(
+            geometry=list(polygons.geometry.apply(polygons_to_multilinestring)), crs=polygons.crs
+        )
+
+    return distribute_points_on_linestrings(polygons, radius, lloyd_relax_n=lloyd_relax_n)
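A short sketch of how the two new point-distribution helpers might be called on their own; the street and park layers are hypothetical stand-ins for any LineString and Polygon GeoDataFrames:

```python
import geopandas as gpd
from objectnat.methods.utils.geom_utils import (
    distribute_points_on_linestrings,
    distribute_points_on_polygons,
)

streets = gpd.read_file("streets.geojson")   # hypothetical LineString layer
# Roughly one point every 20 m, with one round of Lloyd relaxation.
street_points = distribute_points_on_linestrings(streets, radius=20, lloyd_relax_n=1)

parks = gpd.read_file("parks.geojson")       # hypothetical Polygon layer
# Sample both exterior and interior rings.
park_points = distribute_points_on_polygons(parks, radius=10, only_exterior=False, lloyd_relax_n=1)
```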
{objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/utils/graph_utils.py

@@ -4,7 +4,7 @@ import numpy as np
 import pandas as pd
 from loguru import logger
 from scipy.spatial import KDTree
-from shapely import LineString
+from shapely import LineString, MultiLineString, line_merge, node
 from shapely.geometry.point import Point
 
 

@@ -89,6 +89,120 @@ def graph_to_gdf(
     return nodes_gdf, edges_gdf
 
 
+def gdf_to_graph(
+    gdf: gpd.GeoDataFrame, project_gdf_attr=True, reproject_to_utm_crs=True, speed=5, check_intersections=True
+) -> nx.DiGraph:
+    """
+    Converts a GeoDataFrame of LineStrings into a directed graph (nx.DiGraph).
+
+    This function transforms a set of linear geometries (which may or may not form a planar graph)
+    into a directed graph where each edge corresponds to a LineString (or its segment) from the GeoDataFrame.
+    Intersections are optionally checked and merged. Attributes from the original GeoDataFrame
+    can be projected onto the graph edges using spatial matching.
+
+    Parameters
+    ----------
+    gdf : gpd.GeoDataFrame
+        A GeoDataFrame containing at least one LineString geometry.
+    project_gdf_attr : bool, default=True
+        If True, attributes from the input GeoDataFrame will be spatially projected
+        onto the resulting graph edges. This can be an expensive operation for large datasets.
+    reproject_to_utm_crs : bool, default=True
+        If True, reprojects the GeoDataFrame to the estimated local UTM CRS
+        to ensure accurate edge length calculations in meters.
+        If False, edge lengths are still computed in UTM CRS, but the final graph
+        will remain in the original CRS of the input GeoDataFrame.
+    speed : float, default=5
+        Assumed travel speed in km/h used to compute edge traversal time (in minutes).
+    check_intersections : bool, default=True
+        If True, merges geometries to ensure topological correctness.
+        Can be disabled if the input geometries already form a proper planar graph
+        with no unintended intersections.
+
+    Returns
+    -------
+    nx.DiGraph
+        A directed graph where each edge corresponds to a line segment from the input GeoDataFrame.
+        Edge attributes include geometry, length in meters, travel time (in minutes), and any
+        additional projected attributes from the original GeoDataFrame.
+
+    Raises
+    ------
+    ValueError
+        If the input GeoDataFrame contains no valid LineStrings.
+    """
+
+    def unique_list(agg_vals):
+        agg_vals = list(set(agg_vals.dropna()))
+        if len(agg_vals) == 1:
+            return agg_vals[0]
+        return agg_vals
+
+    original_crs = gdf.crs
+    gdf = gdf.to_crs(gdf.estimate_utm_crs())
+
+    gdf = gdf.explode(ignore_index=True)
+    gdf = gdf[gdf.geom_type == "LineString"]
+
+    if len(gdf) == 0:
+        raise ValueError("Provided GeoDataFrame contains no valid LineStrings")
+
+    if check_intersections:
+        lines = line_merge(node(MultiLineString(gdf.geometry.to_list())))
+    else:
+        lines = line_merge(MultiLineString(gdf.geometry.to_list()))
+
+    lines = gpd.GeoDataFrame(geometry=list(lines.geoms), crs=gdf.crs)
+
+    if len(gdf.columns) > 1 and project_gdf_attr:
+        lines_centroids = lines.copy()
+        lines_centroids.geometry = lines_centroids.apply(
+            lambda row: row.geometry.line_interpolate_point(row.geometry.length / 2), axis=1
+        ).buffer(0.05, resolution=2)
+        lines_with_attrs = gpd.sjoin(lines_centroids, gdf, how="left", predicate="intersects")
+        aggregated_attrs = (
+            lines_with_attrs.drop(columns=["geometry", "index_right"])  # drop the buffer geometry
+            .groupby(lines_with_attrs.index)
+            .agg(unique_list)
+        )
+        lines = pd.concat([lines, aggregated_attrs], axis=1)
+
+    lines["length_meter"] = np.round(lines.length, 2)
+    if not reproject_to_utm_crs:
+        lines = lines.to_crs(original_crs)
+
+    coords = lines.geometry.get_coordinates()
+    coords_grouped_by_index = coords.reset_index(names="old_index").groupby("old_index")
+    start_coords = coords_grouped_by_index.head(1).apply(lambda a: (a.x, a.y), axis=1).rename("start")
+    end_coords = coords_grouped_by_index.tail(1).apply(lambda a: (a.x, a.y), axis=1).rename("end")
+    coords = pd.concat([start_coords.reset_index(), end_coords.reset_index()], axis=1)[["start", "end"]]
+    lines = pd.concat([lines, coords], axis=1)
+    unique_coords = pd.concat([coords["start"], coords["end"]], ignore_index=True).unique()
+    coord_to_index = {coord: idx for idx, coord in enumerate(unique_coords)}
+
+    lines["u"] = lines["start"].map(coord_to_index)
+    lines["v"] = lines["end"].map(coord_to_index)
+
+    speed = speed * 1000 / 60
+    lines["time_min"] = np.round(lines["length_meter"] / speed, 2)
+
+    graph = nx.Graph()
+    for coords, node_id in coord_to_index.items():
+        x, y = coords
+        graph.add_node(node_id, x=float(x), y=float(y))
+
+    columns_to_attr = lines.columns.difference(["start", "end", "u", "v"])
+    for _, row in lines.iterrows():
+        edge_attrs = {}
+        for col in columns_to_attr:
+            edge_attrs[col] = row[col]
+        graph.add_edge(row.u, row.v, **edge_attrs)
+
+    graph.graph["crs"] = lines.crs
+    graph.graph["speed m/min"] = speed
+    return nx.DiGraph(graph)
+
+
 def get_closest_nodes_from_gdf(gdf: gpd.GeoDataFrame, nx_graph: nx.Graph) -> tuple:
     """
     Finds the closest graph nodes to the geometries in a GeoDataFrame.
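Following the docstring above, a minimal usage sketch; the input file and its attribute column are hypothetical, and graph_to_gdf is called with defaults since its options are not shown in this diff:

```python
import geopandas as gpd
import networkx as nx
from objectnat import gdf_to_graph, graph_to_gdf

roads = gpd.read_file("roads.geojson")  # hypothetical LineString layer, e.g. with a 'highway' column

# Build the directed graph; edges carry geometry, 'length_meter', 'time_min'
# and (because project_gdf_attr=True) the projected 'highway' values.
graph = gdf_to_graph(roads, speed=5, check_intersections=True)
print(nx.number_of_nodes(graph), nx.number_of_edges(graph))

# Round-trip back to GeoDataFrames for inspection.
nodes_gdf, edges_gdf = graph_to_gdf(graph)
```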
{objectnat-1.1.0 → objectnat-1.2.0}/src/objectnat/methods/visibility/visibility_analysis.py

@@ -87,9 +87,9 @@ def get_visibility_accurate(
         point_from = point_from.iloc[0].geometry
     else:
         obstacles = obstacles.copy()
-
+    if obstacles.contains(point_from).any():
+        return Polygon()
     obstacles.reset_index(inplace=True, drop=True)
-
     point_buffer = point_from.buffer(view_distance, resolution=32)
     allowed_geom_types = ["MultiPolygon", "Polygon", "LineString", "MultiLineString"]
     obstacles = obstacles[obstacles.geom_type.isin(allowed_geom_types)]
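The added guard short-circuits get_visibility_accurate when the viewpoint lies inside an obstacle footprint, returning an empty polygon instead of running the full computation. A small sketch of that expected behaviour with made-up geometries:

```python
import geopandas as gpd
from shapely.geometry import Point, Polygon
from objectnat.methods.visibility.visibility_analysis import get_visibility_accurate

building = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
obstacles = gpd.GeoDataFrame(geometry=[building], crs=3857)

# Viewpoint inside the footprint: expected to be empty after this change.
vis = get_visibility_accurate(Point(5, 5), obstacles=obstacles, view_distance=100)
print(vis.is_empty)
```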
objectnat-1.1.0/src/objectnat/_version.py

@@ -1 +0,0 @@
-VERSION = "1.1.0"