ObjectNat 1.2.2-py3-none-any.whl → 1.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of ObjectNat might be problematic.
- objectnat/_api.py +14 -14
- objectnat/_config.py +43 -47
- objectnat/_version.py +1 -1
- objectnat/methods/coverage_zones/__init__.py +3 -3
- objectnat/methods/coverage_zones/graph_coverage.py +11 -4
- objectnat/methods/coverage_zones/radius_voronoi_coverage.py +4 -2
- objectnat/methods/coverage_zones/stepped_coverage.py +20 -10
- objectnat/methods/isochrones/__init__.py +1 -1
- objectnat/methods/isochrones/isochrone_utils.py +167 -167
- objectnat/methods/isochrones/isochrones.py +31 -11
- objectnat/methods/noise/__init__.py +3 -3
- objectnat/methods/noise/noise_init_data.py +10 -10
- objectnat/methods/noise/noise_reduce.py +155 -155
- objectnat/methods/noise/noise_simulation.py +14 -13
- objectnat/methods/noise/noise_simulation_simplified.py +10 -9
- objectnat/methods/point_clustering/__init__.py +1 -1
- objectnat/methods/point_clustering/cluster_points_in_polygons.py +3 -3
- objectnat/methods/provision/__init__.py +1 -1
- objectnat/methods/provision/provision.py +112 -20
- objectnat/methods/provision/provision_exceptions.py +59 -59
- objectnat/methods/provision/provision_model.py +323 -348
- objectnat/methods/utils/__init__.py +1 -1
- objectnat/methods/utils/geom_utils.py +173 -173
- objectnat/methods/utils/graph_utils.py +5 -5
- objectnat/methods/utils/math_utils.py +32 -32
- objectnat/methods/visibility/__init__.py +6 -6
- objectnat/methods/visibility/visibility_analysis.py +9 -17
- objectnat-1.3.0.dist-info/METADATA +201 -0
- objectnat-1.3.0.dist-info/RECORD +33 -0
- {objectnat-1.2.2.dist-info → objectnat-1.3.0.dist-info}/WHEEL +1 -1
- objectnat-1.2.2.dist-info/METADATA +0 -116
- objectnat-1.2.2.dist-info/RECORD +0 -33
- {objectnat-1.2.2.dist-info/licenses → objectnat-1.3.0.dist-info}/LICENSE.txt +0 -0
objectnat/methods/utils/__init__.py
@@ -1 +1 @@
-from .graph_utils import gdf_to_graph, graph_to_gdf
+from .graph_utils import gdf_to_graph, graph_to_gdf
objectnat/methods/utils/geom_utils.py
@@ -1,173 +1,173 @@
(the diff removes and re-adds the entire file; the old and new content are identical, so the file is listed once)
import math

import geopandas as gpd
from shapely import LineString, MultiPolygon, Point, Polygon
from shapely.ops import polygonize, unary_union

from objectnat import config

logger = config.logger


def polygons_to_multilinestring(geom: Polygon | MultiPolygon):
    # pylint: disable-next=redefined-outer-name,reimported,import-outside-toplevel
    from shapely import LineString, MultiLineString, MultiPolygon

    def convert_polygon(polygon: Polygon):
        lines = []
        exterior = LineString(polygon.exterior)
        lines.append(exterior)
        interior = [LineString(p) for p in polygon.interiors]
        lines = lines + interior
        return lines

    def convert_multipolygon(polygon: MultiPolygon):
        return MultiLineString(sum([convert_polygon(p) for p in polygon.geoms], []))

    if geom.geom_type == "Polygon":
        return MultiLineString(convert_polygon(geom))
    return convert_multipolygon(geom)


def explode_linestring(geometry: LineString) -> list[LineString]:
    """A function to return all segments of a linestring as a list of linestrings"""
    coords_ext = geometry.coords  # Create a list of all line node coordinates
    result = [LineString(part) for part in zip(coords_ext, coords_ext[1:])]
    return result


def point_side_of_line(line: LineString, point: Point) -> int:
    """A positive indicates the left-hand side a negative indicates the right-hand side"""
    x1, y1 = line.coords[0]
    x2, y2 = line.coords[-1]
    x, y = point.coords[0]
    cross_product = (x2 - x1) * (y - y1) - (y2 - y1) * (x - x1)
    if cross_product > 0:
        return 1
    return -1


def get_point_from_a_thorough_b(a: Point, b: Point, dist):
    """
    Func to get Point from point a thorough point b on dist
    """
    direction = math.atan2(b.y - a.y, b.x - a.x)
    c_x = a.x + dist * math.cos(direction)
    c_y = a.y + dist * math.sin(direction)
    return Point(c_x, c_y)


def gdf_to_circle_zones_from_point(
    gdf: gpd.GeoDataFrame, point_from: Point, zone_radius, resolution=4, explode_multigeom=True
) -> gpd.GeoDataFrame:
    """n_segments = 4*resolution,e.g. if resolution = 4 that means there will be 16 segments"""
    crs = gdf.crs
    buffer = point_from.buffer(zone_radius, resolution=resolution)
    gdf_unary = gdf.clip(buffer, keep_geom_type=True).union_all()
    gdf_geometry = (
        gpd.GeoDataFrame(geometry=[gdf_unary], crs=crs)
        .explode(index_parts=True)
        .geometry.apply(polygons_to_multilinestring)
        .union_all()
    )
    zones_lines = [LineString([Point(coords1), Point(point_from)]) for coords1 in buffer.exterior.coords[:-1]]
    if explode_multigeom:
        return (
            gpd.GeoDataFrame(geometry=list(polygonize(unary_union([gdf_geometry] + zones_lines))), crs=crs)
            .clip(gdf_unary, keep_geom_type=True)
            .explode(index_parts=False)
        )
    return gpd.GeoDataFrame(geometry=list(polygonize(unary_union([gdf_geometry] + zones_lines))), crs=crs).clip(
        gdf_unary, keep_geom_type=True
    )


def remove_inner_geom(polygon: Polygon | MultiPolygon):
    """function to get rid of inner polygons"""
    if isinstance(polygon, Polygon):
        return Polygon(polygon.exterior.coords)
    if isinstance(polygon, MultiPolygon):
        polys = []
        for poly in polygon.geoms:
            polys.append(Polygon(poly.exterior.coords))
        return MultiPolygon(polys)
    else:
        return Polygon()


def combine_geometry(gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
    """
    Combine geometry of intersecting layers into a single GeoDataFrame.

    Parameters
    ----------
    gdf: gpd.GeoDataFrame
        A GeoPandas GeoDataFrame

    Returns
    -------
    gpd.GeoDataFrame
        The combined GeoDataFrame with aggregated in lists columns.

    Examples
    --------
    >>> gdf = gpd.read_file('path_to_your_file.geojson')
    >>> result = combine_geometry(gdf)
    """

    crs = gdf.crs

    enclosures = gpd.GeoDataFrame(
        geometry=list(polygonize(gdf["geometry"].apply(polygons_to_multilinestring).union_all())), crs=crs
    )
    enclosures_points = enclosures.copy()
    enclosures_points.geometry = enclosures.representative_point()
    joined = gpd.sjoin(enclosures_points, gdf, how="inner", predicate="within").reset_index()
    cols = joined.columns.tolist()
    cols.remove("geometry")
    joined = joined.groupby("index").agg({column: list for column in cols})
    joined["geometry"] = enclosures
    joined = gpd.GeoDataFrame(joined, geometry="geometry", crs=crs)
    return joined


def distribute_points_on_linestrings(lines: gpd.GeoDataFrame, radius, lloyd_relax_n=2) -> gpd.GeoDataFrame:
    lines = lines.copy()
    lines = lines.explode(ignore_index=True)
    lines = lines[lines.geom_type == "LineString"]
    original_crs = lines.crs
    lines = lines.to_crs(crs=lines.estimate_utm_crs())
    lines = lines.reset_index(drop=True)
    lines = lines[["geometry"]]
    radius = radius * 1.1
    segmentized = lines.geometry.apply(lambda x: x.simplify(radius).segmentize(radius))
    points = [Point(pt) for line in segmentized for pt in line.coords]

    points = gpd.GeoDataFrame(geometry=points, crs=lines.crs)
    lines["lines"] = lines.geometry
    geom_concave = lines.buffer(5, resolution=1).union_all()

    for i in range(lloyd_relax_n):
        points.geometry = points.voronoi_polygons().clip(geom_concave).centroid
        points = points.sjoin_nearest(lines, how="left")
        points = points[~points.index.duplicated(keep="first")]
        points["geometry"] = points["lines"].interpolate(points["lines"].project(points.geometry))
        points.drop(columns=["lines", "index_right"], inplace=True)

    return points.dropna().to_crs(original_crs)


def distribute_points_on_polygons(
    polygons: gpd.GeoDataFrame, radius, only_exterior=True, lloyd_relax_n=2
) -> gpd.GeoDataFrame:
    polygons = polygons.copy()
    polygons = polygons.explode(ignore_index=True)
    polygons = polygons[polygons.geom_type == "Polygon"]

    if only_exterior:
        polygons.geometry = polygons.geometry.apply(lambda x: LineString(x.exterior))
    else:
        polygons = gpd.GeoDataFrame(
            geometry=list(polygons.geometry.apply(polygons_to_multilinestring)), crs=polygons.crs
        )

    return distribute_points_on_linestrings(polygons, radius, lloyd_relax_n=lloyd_relax_n)
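To make the re-emitted module easier to follow, here is a minimal usage sketch for two of the helpers listed above. The behaviour follows directly from the code shown; the import path is an assumption taken from the wheel layout in the file list and is not itself part of the diff.

from shapely import LineString, Point

from objectnat.methods.utils.geom_utils import explode_linestring, point_side_of_line

line = LineString([(0, 0), (2, 0), (2, 2)])

# explode_linestring returns each segment of the line as its own LineString.
segments = explode_linestring(line)  # two segments: (0 0, 2 0) and (2 0, 2 2)

# point_side_of_line takes the cross product of the line's first and last coordinates:
# 1 means the point lies to the left of the first->last direction, -1 to the right.
side = point_side_of_line(line, Point(0, 2))  # first->last runs (0,0)->(2,2); (0,2) lies to the left

print(len(segments), side)  # 2 1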
objectnat/methods/utils/graph_utils.py
@@ -51,7 +51,7 @@ def graph_to_gdf(
     """
     Converts nx graph to gpd.GeoDataFrame as edges.

-
+    Args:
         graph (nx.MultiDiGraph):
             The graph to convert.
         edges (bool):
@@ -98,7 +98,7 @@ def gdf_to_graph(
     Intersections are optionally checked and merged. Attributes from the original GeoDataFrame
     can be projected onto the graph edges using spatial matching.

-
+    Args:
         gdf (gpd.GeoDataFrame): A GeoDataFrame containing at least one LineString geometry.
         project_gdf_attr (bool): If True, attributes from the input GeoDataFrame will be spatially
             projected onto the resulting graph edges. This can be an expensive operation for large datasets.
@@ -193,7 +193,7 @@ def get_closest_nodes_from_gdf(gdf: gpd.GeoDataFrame, nx_graph: nx.Graph) -> tup
     """
     Finds the closest graph nodes to the geometries in a GeoDataFrame.

-
+    Args
     ----------
     gdf : gpd.GeoDataFrame
         GeoDataFrame with geometries for which the nearest graph nodes will be found.
@@ -228,7 +228,7 @@ def remove_weakly_connected_nodes(graph: nx.DiGraph) -> nx.DiGraph:
     """
     Removes all nodes that are not part of the largest strongly connected component in the graph.

-
+    Args
     ----------
     graph : nx.DiGraph
         A directed NetworkX graph.
@@ -275,7 +275,7 @@ def reverse_graph(nx_graph: nx.Graph, weight: str) -> tuple[nx.Graph, nx.DiGraph
     For directed graphs, the function returns a new graph with all edge directions reversed,
     preserving the specified edge weight.

-
+    Args
     ----------
     nx_graph : nx.Graph
         Input NetworkX graph (can be directed or undirected).
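The two converters whose docstrings gain "Args:" sections above are the round-trip pair re-exported by objectnat.methods.utils (see the __init__.py hunk earlier). A hedged sketch of how they combine; the parameter names come from the docstrings in this diff, while the toy data, CRS, and exact return types are assumptions, not something this diff documents.

import geopandas as gpd
from shapely import LineString

from objectnat.methods.utils import gdf_to_graph, graph_to_gdf

# A toy two-segment road network in a projected CRS (UTM zone chosen arbitrarily).
roads = gpd.GeoDataFrame(
    geometry=[LineString([(0, 0), (0, 100)]), LineString([(0, 100), (100, 100)])],
    crs=32636,
)

graph = gdf_to_graph(roads, project_gdf_attr=False)  # build a graph from the LineString geometries
edges = graph_to_gdf(graph, edges=True)              # convert its edges back into a GeoDataFrame
print(len(edges))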
objectnat/methods/utils/math_utils.py
@@ -1,32 +1,32 @@
-import numpy as np
-
-
-def min_max_normalization(data, new_min=0, new_max=1):
-    """
-    Min-max normalization for a given array of data.
-
-
-    ----------
-    data: numpy.ndarray
-        Input data to be normalized.
-    new_min: float, optional
-        New minimum value for normalization. Defaults to 0.
-    new_max: float, optional
-        New maximum value for normalization. Defaults to 1.
-
-    Returns
-    -------
-    numpy.ndarray
-        Normalized data.
-
-    Examples
-    --------
-    >>> import numpy as np
-    >>> data = np.array([1, 2, 3, 4, 5])
-    >>> normalized_data = min_max_normalization(data, new_min=0, new_max=1)
-    """
-
-    min_value = np.min(data)
-    max_value = np.max(data)
-    normalized_data = (data - min_value) / (max_value - min_value) * (new_max - new_min) + new_min
-    return normalized_data
+import numpy as np
+
+
+def min_max_normalization(data, new_min=0, new_max=1):
+    """
+    Min-max normalization for a given array of data.
+
+    Args
+    ----------
+    data: numpy.ndarray
+        Input data to be normalized.
+    new_min: float, optional
+        New minimum value for normalization. Defaults to 0.
+    new_max: float, optional
+        New maximum value for normalization. Defaults to 1.
+
+    Returns
+    -------
+    numpy.ndarray
+        Normalized data.
+
+    Examples
+    --------
+    >>> import numpy as np
+    >>> data = np.array([1, 2, 3, 4, 5])
+    >>> normalized_data = min_max_normalization(data, new_min=0, new_max=1)
+    """
+
+    min_value = np.min(data)
+    max_value = np.max(data)
+    normalized_data = (data - min_value) / (max_value - min_value) * (new_max - new_min) + new_min
+    return normalized_data
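For reference, the formula on the last lines of this file gives the following result for the docstring's example input; the values below follow from the code shown here, not from separate documentation.

import numpy as np

data = np.array([1, 2, 3, 4, 5])
# (data - min) / (max - min) * (new_max - new_min) + new_min, with new_min=0 and new_max=1
normalized = (data - data.min()) / (data.max() - data.min()) * (1 - 0) + 0
print(normalized)  # [0.   0.25 0.5  0.75 1.  ]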
objectnat/methods/visibility/__init__.py
@@ -1,6 +1,6 @@
-from .visibility_analysis import (
-    calculate_visibility_catchment_area,
-    get_visibilities_from_points,
-    get_visibility,
-    get_visibility_accurate,
-)
+from .visibility_analysis import (
+    calculate_visibility_catchment_area,
+    get_visibilities_from_points,
+    get_visibility,
+    get_visibility_accurate,
+)
objectnat/methods/visibility/visibility_analysis.py
@@ -4,7 +4,6 @@ from multiprocessing import cpu_count
 import geopandas as gpd
 import numpy as np
 import pandas as pd
-from pandarallel import pandarallel
 from shapely import LineString, MultiPolygon, Point, Polygon
 from shapely.ops import unary_union
 from tqdm.contrib.concurrent import process_map
@@ -28,7 +27,7 @@ def get_visibility_accurate(
     """
     Function to get accurate visibility from a given point to buildings within a given distance.

-
+    Args:
         point_from (Point | gpd.GeoDataFrame):
             The point or GeoDataFrame with 1 point from which the line of sight is drawn.
             If Point is provided it should be in the same crs as obstacles.
@@ -165,7 +164,7 @@ def get_visibility(
     """
     Function to get a quick estimate of visibility from a given point to buildings within a given distance.

-
+    Args:
         point_from (Point | gpd.GeoDataFrame):
             The point or GeoDataFrame with 1 point from which the line of sight is drawn.
             If Point is provided it should be in the same crs as obstacles.
@@ -239,7 +238,7 @@ def get_visibilities_from_points(
     """
     Calculate visibility polygons from a set of points considering obstacles within a specified view distance.

-
+    Args:
         points (gpd.GeoDataFrame):
             GeoDataFrame containing the points from which visibility is calculated.
         obstacles (gpd.GeoDataFrame):
@@ -297,7 +296,7 @@ def calculate_visibility_catchment_area(
     This function is designed to work with at least 1000 points spaced 10-20 meters apart for optimal results.
     Points can be generated using a road graph.

-
+    Args:
         points (gpd.GeoDataFrame): GeoDataFrame containing the points from which visibility is calculated.
         obstacles (gpd.GeoDataFrame): GeoDataFrame containing the obstacles that block visibility.
         view_distance (int | float): The maximum distance from each point within which visibility is calculated.
@@ -313,21 +312,12 @@ def calculate_visibility_catchment_area(
         return x

     def calc_group_factor(x):
-        # pylint: disable-next=redefined-outer-name,reimported,import-outside-toplevel
-        import numpy as np
-
         return np.mean(x.new_ratio) * x.count_n

     def unary_union_groups(x):
-        # pylint: disable-next=redefined-outer-name,reimported,import-outside-toplevel
-        from shapely import MultiPolygon
-
-        # pylint: disable-next=redefined-outer-name,reimported,import-outside-toplevel
-        from shapely.ops import unary_union
-
         return unary_union(MultiPolygon(list(x["geometry"])).buffer(0))

-
+    raise NotImplementedError("This method is temporarily unsupported.")

     local_crs = obstacles.estimate_utm_crs()
     obstacles = obstacles.to_crs(local_crs)
@@ -370,7 +360,7 @@ def calculate_visibility_catchment_area(
     all_in["count_n"] = all_in["index_right"].apply(len)

     logger.info("Calculating intersection's parameters")
-    all_in["factor"] = all_in.parallel_apply(calc_group_factor, axis=1)
+    # all_in["factor"] = all_in.parallel_apply(calc_group_factor, axis=1) # TODO replace pandarallel methods
     threshold = all_in["factor"].quantile(0.3)
     all_in = all_in[all_in["factor"] > threshold]

@@ -378,7 +368,9 @@ def calculate_visibility_catchment_area(
         min_max_normalization(np.sqrt(all_in["factor"].values), new_min=1, new_max=5)
     ).astype(int)
     logger.info("Calculating normalized groups geometry...")
-    all_in = all_in.groupby("factor_normalized").parallel_apply(unary_union_groups).reset_index()
+    all_in = (
+        all_in.groupby("factor_normalized").parallel_apply(unary_union_groups).reset_index()
+    )  # TODO replace pandarallel methods
     all_in = gpd.GeoDataFrame(data=all_in.rename(columns={0: "geometry"}), geometry="geometry", crs=32636)

     all_in = all_in.explode(index_parts=True).reset_index(drop=True)