ObjectNat 1.3.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- objectnat/__init__.py +9 -0
- objectnat/_api.py +14 -0
- objectnat/_config.py +43 -0
- objectnat/_version.py +1 -0
- objectnat/methods/__init__.py +0 -0
- objectnat/methods/coverage_zones/__init__.py +3 -0
- objectnat/methods/coverage_zones/graph_coverage.py +105 -0
- objectnat/methods/coverage_zones/radius_voronoi_coverage.py +39 -0
- objectnat/methods/coverage_zones/stepped_coverage.py +136 -0
- objectnat/methods/isochrones/__init__.py +1 -0
- objectnat/methods/isochrones/isochrone_utils.py +167 -0
- objectnat/methods/isochrones/isochrones.py +282 -0
- objectnat/methods/noise/__init__.py +3 -0
- objectnat/methods/noise/noise_init_data.py +10 -0
- objectnat/methods/noise/noise_reduce.py +155 -0
- objectnat/methods/noise/noise_simulation.py +453 -0
- objectnat/methods/noise/noise_simulation_simplified.py +222 -0
- objectnat/methods/point_clustering/__init__.py +1 -0
- objectnat/methods/point_clustering/cluster_points_in_polygons.py +115 -0
- objectnat/methods/provision/__init__.py +1 -0
- objectnat/methods/provision/provision.py +213 -0
- objectnat/methods/provision/provision_exceptions.py +59 -0
- objectnat/methods/provision/provision_model.py +323 -0
- objectnat/methods/utils/__init__.py +1 -0
- objectnat/methods/utils/geom_utils.py +173 -0
- objectnat/methods/utils/graph_utils.py +306 -0
- objectnat/methods/utils/math_utils.py +32 -0
- objectnat/methods/visibility/__init__.py +6 -0
- objectnat/methods/visibility/visibility_analysis.py +485 -0
- objectnat-1.3.3.dist-info/METADATA +202 -0
- objectnat-1.3.3.dist-info/RECORD +33 -0
- objectnat-1.3.3.dist-info/WHEEL +4 -0
- objectnat-1.3.3.dist-info/licenses/LICENSE.txt +28 -0
|
@@ -0,0 +1,323 @@
|
|
|
1
|
+
# pylint: disable=singleton-comparison
|
|
2
|
+
from typing import Tuple
|
|
3
|
+
|
|
4
|
+
import geopandas as gpd
|
|
5
|
+
import numpy as np
|
|
6
|
+
import pandas as pd
|
|
7
|
+
from shapely import LineString
|
|
8
|
+
|
|
9
|
+
from objectnat import config
|
|
10
|
+
|
|
11
|
+
from .provision_exceptions import CapacityKeyError, DemandKeyError
|
|
12
|
+
|
|
13
|
+
logger = config.logger
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class Provision:
    """
    Represents the logic for city provision calculations using a gravity or linear model.

    Args:
        services (gpd.GeoDataFrame): GeoDataFrame representing the services available in the city.
        demanded_buildings (gpd.GeoDataFrame): GeoDataFrame representing the buildings with demands for services.
        adjacency_matrix (pd.DataFrame): DataFrame representing the adjacency matrix between buildings.
        threshold (int): Threshold value for the provision calculations.

    Returns:
        Provision: The CityProvision object.

    Raises: KeyError: If the 'demand' column is missing in the provided 'demanded_buildings' GeoDataFrame,
    or if the 'capacity' column is missing in the provided 'services' GeoDataFrame. ValueError: If the 'capacity'
    column in 'services' or 'demand' column 'demanded_buildings' GeoDataFrame has no valid value.
    """

    # Filled by run(): services x buildings DataFrame of distributed demand flows.
    destination_matrix = None

    def __init__(
        self,
        services: gpd.GeoDataFrame,
        demanded_buildings: gpd.GeoDataFrame,
        adjacency_matrix: pd.DataFrame,
        threshold: int,
    ):
        # Copies are taken so the caller's frames are never mutated by validation.
        self.services = self.ensure_services(services.copy())
        self.demanded_buildings = self.ensure_buildings(demanded_buildings.copy())
        # NOTE: the matrix is transposed by the helper — rows become services,
        # columns become buildings from here on.
        self.adjacency_matrix = self.delete_useless_matrix_rows_columns(
            adjacency_matrix.copy(), demanded_buildings, services
        ).copy()
        self.threshold = threshold
        # Align CRS so downstream geometry operations compare like with like.
        self.services.to_crs(self.demanded_buildings.crs, inplace=True)

    @staticmethod
    def ensure_buildings(v: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
        """Validate the 'demand' column and initialize the running 'demand_left' counter."""
        if "demand" not in v.columns:
            raise DemandKeyError
        v["demand_left"] = v["demand"]
        return v

    @staticmethod
    def ensure_services(v: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
        """Validate the 'capacity' column and initialize the running 'capacity_left' counter."""
        if "capacity" not in v.columns:
            raise CapacityKeyError
        v["capacity_left"] = v["capacity"]
        return v

    @staticmethod
    def delete_useless_matrix_rows_columns(adjacency_matrix, demanded_buildings, services):
        """Keep only matrix rows/columns matching buildings/services, then transpose.

        Returns the matrix transposed: rows = services, columns = buildings.
        """
        adjacency_matrix.index = adjacency_matrix.index.astype(int)

        builds_indexes = set(demanded_buildings.index.astype(int).tolist())
        rows = set(adjacency_matrix.index.astype(int).tolist())
        # Symmetric difference: ids present in only one of the two sets.
        # NOTE(review): if buildings contain ids absent from the matrix, they end
        # up in `dif` and drop() (default errors='raise') would fail — presumably
        # inputs are always aligned; confirm against callers.
        dif = rows ^ builds_indexes
        adjacency_matrix.drop(index=(list(dif)), axis=0, inplace=True)

        service_indexes = set(services.index.astype(int).tolist())
        columns = set(adjacency_matrix.columns.astype(int).tolist())
        dif = columns ^ service_indexes
        adjacency_matrix.drop(columns=(list(dif)), axis=0, inplace=True)
        return adjacency_matrix.transpose()

    def run(self) -> Tuple[gpd.GeoDataFrame, gpd.GeoDataFrame, gpd.GeoDataFrame]:
        """Distribute demand to services iteratively and return annotated results.

        Returns a tuple: (demanded_buildings, services, distribution links GeoDataFrame).
        """

        def calculate_flows_y(loc):
            # One service row: distribute its remaining capacity over candidate
            # buildings with probability ~ 1/d^2 (gravity model), restricted to
            # the `best_houses` quantile of most attractive buildings.
            c = services_table.loc[loc.name]["capacity_left"]
            p = 1 / loc / loc
            p = p / p.sum()
            threshold = p.quantile(best_houses)
            p = p[p >= threshold]
            p = p / p.sum()
            # NOTE(review): after normalization p.sum() is 1 unless p is empty/NaN;
            # this guard presumably catches the all-NaN case — confirm.
            if p.sum() == 0:
                return loc
            # Fixed seed keeps the stochastic rounding deterministic across runs.
            rng = np.random.default_rng(seed=0)
            r = pd.Series(0, p.index)
            choice = np.unique(rng.choice(p.index, int(c), p=p.values), return_counts=True)
            choice = r.add(pd.Series(choice[1], choice[0]), fill_value=0)

            return choice

        def balance_flows_to_demands(loc):
            # One building column: if services offered more than the building's
            # remaining demand, stochastically keep only `demand_left` units,
            # capped per-service at what was actually offered.
            d = houses_table.loc[loc.name]["demand_left"]
            loc = loc[loc > 0]
            if loc.sum() > 0:
                p = loc / loc.sum()
                rng = np.random.default_rng(seed=0)
                r = pd.Series(0, p.index)
                choice = np.unique(rng.choice(p.index, int(d), p=p.values), return_counts=True)
                choice = r.add(pd.Series(choice[1], choice[0]), fill_value=0)
                choice = pd.Series(
                    data=np.minimum(loc.sort_index().values, choice.sort_index().values),
                    index=loc.sort_index().index,
                )
                return choice
            return loc

        logger.debug(
            f"Calculating provision from {len(self.services)} services to {len(self.demanded_buildings)} buildings."
        )

        distance_matrix = self.adjacency_matrix
        # Accumulator for distributed flows, same shape as the distance matrix.
        destination_matrix = pd.DataFrame(
            0,
            index=distance_matrix.index,
            columns=distance_matrix.columns,
            dtype=int,
        )
        # Anything farther than 3x the threshold is treated as unreachable.
        distance_matrix = distance_matrix.where(distance_matrix <= self.threshold * 3, np.inf)

        houses_table = self.demanded_buildings[["demand", "demand_left"]].copy()
        services_table = self.services[["capacity", "capacity_left"]].copy()
        # Drop already-saturated services (rows) and satisfied buildings (columns).
        distance_matrix = distance_matrix.drop(
            index=services_table[services_table["capacity_left"] == 0].index.values,
            columns=houses_table[houses_table["demand_left"] == 0].index.values,
            errors="ignore",
        )
        distance_matrix = distance_matrix.loc[~(distance_matrix == np.inf).all(axis=1)]
        distance_matrix = distance_matrix.loc[:, ~(distance_matrix == np.inf).all(axis=0)]

        # +1 avoids division by zero in the 1/d^2 attraction weights.
        distance_matrix = distance_matrix + 1
        selection_range = (self.threshold + 1) / 2
        best_houses = 0.9
        # Iteratively widen the selection radius (and relax the quantile filter)
        # until all demand is satisfied or no matchable pairs remain.
        while len(distance_matrix.columns) > 0 and len(distance_matrix.index) > 0:
            objects_n = sum(distance_matrix.shape)
            logger.debug(
                f"Matrix shape: {distance_matrix.shape},"
                f" Total objects: {objects_n},"
                f" Selection range: {selection_range},"
                f" Best houses: {best_houses}"
            )

            # Step 1: each service offers flows to buildings within range.
            temp_destination_matrix = distance_matrix.apply(
                lambda x: calculate_flows_y(x[x <= selection_range]), axis=1
            )
            temp_destination_matrix = temp_destination_matrix.fillna(0)
            # Step 2: each building trims offers down to its remaining demand.
            temp_destination_matrix = temp_destination_matrix.apply(balance_flows_to_demands, axis=0)
            temp_destination_matrix = temp_destination_matrix.fillna(0)
            temp_destination_matrix_aligned = temp_destination_matrix.reindex(
                index=destination_matrix.index, columns=destination_matrix.columns, fill_value=0
            )
            del temp_destination_matrix
            # Numpy addition keeps peak memory down versus DataFrame.add on big matrices.
            destination_matrix_np = destination_matrix.to_numpy()
            temp_destination_matrix_np = temp_destination_matrix_aligned.to_numpy()
            del temp_destination_matrix_aligned
            destination_matrix = pd.DataFrame(
                destination_matrix_np + temp_destination_matrix_np,
                index=destination_matrix.index,
                columns=destination_matrix.columns,
            )
            del destination_matrix_np, temp_destination_matrix_np
            axis_1 = destination_matrix.sum(axis=1).astype(int)
            axis_0 = destination_matrix.sum(axis=0).astype(int)

            # Update remaining capacity/demand from the cumulative flows.
            services_table["capacity_left"] = services_table["capacity"].subtract(axis_1, fill_value=0)
            houses_table["demand_left"] = houses_table["demand"].subtract(axis_0, fill_value=0)
            del axis_1, axis_0
            distance_matrix = distance_matrix.drop(
                index=services_table[services_table["capacity_left"] == 0].index.values,
                columns=houses_table[houses_table["demand_left"] == 0].index.values,
                errors="ignore",
            )
            distance_matrix = distance_matrix.loc[~(distance_matrix == np.inf).all(axis=1)]
            distance_matrix = distance_matrix.loc[:, ~(distance_matrix == np.inf).all(axis=0)]

            # Widen the search radius and relax the quantile in proportion to
            # how many objects were resolved this round.
            selection_range *= 1.5
            if best_houses <= 0.1:
                best_houses = 0
            else:
                objects_n_new = sum(distance_matrix.shape)
                best_houses = objects_n_new / (objects_n / best_houses)

        logger.debug("Done!")
        del distance_matrix, houses_table, services_table
        self.destination_matrix = destination_matrix

        # Annotate buildings/services in place with provision statistics.
        _additional_options(
            self.demanded_buildings,
            self.services,
            self.adjacency_matrix,
            self.destination_matrix,
            self.threshold,
        )

        return (
            self.demanded_buildings,
            self.services,
            _calc_links(
                self.destination_matrix,
                self.services,
                self.demanded_buildings,
                self.adjacency_matrix,
            ),
        )
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def _calc_links(
    destination_matrix: pd.DataFrame,
    services: gpd.GeoDataFrame,
    buildings: gpd.GeoDataFrame,
    distance_matrix: pd.DataFrame,
):
    """Build building-to-service distribution links as a line GeoDataFrame.

    Each output row carries 'building_index', 'service_index', the routed
    'demand', the matrix 'distance' (rounded to 2 decimals) and a LineString
    between representative points of the two features.  Returns an empty
    GeoDataFrame (after a warning) when no demand was distributed.
    """
    buildings_ = buildings.copy()
    services_ = services.copy()
    # Representative points give a point-like anchor even for polygon features.
    buildings_.geometry = buildings_.representative_point()
    services_.geometry = services_.representative_point()

    def subfunc(loc):
        # One service row -> list of {building, demand, service} link records.
        try:
            return [
                {
                    "building_index": int(k),
                    "demand": int(v),
                    "service_index": int(loc.name),
                }
                for k, v in loc.to_dict().items()
            ]
        except (TypeError, ValueError):
            # Narrowed from a bare `except:`: only int() conversion failures are
            # expected here; anything else should surface.
            # `np.nan` (not the `np.NaN` alias removed in NumPy 2.0).
            return np.nan

    def subfunc_geom(loc):
        # Straight segment from the building's point to the service's point.
        return LineString(
            (
                buildings_.geometry[loc["building_index"]],
                services_.geometry[loc["service_index"]],
            )
        )

    flat_matrix = destination_matrix.transpose().apply(lambda x: subfunc(x[x > 0]), result_type="reduce")

    distribution_links = gpd.GeoDataFrame(data=[item for sublist in list(flat_matrix) for item in sublist])
    if distribution_links.empty:
        logger.warning(
            "Unable to create distribution links - no demand could be matched with service locations. "
            "This is likely because either: "
            "1) The demand column in buildings contains zero values, or "
            "2) The capacity column in services contains zero values, or "
            "3) There are no service locations within the maximum allowed distance"
        )
        return distribution_links
    distribution_links["distance"] = distribution_links.apply(
        lambda x: distance_matrix.loc[x["service_index"]][x["building_index"]],
        axis=1,
        result_type="reduce",
    )

    # Only rows whose endpoints still exist in both frames get a geometry.
    sel = distribution_links["building_index"].isin(buildings_.index.values) & distribution_links["service_index"].isin(
        services_.index.values
    )
    sel = distribution_links.loc[sel[sel].index.values]
    distribution_links = distribution_links.set_geometry(sel.apply(subfunc_geom, axis=1)).set_crs(buildings_.crs)
    distribution_links["distance"] = distribution_links["distance"].astype(float).round(2)
    return distribution_links
|
|
270
|
+
|
|
271
|
+
|
|
272
|
+
def _additional_options(
    buildings,
    services,
    matrix,
    destination_matrix,
    normative_distance,
):
    """Annotate buildings and services in place with post-distribution statistics.

    Adds per-building avg/min distance, supplied demand split by whether the
    serving facility is within `normative_distance`, and a provision value;
    adds per-service carried capacity (within/without) and service load.
    `matrix` is indexed services x buildings, matching `destination_matrix`.
    """
    # Accumulators, filled service-by-service in the loop below.
    buildings["avg_dist"] = 0
    buildings["supplied_demands_within"] = 0
    buildings["supplied_demands_without"] = 0
    services["carried_capacity_within"] = 0
    services["carried_capacity_without"] = 0
    for _, loc in destination_matrix.iterrows():
        # loc: flows from one service (loc.name) to every building.
        distances_all = matrix.loc[loc.name]
        distances = distances_all[distances_all <= normative_distance]
        s = matrix.loc[loc.name] <= normative_distance
        within = loc[s]  # flows served within the normative distance
        without = loc[~s]  # flows served beyond it
        within = within[within > 0]
        without = without[without > 0]
        # Distance-weighted flow sum; divided by served demand later for the mean.
        buildings["avg_dist"] = (
            buildings["avg_dist"]
            .add(distances.multiply(within, fill_value=0), fill_value=0)
            .add(distances_all.multiply(without, fill_value=0), fill_value=0)
        )
        buildings["demand_left"] = buildings["demand_left"].sub(within.add(without, fill_value=0), fill_value=0)
        buildings["supplied_demands_within"] = buildings["supplied_demands_within"].add(within, fill_value=0)
        buildings["supplied_demands_without"] = buildings["supplied_demands_without"].add(without, fill_value=0)

        services.at[loc.name, "capacity_left"] = (
            services.at[loc.name, "capacity_left"] - within.add(without, fill_value=0).sum()
        )
        services.at[loc.name, "carried_capacity_within"] = (
            services.at[loc.name, "carried_capacity_within"] + within.sum()
        )
        services.at[loc.name, "carried_capacity_without"] = (
            services.at[loc.name, "carried_capacity_without"] + without.sum()
        )
    # Unreachable buildings (all-inf column) get a null min_dist instead of inf.
    buildings["min_dist"] = matrix.min(axis=0).replace(np.inf, None)
    # Turn the weighted sum into a mean over the demand actually served.
    buildings["avg_dist"] = (buildings["avg_dist"] / (buildings["demand"] - buildings["demand_left"])).astype(
        np.float32
    )
    # Buildings with nothing served have no meaningful average distance.
    buildings["avg_dist"] = buildings.apply(
        lambda x: np.nan if (x["demand"] == x["demand_left"]) else round(x["avg_dist"], 2), axis=1
    )
    buildings["provision_value"] = (buildings["supplied_demands_within"] / buildings["demand"]).astype(float).round(2)
    services["service_load"] = (services["capacity"] - services["capacity_left"]).astype(np.uint16)
    # Compact unsigned dtypes: counts are small non-negative integers.
    buildings["supplied_demands_within"] = buildings["supplied_demands_within"].astype(np.uint16)
    buildings["supplied_demands_without"] = buildings["supplied_demands_without"].astype(np.uint16)
    services["carried_capacity_within"] = services["carried_capacity_within"].astype(np.uint16)
    services["carried_capacity_without"] = services["carried_capacity_without"].astype(np.uint16)
    logger.debug("Done adding additional options")
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
from .graph_utils import gdf_to_graph, graph_to_gdf
|
|
@@ -0,0 +1,173 @@
|
|
|
1
|
+
import math
|
|
2
|
+
|
|
3
|
+
import geopandas as gpd
|
|
4
|
+
from shapely import LineString, MultiPolygon, Point, Polygon
|
|
5
|
+
from shapely.ops import polygonize, unary_union
|
|
6
|
+
|
|
7
|
+
from objectnat import config
|
|
8
|
+
|
|
9
|
+
logger = config.logger
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
def polygons_to_multilinestring(geom: Polygon | MultiPolygon):
    """Convert a (Multi)Polygon into a MultiLineString of all its boundary rings.

    Both the exterior shell and every interior ring contribute one LineString.
    """
    # pylint: disable-next=redefined-outer-name,reimported,import-outside-toplevel
    from shapely import LineString, MultiLineString, MultiPolygon

    def boundary_rings(polygon: Polygon):
        # Exterior first, then any holes.
        rings = [LineString(polygon.exterior)]
        rings.extend(LineString(ring) for ring in polygon.interiors)
        return rings

    if geom.geom_type == "Polygon":
        return MultiLineString(boundary_rings(geom))
    collected = []
    for part in geom.geoms:
        collected.extend(boundary_rings(part))
    return MultiLineString(collected)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
def explode_linestring(geometry: LineString) -> list[LineString]:
    """Return every segment of a linestring as a separate LineString."""
    vertices = geometry.coords
    # Pair each vertex with its successor to form the individual segments.
    return [LineString(segment) for segment in zip(vertices, vertices[1:])]
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def point_side_of_line(line: LineString, point: Point) -> int:
    """Return 1 if the point is on the left-hand side of the line, -1 otherwise.

    Side is judged relative to the direction from the line's first to its last
    coordinate, via the sign of the 2D cross product.
    """
    start_x, start_y = line.coords[0]
    end_x, end_y = line.coords[-1]
    px, py = point.coords[0]
    cross = (end_x - start_x) * (py - start_y) - (end_y - start_y) * (px - start_x)
    return 1 if cross > 0 else -1
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def get_point_from_a_thorough_b(a: Point, b: Point, dist):
    """Return the point at distance `dist` from `a` along the ray from `a` through `b`."""
    heading = math.atan2(b.y - a.y, b.x - a.x)
    return Point(
        a.x + dist * math.cos(heading),
        a.y + dist * math.sin(heading),
    )
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def gdf_to_circle_zones_from_point(
    gdf: gpd.GeoDataFrame, point_from: Point, zone_radius, resolution=4, explode_multigeom=True
) -> gpd.GeoDataFrame:
    """Slice `gdf` into circular-sector zones radiating from `point_from`.

    n_segments = 4*resolution, e.g. if resolution = 4 that means there will be 16 segments
    """
    crs = gdf.crs
    # Approximate circle around the origin; resolution controls segment count.
    buffer = point_from.buffer(zone_radius, resolution=resolution)
    # Clip the input to the circle and dissolve into one geometry.
    gdf_unary = gdf.clip(buffer, keep_geom_type=True).union_all()
    # All boundary rings of the clipped geometry, merged into one multilinestring.
    gdf_geometry = (
        gpd.GeoDataFrame(geometry=[gdf_unary], crs=crs)
        .explode(index_parts=True)
        .geometry.apply(polygons_to_multilinestring)
        .union_all()
    )
    # Spokes from each circle vertex to the center; together with the boundary
    # rings they polygonize into pie-slice zones.
    zones_lines = [LineString([Point(coords1), Point(point_from)]) for coords1 in buffer.exterior.coords[:-1]]
    if explode_multigeom:
        return (
            gpd.GeoDataFrame(geometry=list(polygonize(unary_union([gdf_geometry] + zones_lines))), crs=crs)
            .clip(gdf_unary, keep_geom_type=True)
            .explode(index_parts=False)
        )
    return gpd.GeoDataFrame(geometry=list(polygonize(unary_union([gdf_geometry] + zones_lines))), crs=crs).clip(
        gdf_unary, keep_geom_type=True
    )
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def remove_inner_geom(polygon: Polygon | MultiPolygon):
    """Strip interior rings (holes), keeping only the exterior shell(s).

    Non-polygonal input yields an empty Polygon.
    """
    if isinstance(polygon, Polygon):
        return Polygon(polygon.exterior.coords)
    if isinstance(polygon, MultiPolygon):
        shells = [Polygon(part.exterior.coords) for part in polygon.geoms]
        return MultiPolygon(shells)
    return Polygon()
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
def combine_geometry(gdf: gpd.GeoDataFrame) -> gpd.GeoDataFrame:
    """
    Combine geometry of intersecting layers into a single GeoDataFrame.

    Parameters
    ----------
    gdf: gpd.GeoDataFrame
        A GeoPandas GeoDataFrame

    Returns
    -------
    gpd.GeoDataFrame
        The combined GeoDataFrame with aggregated in lists columns.

    Examples
    --------
    >>> gdf = gpd.read_file('path_to_your_file.geojson')
    >>> result = combine_geometry(gdf)
    """

    crs = gdf.crs

    # Polygonize all boundary rings: every atomic face of the overlay becomes
    # one enclosure polygon.
    enclosures = gpd.GeoDataFrame(
        geometry=list(polygonize(gdf["geometry"].apply(polygons_to_multilinestring).union_all())), crs=crs
    )
    # A representative point per enclosure is joined back to the source features
    # to find which original rows cover that face.
    enclosures_points = enclosures.copy()
    enclosures_points.geometry = enclosures.representative_point()
    joined = gpd.sjoin(enclosures_points, gdf, how="inner", predicate="within").reset_index()
    cols = joined.columns.tolist()
    cols.remove("geometry")
    # Collect every attribute of the covering source rows into a list per face.
    joined = joined.groupby("index").agg({column: list for column in cols})
    # NOTE(review): assigns a GeoDataFrame to one column — presumably pandas
    # index-aligns and takes its single 'geometry' column; confirm on the
    # pandas versions in use.
    joined["geometry"] = enclosures
    joined = gpd.GeoDataFrame(joined, geometry="geometry", crs=crs)
    return joined
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def distribute_points_on_linestrings(lines: gpd.GeoDataFrame, radius, lloyd_relax_n=2) -> gpd.GeoDataFrame:
    """Distribute roughly equidistant points along linestring geometries.

    Args:
        lines: Input geometries; multi-parts are exploded and non-LineStrings dropped.
        radius: Target spacing between neighbouring points.
        lloyd_relax_n: Number of Lloyd relaxation passes used to even out spacing.

    Returns:
        gpd.GeoDataFrame: Point geometries in the input's original CRS.
    """
    lines = lines.copy()
    lines = lines.explode(ignore_index=True)
    lines = lines[lines.geom_type == "LineString"]
    original_crs = lines.crs
    # Work in a metric CRS so `radius` is interpreted in meters.
    lines = lines.to_crs(crs=lines.estimate_utm_crs())
    lines = lines.reset_index(drop=True)
    lines = lines[["geometry"]]
    # Slight over-spacing; relaxation settles points toward the target spacing.
    radius = radius * 1.1
    segmentized = lines.geometry.apply(lambda x: x.simplify(radius).segmentize(radius))
    points = [Point(pt) for line in segmentized for pt in line.coords]

    points = gpd.GeoDataFrame(geometry=points, crs=lines.crs)
    # Keep the source line geometry available as a plain column for snapping.
    lines["lines"] = lines.geometry
    geom_concave = lines.buffer(5, resolution=1).union_all()

    # `_` instead of an unused loop variable: each pass is a Lloyd relaxation step.
    for _ in range(lloyd_relax_n):
        # Move each point to the centroid of its (clipped) Voronoi cell...
        points.geometry = points.voronoi_polygons().clip(geom_concave).centroid
        points = points.sjoin_nearest(lines, how="left")
        points = points[~points.index.duplicated(keep="first")]
        # ...then project it back onto its nearest source line.
        points["geometry"] = points["lines"].interpolate(points["lines"].project(points.geometry))
        points.drop(columns=["lines", "index_right"], inplace=True)

    return points.dropna().to_crs(original_crs)
|
|
157
|
+
|
|
158
|
+
|
|
159
|
+
def distribute_points_on_polygons(
    polygons: gpd.GeoDataFrame, radius, only_exterior=True, lloyd_relax_n=2
) -> gpd.GeoDataFrame:
    """Distribute evenly spaced points along polygon boundaries.

    With `only_exterior` True only outer shells are used; otherwise interior
    rings are included too.  Placement is delegated to
    `distribute_points_on_linestrings`.
    """
    source = polygons.copy()
    source = source.explode(ignore_index=True)
    source = source[source.geom_type == "Polygon"]

    if only_exterior:
        source.geometry = source.geometry.apply(lambda poly: LineString(poly.exterior))
    else:
        boundary_lines = list(source.geometry.apply(polygons_to_multilinestring))
        source = gpd.GeoDataFrame(geometry=boundary_lines, crs=source.crs)

    return distribute_points_on_linestrings(source, radius, lloyd_relax_n=lloyd_relax_n)
|