ObjectNat objectnat-0.2.6-py3-none-any.whl → objectnat-1.0.0-py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Files changed (36)
  1. objectnat/_api.py +6 -8
  2. objectnat/_config.py +0 -24
  3. objectnat/_version.py +1 -1
  4. objectnat/methods/coverage_zones/__init__.py +2 -0
  5. objectnat/methods/coverage_zones/graph_coverage.py +118 -0
  6. objectnat/methods/coverage_zones/radius_voronoi.py +45 -0
  7. objectnat/methods/isochrones/__init__.py +1 -0
  8. objectnat/methods/isochrones/isochrone_utils.py +130 -0
  9. objectnat/methods/isochrones/isochrones.py +325 -0
  10. objectnat/methods/noise/__init__.py +3 -0
  11. objectnat/methods/noise/noise_exceptions.py +14 -0
  12. objectnat/methods/noise/noise_init_data.py +10 -0
  13. objectnat/methods/noise/noise_reduce.py +155 -0
  14. objectnat/methods/noise/noise_sim.py +423 -0
  15. objectnat/methods/point_clustering/__init__.py +1 -0
  16. objectnat/methods/{cluster_points_in_polygons.py → point_clustering/cluster_points_in_polygons.py} +22 -28
  17. objectnat/methods/provision/__init__.py +1 -0
  18. objectnat/methods/provision/provision.py +10 -7
  19. objectnat/methods/provision/provision_exceptions.py +4 -4
  20. objectnat/methods/provision/provision_model.py +21 -20
  21. objectnat/methods/utils/__init__.py +0 -0
  22. objectnat/methods/utils/geom_utils.py +130 -0
  23. objectnat/methods/utils/graph_utils.py +127 -0
  24. objectnat/methods/utils/math_utils.py +32 -0
  25. objectnat/methods/visibility/__init__.py +6 -0
  26. objectnat/methods/{visibility_analysis.py → visibility/visibility_analysis.py} +222 -243
  27. objectnat-1.0.0.dist-info/METADATA +143 -0
  28. objectnat-1.0.0.dist-info/RECORD +32 -0
  29. objectnat/methods/balanced_buildings.py +0 -69
  30. objectnat/methods/coverage_zones.py +0 -90
  31. objectnat/methods/isochrones.py +0 -143
  32. objectnat/methods/living_buildings_osm.py +0 -168
  33. objectnat-0.2.6.dist-info/METADATA +0 -113
  34. objectnat-0.2.6.dist-info/RECORD +0 -19
  35. {objectnat-0.2.6.dist-info → objectnat-1.0.0.dist-info}/LICENSE.txt +0 -0
  36. {objectnat-0.2.6.dist-info → objectnat-1.0.0.dist-info}/WHEEL +0 -0

objectnat/methods/noise/noise_sim.py (new file)
@@ -0,0 +1,423 @@
+ import concurrent.futures
+ import math
+ import multiprocessing
+ import time
+
+ import geopandas as gpd
+ import pandas as pd
+ from shapely import GEOSException
+ from shapely.geometry import GeometryCollection, MultiPolygon, Point, Polygon
+ from shapely.ops import polygonize, unary_union
+ from tqdm import tqdm
+
+ from objectnat import config
+ from objectnat.methods.noise.noise_exceptions import InvalidStepError
+ from objectnat.methods.noise.noise_reduce import dist_to_target_db, green_noise_reduce_db
+ from objectnat.methods.utils.geom_utils import (
+     gdf_to_circle_zones_from_point,
+     get_point_from_a_thorough_b,
+     polygons_to_multilinestring,
+ )
+ from objectnat.methods.visibility.visibility_analysis import get_visibility_accurate
+
+ logger = config.logger
+
+
+ def simulate_noise(
+     source_points: gpd.GeoDataFrame, obstacles: gpd.GeoDataFrame, source_noise_db, geometric_mean_freq_hz, **kwargs
+ ):
+     """
+     Simulates noise propagation from a set of source points considering obstacles, trees, and environmental factors.
+
+     Args:
+         source_points (gpd.GeoDataFrame): A GeoDataFrame containing one or more points representing the noise sources.
+             A separate simulation will be run for each point.
+         obstacles (gpd.GeoDataFrame): A GeoDataFrame representing obstacles in the environment. If a column with
+             sound absorption coefficients is present, its name should be provided in the `absorb_ratio_column` argument.
+             Missing values will be filled with the `standart_absorb_ratio`.
+         source_noise_db (float): The noise level of the point source in decibels (dB). Decibels are logarithmic units
+             used to measure sound intensity. A value of 20 dB represents a barely audible whisper, while 140 dB
+             is comparable to the noise of jet engines.
+         geometric_mean_freq_hz (float): The geometric mean frequency of the sound (in Hz). This parameter influences
+             the sound wave's propagation and scattering in the presence of trees. Lower frequencies travel longer
+             distances than higher frequencies. It's recommended to use values between 63 Hz and 8000 Hz; values outside
+             this range will be clamped to the nearest boundary for the sound absorption coefficient calculation.
+
+     Optional kwargs:
+         absorb_ratio_column (str, optional): The name of the column in the `obstacles` GeoDataFrame that contains the
+             sound absorption coefficients for each obstacle. Default is None. If not specified, all obstacles will have
+             the `standart_absorb_ratio`.
+         standart_absorb_ratio (float, optional): The default sound absorption coefficient to use for obstacles without
+             specified values in the `absorb_ratio_column`. Default is 0.05, which is a typical value for concrete walls.
+         trees (gpd.GeoDataFrame, optional): A GeoDataFrame containing trees or dense vegetation along the sound wave's
+             path. Trees will scatter and absorb sound waves.
+         tree_resolution (int, optional): A resolution parameter for simulating tree interactions with sound waves.
+             Recommended values are between 2 and 16, with higher values providing more accurate simulation results.
+         air_temperature (float, optional): The air temperature in degrees Celsius. The recommended range is from 0 to
+             30 degrees Celsius, as temperatures outside this range will be clipped. Temperature affects the sound
+             propagation in the air.
+         target_noise_db (float, optional): The target noise level (in dB) for the simulation. Default is 40 dB.
+             Lower values may not be relevant for further analysis, as they are near the threshold of human hearing.
+         db_sim_step (float, optional): The step size in decibels for the noise simulation. Default is 1. For more
+             precise analysis, this can be adjusted. If the difference between `source_noise_db` and `target_noise_db`
+             is not divisible by the step size, the function will raise an error.
+         reflection_n (int, optional): The maximum number of reflections (bounces) to simulate for each sound wave.
+             Recommended values are between 1 and 3. Larger values will result in longer simulation times.
+         dead_area_r (float, optional): A debugging parameter that defines the radius of the "dead zone" for reflections.
+             Points within this area will not generate reflections. This is useful to prevent the algorithm from getting
+             stuck in corners or along building walls.
+
+     Returns:
+         gpd.GeoDataFrame: A GeoDataFrame containing the noise simulation results, including noise levels and geometries
+             of the affected areas. Each point's simulation results will be merged into a single GeoDataFrame.
+     """
+     # Obstacles args
+     absorb_ratio_column = kwargs.get("absorb_ratio_column", None)
+     standart_absorb_ratio = kwargs.get("standart_absorb_ratio", 0.05)
+
+     # Trees args
+     trees = kwargs.get("trees", None)
+     tree_res = kwargs.get("tree_resolution", 4)
+
+     # Simulation conditions
+     air_temperature = kwargs.get("air_temperature", 20)
+     target_noise_db = kwargs.get("target_noise_db", 40)
+
+     # Simulation params
+     db_sim_step = kwargs.get("db_sim_step", 1)
+     reflection_n = kwargs.get("reflection_n", 3)
+     dead_area_r = kwargs.get("dead_area_r", 5)
+
+     original_crs = source_points.crs
+
+     div_ = (source_noise_db - target_noise_db) % db_sim_step
+     if div_ != 0:
+         raise InvalidStepError(source_noise_db, target_noise_db, db_sim_step, div_)
+     # Choosing crs and simplifying obs if any
+     source_points = source_points.copy()
+     if len(obstacles) > 0:
+         obstacles = obstacles.copy()
+         obstacles.geometry = obstacles.geometry.simplify(tolerance=1)
+         local_crs = obstacles.estimate_utm_crs()
+         obstacles.to_crs(local_crs, inplace=True)
+         source_points.to_crs(local_crs, inplace=True)
+     else:
+         local_crs = source_points.estimate_utm_crs()
+         source_points.to_crs(local_crs, inplace=True)
+     source_points.reset_index(drop=True)
+     source_points.geometry = source_points.centroid
+
+     # Simplifying trees
+     if trees is not None:
+         trees = trees.copy()
+         trees.to_crs(local_crs, inplace=True)
+         trees.geometry = trees.geometry.simplify(tolerance=1)
+     else:
+         trees = gpd.GeoDataFrame()
+
+     if absorb_ratio_column is None:
+         obstacles["absorb_ratio"] = standart_absorb_ratio
+     else:
+         obstacles["absorb_ratio"] = obstacles[absorb_ratio_column]
+         obstacles["absorb_ratio"] = obstacles["absorb_ratio"].fillna(standart_absorb_ratio)
+     obstacles = obstacles[["absorb_ratio", "geometry"]]
+
+     logger.info(
+         dist_to_target_db(
+             source_noise_db,
+             target_noise_db,
+             geometric_mean_freq_hz,
+             air_temperature,
+             return_desc=True,
+             check_temp_freq=True,
+         )
+     )
+     # calculating layer dist and db values
+     dist_db = [(0, source_noise_db)]
+     cur_db = source_noise_db - db_sim_step
+     while cur_db != target_noise_db - db_sim_step:
+         max_dist = dist_to_target_db(source_noise_db, cur_db, geometric_mean_freq_hz, air_temperature)
+         dist_db.append((max_dist, cur_db))
+         cur_db = cur_db - db_sim_step
+
+     # creating initial task and simulating for each point
+     all_p_res = []
+     for ind, row in source_points.iterrows():
+         logger.info(f"Started simulation for point {ind+1} / {len(source_points)}")
+         source_point = row.geometry
+         task_queue = multiprocessing.Queue()
+         args = (source_point, obstacles, trees, 0, 0, dist_db)
+         kwargs = {
+             "reflection_n": reflection_n,
+             "geometric_mean_freq_hz": geometric_mean_freq_hz,
+             "tree_res": tree_res,
+             "min_db": target_noise_db,
+         }
+         task_queue.put((_noise_from_point_task, args, kwargs))
+
+         noise_gdf = _parallel_split_queue(
+             task_queue, dead_area=source_point.buffer(dead_area_r, resolution=2), dead_area_r=dead_area_r
+         )
+
+         noise_gdf = gpd.GeoDataFrame(pd.concat(noise_gdf, ignore_index=True), crs=local_crs)
+         polygons = gpd.GeoDataFrame(
+             geometry=list(polygonize(noise_gdf.geometry.apply(polygons_to_multilinestring).union_all())), crs=local_crs
+         )
+         polygons_points = polygons.copy()
+         polygons_points.geometry = polygons.representative_point()
+         sim_result = polygons_points.sjoin(noise_gdf, predicate="within").reset_index()
+         sim_result = sim_result.groupby("index").agg({"noise_level": "max"})
+         sim_result["geometry"] = polygons
+         sim_result = (
+             gpd.GeoDataFrame(sim_result, geometry="geometry", crs=local_crs).dissolve(by="noise_level").reset_index()
+         )
+         sim_result["source_point_ind"] = ind
+         all_p_res.append(sim_result)
+
+     return gpd.GeoDataFrame(pd.concat(all_p_res, ignore_index=True), crs=local_crs).to_crs(original_crs)
+
+
+ def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple] | None]:  # pragma: no cover
+     # Unpacking task
+     point_from, obstacles, trees_orig, passed_dist, deep, dist_db = task
+
+     def donuts_dist_values(dist_db, passed_dist, max_view_dist):
+         new_dist_db = dist_db + [(passed_dist, None), (max_view_dist + passed_dist, None)]
+         new_dist_db = sorted(new_dist_db, key=lambda x: x[0])
+         start = None
+         end = None
+         for i, (dist, db) in enumerate(new_dist_db[:-1]):
+             if db is None:
+                 if start is None:
+                     new_dist_db[i] = (dist, new_dist_db[i - 1][1])
+                     start = i
+                 else:
+                     new_dist_db[i] = (dist, new_dist_db[i + 1][1])
+                     end = i + 1
+                     break
+         return [(dist - passed_dist, db) for dist, db in new_dist_db[start:end]]
+
+     max_dist = max(dist_db, key=lambda x: x[0])[0]
+     min_db = kwargs.get("min_db")
+     reflection_n = kwargs.get("reflection_n")
+     geometric_mean_freq_hz = kwargs.get("geometric_mean_freq_hz")
+     tree_res = kwargs.get("tree_res")
+     local_crs = obstacles.crs
+     dist = round(max_dist - passed_dist, 1)
+
+     obstacles = obstacles[obstacles.intersects(point_from.buffer(dist, resolution=8))]
+
+     if len(obstacles) == 0:
+         obstacles_union = Polygon()
+     else:
+         obstacles_union = obstacles.union_all()
+
+     vis_poly, max_view_dist = get_visibility_accurate(point_from, obstacles, dist, return_max_view_dist=True)
+
+     donuts_dist_values = donuts_dist_values(dist_db, passed_dist, max_view_dist)
+
+     allowed_geom_types = ["MultiPolygon", "Polygon"]
+
+     # Trees noise reduce
+     reduce_polygons = []
+     if len(trees_orig) > 0:
+         trees_orig = trees_orig[trees_orig.intersects(point_from.buffer(dist, resolution=8))]
+         if len(trees_orig) > 0:
+             try:
+                 trees = gdf_to_circle_zones_from_point(trees_orig, point_from, dist, resolution=tree_res)
+                 trees = trees.clip(vis_poly, keep_geom_type=True).explode(index_parts=False)
+             except TypeError:
+                 trees = gpd.GeoDataFrame()
+
+             for _, row in trees.iterrows():
+                 tree_geom = row.geometry
+                 if tree_geom.area < 1:
+                     continue
+                 dist_to_centroid = tree_geom.centroid.distance(point_from)
+
+                 points_with_angle = [
+                     (
+                         Point(pt),
+                         round(abs(math.atan2(pt[1] - point_from.y, pt[0] - point_from.x)), 5),
+                         Point(pt).distance(point_from),
+                     )
+                     for pt in tree_geom.exterior.coords
+                 ]
+
+                 p0_1 = max(points_with_angle, key=lambda x: (x[1], x[2]))
+                 p0_2 = min(points_with_angle, key=lambda x: (x[1], -x[2]))
+                 delta_angle = 2 * math.pi + p0_1[1] - p0_2[1]
+                 if delta_angle > math.pi:
+                     delta_angle = 2 * math.pi - delta_angle
+
+                 a = math.sqrt((dist**2) * (1 + (math.tan(delta_angle / 2) ** 2)))
+                 p1 = get_point_from_a_thorough_b(point_from, p0_1[0], a)
+                 p2 = get_point_from_a_thorough_b(point_from, p0_2[0], a)
+                 red_polygon = unary_union([Polygon([p0_1[0], p1, p2, p0_2[0]]).intersection(vis_poly), tree_geom])
+                 if isinstance(red_polygon, GeometryCollection):
+                     red_polygon = max(
+                         ((poly, poly.area) for poly in red_polygon.geoms if isinstance(poly, (MultiPolygon, Polygon))),
+                         key=lambda x: x[1],
+                     )[0]
+                 if isinstance(red_polygon, MultiPolygon):
+                     red_polygon = red_polygon.buffer(0.1, resolution=1).buffer(-0.1, resolution=1)
+                 if isinstance(red_polygon, MultiPolygon):
+                     red_polygon = max(((poly, poly.area) for poly in red_polygon.geoms), key=lambda x: x[1])[0]
+                 if isinstance(red_polygon, Polygon) and not red_polygon.is_empty:
+                     red_polygon = Polygon(red_polygon.exterior)
+                     r_tree_new = round(
+                         tree_geom.area / (2 * dist_to_centroid * math.sin(abs(p0_1[1] - p0_2[1]) / 2)), 2
+                     )
+
+                     noise_reduce = int(round(green_noise_reduce_db(geometric_mean_freq_hz, r_tree_new)))
+                     reduce_polygons.append((red_polygon, noise_reduce))
+
+     # Generating donuts - db values
+     donuts = []
+     don_values = []
+     to_cut_off = point_from
+     for i in range(len(donuts_dist_values[:-1])):
+         cur_buffer = point_from.buffer(donuts_dist_values[i + 1][0])
+         donuts.append(cur_buffer.difference(to_cut_off))
+         don_values.append(donuts_dist_values[i][1])
+         to_cut_off = cur_buffer
+
+     noise_from_point = (
+         gpd.GeoDataFrame(geometry=donuts, data={"noise_level": don_values}, crs=local_crs)
+         .clip(vis_poly, keep_geom_type=True)
+         .explode(ignore_index=True)
+     )
+
+     # intersect noise poly with noise reduce
+     if len(reduce_polygons) > 0:
+         reduce_polygons = gpd.GeoDataFrame(
+             reduce_polygons, columns=["geometry", "reduce"], geometry="geometry", crs=local_crs
+         )
+
+         all_lines = (
+             reduce_polygons.geometry.apply(polygons_to_multilinestring).tolist()
+             + noise_from_point.geometry.apply(polygons_to_multilinestring).tolist()
+         )
+
+         cutted_polygons = gpd.GeoDataFrame(geometry=list(polygonize(unary_union(all_lines))), crs=local_crs)
+
+         cutted_polygons_points = cutted_polygons.copy()
+         cutted_polygons_points.geometry = cutted_polygons.representative_point()
+
+         joined = (
+             cutted_polygons_points.sjoin(noise_from_point, predicate="within", how="left")
+             .drop(columns="index_right")
+             .sjoin(reduce_polygons, predicate="within", how="left")
+             .drop(columns="index_right")
+         )
+         joined.geometry = cutted_polygons.geometry
+         joined = (
+             joined.reset_index().groupby("index").agg({"geometry": "first", "reduce": "sum", "noise_level": "first"})
+         )
+         joined = gpd.GeoDataFrame(joined, geometry="geometry", crs=local_crs)
+         noise_from_point = joined.copy()
+
+         noise_from_point = noise_from_point.dropna(subset=["noise_level"])
+
+         noise_from_point["reduce"] = noise_from_point["reduce"].fillna(0)
+         noise_from_point["noise_level"] = noise_from_point["noise_level"] - noise_from_point["reduce"]
+     else:
+         noise_from_point["reduce"] = 0
+     noise_from_point = noise_from_point[noise_from_point.geom_type.isin(allowed_geom_types)]
+     noise_from_point = noise_from_point[noise_from_point["noise_level"] >= min_db]
+     if deep == reflection_n:
+         return noise_from_point, None
+
+     if isinstance(vis_poly, Polygon):
+         vis_poly_points = [Point(coords) for coords in vis_poly.exterior.coords]
+     else:
+         vis_poly_points = [Point(coords) for geom in vis_poly.geoms for coords in geom.exterior.coords]
+     vis_poly_points = gpd.GeoDataFrame(geometry=vis_poly_points, crs=local_crs)
+
+     # Generating reflection points
+     vis_poly_points["point"] = vis_poly_points["geometry"].copy()
+     vis_poly_points.geometry = vis_poly_points.geometry.buffer(1, resolution=1)
+     vis_poly_points = vis_poly_points.sjoin(obstacles, predicate="intersects").drop(columns="index_right")
+     vis_poly_points = vis_poly_points[~vis_poly_points.index.duplicated(keep="first")]
+     vis_poly_points.dropna(subset=["absorb_ratio"], inplace=True)
+     nearby_poly = point_from.buffer(1.1, resolution=2)
+     try:
+         vis_poly_points.geometry = (
+             vis_poly_points.difference(vis_poly).difference(obstacles_union).difference(nearby_poly)
+         )
+     except GEOSException:
+         return noise_from_point, None
+     vis_poly_points = vis_poly_points[~vis_poly_points.is_empty]
+     vis_poly_points = vis_poly_points[vis_poly_points.area >= 0.01]
+     vis_poly_points["geometry"] = vis_poly_points["point"]
+     vis_poly_points["dist"] = vis_poly_points.distance(point_from)
+     vis_poly_points = vis_poly_points[vis_poly_points["dist"] < max_dist - 5]
+     vis_poly_points = vis_poly_points.sjoin(noise_from_point, predicate="intersects", how="left")
+
+     if len(vis_poly_points) == 0:
+         return noise_from_point, None
+
+     new_obs = pd.concat([obstacles, gpd.GeoDataFrame(geometry=[vis_poly], crs=local_crs)], ignore_index=True)
+
+     # Creating new reflection tasks
+     new_tasks = []
+     for _, loc in vis_poly_points.iterrows():
+         if not isinstance(loc.geometry, Point):
+             continue
+         new_passed_dist = round(loc.dist + passed_dist, 2)
+         dist_last = max_dist - new_passed_dist
+         if dist_last > 1:
+             db_change = loc["reduce"]
+             dist_change = loc["absorb_ratio"] * dist_last
+             new_dist_db = [(dist - dist_change, db - db_change) for dist, db in dist_db]
+             task_obs = new_obs.copy()
+             task_obs.geometry = task_obs.difference(loc.geometry.buffer(1, resolution=1))
+             new_tasks.append(
+                 (
+                     _noise_from_point_task,
+                     (loc.geometry, task_obs, trees_orig, new_passed_dist, deep + 1, new_dist_db),
+                     kwargs,
+                 )
+             )
+
+     return noise_from_point, new_tasks
+
+
+ def _parallel_split_queue(task_queue: multiprocessing.Queue, dead_area: Polygon, dead_area_r: int):
+     results = []
+     total_tasks = task_queue.qsize()
+
+     with tqdm(total=total_tasks, desc="Simulating noise") as pbar:
+         with concurrent.futures.ProcessPoolExecutor() as executor:
+             future_to_task = {}
+             while True:
+                 while not task_queue.empty() and len(future_to_task) < executor._max_workers:
+                     func, task, kwargs = task_queue.get_nowait()
+                     future = executor.submit(func, task, **kwargs)
+                     future_to_task[future] = task
+
+                 done, _ = concurrent.futures.wait(future_to_task.keys(), return_when=concurrent.futures.FIRST_COMPLETED)
+
+                 for future in done:
+                     future_to_task.pop(future)
+                     result, new_tasks = future.result()
+                     if new_tasks:
+                         new_tasks_n = 0
+                         new_dead_area_points = [dead_area]
+                         for func, new_task, kwargs in new_tasks:
+                             if not dead_area.covers(new_task[0]):
+                                 new_tasks_n = new_tasks_n + 1
+                                 task_queue.put((func, new_task, kwargs))
+                                 new_dead_area_points.append(new_task[0].buffer(dead_area_r, resolution=2))
+
+                         dead_area = unary_union(new_dead_area_points)
+                         total_tasks += new_tasks_n
+                         pbar.total = total_tasks
+                         pbar.refresh()
+                     results.append(result)
+                     pbar.update(1)
+                 time.sleep(0.01)
+                 if not future_to_task and task_queue.empty():
+                     break
+
+     return results
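
For orientation, a minimal usage sketch of the new public entry point above. It is illustrative only: the file path is a placeholder, and the import targets the module added in this release (the reworked objectnat/_api.py may re-export it at package level as well).

    import geopandas as gpd
    from shapely.geometry import Point

    from objectnat.methods.noise.noise_sim import simulate_noise

    # One 90 dB source; WGS84 input is fine, since the function reprojects
    # everything to a local UTM CRS and returns results in the original CRS.
    source = gpd.GeoDataFrame(geometry=[Point(30.31, 59.94)], crs=4326)
    obstacles = gpd.read_file("buildings.geojson")  # placeholder path

    noise = simulate_noise(
        source,
        obstacles,
        source_noise_db=90,
        geometric_mean_freq_hz=2000,
        target_noise_db=40,  # (90 - 40) must divide evenly by db_sim_step, else InvalidStepError
        reflection_n=2,      # fewer reflections -> faster simulation
    )
    # Result: polygons dissolved by "noise_level", tagged with "source_point_ind".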

objectnat/methods/point_clustering/__init__.py (new file)
@@ -0,0 +1 @@
+ from .cluster_points_in_polygons import get_clusters_polygon

objectnat/methods/point_clustering/cluster_points_in_polygons.py (moved from objectnat/methods/cluster_points_in_polygons.py)
@@ -37,7 +37,7 @@ def get_clusters_polygon(
  ) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]:
      """
      Generate cluster polygons for given points based on a specified minimum distance and minimum points per cluster.
-     Optionally, calculate the relative ratio between types of services within the clusters.
+     Optionally, calculate the relative ratio between types of points within the clusters.

      Parameters
      ----------
@@ -56,22 +56,12 @@ def get_clusters_polygon(
      -------
      tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]
          A tuple containing the clustered polygons GeoDataFrame and the original points GeoDataFrame with cluster labels.
-
-     Examples
-     --------
-     >>> import geopandas as gpd
-     >>> from shapely.geometry import Point
-
-     >>> points = gpd.GeoDataFrame({
-     ...     'geometry': [Point(0, 0), Point(1, 1), Point(2, 2)],
-     ...     'service_code': [1, 1, 2]
-     ... }, crs=4326)
-
-     >>> clusters, services = get_clusters_polygon(points, min_dist=50, min_point=2)
      """
      if method not in ["DBSCAN", "HDBSCAN"]:
          raise ValueError("Method must be either 'DBSCAN' or 'HDBSCAN'")
-
+     original_crs = points.crs
+     local_crs = points.estimate_utm_crs()
+     points = points.to_crs(local_crs)
      services_select = _get_cluster(points, min_dist, min_point, method)

      if service_code_column not in points.columns:
@@ -80,43 +70,47 @@ def get_clusters_polygon(
          )
          points[service_code_column] = service_code_column

-     services_normal = services_select[services_select["cluster"] != -1]
-     services_outlier = services_select[services_select["cluster"] == -1]
+     points_normal = services_select[services_select["cluster"] != -1].copy()
+     points_outlier = services_select[services_select["cluster"] == -1].copy()

-     if len(services_normal) > 0:
-         cluster_service = services_normal.groupby("cluster", group_keys=True).apply(
+     if len(points_normal) > 0:
+         cluster_service = points_normal.groupby("cluster", group_keys=True).apply(
              _get_service_ratio, service_code_column=service_code_column
          )
          if isinstance(cluster_service, pd.Series):
              cluster_service = cluster_service.unstack(level=1, fill_value=0)

-         polygons_normal = services_normal.dissolve("cluster").concave_hull(ratio=0.7, allow_holes=False)
+         polygons_normal = points_normal.dissolve("cluster").concave_hull(ratio=0.1, allow_holes=True)
          df_clusters_normal = pd.concat([cluster_service, polygons_normal.rename("geometry")], axis=1)
          cluster_normal = df_clusters_normal.index.max()
+         points_normal["outlier"] = False
+         df_clusters_normal["outlier"] = False
      else:
          df_clusters_normal = None
          cluster_normal = 0

-     if len(services_outlier) > 0:
+     if len(points_outlier) > 0:
          clusters_outlier = cluster_normal + 1
-         new_clusters = list(range(clusters_outlier, clusters_outlier + len(services_outlier)))
-         services_outlier.loc[:, "cluster"] = new_clusters
+         new_clusters = list(range(clusters_outlier, clusters_outlier + len(points_outlier)))
+         points_outlier.loc[:, "cluster"] = new_clusters

-         cluster_service = services_outlier.groupby("cluster", group_keys=True).apply(
+         cluster_service = points_outlier.groupby("cluster", group_keys=True).apply(
              _get_service_ratio, service_code_column=service_code_column
          )
          if isinstance(cluster_service, pd.Series):
              cluster_service = cluster_service.unstack(level=1, fill_value=0)

-         df_clusters_outlier = cluster_service.join(services_outlier.set_index("cluster")["geometry"])
+         df_clusters_outlier = cluster_service.join(points_outlier.set_index("cluster")["geometry"])
+         points_outlier["outlier"] = True
+         df_clusters_outlier["outlier"] = True
      else:
-         services_outlier = None
+         points_outlier = None
          df_clusters_outlier = None

      df_clusters = pd.concat([df_clusters_normal, df_clusters_outlier]).fillna(0).set_geometry("geometry")
      df_clusters["geometry"] = df_clusters["geometry"].buffer(min_dist / 2)
-     df_clusters = df_clusters.rename(columns={"index": "cluster_id"})
+     df_clusters = df_clusters.reset_index().rename(columns={"index": "cluster"})

-     services = pd.concat([services_normal, services_outlier])
+     points = pd.concat([points_normal, points_outlier])

-     return df_clusters, services
+     return df_clusters.to_crs(original_crs), points.to_crs(original_crs)
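
The doctest removed from the docstring above predates the new CRS handling; a hedged sketch of calling the relocated function (synthetic points mirroring the old example, default clustering parameters):

    import geopandas as gpd
    from shapely.geometry import Point

    from objectnat.methods.point_clustering import get_clusters_polygon

    points = gpd.GeoDataFrame(
        {"service_code": [1, 1, 2]},
        geometry=[Point(30.3000, 59.9400), Point(30.3001, 59.9401), Point(30.3100, 59.9500)],
        crs=4326,
    )
    # min_dist is now effectively in metres: the function reprojects to a
    # local UTM CRS internally and returns both outputs in the original CRS.
    clusters, labeled_points = get_clusters_polygon(points, min_dist=50, min_point=2)
    print(clusters[["cluster", "outlier"]])  # the "outlier" flag is new in this release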

objectnat/methods/provision/__init__.py (new file)
@@ -0,0 +1 @@
+ from .provision import clip_provision, get_service_provision, recalculate_links

objectnat/methods/provision/provision.py
@@ -51,14 +51,17 @@ def clip_provision(
      buildings: gpd.GeoDataFrame, services: gpd.GeoDataFrame, links: gpd.GeoDataFrame, selection_zone: gpd.GeoDataFrame
  ) -> Tuple[gpd.GeoDataFrame, gpd.GeoDataFrame, gpd.GeoDataFrame]:

-     assert (
-         selection_zone.crs == buildings.crs == services.crs == links.crs
-     ), f"CRS mismatch: buildings_crs:{buildings.crs}, links_crs:{links.crs} , services_crs:{services.crs}, selection_zone_crs:{selection_zone.crs}"
+     assert selection_zone.crs == buildings.crs == services.crs == links.crs, (
+         f"CRS mismatch: buildings_crs:{buildings.crs}, "
+         f"links_crs:{links.crs} , "
+         f"services_crs:{services.crs}, "
+         f"selection_zone_crs:{selection_zone.crs}"
+     )
      buildings = buildings.copy()
      links = links.copy()
      services = services.copy()

-     s = buildings.intersects(selection_zone.unary_union)
+     s = buildings.intersects(selection_zone.union_all())
      buildings = buildings.loc[s[s].index]
      links = links[links["building_index"].isin(buildings.index.tolist())]
      services_to_keep = set(links["service_index"].tolist())
@@ -85,14 +88,14 @@ def recalculate_links(
      )
      free_demand["demand"] = free_demand["demand"].apply(sum)
      free_demand = free_demand.reindex(buildings.index, fill_value=0)
-     new_sum_time = (buildings["supplyed_demands_within"] + buildings["supplyed_demands_without"]) * buildings[
+     new_sum_time = (buildings["supplied_demands_within"] + buildings["supplied_demands_without"]) * buildings[
          "avg_dist"
      ] - free_demand["distance"]

      buildings["demand_left"] = buildings["demand_left"] + free_demand["demand"]
-     buildings["supplyed_demands_without"] = buildings["supplyed_demands_without"] - free_demand["demand"]
+     buildings["supplied_demands_without"] = buildings["supplied_demands_without"] - free_demand["demand"]
      buildings["avg_dist"] = new_sum_time / (
-         buildings["supplyed_demands_without"] + buildings["supplyed_demands_within"]
+         buildings["supplied_demands_without"] + buildings["supplied_demands_within"]
      )
      buildings["avg_dist"] = buildings.apply(
          lambda x: np.nan if (x["demand"] == x["demand_left"]) else round(x["avg_dist"], 2), axis=1
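
Only clip_provision's signature is fully visible in this hunk, so here is a minimal sketch of it alone. The paths are placeholders, the return order is assumed to mirror the argument order, and links must carry the "building_index" and "service_index" columns referenced above:

    import geopandas as gpd

    from objectnat.methods.provision import clip_provision

    # All four layers must share one CRS, otherwise the assert raises with
    # the per-layer CRS report built by the new multi-line f-string.
    zone = gpd.read_file("district.geojson")  # placeholder paths throughout
    buildings = gpd.read_file("provision_buildings.geojson").to_crs(zone.crs)
    services = gpd.read_file("provision_services.geojson").to_crs(zone.crs)
    links = gpd.read_file("provision_links.geojson").to_crs(zone.crs)

    clipped_buildings, clipped_services, clipped_links = clip_provision(
        buildings, services, links, selection_zone=zone
    )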

objectnat/methods/provision/provision_exceptions.py
@@ -7,7 +7,7 @@ class CapacityKeyError(KeyError):

      def __str__(self):
          if self.message:
-             return "CapacityKeyError, {0} ".format(self.message)
+             return f"CapacityKeyError, {self.message} "

          return (
              "Column 'capacity' was not found in provided 'services' GeoDataFrame. This attribute "
@@ -24,7 +24,7 @@ class CapacityValueError(ValueError):

      def __str__(self):
          if self.message:
-             return "CapacityValueError, {0} ".format(self.message)
+             return f"CapacityValueError, {self.message} "

          return "Column 'capacity' in 'services' GeoDataFrame has no valid value."

@@ -38,7 +38,7 @@ class DemandKeyError(KeyError):

      def __str__(self):
          if self.message:
-             return "DemandKeyError, {0} ".format(self.message)
+             return f"DemandKeyError, {self.message} "

          return (
              "The column 'demand' was not found in the provided 'demanded_buildings' GeoDataFrame. "
@@ -55,5 +55,5 @@ class DemandValueError(ValueError):

      def __str__(self):
          if self.message:
-             return "DemandValueError, {0} ".format(self.message)
+             return f"DemandValueError, {self.message} "
          return "Column 'demand' in 'demanded_buildings' GeoDataFrame has no valid value."
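
All four exception classes receive the same .format-to-f-string modernization. A standalone sketch of the resulting pattern, using the one class whose full default message is visible above (the constructor shape is an assumption inferred from the self.message attribute the diff reads):

    class DemandValueError(ValueError):
        # Sketch only: __init__ is not shown in the diff and is assumed here.
        def __init__(self, message=None):
            super().__init__(message)
            self.message = message

        def __str__(self):
            if self.message:
                return f"DemandValueError, {self.message} "
            return "Column 'demand' in 'demanded_buildings' GeoDataFrame has no valid value."

    # The f-string renders identically to the old .format call:
    print(str(DemandValueError("no rows with demand > 0")))
    # -> DemandValueError, no rows with demand > 0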