ObjectNat 1.1.0-py3-none-any.whl → 1.2.1-py3-none-any.whl

This diff shows the content of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release: this version of ObjectNat might be problematic.

Files changed (35):
  1. objectnat/__init__.py +9 -13
  2. objectnat/_api.py +14 -13
  3. objectnat/_config.py +47 -47
  4. objectnat/_version.py +1 -1
  5. objectnat/methods/coverage_zones/__init__.py +3 -3
  6. objectnat/methods/coverage_zones/graph_coverage.py +98 -108
  7. objectnat/methods/coverage_zones/radius_voronoi_coverage.py +37 -45
  8. objectnat/methods/coverage_zones/stepped_coverage.py +126 -142
  9. objectnat/methods/isochrones/__init__.py +1 -1
  10. objectnat/methods/isochrones/isochrone_utils.py +167 -167
  11. objectnat/methods/isochrones/isochrones.py +262 -299
  12. objectnat/methods/noise/__init__.py +3 -3
  13. objectnat/methods/noise/noise_init_data.py +10 -10
  14. objectnat/methods/noise/noise_reduce.py +155 -155
  15. objectnat/methods/noise/{noise_sim.py → noise_simulation.py} +452 -448
  16. objectnat/methods/noise/noise_simulation_simplified.py +209 -0
  17. objectnat/methods/point_clustering/__init__.py +1 -1
  18. objectnat/methods/point_clustering/cluster_points_in_polygons.py +115 -116
  19. objectnat/methods/provision/__init__.py +1 -1
  20. objectnat/methods/provision/provision.py +117 -110
  21. objectnat/methods/provision/provision_exceptions.py +59 -59
  22. objectnat/methods/provision/provision_model.py +337 -337
  23. objectnat/methods/utils/__init__.py +1 -0
  24. objectnat/methods/utils/geom_utils.py +173 -130
  25. objectnat/methods/utils/graph_utils.py +306 -206
  26. objectnat/methods/utils/math_utils.py +32 -32
  27. objectnat/methods/visibility/__init__.py +6 -6
  28. objectnat/methods/visibility/visibility_analysis.py +470 -511
  29. {objectnat-1.1.0.dist-info → objectnat-1.2.1.dist-info}/LICENSE.txt +28 -28
  30. objectnat-1.2.1.dist-info/METADATA +115 -0
  31. objectnat-1.2.1.dist-info/RECORD +33 -0
  32. objectnat/methods/noise/noise_exceptions.py +0 -14
  33. objectnat-1.1.0.dist-info/METADATA +0 -148
  34. objectnat-1.1.0.dist-info/RECORD +0 -33
  35. {objectnat-1.1.0.dist-info → objectnat-1.2.1.dist-info}/WHEEL +0 -0
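The bulk of this release is the noise-simulation rework: entry 15 renames noise_sim.py to noise_simulation.py and entry 16 adds noise_simulation_simplified.py. Before the full diff of the renamed module below, here is a minimal usage sketch of `simulate_noise` based on the docstring visible in that diff; the import path follows the new module location, while the sample source point, building footprints, and CRS are illustrative assumptions rather than package data.

```python
import geopandas as gpd
from shapely.geometry import Point, Polygon

# Import path per the renamed module in this diff; objectnat may also re-export it at package level.
from objectnat.methods.noise.noise_simulation import simulate_noise

# Illustrative inputs (assumed): one noise source and two building footprints in a metric CRS.
source = gpd.GeoDataFrame(geometry=[Point(350000, 6648000)], crs="EPSG:32636")
obstacles = gpd.GeoDataFrame(
    geometry=[
        Polygon([(350020, 6648000), (350040, 6648000), (350040, 6648020), (350020, 6648020)]),
        Polygon([(349960, 6648030), (349980, 6648030), (349980, 6648050), (349960, 6648050)]),
    ],
    crs="EPSG:32636",
)

result = simulate_noise(
    source_points=source,
    obstacles=obstacles,
    source_noise_db=90,            # used for every point unless a 'source_noise_db' column is present
    geometric_mean_freq_hz=2000,   # docstring recommends values between 63 and 8000 Hz
    target_noise_db=40,            # stop tracing once the level decays to 40 dB
    reflection_n=2,                # number of reflections to simulate
    use_parallel=True,             # new in 1.2.1: False falls back to a thread pool
)
print(result[["noise_level", "geometry"]].head())
```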
objectnat/methods/noise/{noise_sim.py → noise_simulation.py}
@@ -1,448 +1,452 @@
- import concurrent.futures
- import math
- import multiprocessing
- import time
-
- import geopandas as gpd
- import pandas as pd
- from shapely import GEOSException
- from shapely.geometry import GeometryCollection, MultiPolygon, Point, Polygon
- from shapely.ops import polygonize, unary_union
- from tqdm import tqdm
-
- from objectnat import config
- from objectnat.methods.noise.noise_exceptions import InvalidStepError
- from objectnat.methods.noise.noise_reduce import dist_to_target_db, green_noise_reduce_db
- from objectnat.methods.utils.geom_utils import (
- gdf_to_circle_zones_from_point,
- get_point_from_a_thorough_b,
- polygons_to_multilinestring,
- )
- from objectnat.methods.visibility.visibility_analysis import get_visibility_accurate
-
- logger = config.logger
-
- MAX_DB_VALUE = 194
-
-
- def simulate_noise(
- source_points: gpd.GeoDataFrame,
- obstacles: gpd.GeoDataFrame,
- source_noise_db: float = None,
- geometric_mean_freq_hz: float = None,
- **kwargs,
- ):
- """
- Simulates noise propagation from a set of source points considering obstacles, trees, and environmental factors.
-
- Args:
- source_points (gpd.GeoDataFrame): A GeoDataFrame with one or more point geometries representing noise sources.
- Optionally, it can include 'source_noise_db' and 'geometric_mean_freq_hz' columns for per-point simulation.
- obstacles (gpd.GeoDataFrame): A GeoDataFrame representing obstacles in the environment. If a column with
- sound absorption coefficients is present, its name should be provided in the `absorb_ratio_column` argument.
- Missing values will be filled with the `standart_absorb_ratio`.
- source_noise_db (float, optional): Default noise level (dB) to use if not specified per-point. Decibels are
- logarithmic units used to measure sound intensity. A value of 20 dB represents a barely audible whisper,
- while 140 dB is comparable to the noise of jet engines.
- geometric_mean_freq_hz (float, optional): Default frequency (Hz) to use if not specified per-point.
- This parameter influences the sound wave's propagation and scattering in the presence of trees.
- Lower frequencies travel longer distances than higher frequencies.
- It's recommended to use values between 63 Hz and 8000 Hz; values outside this range will be clamped to the
- nearest boundary for the sound absorption coefficient calculation.
-
- Optional kwargs:
- absorb_ratio_column (str, optional): The name of the column in the `obstacles` GeoDataFrame that contains the
- sound absorption coefficients for each obstacle. Default is None. If not specified, all obstacles will have
- the `standart_absorb_ratio`.
- standart_absorb_ratio (float, optional): The default sound absorption coefficient to use for obstacles without
- specified values in the `absorb_ratio_column`. Default is 0.05, which is a typical value for concrete walls.
- trees (gpd.GeoDataFrame, optional): A GeoDataFrame containing trees or dense vegetation along the sound wave's
- path. Trees will scatter and absorb sound waves.
- tree_resolution (int, optional): A resolution parameter for simulating tree interactions with sound waves.
- Recommended values are between 2 and 16, with higher values providing more accurate simulation results.
- air_temperature (float, optional): The air temperature in degrees Celsius. The recommended range is from 0 to
- 30 degrees Celsius, as temperatures outside this range will be clipped. Temperature affects the sound
- propagation in the air.
- target_noise_db (float, optional): The target noise level (in dB) for the simulation. Default is 40 dB.
- Lower values may not be relevant for further analysis, as they are near the threshold of human hearing.
- db_sim_step (float, optional): The step size in decibels for the noise simulation. Default is 1. For more
- precise analysis, this can be adjusted. If the difference between `source_noise_db` and `target_noise_db`
- is not divisible by the step size, the function will raise an error.
- reflection_n (int, optional): The maximum number of reflections (bounces) to simulate for each sound wave.
- Recommended values are between 1 and 3. Larger values will result in longer simulation times.
- dead_area_r (float, optional): A debugging parameter that defines the radius of the "dead zone" for reflections.
- Points within this area will not generate reflections. This is useful to prevent the algorithm from getting
- stuck in corners or along building walls.
-
- Returns:
- gpd.GeoDataFrame: A GeoDataFrame containing the noise simulation results, including noise levels and geometries
- of the affected areas. Each point's simulation results will be merged into a single GeoDataFrame.
- """
- # Obstacles args
- absorb_ratio_column = kwargs.get("absorb_ratio_column", None)
- standart_absorb_ratio = kwargs.get("standart_absorb_ratio", 0.05)
-
- # Trees args
- trees = kwargs.get("trees", None)
- tree_res = kwargs.get("tree_resolution", 4)
-
- # Simulation conditions
- air_temperature = kwargs.get("air_temperature", 20)
- target_noise_db = kwargs.get("target_noise_db", 40)
-
- # Simulation params
- db_sim_step = kwargs.get("db_sim_step", 1)
- reflection_n = kwargs.get("reflection_n", 3)
- dead_area_r = kwargs.get("dead_area_r", 5)
-
- # Validate optional columns or default values
- use_column_db = False
- if "source_noise_db" in source_points.columns:
- if (source_points["source_noise_db"] > MAX_DB_VALUE).any():
- raise ValueError(
- f"One or more values in 'source_noise_db' column exceed the physical limit of {MAX_DB_VALUE} dB."
- )
- use_column_db = True
-
- use_column_freq = "geometric_mean_freq_hz" in source_points.columns
-
- if not use_column_db:
- if source_noise_db is None:
- raise ValueError(
- "Either `source_noise_db` must be provided or the `source_points` must contain a 'source_noise_db' column."
- )
- if source_noise_db > MAX_DB_VALUE:
- raise ValueError(
- f"source_noise_db ({source_noise_db} dB) exceeds the physical limit of {MAX_DB_VALUE} dB in air."
- )
-
- if not use_column_freq:
- if geometric_mean_freq_hz is None:
- raise ValueError(
- "Either `geometric_mean_freq_hz` must be provided or the `source_points` must contain a 'geometric_mean_freq_hz' column."
- )
- if not use_column_db and not use_column_freq and len(source_points) > 1:
- logger.warning(
- "`source_noise_db` and `geometric_mean_freq_hz` will be used for all points. Per-point simulation parameters not found."
- )
-
- original_crs = source_points.crs
- source_points = source_points.copy()
-
- source_points = source_points.copy()
- if len(obstacles) > 0:
- obstacles = obstacles.copy()
- obstacles.geometry = obstacles.geometry.simplify(tolerance=1)
- local_crs = obstacles.estimate_utm_crs()
- obstacles.to_crs(local_crs, inplace=True)
- source_points.to_crs(local_crs, inplace=True)
- else:
- local_crs = source_points.estimate_utm_crs()
- source_points.to_crs(local_crs, inplace=True)
- source_points.reset_index(drop=True)
- source_points.geometry = source_points.centroid
-
- # Simplifying trees
- if trees is not None:
- trees = trees.copy()
- trees.to_crs(local_crs, inplace=True)
- trees.geometry = trees.geometry.simplify(tolerance=1)
- else:
- trees = gpd.GeoDataFrame()
-
- if absorb_ratio_column is None:
- obstacles["absorb_ratio"] = standart_absorb_ratio
- else:
- obstacles["absorb_ratio"] = obstacles[absorb_ratio_column].fillna(standart_absorb_ratio)
- obstacles = obstacles[["absorb_ratio", "geometry"]]
-
- # creating initial task and simulating for each point
- task_queue = multiprocessing.Queue()
- dead_area_dict = {}
- for ind, row in source_points.iterrows():
- source_point = row.geometry
- local_db = row["source_noise_db"] if use_column_db else source_noise_db
- local_freq = row["geometric_mean_freq_hz"] if use_column_freq else geometric_mean_freq_hz
- div_ = (local_db - target_noise_db) % db_sim_step
- if div_ != 0:
- raise InvalidStepError(local_db, target_noise_db, db_sim_step, div_)
- # calculating layer dist and db values
- dist_db = [(0, local_db)]
- cur_db = local_db - db_sim_step
- while cur_db != target_noise_db - db_sim_step:
- max_dist = dist_to_target_db(local_db, cur_db, local_freq, air_temperature)
- dist_db.append((max_dist, cur_db))
- cur_db -= db_sim_step
-
- args = (source_point, obstacles, trees, 0, 0, dist_db)
- kwargs = {
- "reflection_n": reflection_n,
- "geometric_mean_freq_hz": local_freq,
- "tree_res": tree_res,
- "min_db": target_noise_db,
- "simulation_ind": ind,
- }
- task_queue.put((_noise_from_point_task, args, kwargs))
- dead_area_dict[ind] = source_point.buffer(dead_area_r, resolution=2)
-
- noise_gdf = _parallel_split_queue(task_queue, dead_area_dict=dead_area_dict, dead_area_r=dead_area_r)
-
- noise_gdf = gpd.GeoDataFrame(pd.concat(noise_gdf, ignore_index=True), crs=local_crs)
- polygons = gpd.GeoDataFrame(
- geometry=list(polygonize(noise_gdf.geometry.apply(polygons_to_multilinestring).union_all())), crs=local_crs
- )
- polygons_points = polygons.copy()
- polygons_points.geometry = polygons.representative_point()
- sim_result = polygons_points.sjoin(noise_gdf, predicate="within").reset_index()
- sim_result = sim_result.groupby("index").agg({"noise_level": "max"})
- sim_result["geometry"] = polygons
- sim_result = (
- gpd.GeoDataFrame(sim_result, geometry="geometry", crs=local_crs).dissolve(by="noise_level").reset_index()
- )
-
- return sim_result.to_crs(original_crs)
-
-
- def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple] | None]: # pragma: no cover
- # Unpacking task
- point_from, obstacles, trees_orig, passed_dist, deep, dist_db = task
-
- def donuts_dist_values(dist_db, passed_dist, max_view_dist):
- new_dist_db = dist_db + [(passed_dist, None), (max_view_dist + passed_dist, None)]
- new_dist_db = sorted(new_dist_db, key=lambda x: x[0])
- start = None
- end = None
- for i, (dist, db) in enumerate(new_dist_db[:-1]):
- if db is None:
- if start is None:
- new_dist_db[i] = (dist, new_dist_db[i - 1][1])
- start = i
- else:
- new_dist_db[i] = (dist, new_dist_db[i + 1][1])
- end = i + 1
- break
- return [(dist - passed_dist, db) for dist, db in new_dist_db[start:end]]
-
- max_dist = max(dist_db, key=lambda x: x[0])[0]
- min_db = kwargs.get("min_db")
- reflection_n = kwargs.get("reflection_n")
- geometric_mean_freq_hz = kwargs.get("geometric_mean_freq_hz")
- tree_res = kwargs.get("tree_res")
- local_crs = obstacles.crs
- dist = round(max_dist - passed_dist, 1)
-
- obstacles = obstacles[obstacles.intersects(point_from.buffer(dist, resolution=8))]
-
- if len(obstacles) == 0:
- obstacles_union = Polygon()
- else:
- obstacles_union = obstacles.union_all()
-
- vis_poly, max_view_dist = get_visibility_accurate(point_from, obstacles, dist, return_max_view_dist=True)
-
- donuts_dist_values = donuts_dist_values(dist_db, passed_dist, max_view_dist)
-
- allowed_geom_types = ["MultiPolygon", "Polygon"]
-
- # Trees noise reduce
- reduce_polygons = []
- if len(trees_orig) > 0:
- trees_orig = trees_orig[trees_orig.intersects(point_from.buffer(dist, resolution=8))]
- if len(trees_orig) > 0:
- try:
- trees = gdf_to_circle_zones_from_point(trees_orig, point_from, dist, resolution=tree_res)
- trees = trees.clip(vis_poly, keep_geom_type=True).explode(index_parts=False)
- except TypeError:
- trees = gpd.GeoDataFrame()
-
- for _, row in trees.iterrows():
- tree_geom = row.geometry
- if tree_geom.area < 1:
- continue
- dist_to_centroid = tree_geom.centroid.distance(point_from)
-
- points_with_angle = [
- (
- Point(pt),
- round(abs(math.atan2(pt[1] - point_from.y, pt[0] - point_from.x)), 5),
- Point(pt).distance(point_from),
- )
- for pt in tree_geom.exterior.coords
- ]
-
- p0_1 = max(points_with_angle, key=lambda x: (x[1], x[2]))
- p0_2 = min(points_with_angle, key=lambda x: (x[1], -x[2]))
- delta_angle = 2 * math.pi + p0_1[1] - p0_2[1]
- if delta_angle > math.pi:
- delta_angle = 2 * math.pi - delta_angle
-
- a = math.sqrt((dist**2) * (1 + (math.tan(delta_angle / 2) ** 2)))
- p1 = get_point_from_a_thorough_b(point_from, p0_1[0], a)
- p2 = get_point_from_a_thorough_b(point_from, p0_2[0], a)
- red_polygon = unary_union([Polygon([p0_1[0], p1, p2, p0_2[0]]).intersection(vis_poly), tree_geom])
- if isinstance(red_polygon, GeometryCollection):
- red_polygon = max(
- ((poly, poly.area) for poly in red_polygon.geoms if isinstance(poly, (MultiPolygon, Polygon))),
- key=lambda x: x[1],
- )[0]
- if isinstance(red_polygon, MultiPolygon):
- red_polygon = red_polygon.buffer(0.1, resolution=1).buffer(-0.1, resolution=1)
- if isinstance(red_polygon, MultiPolygon):
- red_polygon = max(((poly, poly.area) for poly in red_polygon.geoms), key=lambda x: x[1])[0]
- if isinstance(red_polygon, Polygon) and not red_polygon.is_empty:
- red_polygon = Polygon(red_polygon.exterior)
- r_tree_new = round(
- tree_geom.area / (2 * dist_to_centroid * math.sin(abs(p0_1[1] - p0_2[1]) / 2)), 2
- )
-
- noise_reduce = int(round(green_noise_reduce_db(geometric_mean_freq_hz, r_tree_new)))
- reduce_polygons.append((red_polygon, noise_reduce))
-
- # Generating donuts - db values
- donuts = []
- don_values = []
- to_cut_off = point_from
- for i in range(len(donuts_dist_values[:-1])):
- cur_buffer = point_from.buffer(donuts_dist_values[i + 1][0])
- donuts.append(cur_buffer.difference(to_cut_off))
- don_values.append(donuts_dist_values[i][1])
- to_cut_off = cur_buffer
-
- noise_from_point = (
- gpd.GeoDataFrame(geometry=donuts, data={"noise_level": don_values}, crs=local_crs)
- .clip(vis_poly, keep_geom_type=True)
- .explode(ignore_index=True)
- )
-
- # intersect noise poly with noise reduce
- if len(reduce_polygons) > 0:
- reduce_polygons = gpd.GeoDataFrame(
- reduce_polygons, columns=["geometry", "reduce"], geometry="geometry", crs=local_crs
- )
-
- all_lines = (
- reduce_polygons.geometry.apply(polygons_to_multilinestring).tolist()
- + noise_from_point.geometry.apply(polygons_to_multilinestring).tolist()
- )
-
- cutted_polygons = gpd.GeoDataFrame(geometry=list(polygonize(unary_union(all_lines))), crs=local_crs)
-
- cutted_polygons_points = cutted_polygons.copy()
- cutted_polygons_points.geometry = cutted_polygons.representative_point()
-
- joined = (
- cutted_polygons_points.sjoin(noise_from_point, predicate="within", how="left")
- .drop(columns="index_right")
- .sjoin(reduce_polygons, predicate="within", how="left")
- .drop(columns="index_right")
- )
- joined.geometry = cutted_polygons.geometry
- joined = (
- joined.reset_index().groupby("index").agg({"geometry": "first", "reduce": "sum", "noise_level": "first"})
- )
- joined = gpd.GeoDataFrame(joined, geometry="geometry", crs=local_crs)
- noise_from_point = joined.copy()
-
- noise_from_point = noise_from_point.dropna(subset=["noise_level"])
-
- noise_from_point["reduce"] = noise_from_point["reduce"].fillna(0)
- noise_from_point["noise_level"] = noise_from_point["noise_level"] - noise_from_point["reduce"]
- else:
- noise_from_point["reduce"] = 0
- noise_from_point = noise_from_point[noise_from_point.geom_type.isin(allowed_geom_types)]
- noise_from_point = noise_from_point[noise_from_point["noise_level"] >= min_db]
- if deep == reflection_n:
- return noise_from_point, None
-
- if isinstance(vis_poly, Polygon):
- vis_poly_points = [Point(coords) for coords in vis_poly.exterior.coords]
- else:
- vis_poly_points = [Point(coords) for geom in vis_poly.geoms for coords in geom.exterior.coords]
- vis_poly_points = gpd.GeoDataFrame(geometry=vis_poly_points, crs=local_crs)
-
- # Generating reflection points
- vis_poly_points["point"] = vis_poly_points["geometry"].copy()
- vis_poly_points.geometry = vis_poly_points.geometry.buffer(1, resolution=1)
- vis_poly_points = vis_poly_points.sjoin(obstacles, predicate="intersects").drop(columns="index_right")
- vis_poly_points = vis_poly_points[~vis_poly_points.index.duplicated(keep="first")]
- vis_poly_points.dropna(subset=["absorb_ratio"], inplace=True)
- nearby_poly = point_from.buffer(1.1, resolution=2)
- try:
- vis_poly_points.geometry = (
- vis_poly_points.difference(vis_poly).difference(obstacles_union).difference(nearby_poly)
- )
- except GEOSException:
- return noise_from_point, None
- vis_poly_points = vis_poly_points[~vis_poly_points.is_empty]
- vis_poly_points = vis_poly_points[vis_poly_points.area >= 0.01]
- vis_poly_points["geometry"] = vis_poly_points["point"]
- vis_poly_points["dist"] = vis_poly_points.distance(point_from)
- vis_poly_points = vis_poly_points[vis_poly_points["dist"] < max_dist - 5]
- vis_poly_points = vis_poly_points.sjoin(noise_from_point, predicate="intersects", how="left")
-
- if len(vis_poly_points) == 0:
- return noise_from_point, None
-
- new_obs = pd.concat([obstacles, gpd.GeoDataFrame(geometry=[vis_poly], crs=local_crs)], ignore_index=True)
-
- # Creating new reflection tasks
- new_tasks = []
- for _, loc in vis_poly_points.iterrows():
- if not isinstance(loc.geometry, Point):
- continue
- new_passed_dist = round(loc.dist + passed_dist, 2)
- dist_last = max_dist - new_passed_dist
- if dist_last > 1:
- db_change = loc["reduce"]
- dist_change = loc["absorb_ratio"] * dist_last
- new_dist_db = [(dist - dist_change, db - db_change) for dist, db in dist_db]
- task_obs = new_obs.copy()
- task_obs.geometry = task_obs.difference(loc.geometry.buffer(1, resolution=1))
- new_tasks.append(
- (
- _noise_from_point_task,
- (loc.geometry, task_obs, trees_orig, new_passed_dist, deep + 1, new_dist_db),
- kwargs,
- )
- )
-
- return noise_from_point, new_tasks
-
-
- def _parallel_split_queue(task_queue: multiprocessing.Queue, dead_area_dict: dict, dead_area_r: int):
- results = []
- total_tasks = task_queue.qsize()
-
- with tqdm(total=total_tasks, desc="Simulating noise") as pbar:
- with concurrent.futures.ProcessPoolExecutor() as executor:
- # with concurrent.futures.ThreadPoolExecutor() as executor:
- future_to_task = {}
- while True:
- while not task_queue.empty() and len(future_to_task) < executor._max_workers:
- func, task, kwargs = task_queue.get_nowait()
- future = executor.submit(func, task, **kwargs)
- future_to_task[future] = kwargs["simulation_ind"]
- done, _ = concurrent.futures.wait(future_to_task.keys(), return_when=concurrent.futures.FIRST_COMPLETED)
- for future in done:
- simulation_ind = future_to_task.pop(future)
- result, new_tasks = future.result()
- if new_tasks:
- new_tasks_n = 0
- local_dead_area = dead_area_dict.get(simulation_ind)
- new_dead_area_points = [local_dead_area]
- for func, new_task, new_kwargs in new_tasks:
- new_point = new_task[0]
- if not local_dead_area.covers(new_point):
- task_queue.put((func, new_task, new_kwargs))
- new_dead_area_points.append(new_point.buffer(dead_area_r, resolution=2))
- new_tasks_n += 1
- dead_area_dict[simulation_ind] = unary_union(new_dead_area_points)
- total_tasks += new_tasks_n
- pbar.total = total_tasks
- pbar.refresh()
- results.append(result)
- pbar.update(1)
- time.sleep(0.01)
- if not future_to_task and task_queue.empty():
- break
- return results
+ import concurrent.futures
+ import math
+ import multiprocessing
+ import time
+
+ import geopandas as gpd
+ import pandas as pd
+ from shapely import GEOSException
+ from shapely.geometry import GeometryCollection, MultiPolygon, Point, Polygon
+ from shapely.ops import polygonize, unary_union
+ from tqdm import tqdm
+
+ from objectnat import config
+ from objectnat.methods.noise.noise_reduce import dist_to_target_db, green_noise_reduce_db
+ from objectnat.methods.noise.noise_simulation_simplified import _eval_donuts_gdf
+ from objectnat.methods.utils.geom_utils import (
+ gdf_to_circle_zones_from_point,
+ get_point_from_a_thorough_b,
+ polygons_to_multilinestring,
+ )
+ from objectnat.methods.visibility.visibility_analysis import get_visibility_accurate
+
+ logger = config.logger
+
+ MAX_DB_VALUE = 194
+
+
+ def simulate_noise(
+ source_points: gpd.GeoDataFrame,
+ obstacles: gpd.GeoDataFrame,
+ source_noise_db: float = None,
+ geometric_mean_freq_hz: float = None,
+ **kwargs,
+ ):
+ """
+ Simulates noise propagation from a set of source points considering obstacles, trees, and environmental factors.
+
+ Parameters:
+ source_points (gpd.GeoDataFrame):
+ A GeoDataFrame with one or more point geometries representing noise sources.
+ Optionally, it can include 'source_noise_db' and 'geometric_mean_freq_hz' columns for per-point simulation.
+ obstacles (gpd.GeoDataFrame):
+ A GeoDataFrame representing obstacles in the environment. If a column with sound absorption coefficients
+ is present, its name should be provided in the `absorb_ratio_column` argument.
+ Missing values will be filled with the `standart_absorb_ratio`.
+ source_noise_db (float, optional):
+ Default noise level (dB) to use if not specified per-point. Decibels are logarithmic units used to measure
+ sound intensity. A value of 20 dB represents a barely audible whisper, while 140 dB is comparable to the
+ noise of jet engines.
+ geometric_mean_freq_hz (float, optional):
+ Default frequency (Hz) to use if not specified per-point. This parameter influences the sound wave's
+ propagation and scattering in the presence of trees. Lower frequencies travel longer distances than higher
+ frequencies. It's recommended to use values between 63 Hz and 8000 Hz; values outside this range will be
+ clamped to the nearest boundary for the sound absorption coefficient calculation.
+
+ Optional kwargs:
+ - absorb_ratio_column (str, optional): The name of the column in the `obstacles` GeoDataFrame that contains the
+ sound absorption coefficients for each obstacle. Default is None. If not specified, all obstacles will have
+ the `standart_absorb_ratio`.
+ - standart_absorb_ratio (float, optional): The default sound absorption coefficient to use for obstacles without
+ specified values in the `absorb_ratio_column`. Default is 0.05, which is a typical value for concrete walls.
+ - trees (gpd.GeoDataFrame, optional): A GeoDataFrame containing trees or dense vegetation along the sound wave's
+ path. Trees will scatter and absorb sound waves.
+ - tree_resolution (int, optional): A resolution parameter for simulating tree interactions with sound waves.
+ Recommended values are between 2 and 16, with higher values providing more accurate simulation results.
+ - air_temperature (float, optional): The air temperature in degrees Celsius. The recommended range is from 0 to
+ 30 degrees Celsius, as temperatures outside this range will be clipped. Temperature affects the sound
+ propagation in the air.
+ - target_noise_db (float, optional): The target noise level (in dB) for the simulation. Default is 40 dB.
+ Lower values may not be relevant for further analysis, as they are near the threshold of human hearing.
+ - db_sim_step (float, optional): The step size in decibels for the noise simulation. Default is 1. For more
+ precise analysis, this can be adjusted. If the difference between `source_noise_db` and `target_noise_db`
+ is not divisible by the step size, the function will raise an error.
+ - reflection_n (int, optional): The maximum number of reflections (bounces) to simulate for each sound wave.
+ Recommended values are between 1 and 3. Larger values will result in longer simulation times.
+ - dead_area_r (float, optional): A debugging parameter that defines the radius of the "dead zone" for reflections.
+ Points within this area will not generate reflections. This is useful to prevent the algorithm from getting
+ stuck in corners or along building walls.
+ - use_parallel (bool, optional): Whether to use ProcessPool for task distribution or not. Default is True.
+ Returns:
+ (gpd.GeoDataFrame): A GeoDataFrame containing the noise simulation results, including noise levels and geometries
+ of the affected areas. Each point's simulation results will be merged into a single GeoDataFrame.
+ """
+ # Obstacles args
+ absorb_ratio_column = kwargs.get("absorb_ratio_column", None)
+ standart_absorb_ratio = kwargs.get("standart_absorb_ratio", 0.05)
+
+ # Trees args
+ trees = kwargs.get("trees", None)
+ tree_res = kwargs.get("tree_resolution", 4)
+
+ # Simulation conditions
+ air_temperature = kwargs.get("air_temperature", 20)
+ target_noise_db = kwargs.get("target_noise_db", 40)
+
+ # Simulation params
+ db_sim_step = kwargs.get("db_sim_step", 1)
+ reflection_n = kwargs.get("reflection_n", 3)
+ dead_area_r = kwargs.get("dead_area_r", 5)
+
+ # Use paralleling
+ use_parallel = kwargs.get("use_parallel", True)
+
+ # Validate optional columns or default values
+ use_column_db = False
+ if "source_noise_db" in source_points.columns:
+ if (source_points["source_noise_db"] > MAX_DB_VALUE).any():
+ raise ValueError(
+ f"One or more values in 'source_noise_db' column exceed the physical limit of {MAX_DB_VALUE} dB."
+ )
+ if source_points["source_noise_db"].isnull().any():
+ raise ValueError(f"Column 'source_noise_db' contains missing (NaN) values")
+ use_column_db = True
+
+ use_column_freq = False
+ if "geometric_mean_freq_hz" in source_points.columns:
+ if source_points["geometric_mean_freq_hz"].isnull().any():
+ raise ValueError(f"Column 'geometric_mean_freq_hz' contains missing (NaN) values")
+ use_column_freq = True
+
+ if not use_column_db:
+ if source_noise_db is None:
+ raise ValueError(
+ "Either `source_noise_db` must be provided or the `source_points` must contain a 'source_noise_db' column."
+ )
+ if source_noise_db > MAX_DB_VALUE:
+ raise ValueError(
+ f"source_noise_db ({source_noise_db} dB) exceeds the physical limit of {MAX_DB_VALUE} dB in air."
+ )
+
+ if not use_column_freq:
+ if geometric_mean_freq_hz is None:
+ raise ValueError(
+ "Either `geometric_mean_freq_hz` must be provided or the `source_points` must contain a 'geometric_mean_freq_hz' column."
+ )
+ if not use_column_db and not use_column_freq and len(source_points) > 1:
+ logger.warning(
+ "`source_noise_db` and `geometric_mean_freq_hz` will be used for all points. Per-point simulation parameters not found."
+ )
+
+ original_crs = source_points.crs
+ source_points = source_points.copy()
+
+ source_points = source_points.copy()
+ if len(obstacles) > 0:
+ obstacles = obstacles.copy()
+ obstacles.geometry = obstacles.geometry.simplify(tolerance=1)
+ local_crs = obstacles.estimate_utm_crs()
+ obstacles.to_crs(local_crs, inplace=True)
+ source_points.to_crs(local_crs, inplace=True)
+ else:
+ local_crs = source_points.estimate_utm_crs()
+ source_points.to_crs(local_crs, inplace=True)
+ source_points.reset_index(drop=True)
+ source_points.geometry = source_points.centroid
+
+ # Simplifying trees
+ if trees is not None:
+ trees = trees.copy()
+ trees.to_crs(local_crs, inplace=True)
+ trees.geometry = trees.geometry.simplify(tolerance=1)
+ else:
+ trees = gpd.GeoDataFrame()
+
+ if absorb_ratio_column is None:
+ obstacles["absorb_ratio"] = standart_absorb_ratio
+ else:
+ obstacles["absorb_ratio"] = obstacles[absorb_ratio_column].fillna(standart_absorb_ratio)
+ obstacles = obstacles[["absorb_ratio", "geometry"]]
+
+ # creating initial task and simulating for each point
+ task_queue = multiprocessing.Queue()
+ dead_area_dict = {}
+ for ind, row in source_points.iterrows():
+ source_point = row.geometry
+ local_db = row["source_noise_db"] if use_column_db else source_noise_db
+ local_freq = row["geometric_mean_freq_hz"] if use_column_freq else geometric_mean_freq_hz
+
+ # calculating layer dist and db values
+ dist_db = [(0, local_db)]
+ cur_db = local_db - db_sim_step
+ while cur_db > target_noise_db - db_sim_step:
+ if cur_db - db_sim_step < target_noise_db:
+ cur_db = target_noise_db
+ max_dist = dist_to_target_db(local_db, cur_db, local_freq, air_temperature)
+ dist_db.append((max_dist, cur_db))
+ cur_db -= db_sim_step
+
+ args = (source_point, obstacles, trees, 0, 0, dist_db)
+ kwargs = {
+ "reflection_n": reflection_n,
+ "geometric_mean_freq_hz": local_freq,
+ "tree_res": tree_res,
+ "min_db": target_noise_db,
+ "simulation_ind": ind,
+ }
+ task_queue.put((_noise_from_point_task, args, kwargs))
+ dead_area_dict[ind] = source_point.buffer(dead_area_r, resolution=2)
+
+ noise_gdf = _recursive_simulation_queue(
+ task_queue, dead_area_dict=dead_area_dict, dead_area_r=dead_area_r, use_parallel=use_parallel
+ )
+
+ noise_gdf = gpd.GeoDataFrame(pd.concat(noise_gdf, ignore_index=True), crs=local_crs)
+ polygons = gpd.GeoDataFrame(
+ geometry=list(polygonize(noise_gdf.geometry.apply(polygons_to_multilinestring).union_all())), crs=local_crs
+ )
+ polygons_points = polygons.copy()
+ polygons_points.geometry = polygons.representative_point()
+ sim_result = polygons_points.sjoin(noise_gdf, predicate="within").reset_index()
+ sim_result = sim_result.groupby("index").agg({"noise_level": "max"})
+ sim_result["geometry"] = polygons
+ sim_result = (
+ gpd.GeoDataFrame(sim_result, geometry="geometry", crs=local_crs).dissolve(by="noise_level").reset_index()
+ )
+
+ return sim_result.to_crs(original_crs)
+
+
+ def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple] | None]:
+ # Unpacking task
+ point_from, obstacles, trees_orig, passed_dist, deep, dist_db = task
+
+ def donuts_dist_values(dist_db, passed_dist, max_view_dist):
+ new_dist_db = dist_db + [(passed_dist, None), (max_view_dist + passed_dist, None)]
+ new_dist_db = sorted(new_dist_db, key=lambda x: x[0])
+ start = None
+ end = None
+ for i, (dist, db) in enumerate(new_dist_db[:-1]):
+ if db is None:
+ if start is None:
+ new_dist_db[i] = (dist, new_dist_db[i - 1][1])
+ start = i
+ else:
+ new_dist_db[i] = (dist, new_dist_db[i + 1][1])
+ end = i + 1
+ break
+ return [(dist - passed_dist, db) for dist, db in new_dist_db[start:end]]
+
+ max_dist = max(dist_db, key=lambda x: x[0])[0]
+ min_db = kwargs.get("min_db")
+ reflection_n = kwargs.get("reflection_n")
+ geometric_mean_freq_hz = kwargs.get("geometric_mean_freq_hz")
+ tree_res = kwargs.get("tree_res")
+ local_crs = obstacles.crs
+ dist = round(max_dist - passed_dist, 1)
+
+ obstacles = obstacles[obstacles.intersects(point_from.buffer(dist, resolution=8))]
+
+ if len(obstacles) == 0:
+ obstacles_union = Polygon()
+ else:
+ obstacles_union = obstacles.union_all()
+
+ vis_poly, max_view_dist = get_visibility_accurate(point_from, obstacles, dist, return_max_view_dist=True)
+
+ donuts_dist_values = donuts_dist_values(dist_db, passed_dist, max_view_dist)
+
+ allowed_geom_types = ["MultiPolygon", "Polygon"]
+
+ # Trees noise reduce
+ reduce_polygons = []
+ if len(trees_orig) > 0:
+ trees_orig = trees_orig[trees_orig.intersects(point_from.buffer(dist, resolution=8))]
+ if len(trees_orig) > 0:
+ try:
+ trees = gdf_to_circle_zones_from_point(trees_orig, point_from, dist, resolution=tree_res)
+ trees = trees.clip(vis_poly, keep_geom_type=True).explode(index_parts=False)
+ except TypeError:
+ trees = gpd.GeoDataFrame()
+
+ for _, row in trees.iterrows():
+ tree_geom = row.geometry
+ if tree_geom.area < 1:
+ continue
+ dist_to_centroid = tree_geom.centroid.distance(point_from)
+
+ points_with_angle = [
+ (
+ Point(pt),
+ round(abs(math.atan2(pt[1] - point_from.y, pt[0] - point_from.x)), 5),
+ Point(pt).distance(point_from),
+ )
+ for pt in tree_geom.exterior.coords
+ ]
+
+ p0_1 = max(points_with_angle, key=lambda x: (x[1], x[2]))
+ p0_2 = min(points_with_angle, key=lambda x: (x[1], -x[2]))
+ delta_angle = 2 * math.pi + p0_1[1] - p0_2[1]
+ if delta_angle > math.pi:
+ delta_angle = 2 * math.pi - delta_angle
+
+ a = math.sqrt((dist**2) * (1 + (math.tan(delta_angle / 2) ** 2)))
+ p1 = get_point_from_a_thorough_b(point_from, p0_1[0], a)
+ p2 = get_point_from_a_thorough_b(point_from, p0_2[0], a)
+ red_polygon = unary_union([Polygon([p0_1[0], p1, p2, p0_2[0]]).intersection(vis_poly), tree_geom])
+ if isinstance(red_polygon, GeometryCollection):
+ red_polygon = max(
+ ((poly, poly.area) for poly in red_polygon.geoms if isinstance(poly, (MultiPolygon, Polygon))),
+ key=lambda x: x[1],
+ )[0]
+ if isinstance(red_polygon, MultiPolygon):
+ red_polygon = red_polygon.buffer(0.1, resolution=1).buffer(-0.1, resolution=1)
+ if isinstance(red_polygon, MultiPolygon):
+ red_polygon = max(((poly, poly.area) for poly in red_polygon.geoms), key=lambda x: x[1])[0]
+ if isinstance(red_polygon, Polygon) and not red_polygon.is_empty:
+ red_polygon = Polygon(red_polygon.exterior)
+ r_tree_new = round(
+ tree_geom.area / (2 * dist_to_centroid * math.sin(abs(p0_1[1] - p0_2[1]) / 2)), 2
+ )
+
+ noise_reduce = int(round(green_noise_reduce_db(geometric_mean_freq_hz, r_tree_new)))
+ reduce_polygons.append((red_polygon, noise_reduce))
+
+ noise_from_point = _eval_donuts_gdf(point_from, donuts_dist_values, local_crs, vis_poly)
+ # intersect noise poly with noise reduce
+ if len(reduce_polygons) > 0:
+ reduce_polygons = gpd.GeoDataFrame(
+ reduce_polygons, columns=["geometry", "reduce"], geometry="geometry", crs=local_crs
+ )
+
+ all_lines = (
+ reduce_polygons.geometry.apply(polygons_to_multilinestring).tolist()
+ + noise_from_point.geometry.apply(polygons_to_multilinestring).tolist()
+ )
+
+ cutted_polygons = gpd.GeoDataFrame(geometry=list(polygonize(unary_union(all_lines))), crs=local_crs)
+
+ cutted_polygons_points = cutted_polygons.copy()
+ cutted_polygons_points.geometry = cutted_polygons.representative_point()
+
+ joined = (
+ cutted_polygons_points.sjoin(noise_from_point, predicate="within", how="left")
+ .drop(columns="index_right")
+ .sjoin(reduce_polygons, predicate="within", how="left")
+ .drop(columns="index_right")
+ )
+ joined.geometry = cutted_polygons.geometry
+ joined = (
+ joined.reset_index().groupby("index").agg({"geometry": "first", "reduce": "sum", "noise_level": "first"})
+ )
+ joined = gpd.GeoDataFrame(joined, geometry="geometry", crs=local_crs)
+ noise_from_point = joined.copy()
+
+ noise_from_point = noise_from_point.dropna(subset=["noise_level"])
+
+ noise_from_point["reduce"] = noise_from_point["reduce"].fillna(0)
+ noise_from_point["noise_level"] = noise_from_point["noise_level"] - noise_from_point["reduce"]
+ else:
+ noise_from_point["reduce"] = 0
+ noise_from_point = noise_from_point[noise_from_point.geom_type.isin(allowed_geom_types)]
+ noise_from_point = noise_from_point[noise_from_point["noise_level"] >= min_db]
+ if deep == reflection_n:
+ return noise_from_point, None
+
+ if isinstance(vis_poly, Polygon):
+ vis_poly_points = [Point(coords) for coords in vis_poly.exterior.coords]
+ else:
+ vis_poly_points = [Point(coords) for geom in vis_poly.geoms for coords in geom.exterior.coords]
+ vis_poly_points = gpd.GeoDataFrame(geometry=vis_poly_points, crs=local_crs)
+
+ # Generating reflection points
+ vis_poly_points["point"] = vis_poly_points["geometry"].copy()
+ vis_poly_points.geometry = vis_poly_points.geometry.buffer(1, resolution=1)
+ vis_poly_points = vis_poly_points.sjoin(obstacles, predicate="intersects").drop(columns="index_right")
+ vis_poly_points = vis_poly_points[~vis_poly_points.index.duplicated(keep="first")]
+ vis_poly_points.dropna(subset=["absorb_ratio"], inplace=True)
+ nearby_poly = point_from.buffer(1.1, resolution=2)
+ try:
+ vis_poly_points.geometry = (
+ vis_poly_points.difference(vis_poly).difference(obstacles_union).difference(nearby_poly)
+ )
+ except GEOSException:
+ return noise_from_point, None
+ vis_poly_points = vis_poly_points[~vis_poly_points.is_empty]
+ vis_poly_points = vis_poly_points[vis_poly_points.area >= 0.01]
+ vis_poly_points["geometry"] = vis_poly_points["point"]
+ vis_poly_points["dist"] = vis_poly_points.distance(point_from)
+ vis_poly_points = vis_poly_points[vis_poly_points["dist"] < max_dist - 5]
+ vis_poly_points = vis_poly_points.sjoin(noise_from_point, predicate="intersects", how="left")
+
+ if len(vis_poly_points) == 0:
+ return noise_from_point, None
+
+ new_obs = pd.concat([obstacles, gpd.GeoDataFrame(geometry=[vis_poly], crs=local_crs)], ignore_index=True)
+
+ # Creating new reflection tasks
+ new_tasks = []
+ for _, loc in vis_poly_points.iterrows():
+ if not isinstance(loc.geometry, Point):
+ continue
+ new_passed_dist = round(loc.dist + passed_dist, 2)
+ dist_last = max_dist - new_passed_dist
+ if dist_last > 1:
+ db_change = loc["reduce"]
+ dist_change = loc["absorb_ratio"] * dist_last
+ new_dist_db = [(dist - dist_change, db - db_change) for dist, db in dist_db]
+ task_obs = new_obs.copy()
+ task_obs.geometry = task_obs.difference(loc.geometry.buffer(1, resolution=1))
+ new_tasks.append(
+ (
+ _noise_from_point_task,
+ (loc.geometry, task_obs, trees_orig, new_passed_dist, deep + 1, new_dist_db),
+ kwargs,
+ )
+ )
+
+ return noise_from_point, new_tasks
+
+
+ def _recursive_simulation_queue(
+ task_queue: multiprocessing.Queue, dead_area_dict: dict, dead_area_r: int, use_parallel: bool
+ ):
+ results = []
+ total_tasks = task_queue.qsize()
+
+ with tqdm(total=total_tasks, desc="Simulating noise") as pbar:
+ if use_parallel:
+ executor_class = concurrent.futures.ProcessPoolExecutor()
+ else:
+ executor_class = concurrent.futures.ThreadPoolExecutor()
+ with executor_class as executor:
+ future_to_task = {}
+ while True:
+ while not task_queue.empty() and len(future_to_task) < executor._max_workers:
+ func, task, kwargs = task_queue.get_nowait()
+ future = executor.submit(func, task, **kwargs)
+ future_to_task[future] = kwargs["simulation_ind"]
+ done, _ = concurrent.futures.wait(future_to_task.keys(), return_when=concurrent.futures.FIRST_COMPLETED)
+ for future in done:
+ simulation_ind = future_to_task.pop(future)
+ result, new_tasks = future.result()
+ if new_tasks:
+ new_tasks_n = 0
+ local_dead_area = dead_area_dict.get(simulation_ind)
+ new_dead_area_points = [local_dead_area]
+ for func, new_task, new_kwargs in new_tasks:
+ new_point = new_task[0]
+ if not local_dead_area.covers(new_point):
+ task_queue.put((func, new_task, new_kwargs))
+ new_dead_area_points.append(new_point.buffer(dead_area_r, resolution=2))
+ new_tasks_n += 1
+ dead_area_dict[simulation_ind] = unary_union(new_dead_area_points)
+ total_tasks += new_tasks_n
+ pbar.total = total_tasks
+ pbar.refresh()
+ results.append(result)
+ pbar.update(1)
+ time.sleep(0.01)
+ if not future_to_task and task_queue.empty():
+ break
+ return results
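A behavioral change worth noting in the hunk above: 1.1.0 required the gap between the source and target levels to be an exact multiple of `db_sim_step` and raised `InvalidStepError` otherwise, whereas 1.2.1 drops that check and clamps the last ring to `target_noise_db`. The sketch below paraphrases the new loop in isolation; `fake_decay` is a stand-in for `dist_to_target_db` (which also takes frequency and air temperature), so the distances are illustrative only.

```python
def rings_1_2_1(source_db, target_db, step, decay_fn):
    """Paraphrase of the 1.2.1 ring loop: the final ring is clamped to target_db instead of raising."""
    dist_db = [(0.0, source_db)]
    cur_db = source_db - step
    while cur_db > target_db - step:
        if cur_db - step < target_db:
            cur_db = target_db  # clamp the last ring; 1.1.0 raised InvalidStepError for such inputs
        dist_db.append((decay_fn(source_db, cur_db), cur_db))
        cur_db -= step
    return dist_db


def fake_decay(source_db, cur_db):
    # Placeholder decay model (assumption): 1 metre per dB lost; the real
    # dist_to_target_db also accounts for frequency and air temperature.
    return float(source_db - cur_db)


# A 5.5 dB span with a 2 dB step is not evenly divisible: 1.1.0 would have raised
# InvalidStepError, while 1.2.1 ends the ring list exactly at the 40 dB target.
print(rings_1_2_1(45.5, 40.0, 2.0, fake_decay))
# [(0.0, 45.5), (2.0, 43.5), (5.5, 40.0)]
```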