ObjectNat 0.2.5-py3-none-any.whl → 0.2.7-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

objectnat/_api.py CHANGED
@@ -6,6 +6,7 @@ from .methods.cluster_points_in_polygons import get_clusters_polygon
  from .methods.coverage_zones import get_isochrone_zone_coverage, get_radius_zone_coverage
  from .methods.isochrones import get_accessibility_isochrones
  from .methods.living_buildings_osm import download_buildings
+ from .methods.noise import simulate_noise
  from .methods.provision.provision import clip_provision, get_service_provision, recalculate_links
  from .methods.visibility_analysis import (
      calculate_visibility_catchment_area,
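
With this re-export in place, the new noise API is available from the package root. A minimal import check (nothing beyond the re-export itself is assumed):

    from objectnat import simulate_noise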
objectnat/_version.py CHANGED
@@ -1 +1 @@
- VERSION = "0.2.5"
+ VERSION = "0.2.7"
@@ -57,7 +57,7 @@ def get_accessibility_isochrones(
      >>> from iduedu import get_intermodal_graph
      >>> graph = get_intermodal_graph(polygon=my_territory_polygon)
      >>> points = gpd.GeoDataFrame(geometry=[Point(30.33, 59.95)], crs=4326).to_crs(graph.graph['crs'])
-     >>> isochrones, pt_stops, pt_routes = get_accessibility_isochrones(points,weight_value=15, weight_type="time_min", graph_nx=my_graph)
+     >>> isochrones, stops, routes = get_accessibility_isochrones(points,15,weight_type="time_min", graph_nx=graph)

      """

@@ -7,8 +7,6 @@ from shapely import MultiPolygon, Polygon

  from objectnat import config

- from ..utils import get_utm_crs_for_4326_gdf
-
  logger = config.logger


@@ -75,9 +73,9 @@ def eval_population(source: gpd.GeoDataFrame, population_column: str, area_per_p
      if "building:levels" not in source.columns:
          raise RuntimeError("No 'building:levels' column in provided GeoDataFrame")
      df = source.copy()
-     local_utm_crs = get_utm_crs_for_4326_gdf(source.to_crs(4326))
-     df["area"] = df.to_crs(local_utm_crs.to_epsg()).geometry.area.astype(float)
-     df["building:levels_is_real"] = df["building:levels"].apply(lambda x: False if pd.isna(x) else True)
+     local_crs = source.estimate_utm_crs()
+     df["area"] = df.to_crs(local_crs).geometry.area.astype(float)
+     df["building:levels_is_real"] = df["building:levels"].apply(lambda x: not pd.isna(x))
      df["building:levels"] = df["building:levels"].fillna(1)
      df["building:levels"] = pd.to_numeric(df["building:levels"], errors="coerce")
      df = df.dropna(subset=["building:levels"])
@@ -128,7 +126,7 @@ def download_buildings(
      Returns
      -------
      gpd.GeoDataFrame or None
-         A GeoDataFrame containing building geometries and attributes, or None if no buildings are found or an error occurs.
+         A GeoDataFrame containing building geometries and attributes, or None if no buildings are found.

      Examples
      --------
@@ -0,0 +1,3 @@
+ from .noise_sim import simulate_noise
+ from .noise_reduce import dist_to_target_db,green_noise_reduce_db
+ from .noise_exceptions import InvalidStepError
@@ -0,0 +1,14 @@
+ class InvalidStepError(ValueError):
+     def __init__(self, source_noise_db, target_noise_db, db_sim_step, div_, *args):
+         if args:
+             self.message = args[0]
+         else:
+             self.message = (
+                 f"The difference between `source_noise_db`({source_noise_db}) and `target_noise_db`({target_noise_db})"
+                 f" is not divisible by the step size ({db_sim_step}, remainder = {div_})"
+             )
+
+     def __str__(self):
+         if self.message:
+             return self.message
+         return "The difference between `source_noise_db` and `target_noise_db` is not divisible by the step size"
@@ -0,0 +1,10 @@
+ import pandas as pd
+
+ data = {
+     30: {63: 0, 125: 0.0002, 250: 0.0009, 500: 0.003, 1000: 0.0075, 2000: 0.014, 4000: 0.025, 8000: 0.064},
+     20: {63: 0, 125: 0.0003, 250: 0.0011, 500: 0.0028, 1000: 0.0052, 2000: 0.0096, 4000: 0.025, 8000: 0.083},
+     10: {63: 0, 125: 0.0004, 250: 0.001, 500: 0.002, 1000: 0.0039, 2000: 0.01, 4000: 0.035, 8000: 0.125},
+     0: {63: 0, 125: 0.0004, 250: 0.0008, 500: 0.0017, 1000: 0.0049, 2000: 0.017, 4000: 0.058, 8000: 0.156},
+ }
+
+ air_resist_ratio = pd.DataFrame(data)
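
In this table the columns are air temperatures (30, 20, 10, 0 °C), the index holds the octave-band geometric mean frequencies (63–8000 Hz), and the values are the atmospheric attenuation coefficients consumed by `get_air_resist_ratio` below. A small lookup sketch, assuming the module lands at `objectnat.methods.noise.noise_init_data`:

    from objectnat.methods.noise.noise_init_data import air_resist_ratio

    # coefficient for the 2000 Hz band at 10 °C; .loc is indexed as [frequency, temperature]
    k = air_resist_ratio.loc[2000, 10]  # 0.01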
@@ -0,0 +1,155 @@
+ import numpy as np
+ from scipy.optimize import fsolve
+
+ from objectnat import config
+
+ from .noise_init_data import air_resist_ratio
+
+ logger = config.logger
+
+
+ def get_air_resist_ratio(temp, freq, check_temp_freq=False):
+     if check_temp_freq:
+         if temp > max(air_resist_ratio.columns) or temp < min(air_resist_ratio.columns):
+             logger.warning(
+                 f"The specified temperature of {temp}°C is outside the tabulated data range. "
+                 f"The air resistance coefficient for these values may be inaccurate. "
+                 f"Recommended temperature range: {min(air_resist_ratio.columns)}°C "
+                 f"to {max(air_resist_ratio.columns)}°C."
+             )
+
+         if freq > max(air_resist_ratio.index) or freq < min(air_resist_ratio.index):
+             logger.warning(
+                 f"The specified geometric mean frequency of {freq} Hz is outside the tabulated data range."
+                 f" The air resistance coefficient for these values may be inaccurate."
+                 f" Recommended frequency range: {min(air_resist_ratio.index)} Hz to {max(air_resist_ratio.index)} Hz."
+             )
+
+     def get_nearest_values(array, value):
+         sorted_array = sorted(array)
+         if value in sorted_array:
+             return [value]
+         if value > max(sorted_array):
+             return [sorted_array[-1]]
+         if value < min(sorted_array):
+             return [sorted_array[0]]
+
+         for i, val in enumerate(sorted_array):
+             if value < val:
+                 return sorted_array[max(i - 1, 0)], sorted_array[i]
+         return sorted_array[-2], sorted_array[-1]
+
+     nearest_temp = get_nearest_values(air_resist_ratio.columns, temp)
+     nearest_freq = get_nearest_values(air_resist_ratio.index, freq)
+
+     if len(nearest_temp) == 1 and len(nearest_freq) == 1:
+         return air_resist_ratio.loc[nearest_freq[0], nearest_temp[0]]
+
+     if len(nearest_temp) == 2 and len(nearest_freq) == 2:
+         freq1, freq2 = nearest_freq
+         temp1, temp2 = nearest_temp
+
+         coef_temp1_freq1 = air_resist_ratio.loc[freq1, temp1]
+         coef_temp1_freq2 = air_resist_ratio.loc[freq2, temp1]
+         coef_temp2_freq1 = air_resist_ratio.loc[freq1, temp2]
+         coef_temp2_freq2 = air_resist_ratio.loc[freq2, temp2]
+
+         weight_temp1 = (temp2 - temp) / (temp2 - temp1)
+         weight_temp2 = (temp - temp1) / (temp2 - temp1)
+         weight_freq1 = (freq2 - freq) / (freq2 - freq1)
+         weight_freq2 = (freq - freq1) / (freq2 - freq1)
+
+         coef_freq1 = coef_temp1_freq1 * weight_temp1 + coef_temp2_freq1 * weight_temp2
+         coef_freq2 = coef_temp1_freq2 * weight_temp1 + coef_temp2_freq2 * weight_temp2
+
+         final_coef = coef_freq1 * weight_freq1 + coef_freq2 * weight_freq2
+
+         return final_coef
+
+     if len(nearest_temp) == 2 and len(nearest_freq) == 1:
+         temp1, temp2 = nearest_temp
+         freq1 = nearest_freq[0]
+
+         coef_temp1 = air_resist_ratio.loc[freq1, temp1]
+         coef_temp2 = air_resist_ratio.loc[freq1, temp2]
+
+         weight_temp1 = (temp2 - temp) / (temp2 - temp1)
+         weight_temp2 = (temp - temp1) / (temp2 - temp1)
+
+         return coef_temp1 * weight_temp1 + coef_temp2 * weight_temp2
+
+     if len(nearest_temp) == 1 and len(nearest_freq) == 2:
+         temp1 = nearest_temp[0]
+         freq1, freq2 = nearest_freq
+
+         coef_freq1 = air_resist_ratio.loc[freq1, temp1]
+         coef_freq2 = air_resist_ratio.loc[freq2, temp1]
+
+         weight_freq1 = (freq2 - freq) / (freq2 - freq1)
+         weight_freq2 = (freq - freq1) / (freq2 - freq1)
+
+         return coef_freq1 * weight_freq1 + coef_freq2 * weight_freq2
+
+
+ def dist_to_target_db(
+     init_noise_db, target_noise_db, geometric_mean_freq_hz, air_temperature, return_desc=False, check_temp_freq=False
+ ) -> float | str:
+     """
+     Calculates the distance required for a sound wave to decay from an initial noise level to a target noise level,
+     based on the geometric mean frequency of the sound and the air temperature. Optionally, can return a description
+     of the sound propagation behavior.
+
+     Args:
+         init_noise_db (float): The initial noise level of the source in decibels (dB). This is the starting sound
+             intensity.
+         target_noise_db (float): The target noise level in decibels (dB), representing the level to which the sound
+             decays over distance.
+         geometric_mean_freq_hz (float): The geometric mean frequency of the sound (in Hz). This frequency influences
+             the attenuation of sound over distance. Higher frequencies decay faster than lower ones.
+         air_temperature (float): The temperature of the air in degrees Celsius. This influences the air's resistance
+             to sound propagation.
+         return_desc (bool, optional): If set to `True`, the function will return a description of the sound decay
+             process instead of the calculated distance.
+         check_temp_freq (bool, optional): If `True`, the function will check whether the temperature and frequency
+             are within valid ranges.
+
+     Returns:
+         float or str: If `return_desc` is `False`, the function returns the distance (in meters) over which the sound
+         decays from `init_noise_db` to `target_noise_db`. If `return_desc` is `True`, a descriptive string is returned
+         explaining the calculation and the conditions.
+     """
+
+     def equation(r):
+         return l - l_ist + 20 * np.log10(r) + k * r
+
+     l_ist = init_noise_db
+     l = target_noise_db
+     k = get_air_resist_ratio(air_temperature, geometric_mean_freq_hz, check_temp_freq)
+     initial_guess = 1
+     r_solution = fsolve(equation, initial_guess)
+     if return_desc:
+         string = (
+             f"Noise level of {init_noise_db} dB "
+             f"with a geometric mean frequency of {geometric_mean_freq_hz} Hz "
+             f"at an air temperature of {air_temperature}°C decays to {target_noise_db} dB "
+             f"over a distance of {r_solution[0]} meters. Air resistance coefficient: {k}."
+         )
+         return string
+     return r_solution[0]
+
+
+ def green_noise_reduce_db(geometric_mean_freq_hz, r_tree) -> float:
+     """
+     Calculates the amount of noise reduction (in dB) provided by vegetation of a given thickness at a specified
+     geometric mean frequency. The function models the reduction based on the interaction of the sound with trees or
+     vegetation.
+
+     Args:
+         geometric_mean_freq_hz (float): The geometric mean frequency of the sound (in Hz).
+         r_tree (float): The thickness or density of the vegetation (in meters).
+
+     Returns:
+         float: The noise reduction (in dB) achieved by the vegetation. This value indicates how much quieter the sound
+             will be after passing through or interacting with the vegetation of the specified thickness.
+     """
+     return round(0.08 * r_tree * ((geometric_mean_freq_hz ** (1 / 3)) / 8), 1)
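
`dist_to_target_db` solves the point-source decay relation `target_db = source_db - 20*log10(r) - k*r` for the distance `r`, where `k` is the interpolated atmospheric attenuation coefficient from the table above, while `green_noise_reduce_db` applies an empirical vegetation-attenuation formula. A brief usage sketch (argument values are illustrative):

    from objectnat.methods.noise.noise_reduce import dist_to_target_db, green_noise_reduce_db

    # distance (m) over which a 90 dB source at 2000 Hz and 20 °C decays to 40 dB
    r = dist_to_target_db(90, 40, geometric_mean_freq_hz=2000, air_temperature=20)

    # additional attenuation (dB) from roughly 10 m of dense vegetation at the same frequency
    reduction = green_noise_reduce_db(2000, 10)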
@@ -0,0 +1,418 @@
+ import concurrent.futures
+ import math
+ import multiprocessing
+ import time
+
+ import geopandas as gpd
+ import pandas as pd
+ from shapely import GEOSException
+ from shapely.geometry import GeometryCollection, MultiPolygon, Point, Polygon
+ from shapely.ops import polygonize, unary_union
+ from tqdm import tqdm
+
+ from objectnat import config
+ from objectnat.methods.noise.noise_reduce import dist_to_target_db, green_noise_reduce_db
+ from objectnat.methods.noise.noise_exceptions import InvalidStepError
+ from objectnat.methods.utils.geom_utils import (
+     gdf_to_circle_zones_from_point,
+     get_point_from_a_thorough_b,
+     polygons_to_multilinestring,
+ )
+ from objectnat.methods.visibility_analysis import get_visibility_accurate
+
+ logger = config.logger
+
+
+ def simulate_noise(
+     source_points: gpd.GeoDataFrame, obstacles: gpd.GeoDataFrame, source_noise_db, geometric_mean_freq_hz, **kwargs
+ ):
+     """
+     Simulates noise propagation from a set of source points considering obstacles, trees, and environmental factors.
+
+     Args:
+         source_points (gpd.GeoDataFrame): A GeoDataFrame containing one or more points representing the noise sources.
+             A separate simulation will be run for each point.
+         obstacles (gpd.GeoDataFrame): A GeoDataFrame representing obstacles in the environment. If a column with
+             sound absorption coefficients is present, its name should be provided in the `absorb_ratio_column` argument.
+             Missing values will be filled with the `standart_absorb_ratio`.
+         source_noise_db (float): The noise level of the point source in decibels (dB). Decibels are logarithmic units
+             used to measure sound intensity. A value of 20 dB represents a barely audible whisper, while 140 dB
+             is comparable to the noise of jet engines.
+         geometric_mean_freq_hz (float): The geometric mean frequency of the sound (in Hz). This parameter influences
+             the sound wave's propagation and scattering in the presence of trees. Lower frequencies travel longer
+             distances than higher frequencies. It's recommended to use values between 63 Hz and 8000 Hz; values outside
+             this range will be clamped to the nearest boundary for the sound absorption coefficient calculation.
+
+     Optional kwargs:
+         absorb_ratio_column (str, optional): The name of the column in the `obstacles` GeoDataFrame that contains the
+             sound absorption coefficients for each obstacle. Default is None. If not specified, all obstacles will have
+             the `standart_absorb_ratio`.
+         standart_absorb_ratio (float, optional): The default sound absorption coefficient to use for obstacles without
+             specified values in the `absorb_ratio_column`. Default is 0.05, which is a typical value for concrete walls.
+         trees (gpd.GeoDataFrame, optional): A GeoDataFrame containing trees or dense vegetation along the sound wave's
+             path. Trees will scatter and absorb sound waves.
+         tree_resolution (int, optional): A resolution parameter for simulating tree interactions with sound waves.
+             Recommended values are between 2 and 16, with higher values providing more accurate simulation results.
+         air_temperature (float, optional): The air temperature in degrees Celsius. The recommended range is from 0 to
+             30 degrees Celsius, as temperatures outside this range will be clipped. Temperature affects the sound
+             propagation in the air.
+         target_noise_db (float, optional): The target noise level (in dB) for the simulation. Default is 40 dB.
+             Lower values may not be relevant for further analysis, as they are near the threshold of human hearing.
+         db_sim_step (float, optional): The step size in decibels for the noise simulation. Default is 1. For more
+             precise analysis, this can be adjusted. If the difference between `source_noise_db` and `target_noise_db`
+             is not divisible by the step size, the function will raise an error.
+         reflection_n (int, optional): The maximum number of reflections (bounces) to simulate for each sound wave.
+             Recommended values are between 1 and 3. Larger values will result in longer simulation times.
+         dead_area_r (float, optional): A debugging parameter that defines the radius of the "dead zone" for reflections.
+             Points within this area will not generate reflections. This is useful to prevent the algorithm from getting
+             stuck in corners or along building walls.
+
+     Returns:
+         gpd.GeoDataFrame: A GeoDataFrame containing the noise simulation results, including noise levels and geometries
+             of the affected areas. Each point's simulation results will be merged into a single GeoDataFrame.
+     """
+     # Obstacles args
+     absorb_ratio_column = kwargs.get("absorb_ratio_column", None)
+     standart_absorb_ratio = kwargs.get("standart_absorb_ratio", 0.05)
+
+     # Trees args
+     trees = kwargs.get("trees", None)
+     tree_res = kwargs.get("tree_resolution", 4)
+
+     # Simulation conditions
+     air_temperature = kwargs.get("air_temperature", 20)
+     target_noise_db = kwargs.get("target_noise_db", 40)
+
+     # Simulation params
+     db_sim_step = kwargs.get("db_sim_step", 1)
+     reflection_n = kwargs.get("reflection_n", 3)
+     dead_area_r = kwargs.get("dead_area_r", 5)
+     div_ = (source_noise_db - target_noise_db) % db_sim_step
+     if div_ != 0:
+         raise InvalidStepError(source_noise_db, target_noise_db, db_sim_step, div_)
+     # Choosing crs and simplifying obs if any
+     if len(obstacles) > 0:
+         obstacles = obstacles.copy()
+         obstacles.geometry = obstacles.geometry.simplify(tolerance=1)
+         local_crs = obstacles.estimate_utm_crs()
+         obstacles.to_crs(local_crs, inplace=True)
+         source_points.to_crs(local_crs, inplace=True)
+     else:
+         local_crs = source_points.estimate_utm_crs()
+     source_points = source_points.copy()
+     source_points.to_crs(local_crs, inplace=True)
+     source_points.reset_index(drop=True)
+     source_points.geometry = source_points.centroid
+
+     # Simplifying trees
+     if trees is not None:
+         trees = trees.copy()
+         trees.to_crs(local_crs, inplace=True)
+         trees.geometry = trees.geometry.simplify(tolerance=1)
+     else:
+         trees = gpd.GeoDataFrame()
+
+     if absorb_ratio_column is None:
+         obstacles["absorb_ratio"] = standart_absorb_ratio
+     else:
+         obstacles["absorb_ratio"] = obstacles[absorb_ratio_column]
+         obstacles["absorb_ratio"] = obstacles["absorb_ratio"].fillna(standart_absorb_ratio)
+     obstacles = obstacles[["absorb_ratio", "geometry"]]
+
+     logger.info(
+         dist_to_target_db(
+             source_noise_db,
+             target_noise_db,
+             geometric_mean_freq_hz,
+             air_temperature,
+             return_desc=True,
+             check_temp_freq=True,
+         )
+     )
+     # calculating layer dist and db values
+     dist_db = [(0, source_noise_db)]
+     cur_db = source_noise_db - db_sim_step
+     while cur_db != target_noise_db - db_sim_step:
+         max_dist = dist_to_target_db(source_noise_db, cur_db, geometric_mean_freq_hz, air_temperature)
+         dist_db.append((max_dist, cur_db))
+         cur_db = cur_db - db_sim_step
+
+     # creating initial task and simulating for each point
+     all_p_res = []
+     for ind, row in source_points.iterrows():
+         logger.info(f"Started simulation for point {ind+1} / {len(source_points)}")
+         source_point = row.geometry
+         task_queue = multiprocessing.Queue()
+         args = (source_point, obstacles, trees, 0, 0, dist_db)
+         kwargs = {
+             "reflection_n": reflection_n,
+             "geometric_mean_freq_hz": geometric_mean_freq_hz,
+             "tree_res": tree_res,
+             "min_db": target_noise_db,
+         }
+         task_queue.put((_noise_from_point_task, args, kwargs))
+
+         noise_gdf = _parallel_split_queue(
+             task_queue, dead_area=source_point.buffer(dead_area_r, resolution=2), dead_area_r=dead_area_r
+         )
+
+         noise_gdf = gpd.GeoDataFrame(pd.concat(noise_gdf, ignore_index=True), crs=local_crs)
+         polygons = gpd.GeoDataFrame(
+             geometry=list(polygonize(noise_gdf.geometry.apply(polygons_to_multilinestring).unary_union)), crs=local_crs
+         )
+         polygons_points = polygons.copy()
+         polygons_points.geometry = polygons.representative_point()
+         sim_result = polygons_points.sjoin(noise_gdf, predicate="within").reset_index()
+         sim_result = sim_result.groupby("index").agg({"noise_level": "max"})
+         sim_result["geometry"] = polygons
+         sim_result = (
+             gpd.GeoDataFrame(sim_result, geometry="geometry", crs=local_crs).dissolve(by="noise_level").reset_index()
+         )
+         sim_result["source_point_ind"] = ind
+         all_p_res.append(sim_result)
+
+     return gpd.GeoDataFrame(pd.concat(all_p_res, ignore_index=True), crs=local_crs)
+
+
+ def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple] | None]:
+     # Unpacking task
+     point_from, obstacles, trees_orig, passed_dist, deep, dist_db = task
+
+     def donuts_dist_values(dist_db, passed_dist, max_view_dist):
+         new_dist_db = dist_db + [(passed_dist, None), (max_view_dist + passed_dist, None)]
+         new_dist_db = sorted(new_dist_db, key=lambda x: x[0])
+         start = None
+         end = None
+         for i, (dist, db) in enumerate(new_dist_db[:-1]):
+             if db is None:
+                 if start is None:
+                     new_dist_db[i] = (dist, new_dist_db[i - 1][1])
+                     start = i
+                 else:
+                     new_dist_db[i] = (dist, new_dist_db[i + 1][1])
+                     end = i + 1
+                     break
+         return [(dist - passed_dist, db) for dist, db in new_dist_db[start:end]]
+
+     max_dist = max(dist_db, key=lambda x: x[0])[0]
+     min_db = kwargs.get("min_db")
+     reflection_n = kwargs.get("reflection_n")
+     geometric_mean_freq_hz = kwargs.get("geometric_mean_freq_hz")
+     tree_res = kwargs.get("tree_res")
+     local_crs = obstacles.crs
+     dist = round(max_dist - passed_dist, 1)
+
+     obstacles = obstacles[obstacles.intersects(point_from.buffer(dist, resolution=8))]
+
+     if len(obstacles) == 0:
+         obstacles_union = Polygon()
+     else:
+         obstacles_union = obstacles.unary_union
+
+     vis_poly, max_view_dist = get_visibility_accurate(point_from, obstacles, dist, return_max_view_dist=True)
+
+     donuts_dist_values = donuts_dist_values(dist_db, passed_dist, max_view_dist)
+
+     allowed_geom_types = ["MultiPolygon", "Polygon"]
+
+     # Trees noise reduce
+     reduce_polygons = []
+     if len(trees_orig) > 0:
+         trees_orig = trees_orig[trees_orig.intersects(point_from.buffer(dist, resolution=8))]
+         if len(trees_orig) > 0:
+             try:
+                 trees = gdf_to_circle_zones_from_point(trees_orig, point_from, dist, resolution=tree_res)
+                 trees = trees.clip(vis_poly, keep_geom_type=True).explode(index_parts=False)
+             except TypeError:
+                 trees = gpd.GeoDataFrame()
+
+             for _, row in trees.iterrows():
+                 tree_geom = row.geometry
+                 if tree_geom.area < 1:
+                     continue
+                 dist_to_centroid = tree_geom.centroid.distance(point_from)
+
+                 points_with_angle = [
+                     (
+                         Point(pt),
+                         round(abs(math.atan2(pt[1] - point_from.y, pt[0] - point_from.x)), 5),
+                         Point(pt).distance(point_from),
+                     )
+                     for pt in tree_geom.exterior.coords
+                 ]
+
+                 p0_1 = max(points_with_angle, key=lambda x: (x[1], x[2]))
+                 p0_2 = min(points_with_angle, key=lambda x: (x[1], -x[2]))
+                 delta_angle = 2 * math.pi + p0_1[1] - p0_2[1]
+                 if delta_angle > math.pi:
+                     delta_angle = 2 * math.pi - delta_angle
+
+                 a = math.sqrt((dist**2) * (1 + (math.tan(delta_angle / 2) ** 2)))
+                 p1 = get_point_from_a_thorough_b(point_from, p0_1[0], a)
+                 p2 = get_point_from_a_thorough_b(point_from, p0_2[0], a)
+                 red_polygon = unary_union([Polygon([p0_1[0], p1, p2, p0_2[0]]).intersection(vis_poly), tree_geom])
+                 if isinstance(red_polygon, GeometryCollection):
+                     red_polygon = max(
+                         ((poly, poly.area) for poly in red_polygon.geoms if isinstance(poly, (MultiPolygon, Polygon))),
+                         key=lambda x: x[1],
+                     )[0]
+                 if isinstance(red_polygon, MultiPolygon):
+                     red_polygon = red_polygon.buffer(0.1, resolution=1).buffer(-0.1, resolution=1)
+                 if isinstance(red_polygon, MultiPolygon):
+                     red_polygon = max(((poly, poly.area) for poly in red_polygon.geoms), key=lambda x: x[1])[0]
+                 if isinstance(red_polygon, Polygon) and not red_polygon.is_empty:
+                     red_polygon = Polygon(red_polygon.exterior)
+                 r_tree_new = round(
+                     tree_geom.area / (2 * dist_to_centroid * math.sin(abs(p0_1[1] - p0_2[1]) / 2)), 2
+                 )
+
+                 noise_reduce = int(round(green_noise_reduce_db(geometric_mean_freq_hz, r_tree_new)))
+                 reduce_polygons.append((red_polygon, noise_reduce))
+
+     # Generating donuts - db values
+     donuts = []
+     don_values = []
+     to_cut_off = point_from
+     for i in range(len(donuts_dist_values[:-1])):
+         cur_buffer = point_from.buffer(donuts_dist_values[i + 1][0])
+         donuts.append(cur_buffer.difference(to_cut_off))
+         don_values.append(donuts_dist_values[i][1])
+         to_cut_off = cur_buffer
+
+     noise_from_point = (
+         gpd.GeoDataFrame(geometry=donuts, data={"noise_level": don_values}, crs=local_crs)
+         .clip(vis_poly, keep_geom_type=True)
+         .explode(ignore_index=True)
+     )
+
+     # intersect noise poly with noise reduce
+     if len(reduce_polygons) > 0:
+         reduce_polygons = gpd.GeoDataFrame(
+             reduce_polygons, columns=["geometry", "reduce"], geometry="geometry", crs=local_crs
+         )
+
+         all_lines = (
+             reduce_polygons.geometry.apply(polygons_to_multilinestring).tolist()
+             + noise_from_point.geometry.apply(polygons_to_multilinestring).tolist()
+         )
+
+         cutted_polygons = gpd.GeoDataFrame(geometry=list(polygonize(unary_union(all_lines))), crs=local_crs)
+
+         cutted_polygons_points = cutted_polygons.copy()
+         cutted_polygons_points.geometry = cutted_polygons.representative_point()
+
+         joined = (
+             cutted_polygons_points.sjoin(noise_from_point, predicate="within", how="left")
+             .drop(columns="index_right")
+             .sjoin(reduce_polygons, predicate="within", how="left")
+             .drop(columns="index_right")
+         )
+         joined.geometry = cutted_polygons.geometry
+         joined = (
+             joined.reset_index().groupby("index").agg({"geometry": "first", "reduce": "sum", "noise_level": "first"})
+         )
+         joined = gpd.GeoDataFrame(joined, geometry="geometry", crs=local_crs)
+         noise_from_point = joined.copy()
+
+         noise_from_point = noise_from_point.dropna(subset=["noise_level"])
+
+         noise_from_point["reduce"] = noise_from_point["reduce"].fillna(0)
+         noise_from_point["noise_level"] = noise_from_point["noise_level"] - noise_from_point["reduce"]
+     else:
+         noise_from_point["reduce"] = 0
+     noise_from_point = noise_from_point[noise_from_point.geom_type.isin(allowed_geom_types)]
+     noise_from_point = noise_from_point[noise_from_point["noise_level"] >= min_db]
+     if deep == reflection_n:
+         return noise_from_point, None
+
+     if isinstance(vis_poly, Polygon):
+         vis_poly_points = [Point(coords) for coords in vis_poly.exterior.coords]
+     else:
+         vis_poly_points = [Point(coords) for geom in vis_poly.geoms for coords in geom.exterior.coords]
+     vis_poly_points = gpd.GeoDataFrame(geometry=vis_poly_points, crs=local_crs)
+
+     # Generating reflection points
+     vis_poly_points["point"] = vis_poly_points.geometry
+     vis_poly_points.geometry = vis_poly_points.geometry.buffer(1, resolution=1)
+     vis_poly_points = vis_poly_points.sjoin(obstacles, predicate="intersects").drop(columns="index_right")
+     vis_poly_points = vis_poly_points[~vis_poly_points.index.duplicated(keep="first")]
+     vis_poly_points.dropna(subset=["absorb_ratio"], inplace=True)
+     nearby_poly = point_from.buffer(1.1, resolution=2)
+     try:
+         vis_poly_points.geometry = (
+             vis_poly_points.difference(vis_poly).difference(obstacles_union).difference(nearby_poly)
+         )
+     except GEOSException:
+         return noise_from_point, None
+     vis_poly_points = vis_poly_points[~vis_poly_points.is_empty]
+     vis_poly_points = vis_poly_points[vis_poly_points.area >= 0.01]
+     vis_poly_points.geometry = vis_poly_points["point"]
+     vis_poly_points["dist"] = vis_poly_points.distance(point_from)
+     vis_poly_points = vis_poly_points[vis_poly_points["dist"] < max_dist - 5]
+     vis_poly_points = vis_poly_points.sjoin(noise_from_point, predicate="intersects", how="left")
+
+     if len(vis_poly_points) == 0:
+         return noise_from_point, None
+
+     new_obs = pd.concat([obstacles, gpd.GeoDataFrame(geometry=[vis_poly], crs=local_crs)], ignore_index=True)
+
+     # Creating new reflection tasks
+     new_tasks = []
+     for _, loc in vis_poly_points.iterrows():
+         new_passed_dist = round(loc.dist + passed_dist, 2)
+         dist_last = max_dist - new_passed_dist
+         if dist_last > 1:
+             db_change = loc["reduce"]
+             dist_change = loc["absorb_ratio"] * dist_last
+             new_dist_db = [(dist - dist_change, db - db_change) for dist, db in dist_db]
+             task_obs = new_obs.copy()
+             task_obs.geometry = task_obs.difference(loc.geometry.buffer(1, resolution=1))
+             new_tasks.append(
+                 (
+                     _noise_from_point_task,
+                     (loc.geometry, task_obs, trees_orig, new_passed_dist, deep + 1, new_dist_db),
+                     kwargs,
+                 )
+             )
+
+     return noise_from_point, new_tasks
+
+
+ def _parallel_split_queue(task_queue: multiprocessing.Queue, dead_area: Polygon, dead_area_r: int):
+     results = []
+     total_tasks = task_queue.qsize()
+
+     with tqdm(total=total_tasks, desc="Simulating noise") as pbar:
+         with concurrent.futures.ProcessPoolExecutor() as executor:
+             future_to_task = {}
+             while True:
+                 while not task_queue.empty() and len(future_to_task) < executor._max_workers:
+                     func, task, kwargs = task_queue.get_nowait()
+                     future = executor.submit(func, task, **kwargs)
+                     future_to_task[future] = task
+
+                 done, _ = concurrent.futures.wait(future_to_task.keys(), return_when=concurrent.futures.FIRST_COMPLETED)
+
+                 for future in done:
+                     future_to_task.pop(future)
+                     result, new_tasks = future.result()
+                     if new_tasks:
+                         new_tasks_n = 0
+                         new_dead_area_points = [dead_area]
+                         for func, new_task, kwargs in new_tasks:
+                             if not dead_area.covers(new_task[0]):
+                                 new_tasks_n = new_tasks_n + 1
+                                 task_queue.put((func, new_task, kwargs))
+                                 new_dead_area_points.append(new_task[0].buffer(dead_area_r, resolution=2))
+
+                         dead_area = unary_union(new_dead_area_points)
+                         total_tasks += new_tasks_n
+                         pbar.total = total_tasks
+                         pbar.refresh()
+                     results.append(result)
+                     pbar.update(1)
+                 time.sleep(0.01)
+                 if not future_to_task and task_queue.empty():
+                     break
+
+     return results
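
Taken together, the new module is driven through the top-level `simulate_noise` call. A minimal end-to-end sketch; the input files, CRS and parameter values below are illustrative assumptions, not part of the release:

    import geopandas as gpd
    from shapely.geometry import Point

    from objectnat import simulate_noise

    noise_sources = gpd.GeoDataFrame(geometry=[Point(30.33, 59.95)], crs=4326)  # noise source point(s)
    obstacles = gpd.read_file("buildings.geojson")  # assumed local layer of building footprints
    trees = gpd.read_file("trees.geojson")          # assumed optional vegetation layer

    noise_map = simulate_noise(
        source_points=noise_sources,
        obstacles=obstacles,
        source_noise_db=90,
        geometric_mean_freq_hz=2000,
        trees=trees,
        air_temperature=20,
        target_noise_db=40,
        db_sim_step=5,
        reflection_n=2,
    )
    # noise_map is a GeoDataFrame of dissolved noise-level polygons per source point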