ObjectNat 0.2.7__py3-none-any.whl → 1.0.1__py3-none-any.whl

This diff shows the contents of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release.

Files changed (31)
  1. objectnat/_api.py +5 -8
  2. objectnat/_config.py +0 -24
  3. objectnat/_version.py +1 -1
  4. objectnat/methods/coverage_zones/__init__.py +2 -0
  5. objectnat/methods/coverage_zones/graph_coverage.py +118 -0
  6. objectnat/methods/coverage_zones/radius_voronoi.py +45 -0
  7. objectnat/methods/isochrones/__init__.py +1 -0
  8. objectnat/methods/isochrones/isochrone_utils.py +130 -0
  9. objectnat/methods/isochrones/isochrones.py +325 -0
  10. objectnat/methods/noise/__init__.py +2 -2
  11. objectnat/methods/noise/noise_sim.py +14 -9
  12. objectnat/methods/point_clustering/__init__.py +1 -0
  13. objectnat/methods/{cluster_points_in_polygons.py → point_clustering/cluster_points_in_polygons.py} +22 -28
  14. objectnat/methods/provision/__init__.py +1 -0
  15. objectnat/methods/provision/provision.py +4 -4
  16. objectnat/methods/provision/provision_model.py +17 -18
  17. objectnat/methods/utils/geom_utils.py +54 -3
  18. objectnat/methods/utils/graph_utils.py +127 -0
  19. objectnat/methods/utils/math_utils.py +32 -0
  20. objectnat/methods/visibility/__init__.py +6 -0
  21. objectnat/methods/{visibility_analysis.py → visibility/visibility_analysis.py} +167 -208
  22. objectnat-1.0.1.dist-info/METADATA +142 -0
  23. objectnat-1.0.1.dist-info/RECORD +32 -0
  24. objectnat/methods/balanced_buildings.py +0 -69
  25. objectnat/methods/coverage_zones.py +0 -90
  26. objectnat/methods/isochrones.py +0 -143
  27. objectnat/methods/living_buildings_osm.py +0 -168
  28. objectnat-0.2.7.dist-info/METADATA +0 -118
  29. objectnat-0.2.7.dist-info/RECORD +0 -26
  30. {objectnat-0.2.7.dist-info → objectnat-1.0.1.dist-info}/LICENSE.txt +0 -0
  31. {objectnat-0.2.7.dist-info → objectnat-1.0.1.dist-info}/WHEEL +0 -0
objectnat/methods/isochrones/isochrones.py

@@ -0,0 +1,325 @@
+ from typing import Literal
+
+ import geopandas as gpd
+ import networkx as nx
+ import numpy as np
+ from shapely.ops import polygonize
+
+ from objectnat import config
+ from objectnat.methods.isochrones.isochrone_utils import (
+     _calculate_distance_matrix,
+     _create_isochrones_gdf,
+     _prepare_graph_and_nodes,
+     _process_pt_data,
+     _validate_inputs,
+ )
+ from objectnat.methods.utils.geom_utils import polygons_to_multilinestring, remove_inner_geom
+ from objectnat.methods.utils.graph_utils import graph_to_gdf
+
+ logger = config.logger
+
+
+ def get_accessibility_isochrone_stepped(
+     isochrone_type: Literal["radius", "ways", "separate"],
+     point: gpd.GeoDataFrame,
+     weight_value: float,
+     weight_type: Literal["time_min", "length_meter"],
+     nx_graph: nx.Graph,
+     step: float | None = None,
+     **kwargs,
+ ) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame | None, gpd.GeoDataFrame | None]:
+     """
+     Calculate stepped accessibility isochrones for a single point with specified intervals.
+
+     Parameters
+     ----------
+     isochrone_type : Literal["radius", "ways", "separate"]
+         Visualization method for stepped isochrones:
+         - "radius": Voronoi-based in circular buffers
+         - "ways": Voronoi-based in road network polygons
+         - "separate": Circular buffers for each step
+     point : gpd.GeoDataFrame
+         Single source point for isochrone calculation (uses the first geometry if multiple are provided).
+     weight_value : float
+         Maximum travel time (minutes) or distance (meters) threshold.
+     weight_type : Literal["time_min", "length_meter"]
+         Type of weight calculation:
+         - "time_min": Time-based in minutes
+         - "length_meter": Distance-based in meters
+     nx_graph : nx.Graph
+         NetworkX graph representing the transportation network.
+     step : float, optional
+         Interval between isochrone steps. Defaults to:
+         - 100 meters for distance-based isochrones
+         - 1 minute for time-based isochrones
+     **kwargs
+         Additional buffer parameters:
+         - buffer_factor: size multiplier for buffers (default: 0.7)
+         - road_buffer_size: buffer size for road edges in meters (default: 5)
+
+     Returns
+     -------
+     tuple[gpd.GeoDataFrame, gpd.GeoDataFrame | None, gpd.GeoDataFrame | None]
+         Tuple containing:
+         - stepped_isochrones: GeoDataFrame with stepped polygons and distance/time attributes
+         - pt_stops: public transport stops within the isochrones (if available)
+         - pt_routes: public transport routes within the isochrones (if available)
+
+     Examples
+     --------
+     >>> from iduedu import get_intermodal_graph  # pip install iduedu to get an OSM city network graph
+     >>> graph = get_intermodal_graph(polygon=my_territory_polygon)
+     >>> point = gpd.GeoDataFrame(geometry=[Point(30.33, 59.95)], crs=4326)
+     >>> # Stepped radius isochrones with 5-minute intervals
+     >>> radius_stepped, stops, _ = get_accessibility_isochrone_stepped(
+     ...     "radius", point, 30, "time_min", graph, step=5
+     ... )
+     >>> # Stepped road isochrones with 200 m intervals
+     >>> ways_stepped, _, routes = get_accessibility_isochrone_stepped(
+     ...     "ways", point, 1000, "length_meter", graph, step=200
+     ... )
+     >>> # Separate circular buffers for each step
+     >>> separate_stepped, stops, _ = get_accessibility_isochrone_stepped(
+     ...     "separate", point, 15, "time_min", graph
+     ... )
+     """
+     buffer_params = {
+         "buffer_factor": 0.7,
+         "road_buffer_size": 5,
+     }
+
+     buffer_params.update(kwargs)
+     original_crs = point.crs
+     point = point.copy()
+     if len(point) > 1:
+         logger.warning(
+             f"This method processes only a single point. The GeoDataFrame contains {len(point)} points - "
+             "only the first geometry will be used for the isochrone calculation."
+         )
+         point = point.iloc[[0]]
+
+     local_crs, graph_type = _validate_inputs(point, weight_value, weight_type, nx_graph)
+
+     if step is None:
+         if weight_type == "length_meter":
+             step = 100
+         else:
+             step = 1
+     nx_graph, points, dist_nearest, speed = _prepare_graph_and_nodes(
+         point, nx_graph, graph_type, weight_type, weight_value
+     )
+
+     dist_matrix, subgraph = _calculate_distance_matrix(
+         nx_graph, points["nearest_node"].values, weight_type, weight_value, dist_nearest
+     )
+
+     logger.info("Building isochrones geometry...")
+     nodes, edges = graph_to_gdf(subgraph)
+     nodes.loc[dist_matrix.columns, "dist"] = dist_matrix.iloc[0]
+     steps = np.arange(0, weight_value + step, step)
+     if steps[-1] > weight_value:
+         steps[-1] = weight_value  # Ensure the last step doesn't exceed weight_value
+
+     if isochrone_type == "separate":
+         for i in range(len(steps) - 1):
+             min_dist = steps[i]
+             max_dist = steps[i + 1]
+             nodes_in_step = nodes["dist"].between(min_dist, max_dist, inclusive="left")
+             nodes_in_step = nodes_in_step[nodes_in_step].index
+             if not nodes_in_step.empty:
+                 buffer_size = (max_dist - nodes.loc[nodes_in_step, "dist"]) * 0.7
+                 if weight_type == "time_min":
+                     buffer_size = buffer_size * speed
+                 nodes.loc[nodes_in_step, "buffer_size"] = buffer_size
+         nodes.geometry = nodes.geometry.buffer(nodes["buffer_size"])
+         nodes["dist"] = np.round(nodes["dist"], 0)
+         nodes = nodes.dissolve(by="dist", as_index=False)
+         polygons = gpd.GeoDataFrame(
+             geometry=list(polygonize(nodes.geometry.apply(polygons_to_multilinestring).union_all())),
+             crs=local_crs,
+         )
+         polygons_points = polygons.copy()
+         polygons_points.geometry = polygons.representative_point()
+
+         stepped_iso = polygons_points.sjoin(nodes, predicate="within").reset_index()
+         stepped_iso = stepped_iso.groupby("index").agg({"dist": "mean"})
+         stepped_iso["geometry"] = polygons
+         stepped_iso = gpd.GeoDataFrame(stepped_iso, geometry="geometry", crs=local_crs).reset_index(drop=True)
+     else:
+         if isochrone_type == "radius":
+             isochrone_geoms = _build_radius_isochrones(
+                 dist_matrix, weight_value, weight_type, speed, nodes, buffer_params["buffer_factor"]
+             )
+         else:  # isochrone_type == "ways"
+             if graph_type in ["intermodal", "walk"]:
+                 isochrone_edges = edges[edges["type"] == "walk"]
+             else:
+                 isochrone_edges = edges.copy()
+             all_isochrones_edges = isochrone_edges.buffer(buffer_params["road_buffer_size"], resolution=1).union_all()
+             all_isochrones_edges = gpd.GeoDataFrame(geometry=[all_isochrones_edges], crs=local_crs)
+             isochrone_geoms = _build_ways_isochrones(
+                 dist_matrix=dist_matrix,
+                 weight_value=weight_value,
+                 weight_type=weight_type,
+                 speed=speed,
+                 nodes=nodes,
+                 all_isochrones_edges=all_isochrones_edges,
+                 buffer_factor=buffer_params["buffer_factor"],
+             )
+         nodes = nodes.clip(isochrone_geoms[0], keep_geom_type=True)
+         nodes["dist"] = np.minimum(np.ceil(nodes["dist"] / step) * step, weight_value)
+         voronois = gpd.GeoDataFrame(geometry=nodes.voronoi_polygons(), crs=local_crs)
+         stepped_iso = (
+             voronois.sjoin(nodes[["dist", "geometry"]]).dissolve(by="dist", as_index=False).drop(columns="index_right")
+         )
+         stepped_iso = stepped_iso.clip(isochrone_geoms[0], keep_geom_type=True)
+
+     pt_nodes, pt_edges = _process_pt_data(nodes, edges, graph_type)
+     if pt_nodes is not None:
+         pt_nodes.to_crs(original_crs, inplace=True)
+     if pt_edges is not None:
+         pt_edges.to_crs(original_crs, inplace=True)
+     return stepped_iso.to_crs(original_crs), pt_nodes, pt_edges
+
+
+ def get_accessibility_isochrones(
+     isochrone_type: Literal["radius", "ways"],
+     points: gpd.GeoDataFrame,
+     weight_value: float,
+     weight_type: Literal["time_min", "length_meter"],
+     nx_graph: nx.Graph,
+     **kwargs,
+ ) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame | None, gpd.GeoDataFrame | None]:
+     """
+     Calculate accessibility isochrones from input points based on the provided city graph.
+
+     Supports two types of isochrones:
+     - 'radius': circular buffer-based isochrones
+     - 'ways': road network-based isochrones
+
+     Parameters
+     ----------
+     isochrone_type : Literal["radius", "ways"]
+         Type of isochrone to calculate:
+         - "radius": creates circular buffers around reachable nodes
+         - "ways": creates polygons based on the reachable road network
+     points : gpd.GeoDataFrame
+         GeoDataFrame containing source points for the isochrone calculation.
+     weight_value : float
+         Maximum travel time (minutes) or distance (meters) threshold.
+     weight_type : Literal["time_min", "length_meter"]
+         Type of weight calculation:
+         - "time_min": time-based accessibility in minutes
+         - "length_meter": distance-based accessibility in meters
+     nx_graph : nx.Graph
+         NetworkX graph representing the transportation network.
+         Must contain CRS and speed attributes for time calculations.
+     **kwargs
+         Additional buffer parameters:
+         - buffer_factor: size multiplier for buffers (default: 0.7)
+         - road_buffer_size: buffer size for road edges in meters (default: 5)
+
+     Returns
+     -------
+     tuple[gpd.GeoDataFrame, gpd.GeoDataFrame | None, gpd.GeoDataFrame | None]
+         Tuple containing:
+         - isochrones: GeoDataFrame with the calculated isochrone polygons
+         - pt_stops: public transport stops within the isochrones (if available)
+         - pt_routes: public transport routes within the isochrones (if available)
+
+     Examples
+     --------
+     >>> from iduedu import get_intermodal_graph  # pip install iduedu to get an OSM city network graph
+     >>> graph = get_intermodal_graph(polygon=my_territory_polygon)
+     >>> points = gpd.GeoDataFrame(geometry=[Point(30.33, 59.95)], crs=4326)
+     >>> # Radius isochrones
+     >>> radius_iso, stops, routes = get_accessibility_isochrones(
+     ...     "radius", points, 15, "time_min", graph, buffer_factor=0.8
+     ... )
+     >>> # Road network isochrones
+     >>> ways_iso, stops, routes = get_accessibility_isochrones(
+     ...     "ways", points, 1000, "length_meter", graph, road_buffer_size=7
+     ... )
+     """
+
+     buffer_params = {
+         "buffer_factor": 0.7,
+         "road_buffer_size": 5,
+     }
+     original_crs = points.crs
+     buffer_params.update(kwargs)
+
+     points = points.copy()
+     local_crs, graph_type = _validate_inputs(points, weight_value, weight_type, nx_graph)
+
+     nx_graph, points, dist_nearest, speed = _prepare_graph_and_nodes(
+         points, nx_graph, graph_type, weight_type, weight_value
+     )
+
+     weight_cutoff = (
+         weight_value + (100 if weight_type == "length_meter" else 1) if isochrone_type == "ways" else weight_value
+     )
+
+     dist_matrix, subgraph = _calculate_distance_matrix(
+         nx_graph, points["nearest_node"].values, weight_type, weight_cutoff, dist_nearest
+     )
+
+     logger.info("Building isochrones geometry...")
+     nodes, edges = graph_to_gdf(subgraph)
+     if isochrone_type == "radius":
+         isochrone_geoms = _build_radius_isochrones(
+             dist_matrix, weight_value, weight_type, speed, nodes, buffer_params["buffer_factor"]
+         )
+     else:  # isochrone_type == "ways"
+         if graph_type in ["intermodal", "walk"]:
+             isochrone_edges = edges[edges["type"] == "walk"]
+         else:
+             isochrone_edges = edges.copy()
+         all_isochrones_edges = isochrone_edges.buffer(buffer_params["road_buffer_size"], resolution=1).union_all()
+         all_isochrones_edges = gpd.GeoDataFrame(geometry=[all_isochrones_edges], crs=local_crs)
+         isochrone_geoms = _build_ways_isochrones(
+             dist_matrix=dist_matrix,
+             weight_value=weight_value,
+             weight_type=weight_type,
+             speed=speed,
+             nodes=nodes,
+             all_isochrones_edges=all_isochrones_edges,
+             buffer_factor=buffer_params["buffer_factor"],
+         )
+     isochrones = _create_isochrones_gdf(points, isochrone_geoms, dist_matrix, local_crs, weight_type, weight_value)
+     pt_nodes, pt_edges = _process_pt_data(nodes, edges, graph_type)
+     if pt_nodes is not None:
+         pt_nodes.to_crs(original_crs, inplace=True)
+     if pt_edges is not None:
+         pt_edges.to_crs(original_crs, inplace=True)
+     return isochrones.to_crs(original_crs), pt_nodes, pt_edges
+
+
+ def _build_radius_isochrones(dist_matrix, weight_value, weight_type, speed, nodes, buffer_factor):
+     results = []
+     for source in dist_matrix.index:
+         buffers = (weight_value - dist_matrix.loc[source]) * buffer_factor
+         if weight_type == "time_min":
+             buffers = buffers * speed
+         buffers = nodes.merge(buffers, left_index=True, right_index=True)
+         buffers.geometry = buffers.geometry.buffer(buffers[source], resolution=8)
+         results.append(buffers.union_all())
+     return results
+
+
+ def _build_ways_isochrones(dist_matrix, weight_value, weight_type, speed, nodes, all_isochrones_edges, buffer_factor):
+     results = []
+     for source in dist_matrix.index:
+         reachable_nodes = dist_matrix.loc[source]
+         reachable_nodes = reachable_nodes[reachable_nodes <= weight_value]
+         reachable_nodes = (weight_value - reachable_nodes) * buffer_factor
+         if weight_type == "time_min":
+             reachable_nodes = reachable_nodes * speed
+         reachable_nodes = nodes.merge(reachable_nodes, left_index=True, right_index=True)
+         clip_zone = reachable_nodes.buffer(reachable_nodes[source], resolution=4).union_all()
+
+         isochrone_edges = all_isochrones_edges.clip(clip_zone, keep_geom_type=True).explode(ignore_index=True)
+         geom_to_keep = isochrone_edges.sjoin(reachable_nodes, how="inner").index.unique()
+         isochrone = remove_inner_geom(isochrone_edges.loc[geom_to_keep].union_all())
+         results.append(isochrone)
+     return results
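
For context, the two public functions in this new module compose into a short pipeline. A minimal sketch (assuming iduedu is installed and that territory is a Shapely polygon covering the area of interest; both are hypothetical here):

import geopandas as gpd
from shapely.geometry import Point

from iduedu import get_intermodal_graph
from objectnat.methods.isochrones.isochrones import (
    get_accessibility_isochrone_stepped,
    get_accessibility_isochrones,
)

graph = get_intermodal_graph(polygon=territory)  # territory: assumed Shapely polygon
points = gpd.GeoDataFrame(geometry=[Point(30.33, 59.95)], crs=4326)

# Plain 15-minute isochrones; results come back in the input CRS (EPSG:4326 here).
iso, stops, routes = get_accessibility_isochrones("radius", points, 15, "time_min", graph)

# Stepped variant: one band per 5 minutes, up to 30 minutes of travel time.
stepped, pt_stops, _ = get_accessibility_isochrone_stepped("radius", points, 30, "time_min", graph, step=5)
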
objectnat/methods/noise/__init__.py

@@ -1,3 +1,3 @@
  from .noise_sim import simulate_noise
- from .noise_reduce import dist_to_target_db,green_noise_reduce_db
- from .noise_exceptions import InvalidStepError
+ from .noise_reduce import dist_to_target_db, green_noise_reduce_db
+ from .noise_exceptions import InvalidStepError
objectnat/methods/noise/noise_sim.py

@@ -11,14 +11,14 @@ from shapely.ops import polygonize, unary_union
  from tqdm import tqdm

  from objectnat import config
- from objectnat.methods.noise.noise_reduce import dist_to_target_db, green_noise_reduce_db
  from objectnat.methods.noise.noise_exceptions import InvalidStepError
+ from objectnat.methods.noise.noise_reduce import dist_to_target_db, green_noise_reduce_db
  from objectnat.methods.utils.geom_utils import (
      gdf_to_circle_zones_from_point,
      get_point_from_a_thorough_b,
      polygons_to_multilinestring,
  )
- from objectnat.methods.visibility_analysis import get_visibility_accurate
+ from objectnat.methods.visibility.visibility_analysis import get_visibility_accurate

  logger = config.logger

@@ -87,10 +87,14 @@ def simulate_noise(
      db_sim_step = kwargs.get("db_sim_step", 1)
      reflection_n = kwargs.get("reflection_n", 3)
      dead_area_r = kwargs.get("dead_area_r", 5)
+
+     original_crs = source_points.crs
+
      div_ = (source_noise_db - target_noise_db) % db_sim_step
      if div_ != 0:
          raise InvalidStepError(source_noise_db, target_noise_db, db_sim_step, div_)
      # Choosing crs and simplifying obs if any
+     source_points = source_points.copy()
      if len(obstacles) > 0:
          obstacles = obstacles.copy()
          obstacles.geometry = obstacles.geometry.simplify(tolerance=1)
@@ -99,7 +103,6 @@ def simulate_noise(
          source_points.to_crs(local_crs, inplace=True)
      else:
          local_crs = source_points.estimate_utm_crs()
-         source_points = source_points.copy()
          source_points.to_crs(local_crs, inplace=True)
      source_points.reset_index(drop=True)
      source_points.geometry = source_points.centroid
@@ -158,7 +161,7 @@ def simulate_noise(

      noise_gdf = gpd.GeoDataFrame(pd.concat(noise_gdf, ignore_index=True), crs=local_crs)
      polygons = gpd.GeoDataFrame(
-         geometry=list(polygonize(noise_gdf.geometry.apply(polygons_to_multilinestring).unary_union)), crs=local_crs
+         geometry=list(polygonize(noise_gdf.geometry.apply(polygons_to_multilinestring).union_all())), crs=local_crs
      )
      polygons_points = polygons.copy()
      polygons_points.geometry = polygons.representative_point()
@@ -171,10 +174,10 @@ def simulate_noise(
          sim_result["source_point_ind"] = ind
          all_p_res.append(sim_result)

-     return gpd.GeoDataFrame(pd.concat(all_p_res, ignore_index=True), crs=local_crs)
+     return gpd.GeoDataFrame(pd.concat(all_p_res, ignore_index=True), crs=local_crs).to_crs(original_crs)


- def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple] | None]:
+ def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple] | None]:  # pragma: no cover
      # Unpacking task
      point_from, obstacles, trees_orig, passed_dist, deep, dist_db = task

@@ -207,7 +210,7 @@ def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple
      if len(obstacles) == 0:
          obstacles_union = Polygon()
      else:
-         obstacles_union = obstacles.unary_union
+         obstacles_union = obstacles.union_all()

      vis_poly, max_view_dist = get_visibility_accurate(point_from, obstacles, dist, return_max_view_dist=True)

@@ -332,7 +335,7 @@ def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple
      vis_poly_points = gpd.GeoDataFrame(geometry=vis_poly_points, crs=local_crs)

      # Generating reflection points
-     vis_poly_points["point"] = vis_poly_points.geometry
+     vis_poly_points["point"] = vis_poly_points["geometry"].copy()
      vis_poly_points.geometry = vis_poly_points.geometry.buffer(1, resolution=1)
      vis_poly_points = vis_poly_points.sjoin(obstacles, predicate="intersects").drop(columns="index_right")
      vis_poly_points = vis_poly_points[~vis_poly_points.index.duplicated(keep="first")]
@@ -346,7 +349,7 @@ def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple
          return noise_from_point, None
      vis_poly_points = vis_poly_points[~vis_poly_points.is_empty]
      vis_poly_points = vis_poly_points[vis_poly_points.area >= 0.01]
-     vis_poly_points.geometry = vis_poly_points["point"]
+     vis_poly_points["geometry"] = vis_poly_points["point"]
      vis_poly_points["dist"] = vis_poly_points.distance(point_from)
      vis_poly_points = vis_poly_points[vis_poly_points["dist"] < max_dist - 5]
      vis_poly_points = vis_poly_points.sjoin(noise_from_point, predicate="intersects", how="left")
@@ -359,6 +362,8 @@ def _noise_from_point_task(task, **kwargs) -> tuple[gpd.GeoDataFrame, list[tuple
      # Creating new reflection tasks
      new_tasks = []
      for _, loc in vis_poly_points.iterrows():
+         if not isinstance(loc.geometry, Point):
+             continue
          new_passed_dist = round(loc.dist + passed_dist, 2)
          dist_last = max_dist - new_passed_dist
          if dist_last > 1:
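
Several hunks in this file swap the deprecated unary_union property for union_all(); GeoPandas 1.0 deprecated the property form in favor of the method. A standalone illustration of the equivalence (not code from the package):

import geopandas as gpd
from shapely.geometry import Point

circles = gpd.GeoSeries([Point(0, 0).buffer(1), Point(1, 0).buffer(1)], crs=3857)

merged_old = circles.unary_union   # deprecated property; warns on GeoPandas >= 1.0
merged_new = circles.union_all()   # preferred replacement
assert merged_old.equals(merged_new)
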
objectnat/methods/point_clustering/__init__.py

@@ -0,0 +1 @@
+ from .cluster_points_in_polygons import get_clusters_polygon
objectnat/methods/point_clustering/cluster_points_in_polygons.py

@@ -37,7 +37,7 @@ def get_clusters_polygon(
  ) -> tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]:
      """
      Generate cluster polygons for given points based on a specified minimum distance and minimum points per cluster.
-     Optionally, calculate the relative ratio between types of services within the clusters.
+     Optionally, calculate the relative ratio between types of points within the clusters.

      Parameters
      ----------
@@ -56,22 +56,12 @@
      -------
      tuple[gpd.GeoDataFrame, gpd.GeoDataFrame]
          A tuple containing the clustered polygons GeoDataFrame and the original points GeoDataFrame with cluster labels.
-
-     Examples
-     --------
-     >>> import geopandas as gpd
-     >>> from shapely.geometry import Point
-
-     >>> points = gpd.GeoDataFrame({
-     ...     'geometry': [Point(0, 0), Point(1, 1), Point(2, 2)],
-     ...     'service_code': [1, 1, 2]
-     ... }, crs=4326)
-
-     >>> clusters, services = get_clusters_polygon(points, min_dist=50, min_point=2)
      """
      if method not in ["DBSCAN", "HDBSCAN"]:
          raise ValueError("Method must be either 'DBSCAN' or 'HDBSCAN'")
-
+     original_crs = points.crs
+     local_crs = points.estimate_utm_crs()
+     points = points.to_crs(local_crs)
      services_select = _get_cluster(points, min_dist, min_point, method)

      if service_code_column not in points.columns:
@@ -80,43 +70,47 @@
          )
          points[service_code_column] = service_code_column

-     services_normal = services_select[services_select["cluster"] != -1]
-     services_outlier = services_select[services_select["cluster"] == -1]
+     points_normal = services_select[services_select["cluster"] != -1].copy()
+     points_outlier = services_select[services_select["cluster"] == -1].copy()

-     if len(services_normal) > 0:
-         cluster_service = services_normal.groupby("cluster", group_keys=True).apply(
+     if len(points_normal) > 0:
+         cluster_service = points_normal.groupby("cluster", group_keys=True).apply(
              _get_service_ratio, service_code_column=service_code_column
          )
          if isinstance(cluster_service, pd.Series):
              cluster_service = cluster_service.unstack(level=1, fill_value=0)

-         polygons_normal = services_normal.dissolve("cluster").concave_hull(ratio=0.7, allow_holes=False)
+         polygons_normal = points_normal.dissolve("cluster").concave_hull(ratio=0.1, allow_holes=True)
          df_clusters_normal = pd.concat([cluster_service, polygons_normal.rename("geometry")], axis=1)
          cluster_normal = df_clusters_normal.index.max()
+         points_normal["outlier"] = False
+         df_clusters_normal["outlier"] = False
      else:
          df_clusters_normal = None
          cluster_normal = 0

-     if len(services_outlier) > 0:
+     if len(points_outlier) > 0:
          clusters_outlier = cluster_normal + 1
-         new_clusters = list(range(clusters_outlier, clusters_outlier + len(services_outlier)))
-         services_outlier.loc[:, "cluster"] = new_clusters
+         new_clusters = list(range(clusters_outlier, clusters_outlier + len(points_outlier)))
+         points_outlier.loc[:, "cluster"] = new_clusters

-         cluster_service = services_outlier.groupby("cluster", group_keys=True).apply(
+         cluster_service = points_outlier.groupby("cluster", group_keys=True).apply(
              _get_service_ratio, service_code_column=service_code_column
          )
          if isinstance(cluster_service, pd.Series):
              cluster_service = cluster_service.unstack(level=1, fill_value=0)

-         df_clusters_outlier = cluster_service.join(services_outlier.set_index("cluster")["geometry"])
+         df_clusters_outlier = cluster_service.join(points_outlier.set_index("cluster")["geometry"])
+         points_outlier["outlier"] = True
+         df_clusters_outlier["outlier"] = True
      else:
-         services_outlier = None
+         points_outlier = None
          df_clusters_outlier = None

      df_clusters = pd.concat([df_clusters_normal, df_clusters_outlier]).fillna(0).set_geometry("geometry")
      df_clusters["geometry"] = df_clusters["geometry"].buffer(min_dist / 2)
-     df_clusters = df_clusters.rename(columns={"index": "cluster_id"})
+     df_clusters = df_clusters.reset_index().rename(columns={"index": "cluster"})

-     services = pd.concat([services_normal, services_outlier])
+     points = pd.concat([points_normal, points_outlier])

-     return df_clusters, services
+     return df_clusters.to_crs(original_crs), points.to_crs(original_crs)
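
A minimal call sketch for the relocated clustering function (the coordinates and the service_code column are illustrative; per the hunks above, the function now reprojects to a local UTM CRS internally, returns results in the input CRS, and flags outliers in an outlier column):

import geopandas as gpd
from shapely.geometry import Point

from objectnat.methods.point_clustering import get_clusters_polygon

points = gpd.GeoDataFrame(
    {"service_code": [1, 1, 2]},
    geometry=[Point(30.30, 59.95), Point(30.31, 59.95), Point(30.40, 59.99)],
    crs=4326,
)
# min_dist is in meters of the estimated UTM CRS.
clusters, labeled_points = get_clusters_polygon(points, min_dist=100, min_point=2)
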
objectnat/methods/provision/__init__.py

@@ -0,0 +1 @@
+ from .provision import clip_provision, get_service_provision, recalculate_links
objectnat/methods/provision/provision.py

@@ -61,7 +61,7 @@ def clip_provision(
      links = links.copy()
      services = services.copy()

-     s = buildings.intersects(selection_zone.unary_union)
+     s = buildings.intersects(selection_zone.union_all())
      buildings = buildings.loc[s[s].index]
      links = links[links["building_index"].isin(buildings.index.tolist())]
      services_to_keep = set(links["service_index"].tolist())
@@ -88,14 +88,14 @@ def recalculate_links(
      )
      free_demand["demand"] = free_demand["demand"].apply(sum)
      free_demand = free_demand.reindex(buildings.index, fill_value=0)
-     new_sum_time = (buildings["supplyed_demands_within"] + buildings["supplyed_demands_without"]) * buildings[
+     new_sum_time = (buildings["supplied_demands_within"] + buildings["supplied_demands_without"]) * buildings[
          "avg_dist"
      ] - free_demand["distance"]

      buildings["demand_left"] = buildings["demand_left"] + free_demand["demand"]
-     buildings["supplyed_demands_without"] = buildings["supplyed_demands_without"] - free_demand["demand"]
+     buildings["supplied_demands_without"] = buildings["supplied_demands_without"] - free_demand["demand"]
      buildings["avg_dist"] = new_sum_time / (
-         buildings["supplyed_demands_without"] + buildings["supplyed_demands_within"]
+         buildings["supplied_demands_without"] + buildings["supplied_demands_within"]
      )
      buildings["avg_dist"] = buildings.apply(
          lambda x: np.nan if (x["demand"] == x["demand_left"]) else round(x["avg_dist"], 2), axis=1
objectnat/methods/provision/provision_model.py

@@ -23,7 +23,6 @@ class Provision:
          demanded_buildings (gpd.GeoDataFrame): GeoDataFrame representing the buildings with demands for services.
          adjacency_matrix (pd.DataFrame): DataFrame representing the adjacency matrix between buildings.
          threshold (int): Threshold value for the provision calculations.
-         calculation_type (str, optional): Type of calculation ("gravity" or "linear"). Defaults to "gravity".

      Returns:
          Provision: The CityProvision object.
@@ -48,7 +47,7 @@
              adjacency_matrix.copy(), demanded_buildings, services
          ).copy()
          self.threshold = threshold
-         self.check_crs(self.demanded_buildings, self.services)
+         self.services.to_crs(self.demanded_buildings.crs, inplace=True)
          pandarallel.initialize(progress_bar=False, verbose=0, use_memory_fs=config.pandarallel_use_file_system)

      @staticmethod
@@ -65,14 +64,6 @@
          v["capacity_left"] = v["capacity"]
          return v

-     @staticmethod
-     def check_crs(demanded_buildings, services):
-         assert demanded_buildings.crs == services.crs, (
-             f"\nThe CRS in the provided geodataframes are different."
-             f"\nBuildings CRS:{demanded_buildings.crs}"
-             f"\nServices CRS:{services.crs}"
-         )
-
      @staticmethod
      def delete_useless_matrix_rows_columns(adjacency_matrix, demanded_buildings, services):
          adjacency_matrix.index = adjacency_matrix.index.astype(int)
@@ -268,7 +259,15 @@ def _calc_links(
      flat_matrix = destination_matrix.transpose().apply(lambda x: subfunc(x[x > 0]), result_type="reduce")

      distribution_links = gpd.GeoDataFrame(data=[item for sublist in list(flat_matrix) for item in sublist])
-
+     if distribution_links.empty:
+         logger.warning(
+             "Unable to create distribution links - no demand could be matched with service locations. "
+             "This is likely because either: "
+             "1) The demand column in buildings contains zero values, or "
+             "2) The capacity column in services contains zero values, or "
+             "3) There are no service locations within the maximum allowed distance"
+         )
+         return distribution_links
      distribution_links["distance"] = distribution_links.apply(
          lambda x: distance_matrix.loc[x["service_index"]][x["building_index"]],
          axis=1,
@@ -292,8 +291,8 @@
      normative_distance,
  ):
      buildings["avg_dist"] = 0
-     buildings["supplyed_demands_within"] = 0
-     buildings["supplyed_demands_without"] = 0
+     buildings["supplied_demands_within"] = 0
+     buildings["supplied_demands_without"] = 0
      services["carried_capacity_within"] = 0
      services["carried_capacity_without"] = 0
      for _, loc in destination_matrix.iterrows():
@@ -310,8 +309,8 @@
              .add(distances_all.multiply(without, fill_value=0), fill_value=0)
          )
          buildings["demand_left"] = buildings["demand_left"].sub(within.add(without, fill_value=0), fill_value=0)
-         buildings["supplyed_demands_within"] = buildings["supplyed_demands_within"].add(within, fill_value=0)
-         buildings["supplyed_demands_without"] = buildings["supplyed_demands_without"].add(without, fill_value=0)
+         buildings["supplied_demands_within"] = buildings["supplied_demands_within"].add(within, fill_value=0)
+         buildings["supplied_demands_without"] = buildings["supplied_demands_without"].add(without, fill_value=0)

          services.at[loc.name, "capacity_left"] = (
              services.at[loc.name, "capacity_left"] - within.add(without, fill_value=0).sum()
@@ -329,10 +328,10 @@
      buildings["avg_dist"] = buildings.apply(
          lambda x: np.nan if (x["demand"] == x["demand_left"]) else round(x["avg_dist"], 2), axis=1
      )
-     buildings["provison_value"] = (buildings["supplyed_demands_within"] / buildings["demand"]).astype(float).round(2)
+     buildings["provision_value"] = (buildings["supplied_demands_within"] / buildings["demand"]).astype(float).round(2)
      services["service_load"] = (services["capacity"] - services["capacity_left"]).astype(np.uint16)
-     buildings["supplyed_demands_within"] = buildings["supplyed_demands_within"].astype(np.uint16)
-     buildings["supplyed_demands_without"] = buildings["supplyed_demands_without"].astype(np.uint16)
+     buildings["supplied_demands_within"] = buildings["supplied_demands_within"].astype(np.uint16)
+     buildings["supplied_demands_without"] = buildings["supplied_demands_without"].astype(np.uint16)
      services["carried_capacity_within"] = services["carried_capacity_within"].astype(np.uint16)
      services["carried_capacity_without"] = services["carried_capacity_without"].astype(np.uint16)
      logger.debug("Done adding additional options")
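
Downstream, the corrected spellings surface in the provision outputs. A hedged sketch of reading them through the API re-exported in provision/__init__.py (the exact get_service_provision signature is not shown in this diff, so the argument names below are assumptions based on the Provision class docstring):

from objectnat.methods.provision import get_service_provision

# Assumed signature, inferred from the Provision attributes documented above.
buildings, services, links = get_service_provision(
    buildings=demanded_buildings,       # GeoDataFrame with a "demand" column
    services=services_gdf,              # GeoDataFrame with a "capacity" column
    adjacency_matrix=adjacency_matrix,  # pd.DataFrame of travel costs
    threshold=10,                       # normative time/distance threshold
)

# Per-building results in 1.0.1 use the corrected column names:
share_provided = buildings["provision_value"]  # supplied_demands_within / demand
unmet = buildings["demand_left"]
supplied_total = buildings["supplied_demands_within"] + buildings["supplied_demands_without"]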