BERATools 0.2.2-py3-none-any.whl → 0.2.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beratools/__init__.py +8 -3
- beratools/core/{algo_footprint_rel.py → algo_canopy_footprint_exp.py} +176 -139
- beratools/core/algo_centerline.py +61 -77
- beratools/core/algo_common.py +48 -57
- beratools/core/algo_cost.py +18 -25
- beratools/core/algo_dijkstra.py +37 -45
- beratools/core/algo_line_grouping.py +100 -100
- beratools/core/algo_merge_lines.py +40 -8
- beratools/core/algo_split_with_lines.py +289 -304
- beratools/core/algo_vertex_optimization.py +25 -46
- beratools/core/canopy_threshold_relative.py +755 -0
- beratools/core/constants.py +8 -9
- beratools/{tools → core}/line_footprint_functions.py +411 -258
- beratools/core/logger.py +18 -2
- beratools/core/tool_base.py +17 -75
- beratools/gui/assets/BERALogo.ico +0 -0
- beratools/gui/assets/BERA_Splash.gif +0 -0
- beratools/gui/assets/BERA_WizardImage.png +0 -0
- beratools/gui/assets/beratools.json +475 -2171
- beratools/gui/bt_data.py +585 -234
- beratools/gui/bt_gui_main.py +129 -91
- beratools/gui/main.py +4 -7
- beratools/gui/tool_widgets.py +530 -354
- beratools/tools/__init__.py +0 -7
- beratools/tools/{line_footprint_absolute.py → canopy_footprint_absolute.py} +81 -56
- beratools/tools/canopy_footprint_exp.py +113 -0
- beratools/tools/centerline.py +30 -37
- beratools/tools/check_seed_line.py +127 -0
- beratools/tools/common.py +65 -586
- beratools/tools/{line_footprint_fixed.py → ground_footprint.py} +140 -117
- beratools/tools/line_footprint_relative.py +64 -35
- beratools/tools/tool_template.py +48 -40
- beratools/tools/vertex_optimization.py +20 -34
- beratools/utility/env_checks.py +53 -0
- beratools/utility/spatial_common.py +210 -0
- beratools/utility/tool_args.py +138 -0
- beratools-0.2.4.dist-info/METADATA +134 -0
- beratools-0.2.4.dist-info/RECORD +50 -0
- {beratools-0.2.2.dist-info → beratools-0.2.4.dist-info}/WHEEL +1 -1
- beratools-0.2.4.dist-info/entry_points.txt +3 -0
- beratools-0.2.4.dist-info/licenses/LICENSE +674 -0
- beratools/core/algo_tiler.py +0 -428
- beratools/gui/__init__.py +0 -11
- beratools/gui/batch_processing_dlg.py +0 -513
- beratools/gui/map_window.py +0 -162
- beratools/tools/Beratools_r_script.r +0 -1120
- beratools/tools/Ht_metrics.py +0 -116
- beratools/tools/batch_processing.py +0 -136
- beratools/tools/canopy_threshold_relative.py +0 -672
- beratools/tools/canopycostraster.py +0 -222
- beratools/tools/fl_regen_csf.py +0 -428
- beratools/tools/forest_line_attributes.py +0 -408
- beratools/tools/line_grouping.py +0 -45
- beratools/tools/ln_relative_metrics.py +0 -615
- beratools/tools/r_cal_lpi_elai.r +0 -25
- beratools/tools/r_generate_pd_focalraster.r +0 -101
- beratools/tools/r_interface.py +0 -80
- beratools/tools/r_point_density.r +0 -9
- beratools/tools/rpy_chm2trees.py +0 -86
- beratools/tools/rpy_dsm_chm_by.py +0 -81
- beratools/tools/rpy_dtm_by.py +0 -63
- beratools/tools/rpy_find_cellsize.py +0 -43
- beratools/tools/rpy_gnd_csf.py +0 -74
- beratools/tools/rpy_hummock_hollow.py +0 -85
- beratools/tools/rpy_hummock_hollow_raster.py +0 -71
- beratools/tools/rpy_las_info.py +0 -51
- beratools/tools/rpy_laz2las.py +0 -40
- beratools/tools/rpy_lpi_elai_lascat.py +0 -466
- beratools/tools/rpy_normalized_lidar_by.py +0 -56
- beratools/tools/rpy_percent_above_dbh.py +0 -80
- beratools/tools/rpy_points2trees.py +0 -88
- beratools/tools/rpy_vegcoverage.py +0 -94
- beratools/tools/tiler.py +0 -48
- beratools/tools/zonal_threshold.py +0 -144
- beratools-0.2.2.dist-info/METADATA +0 -108
- beratools-0.2.2.dist-info/RECORD +0 -74
- beratools-0.2.2.dist-info/entry_points.txt +0 -2
- beratools-0.2.2.dist-info/licenses/LICENSE +0 -22

beratools/core/algo_line_grouping.py

@@ -12,6 +12,7 @@ Description:
 
 This file hosts code to deal with line grouping and merging, cleanups.
 """
+
 import enum
 from collections import defaultdict
 from dataclasses import dataclass, field
@@ -30,6 +31,7 @@ import beratools.core.constants as bt_const
 TRIMMING_DISTANCE = 75 # meters
 SMALL_BUFFER = 1
 
+
 @enum.unique
 class VertexClass(enum.IntEnum):
     """Enum class for vertex class."""
@@ -107,7 +109,7 @@ def get_angle(line, end_index):
 @dataclass
 class SingleLine:
     """Class to store line and its simplified line."""
-
+
     line_id: int = field(default=0)
     line: Union[sh_geom.LineString, sh_geom.MultiLineString] = field(default=None)
     sim_line: Union[sh_geom.LineString, sh_geom.MultiLineString] = field(default=None)
@@ -171,7 +173,7 @@ class VertexNode:
 
     def get_line_geom(self, line_id):
         return self.get_line_obj(line_id).line
-
+
     def get_all_line_ids(self):
         all_line_ids = {i.line_id for i in self.line_list}
         return all_line_ids
@@ -188,7 +190,7 @@ class VertexNode:
     def get_trim_transect(self, poly, line_indices):
         if not poly:
             return None
-
+
         internal_line = None
         for line_idx in line_indices:
             line = self.get_line_obj(line_idx)
@@ -197,13 +199,13 @@ class VertexNode:
 
         if not internal_line:
             # print("No line is retrieved")
-            return
+            return None
         return internal_line.end_transect()
-
+
     def _trim_polygon(self, poly, trim_transect):
         if not poly or not trim_transect:
-            return
-
+            return None
+
         split_poly = shapely.ops.split(poly, trim_transect)
 
         if len(split_poly.geoms) != 2:
@@ -216,7 +218,7 @@ class VertexNode:
             none_poly = True
 
         if none_poly:
-            return
+            return None
 
         # only two polygons in split_poly
         if split_poly.geoms[0].area > split_poly.geoms[1].area:
@@ -242,15 +244,15 @@ class VertexNode:
                 new_polys.append((idx, out_poly))
 
         return new_polys
-
+
     def trim_end(self, poly):
         transect = self.get_trim_transect(poly, self.line_not_connected)
         if not transect:
-            return
-
+            return None
+
         poly = self._trim_polygon(poly, transect)
         return poly
-
+    # Helper to get the neighbor coordinate based on vertex_index.
 
     @staticmethod
     def get_vertex(line_obj, index):
@@ -260,6 +262,7 @@ class VertexNode:
             index += len(coords)
         if 0 <= index < len(coords):
             return sh_geom.Point(coords[index])
+        return None
 
     @staticmethod
     def get_neighbor(line_obj):
@@ -269,9 +272,9 @@ class VertexNode:
             index = 1
         elif line_obj.vertex_index == -1:
             index = -2
-
+
         return VertexNode.get_vertex(line_obj, index)
-
+
     @staticmethod
     def parallel_line_centered(p1, p2, center, length):
         """Generate a parallel line."""
@@ -295,7 +298,7 @@ class VertexNode:
         new_p2 = sh_geom.Point(center.x + half_dx, center.y + half_dy)
 
         return sh_geom.LineString([new_p1, new_p2])
-
+
     def get_transect_for_primary(self):
         """
         Get a transect line from two primary connected lines.
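The hunk above only shows the tail of parallel_line_centered. As a reading aid, here is a minimal standalone sketch of the same idea: take the direction of p1 to p2 and build a segment of the requested length centered on `center`. The helper name and the normalization step are assumptions; only the last two statements mirror the lines visible in the diff.

```python
import numpy as np
import shapely.geometry as sh_geom

def parallel_line_centered_sketch(p1, p2, center, length):
    """Sketch: a segment of `length`, parallel to p1->p2, centered at `center`."""
    dx, dy = p2.x - p1.x, p2.y - p1.y
    norm = np.hypot(dx, dy)
    if norm == 0:
        return None
    # half-offsets along the unit direction of p1 -> p2
    half_dx = dx / norm * length / 2
    half_dy = dy / norm * length / 2
    new_p1 = sh_geom.Point(center.x - half_dx, center.y - half_dy)
    new_p2 = sh_geom.Point(center.x + half_dx, center.y + half_dy)
    return sh_geom.LineString([new_p1, new_p2])

# a 10 m segment through (5, 5), parallel to the x axis
print(parallel_line_centered_sketch(sh_geom.Point(0, 0), sh_geom.Point(1, 0), sh_geom.Point(5, 5), 10))
```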
@@ -309,7 +312,7 @@ class VertexNode:
         """
         if not self.line_connected or len(self.line_connected[0]) != 2:
             return None
-
+
         # Retrieve the two connected line objects from the first connectivity group.
         line_ids = self.line_connected[0]
         pt1 = None
@@ -330,20 +333,18 @@ class VertexNode:
         if pt1 is None or pt2 is None:
             return None
 
-        transect = algo_common.generate_perpendicular_line_precise(
-            [pt1, self.vertex, pt2], offset=40
-        )
+        transect = algo_common.generate_perpendicular_line_precise([pt1, self.vertex, pt2], offset=40)
         return transect
-
+
     def get_transect_for_primary_second(self):
         """
         Get a transect line from the second primary connected line.
-
-        For the second primary line, this method retrieves the neighbor point from
-        two lines in the second connectivity group, creates a reference line through the
-        vertex by mirroring the neighbor point about the vertex, and then generates a
+
+        For the second primary line, this method retrieves the neighbor point from
+        two lines in the second connectivity group, creates a reference line through the
+        vertex by mirroring the neighbor point about the vertex, and then generates a
         parallel line centered at the vertex.
-
+
         Returns:
             A LineString representing the transect if available, otherwise None.
 
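The restored docstring describes mirroring a neighbor point about the vertex to build a reference line. A minimal sketch of that single geometric step (not BERATools code; the function name is made up):

```python
import shapely.geometry as sh_geom

def mirror_about_vertex(neighbor, vertex):
    """Sketch: reflect `neighbor` through `vertex`, giving a point on the far side."""
    return sh_geom.Point(2 * vertex.x - neighbor.x, 2 * vertex.y - neighbor.y)

vertex = sh_geom.Point(10, 10)
neighbor = sh_geom.Point(12, 13)
print(mirror_about_vertex(neighbor, vertex))  # POINT (8 7)
```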
@@ -378,8 +379,8 @@ class VertexNode:
 
         """
         if len(self.line_connected) == 0:
-            return
-
+            return None
+
         new_polys = []
         line = self.line_connected[0]
 
@@ -415,23 +416,29 @@ class VertexNode:
                 new_polys.append([idx_2, poly_2])
 
         return new_polys
-
+
     def trim_intersection(self, polys, merge_group=True):
-        """
+        """
+        Trim intersection of lines and polygons.
+
+        TODO: there are polygons of 0 zero.
+
+        """
+
         def get_poly_with_info(line, polys):
             if polys.empty:
                 return None, None, None
-
+
             for idx, row in polys.iterrows():
                 poly = row.geometry
-                if not poly:
+                if not poly: # TODO: no polygon
                     continue
 
                 if poly.buffer(SMALL_BUFFER).contains(line):
-                    return idx, poly, row[
-
+                    return idx, poly, row["max_width"]
+
             return None, None, None
-
+
         poly_trim_list = []
         primary_lines = []
         p_primary_list = []
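get_poly_with_info tests containment against poly.buffer(SMALL_BUFFER) rather than the raw polygon (SMALL_BUFFER is 1 in this module). A small shapely sketch of why the buffer helps: a line that overshoots the footprint by a hair still counts as inside. Values here are illustrative only.

```python
from shapely.geometry import LineString, Polygon

poly = Polygon([(0, 0), (10, 0), (10, 10), (0, 10)])
line = LineString([(-0.001, 5), (10, 5)])   # overshoots the polygon by 1 mm

print(poly.contains(line))             # False: strict containment fails
print(poly.buffer(1).contains(line))   # True: a small buffer absorbs the overshoot
```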
@@ -458,7 +465,8 @@ class VertexNode:
         for line_idx in line_idx_to_trim:
             line = self.get_line_geom(line_idx)
             poly_idx, poly, max_width = get_poly_with_info(line, polys)
-
+            if poly_idx:
+                poly_list.append((line_idx, poly_idx, max_width))
 
         poly_list = sorted(poly_list, key=lambda x: x[2])
 
@@ -466,7 +474,7 @@ class VertexNode:
         for i, indices in enumerate(poly_list):
             line_idx = indices[0]
             poly_idx = indices[1]
-            line_cleanup=self.get_line(line_idx)
+            line_cleanup = self.get_line(line_idx)
             poly_cleanup = polys.loc[poly_idx].geometry
             poly_trim = PolygonTrimming(
                 line_index=line_idx,
@@ -486,7 +494,7 @@ class VertexNode:
         # poly_list and poly_trim_list have same index
         for i, indices in enumerate(poly_list):
             p_list = []
-            for p in poly_list[i+1:]:
+            for p in poly_list[i + 1 :]:
                 p_list.append(polys.loc[p[1]].geometry)
 
             poly_trim = poly_trim_list[i]
@@ -520,7 +528,7 @@ class VertexNode:
         elif len(self.line_list) == 1:
             self.vertex_class = VertexClass.SINGLE_WAY
 
-    def
+    def all_has_valid_group_attr(self):
         """If all values in group list are valid value, return True."""
         # TODO: if some line has no group, give advice
         for i in self.line_list:
@@ -532,15 +540,19 @@ class VertexNode:
     def need_regrouping(self):
         pass
 
-    def check_connectivity(self):
-        #
-
-        if
-
-
-
-
+    def check_connectivity(self, use_angle_grouping=True):
+        # Fill missing group with -1
+        for line in self.line_list:
+            if line.group is None:
+                line.group = -1
+
+        if self.need_regrouping():
+            self.group_regroup()
+
+        if use_angle_grouping:
             self.group_line_by_angle()
+        else:
+            self.update_connectivity_by_group()
 
         # record line not connected
         all_line_ids = self.get_all_line_ids()
@@ -551,7 +563,7 @@ class VertexNode:
     def group_regroup(self):
         pass
 
-    def
+    def update_connectivity_by_group(self):
         group_line = defaultdict(list)
         for i in self.line_list:
             group_line[i.group].append(i.line_id)
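Only the first lines of update_connectivity_by_group are visible above. The core idea, bucketing line ids by their group value with missing groups mapped to -1 as the new check_connectivity does, can be sketched with toy data:

```python
from collections import defaultdict

# toy (line_id, group) pairs; None stands for a missing group attribute
lines = [(0, 1), (1, 1), (2, 2), (3, None)]

group_line = defaultdict(list)
for line_id, group in lines:
    group_line[-1 if group is None else group].append(line_id)

print(dict(group_line))  # {1: [0, 1], 2: [2], -1: [3]}
```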
@@ -573,6 +585,7 @@ class VertexNode:
             angle_diff = abs(new_angles[0] - new_angles[1])
             angle_diff = angle_diff if angle_diff <= np.pi else angle_diff - np.pi
 
+            # TODO: check if need to connect when turn angle < TURN_ANGLE_TOLERANCE
             # if angle_diff >= TURN_ANGLE_TOLERANCE:
             self.line_connected.append(
                 (
@@ -587,14 +600,10 @@ class VertexNode:
             for j, angle_2 in enumerate(new_angles[i + 1 :]):
                 if not angle_visited[i + j + 1]:
                     angle_diff = abs(angle_1 - angle_2)
-                    angle_diff =
-                        angle_diff if angle_diff <= np.pi else angle_diff - np.pi
-                    )
+                    angle_diff = angle_diff if angle_diff <= np.pi else angle_diff - np.pi
                     if (
                         angle_diff < ANGLE_TOLERANCE
-                        or np.pi - ANGLE_TOLERANCE
-                        < abs(angle_1 - angle_2)
-                        < np.pi + ANGLE_TOLERANCE
+                        or np.pi - ANGLE_TOLERANCE < abs(angle_1 - angle_2) < np.pi + ANGLE_TOLERANCE
                     ):
                         angle_visited[j + i + 1] = True # tenth of PI
                         self.line_connected.append(
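The reflowed condition treats two line directions as belonging together when they differ by roughly 0 or roughly pi. A self-contained sketch of that test; the tolerance value here is illustrative, the real one comes from the module's constants:

```python
import numpy as np

ANGLE_TOLERANCE = np.pi / 10  # illustrative value only

def roughly_parallel(angle_1, angle_2, tol=ANGLE_TOLERANCE):
    """Sketch: True if two directions are close, pointing the same way or opposite ways."""
    diff = abs(angle_1 - angle_2)
    diff = diff if diff <= np.pi else diff - np.pi  # fold differences above pi
    return diff < tol or np.pi - tol < abs(angle_1 - angle_2) < np.pi + tol

print(roughly_parallel(0.05, np.pi - 0.05))  # opposite directions -> True
print(roughly_parallel(0.05, np.pi / 2))     # perpendicular -> False
```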
@@ -607,19 +616,15 @@ class VertexNode:
 
 class LineGrouping:
     """Class to group lines and merge them."""
-
-    def __init__(self, in_line_gdf, merge_group=True) -> None:
-        # remove empty and null geometry
-        # self.lines = in_line_gdf.copy()
-        # self.lines = self.lines[
-        #     ~self.lines.geometry.isna() & ~self.lines.geometry.is_empty
-        # ]
+
+    def __init__(self, in_line_gdf, merge_group=True, use_angle_grouping=True) -> None:
         if in_line_gdf is None:
             raise ValueError("Line GeoDataFrame cannot be None")
+        self.use_angle_grouping = use_angle_grouping
 
         if in_line_gdf.empty:
             raise ValueError("Line GeoDataFrame cannot be empty")
-
+
         self.lines = algo_common.clean_line_geometries(in_line_gdf)
         self.lines.reset_index(inplace=True, drop=True)
         self.merge_group = merge_group
@@ -650,17 +655,19 @@ class LineGrouping:
         if bt_const.BT_GROUP in self.lines.keys():
             self.groups = self.lines[bt_const.BT_GROUP]
             self.has_group_attr = True
-            if self.groups.hasnans:
+            if self.groups.hasnans: # Todo: check for other invalid values
                 self.need_regrouping = True
 
-        for idx, s_geom, geom, group in zip(
-            *zip(*self.sim_geom.items()), self.lines.geometry, self.groups
-        ):
+        for idx, s_geom, geom, group in zip(*zip(*self.sim_geom.items()), self.lines.geometry, self.groups):
             self.vertex_list.append(VertexNode(idx, geom, s_geom, 0, group))
             self.vertex_list.append(VertexNode(idx, geom, s_geom, -1, group))
 
         v_points = []
         for i in self.vertex_list:
+            if i.vertex is None:
+                print("Vertex is None, skipping.")
+                continue
+
             v_points.append(i.vertex.buffer(SMALL_BUFFER)) # small polygon
 
         # Spatial index of all vertices
@@ -677,10 +684,7 @@ class LineGrouping:
             for j in s_list:
                 if j != i:
                     # some short line will be very close to each other
-                    if (
-                        vertex.vertex.distance(self.vertex_list[j].vertex)
-                        > bt_const.SMALL_BUFFER
-                    ):
+                    if vertex.vertex.distance(self.vertex_list[j].vertex) > bt_const.SMALL_BUFFER:
                         continue
 
                     vertex.merge(self.vertex_list[j])
@@ -690,7 +694,7 @@ class LineGrouping:
                 vertex_visited[i] = True
 
         for i in self.merged_vertex_list:
-            i.check_connectivity()
+            i.check_connectivity(self.use_angle_grouping)
 
         for i in self.merged_vertex_list:
             if i.line_connected:
@@ -721,9 +725,7 @@ class LineGrouping:
         return algo_merge_lines.run_line_merge(self.lines, self.merge_group)
 
     def find_vertex_for_poly_trimming(self):
-        self.vertex_of_concern = [
-            i for i in self.merged_vertex_list if i.vertex_class in CONCERN_CLASSES
-        ]
+        self.vertex_of_concern = [i for i in self.merged_vertex_list if i.vertex_class in CONCERN_CLASSES]
 
     def line_and_poly_cleanup(self):
         sindex_poly = self.polys.sindex
@@ -732,20 +734,21 @@ class LineGrouping:
             s_idx = sindex_poly.query(vertex.vertex, predicate="within")
             if len(s_idx) == 0:
                 continue
-
+
             # Trim intersections of primary lines
             polys = self.polys.loc[s_idx].geometry
             if not self.merge_group:
-                if (
+                if (
+                    vertex.vertex_class == VertexClass.FIVE_WAY_TWO_PRIMARY_LINE
                     or vertex.vertex_class == VertexClass.FIVE_WAY_ONE_PRIMARY_LINE
                     or vertex.vertex_class == VertexClass.FOUR_WAY_ONE_PRIMARY_LINE
                     or vertex.vertex_class == VertexClass.FOUR_WAY_TWO_PRIMARY_LINE
-                    or vertex.vertex_class == VertexClass.THREE_WAY_ONE_PRIMARY_LINE
-
+                    or vertex.vertex_class == VertexClass.THREE_WAY_ONE_PRIMARY_LINE
+                ):
                     out_polys = vertex.trim_primary_end(polys)
                     if len(out_polys) == 0:
                         continue
-
+
                     # update polygon DataFrame
                     for idx, out_poly in out_polys:
                         if out_poly:
@@ -766,7 +769,7 @@ class LineGrouping:
                     out_polys = vertex.trim_end_all(polys)
                     if len(out_polys) == 0:
                         continue
-
+
                     # update polygon DataFrame
                     for idx, out_poly in out_polys:
                         self.polys.at[idx, "geometry"] = out_poly
@@ -780,9 +783,7 @@ class LineGrouping:
             self.lines.at[p_trim.line_index, "geometry"] = p_trim.line_cleanup
 
             # update VertexNode's line
-            self.update_line_in_vertex_node(
-                p_trim.line_index, p_trim.line_cleanup
-            )
+            self.update_line_in_vertex_node(p_trim.line_index, p_trim.line_cleanup)
 
     def get_merged_lines_original(self):
         return self.lines.dissolve(by=bt_const.BT_GROUP)
@@ -793,7 +794,7 @@ class LineGrouping:
         self.group_lines()
 
         self.find_vertex_for_poly_trimming()
-        self.lines[
+        self.lines[bt_const.BT_GROUP] = self.groups # assign group attribute
 
     def run_regrouping(self):
         """
@@ -821,19 +822,14 @@ class LineGrouping:
         # remove null geometry
         # TODO make sure lines and polygons match in pairs
         # they should have same amount and spatial coverage
-        self.valid_polys = self.polys[
-            ~self.polys.geometry.isna() & ~self.polys.geometry.is_empty
-        ]
+        self.valid_polys = self.polys[~self.polys.geometry.isna() & ~self.polys.geometry.is_empty]
 
         # save sh_geom.MultiLineString and sh_geom.MultiPolygon
-        self.invalid_polys = self.polys[
-            (self.polys.geometry.geom_type == "MultiPolygon")
-        ]
+        self.invalid_polys = self.polys[(self.polys.geometry.geom_type == "MultiPolygon")]
 
         # check lines
         self.valid_lines = self.merged_lines_trimmed[
-            ~self.merged_lines_trimmed.geometry.isna()
-            & ~self.merged_lines_trimmed.geometry.is_empty
+            ~self.merged_lines_trimmed.geometry.isna() & ~self.merged_lines_trimmed.geometry.is_empty
         ]
         self.valid_lines.reset_index(inplace=True, drop=True)
 
@@ -842,7 +838,7 @@ class LineGrouping:
         ]
         self.invalid_lines.reset_index(inplace=True, drop=True)
 
-    def save_file(self, out_file):
+    def save_file(self, out_file, out_layer="ground_footprint"):
         if not self.valid_lines.empty:
             self.valid_lines["length"] = self.valid_lines.length
             self.valid_lines.to_file(out_file, layer="merged_lines")
@@ -850,9 +846,10 @@ class LineGrouping:
         if not self.valid_polys.empty:
             if "length" in self.valid_polys.columns:
                 self.valid_polys.drop(columns=["length"], inplace=True)
-
+
             self.valid_polys["area"] = self.valid_polys.area
-
+            layer_name = out_layer
+            self.valid_polys.to_file(out_file, layer=layer_name)
 
         if not self.invalid_lines.empty:
             self.invalid_lines.to_file(out_file, layer="invalid_lines")
@@ -860,6 +857,7 @@ class LineGrouping:
         if not self.invalid_polys.empty:
             self.invalid_polys.to_file(out_file, layer="invalid_polygons")
 
+
 @dataclass
 class PolygonTrimming:
     """Store polygon and line to trim. Primary polygon is used to trim both."""
@@ -878,18 +876,20 @@ class PolygonTrimming:
         if self.line_cleanup.length < 100.0:
             trim_distance = 50.0
 
-        poly_primary = poly_primary.intersection(
-
-        )
-
+        poly_primary = poly_primary.intersection(vertex.buffer(trim_distance))
+
         self.poly_primary = poly_primary
-
+
         # TODO: check why there is such cases
         if self.poly_cleanup is None:
             print("No polygon to trim.")
             return
-
+
         midpoint = self.line_cleanup.interpolate(0.5, normalized=True)
+        if self.poly_primary is None or self.poly_primary.is_empty:
+            # print("Warning: No valid primary polygon for trimming; skipping difference operation.")
+            # TODO: handle this case
+            return
         diff = self.poly_cleanup.difference(self.poly_primary)
         if diff.geom_type == "Polygon":
             self.poly_cleanup = diff
beratools/core/algo_merge_lines.py

@@ -10,27 +10,57 @@ Description:
 This script is part of the BERA Tools.
 Webpage: https://github.com/appliedgrg/beratools
 
-This file is intended to be hosting algorithms and utility functions/classes
+This file is intended to be hosting algorithms and utility functions/classes
 for merging lines.
 """
+
 from itertools import pairwise
 from operator import itemgetter
 
 import networkit as nk
 import shapely.geometry as sh_geom
+from shapely.geometry import GeometryCollection, LineString, MultiLineString
+from shapely.ops import linemerge
 
 import beratools.core.algo_common as algo_common
 import beratools.core.constants as bt_const
 
 
+def safe_linemerge(geom):
+    if isinstance(geom, (MultiLineString, GeometryCollection)):
+        return linemerge(geom)
+    elif isinstance(geom, LineString):
+        return geom
+    else:
+        return geom
+
+def custom_line_merge(geom):
+    if geom.geom_type == "MultiLineString":
+        # First try shapely's linemerge (fast)
+        merged = linemerge(geom)
+        # If still MultiLineString, use MergeLines for complex cases
+        if isinstance(merged, sh_geom.MultiLineString):
+            worker = MergeLines(merged)
+            merged = worker.merge_all_lines()
+            return merged if merged else geom
+        else:
+            return merged
+    elif geom.geom_type == "LineString":
+        return geom
+    else:
+        return geom
+
+
 def run_line_merge(in_line_gdf, merge_group):
     out_line_gdf = in_line_gdf
     if merge_group:
+        if bt_const.BT_GROUP not in in_line_gdf.columns:
+            in_line_gdf = in_line_gdf.copy()
+            in_line_gdf[bt_const.BT_GROUP] = range(1, len(in_line_gdf) + 1)
         out_line_gdf = in_line_gdf.dissolve(by=bt_const.BT_GROUP, as_index=False)
 
-        out_line_gdf.geometry = out_line_gdf.
-
-        for row in out_line_gdf.itertuples():
+        out_line_gdf.geometry = out_line_gdf.geometry.apply(safe_linemerge)
+        for i, row in enumerate(out_line_gdf.itertuples()):
             if isinstance(row.geometry, sh_geom.MultiLineString):
                 worker = MergeLines(row.geometry)
                 merged_line = worker.merge_all_lines()
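For context on the new safe_linemerge/custom_line_merge helpers: shapely's linemerge joins touching segments into a single LineString but leaves disjoint or branching inputs as a MultiLineString, which is the case the MergeLines fallback targets. A quick illustration:

```python
from shapely.geometry import MultiLineString
from shapely.ops import linemerge

# two touching segments merge into a single LineString
touching = MultiLineString([[(0, 0), (1, 0)], [(1, 0), (2, 0)]])
print(linemerge(touching))               # LINESTRING (0 0, 1 0, 2 0)

# disjoint segments stay a MultiLineString, the case custom_line_merge hands to MergeLines
disjoint = MultiLineString([[(0, 0), (1, 0)], [(5, 0), (6, 0)]])
print(linemerge(disjoint).geom_type)     # MultiLineString
```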
@@ -39,9 +69,10 @@ def run_line_merge(in_line_gdf, merge_group):
 
     out_line_gdf = algo_common.clean_line_geometries(out_line_gdf)
     out_line_gdf.reset_index(inplace=True, drop=True)
-    out_line_gdf[
+    out_line_gdf["length"] = out_line_gdf.geometry.length
     return out_line_gdf
 
+
 class MergeLines:
     """Merge line segments in MultiLineString."""
 
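A hedged end-to-end sketch of the dissolve-then-merge pattern that run_line_merge implements, using a made-up group column in place of bt_const.BT_GROUP and plain geopandas/shapely calls:

```python
import geopandas as gpd
from shapely.geometry import LineString
from shapely.ops import linemerge

gdf = gpd.GeoDataFrame(
    {"group": [1, 1, 2]},  # stand-in for bt_const.BT_GROUP
    geometry=[
        LineString([(0, 0), (1, 0)]),
        LineString([(1, 0), (2, 0)]),
        LineString([(0, 5), (1, 5)]),
    ],
    crs="EPSG:32612",
)

merged = gdf.dissolve(by="group", as_index=False)
merged.geometry = merged.geometry.apply(
    lambda g: linemerge(g) if g.geom_type == "MultiLineString" else g
)
merged["length"] = merged.geometry.length
print(merged[["group", "length"]])  # group 1 -> length 2.0, group 2 -> length 1.0
```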
@@ -61,7 +92,7 @@ class MergeLines:
         self.line_segs = [line for line in self.line_segs if line.length > 1e-3]
         self.multi_line = sh_geom.MultiLineString(self.line_segs)
         m = sh_geom.mapping(self.multi_line)
-        self.end = [(i[0], i[-1]) for i in m[
+        self.end = [(i[0], i[-1]) for i in m["coordinates"]]
 
         self.G = nk.Graph(edgesIndexed=True)
         self.G.addNodes(2)
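The restored self.end line pulls the first and last coordinate of each segment out of shapely's GeoJSON-style mapping; those endpoint pairs are what the networkit graph is keyed on. A minimal demonstration of that extraction step:

```python
import shapely.geometry as sh_geom

multi_line = sh_geom.MultiLineString([[(0, 0), (1, 0), (2, 0)], [(2, 0), (2, 3)]])
m = sh_geom.mapping(multi_line)
ends = [(coords[0], coords[-1]) for coords in m["coordinates"]]
print(ends)  # [((0.0, 0.0), (2.0, 0.0)), ((2.0, 0.0), (2.0, 3.0))]
```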
@@ -112,12 +143,13 @@ class MergeLines:
             single_path = False
 
         return single_path
+
     def get_merged_line_for_component(self, component):
         sub = nk.graphtools.subgraphFromNodes(self.G, component)
         lines = None
         if nk.graphtools.maxDegree(sub) >= 3: # not simple path
             edges = [self.G.edgeId(i[0], i[1]) for i in list(sub.iterEdges())]
-            lines =
+            lines = itemgetter(*edges)(self.line_segs)
         elif nk.graphtools.maxDegree(sub) == 2:
             lines = self.merge_single_line(component)
 
@@ -180,7 +212,7 @@ class MergeLines:
             pair = pairs[i]
             poly_t = self.node_poly[pair[0]]
             point_t = sh_geom.Point(self.end[id][0])
-            if
+            if poly_t.contains(point_t):
                 line = self.line_segs[id]
             else:
                 # line = reverse(self.line_segs[id])