BERATools 0.2.0__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beratools/__init__.py +1 -7
- beratools/core/algo_centerline.py +491 -351
- beratools/core/algo_common.py +497 -0
- beratools/core/algo_cost.py +192 -0
- beratools/core/{dijkstra_algorithm.py → algo_dijkstra.py} +503 -460
- beratools/core/algo_footprint_rel.py +577 -0
- beratools/core/algo_line_grouping.py +944 -0
- beratools/core/algo_merge_lines.py +214 -0
- beratools/core/algo_split_with_lines.py +304 -0
- beratools/core/algo_tiler.py +428 -0
- beratools/core/algo_vertex_optimization.py +469 -0
- beratools/core/constants.py +52 -86
- beratools/core/logger.py +76 -85
- beratools/core/tool_base.py +196 -133
- beratools/gui/__init__.py +11 -15
- beratools/gui/{beratools.json → assets/beratools.json} +2185 -2300
- beratools/gui/batch_processing_dlg.py +513 -463
- beratools/gui/bt_data.py +481 -487
- beratools/gui/bt_gui_main.py +710 -691
- beratools/gui/main.py +26 -0
- beratools/gui/map_window.py +162 -146
- beratools/gui/tool_widgets.py +725 -493
- beratools/tools/Beratools_r_script.r +1120 -1120
- beratools/tools/Ht_metrics.py +116 -116
- beratools/tools/__init__.py +7 -7
- beratools/tools/batch_processing.py +136 -132
- beratools/tools/canopy_threshold_relative.py +672 -670
- beratools/tools/canopycostraster.py +222 -222
- beratools/tools/centerline.py +136 -176
- beratools/tools/common.py +857 -885
- beratools/tools/fl_regen_csf.py +428 -428
- beratools/tools/forest_line_attributes.py +408 -408
- beratools/tools/line_footprint_absolute.py +213 -363
- beratools/tools/line_footprint_fixed.py +436 -282
- beratools/tools/line_footprint_functions.py +733 -720
- beratools/tools/line_footprint_relative.py +73 -64
- beratools/tools/line_grouping.py +45 -0
- beratools/tools/ln_relative_metrics.py +615 -615
- beratools/tools/r_cal_lpi_elai.r +24 -24
- beratools/tools/r_generate_pd_focalraster.r +100 -100
- beratools/tools/r_interface.py +79 -79
- beratools/tools/r_point_density.r +8 -8
- beratools/tools/rpy_chm2trees.py +86 -86
- beratools/tools/rpy_dsm_chm_by.py +81 -81
- beratools/tools/rpy_dtm_by.py +63 -63
- beratools/tools/rpy_find_cellsize.py +43 -43
- beratools/tools/rpy_gnd_csf.py +74 -74
- beratools/tools/rpy_hummock_hollow.py +85 -85
- beratools/tools/rpy_hummock_hollow_raster.py +71 -71
- beratools/tools/rpy_las_info.py +51 -51
- beratools/tools/rpy_laz2las.py +40 -40
- beratools/tools/rpy_lpi_elai_lascat.py +466 -466
- beratools/tools/rpy_normalized_lidar_by.py +56 -56
- beratools/tools/rpy_percent_above_dbh.py +80 -80
- beratools/tools/rpy_points2trees.py +88 -88
- beratools/tools/rpy_vegcoverage.py +94 -94
- beratools/tools/tiler.py +48 -206
- beratools/tools/tool_template.py +69 -54
- beratools/tools/vertex_optimization.py +61 -620
- beratools/tools/zonal_threshold.py +144 -144
- beratools-0.2.1.dist-info/METADATA +109 -0
- beratools-0.2.1.dist-info/RECORD +74 -0
- {beratools-0.2.0.dist-info → beratools-0.2.1.dist-info}/WHEEL +1 -1
- {beratools-0.2.0.dist-info → beratools-0.2.1.dist-info}/licenses/LICENSE +22 -22
- beratools/gui/cli.py +0 -18
- beratools/gui/gui.json +0 -8
- beratools/gui_tk/ASCII Banners.txt +0 -248
- beratools/gui_tk/__init__.py +0 -20
- beratools/gui_tk/beratools_main.py +0 -515
- beratools/gui_tk/bt_widgets.py +0 -442
- beratools/gui_tk/cli.py +0 -18
- beratools/gui_tk/img/BERALogo.png +0 -0
- beratools/gui_tk/img/closed.gif +0 -0
- beratools/gui_tk/img/closed.png +0 -0
- beratools/gui_tk/img/open.gif +0 -0
- beratools/gui_tk/img/open.png +0 -0
- beratools/gui_tk/img/tool.gif +0 -0
- beratools/gui_tk/img/tool.png +0 -0
- beratools/gui_tk/main.py +0 -14
- beratools/gui_tk/map_window.py +0 -144
- beratools/gui_tk/runner.py +0 -1481
- beratools/gui_tk/tooltip.py +0 -55
- beratools/third_party/pyqtlet2/__init__.py +0 -9
- beratools/third_party/pyqtlet2/leaflet/__init__.py +0 -26
- beratools/third_party/pyqtlet2/leaflet/control/__init__.py +0 -6
- beratools/third_party/pyqtlet2/leaflet/control/control.py +0 -59
- beratools/third_party/pyqtlet2/leaflet/control/draw.py +0 -52
- beratools/third_party/pyqtlet2/leaflet/control/layers.py +0 -20
- beratools/third_party/pyqtlet2/leaflet/core/Parser.py +0 -24
- beratools/third_party/pyqtlet2/leaflet/core/__init__.py +0 -2
- beratools/third_party/pyqtlet2/leaflet/core/evented.py +0 -180
- beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +0 -5
- beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +0 -34
- beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +0 -1
- beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +0 -30
- beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +0 -18
- beratools/third_party/pyqtlet2/leaflet/layer/layer.py +0 -105
- beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +0 -45
- beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +0 -1
- beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +0 -91
- beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +0 -2
- beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +0 -4
- beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +0 -16
- beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +0 -5
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +0 -15
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +0 -18
- beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +0 -5
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +0 -14
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +0 -18
- beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +0 -14
- beratools/third_party/pyqtlet2/leaflet/map/__init__.py +0 -1
- beratools/third_party/pyqtlet2/leaflet/map/map.py +0 -220
- beratools/third_party/pyqtlet2/mapwidget.py +0 -45
- beratools/third_party/pyqtlet2/web/custom.js +0 -43
- beratools/third_party/pyqtlet2/web/map.html +0 -23
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +0 -656
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +0 -6
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +0 -14
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +0 -4
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +0 -22
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +0 -43
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +0 -20
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +0 -156
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +0 -10
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +0 -10
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +0 -22
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +0 -57
- beratools/tools/forest_line_ecosite.py +0 -216
- beratools/tools/lapis_all.py +0 -103
- beratools/tools/least_cost_path_from_chm.py +0 -152
- beratools-0.2.0.dist-info/METADATA +0 -63
- beratools-0.2.0.dist-info/RECORD +0 -142
- /beratools/gui/{img → assets}/BERALogo.png +0 -0
- /beratools/gui/{img → assets}/closed.gif +0 -0
- /beratools/gui/{img → assets}/closed.png +0 -0
- /beratools/{gui_tk → gui/assets}/gui.json +0 -0
- /beratools/gui/{img → assets}/open.gif +0 -0
- /beratools/gui/{img → assets}/open.png +0 -0
- /beratools/gui/{img → assets}/tool.gif +0 -0
- /beratools/gui/{img → assets}/tool.png +0 -0
- {beratools-0.2.0.dist-info → beratools-0.2.1.dist-info}/entry_points.txt +0 -0
beratools/core/algo_line_grouping.py (new file)

@@ -0,0 +1,944 @@

"""
Copyright (C) 2025 Applied Geospatial Research Group.

This script is licensed under the GNU General Public License v3.0.
See <https://gnu.org/licenses/gpl-3.0> for full license details.

Author: Richard Zeng, Maverick Fong

Description:
    This script is part of the BERA Tools.
    Webpage: https://github.com/appliedgrg/beratools

    This file hosts code to deal with line grouping and merging, cleanups.
"""
import enum
from collections import defaultdict
from dataclasses import dataclass, field
from itertools import chain
from typing import Optional, Union

import networkit as nk
import numpy as np
import shapely
import shapely.geometry as sh_geom

import beratools.core.algo_common as algo_common
import beratools.core.algo_merge_lines as algo_merge_lines
import beratools.core.constants as bt_const

TRIMMING_DISTANCE = 75  # meters
SMALL_BUFFER = 1


@enum.unique
class VertexClass(enum.IntEnum):
    """Enum class for vertex class."""

    TWO_WAY_ZERO_PRIMARY_LINE = 1
    THREE_WAY_ZERO_PRIMARY_LINE = 2
    THREE_WAY_ONE_PRIMARY_LINE = 3
    FOUR_WAY_ZERO_PRIMARY_LINE = 4
    FOUR_WAY_ONE_PRIMARY_LINE = 5
    FOUR_WAY_TWO_PRIMARY_LINE = 6
    FIVE_WAY_ZERO_PRIMARY_LINE = 7
    FIVE_WAY_ONE_PRIMARY_LINE = 8
    FIVE_WAY_TWO_PRIMARY_LINE = 9
    SINGLE_WAY = 10


CONCERN_CLASSES = (
    VertexClass.FIVE_WAY_ZERO_PRIMARY_LINE,
    VertexClass.FIVE_WAY_ONE_PRIMARY_LINE,
    VertexClass.FIVE_WAY_TWO_PRIMARY_LINE,
    VertexClass.FOUR_WAY_ZERO_PRIMARY_LINE,
    VertexClass.FOUR_WAY_ONE_PRIMARY_LINE,
    VertexClass.FOUR_WAY_TWO_PRIMARY_LINE,
    VertexClass.THREE_WAY_ZERO_PRIMARY_LINE,
    VertexClass.THREE_WAY_ONE_PRIMARY_LINE,
    VertexClass.TWO_WAY_ZERO_PRIMARY_LINE,
    VertexClass.SINGLE_WAY,
)

ANGLE_TOLERANCE = np.pi / 10
TURN_ANGLE_TOLERANCE = np.pi * 0.5  # (little bigger than right angle)
TRIM_THRESHOLD = 0.15
TRANSECT_LENGTH = 40


def points_in_line(line):
    """Get point list of line."""
    point_list = []
    try:
        for point in list(line.coords):  # loops through every point in a line
            # loops through every vertex of every segment
            if point:  # adds all the vertices to segment_list, which creates an array
                point_list.append(sh_geom.Point(point[0], point[1]))
    except Exception as e:
        print(e)

    return point_list


def get_angle(line, end_index):
    """
    Calculate the angle of the first or last segment.

    Args:
        line: sh_geom.LineString
        end_index: 0 or -1 of the line vertices. Consider the multipart.

    """
    pts = points_in_line(line)

    if end_index == 0:
        pt_1 = pts[0]
        pt_2 = pts[1]
    elif end_index == -1:
        pt_1 = pts[-1]
        pt_2 = pts[-2]

    delta_x = pt_2.x - pt_1.x
    delta_y = pt_2.y - pt_1.y
    angle = np.arctan2(delta_y, delta_x)

    return angle
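
# --- Editor's illustration (not part of the released file) -----------------
# get_angle() measures the orientation of an end segment with np.arctan2.
# For line = sh_geom.LineString([(0, 0), (1, 1), (2, 1)]):
#     get_angle(line, 0)   # first segment (0,0)->(1,1): arctan2(1, 1) = pi/4
#     get_angle(line, -1)  # last segment, measured from its end point:
#                          # (2,1)->(1,1): arctan2(0, -1) = pi
# ----------------------------------------------------------------------------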


@dataclass
class SingleLine:
    """Class to store line and its simplified line."""

    line_id: int = field(default=0)
    line: Union[sh_geom.LineString, sh_geom.MultiLineString] = field(default=None)
    sim_line: Union[sh_geom.LineString, sh_geom.MultiLineString] = field(default=None)
    vertex_index: int = field(default=0)
    group: int = field(default=0)

    def get_angle_for_line(self):
        return get_angle(self.sim_line, self.vertex_index)

    def end_transect(self):
        coords = self.sim_line.coords
        end_seg = None
        if self.vertex_index == 0:
            end_seg = sh_geom.LineString([coords[0], coords[1]])
        elif self.vertex_index == -1:
            end_seg = sh_geom.LineString([coords[-1], coords[-2]])

        l_left = end_seg.offset_curve(TRANSECT_LENGTH)
        l_right = end_seg.offset_curve(-TRANSECT_LENGTH)

        return sh_geom.LineString([l_left.coords[0], l_right.coords[0]])

    def midpoint(self):
        return shapely.force_2d(self.line.interpolate(0.5, normalized=True))

    def update_line(self, line):
        self.line = line
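
# --- Editor's illustration (not part of the released file) -----------------
# A SingleLine pairs one input geometry with its simplified copy and records
# which end (vertex_index 0 or -1) touches the vertex under analysis.
# A minimal sketch, assuming `ls` is a shapely LineString in metres:
#     sl = SingleLine(line_id=0, line=ls, sim_line=ls.simplify(1), vertex_index=0)
#     sl.get_angle_for_line()  # orientation of the first simplified segment
#     sl.end_transect()        # cross-line at that end, 2 * TRANSECT_LENGTH wide
#     sl.midpoint()            # 2D midpoint of the full (unsimplified) line
# ----------------------------------------------------------------------------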


class VertexNode:
    """Class to store vertex and lines connected to it."""

    def __init__(self, line_id, line, sim_line, vertex_index, group=None) -> None:
        self.vertex = None
        self.line_list = []
        self.line_connected = []  # pairs of lines connected
        self.line_not_connected = []
        self.vertex_class = None

        if line:
            self.add_line(SingleLine(line_id, line, sim_line, vertex_index, group))

    def set_vertex(self, line, vertex_index):
        """Set vertex coordinates."""
        self.vertex = shapely.force_2d(shapely.get_point(line, vertex_index))

    def add_line(self, line_class):
        """Add line when creating or merging other VertexNode."""
        self.line_list.append(line_class)
        self.set_vertex(line_class.line, line_class.vertex_index)

    def get_line(self, line_id):
        for line in self.line_list:
            if line.line_id == line_id:
                return line.line

    def get_line_obj(self, line_id):
        for line in self.line_list:
            if line.line_id == line_id:
                return line

    def get_line_geom(self, line_id):
        return self.get_line_obj(line_id).line

    def get_all_line_ids(self):
        all_line_ids = {i.line_id for i in self.line_list}
        return all_line_ids

    def update_line(self, line_id, line):
        for i in self.line_list:
            if i.line_id == line_id:
                i.update_line(line)

    def merge(self, vertex):
        """Merge other VertexNode if they have same vertex coords."""
        self.add_line(vertex.line_list[0])

    def get_trim_transect(self, poly, line_indices):
        if not poly:
            return None

        internal_line = None
        for line_idx in line_indices:
            line = self.get_line_obj(line_idx)
            if poly.contains(line.midpoint()):
                internal_line = line

        if not internal_line:
            # print("No line is retrieved")
            return
        return internal_line.end_transect()

    def _trim_polygon(self, poly, trim_transect):
        if not poly or not trim_transect:
            return

        split_poly = shapely.ops.split(poly, trim_transect)

        if len(split_poly.geoms) != 2:
            return

        # check geom_type
        none_poly = False
        for geom in split_poly.geoms:
            if geom.geom_type != "Polygon":
                none_poly = True

        if none_poly:
            return

        # only two polygons in split_poly
        if split_poly.geoms[0].area > split_poly.geoms[1].area:
            poly = split_poly.geoms[0]
        else:
            poly = split_poly.geoms[1]

        return poly

    def trim_end_all(self, polys):
        """
        Trim all unconnected lines in the vertex.

        Args:
            polys: list of polygons returned by sindex.query

        """
        polys = polys.geometry
        new_polys = []
        for idx, poly in polys.items():
            out_poly = self.trim_end(poly)
            if out_poly:
                new_polys.append((idx, out_poly))

        return new_polys

    def trim_end(self, poly):
        transect = self.get_trim_transect(poly, self.line_not_connected)
        if not transect:
            return

        poly = self._trim_polygon(poly, transect)
        return poly

    # Helper to get the neighbor coordinate based on vertex_index.
    @staticmethod
    def get_vertex(line_obj, index):
        coords = list(line_obj.sim_line.coords)
        # Normalize negative indices.
        if index < 0:
            index += len(coords)
        if 0 <= index < len(coords):
            return sh_geom.Point(coords[index])

    @staticmethod
    def get_neighbor(line_obj):
        index = 0

        if line_obj.vertex_index == 0:
            index = 1
        elif line_obj.vertex_index == -1:
            index = -2

        return VertexNode.get_vertex(line_obj, index)

    @staticmethod
    def parallel_line_centered(p1, p2, center, length):
        """Generate a parallel line."""
        # Compute the direction vector.
        dx = p2.x - p1.x
        dy = p2.y - p1.y

        # Normalize the direction vector.
        magnitude = (dx**2 + dy**2) ** 0.5
        if magnitude == 0:
            return None
        dx /= magnitude
        dy /= magnitude

        # Compute half-length shifts.
        half_dx = (dx * length) / 2
        half_dy = (dy * length) / 2

        # Compute the endpoints of the new parallel line.
        new_p1 = sh_geom.Point(center.x - half_dx, center.y - half_dy)
        new_p2 = sh_geom.Point(center.x + half_dx, center.y + half_dy)

        return sh_geom.LineString([new_p1, new_p2])

    def get_transect_for_primary(self):
        """
        Get a transect line from two primary connected lines.

        This method calculates a transect line that is perpendicular to the
        line segment formed by the next vertex neighbors of these two lines
        and the current vertex.

        Return:
            A transect line object if the conditions are met, otherwise None.

        """
        if not self.line_connected or len(self.line_connected[0]) != 2:
            return None

        # Retrieve the two connected line objects from the first connectivity group.
        line_ids = self.line_connected[0]
        pt1 = None
        pt2 = None
        if line_ids[0] == line_ids[1]:  # line ring
            # TODO: check line ring when merging vertex nodes.
            # TODO: change one end index to -1
            line_id = line_ids[0]
            pt1 = self.get_vertex(self.get_line_obj(line_id), 1)
            pt2 = self.get_vertex(self.get_line_obj(line_id), -2)
        else:  # two different lines
            line_obj1 = self.get_line_obj(line_ids[0])
            line_obj2 = self.get_line_obj(line_ids[1])

            pt1 = self.get_neighbor(line_obj1)
            pt2 = self.get_neighbor(line_obj2)

        if pt1 is None or pt2 is None:
            return None

        transect = algo_common.generate_perpendicular_line_precise(
            [pt1, self.vertex, pt2], offset=40
        )
        return transect

    def get_transect_for_primary_second(self):
        """
        Get a transect line from the second primary connected line.

        For the second primary line, this method retrieves the neighbor point
        from two lines in the second connectivity group, creates a reference
        line through the vertex by mirroring the neighbor point about the
        vertex, and then generates a parallel line centered at the vertex.

        Returns:
            A LineString representing the transect if available, otherwise None.

        """
        # Ensure there is a second connectivity group.
        if not self.line_connected or len(self.line_connected) < 2:
            return None

        # Use the first line of the second connectivity group.
        second_primary = self.line_connected[1]
        line_obj1 = self.get_line_obj(second_primary[0])
        line_obj2 = self.get_line_obj(second_primary[1])
        if not line_obj1 or not line_obj2:
            return None

        pt1 = self.get_neighbor(line_obj1)
        pt2 = self.get_neighbor(line_obj2)

        if pt1 is None or pt2 is None:
            return None

        center = self.vertex
        transect = self.parallel_line_centered(pt1, pt2, center, TRANSECT_LENGTH)
        return transect
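
    # --- Editor's illustration (not part of the released file) -------------
    # parallel_line_centered() keeps only the direction of p1 -> p2: it
    # normalizes (dx, dy) and builds a segment of the requested length
    # centred on `center`. For p1=(0, 0), p2=(10, 0), center=(5, 5), length=4:
    #     VertexNode.parallel_line_centered(
    #         sh_geom.Point(0, 0), sh_geom.Point(10, 0), sh_geom.Point(5, 5), 4
    #     )  # -> LINESTRING (3 5, 7 5)
    # ------------------------------------------------------------------------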

    def trim_primary_end(self, polys):
        """
        Trim first primary line in the vertex.

        Args:
            polys: list of polygons returned by sindex.query

        """
        if len(self.line_connected) == 0:
            return

        new_polys = []
        line = self.line_connected[0]

        # use the first line to get transect
        # transect = self.get_line_obj(line[0]).end_transect()
        # if len(self.line_connected) == 1:
        transect = self.get_transect_for_primary()
        # elif len(self.line_connected) > 1:
        #     transect = self.get_transect_for_primary_second()

        idx_1 = line[0]
        poly_1 = None
        idx_2 = line[1]
        poly_2 = None

        for idx, poly in polys.items():
            # TODO: no polygons
            if not poly:
                continue

            if poly.buffer(SMALL_BUFFER).contains(self.get_line_geom(line[0])):
                poly_1 = poly
                idx_1 = idx
            elif poly.buffer(SMALL_BUFFER).contains(self.get_line_geom(line[1])):
                poly_2 = poly
                idx_2 = idx

        if poly_1:
            poly_1 = self._trim_polygon(poly_1, transect)
            new_polys.append([idx_1, poly_1])
        if poly_2:
            poly_2 = self._trim_polygon(poly_2, transect)
            new_polys.append([idx_2, poly_2])

        return new_polys

    def trim_intersection(self, polys, merge_group=True):
        """Trim intersection of lines and polygons."""
        def get_poly_with_info(line, polys):
            if polys.empty:
                return None, None, None

            for idx, row in polys.iterrows():
                poly = row.geometry
                if not poly:  # TODO: no polygon
                    continue

                if poly.buffer(SMALL_BUFFER).contains(line):
                    return idx, poly, row["max_width"]

            return None, None, None

        poly_trim_list = []
        primary_lines = []
        p_primary_list = []

        # retrieve primary lines
        if len(self.line_connected) > 0:
            for idx in self.line_connected[0]:  # only one connected line is used
                primary_lines.append(self.get_line(idx))
                _, poly, _ = get_poly_with_info(self.get_line(idx), polys)

                if poly:
                    p_primary_list.append(poly.buffer(bt_const.SMALL_BUFFER))
                else:
                    print("trim_intersection: No primary polygon found.")

        line_idx_to_trim = self.line_not_connected
        poly_list = []
        if not merge_group:  # add all remaining primary lines for trimming
            if len(self.line_connected) > 1:
                for line in self.line_connected[1:]:
                    line_idx_to_trim.extend(line)

        # sort line indices by footprint max_width
        for line_idx in line_idx_to_trim:
            line = self.get_line_geom(line_idx)
            poly_idx, poly, max_width = get_poly_with_info(line, polys)
            poly_list.append((line_idx, poly_idx, max_width))

        poly_list = sorted(poly_list, key=lambda x: x[2])

        # create PolygonTrimming object and trim all by primary line
        for i, indices in enumerate(poly_list):
            line_idx = indices[0]
            poly_idx = indices[1]
            line_cleanup = self.get_line(line_idx)
            poly_cleanup = polys.loc[poly_idx].geometry
            poly_trim = PolygonTrimming(
                line_index=line_idx,
                line_cleanup=line_cleanup,
                poly_index=poly_idx,
                poly_cleanup=poly_cleanup,
            )

            poly_trim_list.append(poly_trim)
            if p_primary_list:
                poly_trim.process(p_primary_list, self.vertex)

            # use poly_trim.poly_cleanup to update polys gdf's geometry
            polys.at[poly_trim.poly_index, "geometry"] = poly_trim.poly_cleanup

        # further trimming overlaps by non-primary lines
        # poly_list and poly_trim_list have same index
        for i, indices in enumerate(poly_list):
            p_list = []
            for p in poly_list[i + 1:]:
                p_list.append(polys.loc[p[1]].geometry)

            poly_trim = poly_trim_list[i]
            poly_trim.process(p_list, self.vertex)

        return poly_trim_list

    def assign_vertex_class(self):
        if len(self.line_list) == 5:
            if len(self.line_connected) == 0:
                self.vertex_class = VertexClass.FIVE_WAY_ZERO_PRIMARY_LINE
            if len(self.line_connected) == 1:
                self.vertex_class = VertexClass.FIVE_WAY_ONE_PRIMARY_LINE
            if len(self.line_connected) == 2:
                self.vertex_class = VertexClass.FIVE_WAY_TWO_PRIMARY_LINE
        elif len(self.line_list) == 4:
            if len(self.line_connected) == 0:
                self.vertex_class = VertexClass.FOUR_WAY_ZERO_PRIMARY_LINE
            if len(self.line_connected) == 1:
                self.vertex_class = VertexClass.FOUR_WAY_ONE_PRIMARY_LINE
            if len(self.line_connected) == 2:
                self.vertex_class = VertexClass.FOUR_WAY_TWO_PRIMARY_LINE
        elif len(self.line_list) == 3:
            if len(self.line_connected) == 0:
                self.vertex_class = VertexClass.THREE_WAY_ZERO_PRIMARY_LINE
            if len(self.line_connected) == 1:
                self.vertex_class = VertexClass.THREE_WAY_ONE_PRIMARY_LINE
        elif len(self.line_list) == 2:
            if len(self.line_connected) == 0:
                self.vertex_class = VertexClass.TWO_WAY_ZERO_PRIMARY_LINE
        elif len(self.line_list) == 1:
            self.vertex_class = VertexClass.SINGLE_WAY

    def has_group_attr(self):
        """If all values in group list are valid value, return True."""
        # TODO: if some line has no group, give advice
        for i in self.line_list:
            if i.group is None:
                return False

        return True

    def need_regrouping(self):
        pass

    def check_connectivity(self):
        # TODO add regrouping when new lines are added
        if self.has_group_attr():
            if self.need_regrouping():
                self.group_regroup()
            else:
                self.group_line_by_attribute()
        else:
            self.group_line_by_angle()

        # record line not connected
        all_line_ids = self.get_all_line_ids()
        self.line_not_connected = list(all_line_ids - set(chain(*self.line_connected)))

        self.assign_vertex_class()

    def group_regroup(self):
        pass

    def group_line_by_attribute(self):
        group_line = defaultdict(list)
        for i in self.line_list:
            group_line[i.group].append(i.line_id)

        for value in group_line.values():
            if len(value) > 1:
                self.line_connected.append(value)

    def group_line_by_angle(self):
        """Generate connectivity of all lines."""
        if len(self.line_list) == 1:
            return

        # if there are 2 and more lines
        new_angles = [i.get_angle_for_line() for i in self.line_list]
        angle_visited = [False] * len(new_angles)

        if len(self.line_list) == 2:
            angle_diff = abs(new_angles[0] - new_angles[1])
            angle_diff = angle_diff if angle_diff <= np.pi else angle_diff - np.pi

            # if angle_diff >= TURN_ANGLE_TOLERANCE:
            self.line_connected.append(
                (
                    self.line_list[0].line_id,
                    self.line_list[1].line_id,
                )
            )
            return

        # three and more lines
        for i, angle_1 in enumerate(new_angles):
            for j, angle_2 in enumerate(new_angles[i + 1 :]):
                if not angle_visited[i + j + 1]:
                    angle_diff = abs(angle_1 - angle_2)
                    angle_diff = (
                        angle_diff if angle_diff <= np.pi else angle_diff - np.pi
                    )
                    if (
                        angle_diff < ANGLE_TOLERANCE
                        or np.pi - ANGLE_TOLERANCE
                        < abs(angle_1 - angle_2)
                        < np.pi + ANGLE_TOLERANCE
                    ):
                        angle_visited[j + i + 1] = True  # tenth of PI
                        self.line_connected.append(
                            (
                                self.line_list[i].line_id,
                                self.line_list[i + j + 1].line_id,
                            )
                        )
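
# --- Editor's illustration (not part of the released file) -----------------
# group_line_by_angle() pairs two lines meeting at a vertex as one continuous
# ("primary") line when their end-segment angles are nearly equal (difference
# below ANGLE_TOLERANCE = pi/10) or nearly opposite (absolute difference
# within pi +/- ANGLE_TOLERANCE). Worked check: angles of 0.05 and 3.10 rad
# differ by 3.05 rad, which falls inside (pi - pi/10, pi + pi/10) ~ (2.83, 3.46),
# so the two line ids are appended to line_connected as a connected pair.
# ----------------------------------------------------------------------------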


class LineGrouping:
    """Class to group lines and merge them."""

    def __init__(self, in_line_gdf, merge_group=True) -> None:
        # remove empty and null geometry
        # self.lines = in_line_gdf.copy()
        # self.lines = self.lines[
        #     ~self.lines.geometry.isna() & ~self.lines.geometry.is_empty
        # ]
        if in_line_gdf is None:
            raise ValueError("Line GeoDataFrame cannot be None")

        if in_line_gdf.empty:
            raise ValueError("Line GeoDataFrame cannot be empty")

        self.lines = algo_common.clean_line_geometries(in_line_gdf)
        self.lines.reset_index(inplace=True, drop=True)
        self.merge_group = merge_group

        self.sim_geom = self.lines.simplify(1)

        self.G = nk.Graph(len(self.lines))
        self.merged_vertex_list = []
        self.has_group_attr = False
        self.need_regrouping = False
        self.groups = [None] * len(self.lines)
        self.merged_lines_trimmed = None  # merged trimmed lines

        self.vertex_list = []
        self.vertex_of_concern = []
        self.v_index = None  # sindex of all vertices for vertex_list

        self.polys = None

        # invalid geoms in final geom list
        self.valid_lines = None
        self.valid_polys = None
        self.invalid_lines = None
        self.invalid_polys = None

    def create_vertex_list(self):
        # check if data has group column
        if bt_const.BT_GROUP in self.lines.keys():
            self.groups = self.lines[bt_const.BT_GROUP]
            self.has_group_attr = True
            if self.groups.hasnans:
                self.need_regrouping = True

        for idx, s_geom, geom, group in zip(
            *zip(*self.sim_geom.items()), self.lines.geometry, self.groups
        ):
            self.vertex_list.append(VertexNode(idx, geom, s_geom, 0, group))
            self.vertex_list.append(VertexNode(idx, geom, s_geom, -1, group))

        v_points = []
        for i in self.vertex_list:
            v_points.append(i.vertex.buffer(SMALL_BUFFER))  # small polygon

        # Spatial index of all vertices
        self.v_index = shapely.STRtree(v_points)

        vertex_visited = [False] * len(self.vertex_list)
        for i, pt in enumerate(v_points):
            if vertex_visited[i]:
                continue

            s_list = self.v_index.query(pt)
            vertex = self.vertex_list[i]
            if len(s_list) > 1:
                for j in s_list:
                    if j != i:
                        # some short line will be very close to each other
                        if (
                            vertex.vertex.distance(self.vertex_list[j].vertex)
                            > bt_const.SMALL_BUFFER
                        ):
                            continue

                        vertex.merge(self.vertex_list[j])
                        vertex_visited[j] = True

            self.merged_vertex_list.append(vertex)
            vertex_visited[i] = True

        for i in self.merged_vertex_list:
            i.check_connectivity()

        for i in self.merged_vertex_list:
            if i.line_connected:
                for edge in i.line_connected:
                    self.G.addEdge(edge[0], edge[1])
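
    # --- Editor's illustration (not part of the released file) -------------
    # Each line index is a node of the networkit graph; every connected pair
    # recorded at a vertex becomes an edge, so a group is simply a connected
    # component. The same pattern used by group_lines() below:
    #     g = nk.Graph(4)
    #     g.addEdge(0, 1)
    #     g.addEdge(1, 2)
    #     cc = nk.components.ConnectedComponents(g)
    #     cc.run()
    #     cc.getComponents()  # e.g. [[0, 1, 2], [3]] -> two groups
    # ------------------------------------------------------------------------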

    def group_lines(self):
        cc = nk.components.ConnectedComponents(self.G)
        cc.run()
        # print("number of components ", cc.numberOfComponents())

        group = 0
        for i in range(cc.numberOfComponents()):
            component = cc.getComponents()[i]
            for id in component:
                self.groups[id] = group

            group += 1

    def update_line_in_vertex_node(self, line_id, line):
        """Update line in VertexNode after trimming."""
        idx = self.v_index.query(line)
        for i in idx:
            v = self.vertex_list[i]
            v.update_line(line_id, line)

    def run_line_merge(self):
        return algo_merge_lines.run_line_merge(self.lines, self.merge_group)

    def find_vertex_for_poly_trimming(self):
        self.vertex_of_concern = [
            i for i in self.merged_vertex_list if i.vertex_class in CONCERN_CLASSES
        ]

    def line_and_poly_cleanup(self):
        sindex_poly = self.polys.sindex

        for vertex in self.vertex_of_concern:
            s_idx = sindex_poly.query(vertex.vertex, predicate="within")
            if len(s_idx) == 0:
                continue

            # Trim intersections of primary lines
            polys = self.polys.loc[s_idx].geometry
            if not self.merge_group:
                if (
                    vertex.vertex_class == VertexClass.FIVE_WAY_TWO_PRIMARY_LINE
                    or vertex.vertex_class == VertexClass.FIVE_WAY_ONE_PRIMARY_LINE
                    or vertex.vertex_class == VertexClass.FOUR_WAY_ONE_PRIMARY_LINE
                    or vertex.vertex_class == VertexClass.FOUR_WAY_TWO_PRIMARY_LINE
                    or vertex.vertex_class == VertexClass.THREE_WAY_ONE_PRIMARY_LINE
                ):
                    out_polys = vertex.trim_primary_end(polys)
                    if len(out_polys) == 0:
                        continue

                    # update polygon DataFrame
                    for idx, out_poly in out_polys:
                        if out_poly:
                            self.polys.at[idx, "geometry"] = out_poly

            # retrieve polygons again. Some polygons may be updated
            polys = self.polys.loc[s_idx]
            if (
                vertex.vertex_class == VertexClass.SINGLE_WAY
                or vertex.vertex_class == VertexClass.TWO_WAY_ZERO_PRIMARY_LINE
                or vertex.vertex_class == VertexClass.THREE_WAY_ZERO_PRIMARY_LINE
                or vertex.vertex_class == VertexClass.FOUR_WAY_ZERO_PRIMARY_LINE
                or vertex.vertex_class == VertexClass.FIVE_WAY_ZERO_PRIMARY_LINE
            ):
                if vertex.vertex_class == VertexClass.THREE_WAY_ZERO_PRIMARY_LINE:
                    pass

                out_polys = vertex.trim_end_all(polys)
                if len(out_polys) == 0:
                    continue

                # update polygon DataFrame
                for idx, out_poly in out_polys:
                    self.polys.at[idx, "geometry"] = out_poly

            polys = self.polys.loc[s_idx]
            if vertex.vertex_class != VertexClass.SINGLE_WAY:
                poly_trim_list = vertex.trim_intersection(polys, self.merge_group)
                for p_trim in poly_trim_list:
                    # update main line and polygon DataFrame
                    self.polys.at[p_trim.poly_index, "geometry"] = p_trim.poly_cleanup
                    self.lines.at[p_trim.line_index, "geometry"] = p_trim.line_cleanup

                    # update VertexNode's line
                    self.update_line_in_vertex_node(
                        p_trim.line_index, p_trim.line_cleanup
                    )

    def get_merged_lines_original(self):
        return self.lines.dissolve(by=bt_const.BT_GROUP)

    def run_grouping(self):
        self.create_vertex_list()
        if not self.has_group_attr:
            self.group_lines()

        self.find_vertex_for_poly_trimming()
        self.lines["group"] = self.groups  # assign group attribute

    def run_regrouping(self):
        """
        Run this when new lines are added to a grouped file.

        Some new lines have empty group attributes.
        """
        pass

    def run_cleanup(self, in_polys):
        self.polys = in_polys.copy()
        self.line_and_poly_cleanup()
        self.run_line_merge_trimmed()
        self.check_geom_validity()

    def run_line_merge_trimmed(self):
        self.merged_lines_trimmed = self.run_line_merge()

    def check_geom_validity(self):
        """
        Check MultiLineString and MultiPolygon in line and polygon dataframe.

        Save to separate layers for user to double check.
        """
        # remove null geometry
        # TODO make sure lines and polygons match in pairs
        # they should have same amount and spatial coverage
        self.valid_polys = self.polys[
            ~self.polys.geometry.isna() & ~self.polys.geometry.is_empty
        ]

        # save sh_geom.MultiLineString and sh_geom.MultiPolygon
        self.invalid_polys = self.polys[
            (self.polys.geometry.geom_type == "MultiPolygon")
        ]

        # check lines
        self.valid_lines = self.merged_lines_trimmed[
            ~self.merged_lines_trimmed.geometry.isna()
            & ~self.merged_lines_trimmed.geometry.is_empty
        ]
        self.valid_lines.reset_index(inplace=True, drop=True)

        self.invalid_lines = self.merged_lines_trimmed[
            (self.merged_lines_trimmed.geometry.geom_type == "MultiLineString")
        ]
        self.invalid_lines.reset_index(inplace=True, drop=True)

    def save_file(self, out_file):
        if not self.valid_lines.empty:
            self.valid_lines["length"] = self.valid_lines.length
            self.valid_lines.to_file(out_file, layer="merged_lines")

        if not self.valid_polys.empty:
            if "length" in self.valid_polys.columns:
                self.valid_polys.drop(columns=["length"], inplace=True)

            self.valid_polys["area"] = self.valid_polys.area
            self.valid_polys.to_file(out_file, layer="clean_footprint")

        if not self.invalid_lines.empty:
            self.invalid_lines.to_file(out_file, layer="invalid_lines")

        if not self.invalid_polys.empty:
            self.invalid_polys.to_file(out_file, layer="invalid_polygons")

@dataclass
class PolygonTrimming:
    """Store polygon and line to trim. Primary polygon is used to trim both."""

    poly_primary: Optional[sh_geom.MultiPolygon] = None
    poly_index: int = field(default=-1)
    poly_cleanup: Optional[sh_geom.Polygon] = None
    line_index: int = field(default=-1)
    line_cleanup: Optional[sh_geom.LineString] = None

    def process(self, primary_poly_list=None, vertex=None):
        # prepare primary polygon
        poly_primary = shapely.union_all(primary_poly_list)
        trim_distance = TRIMMING_DISTANCE

        if self.line_cleanup.length < 100.0:
            trim_distance = 50.0

        poly_primary = poly_primary.intersection(
            vertex.buffer(trim_distance)
        )

        self.poly_primary = poly_primary

        # TODO: check why there are such cases
        if self.poly_cleanup is None:
            print("No polygon to trim.")
            return

        midpoint = self.line_cleanup.interpolate(0.5, normalized=True)
        diff = self.poly_cleanup.difference(self.poly_primary)
        if diff.geom_type == "Polygon":
            self.poly_cleanup = diff
        elif diff.geom_type == "MultiPolygon":
            # area = self.poly_cleanup.area
            reserved = []
            for i in diff.geoms:
                # if i.area > TRIM_THRESHOLD * area:  # small part
                #     reserved.append(i)
                if i.contains(midpoint):
                    reserved.append(i)

            if len(reserved) == 0:
                pass
            elif len(reserved) == 1:
                self.poly_cleanup = sh_geom.Polygon(*reserved)
            else:
                # TODO output all MultiPolygons which should be dealt with
                # self.poly_cleanup = sh_geom.MultiPolygon(reserved)
                print("trim: MultiPolygon detected, please check")

        diff = self.line_cleanup.intersection(self.poly_cleanup)
        if diff.geom_type == "GeometryCollection":
            geoms = []
            for item in diff.geoms:
                if item.geom_type == "LineString":
                    geoms.append(item)
                elif item.geom_type == "MultiLineString":
                    print("trim: sh_geom.MultiLineString detected, please check")
            if len(geoms) == 0:
                return
            elif len(geoms) == 1:
                diff = geoms[0]
            else:
                diff = sh_geom.MultiLineString(geoms)

        if diff.geom_type == "LineString":
            self.line_cleanup = diff
        elif diff.geom_type == "MultiLineString":
            length = self.line_cleanup.length
            reserved = []
            for i in diff.geoms:
                if i.length > TRIM_THRESHOLD * length:  # small part
                    reserved.append(i)

            if len(reserved) == 0:
                pass
            elif len(reserved) == 1:
                self.line_cleanup = sh_geom.LineString(*reserved)
            else:
                # TODO output all MultiLineStrings which should be dealt with
                self.line_cleanup = sh_geom.MultiLineString(reserved)
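
The new module is driven through the LineGrouping class: run_grouping() builds the vertex nodes and assigns group ids, run_cleanup() trims footprint polygons and line ends around vertices of concern and merges the trimmed lines, and save_file() writes the results. A minimal usage sketch follows; the file names are placeholders, and the footprint layer is assumed to carry the max_width column that trim_intersection() reads:

    import geopandas as gpd

    from beratools.core.algo_line_grouping import LineGrouping

    lines = gpd.read_file("centerlines.gpkg")      # input line features
    footprints = gpd.read_file("footprints.gpkg")  # polygons with a max_width column

    lg = LineGrouping(lines, merge_group=True)
    lg.run_grouping()            # build VertexNodes, group lines, flag vertices of concern
    lg.run_cleanup(footprints)   # trim polygons/lines at vertices, merge trimmed lines
    lg.save_file("grouped.gpkg") # layers: merged_lines, clean_footprint,
                                 # invalid_lines, invalid_polygons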