BERATools 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beratools/__init__.py +9 -0
- beratools/core/__init__.py +0 -0
- beratools/core/algo_centerline.py +351 -0
- beratools/core/constants.py +86 -0
- beratools/core/dijkstra_algorithm.py +460 -0
- beratools/core/logger.py +85 -0
- beratools/core/tool_base.py +133 -0
- beratools/gui/__init__.py +15 -0
- beratools/gui/batch_processing_dlg.py +463 -0
- beratools/gui/beratools.json +2300 -0
- beratools/gui/bt_data.py +487 -0
- beratools/gui/bt_gui_main.py +691 -0
- beratools/gui/cli.py +18 -0
- beratools/gui/gui.json +8 -0
- beratools/gui/img/BERALogo.png +0 -0
- beratools/gui/img/closed.gif +0 -0
- beratools/gui/img/closed.png +0 -0
- beratools/gui/img/open.gif +0 -0
- beratools/gui/img/open.png +0 -0
- beratools/gui/img/tool.gif +0 -0
- beratools/gui/img/tool.png +0 -0
- beratools/gui/map_window.py +146 -0
- beratools/gui/tool_widgets.py +493 -0
- beratools/gui_tk/ASCII Banners.txt +248 -0
- beratools/gui_tk/__init__.py +20 -0
- beratools/gui_tk/beratools_main.py +515 -0
- beratools/gui_tk/bt_widgets.py +442 -0
- beratools/gui_tk/cli.py +18 -0
- beratools/gui_tk/gui.json +8 -0
- beratools/gui_tk/img/BERALogo.png +0 -0
- beratools/gui_tk/img/closed.gif +0 -0
- beratools/gui_tk/img/closed.png +0 -0
- beratools/gui_tk/img/open.gif +0 -0
- beratools/gui_tk/img/open.png +0 -0
- beratools/gui_tk/img/tool.gif +0 -0
- beratools/gui_tk/img/tool.png +0 -0
- beratools/gui_tk/main.py +14 -0
- beratools/gui_tk/map_window.py +144 -0
- beratools/gui_tk/runner.py +1481 -0
- beratools/gui_tk/tooltip.py +55 -0
- beratools/third_party/pyqtlet2/__init__.py +9 -0
- beratools/third_party/pyqtlet2/leaflet/__init__.py +26 -0
- beratools/third_party/pyqtlet2/leaflet/control/__init__.py +6 -0
- beratools/third_party/pyqtlet2/leaflet/control/control.py +59 -0
- beratools/third_party/pyqtlet2/leaflet/control/draw.py +52 -0
- beratools/third_party/pyqtlet2/leaflet/control/layers.py +20 -0
- beratools/third_party/pyqtlet2/leaflet/core/Parser.py +24 -0
- beratools/third_party/pyqtlet2/leaflet/core/__init__.py +2 -0
- beratools/third_party/pyqtlet2/leaflet/core/evented.py +180 -0
- beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +34 -0
- beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +30 -0
- beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/layer.py +105 -0
- beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +45 -0
- beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +91 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +2 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +4 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +16 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +15 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +14 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +14 -0
- beratools/third_party/pyqtlet2/leaflet/map/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/map/map.py +220 -0
- beratools/third_party/pyqtlet2/mapwidget.py +45 -0
- beratools/third_party/pyqtlet2/web/custom.js +43 -0
- beratools/third_party/pyqtlet2/web/map.html +23 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +656 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +6 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +14 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +4 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +22 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +43 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +20 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +156 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +10 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +10 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +22 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +57 -0
- beratools/tools/Beratools_r_script.r +1120 -0
- beratools/tools/Ht_metrics.py +116 -0
- beratools/tools/__init__.py +7 -0
- beratools/tools/batch_processing.py +132 -0
- beratools/tools/canopy_threshold_relative.py +670 -0
- beratools/tools/canopycostraster.py +222 -0
- beratools/tools/centerline.py +176 -0
- beratools/tools/common.py +885 -0
- beratools/tools/fl_regen_csf.py +428 -0
- beratools/tools/forest_line_attributes.py +408 -0
- beratools/tools/forest_line_ecosite.py +216 -0
- beratools/tools/lapis_all.py +103 -0
- beratools/tools/least_cost_path_from_chm.py +152 -0
- beratools/tools/line_footprint_absolute.py +363 -0
- beratools/tools/line_footprint_fixed.py +282 -0
- beratools/tools/line_footprint_functions.py +720 -0
- beratools/tools/line_footprint_relative.py +64 -0
- beratools/tools/ln_relative_metrics.py +615 -0
- beratools/tools/r_cal_lpi_elai.r +25 -0
- beratools/tools/r_generate_pd_focalraster.r +101 -0
- beratools/tools/r_interface.py +80 -0
- beratools/tools/r_point_density.r +9 -0
- beratools/tools/rpy_chm2trees.py +86 -0
- beratools/tools/rpy_dsm_chm_by.py +81 -0
- beratools/tools/rpy_dtm_by.py +63 -0
- beratools/tools/rpy_find_cellsize.py +43 -0
- beratools/tools/rpy_gnd_csf.py +74 -0
- beratools/tools/rpy_hummock_hollow.py +85 -0
- beratools/tools/rpy_hummock_hollow_raster.py +71 -0
- beratools/tools/rpy_las_info.py +51 -0
- beratools/tools/rpy_laz2las.py +40 -0
- beratools/tools/rpy_lpi_elai_lascat.py +466 -0
- beratools/tools/rpy_normalized_lidar_by.py +56 -0
- beratools/tools/rpy_percent_above_dbh.py +80 -0
- beratools/tools/rpy_points2trees.py +88 -0
- beratools/tools/rpy_vegcoverage.py +94 -0
- beratools/tools/tiler.py +206 -0
- beratools/tools/tool_template.py +54 -0
- beratools/tools/vertex_optimization.py +620 -0
- beratools/tools/zonal_threshold.py +144 -0
- beratools-0.2.0.dist-info/METADATA +63 -0
- beratools-0.2.0.dist-info/RECORD +142 -0
- beratools-0.2.0.dist-info/WHEEL +4 -0
- beratools-0.2.0.dist-info/entry_points.txt +2 -0
- beratools-0.2.0.dist-info/licenses/LICENSE +22 -0
|
@@ -0,0 +1,408 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from scipy.spatial import distance
|
|
3
|
+
|
|
4
|
+
from beratools.tools.common import *
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def line_split(callback, HasOLnFID, in_cl, seg_length, max_ln_width, sampling_type, verbose):
    """
    Split input centerlines into segments according to the sampling type.

    Parameters
    ----------
    callback : callable
        Progress/print callback (kept for tool API compatibility).
    HasOLnFID : bool
        Whether the input already carries an 'OLnFID' column.
    in_cl : str
        Path to the input centerline vector file.
    seg_length : float
        Target segment length used by 'ARBITRARY' sampling.
    max_ln_width : float
        Maximum line width (kept for tool API compatibility).
    sampling_type : str
        One of 'ARBITRARY', 'LINE-CROSSINGS' or 'IN-FEATURES' (any other value
        falls through to the last branch).
    verbose : bool
        Verbose flag (kept for tool API compatibility).

    Returns
    -------
    geopandas.GeoDataFrame
        Split, dissolved, or passthrough line segments depending on sampling_type.

    Refer to: https://gis.stackexchange.com/questions/416284/
    splitting-multiline-or-linestring-into-equal-segments-of-particular-length-using
    """
    in_ln_shp = gpd.GeoDataFrame.from_file(in_cl)

    # Check the OLnFID column in data. If it is missing, create it from the index.
    if 'OLnFID' not in in_ln_shp.columns.array:
        if BT_DEBUGGING:
            # BUG FIX: original called .format() with no argument, printing a literal '{}'
            print("Cannot find {} column in input line data".format('OLnFID'))

        # BUG FIX: original passed 'OLnFID' twice for a single placeholder
        print("New column created: {}".format('OLnFID'))
        in_ln_shp['OLnFID'] = in_ln_shp.index

    # Copy all the input line into geodataframe
    in_cl_line = gpd.GeoDataFrame.copy(in_ln_shp)

    # Prepare line for arbitrary split lines
    if sampling_type == 'ARBITRARY':
        # create empty geodataframe for split line and straight line from split points
        split_line = gpd.GeoDataFrame(columns=list(in_cl_line.columns), geometry='geometry', crs=in_ln_shp.crs)
        line_id = 0
        line_list = []

        # loop though all the centerlines records
        for row in in_cl_line.index:
            # get geometry from record
            in_line = in_cl_line.loc[[row]]
            in_ln_feat = in_line.iloc[0].geometry
            lines = cut_line(in_ln_feat, seg_length)

            for seg in lines:
                line_data = in_line.copy()
                line_data['geometry'] = seg
                line_id = line_id + 1
                line_list.append(line_data)

        # ROBUSTNESS: pd.concat raises ValueError on an empty list; keep the
        # pre-built empty GeoDataFrame when there is nothing to split.
        if line_list:
            split_line = pd.concat(line_list)
            split_line.reset_index(drop=True, inplace=True)

        return split_line
    elif sampling_type == "LINE-CROSSINGS":
        # create empty geodataframe for lines
        in_cl_dissolved = gpd.GeoDataFrame(columns=['geometry'], geometry='geometry', crs=in_ln_shp.crs)

        lines = list(line for line in in_cl_line['geometry'])
        in_cl_dissolved['geometry'] = list(shapely.ops.linemerge(lines).geoms)

        # segments fully covered by a single input line keep that line's attributes
        identical_segs = in_cl_dissolved.sjoin(in_cl_line, predicate='covered_by')
        identical_segs['Disso_ID'] = identical_segs.index
        columns = list(col for col in identical_segs.columns
                       if col not in ['geometry', 'index_right', 'Shape_Leng', 'Shape_Le_1', 'len'])
        identical_segs = pd.DataFrame(identical_segs[columns])
        identical_segs.reset_index()

        # input lines shared by several dissolved segments
        share_seg = in_cl_line.sjoin(in_cl_dissolved, predicate='covered_by')
        share_seg = share_seg[share_seg.duplicated('index_right', keep=False)]
        share_seg['Disso_ID'] = share_seg['index_right']

        share_seg = pd.DataFrame(share_seg[columns])
        share_seg.reset_index()

        segs_identity = pd.concat([identical_segs, share_seg])
        segs_identity.reset_index()

        for seg in range(0, len(in_cl_dissolved.index)):
            in_cl_dissolved.loc[seg, 'Disso_ID'] = seg
            common_segs = segs_identity.query("Disso_ID=={}".format(seg))
            fp_list = common_segs['OLnFID']

            for col in common_segs.columns:
                in_cl_dissolved.loc[seg, col] = common_segs.loc[common_segs.index[0], col]

            in_cl_dissolved.loc[seg, 'OLnSEG'] = seg
            in_cl_dissolved.loc[[seg], 'FP_ID'] = pd.Series([fp_list], index=in_cl_dissolved.index[[seg]])
            in_cl_dissolved.loc[[seg], 'OLnFID'] = pd.Series([fp_list], index=in_cl_dissolved.index[[seg]])

        in_cl_dissolved['Disso_ID'].astype(int)
        return in_cl_dissolved
    else:  # Return Line as input and create two columns as Primary Key
        if not HasOLnFID:
            in_cl_line['OLnFID'] = in_cl_line.index

        in_cl_line['OLnSEG'] = 0
        in_cl_line.reset_index()
        return in_cl_line
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def find_direction(bearing):
    """Classify a bearing (degrees) into one of four compass orientations."""
    # Each orientation covers two opposite 45-degree sectors; anything that
    # matches no sector (including NaN bearings) falls back to north-south.
    sectors = (
        ("NE-SW", 22.5, 67.5), ("NE-SW", 202.5, 247.5),
        ("E-W", 67.5, 112.5), ("E-W", 247.5, 292.5),
        ("NW-SE", 112.5, 157.5), ("NW-SE", 292.5, 337.5),
    )
    for label, low, high in sectors:
        if low <= bearing < high:
            return label
    return "N-S"
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def find_euc_distance(in_feat):
    """Return the straight-line distance between a geometry's first and last vertices."""
    coords = list(in_feat.coords)
    start = coords[0][:2]  # take x, y only, in case the geometry has z
    end = coords[-1][:2]
    return distance.euclidean(start, end)
|
|
130
|
+
|
|
131
|
+
|
|
132
|
+
def find_bearing(seg):  # Geodataframe
    """
    Compute the compass bearing (degrees) of the first geometry in *seg*,
    measured from its first vertex to its last vertex.

    Returns np.nan for a degenerate segment whose endpoints coincide.
    """
    coords = list(seg.iloc[0].geometry.coords)
    x_start, y_start = coords[0][:2]  # in case has_z
    x_end, y_end = coords[-1][:2]
    dx = x_end - x_start
    dy = y_end - y_start

    if dx == 0.0:
        # Vertical (or degenerate) segment: no atan needed.
        if dy < 0.0:
            return 180.0
        if dy > 0.0:
            return 0.0
        return np.nan
    # Convert the mathematical angle to a compass bearing; atan keeps the
    # result in (-90, 90), so the dx sign picks the correct half-plane.
    angle = math.degrees(math.atan(dy / dx))
    return (90.0 - angle) if dx > 0.0 else (270.0 - angle)
|
|
159
|
+
|
|
160
|
+
|
|
161
|
+
def fill_attributes(line_args):
    """
    Compute geometric and (optionally) canopy-height attributes for one line segment.

    line_args is a list:
        [0] attr_seg_line : GeoDataFrame with a single line segment
        [1] result_identity : GeoDataFrame of intersected footprint polygons (may be empty/None)
        [2] area_analysis : bool (unpacked but not read here)
        [3] height_analysis : bool, True to sample the CHM raster
        [4] in_chm : str, path to the CHM raster
        [5] max_ln_width : float, buffer distance used when no footprint exists

    Returns (attributed line GeoDataFrame, footprint GeoDataFrame), or None when
    the input segment is empty.
    """
    # (result_identity,attr_seg_lines,area_analysis,height_analysis, in_chm, max_lin_width)
    attr_seg_line = line_args[0]
    result_identity = line_args[1]

    area_analysis = line_args[2]
    height_analysis = line_args[3]
    in_chm = line_args[4]
    max_ln_width = line_args[5]

    # determine if footprint is empty
    has_footprint = True
    if type(result_identity) is gpd.GeoDataFrame:
        if result_identity.empty:
            has_footprint = False
    elif not result_identity:
        # covers None / empty list coming from the identity step
        has_footprint = False

    # if line is empty, then skip
    if attr_seg_line.empty:
        return None

    index = attr_seg_line.index[0]
    fields = ['LENGTH', 'FP_Area', 'Perimeter', 'Bearing', 'Direction', 'Sinuosity',
              'AvgWidth', 'Fragment', 'AvgHeight', 'Volume', 'Roughness']
    values = dict.fromkeys(fields, np.nan)
    line_feat = attr_seg_line.geometry.iloc[0]
    # default footprint by buffering
    line_buffer = line_feat.buffer(float(max_ln_width), cap_style=shapely.BufferCapStyle.flat)

    # merge result_identity: a real footprint replaces the default buffer
    if has_footprint:
        result_identity = result_identity.dissolve()
        fp = result_identity.iloc[0].geometry
        line_buffer = fp

    # assign common attributes
    euc_distance = find_euc_distance(line_feat)  # Euclidean distance from start to end points of segment line
    values['LENGTH'] = line_feat.length
    values['FP_Area'] = line_buffer.area
    values['Perimeter'] = line_buffer.length
    values['Bearing'] = find_bearing(attr_seg_line)
    values['Direction'] = find_direction(values['Bearing'])

    # Ratio attributes fall back to NaN when their denominator is zero.
    try:
        values['Sinuosity'] = line_feat.length / euc_distance
    except ZeroDivisionError as e:
        values['Sinuosity'] = np.nan
    try:
        values["AvgWidth"] = values['FP_Area'] / line_feat.length
    except ZeroDivisionError as e:
        values["AvgWidth"] = np.nan
    try:
        values["Fragment"] = values['Perimeter'] / values['FP_Area']
    except ZeroDivisionError as e:
        values["Fragment"] = np.nan

    if height_analysis:  # with CHM
        with rasterio.open(in_chm) as in_chm_file:
            cell_size_x = in_chm_file.transform[0]
            # transform[4] is the (negative) north-up y pixel size; negate for a positive height
            cell_size_y = -in_chm_file.transform[4]

            # clipped the chm base on polygon of line buffer or footprint
            clipped_chm, out_transform = rasterio.mask.mask(in_chm_file, [line_buffer], crop=True)

            # drop the ndarray to 2D ndarray
            clipped_chm = np.squeeze(clipped_chm, axis=0)

            # masked all NoData value cells
            clean_chm = np.ma.masked_where(clipped_chm == in_chm_file.nodata, clipped_chm)

            # Calculate the summary statistics from the clipped CHM
            chm_mean = np.ma.mean(clean_chm)
            chm_std = np.ma.std(clean_chm)
            chm_sum = np.ma.sum(clean_chm)
            chm_count = np.ma.count(clean_chm)
            one_cell_area = cell_size_y * cell_size_x

            # Bessel-style correction term used in the roughness formula below;
            # zero when there are no valid cells.
            sq_std_pow = 0.0
            try:
                sq_std_pow = math.pow(chm_std, 2) * (chm_count - 1) / chm_count
            except ZeroDivisionError as e:
                sq_std_pow = 0.0

            values["AvgHeight"] = chm_mean
            values["Volume"] = chm_sum * one_cell_area
            values["Roughness"] = math.sqrt(math.pow(chm_mean, 2) + sq_std_pow)
    else:  # No CHM
        # remove fields not used
        fields.remove('AvgHeight')
        values.pop('AvgHeight')

        fields.remove('Volume')
        values.pop('Volume')

        fields.remove('Roughness')
        values.pop('Roughness')

    attr_seg_line.loc[index, fields] = values
    footprint = gpd.GeoDataFrame({'geometry': [line_buffer]}, crs=attr_seg_line.crs)

    return attr_seg_line, footprint
|
|
263
|
+
|
|
264
|
+
|
|
265
|
+
def forest_line_attributes(callback, in_line, in_footprint, in_chm, sampling_type, seg_len,
                           ln_split_tol, max_ln_width, out_line, processes, verbose):
    """
    Split forest lines into segments and attribute them with geometric and
    canopy statistics, then save the result to *out_line*.

    Parameters
    ----------
    callback : callable
        Progress/print callback.
    in_line : str
        Path to the input centerline vector file.
    in_footprint : str
        Path to the input footprint polygon file.
    in_chm : str
        Path to the CHM raster (height analysis is skipped if it cannot be opened).
    sampling_type : str
        'IN-FEATURES', 'LINE-CROSSINGS' or 'ARBITRARY'.
    seg_len, ln_split_tol, max_ln_width : float-convertible
        Segmentation length, split tolerance and maximum line width.
    out_line : str
        Output path for the attributed line segments.
    processes : int
        Worker count for multiprocessing.
    verbose : bool
        Verbose progress output.
    """
    # assign Tool arguments
    in_cl = in_line
    in_fp = in_footprint
    seg_len = float(seg_len)
    ln_split_tol = float(ln_split_tol)
    max_ln_width = float(max_ln_width)

    # Valid input footprint shapefile has geometry
    in_fp_shp = gpd.GeoDataFrame.from_file(in_fp)
    in_ln_shp = gpd.read_file(in_cl, rows=1)  # TODO: check projection
    in_fields = list(in_ln_shp.columns)

    # check coordinate systems between line and raster features
    try:
        with rasterio.open(in_chm) as in_raster:
            if in_fp_shp.crs.to_epsg() != in_raster.crs.to_epsg():
                print("Line and raster spatial references are not the same, please check.")
                exit()
    except Exception as e:
        print(e)

    HasOLnFID = False

    # determine to do area or/and height analysis
    if len(in_fp_shp) == 0:
        print('No footprints provided, buffer of the input lines will be used instead')
        area_analysis = False
    else:
        area_analysis = True

    # height analysis requires a readable CHM raster
    try:
        with rasterio.open(in_chm) as in_CHM:
            height_analysis = True
    except Exception as error_in_CHM:
        print(error_in_CHM)
        height_analysis = False

    # Process the following SamplingType
    sampling_list = ["IN-FEATURES", "LINE-CROSSINGS", "ARBITRARY"]
    if sampling_type not in sampling_list:
        print("SamplingType is not correct, please verify it.")
        exit()

    print("Preparing line segments...")

    # Segment lines
    # Return split lines with two extra columns:['OLnFID','OLnSEG']
    # or return Dissolved whole line
    print("Input_Lines: {}".format(in_cl))
    # BUG FIX: original passed verbose=in_args.verbose, referencing a global that
    # only exists when run as a script; use the function parameter instead so the
    # tool also works when imported and called programmatically.
    attr_seg_lines = line_split(print, HasOLnFID, in_cl, seg_len, max_ln_width, sampling_type, verbose=verbose)

    print('%{}'.format(10))

    print("Line segments preparation done.")
    print("{} footprints to be identified by {} segments ...".format(len(in_fp_shp.index), len(attr_seg_lines)))

    # Prepare line parameters for multiprocessing
    line_args = []

    # prepare line args: list of line, line buffer and footprint polygon
    # footprint spatial searching
    footprint_sindex = in_fp_shp.sindex

    for i in attr_seg_lines.index:
        line = attr_seg_lines.iloc[[i]]
        line_buffer = line.copy()
        line_buffer['geometry'] = line.buffer(max_ln_width, cap_style=shapely.BufferCapStyle.flat)
        fp_intersected = in_fp_shp.iloc[footprint_sindex.query(line_buffer.iloc[0].geometry)]
        list_item = [line, line_buffer, fp_intersected]

        line_args.append(list_item)

    # multiprocessing of identity polygons
    features = []
    features = execute_multiprocessing(identity_polygon, line_args, 'Identify polygons',
                                       processes, 1, verbose=verbose)

    print("Prepare for filling attributes ...")
    # prepare list of result_identity, Att_seg_lines, areaAnalysis, heightAnalysis, args.input
    line_args = []
    for index in range(0, len(features)):
        list_item = [features[index][0], features[index][1], area_analysis, height_analysis, in_chm, max_ln_width]
        line_args.append(list_item)

    # Linear attributes
    print("Adding attributes ...")
    print('%{}'.format(60))

    # Multiprocessing identity polygon
    features = execute_multiprocessing(fill_attributes, line_args, 'Filling attributes',
                                       processes, 1, verbose=verbose)

    # Combine into one geodataframe
    if len(features) == 0:
        print('No lines found.')
        exit()

    line_segments = []
    line_footprints = []

    for item in features:
        line_segments.append(item[0])
        line_footprints.append(item[1])

    result_segments = gpd.GeoDataFrame(pd.concat(line_segments, ignore_index=True))
    result_segments.reset_index()

    result_footprints = gpd.GeoDataFrame(pd.concat(line_footprints, ignore_index=True))
    result_footprints.reset_index()

    print('Attribute processing done.')
    print('%{}'.format(80))

    # Clean the split line attribute columns
    field_list = ['geometry', 'LENGTH', 'FP_Area', 'Perimeter', 'Bearing', 'Direction',
                  'Sinuosity', 'AvgWidth', 'AvgHeight', 'Fragment', 'Volume', 'Roughness']
    field_list.extend(in_fields)
    del_list = list(col for col in result_segments.columns if col not in field_list)
    result_segments = result_segments.drop(columns=del_list)
    result_segments.reset_index()

    print('%{}'.format(90))
    print('Saving output ...')

    # Save attributed lines, was output_att_line
    result_segments.to_file(out_line)

    print('%{}'.format(100))
|
|
397
|
+
|
|
398
|
+
|
|
399
|
+
if __name__ == '__main__':
    # Script entry point: time the run and report start/end to the console.
    start_time = time.time()
    print('Line Attributes started at {}'.format(time.strftime("%b %Y %H:%M:%S", time.localtime())))

    # Get tool arguments
    # check_arguments() comes from beratools.tools.common (wildcard import);
    # in_args.input is expected to be a dict of keyword arguments for the tool.
    in_args, in_verbose = check_arguments()
    forest_line_attributes(print, **in_args.input, processes=int(in_args.processes), verbose=in_verbose)

    print('Current time: {}'.format(time.strftime("%d %b %Y %H:%M:%S", time.localtime())))
    print('Line Attributes processing done in {} seconds'.format(round(time.time() - start_time, 5)))
|
|
@@ -0,0 +1,216 @@
|
|
|
1
|
+
from collections import OrderedDict
|
|
2
|
+
from multiprocessing.pool import Pool
|
|
3
|
+
import time
|
|
4
|
+
|
|
5
|
+
import fiona
|
|
6
|
+
from fiona import Feature, Properties
|
|
7
|
+
from shapely.geometry import shape, mapping
|
|
8
|
+
from shapely.geometry import LineString, MultiLineString, Polygon, MultiPolygon
|
|
9
|
+
from shapely.ops import split
|
|
10
|
+
from shapely import STRtree
|
|
11
|
+
import ray
|
|
12
|
+
|
|
13
|
+
from beratools.tools.common import *
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
class OperationCancelledException(Exception):
    """Raised to abort a long-running (possibly parallel) operation."""
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
def forest_line_ecosite(callback, in_line, in_ecosite, out_line, processes, verbose):
    """
    Split forest lines by ecosite polygons and attach an 'ecosite' attribute,
    writing the result to a shapefile.

    Parameters
    ----------
    callback : callable
        Progress/print callback.
    in_line : str
        Path to the input line (or polygon) vector file.
    in_ecosite : str
        Path to the ecosite polygon vector file.
    out_line : str
        Output shapefile path.
    processes : int
        Worker count when PARALLEL_MODE is multiprocessing.
    verbose : bool
        Verbose progress output.
    """
    if not compare_crs(vector_crs(in_line), vector_crs(in_ecosite)):
        # BUG FIX: the original message said 'CHM' although this check compares
        # the line layer against the ecosite layer.
        print("Line and ecosite spatial references are not the same, please check.")
        return

    # Read input line features
    layer_crs = None
    input_lines = []
    in_properties = None
    with fiona.open(in_line) as in_file_vector:
        layer_crs = in_file_vector.crs
        in_properties = in_file_vector.meta['schema']['properties']
        for line in in_file_vector:
            if line.geometry:
                # NOTE: the first clause already admits every non-MultiLineString
                # geometry, so the Polygon/MultiPolygon clauses are redundant;
                # only MultiLineString falls through to the explode branch.
                if line.geometry.type != 'MultiLineString' or \
                        line.geometry.type == 'Polygon' or \
                        line.geometry.type == 'MultiPolygon':
                    input_lines.append([line.geometry, line.properties])
                else:
                    print('MultiLineString found.')
                    geoms = shape(line.geometry).geoms
                    # NOTE(review): Geometry.from_dict is given a shapely geometry,
                    # not a dict — presumably fiona accepts the mapping protocol;
                    # confirm against the fiona Geometry model.
                    for item in geoms:
                        line_part = Geometry.from_dict(item)
                        if line_part:
                            input_lines.append([line_part, line.properties])
            else:
                print(f'Line {line.id} has empty geometry.')

    out_fields_list = OrderedDict(in_properties)
    out_fields_list["ecosite"] = 'str'

    # Create search tree over the ecosite polygons
    feats = read_feature_from_shapefile(in_ecosite)
    geoms = [i[0] for i in feats]
    tree = STRtree(geoms)

    # Process lines: pair each line with the ecosite polygons it intersects
    fiona_features = []
    all_lines = []
    feat_all = []
    id = 0
    for line in input_lines:
        line_geom = shape(line[0])
        line_prop = line[1]
        index_query = tree.query(line_geom)
        geoms_intersected = []
        for i in index_query:
            geoms_intersected.append({"geom": tree.geometries.take(i), "prop": feats[i][1]})

        all_lines.append(({'geom': line_geom, 'prop': line_prop}, geoms_intersected, id))
        id += 1

    print('{} lines to be processed.'.format(len(all_lines)))
    step = 0
    total_steps = len(all_lines)

    if PARALLEL_MODE == ParallelMode.MULTIPROCESSING:
        feat_all = execute_multiprocessing(all_lines, processes, verbose)
    elif PARALLEL_MODE == ParallelMode.SEQUENTIAL:
        for line in all_lines:
            line_collection = process_single_line(line)
            if line_collection:
                feat_all.append(line_collection)
            step += 1
            if verbose:
                # BUG FIX: label was the typo 'Ceterline'; use 'Ecosite' to match
                # the label emitted by the multiprocessing path in this module.
                print(' "PROGRESS_LABEL Ecosite {} of {}" '.format(step, total_steps), flush=True)
                print(' %{} '.format(step / total_steps * 100), flush=True)

    # Inspect the first result to decide the output geometry type
    i = 0
    feat_type = None
    if len(feat_all) > 0:
        feat_type = type(feat_all[0][0][0])

    for feature in feat_all:
        if not feature:
            continue

        i += 1
        for line in feature:
            try:
                single_line = {
                    'geometry': mapping(line[0]),
                    'properties': line[1]  # TODO: add attributes
                }
            except Exception as e:
                print(e)
            else:
                fiona_features.append(single_line)

    geom_type = 'LineString'
    if feat_type is Polygon or feat_type is MultiPolygon:
        geom_type = 'Polygon'

    schema = {
        'geometry': geom_type,
        'properties': out_fields_list
    }

    driver = 'ESRI Shapefile'
    print('Writing lines to shapefile')

    # Save lines to shapefile
    with fiona.open(out_line, 'w', driver, schema, layer_crs.to_proj4()) as out_line_file:
        for feature in fiona_features:
            out_line_file.write(feature)
|
|
126
|
+
|
|
127
|
+
|
|
128
|
+
def split_line_with_polygon(lines, polygon):
    """Split every line in *lines* by *polygon* and return the flattened parts."""
    parts = []
    for geom in lines:
        pieces = split(geom, polygon)
        if pieces.is_empty:
            continue
        parts.extend(pieces.geoms)
    return parts
|
|
139
|
+
|
|
140
|
+
|
|
141
|
+
def split_polygon_with_polygon(in_poly, polygon):
    """Return the intersection of *in_poly* with *polygon*, or None when it is empty."""
    clipped = in_poly.intersection(polygon)
    return clipped if not clipped.is_empty else None
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def process_single_line(line_args, find_nearest=True, output_linear_reference=False):
    """
    Split one line (or polygon) feature by its intersecting ecosite polygons.

    Parameters
    ----------
    line_args : tuple
        line_args has three items: {line geometry, line properties}, intersected polygons (list) and line ID
    find_nearest : bool
        Unused here; kept for tool API compatibility.
    output_linear_reference : bool
        Unused here; kept for tool API compatibility.

    Returns
    --------
    list
        The return list consist of split lines by intersection with polygons;
        each item is [geometry, properties] with an 'ecosite' value copied from
        the polygon that contains the piece.
    """
    in_feat = line_args[0]
    in_geom = in_feat['geom']
    poly_feats = line_args[1]

    # NOTE(review): out_geom is only bound for the two branches below; any other
    # geometry type would raise at the len(out_geom) check — confirm callers only
    # pass (Multi)LineString or (Multi)Polygon.
    if type(in_geom) is LineString or type(in_geom) is MultiLineString:
        out_geom = [in_geom]
        if len(poly_feats) > 0:  # none intersecting polygons
            # each polygon further splits the accumulated parts
            for poly in poly_feats:
                out_geom = split_line_with_polygon(out_geom, poly['geom'])
    elif type(in_geom) is Polygon or type(in_geom) is MultiPolygon:
        out_geom = []
        if len(poly_feats) > 0:  # none intersecting polygons
            for poly in poly_feats:
                out_geom.append(split_polygon_with_polygon(in_geom, poly['geom']))

    final_geoms = []
    if len(out_geom) > 0:
        for i in out_geom:
            # copy the original attributes, then tag the containing ecosite
            temp_prop = Properties.from_dict(dict(in_feat['prop'].items()))
            for j in poly_feats:
                if j['geom'].contains(i):
                    temp_prop['ecosite'] = j['prop']['ecosite']  # TODO: specify 'ecosite' field name
            final_geoms.append([i, temp_prop])

    return final_geoms
|
|
185
|
+
|
|
186
|
+
|
|
187
|
+
def execute_multiprocessing(line_args, processes, verbose):
    """
    Run process_single_line over *line_args* in a multiprocessing pool.

    Parameters
    ----------
    line_args : list
        Work items, each in the tuple shape process_single_line expects.
    processes : int
        Worker process count for the Pool.
    verbose : bool
        When True, emit PROGRESS_LABEL lines for the GUI.

    Returns
    -------
    list or None
        Results in completion order (imap_unordered), or None when the
        operation is cancelled via OperationCancelledException.
    """
    try:
        total_steps = len(line_args)
        feat_all = []
        with Pool(processes) as pool:
            step = 0
            # execute tasks in order, process results out of order
            for result in pool.imap_unordered(process_single_line, line_args):
                if BT_DEBUGGING:
                    print('Got result: {}'.format(result), flush=True)

                feat_all.append(result)
                step += 1
                if verbose:
                    print(' "PROGRESS_LABEL Ecosite {} of {}" '.format(step, total_steps), flush=True)

                print('Line processed: {}'.format(step), flush=True)
                print(' %{} '.format(step / total_steps * 100), flush=True)

        return feat_all
    except OperationCancelledException:
        print("Operation cancelled")
        return None
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
if __name__ == '__main__':
    # Script entry point: parse tool arguments, run the ecosite tool, report timing.
    # check_arguments() comes from beratools.tools.common (wildcard import);
    # in_args.input is expected to be a dict of keyword arguments for the tool.
    in_args, in_verbose = check_arguments()
    start_time = time.time()
    forest_line_ecosite(print, **in_args.input, processes=int(in_args.processes), verbose=in_verbose)
    print('Elapsed time: {}'.format(time.time() - start_time))
|