BERATools 0.2.0__py3-none-any.whl → 0.2.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beratools/__init__.py +1 -7
- beratools/core/algo_centerline.py +491 -351
- beratools/core/algo_common.py +497 -0
- beratools/core/algo_cost.py +192 -0
- beratools/core/{dijkstra_algorithm.py → algo_dijkstra.py} +503 -460
- beratools/core/algo_footprint_rel.py +577 -0
- beratools/core/algo_line_grouping.py +944 -0
- beratools/core/algo_merge_lines.py +214 -0
- beratools/core/algo_split_with_lines.py +304 -0
- beratools/core/algo_tiler.py +428 -0
- beratools/core/algo_vertex_optimization.py +469 -0
- beratools/core/constants.py +52 -86
- beratools/core/logger.py +76 -85
- beratools/core/tool_base.py +196 -133
- beratools/gui/__init__.py +11 -15
- beratools/gui/{beratools.json → assets/beratools.json} +2185 -2300
- beratools/gui/batch_processing_dlg.py +513 -463
- beratools/gui/bt_data.py +481 -487
- beratools/gui/bt_gui_main.py +710 -691
- beratools/gui/main.py +26 -0
- beratools/gui/map_window.py +162 -146
- beratools/gui/tool_widgets.py +725 -493
- beratools/tools/Beratools_r_script.r +1120 -1120
- beratools/tools/Ht_metrics.py +116 -116
- beratools/tools/__init__.py +7 -7
- beratools/tools/batch_processing.py +136 -132
- beratools/tools/canopy_threshold_relative.py +672 -670
- beratools/tools/canopycostraster.py +222 -222
- beratools/tools/centerline.py +136 -176
- beratools/tools/common.py +857 -885
- beratools/tools/fl_regen_csf.py +428 -428
- beratools/tools/forest_line_attributes.py +408 -408
- beratools/tools/line_footprint_absolute.py +213 -363
- beratools/tools/line_footprint_fixed.py +436 -282
- beratools/tools/line_footprint_functions.py +733 -720
- beratools/tools/line_footprint_relative.py +73 -64
- beratools/tools/line_grouping.py +45 -0
- beratools/tools/ln_relative_metrics.py +615 -615
- beratools/tools/r_cal_lpi_elai.r +24 -24
- beratools/tools/r_generate_pd_focalraster.r +100 -100
- beratools/tools/r_interface.py +79 -79
- beratools/tools/r_point_density.r +8 -8
- beratools/tools/rpy_chm2trees.py +86 -86
- beratools/tools/rpy_dsm_chm_by.py +81 -81
- beratools/tools/rpy_dtm_by.py +63 -63
- beratools/tools/rpy_find_cellsize.py +43 -43
- beratools/tools/rpy_gnd_csf.py +74 -74
- beratools/tools/rpy_hummock_hollow.py +85 -85
- beratools/tools/rpy_hummock_hollow_raster.py +71 -71
- beratools/tools/rpy_las_info.py +51 -51
- beratools/tools/rpy_laz2las.py +40 -40
- beratools/tools/rpy_lpi_elai_lascat.py +466 -466
- beratools/tools/rpy_normalized_lidar_by.py +56 -56
- beratools/tools/rpy_percent_above_dbh.py +80 -80
- beratools/tools/rpy_points2trees.py +88 -88
- beratools/tools/rpy_vegcoverage.py +94 -94
- beratools/tools/tiler.py +48 -206
- beratools/tools/tool_template.py +69 -54
- beratools/tools/vertex_optimization.py +61 -620
- beratools/tools/zonal_threshold.py +144 -144
- beratools-0.2.2.dist-info/METADATA +108 -0
- beratools-0.2.2.dist-info/RECORD +74 -0
- {beratools-0.2.0.dist-info → beratools-0.2.2.dist-info}/WHEEL +1 -1
- {beratools-0.2.0.dist-info → beratools-0.2.2.dist-info}/licenses/LICENSE +22 -22
- beratools/gui/cli.py +0 -18
- beratools/gui/gui.json +0 -8
- beratools/gui_tk/ASCII Banners.txt +0 -248
- beratools/gui_tk/__init__.py +0 -20
- beratools/gui_tk/beratools_main.py +0 -515
- beratools/gui_tk/bt_widgets.py +0 -442
- beratools/gui_tk/cli.py +0 -18
- beratools/gui_tk/img/BERALogo.png +0 -0
- beratools/gui_tk/img/closed.gif +0 -0
- beratools/gui_tk/img/closed.png +0 -0
- beratools/gui_tk/img/open.gif +0 -0
- beratools/gui_tk/img/open.png +0 -0
- beratools/gui_tk/img/tool.gif +0 -0
- beratools/gui_tk/img/tool.png +0 -0
- beratools/gui_tk/main.py +0 -14
- beratools/gui_tk/map_window.py +0 -144
- beratools/gui_tk/runner.py +0 -1481
- beratools/gui_tk/tooltip.py +0 -55
- beratools/third_party/pyqtlet2/__init__.py +0 -9
- beratools/third_party/pyqtlet2/leaflet/__init__.py +0 -26
- beratools/third_party/pyqtlet2/leaflet/control/__init__.py +0 -6
- beratools/third_party/pyqtlet2/leaflet/control/control.py +0 -59
- beratools/third_party/pyqtlet2/leaflet/control/draw.py +0 -52
- beratools/third_party/pyqtlet2/leaflet/control/layers.py +0 -20
- beratools/third_party/pyqtlet2/leaflet/core/Parser.py +0 -24
- beratools/third_party/pyqtlet2/leaflet/core/__init__.py +0 -2
- beratools/third_party/pyqtlet2/leaflet/core/evented.py +0 -180
- beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +0 -5
- beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +0 -34
- beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +0 -1
- beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +0 -30
- beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +0 -18
- beratools/third_party/pyqtlet2/leaflet/layer/layer.py +0 -105
- beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +0 -45
- beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +0 -1
- beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +0 -91
- beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +0 -2
- beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +0 -4
- beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +0 -16
- beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +0 -5
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +0 -15
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +0 -18
- beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +0 -5
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +0 -14
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +0 -18
- beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +0 -14
- beratools/third_party/pyqtlet2/leaflet/map/__init__.py +0 -1
- beratools/third_party/pyqtlet2/leaflet/map/map.py +0 -220
- beratools/third_party/pyqtlet2/mapwidget.py +0 -45
- beratools/third_party/pyqtlet2/web/custom.js +0 -43
- beratools/third_party/pyqtlet2/web/map.html +0 -23
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +0 -656
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +0 -6
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +0 -14
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +0 -4
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +0 -22
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +0 -43
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +0 -20
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +0 -156
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +0 -10
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +0 -10
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +0 -22
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +0 -57
- beratools/tools/forest_line_ecosite.py +0 -216
- beratools/tools/lapis_all.py +0 -103
- beratools/tools/least_cost_path_from_chm.py +0 -152
- beratools-0.2.0.dist-info/METADATA +0 -63
- beratools-0.2.0.dist-info/RECORD +0 -142
- /beratools/gui/{img → assets}/BERALogo.png +0 -0
- /beratools/gui/{img → assets}/closed.gif +0 -0
- /beratools/gui/{img → assets}/closed.png +0 -0
- /beratools/{gui_tk → gui/assets}/gui.json +0 -0
- /beratools/gui/{img → assets}/open.gif +0 -0
- /beratools/gui/{img → assets}/open.png +0 -0
- /beratools/gui/{img → assets}/tool.gif +0 -0
- /beratools/gui/{img → assets}/tool.png +0 -0
- {beratools-0.2.0.dist-info → beratools-0.2.2.dist-info}/entry_points.txt +0 -0
beratools/tools/fl_regen_csf.py
CHANGED
|
@@ -1,428 +1,428 @@
|
|
|
1
|
-
import math
|
|
2
|
-
import time
|
|
3
|
-
import pandas
|
|
4
|
-
import geopandas
|
|
5
|
-
import numpy
|
|
6
|
-
import scipy
|
|
7
|
-
import os
|
|
8
|
-
import pyogrio
|
|
9
|
-
import shapely
|
|
10
|
-
from shapely.ops import unary_union, split
|
|
11
|
-
from rasterio import mask
|
|
12
|
-
import argparse
|
|
13
|
-
import json
|
|
14
|
-
from multiprocessing.pool import Pool
|
|
15
|
-
|
|
16
|
-
from common import *
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
class OperationCancelledException(Exception):
|
|
20
|
-
pass
|
|
21
|
-
|
|
22
|
-
|
|
23
|
-
def regen_csf(line_args):
|
|
24
|
-
# (result_identity,attr_seg_lines, area_analysis, change_analysis, in_change,in_tree_shp)
|
|
25
|
-
attr_seg_line = line_args[0]
|
|
26
|
-
result_identity = line_args[1]
|
|
27
|
-
|
|
28
|
-
area_analysis = line_args[2]
|
|
29
|
-
change_analysis = line_args[3]
|
|
30
|
-
in_change = line_args[4]
|
|
31
|
-
in_tree = line_args[5]
|
|
32
|
-
|
|
33
|
-
has_footprint = True
|
|
34
|
-
if type(result_identity) is geopandas.geodataframe.GeoDataFrame:
|
|
35
|
-
if result_identity.empty:
|
|
36
|
-
has_footprint = False
|
|
37
|
-
else:
|
|
38
|
-
# merge result_identity
|
|
39
|
-
result_identity = result_identity.dissolve()
|
|
40
|
-
|
|
41
|
-
elif not result_identity:
|
|
42
|
-
has_footprint = False
|
|
43
|
-
|
|
44
|
-
# Check if query result is not empty, if empty input identity footprint will be skipped
|
|
45
|
-
if attr_seg_line.empty:
|
|
46
|
-
return None
|
|
47
|
-
|
|
48
|
-
if "AvgWidth" in attr_seg_line.columns.array:
|
|
49
|
-
max_ln_width = math.ceil(attr_seg_line["AvgWidth"])
|
|
50
|
-
if not max_ln_width >= 1.0:
|
|
51
|
-
max_ln_width = 0.5
|
|
52
|
-
else:
|
|
53
|
-
if has_footprint:
|
|
54
|
-
# estimate width= (Perimeter -Sqrt(Perimeter^2-16*Area))/4
|
|
55
|
-
# for long and skinny: estimate width = 2*Area / Perimeter
|
|
56
|
-
P = float(result_identity.geometry.length)
|
|
57
|
-
A = float(result_identity.geometry.area)
|
|
58
|
-
max_ln_width = math.ceil((2 * A) / P)
|
|
59
|
-
if not max_ln_width >= 1.0:
|
|
60
|
-
max_ln_width = 0.5
|
|
61
|
-
else:
|
|
62
|
-
max_ln_width = 0.5
|
|
63
|
-
index = 0
|
|
64
|
-
|
|
65
|
-
if change_analysis and has_footprint: # with change raster and footprint
|
|
66
|
-
|
|
67
|
-
fp = result_identity.iloc[0].geometry
|
|
68
|
-
line_feat = attr_seg_line.iloc[0].geometry
|
|
69
|
-
|
|
70
|
-
# if the selected seg do not have identity footprint geometry
|
|
71
|
-
if shapely.is_empty(fp):
|
|
72
|
-
# use the buffer from the segment line
|
|
73
|
-
line_buffer = shapely.buffer(line_feat, float(max_ln_width) / 4)
|
|
74
|
-
else:
|
|
75
|
-
# if identity footprint has geometry, use as a buffer area
|
|
76
|
-
line_buffer = fp
|
|
77
|
-
# check trees
|
|
78
|
-
with rasterio.open(in_change) as in_change_file:
|
|
79
|
-
cell_size_x = in_change_file.transform[0]
|
|
80
|
-
cell_size_y = -in_change_file.transform[4]
|
|
81
|
-
# clipped the change base on polygon of line buffer or footprint
|
|
82
|
-
clipped_change, out_transform = rasterio.mask.mask(in_change_file, [line_buffer], crop=True)
|
|
83
|
-
|
|
84
|
-
# drop the ndarray to 2D ndarray
|
|
85
|
-
clipped_change = numpy.squeeze(clipped_change, axis=0)
|
|
86
|
-
|
|
87
|
-
# masked all NoData value cells
|
|
88
|
-
clean_change = numpy.ma.masked_where(clipped_change == in_change_file.nodata, clipped_change)
|
|
89
|
-
|
|
90
|
-
# Calculate the summary statistics from the clipped change
|
|
91
|
-
change_mean = numpy.nanmean(clean_change)
|
|
92
|
-
# count trees within FP area
|
|
93
|
-
trees_counts = len(in_tree[in_tree.within(line_buffer)])
|
|
94
|
-
trees_density = trees_counts / line_buffer.area
|
|
95
|
-
if trees_density >= 0.6:
|
|
96
|
-
reg_class = "Advanced"
|
|
97
|
-
elif 0.2 < trees_density < 0.6:
|
|
98
|
-
reg_class = "Regenerating"
|
|
99
|
-
else: # 0-60 trees counts
|
|
100
|
-
if change_mean > 0.06:
|
|
101
|
-
reg_class = "Regenerating"
|
|
102
|
-
else:
|
|
103
|
-
reg_class = "Arrested"
|
|
104
|
-
|
|
105
|
-
elif change_analysis and not has_footprint: # with change raster but no footprint
|
|
106
|
-
|
|
107
|
-
line_feat = attr_seg_line.geometry.iloc[0]
|
|
108
|
-
line_buffer = shapely.buffer(line_feat, float(max_ln_width))
|
|
109
|
-
|
|
110
|
-
with rasterio.open(in_change) as in_change_file:
|
|
111
|
-
cell_size_x = in_change_file.transform[0]
|
|
112
|
-
cell_size_y = -in_change_file.transform[4]
|
|
113
|
-
# Calculate the mean changes
|
|
114
|
-
# clipped the change base on polygon of line buffer or footprint
|
|
115
|
-
clipped_change, out_transform = rasterio.mask.mask(in_change_file, [line_buffer], crop=True)
|
|
116
|
-
|
|
117
|
-
# drop the ndarray to 2D ndarray
|
|
118
|
-
clipped_change = numpy.squeeze(clipped_change, axis=0)
|
|
119
|
-
|
|
120
|
-
# masked all NoData value cells
|
|
121
|
-
clean_change = numpy.ma.masked_where(clipped_change == in_change_file.nodata, clipped_change)
|
|
122
|
-
|
|
123
|
-
# Calculate the summary statistics from the clipped change
|
|
124
|
-
change_mean = numpy.nanmean(clean_change)
|
|
125
|
-
# count trees within FP area
|
|
126
|
-
trees_counts = len(in_tree[in_tree.within(line_buffer)])
|
|
127
|
-
trees_density = trees_counts / line_buffer.area
|
|
128
|
-
if trees_density >= 0.6:
|
|
129
|
-
reg_class = "Advanced"
|
|
130
|
-
elif 0.2 < trees_density < 0.6:
|
|
131
|
-
reg_class = "Regenerating"
|
|
132
|
-
else: # 0-60 trees counts
|
|
133
|
-
if change_mean > 0.06:
|
|
134
|
-
reg_class = "Regenerating"
|
|
135
|
-
else:
|
|
136
|
-
reg_class = "Arrested"
|
|
137
|
-
elif not change_analysis or not has_footprint: # Either no change_analysis or no footprint
|
|
138
|
-
line_feat = attr_seg_line.geometry.iloc[0]
|
|
139
|
-
|
|
140
|
-
# if the selected seg do not have identity footprint geometry
|
|
141
|
-
line_buffer = shapely.buffer(line_feat, float(max_ln_width))
|
|
142
|
-
|
|
143
|
-
# count trees within FP area
|
|
144
|
-
trees_counts = len(in_tree[in_tree.within(line_buffer)])
|
|
145
|
-
trees_density = trees_counts / line_buffer.area
|
|
146
|
-
if trees_density >= 0.6:
|
|
147
|
-
reg_class = "Advanced"
|
|
148
|
-
elif 0.2 < trees_density < 0.6:
|
|
149
|
-
reg_class = "Regenerating"
|
|
150
|
-
else:
|
|
151
|
-
reg_class = "Not Available"
|
|
152
|
-
|
|
153
|
-
change_mean = numpy.nan
|
|
154
|
-
elif not change_analysis and not has_footprint: # no change raster and no footprint
|
|
155
|
-
reg_class = "Not Available"
|
|
156
|
-
change_mean = numpy.nan
|
|
157
|
-
trees_counts = numpy.nan
|
|
158
|
-
trees_density = numpy.nan
|
|
159
|
-
|
|
160
|
-
attr_seg_line["AveChanges"] = change_mean
|
|
161
|
-
attr_seg_line["Num_trees"] = trees_counts
|
|
162
|
-
attr_seg_line["trees_density"] = trees_density
|
|
163
|
-
attr_seg_line["Reg_Class"] = reg_class
|
|
164
|
-
return attr_seg_line
|
|
165
|
-
|
|
166
|
-
|
|
167
|
-
def identity_polygon(line_args):
|
|
168
|
-
line = line_args[0]
|
|
169
|
-
in_touched_fp = line_args[1][['geometry', 'OLnFID', 'OLnSEG']]
|
|
170
|
-
in_search_polygon = line_args[2]
|
|
171
|
-
if 'OLnSEG' not in in_search_polygon.columns.array:
|
|
172
|
-
in_search_polygon = in_search_polygon.assign(OLnSEG=0)
|
|
173
|
-
if 'OLnFID' not in in_search_polygon.columns.array:
|
|
174
|
-
in_search_polygon = in_search_polygon.assign(OLnFID=in_search_polygon['OLnFID'].index)
|
|
175
|
-
identity = None
|
|
176
|
-
try:
|
|
177
|
-
# TODO: determine when there is empty polygon
|
|
178
|
-
# TODO: this will produce empty identity
|
|
179
|
-
if not in_search_polygon.empty:
|
|
180
|
-
identity = in_search_polygon.overlay(in_touched_fp, how='identity')
|
|
181
|
-
identity = identity.dropna(subset=['OLnSEG_2', 'OLnFID_2'])
|
|
182
|
-
identity = identity.drop(columns=['OLnSEG_1', 'OLnFID_2'])
|
|
183
|
-
identity = identity.rename(columns={'OLnFID_1': 'OLnFID', 'OLnSEG_2': 'OLnSEG'})
|
|
184
|
-
except Exception as e:
|
|
185
|
-
print(e)
|
|
186
|
-
|
|
187
|
-
return line, identity
|
|
188
|
-
|
|
189
|
-
|
|
190
|
-
def execute_multiprocessing_identity(line_args, processes):
|
|
191
|
-
# Multiprocessing identity polygon
|
|
192
|
-
try:
|
|
193
|
-
total_steps = len(line_args)
|
|
194
|
-
features = []
|
|
195
|
-
with Pool(processes) as pool:
|
|
196
|
-
step = 0
|
|
197
|
-
# execute tasks in order, process results out of order
|
|
198
|
-
for result in pool.imap_unordered(identity_polygon, line_args):
|
|
199
|
-
if BT_DEBUGGING:
|
|
200
|
-
print('Got result: {}'.format(result), flush=True)
|
|
201
|
-
features.append(result)
|
|
202
|
-
step += 1
|
|
203
|
-
print('%{}'.format(step / total_steps * 100))
|
|
204
|
-
|
|
205
|
-
except OperationCancelledException:
|
|
206
|
-
print("Operation cancelled")
|
|
207
|
-
exit()
|
|
208
|
-
|
|
209
|
-
print("Identifies are done.")
|
|
210
|
-
return features
|
|
211
|
-
|
|
212
|
-
|
|
213
|
-
def execute_multiprocessing_csf(line_args, processes):
|
|
214
|
-
try:
|
|
215
|
-
total_steps = len(line_args)
|
|
216
|
-
features = []
|
|
217
|
-
with Pool(processes) as pool:
|
|
218
|
-
step = 0
|
|
219
|
-
# execute tasks in order, process results out of order
|
|
220
|
-
for result in pool.imap_unordered(regen_csf, line_args):
|
|
221
|
-
if BT_DEBUGGING:
|
|
222
|
-
print('Got result: {}'.format(result), flush=True)
|
|
223
|
-
print('Line processed: {}'.format(step))
|
|
224
|
-
|
|
225
|
-
features.append(result)
|
|
226
|
-
step += 1
|
|
227
|
-
print('%{}'.format(step / total_steps * 100))
|
|
228
|
-
|
|
229
|
-
except OperationCancelledException:
|
|
230
|
-
print("Operation cancelled")
|
|
231
|
-
exit()
|
|
232
|
-
|
|
233
|
-
return features
|
|
234
|
-
|
|
235
|
-
|
|
236
|
-
def fl_restration_csf(callback, in_line, in_footprint, in_trees, in_change, proc_segments, out_line, processes,
|
|
237
|
-
verbose):
|
|
238
|
-
# assign Tool arguments
|
|
239
|
-
BT_DEBUGGING = False
|
|
240
|
-
in_cl = in_line
|
|
241
|
-
in_fp = in_footprint
|
|
242
|
-
|
|
243
|
-
print("Checking input parameters ...")
|
|
244
|
-
|
|
245
|
-
try:
|
|
246
|
-
print("loading shapefile(s) ...")
|
|
247
|
-
in_line_shp = pyogrio.read_dataframe(in_line)
|
|
248
|
-
in_tree_shp = pyogrio.read_dataframe(in_trees)
|
|
249
|
-
in_fp_shp = pyogrio.read_dataframe(in_footprint)
|
|
250
|
-
except SystemError:
|
|
251
|
-
print("Invalid input feature, please check!")
|
|
252
|
-
exit()
|
|
253
|
-
|
|
254
|
-
# Check datum, at this stage only check input data against NAD 83 datum
|
|
255
|
-
print("Checking datum....")
|
|
256
|
-
sameDatum = False
|
|
257
|
-
for shp in [in_line_shp, in_tree_shp, in_fp_shp]:
|
|
258
|
-
if shp.crs.datum.name in NADDatum:
|
|
259
|
-
sameDatum = True
|
|
260
|
-
else:
|
|
261
|
-
sameDatum = False
|
|
262
|
-
try:
|
|
263
|
-
# Check projection zone among input data with NAD 83 datum
|
|
264
|
-
if sameDatum:
|
|
265
|
-
if in_line_shp.crs.utm_zone != in_tree_shp.crs.utm_zone != in_fp_shp.crs.utm_zone:
|
|
266
|
-
print("Input shapefiles are on different project Zone, please check.")
|
|
267
|
-
exit()
|
|
268
|
-
else:
|
|
269
|
-
print("Input shapefiles are not on supported Datum, please check.")
|
|
270
|
-
exit()
|
|
271
|
-
except Exception as error_in_shapefiles:
|
|
272
|
-
print("Input shapefiles are invalid: {} , please check.".format(error_in_shapefiles))
|
|
273
|
-
exit()
|
|
274
|
-
|
|
275
|
-
if not os.path.exists(os.path.dirname(out_line)):
|
|
276
|
-
os.makedirs(os.path.dirname(out_line))
|
|
277
|
-
else:
|
|
278
|
-
pass
|
|
279
|
-
print("Checking input parameters ... Done")
|
|
280
|
-
|
|
281
|
-
in_fields = list(in_line_shp.columns)
|
|
282
|
-
|
|
283
|
-
# check coordinate systems between line and raster features
|
|
284
|
-
try:
|
|
285
|
-
# Check projection zone among input raster with input vector data
|
|
286
|
-
with rasterio.open(in_change) as in_raster:
|
|
287
|
-
if not in_raster.crs.to_epsg() in [in_fp_shp.crs.to_epsg(), in_line_shp.crs.to_epsg(),
|
|
288
|
-
in_tree_shp.crs.to_epsg(), 2956]:
|
|
289
|
-
print("Line and raster spatial references are different , please check.")
|
|
290
|
-
exit()
|
|
291
|
-
else:
|
|
292
|
-
change_analysis = True
|
|
293
|
-
except Exception as error_in_raster:
|
|
294
|
-
|
|
295
|
-
print("Invalid input raster: {}, please check!".format(error_in_raster))
|
|
296
|
-
change_analysis = False
|
|
297
|
-
exit()
|
|
298
|
-
|
|
299
|
-
HasOLnFID = False
|
|
300
|
-
|
|
301
|
-
# determine to do area or/and height analysis
|
|
302
|
-
if len(in_fp_shp) == 0:
|
|
303
|
-
print('No footprints provided, buffer of the input lines will be used instead')
|
|
304
|
-
area_analysis = False
|
|
305
|
-
else:
|
|
306
|
-
area_analysis = True
|
|
307
|
-
|
|
308
|
-
print("Preparing line segments...")
|
|
309
|
-
|
|
310
|
-
# Segment lines
|
|
311
|
-
# Return split lines with two extra columns:['OLnFID','OLnSEG']
|
|
312
|
-
# or return whole input line
|
|
313
|
-
print("Input_Lines: {}".format(in_cl))
|
|
314
|
-
if not 'OLnFID' in in_line_shp.columns.array:
|
|
315
|
-
print(
|
|
316
|
-
"Cannot find {} column in input line data.\n '{}' column will be create".format('OLnFID', 'OLnFID'))
|
|
317
|
-
in_line_shp['OLnFID'] = in_line_shp.index
|
|
318
|
-
if proc_segments:
|
|
319
|
-
attr_seg_lines = line_split2(in_line_shp, 10)
|
|
320
|
-
else:
|
|
321
|
-
# copy original line input to another Geodataframe
|
|
322
|
-
attr_seg_lines = geopandas.GeoDataFrame.copy(in_line_shp)
|
|
323
|
-
if not "OLnSEG" in attr_seg_lines.columns.array:
|
|
324
|
-
if proc_segments:
|
|
325
|
-
attr_seg_lines["OLnSEG"] = int(attr_seg_lines["OLnSEG"])
|
|
326
|
-
else:
|
|
327
|
-
attr_seg_lines["OLnSEG"] = 0
|
|
328
|
-
print('%{}'.format(10))
|
|
329
|
-
|
|
330
|
-
print("Line segments preparation done.")
|
|
331
|
-
print("{} footprints to be identified by {} segments ...".format(len(in_fp_shp.index), len(attr_seg_lines)))
|
|
332
|
-
|
|
333
|
-
# Prepare line parameters for multiprocessing
|
|
334
|
-
line_args = []
|
|
335
|
-
|
|
336
|
-
# prepare line args: list of line, line buffer and footprint polygon
|
|
337
|
-
# footprint spatial searching
|
|
338
|
-
footprint_sindex = in_fp_shp.sindex
|
|
339
|
-
|
|
340
|
-
for i in attr_seg_lines.index:
|
|
341
|
-
line = attr_seg_lines.iloc[[i]]
|
|
342
|
-
line_buffer = line.copy()
|
|
343
|
-
if proc_segments:
|
|
344
|
-
line_buffer['geometry'] = line.simplify(tolerance=1, preserve_topology=True).buffer(10,
|
|
345
|
-
cap_style=shapely.BufferCapStyle.flat)
|
|
346
|
-
else:
|
|
347
|
-
line_buffer['geometry'] = line.buffer(10, cap_style=shapely.BufferCapStyle.flat)
|
|
348
|
-
fp_touched = in_fp_shp.iloc[
|
|
349
|
-
footprint_sindex.query(line_buffer.iloc[0].geometry, predicate="overlaps", sort=True)]
|
|
350
|
-
if not "OLnFID" in fp_touched.columns.array:
|
|
351
|
-
fp_touched["OLnFID"] = int(line["OLnFID"])
|
|
352
|
-
if not "OLnSEG" in fp_touched.columns.array:
|
|
353
|
-
if proc_segments:
|
|
354
|
-
fp_touched["OLnSEG"] = int(line["OLnSEG"])
|
|
355
|
-
else:
|
|
356
|
-
fp_touched["OLnSEG"] = 0
|
|
357
|
-
fp_intersected = fp_touched.dissolve()
|
|
358
|
-
fp_intersected.geometry = fp_intersected.geometry.clip(line_buffer)
|
|
359
|
-
fp_intersected['geometry'] = fp_intersected.geometry.map(lambda x: unary_union(x))
|
|
360
|
-
list_item = [line, fp_touched, fp_intersected]
|
|
361
|
-
|
|
362
|
-
line_args.append(list_item)
|
|
363
|
-
print("Identifying footprint.... ")
|
|
364
|
-
# multiprocessing of identity polygons
|
|
365
|
-
features = []
|
|
366
|
-
if not BT_DEBUGGING:
|
|
367
|
-
features = execute_multiprocessing_identity(line_args, processes)
|
|
368
|
-
else:
|
|
369
|
-
# Debug use
|
|
370
|
-
for index in range(0, len(line_args)):
|
|
371
|
-
result = (identity_polygon(line_args[index]))
|
|
372
|
-
if not len(result) == 0:
|
|
373
|
-
features.append(result)
|
|
374
|
-
|
|
375
|
-
print("Prepare for classify ...")
|
|
376
|
-
|
|
377
|
-
# prepare list of result_identity, Att_seg_lines, areaAnalysis, heightAnalysis, args.input
|
|
378
|
-
AOI_trees = in_tree_shp
|
|
379
|
-
line_args = []
|
|
380
|
-
for index in range(0, len(features)):
|
|
381
|
-
list_item = [features[index][0], features[index][1], area_analysis, change_analysis, in_change, AOI_trees]
|
|
382
|
-
line_args.append(list_item)
|
|
383
|
-
|
|
384
|
-
# Linear attributes
|
|
385
|
-
print("Classify lines ...")
|
|
386
|
-
print('%{}'.format(60))
|
|
387
|
-
|
|
388
|
-
# Multiprocessing regeneration classifying
|
|
389
|
-
features = []
|
|
390
|
-
BT_DEBUGGING = True
|
|
391
|
-
if not BT_DEBUGGING:
|
|
392
|
-
features = execute_multiprocessing_csf(line_args, processes)
|
|
393
|
-
else:
|
|
394
|
-
# Debug use
|
|
395
|
-
for index in range(0, len(line_args) - 1):
|
|
396
|
-
result = (regen_csf(line_args[index]))
|
|
397
|
-
if not len(result) == 0:
|
|
398
|
-
features.append(result)
|
|
399
|
-
print(result)
|
|
400
|
-
|
|
401
|
-
# Combine identity polygons into once geodataframe
|
|
402
|
-
if len(features) == 0:
|
|
403
|
-
print('No lines found.')
|
|
404
|
-
exit()
|
|
405
|
-
print('Appending output ...')
|
|
406
|
-
result_attr = geopandas.GeoDataFrame(pandas.concat(features, ignore_index=True))
|
|
407
|
-
result_attr.reset_index()
|
|
408
|
-
|
|
409
|
-
print('%{}'.format(90))
|
|
410
|
-
print('Saving output ...')
|
|
411
|
-
|
|
412
|
-
# Save attributed lines, was output_att_line
|
|
413
|
-
geopandas.GeoDataFrame.to_file(result_attr, out_line)
|
|
414
|
-
|
|
415
|
-
print('%{}'.format(100))
|
|
416
|
-
|
|
417
|
-
|
|
418
|
-
if __name__ == '__main__':
|
|
419
|
-
start_time = time.time()
|
|
420
|
-
print('Line regeneration classify started at {}'.format(time.strftime("%b %Y %H:%M:%S", time.localtime())))
|
|
421
|
-
|
|
422
|
-
# Get tool arguments
|
|
423
|
-
|
|
424
|
-
in_args, in_verbose = check_arguments()
|
|
425
|
-
fl_restration_csf(print, **in_args.input, processes=int(in_args.processes), verbose=in_verbose)
|
|
426
|
-
|
|
427
|
-
print('Current time: {}'.format(time.strftime("%d %b %Y %H:%M:%S", time.localtime())))
|
|
428
|
-
print('Line regeneration classify done in {} seconds'.format(round(time.time() - start_time, 5)))
|
|
1
|
+
import math
|
|
2
|
+
import time
|
|
3
|
+
import pandas
|
|
4
|
+
import geopandas
|
|
5
|
+
import numpy
|
|
6
|
+
import scipy
|
|
7
|
+
import os
|
|
8
|
+
import pyogrio
|
|
9
|
+
import shapely
|
|
10
|
+
from shapely.ops import unary_union, split
|
|
11
|
+
from rasterio import mask
|
|
12
|
+
import argparse
|
|
13
|
+
import json
|
|
14
|
+
from multiprocessing.pool import Pool
|
|
15
|
+
|
|
16
|
+
from common import *
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
class OperationCancelledException(Exception):
|
|
20
|
+
pass
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
def regen_csf(line_args):
|
|
24
|
+
# (result_identity,attr_seg_lines, area_analysis, change_analysis, in_change,in_tree_shp)
|
|
25
|
+
attr_seg_line = line_args[0]
|
|
26
|
+
result_identity = line_args[1]
|
|
27
|
+
|
|
28
|
+
area_analysis = line_args[2]
|
|
29
|
+
change_analysis = line_args[3]
|
|
30
|
+
in_change = line_args[4]
|
|
31
|
+
in_tree = line_args[5]
|
|
32
|
+
|
|
33
|
+
has_footprint = True
|
|
34
|
+
if type(result_identity) is geopandas.geodataframe.GeoDataFrame:
|
|
35
|
+
if result_identity.empty:
|
|
36
|
+
has_footprint = False
|
|
37
|
+
else:
|
|
38
|
+
# merge result_identity
|
|
39
|
+
result_identity = result_identity.dissolve()
|
|
40
|
+
|
|
41
|
+
elif not result_identity:
|
|
42
|
+
has_footprint = False
|
|
43
|
+
|
|
44
|
+
# Check if query result is not empty, if empty input identity footprint will be skipped
|
|
45
|
+
if attr_seg_line.empty:
|
|
46
|
+
return None
|
|
47
|
+
|
|
48
|
+
if "AvgWidth" in attr_seg_line.columns.array:
|
|
49
|
+
max_ln_width = math.ceil(attr_seg_line["AvgWidth"])
|
|
50
|
+
if not max_ln_width >= 1.0:
|
|
51
|
+
max_ln_width = 0.5
|
|
52
|
+
else:
|
|
53
|
+
if has_footprint:
|
|
54
|
+
# estimate width= (Perimeter -Sqrt(Perimeter^2-16*Area))/4
|
|
55
|
+
# for long and skinny: estimate width = 2*Area / Perimeter
|
|
56
|
+
P = float(result_identity.geometry.length)
|
|
57
|
+
A = float(result_identity.geometry.area)
|
|
58
|
+
max_ln_width = math.ceil((2 * A) / P)
|
|
59
|
+
if not max_ln_width >= 1.0:
|
|
60
|
+
max_ln_width = 0.5
|
|
61
|
+
else:
|
|
62
|
+
max_ln_width = 0.5
|
|
63
|
+
index = 0
|
|
64
|
+
|
|
65
|
+
if change_analysis and has_footprint: # with change raster and footprint
|
|
66
|
+
|
|
67
|
+
fp = result_identity.iloc[0].geometry
|
|
68
|
+
line_feat = attr_seg_line.iloc[0].geometry
|
|
69
|
+
|
|
70
|
+
# if the selected seg do not have identity footprint geometry
|
|
71
|
+
if shapely.is_empty(fp):
|
|
72
|
+
# use the buffer from the segment line
|
|
73
|
+
line_buffer = shapely.buffer(line_feat, float(max_ln_width) / 4)
|
|
74
|
+
else:
|
|
75
|
+
# if identity footprint has geometry, use as a buffer area
|
|
76
|
+
line_buffer = fp
|
|
77
|
+
# check trees
|
|
78
|
+
with rasterio.open(in_change) as in_change_file:
|
|
79
|
+
cell_size_x = in_change_file.transform[0]
|
|
80
|
+
cell_size_y = -in_change_file.transform[4]
|
|
81
|
+
# clipped the change base on polygon of line buffer or footprint
|
|
82
|
+
clipped_change, out_transform = rasterio.mask.mask(in_change_file, [line_buffer], crop=True)
|
|
83
|
+
|
|
84
|
+
# drop the ndarray to 2D ndarray
|
|
85
|
+
clipped_change = numpy.squeeze(clipped_change, axis=0)
|
|
86
|
+
|
|
87
|
+
# masked all NoData value cells
|
|
88
|
+
clean_change = numpy.ma.masked_where(clipped_change == in_change_file.nodata, clipped_change)
|
|
89
|
+
|
|
90
|
+
# Calculate the summary statistics from the clipped change
|
|
91
|
+
change_mean = numpy.nanmean(clean_change)
|
|
92
|
+
# count trees within FP area
|
|
93
|
+
trees_counts = len(in_tree[in_tree.within(line_buffer)])
|
|
94
|
+
trees_density = trees_counts / line_buffer.area
|
|
95
|
+
if trees_density >= 0.6:
|
|
96
|
+
reg_class = "Advanced"
|
|
97
|
+
elif 0.2 < trees_density < 0.6:
|
|
98
|
+
reg_class = "Regenerating"
|
|
99
|
+
else: # 0-60 trees counts
|
|
100
|
+
if change_mean > 0.06:
|
|
101
|
+
reg_class = "Regenerating"
|
|
102
|
+
else:
|
|
103
|
+
reg_class = "Arrested"
|
|
104
|
+
|
|
105
|
+
elif change_analysis and not has_footprint: # with change raster but no footprint
|
|
106
|
+
|
|
107
|
+
line_feat = attr_seg_line.geometry.iloc[0]
|
|
108
|
+
line_buffer = shapely.buffer(line_feat, float(max_ln_width))
|
|
109
|
+
|
|
110
|
+
with rasterio.open(in_change) as in_change_file:
|
|
111
|
+
cell_size_x = in_change_file.transform[0]
|
|
112
|
+
cell_size_y = -in_change_file.transform[4]
|
|
113
|
+
# Calculate the mean changes
|
|
114
|
+
# clipped the change base on polygon of line buffer or footprint
|
|
115
|
+
clipped_change, out_transform = rasterio.mask.mask(in_change_file, [line_buffer], crop=True)
|
|
116
|
+
|
|
117
|
+
# drop the ndarray to 2D ndarray
|
|
118
|
+
clipped_change = numpy.squeeze(clipped_change, axis=0)
|
|
119
|
+
|
|
120
|
+
# masked all NoData value cells
|
|
121
|
+
clean_change = numpy.ma.masked_where(clipped_change == in_change_file.nodata, clipped_change)
|
|
122
|
+
|
|
123
|
+
# Calculate the summary statistics from the clipped change
|
|
124
|
+
change_mean = numpy.nanmean(clean_change)
|
|
125
|
+
# count trees within FP area
|
|
126
|
+
trees_counts = len(in_tree[in_tree.within(line_buffer)])
|
|
127
|
+
trees_density = trees_counts / line_buffer.area
|
|
128
|
+
if trees_density >= 0.6:
|
|
129
|
+
reg_class = "Advanced"
|
|
130
|
+
elif 0.2 < trees_density < 0.6:
|
|
131
|
+
reg_class = "Regenerating"
|
|
132
|
+
else: # 0-60 trees counts
|
|
133
|
+
if change_mean > 0.06:
|
|
134
|
+
reg_class = "Regenerating"
|
|
135
|
+
else:
|
|
136
|
+
reg_class = "Arrested"
|
|
137
|
+
elif not change_analysis or not has_footprint: # Either no change_analysis or no footprint
|
|
138
|
+
line_feat = attr_seg_line.geometry.iloc[0]
|
|
139
|
+
|
|
140
|
+
# if the selected seg do not have identity footprint geometry
|
|
141
|
+
line_buffer = shapely.buffer(line_feat, float(max_ln_width))
|
|
142
|
+
|
|
143
|
+
# count trees within FP area
|
|
144
|
+
trees_counts = len(in_tree[in_tree.within(line_buffer)])
|
|
145
|
+
trees_density = trees_counts / line_buffer.area
|
|
146
|
+
if trees_density >= 0.6:
|
|
147
|
+
reg_class = "Advanced"
|
|
148
|
+
elif 0.2 < trees_density < 0.6:
|
|
149
|
+
reg_class = "Regenerating"
|
|
150
|
+
else:
|
|
151
|
+
reg_class = "Not Available"
|
|
152
|
+
|
|
153
|
+
change_mean = numpy.nan
|
|
154
|
+
elif not change_analysis and not has_footprint: # no change raster and no footprint
|
|
155
|
+
reg_class = "Not Available"
|
|
156
|
+
change_mean = numpy.nan
|
|
157
|
+
trees_counts = numpy.nan
|
|
158
|
+
trees_density = numpy.nan
|
|
159
|
+
|
|
160
|
+
attr_seg_line["AveChanges"] = change_mean
|
|
161
|
+
attr_seg_line["Num_trees"] = trees_counts
|
|
162
|
+
attr_seg_line["trees_density"] = trees_density
|
|
163
|
+
attr_seg_line["Reg_Class"] = reg_class
|
|
164
|
+
return attr_seg_line
|
|
165
|
+
|
|
166
|
+
|
|
167
|
+
def identity_polygon(line_args):
    """
    Overlay one line's search polygon against the footprints it touched.

    Args:
        line_args: 3-item sequence of
            [0] line (GeoDataFrame, single row),
            [1] touched footprints (GeoDataFrame; 'geometry'/'OLnFID'/'OLnSEG'),
            [2] search polygon (GeoDataFrame).

    Returns:
        tuple: (line, identity) where identity is the overlay result keyed by
        'OLnFID'/'OLnSEG', or None when the search polygon is empty or the
        overlay failed.
    """
    line = line_args[0]
    in_touched_fp = line_args[1][['geometry', 'OLnFID', 'OLnSEG']]
    in_search_polygon = line_args[2]

    # Guarantee the id columns the overlay result is renamed from/keyed on.
    if 'OLnSEG' not in in_search_polygon.columns.array:
        in_search_polygon = in_search_polygon.assign(OLnSEG=0)
    if 'OLnFID' not in in_search_polygon.columns.array:
        # BUG FIX: the original read in_search_polygon['OLnFID'].index inside
        # the branch where 'OLnFID' is known to be missing, raising KeyError
        # outside the try below. Use the frame's own index instead.
        in_search_polygon = in_search_polygon.assign(OLnFID=in_search_polygon.index)

    identity = None
    try:
        # TODO: determine when there is empty polygon
        # TODO: this will produce empty identity
        if not in_search_polygon.empty:
            identity = in_search_polygon.overlay(in_touched_fp, how='identity')
            identity = identity.dropna(subset=['OLnSEG_2', 'OLnFID_2'])
            identity = identity.drop(columns=['OLnSEG_1', 'OLnFID_2'])
            identity = identity.rename(columns={'OLnFID_1': 'OLnFID', 'OLnSEG_2': 'OLnSEG'})
    except Exception as e:
        # Best-effort: report and fall through with identity=None so the
        # caller can decide how to handle the failed segment.
        print(e)

    return line, identity
|
|
188
|
+
|
|
189
|
+
|
|
190
|
+
def execute_multiprocessing_identity(line_args, processes):
    """Run identity_polygon over line_args in a worker pool.

    Results are gathered in completion order; a percentage progress
    indicator is printed per finished task. Exits the interpreter if
    the operation is cancelled.
    """
    collected = []
    try:
        n_tasks = len(line_args)
        with Pool(processes) as pool:
            # Tasks are submitted in order but consumed as they finish.
            for done, outcome in enumerate(
                    pool.imap_unordered(identity_polygon, line_args), start=1):
                if BT_DEBUGGING:
                    print('Got result: {}'.format(outcome), flush=True)
                collected.append(outcome)
                print('%{}'.format(done / n_tasks * 100))
    except OperationCancelledException:
        print("Operation cancelled")
        exit()

    print("Identifies are done.")
    return collected
|
|
211
|
+
|
|
212
|
+
|
|
213
|
+
def execute_multiprocessing_csf(line_args, processes):
    """Classify line regeneration (regen_csf) in parallel.

    Emits percentage progress on stdout and returns results in the
    order workers complete them. Exits the interpreter on cancellation.
    """
    results = []
    try:
        task_count = len(line_args)
        with Pool(processes) as pool:
            completed = 0
            # Consume results as workers finish them, not submission order.
            for item in pool.imap_unordered(regen_csf, line_args):
                if BT_DEBUGGING:
                    print('Got result: {}'.format(item), flush=True)
                    # Note: reports the pre-increment counter (0-based).
                    print('Line processed: {}'.format(completed))

                results.append(item)
                completed += 1
                print('%{}'.format(completed / task_count * 100))
    except OperationCancelledException:
        print("Operation cancelled")
        exit()

    return results
|
|
234
|
+
|
|
235
|
+
|
|
236
|
+
def fl_restration_csf(callback, in_line, in_footprint, in_trees, in_change, proc_segments, out_line, processes,
                      verbose):
    """
    Classify forest-line regeneration status and write attributed lines.

    Loads the line, tree and footprint shapefiles, validates datum/zone
    agreement and the change raster's CRS, identifies each (optionally
    segmented) line's footprint polygon, classifies regeneration per line
    via regen_csf, and saves the combined result to out_line.

    Args:
        callback: progress/print callable (kept for tool API compatibility;
            not used directly here).
        in_line: path to input line shapefile.
        in_footprint: path to footprint polygon shapefile.
        in_trees: path to tree point shapefile.
        in_change: path to change-detection raster.
        proc_segments: when truthy, split lines into segments before analysis.
        out_line: output shapefile path (parent directory is created).
        processes: worker count for multiprocessing.
        verbose: verbosity flag (kept for tool API compatibility).
    """
    # assign Tool arguments
    BT_DEBUGGING = False
    in_cl = in_line

    print("Checking input parameters ...")

    try:
        print("loading shapefile(s) ...")
        in_line_shp = pyogrio.read_dataframe(in_line)
        in_tree_shp = pyogrio.read_dataframe(in_trees)
        in_fp_shp = pyogrio.read_dataframe(in_footprint)
    except SystemError:
        print("Invalid input feature, please check!")
        exit()

    # Check datum, at this stage only check input data against NAD 83 datum.
    # BUG FIX: the original loop overwrote sameDatum each iteration, so only
    # the LAST shapefile was effectively checked; require all three to match.
    print("Checking datum....")
    sameDatum = all(
        shp.crs.datum.name in NADDatum for shp in [in_line_shp, in_tree_shp, in_fp_shp]
    )
    try:
        # Check projection zone among input data with NAD 83 datum.
        if sameDatum:
            # BUG FIX: the original chained `a != b != c` comparison never
            # compared the first and third zones; require all equal.
            if not (in_line_shp.crs.utm_zone == in_tree_shp.crs.utm_zone == in_fp_shp.crs.utm_zone):
                print("Input shapefiles are on different project Zone, please check.")
                exit()
        else:
            print("Input shapefiles are not on supported Datum, please check.")
            exit()
    except Exception as error_in_shapefiles:
        print("Input shapefiles are invalid: {} , please check.".format(error_in_shapefiles))
        exit()

    if not os.path.exists(os.path.dirname(out_line)):
        os.makedirs(os.path.dirname(out_line))
    print("Checking input parameters ... Done")

    # check coordinate systems between line and raster features
    try:
        # Check projection zone among input raster with input vector data.
        # 2956 (NAD83(CSRS) / UTM 12N) is accepted as a known-compatible EPSG.
        with rasterio.open(in_change) as in_raster:
            if not in_raster.crs.to_epsg() in [in_fp_shp.crs.to_epsg(), in_line_shp.crs.to_epsg(),
                                               in_tree_shp.crs.to_epsg(), 2956]:
                print("Line and raster spatial references are different , please check.")
                exit()
            else:
                change_analysis = True
    except Exception as error_in_raster:
        print("Invalid input raster: {}, please check!".format(error_in_raster))
        change_analysis = False
        exit()

    # determine to do area or/and height analysis
    if len(in_fp_shp) == 0:
        print('No footprints provided, buffer of the input lines will be used instead')
        area_analysis = False
    else:
        area_analysis = True

    print("Preparing line segments...")

    # Segment lines.
    # Return split lines with two extra columns: ['OLnFID', 'OLnSEG']
    # or return the whole input line.
    print("Input_Lines: {}".format(in_cl))
    if 'OLnFID' not in in_line_shp.columns.array:
        print(
            "Cannot find {} column in input line data.\n '{}' column will be create".format('OLnFID', 'OLnFID'))
        in_line_shp['OLnFID'] = in_line_shp.index
    if proc_segments:
        attr_seg_lines = line_split2(in_line_shp, 10)
    else:
        # copy original line input to another GeoDataFrame
        attr_seg_lines = geopandas.GeoDataFrame.copy(in_line_shp)
    if 'OLnSEG' not in attr_seg_lines.columns.array:
        if proc_segments:
            attr_seg_lines["OLnSEG"] = int(attr_seg_lines["OLnSEG"])
        else:
            attr_seg_lines["OLnSEG"] = 0
    print('%{}'.format(10))

    print("Line segments preparation done.")
    print("{} footprints to be identified by {} segments ...".format(len(in_fp_shp.index), len(attr_seg_lines)))

    # Prepare line parameters for multiprocessing:
    # list of [line, touched footprints, clipped/dissolved footprint].
    line_args = []

    # footprint spatial searching
    footprint_sindex = in_fp_shp.sindex

    for i in attr_seg_lines.index:
        line = attr_seg_lines.iloc[[i]]
        line_buffer = line.copy()
        if proc_segments:
            line_buffer['geometry'] = line.simplify(tolerance=1, preserve_topology=True).buffer(
                10, cap_style=shapely.BufferCapStyle.flat)
        else:
            line_buffer['geometry'] = line.buffer(10, cap_style=shapely.BufferCapStyle.flat)
        fp_touched = in_fp_shp.iloc[
            footprint_sindex.query(line_buffer.iloc[0].geometry, predicate="overlaps", sort=True)]
        if 'OLnFID' not in fp_touched.columns.array:
            fp_touched["OLnFID"] = int(line["OLnFID"])
        if 'OLnSEG' not in fp_touched.columns.array:
            if proc_segments:
                fp_touched["OLnSEG"] = int(line["OLnSEG"])
            else:
                fp_touched["OLnSEG"] = 0
        # Collapse touched footprints to one geometry restricted to the buffer.
        fp_intersected = fp_touched.dissolve()
        fp_intersected.geometry = fp_intersected.geometry.clip(line_buffer)
        fp_intersected['geometry'] = fp_intersected.geometry.map(lambda x: unary_union(x))
        line_args.append([line, fp_touched, fp_intersected])

    print("Identifying footprint.... ")
    # multiprocessing of identity polygons
    features = []
    if not BT_DEBUGGING:
        features = execute_multiprocessing_identity(line_args, processes)
    else:
        # Debug use: run sequentially for easier troubleshooting.
        for index in range(0, len(line_args)):
            result = identity_polygon(line_args[index])
            if not len(result) == 0:
                features.append(result)

    print("Prepare for classify ...")

    # prepare list of result_identity, Att_seg_lines, areaAnalysis,
    # heightAnalysis, args.input
    AOI_trees = in_tree_shp
    line_args = []
    for index in range(0, len(features)):
        line_args.append(
            [features[index][0], features[index][1], area_analysis, change_analysis, in_change, AOI_trees])

    # Linear attributes
    print("Classify lines ...")
    print('%{}'.format(60))

    # Multiprocessing regeneration classifying
    features = []
    # NOTE(review): this forces the sequential debug path below and bypasses
    # execute_multiprocessing_csf — looks like a debug leftover; confirm
    # before re-enabling multiprocessing.
    BT_DEBUGGING = True
    if not BT_DEBUGGING:
        features = execute_multiprocessing_csf(line_args, processes)
    else:
        # Debug use.
        # BUG FIX: the original iterated range(len(line_args) - 1) and
        # silently dropped the last line segment from the output.
        for index in range(0, len(line_args)):
            result = regen_csf(line_args[index])
            if not len(result) == 0:
                features.append(result)
            print(result)

    # Combine identity polygons into one geodataframe
    if len(features) == 0:
        print('No lines found.')
        exit()
    print('Appending output ...')
    # ignore_index=True already produces a fresh RangeIndex, so no further
    # reset_index is needed.
    result_attr = geopandas.GeoDataFrame(pandas.concat(features, ignore_index=True))

    print('%{}'.format(90))
    print('Saving output ...')

    # Save attributed lines, was output_att_line
    geopandas.GeoDataFrame.to_file(result_attr, out_line)

    print('%{}'.format(100))
|
|
416
|
+
|
|
417
|
+
|
|
418
|
+
if __name__ == '__main__':
    # Script entry point: parse the CLI arguments and run the classifier,
    # reporting wall-clock duration at the end.
    t_start = time.time()
    launch_stamp = time.strftime("%b %Y %H:%M:%S", time.localtime())
    print(f'Line regeneration classify started at {launch_stamp}')

    # Get tool arguments
    cli_args, cli_verbose = check_arguments()
    fl_restration_csf(print, **cli_args.input, processes=int(cli_args.processes), verbose=cli_verbose)

    finish_stamp = time.strftime("%d %b %Y %H:%M:%S", time.localtime())
    print(f'Current time: {finish_stamp}')
    elapsed = round(time.time() - t_start, 5)
    print(f'Line regeneration classify done in {elapsed} seconds')
|