BERATools 0.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- beratools/__init__.py +9 -0
- beratools/core/__init__.py +0 -0
- beratools/core/algo_centerline.py +351 -0
- beratools/core/constants.py +86 -0
- beratools/core/dijkstra_algorithm.py +460 -0
- beratools/core/logger.py +85 -0
- beratools/core/tool_base.py +133 -0
- beratools/gui/__init__.py +15 -0
- beratools/gui/batch_processing_dlg.py +463 -0
- beratools/gui/beratools.json +2300 -0
- beratools/gui/bt_data.py +487 -0
- beratools/gui/bt_gui_main.py +691 -0
- beratools/gui/cli.py +18 -0
- beratools/gui/gui.json +8 -0
- beratools/gui/img/BERALogo.png +0 -0
- beratools/gui/img/closed.gif +0 -0
- beratools/gui/img/closed.png +0 -0
- beratools/gui/img/open.gif +0 -0
- beratools/gui/img/open.png +0 -0
- beratools/gui/img/tool.gif +0 -0
- beratools/gui/img/tool.png +0 -0
- beratools/gui/map_window.py +146 -0
- beratools/gui/tool_widgets.py +493 -0
- beratools/gui_tk/ASCII Banners.txt +248 -0
- beratools/gui_tk/__init__.py +20 -0
- beratools/gui_tk/beratools_main.py +515 -0
- beratools/gui_tk/bt_widgets.py +442 -0
- beratools/gui_tk/cli.py +18 -0
- beratools/gui_tk/gui.json +8 -0
- beratools/gui_tk/img/BERALogo.png +0 -0
- beratools/gui_tk/img/closed.gif +0 -0
- beratools/gui_tk/img/closed.png +0 -0
- beratools/gui_tk/img/open.gif +0 -0
- beratools/gui_tk/img/open.png +0 -0
- beratools/gui_tk/img/tool.gif +0 -0
- beratools/gui_tk/img/tool.png +0 -0
- beratools/gui_tk/main.py +14 -0
- beratools/gui_tk/map_window.py +144 -0
- beratools/gui_tk/runner.py +1481 -0
- beratools/gui_tk/tooltip.py +55 -0
- beratools/third_party/pyqtlet2/__init__.py +9 -0
- beratools/third_party/pyqtlet2/leaflet/__init__.py +26 -0
- beratools/third_party/pyqtlet2/leaflet/control/__init__.py +6 -0
- beratools/third_party/pyqtlet2/leaflet/control/control.py +59 -0
- beratools/third_party/pyqtlet2/leaflet/control/draw.py +52 -0
- beratools/third_party/pyqtlet2/leaflet/control/layers.py +20 -0
- beratools/third_party/pyqtlet2/leaflet/core/Parser.py +24 -0
- beratools/third_party/pyqtlet2/leaflet/core/__init__.py +2 -0
- beratools/third_party/pyqtlet2/leaflet/core/evented.py +180 -0
- beratools/third_party/pyqtlet2/leaflet/layer/__init__.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/featuregroup.py +34 -0
- beratools/third_party/pyqtlet2/leaflet/layer/icon/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/layer/icon/icon.py +30 -0
- beratools/third_party/pyqtlet2/leaflet/layer/imageoverlay.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/layer.py +105 -0
- beratools/third_party/pyqtlet2/leaflet/layer/layergroup.py +45 -0
- beratools/third_party/pyqtlet2/leaflet/layer/marker/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/layer/marker/marker.py +91 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/__init__.py +2 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/gridlayer.py +4 -0
- beratools/third_party/pyqtlet2/leaflet/layer/tile/tilelayer.py +16 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/__init__.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circle.py +15 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/circlemarker.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/path.py +5 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polygon.py +14 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/polyline.py +18 -0
- beratools/third_party/pyqtlet2/leaflet/layer/vector/rectangle.py +14 -0
- beratools/third_party/pyqtlet2/leaflet/map/__init__.py +1 -0
- beratools/third_party/pyqtlet2/leaflet/map/map.py +220 -0
- beratools/third_party/pyqtlet2/mapwidget.py +45 -0
- beratools/third_party/pyqtlet2/web/custom.js +43 -0
- beratools/third_party/pyqtlet2/web/map.html +23 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.css +656 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_193/leaflet.js +6 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.codeclimate.yml +14 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.editorconfig +4 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.gitattributes +22 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/.travis.yml +43 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/LICENSE +20 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/layers.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-icon.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/marker-shadow.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet-2x.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.png +0 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/images/spritesheet.svg +156 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.css +10 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_draw_414/leaflet.draw.js +10 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/LICENSE +22 -0
- beratools/third_party/pyqtlet2/web/modules/leaflet_rotatedMarker_020/leaflet.rotatedMarker.js +57 -0
- beratools/tools/Beratools_r_script.r +1120 -0
- beratools/tools/Ht_metrics.py +116 -0
- beratools/tools/__init__.py +7 -0
- beratools/tools/batch_processing.py +132 -0
- beratools/tools/canopy_threshold_relative.py +670 -0
- beratools/tools/canopycostraster.py +222 -0
- beratools/tools/centerline.py +176 -0
- beratools/tools/common.py +885 -0
- beratools/tools/fl_regen_csf.py +428 -0
- beratools/tools/forest_line_attributes.py +408 -0
- beratools/tools/forest_line_ecosite.py +216 -0
- beratools/tools/lapis_all.py +103 -0
- beratools/tools/least_cost_path_from_chm.py +152 -0
- beratools/tools/line_footprint_absolute.py +363 -0
- beratools/tools/line_footprint_fixed.py +282 -0
- beratools/tools/line_footprint_functions.py +720 -0
- beratools/tools/line_footprint_relative.py +64 -0
- beratools/tools/ln_relative_metrics.py +615 -0
- beratools/tools/r_cal_lpi_elai.r +25 -0
- beratools/tools/r_generate_pd_focalraster.r +101 -0
- beratools/tools/r_interface.py +80 -0
- beratools/tools/r_point_density.r +9 -0
- beratools/tools/rpy_chm2trees.py +86 -0
- beratools/tools/rpy_dsm_chm_by.py +81 -0
- beratools/tools/rpy_dtm_by.py +63 -0
- beratools/tools/rpy_find_cellsize.py +43 -0
- beratools/tools/rpy_gnd_csf.py +74 -0
- beratools/tools/rpy_hummock_hollow.py +85 -0
- beratools/tools/rpy_hummock_hollow_raster.py +71 -0
- beratools/tools/rpy_las_info.py +51 -0
- beratools/tools/rpy_laz2las.py +40 -0
- beratools/tools/rpy_lpi_elai_lascat.py +466 -0
- beratools/tools/rpy_normalized_lidar_by.py +56 -0
- beratools/tools/rpy_percent_above_dbh.py +80 -0
- beratools/tools/rpy_points2trees.py +88 -0
- beratools/tools/rpy_vegcoverage.py +94 -0
- beratools/tools/tiler.py +206 -0
- beratools/tools/tool_template.py +54 -0
- beratools/tools/vertex_optimization.py +620 -0
- beratools/tools/zonal_threshold.py +144 -0
- beratools-0.2.0.dist-info/METADATA +63 -0
- beratools-0.2.0.dist-info/RECORD +142 -0
- beratools-0.2.0.dist-info/WHEEL +4 -0
- beratools-0.2.0.dist-info/entry_points.txt +2 -0
- beratools-0.2.0.dist-info/licenses/LICENSE +22 -0
|
@@ -0,0 +1,103 @@
|
|
|
1
|
+
|
|
2
|
+
from random import random
|
|
3
|
+
import time
|
|
4
|
+
from multiprocessing.pool import Pool
|
|
5
|
+
from numpy import mean
|
|
6
|
+
from beratools.tools.common import *
|
|
7
|
+
import json
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class OperationCancelledException(Exception):
    """Raised to abort a long-running tool operation when the user cancels."""
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
def lapis_all(args, callback=print, processes=None, verbose=None):
    """Write a Lapis INI file from JSON arguments and return the command line.

    Args:
        args: JSON string with keys 'in_dtm', 'in_las' and 'out_dir'.
        callback: progress-reporting callable (defaults to print).
        processes, verbose: accepted for tool-interface compatibility; unused.

    Returns:
        Command-line string invoking Lapis.exe with the generated INI file.
    """
    lapis_path = '../third_party/Lapis_0_8/Lapis.exe'
    lapis_path = Path(__file__).parent.joinpath(lapis_path).resolve()
    # NOTE(review): the raw Windows-style relative path assumes a '.data'
    # directory exists two levels above this file — confirm on deployment.
    ini_file = Path(__file__).parents[2].joinpath(r'.\.data\lapis.ini').resolve().as_posix()

    arg_parsed = json.loads(args)
    in_dtm = arg_parsed['in_dtm']
    in_las = arg_parsed['in_las']
    out_dir = arg_parsed['out_dir']

    # INI content, byte-identical to the sequence of writes it replaces.
    ini_lines = [
        '#Data options\n',
        'dem=' + in_dtm + '\n',
        'dem-units = unspecified\n',
        'dem-algo = raster\n',
        'las=' + in_las + '\n',
        'las-units=unspecified\n',
        'output=' + out_dir + '\n',
        '\n # Computer-specific options\n',
        'thread=50\n',
        'bench=\n',
        '\n # Processing options\n',
        'xres=0.15\n',
        'yres=0.15\n',
        'xorigin=0\n',
        'yorigin=0\n',
        'csm-cellsize=0.15\n',
        'footprint=0.1\n',
        'smooth=1\n',
        'minht=-8\n',
        'maxht=100\n',
        'class=~7, 9, 18\n',
        'max-scan-angle=32\n',
        'user-units=meters\n',
        'canopy=3\n',
        'strata=0.5, 1, 2, 4, 8, 16, 32, 48, 64,\n',
        'min-tao-dist=1\n',
        'id-algo=highpoint\n',
        'seg-algo=watershed\n',
        'topo-scale=500, 1000, 2000,\n',
        'fine-int=\n',
    ]

    # Bug fix: the original opened the file with open() and never closed it,
    # so the handle leaked and the INI could still be unflushed when the
    # returned command line was handed to the caller. The context manager
    # guarantees flush-and-close before we return.
    with open(ini_file, 'w') as f:
        f.writelines(ini_lines)

    args = lapis_path.as_posix() + ' --ini-file' + ' ' + ini_file

    callback('Lapis parameters returned.')
    return args
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
# task executed in a worker process
|
|
65
|
+
def worker(task_data):
    """Worker-process task: average the sample, sleep proportionally, return it."""
    avg = mean(task_data)
    print(f'Task {len(task_data)} with {avg} executed', flush=True)

    # simulate variable-length work proportional to the computed mean
    time.sleep(avg * 10)

    return avg
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
# protect the entry point
|
|
78
|
+
def execute_multiprocessing():
    """Demo driver: fan 300 random averaging tasks out to a process pool."""
    # 300 tasks of 100 uniform samples each
    task_data = [[random() for _ in range(100)] for _ in range(300)]
    total_steps = 300
    try:
        with Pool() as pool:
            done = 0
            # tasks are submitted in order; results arrive out of order
            for outcome in pool.imap_unordered(worker, task_data):
                print(f'Got result: {outcome}', flush=True)
                done += 1
                print(done)
                print('%{}'.format(done / total_steps * 100))

    except OperationCancelledException:
        print("Operation cancelled")
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
if __name__ == '__main__':
    # CLI entry point is currently disabled; lapis_all() is invoked from the
    # tool framework instead. The commented lines show the intended usage.
    # in_args, in_verbose = check_arguments()
    # start_time = time.time()
    # lapis_all(print, **in_args.input, processes=int(in_args.processes), verbose=in_verbose)
    #
    # print('Elapsed time: {}'.format(time.time() - start_time))

    pass
|
|
@@ -0,0 +1,152 @@
|
|
|
1
|
+
import time
|
|
2
|
+
from xrspatial import convolution
|
|
3
|
+
|
|
4
|
+
from beratools.tools.common import *
|
|
5
|
+
from beratools.core.algo_centerline import *
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def LCP_centerline(callback, in_line, in_chm, line_radius,
                   proc_segments, out_line, processes, verbose):
    """Compute least-cost-path centerlines for input lines using a CHM raster.

    Args:
        callback: progress-reporting callable (passed in but not invoked here).
        in_line: path to the input line shapefile.
        in_chm: path to the canopy height model raster.
        line_radius: buffer radius (map units) used to clip the CHM per line.
        proc_segments: when truthy, split each line into per-vertex segments.
        out_line: base output path; '_least_cost_path' is appended to the stem.
        processes: number of worker processes for execute_multiprocessing().
        verbose: verbosity flag forwarded to execute_multiprocessing().
    """
    if not compare_crs(vector_crs(in_line), raster_crs(in_chm)):
        print("Line and CHM have different spatial references, please check.")
        return

    # Read input line features
    layer_crs = None
    schema = None
    input_lines = []

    # Explode multipart lines; if any were found the cleaned frame is written
    # back over the input file before it is re-read with fiona below.
    df, found = chk_df_multipart(gpd.GeoDataFrame.from_file(in_line), 'MultiLineString')
    if found:
        df.to_file(in_line)
    else:
        del df, found

    with fiona.open(in_line) as open_line_file:
        layer_crs = open_line_file.crs
        schema = open_line_file.meta['schema']
        for line in open_line_file:
            if line.geometry:
                if line.geometry['type'] != 'MultiLineString':
                    input_lines.append([line.geometry, line.properties])
                else:
                    # defensive: split any multipart geometry that survived
                    # the chk_df_multipart pass above
                    print('MultiLineString found.')
                    geoms = shape(line['geometry']).geoms
                    for item in geoms:
                        line_part = fiona.Geometry.from_dict(item)
                        if line_part:
                            input_lines.append([line_part, line.properties])
            else:
                print(f'Line {line.id} has empty geometry.')

    if proc_segments:
        # split line segments at vertices
        input_lines_temp = []
        for line in input_lines:
            line_seg = line[0]
            line_prop = line[1]
            line_segs = segments(line_seg.coordinates)
            # NOTE(review): the comprehension variable shadows the outer
            # 'line'; each feature becomes (segment, original properties) —
            # confirm the shadowing is intentional.
            line_feats = [(line, line_prop) for line in line_segs]
            if line_segs:
                input_lines_temp.extend(line_feats)

        input_lines = input_lines_temp

    # Process lines: build one task tuple per line for the worker function
    all_lines = []
    i = 0
    for line in input_lines:
        all_lines.append((line, line_radius, in_chm, i))
        i += 1

    print('{} lines to be processed.'.format(len(all_lines)))

    feat_geoms = []
    feat_props = []
    center_line_geoms = []
    corridor_poly_list = []
    result = execute_multiprocessing(process_single_line, all_lines, 'Centerline',
                                     processes, 1, verbose=verbose)

    # Collect per-line results:
    # (least-cost path geometry, properties, centerline, corridor polygon)
    for item in result:
        geom = item[0]
        prop = item[1]
        center_line = item[2]
        corridor_poly = item[3]

        if geom and prop:
            feat_geoms.append(geom)
            feat_props.append(prop)
            center_line_geoms.append(center_line)
            corridor_poly_list.append(corridor_poly)

    # Write least-cost paths next to out_line with a '_least_cost_path' suffix
    out_least_cost_path = Path(out_line)
    out_least_cost_path = out_least_cost_path.with_stem(out_least_cost_path.stem + '_least_cost_path')
    schema['properties']['status'] = 'int'  # per-feature centerline status code

    save_features_to_shapefile(out_least_cost_path.as_posix(), layer_crs, feat_geoms, feat_props, schema)
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
def process_single_line(line_args):
    """Derive the least-cost path and centerline for a single seed line.

    Args:
        line_args: tuple of ((geometry, properties), line_radius,
            CHM raster path, line id), as assembled by LCP_centerline().

    Returns:
        (least-cost path, properties, centerline, corridor polygon GeoDataFrame);
        on failure the seed line is returned for both geometries with a FAILED
        status and a None corridor polygon.
    """
    line = line_args[0][0]
    prop = line_args[0][1]
    line_radius = float(line_args[1])
    in_chm_raster = line_args[2]
    line_id = line_args[3]
    seed_line = shape(line)  # LineString

    # Clip the CHM around the seed line and derive the cost surface.
    chm_clip, out_meta = clip_raster(in_chm_raster, seed_line, line_radius)
    in_chm = np.squeeze(chm_clip, axis=0)  # drop the band axis
    cell_x, cell_y = out_meta['transform'][0], -out_meta['transform'][4]
    kernel = convolution.circle_kernel(cell_x, cell_y, 2.5)
    dyn_canopy_ndarray = dyn_np_cc_map(in_chm, FP_CORRIDOR_THRESHOLD, BT_NODATA)
    cc_std, cc_mean = dyn_fs_raster_stdmean(dyn_canopy_ndarray, kernel, BT_NODATA)
    cc_smooth = dyn_smooth_cost(dyn_canopy_ndarray, 2.5, [cell_x, cell_y])
    # Bug fix: the original computed a clamped 'avoidance' value and then
    # never used it (the literal 0.4 was passed instead) — dead code removed.
    # 0.4 (avoidance) and 1.5 (exponent) are hard-coded tool defaults.
    cost_clip = dyn_np_cost_raster(dyn_canopy_ndarray, cc_mean, cc_std,
                                   cc_smooth, 0.4, 1.5)

    # skimage shortest path ignores infinite/negative costs, so replace NaN
    negative_cost_clip = np.where(np.isnan(cost_clip), -9999, cost_clip)
    lc_path = LCP_skimage_mcp_connect(negative_cost_clip, out_meta, seed_line)

    if lc_path:
        lc_path_coords = lc_path.coords
    else:
        lc_path_coords = []

    # search for centerline; fall back to the input line when no path found
    if len(lc_path_coords) < 2:
        print('No least cost path detected, use input line.')
        prop['status'] = CenterlineStatus.FAILED.value
        return seed_line, prop, seed_line, None

    # get corridor raster between the two path endpoints
    out_transform = out_meta['transform']
    transformer = rasterio.transform.AffineTransformer(out_transform)
    cell_size = (out_transform[0], -out_transform[4])

    x1, y1 = lc_path_coords[0]
    x2, y2 = lc_path_coords[-1]
    source = [transformer.rowcol(x1, y1)]
    destination = [transformer.rowcol(x2, y2)]
    corridor_thresh_cl = corridor_raster(negative_cost_clip, out_meta, source, destination,
                                         cell_size, FP_CORRIDOR_THRESHOLD)

    # find contiguous corridor polygon and extract centerline
    df = gpd.GeoDataFrame(geometry=[seed_line], crs=out_meta['crs'])
    corridor_poly_gpd = find_corridor_polygon(corridor_thresh_cl, out_transform, df)
    center_line, status = find_centerline(corridor_poly_gpd.geometry.iloc[0], lc_path)
    prop['status'] = status.value

    print(" Searching centerline: line {} ".format(line_id), flush=True)
    return lc_path, prop, center_line, corridor_poly_gpd
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
if __name__ == '__main__':
    # Script entry point: parse the standard BERA tool arguments and run the
    # least-cost-path centerline tool, reporting elapsed wall-clock time.
    in_args, in_verbose = check_arguments()
    start_time = time.time()
    LCP_centerline(print, **in_args.input, processes=int(in_args.processes), verbose=in_verbose)
    print('Elapsed time: {}'.format(time.time() - start_time))
|
|
@@ -0,0 +1,363 @@
|
|
|
1
|
+
import time
|
|
2
|
+
import itertools
|
|
3
|
+
|
|
4
|
+
from line_footprint_functions import *
|
|
5
|
+
from common import *
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def line_footprint(callback, in_line, in_canopy, in_cost, corridor_th_value, max_ln_width,
                   exp_shk_cell, out_footprint, out_centerline, processes, verbose):
    """Generate line footprint polygons (and optionally centerlines).

    Args:
        callback: progress-reporting callable.
        in_line: input line shapefile path.
        in_canopy / in_cost: canopy and cost raster paths.
        corridor_th_value: corridor threshold used when no 'CorridorTh' column exists.
        max_ln_width: maximum line width (map units) for clipping rasters.
        exp_shk_cell: cell count for the expand/shrink morphology step.
        out_footprint: output footprint shapefile path.
        out_centerline: optional output centerline shapefile path.
        processes, verbose: multiprocessing controls.

    Bug fix: the closing timing message referenced start_time, which was only
    defined in the __main__ guard, raising NameError whenever this function
    was called from the GUI; the timer is now started inside the function.
    """
    start_time = time.time()

    corridor_th_field = 'CorridorTh'
    line_seg = gpd.GeoDataFrame.from_file(in_line)
    max_ln_width = float(max_ln_width)
    exp_shk_cell = int(exp_shk_cell)

    if not compare_crs(vector_crs(in_line), raster_crs(in_canopy)):
        print("Line and canopy have different spatial references, please check.")
        return

    if not compare_crs(vector_crs(in_line), raster_crs(in_cost)):
        print("Line and cost have different spatial references, please check.")
        return

    if 'OLnFID' not in line_seg.columns.array:
        print("Cannot find 'OLnFID' column in input line data.\n 'OLnFID' will be created")
        line_seg['OLnFID'] = line_seg.index

    if 'CorridorTh' not in line_seg.columns.array:
        if BT_DEBUGGING:
            print("Cannot find 'CorridorTh' column in input line data")
            # fix: message previously had an unbalanced closing quote
            print("New column created: 'CorridorTh'")
        line_seg['CorridorTh'] = corridor_th_value
    else:
        # a user-supplied column takes precedence over the tool argument
        corridor_th_value = float(9999999)
    if 'OLnSEG' not in line_seg.columns.array:
        line_seg['OLnSEG'] = 0

    ori_total_feat = len(line_seg)

    # NOTE(review): per-vertex segmentation is hard-wired off here — confirm
    # whether this should be exposed as a tool argument.
    proc_segments = False
    if proc_segments:
        print("Splitting lines into segments...")
        line_seg = split_into_segments(line_seg)
        print("Splitting lines into segments... Done")
    else:
        line_seg = split_into_equal_nth_segments(line_seg)

    line_args = line_prepare(callback, line_seg, in_canopy, in_cost, corridor_th_field, corridor_th_value,
                             max_ln_width, exp_shk_cell, proc_segments, out_footprint, out_centerline, ori_total_feat)

    # pass single line one at a time for footprint
    footprint_list = []
    poly_list = []
    centerline_list = []

    process_single_line = process_single_line_segment
    if GROUPING_SEGMENT:
        process_single_line = process_single_line_whole

    feat_list = execute_multiprocessing(process_single_line, line_args, 'Line footprint',
                                        processes, 1, verbose=verbose)

    print('Generating shapefile ...', flush=True)

    if feat_list:
        for result in feat_list:
            footprint_list.append(result[0])
            poly_list.append(result[1])

            for item in result[2]:
                if item:
                    centerline_list.append(item)

        results = gpd.GeoDataFrame(pd.concat(footprint_list))
        results = results.sort_values(by=['OLnFID', 'OLnSEG'])
        results = results.reset_index(drop=True)

        # dissolved polygon group by column 'OLnFID'
        dissolved_results = results.dissolve(by='OLnFID', as_index=False)
        dissolved_results = dissolved_results.drop(columns=['OLnSEG'])
        print("Saving output ...", flush=True)
        dissolved_results.to_file(out_footprint)

        # detect centerlines
        if out_centerline:
            # dissolved polygon group by column 'OLnFID'
            print("Saving polygons for generating centerlines ...", flush=True)
            polys_for_centerline = gpd.GeoDataFrame(pd.concat(poly_list))
            polys_for_centerline = polys_for_centerline.dissolve(by='OLnFID', as_index=False)

            # save polygons next to the centerline output with a '_poly' suffix
            path = Path(out_centerline)
            path = path.with_stem(path.stem + '_poly')
            polys_for_centerline.to_file(path.as_posix())

            centerline_gpd = gpd.GeoDataFrame(geometry=centerline_list, crs=polys_for_centerline.crs)
            centerline_gpd.to_file(out_centerline)
            print("Centerline file saved", flush=True)

    print(f'%{100}')
    print(f'Finishing footprint processing in {time.time() - start_time} seconds')
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def field_name_list(fc):
    """Return the column names of a GeoDataFrame, or of a vector file path."""
    if isinstance(fc, gpd.GeoDataFrame):
        return fc.columns.array
    return gpd.read_file(fc).columns.array
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def has_field(fc, fi):
    """Return True if field *fi* exists in *fc* (GeoDataFrame or file path).

    Special case: a missing 'CorridorTh' field is created in the source file
    with a default threshold of 3.0 and treated as present.
    """
    field_list = field_name_list(fc)
    if fi in field_list:
        # fix: message was a plain string printing literal '{fi}' — now an f-string
        print(f"column: {fi} is found")
        return True
    elif fi == 'CorridorTh':
        shapefile = gpd.GeoDataFrame.from_file(fc)
        for row in range(0, len(shapefile)):
            shapefile.loc[row, fi] = 3.0

        # persist the new column back to the source file
        shapefile.to_file(fc)
        print("Warning: There is no field named {} in the input data".format('CorridorTh'))
        print("Field: 'CorridorTh' is added and default threshold (i.e.3) is adopted")
        return True
    else:
        # fix: broken format string (missing f-prefix) printed '{fi}' literally
        print(f"Warning: There is no field named {fi} in the input data")
        return False
|
|
131
|
+
|
|
132
|
+
|
|
133
|
+
def process_single_line_whole(line):
    """Process every segment of one line and merge the per-segment results.

    Args:
        line: list of segment-attribute dicts belonging to one original line.

    Returns:
        (merged footprint, dissolved corridor polygons, list of centerlines).
    """
    footprints = []
    seg_polys = []
    centerlines = []

    for segment in line:
        result = process_single_line_segment(segment)
        if not result:
            print('No footprint or centerline found.')
            continue
        footprints.append(result[0])
        seg_polys.append(result[1])
        centerlines.append(result[2])

    polys = None
    if seg_polys:
        polys = pd.concat(seg_polys).dissolve()

    footprint_merge = None
    if footprints:
        if all(item is None for item in footprints):
            print(f'Empty footprint returned.')
        else:
            footprint_merge = pd.concat(footprints)
            # NOTE(review): the results of dissolve()/drop() below are
            # discarded — presumably they were meant to be assigned back;
            # confirm intent before changing.
            footprint_merge.dissolve()
            footprint_merge.drop(columns=['OLnSEG'])

    if len(line) > 0:
        print(f"Processing line: {line[0]['OLnFID']}, done.", flush=True)

    return footprint_merge, polys, centerlines
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
def process_single_line_segment(dict_segment):
    """Compute the footprint polygon, corridor polygon and centerline for one
    line (a whole line or a single segment, depending on the caller).

    Args:
        dict_segment: record dict prepared by line_prepare(): segment geometry
            and attributes plus the tool arguments.

    Returns:
        (footprint GeoDataFrame, corridor polygon GeoDataFrame, centerline),
        or None when processing raises.
    """
    in_canopy = dict_segment['in_canopy']
    in_cost = dict_segment['in_cost']
    corridor_th_value = dict_segment['corridor_th_value']
    line_gpd = dict_segment['line_gpd']

    # Fall back to the default threshold (3.0) on invalid or negative values.
    try:
        corridor_th_value = float(corridor_th_value)
        if corridor_th_value < 0.0:
            corridor_th_value = 3.0
    except ValueError as e:
        print(e)
        corridor_th_value = 3.0

    max_ln_width = dict_segment['max_ln_width']
    exp_shk_cell = dict_segment['exp_shk_cell']
    shapefile_proj = dict_segment['Proj_crs']

    FID = dict_segment['OLnSEG']  # segment line feature ID
    OID = dict_segment['OLnFID']  # original line ID for segment line

    segment_list = []
    feat = dict_segment['geometry']
    for coord in feat.coords:
        segment_list.append(coord)

    # Find origin and destination coordinates of the segment.
    # (The original also built single-point GeoDataFrames here; they were
    # never used — row/col indices are computed from the transform below.)
    x1, y1 = segment_list[0][0], segment_list[0][1]
    x2, y2 = segment_list[-1][0], segment_list[-1][1]

    # Buffer around line and clip cost raster and canopy raster
    # TODO: deal with NODATA
    clip_cost, out_meta = clip_raster(in_cost, feat, max_ln_width)
    out_transform = out_meta['transform']
    cell_size_x = out_transform[0]
    cell_size_y = -out_transform[4]

    if not HAS_COST_RASTER:
        clip_cost, clip_canopy = cost_raster(clip_cost, out_meta)
    else:
        clip_canopy, out_meta = clip_raster(in_canopy, feat, max_ln_width)

    # Work out the corridor from both ends of the centerline
    try:
        if len(clip_canopy.shape) > 2:
            clip_canopy = np.squeeze(clip_canopy, axis=0)

        transformer = rasterio.transform.AffineTransformer(out_transform)
        source = [transformer.rowcol(x1, y1)]
        destination = [transformer.rowcol(x2, y2)]

        corridor_thresh = corridor_raster(clip_cost, out_meta, source, destination,
                                          (cell_size_x, cell_size_y), corridor_th_value)

        def _morph_raster(corridor_arr, canopy_arr, exp_cells, cell_width):
            # Stamp canopy onto the corridor: cells where the sum is zero
            # (inside corridor AND canopy-free) become the footprint class.
            stamped = corridor_arr + canopy_arr
            raster_class = np.ma.where(stamped == 0, 1, 0).data

            if exp_cells > 0 and cell_width < 1:
                # Expand then shrink (FLM Expand/Shrink equivalent)
                size = int(exp_cells * 2 + 1)
                expanded = ndimage.grey_dilation(raster_class, size=(size, size))
                shrunk = ndimage.grey_erosion(expanded, size=(size, size))
            else:
                if BT_DEBUGGING:
                    print('No Expand And Shrink cell performed.')
                shrunk = raster_class

            # Boundary clean (sigma=0 leaves values unchanged besides dtype)
            return ndimage.gaussian_filter(shrunk, sigma=0, mode='nearest')

        # Bug fix: the original nested function ignored all of its parameters
        # and used closure variables instead, and was invoked with in_canopy
        # (a file path) where the canopy array belongs. It now uses its
        # parameters and receives the clipped canopy array.
        clean_raster = _morph_raster(corridor_thresh, clip_canopy, exp_shk_cell, cell_size_x)

        # mask for non-polygon area
        msk = np.where(clean_raster == 1, True, False)

        # Process: ndarray to shapely Polygon
        out_polygon = features.shapes(clean_raster, mask=msk, transform=out_transform)

        # create a shapely multipolygon
        multi_polygon = []
        for shp, value in out_polygon:
            multi_polygon.append(shapely.geometry.shape(shp))
        poly = shapely.geometry.MultiPolygon(multi_polygon)

        # create a pandas dataframe for the footprint
        out_data = pd.DataFrame({'OLnFID': [OID], 'OLnSEG': [FID], 'geometry': poly})
        out_gdata = gpd.GeoDataFrame(out_data, geometry='geometry', crs=shapefile_proj)

        if not GROUPING_SEGMENT:
            print(f"LP:PSLS: Processing line ID: {dict_segment['OLnSEG']}, done.", flush=True)

        # find contiguous corridor polygon for centerline
        corridor_poly_gpd = find_corridor_polygon(corridor_thresh, out_transform, line_gpd)
        centerline, status = find_centerline(corridor_poly_gpd.geometry.iloc[0], feat)

        return out_gdata, corridor_poly_gpd, centerline

    except Exception as e:
        # best-effort per-segment processing: report and skip this segment
        print(f'Exception: {e}')
        return None
|
|
292
|
+
|
|
293
|
+
|
|
294
|
+
def line_prepare(callback, line_seg, in_canopy, in_cost, corridor_th_field, corridor_th_value,
                 max_ln_width, exp_shk_cell, proc_seg, out_footprint, out_centerline, ori_total_feat):
    """Turn a GeoDataFrame of line segments into per-segment argument records.

    Each record carries the segment geometry, its original attribute values
    and every tool argument needed by process_single_line_segment().

    Returns:
        If GROUPING_SEGMENT is set, a list of lists of records grouped by
        'OLnFID'; otherwise a flat list of records.
    """
    # original column names (kept so downstream code can restore them)
    field_list_col = field_name_list(line_seg)
    keep_field_name = [col for col in line_seg.columns if col != 'geometry']

    # guard clause: abort early on an empty feature class
    if len(line_seg) == 0:
        print("Input line feature is corrupted, exit!")
        # fix: exit() depends on the optional 'site' module; raise directly
        raise SystemExit()

    list_of_segment = []
    for row in range(0, len(line_seg)):
        # keep a one-row GeoDataFrame alongside the bare geometry object
        line_gpd = line_seg.loc[[row]]
        feat = line_gpd.geometry.iloc[0]
        if feat:
            feature_attributes = {'seg_length': feat.length, 'geometry': feat,
                                  'Proj_crs': line_seg.crs, 'line_gpd': line_gpd}
            for col_name in keep_field_name:
                feature_attributes[col_name] = line_seg.loc[row, col_name]
            list_of_segment.append(feature_attributes)

    print(f"There are {ori_total_feat} lines to be processed.")

    # Add tool arguments to every record
    for record in list_of_segment:
        record['in_canopy'] = in_canopy
        record['in_cost'] = in_cost
        record['corridor_th_field'] = corridor_th_field
        record['corridor_th_value'] = record['CorridorTh']
        record['max_ln_width'] = max_ln_width
        record['exp_shk_cell'] = exp_shk_cell
        record['proc_seg'] = proc_seg
        record['out_footprint'] = out_footprint
        record['out_centerline'] = out_centerline
        record['org_col'] = field_list_col

    # TODO: data type changed - return list of GeoDataFrame represents each line or segment
    if GROUPING_SEGMENT:
        # Group consecutive segments by line id. NOTE: itertools.groupby only
        # merges adjacent keys, so records are assumed to arrive ordered by
        # 'OLnFID' (row order produced by the split functions).
        def key_func(record):
            return record['OLnFID']

        return [list(group) for _key, group in itertools.groupby(list_of_segment, key_func)]
    else:
        return list_of_segment
|
|
354
|
+
|
|
355
|
+
|
|
356
|
+
if __name__ == '__main__':
    # Script entry point: echo start/end timestamps around the footprint run.
    start_time = time.time()
    print('Footprint processing started')
    print(f'Current time: {time.strftime("%b %Y %H:%M:%S", time.localtime())}')

    in_args, in_verbose = check_arguments()
    line_footprint(print, **in_args.input, processes=int(in_args.processes), verbose=in_verbose)
    print(f'Current time: {time.strftime("%b %Y %H:%M:%S", time.localtime())}')
|